From eea2739c2f4c1ec83eddbcc20998b58862b22808 Mon Sep 17 00:00:00 2001 From: John Sirois Date: Sun, 16 Feb 2025 19:26:43 -0800 Subject: [PATCH] Add support for targeting musl Linux. (#138) --- .github/workflows/ci.yml | 116 ++++++---- .github/workflows/release.yml | 42 ++-- CHANGES.md | 5 + docs/_ext/sphinx_science/toml.py | 26 +-- science/__init__.py | 2 +- science/build_info.py | 4 +- science/commands/__init__.py | 0 science/commands/_psutil.py | 78 +++++++ science/commands/build.py | 12 +- science/commands/complete.py | 6 +- science/commands/doc.py | 9 +- science/commands/download.py | 25 +-- science/commands/lift.py | 34 +-- science/config.py | 19 +- science/data.py | 68 +++++- science/dataclass/deserializer.py | 63 +++++- science/dataclass/reflect.py | 10 +- science/exe.py | 67 ++++-- science/model.py | 29 ++- science/platform.py | 210 ++++++++++++++++--- science/providers/pypy.py | 29 +-- science/providers/python_build_standalone.py | 39 ++-- tests/data/PBS-gnu-and-musl.toml | 63 ++++++ tests/data/platform-specs.toml | 19 ++ tests/test_config.py | 73 ++++++- tests/test_download.py | 4 +- tests/test_exe.py | 11 +- tests/test_installer.py | 11 + uv.lock | 1 + 29 files changed, 861 insertions(+), 214 deletions(-) create mode 100644 science/commands/__init__.py create mode 100644 science/commands/_psutil.py create mode 100644 tests/data/PBS-gnu-and-musl.toml create mode 100644 tests/data/platform-specs.toml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index eca1469..59b51d5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -38,19 +38,28 @@ jobs: # non-deprecated ARM Mac runner. include: - os: ubuntu-24.04 - name: Linux x86-64 + name: Linux x86-64 (musl) + docker-image: python:3.12-alpine + docker-platform: linux/amd64 + - os: ubuntu-24.04 + name: Linux x86-64 (glibc) + docker-image: python:3.12-bookworm docker-platform: linux/amd64 - os: ubuntu-24.04 name: Linux aarch64 + docker-image: python:3.12-bookworm docker-platform: linux/arm64 - os: ubuntu-24.04 name: Linux armv7l + docker-image: python:3.12-bookworm docker-platform: linux/arm/v7 - os: ubuntu-24.04 name: Linux s390x + docker-image: python:3.12-bookworm docker-platform: linux/s390x - os: ubuntu-24.04 name: Linux powerpc64le + docker-image: python:3.12-bookworm docker-platform: linux/ppc64le - os: macos-13 name: macOS x86-64 @@ -65,10 +74,10 @@ jobs: SCIENCE_AUTH_API_GITHUB_COM_BEARER: ${{ secrets.GITHUB_TOKEN }} steps: - name: Install the latest version of uv - if: matrix.docker-platform == '' && matrix.os != 'windows-arm64' + if: matrix.docker-image == '' && matrix.os != 'windows-arm64' uses: astral-sh/setup-uv@v5 - name: Setup uv - if: matrix.docker-platform == '' && matrix.os != 'windows-arm64' + if: matrix.docker-image == '' && matrix.os != 'windows-arm64' run: | export UV="$(which uv)" "${UV}" -V @@ -98,7 +107,7 @@ jobs: "${UV}" python install ${UV_PYTHON_VERSION} echo UV_PYTHON="${UV_PYTHON_VERSION}" >> ${GITHUB_ENV} - name: Installing emulators - if: matrix.docker-platform != '' + if: matrix.docker-image != '' run: docker run --privileged --rm tonistiigi/binfmt --install all - name: Checkout Lift uses: actions/checkout@v4 @@ -112,25 +121,32 @@ jobs: key: ${{ matrix.docker-platform || format('{0}-{1}', matrix.os, runner.arch) }}-a-scie-lift-mypy-v1-${{ github.run_id }} restore-keys: ${{ matrix.docker-platform || format('{0}-{1}', matrix.os, runner.arch) }}-a-scie-lift-mypy-v1 - name: Check Formatting & Lints - if: matrix.docker-platform == '' + if: matrix.docker-image == '' run: | 
"${UV}" run dev-cmd ci --skip test - name: Check Formatting & Lints - if: matrix.docker-platform != '' + if: matrix.docker-image != '' run: | + cat << EOF > _fmt_lint_check.sh + pip install --root-user-action ignore uv + addgroup --gid $(id -g) build + if [ "${{ matrix.docker-image }}" = "python:3.12-alpine" ]; then + adduser -D -g '' -G build -u $(id -u) build + apk add gcc git linux-headers musl-dev python3-dev + else + adduser --disabled-password --gecos '' --gid $(id -g) --uid $(id -u) build + fi + + su build -c 'uv run dev-cmd ci --skip test' + EOF + docker run --rm \ -e FORCE_COLOR \ -e SCIENCE_AUTH_API_GITHUB_COM_BEARER \ -v $PWD:/code \ -w /code \ --platform ${{ matrix.docker-platform }} \ - python:3.12-bookworm \ - bash -c " - pip install --root-user-action ignore uv && - addgroup --gid $(id -g) build && - adduser --disabled-password --gecos '' --gid $(id -g) --uid $(id -u) build && - su build -c 'uv run dev-cmd ci --skip test' - " + ${{ matrix.docker-image }} sh -eu _fmt_lint_check.sh - name: Cache MyPy uses: actions/cache/save@v4 if: github.ref == 'refs/heads/main' @@ -144,26 +160,34 @@ jobs: echo PYTEST_ADDOPTS="--basetemp C:/tmp/gha/pytest" >> ${GITHUB_ENV} echo SCIE_BASE=C:/tmp/gha/nce >> ${GITHUB_ENV} - name: Unit Tests - if: matrix.docker-platform == '' + if: matrix.docker-image == '' run: | "${UV}" run dev-cmd test -- -vvs - name: Unit Tests - if: matrix.docker-platform != '' + if: matrix.docker-image != '' run: | - cat << EOF > _ci_test.sh - set -euo pipefail - if [[ "${{ matrix.docker-platform }}" == "linux/s390x" ]]; then + cat << EOF > _test.sh + if [ "${{ matrix.docker-platform }}" = "linux/s390x" ]; then # This hack gets the PyPy provider tests working on this image. The old PyPy s390x # distributions dynamically link libffi at an older version than I've been able to # find a multi-platform image with s390x support for. ln -s /usr/lib/s390x-linux-gnu/libffi.so.8 /usr/lib/s390x-linux-gnu/libffi.so.6 - elif [[ "${{ matrix.docker-platform }}" == "linux/ppc64le" ]]; then + elif [ "${{ matrix.docker-platform }}" = "linux/ppc64le" ]; then echo "Skipping tests on ppc64le." exit 0 fi + pip install --root-user-action ignore uv addgroup --gid $(id -g) build - adduser --disabled-password --gecos '' --gid $(id -g) --uid $(id -u) build + if [ "${{ matrix.docker-image }}" = "python:3.12-alpine" ]; then + adduser -D -g '' -G build -u $(id -u) build + # N.B.: The bash and curl packages are additional needs for tests. The rest just + # supports building the psutil wheel needed for the science Python distribution. 
+ apk add bash curl gcc git linux-headers musl-dev python3-dev + else + adduser --disabled-password --gecos '' --gid $(id -g) --uid $(id -u) build + fi + su build -c "uv run dev-cmd test -- -vvs" EOF @@ -173,47 +197,61 @@ jobs: -v $PWD:/code \ -w /code \ --platform ${{ matrix.docker-platform }} \ - python:3.12-bookworm bash _ci_test.sh + ${{ matrix.docker-image }} sh -eu _test.sh - name: Build & Package - if: matrix.docker-platform == '' + if: matrix.docker-image == '' run: | "${UV}" run dev-cmd package - name: Build & Package - if: matrix.docker-platform != '' + if: matrix.docker-image != '' run: | + cat << EOF > _package.sh + pip install --root-user-action ignore uv + addgroup --gid $(id -g) build + if [ "${{ matrix.docker-image }}" = "python:3.12-alpine" ]; then + adduser -D -g '' -G build -u $(id -u) build + apk add gcc git linux-headers musl-dev python3-dev + else + adduser --disabled-password --gecos '' --gid $(id -g) --uid $(id -u) build + fi + + su build -c 'uv run dev-cmd package' + EOF + docker run --rm \ -e FORCE_COLOR \ -e SCIENCE_AUTH_API_GITHUB_COM_BEARER \ -v $PWD:/code \ -w /code \ --platform ${{ matrix.docker-platform }} \ - python:3.12-bookworm \ - bash -c " - pip install --root-user-action ignore uv && - addgroup --gid $(id -g) build && - adduser --disabled-password --gecos '' --gid $(id -g) --uid $(id -u) build && - su build -c 'uv run dev-cmd package' - " + ${{ matrix.docker-image }} sh -eu _package.sh - name: Generate Doc Site - if: matrix.docker-platform == '' + if: matrix.docker-image == '' run: | "${UV}" run dev-cmd doc linkcheck - name: Generate Doc Site - if: matrix.docker-platform != '' + if: matrix.docker-image != '' run: | + cat << EOF > _doc_linkcheck.sh + pip install --root-user-action ignore uv + addgroup --gid $(id -g) build + if [ "${{ matrix.docker-image }}" = "python:3.12-alpine" ]; then + adduser -D -g '' -G build -u $(id -u) build + apk add gcc git linux-headers musl-dev python3-dev + else + adduser --disabled-password --gecos '' --gid $(id -g) --uid $(id -u) build + fi + + su build -c 'uv run dev-cmd doc linkcheck' + EOF + docker run --rm \ -e FORCE_COLOR \ -e SCIENCE_AUTH_API_GITHUB_COM_BEARER \ -v $PWD:/code \ -w /code \ --platform ${{ matrix.docker-platform }} \ - python:3.12-bookworm \ - bash -c " - pip install --root-user-action ignore uv && - addgroup --gid $(id -g) build && - adduser --disabled-password --gecos '' --gid $(id -g) --uid $(id -u) build && - su build -c 'uv run dev-cmd doc linkcheck' - " + ${{ matrix.docker-image }} sh -eu _doc_linkcheck.sh - name: Cleanup if: always() run: | diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 957449a..4f088f6 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -55,19 +55,28 @@ jobs: # non-deprecated ARM Mac runner. 
include: - os: ubuntu-24.04 - name: Linux x86-64 + name: Linux x86-64 (musl) + docker-image: python:3.12-alpine + docker-platform: linux/amd64 + - os: ubuntu-24.04 + name: Linux x86-64 (glibc) + docker-image: python:3.12-bookworm docker-platform: linux/amd64 - os: ubuntu-24.04 name: Linux aarch64 + docker-image: python:3.12-bookworm docker-platform: linux/arm64 - os: ubuntu-24.04 name: Linux armv7l + docker-image: python:3.12-bookworm docker-platform: linux/arm/v7 - os: ubuntu-24.04 name: Linux s390x + docker-image: python:3.12-bookworm docker-platform: linux/s390x - os: ubuntu-24.04 name: Linux powerpc64le + docker-image: python:3.12-bookworm docker-platform: linux/ppc64le - os: macos-13 name: macOS x86-64 @@ -88,10 +97,10 @@ jobs: discussions: write steps: - name: Install the latest version of uv - if: matrix.docker-platform == '' && matrix.os != 'windows-arm64' + if: matrix.docker-image == '' && matrix.os != 'windows-arm64' uses: astral-sh/setup-uv@v5 - name: Setup uv - if: matrix.docker-platform == '' && matrix.os != 'windows-arm64' + if: matrix.docker-image == '' && matrix.os != 'windows-arm64' run: | export UV="$(which uv)" "${UV}" -V @@ -121,32 +130,39 @@ jobs: "${UV}" python install ${UV_PYTHON_VERSION} echo UV_PYTHON_ARGS="--python ${UV_PYTHON_VERSION}" >> ${GITHUB_ENV} - name: Installing emulators - if: matrix.docker-platform != '' + if: matrix.docker-image != '' run: docker run --privileged --rm tonistiigi/binfmt --install all - name: Checkout lift ${{ needs.determine-tag.outputs.release-tag }} uses: actions/checkout@v4 with: ref: ${{ needs.determine-tag.outputs.release-tag }} - name: Package science ${{ needs.determine-tag.outputs.release-tag }} binary - if: matrix.docker-platform == '' + if: matrix.docker-image == '' run: | "${UV}" run ${UV_PYTHON_ARGS} dev-cmd package - name: Package science ${{ needs.determine-tag.outputs.release-tag }} binary - if: matrix.docker-platform != '' + if: matrix.docker-image != '' run: | + cat << EOF > _package.sh + pip install --root-user-action ignore uv + addgroup --gid $(id -g) build + if [ "${{ matrix.docker-image }}" = "python:3.12-alpine" ]; then + adduser -D -g '' -G build -u $(id -u) build + apk add gcc git linux-headers musl-dev python3-dev + else + adduser --disabled-password --gecos '' --gid $(id -g) --uid $(id -u) build + fi + + su build -c 'uv run dev-cmd package' + EOF + docker run --rm \ -e FORCE_COLOR \ -e SCIENCE_AUTH_API_GITHUB_COM_BEARER \ -v $PWD:/code \ -w /code \ --platform ${{ matrix.docker-platform }} \ - python:3.12-bookworm \ - bash -c " - pip install --root-user-action ignore uv && - addgroup --gid $(id -g) build && - adduser --disabled-password --gecos '' --gid $(id -g) --uid $(id -u) build && - su build -c 'uv run dev-cmd package' - " + ${{ matrix.docker-image }} sh -eu _package.sh - name: Generate science ${{ needs.determine-tag.outputs.release-tag }} artifact attestations uses: actions/attest-build-provenance@v1 with: diff --git a/CHANGES.md b/CHANGES.md index 0eecc1b..16b61a4 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,5 +1,10 @@ # Release Notes +## 0.12.0 + +This release adds support for targeting musl libc systems and dogfoods this to ship a `science` +scie for 64 bit musl Linux. + ## 0.11.3 Upgrade the science internal Python distribution to [PBS][PBS] CPython 3.12.9. 
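As a concrete companion to the 0.12.0 release note above, here is a minimal, hypothetical lift manifest targeting 64 bit musl Linux. It is not part of this change: the manifest name and command are illustrative only, while the `platforms` table form and the interpreter `libc` field are taken from the `science/platform.py` docstring and the `tests/data/*.toml` fixtures added later in this diff.

```toml
[lift]
name = "musl-example"
description = "Hypothetical manifest targeting 64 bit musl Linux."
platforms = [
    {platform = "linux-x86_64", libc = "musl"},
]

[[lift.interpreters]]
id = "cpython"
provider = "PythonBuildStandalone"
version = "3.13.2"
libc = "musl"

[[lift.commands]]
exe = "#{cpython:python}"
args = ["-V"]
```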
diff --git a/docs/_ext/sphinx_science/toml.py b/docs/_ext/sphinx_science/toml.py index a4809c9..0519a7e 100644 --- a/docs/_ext/sphinx_science/toml.py +++ b/docs/_ext/sphinx_science/toml.py @@ -31,13 +31,13 @@ class TOMLType: label: str - def render_value(self, value: Any) -> str: + def render_value(self, value: Any) -> Any: return repr(value) @dataclass(frozen=True) class PrimitiveType(TOMLType): - def render_value(self, value: Any) -> str: + def render_value(self, value: Any) -> Any: if isinstance(value, bool): return "true" if value else "false" return repr(value) @@ -51,8 +51,8 @@ def create(cls, item_type: TOMLType) -> Self: item_type: TOMLType - def render_value(self, value: Any) -> str: - return repr([self.item_type.render_value(item) for item in value]) + def render_value(self, value: Any) -> Any: + return [self.item_type.render_value(item) for item in value] @dataclass(frozen=True) @@ -63,10 +63,10 @@ def create(cls, value_type: TOMLType) -> Self: value_type: TOMLType | None = None - def render_value(self, value: Any) -> str: + def render_value(self, value: Any) -> Any: if self.value_type: - return repr({key: self.value_type.render_value(val) for key, val in value.items()}) - return repr(value) + return {key: self.value_type.render_value(val) for key, val in value.items()} + return value @dataclass(frozen=True) @@ -84,9 +84,9 @@ def for_enum( renderer=lambda value: enum_type(value).value, ) - renderer: Callable[[Any], str] + renderer: Callable[[Any], Any] - def render_value(self, value: Any) -> str: + def render_value(self, value: Any) -> Any: return self.renderer(value) @@ -105,9 +105,9 @@ def for_type_info( renderer=lambda value: toml_type_factory(type(value)).render_value(value), ) - renderer: Callable[[Any], str] + renderer: Callable[[Any], Any] - def render_value(self, value: Any) -> str: + def render_value(self, value: Any) -> Any: return self.renderer(value) @@ -270,7 +270,9 @@ def render_dataclass(self, data_type: type[Dataclass]) -> Iterator[nodes.Node]: fields.extendleft(dataclass_info(field_dataclass_type).field_info) continue - field_section = dataclass_section.create_subsection(title=field.name, name=field.name) + field_section = dataclass_section.create_subsection( + title=field.display_name, name=field.display_name + ) field_section.extend(self.render_field(field, owner=class_info.type)) if self._recurse_tables: diff --git a/science/__init__.py b/science/__init__.py index c53dcfd..af02635 100644 --- a/science/__init__.py +++ b/science/__init__.py @@ -3,6 +3,6 @@ from packaging.version import Version -__version__ = "0.11.3" +__version__ = "0.12.0" VERSION = Version(__version__) diff --git a/science/build_info.py b/science/build_info.py index 045e78b..2783577 100644 --- a/science/build_info.py +++ b/science/build_info.py @@ -18,7 +18,7 @@ from science.doc import DOC_SITE_URL from science.frozendict import FrozenDict from science.hashing import Digest, Provenance -from science.platform import CURRENT_PLATFORM +from science.platform import CURRENT_PLATFORM_SPEC logger = logging.getLogger(__name__) @@ -63,7 +63,7 @@ def to_dict(self, **extra_app_info: Any) -> dict[str, Any]: version=__version__, url=( f"https://github.com/a-scie/lift/releases/download/v{__version__}/" - f"{CURRENT_PLATFORM.qualified_binary_name("science")}" + f"{CURRENT_PLATFORM_SPEC.qualified_binary_name("science")}" ), ) if self.digest: diff --git a/science/commands/__init__.py b/science/commands/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/science/commands/_psutil.py 
b/science/commands/_psutil.py new file mode 100644 index 0000000..e3957cc --- /dev/null +++ b/science/commands/_psutil.py @@ -0,0 +1,78 @@ +# Copyright 2025 Science project contributors. +# Licensed under the Apache License, Version 2.0 (see LICENSE). + +"""Just enough stubbing to get a version of psutil that limps along for musl systems. + +Our PBS interpreter does not support dynamic linking and psutil builds with shared objects. As such, +we stub out just enough functionality for the complete and doc commands to function on musl systems. +""" + +from __future__ import annotations + +import dataclasses +import errno +import os +import signal +from dataclasses import dataclass +from typing import Iterable + +from science.platform import CURRENT_PLATFORM_SPEC + +try: + import psutil + + Error = psutil.Error + Process = psutil.Process + NoSuchProcess = psutil.NoSuchProcess +except ImportError: + + class Error(Exception): # type: ignore[no-redef] + pass + + class NoSuchProcess(Error): # type: ignore[no-redef] + pass + + @dataclass(frozen=True) + class Process: # type: ignore[no-redef] + @staticmethod + def unavailable_error() -> Error: + return Error(f"The psutil module is not available on {CURRENT_PLATFORM_SPEC}.") + + pid: int = dataclasses.field(default_factory=os.getpid) + + def create_time(self) -> float | None: + return None + + def cmdline(self) -> Iterable[str]: + raise self.unavailable_error() + + def parent(self) -> Process | None: + return None + + def name(self) -> str: + return "" + + def is_running(self) -> bool: + try: + os.kill(self.pid, 0) + return True + except OSError: + return False + + def terminate(self) -> None: + try: + os.kill(self.pid, signal.SIGTERM) + except OSError as e: + if e.errno == errno.ESRCH: + raise NoSuchProcess(str(e)) + raise Error(str(e)) + + def kill(self) -> None: + try: + # SIGKILL does not exist in Windows, but we have the real psutil importable there; + # so this code is not reached. 
+ os.kill(self.pid, signal.SIGKILL) # type: ignore[attr-defined] + except OSError as e: + if e.errno == errno.ESRCH: + raise NoSuchProcess(str(e)) + raise Error(str(e)) diff --git a/science/commands/build.py b/science/commands/build.py index c799e88..a9b4e1b 100644 --- a/science/commands/build.py +++ b/science/commands/build.py @@ -13,7 +13,7 @@ from science.commands import lift from science.commands.lift import LiftConfig, PlatformInfo from science.model import Application -from science.platform import CURRENT_PLATFORM, Platform +from science.platform import CURRENT_PLATFORM, PlatformSpec @dataclass(frozen=True) @@ -33,7 +33,7 @@ def assemble_scies( lift_config: LiftConfig, application: Application, dest_dir: Path, - platforms: Iterable[Platform], + platform_specs: Iterable[PlatformSpec], platform_info: PlatformInfo, use_jump: Path | None, hash_functions: list[str], @@ -41,13 +41,13 @@ def assemble_scies( native_jump_path = (a_scie.custom_jump(repo_path=use_jump) if use_jump else a_scie.jump()).path scies = list[ScieAssembly]() - for platform, lift_manifest in lift.export_manifest( - lift_config, application, dest_dir=dest_dir, platforms=platforms + for platform_spec, lift_manifest in lift.export_manifest( + lift_config, application, dest_dir=dest_dir, platform_specs=platform_specs ): jump_path = ( a_scie.custom_jump(repo_path=use_jump) if use_jump - else a_scie.jump(specification=application.scie_jump, platform=platform) + else a_scie.jump(specification=application.scie_jump, platform=platform_spec.platform) ).path platform_export_dir = lift_manifest.parent subprocess.run( @@ -58,7 +58,7 @@ def assemble_scies( ) src_binary = platform_export_dir / CURRENT_PLATFORM.binary_name(application.name) - dst_binary_name = platform_info.binary_name(application.name, target_platform=platform) + dst_binary_name = platform_info.binary_name(application.name, target_platform=platform_spec) dst_binary = platform_export_dir / dst_binary_name if src_binary != dst_binary: os.rename(src=src_binary, dst=dst_binary) diff --git a/science/commands/complete.py b/science/commands/complete.py index 4aaefb7..f961ac5 100644 --- a/science/commands/complete.py +++ b/science/commands/complete.py @@ -1,14 +1,16 @@ # Copyright 2023 Science project contributors. # Licensed under the Apache License, Version 2.0 (see LICENSE). 
+import logging from enum import Enum from functools import cache from typing import Self -import psutil - +from science.commands import _psutil as psutil from science.os import EXE_EXT +logger = logging.getLogger(__name__) + class Shell(Enum): @classmethod diff --git a/science/commands/doc.py b/science/commands/doc.py index ca0325a..d628e5c 100644 --- a/science/commands/doc.py +++ b/science/commands/doc.py @@ -17,10 +17,9 @@ from pathlib import Path, PurePath from typing import Any -import psutil - from science import __version__ from science.cache import science_cache +from science.commands import _psutil as psutil from science.platform import CURRENT_PLATFORM logger = logging.getLogger(__name__) @@ -46,10 +45,12 @@ def _render_unix_time(unix_time: float) -> str: class ServerInfo: url: str pid: int - create_time: float + create_time: float | None def __str__(self) -> str: - return f"{self.url} @ {self.pid} (started at {_render_unix_time(self.create_time)})" + if self.create_time: + return f"{self.url} @ {self.pid} (started at {_render_unix_time(self.create_time)})" + return f"{self.url} @ {self.pid}" @dataclass(frozen=True) diff --git a/science/commands/download.py b/science/commands/download.py index 70ce8ce..e3a0674 100644 --- a/science/commands/download.py +++ b/science/commands/download.py @@ -14,7 +14,7 @@ from science.errors import InputError from science.fetcher import fetch_and_verify from science.model import Fetch, Identifier -from science.platform import Platform +from science.platform import Platform, PlatformSpec from science.providers import ProviderInfo logger = logging.getLogger(__name__) @@ -53,7 +53,7 @@ def download_a_scie_executables( def download_provider_distribution( provider_info: ProviderInfo, - platforms: Iterable[Platform], + platform_specs: Iterable[PlatformSpec], explicit_platforms: bool, dest_dir: Path, **kwargs: list[Any], @@ -85,18 +85,16 @@ def iter_values(name) -> Iterator[Tuple[str, Any]]: for field in provider_info.config_fields() if (value := getattr(config, field.name)) ) - supported_platforms = provider.supported_platforms() - for platform in platforms: - if platform not in supported_platforms: - continue - if dist := provider.distribution(platform): + for platform_spec in provider.iter_supported_platforms(platform_specs): + if dist := provider.distribution(platform_spec): assert isinstance(dist.file.source, Fetch), ( - f"Expected {provider_info.name} to fetch distributions by URL but {config_desc} for " - f"{platform} has a source of {dist.file.source}." + f"Expected {provider_info.name} to fetch distributions by URL but " + f"{config_desc} for {platform_spec} has a source of {dist.file.source}." ) dest = base_dir / dist.file.source.url.rel_path click.echo( - f"Downloading {provider_info.name} {config_desc} for {platform} to {dest}...", + f"Downloading {provider_info.name} {config_desc} for {platform_spec} to " + f"{dest}...", err=True, ) result = fetch_and_verify( @@ -111,9 +109,12 @@ def iter_values(name) -> Iterator[Tuple[str, Any]]: f"{result.digest.fingerprint} {exe_flag}{dest.name}" ) elif explicit_platforms: - raise InputError(f"There is no {provider_info.name} {config_desc} for {platform}.") + raise InputError( + f"There is no {provider_info.name} {config_desc} for {platform_spec}." 
+ ) else: click.secho( - f"There is no {provider_info.name} {config_desc} for {platform}, skipping.", + f"There is no {provider_info.name} {config_desc} for {platform_spec}, " + f"skipping.", fg="yellow", ) diff --git a/science/commands/lift.py b/science/commands/lift.py index 9c63a79..143a633 100644 --- a/science/commands/lift.py +++ b/science/commands/lift.py @@ -25,7 +25,7 @@ InterpreterGroup, ScieJump, ) -from science.platform import CURRENT_PLATFORM, Platform +from science.platform import CURRENT_PLATFORM_SPEC, PlatformSpec @dataclass(frozen=True) @@ -71,13 +71,13 @@ def create(cls, application: Application, use_suffix: bool | None = None) -> Pla use_suffix=( use_suffix if use_suffix is not None - else application.platforms != frozenset([CURRENT_PLATFORM]) + else application.platform_specs != frozenset([CURRENT_PLATFORM_SPEC]) ), ) use_suffix: bool - def binary_name(self, name: str, target_platform: Platform) -> str: + def binary_name(self, name: str, target_platform: PlatformSpec) -> str: return ( target_platform.qualified_binary_name(name) if self.use_suffix @@ -92,7 +92,7 @@ class LiftConfig: include_provenance: bool = False app_info: tuple[AppInfo, ...] = () app_name: str | None = None - platforms: tuple[Platform, ...] = () + platform_specs: tuple[PlatformSpec, ...] = () def export_manifest( @@ -100,12 +100,12 @@ def export_manifest( application: Application, dest_dir: Path, *, - platforms: Iterable[Platform] | None = None, -) -> Iterator[tuple[Platform, Path]]: + platform_specs: Iterable[PlatformSpec] | None = None, +) -> Iterator[tuple[PlatformSpec, Path]]: app_info = AppInfo.assemble(lift_config.app_info) - for platform in platforms or application.platforms: - chroot = dest_dir / platform.value + for platform_spec in platform_specs or application.platform_specs: + chroot = dest_dir / platform_spec.value chroot.mkdir(parents=True, exist_ok=True) bindings = list[Command]() @@ -135,11 +135,11 @@ def maybe_invert_lazy(file: File) -> File: return file for interpreter in application.interpreters: - distribution = interpreter.provider.distribution(platform) + distribution = interpreter.provider.distribution(platform_spec) if distribution is None: raise InputError( f"No compatible {providers.name(interpreter.provider)} distribution was found " - f"for {platform}." + f"for {platform_spec}." 
) if distribution: distributions.append(distribution) @@ -155,7 +155,7 @@ def maybe_invert_lazy(file: File) -> File: isinstance(file.source, Fetch) and file.source.lazy for file in requested_files ) if application.ptex or fetches_present: - ptex = a_scie.ptex(specification=application.ptex, platform=platform) + ptex = a_scie.ptex(specification=application.ptex, platform=platform_spec.platform) (chroot / ptex.binary_name).symlink_to(ptex.path) ptex_key = application.ptex.id if application.ptex and application.ptex.id else "ptex" ptex_file = File( @@ -240,7 +240,7 @@ def maybe_invert_lazy(file: File) -> File: load_dotenv=application.load_dotenv, base=application.base, scie_jump=application.scie_jump or ScieJump(), - platform=platform, + platform_spec=platform_spec, distributions=distributions, interpreter_groups=application.interpreter_groups, files=requested_files, @@ -250,7 +250,7 @@ def maybe_invert_lazy(file: File) -> File: build_info=build_info, app_info=app_info, ) - yield platform, lift_manifest + yield platform_spec, lift_manifest def _render_file(file: File) -> dict[str, Any]: @@ -275,7 +275,7 @@ def _render_file(file: File) -> dict[str, Any]: def _render_command( command: Command, - platform: Platform, + platform_spec: PlatformSpec, distributions: Iterable[Distribution], interpreter_groups: Iterable[InterpreterGroup], ) -> tuple[str, dict[str, Any]]: @@ -285,7 +285,7 @@ def expand_placeholders(text: str) -> str: for distribution in distributions: text = distribution.expand_placeholders(text) for interpreter_group in interpreter_groups: - text, ig_env = interpreter_group.expand_placeholders(platform, text) + text, ig_env = interpreter_group.expand_placeholders(platform_spec, text) env.update(ig_env) return text @@ -320,7 +320,7 @@ def _emit_manifest( load_dotenv: bool, base: str | None, scie_jump: ScieJump, - platform: Platform, + platform_spec: PlatformSpec, distributions: Iterable[Distribution], interpreter_groups: Iterable[InterpreterGroup], files: Iterable[File], @@ -335,7 +335,7 @@ def render_files() -> list[dict[str, Any]]: def render_commands(cmds: Iterable[Command]) -> dict[str, dict[str, Any]]: return dict( - _render_command(cmd, platform, distributions, interpreter_groups) for cmd in cmds + _render_command(cmd, platform_spec, distributions, interpreter_groups) for cmd in cmds ) lift_data = { diff --git a/science/config.py b/science/config.py index ab47b64..4d4929c 100644 --- a/science/config.py +++ b/science/config.py @@ -18,6 +18,7 @@ from science.build_info import BuildInfo from science.data import Accessor, Data from science.dataclass import Dataclass +from science.dataclass.deserializer import HeterogeneousParser from science.dataclass.deserializer import parse as parse_dataclass from science.dataclass.reflect import FieldInfo, dataclass_info from science.doc import DOC_SITE_URL @@ -31,6 +32,7 @@ InterpreterGroup, Provider, ) +from science.platform import LibC, Platform, PlatformSpec from science.providers import get_provider @@ -56,6 +58,15 @@ def parse_build_info(data: Data) -> BuildInfo: ) +def parse_platform_spec(data: Data | str) -> PlatformSpec: + if isinstance(data, str): + return PlatformSpec(Platform.parse(data)) + + libc_value = data.get_str("libc", default="") + libc = LibC(libc_value) if libc_value else None + return PlatformSpec(platform=Platform.parse(data.get_str("platform")), libc=libc) + + @dataclass(frozen=True) class ProviderFields(Dataclass): id: Identifier @@ -166,7 +177,13 @@ def parse_interpreter_group(ig_data: Data) -> InterpreterGroup: lift, 
Application, interpreters=tuple(interpreters_by_id.values()), - custom_parsers={BuildInfo: parse_build_info, InterpreterGroup: parse_interpreter_group}, + custom_parsers={ + BuildInfo: parse_build_info, + InterpreterGroup: parse_interpreter_group, + PlatformSpec: HeterogeneousParser.wrap( + parse_platform_spec, Data, str, output_type=PlatformSpec + ), + }, ) unrecognized_config = gather_unrecognized_application_config(lift, index_start=1) diff --git a/science/data.py b/science/data.py index 6748c71..4fc792d 100644 --- a/science/data.py +++ b/science/data.py @@ -98,6 +98,51 @@ def get_float(self, key: str, default: float | Required = REQUIRED) -> float: def get_bool(self, key: str, default: bool | Required = REQUIRED) -> bool: return self.get_value(key, expected_type=bool, default=default) + def get_heterogeneous_list( + self, + key: str, + expected_item_types: tuple[type, ...], + default: list | Required = REQUIRED, + used: bool = True, + ) -> list: + if len(expected_item_types) < 2: + raise InputError( + f"Can only extract a heterogeneous list given two or more expected item types." + f"{os.linesep}Given: {' '.join(map(str, expected_item_types))}" + ) + + value = self.get_value( + key, + expected_type=list, + default=default, + used=used, # type: ignore[type-abstract] + ) + + items = [] + invalid_entries = {} + for index, item in enumerate(value, start=1): + if isinstance(item, expected_item_types): + items.append(item) + elif isinstance(item, dict) and Data in expected_item_types: + items.append(Data(provenance=self.provenance, data=FrozenDict(item), path="")) + else: + invalid_entries[index] = item + + if invalid_entries: + invalid_items = [ + f"item {index}: {item} of type {self._typename(type(item))}" + for index, item in invalid_entries.items() + ] + head_types = ", ".join(self._typename(it) for it in expected_item_types[:-1]) + tail_type = self._typename(expected_item_types[-1]) + raise InputError( + f"Expected {self.config(key)} defined in {self.provenance.source} to be a list " + f"with items of type {head_types} or {tail_type} but got {len(invalid_entries)} " + f"out of {len(value)} entries of the wrong type:{os.linesep}" + f"{os.linesep.join(invalid_items)}" + ) + return items + def get_list( self, key: str, @@ -131,8 +176,12 @@ def get_list( for index, item in invalid_entries.items() ] expected_values = "" - if issubclass(expected_item_type, Enum): - expected_values = f" from {{{", ".join(repr(expected.value) for expected in expected_item_type)}}}" + try: + if issubclass(expected_item_type, Enum): + enum_values = ", ".join(repr(expected.value) for expected in expected_item_type) + expected_values = f" from {{{enum_values}}}" + except TypeError: + pass raise InputError( f"Expected {self.config(key)} defined in {self.provenance.source} to be a list " @@ -145,7 +194,7 @@ def get_list( def get_data_list( self, key: str, - default: list[dict] | Required = REQUIRED, + default: list[Data] | Required = REQUIRED, ) -> list[Data]: data_list = [ Data( @@ -154,12 +203,21 @@ def get_data_list( path=f"{self.path}.{key}[{index}]" if self.path else key, ) for index, data in enumerate( - self.get_list(key, expected_item_type=Mapping, default=default, used=False), start=1 + self.get_list( + key, + expected_item_type=dict, + default=[] if default is not Data.REQUIRED else Data.REQUIRED, + used=False, + ), + start=1, ) ] if data_list: self._unused_data[key] = data_list - return data_list + return data_list + + assert default is not Data.REQUIRED + return default @staticmethod def _typename(type_: 
type) -> str: diff --git a/science/dataclass/deserializer.py b/science/dataclass/deserializer.py index d6816b7..3c7714c 100644 --- a/science/dataclass/deserializer.py +++ b/science/dataclass/deserializer.py @@ -7,15 +7,50 @@ import os import typing from collections import OrderedDict +from dataclasses import dataclass from textwrap import dedent -from typing import Any, Collection, Mapping, TypeVar, cast +from typing import Any, Callable, Collection, Mapping, TypeVar, cast from science.data import Data from science.dataclass import Dataclass +from science.dataclass.reflect import dataclass_info from science.errors import InputError from science.frozendict import FrozenDict from science.types import TypeInfo + +@dataclass(frozen=True) +class HeterogeneousParser[O]: + @classmethod + def wrap( + cls, + parser: Callable[[Any], O], + input_type: type, + another_input_type: type, + *remaining_input_types: type, + output_type: type[O], + ) -> HeterogeneousParser[O]: + return cls(parser, (input_type, another_input_type, *remaining_input_types), output_type) + + parser: Callable[[Any], O] + input_types: tuple[type, ...] + output_type: type[O] + + def __post_init__(self): + if not self.input_types: + raise InputError( + "A HeterogeneousParser must accept two or more input types. Given none." + ) + if len(self.input_types) == 1: + raise InputError( + f"A HeterogeneousParser must accept two or more input types. Given just one: " + f"{self.input_types[0]}" + ) + + def __call__(self, data: Any) -> O: + return self.parser(data) + + _F = TypeVar("_F") @@ -24,7 +59,7 @@ def _parse_field( type_: TypeInfo[_F], default: _F | Data.Required, data: Data, - custom_parsers: Mapping[type, typing.Callable[[Data], Any]], + custom_parsers: Mapping[type, Callable[[Data], Any]], ) -> _F: if type_.has_origin_type and (parser := custom_parsers.get(type_.origin_type)): return parser(data) @@ -66,7 +101,15 @@ def _parse_field( item_type = type_.item_type items: list[Any] = [] if dataclasses.is_dataclass(item_type) or isinstance(item_type, Mapping): - data_list = data.get_data_list(name, default=cast(list | Data.Required, default)) + custom_parser = custom_parsers.get(item_type) + if isinstance(custom_parser, HeterogeneousParser): + data_list = data.get_heterogeneous_list( + name, + expected_item_types=(*custom_parser.input_types, custom_parser.output_type), + default=cast(list | Data.Required, default), + ) + else: + data_list = data.get_data_list(name, default=cast(list | Data.Required, default)) if isinstance(item_type, Mapping): items.extend( @@ -129,12 +172,15 @@ def parse( data: Data, data_type: type[_D], *, - custom_parsers: Mapping[type, typing.Callable[[Data], Any]] = FrozenDict(), + custom_parsers: Mapping[type, Callable[[Data], Any]] = FrozenDict(), **pre_parsed_fields: Any, ) -> _D: if not dataclasses.is_dataclass(data_type): raise InputError(f"Cannot parse data_type {data_type}, it is not a @dataclass.") + if isinstance(data, data_type): + return cast(_D, data) + if parser := custom_parsers.get(data_type): return parser(data) @@ -151,21 +197,20 @@ def get_type(field_name: str, putative_type: Any) -> TypeInfo: ) kwargs = {} - for field in dataclasses.fields(data_type): + for field in dataclass_info(data_type).field_info: if value := pre_parsed_fields.get(field.name): kwargs[field.name] = value continue - type_info = get_type(field_name=field.name, putative_type=field.type) - if type_info.optional: + if field.type.optional: kwargs[field.name] = None errors = OrderedDict[TypeInfo, Exception]() - for field_type in 
type_info.iter_types():
+        for field_type in field.type.iter_types():

            def parse_field(data: Data) -> Any:
                return _parse_field(
-                    name=field.name,
+                    name=field.display_name,
                    type_=field_type,
                    default=(
                        field.default if field.default is not dataclasses.MISSING else Data.REQUIRED
diff --git a/science/dataclass/reflect.py b/science/dataclass/reflect.py
index b51f0e4..6f6bfae 100644
--- a/science/dataclass/reflect.py
+++ b/science/dataclass/reflect.py
@@ -30,6 +30,7 @@ class FieldMetadata:

    DEFAULT: ClassVar[FieldMetadata]

+    alias: str | None = None
    doc_func: Callable[[], str] | None = None
    reference: bool = False
    inline: bool = False
@@ -46,6 +47,7 @@ def doc(self) -> str:
def metadata(
    doc: str | Callable[[], str] = "",
    *,
+    alias: str | None = None,
    reference: bool = False,
    inline: bool = False,
    hidden: bool = False,
@@ -59,7 +61,7 @@ def doc_func() -> str:

    return {
        _FIELD_METADATA_KEY: FieldMetadata(
-            doc_func=doc_func, reference=reference, inline=inline, hidden=hidden
+            alias=alias, doc_func=doc_func, reference=reference, inline=inline, hidden=hidden
        )
    }

@@ -144,6 +146,7 @@ def doc_func() -> str:
@dataclass(frozen=True)
class FieldInfo(Generic[_F]):
    name: str
+    alias: str | None
    type: TypeInfo[_F]
    default: Any
    doc: str
@@ -151,6 +154,10 @@ class FieldInfo(Generic[_F]):
    inline: bool = False
    hidden: bool = False

+    @property
+    def display_name(self) -> str:
+        return self.alias or self.name
+
    @property
    def has_default(self) -> bool:
        return self.default is not MISSING
@@ -181,6 +188,7 @@ def iter_field_info() -> Iterator[FieldInfo]:
            field_metadata = field.metadata.get(_FIELD_METADATA_KEY, FieldMetadata.DEFAULT)
            yield FieldInfo(
                name=field.name,
+                alias=field_metadata.alias,
                type=TypeInfo(type_hints.get(field.name, field.type)),
                default=field.default,
                doc=field_metadata.doc,
diff --git a/science/exe.py b/science/exe.py
index 1bf2d37..93e2b1d 100644
--- a/science/exe.py
+++ b/science/exe.py
@@ -44,7 +44,7 @@
from science.model import Application
from science.options import OptionDescriptor, mutually_exclusive, to_option_string
from science.os import EXE_EXT
-from science.platform import CURRENT_PLATFORM, Platform
+from science.platform import CURRENT_PLATFORM, CURRENT_PLATFORM_SPEC, LibC, Platform, PlatformSpec
from science.providers import ALL_PROVIDERS, ProviderInfo

logger = logging.getLogger(__name__)
@@ -268,7 +268,7 @@ def _close_doc() -> None:

@dataclass(frozen=True)
class DownloadConfig:
-    platforms: tuple[Platform, ...]
+    platform_specs: tuple[PlatformSpec, ...]
    explicit_set: bool


@@ -302,8 +302,22 @@ class DownloadConfig:
        "`--platform`. By default, only binaries for the current platform are downloaded."
    ),
)
+@click.option(
+    "--libc",
+    "libcs",
+    type=click.Choice([libc.value for libc in LibC]),
+    multiple=True,
+    default=[],
+    callback=lambda _ctx, _param, value: [LibC(v) for v in value],
+    help=(
+        "Choose binaries that link to the specified libc when downloading for a Linux platform. "
+        "Binaries that link against gnu libc will be chosen by default."
+ ), +) @click.pass_context -def _download(ctx: click.Context, platforms: list[Platform], all_platforms: bool) -> None: +def _download( + ctx: click.Context, platforms: list[Platform], all_platforms: bool, libcs: list[LibC | None] +) -> None: """Download binaries for offline use.""" if platforms: @@ -313,10 +327,17 @@ def _download(ctx: click.Context, platforms: list[Platform], all_platforms: bool platforms = list(Platform) explicit_set = False else: - platforms = [CURRENT_PLATFORM] + platforms = [CURRENT_PLATFORM_SPEC.platform] explicit_set = True - ctx.obj = DownloadConfig(platforms=tuple(platforms), explicit_set=explicit_set) + libcs = libcs or [CURRENT_PLATFORM_SPEC.libc] + + ctx.obj = DownloadConfig( + platform_specs=tuple( + PlatformSpec(platform, libc) for platform in platforms for libc in libcs + ), + explicit_set=explicit_set, + ) download_dest_dir = click.argument("dest_dir", metavar="DEST_DIR", type=Path) @@ -336,7 +357,7 @@ def _create_provider_download_func( def func(download_config: DownloadConfig, dest_dir: Path, **kwargs: Any) -> None: download_provider_distribution( provider_info=provider_info, - platforms=download_config.platforms, + platform_specs=download_config.platform_specs, explicit_platforms=download_config.explicit_set, dest_dir=dest_dir, **kwargs, @@ -392,7 +413,9 @@ def _download_ptex( project_name="ptex", binary_name="ptex", versions=versions, - platforms=download_config.platforms, + platforms=dict.fromkeys( + platform_spec.platform for platform_spec in download_config.platform_specs + ), dest_dir=dest_dir, ) @@ -409,7 +432,9 @@ def _download_scie_jump( project_name="jump", binary_name="scie-jump", versions=versions, - platforms=download_config.platforms, + platforms=dict.fromkeys( + platform_spec.platform for platform_spec in download_config.platform_specs + ), dest_dir=dest_dir, ) @@ -654,6 +679,15 @@ def _list(emit_json: bool) -> None: default=[], help="Override any configured platforms and target these platforms instead.", ) +@click.option( + "--libc", + "libcs", + type=click.Choice([libc.value for libc in LibC]), + multiple=True, + default=[], + callback=lambda _ctx, _param, value: [LibC(v) for v in value], + help="Override any configured libc providers and use these libc providers instead.", +) @click.pass_context def _lift( ctx: click.Context, @@ -663,15 +697,20 @@ def _lift( app_name: str | None, app_info: list[AppInfo], platforms: list[Platform], + libcs: list[LibC | None], ) -> None: # N.B.: Help is defined above in the _lift group decorator since it's a dynamic string. 
+ + libcs = libcs or [None] ctx.obj = LiftConfig( file_mappings=tuple(file_mappings), invert_lazy_ids=frozenset(invert_lazy_ids), include_provenance=include_provenance or bool(app_info), app_info=tuple(app_info), app_name=app_name, - platforms=tuple(platforms), + platform_specs=tuple( + PlatformSpec(platform, libc) for platform in platforms for libc in libcs + ), ) @@ -744,7 +783,7 @@ def export( platform_info = PlatformInfo.create(application, use_suffix=use_platform_suffix) with temporary_directory(cleanup=True) as td: for _, manifest_path in lift.export_manifest( - lift_config, application, dest_dir=td, platforms=lift_config.platforms + lift_config, application, dest_dir=td, platform_specs=lift_config.platform_specs ): lift_manifest = dest_dir / ( manifest_path.relative_to(td) if platform_info.use_suffix else manifest_path.name @@ -841,14 +880,14 @@ def _build( application = parse_application(lift_config, config) platform_info = PlatformInfo.create(application, use_suffix=use_platform_suffix) - platforms = lift_config.platforms or application.platforms + platform_specs = lift_config.platform_specs or application.platform_specs if use_jump and use_platform_suffix: logger.warning("Cannot use a custom scie jump build with a multi-platform configuration.") logger.warning( "Restricting requested platforms of " - f"{", ".join(sorted(platform.value for platform in platforms))} to {CURRENT_PLATFORM}", + f"{", ".join(sorted(platform.value for platform in platform_specs))} to {CURRENT_PLATFORM}", ) - platforms = frozenset([CURRENT_PLATFORM]) + platform_specs = frozenset([CURRENT_PLATFORM_SPEC]) scie_jump_version = application.scie_jump.version if application.scie_jump else None if scie_jump_version and scie_jump_version < version.parse("0.9.0"): @@ -864,7 +903,7 @@ def _build( lift_config=lift_config, application=application, dest_dir=td, - platforms=platforms, + platform_specs=platform_specs, platform_info=platform_info, use_jump=use_jump, hash_functions=hash_functions, diff --git a/science/model.py b/science/model.py index 7ad96af..15c9152 100644 --- a/science/model.py +++ b/science/model.py @@ -35,7 +35,7 @@ from science.errors import InputError from science.frozendict import FrozenDict from science.hashing import Digest, ExpectedDigest -from science.platform import CURRENT_PLATFORM, Platform +from science.platform import CURRENT_PLATFORM_SPEC, PlatformSpec class FileType(Enum): @@ -362,8 +362,11 @@ def serialize(self, base_dir: Path) -> None: ... @runtime_checkable class Provider(Protocol[ConfigDataclass]): @classmethod - def supported_platforms(cls) -> frozenset[Platform]: - return frozenset(Platform) + def iter_supported_platforms( + cls, requested_platforms: Iterable[PlatformSpec] + ) -> Iterator[PlatformSpec]: + for platform_spec in requested_platforms: + yield platform_spec @classmethod def config_dataclass(cls) -> type[ConfigDataclass]: ... @@ -373,7 +376,7 @@ def create(cls, identifier: Identifier, lazy: bool, config: ConfigDataclass) -> def distributions(self) -> DistributionsManifest: ... - def distribution(self, platform: Platform) -> Distribution | None: ... + def distribution(self, platform_spec: PlatformSpec) -> Distribution | None: ... 
@documented_dataclass( @@ -498,13 +501,15 @@ def create(cls, id_: Identifier, selector: str, interpreters: Iterable[Interpret ) ) - def _expand_placeholder(self, platform: Platform, match: Match) -> tuple[str, dict[str, str]]: + def _expand_placeholder( + self, platform_spec: PlatformSpec, match: Match + ) -> tuple[str, dict[str, str]]: if placeholder := match.group("placeholder"): env = {} ph = Identifier(placeholder) env_var_prefix = f"_SCIENCE_IG_{self.id}_{placeholder}_" for member in self.members: - distribution = member.provider.distribution(platform) + distribution = member.provider.distribution(platform_spec) if distribution: ph_value = distribution.placeholders[ph] env[f"={env_var_prefix}{distribution.id}"] = ph_value @@ -514,11 +519,13 @@ def _expand_placeholder(self, platform: Platform, match: Match) -> tuple[str, di return path, env return self.selector, {} - def expand_placeholders(self, platform: Platform, value: str) -> tuple[str, dict[str, str]]: + def expand_placeholders( + self, platform_spec: PlatformSpec, value: str + ) -> tuple[str, dict[str, str]]: env = {} def expand_placeholder(match: Match) -> str: - expansion, ig_env = self._expand_placeholder(platform, match) + expansion, ig_env = self._expand_placeholder(platform_spec, match) env.update(ig_env) return expansion @@ -536,7 +543,9 @@ class Application(Dataclass): description: str | None = None load_dotenv: bool = False build_info: BuildInfo | None = dataclasses.field(default=None, metadata=metadata(inline=True)) - platforms: frozenset[Platform] = frozenset([CURRENT_PLATFORM]) + platform_specs: frozenset[PlatformSpec] = dataclasses.field( + default=frozenset([CURRENT_PLATFORM_SPEC]), metadata=metadata(alias="platforms") + ) base: str | None = dataclasses.field( default=None, metadata=metadata("An alternate path to use for the scie base `nce` CAS."), @@ -579,7 +588,7 @@ def iter_command_names() -> Iterator[str]: ) def __post_init__(self) -> None: - if not self.platforms: + if not self.platform_specs: raise InputError( "There must be at least one platform defined for a science application. Leave " "un-configured to request just the current platform." 
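Before the `science/platform.py` diff below, a brief sketch of how the new `PlatformSpec` pieces compose. This is derived only from the definitions in that diff and is not a test shipped with the change; the expected strings follow from `PlatformSpec.value` and `qualified_binary_name` as written there.

```python
from science.platform import LibC, Platform, PlatformSpec

# A musl spec gains a qualifier in its string value and in artifact names...
musl = PlatformSpec(Platform.Linux_x86_64, LibC.MUSL)
assert musl.value == "linux-x86_64-musl"
assert musl.qualified_binary_name("science") == "science-musl-linux-x86_64"

# ...while gnu (or unspecified) libc specs keep the historical platform-only names.
gnu = PlatformSpec(Platform.Linux_x86_64, LibC.GLIBC)
assert gnu.value == "linux-x86_64"
assert gnu.qualified_binary_name("science") == "science-linux-x86_64"
```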
diff --git a/science/platform.py b/science/platform.py index 024c5da..e5f9bf5 100644 --- a/science/platform.py +++ b/science/platform.py @@ -5,25 +5,105 @@ import os import platform +import subprocess +import sys from enum import Enum +from functools import cache, cached_property +from science.dataclass.reflect import documented_dataclass from science.errors import InputError -class Platform(Enum): - Linux_aarch64 = "linux-aarch64" - Linux_armv7l = "linux-armv7l" - Linux_powerpc64le = "linux-powerpc64" - Linux_s390x = "linux-s390x" - Linux_x86_64 = "linux-x86_64" - Macos_aarch64 = "macos-aarch64" - Macos_x86_64 = "macos-x86_64" - Windows_aarch64 = "windows-aarch64" - Windows_x86_64 = "windows-x86_64" +class Os(Enum): + Linux = "linux" + Macos = "macos" + Windows = "windows" @classmethod + def current(cls) -> Os: + match platform.system().lower(): + case "linux": + return cls.Linux + case "darwin": + return cls.Macos + case "windows": + return cls.Windows + case system: + raise InputError(f"The current operating system is not supported!: {system}") + + def __str__(self) -> str: + return self.value + + +CURRENT_OS = Os.current() + + +class Arch(Enum): + ARM64 = "aarch64" + ARMv7l = "armv7l" + PPC64le = "powerpc64" + S390X = "s390x" + X86_64 = "x86_64" + + def __str__(self) -> str: + return self.value + + +class OsArch: + def __init__(self, os_: Os, arch: Arch): + self.os = os_ + self.arch = arch + + @cached_property + def value(self) -> str: + return f"{self.os.value}-{self.arch.value}" + + @property + def is_windows(self) -> bool: + return self.os is Os.Windows + + @property + def extension(self): + return ".exe" if self.is_windows else "" + + def binary_name(self, binary_name: str) -> str: + return f"{binary_name}{self.extension}" + + def qualified_binary_name(self, binary_name: str, *extra_qualifiers: str) -> str: + return f"{binary_name}-{'-'.join((*extra_qualifiers, self.value))}{self.extension}" + + def __str__(self) -> str: + return self.value + + +class Platform(OsArch, Enum): + Linux_aarch64 = Os.Linux, Arch.ARM64 + Linux_armv7l = Os.Linux, Arch.ARMv7l + Linux_powerpc64le = Os.Linux, Arch.PPC64le + Linux_s390x = Os.Linux, Arch.S390X + Linux_x86_64 = Os.Linux, Arch.X86_64 + Macos_aarch64 = Os.Macos, Arch.ARM64 + Macos_x86_64 = Os.Macos, Arch.X86_64 + Windows_aarch64 = Os.Windows, Arch.ARM64 + Windows_x86_64 = Os.Windows, Arch.X86_64 + + @classmethod + @cache def parse(cls, value: str) -> Platform: - return CURRENT_PLATFORM if "current" == value else Platform(value) + if "current" == value: + return CURRENT_PLATFORM + + known_values: list[str] = [] + for plat in cls: + if value == plat.value: + return plat + known_values.append(plat.value) + + raise InputError( + f"Invalid platform string {value!r}.{os.linesep}" + f"Known values are:{os.linesep}" + f"{os.linesep.join(f'+ {v}' for v in known_values)}" + ) @classmethod def current(cls) -> Platform: @@ -33,24 +113,24 @@ def current(cls) -> Platform: if current := os.environ.get("__SCIENCE_CURRENT_PLATFORM__"): return cls.parse(current) - match (system := platform.system().lower(), machine := platform.machine().lower()): - case ("linux", "aarch64" | "arm64"): + match (system := CURRENT_OS, machine := platform.machine().lower()): + case (Os.Linux, "aarch64" | "arm64"): return cls.Linux_aarch64 - case ("linux", "armv7l" | "armv8l"): + case (Os.Linux, "armv7l" | "armv8l"): return cls.Linux_armv7l - case ("linux", "ppc64le"): + case (Os.Linux, "ppc64le"): return cls.Linux_powerpc64le - case ("linux", "s390x"): + case (Os.Linux, "s390x"): 
return cls.Linux_s390x - case ("linux", "amd64" | "x86_64"): + case (Os.Linux, "amd64" | "x86_64"): return cls.Linux_x86_64 - case ("darwin", "aarch64" | "arm64"): + case (Os.Macos, "aarch64" | "arm64"): return cls.Macos_aarch64 - case ("darwin", "amd64" | "x86_64"): + case (Os.Macos, "amd64" | "x86_64"): return cls.Macos_x86_64 - case ("windows", "aarch64" | "arm64"): + case (Os.Windows, "aarch64" | "arm64"): return cls.Windows_aarch64 - case ("windows", "amd64" | "x86_64"): + case (Os.Windows, "amd64" | "x86_64"): return cls.Windows_x86_64 case _: raise InputError( @@ -58,22 +138,88 @@ def current(cls) -> Platform: f"{system} / {machine}" ) - @property - def is_windows(self) -> bool: - return self in (self.Windows_aarch64, self.Windows_x86_64) - @property - def extension(self): - return ".exe" if self.is_windows else "" +CURRENT_PLATFORM = Platform.current() - def binary_name(self, binary_name: str) -> str: - return f"{binary_name}{self.extension}" - def qualified_binary_name(self, binary_name: str) -> str: - return f"{binary_name}-{self.value}{self.extension}" +class LibC(Enum): + @classmethod + @cache + def current(cls) -> LibC | None: + if CURRENT_PLATFORM.os is not Os.Linux or CURRENT_PLATFORM.arch is not Arch.X86_64: + return None + if ( + "musl" + in subprocess.run( + args=["ldd", sys.executable], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + text=True, + ).stdout + ): + return LibC.MUSL + return LibC.GLIBC + + GLIBC = "gnu" + MUSL = "musl" def __str__(self) -> str: return self.value -CURRENT_PLATFORM = Platform.current() +@documented_dataclass(frozen=True, alias="platform specification") +class PlatformSpec: + """A specification of a platform a scie targets. + + A platform specification at its most basic is a string identifying the operating system / + processor architecture pair, a.k.a. the platform; e.g.: `"linux-x86_64"`. + + For some systems more detail is needed to distinguish between available software ecosystems and + additional available hardware details. If you need to pick out this level of detail, instead of + supplying a platform string, supply a table with a platform string entry and any other entries + needed to narrow down the platform specification. For example, to specify a musl Linux system + you might use: `{platform = "linux-x86_64", libc = "musl"}`. + + You are free to mix simple platform strings with platform specification tables as values in + arrays and tables that accept platform specification values. 
For example, this is a valid list
    of lift platforms to target in a lift manifest:
    ```toml
    [lift]
    platforms = [
        "linux-aarch64",
        {platform = "linux-x86_64", libc = "gnu"},
        {platform = "linux-x86_64", libc = "musl"},
        "macos-aarch64",
        "macos-x86_64",
    ]
    ```
    """
+
+    platform: Platform
+    libc: LibC | None = None
+
+    def binary_name(self, binary_name: str) -> str:
+        return self.platform.binary_name(binary_name)
+
+    def qualified_binary_name(self, binary_name: str) -> str:
+        if LibC.MUSL is self.libc:
+            return self.platform.qualified_binary_name(binary_name, self.libc.value)
+        return self.platform.qualified_binary_name(binary_name)
+
+    @property
+    def value(self) -> str:
+        if self.libc is LibC.MUSL:
+            return f"{self.platform.value}-{self.libc.value}"
+        return self.platform.value
+
+    @property
+    def is_windows(self) -> bool:
+        return self.platform.is_windows
+
+    def __repr__(self) -> str:
+        if self.libc:
+            return f"""{{platform = "{self.platform}", libc = "{self.libc}"}}"""
+        return self.platform.value
+
+
+CURRENT_PLATFORM_SPEC = PlatformSpec(CURRENT_PLATFORM, libc=LibC.current())
diff --git a/science/providers/pypy.py b/science/providers/pypy.py
index a9fe045..67b49dd 100644
--- a/science/providers/pypy.py
+++ b/science/providers/pypy.py
@@ -10,7 +10,7 @@
from dataclasses import dataclass
from datetime import timedelta
from pathlib import Path
-from typing import Any
+from typing import Any, Iterable, Iterator

from bs4 import BeautifulSoup
from packaging.version import Version
@@ -30,7 +30,7 @@
    Provider,
    Url,
)
-from science.platform import Platform
+from science.platform import LibC, Platform, PlatformSpec


@dataclass(frozen=True)
@@ -170,18 +170,21 @@ class PyPy(Provider[Config]):
    """

    @classmethod
-    def supported_platforms(cls) -> frozenset[Platform]:
-        return frozenset(
-            (
+    def iter_supported_platforms(
+        cls, requested_platforms: Iterable[PlatformSpec]
+    ) -> Iterator[PlatformSpec]:
+        for platform_spec in requested_platforms:
+            if platform_spec.platform in (
                Platform.Linux_aarch64,
                Platform.Linux_s390x,
-                Platform.Linux_x86_64,
                Platform.Macos_aarch64,
                Platform.Macos_x86_64,
                Platform.Windows_aarch64,
                Platform.Windows_x86_64,
-            )
-        )
+            ):
+                yield PlatformSpec(platform_spec.platform)
+            elif platform_spec.platform is Platform.Linux_x86_64:
+                yield PlatformSpec(Platform.Linux_x86_64, LibC.GLIBC)

    @staticmethod
    def rank_compatibility(platform: Platform, arch: str) -> int | None:
@@ -304,13 +307,13 @@ def version(self) -> Version:
    def distributions(self) -> DistributionsManifest:
        return self._distributions

-    def distribution(self, platform: Platform) -> Distribution | None:
+    def distribution(self, platform_spec: PlatformSpec) -> Distribution | None:
        selected_asset: FingerprintedAsset | None = None
        asset_rank: int | None = None
        for asset in self._distributions.assets:
-            if (rank := self.rank_compatibility(platform, asset.arch)) is not None and (
-                asset_rank is None or rank < asset_rank
-            ):
+            if (
+                rank := self.rank_compatibility(platform_spec.platform, asset.arch)
+            ) is not None and (asset_rank is None or rank < asset_rank):
                asset_rank = rank
                selected_asset = asset
        if selected_asset is None:
@@ -352,7 +355,7 @@ def distribution(self, platform: Platform) -> Distribution | None:
        # We correct for that discrepancy here:
        top_level_archive_dir = re.sub(r"-portable$", "", selected_asset.file_stem())

-        if platform.is_windows:
+        if platform_spec.is_windows:
            pypy_binary = f"{top_level_archive_dir}\\{pypy}.exe"
            placeholders[Identifier("pypy")] = pypy_binary
placeholders[Identifier("python")] = pypy_binary diff --git a/science/providers/python_build_standalone.py b/science/providers/python_build_standalone.py index 70b8936..cfa0408 100644 --- a/science/providers/python_build_standalone.py +++ b/science/providers/python_build_standalone.py @@ -30,7 +30,7 @@ Provider, Url, ) -from science.platform import Platform +from science.platform import LibC, Platform, PlatformSpec @dataclass(frozen=True) @@ -168,6 +168,10 @@ class Config: """ ), ) + libc: LibC | None = dataclasses.field( + default=None, + metadata=metadata("For Linux x86_64 platforms, the libc to link against."), + ) flavor: str = dataclasses.field( default="install_only", metadata=metadata( @@ -241,7 +245,7 @@ class PythonBuildStandalone(Provider[Config]): """ @staticmethod - def rank_compatibility(platform: Platform, target_triple: str) -> int | None: + def rank_compatibility(platform: Platform, libc: LibC, target_triple: str) -> int | None: match platform: case Platform.Linux_aarch64: match target_triple: @@ -262,17 +266,17 @@ def rank_compatibility(platform: Platform, target_triple: str) -> int | None: case "s390x-unknown-linux-gnu": return 0 case Platform.Linux_x86_64: - match target_triple: - case "x86_64-unknown-linux-gnu": + match libc, target_triple: + case LibC.MUSL, "x86_64-unknown-linux-musl": return 0 - case "x86_64_v2-unknown-linux-gnu": + case LibC.GLIBC, "x86_64-unknown-linux-gnu": + return 0 + case LibC.GLIBC, "x86_64_v2-unknown-linux-gnu": return 1 - case "x86_64_v3-unknown-linux-gnu": + case LibC.GLIBC, "x86_64_v3-unknown-linux-gnu": return 2 - case "x86_64-unknown-linux-musl": + case LibC.GLIBC, "x86_64_v4-unknown-linux-gnu": return 3 - case "x86_64_v4-unknown-linux-gnu": - return 4 case Platform.Macos_aarch64: match target_triple: case "aarch64-apple-darwin": @@ -303,6 +307,7 @@ def create(cls, identifier: Identifier, lazy: bool, config: Config) -> PythonBui return cls( id=identifier, lazy=lazy, + libc=config.libc, _distributions=Distributions.fetch( base_url=config.base_url, version=version, @@ -386,6 +391,7 @@ def create(cls, identifier: Identifier, lazy: bool, config: Config) -> PythonBui return cls( id=identifier, lazy=lazy, + libc=config.libc, _distributions=Distributions( release=release, latest=config.release is None, @@ -398,6 +404,7 @@ def create(cls, identifier: Identifier, lazy: bool, config: Config) -> PythonBui id: Identifier lazy: bool + libc: LibC | None _distributions: Distributions @property @@ -407,13 +414,17 @@ def version(self) -> Version: def distributions(self) -> DistributionsManifest: return self._distributions - def distribution(self, platform: Platform) -> Distribution | None: + def distribution(self, platform_spec: PlatformSpec) -> Distribution | None: selected_asset: FingerprintedAsset | None = None asset_rank: int | None = None for asset in self._distributions.assets: - if (rank := self.rank_compatibility(platform, asset.target_triple)) is not None and ( - asset_rank is None or rank < asset_rank - ): + if ( + rank := self.rank_compatibility( + platform_spec.platform, + self.libc or platform_spec.libc or LibC.GLIBC, + asset.target_triple, + ) + ) is not None and (asset_rank is None or rank < asset_rank): asset_rank = rank selected_asset = asset if selected_asset is None: @@ -433,7 +444,7 @@ def distribution(self, platform: Platform) -> Distribution | None: placeholders = {} match self._distributions.flavor: case "install_only" | "install_only_stripped": - if platform.is_windows: + if platform_spec.is_windows: 
placeholders[Identifier("python")] = "python\\python.exe" else: version = f"{selected_asset.version.major}.{selected_asset.version.minor}" diff --git a/tests/data/PBS-gnu-and-musl.toml b/tests/data/PBS-gnu-and-musl.toml new file mode 100644 index 0000000..1e7027b --- /dev/null +++ b/tests/data/PBS-gnu-and-musl.toml @@ -0,0 +1,63 @@ +[lift] +name = "gnu-and-musl" +description = "Tests a Python scie that can run on either a gnu or musl libc Linux machine." + +[[lift.files]] +name = "busybox" +type = "blob" +is_executable = true +[lift.files.source] +url = "https://www.busybox.net/downloads/binaries/1.35.0-x86_64-linux-musl/busybox" +lazy = false +[lift.files.digest] +fingerprint = "6e123e7f3202a8c1e9b1f94d8941580a25135382b99e8d3e34fb858bba311348" +size = 1131168 + +[[lift.interpreters]] +id = "cpython-gnu" +libc = "gnu" +provider = "PythonBuildStandalone" +release = "20250212" +lazy = false +version = "3.13.2" +flavor = "install_only_stripped" + +[[lift.interpreters]] +id = "cpython-musl" +libc = "musl" +provider = "PythonBuildStandalone" +release = "20250212" +lazy = false +version = "3.13.2" +flavor = "install_only_stripped" + +[[lift.interpreter_groups]] +id = "cpython" +selector = "{scie.bindings.configure:PYTHON}" +members = [ + "cpython-gnu", + "cpython-musl", +] + +[[lift.commands]] +exe = "#{cpython:python}" +args = ["-V"] + +[[lift.bindings]] +name = "configure" +exe = "{busybox}" +args = [ + "sh", + "-euc", + """\ +# Poor man's musl system detection. The /bin/sh binary path is a Posix guaranty and so we're really +# just banking on the binary there being dynamically linked; which it almost always is. +if ldd /bin/sh 2>&1 | {busybox} grep musl >/dev/null; then + echo "PYTHON=cpython-musl" >> "${{SCIE_BINDING_ENV}" +else + echo "PYTHON=cpython-gnu" >> "${{SCIE_BINDING_ENV}" +fi +echo >&2 Configured: +cat >&2 "${{SCIE_BINDING_ENV}" +""" +] diff --git a/tests/data/platform-specs.toml b/tests/data/platform-specs.toml new file mode 100644 index 0000000..39aebec --- /dev/null +++ b/tests/data/platform-specs.toml @@ -0,0 +1,19 @@ +[lift] +name = "platform-specs" +description = "Test heterogeneous platform specifiers work." + +platforms = [ + "linux-aarch64", + "linux-armv7l", + "linux-powerpc64", + "linux-s390x", + {platform = "linux-x86_64", libc = "gnu"}, + {platform = "linux-x86_64", libc = "musl"}, + "macos-aarch64", + "macos-x86_64", + "windows-aarch64", + "windows-x86_64", +] + +[[lift.commands]] +exe = "ignored" diff --git a/tests/test_config.py b/tests/test_config.py index eb2b492..efead6f 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -10,9 +10,17 @@ from pathlib import Path from textwrap import dedent +import pytest + from science.config import parse_config_file from science.model import Identifier -from science.platform import CURRENT_PLATFORM +from science.platform import ( + CURRENT_PLATFORM, + CURRENT_PLATFORM_SPEC, + LibC, + Platform, + PlatformSpec, +) def test_parse(build_root: Path) -> None: @@ -21,7 +29,7 @@ def test_parse(build_root: Path) -> None: assert 1 == len(interpreters), "Expected science to ship on a single fixed interpreter." interpreter = interpreters[0] - distribution = interpreter.provider.distribution(CURRENT_PLATFORM) + distribution = interpreter.provider.distribution(CURRENT_PLATFORM_SPEC) assert ( distribution is not None ), "Expected a Python interpreter distribution to be available for each platform tests run on." 
@@ -189,3 +197,64 @@ def test_unrecognized_config_fields(tmp_path: Path, science_pyz: Path) -> None:
         )
         == result.stderr
     )
+
+
+def test_platform_specs() -> None:
+    with resources.as_file(resources.files("data") / "platform-specs.toml") as config:
+        app = parse_config_file(config)
+    assert (
+        frozenset(
+            (
+                PlatformSpec(Platform.Linux_aarch64),
+                PlatformSpec(Platform.Linux_armv7l),
+                PlatformSpec(Platform.Linux_powerpc64le),
+                PlatformSpec(Platform.Linux_s390x),
+                PlatformSpec(Platform.Linux_x86_64, LibC.GLIBC),
+                PlatformSpec(Platform.Linux_x86_64, LibC.MUSL),
+                PlatformSpec(Platform.Macos_aarch64),
+                PlatformSpec(Platform.Macos_x86_64),
+                PlatformSpec(Platform.Windows_aarch64),
+                PlatformSpec(Platform.Windows_x86_64),
+            )
+        )
+        == app.platform_specs
+    )
+
+
+@pytest.mark.skipif(
+    CURRENT_PLATFORM is not Platform.Linux_x86_64,
+    reason="This test needs to run a Linux x86_64 scie.",
+)
+def test_PBS_gnu_and_musl(tmp_path: Path, science_pyz: Path) -> None:
+    with resources.as_file(resources.files("data") / "PBS-gnu-and-musl.toml") as config:
+        subprocess.run(
+            args=[
+                sys.executable,
+                str(science_pyz),
+                "lift",
+                "build",
+                "--dest-dir",
+                str(tmp_path),
+                str(config),
+            ],
+            check=True,
+        )
+    exe_path = tmp_path / CURRENT_PLATFORM.binary_name("gnu-and-musl")
+    scie_base = tmp_path / "scie-base"
+    result = subprocess.run(
+        args=[exe_path],
+        env={**os.environ, "SCIE_BASE": str(scie_base)},
+        capture_output=True,
+        text=True,
+        check=True,
+    )
+    assert (
+        dedent(
+            f"""\
+            Configured:
+            PYTHON=cpython-{CURRENT_PLATFORM_SPEC.libc}
+            """
+        )
+        == result.stderr
+    )
+    assert "Python 3.13.2\n" == result.stdout
diff --git a/tests/test_download.py b/tests/test_download.py
index c19c344..175faa2 100644
--- a/tests/test_download.py
+++ b/tests/test_download.py
@@ -16,7 +16,7 @@
 from pytest import MonkeyPatch

 from science.hashing import Digest
-from science.platform import CURRENT_PLATFORM, Platform
+from science.platform import CURRENT_PLATFORM_SPEC, Platform
 from science.providers import PyPy


@@ -187,7 +187,7 @@ def test_pbs_mirror(tmp_path: Path, current_platform: Platform) -> None:


 @pytest.mark.skipif(
-    CURRENT_PLATFORM not in PyPy.supported_platforms(),
+    CURRENT_PLATFORM_SPEC not in frozenset(PyPy.iter_supported_platforms([CURRENT_PLATFORM_SPEC])),
     reason="PyPy does not have pre-built distributions for the current platform.",
 )
 def test_pypy_mirror(tmp_path: Path, current_platform: Platform) -> None:
diff --git a/tests/test_exe.py b/tests/test_exe.py
index e967e73..d90f5fb 100644
--- a/tests/test_exe.py
+++ b/tests/test_exe.py
@@ -27,7 +27,8 @@
 from science import __version__
 from science.config import parse_config_file
 from science.os import IS_WINDOWS
-from science.platform import CURRENT_PLATFORM, Platform
+from science.platform import CURRENT_PLATFORM, CURRENT_PLATFORM_SPEC, Platform
+from science.providers import PyPy


 @pytest.fixture(scope="module")
@@ -69,7 +70,7 @@ def science_exe(
 def test_use_platform_suffix(
     tmp_path: Path, science_exe: Path, config: Path, science_pyz: Path, docsite: Path
 ) -> None:
-    expected_executable = tmp_path / CURRENT_PLATFORM.qualified_binary_name("science")
+    expected_executable = tmp_path / CURRENT_PLATFORM_SPEC.qualified_binary_name("science")
     assert not expected_executable.exists()
     subprocess.run(
         args=[
@@ -88,7 +89,7 @@ def test_use_platform_suffix(
         check=True,
     )
     assert expected_executable.is_file()
-    assert not (tmp_path / CURRENT_PLATFORM.binary_name("science")).exists()
+    assert not (tmp_path / CURRENT_PLATFORM_SPEC.binary_name("science")).exists()
CURRENT_PLATFORM_SPEC.binary_name("science")).exists() def test_no_use_platform_suffix( @@ -753,6 +754,10 @@ def working_pypy_versions() -> list[str]: return ["2.7", "3.6", "3.7", "3.8", "3.9", "3.10"] +@pytest.mark.skipif( + CURRENT_PLATFORM_SPEC not in PyPy.iter_supported_platforms([CURRENT_PLATFORM_SPEC]), + reason=f"PyPy does not support the current platform: {CURRENT_PLATFORM_SPEC}", +) @pytest.mark.parametrize("version", working_pypy_versions()) def test_pypy_provider(tmp_path: Path, science_exe: Path, version: str) -> None: dest = tmp_path / "dest" diff --git a/tests/test_installer.py b/tests/test_installer.py index 9c52da4..9fbff2e 100644 --- a/tests/test_installer.py +++ b/tests/test_installer.py @@ -8,6 +8,7 @@ from _pytest.tmpdir import TempPathFactory from science.os import IS_WINDOWS +from science.platform import CURRENT_PLATFORM_SPEC, LibC, Platform, PlatformSpec @pytest.fixture(scope="module") @@ -30,6 +31,15 @@ def test_installer_help(installer: list): assert long_help in result.stdout, f"Expected '{long_help}' in tool output" +# TODO(John Sirois): Remove this skip once science is released with 64 bit musl Linux support. +# https://github.com/a-scie/lift/issues/139 +skip_64bit_musl_linux = pytest.mark.skipif( + CURRENT_PLATFORM_SPEC == PlatformSpec(Platform.Linux_x86_64, LibC.MUSL), + reason="No science executable has been released yet for 64 bit musl Linux.", +) + + +@skip_64bit_musl_linux def test_installer_fetch_latest(tmp_path_factory: TempPathFactory, installer: list): """Invokes install.sh to fetch the latest science release binary, then invokes it.""" test_dir = tmp_path_factory.mktemp("install-test-default") @@ -44,6 +54,7 @@ def test_installer_fetch_latest(tmp_path_factory: TempPathFactory, installer: li assert result.stdout.strip(), "Expected version output in tool stdout" +@skip_64bit_musl_linux def test_installer_fetch_argtest(tmp_path_factory: TempPathFactory, installer: list): """Exercises all the options in the installer.""" test_dir = tmp_path_factory.mktemp("install-test") diff --git a/uv.lock b/uv.lock index 4cb23f8..a3c5219 100644 --- a/uv.lock +++ b/uv.lock @@ -1,4 +1,5 @@ version = 1 +revision = 1 requires-python = "==3.12.*" [[package]]
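A minimal usage sketch of the platform-spec API introduced above (illustrative only, not part of the patch). It assumes nothing beyond the `Platform`, `LibC`, `PlatformSpec`, and `CURRENT_PLATFORM_SPEC` names added to `science/platform.py` in this change; the commented outputs follow from the `value` property shown there.

```python
# Illustrative sketch of the platform-spec API added in science/platform.py above.
from science.platform import CURRENT_PLATFORM_SPEC, LibC, Platform, PlatformSpec

# Only a musl Linux x86-64 spec gains a "-musl" suffix in its value; glibc and
# non-Linux specs keep the bare platform value.
musl_spec = PlatformSpec(Platform.Linux_x86_64, libc=LibC.MUSL)
print(musl_spec.value)  # linux-x86_64-musl
print(PlatformSpec(Platform.Linux_x86_64, libc=LibC.GLIBC).value)  # linux-x86_64

# qualified_binary_name folds the musl libc into the produced artifact name.
print(musl_spec.qualified_binary_name("science"))

# CURRENT_PLATFORM_SPEC reflects the running interpreter: LibC.current() checks
# `ldd` output on sys.executable for "musl" on Linux x86-64 and is None elsewhere.
print(CURRENT_PLATFORM_SPEC)
```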