diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 00000000000..2773a29dbb3 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,566 @@ +# Pants Build System Contributor Guide + +This is the Pants build system repo -- a self-hosting build system where `./pants` runs Pants from source. Follow the conventions, architecture, and workflows specific to this codebase. + +## Critical Rules + +### ALWAYS use Pants, NEVER use raw Python tools + +This is the most important rule. **Never** run `pytest`, `mypy`, `black`, `isort`, `flake8`, `ruff`, or any other Python tool directly. All Python tools must be invoked through Pants: + +```bash +# CORRECT - always use Pants +pants test src/python/pants/backend/python/goals/pytest_runner_test.py +pants lint src/python/pants/backend/python/goals/pytest_runner.py +pants fmt src/python/pants/backend/python/goals/pytest_runner.py +pants check src/python/pants/backend/python/goals/pytest_runner.py + +# WRONG - never do this +pytest src/python/pants/backend/python/goals/pytest_runner_test.py +mypy src/python/pants/backend/python/goals/pytest_runner.py +black src/python/pants/backend/python/goals/pytest_runner.py +ruff check src/python/pants/backend/python/goals/pytest_runner.py +``` + +### Running Pants from source + +In this repo, `./pants` is a special bootstrap script that runs Pants from the local source tree. Use `pants` (which resolves to the `./pants` script) for all operations. The first run compiles the Rust engine and may take several minutes. + +### Test file naming + +Test files must be named `*_test.py` (not `test_*.py`). Pants discovers tests by this suffix. + +### BUILD file conventions + +Every directory with source code needs a `BUILD` file. 
Common patterns: + +```python +# Standard source + test pattern +python_sources() +python_tests(name="tests") + +# With test utilities (conftest, fixtures, helpers) +python_test_utils(name="test_utils") + +# With overrides for specific files +python_sources( + overrides={ + "special.py": {"dependencies": ["//some/dep"]}, + }, +) +``` + +All BUILD files must have the copyright header: +```python +# Copyright 2024 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). +``` + +## Common Commands + +| Task | Command | +|------|---------| +| Run specific test file | `pants test path/to/file_test.py` | +| Run tests for changed files | `pants --changed-since=HEAD test` | +| Run tests with transitive deps | `pants --changed-since=HEAD --changed-dependents=transitive test` | +| Run a single test function | `pants test path/to/file_test.py -- -k test_function_name` | +| Debug a test (interactive) | `pants test --debug path/to/file_test.py` | +| Debug with specific test | `pants test --debug path/to/file_test.py -- -k test_name` | +| Lint changed files | `pants --changed-since=HEAD lint` | +| Format changed files | `pants --changed-since=HEAD fmt` | +| Type-check changed files | `pants --changed-since=HEAD check` | +| List all targets in dir | `pants list path/to/dir:` | +| Show dependencies | `pants dependencies path/to/target` | +| Show dependents | `pants dependents path/to/target` | +| Show target info | `pants peek path/to/target` | +| Generate BUILD files | `pants tailor` | +| Validate BUILD files | `pants lint --only=ruff-format '**BUILD'` | +| Run pre-push checks | `build-support/githooks/pre-push` | + +### Test options + +```bash +# Run tests matching a pattern +pants test path/to/file_test.py -- -k "test_foo or test_bar" + +# Run with verbose output +pants test path/to/file_test.py -- -vvs + +# Run with no timeout (useful for debugging) +pants test --timeout-default=0 path/to/file_test.py + +# Show test 
output even on success +pants test --output=all path/to/file_test.py + +# Force re-run (skip cache) +pants --no-local-cache test path/to/file_test.py +``` + +### Target address syntax + +- `path/to/dir:target_name` - specific target +- `path/to/dir:` - all targets in directory +- `path/to/dir::` - all targets recursively +- `path/to/file.py` - file address (inferred target) +- `::` - everything in the repo + +## Key Directories + +| Path | Purpose | +|------|---------| +| `src/python/pants/` | Core Pants Python source code | +| `src/python/pants/backend/` | Language-specific backends (python, go, java, docker, etc.) | +| `src/python/pants/core/` | Core goals (test, lint, fmt, check, package, run) | +| `src/python/pants/engine/` | The Pants engine (rules, targets, processes, fs) | +| `src/rust/engine/` | Rust engine implementation | +| `pants-plugins/` | In-repo plugins (internal_plugins, pants_explorer) | +| `3rdparty/python/` | Third-party Python dependency declarations and lockfiles | +| `build-support/` | Build scripts, CI helpers, git hooks | +| `testprojects/` | Test fixture projects used by integration tests | +| `docs/docs/` | Documentation source (Docusaurus MDX) | + +## Python Resolves (Lockfiles) + +The repo uses multiple Python resolves configured in `pants.toml`: +- `python-default` - Main resolve for Pants source code +- `pytest` - Test runner dependencies +- `mypy` - Type checker dependencies +- `flake8` - Linter dependencies + +To regenerate lockfiles: `pants generate-lockfiles --resolve=` + +## Engine Architecture + +### The Rule Graph + +Pants's engine is built around a **rule graph** - a directed graph where: +- **Rules** (`@rule` decorated async functions) are internal nodes +- **Queries** are entry points (roots) +- **Params** are typed, hashable input values (leaves) + +Key properties: +- Rules are **pure functions** mapping input types to output types +- The engine handles **memoization**, **concurrency**, and **caching** +- Type annotations 
are used at runtime for dependency injection +- The engine uses **exact type matching** (no subtyping) + +### Rule Execution + +```python +from pants.engine.rules import rule, collect_rules, concurrently, implicitly +from pants.engine.intrinsics import execute_process + +@rule +async def my_rule(request: MyRequest) -> MyResult: + # Await other rules (engine manages execution) + intermediate = await some_other_rule(request.field, **implicitly()) + + # Run external processes through the engine + process_result = await execute_process( + Process(argv=["/bin/echo", "hello"], description="Echo"), + **implicitly() + ) + + # Parallel execution with concurrently() + results = await concurrently( + process_thing(item, **implicitly()) for item in items + ) + + return MyResult(data=process_result.stdout) +``` + +### Union Rules (Polymorphism) + +Since the engine uses exact type matching, polymorphism is achieved through **unions**: + +```python +from pants.engine.unions import UnionRule, union + +@union +class LintRequest: + pass + +@dataclass(frozen=True) +class MyLinterRequest(LintRequest): + ... 
+ +# Register the union member +def rules(): + return [ + *collect_rules(), + UnionRule(LintRequest, MyLinterRequest), + ] +``` + +### Rule Decorators + +| Decorator | Purpose | +|-----------|---------| +| `@rule` | Standard async rule (cached, pure) | +| `@goal_rule` | Top-level CLI goal entry point | +| `@uncacheable_rule` | Rule that should not be memoized | + +### Key Engine Types + +| Type | Module | Purpose | +|------|--------|---------| +| `Process` | `pants.engine.process` | Run an external process | +| `ProcessResult` | `pants.engine.process` | Process execution result | +| `Digest` | `pants.engine.fs` | Content-addressed file tree | +| `Snapshot` | `pants.engine.fs` | Digest with file/dir listing | +| `MergeDigests` | `pants.engine.fs` | Combine multiple digests | +| `CreateDigest` | `pants.engine.fs` | Create files from content | +| `Target` | `pants.engine.target` | A build target | +| `Field` | `pants.engine.target` | A target field | +| `FieldSet` | `pants.engine.target` | Subset of fields for a rule | +| `Address` | `pants.engine.addresses` | Target address | + +## Backend Structure + +Each backend in `src/python/pants/backend//` follows this pattern: + +### register.py (entry point) + +```python +from pants.backend.foo import target_types_rules +from pants.backend.foo.goals import test, lint +from pants.backend.foo.target_types import FooTarget, FooTestTarget + +def rules(): + return [ + *target_types_rules.rules(), + *test.rules(), + *lint.rules(), + ] + +def target_types(): + return [FooTarget, FooTestTarget] +``` + +The backend is activated in `pants.toml` via `backend_packages`: + +```toml +[GLOBAL] +backend_packages = ["pants.backend.foo"] +``` + +### Target types + +```python +from pants.engine.target import ( + Target, + SingleSourceField, + MultipleSourcesField, + Dependencies, + StringField, +) + +class FooSourceField(SingleSourceField): + expected_file_extensions = (".foo",) + +class FooTarget(Target): + alias = "foo_source" + core_fields 
= (FooSourceField, Dependencies) + help = "A Foo source file." +``` + +### Goal rules + +```python +from pants.core.goals.test import TestResult, TestRequest + +@dataclass(frozen=True) +class FooTestFieldSet(TestRequest.FieldSet): + required_fields = (FooTestSourceField,) + source: FooTestSourceField + +class FooTestRequest(TestRequest): + tool_subsystem = FooSubsystem + field_set_type = FooTestFieldSet + +@rule +async def run_foo_test(batch: FooTestRequest.Batch) -> TestResult: + ... + +def rules(): + return [ + *collect_rules(), + *FooTestRequest.rules(), + ] +``` + +## Configuration + +### pants.toml + +The main configuration file. Key sections: +- `[GLOBAL]` - Backend packages, pythonpath, sandboxing +- `[source]` - Source root patterns +- `[python]` - Python interpreter constraints, resolves, pip version +- `[pytest]` - Test runner args, resolve +- `[mypy]` - Type checker args, resolve +- `[test]` - Test timeout, env vars +- `[flake8]`, `[shellcheck]`, `[shfmt]` - Linter configs + +### Python version + +The repo currently targets **Python 3.14** (`interpreter_constraints = ["==3.14.*"]` in `pants.toml`). 
+ +## Style Guide + +### Python style + +- **f-strings**: Use f-strings, not `.format()` or `%` +- **Ternary expressions**: Prefer `x = "a" if cond else "b"` over if/else blocks +- **Early returns**: Prefer guard clauses, avoid deep nesting +- **Collection literals**: Use `{a}`, `(a, b)`, `{"k": v}` not constructors +- **Unpacking for merging**: `[*l1, *l2, "elem"]` not `l1 + l2 + ["elem"]` +- **Comprehensions**: Prefer over `map`/`filter` and explicit loops +- **Frozen dataclasses**: Always use `@dataclass(frozen=True)` for engine types +- **Type annotations**: Required on all functions (params + return type) +- **`cast()` over annotations**: Prefer `x = cast(str, untyped())` over `x: str = untyped()` +- **Pytest style**: Never use `unittest.TestCase`; use plain functions with `assert` +- **Error codes in type:ignore**: `# type: ignore[assignment]` not `# type: ignore` +- **Protocols for params**: Use `Iterable`, `Sequence`, `Mapping` not `List`, `Dict` in function params +- **Precise return types**: Return types should be concrete (`list[str]`, not `Iterable[str]`) + +### Comments + +- Must start with `# ` (space after hash) +- Must be complete sentences ending with a period +- Max 100 characters per line +- Explain **why**, not **what** +- TODOs must reference a GitHub issue: `# TODO(#1234): Description.` + +### Help strings + +- Use `softwrap()` for multiline strings +- Use `help_text()` for `Field`/`Target` subclasses if needed for mypy +- Use backticks for config sections, CLI args, target names, inline code +- Use 2-space indentation for bullet/numbered lists (or `bullet_list()`) +- Never use indentation for code blocks; use triple-backtick blocks only + +### File headers + +All source files must have the copyright header: + +```python +# Copyright 2024 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). 
+``` + +### Imports + +- Imports are auto-sorted by `isort` (via ruff) +- Prefer absolute imports +- Group: stdlib, third-party, pants imports + +## Engine-Specific Conventions + +### Types for the rule graph + +- Rule params and returns must be **hashable** and **immutable** +- Use `tuple` not `list`, `FrozenDict` not `dict`, `FrozenOrderedSet` not `set` +- Use `Collection[T]` to newtype a `tuple` +- Use `DeduplicatedCollection[T]` to newtype a `FrozenOrderedSet` +- The engine uses **exact type matching** (no subtype consideration) +- Newtype freely to disambiguate: `class Name(str): pass` + +### Rule patterns + +```python +# Standard rule with collect_rules +from pants.engine.rules import collect_rules, rule + +@rule +async def my_rule(request: MyRequest) -> MyResult: + ... + +def rules(): + return collect_rules() +``` + +```python +# Using concurrently (NEVER await in a loop) +results = await concurrently( + process_item(item, **implicitly()) for item in items +) +``` + +```python +# Running a process +from pants.engine.intrinsics import execute_process +from pants.engine.process import Process + +result = await execute_process( + Process( + argv=["tool", "--flag", "arg"], + input_digest=my_digest, + description="Running tool", + ), + **implicitly() +) +``` + +### Registering rules + +Each module has a `rules()` function, aggregated in `register.py`: + +```python +# In each module +def rules(): + return collect_rules() + +# In register.py +def rules(): + return [ + *module1.rules(), + *module2.rules(), + ] + +def target_types(): + return [Target1, Target2] +``` + +## Testing + +### Test function naming + +- Test files: `*_test.py` (NOT `test_*.py`) +- Test functions: `def test_descriptive_name() -> None:` +- All test functions must have `-> None` return annotation + +### Integration vs unit tests + +- Unit tests: `*_test.py` - run in normal test target +- Integration tests: `*_integration_test.py` - get their own BUILD target with longer timeouts + +```python +# 
In BUILD files, integration tests are separated: +python_tests( + name="tests", + sources=["*_test.py", "!*_integration_test.py"], +) +python_tests( + name="integration", + sources=["*_integration_test.py"], + timeout=240, +) +``` + +### Skipping tests + +The repo conftest.py enforces `--noskip` - skipped tests are treated as errors. If a test legitimately needs to be skippable (e.g., platform-specific), mark it: + +```python +@pytest.mark.no_error_if_skipped +def test_platform_specific() -> None: + ... +``` + +### RuleRunner fixtures + +```python +@pytest.fixture +def rule_runner() -> RuleRunner: + return RuleRunner( + rules=[*my_rules(), QueryRule(Output, [Input])], + target_types=[MyTarget], + ) +``` + +- Each test gets a fresh `RuleRunner` instance (via fixture) +- Use `rule_runner.write_files()` to set up test content +- Use `rule_runner.set_options()` to configure options +- Use `rule_runner.request()` to invoke rules +- Use `rule_runner.run_goal_rule()` for goal rules + +### Testing approaches + +**Unit tests** - Test pure Python functions without the engine: +```python +def test_my_helper() -> None: + assert my_function("input") == "expected" +``` + +**run_rule_with_mocks** - Test rule logic with mocked dependencies: +```python +from pants.testutil.rule_runner import run_rule_with_mocks + +def test_my_rule() -> None: + result = run_rule_with_mocks( + my_rule, + rule_args=[MyRequest(...)], + mock_calls={ + "my.module.some_dependency": lambda req: MockResult(...), + }, + ) + assert result == expected +``` + +**RuleRunner** - Test rules with a real engine and isolated filesystem: +```python +def test_integration(rule_runner: RuleRunner) -> None: + rule_runner.write_files({ + "src/BUILD": "my_target(source='f.py')", + "src/f.py": "content", + }) + rule_runner.set_options(["--my-option=value"]) + result = rule_runner.request(MyOutput, [MyInput(...)]) + assert result.field == expected +``` + +**run_pants()** - Full integration tests: +```python +from 
pants.testutil.pants_integration_test import run_pants, setup_tmpdir + +def test_end_to_end() -> None: + sources = { + "src/BUILD": "python_sources()", + "src/app.py": "print('hello')", + } + with setup_tmpdir(sources) as tmpdir: + result = run_pants([ + "--backend-packages=['pants.backend.python']", + "lint", + f"{tmpdir}/src:", + ]) + result.assert_success() +``` + +## PR and Contribution Workflow + +### Before submitting + +1. Format: `pants --changed-since=HEAD fmt` +2. Lint: `pants --changed-since=HEAD lint` +3. Type-check: `pants --changed-since=HEAD check` +4. Test: `pants --changed-since=HEAD --changed-dependents=transitive test` +5. Or run the pre-push hook: `build-support/githooks/pre-push` + +### Commit messages + +- Start with a verb (Add, Fix, Update, Remove, Refactor) +- Be concise but descriptive +- Reference GitHub issues where applicable + +## Maintenance Tasks + +### Updating external tool versions + +For `ExternalTool` subclasses (downloaded binaries): +1. Download new version for each platform +2. Compute sha256 and byte length: `tee >(shasum -a 256) >(wc -c) > /dev/null < archive` +3. Update `default_version` and `default_known_versions` + +### Updating Python tool versions + +For `PythonToolBase` subclasses (PyPI packages): +1. Update `default_requirements` and/or `default_interpreter_constraints` +2. Run `build-support/bin/generate_builtin_lockfiles.py ` + +### Updating PEX + +PEX is special - update both: +1. The `pex-cli` subsystem in `src/python/pants/backend/python/util_rules/pex_cli.py` +2. The requirement in `3rdparty/python/requirements.txt` and regenerate lockfile diff --git a/src/python/pants/backend/docker/engine_types.py b/src/python/pants/backend/docker/engine_types.py new file mode 100644 index 00000000000..ca6b4c01664 --- /dev/null +++ b/src/python/pants/backend/docker/engine_types.py @@ -0,0 +1,27 @@ +# Copyright 2026 Pants project contributors (see CONTRIBUTORS.md). 
+# Licensed under the Apache License, Version 2.0 (see LICENSE). +from dataclasses import dataclass +from enum import Enum + + +class DockerBuildEngine(Enum): + DOCKER = "docker" + BUILDCTL = "buildctl" + PODMAN = "podman" + + +class DockerPushEngine(Enum): + DOCKER = "docker" + PODMAN = "podman" + + +class DockerRunEngine(Enum): + DOCKER = "docker" + PODMAN = "podman" + + +@dataclass(frozen=True) +class DockerEngines: + build: DockerBuildEngine = DockerBuildEngine.DOCKER + push: DockerPushEngine = DockerPushEngine.DOCKER + run: DockerRunEngine = DockerRunEngine.DOCKER diff --git a/src/python/pants/backend/docker/goals/package_image.py b/src/python/pants/backend/docker/goals/package_image.py index 801351d4972..035d58d4efe 100644 --- a/src/python/pants/backend/docker/goals/package_image.py +++ b/src/python/pants/backend/docker/goals/package_image.py @@ -12,6 +12,7 @@ from itertools import chain from typing import Literal, cast +from pants.backend.docker.engine_types import DockerBuildEngine from pants.backend.docker.package_types import ( BuiltDockerImage, DockerPushOnPackageBehavior, @@ -20,13 +21,6 @@ from pants.backend.docker.registries import DockerRegistries, DockerRegistryOptions from pants.backend.docker.subsystems.docker_options import DockerOptions from pants.backend.docker.target_types import ( - DockerBuildKitOptionField, - DockerBuildOptionFieldListOfMultiValueDictMixin, - DockerBuildOptionFieldMixin, - DockerBuildOptionFieldMultiValueDictMixin, - DockerBuildOptionFieldMultiValueMixin, - DockerBuildOptionFieldValueMixin, - DockerBuildOptionFlagFieldMixin, DockerImageBuildImageOutputField, DockerImageContextRootField, DockerImageRegistriesField, @@ -35,9 +29,18 @@ DockerImageTagsField, DockerImageTagsRequest, DockerImageTargetStageField, + OptionValueFormatter, + ValidateOptionsMixin, get_docker_image_tags, ) -from pants.backend.docker.util_rules.docker_binary import DockerBinary +from pants.backend.docker.util_rules.binaries import ( + BuildctlBinary, + 
DockerBinary, + PodmanBinary, + get_buildctl, + get_docker, + get_podman, +) from pants.backend.docker.util_rules.docker_build_context import ( DockerBuildContext, DockerBuildContextRequest, @@ -45,6 +48,7 @@ ) from pants.backend.docker.utils import format_rename_suggestion from pants.core.goals.package import BuiltPackage, OutputPathField, PackageFieldSet +from pants.core.goals.publish import PublishFieldSet from pants.engine.collection import Collection from pants.engine.fs import EMPTY_DIGEST, CreateDigest, FileContent from pants.engine.internals.graph import resolve_target @@ -91,8 +95,9 @@ class DockerPackageFieldSet(PackageFieldSet): def pushes_on_package(self) -> bool: """Returns True if this docker_image target would push to a registry during packaging.""" - value_or_default = self.output.value or self.output.default - return value_or_default.get("push") == "true" or value_or_default["type"] == "registry" + return bool(self.output.value) and ( + self.output.value.get("push") == "true" or self.output.value["type"] == "registry" + ) def format_tag(self, tag: str, interpolation_context: InterpolationContext) -> str: source = InterpolationContext.TextSource( @@ -323,73 +328,54 @@ class DockerInfoV1ImageTag: name: str +def get_value_formatter( + context: DockerBuildContext, target: Target, field_alias: str +) -> OptionValueFormatter: + return partial( + context.interpolation_context.format, + source=InterpolationContext.TextSource( + address=target.address, target_alias=target.alias, field_alias=field_alias + ), + error_cls=DockerImageOptionValueError, + ) + + def get_build_options( context: DockerBuildContext, - field_set: DockerPackageFieldSet, - global_target_stage_option: str | None, - global_build_hosts_options: dict | None, - global_build_no_cache_option: bool | None, - use_buildx_option: bool, + docker_options: DockerOptions, target: Target, ) -> Iterator[str]: - # Build options from target fields inheriting from DockerBuildOptionFieldMixin + 
gen_options_func_name = ( + "buildctl_options" + if docker_options.build_engine == DockerBuildEngine.BUILDCTL + else "docker_build_options" + ) for field_type in target.field_types: - if issubclass(field_type, DockerBuildKitOptionField): - if use_buildx_option is not True: - if target[field_type].value != target[field_type].default: - raise DockerImageOptionValueError( - f"The {target[field_type].alias} field on the = `{target.alias}` target in `{target.address}` was set to `{target[field_type].value}`" - f" and buildx is not enabled. Buildx must be enabled via the Docker subsystem options in order to use this field." - ) - else: - # Case where BuildKit option has a default value - still should not be generated - continue - - if issubclass( - field_type, - ( - DockerBuildOptionFieldMixin, - DockerBuildOptionFieldMultiValueDictMixin, - DockerBuildOptionFieldListOfMultiValueDictMixin, - DockerBuildOptionFieldValueMixin, - DockerBuildOptionFieldMultiValueMixin, - DockerBuildOptionFlagFieldMixin, - ), + if ( + issubclass(field_type, ValidateOptionsMixin) + and target[field_type].validate_options(docker_options, context) + and (gen_options_func := getattr(target[field_type], gen_options_func_name, None)) ): - source = InterpolationContext.TextSource( - address=target.address, target_alias=target.alias, field_alias=field_type.alias - ) - format = partial( - context.interpolation_context.format, - source=source, - error_cls=DockerImageOptionValueError, - ) - yield from target[field_type].options( - format, global_build_hosts_options=global_build_hosts_options + yield from gen_options_func( + docker=docker_options, + value_formatter=get_value_formatter(context, target, field_type.alias), ) - # Target stage - target_stage = None - if global_target_stage_option in context.stages: - target_stage = global_target_stage_option - elif field_set.target_stage.value: - target_stage = field_set.target_stage.value - if target_stage not in context.stages: - raise 
DockerBuildTargetStageError( - f"The {field_set.target_stage.alias!r} field in `{target.alias}` " - f"{field_set.address} was set to {target_stage!r}" - + ( - f", but there is no such stage in `{context.dockerfile}`. " - f"Available stages: {', '.join(context.stages)}." - if context.stages - else f", but there are no named stages in `{context.dockerfile}`." - ) - ) - - if target_stage: - yield from ("--target", target_stage) + # Special handling for global options + if docker_options.build_target_stage in context.stages: + compute_options_func = ( + DockerImageTargetStageField.compute_buildctl_options + if docker_options.build_engine == DockerBuildEngine.BUILDCTL + else DockerImageTargetStageField.compute_docker_build_options + ) + yield from compute_options_func( + docker_options.build_target_stage, + docker=docker_options, + value_formatter=get_value_formatter(context, target, DockerImageTargetStageField.alias), + ) - if global_build_no_cache_option: + # This is the same for docker and buildkit + if docker_options.build_no_cache: yield "--no-cache" @@ -451,7 +437,7 @@ class DockerImageBuildProcess: @rule async def get_docker_image_build_process( - field_set: DockerPackageFieldSet, options: DockerOptions, docker: DockerBinary + field_set: DockerPackageFieldSet, options: DockerOptions ) -> DockerImageBuildProcess: context, wrapped_target, image_refs = await concurrently( create_docker_build_context( @@ -493,25 +479,31 @@ async def get_docker_image_build_process( "__UPSTREAM_IMAGE_IDS": ",".join(context.upstream_image_ids), } context_root = field_set.get_context_root(options.default_context_root) - process = docker.build_image( + binary: BuildctlBinary | PodmanBinary | DockerBinary + match options.build_engine: + case DockerBuildEngine.BUILDCTL: + binary = await get_buildctl(**implicitly()) + case DockerBuildEngine.PODMAN: + binary = await get_podman(**implicitly()) + case _: + binary = await get_docker(**implicitly()) + + process = binary.build_image( 
build_args=context.build_args, digest=context.digest, dockerfile=context.dockerfile, context_root=context_root, env=env, tags=tags, - use_buildx=options.use_buildx, + output=field_set.output.value, extra_args=tuple( get_build_options( context=context, - field_set=field_set, - global_target_stage_option=options.build_target_stage, - global_build_hosts_options=options.build_hosts, - global_build_no_cache_option=options.build_no_cache, - use_buildx_option=options.use_buildx, + docker_options=options, target=wrapped_target.target, ) ), + is_publish=isinstance(field_set, PublishFieldSet), ) return DockerImageBuildProcess( process=process, @@ -527,7 +519,6 @@ async def build_docker_image( field_set: DockerPackageFieldSet, options: DockerOptions, global_options: GlobalOptions, - docker: DockerBinary, keep_sandboxes: KeepSandboxes, ) -> BuiltPackage: """Build a Docker image using `docker build`.""" @@ -547,7 +538,7 @@ async def build_docker_image( result = await execute_process(build_process.process, **implicitly()) if result.exit_code != 0: - msg = f"Docker build failed for `docker_image` {field_set.address}." + msg = f"{options.build_engine.value.capitalize()} build failed for `docker_image` {field_set.address}." 
if options.suggest_renames: maybe_help_msg = format_docker_build_context_help_message( context_root=build_process.context_root, @@ -567,10 +558,15 @@ async def build_docker_image( keep_sandboxes=keep_sandboxes, ) - image_id = parse_image_id_from_docker_build_output(docker, result.stdout, result.stderr) + parse_image_id = ( + parse_image_id_from_podman_build_output + if options.build_engine == DockerBuildEngine.PODMAN + else parse_image_id_from_buildkit_output + ) + image_id = parse_image_id(result.stdout, result.stderr) or "" docker_build_output_msg = "\n".join( ( - f"Docker build output for {build_process.tags[0]}:", + f"{options.build_engine.value.capitalize()} build output for {build_process.tags[0]}:", "stdout:", result.stdout.decode(), "stderr:", @@ -593,58 +589,62 @@ async def build_docker_image( ) -def parse_image_id_from_docker_build_output(docker: DockerBinary, *outputs: bytes) -> str: +def parse_image_id_from_buildkit_output(*outputs: bytes) -> str | None: """Outputs are typically the stdout/stderr pair from the `docker build` process.""" # NB: We use the extracted image id for invalidation. The short_id may theoretically # not be unique enough, although in a non adversarial situation, this is highly unlikely # to be an issue in practice. - if docker.is_podman: - for output in outputs: - try: - _, image_id, success, *__ = reversed(output.decode().split("\n")) - except ValueError: - continue - - if success.startswith("Successfully tagged"): - return image_id - - else: - image_id_regexp = re.compile( - "|".join( - ( - # BuildKit output. - r"(writing image (?Psha256:\S+))", - # BuildKit with containerd-snapshotter output. - r"(exporting manifest list (?Psha256:\S+))", - # BuildKit with containerd-snapshotter output and no attestation. - r"(exporting manifest (?Psha256:\S+))", - # Docker output. - r"(Successfully built (?P\S+))", - ), - ) + image_id_regexp = re.compile( + "|".join( + ( + # BuildKit output. 
+ r"(writing image (?Psha256:\S+))", + # Buildkit with --push=true output. + r"(pushing manifest for (?P\S+))", + # BuildKit with containerd-snapshotter output. + r"(exporting manifest list (?Psha256:\S+))", + # BuildKit with containerd-snapshotter output and no attestation. + r"(exporting manifest (?Psha256:\S+))", + # Docker output. + r"(Successfully built (?P\S+))", + ), ) - for output in outputs: - image_id_match = next( - ( - match - for match in ( - re.search(image_id_regexp, line) - for line in reversed(output.decode().split("\n")) - ) - if match - ), - None, - ) - if image_id_match: - image_id = ( - image_id_match.group("digest") - or image_id_match.group("short_id") - or image_id_match.group("manifest_list") - or image_id_match.group("manifest") + ) + for output in outputs: + image_id_match = next( + ( + match + for match in ( + re.search(image_id_regexp, line) + for line in reversed(output.decode().split("\n")) ) - return image_id + if match + ), + None, + ) + if image_id_match: + image_id = ( + image_id_match.group("digest") + or image_id_match.group("pushed_manifest") + or image_id_match.group("short_id") + or image_id_match.group("manifest_list") + or image_id_match.group("manifest") + ) + return image_id + + return None + + +def parse_image_id_from_podman_build_output(*outputs: bytes) -> str | None: + for output in outputs: + try: + _, image_id, success, *__ = reversed(output.decode().split("\n")) + except ValueError: + continue - return "" + if success.startswith("Successfully tagged"): + return image_id + return None def format_docker_build_context_help_message( diff --git a/src/python/pants/backend/docker/goals/package_image_test.py b/src/python/pants/backend/docker/goals/package_image_test.py index 9f769c39e7e..cc6b9c2cb06 100644 --- a/src/python/pants/backend/docker/goals/package_image_test.py +++ b/src/python/pants/backend/docker/goals/package_image_test.py @@ -13,10 +13,9 @@ import pytest +from pants.backend.docker.engine_types import 
DockerBuildEngine, DockerEngines from pants.backend.docker.goals.package_image import ( - DockerBuildTargetStageError, DockerImageBuildProcess, - DockerImageOptionValueError, DockerImageRefs, DockerImageTagValueError, DockerInfoV1, @@ -28,7 +27,8 @@ build_docker_image, get_docker_image_build_process, get_image_refs, - parse_image_id_from_docker_build_output, + parse_image_id_from_buildkit_output, + parse_image_id_from_podman_build_output, rules, ) from pants.backend.docker.package_types import ( @@ -44,7 +44,7 @@ DockerImageTagsRequest, DockerImageTarget, ) -from pants.backend.docker.util_rules.docker_binary import DockerBinary +from pants.backend.docker.util_rules.binaries import BuildctlBinary, DockerBinary, PodmanBinary from pants.backend.docker.util_rules.docker_build_args import ( DockerBuildArgs, DockerBuildArgsRequest, @@ -141,7 +141,7 @@ def build_context_mock(request: DockerBuildContextRequest) -> DockerBuildContext def _setup_docker_options(rule_runner: RuleRunner, options: dict | None) -> DockerOptions: """Setup DockerOptions with sensible defaults.""" - if options: + if options is not None: opts = options.copy() opts.setdefault("registries", {}) opts.setdefault("default_repository", "{name}") @@ -151,7 +151,7 @@ def _setup_docker_options(rule_runner: RuleRunner, options: dict | None) -> Dock opts.setdefault("build_hosts", None) opts.setdefault("build_verbose", False) opts.setdefault("build_no_cache", False) - opts.setdefault("use_buildx", False) + opts.setdefault("engine", DockerEngines()) opts.setdefault("env_vars", []) opts.setdefault("suggest_renames", True) opts.setdefault("push_on_package", DockerPushOnPackageBehavior.WARN) @@ -178,6 +178,7 @@ def assert_build_process( build_context_snapshot: Snapshot = EMPTY_SNAPSHOT, version_tags: tuple[str, ...] 
= (), image_refs: DockerImageRefs | None = None, + binary: DockerBinary | PodmanBinary | BuildctlBinary = DockerBinary("/dummy/docker"), ) -> DockerImageBuildProcess: """Test helper for get_docker_image_build_process rule. @@ -214,17 +215,25 @@ def assert_build_process( ) docker_options = _setup_docker_options(rule_runner, options) + match binary: + case BuildctlBinary(): + binary_rule_path = "pants.backend.docker.util_rules.binaries.get_buildctl" + case PodmanBinary(): + binary_rule_path = "pants.backend.docker.util_rules.binaries.get_podman" + case _: + binary_rule_path = "pants.backend.docker.util_rules.binaries.get_docker" + result = run_rule_with_mocks( get_docker_image_build_process, rule_args=[ DockerPackageFieldSet.create(tgt), docker_options, - DockerBinary("/dummy/docker"), ], mock_calls={ "pants.backend.docker.util_rules.docker_build_context.create_docker_build_context": build_context_mock, "pants.engine.internals.graph.resolve_target": lambda _: WrappedTarget(tgt), "pants.backend.docker.goals.package_image.get_image_refs": lambda _: image_refs, + binary_rule_path: lambda *args: binary, }, union_membership=_create_union_membership(), show_warnings=False, @@ -980,7 +989,6 @@ def mock_create_digest(request: CreateDigest) -> Digest: under_test_fs, docker_options, global_options, - DockerBinary("/dummy/docker"), KeepSandboxes.never, ], mock_calls={ @@ -1406,10 +1414,8 @@ def test_docker_cache_to_option(rule_runner: RuleRunner) -> None: def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", - "buildx", "build", "--cache-to=type=local,dest=/tmp/docker/pants-test-cache", - "--output=type=docker", "--pull=False", "--tag", "img1:latest", @@ -1443,11 +1449,9 @@ def test_docker_cache_from_option(rule_runner: RuleRunner) -> None: def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", - "buildx", "build", 
"--cache-from=type=local,dest=/tmp/docker/pants-test-cache1", "--cache-from=type=local,dest=/tmp/docker/pants-test-cache2", - "--output=type=docker", "--pull=False", "--tag", "img1:latest", @@ -1496,7 +1500,6 @@ def test_docker_output_option( def check_build_process(result: DockerImageBuildProcess) -> None: assert result.process.argv == ( "/dummy/docker", - "buildx", "build", expected_output_arg, "--pull=False", @@ -1549,7 +1552,6 @@ def test_docker_output_option_when_push_on_package_error( def check_build_process(result: DockerImageBuildProcess) -> None: assert result.process.argv == ( "/dummy/docker", - "buildx", "build", expected_output_arg, "--pull=False", @@ -1611,7 +1613,6 @@ def mock_get_build_process_success(field_set: DockerPackageFieldSet) -> DockerIm under_test_fs, docker_options, global_options, - DockerBinary("/dummy/docker"), KeepSandboxes.never, ], mock_calls={ @@ -1668,7 +1669,6 @@ def test_docker_output_option_when_push_on_package_warn( def check_build_process(result: DockerImageBuildProcess) -> None: assert result.process.argv == ( "/dummy/docker", - "buildx", "build", expected_output_arg, "--pull=False", @@ -1734,7 +1734,6 @@ def mock_create_digest(_request: CreateDigest) -> Digest: under_test_fs, docker_options, global_options, - DockerBinary("/dummy/docker"), KeepSandboxes.never, ], mock_calls={ @@ -1755,7 +1754,8 @@ def mock_create_digest(_request: CreateDigest) -> Digest: @pytest.mark.parametrize( ["output", "expected_output_arg"], [ - (None, "--output=type=docker"), + (None, None), + ({"type": "docker"}, "--output=type=docker"), ({"type": "registry"}, None), ({"type": "image", "push": "true"}, None), ], @@ -1783,14 +1783,14 @@ def test_docker_output_option_when_push_on_package_ignore( tgt = rule_runner.get_target(Address("docker/test", target_name="img1")) under_test_fs = DockerPackageFieldSet.create(tgt) - if expected_output_arg: + if expected_output_arg or output is None: # Step 1: Validate Process construction using assert_build_process + 
output_args = [expected_output_arg] if expected_output_arg else [] def check_build_process(result: DockerImageBuildProcess) -> None: assert result.process.argv == ( "/dummy/docker", - "buildx", "build", - expected_output_arg, + *output_args, "--pull=False", "--tag", "img1:latest", @@ -1852,7 +1852,6 @@ def mock_create_digest(_request: CreateDigest) -> Digest: under_test_fs, docker_options, global_options, - DockerBinary("/dummy/docker"), KeepSandboxes.never, ], mock_calls=mock_calls, @@ -1863,30 +1862,6 @@ def mock_create_digest(_request: CreateDigest) -> Digest: assert len(result.artifacts) == (1 if expected_output_arg else 0) -def test_docker_output_option_raises_when_no_buildkit(rule_runner: RuleRunner) -> None: - rule_runner.write_files( - { - "docker/test/BUILD": dedent( - """\ - docker_image( - name="img1", - output={"type": "image"} - ) - """ - ), - } - ) - - with pytest.raises( - DockerImageOptionValueError, - match=r"Buildx must be enabled via the Docker subsystem options in order to use this field.", - ): - assert_build_process( - rule_runner, - Address("docker/test", target_name="img1"), - ) - - def test_docker_build_network_option(rule_runner: RuleRunner) -> None: rule_runner.write_files( { @@ -2080,10 +2055,6 @@ def test_docker_build_fail_logs( rule_runner.write_files({"docker/test/BUILD": f"docker_image(context_root={context_root!r})"}) build_context_files = ("docker/test/Dockerfile", *build_context_files) build_context_snapshot = rule_runner.make_snapshot_of_empty_files(build_context_files) - suggest_renames_arg = ( - "--docker-suggest-renames" if suggest_renames else "--no-docker-suggest-renames" - ) - rule_runner.set_options([suggest_renames_arg]) # Step 1: Get the build process tgt = rule_runner.get_target(Address("docker/test")) @@ -2092,7 +2063,7 @@ def test_docker_build_fail_logs( build_context_mock = _create_build_context_mock( rule_runner, address, build_context_snapshot, copy_sources, (), () ) - docker_options = 
_setup_docker_options(rule_runner, None) + docker_options = _setup_docker_options(rule_runner, {"suggest_renames": suggest_renames}) global_options = rule_runner.request(GlobalOptions, []) # Get image refs @@ -2119,17 +2090,18 @@ def test_docker_build_fail_logs( # Step 2: Create the build process with the get_docker_image_build_process rule under_test_fs = DockerPackageFieldSet.create(tgt) + docker = DockerBinary("/dummy/docker") build_process = run_rule_with_mocks( get_docker_image_build_process, rule_args=[ under_test_fs, docker_options, - DockerBinary("/dummy/docker"), ], mock_calls={ "pants.backend.docker.util_rules.docker_build_context.create_docker_build_context": build_context_mock, "pants.engine.internals.graph.resolve_target": lambda _: WrappedTarget(tgt), "pants.backend.docker.goals.package_image.get_image_refs": lambda _: image_refs, + "pants.backend.docker.util_rules.binaries.get_docker": lambda *args: docker, }, show_warnings=False, ) @@ -2171,7 +2143,6 @@ def mock_execute_process(_process: Process) -> FallibleProcessResult: under_test_fs, docker_options, global_options, - DockerBinary("/dummy/docker"), KeepSandboxes.never, ], mock_calls={ @@ -2217,8 +2188,7 @@ def check_build_process(result: DockerImageBuildProcess): "/dummy/docker", "build", "--pull=False", - "--target", - expected_target, + f"--target={expected_target}", "--tag", "image:latest", "--file", @@ -2236,6 +2206,7 @@ def check_build_process(result: DockerImageBuildProcess): def test_invalid_build_target_stage(rule_runner: RuleRunner) -> None: + """An invalid target_stage is passed through to Docker which will report the error.""" rule_runner.write_files( { "BUILD": "docker_image(name='image', target_stage='bad')", @@ -2249,17 +2220,26 @@ def test_invalid_build_target_stage(rule_runner: RuleRunner) -> None: } ) - err = ( - r"The 'target_stage' field in `docker_image` //:image was set to 'bad', but there is no " - r"such stage in `Dockerfile`\. Available stages: build, dev, prod\." 
- ) - with pytest.raises(DockerBuildTargetStageError, match=err): - assert_build_process( - rule_runner, - Address("", target_name="image"), - version_tags=("build latest", "dev latest", "prod latest"), + def check_build_process(result: DockerImageBuildProcess): + assert result.process.argv == ( + "/dummy/docker", + "build", + "--pull=False", + "--target=bad", + "--tag", + "image:latest", + "--file", + "Dockerfile", + ".", ) + assert_build_process( + rule_runner, + Address("", target_name="image"), + build_process_assertions=check_build_process, + version_tags=("build latest", "dev latest", "prod latest"), + ) + @pytest.mark.parametrize( "default_context_root, context_root, expected_context_root", @@ -2322,14 +2302,14 @@ def test_get_context_root( "docker, expected, stdout, stderr", [ ( - DockerBinary("/bin/docker", "1234", is_podman=False), - "", + DockerBinary("/bin/docker", "1234"), + None, "", "", ), # Docker ( - DockerBinary("/bin/docker", "1234", is_podman=False), + DockerBinary("/bin/docker", "1234"), "0e09b442b572", "", dedent( @@ -2345,7 +2325,7 @@ def test_get_context_root( ), # Buildkit without step duration ( - DockerBinary("/bin/docker", "1234", is_podman=False), + DockerBinary("/bin/docker", "1234"), "sha256:7805a7da5f45a70bb9e47e8de09b1f5acd8f479dda06fb144c5590b9d2b86dd7", dedent( """\ @@ -2368,7 +2348,7 @@ def test_get_context_root( ), # Buildkit with step duration ( - DockerBinary("/bin/docker", "1234", is_podman=False), + DockerBinary("/bin/docker", "1234"), "sha256:7805a7da5f45a70bb9e47e8de09b1f5acd8f479dda06fb144c5590b9d2b86dd7", dedent( """\ @@ -2387,7 +2367,7 @@ def test_get_context_root( ), # Buildkit with containerd-snapshotter 0.12.1 ( - DockerBinary("/bin/docker", "1234", is_podman=False), + DockerBinary("/bin/docker", "1234"), "sha256:b2b51838586286a9e544ddb31b3dbf7f6a99654d275b6e56b5f69f90138b4c0e", dedent( """\ @@ -2406,7 +2386,7 @@ def test_get_context_root( ), # Buildkit with containerd-snapshotter and cross platform 0.12.1 ( - 
DockerBinary("/bin/docker", "1234", is_podman=False), + DockerBinary("/bin/docker", "1234"), "sha256:3c72de0e05bb75247e68e124e6500700f6e0597425db2ee9f08fd59ef28cea0f", dedent( """\ @@ -2428,7 +2408,7 @@ def test_get_context_root( ), # Buildkit with containerd-snapshotter 0.13.1 ( - DockerBinary("/bin/docker", "1234", is_podman=False), + DockerBinary("/bin/docker", "1234"), "sha256:d15432046b4feaebb70370fad4710151dd8f0b9741cb8bc4d20c08ed8847f17a", dedent( """\ @@ -2451,7 +2431,7 @@ def test_get_context_root( ), # Buildkit with containerd-snapshotter 0.17.1 and disabled attestations ( - DockerBinary("/bin/docker", "1234", is_podman=False), + DockerBinary("/bin/docker", "1234"), "sha256:6c3aff6414781126578b3e7b4a217682e89c616c0eac864d5b3ea7c87f1094d0", dedent( """\ @@ -2469,28 +2449,27 @@ def test_get_context_root( ), "", ), - # Podman - ( - DockerBinary("/bin/podman", "abcd", is_podman=True), - "a85499e9039a4add9712f7ea96a4aa9f0edd57d1008c6565822561ceed927eee", - dedent( - """\ - STEP 5/5: COPY ./ . - COMMIT example - --> a85499e9039a - Successfully tagged localhost/example:latest - a85499e9039a4add9712f7ea96a4aa9f0edd57d1008c6565822561ceed927eee - """ - ), - "", - ), ], ) def test_parse_image_id_from_docker_build_output( - docker: DockerBinary, expected: str, stdout: str, stderr: str + docker: DockerBinary, expected: str | None, stdout: str, stderr: str ) -> None: - assert expected == parse_image_id_from_docker_build_output( - docker, stdout.encode(), stderr.encode() + assert expected == parse_image_id_from_buildkit_output(stdout.encode(), stderr.encode()) + + +def test_parse_image_id_from_podman_build_output() -> None: + stdout = dedent( + """\ + STEP 5/5: COPY ./ . 
+ COMMIT example + --> a85499e9039a + Successfully tagged localhost/example:latest + a85499e9039a4add9712f7ea96a4aa9f0edd57d1008c6565822561ceed927eee + """ + ) + assert ( + "a85499e9039a4add9712f7ea96a4aa9f0edd57d1008c6565822561ceed927eee" + == parse_image_id_from_podman_build_output(stdout.encode(), b"") ) @@ -2972,3 +2951,61 @@ def test_field_set_pushes_on_package(output: dict | None, expected: bool) -> Non rule_runner.get_target(Address("", target_name="image")) ) assert field_set.pushes_on_package() is expected + + +def test_docker_build_process_buildctl_engine(rule_runner: RuleRunner) -> None: + rule_runner.write_files( + { + "docker/test/BUILD": dedent( + """\ + docker_image( + name="img1", + output={"type": "image", "push": "true"}, + secrets={ + "system-secret": "/var/run/secrets/mysecret", + "target-secret": "./mysecret", + }, + ssh=["default"], + image_labels={ + "version": "1.0", + }, + build_platform=["linux/amd64", "linux/arm64"], + ) + """ + ), + } + ) + + def check_build_process(result: DockerImageBuildProcess) -> None: + assert result.process.argv == ( + "/dummy/buildctl", + "build", + "--frontend", + "dockerfile.v0", + "--local", + "context=.", + "--local", + "dockerfile=docker/test", + "--opt", + "filename=Dockerfile", + "--opt", + "platform=linux/amd64,linux/arm64", + "--opt", + "label:version=1.0", + "--secret", + "id=system-secret,src=/var/run/secrets/mysecret", + "--secret", + f"id=target-secret,src={rule_runner.build_root}/docker/test/mysecret", + "--ssh", + "default", + "--output", + "type=image,name=img1:latest,push=true", + ) + + assert_build_process( + rule_runner, + Address("docker/test", target_name="img1"), + options=dict(engine=DockerEngines(build=DockerBuildEngine.BUILDCTL)), + binary=BuildctlBinary("/dummy/buildctl"), + build_process_assertions=check_build_process, + ) diff --git a/src/python/pants/backend/docker/goals/publish.py b/src/python/pants/backend/docker/goals/publish.py index eeae6b6dbe1..2b998deff94 100644 ---
a/src/python/pants/backend/docker/goals/publish.py +++ b/src/python/pants/backend/docker/goals/publish.py @@ -9,6 +9,7 @@ from itertools import chain from typing import DefaultDict, cast +from pants.backend.docker.engine_types import DockerBuildEngine, DockerPushEngine from pants.backend.docker.goals.package_image import ( DockerPackageFieldSet, GetImageRefsRequest, @@ -19,7 +20,12 @@ from pants.backend.docker.registries import DockerRegistryOptions from pants.backend.docker.subsystems.docker_options import DockerOptions from pants.backend.docker.target_types import DockerImageRegistriesField, DockerImageSkipPushField -from pants.backend.docker.util_rules.docker_binary import DockerBinary +from pants.backend.docker.util_rules.binaries import ( + DockerBinary, + PodmanBinary, + get_docker, + get_podman, +) from pants.core.goals.package import PackageFieldSet from pants.core.goals.publish import ( CheckSkipRequest, @@ -106,7 +112,7 @@ async def check_if_skip_push( ) return ( CheckSkipResult.skip(skip_packaging_only=True) - if request.package_fs.pushes_on_package() + if request.package_fs.pushes_on_package() or options.build_engine == DockerBuildEngine.BUILDCTL else CheckSkipResult.no_skip() ) @@ -114,11 +120,10 @@ async def check_if_skip_push( @rule async def push_docker_images( request: PublishDockerImageRequest, - docker: DockerBinary, options: DockerOptions, options_env_aware: DockerOptions.EnvironmentAware, ) -> PublishProcesses: - if cast(DockerPackageFieldSet, request.field_set).pushes_on_package(): + if cast(PublishDockerImageFieldSet, request.field_set).pushes_on_package() or options.build_engine == DockerBuildEngine.BUILDCTL: build_process = await get_docker_image_build_process(request.field_set, **implicitly()) return PublishProcesses( [ @@ -148,6 +153,12 @@ async def push_docker_images( jobs: list[PublishPackages] = [] refs: list[str] = [] processes: list[Process | InteractiveProcess] = [] + binary: DockerBinary | PodmanBinary + match options.push_engine: 
+ case DockerPushEngine.DOCKER: + binary = await get_docker(**implicitly()) + case DockerPushEngine.PODMAN: + binary = await get_podman(**implicitly()) for tag in tags: for registry in options.registries().registries.values(): @@ -159,7 +170,7 @@ async def push_docker_images( break else: refs.append(tag) - push_process = docker.push_image(tag, env) + push_process = binary.push_image(tag, env) if options.publish_noninteractively: processes.append(push_process) else: diff --git a/src/python/pants/backend/docker/goals/publish_test.py b/src/python/pants/backend/docker/goals/publish_test.py index b063fde3a56..7738407616d 100644 --- a/src/python/pants/backend/docker/goals/publish_test.py +++ b/src/python/pants/backend/docker/goals/publish_test.py @@ -9,6 +9,7 @@ import pytest +from pants.backend.docker.engine_types import DockerEngines from pants.backend.docker.goals.package_image import ( DockerImageBuildProcess, DockerImageRefs, @@ -27,7 +28,7 @@ from pants.backend.docker.registries import DockerRegistryOptions from pants.backend.docker.subsystems.docker_options import DockerOptions from pants.backend.docker.target_types import DockerImageTarget -from pants.backend.docker.util_rules.docker_binary import DockerBinary +from pants.backend.docker.util_rules.binaries import DockerBinary, PodmanBinary from pants.core.goals.package import BuiltPackage from pants.core.goals.publish import ( CheckSkipResult, @@ -96,16 +97,17 @@ def run_publish( options: dict | None = None, env_vars: list[str] | None = None, mock_calls: dict | None = None, -) -> tuple[PublishProcesses, DockerBinary]: + binary: DockerBinary | PodmanBinary = DockerBinary("/dummy/docker"), +) -> tuple[PublishProcesses, DockerBinary | PodmanBinary]: opts = options or {} opts.setdefault("registries", {}) opts.setdefault("default_repository", "{directory}/{name}") opts.setdefault("publish_noninteractively", False) + opts.setdefault("engine", DockerEngines()) docker_options = create_subsystem(DockerOptions, **opts) tgt 
= cast(DockerImageTarget, rule_runner.get_target(address)) fs = PublishDockerImageFieldSet.create(tgt) packages = build(tgt, docker_options) - docker = DockerBinary("/dummy/docker") mock_env_aware = MagicMock(spec=DockerOptions.EnvironmentAware) if env_vars: mock_env_aware.env_vars = env_vars @@ -113,14 +115,17 @@ def run_publish( mock_calls = mock_calls or { "pants.core.util_rules.env_vars.environment_vars_subset": lambda *args: rule_runner.request( EnvironmentVars, args - ) + ), + "pants.backend.docker.util_rules.binaries.get_podman" + if isinstance(binary, PodmanBinary) + else "pants.backend.docker.util_rules.binaries.get_docker": lambda *args: binary, } result = run_rule_with_mocks( push_docker_images, - rule_args=[fs._request(packages), docker, docker_options, mock_env_aware], + rule_args=[fs._request(packages), docker_options, mock_env_aware], mock_calls=mock_calls, ) - return result, docker + return result, binary def assert_publish( @@ -455,7 +460,7 @@ def test_docker_push_env(rule_runner: RuleRunner) -> None: def test_docker_push_on_package(rule_runner: RuleRunner) -> None: """Test push_docker_images when pushes_on_package() returns True.""" - docker = DockerBinary("/dummy/docker") + docker: DockerBinary | PodmanBinary = DockerBinary("/dummy/docker") # Create mock build process that will be returned by get_docker_image_build_process mock_tags = ("push-on-package/push-on-package:latest",) diff --git a/src/python/pants/backend/docker/goals/run_image.py b/src/python/pants/backend/docker/goals/run_image.py index 499eb817e6e..a0c84037580 100644 --- a/src/python/pants/backend/docker/goals/run_image.py +++ b/src/python/pants/backend/docker/goals/run_image.py @@ -14,7 +14,7 @@ DockerImageRunExtraArgsField, DockerImageSourceField, ) -from pants.backend.docker.util_rules.docker_binary import DockerBinary +from pants.backend.docker.util_rules.binaries import DockerBinary from pants.core.goals.package import PackageFieldSet, build_package from pants.core.goals.run 
import RunFieldSet, RunInSandboxBehavior, RunRequest from pants.core.util_rules.env_vars import environment_vars_subset diff --git a/src/python/pants/backend/docker/rules.py b/src/python/pants/backend/docker/rules.py index 6f748e0a853..451922a732a 100644 --- a/src/python/pants/backend/docker/rules.py +++ b/src/python/pants/backend/docker/rules.py @@ -4,8 +4,8 @@ from pants.backend.docker.goals import package_image, publish, run_image from pants.backend.docker.subsystems import dockerfile_parser from pants.backend.docker.util_rules import ( + binaries, dependencies, - docker_binary, docker_build_args, docker_build_context, docker_build_env, @@ -15,8 +15,8 @@ def rules(): return [ + *binaries.rules(), *dependencies.rules(), - *docker_binary.rules(), *docker_build_args.rules(), *docker_build_context.rules(), *docker_build_env.rules(), diff --git a/src/python/pants/backend/docker/subsystems/docker_options.py b/src/python/pants/backend/docker/subsystems/docker_options.py index 1787b3a0adf..b697ae4ecbd 100644 --- a/src/python/pants/backend/docker/subsystems/docker_options.py +++ b/src/python/pants/backend/docker/subsystems/docker_options.py @@ -3,14 +3,22 @@ from __future__ import annotations +import logging import sys -from typing import Any +from typing import Any, cast +from pants.backend.docker.engine_types import ( + DockerBuildEngine, + DockerEngines, + DockerPushEngine, + DockerRunEngine, +) from pants.backend.docker.package_types import DockerPushOnPackageBehavior from pants.backend.docker.registries import DockerRegistries from pants.core.util_rules.search_paths import ExecutableSearchPathsOptionMixin from pants.option.option_types import ( BoolOption, + DataclassOption, DictOption, EnumOption, ShellStrListOption, @@ -29,6 +37,8 @@ ), } +logger = logging.getLogger(__name__) + class DockerOptions(Subsystem): options_scope = "docker" @@ -146,15 +156,78 @@ def env_vars(self) -> tuple[str, ...]: """ ), ) + engine = DataclassOption( + default=DockerEngines(), + 
mutually_exclusive_group="engines", + help=softwrap( + """ + The engines to use for Docker builds and runs. + + Valid values for `build` are: + + - `docker`: Use the Docker CLI to build images. (https://docs.docker.com/reference/cli/docker/buildx/build/) + - `buildkit`: Invoke buildkit directly to build images. (https://github.com/moby/buildkit/blob/master/docs/reference/buildctl.md#build) + - `podman`: Use Podman to build images. (https://docs.podman.io/en/latest/markdown/podman-build.1.html) + + Valid values for `run` are: + + - `docker`: Use the Docker CLI to run containers. (https://docs.docker.com/reference/cli/docker/run/) + - `podman`: Use Podman to run containers. (https://docs.podman.io/en/latest/markdown/podman-run.1.html) + """ + ), + ) use_buildx = BoolOption( - default=False, + default=True, help=softwrap( """ + DEPRECATED: Use [docker.engine].build = "docker" instead. + + See here for using the legacy builder: https://docs.docker.com/reference/cli/docker/build-legacy/ + Use [buildx](https://github.com/docker/buildx#buildx) (and BuildKit) for builds. """ ), + deprecation_start_version="2.31.0", + mutually_exclusive_group="engines", ) + def _experimental_enable_podman_warning[ + E: DockerBuildEngine | DockerRunEngine | DockerPushEngine + ](self, engine_type: type[E], engine_opt: str) -> E: + experimental_enable_podman = self.options.get("experimental_enable_podman", None) + match experimental_enable_podman: + case None: + return cast(E, getattr(self.engine, engine_opt)) + case True: + engine = cast(E, engine_type.PODMAN) + case False: + engine = cast(E, engine_type.DOCKER) + logger.warning( + f'`[docker].experimental_enable_podman` is deprecated. Use `[docker.engine].{engine_opt} = "{engine.value}"` instead.' + ) + return engine + + @property + def build_engine(self) -> DockerBuildEngine: + use_buildx = self.options.get("use_buildx") + if use_buildx is not None: + warning = '`[docker].use_buildx` is deprecated. 
Buildx is now the default Docker build engine. Use `[docker.engine].build = "docker"` instead.' + if not use_buildx: + warning += ( + " To use the legacy engine, add `DOCKER_BUILDKIT=0` to `[docker].env_vars`." + ) + logger.warning(warning) + return DockerBuildEngine.DOCKER + return self._experimental_enable_podman_warning(DockerBuildEngine, "build") + + @property + def run_engine(self) -> DockerRunEngine: + return self._experimental_enable_podman_warning(DockerRunEngine, "run") + + @property + def push_engine(self) -> DockerPushEngine: + return self._experimental_enable_podman_warning(DockerPushEngine, "push") + _build_args = ShellStrListOption( help=softwrap( f""" diff --git a/src/python/pants/backend/docker/target_types.py b/src/python/pants/backend/docker/target_types.py index 47a24701821..00045da5a00 100644 --- a/src/python/pants/backend/docker/target_types.py +++ b/src/python/pants/backend/docker/target_types.py @@ -6,9 +6,9 @@ import os import re from abc import ABC, abstractmethod -from collections.abc import Callable, Iterator +from collections.abc import Callable, Iterator, Mapping, Sequence from dataclasses import dataclass -from typing import ClassVar, cast, final +from typing import TYPE_CHECKING, ClassVar, final from pants.backend.docker.registries import ALL_DEFAULT_REGISTRIES from pants.backend.docker.subsystems.docker_options import DockerOptions @@ -38,9 +38,12 @@ ) from pants.engine.unions import union from pants.util.docutil import bin_name, doc_url -from pants.util.frozendict import FrozenDict +from pants.util.meta import classproperty from pants.util.strutil import help_text, softwrap +if TYPE_CHECKING: + from pants.backend.docker.util_rules.docker_build_context import DockerBuildContext + # Common help text to be applied to each field that supports value interpolation. _interpolation_help = ( "{kind} may use placeholders in curly braces to be interpolated. 
The placeholders are derived " @@ -155,21 +158,6 @@ class DockerImageTagsField(StringSequenceField): ) -class DockerImageTargetStageField(StringField): - alias = "target_stage" - help = help_text( - """ - Specify target build stage, rather than building the entire `Dockerfile`. - - When using multi-stage build, you may name your stages, and can target them when building - to only selectively build a certain stage. See also the `--docker-build-target-stage` - option. - - Read more about [multi-stage Docker builds](https://docs.docker.com/develop/develop-images/multistage-build/#stop-at-a-specific-build-stage) - """ - ) - - class DockerImageDependenciesField(Dependencies): supports_transitive_excludes = True @@ -238,109 +226,388 @@ class DockerImageSkipPushField(BoolField): OptionValueFormatter = Callable[[str], str] -class DockerBuildOptionFieldMixin(ABC): - """Inherit this mixin class to provide options to `docker build`.""" +class ValidateOptionsMixin(Field, ABC): + def validate_options(self, options: DockerOptions, context: DockerBuildContext) -> bool: + """Hook method telling Pants to ignore this option in certain contexts. + Can also be used to throw errors simply by raising an exception. 
+ """ + return True + + +class DockerBuildOptionsFieldMixin(ValidateOptionsMixin, ABC): docker_build_option: ClassVar[str] + @classmethod @abstractmethod - def option_values( - self, *, value_formatter: OptionValueFormatter, global_build_hosts_options: dict + def compute_docker_build_options( + cls, value, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + """Subclasses must implement this, to turn `value` into none, one or more option values.""" + + def docker_build_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: - """Subclasses must implement this, to turn their `self.value` into none, one or more option - values.""" + return self.compute_docker_build_options( + self.value, docker=docker, value_formatter=value_formatter + ) + +class DockerBuildOptionFieldValueMixin(DockerBuildOptionsFieldMixin, ABC): + """Inherit this mixin class to provide unary options (i.e. option in the form of `--flag=value`) + to `docker build`.""" + + @classmethod @final - def options( - self, value_formatter: OptionValueFormatter, global_build_hosts_options + def compute_docker_build_options( + cls, value, *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: - for value in self.option_values( - value_formatter=value_formatter, global_build_hosts_options=global_build_hosts_options - ): - yield from (self.docker_build_option, value) + if value is not None: + yield f"{cls.docker_build_option}={value}" -class DockerImageBuildImageLabelsOptionField(DockerBuildOptionFieldMixin, DictStringToStringField): - alias = "image_labels" - help = help_text( - f""" - Provide image metadata. 
+class DockerBuildOptionFieldMultiValueMixin(StringSequenceField, DockerBuildOptionsFieldMixin, ABC): + """Inherit this mixin class to provide options in the form of `--flag=value1,value2` to `docker + build`.""" - {_interpolation_help.format(kind="Label value")} + @classmethod + @final + def compute_docker_build_options( + cls, + value: Sequence[str] | None, + *, + docker: DockerOptions, + value_formatter: OptionValueFormatter, + ) -> Iterator[str]: + if value: + yield f"{cls.docker_build_option}={','.join(value)}" - See [Docker labels](https://docs.docker.com/config/labels-custom-metadata/#manage-labels-on-objects) - for more information. - """ - ) - docker_build_option = "--label" - def option_values(self, value_formatter: OptionValueFormatter, **kwargs) -> Iterator[str]: - for label, value in (self.value or {}).items(): - yield f"{label}={value_formatter(value)}" +class DockerBuildOptionFieldMultiValueDictMixin( + DictStringToStringField, DockerBuildOptionsFieldMixin, ABC +): + """Inherit this mixin class to provide options in the form of `--flag=key1=value1,key2=value2` + to `docker build`.""" + @classmethod + @final + def compute_docker_build_options( + cls, + value: Mapping[str, str] | None, + *, + docker: DockerOptions, + value_formatter: OptionValueFormatter, + ) -> Iterator[str]: + if value: + yield f"{cls.docker_build_option}=" + ",".join( + f"{key}={value_formatter(v)}" for key, v in value.items() + ) -class DockerImageBuildImageExtraHostsField(DockerBuildOptionFieldMixin, DictStringToStringField): - alias = "extra_build_hosts" - help = help_text( - """ - Extra hosts entries to be added to a container's `/etc/hosts` file. - Use `[docker].build_hosts` to set default host entries for all images. 
- """ - ) - docker_build_option = "--add-host" +class DockerBuildOptionFieldListOfMultiValueDictMixin( + ListOfDictStringToStringField, DockerBuildOptionsFieldMixin, ABC +): + """Inherit this mixin class to provide multiple key-value options to docker build: - def option_values( - self, value_formatter: OptionValueFormatter, global_build_hosts_options: dict = {} + `--flag=key1=value1,key2=value2 --flag=key3=value3,key4=value4` + """ + + @classmethod + @final + def compute_docker_build_options( + cls, + value: Sequence[Mapping[str, str]] | None, + *, + docker: DockerOptions, + value_formatter: OptionValueFormatter, ) -> Iterator[str]: - if self.value: - merged_values = {**global_build_hosts_options, **self.value} - for label, value in merged_values.items(): - yield f"{label}:{value_formatter(value)}" + if value: + for item in value: + yield f"{cls.docker_build_option}=" + ",".join( + f"{key}={value_formatter(v)}" for key, v in item.items() + ) -class DockerBuildOptionFieldMultiValueDictMixin(DictStringToStringField): - """Inherit this mixin class to provide options in the form of `--flag=key1=value1,key2=value2` - to `docker build`.""" +class DockerBuildOptionFlagFieldMixin(BoolField, DockerBuildOptionsFieldMixin, ABC): + """Inherit this mixin class to provide optional flags (i.e. 
add `--flag` only when the value is + `True`) to `docker build`.""" - docker_build_option: ClassVar[str] + @classmethod + @final + def compute_docker_build_options( + cls, value: bool, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + if value: + yield f"{cls.docker_build_option}" + + +class BuildctlOptionsFieldMixin(ValidateOptionsMixin, ABC): + buildctl_option: ClassVar[str] + + @classmethod + @abstractmethod + def compute_buildctl_options( + cls, value, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + """Subclasses must implement this, to turn `value` into none, one or more option values.""" + + def buildctl_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + return self.compute_buildctl_options( + self.value, docker=docker, value_formatter=value_formatter + ) + + +class DockerBuildkitPassthroughFieldMixin( + BuildctlOptionsFieldMixin, DockerBuildOptionsFieldMixin, ABC +): + @classproperty + def docker_build_option(cls) -> str: + return cls.buildctl_option + + @classmethod + def compute_docker_build_options( + cls, value, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + return cls.compute_buildctl_options(value, docker=docker, value_formatter=value_formatter) + +class BuildctlOptionFieldMultiValueDictMixin( + DictStringToStringField, BuildctlOptionsFieldMixin, ABC +): + """Inherit this mixin class to provide options in the form of `--flag=key1=value1,key2=value2` + to `buildctl build`.""" + + @classmethod @final - def options(self, value_formatter: OptionValueFormatter, **kwargs) -> Iterator[str]: - if self.value: - yield f"{self.docker_build_option}=" + ",".join( - f"{key}={value_formatter(value)}" for key, value in self.value.items() + def compute_buildctl_options( + cls, + value: Mapping[str, str] | None, + *, + docker: DockerOptions, + value_formatter: OptionValueFormatter, + ) -> 
Iterator[str]: + if value: + yield f"{cls.buildctl_option}=" + ",".join( + f"{key}={value_formatter(v)}" for key, v in value.items() ) -class DockerBuildOptionFieldListOfMultiValueDictMixin(ListOfDictStringToStringField): - """Inherit this mixin class to provide multiple key-value options to docker build: +class BuildctlOptionFieldListOfMultiValueDictMixin( + ListOfDictStringToStringField, BuildctlOptionsFieldMixin, ABC +): + """Inherit this mixin class to provide multiple key-value options to buildctl build: `--flag=key1=value1,key2=value2 --flag=key3=value3,key4=value4` """ - docker_build_option: ClassVar[str] - + @classmethod @final - def options(self, value_formatter: OptionValueFormatter, **kwargs) -> Iterator[str]: - if self.value: - for item in self.value: - yield f"{self.docker_build_option}=" + ",".join( - f"{key}={value_formatter(value)}" for key, value in item.items() + def compute_buildctl_options( + cls, + value: Sequence[Mapping[str, str]] | None, + *, + docker: DockerOptions, + value_formatter: OptionValueFormatter, + ) -> Iterator[str]: + if value: + for item in value: + yield f"{cls.buildctl_option}=" + ",".join( + f"{key}={value_formatter(v)}" for key, v in item.items() ) -class DockerBuildKitOptionField: - """Mixin to indicate a BuildKit-specific option.""" +class BuildctlOptionFieldValueMixin(BuildctlOptionsFieldMixin, ABC): + """Inherit this mixin class to provide unary options (i.e. option in the form of `--flag=value`) + to `buildctl build`.""" - @abstractmethod - def options(self, value_formatter: OptionValueFormatter) -> Iterator[str]: ... + @classmethod + @final + def compute_buildctl_options( + cls, value, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + if value is not None: + yield f"{cls.buildctl_option}={value}" + + +class BuildctlLayeredOptionFieldValueMixin(BuildctlOptionsFieldMixin, ABC): + """Inherit this mixin class to provide layered options (i.e. 
option in the form of `--flag + suboption=value`) to `buildctl build`. + + You can override the option-value delimiter (default is `=`) by setting the + `suboption_value_delimiter` class variable. + """ + + suboption: ClassVar[str] + suboption_value_delimiter: ClassVar[str] = "=" + + @classmethod + @final + def compute_buildctl_options( + cls, value, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + if value is not None: + yield cls.buildctl_option + yield f"{cls.suboption}{cls.suboption_value_delimiter}{value}" - required_help = "This option requires BuildKit to be enabled via the Docker subsystem options." + +class BuildctlOptionFieldMultiValueMixin(StringSequenceField, BuildctlOptionsFieldMixin, ABC): + """Inherit this mixin class to provide options in the form of `--flag=value1,value2` to + `buildctl build`.""" + + @classmethod + @final + def compute_buildctl_options( + cls, + value: Sequence[str] | None, + *, + docker: DockerOptions, + value_formatter: OptionValueFormatter, + ) -> Iterator[str]: + if value: + yield f"{cls.buildctl_option}={','.join(value)}" + + +class BuildctlLayeredOptionFieldMultiValueMixin( + StringSequenceField, BuildctlOptionsFieldMixin, ABC +): + """Inherit this mixin class to provide layered options in the form of `--flag + suboption=value1,value2` to `buildctl build`. + + You can override the option-values delimiter (default is `=`) by setting the + `suboption_value_delimiter` class variable. 
+ """ + + suboption: ClassVar[str] + suboption_value_delimiter: ClassVar[str] = "=" + + @classmethod + @final + def compute_buildctl_options( + cls, + value: Sequence[str] | None, + *, + docker: DockerOptions, + value_formatter: OptionValueFormatter, + ) -> Iterator[str]: + if value: + yield cls.buildctl_option + yield f"{cls.suboption}{cls.suboption_value_delimiter}{','.join(value)}" + + +class BuildctlOptionFlagFieldMixin(BoolField, BuildctlOptionsFieldMixin, ABC): + """Inherit this mixin class to provide optional flags (i.e. add `--flag` only when the value is + `True`) to `buildctl build`.""" + + @classmethod + @final + def compute_buildctl_options( + cls, value: bool, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + if value: + yield f"{cls.buildctl_option}" + + +class DockerImageTargetStageField( + DockerBuildOptionFieldValueMixin, BuildctlLayeredOptionFieldValueMixin, StringField +): + alias = "target_stage" + help = help_text( + """ + Specify target build stage, rather than building the entire `Dockerfile`. + + When using multi-stage build, you may name your stages, and can target them when building + to only selectively build a certain stage. See also the `--docker-build-target-stage` + option. + + Read more about [multi-stage Docker builds](https://docs.docker.com/develop/develop-images/multistage-build/#stop-at-a-specific-build-stage) + """ + ) + docker_build_option = "--target" + buildctl_option = "--opt" + suboption = "target" + + @staticmethod + def validate_options(options: DockerOptions, context: DockerBuildContext) -> bool: + # Defer to global option if set and matches a stage + return options.build_target_stage not in context.stages + + +class DockerImageBuildImageLabelsOptionField( + DockerBuildOptionsFieldMixin, + BuildctlOptionsFieldMixin, + DictStringToStringField, +): + alias = "image_labels" + help = help_text( + f""" + Provide image metadata. 
+ + {_interpolation_help.format(kind="Label value")} + + See [Docker labels](https://docs.docker.com/config/labels-custom-metadata/#manage-labels-on-objects) + for more information. + """ + ) + docker_build_option = "--label" + buildctl_option = "--opt" + + @classmethod + def compute_docker_build_options( + cls, + value: Mapping[str, str] | None, + *, + docker: DockerOptions, + value_formatter: OptionValueFormatter, + ) -> Iterator[str]: + for label, v in (value or {}).items(): + yield cls.docker_build_option + yield f"{label}={value_formatter(v)}" + + @classmethod + def compute_buildctl_options( + cls, + value: Mapping[str, str] | None, + *, + docker: DockerOptions, + value_formatter: OptionValueFormatter, + ) -> Iterator[str]: + for label, v in (value or {}).items(): + yield cls.buildctl_option + yield f"label:{label}={value_formatter(v)}" + + +class DockerImageBuildImageExtraHostsField(DockerBuildOptionsFieldMixin, DictStringToStringField): + alias = "extra_build_hosts" + help = help_text( + """ + Extra hosts entries to be added to a container's `/etc/hosts` file. + + Use `[docker].build_hosts` to set default host entries for all images. 
+ """ + ) + docker_build_option = "--add-host" + + @classmethod + def compute_docker_build_options( + cls, + value: Mapping[str, str] | None, + *, + docker: DockerOptions, + value_formatter: OptionValueFormatter, + ) -> Iterator[str]: + if value: + merged_values = {**docker.build_hosts, **value} + for label, v in merged_values.items(): + yield cls.docker_build_option + yield f"{label}:{value_formatter(v)}" class DockerImageBuildImageCacheToField( - DockerBuildOptionFieldMultiValueDictMixin, DictStringToStringField, DockerBuildKitOptionField + DockerBuildOptionFieldMultiValueDictMixin, + BuildctlOptionFieldMultiValueDictMixin, + DictStringToStringField, ): alias = "cache_to" help = help_text( @@ -351,7 +618,7 @@ class DockerImageBuildImageCacheToField( multiple cache sources - Pants will pass these as multiple `--cache_from` arguments to the Docker CLI. Docker will only use the first cache hit (i.e. the image exists) in the build. - {DockerBuildKitOptionField.required_help} + If you're using the legacy builder, this option is not supported. Example: @@ -371,19 +638,24 @@ class DockerImageBuildImageCacheToField( """ ) docker_build_option = "--cache-to" + buildctl_option = "--export-cache" + + @staticmethod + def validate_options(options: DockerOptions, context: DockerBuildContext) -> bool: + return not options.build_no_cache class DockerImageBuildImageCacheFromField( DockerBuildOptionFieldListOfMultiValueDictMixin, + BuildctlOptionFieldListOfMultiValueDictMixin, ListOfDictStringToStringField, - DockerBuildKitOptionField, ): alias = "cache_from" help = help_text( f""" Use external cache sources when building the image. - {DockerBuildKitOptionField.required_help} + If you're using the legacy builder, this option is not supported. 
Example: @@ -409,18 +681,20 @@ class DockerImageBuildImageCacheFromField( """ ) docker_build_option = "--cache-from" + buildctl_option = "--import-cache" + @staticmethod + def validate_options(options: DockerOptions, context: DockerBuildContext) -> bool: + return not options.build_no_cache -class DockerImageBuildImageOutputField( - DockerBuildOptionFieldMultiValueDictMixin, DictStringToStringField, DockerBuildKitOptionField -): + +class DockerImageBuildImageOutputField(DictStringToStringField): alias = "output" - default = FrozenDict({"type": "docker"}) help = help_text( f""" Sets the export action for the build result. - {DockerBuildKitOptionField.required_help} + If you're using the legacy builder, this option is not supported. When using `pants publish` to publish Docker images to a registry, the output type must be 'docker', as `publish` expects that the built images exist in the local @@ -429,11 +703,12 @@ class DockerImageBuildImageOutputField( {_interpolation_help.format(kind="Values")} """ ) - docker_build_option = "--output" class DockerImageBuildSecretsOptionField( - AsyncFieldMixin, DockerBuildOptionFieldMixin, DictStringToStringField + AsyncFieldMixin, + ValidateOptionsMixin, + DictStringToStringField, ): alias = "secrets" help = help_text( @@ -459,9 +734,12 @@ class DockerImageBuildSecretsOptionField( """ ) + buildctl_option = "--secret" docker_build_option = "--secret" - def option_values(self, **kwargs) -> Iterator[str]: + def buildctl_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: # os.path.join() discards preceding parts if encountering an abs path, e.g. if the secret # `path` is an absolute path, the `buildroot` and `spec_path` will not be considered. Also, # an empty path part is ignored. 
@@ -471,11 +749,19 @@ def option_values(self, **kwargs) -> Iterator[str]: self.address.spec_path if re.match(r"\.{1,2}/", path) else "", os.path.expanduser(path), ) - + yield self.buildctl_option yield f"id={secret},src={os.path.normpath(full_path)}" + def docker_build_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + return self.buildctl_options(docker=docker, value_formatter=value_formatter) + -class DockerImageBuildSSHOptionField(DockerBuildOptionFieldMixin, StringSequenceField): +class DockerImageBuildSSHOptionField( + DockerBuildkitPassthroughFieldMixin, + StringSequenceField, +): alias = "ssh" default = () help = help_text( @@ -493,34 +779,15 @@ class DockerImageBuildSSHOptionField(DockerBuildOptionFieldMixin, StringSequence """ ) - docker_build_option = "--ssh" - - def option_values(self, **kwargs) -> Iterator[str]: - yield from cast("tuple[str]", self.value) - - -class DockerBuildOptionFieldValueMixin(Field): - """Inherit this mixin class to provide unary options (i.e. 
option in the form of `--flag=value`) - to `docker build`.""" - - docker_build_option: ClassVar[str] - - @final - def options(self, *args, **kwargs) -> Iterator[str]: - if self.value is not None: - yield f"{self.docker_build_option}={self.value}" - - -class DockerBuildOptionFieldMultiValueMixin(StringSequenceField): - """Inherit this mixin class to provide options in the form of `--flag=value1,value2` to `docker - build`.""" - - docker_build_option: ClassVar[str] + buildctl_option = "--ssh" - @final - def options(self, *args, **kwargs) -> Iterator[str]: - if self.value: - yield f"{self.docker_build_option}={','.join(list(self.value))}" + @classmethod + def compute_buildctl_options( + cls, value: Sequence[str], *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + for v in value: + yield cls.buildctl_option + yield v class DockerImageBuildPullOptionField(DockerBuildOptionFieldValueMixin, BoolField): @@ -537,18 +804,6 @@ class DockerImageBuildPullOptionField(DockerBuildOptionFieldValueMixin, BoolFiel docker_build_option = "--pull" -class DockerBuildOptionFlagFieldMixin(BoolField, ABC): - """Inherit this mixin class to provide optional flags (i.e. 
add `--flag` only when the value is - `True`) to `docker build`.""" - - docker_build_option: ClassVar[str] - - @final - def options(self, *args, **kwargs) -> Iterator[str]: - if self.value: - yield f"{self.docker_build_option}" - - class DockerImageBuildSquashOptionField(DockerBuildOptionFlagFieldMixin): alias = "squash" default = False @@ -576,7 +831,9 @@ class DockerImageBuildNetworkOptionField(DockerBuildOptionFieldValueMixin, Strin class DockerImageBuildPlatformOptionField( - DockerBuildOptionFieldMultiValueMixin, StringSequenceField + DockerBuildOptionFieldMultiValueMixin, + BuildctlLayeredOptionFieldMultiValueMixin, + StringSequenceField, ): alias = "build_platform" default = None @@ -586,6 +843,8 @@ class DockerImageBuildPlatformOptionField( """ ) docker_build_option = "--platform" + buildctl_option = "--opt" + suboption = "platform" class DockerImageRunExtraArgsField(StringSequenceField): diff --git a/src/python/pants/backend/docker/target_types_test.py b/src/python/pants/backend/docker/target_types_test.py index cffc054859f..903b4780ae0 100644 --- a/src/python/pants/backend/docker/target_types_test.py +++ b/src/python/pants/backend/docker/target_types_test.py @@ -2,9 +2,11 @@ # Licensed under the Apache License, Version 2.0 (see LICENSE). 
import os from pathlib import Path +from unittest.mock import MagicMock import pytest +from pants.backend.docker.subsystems.docker_options import DockerOptions from pants.backend.docker.target_types import DockerImageBuildSecretsOptionField from pants.engine.internals.native_engine import Address @@ -36,6 +38,10 @@ def test_secret_path_resolvement(src: str, expected: str): secrets_option_field = DockerImageBuildSecretsOptionField( {"mysecret": src}, address=Address("") ) - values = list(secrets_option_field.option_values()) + values = list( + secrets_option_field.buildctl_options( + docker=MagicMock(spec=DockerOptions), value_formatter=lambda x: x + ) + ) - assert values == [f"id=mysecret,src={expected}"] + assert values == ["--secret", f"id=mysecret,src={expected}"] diff --git a/src/python/pants/backend/docker/util_rules/docker_binary.py b/src/python/pants/backend/docker/util_rules/binaries.py similarity index 55% rename from src/python/pants/backend/docker/util_rules/docker_binary.py rename to src/python/pants/backend/docker/util_rules/binaries.py index 292ff766f9a..d43281057da 100644 --- a/src/python/pants/backend/docker/util_rules/docker_binary.py +++ b/src/python/pants/backend/docker/util_rules/binaries.py @@ -1,13 +1,10 @@ # Copyright 2021 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -from __future__ import annotations - -import logging import os +from abc import ABC from collections.abc import Mapping, Sequence from dataclasses import dataclass -from typing import cast +from typing import Protocol, TypeVar, cast from pants.backend.docker.subsystems.docker_options import DockerOptions from pants.backend.docker.util_rules.docker_build_args import DockerBuildArgs @@ -21,35 +18,30 @@ find_binary, ) from pants.engine.fs import Digest -from pants.engine.internals.selectors import concurrently from pants.engine.process import Process, ProcessCacheScope -from pants.engine.rules import collect_rules, implicitly, rule +from pants.engine.rules import collect_rules, concurrently, implicitly, rule from pants.util.logging import LogLevel from pants.util.strutil import pluralize -logger = logging.getLogger(__name__) +T = TypeVar("T", bound="BaseBinary") @dataclass(frozen=True) -class DockerBinary(BinaryPath): - """The `docker` binary.""" +class BaseBinary(BinaryPath, ABC): + """Base class for all binary paths.""" extra_env: Mapping[str, str] extra_input_digests: Mapping[str, Digest] | None - is_podman: bool - def __init__( self, path: str, fingerprint: str | None = None, extra_env: Mapping[str, str] | None = None, extra_input_digests: Mapping[str, Digest] | None = None, - is_podman: bool = False, ) -> None: object.__setattr__(self, "extra_env", {} if extra_env is None else extra_env) object.__setattr__(self, "extra_input_digests", extra_input_digests) - object.__setattr__(self, "is_podman", is_podman) super().__init__(path, fingerprint) def _get_process_environment(self, env: Mapping[str, str]) -> Mapping[str, str]: @@ -64,6 +56,8 @@ def _get_process_environment(self, env: Mapping[str, str]) -> Mapping[str, str]: ) return res + +class BuildBinaryProtocol(Protocol): def build_image( self, tags: tuple[str, ...], @@ -72,15 +66,37 @@ def build_image( build_args: DockerBuildArgs, context_root: str, env: Mapping[str, str], - use_buildx: bool, + output: dict[str, 
str] | None, extra_args: tuple[str, ...] = (), + is_publish: bool = False, + ) -> Process: ... + + +class PushBinaryProtocol(Protocol): + def push_image(self, tag: str, env: Mapping[str, str] | None = None) -> Process: ... + + +def _comma_sep_dict_args(d: Mapping[str, str]) -> str: + return ",".join(f"{k}={v}" for k, v in d.items()) + + +class _DockerPodmanMixin(BaseBinary): + def build_image( + self, + tags: tuple[str, ...], + digest: Digest, + dockerfile: str, + build_args: DockerBuildArgs, + context_root: str, + env: Mapping[str, str], + output: Mapping[str, str] | None, + extra_args: tuple[str, ...] = (), + is_publish: bool = False, ) -> Process: - if use_buildx: - build_commands = ["buildx", "build"] - else: - build_commands = ["build"] + args = [self.path, "build", *extra_args] - args = [self.path, *build_commands, *extra_args] + if output: + args.extend(["--output", _comma_sep_dict_args(output)]) for tag in tags: args.extend(["--tag", tag]) @@ -125,7 +141,13 @@ def run_image( env: Mapping[str, str] | None = None, ) -> Process: return Process( - argv=(self.path, "run", *(docker_run_args or []), tag, *(image_args or [])), + argv=( + self.path, + "run", + *(docker_run_args or []), + tag, + *(image_args or []), + ), cache_scope=ProcessCacheScope.PER_SESSION, description=f"Running docker image {tag}", env=self._get_process_environment(env or {}), @@ -133,6 +155,78 @@ def run_image( ) +class DockerBinary(_DockerPodmanMixin): + """The `docker` binary.""" + + +class PodmanBinary(_DockerPodmanMixin): + """The `podman` binary.""" + + +class BuildctlBinary(BaseBinary): + """The `buildctl` binary.""" + + @staticmethod + def _special_output_handling(output: Mapping[str, str] | None) -> bool: + if output: + try: + output_type = output["type"] + except KeyError as ke: + raise ValueError( + "docker_image output type field is required when specifying output" + ) from ke + return output_type == "image" and "name" not in output + return True + + def build_image( + self, + 
tags: tuple[str, ...], + digest: Digest, + dockerfile: str, + build_args: DockerBuildArgs, + context_root: str, + env: Mapping[str, str], + output: Mapping[str, str] | None, + extra_args: tuple[str, ...] = (), + is_publish: bool = False, + ) -> Process: + args = [ + self.path, + "build", + "--frontend", + "dockerfile.v0", + "--local", + f"context={context_root}", + "--local", + f"dockerfile={os.path.dirname(dockerfile)}", + "--opt", + f"filename={os.path.basename(dockerfile)}", + *extra_args, + ] + + for build_arg in build_args: + args.extend(["--opt", f"build-arg:{build_arg}"]) + + if self._special_output_handling(output): + publish_suffix = ",push=true" if is_publish else "" + for tag in tags: + args.extend(["--output", f"type=image,name={tag}{publish_suffix}"]) + else: + args.extend(["--output", _comma_sep_dict_args(cast(Mapping[str, str], output))]) + + return Process( + argv=tuple(args), + description=( + f"Building docker image {tags[0]}" + + (f" +{pluralize(len(tags) - 1, 'additional tag')}." 
if len(tags) > 1 else "") + ), + env=self._get_process_environment(env), + input_digest=digest, + immutable_input_digests=self.extra_input_digests, + cache_scope=ProcessCacheScope.PER_SESSION, + ) + + async def _get_docker_tools_shims( *, tools: Sequence[str], @@ -189,58 +283,59 @@ async def _get_docker_tools_shims( return tools_shims -@rule(desc="Finding the `docker` binary and related tooling", level=LogLevel.DEBUG) -async def get_docker( - docker_options: DockerOptions, docker_options_env_aware: DockerOptions.EnvironmentAware -) -> DockerBinary: +async def get_binary( + binary_name: str, + binary_cls: type[T], + docker_options: DockerOptions, + docker_options_env_aware: DockerOptions.EnvironmentAware, +) -> T: search_path = docker_options_env_aware.executable_search_path - first_path: BinaryPath | None = None - is_podman = False - - if getattr(docker_options.options, "experimental_enable_podman", False): - # Enable podman support with `pants.backend.experimental.docker.podman` - request = BinaryPathRequest( - binary_name="podman", - search_path=search_path, - test=BinaryPathTest(args=["-v"]), - ) - paths = await find_binary(request, **implicitly()) - first_path = paths.first_path - if first_path: - is_podman = True - logger.warning("podman found. 
Podman support is experimental.") - - if not first_path: - request = BinaryPathRequest( - binary_name="docker", - search_path=search_path, - test=BinaryPathTest(args=["-v"]), - ) - paths = await find_binary(request, **implicitly()) - first_path = paths.first_path_or_raise(request, rationale="interact with the docker daemon") + request = BinaryPathRequest( + binary_name=binary_name, + search_path=search_path, + test=BinaryPathTest(args=["-v"]), + ) + paths = await find_binary(request, **implicitly()) + first_path = paths.first_path_or_raise(request, rationale="interact with the docker daemon") if not docker_options.tools and not docker_options.optional_tools: - return DockerBinary(first_path.path, first_path.fingerprint, is_podman=is_podman) + return binary_cls(first_path.path, first_path.fingerprint) tools_shims = await _get_docker_tools_shims( tools=docker_options.tools, optional_tools=docker_options.optional_tools, search_path=search_path, - rationale="use docker", + rationale=f"use {binary_name}", ) - - extra_env = {"PATH": tools_shims.path_component} - extra_input_digests = tools_shims.immutable_input_digests - - return DockerBinary( + return binary_cls( first_path.path, first_path.fingerprint, - extra_env=extra_env, - extra_input_digests=extra_input_digests, - is_podman=is_podman, + extra_env={"PATH": tools_shims.path_component}, + extra_input_digests=tools_shims.immutable_input_digests, ) +@rule(desc="Finding the `docker` binary and related tooling", level=LogLevel.DEBUG) +async def get_docker( + docker_options: DockerOptions, docker_options_env_aware: DockerOptions.EnvironmentAware +) -> DockerBinary: + return await get_binary("docker", DockerBinary, docker_options, docker_options_env_aware) + + +@rule(desc="Finding the `podman` binary and related tooling", level=LogLevel.DEBUG) +async def get_podman( + docker_options: DockerOptions, docker_options_env_aware: DockerOptions.EnvironmentAware +) -> PodmanBinary: + return await get_binary("podman", PodmanBinary, 
docker_options, docker_options_env_aware) + + +@rule(desc="Finding the `buildctl` binary and related tooling", level=LogLevel.DEBUG) +async def get_buildctl( + docker_options: DockerOptions, docker_options_env_aware: DockerOptions.EnvironmentAware +) -> BuildctlBinary: + return await get_binary("buildctl", BuildctlBinary, docker_options, docker_options_env_aware) + + def rules(): return collect_rules() diff --git a/src/python/pants/backend/docker/util_rules/docker_binary_test.py b/src/python/pants/backend/docker/util_rules/docker_binary_test.py index c32ee58eefe..06685b96b32 100644 --- a/src/python/pants/backend/docker/util_rules/docker_binary_test.py +++ b/src/python/pants/backend/docker/util_rules/docker_binary_test.py @@ -7,7 +7,7 @@ import pytest from pants.backend.docker.subsystems.docker_options import DockerOptions -from pants.backend.docker.util_rules.docker_binary import DockerBinary, get_docker, rules +from pants.backend.docker.util_rules.binaries import DockerBinary, get_docker, rules from pants.backend.docker.util_rules.docker_build_args import DockerBuildArgs from pants.backend.experimental.docker.podman.register import rules as podman_rules from pants.core.util_rules.system_binaries import ( @@ -61,7 +61,6 @@ def test_docker_binary_build_image(docker_path: str, docker: DockerBinary) -> No build_args=DockerBuildArgs.from_strings("arg1=2"), context_root="build/context", env=env, - use_buildx=False, extra_args=("--pull", "--squash"), ) @@ -114,26 +113,18 @@ def test_docker_binary_run_image(docker_path: str, docker: DockerBinary) -> None assert run_request.description == f"Running docker image {image_ref}" -@pytest.mark.parametrize("podman_enabled", [True, False]) -@pytest.mark.parametrize("podman_found", [True, False]) -def test_get_docker(rule_runner: RuleRunner, podman_enabled: bool, podman_found: bool) -> None: +def test_get_docker(rule_runner: RuleRunner) -> None: docker_options = create_subsystem( DockerOptions, - 
experimental_enable_podman=podman_enabled, tools=[], optional_tools=[], ) docker_options_env_aware = mock.MagicMock(spec=DockerOptions.EnvironmentAware) def mock_find_binary(request: BinaryPathRequest) -> BinaryPaths: - if request.binary_name == "podman" and podman_found: - return BinaryPaths("podman", paths=[BinaryPath("/bin/podman")]) - - elif request.binary_name == "docker": + if request.binary_name == "docker": return BinaryPaths("docker", [BinaryPath("/bin/docker")]) - - else: - return BinaryPaths(request.binary_name, ()) + return BinaryPaths(request.binary_name, ()) def mock_create_binary_shims(request: BinaryShimsRequest) -> BinaryShims: return BinaryShims(EMPTY_DIGEST, "cache_name") @@ -147,12 +138,8 @@ def mock_create_binary_shims(request: BinaryShimsRequest) -> BinaryShims: }, ) - if podman_enabled and podman_found: - assert result.path == "/bin/podman" - assert result.is_podman - else: - assert result.path == "/bin/docker" - assert not result.is_podman + assert isinstance(result, DockerBinary) + assert result.path == "/bin/docker" def test_get_docker_with_tools(rule_runner: RuleRunner) -> None: @@ -170,7 +157,6 @@ def mock_create_binary_shims(request: BinaryShimsRequest) -> BinaryShims: def run(tools: list[str], optional_tools: list[str]) -> None: docker_options = create_subsystem( DockerOptions, - experimental_enable_podman=False, tools=tools, optional_tools=optional_tools, ) diff --git a/src/python/pants/backend/docker/util_rules/docker_build_context_test.py b/src/python/pants/backend/docker/util_rules/docker_build_context_test.py index c6ec2cf58d9..dd4fd074345 100644 --- a/src/python/pants/backend/docker/util_rules/docker_build_context_test.py +++ b/src/python/pants/backend/docker/util_rules/docker_build_context_test.py @@ -17,8 +17,8 @@ from pants.backend.docker.subsystems.dockerfile_parser import DockerfileInfo from pants.backend.docker.target_types import DockerImageTarget from pants.backend.docker.util_rules import ( + binaries, dependencies, - 
docker_binary, docker_build_args, docker_build_context, docker_build_env, @@ -56,7 +56,7 @@ def create_rule_runner() -> RuleRunner: rules=[ *core_target_types_rules(), *dependencies.rules(), - *docker_binary.rules(), + *binaries.rules(), *docker_build_args.rules(), *docker_build_context.rules(), *docker_build_env.rules(), diff --git a/src/python/pants/backend/experimental/docker/podman/register.py b/src/python/pants/backend/experimental/docker/podman/register.py index caf6aff4f1a..8ece60cfa81 100644 --- a/src/python/pants/backend/experimental/docker/podman/register.py +++ b/src/python/pants/backend/experimental/docker/podman/register.py @@ -8,11 +8,15 @@ class ExperimentalPodmanOptions: experimental_enable_podman = BoolOption( default=True, + mutually_exclusive_group="engines", help=softwrap( """ + DEPRECATED: Use [docker].run_engine = "podman" instead. + Allow support for podman when available. """ ), + deprecation_start_version="2.31.0", ) diff --git a/src/python/pants/backend/helm/util_rules/post_renderer.py b/src/python/pants/backend/helm/util_rules/post_renderer.py index 452b67e1747..73427249eba 100644 --- a/src/python/pants/backend/helm/util_rules/post_renderer.py +++ b/src/python/pants/backend/helm/util_rules/post_renderer.py @@ -17,7 +17,7 @@ get_docker_image_tags, ) from pants.backend.docker.util_rules import ( - docker_binary, + binaries, docker_build_args, docker_build_context, docker_build_env, @@ -166,7 +166,7 @@ def find_replacement(value: tuple[str, Address]) -> str | None: def rules(): return [ *collect_rules(), - *docker_binary.rules(), + *binaries.rules(), *docker_build_args.rules(), *docker_build_context.rules(), *docker_build_env.rules(), diff --git a/src/python/pants/ng/external_binary.py b/src/python/pants/ng/external_binary.py index 8cf9f8ef29c..9fb0b4b671f 100644 --- a/src/python/pants/ng/external_binary.py +++ b/src/python/pants/ng/external_binary.py @@ -14,7 +14,8 @@ class ExternalBinary(ContextualSubsystem): @option(help="Version of the 
binary to use", default=lambda cls: cls.version_default) - def version(self) -> str: ... + def version(self) -> str: + """Provided by the @option decorator.""" exe_help = softwrap( """ @@ -28,7 +29,8 @@ def exe_default(cls) -> str: raise NotImplementedError("Subclasses must implement to provide the default executable") @option(help=exe_help, default=lambda cls: cls.exe_default) - def exe(self) -> str: ... + def exe(self) -> str: + """Provided by the @option decorator.""" known_versions_help = softwrap( """ @@ -58,7 +60,8 @@ def exe(self) -> str: ... ) @option(help=known_versions_help, default=lambda cls: cls.known_versions_default) - def known_versions(self) -> str: ... + def known_versions(self) -> str: + """Provided by the @option decorator.""" def get_download_request(self) -> ExternalToolRequest: known_versions = self.known_versions diff --git a/src/python/pants/option/option_types.py b/src/python/pants/option/option_types.py index 8326b76928b..fe02c655dd4 100644 --- a/src/python/pants/option/option_types.py +++ b/src/python/pants/option/option_types.py @@ -781,6 +781,136 @@ def _convert_(self, val: Any) -> dict[str, _ValueT]: return cast("dict[str, _ValueT]", val) +# ----------------------------------------------------------------------------------------------- +# Generic Dataclass Concrete Option Classes +# ----------------------------------------------------------------------------------------------- + + +class DataclassOption(_OptionBase[_OptT, _DefaultT]): + """A dataclass option. + + - If you provide a static non-None `default` parameter, the `dataclass_type` parameter will be + inferred from the type of the default. + - If you provide a dynamic `default` or `default` is `None`, you must also provide `dataclass_type`. 
+ """ + + @overload + def __new__( + cls, + flag_name: str | None = None, + *, + default: _OptT, + help: _HelpT, + register_if: _RegisterIfFuncT | None = None, + advanced: bool | None = None, + default_help_repr: str | None = None, + fromfile: bool | None = None, + metavar: str | None = None, + mutually_exclusive_group: str | None = None, + removal_version: str | None = None, + removal_hint: _HelpT | None = None, + deprecation_start_version: str | None = None, + daemon: bool | None = None, + fingerprint: bool | None = None, + ) -> DataclassOption[_OptT, _OptT]: ... + + @overload + def __new__( + cls, + flag_name: str | None = None, + *, + dataclass_type: type[_OptT], + default: _DynamicDefaultT, + help: _HelpT, + register_if: _RegisterIfFuncT | None = None, + advanced: bool | None = None, + default_help_repr: str | None = None, + fromfile: bool | None = None, + metavar: str | None = None, + mutually_exclusive_group: str | None = None, + removal_version: str | None = None, + removal_hint: _HelpT | None = None, + deprecation_start_version: str | None = None, + daemon: bool | None = None, + fingerprint: bool | None = None, + ) -> DataclassOption[_OptT, _OptT]: ... + + @overload + def __new__( + cls, + flag_name: str | None = None, + *, + dataclass_type: type[_OptT], + default: None, + help: _HelpT, + register_if: _RegisterIfFuncT | None = None, + advanced: bool | None = None, + default_help_repr: str | None = None, + fromfile: bool | None = None, + metavar: str | None = None, + mutually_exclusive_group: str | None = None, + removal_version: str | None = None, + removal_hint: _HelpT | None = None, + deprecation_start_version: str | None = None, + daemon: bool | None = None, + fingerprint: bool | None = None, + ) -> DataclassOption[_OptT, None]: ... 
+ + def __new__( + cls, + flag_name: str | None = None, + *, + dataclass_type: type[_OptT] | None = None, + default: _MaybeDynamicT[_OptT] | None, + help: _HelpT, + register_if: _RegisterIfFuncT | None = None, + advanced: bool | None = None, + default_help_repr: str | None = None, + fromfile: bool | None = None, + metavar: str | None = None, + mutually_exclusive_group: str | None = None, + removal_version: str | None = None, + removal_hint: _HelpT | None = None, + deprecation_start_version: str | None = None, + daemon: bool | None = None, + fingerprint: bool | None = None, + ): + instance = super().__new__( + cls, + flag_name, + default=default, # type: ignore[arg-type] + help=help, + register_if=register_if, + advanced=advanced, + default_help_repr=default_help_repr, + fromfile=fromfile, + metavar=metavar, + mutually_exclusive_group=mutually_exclusive_group, + removal_version=removal_version, + removal_hint=removal_hint, + deprecation_start_version=deprecation_start_version, + daemon=daemon, + fingerprint=fingerprint, + ) + instance._dataclass_type = dataclass_type + return instance + + def get_option_type(self, subsystem_cls): + dataclass_type = self._dataclass_type + default = _eval_maybe_dynamic(self._default, subsystem_cls) + if dataclass_type is None: + if default is None: + raise ValueError( + "`dataclass_type` must be provided to the constructor if `default` isn't provided." 
+ ) + return type(default) + elif default is not None and not isinstance(default, dataclass_type): + raise ValueError( + f"Expected the default value to be of type '{dataclass_type}', got '{type(default)}'" + ) + return dataclass_type + + # ----------------------------------------------------------------------------------------------- # "Specialized" Concrete Option Classes # ----------------------------------------------------------------------------------------------- diff --git a/src/python/pants/option/option_types_test.py b/src/python/pants/option/option_types_test.py index 10b3ad45106..f62f6a304f9 100644 --- a/src/python/pants/option/option_types_test.py +++ b/src/python/pants/option/option_types_test.py @@ -4,6 +4,7 @@ from __future__ import annotations import unittest.mock +from dataclasses import dataclass from enum import Enum from types import SimpleNamespace from typing import TYPE_CHECKING, Any @@ -15,6 +16,7 @@ ArgsListOption, BoolListOption, BoolOption, + DataclassOption, DictOption, DirListOption, DirOption, @@ -49,6 +51,12 @@ class MyEnum(Enum): Val2 = "val2" +@dataclass(frozen=True) +class MyDataclass: + key1: str + key2: int + + def opt_info(*names, **options): return OptionInfo( args=names, @@ -129,6 +137,9 @@ def __init__(self): self.options.dyn_enum_list_prop = [MyEnum.Val2] self.options.defaultless_enum_list_prop = [MyEnum.Val2] self.options.dict_prop = {"key1": "val1"} + self.options.dataclass_prop = MyDataclass(key1="val1", key2=11) + self.options.optional_dataclass_prop = MyDataclass(key1="val2", key2=22) + self.options.dyn_dataclass_prop = MyDataclass(key1="val3", key2=33) enum_prop = EnumOption(default=MyEnum.Val1, help="") dyn_enum_prop = EnumOption( @@ -145,11 +156,19 @@ def __init__(self): ) defaultless_enum_list_prop = EnumListOption(enum_type=MyEnum, help="") dict_prop = DictOption[Any](help="") + dataclass_prop = DataclassOption(default=MyDataclass(key1="val4", key2=44), help="") + optional_dataclass_prop = 
DataclassOption(dataclass_type=MyDataclass, default=None, help="") + dyn_dataclass_prop = DataclassOption( + dataclass_type=MyDataclass, + default=lambda cls: cls.dyn_default_dataclass, + help=lambda cls: f"{cls.dyn_help}", + ) class MySubsystem(MyBaseSubsystem): dyn_help = "Dynamic Help" dyn_default = MyEnum.Val1 dyn_default_list = [MyEnum.Val1] + dyn_default_dataclass = MyDataclass(key1="val5", key2=55) assert list(collect_options_info(MySubsystem)) == [ opt_info("--enum-prop", default=MyEnum.Val1, help="", type=MyEnum), @@ -167,6 +186,16 @@ class MySubsystem(MyBaseSubsystem): "--defaultless-enum-list-prop", default=[], help="", type=list, member_type=MyEnum ), opt_info("--dict-prop", default={}, help="", type=dict), + opt_info( + "--dataclass-prop", default=MyDataclass(key1="val4", key2=44), help="", type=MyDataclass + ), + opt_info("--optional-dataclass-prop", default=None, help="", type=MyDataclass), + opt_info( + "--dyn-dataclass-prop", + default=MyDataclass(key1="val5", key2=55), + help="Dynamic Help", + type=MyDataclass, + ), ] my_subsystem = MySubsystem() @@ -177,6 +206,9 @@ class MySubsystem(MyBaseSubsystem): assert my_subsystem.dyn_enum_list_prop == (MyEnum.Val2,) assert my_subsystem.defaultless_enum_list_prop == (MyEnum.Val2,) assert my_subsystem.dict_prop == {"key1": "val1"} + assert my_subsystem.dataclass_prop == MyDataclass(key1="val1", key2=11) + assert my_subsystem.optional_dataclass_prop == MyDataclass(key1="val2", key2=22) + assert my_subsystem.dyn_dataclass_prop == MyDataclass(key1="val3", key2=33) def test_specialized_options() -> None: