From 27d8d21208e24696406def4d0769658ffd21eec6 Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Fri, 16 Jan 2026 13:00:09 -0500 Subject: [PATCH 01/25] initial commit --- .../pants/backend/docker/package_types.py | 7 + src/python/pants/backend/docker/rules.py | 2 + .../docker/subsystems/docker_options.py | 20 +++ .../docker/util_rules/buildctl_binary.py | 138 ++++++++++++++++++ 4 files changed, 167 insertions(+) create mode 100644 src/python/pants/backend/docker/util_rules/buildctl_binary.py diff --git a/src/python/pants/backend/docker/package_types.py b/src/python/pants/backend/docker/package_types.py index bcce33b6654..858c6e88364 100644 --- a/src/python/pants/backend/docker/package_types.py +++ b/src/python/pants/backend/docker/package_types.py @@ -4,11 +4,18 @@ from __future__ import annotations from dataclasses import dataclass +from enum import Enum from pants.core.goals.package import BuiltPackageArtifact from pants.util.strutil import bullet_list, pluralize +class DockerBuildEngine(Enum): + DOCKER = "docker" + LEGACY = "legacy" + BUILDKIT = "buildkit" + + @dataclass(frozen=True) class BuiltDockerImage(BuiltPackageArtifact): # We don't really want a default for this field, but the superclass has a field with diff --git a/src/python/pants/backend/docker/rules.py b/src/python/pants/backend/docker/rules.py index 6f748e0a853..9360b66d980 100644 --- a/src/python/pants/backend/docker/rules.py +++ b/src/python/pants/backend/docker/rules.py @@ -4,6 +4,7 @@ from pants.backend.docker.goals import package_image, publish, run_image from pants.backend.docker.subsystems import dockerfile_parser from pants.backend.docker.util_rules import ( + buildctl_binary, dependencies, docker_binary, docker_build_args, @@ -15,6 +16,7 @@ def rules(): return [ + *buildctl_binary.rules(), *dependencies.rules(), *docker_binary.rules(), *docker_build_args.rules(), diff --git a/src/python/pants/backend/docker/subsystems/docker_options.py 
b/src/python/pants/backend/docker/subsystems/docker_options.py index 3a2003175e5..41dc9fb5154 100644 --- a/src/python/pants/backend/docker/subsystems/docker_options.py +++ b/src/python/pants/backend/docker/subsystems/docker_options.py @@ -6,11 +6,13 @@ import sys from typing import Any +from pants.backend.docker.package_types import DockerBuildEngine from pants.backend.docker.registries import DockerRegistries from pants.core.util_rules.search_paths import ExecutableSearchPathsOptionMixin from pants.option.option_types import ( BoolOption, DictOption, + EnumOption, ShellStrListOption, StrListOption, StrOption, @@ -144,13 +146,31 @@ def env_vars(self) -> tuple[str, ...]: """ ), ) + build_engine = EnumOption( + default=DockerBuildEngine.DOCKER, + enum_type=DockerBuildEngine, + help=softwrap( + """ + The engine to use for Docker builds. + + Valid values are: + + - `docker`: Use the Docker CLI with buildx to build images. (https://docs.docker.com/reference/cli/docker/buildx/build/) + - `legacy`: Use the legacy engine to build images. (https://docs.docker.com/reference/cli/docker/build-legacy/) + - `buildkit`: Invoke buildkit directly to build images. (https://github.com/moby/buildkit/blob/master/docs/reference/buildctl.md#build) + """ + ), + ) use_buildx = BoolOption( default=False, help=softwrap( """ + DEPRECATED: Use [docker].build_engine = "docker" instead. + Use [buildx](https://github.com/docker/buildx#buildx) (and BuildKit) for builds. """ ), + deprecation_start_version="2.31.0", ) _build_args = ShellStrListOption( diff --git a/src/python/pants/backend/docker/util_rules/buildctl_binary.py b/src/python/pants/backend/docker/util_rules/buildctl_binary.py new file mode 100644 index 00000000000..cb1028a67bd --- /dev/null +++ b/src/python/pants/backend/docker/util_rules/buildctl_binary.py @@ -0,0 +1,138 @@ +# Copyright 2026 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). 
+import os +from collections.abc import Mapping +from dataclasses import dataclass +from pathlib import Path + +from pants.backend.docker.subsystems.docker_options import DockerOptions +from pants.backend.docker.util_rules.docker_build_args import DockerBuildArgs +from pants.core.util_rules.system_binaries import ( + BinaryPath, + BinaryPathRequest, + BinaryPathTest, + find_binary, +) +from pants.engine.fs import Digest +from pants.engine.process import Process, ProcessCacheScope +from pants.engine.rules import collect_rules, implicitly, rule +from pants.util.logging import LogLevel +from pants.util.strutil import pluralize + +from .docker_binary import _get_docker_tools_shims + + +@dataclass(frozen=True) +class BuildctlBinary(BinaryPath): + """The `buildctl` binary.""" + + extra_env: Mapping[str, str] + extra_input_digests: Mapping[str, Digest] | None + + def __init__( + self, + path: str, + fingerprint: str | None = None, + extra_env: Mapping[str, str] | None = None, + extra_input_digests: Mapping[str, Digest] | None = None, + ) -> None: + object.__setattr__(self, "extra_env", {} if extra_env is None else extra_env) + object.__setattr__(self, "extra_input_digests", extra_input_digests) + super().__init__(path, fingerprint) + + def _get_process_environment(self, env: Mapping[str, str]) -> Mapping[str, str]: + if not self.extra_env: + return env + + res = {**self.extra_env, **env} + + # Merge the PATH entries, in case they are present in both `env` and `self.extra_env`. + res["PATH"] = os.pathsep.join( + p for p in (m.get("PATH") for m in (self.extra_env, env)) if p + ) + return res + + def build_image( + self, + tags: tuple[str, ...], + digest: Digest, + dockerfile: Path, + build_args: DockerBuildArgs, + context_root: str, + env: Mapping[str, str], + target_stage: str | None = None, + extra_args: tuple[str, ...] 
= (), + ) -> Process: + args = [ + self.path, + *extra_args, + "build", + "--frontend", + "dockerfile.v0", + "--local", + f"context={context_root}", + "--local", + f"dockerfile={dockerfile.parent}", + "--opt", + f"filename={dockerfile.name}", + ] + + for build_arg in build_args: + args.extend(["--opt", f"build-arg:{build_arg}"]) + + if target_stage: + args.extend(["--opt", f"--target={target_stage}"]) + + for tag in tags: + args.extend(["--output", f"type=image,name={tag},push=true"]) + + return Process( + argv=tuple(args), + description=( + f"Building docker image {tags[0]}" + + (f" +{pluralize(len(tags) - 1, 'additional tag')}." if len(tags) > 1 else "") + ), + env=self._get_process_environment(env), + input_digest=digest, + immutable_input_digests=self.extra_input_digests, + cache_scope=ProcessCacheScope.PER_SESSION, + ) + + +@rule(desc="Finding the `buildctl` binary and related tooling", level=LogLevel.DEBUG) +async def get_docker( + docker_options: DockerOptions, docker_options_env_aware: DockerOptions.EnvironmentAware +) -> BuildctlBinary: + search_path = docker_options_env_aware.executable_search_path + + request = BinaryPathRequest( + binary_name="buildctl", + search_path=search_path, + test=BinaryPathTest(args=["-v"]), + ) + paths = await find_binary(request, **implicitly()) + first_path = paths.first_path_or_raise(request, rationale="interact with the docker daemon") + + if not docker_options.tools and not docker_options.optional_tools: + return BuildctlBinary(first_path.path, first_path.fingerprint) + + tools_shims = await _get_docker_tools_shims( + tools=docker_options.tools, + optional_tools=docker_options.optional_tools, + search_path=search_path, + rationale="use docker", + ) + + extra_env = {"PATH": tools_shims.path_component} + extra_input_digests = tools_shims.immutable_input_digests + + return BuildctlBinary( + first_path.path, + first_path.fingerprint, + extra_env=extra_env, + extra_input_digests=extra_input_digests, + ) + + +def rules(): + 
return collect_rules() From 7d7bf7f2ee0639a9c254094b1d90446d0c8d24f1 Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Fri, 16 Jan 2026 14:46:48 -0500 Subject: [PATCH 02/25] lots of reorganizing --- .../pants/backend/docker/engine_types.py | 15 ++ .../backend/docker/goals/package_image.py | 72 ++++--- .../docker/goals/package_image_test.py | 2 +- .../pants/backend/docker/goals/publish.py | 2 +- .../backend/docker/goals/publish_test.py | 2 +- .../pants/backend/docker/goals/run_image.py | 2 +- .../pants/backend/docker/package_types.py | 7 - src/python/pants/backend/docker/rules.py | 6 +- .../docker/subsystems/docker_options.py | 23 ++- .../{docker_binary.py => binaries.py} | 193 ++++++++++++------ .../docker/util_rules/buildctl_binary.py | 138 ------------- .../docker/util_rules/docker_binary_test.py | 2 +- .../experimental/docker/podman/register.py | 4 + 13 files changed, 226 insertions(+), 242 deletions(-) create mode 100644 src/python/pants/backend/docker/engine_types.py rename src/python/pants/backend/docker/util_rules/{docker_binary.py => binaries.py} (58%) delete mode 100644 src/python/pants/backend/docker/util_rules/buildctl_binary.py diff --git a/src/python/pants/backend/docker/engine_types.py b/src/python/pants/backend/docker/engine_types.py new file mode 100644 index 00000000000..fa1466cf55d --- /dev/null +++ b/src/python/pants/backend/docker/engine_types.py @@ -0,0 +1,15 @@ +# Copyright 2026 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). 
+from enum import Enum + + +class DockerBuildEngine(Enum): + DOCKER = "docker" + LEGACY = "legacy" + BUILDKIT = "buildkit" + PODMAN = "podman" + + +class DockerRunEngine(Enum): + DOCKER = "docker" + PODMAN = "podman" diff --git a/src/python/pants/backend/docker/goals/package_image.py b/src/python/pants/backend/docker/goals/package_image.py index 902c5f1a506..c66c3ae19d7 100644 --- a/src/python/pants/backend/docker/goals/package_image.py +++ b/src/python/pants/backend/docker/goals/package_image.py @@ -10,11 +10,14 @@ from dataclasses import asdict, dataclass from functools import partial from itertools import chain +from pathlib import Path from typing import Literal, cast +from pants.backend.docker.engine_types import DockerBuildEngine + # Re-exporting BuiltDockerImage here, as it has its natural home here, but has moved out to resolve # a dependency cycle from docker_build_context. -from pants.backend.docker.package_types import BuiltDockerImage as BuiltDockerImage +from pants.backend.docker.package_types import BuiltDockerImage from pants.backend.docker.registries import DockerRegistries, DockerRegistryOptions from pants.backend.docker.subsystems.docker_options import DockerOptions from pants.backend.docker.target_types import ( @@ -34,7 +37,12 @@ DockerImageTargetStageField, get_docker_image_tags, ) -from pants.backend.docker.util_rules.docker_binary import DockerBinary +from pants.backend.docker.util_rules.binaries import ( + BuildctlBinary, + DockerBinary, + get_buildctl, + get_docker, +) from pants.backend.docker.util_rules.docker_build_context import ( DockerBuildContext, DockerBuildContextRequest, @@ -388,7 +396,6 @@ async def build_docker_image( field_set: DockerPackageFieldSet, options: DockerOptions, global_options: GlobalOptions, - docker: DockerBinary, keep_sandboxes: KeepSandboxes, union_membership: UnionMembership, ) -> BuiltPackage: @@ -444,26 +451,40 @@ async def build_docker_image( "__UPSTREAM_IMAGE_IDS": ",".join(context.upstream_image_ids), } 
context_root = field_set.get_context_root(options.default_context_root) - process = docker.build_image( - build_args=context.build_args, - digest=context.digest, - dockerfile=context.dockerfile, - context_root=context_root, - env=env, - tags=tags, - use_buildx=options.use_buildx, - extra_args=tuple( - get_build_options( - context=context, - field_set=field_set, - global_target_stage_option=options.build_target_stage, - global_build_hosts_options=options.build_hosts, - global_build_no_cache_option=options.build_no_cache, - use_buildx_option=options.use_buildx, - target=wrapped_target.target, - ) - ), - ) + if options.build_engine == DockerBuildEngine.BUILDKIT: + buildctl = await get_buildctl(**implicitly()) + process = buildctl.build_image( + build_args=context.build_args, + digest=context.digest, + dockerfile=Path(context.dockerfile), + context_root=context_root, + env=env, + tags=tags, + ) + parse_image_id = partial(parse_image_id_from_buildctl_build_output, buildctl) + else: + docker = await get_docker(**implicitly()) + process = docker.build_image( + build_args=context.build_args, + digest=context.digest, + dockerfile=context.dockerfile, + context_root=context_root, + env=env, + tags=tags, + use_buildx=options.use_buildx, + extra_args=tuple( + get_build_options( + context=context, + field_set=field_set, + global_target_stage_option=options.build_target_stage, + global_build_hosts_options=options.build_hosts, + global_build_no_cache_option=options.build_no_cache, + use_buildx_option=options.use_buildx, + target=wrapped_target.target, + ) + ), + ) + parse_image_id = partial(parse_image_id_from_docker_build_output, docker) result = await execute_process(process, **implicitly()) if result.exit_code != 0: @@ -487,7 +508,7 @@ async def build_docker_image( keep_sandboxes=keep_sandboxes, ) - image_id = parse_image_id_from_docker_build_output(docker, result.stdout, result.stderr) + image_id = parse_image_id(result.stdout, result.stderr) docker_build_output_msg = 
"\n".join( ( f"Docker build output for {tags[0]}:", @@ -567,6 +588,9 @@ def parse_image_id_from_docker_build_output(docker: DockerBinary, *outputs: byte return "" +def parse_image_id_from_buildctl_build_output(buildctl: BuildctlBinary, *outputs: bytes) -> str: ... + + def format_docker_build_context_help_message( context_root: str, context: DockerBuildContext, colors: bool ) -> str | None: diff --git a/src/python/pants/backend/docker/goals/package_image_test.py b/src/python/pants/backend/docker/goals/package_image_test.py index 7e58a07605a..6fc0aa932c0 100644 --- a/src/python/pants/backend/docker/goals/package_image_test.py +++ b/src/python/pants/backend/docker/goals/package_image_test.py @@ -34,7 +34,7 @@ DockerImageTagsRequest, DockerImageTarget, ) -from pants.backend.docker.util_rules.docker_binary import DockerBinary +from pants.backend.docker.util_rules.binaries import DockerBinary from pants.backend.docker.util_rules.docker_build_args import ( DockerBuildArgs, DockerBuildArgsRequest, diff --git a/src/python/pants/backend/docker/goals/publish.py b/src/python/pants/backend/docker/goals/publish.py index a9ec861566c..c013bcd96e0 100644 --- a/src/python/pants/backend/docker/goals/publish.py +++ b/src/python/pants/backend/docker/goals/publish.py @@ -12,7 +12,7 @@ from pants.backend.docker.goals.package_image import BuiltDockerImage from pants.backend.docker.subsystems.docker_options import DockerOptions from pants.backend.docker.target_types import DockerImageRegistriesField, DockerImageSkipPushField -from pants.backend.docker.util_rules.docker_binary import DockerBinary +from pants.backend.docker.util_rules.binaries import DockerBinary from pants.core.goals.publish import ( PublishFieldSet, PublishOutputData, diff --git a/src/python/pants/backend/docker/goals/publish_test.py b/src/python/pants/backend/docker/goals/publish_test.py index 212d3175f50..9a5277688b5 100644 --- a/src/python/pants/backend/docker/goals/publish_test.py +++ 
b/src/python/pants/backend/docker/goals/publish_test.py @@ -17,7 +17,7 @@ from pants.backend.docker.subsystems.docker_options import DockerOptions from pants.backend.docker.target_types import DockerImageTarget from pants.backend.docker.util_rules import docker_binary -from pants.backend.docker.util_rules.docker_binary import DockerBinary +from pants.backend.docker.util_rules.binaries import DockerBinary from pants.core.goals.package import BuiltPackage from pants.core.goals.publish import PublishPackages, PublishProcesses from pants.engine.addresses import Address diff --git a/src/python/pants/backend/docker/goals/run_image.py b/src/python/pants/backend/docker/goals/run_image.py index b4217d67136..dde4e89c17e 100644 --- a/src/python/pants/backend/docker/goals/run_image.py +++ b/src/python/pants/backend/docker/goals/run_image.py @@ -13,7 +13,7 @@ DockerImageRunExtraArgsField, DockerImageSourceField, ) -from pants.backend.docker.util_rules.docker_binary import DockerBinary +from pants.backend.docker.util_rules.binaries import DockerBinary from pants.core.goals.package import PackageFieldSet, build_package from pants.core.goals.run import RunFieldSet, RunInSandboxBehavior, RunRequest from pants.core.util_rules.env_vars import environment_vars_subset diff --git a/src/python/pants/backend/docker/package_types.py b/src/python/pants/backend/docker/package_types.py index 858c6e88364..bcce33b6654 100644 --- a/src/python/pants/backend/docker/package_types.py +++ b/src/python/pants/backend/docker/package_types.py @@ -4,18 +4,11 @@ from __future__ import annotations from dataclasses import dataclass -from enum import Enum from pants.core.goals.package import BuiltPackageArtifact from pants.util.strutil import bullet_list, pluralize -class DockerBuildEngine(Enum): - DOCKER = "docker" - LEGACY = "legacy" - BUILDKIT = "buildkit" - - @dataclass(frozen=True) class BuiltDockerImage(BuiltPackageArtifact): # We don't really want a default for this field, but the superclass has a 
field with diff --git a/src/python/pants/backend/docker/rules.py b/src/python/pants/backend/docker/rules.py index 9360b66d980..451922a732a 100644 --- a/src/python/pants/backend/docker/rules.py +++ b/src/python/pants/backend/docker/rules.py @@ -4,9 +4,8 @@ from pants.backend.docker.goals import package_image, publish, run_image from pants.backend.docker.subsystems import dockerfile_parser from pants.backend.docker.util_rules import ( - buildctl_binary, + binaries, dependencies, - docker_binary, docker_build_args, docker_build_context, docker_build_env, @@ -16,9 +15,8 @@ def rules(): return [ - *buildctl_binary.rules(), + *binaries.rules(), *dependencies.rules(), - *docker_binary.rules(), *docker_build_args.rules(), *docker_build_context.rules(), *docker_build_env.rules(), diff --git a/src/python/pants/backend/docker/subsystems/docker_options.py b/src/python/pants/backend/docker/subsystems/docker_options.py index 41dc9fb5154..b43f4853d5d 100644 --- a/src/python/pants/backend/docker/subsystems/docker_options.py +++ b/src/python/pants/backend/docker/subsystems/docker_options.py @@ -6,7 +6,7 @@ import sys from typing import Any -from pants.backend.docker.package_types import DockerBuildEngine +from pants.backend.docker.engine_types import DockerBuildEngine, DockerRunEngine from pants.backend.docker.registries import DockerRegistries from pants.core.util_rules.search_paths import ExecutableSearchPathsOptionMixin from pants.option.option_types import ( @@ -146,9 +146,18 @@ def env_vars(self) -> tuple[str, ...]: """ ), ) + global_options = ShellStrListOption( + default=[], + help=softwrap( + """ + Global options to use for all Docker and BuildKit invocations. + """ + ), + ) build_engine = EnumOption( default=DockerBuildEngine.DOCKER, enum_type=DockerBuildEngine, + mutually_exclusive_group="build_engine", help=softwrap( """ The engine to use for Docker builds. 
@@ -163,6 +172,7 @@ def env_vars(self) -> tuple[str, ...]: ) use_buildx = BoolOption( default=False, + mutually_exclusive_group="build_engine", help=softwrap( """ DEPRECATED: Use [docker].build_engine = "docker" instead. @@ -172,7 +182,16 @@ def env_vars(self) -> tuple[str, ...]: ), deprecation_start_version="2.31.0", ) - + run_engine = EnumOption( + default=DockerRunEngine.DOCKER, + enum_type=DockerRunEngine, + mutually_exclusive_group="run_engine", + help=softwrap( + """ + The engine to use for Docker runs. + """ + ), + ) _build_args = ShellStrListOption( help=softwrap( f""" diff --git a/src/python/pants/backend/docker/util_rules/docker_binary.py b/src/python/pants/backend/docker/util_rules/binaries.py similarity index 58% rename from src/python/pants/backend/docker/util_rules/docker_binary.py rename to src/python/pants/backend/docker/util_rules/binaries.py index 292ff766f9a..4e9e04cf6e1 100644 --- a/src/python/pants/backend/docker/util_rules/docker_binary.py +++ b/src/python/pants/backend/docker/util_rules/binaries.py @@ -1,13 +1,10 @@ -# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md). +# Copyright 2026 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -from __future__ import annotations - -import logging import os +from abc import ABC, abstractmethod from collections.abc import Mapping, Sequence from dataclasses import dataclass -from typing import cast +from typing import TypeVar, cast from pants.backend.docker.subsystems.docker_options import DockerOptions from pants.backend.docker.util_rules.docker_build_args import DockerBuildArgs @@ -21,35 +18,33 @@ find_binary, ) from pants.engine.fs import Digest -from pants.engine.internals.selectors import concurrently from pants.engine.process import Process, ProcessCacheScope -from pants.engine.rules import collect_rules, implicitly, rule +from pants.engine.rules import collect_rules, concurrently, implicitly, rule from pants.util.logging import LogLevel from pants.util.strutil import pluralize -logger = logging.getLogger(__name__) +T = TypeVar("T", bound="BaseBinary") @dataclass(frozen=True) -class DockerBinary(BinaryPath): - """The `docker` binary.""" +class BaseBinary(BinaryPath, ABC): + """Base class for all binary paths.""" + global_options: tuple[str, ...] extra_env: Mapping[str, str] extra_input_digests: Mapping[str, Digest] | None - is_podman: bool - def __init__( self, path: str, fingerprint: str | None = None, + global_options: tuple[str, ...] 
= (), extra_env: Mapping[str, str] | None = None, extra_input_digests: Mapping[str, Digest] | None = None, - is_podman: bool = False, ) -> None: + object.__setattr__(self, "global_options", global_options) object.__setattr__(self, "extra_env", {} if extra_env is None else extra_env) object.__setattr__(self, "extra_input_digests", extra_input_digests) - object.__setattr__(self, "is_podman", is_podman) super().__init__(path, fingerprint) def _get_process_environment(self, env: Mapping[str, str]) -> Mapping[str, str]: @@ -64,6 +59,7 @@ def _get_process_environment(self, env: Mapping[str, str]) -> Mapping[str, str]: ) return res + @abstractmethod def build_image( self, tags: tuple[str, ...], @@ -72,15 +68,22 @@ def build_image( build_args: DockerBuildArgs, context_root: str, env: Mapping[str, str], - use_buildx: bool, extra_args: tuple[str, ...] = (), - ) -> Process: - if use_buildx: - build_commands = ["buildx", "build"] - else: - build_commands = ["build"] + ) -> Process: ... - args = [self.path, *build_commands, *extra_args] + +class _DockerPodmanMixin(BaseBinary): + def build_image( + self, + tags: tuple[str, ...], + digest: Digest, + dockerfile: str, + build_args: DockerBuildArgs, + context_root: str, + env: Mapping[str, str], + extra_args: tuple[str, ...] 
= (), + ) -> Process: + args = [self.path, *self.global_options, "build", *extra_args] for tag in tags: args.extend(["--tag", tag]) @@ -109,7 +112,7 @@ def build_image( def push_image(self, tag: str, env: Mapping[str, str] | None = None) -> Process: return Process( - argv=(self.path, "push", tag), + argv=(self.path, *self.global_options, "push", tag), cache_scope=ProcessCacheScope.PER_SESSION, description=f"Pushing docker image {tag}", env=self._get_process_environment(env or {}), @@ -125,7 +128,14 @@ def run_image( env: Mapping[str, str] | None = None, ) -> Process: return Process( - argv=(self.path, "run", *(docker_run_args or []), tag, *(image_args or [])), + argv=( + self.path, + *self.global_options, + "run", + *(docker_run_args or []), + tag, + *(image_args or []), + ), cache_scope=ProcessCacheScope.PER_SESSION, description=f"Running docker image {tag}", env=self._get_process_environment(env or {}), @@ -133,6 +143,61 @@ def run_image( ) +class DockerBinary(_DockerPodmanMixin): + """The `docker` binary.""" + + +class PodmanBinary(_DockerPodmanMixin): + """The `podman` binary.""" + + +class BuildctlBinary(BaseBinary): + """The `buildctl` binary.""" + + def build_image( + self, + tags: tuple[str, ...], + digest: Digest, + dockerfile: str, + build_args: DockerBuildArgs, + context_root: str, + env: Mapping[str, str], + extra_args: tuple[str, ...] = (), + ) -> Process: + args = [ + self.path, + *self.global_options, + "build", + "--frontend", + "dockerfile.v0", + "--local", + f"context={context_root}", + "--local", + f"dockerfile={os.path.dirname(dockerfile)}", + "--opt", + f"filename={os.path.basename(dockerfile)}", + *extra_args, + ] + + for build_arg in build_args: + args.extend(["--opt", f"build-arg:{build_arg}"]) + + for tag in tags: + args.extend(["--output", f"type=image,name={tag},push=true"]) + + return Process( + argv=tuple(args), + description=( + f"Building docker image {tags[0]}" + + (f" +{pluralize(len(tags) - 1, 'additional tag')}." 
if len(tags) > 1 else "") + ), + env=self._get_process_environment(env), + input_digest=digest, + immutable_input_digests=self.extra_input_digests, + cache_scope=ProcessCacheScope.PER_SESSION, + ) + + async def _get_docker_tools_shims( *, tools: Sequence[str], @@ -189,58 +254,62 @@ async def _get_docker_tools_shims( return tools_shims -@rule(desc="Finding the `docker` binary and related tooling", level=LogLevel.DEBUG) -async def get_docker( - docker_options: DockerOptions, docker_options_env_aware: DockerOptions.EnvironmentAware -) -> DockerBinary: +async def get_binary( + binary_name: str, + binary_cls: type[T], + docker_options: DockerOptions, + docker_options_env_aware: DockerOptions.EnvironmentAware, +) -> T: search_path = docker_options_env_aware.executable_search_path - first_path: BinaryPath | None = None - is_podman = False - - if getattr(docker_options.options, "experimental_enable_podman", False): - # Enable podman support with `pants.backend.experimental.docker.podman` - request = BinaryPathRequest( - binary_name="podman", - search_path=search_path, - test=BinaryPathTest(args=["-v"]), - ) - paths = await find_binary(request, **implicitly()) - first_path = paths.first_path - if first_path: - is_podman = True - logger.warning("podman found. 
Podman support is experimental.") - - if not first_path: - request = BinaryPathRequest( - binary_name="docker", - search_path=search_path, - test=BinaryPathTest(args=["-v"]), - ) - paths = await find_binary(request, **implicitly()) - first_path = paths.first_path_or_raise(request, rationale="interact with the docker daemon") + request = BinaryPathRequest( + binary_name=binary_name, + search_path=search_path, + test=BinaryPathTest(args=["-v"]), + ) + paths = await find_binary(request, **implicitly()) + first_path = paths.first_path_or_raise(request, rationale="interact with the docker daemon") if not docker_options.tools and not docker_options.optional_tools: - return DockerBinary(first_path.path, first_path.fingerprint, is_podman=is_podman) + return binary_cls( + first_path.path, first_path.fingerprint, global_options=docker_options.global_options + ) tools_shims = await _get_docker_tools_shims( tools=docker_options.tools, optional_tools=docker_options.optional_tools, search_path=search_path, - rationale="use docker", + rationale=f"use {binary_name}", ) - - extra_env = {"PATH": tools_shims.path_component} - extra_input_digests = tools_shims.immutable_input_digests - - return DockerBinary( + return binary_cls( first_path.path, first_path.fingerprint, - extra_env=extra_env, - extra_input_digests=extra_input_digests, - is_podman=is_podman, + global_options=docker_options.global_options, + extra_env={"PATH": tools_shims.path_component}, + extra_input_digests=tools_shims.immutable_input_digests, ) +@rule(desc="Finding the `docker` binary and related tooling", level=LogLevel.DEBUG) +async def get_docker( + docker_options: DockerOptions, docker_options_env_aware: DockerOptions.EnvironmentAware +) -> DockerBinary: + return await get_binary("docker", DockerBinary, docker_options, docker_options_env_aware) + + +@rule(desc="Finding the `podman` binary and related tooling", level=LogLevel.DEBUG) +async def get_podman( + docker_options: DockerOptions, docker_options_env_aware: 
DockerOptions.EnvironmentAware +) -> PodmanBinary: + return await get_binary("podman", PodmanBinary, docker_options, docker_options_env_aware) + + +@rule(desc="Finding the `buildctl` binary and related tooling", level=LogLevel.DEBUG) +async def get_buildctl( + docker_options: DockerOptions, docker_options_env_aware: DockerOptions.EnvironmentAware +) -> BuildctlBinary: + return await get_binary("buildctl", BuildctlBinary, docker_options, docker_options_env_aware) + + def rules(): return collect_rules() diff --git a/src/python/pants/backend/docker/util_rules/buildctl_binary.py b/src/python/pants/backend/docker/util_rules/buildctl_binary.py deleted file mode 100644 index cb1028a67bd..00000000000 --- a/src/python/pants/backend/docker/util_rules/buildctl_binary.py +++ /dev/null @@ -1,138 +0,0 @@ -# Copyright 2026 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). -import os -from collections.abc import Mapping -from dataclasses import dataclass -from pathlib import Path - -from pants.backend.docker.subsystems.docker_options import DockerOptions -from pants.backend.docker.util_rules.docker_build_args import DockerBuildArgs -from pants.core.util_rules.system_binaries import ( - BinaryPath, - BinaryPathRequest, - BinaryPathTest, - find_binary, -) -from pants.engine.fs import Digest -from pants.engine.process import Process, ProcessCacheScope -from pants.engine.rules import collect_rules, implicitly, rule -from pants.util.logging import LogLevel -from pants.util.strutil import pluralize - -from .docker_binary import _get_docker_tools_shims - - -@dataclass(frozen=True) -class BuildctlBinary(BinaryPath): - """The `buildctl` binary.""" - - extra_env: Mapping[str, str] - extra_input_digests: Mapping[str, Digest] | None - - def __init__( - self, - path: str, - fingerprint: str | None = None, - extra_env: Mapping[str, str] | None = None, - extra_input_digests: Mapping[str, Digest] | None = None, - ) -> None: - 
object.__setattr__(self, "extra_env", {} if extra_env is None else extra_env) - object.__setattr__(self, "extra_input_digests", extra_input_digests) - super().__init__(path, fingerprint) - - def _get_process_environment(self, env: Mapping[str, str]) -> Mapping[str, str]: - if not self.extra_env: - return env - - res = {**self.extra_env, **env} - - # Merge the PATH entries, in case they are present in both `env` and `self.extra_env`. - res["PATH"] = os.pathsep.join( - p for p in (m.get("PATH") for m in (self.extra_env, env)) if p - ) - return res - - def build_image( - self, - tags: tuple[str, ...], - digest: Digest, - dockerfile: Path, - build_args: DockerBuildArgs, - context_root: str, - env: Mapping[str, str], - target_stage: str | None = None, - extra_args: tuple[str, ...] = (), - ) -> Process: - args = [ - self.path, - *extra_args, - "build", - "--frontend", - "dockerfile.v0", - "--local", - f"context={context_root}", - "--local", - f"dockerfile={dockerfile.parent}", - "--opt", - f"filename={dockerfile.name}", - ] - - for build_arg in build_args: - args.extend(["--opt", f"build-arg:{build_arg}"]) - - if target_stage: - args.extend(["--opt", f"--target={target_stage}"]) - - for tag in tags: - args.extend(["--output", f"type=image,name={tag},push=true"]) - - return Process( - argv=tuple(args), - description=( - f"Building docker image {tags[0]}" - + (f" +{pluralize(len(tags) - 1, 'additional tag')}." 
if len(tags) > 1 else "") - ), - env=self._get_process_environment(env), - input_digest=digest, - immutable_input_digests=self.extra_input_digests, - cache_scope=ProcessCacheScope.PER_SESSION, - ) - - -@rule(desc="Finding the `buildctl` binary and related tooling", level=LogLevel.DEBUG) -async def get_docker( - docker_options: DockerOptions, docker_options_env_aware: DockerOptions.EnvironmentAware -) -> BuildctlBinary: - search_path = docker_options_env_aware.executable_search_path - - request = BinaryPathRequest( - binary_name="buildctl", - search_path=search_path, - test=BinaryPathTest(args=["-v"]), - ) - paths = await find_binary(request, **implicitly()) - first_path = paths.first_path_or_raise(request, rationale="interact with the docker daemon") - - if not docker_options.tools and not docker_options.optional_tools: - return BuildctlBinary(first_path.path, first_path.fingerprint) - - tools_shims = await _get_docker_tools_shims( - tools=docker_options.tools, - optional_tools=docker_options.optional_tools, - search_path=search_path, - rationale="use docker", - ) - - extra_env = {"PATH": tools_shims.path_component} - extra_input_digests = tools_shims.immutable_input_digests - - return BuildctlBinary( - first_path.path, - first_path.fingerprint, - extra_env=extra_env, - extra_input_digests=extra_input_digests, - ) - - -def rules(): - return collect_rules() diff --git a/src/python/pants/backend/docker/util_rules/docker_binary_test.py b/src/python/pants/backend/docker/util_rules/docker_binary_test.py index c32ee58eefe..9dd062489ec 100644 --- a/src/python/pants/backend/docker/util_rules/docker_binary_test.py +++ b/src/python/pants/backend/docker/util_rules/docker_binary_test.py @@ -7,7 +7,7 @@ import pytest from pants.backend.docker.subsystems.docker_options import DockerOptions -from pants.backend.docker.util_rules.docker_binary import DockerBinary, get_docker, rules +from pants.backend.docker.util_rules.binaries import DockerBinary, get_docker, rules from 
pants.backend.docker.util_rules.docker_build_args import DockerBuildArgs from pants.backend.experimental.docker.podman.register import rules as podman_rules from pants.core.util_rules.system_binaries import ( diff --git a/src/python/pants/backend/experimental/docker/podman/register.py b/src/python/pants/backend/experimental/docker/podman/register.py index caf6aff4f1a..472bdb0a8db 100644 --- a/src/python/pants/backend/experimental/docker/podman/register.py +++ b/src/python/pants/backend/experimental/docker/podman/register.py @@ -8,11 +8,15 @@ class ExperimentalPodmanOptions: experimental_enable_podman = BoolOption( default=True, + mutually_exclusive_group="run_engine", help=softwrap( """ + DEPRECATED: Use [docker].run_engine = "podman" instead. + Allow support for podman when available. """ ), + deprecation_start_version="2.31.0", ) From 683672d82f4f2d77e37c11dc22d4b8f4316e90bb Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Wed, 21 Jan 2026 11:06:12 -0500 Subject: [PATCH 03/25] in progress --- .../pants/backend/docker/engine_types.py | 1 - .../backend/docker/goals/package_image.py | 159 +++++++++--------- .../docker/subsystems/docker_options.py | 58 ++++++- 3 files changed, 131 insertions(+), 87 deletions(-) diff --git a/src/python/pants/backend/docker/engine_types.py b/src/python/pants/backend/docker/engine_types.py index fa1466cf55d..af5c410d3d6 100644 --- a/src/python/pants/backend/docker/engine_types.py +++ b/src/python/pants/backend/docker/engine_types.py @@ -5,7 +5,6 @@ class DockerBuildEngine(Enum): DOCKER = "docker" - LEGACY = "legacy" BUILDKIT = "buildkit" PODMAN = "podman" diff --git a/src/python/pants/backend/docker/goals/package_image.py b/src/python/pants/backend/docker/goals/package_image.py index c66c3ae19d7..48af3734413 100644 --- a/src/python/pants/backend/docker/goals/package_image.py +++ b/src/python/pants/backend/docker/goals/package_image.py @@ -42,6 +42,7 @@ DockerBinary, get_buildctl, get_docker, + get_podman ) from 
pants.backend.docker.util_rules.docker_build_context import ( DockerBuildContext, @@ -451,40 +452,37 @@ async def build_docker_image( "__UPSTREAM_IMAGE_IDS": ",".join(context.upstream_image_ids), } context_root = field_set.get_context_root(options.default_context_root) - if options.build_engine == DockerBuildEngine.BUILDKIT: - buildctl = await get_buildctl(**implicitly()) - process = buildctl.build_image( - build_args=context.build_args, - digest=context.digest, - dockerfile=Path(context.dockerfile), - context_root=context_root, - env=env, - tags=tags, - ) - parse_image_id = partial(parse_image_id_from_buildctl_build_output, buildctl) - else: - docker = await get_docker(**implicitly()) - process = docker.build_image( - build_args=context.build_args, - digest=context.digest, - dockerfile=context.dockerfile, - context_root=context_root, - env=env, - tags=tags, - use_buildx=options.use_buildx, - extra_args=tuple( - get_build_options( - context=context, - field_set=field_set, - global_target_stage_option=options.build_target_stage, - global_build_hosts_options=options.build_hosts, - global_build_no_cache_option=options.build_no_cache, - use_buildx_option=options.use_buildx, - target=wrapped_target.target, - ) - ), - ) - parse_image_id = partial(parse_image_id_from_docker_build_output, docker) + match options.build_engine: + case DockerBuildEngine.BUILDKIT: + binary = await get_buildctl(**implicitly()) + parse_image_id = parse_image_id_from_buildctl_build_output + case DockerBuildEngine.PODMAN: + binary = await get_podman(**implicitly()) + parse_image_id = parse_image_id_from_podman_build_output + case _: + binary = await get_docker(**implicitly()) + parse_image_id = parse_image_id_from_docker_build_output + + process = binary.build_image( + build_args=context.build_args, + digest=context.digest, + dockerfile=context.dockerfile, + context_root=context_root, + env=env, + tags=tags, + use_buildx=options.use_buildx, + extra_args=tuple( + get_build_options( + 
context=context, + field_set=field_set, + global_target_stage_option=options.build_target_stage, + global_build_hosts_options=options.build_hosts, + global_build_no_cache_option=options.build_no_cache, + use_buildx_option=options.use_buildx, + target=wrapped_target.target, + ) + ), + ) result = await execute_process(process, **implicitly()) if result.exit_code != 0: @@ -534,61 +532,62 @@ async def build_docker_image( ) -def parse_image_id_from_docker_build_output(docker: DockerBinary, *outputs: bytes) -> str: +def parse_image_id_from_docker_build_output(*outputs: bytes) -> str | None: """Outputs are typically the stdout/stderr pair from the `docker build` process.""" # NB: We use the extracted image id for invalidation. The short_id may theoretically # not be unique enough, although in a non adversarial situation, this is highly unlikely # to be an issue in practice. - if docker.is_podman: - for output in outputs: - try: - _, image_id, success, *__ = reversed(output.decode().split("\n")) - except ValueError: - continue - - if success.startswith("Successfully tagged"): - return image_id - - else: - image_id_regexp = re.compile( - "|".join( - ( - # BuildKit output. - r"(writing image (?Psha256:\S+))", - # BuildKit with containerd-snapshotter output. - r"(exporting manifest list (?Psha256:\S+))", - # BuildKit with containerd-snapshotter output and no attestation. - r"(exporting manifest (?Psha256:\S+))", - # Docker output. - r"(Successfully built (?P\S+))", - ), - ) + image_id_regexp = re.compile( + "|".join( + ( + # BuildKit output. + r"(writing image (?Psha256:\S+))", + # BuildKit with containerd-snapshotter output. + r"(exporting manifest list (?Psha256:\S+))", + # BuildKit with containerd-snapshotter output and no attestation. + r"(exporting manifest (?Psha256:\S+))", + # Docker output. 
+ r"(Successfully built (?P\S+))", + ), ) - for output in outputs: - image_id_match = next( - ( - match - for match in ( - re.search(image_id_regexp, line) - for line in reversed(output.decode().split("\n")) - ) - if match - ), - None, - ) - if image_id_match: - image_id = ( - image_id_match.group("digest") - or image_id_match.group("short_id") - or image_id_match.group("manifest_list") - or image_id_match.group("manifest") + ) + for output in outputs: + image_id_match = next( + ( + match + for match in ( + re.search(image_id_regexp, line) + for line in reversed(output.decode().split("\n")) ) - return image_id + if match + ), + None, + ) + if image_id_match: + image_id = ( + image_id_match.group("digest") + or image_id_match.group("short_id") + or image_id_match.group("manifest_list") + or image_id_match.group("manifest") + ) + return image_id + + return None + + +def parse_image_id_from_podman_build_output(*outputs: bytes) -> str: + for output in outputs: + try: + _, image_id, success, *__ = reversed(output.decode().split("\n")) + except ValueError: + continue - return "" + if success.startswith("Successfully tagged"): + return image_id + return None -def parse_image_id_from_buildctl_build_output(buildctl: BuildctlBinary, *outputs: bytes) -> str: ... +def parse_image_id_from_buildctl_build_output(*outputs: bytes) -> str: ... 
def format_docker_build_context_help_message( diff --git a/src/python/pants/backend/docker/subsystems/docker_options.py b/src/python/pants/backend/docker/subsystems/docker_options.py index b43f4853d5d..3214518da1b 100644 --- a/src/python/pants/backend/docker/subsystems/docker_options.py +++ b/src/python/pants/backend/docker/subsystems/docker_options.py @@ -3,11 +3,13 @@ from __future__ import annotations +import logging import sys from typing import Any from pants.backend.docker.engine_types import DockerBuildEngine, DockerRunEngine from pants.backend.docker.registries import DockerRegistries +from pants.base.deprecated import resolve_conflicting_options from pants.core.util_rules.search_paths import ExecutableSearchPathsOptionMixin from pants.option.option_types import ( BoolOption, @@ -29,6 +31,8 @@ ), } +logger = logging.getLogger(__name__) + class DockerOptions(Subsystem): options_scope = "docker" @@ -154,10 +158,9 @@ def env_vars(self) -> tuple[str, ...]: """ ), ) - build_engine = EnumOption( + _build_engine = EnumOption( default=DockerBuildEngine.DOCKER, enum_type=DockerBuildEngine, - mutually_exclusive_group="build_engine", help=softwrap( """ The engine to use for Docker builds. @@ -171,8 +174,7 @@ def env_vars(self) -> tuple[str, ...]: ), ) use_buildx = BoolOption( - default=False, - mutually_exclusive_group="build_engine", + default=True, help=softwrap( """ DEPRECATED: Use [docker].build_engine = "docker" instead. @@ -182,16 +184,60 @@ def env_vars(self) -> tuple[str, ...]: ), deprecation_start_version="2.31.0", ) - run_engine = EnumOption( + + @property + def build_engine(self) -> DockerBuildEngine: + result: DockerBuildEngine | bool = resolve_conflicting_options( + old_option="use_buildx", + new_option="build_engine", + old_scope=self.options_scope, + new_scope=self.options_scope, + old_container=self.options, + new_container=self.options, + ) + if isinstance(result, bool): + warning = '`[docker].use_buildx` is deprecated. 
Buildx is now the default Docker build engine. Use `[docker].build_engine = "docker"` instead.' + if not result: + warning += " To use the legacy engine, add `DOCKER_BUILDKIT=0` to `[docker].env_vars`." + logger.warning(warning) + explicit = result + result = DockerBuildEngine.DOCKER + used_option = "[docker].use_buildx" + else: + explicit = not self.options.is_default("build_engine") + used_option = '[docker].build_engine != "podman"' + experimental_enable_podman = self.options.get("experimental_enable_podman", None) + if experimental_enable_podman is not None: + logger.warning('`[docker].experimental_enable_podman` is deprecated. Use `[docker].build_engine = "podman"` instead.') + if experimental_enable_podman: + if explicit and result != DockerBuildEngine.PODMAN: + raise ValueError(f"Conflicting options `{used_option}` and `[docker].experimental_enable_podman` both enabled.") + result = DockerBuildEngine.PODMAN + return result + + _run_engine = EnumOption( default=DockerRunEngine.DOCKER, enum_type=DockerRunEngine, - mutually_exclusive_group="run_engine", help=softwrap( """ The engine to use for Docker runs. """ ), ) + + @property + def run_engine(self) -> DockerRunEngine: + experimental_enable_podman = self.options.get("experimental_enable_podman", None) + if experimental_enable_podman is not None: + logger.warning('`[docker].experimental_enable_podman` is deprecated. 
Use `[docker].run_engine = "podman"` instead.') + if experimental_enable_podman: + if not self.options.is_default("run_engine"): + raise ValueError(f'Conflicting options `[docker].run_engine != "podman"` and `[docker].experimental_enable_podman` both enabled.') + return DockerRunEngine.PODMAN + if self._run_engine == DockerRunEngine.PODMAN: + raise ValueError('`[docker].run_engine` is set to "podman", but the deprecated option `[docker].experimental_enable_podman` is disabled.') + return self._run_engine + _build_args = ShellStrListOption( help=softwrap( f""" From 7a3769761be1199b0803aa0d8ee797d44a5a4cca Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Wed, 21 Jan 2026 12:54:44 -0500 Subject: [PATCH 04/25] update `get_build_options` for docker only --- .../backend/docker/goals/package_image.py | 79 ++---------- .../docker/subsystems/docker_options.py | 28 +++- .../pants/backend/docker/target_types.py | 122 ++++++++++-------- 3 files changed, 101 insertions(+), 128 deletions(-) diff --git a/src/python/pants/backend/docker/goals/package_image.py b/src/python/pants/backend/docker/goals/package_image.py index 48af3734413..13bd089537e 100644 --- a/src/python/pants/backend/docker/goals/package_image.py +++ b/src/python/pants/backend/docker/goals/package_image.py @@ -10,7 +10,6 @@ from dataclasses import asdict, dataclass from functools import partial from itertools import chain -from pathlib import Path from typing import Literal, cast from pants.backend.docker.engine_types import DockerBuildEngine @@ -21,13 +20,7 @@ from pants.backend.docker.registries import DockerRegistries, DockerRegistryOptions from pants.backend.docker.subsystems.docker_options import DockerOptions from pants.backend.docker.target_types import ( - DockerBuildKitOptionField, - DockerBuildOptionFieldListOfMultiValueDictMixin, - DockerBuildOptionFieldMixin, - DockerBuildOptionFieldMultiValueDictMixin, - DockerBuildOptionFieldMultiValueMixin, - DockerBuildOptionFieldValueMixin, - 
DockerBuildOptionFlagFieldMixin, + DockerBuildOptionsFieldMixin, DockerImageContextRootField, DockerImageRegistriesField, DockerImageRepositoryField, @@ -37,13 +30,7 @@ DockerImageTargetStageField, get_docker_image_tags, ) -from pants.backend.docker.util_rules.binaries import ( - BuildctlBinary, - DockerBinary, - get_buildctl, - get_docker, - get_podman -) +from pants.backend.docker.util_rules.binaries import get_buildctl, get_docker, get_podman from pants.backend.docker.util_rules.docker_build_context import ( DockerBuildContext, DockerBuildContextRequest, @@ -325,70 +312,31 @@ class DockerInfoV1ImageTag: def get_build_options( context: DockerBuildContext, field_set: DockerPackageFieldSet, - global_target_stage_option: str | None, - global_build_hosts_options: dict | None, - global_build_no_cache_option: bool | None, - use_buildx_option: bool, + docker_options: DockerOptions, target: Target, ) -> Iterator[str]: # Build options from target fields inheriting from DockerBuildOptionFieldMixin for field_type in target.field_types: - if issubclass(field_type, DockerBuildKitOptionField): - if use_buildx_option is not True: - if target[field_type].value != target[field_type].default: - raise DockerImageOptionValueError( - f"The {target[field_type].alias} field on the = `{target.alias}` target in `{target.address}` was set to `{target[field_type].value}`" - f" and buildx is not enabled. Buildx must be enabled via the Docker subsystem options in order to use this field." 
- ) - else: - # Case where BuildKit option has a default value - still should not be generated - continue - - if issubclass( - field_type, - ( - DockerBuildOptionFieldMixin, - DockerBuildOptionFieldMultiValueDictMixin, - DockerBuildOptionFieldListOfMultiValueDictMixin, - DockerBuildOptionFieldValueMixin, - DockerBuildOptionFieldMultiValueMixin, - DockerBuildOptionFlagFieldMixin, - ), - ): + if issubclass(field_type, DockerBuildOptionsFieldMixin): source = InterpolationContext.TextSource( address=target.address, target_alias=target.alias, field_alias=field_type.alias ) - format = partial( + value_formatter = partial( context.interpolation_context.format, source=source, error_cls=DockerImageOptionValueError, ) - yield from target[field_type].options( - format, global_build_hosts_options=global_build_hosts_options + yield from target[field_type].docker_build_options( + docker=docker_options, value_formatter=value_formatter ) # Target stage - target_stage = None - if global_target_stage_option in context.stages: - target_stage = global_target_stage_option + if docker_options.build_target_stage in context.stages: + yield from ("--target", docker_options.build_target_stage) elif field_set.target_stage.value: - target_stage = field_set.target_stage.value - if target_stage not in context.stages: - raise DockerBuildTargetStageError( - f"The {field_set.target_stage.alias!r} field in `{target.alias}` " - f"{field_set.address} was set to {target_stage!r}" - + ( - f", but there is no such stage in `{context.dockerfile}`. " - f"Available stages: {', '.join(context.stages)}." - if context.stages - else f", but there are no named stages in `{context.dockerfile}`." 
- ) - ) - - if target_stage: - yield from ("--target", target_stage) + yield from ("--target", field_set.target_stage.value) - if global_build_no_cache_option: + if docker_options.build_no_cache: yield "--no-cache" @@ -475,10 +423,7 @@ async def build_docker_image( get_build_options( context=context, field_set=field_set, - global_target_stage_option=options.build_target_stage, - global_build_hosts_options=options.build_hosts, - global_build_no_cache_option=options.build_no_cache, - use_buildx_option=options.use_buildx, + docker_options=options, target=wrapped_target.target, ) ), diff --git a/src/python/pants/backend/docker/subsystems/docker_options.py b/src/python/pants/backend/docker/subsystems/docker_options.py index 3214518da1b..f6de0796382 100644 --- a/src/python/pants/backend/docker/subsystems/docker_options.py +++ b/src/python/pants/backend/docker/subsystems/docker_options.py @@ -168,8 +168,8 @@ def env_vars(self) -> tuple[str, ...]: Valid values are: - `docker`: Use the Docker CLI with buildx to build images. (https://docs.docker.com/reference/cli/docker/buildx/build/) - - `legacy`: Use the legacy engine to build images. (https://docs.docker.com/reference/cli/docker/build-legacy/) - `buildkit`: Invoke buildkit directly to build images. (https://github.com/moby/buildkit/blob/master/docs/reference/buildctl.md#build) + - `podman`: Use Podman to build images. (https://docs.podman.io/en/latest/markdown/podman-build.1.html) """ ), ) @@ -179,6 +179,8 @@ def env_vars(self) -> tuple[str, ...]: """ DEPRECATED: Use [docker].build_engine = "docker" instead. + See here for using the legacy builder: https://docs.docker.com/reference/cli/docker/build-legacy/ + Use [buildx](https://github.com/docker/buildx#buildx) (and BuildKit) for builds. """ ), @@ -198,7 +200,9 @@ def build_engine(self) -> DockerBuildEngine: if isinstance(result, bool): warning = '`[docker].use_buildx` is deprecated. Buildx is now the default Docker build engine. 
Use `[docker].build_engine = "docker"` instead.' if not result: - warning += " To use the legacy engine, add `DOCKER_BUILDKIT=0` to `[docker].env_vars`." + warning += ( + " To use the legacy engine, add `DOCKER_BUILDKIT=0` to `[docker].env_vars`." + ) logger.warning(warning) explicit = result result = DockerBuildEngine.DOCKER @@ -208,10 +212,14 @@ def build_engine(self) -> DockerBuildEngine: used_option = '[docker].build_engine != "podman"' experimental_enable_podman = self.options.get("experimental_enable_podman", None) if experimental_enable_podman is not None: - logger.warning('`[docker].experimental_enable_podman` is deprecated. Use `[docker].build_engine = "podman"` instead.') + logger.warning( + '`[docker].experimental_enable_podman` is deprecated. Use `[docker].build_engine = "podman"` instead.' + ) if experimental_enable_podman: if explicit and result != DockerBuildEngine.PODMAN: - raise ValueError(f"Conflicting options `{used_option}` and `[docker].experimental_enable_podman` both enabled.") + raise ValueError( + f"Conflicting options `{used_option}` and `[docker].experimental_enable_podman` both enabled." + ) result = DockerBuildEngine.PODMAN return result @@ -229,13 +237,19 @@ def build_engine(self) -> DockerBuildEngine: def run_engine(self) -> DockerRunEngine: experimental_enable_podman = self.options.get("experimental_enable_podman", None) if experimental_enable_podman is not None: - logger.warning('`[docker].experimental_enable_podman` is deprecated. Use `[docker].run_engine = "podman"` instead.') + logger.warning( + '`[docker].experimental_enable_podman` is deprecated. Use `[docker].run_engine = "podman"` instead.' 
+ ) if experimental_enable_podman: if not self.options.is_default("run_engine"): - raise ValueError(f'Conflicting options `[docker].run_engine != "podman"` and `[docker].experimental_enable_podman` both enabled.') + raise ValueError( + f'Conflicting options `[docker].run_engine != "podman"` and `[docker].experimental_enable_podman` both enabled.' + ) return DockerRunEngine.PODMAN if self._run_engine == DockerRunEngine.PODMAN: - raise ValueError('`[docker].run_engine` is set to "podman", but the deprecated option `[docker].experimental_enable_podman` is disabled.') + raise ValueError( + '`[docker].run_engine` is set to "podman", but the deprecated option `[docker].experimental_enable_podman` is disabled.' + ) return self._run_engine _build_args = ShellStrListOption( diff --git a/src/python/pants/backend/docker/target_types.py b/src/python/pants/backend/docker/target_types.py index 47a24701821..b720821b2b7 100644 --- a/src/python/pants/backend/docker/target_types.py +++ b/src/python/pants/backend/docker/target_types.py @@ -238,29 +238,42 @@ class DockerImageSkipPushField(BoolField): OptionValueFormatter = Callable[[str], str] -class DockerBuildOptionFieldMixin(ABC): +class DockerBuildOptionsFieldMixin(ABC): + docker_build_option: ClassVar[str] + + @abstractmethod + def docker_build_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + """Subclasses must implement this, to turn their `self.value` into none, one or more option + values.""" + + +class DockerBuildOptionMultiValueFieldMixin(DockerBuildOptionsFieldMixin, ABC): """Inherit this mixin class to provide options to `docker build`.""" docker_build_option: ClassVar[str] @abstractmethod - def option_values( - self, *, value_formatter: OptionValueFormatter, global_build_hosts_options: dict + def docker_build_option_values( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: """Subclasses must implement this, to turn their 
`self.value` into none, one or more option values.""" @final - def options( - self, value_formatter: OptionValueFormatter, global_build_hosts_options + def docker_build_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: - for value in self.option_values( - value_formatter=value_formatter, global_build_hosts_options=global_build_hosts_options + for value in self.docker_build_option_values( + docker=docker, value_formatter=value_formatter ): yield from (self.docker_build_option, value) -class DockerImageBuildImageLabelsOptionField(DockerBuildOptionFieldMixin, DictStringToStringField): +class DockerImageBuildImageLabelsOptionField( + DockerBuildOptionMultiValueFieldMixin, DictStringToStringField +): alias = "image_labels" help = help_text( f""" @@ -274,12 +287,16 @@ class DockerImageBuildImageLabelsOptionField(DockerBuildOptionFieldMixin, DictSt ) docker_build_option = "--label" - def option_values(self, value_formatter: OptionValueFormatter, **kwargs) -> Iterator[str]: + def docker_build_option_values( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: for label, value in (self.value or {}).items(): yield f"{label}={value_formatter(value)}" -class DockerImageBuildImageExtraHostsField(DockerBuildOptionFieldMixin, DictStringToStringField): +class DockerImageBuildImageExtraHostsField( + DockerBuildOptionMultiValueFieldMixin, DictStringToStringField +): alias = "extra_build_hosts" help = help_text( """ @@ -290,39 +307,43 @@ class DockerImageBuildImageExtraHostsField(DockerBuildOptionFieldMixin, DictStri ) docker_build_option = "--add-host" - def option_values( - self, value_formatter: OptionValueFormatter, global_build_hosts_options: dict = {} + def docker_build_option_values( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: if self.value: - merged_values = {**global_build_hosts_options, **self.value} + merged_values = 
{**docker.build_hosts, **self.value} for label, value in merged_values.items(): yield f"{label}:{value_formatter(value)}" -class DockerBuildOptionFieldMultiValueDictMixin(DictStringToStringField): +class DockerBuildOptionFieldMultiValueDictMixin( + DictStringToStringField, DockerBuildOptionsFieldMixin, ABC +): """Inherit this mixin class to provide options in the form of `--flag=key1=value1,key2=value2` to `docker build`.""" - docker_build_option: ClassVar[str] - @final - def options(self, value_formatter: OptionValueFormatter, **kwargs) -> Iterator[str]: + def docker_build_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: if self.value: yield f"{self.docker_build_option}=" + ",".join( f"{key}={value_formatter(value)}" for key, value in self.value.items() ) -class DockerBuildOptionFieldListOfMultiValueDictMixin(ListOfDictStringToStringField): +class DockerBuildOptionFieldListOfMultiValueDictMixin( + ListOfDictStringToStringField, DockerBuildOptionsFieldMixin, ABC +): """Inherit this mixin class to provide multiple key-value options to docker build: `--flag=key1=value1,key2=value2 --flag=key3=value3,key4=value4` """ - docker_build_option: ClassVar[str] - @final - def options(self, value_formatter: OptionValueFormatter, **kwargs) -> Iterator[str]: + def docker_build_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: if self.value: for item in self.value: yield f"{self.docker_build_option}=" + ",".join( @@ -330,17 +351,8 @@ def options(self, value_formatter: OptionValueFormatter, **kwargs) -> Iterator[s ) -class DockerBuildKitOptionField: - """Mixin to indicate a BuildKit-specific option.""" - - @abstractmethod - def options(self, value_formatter: OptionValueFormatter) -> Iterator[str]: ... - - required_help = "This option requires BuildKit to be enabled via the Docker subsystem options." 
- - class DockerImageBuildImageCacheToField( - DockerBuildOptionFieldMultiValueDictMixin, DictStringToStringField, DockerBuildKitOptionField + DockerBuildOptionFieldMultiValueDictMixin, DictStringToStringField ): alias = "cache_to" help = help_text( @@ -351,7 +363,7 @@ class DockerImageBuildImageCacheToField( multiple cache sources - Pants will pass these as multiple `--cache_from` arguments to the Docker CLI. Docker will only use the first cache hit (i.e. the image exists) in the build. - {DockerBuildKitOptionField.required_help} + If you're using the legacy builder, this option is not supported. Example: @@ -374,16 +386,14 @@ class DockerImageBuildImageCacheToField( class DockerImageBuildImageCacheFromField( - DockerBuildOptionFieldListOfMultiValueDictMixin, - ListOfDictStringToStringField, - DockerBuildKitOptionField, + DockerBuildOptionFieldListOfMultiValueDictMixin, ListOfDictStringToStringField ): alias = "cache_from" help = help_text( f""" Use external cache sources when building the image. - {DockerBuildKitOptionField.required_help} + If you're using the legacy builder, this option is not supported. Example: @@ -412,7 +422,7 @@ class DockerImageBuildImageCacheFromField( class DockerImageBuildImageOutputField( - DockerBuildOptionFieldMultiValueDictMixin, DictStringToStringField, DockerBuildKitOptionField + DockerBuildOptionFieldMultiValueDictMixin, DictStringToStringField ): alias = "output" default = FrozenDict({"type": "docker"}) @@ -420,7 +430,7 @@ class DockerImageBuildImageOutputField( f""" Sets the export action for the build result. - {DockerBuildKitOptionField.required_help} + If you're using the legacy builder, this option is not supported. 
When using `pants publish` to publish Docker images to a registry, the output type must be 'docker', as `publish` expects that the built images exist in the local @@ -433,7 +443,7 @@ class DockerImageBuildImageOutputField( class DockerImageBuildSecretsOptionField( - AsyncFieldMixin, DockerBuildOptionFieldMixin, DictStringToStringField + AsyncFieldMixin, DockerBuildOptionMultiValueFieldMixin, DictStringToStringField ): alias = "secrets" help = help_text( @@ -461,7 +471,9 @@ class DockerImageBuildSecretsOptionField( docker_build_option = "--secret" - def option_values(self, **kwargs) -> Iterator[str]: + def docker_build_option_values( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: # os.path.join() discards preceding parts if encountering an abs path, e.g. if the secret # `path` is an absolute path, the `buildroot` and `spec_path` will not be considered. Also, # an empty path part is ignored. @@ -475,7 +487,7 @@ def option_values(self, **kwargs) -> Iterator[str]: yield f"id={secret},src={os.path.normpath(full_path)}" -class DockerImageBuildSSHOptionField(DockerBuildOptionFieldMixin, StringSequenceField): +class DockerImageBuildSSHOptionField(DockerBuildOptionMultiValueFieldMixin, StringSequenceField): alias = "ssh" default = () help = help_text( @@ -495,30 +507,32 @@ class DockerImageBuildSSHOptionField(DockerBuildOptionFieldMixin, StringSequence docker_build_option = "--ssh" - def option_values(self, **kwargs) -> Iterator[str]: + def docker_build_option_values( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: yield from cast("tuple[str]", self.value) -class DockerBuildOptionFieldValueMixin(Field): +class DockerBuildOptionFieldValueMixin(Field, DockerBuildOptionsFieldMixin, ABC): """Inherit this mixin class to provide unary options (i.e. 
option in the form of `--flag=value`) to `docker build`.""" - docker_build_option: ClassVar[str] - @final - def options(self, *args, **kwargs) -> Iterator[str]: + def docker_build_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: if self.value is not None: yield f"{self.docker_build_option}={self.value}" -class DockerBuildOptionFieldMultiValueMixin(StringSequenceField): +class DockerBuildOptionFieldMultiValueMixin(StringSequenceField, DockerBuildOptionsFieldMixin, ABC): """Inherit this mixin class to provide options in the form of `--flag=value1,value2` to `docker build`.""" - docker_build_option: ClassVar[str] - @final - def options(self, *args, **kwargs) -> Iterator[str]: + def docker_build_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: if self.value: yield f"{self.docker_build_option}={','.join(list(self.value))}" @@ -537,14 +551,14 @@ class DockerImageBuildPullOptionField(DockerBuildOptionFieldValueMixin, BoolFiel docker_build_option = "--pull" -class DockerBuildOptionFlagFieldMixin(BoolField, ABC): +class DockerBuildOptionFlagFieldMixin(BoolField, DockerBuildOptionsFieldMixin, ABC): """Inherit this mixin class to provide optional flags (i.e. 
add `--flag` only when the value is `True`) to `docker build`.""" - docker_build_option: ClassVar[str] - @final - def options(self, *args, **kwargs) -> Iterator[str]: + def docker_build_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: if self.value: yield f"{self.docker_build_option}" From 046285236da27f13ccd886d58e264d8d5212948a Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Wed, 21 Jan 2026 13:31:52 -0500 Subject: [PATCH 05/25] add buildctl base classes --- .../backend/docker/goals/package_image.py | 27 +- .../pants/backend/docker/target_types.py | 256 +++++++++++++----- 2 files changed, 199 insertions(+), 84 deletions(-) diff --git a/src/python/pants/backend/docker/goals/package_image.py b/src/python/pants/backend/docker/goals/package_image.py index 13bd089537e..10b6350c535 100644 --- a/src/python/pants/backend/docker/goals/package_image.py +++ b/src/python/pants/backend/docker/goals/package_image.py @@ -28,6 +28,7 @@ DockerImageTagsField, DockerImageTagsRequest, DockerImageTargetStageField, + OptionValueFormatter, get_docker_image_tags, ) from pants.backend.docker.util_rules.binaries import get_buildctl, get_docker, get_podman @@ -309,25 +310,29 @@ class DockerInfoV1ImageTag: name: str +def get_value_formatter( + context: DockerBuildContext, target: Target, field_alias: str +) -> OptionValueFormatter: + return partial( + context.interpolation_context.format, + source=InterpolationContext.TextSource( + address=target.address, target_alias=target.alias, field_alias=field_alias + ), + error_cls=DockerImageOptionValueError, + ) + + def get_build_options( context: DockerBuildContext, field_set: DockerPackageFieldSet, docker_options: DockerOptions, target: Target, ) -> Iterator[str]: - # Build options from target fields inheriting from DockerBuildOptionFieldMixin for field_type in target.field_types: - if issubclass(field_type, DockerBuildOptionsFieldMixin): - source = InterpolationContext.TextSource( - 
address=target.address, target_alias=target.alias, field_alias=field_type.alias - ) - value_formatter = partial( - context.interpolation_context.format, - source=source, - error_cls=DockerImageOptionValueError, - ) + if issubclass(field_type, DockerBuildOptionsFieldMixin) and field_type.validate_with_options(docker_options): yield from target[field_type].docker_build_options( - docker=docker_options, value_formatter=value_formatter + docker=docker_options, + value_formatter=get_value_formatter(context, target, field_type.alias), ) # Target stage diff --git a/src/python/pants/backend/docker/target_types.py b/src/python/pants/backend/docker/target_types.py index b720821b2b7..d5343b58b6e 100644 --- a/src/python/pants/backend/docker/target_types.py +++ b/src/python/pants/backend/docker/target_types.py @@ -238,7 +238,12 @@ class DockerImageSkipPushField(BoolField): OptionValueFormatter = Callable[[str], str] -class DockerBuildOptionsFieldMixin(ABC): +class _ValidateWithOptionsMixin(ABC): + def validate_with_options(self, options: DockerOptions) -> bool: + return True + + +class DockerBuildOptionsFieldMixin(_ValidateWithOptionsMixin, ABC): docker_build_option: ClassVar[str] @abstractmethod @@ -252,8 +257,6 @@ def docker_build_options( class DockerBuildOptionMultiValueFieldMixin(DockerBuildOptionsFieldMixin, ABC): """Inherit this mixin class to provide options to `docker build`.""" - docker_build_option: ClassVar[str] - @abstractmethod def docker_build_option_values( self, *, docker: DockerOptions, value_formatter: OptionValueFormatter @@ -271,49 +274,28 @@ def docker_build_options( yield from (self.docker_build_option, value) -class DockerImageBuildImageLabelsOptionField( - DockerBuildOptionMultiValueFieldMixin, DictStringToStringField -): - alias = "image_labels" - help = help_text( - f""" - Provide image metadata. 
- - {_interpolation_help.format(kind="Label value")} - - See [Docker labels](https://docs.docker.com/config/labels-custom-metadata/#manage-labels-on-objects) - for more information. - """ - ) - docker_build_option = "--label" +class DockerBuildOptionFieldValueMixin(Field, DockerBuildOptionsFieldMixin, ABC): + """Inherit this mixin class to provide unary options (i.e. option in the form of `--flag=value`) + to `docker build`.""" - def docker_build_option_values( + @final + def docker_build_options( self, *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: - for label, value in (self.value or {}).items(): - yield f"{label}={value_formatter(value)}" - + if self.value is not None: + yield f"{self.docker_build_option}={self.value}" -class DockerImageBuildImageExtraHostsField( - DockerBuildOptionMultiValueFieldMixin, DictStringToStringField -): - alias = "extra_build_hosts" - help = help_text( - """ - Extra hosts entries to be added to a container's `/etc/hosts` file. - Use `[docker].build_hosts` to set default host entries for all images. - """ - ) - docker_build_option = "--add-host" +class DockerBuildOptionFieldMultiValueMixin(StringSequenceField, DockerBuildOptionsFieldMixin, ABC): + """Inherit this mixin class to provide options in the form of `--flag=value1,value2` to `docker + build`.""" - def docker_build_option_values( + @final + def docker_build_options( self, *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: if self.value: - merged_values = {**docker.build_hosts, **self.value} - for label, value in merged_values.items(): - yield f"{label}:{value_formatter(value)}" + yield f"{self.docker_build_option}={','.join(list(self.value))}" class DockerBuildOptionFieldMultiValueDictMixin( @@ -351,6 +333,164 @@ def docker_build_options( ) +class DockerBuildOptionFlagFieldMixin(BoolField, DockerBuildOptionsFieldMixin, ABC): + """Inherit this mixin class to provide optional flags (i.e. 
add `--flag` only when the value is + `True`) to `docker build`.""" + + @final + def docker_build_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + if self.value: + yield f"{self.docker_build_option}" + + +class BuildctlOptionsFieldMixin(_ValidateWithOptionsMixin, ABC): + buildctl_option: ClassVar[str] + + @abstractmethod + def buildctl_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + """Subclasses must implement this, to turn their `self.value` into none, one or more option + values.""" + + +class BuildctlOptionMultiValueFieldMixin(BuildctlOptionsFieldMixin, ABC): + """Inherit this mixin class to provide multi-value options to `buildctl build`. + + Yields multiple `--opt value1 --opt value2` pairs. + """ + + @abstractmethod + def buildctl_option_values( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + """Subclasses must implement this, to turn their `self.value` into none, one or more option + values.""" + + @final + def buildctl_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + for value in self.buildctl_option_values( + docker=docker, value_formatter=value_formatter + ): + yield from (self.buildctl_option, value) + + +class BuildctlOptionFieldMultiValueDictMixin(DictStringToStringField, BuildctlOptionsFieldMixin, ABC): + """Inherit this mixin class to provide options in the form of `--flag=key1=value1,key2=value2` + to `buildctl build`.""" + + @final + def buildctl_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + if self.value: + yield f"{self.buildctl_option}=" + ",".join( + f"{key}={value_formatter(value)}" for key, value in self.value.items() + ) + + +class BuildctlOptionFieldListOfMultiValueDictMixin(ListOfDictStringToStringField, BuildctlOptionsFieldMixin, ABC): + """Inherit this mixin class 
to provide multiple key-value options to buildctl build: + + `--flag=key1=value1,key2=value2 --flag=key3=value3,key4=value4` + """ + + @final + def buildctl_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + if self.value: + for item in self.value: + yield f"{self.buildctl_option}=" + ",".join( + f"{key}={value_formatter(value)}" for key, value in item.items() + ) + + +class BuildctlOptionFieldValueMixin(Field, BuildctlOptionsFieldMixin, ABC): + """Inherit this mixin class to provide unary options (i.e. option in the form of `--flag=value`) + to `buildctl build`.""" + + @final + def buildctl_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + if self.value is not None: + yield f"{self.buildctl_option}={self.value}" + + +class BuildctlOptionFieldMultiValueMixin(StringSequenceField, BuildctlOptionsFieldMixin, ABC): + """Inherit this mixin class to provide options in the form of `--flag=value1,value2` to + `buildctl build`.""" + + @final + def buildctl_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + if self.value: + yield f"{self.buildctl_option}={','.join(list(self.value))}" + + +class BuildctlOptionFlagFieldMixin(BoolField, BuildctlOptionsFieldMixin, ABC): + """Inherit this mixin class to provide optional flags (i.e. add `--flag` only when the value is + `True`) to `buildctl build`.""" + + @final + def buildctl_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + if self.value: + yield f"{self.buildctl_option}" + + +class DockerImageBuildImageLabelsOptionField( + DockerBuildOptionMultiValueFieldMixin, DictStringToStringField +): + alias = "image_labels" + help = help_text( + f""" + Provide image metadata. 
+ + {_interpolation_help.format(kind="Label value")} + + See [Docker labels](https://docs.docker.com/config/labels-custom-metadata/#manage-labels-on-objects) + for more information. + """ + ) + docker_build_option = "--label" + + def docker_build_option_values( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + for label, value in (self.value or {}).items(): + yield f"{label}={value_formatter(value)}" + + +class DockerImageBuildImageExtraHostsField( + DockerBuildOptionMultiValueFieldMixin, DictStringToStringField +): + alias = "extra_build_hosts" + help = help_text( + """ + Extra hosts entries to be added to a container's `/etc/hosts` file. + + Use `[docker].build_hosts` to set default host entries for all images. + """ + ) + docker_build_option = "--add-host" + + def docker_build_option_values( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + if self.value: + merged_values = {**docker.build_hosts, **self.value} + for label, value in merged_values.items(): + yield f"{label}:{value_formatter(value)}" + + class DockerImageBuildImageCacheToField( DockerBuildOptionFieldMultiValueDictMixin, DictStringToStringField ): @@ -384,6 +524,9 @@ class DockerImageBuildImageCacheToField( ) docker_build_option = "--cache-to" + def validate_with_options(self, options: DockerOptions) -> bool: + return not options.build_no_cache + class DockerImageBuildImageCacheFromField( DockerBuildOptionFieldListOfMultiValueDictMixin, ListOfDictStringToStringField @@ -420,6 +563,9 @@ class DockerImageBuildImageCacheFromField( ) docker_build_option = "--cache-from" + def validate_with_options(self, options: DockerOptions) -> bool: + return not options.build_no_cache + class DockerImageBuildImageOutputField( DockerBuildOptionFieldMultiValueDictMixin, DictStringToStringField @@ -513,30 +659,6 @@ def docker_build_option_values( yield from cast("tuple[str]", self.value) -class 
DockerBuildOptionFieldValueMixin(Field, DockerBuildOptionsFieldMixin, ABC): - """Inherit this mixin class to provide unary options (i.e. option in the form of `--flag=value`) - to `docker build`.""" - - @final - def docker_build_options( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter - ) -> Iterator[str]: - if self.value is not None: - yield f"{self.docker_build_option}={self.value}" - - -class DockerBuildOptionFieldMultiValueMixin(StringSequenceField, DockerBuildOptionsFieldMixin, ABC): - """Inherit this mixin class to provide options in the form of `--flag=value1,value2` to `docker - build`.""" - - @final - def docker_build_options( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter - ) -> Iterator[str]: - if self.value: - yield f"{self.docker_build_option}={','.join(list(self.value))}" - - class DockerImageBuildPullOptionField(DockerBuildOptionFieldValueMixin, BoolField): alias = "pull" default = False @@ -551,18 +673,6 @@ class DockerImageBuildPullOptionField(DockerBuildOptionFieldValueMixin, BoolFiel docker_build_option = "--pull" -class DockerBuildOptionFlagFieldMixin(BoolField, DockerBuildOptionsFieldMixin, ABC): - """Inherit this mixin class to provide optional flags (i.e. 
add `--flag` only when the value is - `True`) to `docker build`.""" - - @final - def docker_build_options( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter - ) -> Iterator[str]: - if self.value: - yield f"{self.docker_build_option}" - - class DockerImageBuildSquashOptionField(DockerBuildOptionFlagFieldMixin): alias = "squash" default = False From 833b4692ac200ed7c1c3a19f418e8395b2c3c9ad Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Wed, 21 Jan 2026 14:31:58 -0500 Subject: [PATCH 06/25] some options implemented --- .../pants/backend/docker/target_types.py | 127 +++++++++++++++--- 1 file changed, 109 insertions(+), 18 deletions(-) diff --git a/src/python/pants/backend/docker/target_types.py b/src/python/pants/backend/docker/target_types.py index d5343b58b6e..1c357ebb050 100644 --- a/src/python/pants/backend/docker/target_types.py +++ b/src/python/pants/backend/docker/target_types.py @@ -39,6 +39,7 @@ from pants.engine.unions import union from pants.util.docutil import bin_name, doc_url from pants.util.frozendict import FrozenDict +from pants.util.meta import classproperty from pants.util.strutil import help_text, softwrap # Common help text to be applied to each field that supports value interpolation. @@ -356,6 +357,19 @@ def buildctl_options( values.""" +class DockerBuildkitPassthroughFieldMixin( + BuildctlOptionsFieldMixin, DockerBuildOptionsFieldMixin, ABC +): + @classproperty + def docker_build_option(cls) -> str: + return cls.buildctl_option + + def docker_build_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + return super().buildctl_options(docker=docker, value_formatter=value_formatter) + + class BuildctlOptionMultiValueFieldMixin(BuildctlOptionsFieldMixin, ABC): """Inherit this mixin class to provide multi-value options to `buildctl build`. 
@@ -373,13 +387,13 @@ def buildctl_option_values( def buildctl_options( self, *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: - for value in self.buildctl_option_values( - docker=docker, value_formatter=value_formatter - ): + for value in self.buildctl_option_values(docker=docker, value_formatter=value_formatter): yield from (self.buildctl_option, value) -class BuildctlOptionFieldMultiValueDictMixin(DictStringToStringField, BuildctlOptionsFieldMixin, ABC): +class BuildctlOptionFieldMultiValueDictMixin( + DictStringToStringField, BuildctlOptionsFieldMixin, ABC +): """Inherit this mixin class to provide options in the form of `--flag=key1=value1,key2=value2` to `buildctl build`.""" @@ -393,7 +407,9 @@ def buildctl_options( ) -class BuildctlOptionFieldListOfMultiValueDictMixin(ListOfDictStringToStringField, BuildctlOptionsFieldMixin, ABC): +class BuildctlOptionFieldListOfMultiValueDictMixin( + ListOfDictStringToStringField, BuildctlOptionsFieldMixin, ABC +): """Inherit this mixin class to provide multiple key-value options to buildctl build: `--flag=key1=value1,key2=value2 --flag=key3=value3,key4=value4` @@ -422,6 +438,28 @@ def buildctl_options( yield f"{self.buildctl_option}={self.value}" +class BuildctlLayeredOptionFieldValueMixin(Field, BuildctlOptionsFieldMixin, ABC): + """Inherit this mixin class to provide layered options (i.e. option in the form of `--flag + suboption:value`) to `buildctl build`. + + You can override the option-value delimiter (default is `:`) by setting the + `suboption_value_delimiter` class variable. 
+ """ + + suboption: ClassVar[str] + suboption_value_delimiter: ClassVar[str] = ":" + + @final + def buildctl_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + if self.value is not None: + yield from ( + self.buildctl_option, + f"{self.suboption}{self.suboption_value_delimiter}{self.value}", + ) + + class BuildctlOptionFieldMultiValueMixin(StringSequenceField, BuildctlOptionsFieldMixin, ABC): """Inherit this mixin class to provide options in the form of `--flag=value1,value2` to `buildctl build`.""" @@ -434,6 +472,30 @@ def buildctl_options( yield f"{self.buildctl_option}={','.join(list(self.value))}" +class BuildctlLayeredOptionFieldMultiValueMixin( + StringSequenceField, BuildctlOptionsFieldMixin, ABC +): + """Inherit this mixin class to provide layered options in the form of `--flag + suboption:value1,value2` to `buildctl build`. + + You can override the option-values delimiter (default is `:`) by setting the + `suboption_value_delimiter` class variable. + """ + + suboption: ClassVar[str] + suboption_value_delimiter: ClassVar[str] = ":" + + @final + def buildctl_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + if self.value: + yield from ( + self.buildctl_option, + f"{self.suboption}{self.suboption_value_delimiter}{','.join(list(self.value))}", + ) + + class BuildctlOptionFlagFieldMixin(BoolField, BuildctlOptionsFieldMixin, ABC): """Inherit this mixin class to provide optional flags (i.e. 
add `--flag` only when the value is `True`) to `buildctl build`.""" @@ -447,7 +509,9 @@ def buildctl_options( class DockerImageBuildImageLabelsOptionField( - DockerBuildOptionMultiValueFieldMixin, DictStringToStringField + DockerBuildOptionMultiValueFieldMixin, + BuildctlOptionMultiValueFieldMixin, + DictStringToStringField, ): alias = "image_labels" help = help_text( @@ -461,6 +525,7 @@ class DockerImageBuildImageLabelsOptionField( """ ) docker_build_option = "--label" + buildctl_option = "--opt" def docker_build_option_values( self, *, docker: DockerOptions, value_formatter: OptionValueFormatter @@ -468,6 +533,12 @@ def docker_build_option_values( for label, value in (self.value or {}).items(): yield f"{label}={value_formatter(value)}" + def buildctl_option_values( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + for label, value in (self.value or {}).items(): + yield f"label:{label}={value_formatter(value)}" + class DockerImageBuildImageExtraHostsField( DockerBuildOptionMultiValueFieldMixin, DictStringToStringField @@ -492,7 +563,9 @@ def docker_build_option_values( class DockerImageBuildImageCacheToField( - DockerBuildOptionFieldMultiValueDictMixin, DictStringToStringField + DockerBuildOptionFieldMultiValueDictMixin, + BuildctlOptionFieldMultiValueDictMixin, + DictStringToStringField, ): alias = "cache_to" help = help_text( @@ -523,13 +596,16 @@ class DockerImageBuildImageCacheToField( """ ) docker_build_option = "--cache-to" + buildctl_option = "--export-cache" def validate_with_options(self, options: DockerOptions) -> bool: return not options.build_no_cache class DockerImageBuildImageCacheFromField( - DockerBuildOptionFieldListOfMultiValueDictMixin, ListOfDictStringToStringField + DockerBuildOptionFieldListOfMultiValueDictMixin, + BuildctlOptionFieldListOfMultiValueDictMixin, + ListOfDictStringToStringField, ): alias = "cache_from" help = help_text( @@ -562,13 +638,16 @@ class DockerImageBuildImageCacheFromField( 
""" ) docker_build_option = "--cache-from" + buildctl_option = "--import-cache" def validate_with_options(self, options: DockerOptions) -> bool: return not options.build_no_cache class DockerImageBuildImageOutputField( - DockerBuildOptionFieldMultiValueDictMixin, DictStringToStringField + BuildctlOptionFieldMultiValueDictMixin, + DockerBuildkitPassthroughFieldMixin, + DictStringToStringField, ): alias = "output" default = FrozenDict({"type": "docker"}) @@ -585,11 +664,14 @@ class DockerImageBuildImageOutputField( {_interpolation_help.format(kind="Values")} """ ) - docker_build_option = "--output" + buildctl_option = "--output" class DockerImageBuildSecretsOptionField( - AsyncFieldMixin, DockerBuildOptionMultiValueFieldMixin, DictStringToStringField + AsyncFieldMixin, + BuildctlOptionMultiValueFieldMixin, + DockerBuildkitPassthroughFieldMixin, + DictStringToStringField, ): alias = "secrets" help = help_text( @@ -615,9 +697,9 @@ class DockerImageBuildSecretsOptionField( """ ) - docker_build_option = "--secret" + buildctl_option = "--secret" - def docker_build_option_values( + def buildctl_option_values( self, *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: # os.path.join() discards preceding parts if encountering an abs path, e.g. 
if the secret @@ -633,7 +715,11 @@ def docker_build_option_values( yield f"id={secret},src={os.path.normpath(full_path)}" -class DockerImageBuildSSHOptionField(DockerBuildOptionMultiValueFieldMixin, StringSequenceField): +class DockerImageBuildSSHOptionField( + BuildctlOptionMultiValueFieldMixin, + DockerBuildkitPassthroughFieldMixin, + StringSequenceField, +): alias = "ssh" default = () help = help_text( @@ -651,12 +737,12 @@ class DockerImageBuildSSHOptionField(DockerBuildOptionMultiValueFieldMixin, Stri """ ) - docker_build_option = "--ssh" + buildctl_option = "--ssh" - def docker_build_option_values( + def buildctl_option_values( self, *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: - yield from cast("tuple[str]", self.value) + yield from cast(tuple[str, ...], self.value) class DockerImageBuildPullOptionField(DockerBuildOptionFieldValueMixin, BoolField): @@ -700,7 +786,9 @@ class DockerImageBuildNetworkOptionField(DockerBuildOptionFieldValueMixin, Strin class DockerImageBuildPlatformOptionField( - DockerBuildOptionFieldMultiValueMixin, StringSequenceField + DockerBuildOptionFieldMultiValueMixin, + BuildctlLayeredOptionFieldMultiValueMixin, + StringSequenceField, ): alias = "build_platform" default = None @@ -710,6 +798,9 @@ class DockerImageBuildPlatformOptionField( """ ) docker_build_option = "--platform" + buildctl_option = "--opt" + suboption = "platform" + suboption_value_delimiter = "=" class DockerImageRunExtraArgsField(StringSequenceField): From ec9aaa50b48131372eaca8803823e6c0dc7f2317 Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Wed, 21 Jan 2026 15:39:02 -0500 Subject: [PATCH 07/25] making progress, one more centralizing thing --- .../backend/docker/goals/package_image.py | 24 +++++--- .../pants/backend/docker/target_types.py | 60 ++++++++++++------- 2 files changed, 55 insertions(+), 29 deletions(-) diff --git a/src/python/pants/backend/docker/goals/package_image.py 
b/src/python/pants/backend/docker/goals/package_image.py index 10b6350c535..a24d9a27352 100644 --- a/src/python/pants/backend/docker/goals/package_image.py +++ b/src/python/pants/backend/docker/goals/package_image.py @@ -20,6 +20,7 @@ from pants.backend.docker.registries import DockerRegistries, DockerRegistryOptions from pants.backend.docker.subsystems.docker_options import DockerOptions from pants.backend.docker.target_types import ( + BuildctlOptionsFieldMixin, DockerBuildOptionsFieldMixin, DockerImageContextRootField, DockerImageRegistriesField, @@ -328,19 +329,29 @@ def get_build_options( docker_options: DockerOptions, target: Target, ) -> Iterator[str]: + engine_build_options_field_type, gen_options_func_name = ( + (BuildctlOptionsFieldMixin, "buildctl_options") + if docker_options.build_engine == DockerBuildEngine.BUILDKIT + else (DockerBuildOptionsFieldMixin, "docker_build_options") + ) for field_type in target.field_types: - if issubclass(field_type, DockerBuildOptionsFieldMixin) and field_type.validate_with_options(docker_options): - yield from target[field_type].docker_build_options( + if issubclass(field_type, engine_build_options_field_type) and field_type.validate_options( + docker_options, context + ): + gen_options_func = getattr(field_type, gen_options_func_name) + yield from gen_options_func( docker=docker_options, value_formatter=get_value_formatter(context, target, field_type.alias), ) - # Target stage + # Special handling for global options if docker_options.build_target_stage in context.stages: - yield from ("--target", docker_options.build_target_stage) - elif field_set.target_stage.value: - yield from ("--target", field_set.target_stage.value) + if docker_options.build_engine == DockerBuildEngine.BUILDKIT: + yield from ("--opt", f"target={docker_options.build_target_stage}") + else: + yield from ("--target", docker_options.build_target_stage) + # This is the same for docker and buildkit if docker_options.build_no_cache: yield "--no-cache" @@ 
-423,7 +434,6 @@ async def build_docker_image( context_root=context_root, env=env, tags=tags, - use_buildx=options.use_buildx, extra_args=tuple( get_build_options( context=context, diff --git a/src/python/pants/backend/docker/target_types.py b/src/python/pants/backend/docker/target_types.py index 1c357ebb050..ac0770a81dd 100644 --- a/src/python/pants/backend/docker/target_types.py +++ b/src/python/pants/backend/docker/target_types.py @@ -8,7 +8,7 @@ from abc import ABC, abstractmethod from collections.abc import Callable, Iterator from dataclasses import dataclass -from typing import ClassVar, cast, final +from typing import TYPE_CHECKING, ClassVar, cast, final from pants.backend.docker.registries import ALL_DEFAULT_REGISTRIES from pants.backend.docker.subsystems.docker_options import DockerOptions @@ -42,6 +42,9 @@ from pants.util.meta import classproperty from pants.util.strutil import help_text, softwrap +if TYPE_CHECKING: + from pants.backend.docker.util_rules.docker_build_context import DockerBuildContext + # Common help text to be applied to each field that supports value interpolation. _interpolation_help = ( "{kind} may use placeholders in curly braces to be interpolated. The placeholders are derived " @@ -156,21 +159,6 @@ class DockerImageTagsField(StringSequenceField): ) -class DockerImageTargetStageField(StringField): - alias = "target_stage" - help = help_text( - """ - Specify target build stage, rather than building the entire `Dockerfile`. - - When using multi-stage build, you may name your stages, and can target them when building - to only selectively build a certain stage. See also the `--docker-build-target-stage` - option. 
- - Read more about [multi-stage Docker builds](https://docs.docker.com/develop/develop-images/multistage-build/#stop-at-a-specific-build-stage) - """ - ) - - class DockerImageDependenciesField(Dependencies): supports_transitive_excludes = True @@ -239,12 +227,16 @@ class DockerImageSkipPushField(BoolField): OptionValueFormatter = Callable[[str], str] -class _ValidateWithOptionsMixin(ABC): - def validate_with_options(self, options: DockerOptions) -> bool: +class ValidateOptionsMixin(ABC): + def validate_options(self, options: DockerOptions, context: DockerBuildContext) -> bool: + """Hook method telling Pants to ignore this option in certain contexts. + + Can also be used to throw errors simply by raising an exception. + """ return True -class DockerBuildOptionsFieldMixin(_ValidateWithOptionsMixin, ABC): +class DockerBuildOptionsFieldMixin(ValidateOptionsMixin, ABC): docker_build_option: ClassVar[str] @abstractmethod @@ -346,7 +338,7 @@ def docker_build_options( yield f"{self.docker_build_option}" -class BuildctlOptionsFieldMixin(_ValidateWithOptionsMixin, ABC): +class BuildctlOptionsFieldMixin(ValidateOptionsMixin, ABC): buildctl_option: ClassVar[str] @abstractmethod @@ -508,6 +500,30 @@ def buildctl_options( yield f"{self.buildctl_option}" +class DockerImageTargetStageField( + DockerBuildOptionFieldValueMixin, BuildctlLayeredOptionFieldValueMixin, StringField +): + alias = "target_stage" + help = help_text( + """ + Specify target build stage, rather than building the entire `Dockerfile`. + + When using multi-stage build, you may name your stages, and can target them when building + to only selectively build a certain stage. See also the `--docker-build-target-stage` + option. 
+ + Read more about [multi-stage Docker builds](https://docs.docker.com/develop/develop-images/multistage-build/#stop-at-a-specific-build-stage) + """ + ) + docker_build_option = "--target" + buildctl_option = "--opt" + suboption = "target" + + def validate_options(self, options: DockerOptions, context: DockerBuildContext) -> bool: + # Defer to global option if set and matches a stage + return options.build_target_stage not in context.stages + + class DockerImageBuildImageLabelsOptionField( DockerBuildOptionMultiValueFieldMixin, BuildctlOptionMultiValueFieldMixin, @@ -598,7 +614,7 @@ class DockerImageBuildImageCacheToField( docker_build_option = "--cache-to" buildctl_option = "--export-cache" - def validate_with_options(self, options: DockerOptions) -> bool: + def validate_options(self, options: DockerOptions, context: DockerBuildContext) -> bool: return not options.build_no_cache @@ -640,7 +656,7 @@ class DockerImageBuildImageCacheFromField( docker_build_option = "--cache-from" buildctl_option = "--import-cache" - def validate_with_options(self, options: DockerOptions) -> bool: + def validate_options(self, options: DockerOptions, context: DockerBuildContext) -> bool: return not options.build_no_cache From b10dbe9ce93d8e4c5c29a79945a4e8ee2227be50 Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Wed, 21 Jan 2026 16:37:10 -0500 Subject: [PATCH 08/25] fix some stuff --- .../backend/docker/goals/package_image.py | 24 +++++++++++-------- .../pants/backend/docker/target_types.py | 22 +++++++++-------- 2 files changed, 26 insertions(+), 20 deletions(-) diff --git a/src/python/pants/backend/docker/goals/package_image.py b/src/python/pants/backend/docker/goals/package_image.py index a24d9a27352..f76cbeec086 100644 --- a/src/python/pants/backend/docker/goals/package_image.py +++ b/src/python/pants/backend/docker/goals/package_image.py @@ -325,7 +325,6 @@ def get_value_formatter( def get_build_options( context: DockerBuildContext, - field_set: DockerPackageFieldSet, 
docker_options: DockerOptions, target: Target, ) -> Iterator[str]: @@ -335,10 +334,10 @@ def get_build_options( else (DockerBuildOptionsFieldMixin, "docker_build_options") ) for field_type in target.field_types: - if issubclass(field_type, engine_build_options_field_type) and field_type.validate_options( - docker_options, context - ): - gen_options_func = getattr(field_type, gen_options_func_name) + if issubclass(field_type, engine_build_options_field_type) and target[ + field_type + ].validate_options(docker_options, context): + gen_options_func = getattr(target[field_type], gen_options_func_name) yield from gen_options_func( docker=docker_options, value_formatter=get_value_formatter(context, target, field_type.alias), @@ -347,9 +346,15 @@ def get_build_options( # Special handling for global options if docker_options.build_target_stage in context.stages: if docker_options.build_engine == DockerBuildEngine.BUILDKIT: - yield from ("--opt", f"target={docker_options.build_target_stage}") + yield from ( + DockerImageTargetStageField.buildctl_option, + f"{DockerImageTargetStageField.suboption}{DockerImageTargetStageField.suboption_value_delimiter}{docker_options.build_target_stage}", + ) else: - yield from ("--target", docker_options.build_target_stage) + yield from ( + DockerImageTargetStageField.docker_build_option, + docker_options.build_target_stage, + ) # This is the same for docker and buildkit if docker_options.build_no_cache: @@ -437,7 +442,6 @@ async def build_docker_image( extra_args=tuple( get_build_options( context=context, - field_set=field_set, docker_options=options, target=wrapped_target.target, ) @@ -446,7 +450,7 @@ async def build_docker_image( result = await execute_process(process, **implicitly()) if result.exit_code != 0: - msg = f"Docker build failed for `docker_image` {field_set.address}." + msg = f"{options.build_engine.value.capitalize()} build failed for `docker_image` {field_set.address}." 
if options.suggest_renames: maybe_help_msg = format_docker_build_context_help_message( context_root=context_root, @@ -469,7 +473,7 @@ async def build_docker_image( image_id = parse_image_id(result.stdout, result.stderr) docker_build_output_msg = "\n".join( ( - f"Docker build output for {tags[0]}:", + f"{options.build_engine.value.capitalize()} build output for {tags[0]}:", "stdout:", result.stdout.decode(), "stderr:", diff --git a/src/python/pants/backend/docker/target_types.py b/src/python/pants/backend/docker/target_types.py index ac0770a81dd..10f1a342407 100644 --- a/src/python/pants/backend/docker/target_types.py +++ b/src/python/pants/backend/docker/target_types.py @@ -432,14 +432,14 @@ def buildctl_options( class BuildctlLayeredOptionFieldValueMixin(Field, BuildctlOptionsFieldMixin, ABC): """Inherit this mixin class to provide layered options (i.e. option in the form of `--flag - suboption:value`) to `buildctl build`. + suboption=value`) to `buildctl build`. - You can override the option-value delimiter (default is `:`) by setting the + You can override the option-value delimiter (default is `=`) by setting the `suboption_value_delimiter` class variable. """ suboption: ClassVar[str] - suboption_value_delimiter: ClassVar[str] = ":" + suboption_value_delimiter: ClassVar[str] = "=" @final def buildctl_options( @@ -468,14 +468,14 @@ class BuildctlLayeredOptionFieldMultiValueMixin( StringSequenceField, BuildctlOptionsFieldMixin, ABC ): """Inherit this mixin class to provide layered options in the form of `--flag - suboption:value1,value2` to `buildctl build`. + suboption=value1,value2` to `buildctl build`. - You can override the option-values delimiter (default is `:`) by setting the + You can override the option-values delimiter (default is `=`) by setting the `suboption_value_delimiter` class variable. 
""" suboption: ClassVar[str] - suboption_value_delimiter: ClassVar[str] = ":" + suboption_value_delimiter: ClassVar[str] = "=" @final def buildctl_options( @@ -519,7 +519,8 @@ class DockerImageTargetStageField( buildctl_option = "--opt" suboption = "target" - def validate_options(self, options: DockerOptions, context: DockerBuildContext) -> bool: + @staticmethod + def validate_options(options: DockerOptions, context: DockerBuildContext) -> bool: # Defer to global option if set and matches a stage return options.build_target_stage not in context.stages @@ -614,7 +615,8 @@ class DockerImageBuildImageCacheToField( docker_build_option = "--cache-to" buildctl_option = "--export-cache" - def validate_options(self, options: DockerOptions, context: DockerBuildContext) -> bool: + @staticmethod + def validate_options(options: DockerOptions, context: DockerBuildContext) -> bool: return not options.build_no_cache @@ -656,7 +658,8 @@ class DockerImageBuildImageCacheFromField( docker_build_option = "--cache-from" buildctl_option = "--import-cache" - def validate_options(self, options: DockerOptions, context: DockerBuildContext) -> bool: + @staticmethod + def validate_options(options: DockerOptions, context: DockerBuildContext) -> bool: return not options.build_no_cache @@ -816,7 +819,6 @@ class DockerImageBuildPlatformOptionField( docker_build_option = "--platform" buildctl_option = "--opt" suboption = "platform" - suboption_value_delimiter = "=" class DockerImageRunExtraArgsField(StringSequenceField): From dbe12aadf664432a304e4ac9e581eec745519110 Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Tue, 24 Feb 2026 11:30:51 -0500 Subject: [PATCH 09/25] parse buildkit output --- .../pants/backend/docker/engine_types.py | 7 ++ .../backend/docker/goals/package_image.py | 20 +--- .../docker/goals/package_image_test.py | 4 +- .../docker/subsystems/docker_options.py | 100 +++++++---------- .../experimental/docker/podman/register.py | 2 +- src/python/pants/option/option_types.py | 106 
++++++++++++++++++ 6 files changed, 161 insertions(+), 78 deletions(-) diff --git a/src/python/pants/backend/docker/engine_types.py b/src/python/pants/backend/docker/engine_types.py index af5c410d3d6..f811f71a726 100644 --- a/src/python/pants/backend/docker/engine_types.py +++ b/src/python/pants/backend/docker/engine_types.py @@ -1,6 +1,7 @@ # Copyright 2026 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from enum import Enum +from dataclasses import dataclass class DockerBuildEngine(Enum): @@ -12,3 +13,9 @@ class DockerBuildEngine(Enum): class DockerRunEngine(Enum): DOCKER = "docker" PODMAN = "podman" + + +@dataclass(frozen=True) +class DockerEngines: + build: DockerBuildEngine = DockerBuildEngine.DOCKER + run: DockerRunEngine = DockerRunEngine.DOCKER diff --git a/src/python/pants/backend/docker/goals/package_image.py b/src/python/pants/backend/docker/goals/package_image.py index 4e24a95c817..15a0f0e01d8 100644 --- a/src/python/pants/backend/docker/goals/package_image.py +++ b/src/python/pants/backend/docker/goals/package_image.py @@ -22,9 +22,7 @@ from pants.backend.docker.subsystems.docker_options import DockerOptions from pants.backend.docker.target_types import ( BuildctlOptionsFieldMixin, - DockerBuildKitOptionField, DockerBuildOptionFieldListOfMultiValueDictMixin, - DockerBuildOptionFieldMixin, DockerBuildOptionFieldMultiValueDictMixin, DockerBuildOptionFieldMultiValueMixin, DockerBuildOptionFieldValueMixin, @@ -553,13 +551,7 @@ async def build_docker_image( keep_sandboxes=keep_sandboxes, ) - match options.build_engine: - case DockerBuildEngine.BUILDKIT: - parse_image_id = parse_image_id_from_buildctl_build_output - case DockerBuildEngine.PODMAN: - parse_image_id = parse_image_id_from_podman_build_output - case _: - parse_image_id = parse_image_id_from_docker_build_output + parse_image_id = parse_image_id_from_podman_build_output if options.build_engine == DockerBuildEngine.PODMAN else 
parse_image_id_from_buildkit_output image_id = parse_image_id(result.stdout, result.stderr) docker_build_output_msg = "\n".join( ( @@ -586,7 +578,7 @@ async def build_docker_image( ) -def parse_image_id_from_docker_build_output(*outputs: bytes) -> str | None: +def parse_image_id_from_buildkit_output(*outputs: bytes) -> str | None: """Outputs are typically the stdout/stderr pair from the `docker build` process.""" # NB: We use the extracted image id for invalidation. The short_id may theoretically # not be unique enough, although in a non adversarial situation, this is highly unlikely @@ -596,6 +588,8 @@ def parse_image_id_from_docker_build_output(*outputs: bytes) -> str | None: ( # BuildKit output. r"(writing image (?Psha256:\S+))", + # Buildkit with --push=true output. + r"(pushing manifest for (?P\S+))", # BuildKit with containerd-snapshotter output. r"(exporting manifest list (?Psha256:\S+))", # BuildKit with containerd-snapshotter output and no attestation. @@ -620,6 +614,7 @@ def parse_image_id_from_docker_build_output(*outputs: bytes) -> str | None: if image_id_match: image_id = ( image_id_match.group("digest") + or image_id_match.group("pushed_manifest") or image_id_match.group("short_id") or image_id_match.group("manifest_list") or image_id_match.group("manifest") @@ -629,7 +624,7 @@ def parse_image_id_from_docker_build_output(*outputs: bytes) -> str | None: return None -def parse_image_id_from_podman_build_output(*outputs: bytes) -> str: +def parse_image_id_from_podman_build_output(*outputs: bytes) -> str | None: for output in outputs: try: _, image_id, success, *__ = reversed(output.decode().split("\n")) @@ -641,9 +636,6 @@ def parse_image_id_from_podman_build_output(*outputs: bytes) -> str: return None -def parse_image_id_from_buildctl_build_output(*outputs: bytes) -> str: ... 
- - def format_docker_build_context_help_message( context_root: str, context: DockerBuildContext, colors: bool ) -> str | None: diff --git a/src/python/pants/backend/docker/goals/package_image_test.py b/src/python/pants/backend/docker/goals/package_image_test.py index e4df3610fe6..6adb7492e78 100644 --- a/src/python/pants/backend/docker/goals/package_image_test.py +++ b/src/python/pants/backend/docker/goals/package_image_test.py @@ -28,7 +28,7 @@ build_docker_image, get_docker_image_build_process, get_image_refs, - parse_image_id_from_docker_build_output, + parse_image_id_from_buildkit_output, rules, ) from pants.backend.docker.package_types import ( @@ -2489,7 +2489,7 @@ def test_get_context_root( def test_parse_image_id_from_docker_build_output( docker: DockerBinary, expected: str, stdout: str, stderr: str ) -> None: - assert expected == parse_image_id_from_docker_build_output( + assert expected == parse_image_id_from_buildkit_output( docker, stdout.encode(), stderr.encode() ) diff --git a/src/python/pants/backend/docker/subsystems/docker_options.py b/src/python/pants/backend/docker/subsystems/docker_options.py index 7bd9f858dbb..a370b1067a6 100644 --- a/src/python/pants/backend/docker/subsystems/docker_options.py +++ b/src/python/pants/backend/docker/subsystems/docker_options.py @@ -7,13 +7,14 @@ import sys from typing import Any -from pants.backend.docker.engine_types import DockerBuildEngine, DockerRunEngine +from pants.backend.docker.engine_types import DockerBuildEngine, DockerEngines, DockerRunEngine from pants.backend.docker.package_types import DockerPushOnPackageBehavior from pants.backend.docker.registries import DockerRegistries from pants.base.deprecated import resolve_conflicting_options from pants.core.util_rules.search_paths import ExecutableSearchPathsOptionMixin from pants.option.option_types import ( BoolOption, + DataclassOption, DictOption, EnumOption, ShellStrListOption, @@ -159,26 +160,27 @@ def env_vars(self) -> tuple[str, ...]: """ ), ) - 
_build_engine = EnumOption( - default=DockerBuildEngine.DOCKER, - enum_type=DockerBuildEngine, - help=softwrap( - """ - The engine to use for Docker builds. + engine = DataclassOption(default=DockerEngines(), mutually_exclusive_group="engines", help=softwrap( + """ + The engines to use for Docker builds and runs. - Valid values are: + Valid values for `build` are: - - `docker`: Use the Docker CLI with buildx to build images. (https://docs.docker.com/reference/cli/docker/buildx/build/) - - `buildkit`: Invoke buildkit directly to build images. (https://github.com/moby/buildkit/blob/master/docs/reference/buildctl.md#build) - - `podman`: Use Podman to build images. (https://docs.podman.io/en/latest/markdown/podman-build.1.html) - """ - ), - ) + - `docker`: Use the Docker CLI to build images. (https://docs.docker.com/reference/cli/docker/buildx/build/) + - `buildkit`: Invoke buildkit directly to build images. (https://github.com/moby/buildkit/blob/master/docs/reference/buildctl.md#build) + - `podman`: Use Podman to build images. (https://docs.podman.io/en/latest/markdown/podman-build.1.html) + + Valid values for `run` are: + + - `docker`: Use the Docker CLI to run containers. (https://docs.docker.com/reference/cli/docker/run/) + - `podman`: Use Podman to run containers. (https://docs.podman.io/en/latest/markdown/podman-run.1.html) + """ + )) use_buildx = BoolOption( default=True, help=softwrap( """ - DEPRECATED: Use [docker].build_engine = "docker" instead. + DEPRECATED: Use [docker.engine].build = "docker" instead. 
See here for using the legacy builder: https://docs.docker.com/reference/cli/docker/build-legacy/ @@ -186,43 +188,23 @@ def env_vars(self) -> tuple[str, ...]: """ ), deprecation_start_version="2.31.0", + mutually_exclusive_group="engines", ) @property def build_engine(self) -> DockerBuildEngine: - result: DockerBuildEngine | bool = resolve_conflicting_options( - old_option="use_buildx", - new_option="build_engine", - old_scope=self.options_scope, - new_scope=self.options_scope, - old_container=self.options, - new_container=self.options, - ) - if isinstance(result, bool): - warning = '`[docker].use_buildx` is deprecated. Buildx is now the default Docker build engine. Use `[docker].build_engine = "docker"` instead.' - if not result: - warning += ( - " To use the legacy engine, add `DOCKER_BUILDKIT=0` to `[docker].env_vars`." - ) + use_buildx = self.options.get("use_buildx") + if use_buildx is not None: + warning = '`[docker].use_buildx` is deprecated. Buildx is now the default Docker build engine. Use `[docker.engine].build = "docker"` instead.' + if not use_buildx: + warning += " To use the legacy engine, add `DOCKER_BUILDKIT=0` to `[docker].env_vars`." logger.warning(warning) - explicit = result - result = DockerBuildEngine.DOCKER - used_option = "[docker].use_buildx" - else: - explicit = not self.options.is_default("build_engine") - used_option = '[docker].build_engine != "podman"' - experimental_enable_podman = self.options.get("experimental_enable_podman", None) + return DockerBuildEngine.DOCKER + experimental_enable_podman = self.options.get("experimental_enable_podman") if experimental_enable_podman is not None: - logger.warning( - '`[docker].experimental_enable_podman` is deprecated. Use `[docker].build_engine = "podman"` instead.' - ) - if experimental_enable_podman: - if explicit and result != DockerBuildEngine.PODMAN: - raise ValueError( - f"Conflicting options `{used_option}` and `[docker].experimental_enable_podman` both enabled." 
- ) - result = DockerBuildEngine.PODMAN - return result + logger.warning('`[docker].experimental_enable_podman` is deprecated. Use `[docker.engine].build = "podman"` instead.') + return DockerBuildEngine.PODMAN if experimental_enable_podman else DockerBuildEngine.DOCKER + return self.engine.build _run_engine = EnumOption( default=DockerRunEngine.DOCKER, @@ -237,21 +219,17 @@ def build_engine(self) -> DockerBuildEngine: @property def run_engine(self) -> DockerRunEngine: experimental_enable_podman = self.options.get("experimental_enable_podman", None) - if experimental_enable_podman is not None: - logger.warning( - '`[docker].experimental_enable_podman` is deprecated. Use `[docker].run_engine = "podman"` instead.' - ) - if experimental_enable_podman: - if not self.options.is_default("run_engine"): - raise ValueError( - f'Conflicting options `[docker].run_engine != "podman"` and `[docker].experimental_enable_podman` both enabled.' - ) - return DockerRunEngine.PODMAN - if self._run_engine == DockerRunEngine.PODMAN: - raise ValueError( - '`[docker].run_engine` is set to "podman", but the deprecated option `[docker].experimental_enable_podman` is disabled.' - ) - return self._run_engine + match experimental_enable_podman: + case None: + return self.engine.run + case True: + engine = DockerRunEngine.PODMAN + case False: + engine = DockerRunEngine.DOCKER + logger.warning( + f'`[docker].experimental_enable_podman` is deprecated. Use `[docker.engine].run = "{engine.value}"` instead.' 
+ ) + return engine _build_args = ShellStrListOption( help=softwrap( diff --git a/src/python/pants/backend/experimental/docker/podman/register.py b/src/python/pants/backend/experimental/docker/podman/register.py index 472bdb0a8db..8ece60cfa81 100644 --- a/src/python/pants/backend/experimental/docker/podman/register.py +++ b/src/python/pants/backend/experimental/docker/podman/register.py @@ -8,7 +8,7 @@ class ExperimentalPodmanOptions: experimental_enable_podman = BoolOption( default=True, - mutually_exclusive_group="run_engine", + mutually_exclusive_group="engines", help=softwrap( """ DEPRECATED: Use [docker].run_engine = "podman" instead. diff --git a/src/python/pants/option/option_types.py b/src/python/pants/option/option_types.py index 8326b76928b..df867519c7c 100644 --- a/src/python/pants/option/option_types.py +++ b/src/python/pants/option/option_types.py @@ -781,6 +781,112 @@ def _convert_(self, val: Any) -> dict[str, _ValueT]: return cast("dict[str, _ValueT]", val) +# ----------------------------------------------------------------------------------------------- +# Generic Dataclass Concrete Option Classes +# ----------------------------------------------------------------------------------------------- + +class DataclassOption(_OptionBase[_OptT, _OptT]): + """A dataclass option. + + - If you provide a static non-None `default` parameter, the `dataclass_type` parameter will be + inferred from the type of the default. + - If you provide a dynamic `default` or `default` is `None`, you must also provide `dataclass_type`. 
+ """ + + @overload + def __new__( + cls, + flag_name: str | None = None, + *, + default: _MaybeDynamicT[_OptT], + help: _HelpT, + register_if: _RegisterIfFuncT | None = None, + advanced: bool | None = None, + default_help_repr: str | None = None, + fromfile: bool | None = None, + metavar: str | None = None, + mutually_exclusive_group: str | None = None, + removal_version: str | None = None, + removal_hint: _HelpT | None = None, + deprecation_start_version: str | None = None, + daemon: bool | None = None, + fingerprint: bool | None = None, + ) -> DataclassOption[_OptT, _OptT]: ... + + @overload + def __new__( + cls, + flag_name: str | None = None, + *, + dataclass_type: type[_OptT], + default: None, + help: _HelpT, + register_if: _RegisterIfFuncT | None = None, + advanced: bool | None = None, + default_help_repr: str | None = None, + fromfile: bool | None = None, + metavar: str | None = None, + mutually_exclusive_group: str | None = None, + removal_version: str | None = None, + removal_hint: _HelpT | None = None, + deprecation_start_version: str | None = None, + daemon: bool | None = None, + fingerprint: bool | None = None, + ) -> DataclassOption[_OptT, None]: ... 
+ + def __new__( + cls, + flag_name: str | None = None, + *, + dataclass_type: type[_OptT] | None = None, + default: _MaybeDynamicT[_OptT] | None, + help: _HelpT, + register_if: _RegisterIfFuncT | None = None, + advanced: bool | None = None, + default_help_repr: str | None = None, + fromfile: bool | None = None, + metavar: str | None = None, + mutually_exclusive_group: str | None = None, + removal_version: str | None = None, + removal_hint: _HelpT | None = None, + deprecation_start_version: str | None = None, + daemon: bool | None = None, + fingerprint: bool | None = None, + ): + instance = super().__new__( + flag_name, + default=default, + help=help, + register_if=register_if, + advanced=advanced, + default_help_repr=default_help_repr, + fromfile=fromfile, + metavar=metavar, + mutually_exclusive_group=mutually_exclusive_group, + removal_version=removal_version, + removal_hint=removal_hint, + deprecation_start_version=deprecation_start_version, + daemon=daemon, + fingerprint=fingerprint, + ) + instance._dataclass_type = dataclass_type + return instance + + def get_option_type(self, subsystem_cls): + dataclass_type = self._dataclass_type + default = _eval_maybe_dynamic(self._default, subsystem_cls) + if dataclass_type is None: + if default is None: + raise ValueError( + "`dataclass_type` must be provided to the constructor if `default` isn't provided." 
+ ) + return type(default) + elif default is not None and not isinstance(default, dataclass_type): + raise ValueError( + f"Expected the default value to be of type '{dataclass_type}', got '{type(default)}'" + ) + return dataclass_type + # ----------------------------------------------------------------------------------------------- # "Specialized" Concrete Option Classes # ----------------------------------------------------------------------------------------------- From 4c056364c9e64fd41f22eaabb1fb89b839a85da1 Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Tue, 24 Feb 2026 11:43:11 -0500 Subject: [PATCH 10/25] fix --- .../pants/backend/docker/engine_types.py | 2 +- .../backend/docker/goals/package_image.py | 11 +++---- .../docker/goals/package_image_test.py | 4 +-- .../docker/subsystems/docker_options.py | 33 +++++++++---------- src/python/pants/option/option_types.py | 3 +- 5 files changed, 25 insertions(+), 28 deletions(-) diff --git a/src/python/pants/backend/docker/engine_types.py b/src/python/pants/backend/docker/engine_types.py index f811f71a726..73fcbbd1d3c 100644 --- a/src/python/pants/backend/docker/engine_types.py +++ b/src/python/pants/backend/docker/engine_types.py @@ -1,7 +1,7 @@ # Copyright 2026 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). 
-from enum import Enum from dataclasses import dataclass +from enum import Enum class DockerBuildEngine(Enum): diff --git a/src/python/pants/backend/docker/goals/package_image.py b/src/python/pants/backend/docker/goals/package_image.py index 15a0f0e01d8..59c2f2e88cd 100644 --- a/src/python/pants/backend/docker/goals/package_image.py +++ b/src/python/pants/backend/docker/goals/package_image.py @@ -22,11 +22,6 @@ from pants.backend.docker.subsystems.docker_options import DockerOptions from pants.backend.docker.target_types import ( BuildctlOptionsFieldMixin, - DockerBuildOptionFieldListOfMultiValueDictMixin, - DockerBuildOptionFieldMultiValueDictMixin, - DockerBuildOptionFieldMultiValueMixin, - DockerBuildOptionFieldValueMixin, - DockerBuildOptionFlagFieldMixin, DockerBuildOptionsFieldMixin, DockerImageBuildImageOutputField, DockerImageContextRootField, @@ -551,7 +546,11 @@ async def build_docker_image( keep_sandboxes=keep_sandboxes, ) - parse_image_id = parse_image_id_from_podman_build_output if options.build_engine == DockerBuildEngine.PODMAN else parse_image_id_from_buildkit_output + parse_image_id = ( + parse_image_id_from_podman_build_output + if options.build_engine == DockerBuildEngine.PODMAN + else parse_image_id_from_buildkit_output + ) image_id = parse_image_id(result.stdout, result.stderr) docker_build_output_msg = "\n".join( ( diff --git a/src/python/pants/backend/docker/goals/package_image_test.py b/src/python/pants/backend/docker/goals/package_image_test.py index 6adb7492e78..f8e71e65b25 100644 --- a/src/python/pants/backend/docker/goals/package_image_test.py +++ b/src/python/pants/backend/docker/goals/package_image_test.py @@ -2489,9 +2489,7 @@ def test_get_context_root( def test_parse_image_id_from_docker_build_output( docker: DockerBinary, expected: str, stdout: str, stderr: str ) -> None: - assert expected == parse_image_id_from_buildkit_output( - docker, stdout.encode(), stderr.encode() - ) + assert expected == 
parse_image_id_from_buildkit_output(docker, stdout.encode(), stderr.encode()) ImageRefTest = namedtuple( diff --git a/src/python/pants/backend/docker/subsystems/docker_options.py b/src/python/pants/backend/docker/subsystems/docker_options.py index a370b1067a6..1bdc649cacb 100644 --- a/src/python/pants/backend/docker/subsystems/docker_options.py +++ b/src/python/pants/backend/docker/subsystems/docker_options.py @@ -10,7 +10,6 @@ from pants.backend.docker.engine_types import DockerBuildEngine, DockerEngines, DockerRunEngine from pants.backend.docker.package_types import DockerPushOnPackageBehavior from pants.backend.docker.registries import DockerRegistries -from pants.base.deprecated import resolve_conflicting_options from pants.core.util_rules.search_paths import ExecutableSearchPathsOptionMixin from pants.option.option_types import ( BoolOption, @@ -160,8 +159,11 @@ def env_vars(self) -> tuple[str, ...]: """ ), ) - engine = DataclassOption(default=DockerEngines(), mutually_exclusive_group="engines", help=softwrap( - """ + engine = DataclassOption( + default=DockerEngines(), + mutually_exclusive_group="engines", + help=softwrap( + """ The engines to use for Docker builds and runs. Valid values for `build` are: @@ -175,7 +177,8 @@ def env_vars(self) -> tuple[str, ...]: - `docker`: Use the Docker CLI to run containers. (https://docs.docker.com/reference/cli/docker/run/) - `podman`: Use Podman to run containers. (https://docs.podman.io/en/latest/markdown/podman-run.1.html) """ - )) + ), + ) use_buildx = BoolOption( default=True, help=softwrap( @@ -197,25 +200,21 @@ def build_engine(self) -> DockerBuildEngine: if use_buildx is not None: warning = '`[docker].use_buildx` is deprecated. Buildx is now the default Docker build engine. Use `[docker.engine].build = "docker"` instead.' if not use_buildx: - warning += " To use the legacy engine, add `DOCKER_BUILDKIT=0` to `[docker].env_vars`." 
+ warning += ( + " To use the legacy engine, add `DOCKER_BUILDKIT=0` to `[docker].env_vars`." + ) logger.warning(warning) return DockerBuildEngine.DOCKER experimental_enable_podman = self.options.get("experimental_enable_podman") if experimental_enable_podman is not None: - logger.warning('`[docker].experimental_enable_podman` is deprecated. Use `[docker.engine].build = "podman"` instead.') - return DockerBuildEngine.PODMAN if experimental_enable_podman else DockerBuildEngine.DOCKER + logger.warning( + '`[docker].experimental_enable_podman` is deprecated. Use `[docker.engine].build = "podman"` instead.' + ) + return ( + DockerBuildEngine.PODMAN if experimental_enable_podman else DockerBuildEngine.DOCKER + ) return self.engine.build - _run_engine = EnumOption( - default=DockerRunEngine.DOCKER, - enum_type=DockerRunEngine, - help=softwrap( - """ - The engine to use for Docker runs. - """ - ), - ) - @property def run_engine(self) -> DockerRunEngine: experimental_enable_podman = self.options.get("experimental_enable_podman", None) diff --git a/src/python/pants/option/option_types.py b/src/python/pants/option/option_types.py index df867519c7c..3d63090df8b 100644 --- a/src/python/pants/option/option_types.py +++ b/src/python/pants/option/option_types.py @@ -785,7 +785,7 @@ def _convert_(self, val: Any) -> dict[str, _ValueT]: # Generic Dataclass Concrete Option Classes # ----------------------------------------------------------------------------------------------- -class DataclassOption(_OptionBase[_OptT, _OptT]): +class DataclassOption(_OptionBase[_OptT, _DefaultT]): """A dataclass option. 
- If you provide a static non-None `default` parameter, the `dataclass_type` parameter will be @@ -854,6 +854,7 @@ def __new__( fingerprint: bool | None = None, ): instance = super().__new__( + cls, flag_name, default=default, help=help, From 62f82ecb3af2d1c31828d6f6c1218bfabe8a00e3 Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Tue, 24 Feb 2026 11:48:21 -0500 Subject: [PATCH 11/25] default to --- src/python/pants/backend/docker/goals/package_image.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/python/pants/backend/docker/goals/package_image.py b/src/python/pants/backend/docker/goals/package_image.py index 59c2f2e88cd..15e5bc78f87 100644 --- a/src/python/pants/backend/docker/goals/package_image.py +++ b/src/python/pants/backend/docker/goals/package_image.py @@ -551,7 +551,7 @@ async def build_docker_image( if options.build_engine == DockerBuildEngine.PODMAN else parse_image_id_from_buildkit_output ) - image_id = parse_image_id(result.stdout, result.stderr) + image_id = parse_image_id(result.stdout, result.stderr) or "" docker_build_output_msg = "\n".join( ( f"{options.build_engine.value.capitalize()} build output for {build_process.tags[0]}:", From e2339bcfaf4343b3dabe8701a0a573c1f169e918 Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Tue, 24 Feb 2026 11:56:19 -0500 Subject: [PATCH 12/25] remove ai hallucinated global options --- .../backend/docker/subsystems/docker_options.py | 8 -------- .../pants/backend/docker/util_rules/binaries.py | 16 ++++------------ 2 files changed, 4 insertions(+), 20 deletions(-) diff --git a/src/python/pants/backend/docker/subsystems/docker_options.py b/src/python/pants/backend/docker/subsystems/docker_options.py index 1bdc649cacb..dacbbee507c 100644 --- a/src/python/pants/backend/docker/subsystems/docker_options.py +++ b/src/python/pants/backend/docker/subsystems/docker_options.py @@ -151,14 +151,6 @@ def env_vars(self) -> tuple[str, ...]: """ ), ) - global_options = ShellStrListOption( - default=[], - 
help=softwrap( - """ - Global options to use for all Docker and BuildKit invocations. - """ - ), - ) engine = DataclassOption( default=DockerEngines(), mutually_exclusive_group="engines", diff --git a/src/python/pants/backend/docker/util_rules/binaries.py b/src/python/pants/backend/docker/util_rules/binaries.py index 4e9e04cf6e1..033f159633e 100644 --- a/src/python/pants/backend/docker/util_rules/binaries.py +++ b/src/python/pants/backend/docker/util_rules/binaries.py @@ -1,4 +1,4 @@ -# Copyright 2026 Pants project contributors (see CONTRIBUTORS.md). +# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). import os from abc import ABC, abstractmethod @@ -30,7 +30,6 @@ class BaseBinary(BinaryPath, ABC): """Base class for all binary paths.""" - global_options: tuple[str, ...] extra_env: Mapping[str, str] extra_input_digests: Mapping[str, Digest] | None @@ -38,11 +37,9 @@ def __init__( self, path: str, fingerprint: str | None = None, - global_options: tuple[str, ...] = (), extra_env: Mapping[str, str] | None = None, extra_input_digests: Mapping[str, Digest] | None = None, ) -> None: - object.__setattr__(self, "global_options", global_options) object.__setattr__(self, "extra_env", {} if extra_env is None else extra_env) object.__setattr__(self, "extra_input_digests", extra_input_digests) super().__init__(path, fingerprint) @@ -83,7 +80,7 @@ def build_image( env: Mapping[str, str], extra_args: tuple[str, ...] 
= (), ) -> Process: - args = [self.path, *self.global_options, "build", *extra_args] + args = [self.path, "build", *extra_args] for tag in tags: args.extend(["--tag", tag]) @@ -112,7 +109,7 @@ def build_image( def push_image(self, tag: str, env: Mapping[str, str] | None = None) -> Process: return Process( - argv=(self.path, *self.global_options, "push", tag), + argv=(self.path, "push", tag), cache_scope=ProcessCacheScope.PER_SESSION, description=f"Pushing docker image {tag}", env=self._get_process_environment(env or {}), @@ -130,7 +127,6 @@ def run_image( return Process( argv=( self.path, - *self.global_options, "run", *(docker_run_args or []), tag, @@ -166,7 +162,6 @@ def build_image( ) -> Process: args = [ self.path, - *self.global_options, "build", "--frontend", "dockerfile.v0", @@ -271,9 +266,7 @@ async def get_binary( first_path = paths.first_path_or_raise(request, rationale="interact with the docker daemon") if not docker_options.tools and not docker_options.optional_tools: - return binary_cls( - first_path.path, first_path.fingerprint, global_options=docker_options.global_options - ) + return binary_cls(first_path.path, first_path.fingerprint) tools_shims = await _get_docker_tools_shims( tools=docker_options.tools, @@ -284,7 +277,6 @@ async def get_binary( return binary_cls( first_path.path, first_path.fingerprint, - global_options=docker_options.global_options, extra_env={"PATH": tools_shims.path_component}, extra_input_digests=tools_shims.immutable_input_digests, ) From fd48a7577a3d357de7a1b2b6f2afcb8060b963bf Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Tue, 24 Feb 2026 12:09:26 -0500 Subject: [PATCH 13/25] use push engine for publish --- .../pants/backend/docker/engine_types.py | 6 +++ .../pants/backend/docker/goals/publish.py | 11 +++-- .../docker/subsystems/docker_options.py | 43 +++++++++---------- .../backend/docker/util_rules/binaries.py | 11 +++-- 4 files changed, 43 insertions(+), 28 deletions(-) diff --git 
a/src/python/pants/backend/docker/engine_types.py b/src/python/pants/backend/docker/engine_types.py index 73fcbbd1d3c..c856c9f0265 100644 --- a/src/python/pants/backend/docker/engine_types.py +++ b/src/python/pants/backend/docker/engine_types.py @@ -10,6 +10,11 @@ class DockerBuildEngine(Enum): PODMAN = "podman" +class DockerPushEngine(Enum): + DOCKER = "docker" + PODMAN = "podman" + + class DockerRunEngine(Enum): DOCKER = "docker" PODMAN = "podman" @@ -18,4 +23,5 @@ class DockerRunEngine(Enum): @dataclass(frozen=True) class DockerEngines: build: DockerBuildEngine = DockerBuildEngine.DOCKER + push: DockerPushEngine = DockerPushEngine.DOCKER run: DockerRunEngine = DockerRunEngine.DOCKER diff --git a/src/python/pants/backend/docker/goals/publish.py b/src/python/pants/backend/docker/goals/publish.py index db4ac7d1687..18bd4cbdb70 100644 --- a/src/python/pants/backend/docker/goals/publish.py +++ b/src/python/pants/backend/docker/goals/publish.py @@ -9,6 +9,7 @@ from itertools import chain from typing import DefaultDict, cast +from pants.backend.docker.engine_types import DockerPushEngine from pants.backend.docker.goals.package_image import ( DockerPackageFieldSet, GetImageRefsRequest, @@ -19,7 +20,7 @@ from pants.backend.docker.registries import DockerRegistryOptions from pants.backend.docker.subsystems.docker_options import DockerOptions from pants.backend.docker.target_types import DockerImageRegistriesField, DockerImageSkipPushField -from pants.backend.docker.util_rules.binaries import DockerBinary +from pants.backend.docker.util_rules.binaries import DockerBinary, get_docker, get_podman from pants.core.goals.package import PackageFieldSet from pants.core.goals.publish import ( CheckSkipRequest, @@ -114,7 +115,6 @@ async def check_if_skip_push( @rule async def push_docker_images( request: PublishDockerImageRequest, - docker: DockerBinary, options: DockerOptions, options_env_aware: DockerOptions.EnvironmentAware, ) -> PublishProcesses: @@ -148,6 +148,11 @@ async def 
push_docker_images( jobs: list[PublishPackages] = [] refs: list[str] = [] processes: list[Process | InteractiveProcess] = [] + match options.push_engine: + case DockerPushEngine.DOCKER: + binary = await get_docker(**implicitly()) + case DockerPushEngine.PODMAN: + binary = await get_podman(**implicitly()) for tag in tags: for registry in options.registries().registries.values(): @@ -159,7 +164,7 @@ async def push_docker_images( break else: refs.append(tag) - push_process = docker.push_image(tag, env) + push_process = binary.push_image(tag, env) if options.publish_noninteractively: processes.append(push_process) else: diff --git a/src/python/pants/backend/docker/subsystems/docker_options.py b/src/python/pants/backend/docker/subsystems/docker_options.py index dacbbee507c..77e976dd922 100644 --- a/src/python/pants/backend/docker/subsystems/docker_options.py +++ b/src/python/pants/backend/docker/subsystems/docker_options.py @@ -7,7 +7,7 @@ import sys from typing import Any -from pants.backend.docker.engine_types import DockerBuildEngine, DockerEngines, DockerRunEngine +from pants.backend.docker.engine_types import DockerBuildEngine, DockerEngines, DockerPushEngine, DockerRunEngine from pants.backend.docker.package_types import DockerPushOnPackageBehavior from pants.backend.docker.registries import DockerRegistries from pants.core.util_rules.search_paths import ExecutableSearchPathsOptionMixin @@ -186,6 +186,20 @@ def env_vars(self) -> tuple[str, ...]: mutually_exclusive_group="engines", ) + def _experimental_enable_podman_warning[E: DockerBuildEngine | DockerRunEngine | DockerPushEngine](self, engine_type: type[E], engine_opt: str) -> E | None: + experimental_enable_podman = self.options.get("experimental_enable_podman", None) + match experimental_enable_podman: + case None: + return getattr(self.engine, engine_opt) + case True: + engine = engine_type.PODMAN + case False: + engine = engine_type.DOCKER + logger.warning( + f'`[docker].experimental_enable_podman` is 
deprecated. Use `[docker.engine].{engine_opt} = "{engine.value}"` instead.' + ) + return engine + @property def build_engine(self) -> DockerBuildEngine: use_buildx = self.options.get("use_buildx") @@ -197,30 +211,15 @@ def build_engine(self) -> DockerBuildEngine: ) logger.warning(warning) return DockerBuildEngine.DOCKER - experimental_enable_podman = self.options.get("experimental_enable_podman") - if experimental_enable_podman is not None: - logger.warning( - '`[docker].experimental_enable_podman` is deprecated. Use `[docker.engine].build = "podman"` instead.' - ) - return ( - DockerBuildEngine.PODMAN if experimental_enable_podman else DockerBuildEngine.DOCKER - ) - return self.engine.build + return self._experimental_enable_podman_warning(DockerBuildEngine, "build") @property def run_engine(self) -> DockerRunEngine: - experimental_enable_podman = self.options.get("experimental_enable_podman", None) - match experimental_enable_podman: - case None: - return self.engine.run - case True: - engine = DockerRunEngine.PODMAN - case False: - engine = DockerRunEngine.DOCKER - logger.warning( - f'`[docker].experimental_enable_podman` is deprecated. Use `[docker.engine].run = "{engine.value}"` instead.' - ) - return engine + return self._experimental_enable_podman_warning(DockerRunEngine, "run") + + @property + def push_engine(self) -> DockerPushEngine: + return self._experimental_enable_podman_warning(DockerPushEngine, "push") _build_args = ShellStrListOption( help=softwrap( diff --git a/src/python/pants/backend/docker/util_rules/binaries.py b/src/python/pants/backend/docker/util_rules/binaries.py index 033f159633e..db90240c88c 100644 --- a/src/python/pants/backend/docker/util_rules/binaries.py +++ b/src/python/pants/backend/docker/util_rules/binaries.py @@ -1,10 +1,10 @@ # Copyright 2021 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). 
import os -from abc import ABC, abstractmethod +from abc import ABC from collections.abc import Mapping, Sequence from dataclasses import dataclass -from typing import TypeVar, cast +from typing import Protocol, TypeVar, cast from pants.backend.docker.subsystems.docker_options import DockerOptions from pants.backend.docker.util_rules.docker_build_args import DockerBuildArgs @@ -56,7 +56,8 @@ def _get_process_environment(self, env: Mapping[str, str]) -> Mapping[str, str]: ) return res - @abstractmethod + +class BuildBinaryProtocol(Protocol): def build_image( self, tags: tuple[str, ...], @@ -69,6 +70,10 @@ def build_image( ) -> Process: ... +class PushBinaryProtocol(Protocol): + def push_image(self, tag: str, env: Mapping[str, str] | None = None) -> Process: ... + + class _DockerPodmanMixin(BaseBinary): def build_image( self, From 184159fe963a0f281dbdfd968a511fdcc0976753 Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Tue, 24 Mar 2026 12:07:07 -0400 Subject: [PATCH 14/25] claude skill --- .claude/skills/pants-contributor/SKILL.md | 200 ++++++++++ .../skills/pants-contributor/architecture.md | 358 ++++++++++++++++++ .../pants-contributor/build-commands.md | 201 ++++++++++ .../pants-contributor/style-conventions.md | 225 +++++++++++ 4 files changed, 984 insertions(+) create mode 100644 .claude/skills/pants-contributor/SKILL.md create mode 100644 .claude/skills/pants-contributor/architecture.md create mode 100644 .claude/skills/pants-contributor/build-commands.md create mode 100644 .claude/skills/pants-contributor/style-conventions.md diff --git a/.claude/skills/pants-contributor/SKILL.md b/.claude/skills/pants-contributor/SKILL.md new file mode 100644 index 00000000000..a620d6c2136 --- /dev/null +++ b/.claude/skills/pants-contributor/SKILL.md @@ -0,0 +1,200 @@ +--- +name: pants-contributor +description: Expert guide for contributing to the Pants build system repo. 
Use when working on Pants source code, writing or running tests, creating or modifying backends/plugins, interacting with the build system, understanding the codebase architecture, or when the user needs help with any Pants development workflow. Triggers on questions about BUILD files, rules, targets, goals, running tests, linting, formatting, or any Pants build system interaction. +allowed-tools: Read, Grep, Glob, Bash, Edit, Write, Agent +--- + +# Pants Build System Contributor Guide + +You are an expert contributor to the [Pants](https://github.com/pantsbuild/pants) build system. This is the Pants repo itself -- a self-hosting build system where `./pants` runs Pants from source. You must follow the conventions, architecture, and workflows specific to this codebase. + +For detailed reference on specific topics, see: +- [Build System Commands](./build-commands.md) - How to run tests, lint, format, typecheck +- [Architecture Guide](./architecture.md) - Codebase structure, rules API, backends +- [Style and Conventions](./style-conventions.md) - Code style, patterns, PR workflow + +## Critical Rules + +### ALWAYS use Pants, NEVER use raw Python tools + +This is the most important rule. **Never** run `pytest`, `mypy`, `black`, `isort`, `flake8`, `ruff`, or any other Python tool directly. 
All Python tools must be invoked through Pants: + +```bash +# CORRECT - always use Pants +pants test src/python/pants/backend/python/goals/pytest_runner_test.py +pants lint src/python/pants/backend/python/goals/pytest_runner.py +pants fmt src/python/pants/backend/python/goals/pytest_runner.py +pants check src/python/pants/backend/python/goals/pytest_runner.py + +# WRONG - never do this +pytest src/python/pants/backend/python/goals/pytest_runner_test.py +mypy src/python/pants/backend/python/goals/pytest_runner.py +black src/python/pants/backend/python/goals/pytest_runner.py +ruff check src/python/pants/backend/python/goals/pytest_runner.py +``` + +### Running Pants from source + +In this repo, `./pants` is a special bootstrap script that runs Pants from the local source tree. Use `pants` (which resolves to the `./pants` script) for all operations. The first run compiles the Rust engine and may take several minutes. + +### Test file naming + +Test files must be named `*_test.py` (not `test_*.py`). Pants discovers tests by this suffix. + +### BUILD file conventions + +Every directory with source code needs a `BUILD` file. Common patterns in this repo: + +```python +# Standard source + test pattern +python_sources() +python_tests(name="tests") + +# With test utilities (conftest, fixtures, helpers) +python_test_utils(name="test_utils") + +# With overrides for specific files +python_sources( + overrides={ + "special.py": {"dependencies": ["//some/dep"]}, + }, +) +``` + +All BUILD files must have the copyright header: +```python +# Copyright 2024 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). 
+``` + +## Quick Reference + +### Common commands + +| Task | Command | +|------|---------| +| Run specific test file | `pants test path/to/file_test.py` | +| Run tests for changed files | `pants --changed-since=HEAD test` | +| Run tests with transitive deps | `pants --changed-since=HEAD --changed-dependents=transitive test` | +| Run a single test function | `pants test path/to/file_test.py -- -k test_function_name` | +| Debug a test (interactive) | `pants test --debug path/to/file_test.py` | +| Debug with specific test | `pants test --debug path/to/file_test.py -- -k test_name` | +| Lint changed files | `pants --changed-since=HEAD lint` | +| Format changed files | `pants --changed-since=HEAD fmt` | +| Type-check changed files | `pants --changed-since=HEAD check` | +| List all targets in dir | `pants list path/to/dir:` | +| Show dependencies | `pants dependencies path/to/target` | +| Show dependents | `pants dependents path/to/target` | +| Show target info | `pants peek path/to/target` | +| Generate BUILD files | `pants tailor` | +| Validate BUILD files | `pants lint --only=ruff-format '**BUILD'` | +| Run pre-push checks | `build-support/githooks/pre-push` | + +### Target address syntax + +- `path/to/dir:target_name` - specific target +- `path/to/dir:` - all targets in directory +- `path/to/dir::` - all targets recursively +- `path/to/file.py` - file address (inferred target) +- `::` - everything in the repo + +### Key directories + +| Path | Purpose | +|------|---------| +| `src/python/pants/` | Core Pants Python source code | +| `src/python/pants/backend/` | Language-specific backends (python, go, java, docker, etc.) 
| +| `src/python/pants/core/` | Core goals (test, lint, fmt, check, package, run) | +| `src/python/pants/engine/` | The Pants engine (rules, targets, processes, fs) | +| `src/rust/engine/` | Rust engine implementation | +| `pants-plugins/` | In-repo plugins (internal_plugins, pants_explorer) | +| `3rdparty/python/` | Third-party Python dependency declarations and lockfiles | +| `build-support/` | Build scripts, CI helpers, git hooks | +| `testprojects/` | Test fixture projects used by integration tests | +| `docs/docs/` | Documentation source (Docusaurus MDX) | + +### Python resolves (lockfiles) + +The repo uses multiple Python resolves configured in `pants.toml`: +- `python-default` - Main resolve for Pants source code +- `pytest` - Test runner dependencies +- `mypy` - Type checker dependencies +- `flake8` - Linter dependencies + +To regenerate lockfiles: `pants generate-lockfiles --resolve=` + +## Writing Rules (Plugin Code) + +### Rule basics + +Rules are async Python functions decorated with `@rule`: + +```python +from pants.engine.rules import rule, collect_rules + +@rule +async def my_rule(request: MyRequest) -> MyResult: + # Pure function - no side effects! + # Use engine APIs for processes, filesystem, etc. + return MyResult(...) + +def rules(): + return collect_rules() +``` + +### Key patterns + +- **Frozen dataclasses** for all request/result types: `@dataclass(frozen=True)` +- **`await` engine intrinsics** for processes, filesystem ops, etc. 
+- **`concurrently()`** for parallel rule execution (never `await` in a loop) +- **`**implicitly()`** for implicit parameter injection +- **`collect_rules()`** to auto-discover rules in a module +- **Register in `register.py`** with `rules()` and `target_types()` functions + +### Testing rules + +Tests use `RuleRunner` for integration testing or `run_rule_with_mocks` for unit testing: + +```python +from pants.testutil.rule_runner import RuleRunner, QueryRule + +@pytest.fixture +def rule_runner() -> RuleRunner: + return RuleRunner( + rules=[*my_module.rules(), QueryRule(Output, [Input])], + target_types=[MyTarget], + ) + +def test_my_rule(rule_runner: RuleRunner) -> None: + rule_runner.write_files({ + "src/BUILD": "my_target(name='t', source='f.py')", + "src/f.py": "...", + }) + result = rule_runner.request(Output, [Input(...)]) + assert result == expected +``` + +### Backend structure + +Each backend lives in `src/python/pants/backend//` with: +- `register.py` - Entry point: exports `rules()`, `target_types()`, `build_file_aliases()` +- `target_types.py` - Target and field definitions +- `goals/` - Goal implementations (test, lint, fmt, etc.) 
+- `util_rules/` - Shared utility rules +- `subsystems/` - Tool subsystem definitions +- `dependency_inference/` - Dep inference rules + +## Style Guide Essentials + +- Use **f-strings** (not `.format()` or `%`) +- Prefer **conditional expressions** (ternary) +- Prefer **early returns** +- Use **collection literals** and **unpacking** for merging +- Prefer **comprehensions** over loops for creating collections +- Use **frozen dataclasses** (`@dataclass(frozen=True)`) +- **All functions must have type annotations** (parameters + return type) +- Use **`cast()`** over variable annotations for type overrides +- Use **Pytest-style** tests (not `unittest`) +- Comments: complete sentences, end with period, max 100 chars, space after `#` +- TODOs: link to GitHub issue `# TODO(#1234): Description.` +- Use `softwrap` helper for multiline help strings diff --git a/.claude/skills/pants-contributor/architecture.md b/.claude/skills/pants-contributor/architecture.md new file mode 100644 index 00000000000..edb4e032c6e --- /dev/null +++ b/.claude/skills/pants-contributor/architecture.md @@ -0,0 +1,358 @@ +# Pants Codebase Architecture + +## Overview + +Pants is a build system with a Python frontend and a Rust engine backend. The codebase is self-hosting: the `./pants` script in the repo root runs Pants from the local sources, compiling the Rust engine as needed. + +## Source Code Layout + +``` +src/ + python/ + pants/ # Core Pants package + backend/ # Language-specific backends + python/ # Python support (goals, rules, target types) + go/ # Go support + java/ # Java support + scala/ # Scala support + docker/ # Docker support + shell/ # Shell support + javascript/ # JavaScript/TypeScript support + helm/ # Helm chart support + k8s/ # Kubernetes support + cc/ # C/C++ support + rust/ # Rust support + experimental/ # Experimental backends + build_files/ # BUILD file formatting/fixing + codegen/ # Code generation (protobuf, thrift, etc.) 
+ project_info/ # Introspection goals (filedeps, peek, etc.) + plugin_development/ # Plugin dev support + core/ # Core goals and subsystems + goals/ # test, lint, fmt, check, package, run, etc. + target_types.py # Core target types (files, resources, etc.) + util_rules/ # Core utility rules + engine/ # The Python side of the engine + rules.py # @rule, collect_rules, concurrently, implicitly + target.py # Target, Field base classes + process.py # Process execution types + fs.py # Filesystem types (Digest, Snapshot, etc.) + intrinsics.py # Engine intrinsic functions + unions.py # Union/polymorphism system + internals/ # Internal engine implementation + bin/ # Entry points (pants_loader, native_client) + option/ # Options/configuration system + build_graph/ # Build graph construction + goal/ # Goal infrastructure + help/ # Help system + pantsd/ # Pants daemon + util/ # Utility modules + testutil/ # Test utilities (RuleRunner, etc.) + rule_runner.py # RuleRunner for integration tests + pants_integration_test.py # Full integration test harness + rust/ + engine/ # Rust engine implementation + Cargo.toml # Rust workspace root + rule_graph/ # Rule graph construction + process_execution/ # Process sandboxing and execution + fs/ # Filesystem implementation + watch/ # File watching + ... 
+ +pants-plugins/ # In-repo plugins + internal_plugins/ # Plugins used by the Pants repo itself + releases/ # Release management plugin + test_lockfile_fixtures/ # Test lockfile fixture plugin + pants_explorer/ # Pants Explorer web UI backend + +3rdparty/ + python/ # Python dependency declarations + requirements.txt # Main requirements + user_reqs.lock # Main lockfile (python-default resolve) + pytest.lock # Pytest resolve lockfile + mypy.lock # MyPy resolve lockfile + flake8.lock # Flake8 resolve lockfile + +build-support/ + bin/ # Build support scripts + generate_builtin_lockfiles.py # Regenerate tool lockfiles + generate_completions.py # Generate shell completions + flake8/ # Flake8 configuration and plugins + githooks/ # Git hooks (pre-push) + migration-support/ # Migration helpers + preambles/ # License header templates + +testprojects/ # Test fixture projects + src/python/ # Python test projects + src/go/ # Go test projects + src/java/ # Java test projects + src/shell/ # Shell test projects + +docs/ + docs/ # Documentation (Docusaurus) + introduction/ # What is Pants, key concepts + getting-started/ # Installation, first steps + using-pants/ # User guide + writing-plugins/ # Plugin development guide + python/ # Python-specific docs + contributions/ # Contributor guide + development/ # Dev setup, style guide, architecture + releases/ # Release process +``` + +## Engine Architecture + +### The Rule Graph + +Pants's engine is built around a **rule graph** - a directed graph where: +- **Rules** (`@rule` decorated async functions) are internal nodes +- **Queries** are entry points (roots) +- **Params** are typed, hashable input values (leaves) + +Key properties: +- Rules are **pure functions** mapping input types to output types +- The engine handles **memoization**, **concurrency**, and **caching** +- Type annotations are used at runtime for dependency injection +- The engine uses **exact type matching** (no subtyping) + +### Rule Execution + +```python +from 
pants.engine.rules import rule, collect_rules, concurrently, implicitly +from pants.engine.intrinsics import execute_process + +@rule +async def my_rule(request: MyRequest) -> MyResult: + # Await other rules (engine manages execution) + intermediate = await some_other_rule(request.field, **implicitly()) + + # Run external processes through the engine + process_result = await execute_process( + Process(argv=["/bin/echo", "hello"], description="Echo"), + **implicitly() + ) + + # Parallel execution with concurrently() + results = await concurrently( + process_thing(item, **implicitly()) for item in items + ) + + return MyResult(data=process_result.stdout) +``` + +### Union Rules (Polymorphism) + +Since the engine uses exact type matching, polymorphism is achieved through **unions**: + +```python +from pants.engine.unions import UnionRule, union + +@union +class LintRequest: + pass + +@dataclass(frozen=True) +class MyLinterRequest(LintRequest): + ... + +# Register the union member +def rules(): + return [ + *collect_rules(), + UnionRule(LintRequest, MyLinterRequest), + ] +``` + +### Rule Decorators + +| Decorator | Purpose | +|-----------|---------| +| `@rule` | Standard async rule (cached, pure) | +| `@goal_rule` | Top-level CLI goal entry point | +| `@uncacheable_rule` | Rule that should not be memoized | + +### Key Engine Types + +| Type | Module | Purpose | +|------|--------|---------| +| `Process` | `pants.engine.process` | Run an external process | +| `ProcessResult` | `pants.engine.process` | Process execution result | +| `Digest` | `pants.engine.fs` | Content-addressed file tree | +| `Snapshot` | `pants.engine.fs` | Digest with file/dir listing | +| `MergeDigests` | `pants.engine.fs` | Combine multiple digests | +| `CreateDigest` | `pants.engine.fs` | Create files from content | +| `Target` | `pants.engine.target` | A build target | +| `Field` | `pants.engine.target` | A target field | +| `FieldSet` | `pants.engine.target` | Subset of fields for a rule | +| 
`Address` | `pants.engine.addresses` | Target address | + +## Backend Structure + +Each backend in `src/python/pants/backend//` follows this pattern: + +### register.py (entry point) + +```python +from pants.backend.foo import target_types_rules +from pants.backend.foo.goals import test, lint +from pants.backend.foo.target_types import FooTarget, FooTestTarget + +def rules(): + return [ + *target_types_rules.rules(), + *test.rules(), + *lint.rules(), + ] + +def target_types(): + return [FooTarget, FooTestTarget] +``` + +The backend is activated in `pants.toml` via `backend_packages`: + +```toml +[GLOBAL] +backend_packages = ["pants.backend.foo"] +``` + +### Target types + +```python +from pants.engine.target import ( + Target, + SingleSourceField, + MultipleSourcesField, + Dependencies, + StringField, +) + +class FooSourceField(SingleSourceField): + expected_file_extensions = (".foo",) + +class FooTarget(Target): + alias = "foo_source" + core_fields = (FooSourceField, Dependencies) + help = "A Foo source file." +``` + +### Goal rules + +```python +from pants.core.goals.test import TestResult, TestRequest + +@dataclass(frozen=True) +class FooTestFieldSet(TestRequest.FieldSet): + required_fields = (FooTestSourceField,) + source: FooTestSourceField + +class FooTestRequest(TestRequest): + tool_subsystem = FooSubsystem + field_set_type = FooTestFieldSet + +@rule +async def run_foo_test(batch: FooTestRequest.Batch) -> TestResult: + ... + +def rules(): + return [ + *collect_rules(), + *FooTestRequest.rules(), + ] +``` + +## Configuration + +### pants.toml + +The main configuration file. 
Key sections: +- `[GLOBAL]` - Backend packages, pythonpath, sandboxing +- `[source]` - Source root patterns +- `[python]` - Python interpreter constraints, resolves, pip version +- `[pytest]` - Test runner args, resolve +- `[mypy]` - Type checker args, resolve +- `[test]` - Test timeout, env vars +- `[flake8]`, `[shellcheck]`, `[shfmt]` - Linter configs + +### pants.ci.toml + +CI-specific overrides that layer on top of `pants.toml`. + +### Python version + +The repo currently targets **Python 3.14** (`interpreter_constraints = ["==3.14.*"]` in `pants.toml`). + +## Testing Approaches + +### 1. Unit tests (plain Python) + +Test pure Python functions without the engine: + +```python +def test_my_helper() -> None: + assert my_function("input") == "expected" +``` + +### 2. run_rule_with_mocks (rule unit tests) + +Test rule logic with mocked dependencies: + +```python +from pants.testutil.rule_runner import run_rule_with_mocks + +def test_my_rule() -> None: + result = run_rule_with_mocks( + my_rule, + rule_args=[MyRequest(...)], + mock_calls={ + "my.module.some_dependency": lambda req: MockResult(...), + }, + ) + assert result == expected +``` + +### 3. RuleRunner (rule integration tests) + +Test rules with a real engine and isolated filesystem: + +```python +from pants.testutil.rule_runner import RuleRunner, QueryRule + +@pytest.fixture +def rule_runner() -> RuleRunner: + return RuleRunner( + rules=[ + *my_module.rules(), + QueryRule(MyOutput, [MyInput]), + ], + target_types=[MyTarget], + ) + +def test_integration(rule_runner: RuleRunner) -> None: + rule_runner.write_files({ + "src/BUILD": "my_target(source='f.py')", + "src/f.py": "content", + }) + rule_runner.set_options(["--my-option=value"]) + result = rule_runner.request(MyOutput, [MyInput(...)]) + assert result.field == expected +``` + +### 4. 
run_pants() (full integration tests) + +Test with a real Pants process: + +```python +from pants.testutil.pants_integration_test import run_pants, setup_tmpdir + +def test_end_to_end() -> None: + sources = { + "src/BUILD": "python_sources()", + "src/app.py": "print('hello')", + } + with setup_tmpdir(sources) as tmpdir: + result = run_pants([ + "--backend-packages=['pants.backend.python']", + "lint", + f"{tmpdir}/src:", + ]) + result.assert_success() +``` diff --git a/.claude/skills/pants-contributor/build-commands.md b/.claude/skills/pants-contributor/build-commands.md new file mode 100644 index 00000000000..f8fb1c71e28 --- /dev/null +++ b/.claude/skills/pants-contributor/build-commands.md @@ -0,0 +1,201 @@ +# Pants Build Commands Reference + +## Running Tests + +### Basic test commands + +```bash +# Test a specific file +pants test src/python/pants/backend/python/goals/pytest_runner_test.py + +# Test all targets in a directory +pants test src/python/pants/backend/python/goals: + +# Test recursively +pants test src/python/pants/backend/python/goals:: + +# Test changed files only +pants --changed-since=HEAD test + +# Test changed files and their transitive dependents +pants --changed-since=HEAD --changed-dependents=transitive test + +# Alias for the above +pants --all-changed test +``` + +### Test options + +```bash +# Run a specific test function +pants test path/to/file_test.py -- -k test_function_name + +# Run tests matching a pattern +pants test path/to/file_test.py -- -k "test_foo or test_bar" + +# Run in debug mode (interactive, for debuggers) +pants test --debug path/to/file_test.py + +# Run with verbose output +pants test path/to/file_test.py -- -vvs + +# Run with no timeout (useful for debugging) +pants test --timeout-default=0 path/to/file_test.py + +# Show test output even on success +pants test --output=all path/to/file_test.py + +# Force re-run (skip cache) +pants --no-local-cache test path/to/file_test.py +``` + +### Test configuration + +Tests are 
configured in `pants.toml`: +- Default args: `--no-header --noskip -vv` +- Default timeout: 60 seconds +- Pytest is installed from the `pytest` resolve +- Test execution slot variable: `TEST_EXECUTION_SLOT` + +## Linting + +```bash +# Lint specific files +pants lint src/python/pants/backend/python/goals/pytest_runner.py + +# Lint changed files +pants --changed-since=HEAD lint + +# Lint with a specific linter only +pants lint --only=flake8 path/to/file.py +pants lint --only=ruff-check path/to/file.py +pants lint --only=ruff-format path/to/file.py + +# Lint BUILD files +pants lint --only=ruff-format '**BUILD' +``` + +Active linters (configured in `pants.toml`): +- `flake8` - Python linter (config at `build-support/flake8/.flake8`) +- `ruff check` - Fast Python linter +- `ruff format` - Python and BUILD file formatter +- `shellcheck` - Shell script linter +- `shfmt` - Shell script formatter +- `hadolint` - Dockerfile linter + +## Formatting + +```bash +# Format specific files +pants fmt src/python/pants/backend/python/goals/pytest_runner.py + +# Format changed files +pants --changed-since=HEAD fmt + +# Format everything (use sparingly) +pants fmt :: +``` + +## Type Checking + +```bash +# Type-check specific files +pants check src/python/pants/backend/python/goals/pytest_runner.py + +# Type-check changed files +pants --changed-since=HEAD check + +# Type-check with transitive dependents +pants --changed-since=HEAD --changed-dependents=transitive check +``` + +MyPy is configured in `pants.toml` and installed from the `mypy` resolve. 
+ +## Dependency Management + +```bash +# Show dependencies of a target +pants dependencies src/python/pants/backend/python: + +# Show reverse dependencies (what depends on this) +pants dependents src/python/pants/backend/python: + +# Generate/update lockfiles +pants generate-lockfiles --resolve=python-default +pants generate-lockfiles --resolve=pytest +pants generate-lockfiles --resolve=mypy + +# Generate all lockfiles +pants generate-lockfiles +``` + +Third-party Python dependencies are declared in `3rdparty/python/` using `python_requirements()` targets referencing `requirements.txt` files. + +## Introspection + +```bash +# List targets in a directory +pants list src/python/pants/backend/python: + +# Show detailed target info (as JSON) +pants peek src/python/pants/backend/python/goals:tests + +# Show file dependencies +pants filedeps src/python/pants/backend/python/goals: + +# Show the paths between two targets +pants paths --from=src/python/pants/engine --to=src/python/pants/core + +# Count lines of code +pants count-loc :: +``` + +## BUILD File Management + +```bash +# Auto-generate BUILD files for new source files +pants tailor + +# Check for BUILD file issues +pants lint '**BUILD' + +# Update BUILD file formatting +pants fmt '**BUILD' + +# Update BUILD files for deprecations +pants update-build-files :: +``` + +## Running Scripts + +```bash +# Run a Python script through Pants +pants run src/python/pants/backend/python/providers/python_build_standalone/scripts/generate_urls.py + +# Run build support scripts +pants run build-support/bin/generate_builtin_lockfiles.py -- +``` + +## Pre-push Checks + +Before pushing, run the pre-push hook to catch common issues: + +```bash +build-support/githooks/pre-push +``` + +Or run format + lint on changed files: + +```bash +pants --changed-since=HEAD fmt +pants --changed-since=HEAD lint +pants --changed-since=HEAD check +``` + +## Performance Tips + +- Use `--changed-since=HEAD` to only operate on changed files +- Use 
`pantsd` (enabled by default) for faster startup +- For benchmarking: `hyperfine --warmup=1 --runs=5 'pants '` +- Cold cache benchmark: `hyperfine --runs=5 'pants --no-pantsd --no-local-cache '` +- To clear caches: restart pantsd with `pants --no-pantsd ` or kill it diff --git a/.claude/skills/pants-contributor/style-conventions.md b/.claude/skills/pants-contributor/style-conventions.md new file mode 100644 index 00000000000..4f6e89c64b9 --- /dev/null +++ b/.claude/skills/pants-contributor/style-conventions.md @@ -0,0 +1,225 @@ +# Style Guide and Conventions + +## Code Style + +### Python style + +- **f-strings**: Use f-strings, not `.format()` or `%` +- **Ternary expressions**: Prefer `x = "a" if cond else "b"` over if/else blocks +- **Early returns**: Prefer guard clauses, avoid deep nesting +- **Collection literals**: Use `{a}`, `(a, b)`, `{"k": v}` not constructors +- **Unpacking for merging**: `[*l1, *l2, "elem"]` not `l1 + l2 + ["elem"]` +- **Comprehensions**: Prefer over `map`/`filter` and explicit loops +- **Frozen dataclasses**: Always use `@dataclass(frozen=True)` for engine types +- **Type annotations**: Required on all functions (params + return type) +- **`cast()` over annotations**: Prefer `x = cast(str, untyped())` over `x: str = untyped()` +- **Pytest style**: Never use `unittest.TestCase`; use plain functions with `assert` +- **Error codes in type:ignore**: `# type: ignore[assignment]` not `# type: ignore` +- **Protocols for params**: Use `Iterable`, `Sequence`, `Mapping` not `List`, `Dict` in function params +- **Precise return types**: Return types should be concrete (`list[str]`, not `Iterable[str]`) + +### Comments + +- Must start with `# ` (space after hash) +- Must be complete sentences ending with a period +- Max 100 characters per line +- Explain **why**, not **what** +- TODOs must reference a GitHub issue: `# TODO(#1234): Description.` + +### Help strings + +For subsystem/target help strings that render as documentation: +- Use 
`softwrap()` for multiline strings +- Use `help_text()` for `Field`/`Target` subclasses if needed for mypy +- Use backticks for config sections, CLI args, target names, inline code +- Use 2-space indentation for bullet/numbered lists (or `bullet_list()`) +- Never use indentation for code blocks; use triple-backtick blocks only +- Text in angle brackets (``) will be ignored unless wrapped in backticks + +### Imports + +- Imports are auto-sorted by `isort` (via ruff) +- Prefer absolute imports +- Group: stdlib, third-party, pants imports + +### File headers + +All source files must have this copyright header: + +```python +# Copyright 2024 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). +``` + +BUILD files use the same header (configured in `[tailor]` in `pants.toml`). + +## Engine-Specific Conventions + +### Types for the rule graph + +- Rule params and returns must be **hashable** and **immutable** +- Use `tuple` not `list`, `FrozenDict` not `dict`, `FrozenOrderedSet` not `set` +- Use `Collection[T]` to newtype a `tuple` +- Use `DeduplicatedCollection[T]` to newtype a `FrozenOrderedSet` +- The engine uses **exact type matching** (no subtype consideration) +- Newtype freely to disambiguate: `class Name(str): pass` + +### Rule patterns + +```python +# Standard rule with collect_rules +from pants.engine.rules import collect_rules, rule + +@rule +async def my_rule(request: MyRequest) -> MyResult: + ... 
+ +def rules(): + return collect_rules() +``` + +```python +# Using concurrently (NEVER await in a loop) +results = await concurrently( + process_item(item, **implicitly()) for item in items +) +``` + +```python +# Running a process +from pants.engine.intrinsics import execute_process +from pants.engine.process import Process + +result = await execute_process( + Process( + argv=["tool", "--flag", "arg"], + input_digest=my_digest, + description="Running tool", + ), + **implicitly() +) +``` + +### Registering rules + +Each module has a `rules()` function, aggregated in `register.py`: + +```python +# In each module +def rules(): + return collect_rules() + +# In register.py +def rules(): + return [ + *module1.rules(), + *module2.rules(), + ] + +def target_types(): + return [Target1, Target2] +``` + +## Testing Conventions + +### Test function naming + +- Test files: `*_test.py` (NOT `test_*.py`) +- Test functions: `def test_descriptive_name() -> None:` +- All test functions must have `-> None` return annotation + +### Integration vs unit tests + +- Unit tests: `*_test.py` - run in normal test target +- Integration tests: `*_integration_test.py` - get their own BUILD target with longer timeouts + +```python +# In BUILD files, integration tests are separated: +python_tests( + name="tests", + sources=["*_test.py", "!*_integration_test.py"], +) +python_tests( + name="integration", + sources=["*_integration_test.py"], + timeout=240, +) +``` + +### Skipping tests + +The repo conftest.py enforces `--noskip` - skipped tests are treated as errors. If a test legitimately needs to be skippable (e.g., platform-specific), mark it: + +```python +@pytest.mark.no_error_if_skipped +def test_platform_specific() -> None: + ... 
+``` + +### RuleRunner fixtures + +```python +@pytest.fixture +def rule_runner() -> RuleRunner: + return RuleRunner( + rules=[*my_rules(), QueryRule(Output, [Input])], + target_types=[MyTarget], + ) +``` + +- Each test gets a fresh `RuleRunner` instance (via fixture) +- Use `rule_runner.write_files()` to set up test content +- Use `rule_runner.set_options()` to configure options +- Use `rule_runner.request()` to invoke rules +- Use `rule_runner.run_goal_rule()` for goal rules + +### Integration tests + +- Use `setup_tmpdir()` + `run_pants()` for full integration tests +- `run_pants()` is hermetic by default (doesn't read `pants.toml`) +- Pass `--backend-packages` explicitly in test args +- Use `result.assert_success()` / `result.assert_failure()` + +## PR and Contribution Workflow + +### Before submitting + +1. Format: `pants --changed-since=HEAD fmt` +2. Lint: `pants --changed-since=HEAD lint` +3. Type-check: `pants --changed-since=HEAD check` +4. Test: `pants --changed-since=HEAD --changed-dependents=transitive test` +5. Or run the pre-push hook: `build-support/githooks/pre-push` + +### Commit messages + +- Start with a verb (Add, Fix, Update, Remove, Refactor) +- Be concise but descriptive +- Reference GitHub issues where applicable + +### Cherry-picking + +For backporting fixes to stable branches: +1. Label PR as `needs-cherrypick` +2. Set milestone to oldest release branch +3. Automation handles cherry-pick after merge + +## Maintenance Tasks + +### Updating external tool versions + +For `ExternalTool` subclasses (downloaded binaries): +1. Download new version for each platform +2. Compute sha256 and byte length: `tee >(shasum -a 256) >(wc -c) > /dev/null < archive` +3. Update `default_version` and `default_known_versions` + +### Updating Python tool versions + +For `PythonToolBase` subclasses (PyPI packages): +1. Update `default_requirements` and/or `default_interpreter_constraints` +2. 
Run `build-support/bin/generate_builtin_lockfiles.py ` + +### Updating PEX + +PEX is special - update both: +1. The `pex-cli` subsystem in `src/python/pants/backend/python/util_rules/pex_cli.py` +2. The requirement in `3rdparty/python/requirements.txt` and regenerate lockfile From e8047d851ffa2c2214cd8a179e4bf36e6576d777 Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Tue, 24 Mar 2026 14:11:07 -0400 Subject: [PATCH 15/25] typechecking --- .../backend/docker/goals/package_image.py | 17 +++++++++---- .../docker/goals/package_image_test.py | 22 ++++++++--------- .../pants/backend/docker/goals/publish.py | 8 ++++++- .../docker/subsystems/docker_options.py | 19 ++++++++++----- .../pants/backend/docker/target_types.py | 2 +- .../pants/backend/docker/target_types_test.py | 8 ++++++- .../docker/util_rules/docker_binary_test.py | 24 ++++--------------- .../util_rules/docker_build_context_test.py | 4 ++-- .../backend/helm/util_rules/post_renderer.py | 4 ++-- src/python/pants/ng/external_binary.py | 9 ++++--- src/python/pants/option/option_types.py | 4 +++- 11 files changed, 70 insertions(+), 51 deletions(-) diff --git a/src/python/pants/backend/docker/goals/package_image.py b/src/python/pants/backend/docker/goals/package_image.py index 15e5bc78f87..ed1d3c575bc 100644 --- a/src/python/pants/backend/docker/goals/package_image.py +++ b/src/python/pants/backend/docker/goals/package_image.py @@ -32,9 +32,17 @@ DockerImageTagsRequest, DockerImageTargetStageField, OptionValueFormatter, + ValidateOptionsMixin, get_docker_image_tags, ) -from pants.backend.docker.util_rules.binaries import get_buildctl, get_docker, get_podman +from pants.backend.docker.util_rules.binaries import ( + BuildctlBinary, + DockerBinary, + PodmanBinary, + get_buildctl, + get_docker, + get_podman, +) from pants.backend.docker.util_rules.docker_build_context import ( DockerBuildContext, DockerBuildContextRequest, @@ -343,9 +351,9 @@ def get_build_options( else (DockerBuildOptionsFieldMixin, 
"docker_build_options") ) for field_type in target.field_types: - if issubclass(field_type, engine_build_options_field_type) and target[ - field_type - ].validate_options(docker_options, context): + if issubclass(field_type, engine_build_options_field_type) and cast( + ValidateOptionsMixin, target[field_type] + ).validate_options(docker_options, context): gen_options_func = getattr(target[field_type], gen_options_func_name) yield from gen_options_func( docker=docker_options, @@ -470,6 +478,7 @@ async def get_docker_image_build_process( "__UPSTREAM_IMAGE_IDS": ",".join(context.upstream_image_ids), } context_root = field_set.get_context_root(options.default_context_root) + binary: BuildctlBinary | PodmanBinary | DockerBinary match options.build_engine: case DockerBuildEngine.BUILDKIT: binary = await get_buildctl(**implicitly()) diff --git a/src/python/pants/backend/docker/goals/package_image_test.py b/src/python/pants/backend/docker/goals/package_image_test.py index f8e71e65b25..45d8c5995bc 100644 --- a/src/python/pants/backend/docker/goals/package_image_test.py +++ b/src/python/pants/backend/docker/goals/package_image_test.py @@ -44,7 +44,7 @@ DockerImageTagsRequest, DockerImageTarget, ) -from pants.backend.docker.util_rules.binaries import DockerBinary +from pants.backend.docker.util_rules.binaries import DockerBinary, PodmanBinary from pants.backend.docker.util_rules.docker_build_args import ( DockerBuildArgs, DockerBuildArgsRequest, @@ -2322,14 +2322,14 @@ def test_get_context_root( "docker, expected, stdout, stderr", [ ( - DockerBinary("/bin/docker", "1234", is_podman=False), + DockerBinary("/bin/docker", "1234"), "", "", "", ), # Docker ( - DockerBinary("/bin/docker", "1234", is_podman=False), + DockerBinary("/bin/docker", "1234"), "0e09b442b572", "", dedent( @@ -2345,7 +2345,7 @@ def test_get_context_root( ), # Buildkit without step duration ( - DockerBinary("/bin/docker", "1234", is_podman=False), + DockerBinary("/bin/docker", "1234"), 
"sha256:7805a7da5f45a70bb9e47e8de09b1f5acd8f479dda06fb144c5590b9d2b86dd7", dedent( """\ @@ -2368,7 +2368,7 @@ def test_get_context_root( ), # Buildkit with step duration ( - DockerBinary("/bin/docker", "1234", is_podman=False), + DockerBinary("/bin/docker", "1234"), "sha256:7805a7da5f45a70bb9e47e8de09b1f5acd8f479dda06fb144c5590b9d2b86dd7", dedent( """\ @@ -2387,7 +2387,7 @@ def test_get_context_root( ), # Buildkit with containerd-snapshotter 0.12.1 ( - DockerBinary("/bin/docker", "1234", is_podman=False), + DockerBinary("/bin/docker", "1234"), "sha256:b2b51838586286a9e544ddb31b3dbf7f6a99654d275b6e56b5f69f90138b4c0e", dedent( """\ @@ -2406,7 +2406,7 @@ def test_get_context_root( ), # Buildkit with containerd-snapshotter and cross platform 0.12.1 ( - DockerBinary("/bin/docker", "1234", is_podman=False), + DockerBinary("/bin/docker", "1234"), "sha256:3c72de0e05bb75247e68e124e6500700f6e0597425db2ee9f08fd59ef28cea0f", dedent( """\ @@ -2428,7 +2428,7 @@ def test_get_context_root( ), # Buildkit with containerd-snapshotter 0.13.1 ( - DockerBinary("/bin/docker", "1234", is_podman=False), + DockerBinary("/bin/docker", "1234"), "sha256:d15432046b4feaebb70370fad4710151dd8f0b9741cb8bc4d20c08ed8847f17a", dedent( """\ @@ -2451,7 +2451,7 @@ def test_get_context_root( ), # Buildkit with containerd-snapshotter 0.17.1 and disabled attestations ( - DockerBinary("/bin/docker", "1234", is_podman=False), + DockerBinary("/bin/docker", "1234"), "sha256:6c3aff6414781126578b3e7b4a217682e89c616c0eac864d5b3ea7c87f1094d0", dedent( """\ @@ -2471,7 +2471,7 @@ def test_get_context_root( ), # Podman ( - DockerBinary("/bin/podman", "abcd", is_podman=True), + PodmanBinary("/bin/podman", "abcd"), "a85499e9039a4add9712f7ea96a4aa9f0edd57d1008c6565822561ceed927eee", dedent( """\ @@ -2489,7 +2489,7 @@ def test_get_context_root( def test_parse_image_id_from_docker_build_output( docker: DockerBinary, expected: str, stdout: str, stderr: str ) -> None: - assert expected == 
parse_image_id_from_buildkit_output(docker, stdout.encode(), stderr.encode()) + assert expected == parse_image_id_from_buildkit_output(stdout.encode(), stderr.encode()) ImageRefTest = namedtuple( diff --git a/src/python/pants/backend/docker/goals/publish.py b/src/python/pants/backend/docker/goals/publish.py index 18bd4cbdb70..0073fd827d2 100644 --- a/src/python/pants/backend/docker/goals/publish.py +++ b/src/python/pants/backend/docker/goals/publish.py @@ -20,7 +20,12 @@ from pants.backend.docker.registries import DockerRegistryOptions from pants.backend.docker.subsystems.docker_options import DockerOptions from pants.backend.docker.target_types import DockerImageRegistriesField, DockerImageSkipPushField -from pants.backend.docker.util_rules.binaries import DockerBinary, get_docker, get_podman +from pants.backend.docker.util_rules.binaries import ( + DockerBinary, + PodmanBinary, + get_docker, + get_podman, +) from pants.core.goals.package import PackageFieldSet from pants.core.goals.publish import ( CheckSkipRequest, @@ -148,6 +153,7 @@ async def push_docker_images( jobs: list[PublishPackages] = [] refs: list[str] = [] processes: list[Process | InteractiveProcess] = [] + binary: DockerBinary | PodmanBinary match options.push_engine: case DockerPushEngine.DOCKER: binary = await get_docker(**implicitly()) diff --git a/src/python/pants/backend/docker/subsystems/docker_options.py b/src/python/pants/backend/docker/subsystems/docker_options.py index 77e976dd922..b697ae4ecbd 100644 --- a/src/python/pants/backend/docker/subsystems/docker_options.py +++ b/src/python/pants/backend/docker/subsystems/docker_options.py @@ -5,9 +5,14 @@ import logging import sys -from typing import Any +from typing import Any, cast -from pants.backend.docker.engine_types import DockerBuildEngine, DockerEngines, DockerPushEngine, DockerRunEngine +from pants.backend.docker.engine_types import ( + DockerBuildEngine, + DockerEngines, + DockerPushEngine, + DockerRunEngine, +) from 
pants.backend.docker.package_types import DockerPushOnPackageBehavior from pants.backend.docker.registries import DockerRegistries from pants.core.util_rules.search_paths import ExecutableSearchPathsOptionMixin @@ -186,15 +191,17 @@ def env_vars(self) -> tuple[str, ...]: mutually_exclusive_group="engines", ) - def _experimental_enable_podman_warning[E: DockerBuildEngine | DockerRunEngine | DockerPushEngine](self, engine_type: type[E], engine_opt: str) -> E | None: + def _experimental_enable_podman_warning[ + E: DockerBuildEngine | DockerRunEngine | DockerPushEngine + ](self, engine_type: type[E], engine_opt: str) -> E: experimental_enable_podman = self.options.get("experimental_enable_podman", None) match experimental_enable_podman: case None: - return getattr(self.engine, engine_opt) + return cast(E, getattr(self.engine, engine_opt)) case True: - engine = engine_type.PODMAN + engine = cast(E, engine_type.PODMAN) case False: - engine = engine_type.DOCKER + engine = cast(E, engine_type.DOCKER) logger.warning( f'`[docker].experimental_enable_podman` is deprecated. Use `[docker.engine].{engine_opt} = "{engine.value}"` instead.' 
) diff --git a/src/python/pants/backend/docker/target_types.py b/src/python/pants/backend/docker/target_types.py index 10f1a342407..5c676ae95ef 100644 --- a/src/python/pants/backend/docker/target_types.py +++ b/src/python/pants/backend/docker/target_types.py @@ -359,7 +359,7 @@ def docker_build_option(cls) -> str: def docker_build_options( self, *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: - return super().buildctl_options(docker=docker, value_formatter=value_formatter) + return super().buildctl_options(docker=docker, value_formatter=value_formatter) # type: ignore[safe-super] class BuildctlOptionMultiValueFieldMixin(BuildctlOptionsFieldMixin, ABC): diff --git a/src/python/pants/backend/docker/target_types_test.py b/src/python/pants/backend/docker/target_types_test.py index cffc054859f..d3225b3fe5b 100644 --- a/src/python/pants/backend/docker/target_types_test.py +++ b/src/python/pants/backend/docker/target_types_test.py @@ -2,9 +2,11 @@ # Licensed under the Apache License, Version 2.0 (see LICENSE). 
import os from pathlib import Path +from unittest.mock import MagicMock import pytest +from pants.backend.docker.subsystems.docker_options import DockerOptions from pants.backend.docker.target_types import DockerImageBuildSecretsOptionField from pants.engine.internals.native_engine import Address @@ -36,6 +38,10 @@ def test_secret_path_resolvement(src: str, expected: str): secrets_option_field = DockerImageBuildSecretsOptionField( {"mysecret": src}, address=Address("") ) - values = list(secrets_option_field.option_values()) + values = list( + secrets_option_field.buildctl_option_values( + docker=MagicMock(spec=DockerOptions), value_formatter=lambda x: x + ) + ) assert values == [f"id=mysecret,src={expected}"] diff --git a/src/python/pants/backend/docker/util_rules/docker_binary_test.py b/src/python/pants/backend/docker/util_rules/docker_binary_test.py index 9dd062489ec..06685b96b32 100644 --- a/src/python/pants/backend/docker/util_rules/docker_binary_test.py +++ b/src/python/pants/backend/docker/util_rules/docker_binary_test.py @@ -61,7 +61,6 @@ def test_docker_binary_build_image(docker_path: str, docker: DockerBinary) -> No build_args=DockerBuildArgs.from_strings("arg1=2"), context_root="build/context", env=env, - use_buildx=False, extra_args=("--pull", "--squash"), ) @@ -114,26 +113,18 @@ def test_docker_binary_run_image(docker_path: str, docker: DockerBinary) -> None assert run_request.description == f"Running docker image {image_ref}" -@pytest.mark.parametrize("podman_enabled", [True, False]) -@pytest.mark.parametrize("podman_found", [True, False]) -def test_get_docker(rule_runner: RuleRunner, podman_enabled: bool, podman_found: bool) -> None: +def test_get_docker(rule_runner: RuleRunner) -> None: docker_options = create_subsystem( DockerOptions, - experimental_enable_podman=podman_enabled, tools=[], optional_tools=[], ) docker_options_env_aware = mock.MagicMock(spec=DockerOptions.EnvironmentAware) def mock_find_binary(request: BinaryPathRequest) -> 
BinaryPaths: - if request.binary_name == "podman" and podman_found: - return BinaryPaths("podman", paths=[BinaryPath("/bin/podman")]) - - elif request.binary_name == "docker": + if request.binary_name == "docker": return BinaryPaths("docker", [BinaryPath("/bin/docker")]) - - else: - return BinaryPaths(request.binary_name, ()) + return BinaryPaths(request.binary_name, ()) def mock_create_binary_shims(request: BinaryShimsRequest) -> BinaryShims: return BinaryShims(EMPTY_DIGEST, "cache_name") @@ -147,12 +138,8 @@ def mock_create_binary_shims(request: BinaryShimsRequest) -> BinaryShims: }, ) - if podman_enabled and podman_found: - assert result.path == "/bin/podman" - assert result.is_podman - else: - assert result.path == "/bin/docker" - assert not result.is_podman + assert isinstance(result, DockerBinary) + assert result.path == "/bin/docker" def test_get_docker_with_tools(rule_runner: RuleRunner) -> None: @@ -170,7 +157,6 @@ def mock_create_binary_shims(request: BinaryShimsRequest) -> BinaryShims: def run(tools: list[str], optional_tools: list[str]) -> None: docker_options = create_subsystem( DockerOptions, - experimental_enable_podman=False, tools=tools, optional_tools=optional_tools, ) diff --git a/src/python/pants/backend/docker/util_rules/docker_build_context_test.py b/src/python/pants/backend/docker/util_rules/docker_build_context_test.py index c6ec2cf58d9..dd4fd074345 100644 --- a/src/python/pants/backend/docker/util_rules/docker_build_context_test.py +++ b/src/python/pants/backend/docker/util_rules/docker_build_context_test.py @@ -17,8 +17,8 @@ from pants.backend.docker.subsystems.dockerfile_parser import DockerfileInfo from pants.backend.docker.target_types import DockerImageTarget from pants.backend.docker.util_rules import ( + binaries, dependencies, - docker_binary, docker_build_args, docker_build_context, docker_build_env, @@ -56,7 +56,7 @@ def create_rule_runner() -> RuleRunner: rules=[ *core_target_types_rules(), *dependencies.rules(), - 
*docker_binary.rules(), + *binaries.rules(), *docker_build_args.rules(), *docker_build_context.rules(), *docker_build_env.rules(), diff --git a/src/python/pants/backend/helm/util_rules/post_renderer.py b/src/python/pants/backend/helm/util_rules/post_renderer.py index 452b67e1747..73427249eba 100644 --- a/src/python/pants/backend/helm/util_rules/post_renderer.py +++ b/src/python/pants/backend/helm/util_rules/post_renderer.py @@ -17,7 +17,7 @@ get_docker_image_tags, ) from pants.backend.docker.util_rules import ( - docker_binary, + binaries, docker_build_args, docker_build_context, docker_build_env, @@ -166,7 +166,7 @@ def find_replacement(value: tuple[str, Address]) -> str | None: def rules(): return [ *collect_rules(), - *docker_binary.rules(), + *binaries.rules(), *docker_build_args.rules(), *docker_build_context.rules(), *docker_build_env.rules(), diff --git a/src/python/pants/ng/external_binary.py b/src/python/pants/ng/external_binary.py index 8cf9f8ef29c..9fb0b4b671f 100644 --- a/src/python/pants/ng/external_binary.py +++ b/src/python/pants/ng/external_binary.py @@ -14,7 +14,8 @@ class ExternalBinary(ContextualSubsystem): @option(help="Version of the binary to use", default=lambda cls: cls.version_default) - def version(self) -> str: ... + def version(self) -> str: + """Provided by the @option decorator.""" exe_help = softwrap( """ @@ -28,7 +29,8 @@ def exe_default(cls) -> str: raise NotImplementedError("Subclasses must implement to provide the default executable") @option(help=exe_help, default=lambda cls: cls.exe_default) - def exe(self) -> str: ... + def exe(self) -> str: + """Provided by the @option decorator.""" known_versions_help = softwrap( """ @@ -58,7 +60,8 @@ def exe(self) -> str: ... ) @option(help=known_versions_help, default=lambda cls: cls.known_versions_default) - def known_versions(self) -> str: ... 
+ def known_versions(self) -> str: + """Provided by the @option decorator.""" def get_download_request(self) -> ExternalToolRequest: known_versions = self.known_versions diff --git a/src/python/pants/option/option_types.py b/src/python/pants/option/option_types.py index 3d63090df8b..36adae0345a 100644 --- a/src/python/pants/option/option_types.py +++ b/src/python/pants/option/option_types.py @@ -785,6 +785,7 @@ def _convert_(self, val: Any) -> dict[str, _ValueT]: # Generic Dataclass Concrete Option Classes # ----------------------------------------------------------------------------------------------- + class DataclassOption(_OptionBase[_OptT, _DefaultT]): """A dataclass option. @@ -856,7 +857,7 @@ def __new__( instance = super().__new__( cls, flag_name, - default=default, + default=default, # type: ignore[arg-type] help=help, register_if=register_if, advanced=advanced, @@ -888,6 +889,7 @@ def get_option_type(self, subsystem_cls): ) return dataclass_type + # ----------------------------------------------------------------------------------------------- # "Specialized" Concrete Option Classes # ----------------------------------------------------------------------------------------------- From eed60f1c82cf806ac0f2ad3f488373512fa9a597 Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Tue, 24 Mar 2026 16:58:40 -0400 Subject: [PATCH 16/25] lazy tuesday vibes --- .../docker/goals/package_image_test.py | 146 +++++++++--------- .../backend/docker/goals/publish_test.py | 17 +- .../pants/backend/docker/target_types.py | 2 +- 3 files changed, 84 insertions(+), 81 deletions(-) diff --git a/src/python/pants/backend/docker/goals/package_image_test.py b/src/python/pants/backend/docker/goals/package_image_test.py index 45d8c5995bc..ab311aa3d08 100644 --- a/src/python/pants/backend/docker/goals/package_image_test.py +++ b/src/python/pants/backend/docker/goals/package_image_test.py @@ -13,10 +13,9 @@ import pytest +from pants.backend.docker.engine_types import DockerEngines from 
pants.backend.docker.goals.package_image import ( - DockerBuildTargetStageError, DockerImageBuildProcess, - DockerImageOptionValueError, DockerImageRefs, DockerImageTagValueError, DockerInfoV1, @@ -29,6 +28,7 @@ get_docker_image_build_process, get_image_refs, parse_image_id_from_buildkit_output, + parse_image_id_from_podman_build_output, rules, ) from pants.backend.docker.package_types import ( @@ -141,7 +141,7 @@ def build_context_mock(request: DockerBuildContextRequest) -> DockerBuildContext def _setup_docker_options(rule_runner: RuleRunner, options: dict | None) -> DockerOptions: """Setup DockerOptions with sensible defaults.""" - if options: + if options is not None: opts = options.copy() opts.setdefault("registries", {}) opts.setdefault("default_repository", "{name}") @@ -151,7 +151,7 @@ def _setup_docker_options(rule_runner: RuleRunner, options: dict | None) -> Dock opts.setdefault("build_hosts", None) opts.setdefault("build_verbose", False) opts.setdefault("build_no_cache", False) - opts.setdefault("use_buildx", False) + opts.setdefault("engine", DockerEngines()) opts.setdefault("env_vars", []) opts.setdefault("suggest_renames", True) opts.setdefault("push_on_package", DockerPushOnPackageBehavior.WARN) @@ -178,6 +178,7 @@ def assert_build_process( build_context_snapshot: Snapshot = EMPTY_SNAPSHOT, version_tags: tuple[str, ...] = (), image_refs: DockerImageRefs | None = None, + binary: DockerBinary | PodmanBinary = DockerBinary("/dummy/docker"), ) -> DockerImageBuildProcess: """Test helper for get_docker_image_build_process rule. 
@@ -214,17 +215,26 @@ def assert_build_process( ) docker_options = _setup_docker_options(rule_runner, options) + if isinstance(binary, PodmanBinary): + binary_mocks = { + "pants.backend.docker.util_rules.binaries.get_podman": lambda *args: binary, + } + else: + binary_mocks = { + "pants.backend.docker.util_rules.binaries.get_docker": lambda *args: binary, + } + result = run_rule_with_mocks( get_docker_image_build_process, rule_args=[ DockerPackageFieldSet.create(tgt), docker_options, - DockerBinary("/dummy/docker"), ], mock_calls={ "pants.backend.docker.util_rules.docker_build_context.create_docker_build_context": build_context_mock, "pants.engine.internals.graph.resolve_target": lambda _: WrappedTarget(tgt), "pants.backend.docker.goals.package_image.get_image_refs": lambda _: image_refs, + **binary_mocks, }, union_membership=_create_union_membership(), show_warnings=False, @@ -846,6 +856,7 @@ def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", "build", + "--output=type=docker", "--pull=False", "--tag", "env1:1.2.3", @@ -980,7 +991,6 @@ def mock_create_digest(request: CreateDigest) -> Digest: under_test_fs, docker_options, global_options, - DockerBinary("/dummy/docker"), KeepSandboxes.never, ], mock_calls={ @@ -1014,6 +1024,7 @@ def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", "build", + "--output=type=docker", "--pull=True", "--tag", "args1:latest", @@ -1045,6 +1056,7 @@ def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", "build", + "--output=type=docker", "--pull=False", "--squash", "--tag", @@ -1058,6 +1070,7 @@ def check_build_process_no_squash(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", "build", + "--output=type=docker", "--pull=False", "--tag", "args2:latest", @@ -1094,6 +1107,7 @@ def check_build_process(result: DockerImageBuildProcess): assert 
result.process.argv == ( "/dummy/docker", "build", + "--output=type=docker", "--pull=False", "--tag", "args1:1.2.3", @@ -1197,6 +1211,7 @@ def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", "build", + "--output=type=docker", "--pull=False", "--tag", "img1:latest", @@ -1249,6 +1264,7 @@ def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", "build", + "--output=type=docker", "--pull=False", "--secret", "id=system-secret,src=/var/run/secrets/mysecret", @@ -1288,6 +1304,7 @@ def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", "build", + "--output=type=docker", "--pull=False", "--ssh", "default", @@ -1328,6 +1345,7 @@ def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", "build", + "--output=type=docker", "--pull=False", "--no-cache", "--tag", @@ -1374,6 +1392,7 @@ def check_build_process(result: DockerImageBuildProcess): "docker:10.180.0.1", "--add-host", "docker2:10.180.0.2", + "--output=type=docker", "--pull=False", "--tag", "img1:latest", @@ -1406,7 +1425,6 @@ def test_docker_cache_to_option(rule_runner: RuleRunner) -> None: def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", - "buildx", "build", "--cache-to=type=local,dest=/tmp/docker/pants-test-cache", "--output=type=docker", @@ -1443,7 +1461,6 @@ def test_docker_cache_from_option(rule_runner: RuleRunner) -> None: def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", - "buildx", "build", "--cache-from=type=local,dest=/tmp/docker/pants-test-cache1", "--cache-from=type=local,dest=/tmp/docker/pants-test-cache2", @@ -1496,7 +1513,6 @@ def test_docker_output_option( def check_build_process(result: DockerImageBuildProcess) -> None: assert result.process.argv == ( "/dummy/docker", - "buildx", "build", 
expected_output_arg, "--pull=False", @@ -1549,7 +1565,6 @@ def test_docker_output_option_when_push_on_package_error( def check_build_process(result: DockerImageBuildProcess) -> None: assert result.process.argv == ( "/dummy/docker", - "buildx", "build", expected_output_arg, "--pull=False", @@ -1611,7 +1626,6 @@ def mock_get_build_process_success(field_set: DockerPackageFieldSet) -> DockerIm under_test_fs, docker_options, global_options, - DockerBinary("/dummy/docker"), KeepSandboxes.never, ], mock_calls={ @@ -1668,7 +1682,6 @@ def test_docker_output_option_when_push_on_package_warn( def check_build_process(result: DockerImageBuildProcess) -> None: assert result.process.argv == ( "/dummy/docker", - "buildx", "build", expected_output_arg, "--pull=False", @@ -1734,7 +1747,6 @@ def mock_create_digest(_request: CreateDigest) -> Digest: under_test_fs, docker_options, global_options, - DockerBinary("/dummy/docker"), KeepSandboxes.never, ], mock_calls={ @@ -1788,7 +1800,6 @@ def test_docker_output_option_when_push_on_package_ignore( def check_build_process(result: DockerImageBuildProcess) -> None: assert result.process.argv == ( "/dummy/docker", - "buildx", "build", expected_output_arg, "--pull=False", @@ -1852,7 +1863,6 @@ def mock_create_digest(_request: CreateDigest) -> Digest: under_test_fs, docker_options, global_options, - DockerBinary("/dummy/docker"), KeepSandboxes.never, ], mock_calls=mock_calls, @@ -1863,30 +1873,6 @@ def mock_create_digest(_request: CreateDigest) -> Digest: assert len(result.artifacts) == (1 if expected_output_arg else 0) -def test_docker_output_option_raises_when_no_buildkit(rule_runner: RuleRunner) -> None: - rule_runner.write_files( - { - "docker/test/BUILD": dedent( - """\ - docker_image( - name="img1", - output={"type": "image"} - ) - """ - ), - } - ) - - with pytest.raises( - DockerImageOptionValueError, - match=r"Buildx must be enabled via the Docker subsystem options in order to use this field.", - ): - assert_build_process( - 
rule_runner, - Address("docker/test", target_name="img1"), - ) - - def test_docker_build_network_option(rule_runner: RuleRunner) -> None: rule_runner.write_files( { @@ -1906,6 +1892,7 @@ def check_build_process(result: DockerImageBuildProcess): "/dummy/docker", "build", "--network=host", + "--output=type=docker", "--pull=False", "--tag", "img1:latest", @@ -1940,6 +1927,7 @@ def check_build_process(result: DockerImageBuildProcess): "/dummy/docker", "build", "--platform=linux/amd64,linux/arm64,linux/arm/v7", + "--output=type=docker", "--pull=False", "--tag", "img1:latest", @@ -1984,6 +1972,7 @@ def check_build_process(result: DockerImageBuildProcess): "build.host=tbs06", "--label", "build.job=13934", + "--output=type=docker", "--pull=False", "--tag", "img1:latest", @@ -2080,10 +2069,6 @@ def test_docker_build_fail_logs( rule_runner.write_files({"docker/test/BUILD": f"docker_image(context_root={context_root!r})"}) build_context_files = ("docker/test/Dockerfile", *build_context_files) build_context_snapshot = rule_runner.make_snapshot_of_empty_files(build_context_files) - suggest_renames_arg = ( - "--docker-suggest-renames" if suggest_renames else "--no-docker-suggest-renames" - ) - rule_runner.set_options([suggest_renames_arg]) # Step 1: Get the build process tgt = rule_runner.get_target(Address("docker/test")) @@ -2092,7 +2077,7 @@ def test_docker_build_fail_logs( build_context_mock = _create_build_context_mock( rule_runner, address, build_context_snapshot, copy_sources, (), () ) - docker_options = _setup_docker_options(rule_runner, None) + docker_options = _setup_docker_options(rule_runner, {"suggest_renames": suggest_renames}) global_options = rule_runner.request(GlobalOptions, []) # Get image refs @@ -2119,17 +2104,18 @@ def test_docker_build_fail_logs( # Step 2: Create the build process with the get_docker_image_build_process rule under_test_fs = DockerPackageFieldSet.create(tgt) + docker = DockerBinary("/dummy/docker") build_process = run_rule_with_mocks( 
get_docker_image_build_process, rule_args=[ under_test_fs, docker_options, - DockerBinary("/dummy/docker"), ], mock_calls={ "pants.backend.docker.util_rules.docker_build_context.create_docker_build_context": build_context_mock, "pants.engine.internals.graph.resolve_target": lambda _: WrappedTarget(tgt), "pants.backend.docker.goals.package_image.get_image_refs": lambda _: image_refs, + "pants.backend.docker.util_rules.binaries.get_docker": lambda *args: docker, }, show_warnings=False, ) @@ -2171,7 +2157,6 @@ def mock_execute_process(_process: Process) -> FallibleProcessResult: under_test_fs, docker_options, global_options, - DockerBinary("/dummy/docker"), KeepSandboxes.never, ], mock_calls={ @@ -2216,9 +2201,9 @@ def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", "build", + "--output=type=docker", "--pull=False", - "--target", - expected_target, + f"--target={expected_target}", "--tag", "image:latest", "--file", @@ -2236,6 +2221,7 @@ def check_build_process(result: DockerImageBuildProcess): def test_invalid_build_target_stage(rule_runner: RuleRunner) -> None: + """An invalid target_stage is passed through to Docker which will report the error.""" rule_runner.write_files( { "BUILD": "docker_image(name='image', target_stage='bad')", @@ -2249,17 +2235,27 @@ def test_invalid_build_target_stage(rule_runner: RuleRunner) -> None: } ) - err = ( - r"The 'target_stage' field in `docker_image` //:image was set to 'bad', but there is no " - r"such stage in `Dockerfile`\. Available stages: build, dev, prod\." 
- ) - with pytest.raises(DockerBuildTargetStageError, match=err): - assert_build_process( - rule_runner, - Address("", target_name="image"), - version_tags=("build latest", "dev latest", "prod latest"), + def check_build_process(result: DockerImageBuildProcess): + assert result.process.argv == ( + "/dummy/docker", + "build", + "--output=type=docker", + "--pull=False", + "--target=bad", + "--tag", + "image:latest", + "--file", + "Dockerfile", + ".", ) + assert_build_process( + rule_runner, + Address("", target_name="image"), + build_process_assertions=check_build_process, + version_tags=("build latest", "dev latest", "prod latest"), + ) + @pytest.mark.parametrize( "default_context_root, context_root, expected_context_root", @@ -2323,7 +2319,7 @@ def test_get_context_root( [ ( DockerBinary("/bin/docker", "1234"), - "", + None, "", "", ), @@ -2469,29 +2465,30 @@ def test_get_context_root( ), "", ), - # Podman - ( - PodmanBinary("/bin/podman", "abcd"), - "a85499e9039a4add9712f7ea96a4aa9f0edd57d1008c6565822561ceed927eee", - dedent( - """\ - STEP 5/5: COPY ./ . - COMMIT example - --> a85499e9039a - Successfully tagged localhost/example:latest - a85499e9039a4add9712f7ea96a4aa9f0edd57d1008c6565822561ceed927eee - """ - ), - "", - ), ], ) def test_parse_image_id_from_docker_build_output( - docker: DockerBinary, expected: str, stdout: str, stderr: str + docker: DockerBinary, expected: str | None, stdout: str, stderr: str ) -> None: assert expected == parse_image_id_from_buildkit_output(stdout.encode(), stderr.encode()) +def test_parse_image_id_from_podman_build_output() -> None: + stdout = dedent( + """\ + STEP 5/5: COPY ./ . 
+ COMMIT example + --> a85499e9039a + Successfully tagged localhost/example:latest + a85499e9039a4add9712f7ea96a4aa9f0edd57d1008c6565822561ceed927eee + """ + ) + assert ( + "a85499e9039a4add9712f7ea96a4aa9f0edd57d1008c6565822561ceed927eee" + == parse_image_id_from_podman_build_output(stdout.encode(), b"") + ) + + ImageRefTest = namedtuple( "ImageRefTest", "docker_image, registries, default_repository, expect_refs, expect_error", @@ -2811,6 +2808,7 @@ def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", "build", + "--output=type=docker", "--pull=False", *tag_flags, "--file", diff --git a/src/python/pants/backend/docker/goals/publish_test.py b/src/python/pants/backend/docker/goals/publish_test.py index 4e8f2ede053..8c27a5c3847 100644 --- a/src/python/pants/backend/docker/goals/publish_test.py +++ b/src/python/pants/backend/docker/goals/publish_test.py @@ -9,6 +9,7 @@ import pytest +from pants.backend.docker.engine_types import DockerEngines from pants.backend.docker.goals.package_image import ( DockerImageBuildProcess, DockerImageRefs, @@ -27,7 +28,7 @@ from pants.backend.docker.registries import DockerRegistryOptions from pants.backend.docker.subsystems.docker_options import DockerOptions from pants.backend.docker.target_types import DockerImageTarget -from pants.backend.docker.util_rules.binaries import DockerBinary +from pants.backend.docker.util_rules.binaries import DockerBinary, PodmanBinary from pants.core.goals.package import BuiltPackage from pants.core.goals.publish import ( CheckSkipResult, @@ -96,16 +97,17 @@ def run_publish( options: dict | None = None, env_vars: list[str] | None = None, mock_calls: dict | None = None, -) -> tuple[PublishProcesses, DockerBinary]: + binary: DockerBinary | PodmanBinary = DockerBinary("/dummy/docker"), +) -> tuple[PublishProcesses, DockerBinary | PodmanBinary]: opts = options or {} opts.setdefault("registries", {}) opts.setdefault("default_repository", 
"{directory}/{name}") opts.setdefault("publish_noninteractively", False) + opts.setdefault("engine", DockerEngines()) docker_options = create_subsystem(DockerOptions, **opts) tgt = cast(DockerImageTarget, rule_runner.get_target(address)) fs = PublishDockerImageFieldSet.create(tgt) packages = build(tgt, docker_options) - docker = DockerBinary("/dummy/docker") mock_env_aware = MagicMock(spec=DockerOptions.EnvironmentAware) if env_vars: mock_env_aware.env_vars = env_vars @@ -113,14 +115,17 @@ def run_publish( mock_calls = mock_calls or { "pants.core.util_rules.env_vars.environment_vars_subset": lambda *args: rule_runner.request( EnvironmentVars, args - ) + ), + "pants.backend.docker.util_rules.binaries.get_podman" + if isinstance(binary, PodmanBinary) + else "pants.backend.docker.util_rules.binaries.get_docker": lambda *args: binary, } result = run_rule_with_mocks( push_docker_images, - rule_args=[fs._request(packages), docker, docker_options, mock_env_aware], + rule_args=[fs._request(packages), docker_options, mock_env_aware], mock_calls=mock_calls, ) - return result, docker + return result, binary def assert_publish( diff --git a/src/python/pants/backend/docker/target_types.py b/src/python/pants/backend/docker/target_types.py index 5c676ae95ef..b5af337cc0e 100644 --- a/src/python/pants/backend/docker/target_types.py +++ b/src/python/pants/backend/docker/target_types.py @@ -359,7 +359,7 @@ def docker_build_option(cls) -> str: def docker_build_options( self, *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: - return super().buildctl_options(docker=docker, value_formatter=value_formatter) # type: ignore[safe-super] + return self.buildctl_options(docker=docker, value_formatter=value_formatter) class BuildctlOptionMultiValueFieldMixin(BuildctlOptionsFieldMixin, ABC): From c0df6794542189ede383c6add09c115d3679001d Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Tue, 24 Mar 2026 17:03:23 -0400 Subject: [PATCH 17/25] typechecking --- 
.../backend/docker/goals/package_image_test.py | 14 +++----------- .../pants/backend/docker/goals/publish_test.py | 2 +- 2 files changed, 4 insertions(+), 12 deletions(-) diff --git a/src/python/pants/backend/docker/goals/package_image_test.py b/src/python/pants/backend/docker/goals/package_image_test.py index ab311aa3d08..f52acaf147a 100644 --- a/src/python/pants/backend/docker/goals/package_image_test.py +++ b/src/python/pants/backend/docker/goals/package_image_test.py @@ -214,16 +214,6 @@ def assert_build_process( rule_runner, address, build_context_snapshot, copy_sources, copy_build_args, version_tags ) docker_options = _setup_docker_options(rule_runner, options) - - if isinstance(binary, PodmanBinary): - binary_mocks = { - "pants.backend.docker.util_rules.binaries.get_podman": lambda *args: binary, - } - else: - binary_mocks = { - "pants.backend.docker.util_rules.binaries.get_docker": lambda *args: binary, - } - result = run_rule_with_mocks( get_docker_image_build_process, rule_args=[ @@ -234,7 +224,9 @@ def assert_build_process( "pants.backend.docker.util_rules.docker_build_context.create_docker_build_context": build_context_mock, "pants.engine.internals.graph.resolve_target": lambda _: WrappedTarget(tgt), "pants.backend.docker.goals.package_image.get_image_refs": lambda _: image_refs, - **binary_mocks, + "pants.backend.docker.util_rules.binaries.get_podman" + if isinstance(binary, PodmanBinary) + else "pants.backend.docker.util_rules.binaries.get_docker": lambda *args: binary, }, union_membership=_create_union_membership(), show_warnings=False, diff --git a/src/python/pants/backend/docker/goals/publish_test.py b/src/python/pants/backend/docker/goals/publish_test.py index 8c27a5c3847..7738407616d 100644 --- a/src/python/pants/backend/docker/goals/publish_test.py +++ b/src/python/pants/backend/docker/goals/publish_test.py @@ -460,7 +460,7 @@ def test_docker_push_env(rule_runner: RuleRunner) -> None: def test_docker_push_on_package(rule_runner: RuleRunner) -> 
None: """Test push_docker_images when pushes_on_package() returns True.""" - docker = DockerBinary("/dummy/docker") + docker: DockerBinary | PodmanBinary = DockerBinary("/dummy/docker") # Create mock build process that will be returned by get_docker_image_build_process mock_tags = ("push-on-package/push-on-package:latest",) From 23bc0bfa57f13b8a2d7649171ce82b9c3b2a4101 Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Wed, 25 Mar 2026 12:37:13 -0400 Subject: [PATCH 18/25] use claude md instead of skill --- .claude/skills/pants-contributor/SKILL.md | 200 ------- .../skills/pants-contributor/architecture.md | 358 ----------- .../pants-contributor/build-commands.md | 201 ------- .../pants-contributor/style-conventions.md | 225 ------- CLAUDE.md | 566 ++++++++++++++++++ 5 files changed, 566 insertions(+), 984 deletions(-) delete mode 100644 .claude/skills/pants-contributor/SKILL.md delete mode 100644 .claude/skills/pants-contributor/architecture.md delete mode 100644 .claude/skills/pants-contributor/build-commands.md delete mode 100644 .claude/skills/pants-contributor/style-conventions.md create mode 100644 CLAUDE.md diff --git a/.claude/skills/pants-contributor/SKILL.md b/.claude/skills/pants-contributor/SKILL.md deleted file mode 100644 index a620d6c2136..00000000000 --- a/.claude/skills/pants-contributor/SKILL.md +++ /dev/null @@ -1,200 +0,0 @@ ---- -name: pants-contributor -description: Expert guide for contributing to the Pants build system repo. Use when working on Pants source code, writing or running tests, creating or modifying backends/plugins, interacting with the build system, understanding the codebase architecture, or when the user needs help with any Pants development workflow. Triggers on questions about BUILD files, rules, targets, goals, running tests, linting, formatting, or any Pants build system interaction. 
-allowed-tools: Read, Grep, Glob, Bash, Edit, Write, Agent ---- - -# Pants Build System Contributor Guide - -You are an expert contributor to the [Pants](https://github.com/pantsbuild/pants) build system. This is the Pants repo itself -- a self-hosting build system where `./pants` runs Pants from source. You must follow the conventions, architecture, and workflows specific to this codebase. - -For detailed reference on specific topics, see: -- [Build System Commands](./build-commands.md) - How to run tests, lint, format, typecheck -- [Architecture Guide](./architecture.md) - Codebase structure, rules API, backends -- [Style and Conventions](./style-conventions.md) - Code style, patterns, PR workflow - -## Critical Rules - -### ALWAYS use Pants, NEVER use raw Python tools - -This is the most important rule. **Never** run `pytest`, `mypy`, `black`, `isort`, `flake8`, `ruff`, or any other Python tool directly. All Python tools must be invoked through Pants: - -```bash -# CORRECT - always use Pants -pants test src/python/pants/backend/python/goals/pytest_runner_test.py -pants lint src/python/pants/backend/python/goals/pytest_runner.py -pants fmt src/python/pants/backend/python/goals/pytest_runner.py -pants check src/python/pants/backend/python/goals/pytest_runner.py - -# WRONG - never do this -pytest src/python/pants/backend/python/goals/pytest_runner_test.py -mypy src/python/pants/backend/python/goals/pytest_runner.py -black src/python/pants/backend/python/goals/pytest_runner.py -ruff check src/python/pants/backend/python/goals/pytest_runner.py -``` - -### Running Pants from source - -In this repo, `./pants` is a special bootstrap script that runs Pants from the local source tree. Use `pants` (which resolves to the `./pants` script) for all operations. The first run compiles the Rust engine and may take several minutes. - -### Test file naming - -Test files must be named `*_test.py` (not `test_*.py`). Pants discovers tests by this suffix. 
- -### BUILD file conventions - -Every directory with source code needs a `BUILD` file. Common patterns in this repo: - -```python -# Standard source + test pattern -python_sources() -python_tests(name="tests") - -# With test utilities (conftest, fixtures, helpers) -python_test_utils(name="test_utils") - -# With overrides for specific files -python_sources( - overrides={ - "special.py": {"dependencies": ["//some/dep"]}, - }, -) -``` - -All BUILD files must have the copyright header: -```python -# Copyright 2024 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). -``` - -## Quick Reference - -### Common commands - -| Task | Command | -|------|---------| -| Run specific test file | `pants test path/to/file_test.py` | -| Run tests for changed files | `pants --changed-since=HEAD test` | -| Run tests with transitive deps | `pants --changed-since=HEAD --changed-dependents=transitive test` | -| Run a single test function | `pants test path/to/file_test.py -- -k test_function_name` | -| Debug a test (interactive) | `pants test --debug path/to/file_test.py` | -| Debug with specific test | `pants test --debug path/to/file_test.py -- -k test_name` | -| Lint changed files | `pants --changed-since=HEAD lint` | -| Format changed files | `pants --changed-since=HEAD fmt` | -| Type-check changed files | `pants --changed-since=HEAD check` | -| List all targets in dir | `pants list path/to/dir:` | -| Show dependencies | `pants dependencies path/to/target` | -| Show dependents | `pants dependents path/to/target` | -| Show target info | `pants peek path/to/target` | -| Generate BUILD files | `pants tailor` | -| Validate BUILD files | `pants lint --only=ruff-format '**BUILD'` | -| Run pre-push checks | `build-support/githooks/pre-push` | - -### Target address syntax - -- `path/to/dir:target_name` - specific target -- `path/to/dir:` - all targets in directory -- `path/to/dir::` - all targets recursively -- `path/to/file.py` - 
file address (inferred target) -- `::` - everything in the repo - -### Key directories - -| Path | Purpose | -|------|---------| -| `src/python/pants/` | Core Pants Python source code | -| `src/python/pants/backend/` | Language-specific backends (python, go, java, docker, etc.) | -| `src/python/pants/core/` | Core goals (test, lint, fmt, check, package, run) | -| `src/python/pants/engine/` | The Pants engine (rules, targets, processes, fs) | -| `src/rust/engine/` | Rust engine implementation | -| `pants-plugins/` | In-repo plugins (internal_plugins, pants_explorer) | -| `3rdparty/python/` | Third-party Python dependency declarations and lockfiles | -| `build-support/` | Build scripts, CI helpers, git hooks | -| `testprojects/` | Test fixture projects used by integration tests | -| `docs/docs/` | Documentation source (Docusaurus MDX) | - -### Python resolves (lockfiles) - -The repo uses multiple Python resolves configured in `pants.toml`: -- `python-default` - Main resolve for Pants source code -- `pytest` - Test runner dependencies -- `mypy` - Type checker dependencies -- `flake8` - Linter dependencies - -To regenerate lockfiles: `pants generate-lockfiles --resolve=` - -## Writing Rules (Plugin Code) - -### Rule basics - -Rules are async Python functions decorated with `@rule`: - -```python -from pants.engine.rules import rule, collect_rules - -@rule -async def my_rule(request: MyRequest) -> MyResult: - # Pure function - no side effects! - # Use engine APIs for processes, filesystem, etc. - return MyResult(...) - -def rules(): - return collect_rules() -``` - -### Key patterns - -- **Frozen dataclasses** for all request/result types: `@dataclass(frozen=True)` -- **`await` engine intrinsics** for processes, filesystem ops, etc. 
-- **`concurrently()`** for parallel rule execution (never `await` in a loop) -- **`**implicitly()`** for implicit parameter injection -- **`collect_rules()`** to auto-discover rules in a module -- **Register in `register.py`** with `rules()` and `target_types()` functions - -### Testing rules - -Tests use `RuleRunner` for integration testing or `run_rule_with_mocks` for unit testing: - -```python -from pants.testutil.rule_runner import RuleRunner, QueryRule - -@pytest.fixture -def rule_runner() -> RuleRunner: - return RuleRunner( - rules=[*my_module.rules(), QueryRule(Output, [Input])], - target_types=[MyTarget], - ) - -def test_my_rule(rule_runner: RuleRunner) -> None: - rule_runner.write_files({ - "src/BUILD": "my_target(name='t', source='f.py')", - "src/f.py": "...", - }) - result = rule_runner.request(Output, [Input(...)]) - assert result == expected -``` - -### Backend structure - -Each backend lives in `src/python/pants/backend//` with: -- `register.py` - Entry point: exports `rules()`, `target_types()`, `build_file_aliases()` -- `target_types.py` - Target and field definitions -- `goals/` - Goal implementations (test, lint, fmt, etc.) 
-- `util_rules/` - Shared utility rules -- `subsystems/` - Tool subsystem definitions -- `dependency_inference/` - Dep inference rules - -## Style Guide Essentials - -- Use **f-strings** (not `.format()` or `%`) -- Prefer **conditional expressions** (ternary) -- Prefer **early returns** -- Use **collection literals** and **unpacking** for merging -- Prefer **comprehensions** over loops for creating collections -- Use **frozen dataclasses** (`@dataclass(frozen=True)`) -- **All functions must have type annotations** (parameters + return type) -- Use **`cast()`** over variable annotations for type overrides -- Use **Pytest-style** tests (not `unittest`) -- Comments: complete sentences, end with period, max 100 chars, space after `#` -- TODOs: link to GitHub issue `# TODO(#1234): Description.` -- Use `softwrap` helper for multiline help strings diff --git a/.claude/skills/pants-contributor/architecture.md b/.claude/skills/pants-contributor/architecture.md deleted file mode 100644 index edb4e032c6e..00000000000 --- a/.claude/skills/pants-contributor/architecture.md +++ /dev/null @@ -1,358 +0,0 @@ -# Pants Codebase Architecture - -## Overview - -Pants is a build system with a Python frontend and a Rust engine backend. The codebase is self-hosting: the `./pants` script in the repo root runs Pants from the local sources, compiling the Rust engine as needed. - -## Source Code Layout - -``` -src/ - python/ - pants/ # Core Pants package - backend/ # Language-specific backends - python/ # Python support (goals, rules, target types) - go/ # Go support - java/ # Java support - scala/ # Scala support - docker/ # Docker support - shell/ # Shell support - javascript/ # JavaScript/TypeScript support - helm/ # Helm chart support - k8s/ # Kubernetes support - cc/ # C/C++ support - rust/ # Rust support - experimental/ # Experimental backends - build_files/ # BUILD file formatting/fixing - codegen/ # Code generation (protobuf, thrift, etc.) 
- project_info/ # Introspection goals (filedeps, peek, etc.) - plugin_development/ # Plugin dev support - core/ # Core goals and subsystems - goals/ # test, lint, fmt, check, package, run, etc. - target_types.py # Core target types (files, resources, etc.) - util_rules/ # Core utility rules - engine/ # The Python side of the engine - rules.py # @rule, collect_rules, concurrently, implicitly - target.py # Target, Field base classes - process.py # Process execution types - fs.py # Filesystem types (Digest, Snapshot, etc.) - intrinsics.py # Engine intrinsic functions - unions.py # Union/polymorphism system - internals/ # Internal engine implementation - bin/ # Entry points (pants_loader, native_client) - option/ # Options/configuration system - build_graph/ # Build graph construction - goal/ # Goal infrastructure - help/ # Help system - pantsd/ # Pants daemon - util/ # Utility modules - testutil/ # Test utilities (RuleRunner, etc.) - rule_runner.py # RuleRunner for integration tests - pants_integration_test.py # Full integration test harness - rust/ - engine/ # Rust engine implementation - Cargo.toml # Rust workspace root - rule_graph/ # Rule graph construction - process_execution/ # Process sandboxing and execution - fs/ # Filesystem implementation - watch/ # File watching - ... 
- -pants-plugins/ # In-repo plugins - internal_plugins/ # Plugins used by the Pants repo itself - releases/ # Release management plugin - test_lockfile_fixtures/ # Test lockfile fixture plugin - pants_explorer/ # Pants Explorer web UI backend - -3rdparty/ - python/ # Python dependency declarations - requirements.txt # Main requirements - user_reqs.lock # Main lockfile (python-default resolve) - pytest.lock # Pytest resolve lockfile - mypy.lock # MyPy resolve lockfile - flake8.lock # Flake8 resolve lockfile - -build-support/ - bin/ # Build support scripts - generate_builtin_lockfiles.py # Regenerate tool lockfiles - generate_completions.py # Generate shell completions - flake8/ # Flake8 configuration and plugins - githooks/ # Git hooks (pre-push) - migration-support/ # Migration helpers - preambles/ # License header templates - -testprojects/ # Test fixture projects - src/python/ # Python test projects - src/go/ # Go test projects - src/java/ # Java test projects - src/shell/ # Shell test projects - -docs/ - docs/ # Documentation (Docusaurus) - introduction/ # What is Pants, key concepts - getting-started/ # Installation, first steps - using-pants/ # User guide - writing-plugins/ # Plugin development guide - python/ # Python-specific docs - contributions/ # Contributor guide - development/ # Dev setup, style guide, architecture - releases/ # Release process -``` - -## Engine Architecture - -### The Rule Graph - -Pants's engine is built around a **rule graph** - a directed graph where: -- **Rules** (`@rule` decorated async functions) are internal nodes -- **Queries** are entry points (roots) -- **Params** are typed, hashable input values (leaves) - -Key properties: -- Rules are **pure functions** mapping input types to output types -- The engine handles **memoization**, **concurrency**, and **caching** -- Type annotations are used at runtime for dependency injection -- The engine uses **exact type matching** (no subtyping) - -### Rule Execution - -```python -from 
pants.engine.rules import rule, collect_rules, concurrently, implicitly -from pants.engine.intrinsics import execute_process - -@rule -async def my_rule(request: MyRequest) -> MyResult: - # Await other rules (engine manages execution) - intermediate = await some_other_rule(request.field, **implicitly()) - - # Run external processes through the engine - process_result = await execute_process( - Process(argv=["/bin/echo", "hello"], description="Echo"), - **implicitly() - ) - - # Parallel execution with concurrently() - results = await concurrently( - process_thing(item, **implicitly()) for item in items - ) - - return MyResult(data=process_result.stdout) -``` - -### Union Rules (Polymorphism) - -Since the engine uses exact type matching, polymorphism is achieved through **unions**: - -```python -from pants.engine.unions import UnionRule, union - -@union -class LintRequest: - pass - -@dataclass(frozen=True) -class MyLinterRequest(LintRequest): - ... - -# Register the union member -def rules(): - return [ - *collect_rules(), - UnionRule(LintRequest, MyLinterRequest), - ] -``` - -### Rule Decorators - -| Decorator | Purpose | -|-----------|---------| -| `@rule` | Standard async rule (cached, pure) | -| `@goal_rule` | Top-level CLI goal entry point | -| `@uncacheable_rule` | Rule that should not be memoized | - -### Key Engine Types - -| Type | Module | Purpose | -|------|--------|---------| -| `Process` | `pants.engine.process` | Run an external process | -| `ProcessResult` | `pants.engine.process` | Process execution result | -| `Digest` | `pants.engine.fs` | Content-addressed file tree | -| `Snapshot` | `pants.engine.fs` | Digest with file/dir listing | -| `MergeDigests` | `pants.engine.fs` | Combine multiple digests | -| `CreateDigest` | `pants.engine.fs` | Create files from content | -| `Target` | `pants.engine.target` | A build target | -| `Field` | `pants.engine.target` | A target field | -| `FieldSet` | `pants.engine.target` | Subset of fields for a rule | -| 
`Address` | `pants.engine.addresses` | Target address | - -## Backend Structure - -Each backend in `src/python/pants/backend//` follows this pattern: - -### register.py (entry point) - -```python -from pants.backend.foo import target_types_rules -from pants.backend.foo.goals import test, lint -from pants.backend.foo.target_types import FooTarget, FooTestTarget - -def rules(): - return [ - *target_types_rules.rules(), - *test.rules(), - *lint.rules(), - ] - -def target_types(): - return [FooTarget, FooTestTarget] -``` - -The backend is activated in `pants.toml` via `backend_packages`: - -```toml -[GLOBAL] -backend_packages = ["pants.backend.foo"] -``` - -### Target types - -```python -from pants.engine.target import ( - Target, - SingleSourceField, - MultipleSourcesField, - Dependencies, - StringField, -) - -class FooSourceField(SingleSourceField): - expected_file_extensions = (".foo",) - -class FooTarget(Target): - alias = "foo_source" - core_fields = (FooSourceField, Dependencies) - help = "A Foo source file." -``` - -### Goal rules - -```python -from pants.core.goals.test import TestResult, TestRequest - -@dataclass(frozen=True) -class FooTestFieldSet(TestRequest.FieldSet): - required_fields = (FooTestSourceField,) - source: FooTestSourceField - -class FooTestRequest(TestRequest): - tool_subsystem = FooSubsystem - field_set_type = FooTestFieldSet - -@rule -async def run_foo_test(batch: FooTestRequest.Batch) -> TestResult: - ... - -def rules(): - return [ - *collect_rules(), - *FooTestRequest.rules(), - ] -``` - -## Configuration - -### pants.toml - -The main configuration file. 
Key sections: -- `[GLOBAL]` - Backend packages, pythonpath, sandboxing -- `[source]` - Source root patterns -- `[python]` - Python interpreter constraints, resolves, pip version -- `[pytest]` - Test runner args, resolve -- `[mypy]` - Type checker args, resolve -- `[test]` - Test timeout, env vars -- `[flake8]`, `[shellcheck]`, `[shfmt]` - Linter configs - -### pants.ci.toml - -CI-specific overrides that layer on top of `pants.toml`. - -### Python version - -The repo currently targets **Python 3.14** (`interpreter_constraints = ["==3.14.*"]` in `pants.toml`). - -## Testing Approaches - -### 1. Unit tests (plain Python) - -Test pure Python functions without the engine: - -```python -def test_my_helper() -> None: - assert my_function("input") == "expected" -``` - -### 2. run_rule_with_mocks (rule unit tests) - -Test rule logic with mocked dependencies: - -```python -from pants.testutil.rule_runner import run_rule_with_mocks - -def test_my_rule() -> None: - result = run_rule_with_mocks( - my_rule, - rule_args=[MyRequest(...)], - mock_calls={ - "my.module.some_dependency": lambda req: MockResult(...), - }, - ) - assert result == expected -``` - -### 3. RuleRunner (rule integration tests) - -Test rules with a real engine and isolated filesystem: - -```python -from pants.testutil.rule_runner import RuleRunner, QueryRule - -@pytest.fixture -def rule_runner() -> RuleRunner: - return RuleRunner( - rules=[ - *my_module.rules(), - QueryRule(MyOutput, [MyInput]), - ], - target_types=[MyTarget], - ) - -def test_integration(rule_runner: RuleRunner) -> None: - rule_runner.write_files({ - "src/BUILD": "my_target(source='f.py')", - "src/f.py": "content", - }) - rule_runner.set_options(["--my-option=value"]) - result = rule_runner.request(MyOutput, [MyInput(...)]) - assert result.field == expected -``` - -### 4. 
run_pants() (full integration tests) - -Test with a real Pants process: - -```python -from pants.testutil.pants_integration_test import run_pants, setup_tmpdir - -def test_end_to_end() -> None: - sources = { - "src/BUILD": "python_sources()", - "src/app.py": "print('hello')", - } - with setup_tmpdir(sources) as tmpdir: - result = run_pants([ - "--backend-packages=['pants.backend.python']", - "lint", - f"{tmpdir}/src:", - ]) - result.assert_success() -``` diff --git a/.claude/skills/pants-contributor/build-commands.md b/.claude/skills/pants-contributor/build-commands.md deleted file mode 100644 index f8fb1c71e28..00000000000 --- a/.claude/skills/pants-contributor/build-commands.md +++ /dev/null @@ -1,201 +0,0 @@ -# Pants Build Commands Reference - -## Running Tests - -### Basic test commands - -```bash -# Test a specific file -pants test src/python/pants/backend/python/goals/pytest_runner_test.py - -# Test all targets in a directory -pants test src/python/pants/backend/python/goals: - -# Test recursively -pants test src/python/pants/backend/python/goals:: - -# Test changed files only -pants --changed-since=HEAD test - -# Test changed files and their transitive dependents -pants --changed-since=HEAD --changed-dependents=transitive test - -# Alias for the above -pants --all-changed test -``` - -### Test options - -```bash -# Run a specific test function -pants test path/to/file_test.py -- -k test_function_name - -# Run tests matching a pattern -pants test path/to/file_test.py -- -k "test_foo or test_bar" - -# Run in debug mode (interactive, for debuggers) -pants test --debug path/to/file_test.py - -# Run with verbose output -pants test path/to/file_test.py -- -vvs - -# Run with no timeout (useful for debugging) -pants test --timeout-default=0 path/to/file_test.py - -# Show test output even on success -pants test --output=all path/to/file_test.py - -# Force re-run (skip cache) -pants --no-local-cache test path/to/file_test.py -``` - -### Test configuration - -Tests are 
configured in `pants.toml`: -- Default args: `--no-header --noskip -vv` -- Default timeout: 60 seconds -- Pytest is installed from the `pytest` resolve -- Test execution slot variable: `TEST_EXECUTION_SLOT` - -## Linting - -```bash -# Lint specific files -pants lint src/python/pants/backend/python/goals/pytest_runner.py - -# Lint changed files -pants --changed-since=HEAD lint - -# Lint with a specific linter only -pants lint --only=flake8 path/to/file.py -pants lint --only=ruff-check path/to/file.py -pants lint --only=ruff-format path/to/file.py - -# Lint BUILD files -pants lint --only=ruff-format '**BUILD' -``` - -Active linters (configured in `pants.toml`): -- `flake8` - Python linter (config at `build-support/flake8/.flake8`) -- `ruff check` - Fast Python linter -- `ruff format` - Python and BUILD file formatter -- `shellcheck` - Shell script linter -- `shfmt` - Shell script formatter -- `hadolint` - Dockerfile linter - -## Formatting - -```bash -# Format specific files -pants fmt src/python/pants/backend/python/goals/pytest_runner.py - -# Format changed files -pants --changed-since=HEAD fmt - -# Format everything (use sparingly) -pants fmt :: -``` - -## Type Checking - -```bash -# Type-check specific files -pants check src/python/pants/backend/python/goals/pytest_runner.py - -# Type-check changed files -pants --changed-since=HEAD check - -# Type-check with transitive dependents -pants --changed-since=HEAD --changed-dependents=transitive check -``` - -MyPy is configured in `pants.toml` and installed from the `mypy` resolve. 
- -## Dependency Management - -```bash -# Show dependencies of a target -pants dependencies src/python/pants/backend/python: - -# Show reverse dependencies (what depends on this) -pants dependents src/python/pants/backend/python: - -# Generate/update lockfiles -pants generate-lockfiles --resolve=python-default -pants generate-lockfiles --resolve=pytest -pants generate-lockfiles --resolve=mypy - -# Generate all lockfiles -pants generate-lockfiles -``` - -Third-party Python dependencies are declared in `3rdparty/python/` using `python_requirements()` targets referencing `requirements.txt` files. - -## Introspection - -```bash -# List targets in a directory -pants list src/python/pants/backend/python: - -# Show detailed target info (as JSON) -pants peek src/python/pants/backend/python/goals:tests - -# Show file dependencies -pants filedeps src/python/pants/backend/python/goals: - -# Show the paths between two targets -pants paths --from=src/python/pants/engine --to=src/python/pants/core - -# Count lines of code -pants count-loc :: -``` - -## BUILD File Management - -```bash -# Auto-generate BUILD files for new source files -pants tailor - -# Check for BUILD file issues -pants lint '**BUILD' - -# Update BUILD file formatting -pants fmt '**BUILD' - -# Update BUILD files for deprecations -pants update-build-files :: -``` - -## Running Scripts - -```bash -# Run a Python script through Pants -pants run src/python/pants/backend/python/providers/python_build_standalone/scripts/generate_urls.py - -# Run build support scripts -pants run build-support/bin/generate_builtin_lockfiles.py -- -``` - -## Pre-push Checks - -Before pushing, run the pre-push hook to catch common issues: - -```bash -build-support/githooks/pre-push -``` - -Or run format + lint on changed files: - -```bash -pants --changed-since=HEAD fmt -pants --changed-since=HEAD lint -pants --changed-since=HEAD check -``` - -## Performance Tips - -- Use `--changed-since=HEAD` to only operate on changed files -- Use 
`pantsd` (enabled by default) for faster startup -- For benchmarking: `hyperfine --warmup=1 --runs=5 'pants '` -- Cold cache benchmark: `hyperfine --runs=5 'pants --no-pantsd --no-local-cache '` -- To clear caches: restart pantsd with `pants --no-pantsd ` or kill it diff --git a/.claude/skills/pants-contributor/style-conventions.md b/.claude/skills/pants-contributor/style-conventions.md deleted file mode 100644 index 4f6e89c64b9..00000000000 --- a/.claude/skills/pants-contributor/style-conventions.md +++ /dev/null @@ -1,225 +0,0 @@ -# Style Guide and Conventions - -## Code Style - -### Python style - -- **f-strings**: Use f-strings, not `.format()` or `%` -- **Ternary expressions**: Prefer `x = "a" if cond else "b"` over if/else blocks -- **Early returns**: Prefer guard clauses, avoid deep nesting -- **Collection literals**: Use `{a}`, `(a, b)`, `{"k": v}` not constructors -- **Unpacking for merging**: `[*l1, *l2, "elem"]` not `l1 + l2 + ["elem"]` -- **Comprehensions**: Prefer over `map`/`filter` and explicit loops -- **Frozen dataclasses**: Always use `@dataclass(frozen=True)` for engine types -- **Type annotations**: Required on all functions (params + return type) -- **`cast()` over annotations**: Prefer `x = cast(str, untyped())` over `x: str = untyped()` -- **Pytest style**: Never use `unittest.TestCase`; use plain functions with `assert` -- **Error codes in type:ignore**: `# type: ignore[assignment]` not `# type: ignore` -- **Protocols for params**: Use `Iterable`, `Sequence`, `Mapping` not `List`, `Dict` in function params -- **Precise return types**: Return types should be concrete (`list[str]`, not `Iterable[str]`) - -### Comments - -- Must start with `# ` (space after hash) -- Must be complete sentences ending with a period -- Max 100 characters per line -- Explain **why**, not **what** -- TODOs must reference a GitHub issue: `# TODO(#1234): Description.` - -### Help strings - -For subsystem/target help strings that render as documentation: -- Use 
`softwrap()` for multiline strings -- Use `help_text()` for `Field`/`Target` subclasses if needed for mypy -- Use backticks for config sections, CLI args, target names, inline code -- Use 2-space indentation for bullet/numbered lists (or `bullet_list()`) -- Never use indentation for code blocks; use triple-backtick blocks only -- Text in angle brackets (``) will be ignored unless wrapped in backticks - -### Imports - -- Imports are auto-sorted by `isort` (via ruff) -- Prefer absolute imports -- Group: stdlib, third-party, pants imports - -### File headers - -All source files must have this copyright header: - -```python -# Copyright 2024 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). -``` - -BUILD files use the same header (configured in `[tailor]` in `pants.toml`). - -## Engine-Specific Conventions - -### Types for the rule graph - -- Rule params and returns must be **hashable** and **immutable** -- Use `tuple` not `list`, `FrozenDict` not `dict`, `FrozenOrderedSet` not `set` -- Use `Collection[T]` to newtype a `tuple` -- Use `DeduplicatedCollection[T]` to newtype a `FrozenOrderedSet` -- The engine uses **exact type matching** (no subtype consideration) -- Newtype freely to disambiguate: `class Name(str): pass` - -### Rule patterns - -```python -# Standard rule with collect_rules -from pants.engine.rules import collect_rules, rule - -@rule -async def my_rule(request: MyRequest) -> MyResult: - ... 
- -def rules(): - return collect_rules() -``` - -```python -# Using concurrently (NEVER await in a loop) -results = await concurrently( - process_item(item, **implicitly()) for item in items -) -``` - -```python -# Running a process -from pants.engine.intrinsics import execute_process -from pants.engine.process import Process - -result = await execute_process( - Process( - argv=["tool", "--flag", "arg"], - input_digest=my_digest, - description="Running tool", - ), - **implicitly() -) -``` - -### Registering rules - -Each module has a `rules()` function, aggregated in `register.py`: - -```python -# In each module -def rules(): - return collect_rules() - -# In register.py -def rules(): - return [ - *module1.rules(), - *module2.rules(), - ] - -def target_types(): - return [Target1, Target2] -``` - -## Testing Conventions - -### Test function naming - -- Test files: `*_test.py` (NOT `test_*.py`) -- Test functions: `def test_descriptive_name() -> None:` -- All test functions must have `-> None` return annotation - -### Integration vs unit tests - -- Unit tests: `*_test.py` - run in normal test target -- Integration tests: `*_integration_test.py` - get their own BUILD target with longer timeouts - -```python -# In BUILD files, integration tests are separated: -python_tests( - name="tests", - sources=["*_test.py", "!*_integration_test.py"], -) -python_tests( - name="integration", - sources=["*_integration_test.py"], - timeout=240, -) -``` - -### Skipping tests - -The repo conftest.py enforces `--noskip` - skipped tests are treated as errors. If a test legitimately needs to be skippable (e.g., platform-specific), mark it: - -```python -@pytest.mark.no_error_if_skipped -def test_platform_specific() -> None: - ... 
-``` - -### RuleRunner fixtures - -```python -@pytest.fixture -def rule_runner() -> RuleRunner: - return RuleRunner( - rules=[*my_rules(), QueryRule(Output, [Input])], - target_types=[MyTarget], - ) -``` - -- Each test gets a fresh `RuleRunner` instance (via fixture) -- Use `rule_runner.write_files()` to set up test content -- Use `rule_runner.set_options()` to configure options -- Use `rule_runner.request()` to invoke rules -- Use `rule_runner.run_goal_rule()` for goal rules - -### Integration tests - -- Use `setup_tmpdir()` + `run_pants()` for full integration tests -- `run_pants()` is hermetic by default (doesn't read `pants.toml`) -- Pass `--backend-packages` explicitly in test args -- Use `result.assert_success()` / `result.assert_failure()` - -## PR and Contribution Workflow - -### Before submitting - -1. Format: `pants --changed-since=HEAD fmt` -2. Lint: `pants --changed-since=HEAD lint` -3. Type-check: `pants --changed-since=HEAD check` -4. Test: `pants --changed-since=HEAD --changed-dependents=transitive test` -5. Or run the pre-push hook: `build-support/githooks/pre-push` - -### Commit messages - -- Start with a verb (Add, Fix, Update, Remove, Refactor) -- Be concise but descriptive -- Reference GitHub issues where applicable - -### Cherry-picking - -For backporting fixes to stable branches: -1. Label PR as `needs-cherrypick` -2. Set milestone to oldest release branch -3. Automation handles cherry-pick after merge - -## Maintenance Tasks - -### Updating external tool versions - -For `ExternalTool` subclasses (downloaded binaries): -1. Download new version for each platform -2. Compute sha256 and byte length: `tee >(shasum -a 256) >(wc -c) > /dev/null < archive` -3. Update `default_version` and `default_known_versions` - -### Updating Python tool versions - -For `PythonToolBase` subclasses (PyPI packages): -1. Update `default_requirements` and/or `default_interpreter_constraints` -2. 
Run `build-support/bin/generate_builtin_lockfiles.py ` - -### Updating PEX - -PEX is special - update both: -1. The `pex-cli` subsystem in `src/python/pants/backend/python/util_rules/pex_cli.py` -2. The requirement in `3rdparty/python/requirements.txt` and regenerate lockfile diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 00000000000..2773a29dbb3 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,566 @@ +# Pants Build System Contributor Guide + +This is the Pants build system repo -- a self-hosting build system where `./pants` runs Pants from source. Follow the conventions, architecture, and workflows specific to this codebase. + +## Critical Rules + +### ALWAYS use Pants, NEVER use raw Python tools + +This is the most important rule. **Never** run `pytest`, `mypy`, `black`, `isort`, `flake8`, `ruff`, or any other Python tool directly. All Python tools must be invoked through Pants: + +```bash +# CORRECT - always use Pants +pants test src/python/pants/backend/python/goals/pytest_runner_test.py +pants lint src/python/pants/backend/python/goals/pytest_runner.py +pants fmt src/python/pants/backend/python/goals/pytest_runner.py +pants check src/python/pants/backend/python/goals/pytest_runner.py + +# WRONG - never do this +pytest src/python/pants/backend/python/goals/pytest_runner_test.py +mypy src/python/pants/backend/python/goals/pytest_runner.py +black src/python/pants/backend/python/goals/pytest_runner.py +ruff check src/python/pants/backend/python/goals/pytest_runner.py +``` + +### Running Pants from source + +In this repo, `./pants` is a special bootstrap script that runs Pants from the local source tree. Use `pants` (which resolves to the `./pants` script) for all operations. The first run compiles the Rust engine and may take several minutes. + +### Test file naming + +Test files must be named `*_test.py` (not `test_*.py`). Pants discovers tests by this suffix. + +### BUILD file conventions + +Every directory with source code needs a `BUILD` file. 
Common patterns: + +```python +# Standard source + test pattern +python_sources() +python_tests(name="tests") + +# With test utilities (conftest, fixtures, helpers) +python_test_utils(name="test_utils") + +# With overrides for specific files +python_sources( + overrides={ + "special.py": {"dependencies": ["//some/dep"]}, + }, +) +``` + +All BUILD files must have the copyright header: +```python +# Copyright 2024 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). +``` + +## Common Commands + +| Task | Command | +|------|---------| +| Run specific test file | `pants test path/to/file_test.py` | +| Run tests for changed files | `pants --changed-since=HEAD test` | +| Run tests with transitive deps | `pants --changed-since=HEAD --changed-dependents=transitive test` | +| Run a single test function | `pants test path/to/file_test.py -- -k test_function_name` | +| Debug a test (interactive) | `pants test --debug path/to/file_test.py` | +| Debug with specific test | `pants test --debug path/to/file_test.py -- -k test_name` | +| Lint changed files | `pants --changed-since=HEAD lint` | +| Format changed files | `pants --changed-since=HEAD fmt` | +| Type-check changed files | `pants --changed-since=HEAD check` | +| List all targets in dir | `pants list path/to/dir:` | +| Show dependencies | `pants dependencies path/to/target` | +| Show dependents | `pants dependents path/to/target` | +| Show target info | `pants peek path/to/target` | +| Generate BUILD files | `pants tailor` | +| Validate BUILD files | `pants lint --only=ruff-format '**BUILD'` | +| Run pre-push checks | `build-support/githooks/pre-push` | + +### Test options + +```bash +# Run tests matching a pattern +pants test path/to/file_test.py -- -k "test_foo or test_bar" + +# Run with verbose output +pants test path/to/file_test.py -- -vvs + +# Run with no timeout (useful for debugging) +pants test --timeout-default=0 path/to/file_test.py + +# Show test 
output even on success +pants test --output=all path/to/file_test.py + +# Force re-run (skip cache) +pants --no-local-cache test path/to/file_test.py +``` + +### Target address syntax + +- `path/to/dir:target_name` - specific target +- `path/to/dir:` - all targets in directory +- `path/to/dir::` - all targets recursively +- `path/to/file.py` - file address (inferred target) +- `::` - everything in the repo + +## Key Directories + +| Path | Purpose | +|------|---------| +| `src/python/pants/` | Core Pants Python source code | +| `src/python/pants/backend/` | Language-specific backends (python, go, java, docker, etc.) | +| `src/python/pants/core/` | Core goals (test, lint, fmt, check, package, run) | +| `src/python/pants/engine/` | The Pants engine (rules, targets, processes, fs) | +| `src/rust/engine/` | Rust engine implementation | +| `pants-plugins/` | In-repo plugins (internal_plugins, pants_explorer) | +| `3rdparty/python/` | Third-party Python dependency declarations and lockfiles | +| `build-support/` | Build scripts, CI helpers, git hooks | +| `testprojects/` | Test fixture projects used by integration tests | +| `docs/docs/` | Documentation source (Docusaurus MDX) | + +## Python Resolves (Lockfiles) + +The repo uses multiple Python resolves configured in `pants.toml`: +- `python-default` - Main resolve for Pants source code +- `pytest` - Test runner dependencies +- `mypy` - Type checker dependencies +- `flake8` - Linter dependencies + +To regenerate lockfiles: `pants generate-lockfiles --resolve=` + +## Engine Architecture + +### The Rule Graph + +Pants's engine is built around a **rule graph** - a directed graph where: +- **Rules** (`@rule` decorated async functions) are internal nodes +- **Queries** are entry points (roots) +- **Params** are typed, hashable input values (leaves) + +Key properties: +- Rules are **pure functions** mapping input types to output types +- The engine handles **memoization**, **concurrency**, and **caching** +- Type annotations 
are used at runtime for dependency injection +- The engine uses **exact type matching** (no subtyping) + +### Rule Execution + +```python +from pants.engine.rules import rule, collect_rules, concurrently, implicitly +from pants.engine.intrinsics import execute_process + +@rule +async def my_rule(request: MyRequest) -> MyResult: + # Await other rules (engine manages execution) + intermediate = await some_other_rule(request.field, **implicitly()) + + # Run external processes through the engine + process_result = await execute_process( + Process(argv=["/bin/echo", "hello"], description="Echo"), + **implicitly() + ) + + # Parallel execution with concurrently() + results = await concurrently( + process_thing(item, **implicitly()) for item in items + ) + + return MyResult(data=process_result.stdout) +``` + +### Union Rules (Polymorphism) + +Since the engine uses exact type matching, polymorphism is achieved through **unions**: + +```python +from pants.engine.unions import UnionRule, union + +@union +class LintRequest: + pass + +@dataclass(frozen=True) +class MyLinterRequest(LintRequest): + ... 
+ +# Register the union member +def rules(): + return [ + *collect_rules(), + UnionRule(LintRequest, MyLinterRequest), + ] +``` + +### Rule Decorators + +| Decorator | Purpose | +|-----------|---------| +| `@rule` | Standard async rule (cached, pure) | +| `@goal_rule` | Top-level CLI goal entry point | +| `@uncacheable_rule` | Rule that should not be memoized | + +### Key Engine Types + +| Type | Module | Purpose | +|------|--------|---------| +| `Process` | `pants.engine.process` | Run an external process | +| `ProcessResult` | `pants.engine.process` | Process execution result | +| `Digest` | `pants.engine.fs` | Content-addressed file tree | +| `Snapshot` | `pants.engine.fs` | Digest with file/dir listing | +| `MergeDigests` | `pants.engine.fs` | Combine multiple digests | +| `CreateDigest` | `pants.engine.fs` | Create files from content | +| `Target` | `pants.engine.target` | A build target | +| `Field` | `pants.engine.target` | A target field | +| `FieldSet` | `pants.engine.target` | Subset of fields for a rule | +| `Address` | `pants.engine.addresses` | Target address | + +## Backend Structure + +Each backend in `src/python/pants/backend//` follows this pattern: + +### register.py (entry point) + +```python +from pants.backend.foo import target_types_rules +from pants.backend.foo.goals import test, lint +from pants.backend.foo.target_types import FooTarget, FooTestTarget + +def rules(): + return [ + *target_types_rules.rules(), + *test.rules(), + *lint.rules(), + ] + +def target_types(): + return [FooTarget, FooTestTarget] +``` + +The backend is activated in `pants.toml` via `backend_packages`: + +```toml +[GLOBAL] +backend_packages = ["pants.backend.foo"] +``` + +### Target types + +```python +from pants.engine.target import ( + Target, + SingleSourceField, + MultipleSourcesField, + Dependencies, + StringField, +) + +class FooSourceField(SingleSourceField): + expected_file_extensions = (".foo",) + +class FooTarget(Target): + alias = "foo_source" + core_fields 
= (FooSourceField, Dependencies) + help = "A Foo source file." +``` + +### Goal rules + +```python +from pants.core.goals.test import TestResult, TestRequest + +@dataclass(frozen=True) +class FooTestFieldSet(TestRequest.FieldSet): + required_fields = (FooTestSourceField,) + source: FooTestSourceField + +class FooTestRequest(TestRequest): + tool_subsystem = FooSubsystem + field_set_type = FooTestFieldSet + +@rule +async def run_foo_test(batch: FooTestRequest.Batch) -> TestResult: + ... + +def rules(): + return [ + *collect_rules(), + *FooTestRequest.rules(), + ] +``` + +## Configuration + +### pants.toml + +The main configuration file. Key sections: +- `[GLOBAL]` - Backend packages, pythonpath, sandboxing +- `[source]` - Source root patterns +- `[python]` - Python interpreter constraints, resolves, pip version +- `[pytest]` - Test runner args, resolve +- `[mypy]` - Type checker args, resolve +- `[test]` - Test timeout, env vars +- `[flake8]`, `[shellcheck]`, `[shfmt]` - Linter configs + +### Python version + +The repo currently targets **Python 3.14** (`interpreter_constraints = ["==3.14.*"]` in `pants.toml`). 
+ +## Style Guide + +### Python style + +- **f-strings**: Use f-strings, not `.format()` or `%` +- **Ternary expressions**: Prefer `x = "a" if cond else "b"` over if/else blocks +- **Early returns**: Prefer guard clauses, avoid deep nesting +- **Collection literals**: Use `{a}`, `(a, b)`, `{"k": v}` not constructors +- **Unpacking for merging**: `[*l1, *l2, "elem"]` not `l1 + l2 + ["elem"]` +- **Comprehensions**: Prefer over `map`/`filter` and explicit loops +- **Frozen dataclasses**: Always use `@dataclass(frozen=True)` for engine types +- **Type annotations**: Required on all functions (params + return type) +- **`cast()` over annotations**: Prefer `x = cast(str, untyped())` over `x: str = untyped()` +- **Pytest style**: Never use `unittest.TestCase`; use plain functions with `assert` +- **Error codes in type:ignore**: `# type: ignore[assignment]` not `# type: ignore` +- **Protocols for params**: Use `Iterable`, `Sequence`, `Mapping` not `List`, `Dict` in function params +- **Precise return types**: Return types should be concrete (`list[str]`, not `Iterable[str]`) + +### Comments + +- Must start with `# ` (space after hash) +- Must be complete sentences ending with a period +- Max 100 characters per line +- Explain **why**, not **what** +- TODOs must reference a GitHub issue: `# TODO(#1234): Description.` + +### Help strings + +- Use `softwrap()` for multiline strings +- Use `help_text()` for `Field`/`Target` subclasses if needed for mypy +- Use backticks for config sections, CLI args, target names, inline code +- Use 2-space indentation for bullet/numbered lists (or `bullet_list()`) +- Never use indentation for code blocks; use triple-backtick blocks only + +### File headers + +All source files must have the copyright header: + +```python +# Copyright 2024 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). 
+``` + +### Imports + +- Imports are auto-sorted by `isort` (via ruff) +- Prefer absolute imports +- Group: stdlib, third-party, pants imports + +## Engine-Specific Conventions + +### Types for the rule graph + +- Rule params and returns must be **hashable** and **immutable** +- Use `tuple` not `list`, `FrozenDict` not `dict`, `FrozenOrderedSet` not `set` +- Use `Collection[T]` to newtype a `tuple` +- Use `DeduplicatedCollection[T]` to newtype a `FrozenOrderedSet` +- The engine uses **exact type matching** (no subtype consideration) +- Newtype freely to disambiguate: `class Name(str): pass` + +### Rule patterns + +```python +# Standard rule with collect_rules +from pants.engine.rules import collect_rules, rule + +@rule +async def my_rule(request: MyRequest) -> MyResult: + ... + +def rules(): + return collect_rules() +``` + +```python +# Using concurrently (NEVER await in a loop) +results = await concurrently( + process_item(item, **implicitly()) for item in items +) +``` + +```python +# Running a process +from pants.engine.intrinsics import execute_process +from pants.engine.process import Process + +result = await execute_process( + Process( + argv=["tool", "--flag", "arg"], + input_digest=my_digest, + description="Running tool", + ), + **implicitly() +) +``` + +### Registering rules + +Each module has a `rules()` function, aggregated in `register.py`: + +```python +# In each module +def rules(): + return collect_rules() + +# In register.py +def rules(): + return [ + *module1.rules(), + *module2.rules(), + ] + +def target_types(): + return [Target1, Target2] +``` + +## Testing + +### Test function naming + +- Test files: `*_test.py` (NOT `test_*.py`) +- Test functions: `def test_descriptive_name() -> None:` +- All test functions must have `-> None` return annotation + +### Integration vs unit tests + +- Unit tests: `*_test.py` - run in normal test target +- Integration tests: `*_integration_test.py` - get their own BUILD target with longer timeouts + +```python +# 
In BUILD files, integration tests are separated: +python_tests( + name="tests", + sources=["*_test.py", "!*_integration_test.py"], +) +python_tests( + name="integration", + sources=["*_integration_test.py"], + timeout=240, +) +``` + +### Skipping tests + +The repo conftest.py enforces `--noskip` - skipped tests are treated as errors. If a test legitimately needs to be skippable (e.g., platform-specific), mark it: + +```python +@pytest.mark.no_error_if_skipped +def test_platform_specific() -> None: + ... +``` + +### RuleRunner fixtures + +```python +@pytest.fixture +def rule_runner() -> RuleRunner: + return RuleRunner( + rules=[*my_rules(), QueryRule(Output, [Input])], + target_types=[MyTarget], + ) +``` + +- Each test gets a fresh `RuleRunner` instance (via fixture) +- Use `rule_runner.write_files()` to set up test content +- Use `rule_runner.set_options()` to configure options +- Use `rule_runner.request()` to invoke rules +- Use `rule_runner.run_goal_rule()` for goal rules + +### Testing approaches + +**Unit tests** - Test pure Python functions without the engine: +```python +def test_my_helper() -> None: + assert my_function("input") == "expected" +``` + +**run_rule_with_mocks** - Test rule logic with mocked dependencies: +```python +from pants.testutil.rule_runner import run_rule_with_mocks + +def test_my_rule() -> None: + result = run_rule_with_mocks( + my_rule, + rule_args=[MyRequest(...)], + mock_calls={ + "my.module.some_dependency": lambda req: MockResult(...), + }, + ) + assert result == expected +``` + +**RuleRunner** - Test rules with a real engine and isolated filesystem: +```python +def test_integration(rule_runner: RuleRunner) -> None: + rule_runner.write_files({ + "src/BUILD": "my_target(source='f.py')", + "src/f.py": "content", + }) + rule_runner.set_options(["--my-option=value"]) + result = rule_runner.request(MyOutput, [MyInput(...)]) + assert result.field == expected +``` + +**run_pants()** - Full integration tests: +```python +from 
pants.testutil.pants_integration_test import run_pants, setup_tmpdir + +def test_end_to_end() -> None: + sources = { + "src/BUILD": "python_sources()", + "src/app.py": "print('hello')", + } + with setup_tmpdir(sources) as tmpdir: + result = run_pants([ + "--backend-packages=['pants.backend.python']", + "lint", + f"{tmpdir}/src:", + ]) + result.assert_success() +``` + +## PR and Contribution Workflow + +### Before submitting + +1. Format: `pants --changed-since=HEAD fmt` +2. Lint: `pants --changed-since=HEAD lint` +3. Type-check: `pants --changed-since=HEAD check` +4. Test: `pants --changed-since=HEAD --changed-dependents=transitive test` +5. Or run the pre-push hook: `build-support/githooks/pre-push` + +### Commit messages + +- Start with a verb (Add, Fix, Update, Remove, Refactor) +- Be concise but descriptive +- Reference GitHub issues where applicable + +## Maintenance Tasks + +### Updating external tool versions + +For `ExternalTool` subclasses (downloaded binaries): +1. Download new version for each platform +2. Compute sha256 and byte length: `tee >(shasum -a 256) >(wc -c) > /dev/null < archive` +3. Update `default_version` and `default_known_versions` + +### Updating Python tool versions + +For `PythonToolBase` subclasses (PyPI packages): +1. Update `default_requirements` and/or `default_interpreter_constraints` +2. Run `build-support/bin/generate_builtin_lockfiles.py ` + +### Updating PEX + +PEX is special - update both: +1. The `pex-cli` subsystem in `src/python/pants/backend/python/util_rules/pex_cli.py` +2. 
The requirement in `3rdparty/python/requirements.txt` and regenerate lockfile From 38be1c1ab2b69a538329f19cf8c8fb6771d8e06e Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Wed, 25 Mar 2026 15:23:13 -0400 Subject: [PATCH 19/25] all existing tests passing --- .../backend/docker/goals/package_image.py | 37 ++- .../pants/backend/docker/target_types.py | 303 ++++++++++-------- .../pants/backend/docker/target_types_test.py | 4 +- 3 files changed, 189 insertions(+), 155 deletions(-) diff --git a/src/python/pants/backend/docker/goals/package_image.py b/src/python/pants/backend/docker/goals/package_image.py index ed1d3c575bc..82563576384 100644 --- a/src/python/pants/backend/docker/goals/package_image.py +++ b/src/python/pants/backend/docker/goals/package_image.py @@ -21,8 +21,6 @@ from pants.backend.docker.registries import DockerRegistries, DockerRegistryOptions from pants.backend.docker.subsystems.docker_options import DockerOptions from pants.backend.docker.target_types import ( - BuildctlOptionsFieldMixin, - DockerBuildOptionsFieldMixin, DockerImageBuildImageOutputField, DockerImageContextRootField, DockerImageRegistriesField, @@ -345,16 +343,17 @@ def get_build_options( docker_options: DockerOptions, target: Target, ) -> Iterator[str]: - engine_build_options_field_type, gen_options_func_name = ( - (BuildctlOptionsFieldMixin, "buildctl_options") + gen_options_func_name = ( + "buildctl_options" if docker_options.build_engine == DockerBuildEngine.BUILDKIT - else (DockerBuildOptionsFieldMixin, "docker_build_options") + else "docker_build_options" ) for field_type in target.field_types: - if issubclass(field_type, engine_build_options_field_type) and cast( - ValidateOptionsMixin, target[field_type] - ).validate_options(docker_options, context): - gen_options_func = getattr(target[field_type], gen_options_func_name) + if ( + issubclass(field_type, ValidateOptionsMixin) + and target[field_type].validate_options(docker_options, context) + and (gen_options_func := 
getattr(target[field_type], gen_options_func_name, None)) + ): yield from gen_options_func( docker=docker_options, value_formatter=get_value_formatter(context, target, field_type.alias), @@ -362,16 +361,16 @@ def get_build_options( # Special handling for global options if docker_options.build_target_stage in context.stages: - if docker_options.build_engine == DockerBuildEngine.BUILDKIT: - yield from ( - DockerImageTargetStageField.buildctl_option, - f"{DockerImageTargetStageField.suboption}{DockerImageTargetStageField.suboption_value_delimiter}{docker_options.build_target_stage}", - ) - else: - yield from ( - DockerImageTargetStageField.docker_build_option, - docker_options.build_target_stage, - ) + compute_options_func = ( + DockerImageTargetStageField.compute_buildctl_options + if docker_options.build_engine == DockerBuildEngine.BUILDKIT + else DockerImageTargetStageField.compute_docker_build_options + ) + yield from compute_options_func( + docker_options.build_target_stage, + docker=docker_options, + value_formatter=get_value_formatter(context, target, DockerImageTargetStageField.alias), + ) # This is the same for docker and buildkit if docker_options.build_no_cache: diff --git a/src/python/pants/backend/docker/target_types.py b/src/python/pants/backend/docker/target_types.py index b5af337cc0e..111edea1e4f 100644 --- a/src/python/pants/backend/docker/target_types.py +++ b/src/python/pants/backend/docker/target_types.py @@ -8,7 +8,7 @@ from abc import ABC, abstractmethod from collections.abc import Callable, Iterator from dataclasses import dataclass -from typing import TYPE_CHECKING, ClassVar, cast, final +from typing import TYPE_CHECKING, ClassVar, final from pants.backend.docker.registries import ALL_DEFAULT_REGISTRIES from pants.backend.docker.subsystems.docker_options import DockerOptions @@ -239,56 +239,49 @@ def validate_options(self, options: DockerOptions, context: DockerBuildContext) class DockerBuildOptionsFieldMixin(ValidateOptionsMixin, ABC): 
docker_build_option: ClassVar[str] + @classmethod @abstractmethod - def docker_build_options( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter - ) -> Iterator[str]: - """Subclasses must implement this, to turn their `self.value` into none, one or more option - values.""" - - -class DockerBuildOptionMultiValueFieldMixin(DockerBuildOptionsFieldMixin, ABC): - """Inherit this mixin class to provide options to `docker build`.""" - - @abstractmethod - def docker_build_option_values( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + def compute_docker_build_options( + cls, value, *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: - """Subclasses must implement this, to turn their `self.value` into none, one or more option - values.""" + """Subclasses must implement this, to turn `value` into none, one or more option values.""" - @final def docker_build_options( self, *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: - for value in self.docker_build_option_values( - docker=docker, value_formatter=value_formatter - ): - yield from (self.docker_build_option, value) + return self.compute_docker_build_options( + self.value, docker=docker, value_formatter=value_formatter + ) class DockerBuildOptionFieldValueMixin(Field, DockerBuildOptionsFieldMixin, ABC): """Inherit this mixin class to provide unary options (i.e. 
option in the form of `--flag=value`) to `docker build`.""" + @classmethod @final - def docker_build_options( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + def compute_docker_build_options( + cls, value, *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: - if self.value is not None: - yield f"{self.docker_build_option}={self.value}" + if value is not None: + yield f"{cls.docker_build_option}={value}" class DockerBuildOptionFieldMultiValueMixin(StringSequenceField, DockerBuildOptionsFieldMixin, ABC): """Inherit this mixin class to provide options in the form of `--flag=value1,value2` to `docker build`.""" + @classmethod @final - def docker_build_options( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + def compute_docker_build_options( + cls, + value: tuple[str, ...] | None, + *, + docker: DockerOptions, + value_formatter: OptionValueFormatter, ) -> Iterator[str]: - if self.value: - yield f"{self.docker_build_option}={','.join(list(self.value))}" + if value: + yield f"{cls.docker_build_option}={','.join(value)}" class DockerBuildOptionFieldMultiValueDictMixin( @@ -297,13 +290,19 @@ class DockerBuildOptionFieldMultiValueDictMixin( """Inherit this mixin class to provide options in the form of `--flag=key1=value1,key2=value2` to `docker build`.""" + @classmethod @final - def docker_build_options( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + def compute_docker_build_options( + cls, + value: FrozenDict[str, str] | None, + *, + docker: DockerOptions, + value_formatter: OptionValueFormatter, ) -> Iterator[str]: - if self.value: - yield f"{self.docker_build_option}=" + ",".join( - f"{key}={value_formatter(value)}" for key, value in self.value.items() + if value: + yield f"{cls.docker_build_option}=" + ",".join( + f"{key}={value_formatter(v)}" + for key, v in value.items() # type: ignore[union-attr] ) @@ -315,14 +314,19 @@ class 
DockerBuildOptionFieldListOfMultiValueDictMixin( `--flag=key1=value1,key2=value2 --flag=key3=value3,key4=value4` """ + @classmethod @final - def docker_build_options( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + def compute_docker_build_options( + cls, + value: tuple[FrozenDict[str, str]] | None, + *, + docker: DockerOptions, + value_formatter: OptionValueFormatter, ) -> Iterator[str]: - if self.value: - for item in self.value: - yield f"{self.docker_build_option}=" + ",".join( - f"{key}={value_formatter(value)}" for key, value in item.items() + if value: + for item in value: # type: ignore[union-attr] + yield f"{cls.docker_build_option}=" + ",".join( + f"{key}={value_formatter(v)}" for key, v in item.items() ) @@ -330,23 +334,31 @@ class DockerBuildOptionFlagFieldMixin(BoolField, DockerBuildOptionsFieldMixin, A """Inherit this mixin class to provide optional flags (i.e. add `--flag` only when the value is `True`) to `docker build`.""" + @classmethod @final - def docker_build_options( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + def compute_docker_build_options( + cls, value: bool, *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: - if self.value: - yield f"{self.docker_build_option}" + if value: + yield f"{cls.docker_build_option}" class BuildctlOptionsFieldMixin(ValidateOptionsMixin, ABC): buildctl_option: ClassVar[str] + @classmethod @abstractmethod + def compute_buildctl_options( + cls, value, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + """Subclasses must implement this, to turn `value` into none, one or more option values.""" + def buildctl_options( self, *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: - """Subclasses must implement this, to turn their `self.value` into none, one or more option - values.""" + return self.compute_buildctl_options( + self.value, docker=docker, 
value_formatter=value_formatter + ) class DockerBuildkitPassthroughFieldMixin( @@ -356,31 +368,11 @@ class DockerBuildkitPassthroughFieldMixin( def docker_build_option(cls) -> str: return cls.buildctl_option - def docker_build_options( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter - ) -> Iterator[str]: - return self.buildctl_options(docker=docker, value_formatter=value_formatter) - - -class BuildctlOptionMultiValueFieldMixin(BuildctlOptionsFieldMixin, ABC): - """Inherit this mixin class to provide multi-value options to `buildctl build`. - - Yields multiple `--opt value1 --opt value2` pairs. - """ - - @abstractmethod - def buildctl_option_values( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter - ) -> Iterator[str]: - """Subclasses must implement this, to turn their `self.value` into none, one or more option - values.""" - - @final - def buildctl_options( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + @classmethod + def compute_docker_build_options( + cls, value, *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: - for value in self.buildctl_option_values(docker=docker, value_formatter=value_formatter): - yield from (self.buildctl_option, value) + return cls.compute_buildctl_options(value, docker=docker, value_formatter=value_formatter) class BuildctlOptionFieldMultiValueDictMixin( @@ -389,13 +381,19 @@ class BuildctlOptionFieldMultiValueDictMixin( """Inherit this mixin class to provide options in the form of `--flag=key1=value1,key2=value2` to `buildctl build`.""" + @classmethod @final - def buildctl_options( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + def compute_buildctl_options( + cls, + value: FrozenDict[str, str] | None, + *, + docker: DockerOptions, + value_formatter: OptionValueFormatter, ) -> Iterator[str]: - if self.value: - yield f"{self.buildctl_option}=" + ",".join( - f"{key}={value_formatter(value)}" for key, value 
in self.value.items() + if value: + yield f"{cls.buildctl_option}=" + ",".join( + f"{key}={value_formatter(v)}" + for key, v in value.items() # type: ignore[union-attr] ) @@ -407,14 +405,19 @@ class BuildctlOptionFieldListOfMultiValueDictMixin( `--flag=key1=value1,key2=value2 --flag=key3=value3,key4=value4` """ + @classmethod @final - def buildctl_options( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + def compute_buildctl_options( + cls, + value: tuple[FrozenDict[str, str], ...] | None, + *, + docker: DockerOptions, + value_formatter: OptionValueFormatter, ) -> Iterator[str]: - if self.value: - for item in self.value: - yield f"{self.buildctl_option}=" + ",".join( - f"{key}={value_formatter(value)}" for key, value in item.items() + if value: + for item in value: # type: ignore[union-attr] + yield f"{cls.buildctl_option}=" + ",".join( + f"{key}={value_formatter(v)}" for key, v in item.items() ) @@ -422,12 +425,13 @@ class BuildctlOptionFieldValueMixin(Field, BuildctlOptionsFieldMixin, ABC): """Inherit this mixin class to provide unary options (i.e. 
option in the form of `--flag=value`) to `buildctl build`.""" + @classmethod @final - def buildctl_options( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + def compute_buildctl_options( + cls, value, *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: - if self.value is not None: - yield f"{self.buildctl_option}={self.value}" + if value is not None: + yield f"{cls.buildctl_option}={value}" class BuildctlLayeredOptionFieldValueMixin(Field, BuildctlOptionsFieldMixin, ABC): @@ -441,27 +445,31 @@ class BuildctlLayeredOptionFieldValueMixin(Field, BuildctlOptionsFieldMixin, ABC suboption: ClassVar[str] suboption_value_delimiter: ClassVar[str] = "=" + @classmethod @final - def buildctl_options( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + def compute_buildctl_options( + cls, value, *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: - if self.value is not None: - yield from ( - self.buildctl_option, - f"{self.suboption}{self.suboption_value_delimiter}{self.value}", - ) + if value is not None: + yield cls.buildctl_option + yield f"{cls.suboption}{cls.suboption_value_delimiter}{value}" class BuildctlOptionFieldMultiValueMixin(StringSequenceField, BuildctlOptionsFieldMixin, ABC): """Inherit this mixin class to provide options in the form of `--flag=value1,value2` to `buildctl build`.""" + @classmethod @final - def buildctl_options( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + def compute_buildctl_options( + cls, + value: tuple[str, ...] 
| None, + *, + docker: DockerOptions, + value_formatter: OptionValueFormatter, ) -> Iterator[str]: - if self.value: - yield f"{self.buildctl_option}={','.join(list(self.value))}" + if value: + yield f"{cls.buildctl_option}={','.join(value)}" class BuildctlLayeredOptionFieldMultiValueMixin( @@ -477,27 +485,31 @@ class BuildctlLayeredOptionFieldMultiValueMixin( suboption: ClassVar[str] suboption_value_delimiter: ClassVar[str] = "=" + @classmethod @final - def buildctl_options( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + def compute_buildctl_options( + cls, + value: tuple[str, ...] | None, + *, + docker: DockerOptions, + value_formatter: OptionValueFormatter, ) -> Iterator[str]: - if self.value: - yield from ( - self.buildctl_option, - f"{self.suboption}{self.suboption_value_delimiter}{','.join(list(self.value))}", - ) + if value: + yield cls.buildctl_option + yield f"{cls.suboption}{cls.suboption_value_delimiter}{','.join(value)}" class BuildctlOptionFlagFieldMixin(BoolField, BuildctlOptionsFieldMixin, ABC): """Inherit this mixin class to provide optional flags (i.e. 
add `--flag` only when the value is `True`) to `buildctl build`.""" + @classmethod @final - def buildctl_options( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + def compute_buildctl_options( + cls, value: bool, *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: - if self.value: - yield f"{self.buildctl_option}" + if value: + yield f"{cls.buildctl_option}" class DockerImageTargetStageField( @@ -526,8 +538,8 @@ def validate_options(options: DockerOptions, context: DockerBuildContext) -> boo class DockerImageBuildImageLabelsOptionField( - DockerBuildOptionMultiValueFieldMixin, - BuildctlOptionMultiValueFieldMixin, + DockerBuildOptionsFieldMixin, + BuildctlOptionsFieldMixin, DictStringToStringField, ): alias = "image_labels" @@ -544,22 +556,32 @@ class DockerImageBuildImageLabelsOptionField( docker_build_option = "--label" buildctl_option = "--opt" - def docker_build_option_values( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + @classmethod + def compute_docker_build_options( + cls, + value: FrozenDict[str, str] | None, + *, + docker: DockerOptions, + value_formatter: OptionValueFormatter, ) -> Iterator[str]: - for label, value in (self.value or {}).items(): - yield f"{label}={value_formatter(value)}" + for label, v in (value or {}).items(): # type: ignore[union-attr] + yield cls.docker_build_option + yield f"{label}={value_formatter(v)}" - def buildctl_option_values( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + @classmethod + def compute_buildctl_options( + cls, + value: FrozenDict[str, str] | None, + *, + docker: DockerOptions, + value_formatter: OptionValueFormatter, ) -> Iterator[str]: - for label, value in (self.value or {}).items(): - yield f"label:{label}={value_formatter(value)}" + for label, v in (value or {}).items(): # type: ignore[union-attr] + yield cls.buildctl_option + yield f"label:{label}={value_formatter(v)}" -class 
DockerImageBuildImageExtraHostsField( - DockerBuildOptionMultiValueFieldMixin, DictStringToStringField -): +class DockerImageBuildImageExtraHostsField(DockerBuildOptionsFieldMixin, DictStringToStringField): alias = "extra_build_hosts" help = help_text( """ @@ -570,13 +592,19 @@ class DockerImageBuildImageExtraHostsField( ) docker_build_option = "--add-host" - def docker_build_option_values( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + @classmethod + def compute_docker_build_options( + cls, + value: FrozenDict[str, str] | None, + *, + docker: DockerOptions, + value_formatter: OptionValueFormatter, ) -> Iterator[str]: - if self.value: - merged_values = {**docker.build_hosts, **self.value} - for label, value in merged_values.items(): - yield f"{label}:{value_formatter(value)}" + if value: + merged_values = {**docker.build_hosts, **value} + for label, v in merged_values.items(): + yield cls.docker_build_option + yield f"{label}:{value_formatter(v)}" class DockerImageBuildImageCacheToField( @@ -688,8 +716,7 @@ class DockerImageBuildImageOutputField( class DockerImageBuildSecretsOptionField( AsyncFieldMixin, - BuildctlOptionMultiValueFieldMixin, - DockerBuildkitPassthroughFieldMixin, + ValidateOptionsMixin, DictStringToStringField, ): alias = "secrets" @@ -717,8 +744,9 @@ class DockerImageBuildSecretsOptionField( ) buildctl_option = "--secret" + docker_build_option = "--secret" - def buildctl_option_values( + def buildctl_options( self, *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: # os.path.join() discards preceding parts if encountering an abs path, e.g. 
if the secret @@ -730,12 +758,16 @@ def buildctl_option_values( self.address.spec_path if re.match(r"\.{1,2}/", path) else "", os.path.expanduser(path), ) - + yield self.buildctl_option yield f"id={secret},src={os.path.normpath(full_path)}" + def docker_build_options( + self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + ) -> Iterator[str]: + return self.buildctl_options(docker=docker, value_formatter=value_formatter) + class DockerImageBuildSSHOptionField( - BuildctlOptionMultiValueFieldMixin, DockerBuildkitPassthroughFieldMixin, StringSequenceField, ): @@ -758,10 +790,13 @@ class DockerImageBuildSSHOptionField( buildctl_option = "--ssh" - def buildctl_option_values( - self, *, docker: DockerOptions, value_formatter: OptionValueFormatter + @classmethod + def compute_buildctl_options( + cls, value: tuple[str, ...], *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: - yield from cast(tuple[str, ...], self.value) + for v in value: + yield cls.buildctl_option + yield v class DockerImageBuildPullOptionField(DockerBuildOptionFieldValueMixin, BoolField): diff --git a/src/python/pants/backend/docker/target_types_test.py b/src/python/pants/backend/docker/target_types_test.py index d3225b3fe5b..903b4780ae0 100644 --- a/src/python/pants/backend/docker/target_types_test.py +++ b/src/python/pants/backend/docker/target_types_test.py @@ -39,9 +39,9 @@ def test_secret_path_resolvement(src: str, expected: str): {"mysecret": src}, address=Address("") ) values = list( - secrets_option_field.buildctl_option_values( + secrets_option_field.buildctl_options( docker=MagicMock(spec=DockerOptions), value_formatter=lambda x: x ) ) - assert values == [f"id=mysecret,src={expected}"] + assert values == ["--secret", f"id=mysecret,src={expected}"] From b6f2381e0df7265be80e802223394b886f5de170 Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Wed, 25 Mar 2026 15:27:02 -0400 Subject: [PATCH 20/25] typechecking --- 
.../pants/backend/docker/target_types.py | 22 +++++++++---------- 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/src/python/pants/backend/docker/target_types.py b/src/python/pants/backend/docker/target_types.py index 111edea1e4f..01c578780d3 100644 --- a/src/python/pants/backend/docker/target_types.py +++ b/src/python/pants/backend/docker/target_types.py @@ -227,7 +227,7 @@ class DockerImageSkipPushField(BoolField): OptionValueFormatter = Callable[[str], str] -class ValidateOptionsMixin(ABC): +class ValidateOptionsMixin(Field, ABC): def validate_options(self, options: DockerOptions, context: DockerBuildContext) -> bool: """Hook method telling Pants to ignore this option in certain contexts. @@ -254,7 +254,7 @@ def docker_build_options( ) -class DockerBuildOptionFieldValueMixin(Field, DockerBuildOptionsFieldMixin, ABC): +class DockerBuildOptionFieldValueMixin(DockerBuildOptionsFieldMixin, ABC): """Inherit this mixin class to provide unary options (i.e. option in the form of `--flag=value`) to `docker build`.""" @@ -301,8 +301,7 @@ def compute_docker_build_options( ) -> Iterator[str]: if value: yield f"{cls.docker_build_option}=" + ",".join( - f"{key}={value_formatter(v)}" - for key, v in value.items() # type: ignore[union-attr] + f"{key}={value_formatter(v)}" for key, v in value.items() ) @@ -324,7 +323,7 @@ def compute_docker_build_options( value_formatter: OptionValueFormatter, ) -> Iterator[str]: if value: - for item in value: # type: ignore[union-attr] + for item in value: yield f"{cls.docker_build_option}=" + ",".join( f"{key}={value_formatter(v)}" for key, v in item.items() ) @@ -392,8 +391,7 @@ def compute_buildctl_options( ) -> Iterator[str]: if value: yield f"{cls.buildctl_option}=" + ",".join( - f"{key}={value_formatter(v)}" - for key, v in value.items() # type: ignore[union-attr] + f"{key}={value_formatter(v)}" for key, v in value.items() ) @@ -415,13 +413,13 @@ def compute_buildctl_options( value_formatter: OptionValueFormatter, ) -> 
Iterator[str]: if value: - for item in value: # type: ignore[union-attr] + for item in value: yield f"{cls.buildctl_option}=" + ",".join( f"{key}={value_formatter(v)}" for key, v in item.items() ) -class BuildctlOptionFieldValueMixin(Field, BuildctlOptionsFieldMixin, ABC): +class BuildctlOptionFieldValueMixin(BuildctlOptionsFieldMixin, ABC): """Inherit this mixin class to provide unary options (i.e. option in the form of `--flag=value`) to `buildctl build`.""" @@ -434,7 +432,7 @@ def compute_buildctl_options( yield f"{cls.buildctl_option}={value}" -class BuildctlLayeredOptionFieldValueMixin(Field, BuildctlOptionsFieldMixin, ABC): +class BuildctlLayeredOptionFieldValueMixin(BuildctlOptionsFieldMixin, ABC): """Inherit this mixin class to provide layered options (i.e. option in the form of `--flag suboption=value`) to `buildctl build`. @@ -564,7 +562,7 @@ def compute_docker_build_options( docker: DockerOptions, value_formatter: OptionValueFormatter, ) -> Iterator[str]: - for label, v in (value or {}).items(): # type: ignore[union-attr] + for label, v in (value or {}).items(): yield cls.docker_build_option yield f"{label}={value_formatter(v)}" @@ -576,7 +574,7 @@ def compute_buildctl_options( docker: DockerOptions, value_formatter: OptionValueFormatter, ) -> Iterator[str]: - for label, v in (value or {}).items(): # type: ignore[union-attr] + for label, v in (value or {}).items(): yield cls.buildctl_option yield f"label:{label}={value_formatter(v)}" From 05345b8b34d82c3bb21950d6c4e02e1879f05fd5 Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Wed, 25 Mar 2026 16:08:44 -0400 Subject: [PATCH 21/25] option types test --- src/python/pants/option/option_types_test.py | 32 ++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/src/python/pants/option/option_types_test.py b/src/python/pants/option/option_types_test.py index 10b3ad45106..f62f6a304f9 100644 --- a/src/python/pants/option/option_types_test.py +++ b/src/python/pants/option/option_types_test.py @@ -4,6 
+4,7 @@ from __future__ import annotations import unittest.mock +from dataclasses import dataclass from enum import Enum from types import SimpleNamespace from typing import TYPE_CHECKING, Any @@ -15,6 +16,7 @@ ArgsListOption, BoolListOption, BoolOption, + DataclassOption, DictOption, DirListOption, DirOption, @@ -49,6 +51,12 @@ class MyEnum(Enum): Val2 = "val2" +@dataclass(frozen=True) +class MyDataclass: + key1: str + key2: int + + def opt_info(*names, **options): return OptionInfo( args=names, @@ -129,6 +137,9 @@ def __init__(self): self.options.dyn_enum_list_prop = [MyEnum.Val2] self.options.defaultless_enum_list_prop = [MyEnum.Val2] self.options.dict_prop = {"key1": "val1"} + self.options.dataclass_prop = MyDataclass(key1="val1", key2=11) + self.options.optional_dataclass_prop = MyDataclass(key1="val2", key2=22) + self.options.dyn_dataclass_prop = MyDataclass(key1="val3", key2=33) enum_prop = EnumOption(default=MyEnum.Val1, help="") dyn_enum_prop = EnumOption( @@ -145,11 +156,19 @@ def __init__(self): ) defaultless_enum_list_prop = EnumListOption(enum_type=MyEnum, help="") dict_prop = DictOption[Any](help="") + dataclass_prop = DataclassOption(default=MyDataclass(key1="val4", key2=44), help="") + optional_dataclass_prop = DataclassOption(dataclass_type=MyDataclass, default=None, help="") + dyn_dataclass_prop = DataclassOption( + dataclass_type=MyDataclass, + default=lambda cls: cls.dyn_default_dataclass, + help=lambda cls: f"{cls.dyn_help}", + ) class MySubsystem(MyBaseSubsystem): dyn_help = "Dynamic Help" dyn_default = MyEnum.Val1 dyn_default_list = [MyEnum.Val1] + dyn_default_dataclass = MyDataclass(key1="val5", key2=55) assert list(collect_options_info(MySubsystem)) == [ opt_info("--enum-prop", default=MyEnum.Val1, help="", type=MyEnum), @@ -167,6 +186,16 @@ class MySubsystem(MyBaseSubsystem): "--defaultless-enum-list-prop", default=[], help="", type=list, member_type=MyEnum ), opt_info("--dict-prop", default={}, help="", type=dict), + opt_info( + 
"--dataclass-prop", default=MyDataclass(key1="val4", key2=44), help="", type=MyDataclass + ), + opt_info("--optional-dataclass-prop", default=None, help="", type=MyDataclass), + opt_info( + "--dyn-dataclass-prop", + default=MyDataclass(key1="val5", key2=55), + help="Dynamic Help", + type=MyDataclass, + ), ] my_subsystem = MySubsystem() @@ -177,6 +206,9 @@ class MySubsystem(MyBaseSubsystem): assert my_subsystem.dyn_enum_list_prop == (MyEnum.Val2,) assert my_subsystem.defaultless_enum_list_prop == (MyEnum.Val2,) assert my_subsystem.dict_prop == {"key1": "val1"} + assert my_subsystem.dataclass_prop == MyDataclass(key1="val1", key2=11) + assert my_subsystem.optional_dataclass_prop == MyDataclass(key1="val2", key2=22) + assert my_subsystem.dyn_dataclass_prop == MyDataclass(key1="val3", key2=33) def test_specialized_options() -> None: From 7a2f94e6c702366b4793dcc7d9a5aad778dd2d92 Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Wed, 25 Mar 2026 16:13:41 -0400 Subject: [PATCH 22/25] typechecking --- src/python/pants/option/option_types.py | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/src/python/pants/option/option_types.py b/src/python/pants/option/option_types.py index 36adae0345a..fe02c655dd4 100644 --- a/src/python/pants/option/option_types.py +++ b/src/python/pants/option/option_types.py @@ -799,7 +799,28 @@ def __new__( cls, flag_name: str | None = None, *, - default: _MaybeDynamicT[_OptT], + default: _OptT, + help: _HelpT, + register_if: _RegisterIfFuncT | None = None, + advanced: bool | None = None, + default_help_repr: str | None = None, + fromfile: bool | None = None, + metavar: str | None = None, + mutually_exclusive_group: str | None = None, + removal_version: str | None = None, + removal_hint: _HelpT | None = None, + deprecation_start_version: str | None = None, + daemon: bool | None = None, + fingerprint: bool | None = None, + ) -> DataclassOption[_OptT, _OptT]: ... 
+ + @overload + def __new__( + cls, + flag_name: str | None = None, + *, + dataclass_type: type[_OptT], + default: _DynamicDefaultT, help: _HelpT, register_if: _RegisterIfFuncT | None = None, advanced: bool | None = None, From 0d71002054e3e61b7c038accb6a8d24ae6bc5111 Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Thu, 26 Mar 2026 15:13:35 -0400 Subject: [PATCH 23/25] in progress commit --- .../pants/backend/docker/engine_types.py | 2 +- .../backend/docker/goals/package_image.py | 9 +- .../docker/goals/package_image_test.py | 103 +++++++++++++----- .../pants/backend/docker/goals/publish.py | 6 +- .../pants/backend/docker/target_types.py | 1 - .../backend/docker/util_rules/binaries.py | 1 + 6 files changed, 85 insertions(+), 37 deletions(-) diff --git a/src/python/pants/backend/docker/engine_types.py b/src/python/pants/backend/docker/engine_types.py index c856c9f0265..ca6b4c01664 100644 --- a/src/python/pants/backend/docker/engine_types.py +++ b/src/python/pants/backend/docker/engine_types.py @@ -6,7 +6,7 @@ class DockerBuildEngine(Enum): DOCKER = "docker" - BUILDKIT = "buildkit" + BUILDCTL = "buildctl" PODMAN = "podman" diff --git a/src/python/pants/backend/docker/goals/package_image.py b/src/python/pants/backend/docker/goals/package_image.py index 82563576384..679d5ba0774 100644 --- a/src/python/pants/backend/docker/goals/package_image.py +++ b/src/python/pants/backend/docker/goals/package_image.py @@ -94,8 +94,7 @@ class DockerPackageFieldSet(PackageFieldSet): def pushes_on_package(self) -> bool: """Returns True if this docker_image target would push to a registry during packaging.""" - value_or_default = self.output.value or self.output.default - return value_or_default.get("push") == "true" or value_or_default["type"] == "registry" + return self.output.value and (self.output.value.get("push") == "true" or self.output.value["type"] == "registry") def format_tag(self, tag: str, interpolation_context: InterpolationContext) -> str: source = 
InterpolationContext.TextSource( @@ -345,7 +344,7 @@ def get_build_options( ) -> Iterator[str]: gen_options_func_name = ( "buildctl_options" - if docker_options.build_engine == DockerBuildEngine.BUILDKIT + if docker_options.build_engine == DockerBuildEngine.BUILDCTL else "docker_build_options" ) for field_type in target.field_types: @@ -363,7 +362,7 @@ def get_build_options( if docker_options.build_target_stage in context.stages: compute_options_func = ( DockerImageTargetStageField.compute_buildctl_options - if docker_options.build_engine == DockerBuildEngine.BUILDKIT + if docker_options.build_engine == DockerBuildEngine.BUILDCTL else DockerImageTargetStageField.compute_docker_build_options ) yield from compute_options_func( @@ -479,7 +478,7 @@ async def get_docker_image_build_process( context_root = field_set.get_context_root(options.default_context_root) binary: BuildctlBinary | PodmanBinary | DockerBinary match options.build_engine: - case DockerBuildEngine.BUILDKIT: + case DockerBuildEngine.BUILDCTL: binary = await get_buildctl(**implicitly()) case DockerBuildEngine.PODMAN: binary = await get_podman(**implicitly()) diff --git a/src/python/pants/backend/docker/goals/package_image_test.py b/src/python/pants/backend/docker/goals/package_image_test.py index f52acaf147a..cc6b9c2cb06 100644 --- a/src/python/pants/backend/docker/goals/package_image_test.py +++ b/src/python/pants/backend/docker/goals/package_image_test.py @@ -13,7 +13,7 @@ import pytest -from pants.backend.docker.engine_types import DockerEngines +from pants.backend.docker.engine_types import DockerBuildEngine, DockerEngines from pants.backend.docker.goals.package_image import ( DockerImageBuildProcess, DockerImageRefs, @@ -44,7 +44,7 @@ DockerImageTagsRequest, DockerImageTarget, ) -from pants.backend.docker.util_rules.binaries import DockerBinary, PodmanBinary +from pants.backend.docker.util_rules.binaries import BuildctlBinary, DockerBinary, PodmanBinary from 
pants.backend.docker.util_rules.docker_build_args import ( DockerBuildArgs, DockerBuildArgsRequest, @@ -178,7 +178,7 @@ def assert_build_process( build_context_snapshot: Snapshot = EMPTY_SNAPSHOT, version_tags: tuple[str, ...] = (), image_refs: DockerImageRefs | None = None, - binary: DockerBinary | PodmanBinary = DockerBinary("/dummy/docker"), + binary: DockerBinary | PodmanBinary | BuildctlBinary = DockerBinary("/dummy/docker"), ) -> DockerImageBuildProcess: """Test helper for get_docker_image_build_process rule. @@ -214,6 +214,15 @@ def assert_build_process( rule_runner, address, build_context_snapshot, copy_sources, copy_build_args, version_tags ) docker_options = _setup_docker_options(rule_runner, options) + + match binary: + case BuildctlBinary(): + binary_rule_path = "pants.backend.docker.util_rules.binaries.get_buildctl" + case PodmanBinary(): + binary_rule_path = "pants.backend.docker.util_rules.binaries.get_podman" + case _: + binary_rule_path = "pants.backend.docker.util_rules.binaries.get_docker" + result = run_rule_with_mocks( get_docker_image_build_process, rule_args=[ @@ -224,9 +233,7 @@ def assert_build_process( "pants.backend.docker.util_rules.docker_build_context.create_docker_build_context": build_context_mock, "pants.engine.internals.graph.resolve_target": lambda _: WrappedTarget(tgt), "pants.backend.docker.goals.package_image.get_image_refs": lambda _: image_refs, - "pants.backend.docker.util_rules.binaries.get_podman" - if isinstance(binary, PodmanBinary) - else "pants.backend.docker.util_rules.binaries.get_docker": lambda *args: binary, + binary_rule_path: lambda *args: binary, }, union_membership=_create_union_membership(), show_warnings=False, @@ -848,7 +855,6 @@ def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", "build", - "--output=type=docker", "--pull=False", "--tag", "env1:1.2.3", @@ -1016,7 +1022,6 @@ def check_build_process(result: DockerImageBuildProcess): assert 
result.process.argv == ( "/dummy/docker", "build", - "--output=type=docker", "--pull=True", "--tag", "args1:latest", @@ -1048,7 +1053,6 @@ def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", "build", - "--output=type=docker", "--pull=False", "--squash", "--tag", @@ -1062,7 +1066,6 @@ def check_build_process_no_squash(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", "build", - "--output=type=docker", "--pull=False", "--tag", "args2:latest", @@ -1099,7 +1102,6 @@ def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", "build", - "--output=type=docker", "--pull=False", "--tag", "args1:1.2.3", @@ -1203,7 +1205,6 @@ def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", "build", - "--output=type=docker", "--pull=False", "--tag", "img1:latest", @@ -1256,7 +1257,6 @@ def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", "build", - "--output=type=docker", "--pull=False", "--secret", "id=system-secret,src=/var/run/secrets/mysecret", @@ -1296,7 +1296,6 @@ def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", "build", - "--output=type=docker", "--pull=False", "--ssh", "default", @@ -1337,7 +1336,6 @@ def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", "build", - "--output=type=docker", "--pull=False", "--no-cache", "--tag", @@ -1384,7 +1382,6 @@ def check_build_process(result: DockerImageBuildProcess): "docker:10.180.0.1", "--add-host", "docker2:10.180.0.2", - "--output=type=docker", "--pull=False", "--tag", "img1:latest", @@ -1419,7 +1416,6 @@ def check_build_process(result: DockerImageBuildProcess): "/dummy/docker", "build", "--cache-to=type=local,dest=/tmp/docker/pants-test-cache", - "--output=type=docker", "--pull=False", 
"--tag", "img1:latest", @@ -1456,7 +1452,6 @@ def check_build_process(result: DockerImageBuildProcess): "build", "--cache-from=type=local,dest=/tmp/docker/pants-test-cache1", "--cache-from=type=local,dest=/tmp/docker/pants-test-cache2", - "--output=type=docker", "--pull=False", "--tag", "img1:latest", @@ -1759,7 +1754,8 @@ def mock_create_digest(_request: CreateDigest) -> Digest: @pytest.mark.parametrize( ["output", "expected_output_arg"], [ - (None, "--output=type=docker"), + (None, None), + ({"type": "docker"}, "--output=type=docker"), ({"type": "registry"}, None), ({"type": "image", "push": "true"}, None), ], @@ -1787,13 +1783,14 @@ def test_docker_output_option_when_push_on_package_ignore( tgt = rule_runner.get_target(Address("docker/test", target_name="img1")) under_test_fs = DockerPackageFieldSet.create(tgt) - if expected_output_arg: + if expected_output_arg or output is None: # Step 1: Validate Process construction using assert_build_process + output_args = [expected_output_arg] if expected_output_arg else [] def check_build_process(result: DockerImageBuildProcess) -> None: assert result.process.argv == ( "/dummy/docker", "build", - expected_output_arg, + *output_args, "--pull=False", "--tag", "img1:latest", @@ -1884,7 +1881,6 @@ def check_build_process(result: DockerImageBuildProcess): "/dummy/docker", "build", "--network=host", - "--output=type=docker", "--pull=False", "--tag", "img1:latest", @@ -1919,7 +1915,6 @@ def check_build_process(result: DockerImageBuildProcess): "/dummy/docker", "build", "--platform=linux/amd64,linux/arm64,linux/arm/v7", - "--output=type=docker", "--pull=False", "--tag", "img1:latest", @@ -1964,7 +1959,6 @@ def check_build_process(result: DockerImageBuildProcess): "build.host=tbs06", "--label", "build.job=13934", - "--output=type=docker", "--pull=False", "--tag", "img1:latest", @@ -2193,7 +2187,6 @@ def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", "build", - 
"--output=type=docker", "--pull=False", f"--target={expected_target}", "--tag", @@ -2231,7 +2224,6 @@ def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", "build", - "--output=type=docker", "--pull=False", "--target=bad", "--tag", @@ -2800,7 +2792,6 @@ def check_build_process(result: DockerImageBuildProcess): assert result.process.argv == ( "/dummy/docker", "build", - "--output=type=docker", "--pull=False", *tag_flags, "--file", @@ -2960,3 +2951,61 @@ def test_field_set_pushes_on_package(output: dict | None, expected: bool) -> Non rule_runner.get_target(Address("", target_name="image")) ) assert field_set.pushes_on_package() is expected + + +def test_docker_build_process_buildctl_engine(rule_runner: RuleRunner) -> None: + rule_runner.write_files( + { + "docker/test/BUILD": dedent( + """\ + docker_image( + name="img1", + output + secrets={ + "system-secret": "/var/run/secrets/mysecret", + "target-secret": "./mysecret", + }, + ssh=["default"], + image_labels={ + "version": "1.0", + }, + build_platform=["linux/amd64", "linux/arm64"], + ) + """ + ), + } + ) + + def check_build_process(result: DockerImageBuildProcess) -> None: + assert result.process.argv == ( + "/dummy/buildctl", + "build", + "--frontend", + "dockerfile.v0", + "--local", + "context=.", + "--local", + "dockerfile=docker/test", + "--opt", + "filename=Dockerfile", + "--opt", + "platform=linux/amd64,linux/arm64", + "--opt", + "label:version=1.0", + "--secret", + "id=system-secret,src=/var/run/secrets/mysecret", + "--secret", + f"id=target-secret,src={rule_runner.build_root}/docker/test/mysecret", + "--ssh", + "default", + "--output", + "type=image,name=img1:latest,push=true", + ) + + assert_build_process( + rule_runner, + Address("docker/test", target_name="img1"), + options=dict(engine=DockerEngines(build=DockerBuildEngine.BUILDCTL)), + binary=BuildctlBinary("/dummy/buildctl"), + build_process_assertions=check_build_process, + ) diff --git 
a/src/python/pants/backend/docker/goals/publish.py b/src/python/pants/backend/docker/goals/publish.py index 0073fd827d2..2b998deff94 100644 --- a/src/python/pants/backend/docker/goals/publish.py +++ b/src/python/pants/backend/docker/goals/publish.py @@ -9,7 +9,7 @@ from itertools import chain from typing import DefaultDict, cast -from pants.backend.docker.engine_types import DockerPushEngine +from pants.backend.docker.engine_types import DockerBuildEngine, DockerPushEngine from pants.backend.docker.goals.package_image import ( DockerPackageFieldSet, GetImageRefsRequest, @@ -112,7 +112,7 @@ async def check_if_skip_push( ) return ( CheckSkipResult.skip(skip_packaging_only=True) - if request.package_fs.pushes_on_package() + if request.package_fs.pushes_on_package() or options.build_engine == DockerBuildEngine.BUILDCTL else CheckSkipResult.no_skip() ) @@ -123,7 +123,7 @@ async def push_docker_images( options: DockerOptions, options_env_aware: DockerOptions.EnvironmentAware, ) -> PublishProcesses: - if cast(DockerPackageFieldSet, request.field_set).pushes_on_package(): + if cast(PublishDockerImageFieldSet, request.field_set).pushes_on_package() or options.build_engine == DockerBuildEngine.BUILDCTL: build_process = await get_docker_image_build_process(request.field_set, **implicitly()) return PublishProcesses( [ diff --git a/src/python/pants/backend/docker/target_types.py b/src/python/pants/backend/docker/target_types.py index 01c578780d3..51e19eddd83 100644 --- a/src/python/pants/backend/docker/target_types.py +++ b/src/python/pants/backend/docker/target_types.py @@ -695,7 +695,6 @@ class DockerImageBuildImageOutputField( DictStringToStringField, ): alias = "output" - default = FrozenDict({"type": "docker"}) help = help_text( f""" Sets the export action for the build result. 
diff --git a/src/python/pants/backend/docker/util_rules/binaries.py b/src/python/pants/backend/docker/util_rules/binaries.py
index db90240c88c..d3c5de139f4 100644
--- a/src/python/pants/backend/docker/util_rules/binaries.py
+++ b/src/python/pants/backend/docker/util_rules/binaries.py
@@ -155,6 +155,7 @@ class PodmanBinary(_DockerPodmanMixin):
 class BuildctlBinary(BaseBinary):
     """The `buildctl` binary."""
 
+    # TODO: add output arg here
     def build_image(
         self,
         tags: tuple[str, ...],

From 013eb99ea114c9461895a21b43856c7740294543 Mon Sep 17 00:00:00 2001
From: Nick Dellosa
Date: Thu, 26 Mar 2026 16:30:34 -0400
Subject: [PATCH 24/25] end of week commit

---
 .../backend/docker/goals/package_image.py     |  7 +++-
 .../pants/backend/docker/target_types.py      | 32 +++++++----------
 .../backend/docker/util_rules/binaries.py     | 34 +++++++++++++++++--
 3 files changed, 50 insertions(+), 23 deletions(-)

diff --git a/src/python/pants/backend/docker/goals/package_image.py b/src/python/pants/backend/docker/goals/package_image.py
index 679d5ba0774..5faa3cf4a1b 100644
--- a/src/python/pants/backend/docker/goals/package_image.py
+++ b/src/python/pants/backend/docker/goals/package_image.py
@@ -48,6 +48,7 @@
 )
 from pants.backend.docker.utils import format_rename_suggestion
 from pants.core.goals.package import BuiltPackage, OutputPathField, PackageFieldSet
+from pants.core.goals.publish import PublishFieldSet
 from pants.engine.collection import Collection
 from pants.engine.fs import EMPTY_DIGEST, CreateDigest, FileContent
 from pants.engine.internals.graph import resolve_target
@@ -94,7 +95,9 @@ class DockerPackageFieldSet(PackageFieldSet):
 
     def pushes_on_package(self) -> bool:
         """Returns True if this docker_image target would push to a registry during packaging."""
-        return self.output.value and (self.output.value.get("push") == "true" or self.output.value["type"] == "registry")
+        return bool(self.output.value) and (
+            self.output.value.get("push") == "true" or self.output.value.get("type") == "registry"
+        )
 
     
def format_tag(self, tag: str, interpolation_context: InterpolationContext) -> str: source = InterpolationContext.TextSource( @@ -492,6 +495,7 @@ async def get_docker_image_build_process( context_root=context_root, env=env, tags=tags, + output=field_set.output.value, extra_args=tuple( get_build_options( context=context, @@ -499,6 +503,7 @@ async def get_docker_image_build_process( target=wrapped_target.target, ) ), + is_publish=isinstance(field_set, PublishFieldSet), ) return DockerImageBuildProcess( process=process, diff --git a/src/python/pants/backend/docker/target_types.py b/src/python/pants/backend/docker/target_types.py index 51e19eddd83..00045da5a00 100644 --- a/src/python/pants/backend/docker/target_types.py +++ b/src/python/pants/backend/docker/target_types.py @@ -6,7 +6,7 @@ import os import re from abc import ABC, abstractmethod -from collections.abc import Callable, Iterator +from collections.abc import Callable, Iterator, Mapping, Sequence from dataclasses import dataclass from typing import TYPE_CHECKING, ClassVar, final @@ -38,7 +38,6 @@ ) from pants.engine.unions import union from pants.util.docutil import bin_name, doc_url -from pants.util.frozendict import FrozenDict from pants.util.meta import classproperty from pants.util.strutil import help_text, softwrap @@ -275,7 +274,7 @@ class DockerBuildOptionFieldMultiValueMixin(StringSequenceField, DockerBuildOpti @final def compute_docker_build_options( cls, - value: tuple[str, ...] 
| None, + value: Sequence[str] | None, *, docker: DockerOptions, value_formatter: OptionValueFormatter, @@ -294,7 +293,7 @@ class DockerBuildOptionFieldMultiValueDictMixin( @final def compute_docker_build_options( cls, - value: FrozenDict[str, str] | None, + value: Mapping[str, str] | None, *, docker: DockerOptions, value_formatter: OptionValueFormatter, @@ -317,7 +316,7 @@ class DockerBuildOptionFieldListOfMultiValueDictMixin( @final def compute_docker_build_options( cls, - value: tuple[FrozenDict[str, str]] | None, + value: Sequence[Mapping[str, str]] | None, *, docker: DockerOptions, value_formatter: OptionValueFormatter, @@ -384,7 +383,7 @@ class BuildctlOptionFieldMultiValueDictMixin( @final def compute_buildctl_options( cls, - value: FrozenDict[str, str] | None, + value: Mapping[str, str] | None, *, docker: DockerOptions, value_formatter: OptionValueFormatter, @@ -407,7 +406,7 @@ class BuildctlOptionFieldListOfMultiValueDictMixin( @final def compute_buildctl_options( cls, - value: tuple[FrozenDict[str, str], ...] | None, + value: Sequence[Mapping[str, str]] | None, *, docker: DockerOptions, value_formatter: OptionValueFormatter, @@ -461,7 +460,7 @@ class BuildctlOptionFieldMultiValueMixin(StringSequenceField, BuildctlOptionsFie @final def compute_buildctl_options( cls, - value: tuple[str, ...] | None, + value: Sequence[str] | None, *, docker: DockerOptions, value_formatter: OptionValueFormatter, @@ -487,7 +486,7 @@ class BuildctlLayeredOptionFieldMultiValueMixin( @final def compute_buildctl_options( cls, - value: tuple[str, ...] 
| None, + value: Sequence[str] | None, *, docker: DockerOptions, value_formatter: OptionValueFormatter, @@ -557,7 +556,7 @@ class DockerImageBuildImageLabelsOptionField( @classmethod def compute_docker_build_options( cls, - value: FrozenDict[str, str] | None, + value: Mapping[str, str] | None, *, docker: DockerOptions, value_formatter: OptionValueFormatter, @@ -569,7 +568,7 @@ def compute_docker_build_options( @classmethod def compute_buildctl_options( cls, - value: FrozenDict[str, str] | None, + value: Mapping[str, str] | None, *, docker: DockerOptions, value_formatter: OptionValueFormatter, @@ -593,7 +592,7 @@ class DockerImageBuildImageExtraHostsField(DockerBuildOptionsFieldMixin, DictStr @classmethod def compute_docker_build_options( cls, - value: FrozenDict[str, str] | None, + value: Mapping[str, str] | None, *, docker: DockerOptions, value_formatter: OptionValueFormatter, @@ -689,11 +688,7 @@ def validate_options(options: DockerOptions, context: DockerBuildContext) -> boo return not options.build_no_cache -class DockerImageBuildImageOutputField( - BuildctlOptionFieldMultiValueDictMixin, - DockerBuildkitPassthroughFieldMixin, - DictStringToStringField, -): +class DockerImageBuildImageOutputField(DictStringToStringField): alias = "output" help = help_text( f""" @@ -708,7 +703,6 @@ class DockerImageBuildImageOutputField( {_interpolation_help.format(kind="Values")} """ ) - buildctl_option = "--output" class DockerImageBuildSecretsOptionField( @@ -789,7 +783,7 @@ class DockerImageBuildSSHOptionField( @classmethod def compute_buildctl_options( - cls, value: tuple[str, ...], *, docker: DockerOptions, value_formatter: OptionValueFormatter + cls, value: Sequence[str], *, docker: DockerOptions, value_formatter: OptionValueFormatter ) -> Iterator[str]: for v in value: yield cls.buildctl_option diff --git a/src/python/pants/backend/docker/util_rules/binaries.py b/src/python/pants/backend/docker/util_rules/binaries.py index d3c5de139f4..cdbf73ad82f 100644 --- 
a/src/python/pants/backend/docker/util_rules/binaries.py +++ b/src/python/pants/backend/docker/util_rules/binaries.py @@ -66,7 +66,9 @@ def build_image( build_args: DockerBuildArgs, context_root: str, env: Mapping[str, str], + output: dict[str, str] | None, extra_args: tuple[str, ...] = (), + is_publish: bool = False, ) -> Process: ... @@ -74,6 +76,10 @@ class PushBinaryProtocol(Protocol): def push_image(self, tag: str, env: Mapping[str, str] | None = None) -> Process: ... +def _comma_sep_dict_args(d: Mapping[str, str]) -> str: + return ",".join(f"{k}={v}" for k, v in d.items()) + + class _DockerPodmanMixin(BaseBinary): def build_image( self, @@ -83,10 +89,15 @@ def build_image( build_args: DockerBuildArgs, context_root: str, env: Mapping[str, str], + output: Mapping[str, str] | None, extra_args: tuple[str, ...] = (), + is_publish: bool = False, ) -> Process: args = [self.path, "build", *extra_args] + if output: + args.extend(["--output", _comma_sep_dict_args(output)]) + for tag in tags: args.extend(["--tag", tag]) @@ -155,7 +166,18 @@ class PodmanBinary(_DockerPodmanMixin): class BuildctlBinary(BaseBinary): """The `buildctl` binary.""" - # TODO: add output arg here + @staticmethod + def _special_output_handling(output: Mapping[str, str] | None) -> bool: + if output: + try: + output_type = output["type"] + except KeyError as ke: + raise ValueError( + "docker_image output type field is required when specifying output" + ) from ke + return output_type == "image" and "name" not in output + return True + def build_image( self, tags: tuple[str, ...], @@ -164,7 +186,9 @@ def build_image( build_args: DockerBuildArgs, context_root: str, env: Mapping[str, str], + output: Mapping[str, str] | None, extra_args: tuple[str, ...] 
= (), + is_publish: bool = False, ) -> Process: args = [ self.path, @@ -183,8 +207,12 @@ def build_image( for build_arg in build_args: args.extend(["--opt", f"build-arg:{build_arg}"]) - for tag in tags: - args.extend(["--output", f"type=image,name={tag},push=true"]) + if self._special_output_handling(output): + publish_suffix = ",push=true" if is_publish else "" + for tag in tags: + args.extend(["--output", f"type=image,name={tag}{publish_suffix}"]) + else: + args.extend(["--output", _comma_sep_dict_args(output)]) return Process( argv=tuple(args), From 51b4d6d01e1cc116ee1d599a448d8a302eaff756 Mon Sep 17 00:00:00 2001 From: Nick Dellosa Date: Thu, 26 Mar 2026 16:33:04 -0400 Subject: [PATCH 25/25] eow commit typechecking --- src/python/pants/backend/docker/goals/package_image.py | 2 +- src/python/pants/backend/docker/util_rules/binaries.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/python/pants/backend/docker/goals/package_image.py b/src/python/pants/backend/docker/goals/package_image.py index 5faa3cf4a1b..035d58d4efe 100644 --- a/src/python/pants/backend/docker/goals/package_image.py +++ b/src/python/pants/backend/docker/goals/package_image.py @@ -95,7 +95,7 @@ class DockerPackageFieldSet(PackageFieldSet): def pushes_on_package(self) -> bool: """Returns True if this docker_image target would push to a registry during packaging.""" - return self.output.value and ( + return bool(self.output.value) and ( self.output.value.get("push") == "true" or self.output.value["type"] == "registry" ) diff --git a/src/python/pants/backend/docker/util_rules/binaries.py b/src/python/pants/backend/docker/util_rules/binaries.py index cdbf73ad82f..d43281057da 100644 --- a/src/python/pants/backend/docker/util_rules/binaries.py +++ b/src/python/pants/backend/docker/util_rules/binaries.py @@ -212,7 +212,7 @@ def build_image( for tag in tags: args.extend(["--output", f"type=image,name={tag}{publish_suffix}"]) else: - args.extend(["--output", 
_comma_sep_dict_args(output)]) + args.extend(["--output", _comma_sep_dict_args(cast(Mapping[str, str], output))]) return Process( argv=tuple(args),