Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions docs/notes/2.31.x.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,9 @@ pantsd now preserves web proxy environment variables (`HTTP_PROXY`, `HTTPS_PROXY

### Backends

#### Docker
[Fixed](https://github.com/pantsbuild/pants/issues/22575) a bug that made Docker image hashes unstable when upstream images used dynamic tags.

#### Helm

#### JVM
Expand Down
53 changes: 50 additions & 3 deletions src/python/pants/backend/docker/util_rules/docker_build_context.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,14 +41,20 @@
from pants.core.target_types import FileSourceField
from pants.core.util_rules.source_files import SourceFilesRequest, determine_source_files
from pants.engine.addresses import Address, UnparsedAddressInputs
from pants.engine.fs import Digest, MergeDigests, Snapshot
from pants.engine.fs import (
CreateDigest,
Digest,
FileContent,
MergeDigests,
Snapshot,
)
from pants.engine.internals.graph import (
find_valid_field_sets,
resolve_targets,
resolve_unparsed_address_inputs,
)
from pants.engine.internals.graph import transitive_targets as transitive_targets_get
from pants.engine.intrinsics import digest_to_snapshot
from pants.engine.intrinsics import create_digest, digest_to_snapshot, get_digest_contents
from pants.engine.rules import collect_rules, concurrently, implicitly, rule
from pants.engine.target import (
Dependencies,
Expand Down Expand Up @@ -332,7 +338,48 @@ async def create_docker_build_context(
else:
logger.debug("Did not build any packages for Docker image")

embedded_pkgs_digest = [built_package.digest for built_package in embedded_pkgs]
# Create digests for embedded packages. For upstream Docker images, we use only the image ID
# to ensure hash stability. This prevents changes in image tags (which may include timestamps
# or other dynamic values) from affecting the build context hash of dependent images.
embedded_pkgs_digest = []

# For Docker images, we need to extract the metadata filename and create stable digests
docker_packages = []
for field_set, built_package in zip(pkgs_wanting_embedding, embedded_pkgs):
if request.build_upstream_images and isinstance(
getattr(field_set, "source", None), DockerImageSourceField
):
docker_packages.append(built_package)
else:
embedded_pkgs_digest.append(built_package.digest)

if docker_packages:
docker_metadata_contents = await concurrently(
get_digest_contents(built_package.digest) for built_package in docker_packages
)

for metadata_contents, built_package in zip(docker_metadata_contents, docker_packages):

if metadata_contents:
for artifact in built_package.artifacts:
if isinstance(artifact, BuiltDockerImage):
stable_content = artifact.image_id.encode()
# Add `stable` to the file name to emphasize that it contains only
# metadata that is stable for given inputs, and not fields, such as tags,
# that may contain timestamps or otherwise change on each rebuild.
stable_filename = "docker-info.stable.json"
stable_digest = await create_digest(
CreateDigest([FileContent(stable_filename, stable_content)])
)
embedded_pkgs_digest.append(stable_digest)
break
else:
# Fallback if no BuiltDockerImage found (shouldn't happen)
embedded_pkgs_digest.append(built_package.digest)
else:
# Fallback if no contents in digest
embedded_pkgs_digest.append(built_package.digest)

all_digests = (dockerfile_info.digest, sources.snapshot.digest, *embedded_pkgs_digest)

# Merge all digests to get the final docker build context digest.
Expand Down
Loading