Commit 8a4e4b7

merge redesign-pipeline branch
1 parent e465582 commit 8a4e4b7

File tree

5 files changed: +38 -256 lines

scripts/release/atomic_pipeline.py

Lines changed: 27 additions & 197 deletions
@@ -9,38 +9,23 @@
 from concurrent.futures import ProcessPoolExecutor
 from copy import copy
 from queue import Queue
-from typing import Dict, List, Optional, Tuple, Union
+from typing import Dict, List, Optional, Tuple
 
 import requests
 import semver
 from opentelemetry import trace
 from packaging.version import Version
 
 from lib.base_logger import logger
-from scripts.evergreen.release.agent_matrix import (
-    get_supported_operator_versions,
-)
 from scripts.evergreen.release.images_signing import (
     sign_image,
     verify_signature,
 )
 from scripts.release.build.image_build_configuration import ImageBuildConfiguration
-
 from .build_images import process_image
 from .optimized_operator_build import build_operator_image_fast
 
 TRACER = trace.get_tracer("evergreen-agent")
-DEFAULT_NAMESPACE = "default"
-
-
-def make_list_of_str(value: Union[None, str, List[str]]) -> List[str]:
-    if value is None:
-        return []
-
-    if isinstance(value, str):
-        return [e.strip() for e in value.split(",")]
-
-    return value
 
 
 def get_tools_distro(tools_version: str) -> Dict[str, str]:
@@ -55,11 +40,6 @@ def is_running_in_evg_pipeline():
     return os.getenv("RUNNING_IN_EVG", "") == "true"
 
 
-def is_running_in_patch():
-    is_patch = os.environ.get("is_patch")
-    return is_patch is not None and is_patch.lower() == "true"
-
-
 def load_release_file() -> Dict:
     with open("release.json") as release:
         return json.load(release)
@@ -190,14 +170,6 @@ def build_database_image(build_configuration: ImageBuildConfiguration):
     )
 
 
-def should_skip_arm64():
-    """
-    Determines if arm64 builds should be skipped based on environment.
-    Returns True if running in Evergreen pipeline as a patch.
-    """
-    return is_running_in_evg_pipeline() and is_running_in_patch()
-
-
 @TRACER.start_as_current_span("sign_image_in_repositories")
 def sign_image_in_repositories(args: Dict[str, str], arch: str = None):
     span = trace.get_current_span()
@@ -289,7 +261,6 @@ def build_image_generic(
     dockerfile_path: str,
     build_configuration: ImageBuildConfiguration,
     extra_args: dict | None = None,
-    multi_arch_args_list: list[dict] | None = None,
 ):
     """
     Build one or more platform-specific images, then (optionally)
@@ -298,24 +269,20 @@ def build_image_generic(
 
     registry = build_configuration.registry
     image_name = build_configuration.image_name()
-    args_list = multi_arch_args_list or [extra_args or {}]
-    version = args_list[0].get("version", "")
-    platforms = [args.get("architecture") for args in args_list]
-
-    for base_args in args_list:
-        # merge in the registry without mutating caller’s dict
-        build_args = {**base_args, "quay_registry": registry}
-        logger.debug(f"Build args: {build_args}")
-
-        # TODO: why are we iteration over platforms here? this should be multi-arch build
-        for arch in platforms:
-            logger.debug(f"Building {image_name} for arch={arch}")
-            logger.debug(f"build image generic - registry={registry}")
-            pipeline_process_image(
-                dockerfile_path=dockerfile_path,
-                build_configuration=build_configuration,
-                dockerfile_args=build_args,
-            )
+    args_list = extra_args or {}
+    version = args_list.get("version", "")
+
+    # merge in the registry without mutating caller’s dict
+    build_args = {**args_list, "quay_registry": registry}
+    logger.debug(f"Build args: {build_args}")
+
+    logger.debug(f"Building {image_name} for platforms={build_configuration.platforms}")
+    logger.debug(f"build image generic - registry={registry}")
+    pipeline_process_image(
+        dockerfile_path=dockerfile_path,
+        build_configuration=build_configuration,
+        dockerfile_args=build_args,
+    )
 
     if build_configuration.sign:
         sign_image(registry, version)
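
Aside: the non-mutating merge used in the new lines above can be shown as a short, self-contained sketch (the registry value and keys here are illustrative, not taken from the pipeline). Callers such as build_readiness_probe_image below now pass a single extra_args dict and leave platform fan-out to build_configuration.platforms.

caller_args = {"version": "1.2.3", "GOLANG_VERSION": "1.24"}
build_args = {**caller_args, "quay_registry": "quay.io/example"}  # merged copy

print(caller_args)  # {'version': '1.2.3', 'GOLANG_VERSION': '1.24'} -- unchanged
print(build_args)   # same keys plus 'quay_registry'
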
@@ -352,26 +319,17 @@ def build_readiness_probe_image(build_configuration: ImageBuildConfiguration):
     Builds image used for readiness probe.
     """
 
-    version = build_configuration.version
     golang_version = os.getenv("GOLANG_VERSION", "1.24")
 
-    # Extract architectures from platforms for build args
-    architectures = [platform.split("/")[-1] for platform in build_configuration.platforms]
-    multi_arch_args_list = []
-
-    for arch in architectures:
-        arch_args = {
-            "version": version,
-            "GOLANG_VERSION": golang_version,
-            "architecture": arch,
-            "TARGETARCH": arch,  # TODO: redundant ?
-        }
-        multi_arch_args_list.append(arch_args)
+    extra_args = {
+        "version": build_configuration.version,
+        "GOLANG_VERSION": golang_version,
+    }
 
     build_image_generic(
         dockerfile_path="docker/mongodb-kubernetes-readinessprobe/Dockerfile",
         build_configuration=build_configuration,
-        multi_arch_args_list=multi_arch_args_list,
+        extra_args=extra_args,
     )
 
 
@@ -380,26 +338,17 @@ def build_upgrade_hook_image(build_configuration: ImageBuildConfiguration):
     Builds image used for version upgrade post-start hook.
     """
 
-    version = build_configuration.version
     golang_version = os.getenv("GOLANG_VERSION", "1.24")
 
-    # Extract architectures from platforms for build args
-    architectures = [platform.split("/")[-1] for platform in build_configuration.platforms]
-    multi_arch_args_list = []
-
-    for arch in architectures:
-        arch_args = {
-            "version": version,
-            "GOLANG_VERSION": golang_version,
-            "architecture": arch,
-            "TARGETARCH": arch,  # TODO: redundant ?
-        }
-        multi_arch_args_list.append(arch_args)
+    extra_args = {
+        "version": build_configuration.version,
+        "GOLANG_VERSION": golang_version,
+    }
 
     build_image_generic(
         dockerfile_path="docker/mongodb-kubernetes-upgrade-hook/Dockerfile",
         build_configuration=build_configuration,
-        multi_arch_args_list=multi_arch_args_list,
+        extra_args=extra_args,
     )
 
 
@@ -434,55 +383,6 @@ def build_agent_pipeline(
     )
 
 
-def build_multi_arch_agent_in_sonar(
-    build_configuration: ImageBuildConfiguration,
-    image_version,
-    tools_version,
-):
-    """
-    Creates the multi-arch non-operator suffixed version of the agent.
-    This is a drop-in replacement for the agent
-    release from MCO.
-    This should only be called during releases.
-    Which will lead to a release of the multi-arch
-    images to quay and ecr.
-    """
-
-    logger.info(f"building multi-arch base image for: {image_version}")
-    args = {
-        "version": image_version,
-        "tools_version": tools_version,
-    }
-
-    arch_arm = {
-        "agent_distro": "amzn2_aarch64",
-        "tools_distro": get_tools_distro(tools_version=tools_version)["arm"],
-        "architecture": "arm64",
-    }
-    arch_amd = {
-        "agent_distro": "rhel9_x86_64",
-        "tools_distro": get_tools_distro(tools_version=tools_version)["amd"],
-        "architecture": "amd64",
-    }
-
-    new_rhel_tool_version = "100.10.0"
-    if Version(tools_version) >= Version(new_rhel_tool_version):
-        arch_arm["tools_distro"] = "rhel93-aarch64"
-        arch_amd["tools_distro"] = "rhel93-x86_64"
-
-    joined_args = [args | arch_amd]
-
-    # Only include arm64 if we shouldn't skip it
-    if not should_skip_arm64():
-        joined_args.append(args | arch_arm)
-
-    build_image_generic(
-        dockerfile_path="docker/mongodb-agent-non-matrix/Dockerfile",
-        build_configuration=build_configuration,
-        multi_arch_args_list=joined_args,
-    )
-
-
 def build_agent_default_case(build_configuration: ImageBuildConfiguration):
     """
     Build the agent only for the latest operator for patches and operator releases.
@@ -511,10 +411,10 @@ def build_agent_default_case(build_configuration: ImageBuildConfiguration):
     with ProcessPoolExecutor(max_workers=max_workers) as executor:
         logger.info(f"running with factor of {max_workers}")
         print(f"======= Versions to build {agent_versions_to_build} =======")
-        for agent_version in agent_versions_to_build:
+        for idx, agent_version in enumerate(agent_versions_to_build):
             # We don't need to keep create and push the same image on every build.
             # It is enough to create and push the non-operator suffixed images only during releases to ecr and quay.
-            print(f"======= Building Agent {agent_version} =======")
+            print(f"======= Building Agent {agent_version} ({idx}/{len(agent_versions_to_build)})")
             _build_agent_operator(
                 agent_version,
                 build_configuration,
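
Note that enumerate() is zero-based here, so the first progress line prints 0/N; if a 1-based counter is the intent, a minimal sketch would be:

versions = ["108.0.2", "108.0.6"]  # illustrative agent versions
for idx, agent_version in enumerate(versions, start=1):
    print(f"======= Building Agent {agent_version} ({idx}/{len(versions)}) =======")
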
@@ -526,76 +426,6 @@ def build_agent_default_case(build_configuration: ImageBuildConfiguration):
     queue_exception_handling(tasks_queue)
 
 
-def build_agent_on_agent_bump(build_configuration: ImageBuildConfiguration):
-    """
-    Build the agent matrix (operator version x agent version), triggered by PCT.
-
-    We have three cases where we need to build the agent:
-    - e2e test runs
-    - operator releases
-    - OM/CM bumps via PCT
-
-    We don’t require building a full matrix on e2e test runs and operator releases.
-    "Operator releases" and "e2e test runs" require only the latest operator x agents
-
-    In OM/CM bumps, we release a new agent which we potentially require to release to older operators as well.
-    This function takes care of that.
-    """
-    release = load_release_file()
-    is_release = build_configuration.is_release_scenario()
-
-    if build_configuration.all_agents:
-        # We need to release [all agents x latest operator] on operator releases to make e2e tests work
-        # This was changed previously in https://github.com/mongodb/mongodb-kubernetes/pull/3960
-        agent_versions_to_build = gather_all_supported_agent_versions(release)
-    else:
-        # we only need to release the latest images, we don't need to re-push old images, as we don't clean them up anymore.
-        agent_versions_to_build = gather_latest_agent_versions(release)
-
-    legacy_agent_versions_to_build = release["supportedImages"]["mongodb-agent"]["versions"]
-
-    tasks_queue = Queue()
-    max_workers = 1
-    if build_configuration.parallel:
-        max_workers = None
-        if build_configuration.parallel_factor > 0:
-            max_workers = build_configuration.parallel_factor
-    with ProcessPoolExecutor(max_workers=max_workers) as executor:
-        logger.info(f"running with factor of {max_workers}")
-
-        # We need to regularly push legacy agents, otherwise ecr lifecycle policy will expire them.
-        # We only need to push them once in a while to ecr, so no quay required
-        if not is_release:
-            for legacy_agent in legacy_agent_versions_to_build:
-                tasks_queue.put(
-                    executor.submit(
-                        build_multi_arch_agent_in_sonar,
-                        build_configuration,
-                        legacy_agent,
-                        # we assume that all legacy agents are build using that tools version
-                        "100.9.4",
-                    )
-                )
-
-        for agent_version in agent_versions_to_build:
-            # We don't need to keep create and push the same image on every build.
-            # It is enough to create and push the non-operator suffixed images only during releases to ecr and quay.
-            if build_configuration.all_agents:
-                tasks_queue.put(
-                    executor.submit(
-                        build_multi_arch_agent_in_sonar,
-                        build_configuration,
-                        agent_version[0],
-                        agent_version[1],
-                    )
-                )
-            for operator_version in get_supported_operator_versions():
-                logger.info(f"Building Agent versions: {agent_version} for Operator versions: {operator_version}")
-                _build_agent_operator(agent_version, build_configuration, executor, operator_version, tasks_queue)
-
-    queue_exception_handling(tasks_queue)
-
-
 def queue_exception_handling(tasks_queue):
     exceptions_found = False
     for task in tasks_queue.queue:

scripts/release/build/build_scenario.py

Lines changed: 9 additions & 8 deletions
@@ -27,12 +27,13 @@ def infer_scenario_from_environment(cls) -> "BuildScenario":
             # Release scenario and the git tag will be used for promotion process only
             scenario = BuildScenario.RELEASE
             logger.info(f"Build scenario: {scenario} (git_tag: {git_tag})")
-        elif is_patch:
+        elif is_patch or is_evg:
             scenario = BuildScenario.PATCH
             logger.info(f"Build scenario: {scenario} (patch_id: {patch_id})")
-        elif is_evg:
-            scenario = BuildScenario.STAGING
-            logger.info(f"Build scenario: {scenario} (patch_id: {patch_id})")
+        # TODO: Uncomment the following lines when starting to work on staging builds
+        # elif is_evg:
+        #     scenario = BuildScenario.STAGING
+        #     logger.info(f"Build scenario: {scenario} (patch_id: {patch_id})")
         else:
             scenario = BuildScenario.DEVELOPMENT
             logger.info(f"Build scenario: {scenario}")
@@ -45,10 +46,10 @@ def get_version(self, repository_path: str, changelog_sub_path: str, initial_com
 
         match self:
             case BuildScenario.PATCH:
-                build_id = os.environ["BUILD_ID"]
-                if not build_id:
-                    raise ValueError(f"BUILD_ID environment variable is not set for `{self}` build scenario")
-                return build_id
+                patch_id = os.getenv("version_id")
+                if not patch_id:
+                    raise ValueError(f"version_id environment variable is not set for `{self}` build scenario")
+                return patch_id
             case BuildScenario.STAGING:
                 return repo.head.object.hexsha[:COMMIT_SHA_LENGTH]
             case BuildScenario.RELEASE:
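
Two things change for the PATCH case: the version now comes from the Evergreen-style version_id variable rather than BUILD_ID, and the lookup uses os.getenv, so a missing variable reaches the explicit ValueError instead of failing earlier with KeyError (os.environ["BUILD_ID"] would have thrown before the check). A minimal sketch of the new lookup, assuming the variable is exported by the calling environment (the helper name is illustrative):

import os

def patch_version() -> str:
    patch_id = os.getenv("version_id")  # e.g. set by the CI runner
    if not patch_id:
        raise ValueError("version_id environment variable is not set for the PATCH build scenario")
    return patch_id
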

scripts/release/build/image_build_configuration.py

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 from dataclasses import dataclass
 from typing import List, Optional
 
-from scripts.release.build_context import BuildScenario
+from scripts.release.build.build_scenario import BuildScenario
 
 SUPPORTED_PLATFORMS = ["linux/amd64", "linux/arm64"]
 
0 commit comments
