Skip to content

Commit 588a2a2

Browse files
committed
refactoring and code cleanup
1 parent 04f4424 commit 588a2a2

File tree

7 files changed

+20
-247
lines changed

7 files changed

+20
-247
lines changed

scripts/minikube/minikube_host.sh

Lines changed: 7 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,6 @@
11
#!/usr/bin/env bash
22

3-
# This is a helper script for running tests on s390x Hosts.
4-
# It allows to configure minikube clusters and expose remote API servers on a local machine to
5-
# enable local development while running minikube cluster on s390x instance.
6-
# Run "minikube_host.sh help" command to see the full usage.
7-
# Similar to evg_host.sh but uses minikube instead of kind.
3+
# This is a helper script for running tests on IBM Hosts using RHEL and Minikube.
84

95
set -Eeou pipefail
106

@@ -16,7 +12,7 @@ source scripts/funcs/install
1612

1713
if [[ -z "${MINIKUBE_HOST_NAME}" ]]; then
1814
echo "MINIKUBE_HOST_NAME env var is missing"
19-
echo "Set it to your s390x host connection string (e.g., user@hostname)"
15+
echo "Set it to your IBM RHEL host connection string (e.g., user@hostname)"
2016
exit 1
2117
fi
2218

@@ -35,7 +31,7 @@ kubeconfig_path="${HOME}/.operator-dev/minikube-host.kubeconfig"
3531
configure() {
3632
ssh -T -q "${host_url}" "sudo chown \$(whoami):\$(whoami) ~/.docker || true; mkdir -p ~/.docker"
3733
if [[ -f "${HOME}/.docker/config.json" ]]; then
38-
echo "Copying local ~/.docker/config.json authorization credentials to s390x host"
34+
echo "Copying local ~/.docker/config.json authorization credentials to IBM RHEL host"
3935
jq '. | with_entries(select(.key == "auths"))' "${HOME}/.docker/config.json" | ssh -T -q "${host_url}" 'cat > ~/.docker/config.json'
4036
fi
4137

@@ -79,7 +75,7 @@ remote-prepare-local-e2e-run() {
7975

8076
get-kubeconfig() {
8177
# For minikube, we need to get the kubeconfig and certificates
82-
echo "Getting kubeconfig from minikube on s390x host..."
78+
echo "Getting kubeconfig from minikube on IBM RHEL host..."
8379

8480
# Create local minikube directory structure
8581
mkdir -p "${HOME}/.minikube"
@@ -102,15 +98,6 @@ get-kubeconfig() {
10298
echo "Copied minikube kubeconfig and certificates to ${kubeconfig_path}"
10399
}
104100

105-
recreate-minikube-cluster() {
106-
configure "$(detect_architecture)" 2>&1| prepend "minikube_host.sh configure"
107-
echo "Recreating minikube cluster on ${MINIKUBE_HOST_NAME} (${host_url})..."
108-
# shellcheck disable=SC2088
109-
ssh -T "${host_url}" "cd ~/mongodb-kubernetes; export KUBE_ENVIRONMENT_NAME=minikube; minikube delete || true; ./scripts/minikube/setup_minikube_host.sh"
110-
echo "Copying kubeconfig to ${kubeconfig_path}"
111-
get-kubeconfig
112-
}
113-
114101
tunnel() {
115102
shift 1
116103
echo "Setting up tunnel for minikube cluster..."
@@ -175,20 +162,19 @@ usage() {
175162
minikube_host.sh <command>
176163
177164
PREREQUISITES:
178-
- s390x host with SSH access
165+
- IBM RHEL host with SSH access
179166
- define MINIKUBE_HOST_NAME env var (e.g., export MINIKUBE_HOST_NAME=user@hostname)
180167
- SSH key-based authentication configured
181168
182169
COMMANDS:
183170
configure <architecture> installs on a host: calls sync, switches context, installs necessary software (auto-detects arch)
184171
sync rsync of project directory
185-
recreate-minikube-cluster recreates minikube cluster and executes get-kubeconfig
186172
remote-prepare-local-e2e-run executes prepare-local-e2e on the remote host
187173
get-kubeconfig copies remote minikube kubeconfig locally to ~/.operator-dev/minikube-host.kubeconfig
188174
tunnel [args] creates ssh session with tunneling to all API servers
189175
ssh [args] creates ssh session passing optional arguments to ssh
190-
cmd [command with args] execute command as if being on s390x host
191-
upload-my-ssh-private-key uploads your ssh keys (~/.ssh/id_rsa) to s390x host
176+
cmd [command with args] execute command as if being on IBM RHEL host
177+
upload-my-ssh-private-key uploads your ssh keys (~/.ssh/id_rsa) to IBM RHEL host
192178
help this message
193179
194180
EXAMPLES:
@@ -200,7 +186,6 @@ EXAMPLES:
200186

201187
case ${cmd} in
202188
configure) configure "$@" ;;
203-
recreate-minikube-cluster) recreate-minikube-cluster "$@" ;;
204189
get-kubeconfig) get-kubeconfig ;;
205190
remote-prepare-local-e2e-run) remote-prepare-local-e2e-run ;;
206191
ssh) ssh_to_host "$@" ;;

scripts/minikube/setup_minikube_host.sh

Lines changed: 0 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -174,21 +174,6 @@ start_minikube_cluster() {
174174
setup_podman() {
175175
echo "Setting up podman for ${ARCH}..."
176176

177-
# Check if podman is already available
178-
if command -v podman &> /dev/null; then
179-
echo "✅ Podman already installed"
180-
181-
# Diagnose podman state
182-
echo "=== Podman Diagnostics ==="
183-
echo "User: $(whoami), UID: $(id -u)"
184-
echo "User namespace support: $(cat /proc/self/uid_map 2>/dev/null || echo 'not available')"
185-
echo "Systemctl user status:"
186-
systemctl --user status podman.socket 2>/dev/null || echo "podman.socket not active"
187-
echo "Running 'sudo podman info' command..."
188-
sudo podman info 2>&1
189-
fi
190-
191-
192177
# Configure podman to use cgroupfs instead of systemd in CI
193178
mkdir -p ~/.config/containers
194179
cat > ~/.config/containers/containers.conf << EOF

scripts/release/agent/detect_ops_manager_changes.py

Lines changed: 0 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -60,41 +60,6 @@ def extract_ops_manager_mapping(release_data: Dict) -> Dict:
6060
return release_data.get("supportedImages", {}).get("mongodb-agent", {}).get("opsManagerMapping", {})
6161

6262

63-
def _is_later_agent_version(version1: str, version2: str) -> bool:
64-
"""
65-
Compare two agent versions and return True if version1 is later than version2.
66-
Agent versions are in format like "13.37.0.9590-1" or "108.0.12.8846-1"
67-
"""
68-
if not version1 or not version2:
69-
return False
70-
71-
def split_version(version: str) -> List[int]:
72-
"""Split version string into numeric parts, ignoring suffix after '-'"""
73-
parts = []
74-
version_part = version.split("-")[0] # Remove suffix like "-1"
75-
for part in version_part.split("."):
76-
try:
77-
parts.append(int(part))
78-
except ValueError:
79-
# If we can't parse a part as int, skip it
80-
continue
81-
return parts
82-
83-
v1_parts = split_version(version1)
84-
v2_parts = split_version(version2)
85-
86-
# Compare each part
87-
max_len = max(len(v1_parts), len(v2_parts))
88-
for i in range(max_len):
89-
v1_part = v1_parts[i] if i < len(v1_parts) else 0
90-
v2_part = v2_parts[i] if i < len(v2_parts) else 0
91-
92-
if v1_part != v2_part:
93-
return v1_part > v2_part
94-
95-
return False # Versions are equal
96-
97-
9863
def get_changed_agents(current_mapping: Dict, base_mapping: Dict) -> List[Tuple[str, str]]:
9964
"""Returns list of (agent_version, tools_version) tuples for added/changed agents"""
10065
added_agents = []

scripts/release/agent/validation.py

Lines changed: 6 additions & 56 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,8 @@ def agent_info(self):
2626
def generate_tools_build_args(self, platforms: List[str], tools_version: str) -> Dict[str, str]:
2727
"""
2828
Generate build arguments for MongoDB tools based on platform mappings.
29-
Uses the same validation logic to ensure consistency.
29+
The build arguments are based on the agent build info by verifying different
30+
filename possibilities for each platform.
3031
3132
Args:
3233
platforms: List of platforms (e.g., ["linux/amd64", "linux/arm64"])
@@ -82,7 +83,9 @@ def generate_agent_build_args(self, platforms: List[str], agent_version: str, to
8283
if agent_filename and tools_filename:
8384
build_args[f"mongodb_agent_version_{arch}"] = agent_filename
8485
build_args[f"mongodb_tools_version_{arch}"] = tools_filename
85-
logger.debug(f"Added build args for {platform}: agent={agent_filename}, tools={tools_filename}")
86+
logger.debug(
87+
f"Validated agent and tools versions for {platform}: agent={agent_filename}, tools={tools_filename}"
88+
)
8689
else:
8790
logger.warning(f"Skipping build args for {platform} - missing agent or tools filename")
8891
logger.debug(f" agent_filename: {agent_filename}")
@@ -95,11 +98,6 @@ def generate_agent_build_args(self, platforms: List[str], agent_version: str, to
9598
_platform_config = PlatformConfiguration()
9699

97100

98-
def load_agent_build_info():
99-
"""Load agent platform mappings from build_info_agent.json"""
100-
return _platform_config.agent_info
101-
102-
103101
def generate_tools_build_args(platforms: List[str], tools_version: str) -> Dict[str, str]:
104102
"""Generate build arguments for MongoDB tools based on platform mappings."""
105103
return _platform_config.generate_tools_build_args(platforms, tools_version)
@@ -179,7 +177,7 @@ def _find_working_filename(
179177
return ""
180178

181179

182-
def _get_available_platforms_with_fallback(
180+
def _get_available_platforms(
183181
version: str,
184182
platforms: List[str],
185183
base_url: str,
@@ -213,31 +211,6 @@ def _get_available_platforms_with_fallback(
213211
return available_platforms
214212

215213

216-
def get_available_platforms_for_agent(agent_version: str, platforms: List[str]) -> List[str]:
217-
"""
218-
Get the list of platforms where the agent version is actually available.
219-
Tries multiple RHEL versions for each platform to find working binaries.
220-
221-
Args:
222-
agent_version: MongoDB agent version to check
223-
platforms: List of platforms to check
224-
225-
Returns:
226-
List of platforms where the agent version exists
227-
"""
228-
agent_base_url = (
229-
"https://mciuploads.s3.amazonaws.com/mms-automation/mongodb-mms-build-agent/builds/automation-agent/prod"
230-
)
231-
232-
return _get_available_platforms_with_fallback(
233-
version=agent_version,
234-
platforms=platforms,
235-
base_url=agent_base_url,
236-
filenames_builder=_build_agent_filenames,
237-
version_type="agent",
238-
)
239-
240-
241214
def get_working_agent_filename(agent_version: str, platform: str) -> str:
242215
"""
243216
Get the actual working agent filename for a specific platform and version.
@@ -272,26 +245,3 @@ def get_working_tools_filename(tools_version: str, platform: str) -> str:
272245
tools_base_url = "https://fastdl.mongodb.org/tools/db"
273246

274247
return _find_working_filename(tools_version, platform, tools_base_url, _build_tools_filenames, "tools")
275-
276-
277-
def get_available_platforms_for_tools(tools_version: str, platforms: List[str]) -> List[str]:
278-
"""
279-
Get the list of platforms where the tools version is actually available.
280-
Tries multiple RHEL versions for each platform to find working binaries.
281-
282-
Args:
283-
tools_version: MongoDB tools version to check
284-
platforms: List of platforms to check
285-
286-
Returns:
287-
List of platforms where the tools version exists
288-
"""
289-
tools_base_url = "https://fastdl.mongodb.org/tools/db"
290-
291-
return _get_available_platforms_with_fallback(
292-
version=tools_version,
293-
platforms=platforms,
294-
base_url=tools_base_url,
295-
filenames_builder=_build_tools_filenames,
296-
version_type="tools",
297-
)

scripts/release/atomic_pipeline.py

Lines changed: 7 additions & 56 deletions
Original file line numberDiff line numberDiff line change
@@ -22,8 +22,6 @@
2222
from scripts.release.agent.validation import (
2323
generate_agent_build_args,
2424
generate_tools_build_args,
25-
get_available_platforms_for_agent,
26-
get_available_platforms_for_tools,
2725
)
2826
from scripts.release.build.image_build_configuration import ImageBuildConfiguration
2927
from scripts.release.build.image_build_process import execute_docker_build
@@ -256,13 +254,12 @@ def build_init_appdb_image(build_configuration: ImageBuildConfiguration):
256254

257255
tools_version = extract_tools_version_from_release(release)
258256

259-
if not get_available_platforms_for_tools(tools_version, build_configuration.platforms):
260-
logger.warning(f"Skipping build for init-appdb - tools version {tools_version} not found in repository")
261-
return
262-
263257
platform_build_args = generate_tools_build_args(
264258
platforms=build_configuration.platforms, tools_version=tools_version
265259
)
260+
if not platform_build_args:
261+
logger.warning(f"Skipping build for init-appdb - tools version {tools_version} not found in repository")
262+
return
266263

267264
args = {
268265
"version": build_configuration.version,
@@ -282,14 +279,12 @@ def build_init_database_image(build_configuration: ImageBuildConfiguration):
282279

283280
tools_version = extract_tools_version_from_release(release)
284281

285-
# Validate that the tools version exists before attempting to build
286-
if not get_available_platforms_for_tools(tools_version, build_configuration.platforms):
287-
logger.warning(f"Skipping build for init-database - tools version {tools_version} not found in repository")
288-
return
289-
290282
platform_build_args = generate_tools_build_args(
291283
platforms=build_configuration.platforms, tools_version=tools_version
292284
)
285+
if not platform_build_args:
286+
logger.warning(f"Skipping build for init-database - tools version {tools_version} not found in repository")
287+
return
293288

294289
args = {
295290
"version": build_configuration.version,
@@ -354,57 +349,15 @@ def build_agent(build_configuration: ImageBuildConfiguration):
354349
logger.info(f"Running with factor of {max_workers}")
355350
logger.info(f"======= Agent versions to build {agent_versions_to_build} =======")
356351

357-
successful_builds = []
358-
skipped_builds = []
359-
360352
for idx, agent_tools_version in enumerate(agent_versions_to_build):
361-
agent_version = agent_tools_version[0]
362-
tools_version = agent_tools_version[1]
363-
364-
available_agent_platforms = get_available_platforms_for_agent(agent_version, build_configuration.platforms)
365-
available_tools_platforms = get_available_platforms_for_tools(tools_version, build_configuration.platforms)
366-
available_platforms = list(set(available_agent_platforms) & set(available_tools_platforms))
367-
368-
logger.info(
369-
f"======= Building Agent {agent_tools_version} for platforms: {available_platforms}, ({idx + 1}/{len(agent_versions_to_build)})"
370-
)
371-
372-
# Check if amd64 is available - if not, skip the entire build
373-
if "linux/amd64" not in available_platforms:
374-
logger.warning(
375-
f"Skipping agent version {agent_version} - amd64 platform not available (required platform)"
376-
)
377-
if available_platforms:
378-
logger.info(f" Other available platforms were: {available_platforms}")
379-
skipped_builds.append(agent_tools_version)
380-
continue
381-
382-
if not available_platforms:
383-
logger.warning(
384-
f"Skipping agent version {agent_version} - no platforms available for both agent and tools"
385-
)
386-
skipped_builds.append(agent_tools_version)
387-
continue
388-
389-
if available_platforms != build_configuration.platforms:
390-
logger.info(
391-
f"Building agent {agent_version} for available platforms: {available_platforms} "
392-
f"(skipping: {set(build_configuration.platforms) - set(available_platforms)})"
393-
)
394-
395-
successful_builds.append(agent_tools_version)
396353
_build_agent(
397354
agent_tools_version,
398355
build_configuration,
399-
available_platforms,
356+
build_configuration.platforms,
400357
executor,
401358
tasks_queue,
402359
)
403360

404-
logger.info(f"Build summary: {len(successful_builds)} successful, {len(skipped_builds)} skipped")
405-
if skipped_builds:
406-
logger.info(f"Skipped versions: {skipped_builds}")
407-
408361
queue_exception_handling(tasks_queue)
409362

410363

@@ -436,8 +389,6 @@ def build_agent_pipeline(
436389
f"======== Building agent pipeline for version {agent_version}, build configuration version: {build_configuration.version}"
437390
)
438391

439-
# Note: Validation is now done earlier in the build_agent function
440-
# Generate platform-specific build arguments using the mapping
441392
platform_build_args = generate_agent_build_args(
442393
platforms=available_platforms, agent_version=agent_version, tools_version=tools_version
443394
)

0 commit comments

Comments
 (0)