diff --git a/lib/charms/mongodb/v0/helpers.py b/lib/charms/mongodb/v0/helpers.py
index 2eb37686c..38dda403c 100644
--- a/lib/charms/mongodb/v0/helpers.py
+++ b/lib/charms/mongodb/v0/helpers.py
@@ -213,6 +213,24 @@ def copy_licenses_to_unit():
 _StrOrBytes = Union[str, bytes]
 
 
+def process_pbm_error_k8s(status_str: str, unit_name: str) -> Optional[str]:
+    """Processes the pbm error for the k8s charm.
+
+    Unlike the VM charm, the K8s pbm command does not cause an exception when it fails and it is
+    necessary to process the errors manually.
+    """
+    try:
+        status_str = json.loads(status_str)
+        for node_info in status_str["cluster"][0]["nodes"]:
+            if unit_name.replace("/", "-") not in node_info["host"]:
+                continue
+
+            return process_pbm_error(node_info["errors"][0])
+    except KeyError:
+        # if the keys for parsing errors are not present, proceed as normal
+        pass
+
+
 def process_pbm_error(error_string: Optional[_StrOrBytes]) -> str:
     """Parses pbm error string and returns a user friendly message."""
     message = "couldn't configure s3 backup option"
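For context, the new helper assumes `pbm status --out=json` reports per-node errors under `cluster[0].nodes[*].errors`. A minimal sketch of that shape and of the matching logic follows; the host string and error text are illustrative assumptions inferred from the parsing code above, not taken from pbm documentation:

    # Sketch: the JSON shape process_pbm_error_k8s expects (illustrative).
    status_str = '''
    {
        "cluster": [
            {"nodes": [
                {"host": "mongodb-k8s-0.mongodb-k8s-endpoints:27017",
                 "errors": ["status is not ok: ..."]}
            ]}
        ]
    }
    '''

    # Unit "mongodb-k8s/0" becomes "mongodb-k8s-0" via replace("/", "-") and is
    # matched as a substring of the node's "host" field; the first error of the
    # matching node is then passed through process_pbm_error.
    process_pbm_error_k8s(status_str, "mongodb-k8s/0")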
{e}") return BlockedStatus(process_pbm_error(e.stdout)) except subprocess.CalledProcessError as e: - # pbm pipes a return code of 1, but its output shows the true error code so it is - # necessary to parse the output + # VM deployments only - pbm pipes a return code of 1, but its output shows the true + # error code so it is necessary to parse the output return BlockedStatus(process_pbm_error(e.output)) except Exception as e: # pbm pipes a return code of 1, but its output shows the true error code so it is diff --git a/src/charm.py b/src/charm.py index 46b6d677c..cc173a775 100755 --- a/src/charm.py +++ b/src/charm.py @@ -114,7 +114,7 @@ def __init__(self, *args): self.client_relations = MongoDBProvider(self) self.tls = MongoDBTLS(self, Config.Relations.PEERS, Config.SUBSTRATE) - self.backups = MongoDBBackups(self) + self.backups = MongoDBBackups(self, Config.SUBSTRATE) self.metrics_endpoint = MetricsEndpointProvider( self, refresh_event=self.on.start, jobs=Config.Monitoring.JOBS diff --git a/tests/integration/backup_tests/helpers.py b/tests/integration/backup_tests/helpers.py index b4cb47682..fe5e43f29 100644 --- a/tests/integration/backup_tests/helpers.py +++ b/tests/integration/backup_tests/helpers.py @@ -1,7 +1,5 @@ # Copyright 2023 Canonical Ltd. # See LICENSE file for licensing details. -import os - import ops from pymongo import MongoClient from pytest_operator.plugin import OpsTest @@ -106,11 +104,11 @@ async def count_failed_backups(db_unit: ops.model.Unit) -> int: return failed_backups -async def set_credentials(ops_test: OpsTest, cloud: str) -> None: +async def set_credentials(ops_test: OpsTest, github_secrets, cloud: str) -> None: """Sets the s3 crednetials for the provided cloud, valid options are AWS or GCP.""" # set access key and secret keys - access_key = os.environ.get(f"{cloud}_ACCESS_KEY", False) - secret_key = os.environ.get(f"{cloud}_SECRET_KEY", False) + access_key = github_secrets[f"{cloud}_ACCESS_KEY"] + secret_key = github_secrets[f"{cloud}_SECRET_KEY"] assert access_key and secret_key, f"{cloud} access key and secret key not provided." 
diff --git a/tests/integration/backup_tests/helpers.py b/tests/integration/backup_tests/helpers.py
index b4cb47682..fe5e43f29 100644
--- a/tests/integration/backup_tests/helpers.py
+++ b/tests/integration/backup_tests/helpers.py
@@ -1,7 +1,5 @@
 # Copyright 2023 Canonical Ltd.
 # See LICENSE file for licensing details.
-import os
-
 import ops
 from pymongo import MongoClient
 from pytest_operator.plugin import OpsTest
@@ -106,11 +104,11 @@ async def count_failed_backups(db_unit: ops.model.Unit) -> int:
     return failed_backups
 
 
-async def set_credentials(ops_test: OpsTest, cloud: str) -> None:
+async def set_credentials(ops_test: OpsTest, github_secrets, cloud: str) -> None:
     """Sets the s3 crednetials for the provided cloud, valid options are AWS or GCP."""
     # set access key and secret keys
-    access_key = os.environ.get(f"{cloud}_ACCESS_KEY", False)
-    secret_key = os.environ.get(f"{cloud}_SECRET_KEY", False)
+    access_key = github_secrets[f"{cloud}_ACCESS_KEY"]
+    secret_key = github_secrets[f"{cloud}_SECRET_KEY"]
     assert access_key and secret_key, f"{cloud} access key and secret key not provided."
 
     s3_integrator_unit = ops_test.model.applications[S3_APP_NAME].units[0]
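`set_credentials` now receives the secrets through a `github_secrets` fixture instead of reading `os.environ` directly. The fixture itself is outside this diff; a hypothetical sketch of what it could look like, assuming CI publishes all secrets as one JSON blob in a `SECRETS_FROM_GITHUB` environment variable (both the fixture body and the variable name are assumptions):

    import json
    import os

    import pytest

    @pytest.fixture(scope="session")
    def github_secrets() -> dict:
        # Hypothetical: expects CI to export secrets as a single JSON mapping,
        # e.g. {"AWS_ACCESS_KEY": "...", "AWS_SECRET_KEY": "..."}.
        return json.loads(os.environ["SECRETS_FROM_GITHUB"])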
diff --git a/tests/integration/backup_tests/test_backups.py b/tests/integration/backup_tests/test_backups.py
index 93dea9ef3..bed771fe2 100644
--- a/tests/integration/backup_tests/test_backups.py
+++ b/tests/integration/backup_tests/test_backups.py
@@ -142,12 +142,12 @@ async def test_blocked_incorrect_creds(ops_test: OpsTest) -> None:
 @pytest.mark.skip("Skipping tests until fixing backup tests are addressed (DPE-4264).")
 @pytest.mark.group(1)
 @pytest.mark.abort_on_fail
-async def test_blocked_incorrect_conf(ops_test: OpsTest) -> None:
+async def test_blocked_incorrect_conf(ops_test: OpsTest, github_secrets) -> None:
     """Verifies that the charm goes into blocked status when s3 config options are incorrect."""
     db_app_name = await get_app_name(ops_test)
 
     # set correct AWS credentials for s3 storage but incorrect configs
-    await helpers.set_credentials(ops_test, cloud="AWS")
+    await helpers.set_credentials(ops_test, github_secrets, cloud="AWS")
 
     # wait for both applications to be idle with the correct statuses
     await asyncio.gather(
@@ -215,7 +215,7 @@ async def test_create_and_list_backups(ops_test: OpsTest) -> None:
 @pytest.mark.skip("Skipping tests until fixing backup tests are addressed (DPE-4264).")
 @pytest.mark.group(1)
 @pytest.mark.abort_on_fail
-async def test_multi_backup(ops_test: OpsTest, continuous_writes_to_db) -> None:
+async def test_multi_backup(ops_test: OpsTest, continuous_writes_to_db, github_secrets) -> None:
     """With writes in the DB test creating a backup while another one is running.
 
     Note that before creating the second backup we change the bucket and change the s3 storage
@@ -236,7 +236,7 @@ async def test_multi_backup(ops_test: OpsTest, continuous_writes_to_db) -> None
 
     # while first backup is running change access key, secret keys, and bucket name
     # for GCP
-    await helpers.set_credentials(ops_test, cloud="GCP")
+    await helpers.set_credentials(ops_test, github_secrets, cloud="GCP")
 
     # change to GCP configs and wait for PBM to resync
     configuration_parameters = {
@@ -279,7 +279,7 @@ async def test_multi_backup(ops_test: OpsTest, continuous_writes_to_db) -> None
     assert backups == 1, "Backup not created in first bucket on GCP."
 
     # set AWS credentials, set configs for s3 storage, and wait to resync
-    await helpers.set_credentials(ops_test, cloud="AWS")
+    await helpers.set_credentials(ops_test, github_secrets, cloud="AWS")
     configuration_parameters = {
         "bucket": "data-charms-testing",
         "region": "us-east-1",
@@ -366,11 +366,13 @@ async def test_restore(ops_test: OpsTest, continuous_writes_to_db) -> None:
 @pytest.mark.group(1)
 @pytest.mark.unstable
 @pytest.mark.parametrize("cloud_provider", ["AWS", "GCP"])
-async def test_restore_new_cluster(ops_test: OpsTest, continuous_writes_to_db, cloud_provider):
+async def test_restore_new_cluster(
+    ops_test: OpsTest, continuous_writes_to_db, cloud_provider, github_secrets
+):
     # configure test for the cloud provider
     db_app_name = await get_app_name(ops_test)
     leader_unit = await helpers.get_leader_unit(ops_test, db_app_name)
-    await helpers.set_credentials(ops_test, cloud=cloud_provider)
+    await helpers.set_credentials(ops_test, github_secrets, cloud=cloud_provider)
     if cloud_provider == "AWS":
         configuration_parameters = {
             "bucket": "data-charms-testing",
diff --git a/tests/integration/ha_tests/helpers.py b/tests/integration/ha_tests/helpers.py
index 12ab43bd6..48f78303c 100644
--- a/tests/integration/ha_tests/helpers.py
+++ b/tests/integration/ha_tests/helpers.py
@@ -130,10 +130,8 @@ async def relate_mongodb_and_application(
 
 async def deploy_and_scale_mongodb(
     ops_test: OpsTest,
-    check_for_existing_application: bool = True,
     mongodb_application_name: str = APP_NAME,
     num_units: int = 3,
-    charm_path: Optional[Path] = None,
 ) -> str:
     """Deploys and scales the mongodb application charm.
 
@@ -143,41 +141,24 @@ async def deploy_and_scale_mongodb(
             in the model
         mongodb_application_name: The name of the mongodb application if it is to be deployed
         num_units: The desired number of units
-        charm_path: The location of a prebuilt mongodb-k8s charm
     """
-    application_name = await get_application_name(ops_test, mongodb_application_name)
-
-    if check_for_existing_application and application_name:
-        await scale_application(ops_test, application_name, num_units)
-
-        return application_name
-
-    global mongodb_charm
-    # if provided an existing charm, use it instead of building
-    if charm_path:
-        mongodb_charm = charm_path
-    if not mongodb_charm:
-        charm = await ops_test.build_charm(".")
-        # Cache the built charm to avoid rebuilding it between tests
-        mongodb_charm = charm
-
+    charm = await ops_test.build_charm(".")
     resources = {"mongodb-image": METADATA["resources"]["mongodb-image"]["upstream-source"]}
-    async with ops_test.fast_forward():
-        await ops_test.model.deploy(
-            mongodb_charm,
-            application_name=mongodb_application_name,
-            resources=resources,
-            num_units=num_units,
-            series="jammy",
-        )
+    await ops_test.model.deploy(
+        charm,
+        application_name=mongodb_application_name,
+        resources=resources,
+        num_units=num_units,
+        series="jammy",
+    )
 
-        await ops_test.model.wait_for_idle(
-            apps=[mongodb_application_name],
-            status="active",
-            raise_on_blocked=True,
-            timeout=TIMEOUT,
-        )
+    await ops_test.model.wait_for_idle(
+        apps=[mongodb_application_name],
+        status="active",
+        raise_on_blocked=True,
+        timeout=TIMEOUT,
+    )
 
     return mongodb_application_name
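Note that the rewrite above also drops the `async with ops_test.fast_forward():` wrapper, so the deploy now settles at the model's default update-status interval. If a test still wants the accelerated hook cadence, it could wrap the wait itself; a sketch under that assumption (not part of this diff):

    # fast_forward() temporarily shortens the update-status interval; names
    # (mongodb_application_name, TIMEOUT) come from the helpers module above.
    async with ops_test.fast_forward():
        await ops_test.model.wait_for_idle(
            apps=[mongodb_application_name],
            status="active",
            raise_on_blocked=True,
            timeout=TIMEOUT,
        )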
diff --git a/tests/integration/ha_tests/test_ha.py b/tests/integration/ha_tests/test_ha.py
index 8ec5a1f63..5cb04ff85 100644
--- a/tests/integration/ha_tests/test_ha.py
+++ b/tests/integration/ha_tests/test_ha.py
@@ -123,9 +123,7 @@ async def test_build_and_deploy(ops_test: OpsTest, cmd_mongodb_charm) -> None:
     num_units = 3
 
     if not mongodb_application_name:
-        mongodb_application_name = await deploy_and_scale_mongodb(
-            ops_test, charm_path=cmd_mongodb_charm, num_units=num_units
-        )
+        mongodb_application_name = await deploy_and_scale_mongodb(ops_test, num_units=num_units)
     else:
         check_or_scale_app(ops_test, mongodb_application_name, num_units)
 
diff --git a/tests/integration/relation_tests/test_charm_relations.py b/tests/integration/relation_tests/test_charm_relations.py
index 7b6779a63..9cd37477a 100644
--- a/tests/integration/relation_tests/test_charm_relations.py
+++ b/tests/integration/relation_tests/test_charm_relations.py
@@ -145,7 +145,7 @@ async def test_database_relation_with_charm_libraries(ops_test: OpsTest):
     await ops_test.model.integrate(
         f"{APPLICATION_APP_NAME}:{FIRST_DATABASE_RELATION_NAME}", db_app_name
    )
-    await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active")
+    await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active", idle_period=30)
 
     connection_string = await get_connection_string(
         ops_test, APPLICATION_APP_NAME, FIRST_DATABASE_RELATION_NAME
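Finally, the added `idle_period=30` tightens `wait_for_idle`: every unit must report no hook activity for 30 consecutive seconds before the call returns, instead of passing on the first idle sample while the relation is still settling. The same call with the parameter spelled out (sketch; the timeout value is illustrative, not from this diff):

    await ops_test.model.wait_for_idle(
        apps=APP_NAMES,
        status="active",
        idle_period=30,  # seconds of continuous idleness required per unit
        timeout=1000,    # illustrative overall cap
    )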