
Commit 1425f6d

[DPE-6965] Wait for all storages to detach in smoke test (#869)
* Separate storage pools
* Fix charm test
* Fix async replication
* Fix replica bootstrap
* Fix data directory removal on restore
* Fix backup to microceph test
* Fix upgrade integration tests
* Add test to check new multiple storages
* Add storages' descriptions
* Fix test_charm_garbage_ignorance
* Fix restore cluster test
* Fix storage re-use test
* Reduce volume size
* List storage
* Poor man's jubilant
* Cleanup
* Correct charm path
* Fix base
* Debug missing storage
* Wait for storage to be available

Signed-off-by: Marcelo Henrique Neppel <[email protected]>
Co-authored-by: Marcelo Henrique Neppel <[email protected]>
1 parent: ff02253 · commit: 1425f6d

File tree

2 files changed (+17 / -18 lines)


.github/workflows/integration_test.yaml

Lines changed: 1 addition & 1 deletion
@@ -157,7 +157,7 @@ jobs:
       - name: juju status
         timeout-minutes: 1
         if: ${{ !contains(matrix.job.spread_job, 'juju29') && (success() || (failure() && steps.spread.outcome == 'failure')) }}
-        run: sudo juju status --color --relations | tee ~/logs/juju-status.txt
+        run: sudo juju status --color --relations --storage | tee ~/logs/juju-status.txt
       - name: juju debug-log
         timeout-minutes: 3
         if: ${{ !contains(matrix.job.spread_job, 'juju29') && (success() || (failure() && steps.spread.outcome == 'failure')) }}
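
The only workflow change adds --storage to the juju status call whose output is archived from CI runs, so detached or stuck storage is visible in the collected logs. As a rough illustration (not part of the change), the same output could be captured during local debugging with a small helper; the function name and log path are hypothetical, and the sudo prefix used on the CI runner is dropped:

import subprocess
from pathlib import Path


def dump_juju_status(log_dir: str = "~/logs") -> str:
    """Capture juju status with relations and storage, mirroring the CI log step."""
    out_path = Path(log_dir).expanduser() / "juju-status.txt"
    out_path.parent.mkdir(parents=True, exist_ok=True)
    # Same flags as the workflow step; --color is omitted so the file stays plain text.
    result = subprocess.run(
        ["juju", "status", "--relations", "--storage"],
        capture_output=True,
        text=True,
        check=True,
    )
    out_path.write_text(result.stdout)
    return result.stdout
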

tests/integration/ha_tests/test_smoke.py

Lines changed: 16 additions & 17 deletions
@@ -2,7 +2,6 @@
 # Copyright 2021 Canonical Ltd.
 # See LICENSE file for licensing details.
 
-import asyncio
 import logging
 
 import pytest
@@ -43,10 +42,10 @@ async def test_app_force_removal(ops_test: OpsTest, charm: str):
             num_units=1,
             base=CHARM_BASE,
             storage={
-                "archive": {"pool": "lxd-btrfs", "size": 8046},
-                "data": {"pool": "lxd-btrfs", "size": 8046},
-                "logs": {"pool": "lxd-btrfs", "size": 8046},
-                "temp": {"pool": "lxd-btrfs", "size": 8046},
+                "archive": {"pool": "lxd-btrfs", "size": 2048},
+                "data": {"pool": "lxd-btrfs", "size": 2048},
+                "logs": {"pool": "lxd-btrfs", "size": 2048},
+                "temp": {"pool": "lxd-btrfs", "size": 2048},
             },
             config={"profile": "testing"},
         )
@@ -66,7 +65,7 @@ async def test_app_force_removal(ops_test: OpsTest, charm: str):
         storage_ids = get_storage_ids(ops_test, primary_name)
 
         # Check if storage exists after application deployed
-        logger.info("werifing is storage exists")
+        logger.info("verifying that storage exists")
         for storage_id in storage_ids:
             for attempt in Retrying(
                 stop=stop_after_delay(15 * 3), wait=wait_fixed(3), reraise=True
@@ -89,7 +88,7 @@ async def test_app_force_removal(ops_test: OpsTest, charm: str):
         )
 
         # Storage should remain
-        logger.info("werifing is storage exists")
+        logger.info("verifying that storage exists")
        for storage_id in storage_ids:
             for attempt in Retrying(
                 stop=stop_after_delay(15 * 3), wait=wait_fixed(3), reraise=True
@@ -107,6 +106,8 @@ async def test_charm_garbage_ignorance(ops_test: OpsTest, charm: str):
         for attempt in Retrying(stop=stop_after_delay(30 * 3), wait=wait_fixed(3), reraise=True):
             with attempt:
                 garbage_storages = await get_detached_storages(ops_test)
+                assert len(garbage_storages) == 4
+        logger.info(f"Collected storages: {garbage_storages}")
 
         logger.info("add unit with attached storage")
         await add_unit_with_storage(ops_test, APPLICATION_NAME, garbage_storages)
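
The two added lines above are the core of the fix for test_charm_garbage_ignorance: the test now keeps polling until all four detached storages (archive, data, logs and temp) are reported, instead of proceeding with whatever subset happened to be visible when the unit was force-removed. Below is a minimal, self-contained sketch of that pattern; it assumes only that get_detached_storages(ops_test) is an async helper returning the IDs of currently detached storages, as it is used in the diff, and the wrapper function itself is illustrative rather than the repo's code:

from tenacity import Retrying, stop_after_delay, wait_fixed

EXPECTED_STORAGES = 4  # archive, data, logs and temp


async def wait_for_detached_storages(ops_test, get_detached_storages, expected=EXPECTED_STORAGES):
    """Poll until every storage of the removed unit is reported as detached."""
    detached = []
    # Same retry shape as the test: one attempt every 3 seconds for up to 90 seconds,
    # re-raising the last AssertionError if the storages never all detach.
    for attempt in Retrying(stop=stop_after_delay(30 * 3), wait=wait_fixed(3), reraise=True):
        with attempt:
            detached = await get_detached_storages(ops_test)
            assert len(detached) == expected
    return detached

The synchronous Retrying loop sleeps between attempts, which the integration tests accept since nothing else needs to run concurrently while they wait.
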
@@ -146,31 +147,29 @@ async def test_app_resources_conflicts_v3(ops_test: OpsTest, charm: str):
     """Test application deploy in dirty environment with garbage storage from another application."""
     async with ops_test.fast_forward():
         logger.info("checking garbage storage")
-        garbage_storage = None
+        garbage_storages = None
         for attempt in Retrying(stop=stop_after_delay(30 * 3), wait=wait_fixed(3), reraise=True):
             with attempt:
-                garbage_storage = await get_detached_storages(ops_test)
+                garbage_storages = await get_detached_storages(ops_test)
+                assert len(garbage_storages) == 4
+        logger.info(f"Collected storages: {garbage_storages}")
 
         logger.info("deploying duplicate application with attached storage")
         await ops_test.model.deploy(
             charm,
             application_name=DUP_APPLICATION_NAME,
             num_units=1,
             base=CHARM_BASE,
-            attach_storage=[tag.storage(storage) for storage in garbage_storage],
+            attach_storage=[tag.storage(storage) for storage in garbage_storages],
             config={"profile": "testing"},
         )
 
         # Reducing the update status frequency to speed up the triggering of deferred events.
-        await ops_test.model.set_config({"update-status-hook-interval": "10s"})
-
-        logger.info("waiting for duplicate application to be blocked")
-        try:
+        async with ops_test.fast_forward("60s"):
+            logger.info("waiting for duplicate application to be waiting")
             await ops_test.model.wait_for_idle(
-                apps=[DUP_APPLICATION_NAME], timeout=1000, status="blocked"
+                apps=[DUP_APPLICATION_NAME], timeout=1000, status="waiting", idle_period=30
             )
-        except asyncio.TimeoutError:
-            logger.info("Application is not in blocked state. Checking logs...")
 
         # Since application have postgresql db in storage from external application it should not be able to connect due to new password
         logger.info("checking operator password auth")
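
This last hunk also changes what the test expects from the duplicate application: since it is deployed on storage initialised by another cluster, and therefore holds credentials it cannot use, the test now waits for it to settle in a "waiting" status rather than catching a timeout while hoping for "blocked". A minimal sketch of that wait, using the same pytest-operator and python-libjuju calls as the diff; the wrapper name is illustrative:

async def wait_for_app_on_foreign_storage(ops_test, app_name: str, timeout: int = 1000):
    """Expect an application reusing another cluster's storage to settle in 'waiting'."""
    # Shorten update-status to 60 seconds so deferred events retrigger quickly,
    # then require 30 seconds of continuous idleness in 'waiting' before returning.
    async with ops_test.fast_forward("60s"):
        await ops_test.model.wait_for_idle(
            apps=[app_name], timeout=timeout, status="waiting", idle_period=30
        )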
