Skip to content

Commit 2c14db4

Browse files
Run integration tests on arm64 (#450)
1 parent 8b1d2cb commit 2c14db4

File tree

11 files changed

+125
-63
lines changed

.github/workflows/ci.yaml

Lines changed: 14 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -65,24 +65,32 @@ jobs:
6565
fail-fast: false
6666
matrix:
6767
juju:
68-
- agent: 2.9.49
68+
- agent: 2.9.49 # renovate: juju-agent-pin-minor
6969
libjuju: ^2
70-
allure: false
71-
- agent: 3.4.3
72-
allure: true
73-
name: Integration test charm | ${{ matrix.juju.agent }}
70+
allure_on_amd64: false
71+
- agent: 3.4.3 # renovate: juju-agent-pin-minor
72+
allure_on_amd64: true
73+
architecture:
74+
- amd64
75+
include:
76+
- juju:
77+
agent: 3.4.3 # renovate: juju-agent-pin-minor
78+
allure_on_amd64: true
79+
architecture: arm64
80+
name: Integration test charm | ${{ matrix.juju.agent }} | ${{ matrix.architecture }}
7481
needs:
7582
- lint
7683
- unit-test
7784
- build
7885
uses: canonical/data-platform-workflows/.github/workflows/[email protected]
7986
with:
8087
artifact-prefix: ${{ needs.build.outputs.artifact-prefix }}
88+
architecture: ${{ matrix.architecture }}
8189
cloud: microk8s
8290
microk8s-snap-channel: 1.28-strict/stable
8391
juju-agent-version: ${{ matrix.juju.agent }}
8492
libjuju-version-constraint: ${{ matrix.juju.libjuju }}
85-
_beta_allure_report: ${{ matrix.juju.allure }}
93+
_beta_allure_report: ${{ matrix.juju.allure_on_amd64 && matrix.architecture == 'amd64' }}
8694
secrets:
8795
# GitHub appears to redact each line of a multi-line secret
8896
# Avoid putting `{` or `}` on a line by itself so that it doesn't get redacted in logs

tests/integration/architecture.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
# Copyright 2024 Canonical Ltd.
2+
# See LICENSE file for licensing details.
3+
import subprocess
4+
5+
architecture = subprocess.run(
6+
["dpkg", "--print-architecture"], capture_output=True, check=True, encoding="utf-8"
7+
).stdout.strip()

tests/integration/helpers.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33

44
import itertools
55
import json
6+
import pathlib
67
import secrets
78
import string
89
import subprocess
@@ -556,13 +557,13 @@ async def write_content_to_file_in_unit(
556557
"""
557558
pod_name = unit.name.replace("/", "-")
558559

559-
with tempfile.NamedTemporaryFile(mode="w") as temp_file:
560+
with tempfile.NamedTemporaryFile(mode="w", dir=pathlib.Path.home()) as temp_file:
560561
temp_file.write(content)
561562
temp_file.flush()
562563

563564
subprocess.run(
564565
[
565-
"kubectl",
566+
"microk8s.kubectl",
566567
"cp",
567568
"-n",
568569
ops_test.model.info.name,
@@ -591,10 +592,10 @@ async def read_contents_from_file_in_unit(
591592
"""
592593
pod_name = unit.name.replace("/", "-")
593594

594-
with tempfile.NamedTemporaryFile(mode="r+") as temp_file:
595+
with tempfile.NamedTemporaryFile(mode="r+", dir=pathlib.Path.home()) as temp_file:
595596
subprocess.run(
596597
[
597-
"kubectl",
598+
"microk8s.kubectl",
598599
"cp",
599600
"-n",
600601
ops_test.model.info.name,

tests/integration/high_availability/high_availability_helpers.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -259,7 +259,7 @@ def deploy_chaos_mesh(namespace: str) -> None:
259259
for attempt in Retrying(stop=stop_after_delay(5 * 60), wait=wait_fixed(10)):
260260
with attempt:
261261
output = subprocess.check_output(
262-
f"kubectl get pods --namespace {namespace} -l app.kubernetes.io/instance=chaos-mesh".split(),
262+
f"microk8s.kubectl get pods --namespace {namespace} -l app.kubernetes.io/instance=chaos-mesh".split(),
263263
env=env,
264264
)
265265
assert output.decode().count("Running") == 4, "Chaos Mesh not ready"
@@ -528,7 +528,7 @@ def isolate_instance_from_cluster(ops_test: OpsTest, unit_name: str) -> None:
528528
env["KUBECONFIG"] = os.path.expanduser("~/.kube/config")
529529

530530
try:
531-
subprocess.check_output(["kubectl", "apply", "-f", temp_file.name], env=env)
531+
subprocess.check_output(["microk8s.kubectl", "apply", "-f", temp_file.name], env=env)
532532
except subprocess.CalledProcessError as e:
533533
logger.error(e.output)
534534
logger.error(e.stderr)
@@ -540,7 +540,7 @@ def remove_instance_isolation(ops_test: OpsTest) -> None:
540540
env = os.environ
541541
env["KUBECONFIG"] = os.path.expanduser("~/.kube/config")
542542
subprocess.check_output(
543-
f"kubectl -n {ops_test.model.info.name} delete networkchaos network-loss-primary",
543+
f"microk8s.kubectl -n {ops_test.model.info.name} delete networkchaos network-loss-primary",
544544
shell=True,
545545
env=env,
546546
)

tests/integration/high_availability/scripts/destroy_chaos_mesh.sh

Lines changed: 17 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -11,38 +11,38 @@ fi
1111

1212
destroy_chaos_mesh() {
1313
echo "deleting api-resources"
14-
for i in $(kubectl api-resources | awk '/chaos-mesh/ {print $1}'); do
15-
timeout 30 kubectl delete "${i}" --all --all-namespaces || true
14+
for i in $(microk8s.kubectl api-resources | awk '/chaos-mesh/ {print $1}'); do
15+
timeout 30 microk8s.kubectl delete "${i}" --all --all-namespaces || true
1616
done
1717

18-
if kubectl get mutatingwebhookconfiguration | grep -q 'chaos-mesh-mutation'; then
19-
timeout 30 kubectl delete mutatingwebhookconfiguration chaos-mesh-mutation || true
18+
if microk8s.kubectl get mutatingwebhookconfiguration | grep -q 'chaos-mesh-mutation'; then
19+
timeout 30 microk8s.kubectl delete mutatingwebhookconfiguration chaos-mesh-mutation || true
2020
fi
2121

22-
if kubectl get validatingwebhookconfiguration | grep -q 'chaos-mesh-validation-auth'; then
23-
timeout 30 kubectl delete validatingwebhookconfiguration chaos-mesh-validation-auth || true
22+
if microk8s.kubectl get validatingwebhookconfiguration | grep -q 'chaos-mesh-validation-auth'; then
23+
timeout 30 microk8s.kubectl delete validatingwebhookconfiguration chaos-mesh-validation-auth || true
2424
fi
2525

26-
if kubectl get validatingwebhookconfiguration | grep -q 'chaos-mesh-validation'; then
27-
timeout 30 kubectl delete validatingwebhookconfiguration chaos-mesh-validation || true
26+
if microk8s.kubectl get validatingwebhookconfiguration | grep -q 'chaos-mesh-validation'; then
27+
timeout 30 microk8s.kubectl delete validatingwebhookconfiguration chaos-mesh-validation || true
2828
fi
2929

30-
if kubectl get clusterrolebinding | grep -q 'chaos-mesh'; then
30+
if microk8s.kubectl get clusterrolebinding | grep -q 'chaos-mesh'; then
3131
echo "deleting clusterrolebindings"
32-
readarray -t args < <(kubectl get clusterrolebinding | awk '/chaos-mesh/ {print $1}')
33-
timeout 30 kubectl delete clusterrolebinding "${args[@]}" || true
32+
readarray -t args < <(microk8s.kubectl get clusterrolebinding | awk '/chaos-mesh/ {print $1}')
33+
timeout 30 microk8s.kubectl delete clusterrolebinding "${args[@]}" || true
3434
fi
3535

36-
if kubectl get clusterrole | grep -q 'chaos-mesh'; then
36+
if microk8s.kubectl get clusterrole | grep -q 'chaos-mesh'; then
3737
echo "deleting clusterroles"
38-
readarray -t args < <(kubectl get clusterrole | awk '/chaos-mesh/ {print $1}')
39-
timeout 30 kubectl delete clusterrole "${args[@]}" || true
38+
readarray -t args < <(microk8s.kubectl get clusterrole | awk '/chaos-mesh/ {print $1}')
39+
timeout 30 microk8s.kubectl delete clusterrole "${args[@]}" || true
4040
fi
4141

42-
if kubectl get crd | grep -q 'chaos-mesh.org'; then
42+
if microk8s.kubectl get crd | grep -q 'chaos-mesh.org'; then
4343
echo "deleting crds"
44-
readarray -t args < <(kubectl get crd | awk '/chaos-mesh.org/ {print $1}')
45-
timeout 30 kubectl delete crd "${args[@]}" || true
44+
readarray -t args < <(microk8s.kubectl get crd | awk '/chaos-mesh.org/ {print $1}')
45+
timeout 30 microk8s.kubectl delete crd "${args[@]}" || true
4646
fi
4747

4848
if [ -n "${chaos_mesh_ns}" ] && sg snap_microk8s -c "microk8s.helm3 repo list --namespace=${chaos_mesh_ns}" | grep -q 'chaos-mesh'; then

tests/integration/high_availability/test_async_replication.py

Lines changed: 35 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44

55

66
import logging
7+
import subprocess
78
from asyncio import gather
89
from pathlib import Path
910
from time import sleep
@@ -14,14 +15,13 @@
1415
from juju.model import Model
1516
from pytest_operator.plugin import OpsTest
1617

17-
from .. import juju_
18+
from .. import architecture, juju_, markers
1819
from ..helpers import (
1920
execute_queries_on_unit,
2021
get_cluster_status,
2122
get_leader_unit,
2223
get_unit_address,
2324
)
24-
from ..markers import juju3
2525
from .high_availability_helpers import (
2626
DATABASE_NAME,
2727
TABLE_NAME,
@@ -55,6 +55,11 @@ async def second_model(
5555
"""Create and return the second model."""
5656
second_model_name = f"{first_model.info.name}-other"
5757
await ops_test._controller.add_model(second_model_name)
58+
subprocess.run(["juju", "switch", second_model_name], check=True)
59+
subprocess.run(
60+
["juju", "set-model-constraints", f"arch={architecture.architecture}"], check=True
61+
)
62+
subprocess.run(["juju", "switch", first_model.info.name], check=True)
5863
second_model = Model()
5964
await second_model.connect(model_name=second_model_name)
6065
yield second_model # pyright: ignore [reportReturnType]
@@ -65,9 +70,10 @@ async def second_model(
6570
await ops_test._controller.destroy_model(second_model_name, destroy_storage=True)
6671

6772

68-
@juju3
69-
@pytest.mark.abort_on_fail
7073
@pytest.mark.group(1)
74+
@markers.juju3
75+
@markers.amd64_only # TODO: remove after mysql-router-k8s arm64 stable release
76+
@pytest.mark.abort_on_fail
7177
async def test_build_and_deploy(
7278
ops_test: OpsTest, first_model: Model, second_model: Model
7379
) -> None:
@@ -112,9 +118,10 @@ async def test_build_and_deploy(
112118
)
113119

114120

115-
@juju3
116-
@pytest.mark.abort_on_fail
117121
@pytest.mark.group(1)
122+
@markers.juju3
123+
@markers.amd64_only # TODO: remove after mysql-router-k8s arm64 stable release
124+
@pytest.mark.abort_on_fail
118125
async def test_async_relate(first_model: Model, second_model: Model) -> None:
119126
"""Relate the two mysql clusters."""
120127
logger.info("Creating offers in first model")
@@ -145,9 +152,10 @@ async def test_async_relate(first_model: Model, second_model: Model) -> None:
145152
)
146153

147154

148-
@juju3
149-
@pytest.mark.abort_on_fail
150155
@pytest.mark.group(1)
156+
@markers.juju3
157+
@markers.amd64_only # TODO: remove after mysql-router-k8s arm64 stable release
158+
@pytest.mark.abort_on_fail
151159
async def test_create_replication(first_model: Model, second_model: Model) -> None:
152160
"""Run the create replication and wait for the applications to settle."""
153161
logger.info("Running create replication action")
@@ -175,9 +183,10 @@ async def test_create_replication(first_model: Model, second_model: Model) -> No
175183
)
176184

177185

178-
@juju3
179-
@pytest.mark.abort_on_fail
180186
@pytest.mark.group(1)
187+
@markers.juju3
188+
@markers.amd64_only # TODO: remove after mysql-router-k8s arm64 stable release
189+
@pytest.mark.abort_on_fail
181190
async def test_deploy_router_and_app(first_model: Model) -> None:
182191
"""Deploy the router and the test application."""
183192
logger.info("Deploying router and application")
@@ -193,7 +202,7 @@ async def test_deploy_router_and_app(first_model: Model) -> None:
193202
APPLICATION_APP_NAME,
194203
application_name=APPLICATION_APP_NAME,
195204
series="jammy",
196-
channel="latest/stable",
205+
channel="latest/edge",
197206
num_units=1,
198207
)
199208

@@ -212,9 +221,10 @@ async def test_deploy_router_and_app(first_model: Model) -> None:
212221
)
213222

214223

215-
@juju3
216-
@pytest.mark.abort_on_fail
217224
@pytest.mark.group(1)
225+
@markers.juju3
226+
@markers.amd64_only # TODO: remove after mysql-router-k8s arm64 stable release
227+
@pytest.mark.abort_on_fail
218228
async def test_data_replication(
219229
first_model: Model, second_model: Model, continuous_writes
220230
) -> None:
@@ -225,9 +235,10 @@ async def test_data_replication(
225235
assert results[0] > 1, "No data was written to the database"
226236

227237

228-
@juju3
229-
@pytest.mark.abort_on_fail
230238
@pytest.mark.group(1)
239+
@markers.juju3
240+
@markers.amd64_only # TODO: remove after mysql-router-k8s arm64 stable release
241+
@pytest.mark.abort_on_fail
231242
async def test_standby_promotion(
232243
ops_test: OpsTest, first_model: Model, second_model: Model, continuous_writes
233244
) -> None:
@@ -253,9 +264,10 @@ async def test_standby_promotion(
253264
), "standby not promoted to primary"
254265

255266

256-
@juju3
257-
@pytest.mark.abort_on_fail
258267
@pytest.mark.group(1)
268+
@markers.juju3
269+
@markers.amd64_only # TODO: remove after mysql-router-k8s arm64 stable release
270+
@pytest.mark.abort_on_fail
259271
async def test_failover(ops_test: OpsTest, first_model: Model, second_model: Model) -> None:
260272
"""Test switchover on primary cluster fail."""
261273
logger.info("Freezing mysqld on primary cluster units")
@@ -291,9 +303,10 @@ async def test_failover(ops_test: OpsTest, first_model: Model, second_model: Mod
291303
)
292304

293305

294-
@juju3
295-
@pytest.mark.abort_on_fail
296306
@pytest.mark.group(1)
307+
@markers.juju3
308+
@markers.amd64_only # TODO: remove after mysql-router-k8s arm64 stable release
309+
@pytest.mark.abort_on_fail
297310
async def test_rejoin_invalidated_cluster(
298311
first_model: Model, second_model: Model, continuous_writes
299312
) -> None:
@@ -311,9 +324,10 @@ async def test_rejoin_invalidated_cluster(
311324
assert results[0] > 1, "No data was written to the database"
312325

313326

314-
@juju3
315-
@pytest.mark.abort_on_fail
316327
@pytest.mark.group(1)
328+
@markers.juju3
329+
@markers.amd64_only # TODO: remove after mysql-router-k8s arm64 stable release
330+
@pytest.mark.abort_on_fail
317331
async def test_remove_relation_and_relate(
318332
first_model: Model, second_model: Model, continuous_writes
319333
) -> None:

tests/integration/high_availability/test_upgrade_from_stable.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
import pytest
99
from pytest_operator.plugin import OpsTest
1010

11-
from .. import juju_
11+
from .. import juju_, markers
1212
from ..helpers import (
1313
get_leader_unit,
1414
get_primary_unit,
@@ -31,6 +31,7 @@
3131

3232

3333
@pytest.mark.group(1)
34+
@markers.amd64_only # TODO: remove after arm64 stable release
3435
@pytest.mark.abort_on_fail
3536
async def test_deploy_stable(ops_test: OpsTest) -> None:
3637
"""Simple test to ensure that the mysql and application charms get deployed."""
@@ -61,6 +62,7 @@ async def test_deploy_stable(ops_test: OpsTest) -> None:
6162

6263

6364
@pytest.mark.group(1)
65+
@markers.amd64_only # TODO: remove after arm64 stable release
6466
@pytest.mark.abort_on_fail
6567
async def test_pre_upgrade_check(ops_test: OpsTest) -> None:
6668
"""Test that the pre-upgrade-check action runs successfully."""
@@ -87,6 +89,7 @@ async def test_pre_upgrade_check(ops_test: OpsTest) -> None:
8789

8890

8991
@pytest.mark.group(1)
92+
@markers.amd64_only # TODO: remove after arm64 stable release
9093
@pytest.mark.abort_on_fail
9194
async def test_upgrade_from_stable(ops_test: OpsTest):
9295
"""Test updating from stable channel."""

0 commit comments

Comments (0)