
Commit f07ef5c

Merge branch 'main' into ping
2 parents a85b41c + 3c3233c commit f07ef5c

File tree

12 files changed: +136 −69 lines changed


.buildkite/pipeline_cross.py

Lines changed: 1 addition & 1 deletion
@@ -29,7 +29,7 @@
 ]
 instances_aarch64 = ["m7g.metal"]
 commands = [
-    "./tools/devtool -y test --no-build -- -m nonci -n4 integration_tests/functional/test_snapshot_phase1.py",
+    "./tools/devtool -y test --no-build --no-archive -- -m nonci -n4 integration_tests/functional/test_snapshot_phase1.py",
     # punch holes in mem snapshot tiles and tar them so they are preserved in S3
     "find test_results/test_snapshot_phase1 -type f -name mem |xargs -P4 -t -n1 fallocate -d",
     "mv -v test_results/test_snapshot_phase1 snapshot_artifacts",

tests/conftest.py

Lines changed: 21 additions & 7 deletions
@@ -172,7 +172,7 @@ def pytest_runtest_logreport(report):
 
 
 @pytest.fixture()
-def metrics(request):
+def metrics(results_dir, request):
     """Fixture to pass the metrics scope
 
     We use a fixture instead of the @metrics_scope decorator as that conflicts
@@ -188,6 +188,8 @@ def metrics(request):
         metrics_logger.set_property(prop_name, prop_val)
     yield metrics_logger
     metrics_logger.flush()
+    if results_dir:
+        metrics_logger.store_data(results_dir)
 
 
 @pytest.fixture
@@ -392,21 +394,33 @@ def io_engine(request):
 
 
 @pytest.fixture
-def results_dir(request):
+def results_dir(request, pytestconfig):
     """
     Fixture yielding the path to a directory into which the test can dump its results
 
-    Directories are unique per test, and named after the test name. Everything the tests puts
-    into its directory will to be uploaded to S3. Directory will be placed inside defs.TEST_RESULTS_DIR.
+    Directories are unique per test, and their names include test name and test parameters.
+    Everything the tests puts into its directory will to be uploaded to S3.
+    Directory will be placed inside defs.TEST_RESULTS_DIR.
 
     For example
     ```py
-    def test_my_file(results_dir):
+    @pytest.mark.parametrize("p", ["a", "b"])
+    def test_my_file(p, results_dir):
         (results_dir / "output.txt").write_text("Hello World")
     ```
-    will result in `defs.TEST_RESULTS_DIR`/test_my_file/output.txt.
+    will result in:
+    - `defs.TEST_RESULTS_DIR`/test_my_file/test_my_file[a]/output.txt.
+    - `defs.TEST_RESULTS_DIR`/test_my_file/test_my_file[b]/output.txt.
+
+    When this fixture is called with DoctestItem as a request.node
+    during doc tests, it will return None.
     """
-    results_dir = defs.TEST_RESULTS_DIR / request.node.originalname
+    try:
+        report_file = pytestconfig.getoption("--json-report-file")
+        parent = Path(report_file).parent.absolute()
+        results_dir = parent / request.node.originalname / request.node.name
+    except AttributeError:
+        return None
     results_dir.mkdir(parents=True, exist_ok=True)
     return results_dir
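For orientation, here is a minimal sketch of the path logic the reworked `results_dir` fixture now uses. It is a standalone reimplementation, not the fixture itself; the report file path and test names below are illustrative, and the real fixture obtains them from pytest's `--json-report-file` option and `request.node`.

```py
from pathlib import Path


def resolve_results_dir(report_file: str, original_name: str, node_name: str) -> Path:
    """Mirror the fixture: <dir of json report>/<test function>/<parametrized test id>."""
    parent = Path(report_file).parent.absolute()
    results_dir = parent / original_name / node_name
    results_dir.mkdir(parents=True, exist_ok=True)
    return results_dir


# Illustrative values: a parametrized test gets one directory per parameter set.
print(resolve_results_dir("test_results/report.json", "test_my_file", "test_my_file[a]"))
# .../test_results/test_my_file/test_my_file[a]
```

The `if results_dir:` guard added to the `metrics` fixture exists because the fixture can return None (for example when the node is a DoctestItem), in which case there is nowhere to store the collected datapoints.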

tests/framework/ab_test.py

Lines changed: 1 addition & 1 deletion
@@ -160,7 +160,7 @@ def git_ab_test_host_command(
 
 
 def set_did_not_grow_comparator(
-    set_generator: Callable[[CommandReturn], set]
+    set_generator: Callable[[CommandReturn], set],
 ) -> Callable[[CommandReturn, CommandReturn], bool]:
     """Factory function for comparators to use with git_ab_test_command that converts the command output to sets
     (using the given callable) and then checks that the "B" set is a subset of the "A" set

tests/framework/defs.py

Lines changed: 0 additions & 3 deletions
@@ -23,9 +23,6 @@
 # Default test session artifacts path
 LOCAL_BUILD_PATH = FC_WORKSPACE_DIR / "build/"
 
-# Absolute path to the test results folder
-TEST_RESULTS_DIR = FC_WORKSPACE_DIR / "test_results"
-
 DEFAULT_BINARY_DIR = (
     LOCAL_BUILD_PATH
     / "cargo_target"

tests/host_tools/fcmetrics.py

Lines changed: 2 additions & 2 deletions
@@ -3,8 +3,8 @@
 
 
 """Provides:
-- Mechanism to collect and export Firecracker metrics every 60seconds to CloudWatch
-- Utility functions to validate Firecracker metrics format and to validate Firecracker device metrics.
+- Mechanism to collect and export Firecracker metrics every 60seconds to CloudWatch
+- Utility functions to validate Firecracker metrics format and to validate Firecracker device metrics.
 """
 
 import datetime

tests/host_tools/metrics.py

Lines changed: 30 additions & 23 deletions
@@ -46,52 +46,59 @@
 import json
 import os
 import socket
+from pathlib import Path
 from urllib.parse import urlparse
 
 from aws_embedded_metrics.constants import DEFAULT_NAMESPACE
 from aws_embedded_metrics.logger.metrics_logger_factory import create_metrics_logger
 
 
-class MetricsWrapperDummy:
-    """Send metrics to /dev/null"""
+class MetricsWrapper:
+    """A convenient metrics logger"""
+
+    def __init__(self, logger):
+        self.data = {}
+        self.logger = logger
 
     def set_dimensions(self, *args, **kwargs):
         """Set dimensions"""
+        if self.logger:
+            self.logger.set_dimensions(*args, **kwargs)
 
-    def put_metric(self, *args, **kwargs):
+    def put_metric(self, name, data, unit):
         """Put a datapoint with given dimensions"""
+        if name not in self.data:
+            self.data[name] = {"unit": unit, "values": []}
+        self.data[name]["values"].append(data)
+
+        if self.logger:
+            self.logger.put_metric(name, data, unit)
 
     def set_property(self, *args, **kwargs):
         """Set a property"""
+        if self.logger:
+            self.logger.set_property(*args, **kwargs)
 
     def flush(self):
         """Flush any remaining metrics"""
+        if self.logger:
+            asyncio.run(self.logger.flush())
 
-
-class MetricsWrapper:
-    """A convenient metrics logger"""
-
-    def __init__(self, logger):
-        self.logger = logger
-
-    def __getattr__(self, attr):
-        """Dispatch methods to logger instance"""
-        if attr not in self.__dict__:
-            return getattr(self.logger, attr)
-        return getattr(self, attr)
-
-    def flush(self):
-        """Flush any remaining metrics"""
-        asyncio.run(self.logger.flush())
+    def store_data(self, dir_path):
+        """Store data into a file"""
+        metrics_path = Path(dir_path / "metrics.json")
+        with open(metrics_path, "w", encoding="utf-8") as f:
+            json.dump(self.data, f)
 
 
 def get_metrics_logger():
     """Get a new metrics logger object"""
     # if no metrics namespace, don't output metrics
-    if "AWS_EMF_NAMESPACE" not in os.environ:
-        return MetricsWrapperDummy()
-    logger = create_metrics_logger()
-    logger.reset_dimensions(False)
+    if "AWS_EMF_NAMESPACE" in os.environ:
+        logger = create_metrics_logger()
+        logger.reset_dimensions(False)
+    else:
+        logger = None
     return MetricsWrapper(logger)
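A minimal usage sketch of the reworked wrapper, assuming `AWS_EMF_NAMESPACE` is unset so no CloudWatch logger is created. The metric name and values are made up, and the import path assumes the `tests/` directory is on `sys.path`, as it is when running under the test framework.

```py
import json
from pathlib import Path

from host_tools.metrics import get_metrics_logger  # assumed import path

metrics = get_metrics_logger()  # wraps logger=None when AWS_EMF_NAMESPACE is unset
metrics.put_metric("bw_read", 1024, "Kilobytes/Second")
metrics.put_metric("bw_read", 2048, "Kilobytes/Second")
metrics.flush()  # no-op without a logger

out_dir = Path("/tmp/example_results")
out_dir.mkdir(parents=True, exist_ok=True)
metrics.store_data(out_dir)  # writes out_dir/metrics.json

print(json.loads((out_dir / "metrics.json").read_text(encoding="utf-8")))
# {'bw_read': {'unit': 'Kilobytes/Second', 'values': [1024, 2048]}}
```

Either way the datapoints are accumulated in memory, so the JSON written next to the other test artifacts mirrors what would have been emitted to CloudWatch.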

tests/integration_tests/build/test_seccomp_no_redundant_rules.py

Lines changed: 2 additions & 1 deletion
@@ -1,7 +1,8 @@
 # Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 # SPDX-License-Identifier: Apache-2.0
 """A test that fails if it can definitely prove a seccomp rule redundant
-(although it passing does not guarantee the converse, that all rules are definitely needed)."""
+(although it passing does not guarantee the converse, that all rules are definitely needed).
+"""
 import platform
 from pathlib import Path
 

tests/integration_tests/performance/test_block_ab.py

Lines changed: 24 additions & 22 deletions
@@ -5,7 +5,6 @@
 import concurrent
 import glob
 import os
-import shutil
 from pathlib import Path
 
 import pytest
@@ -45,7 +44,7 @@ def prepare_microvm_for_test(microvm):
     check_output("echo 3 > /proc/sys/vm/drop_caches")
 
 
-def run_fio(microvm, mode, block_size, fio_engine="libaio"):
+def run_fio(microvm, mode, block_size, test_output_dir, fio_engine="libaio"):
     """Run a fio test in the specified mode with block size bs."""
     cmd = (
         CmdBuilder("fio")
@@ -71,16 +70,11 @@ def run_fio(microvm, mode, block_size, fio_engine="libaio"):
         .with_arg(f"--write_bw_log={mode}")
         .with_arg(f"--write_lat_log={mode}")
         .with_arg("--log_avg_msec=1000")
+        .with_arg("--output-format=json+")
+        .with_arg("--output=/tmp/fio.json")
         .build()
     )
 
-    logs_path = Path(microvm.jailer.chroot_base_with_id()) / "fio_output"
-
-    if logs_path.is_dir():
-        shutil.rmtree(logs_path)
-
-    logs_path.mkdir()
-
     prepare_microvm_for_test(microvm)
 
     # Start the CPU load monitor.
@@ -97,17 +91,23 @@ def run_fio(microvm, mode, block_size, fio_engine="libaio"):
     assert rc == 0, stderr
     assert stderr == ""
 
-    microvm.ssh.scp_get("/tmp/*.log", logs_path)
-    microvm.ssh.check_output("rm /tmp/*.log")
+    microvm.ssh.scp_get("/tmp/fio.json", test_output_dir)
+    microvm.ssh.scp_get("/tmp/*.log", test_output_dir)
 
-    return logs_path, cpu_load_future.result()
+    return cpu_load_future.result()
 
 
-def process_fio_log_files(logs_glob):
-    """Parses all fio log files matching the given glob and yields tuples of same-timestamp read and write metrics"""
+def process_fio_log_files(root_dir, logs_glob):
+    """
+    Parses all fio log files in the root_dir matching the given glob and
+    yields tuples of same-timestamp read and write metrics
+    """
+    # We specify `root_dir` for `glob.glob` because otherwise it will
+    # struggle with directory with names like:
+    # test_block_performance[vmlinux-5.10.233-Sync-bs4096-randread-1vcpu]
     data = [
-        Path(pathname).read_text("UTF-8").splitlines()
-        for pathname in glob.glob(logs_glob)
+        Path(root_dir / pathname).read_text("UTF-8").splitlines()
+        for pathname in glob.glob(logs_glob, root_dir=root_dir)
    ]
 
     assert data, "no log files found!"
@@ -134,13 +134,13 @@ def process_fio_log_files(logs_glob):
 
 def emit_fio_metrics(logs_dir, metrics):
     """Parses the fio logs in `{logs_dir}/*_[clat|bw].*.log and emits their contents as CloudWatch metrics"""
-    for bw_read, bw_write in process_fio_log_files(f"{logs_dir}/*_bw.*.log"):
+    for bw_read, bw_write in process_fio_log_files(logs_dir, "*_bw.*.log"):
         if bw_read:
             metrics.put_metric("bw_read", sum(bw_read), "Kilobytes/Second")
         if bw_write:
             metrics.put_metric("bw_write", sum(bw_write), "Kilobytes/Second")
 
-    for lat_read, lat_write in process_fio_log_files(f"{logs_dir}/*_clat.*.log"):
+    for lat_read, lat_write in process_fio_log_files(logs_dir, "*_clat.*.log"):
         # latency values in fio logs are in nanoseconds, but cloudwatch only supports
         # microseconds as the more granular unit, so need to divide by 1000.
         for value in lat_read:
@@ -164,6 +164,7 @@ def test_block_performance(
     fio_engine,
     io_engine,
     metrics,
+    results_dir,
 ):
     """
     Execute block device emulation benchmarking scenarios.
@@ -192,9 +193,9 @@
 
     vm.pin_threads(0)
 
-    logs_dir, cpu_util = run_fio(vm, fio_mode, fio_block_size, fio_engine)
+    cpu_util = run_fio(vm, fio_mode, fio_block_size, results_dir, fio_engine)
 
-    emit_fio_metrics(logs_dir, metrics)
+    emit_fio_metrics(results_dir, metrics)
 
     for thread_name, values in cpu_util.items():
         for value in values:
@@ -213,6 +214,7 @@ def test_block_vhost_user_performance(
     fio_mode,
     fio_block_size,
     metrics,
+    results_dir,
 ):
     """
     Execute block device emulation benchmarking scenarios.
@@ -242,9 +244,9 @@
     next_cpu = vm.pin_threads(0)
     vm.disks_vhost_user["scratch"].pin(next_cpu)
 
-    logs_dir, cpu_util = run_fio(vm, fio_mode, fio_block_size)
+    cpu_util = run_fio(vm, fio_mode, fio_block_size, results_dir)
 
-    emit_fio_metrics(logs_dir, metrics)
+    emit_fio_metrics(results_dir, metrics)
 
     for thread_name, values in cpu_util.items():
         for value in values:
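The `root_dir` comment in `process_fio_log_files` is about glob semantics: square brackets in a parametrized test directory name are parsed as a character class, so a pattern that embeds the directory matches nothing. A small self-contained demonstration follows; the directory name and log file are fabricated for illustration, and `glob.glob(..., root_dir=...)` requires Python 3.10+.

```py
import glob
from pathlib import Path

root = Path("/tmp/demo/test_block_performance[bs4096]")
root.mkdir(parents=True, exist_ok=True)
(root / "randread_bw.1.log").write_text("1000, 2048, 0, 0\n", encoding="utf-8")

# Embedding the directory in the pattern: "[bs4096]" is read as a character
# class matching a single character, so the literal directory never matches.
print(glob.glob(f"{root}/*_bw.*.log"))         # []

# Keeping the pattern relative and anchoring it with root_dir avoids the issue.
print(glob.glob("*_bw.*.log", root_dir=root))  # ['randread_bw.1.log']
```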

tests/integration_tests/performance/test_network_ab.py

Lines changed: 12 additions & 0 deletions
@@ -2,7 +2,9 @@
 # SPDX-License-Identifier: Apache-2.0
 """Tests the network latency of a Firecracker guest."""
 
+import json
 import re
+from pathlib import Path
 
 import pytest
 
@@ -95,6 +97,7 @@ def test_network_tcp_throughput(
     payload_length,
     mode,
     metrics,
+    results_dir,
 ):
     """
     Iperf between guest and host in both directions for TCP workload.
@@ -133,4 +136,13 @@
     )
     data = test.run_test(network_microvm.vcpus_count + 2)
 
+    for i, g2h in enumerate(data["g2h"]):
+        Path(results_dir / f"g2h_{i}.json").write_text(
+            json.dumps(g2h), encoding="utf-8"
+        )
+    for i, h2g in enumerate(data["h2g"]):
+        Path(results_dir / f"h2g_{i}.json").write_text(
+            json.dumps(h2g), encoding="utf-8"
+        )
+
     emit_iperf3_metrics(metrics, data, warmup_sec)
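With this change each per-stream report ends up as `g2h_<i>.json` / `h2g_<i>.json` inside the test's results directory. A sketch of reading them back offline, assuming each file is a raw iperf3 JSON report (the directory path below is hypothetical, and `end.sum_received.bits_per_second` is iperf3's own schema, not something defined by this diff):

```py
import json
from pathlib import Path

# Hypothetical results directory produced by the results_dir fixture.
results_dir = Path("test_results/test_network_tcp_throughput/test_network_tcp_throughput[...]")

for path in sorted(results_dir.glob("g2h_*.json")):
    report = json.loads(path.read_text(encoding="utf-8"))
    gbps = report["end"]["sum_received"]["bits_per_second"] / 1e9  # assumes raw iperf3 output
    print(f"{path.name}: {gbps:.2f} Gbps")
```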

tests/integration_tests/performance/test_vsock_ab.py

Lines changed: 19 additions & 1 deletion
@@ -2,7 +2,9 @@
 # SPDX-License-Identifier: Apache-2.0
 """Tests the VSOCK throughput of Firecracker uVMs."""
 
+import json
 import os
+from pathlib import Path
 
 import pytest
 
@@ -73,7 +75,14 @@ def guest_command(self, port_offset):
 @pytest.mark.parametrize("payload_length", ["64K", "1024K"], ids=["p64K", "p1024K"])
 @pytest.mark.parametrize("mode", ["g2h", "h2g", "bd"])
 def test_vsock_throughput(
-    microvm_factory, guest_kernel_acpi, rootfs, vcpus, payload_length, mode, metrics
+    microvm_factory,
+    guest_kernel_acpi,
+    rootfs,
+    vcpus,
+    payload_length,
+    mode,
+    metrics,
+    results_dir,
 ):
     """
     Test vsock throughput for multiple vm configurations.
@@ -107,4 +116,13 @@ def test_vsock_throughput(
     test = VsockIPerf3Test(vm, mode, payload_length)
     data = test.run_test(vm.vcpus_count + 2)
 
+    for i, g2h in enumerate(data["g2h"]):
+        Path(results_dir / f"g2h_{i}.json").write_text(
+            json.dumps(g2h), encoding="utf-8"
+        )
+    for i, h2g in enumerate(data["h2g"]):
+        Path(results_dir / f"h2g_{i}.json").write_text(
+            json.dumps(h2g), encoding="utf-8"
+        )
+
     emit_iperf3_metrics(metrics, data, VsockIPerf3Test.WARMUP_SEC)
