Merged
14 changes: 7 additions & 7 deletions .buildkite/pipeline_perf.py
@@ -18,41 +18,41 @@
perf_test = {
"virtio-block-sync": {
"label": "💿 Virtio Sync Block Performance",
"tests": "integration_tests/performance/test_block_ab.py::test_block_performance -k 'not Async'",
"tests": "integration_tests/performance/test_block.py::test_block_performance -k 'not Async'",
"devtool_opts": "-c 1-10 -m 0",
},
"virtio-block-async": {
"label": "💿 Virtio Async Block Performance",
"tests": "integration_tests/performance/test_block_ab.py::test_block_performance -k Async",
"tests": "integration_tests/performance/test_block.py::test_block_performance -k Async",
"devtool_opts": "-c 1-10 -m 0",
},
"vhost-user-block": {
"label": "💿 vhost-user Block Performance",
"tests": "integration_tests/performance/test_block_ab.py::test_block_vhost_user_performance",
"tests": "integration_tests/performance/test_block.py::test_block_vhost_user_performance",
"devtool_opts": "-c 1-10 -m 0",
"ab_opts": "--noise-threshold 0.1",
},
"network": {
"label": "📠 Network Latency and Throughput",
"tests": "integration_tests/performance/test_network_ab.py",
"tests": "integration_tests/performance/test_network.py",
"devtool_opts": "-c 1-10 -m 0",
# Triggers if delta is > 0.01ms (10µs) or default relative threshold (5%)
# only relevant for latency test, throughput test will always be magnitudes above this anyway
"ab_opts": "--absolute-strength 0.010",
},
"snapshot-latency": {
"label": "📸 Snapshot Latency",
"tests": "integration_tests/performance/test_snapshot_ab.py::test_restore_latency integration_tests/performance/test_snapshot_ab.py::test_post_restore_latency",
"tests": "integration_tests/performance/test_snapshot.py::test_restore_latency integration_tests/performance/test_snapshot.py::test_post_restore_latency integration_tests/performance/test_snapshot.py::test_snapshot_create_latency",
"devtool_opts": "-c 1-12 -m 0",
},
"population-latency": {
"label": "📸 Memory Population Latency",
"tests": "integration_tests/performance/test_snapshot_ab.py::test_population_latency",
"tests": "integration_tests/performance/test_snapshot.py::test_population_latency",
"devtool_opts": "-c 1-12 -m 0",
},
"vsock-throughput": {
"label": "🧦 Vsock Throughput",
"tests": "integration_tests/performance/test_vsock_ab.py",
"tests": "integration_tests/performance/test_vsock.py",
"devtool_opts": "-c 1-10 -m 0",
},
"memory-overhead": {
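For context, each `perf_test` entry pairs a Buildkite step label with the pytest selection and devtool options for that step. Something along the lines of the sketch below turns an entry into a step; the command layout is an assumption for illustration only, not the actual code in pipeline_perf.py.

```python
# Hypothetical sketch of expanding one perf_test entry into a Buildkite step.
# The exact command construction in pipeline_perf.py may differ; `ab_opts`,
# where present, would presumably be forwarded to tools/ab_test.py instead.
def to_step(cfg: dict) -> dict:
    command = f"./tools/devtool -y test {cfg['devtool_opts']} -- {cfg['tests']}"
    return {"label": cfg["label"], "command": command}

steps = [to_step(cfg) for cfg in perf_test.values()]
```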
2 changes: 1 addition & 1 deletion tests/README.md
@@ -162,7 +162,7 @@ BUILDKITE_PULL_REQUEST=true BUILDKITE_PULL_REQUEST_BASE_BRANCH=main ./tools/devt
Firecracker has a special framework for orchestrating long-running A/B-tests
which run outside the pre-PR CI. Instead, these tests are scheduled to run
post-merge. Specific tests, such as our
[snapshot restore latency tests](integration_tests/performance/test_snapshot_ab.py)
[snapshot restore latency tests](integration_tests/performance/test_snapshot.py)
contain no assertions themselves, but rather they emit data series using the
`aws_embedded_metrics` library. When executed by the
[`tools/ab_test.py`](../tools/ab_test.py) orchestration script, these data
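For orientation, a metrics-emitting test of this kind follows roughly the pattern below. This is a minimal sketch assembled from the fixtures visible in this PR; `measure_latency_ms` and the `"latency"` metric name are illustrative placeholders, not code from the repository.

```python
# Minimal sketch of a metrics-emitting A/B test. The `microvm_factory`,
# `guest_kernel_linux_5_10`, `rootfs` and `metrics` fixtures appear in the
# diffs below; `measure_latency_ms` is a hypothetical helper.
def test_example_latency(microvm_factory, guest_kernel_linux_5_10, rootfs, metrics):
    vm = microvm_factory.build(guest_kernel_linux_5_10, rootfs)
    vm.spawn()
    vm.basic_config(vcpu_count=2, mem_size_mib=512)
    vm.start()

    # Dimensions identify the data series that tools/ab_test.py compares
    # between the A and B revisions.
    metrics.set_dimensions(
        {**vm.dimensions, "performance_test": "test_example_latency"}
    )

    for _ in range(10):
        # No assertion here: just emit one data point per iteration.
        metrics.put_metric("latency", measure_latency_ms(vm), "Milliseconds")
```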
8 changes: 7 additions & 1 deletion tests/conftest.py
@@ -34,7 +34,7 @@
from framework import defs, utils
from framework.artifacts import disks, kernel_params
from framework.defs import DEFAULT_BINARY_DIR
from framework.microvm import MicroVMFactory
from framework.microvm import MicroVMFactory, SnapshotType
from framework.properties import global_props
from framework.utils_cpu_templates import (
custom_cpu_templates_params,
@@ -411,6 +411,12 @@ def io_engine(request):
return request.param


@pytest.fixture(params=[SnapshotType.DIFF, SnapshotType.FULL])
def snapshot_type(request):
"""All possible snapshot types"""
return request.param


@pytest.fixture
def results_dir(request, pytestconfig):
"""
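With this fixture in place, a test only needs to accept a `snapshot_type` argument to run once per snapshot flavor, which is why the per-test `@pytest.mark.parametrize` decorators are removed further down. A rough usage sketch, with the test body and fixture combination as illustrative assumptions:

```python
from framework.microvm import SnapshotType

# Illustrative only: any test requesting `snapshot_type` now runs once for
# SnapshotType.DIFF and once for SnapshotType.FULL via the fixture above.
def test_example(microvm_factory, guest_kernel_linux_5_10, rootfs, snapshot_type):
    vm = microvm_factory.build(guest_kernel_linux_5_10, rootfs)
    vm.spawn()
    # DIFF snapshots require dirty-page tracking to be enabled at boot time.
    vm.basic_config(track_dirty_pages=snapshot_type == SnapshotType.DIFF)
    vm.start()
    vm.make_snapshot(snapshot_type)
```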
2 changes: 2 additions & 0 deletions tests/integration_tests/build/__init__.py
@@ -0,0 +1,2 @@
# Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
2 changes: 2 additions & 0 deletions tests/integration_tests/functional/__init__.py
@@ -0,0 +1,2 @@
# Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
2 changes: 0 additions & 2 deletions tests/integration_tests/functional/test_snapshot_basic.py
@@ -111,7 +111,6 @@ def test_snapshot_current_version(uvm_nano):
# - Rootfs: Ubuntu 18.04
# - Microvm: 2vCPU with 512 MB RAM
# TODO: Multiple microvm sizes must be tested in the async pipeline.
@pytest.mark.parametrize("snapshot_type", [SnapshotType.DIFF, SnapshotType.FULL])
@pytest.mark.parametrize("use_snapshot_editor", [False, True])
def test_cycled_snapshot_restore(
bin_vsock_path,
@@ -500,7 +499,6 @@ def test_snapshot_overwrite_self(guest_kernel, rootfs, microvm_factory):
# restored, with a new snapshot of this vm, does not break the VM


@pytest.mark.parametrize("snapshot_type", [SnapshotType.DIFF, SnapshotType.FULL])
def test_vmgenid(guest_kernel_linux_6_1, rootfs, microvm_factory, snapshot_type):
"""
Test VMGenID device upon snapshot resume
2 changes: 2 additions & 0 deletions tests/integration_tests/performance/__init__.py
@@ -0,0 +1,2 @@
# Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
tests/integration_tests/performance/test_snapshot_ab.py → tests/integration_tests/performance/test_snapshot.py
@@ -11,7 +11,7 @@
import pytest

import host_tools.drive as drive_tools
from framework.microvm import HugePagesConfig, Microvm
from framework.microvm import HugePagesConfig, Microvm, SnapshotType

USEC_IN_MSEC = 1000
NS_IN_MSEC = 1_000_000
@@ -251,26 +251,36 @@ def test_population_latency(
raise RuntimeError("UFFD handler did not print population latency after 5s")


@pytest.mark.nonci
def test_snapshot_create_latency(
microvm_factory,
guest_kernel_linux_5_10,
rootfs,
metrics,
snapshot_type,
):
"""Measure the latency of creating a Full snapshot"""

vm = microvm_factory.build(guest_kernel_linux_5_10, rootfs, monitor_memory=False)
vm.spawn()
vm.basic_config(vcpu_count=2, mem_size_mib=512)
vm.basic_config(
vcpu_count=2,
mem_size_mib=512,
track_dirty_pages=snapshot_type == SnapshotType.DIFF,
)
vm.start()
vm.pin_threads(0)

metrics.set_dimensions(
{**vm.dimensions, "performance_test": "test_snapshot_create_latency"}
{
**vm.dimensions,
"performance_test": "test_snapshot_create_latency",
"snapshot_type": snapshot_type.value,
}
)

for _ in range(ITERATIONS):
vm.snapshot_full()
vm.make_snapshot(snapshot_type)
fc_metrics = vm.flush_metrics()

value = fc_metrics["latencies_us"]["full_create_snapshot"] / USEC_IN_MSEC
2 changes: 2 additions & 0 deletions tests/integration_tests/security/__init__.py
@@ -0,0 +1,2 @@
# Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
2 changes: 2 additions & 0 deletions tests/integration_tests/style/__init__.py
@@ -0,0 +1,2 @@
# Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0