From 8029b8a68d017717ae72accaf49da5e4f0700f9e Mon Sep 17 00:00:00 2001 From: Egor Lazarchuk Date: Tue, 9 Dec 2025 17:33:56 +0000 Subject: [PATCH 01/11] chore: remove redundant snapshot version check for old FC versions The snapshot version check was iterating over last several FC versions pulled from S3, but it was unnecessary since there were no cross FC checks. Remove checks of old FC versions and only test the current version. Signed-off-by: Egor Lazarchuk --- tests/framework/artifacts.py | 102 ------------------ .../integration_tests/functional/conftest.py | 18 ---- .../functional/test_cmd_line_parameters.py | 30 ++---- 3 files changed, 11 insertions(+), 139 deletions(-) delete mode 100644 tests/integration_tests/functional/conftest.py diff --git a/tests/framework/artifacts.py b/tests/framework/artifacts.py index 701f26cdd40..4e1a980a4b1 100644 --- a/tests/framework/artifacts.py +++ b/tests/framework/artifacts.py @@ -4,16 +4,12 @@ """Define classes for interacting with CI artifacts""" import re -from dataclasses import dataclass from pathlib import Path from typing import Iterator import pytest from framework.defs import ARTIFACT_DIR -from framework.utils import check_output, get_firecracker_version_from_toml -from framework.with_filelock import with_filelock -from host_tools.cargo_build import get_binary def select_supported_kernels(): @@ -55,101 +51,3 @@ def kernel_params( """Return supported kernels""" for kernel in select(glob, artifact_dir): yield pytest.param(kernel, id=kernel.name) - - -@dataclass(frozen=True, repr=True) -class FirecrackerArtifact: - """Utility class for Firecracker binary artifacts.""" - - path: Path - - @property - def name(self): - """Get the Firecracker name.""" - return self.path.name - - @property - def jailer(self): - """Get the jailer with the same version.""" - return self.path.with_name(f"jailer-v{self.version}") - - @property - def version(self): - """Return Firecracker's version: `X.Y.Z-prerelase`.""" - # Get the filename, 
split on the first '-' and trim the leading 'v'. - # sample: firecracker-v1.2.0-alpha - return self.path.name.split("-", 1)[1][1:] - - @property - def version_tuple(self): - """Return the artifact's version as a tuple `(X, Y, Z)`.""" - return tuple(int(x) for x in self.version.split(".")) - - @property - def snapshot_version_tuple(self): - """Return the artifact's snapshot version as a tuple: `X.Y.0`.""" - - # Starting from Firecracker v1.7.0, snapshots have their own version that is - # independent of Firecracker versions. For these Firecracker versions, use - # the --snapshot-version Firecracker flag, to figure out which snapshot version - # it supports. - - return ( - check_output([self.path, "--snapshot-version"]) - .stdout.strip() - .split("\n")[0] - .split(".") - ) - - @property - def snapshot_version(self): - """Return the artifact's snapshot version: `X.Y.0`. - - Due to how Firecracker maps release versions to snapshot versions, we - have to request the minor version instead of the actual version. - """ - return ".".join(str(x) for x in self.snapshot_version_tuple) - - -@with_filelock -def current_release(version): - """Massage this working copy Firecracker binary to look like a normal - release, so it can run the same tests. 
- """ - binaries = [] - for binary in ["firecracker", "jailer"]: - bin_path1 = get_binary(binary) - bin_path2 = bin_path1.with_name(f"{binary}-v{version}") - if not bin_path2.exists(): - bin_path2.unlink(missing_ok=True) - bin_path2.hardlink_to(bin_path1) - binaries.append(bin_path2) - return binaries - - -def working_version_as_artifact(): - """ - Return working copy of Firecracker as a release artifact - """ - cargo_version = get_firecracker_version_from_toml() - return FirecrackerArtifact(current_release(str(cargo_version))[0]) - - -def firecracker_artifacts(): - """Return all supported firecracker binaries.""" - cargo_version = get_firecracker_version_from_toml() - # until the next minor version (but *not* including) - max_version = (cargo_version.major, cargo_version.minor + 1, 0) - prefix = "firecracker/firecracker-*" - for firecracker in sorted(ARTIFACT_DIR.glob(prefix)): - match = re.match(r"firecracker-v(\d+)\.(\d+)\.(\d+)", firecracker.name) - if not match: - continue - fc = FirecrackerArtifact(firecracker) - version = fc.version_tuple - if version >= max_version: - continue - yield pytest.param(fc, id=fc.name) - - fc = working_version_as_artifact() - yield pytest.param(fc, id=fc.name) diff --git a/tests/integration_tests/functional/conftest.py b/tests/integration_tests/functional/conftest.py deleted file mode 100644 index cb4426a5982..00000000000 --- a/tests/integration_tests/functional/conftest.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# SPDX-License-Identifier: Apache-2.0 - -"""Imported by pytest when running tests in this directory""" - -import pytest - -from framework.artifacts import firecracker_artifacts - - -# This fixture forces a Firecracker build, even if it doesn't get used. -# By placing it here instead of the top-level conftest.py we skip the build. 
-@pytest.fixture(params=firecracker_artifacts()) -def firecracker_release(request, record_property): - """Return all supported firecracker binaries.""" - firecracker = request.param - record_property("firecracker_release", firecracker.name) - return firecracker diff --git a/tests/integration_tests/functional/test_cmd_line_parameters.py b/tests/integration_tests/functional/test_cmd_line_parameters.py index 59eebf5d42e..acacea7e339 100644 --- a/tests/integration_tests/functional/test_cmd_line_parameters.py +++ b/tests/integration_tests/functional/test_cmd_line_parameters.py @@ -11,9 +11,7 @@ from host_tools.fcmetrics import validate_fc_metrics -def test_describe_snapshot_all_versions( - microvm_factory, guest_kernel, rootfs, firecracker_release -): +def test_describe_snapshot(uvm_plain): """ Test `--describe-snapshot` correctness for all snapshot versions. @@ -21,29 +19,23 @@ def test_describe_snapshot_all_versions( snapshot state file. """ - target_version = firecracker_release.snapshot_version - vm = microvm_factory.build( - guest_kernel, - rootfs, - fc_binary_path=firecracker_release.path, - jailer_binary_path=firecracker_release.jailer, - ) - # FIXME: Once only FC versions >= 1.12 are supported, drop log_level="warn" - vm.spawn(log_level="warn", serial_out_path=None) + vm = uvm_plain + fc_binary = vm.fc_binary_path + + cmd = [fc_binary, "--snapshot-version"] + snap_version_tuple = check_output(cmd).stdout.strip().split("\n")[0].split(".") + snap_version = ".".join(str(x) for x in snap_version_tuple) + + vm.spawn() vm.basic_config(track_dirty_pages=True) vm.start() snapshot = vm.snapshot_diff() - print("========== Firecracker create snapshot log ==========") - print(vm.log_data) vm.kill() - # Fetch Firecracker binary - fc_binary = microvm_factory.fc_binary_path - # Verify the output of `--describe-snapshot` command line parameter - cmd = [fc_binary] + ["--describe-snapshot", snapshot.vmstate] + cmd = [fc_binary, "--describe-snapshot", snapshot.vmstate] _, stdout, 
stderr = check_output(cmd) assert stderr == "" - assert target_version in stdout + assert snap_version in stdout def test_cli_metrics_path(uvm_plain): From bcd44fa8d3865ec41307caf87b964fe6f3f300d2 Mon Sep 17 00:00:00 2001 From: Egor Lazarchuk Date: Mon, 8 Dec 2025 14:48:46 +0000 Subject: [PATCH 02/11] feat(devtool): pull newest test artifacts Instead of using fixed artifact path for a current FC version pull newest artifacts from S3. This way we can update them independently while keeping old versions around. Unfortunately there is no simple way to query newest added directory in S3, so do the next best thing of looking through all files in all directories and determining the `newest` directory be the `LastModified` status of files inside. Signed-off-by: Egor Lazarchuk --- tools/devtool | 27 ++++++++++++++++++++++++--- 1 file changed, 24 insertions(+), 3 deletions(-) diff --git a/tools/devtool b/tools/devtool index 9fc5ae441b8..2e551187f1b 100755 --- a/tools/devtool +++ b/tools/devtool @@ -133,6 +133,28 @@ TARGET_PREFIX="$(uname -m)-unknown-linux-" # Container path to directory where we store built CI artifacts. CTR_CI_ARTIFACTS_PATH="${CTR_FC_ROOT_DIR}/resources/$(uname -m)" +DEFAULT_ARTIFACTS_S3_BUCKET=s3://spec.ccfc.min/firecracker-ci + +# Query default S3 bucket with artifacts and return the most recient path +get_newest_s3_artifacts() { + local bucket="spec.ccfc.min" + local base_prefix="firecracker-ci/" + + # Query all files in the `firecracker-ci` directory, sort them by the `LastModified` date + # and return the first one. + # Reasons for such complex operation: + # - since S3 does not store `LastModified` info for directories, the next best approach of + # determining the `newest` directory is to check which directory contains the most recently + # modified file + # - also awscli is paginated, so the command bellow can return multiple values, one for each page. 
+ # The very first entry though should be the correct one + # - the end of the command filters out directory out of the output + local newest_dir=$(aws s3api list-objects-v2 --bucket "$bucket" --prefix "$base_prefix" --no-sign-request --query 'reverse(sort_by(Contents, &LastModified))[0].Key' --output text | head -1 | sed "s|^$base_prefix||" | cut -d'/' -f1) + + echo "$DEFAULT_ARTIFACTS_S3_BUCKET/$newest_dir" +} + + # Check if Docker is available and exit if it's not. # Upon returning from this call, the caller can be certain Docker is available. # @@ -571,9 +593,8 @@ ensure_ci_artifacts() { fi # Fetch all the artifacts so they are local - say "Fetching CI artifacts from S3" - FC_VERSION=$(cmd_sh "cd src/firecracker/src; cargo pkgid | cut -d# -f2 | cut -d. -f1-2") - S3_URL=s3://spec.ccfc.min/firecracker-ci/v$FC_VERSION/$(uname -m) + S3_URL=$(get_newest_s3_artifacts)/$(uname -m) + say "Fetching CI artifacts from S3: " $S3_URL ARTIFACTS=$MICROVM_IMAGES_DIR/$(uname -m) if [ ! -d "$ARTIFACTS" ]; then mkdir -pv $ARTIFACTS From 83ca13e172dbeba8080976351b777f6f78eaad68 Mon Sep 17 00:00:00 2001 From: Egor Lazarchuk Date: Fri, 12 Dec 2025 14:01:53 +0000 Subject: [PATCH 03/11] feat: update the way test artifacts are downloaded and used The old setup used `build/img` as the base directory for artifacts pulled from S3. In S3 the artifacts directory contains 1 subdirectory per architecture (x86_64 and aarch64). This was causing final local artifacts path to be `build/img/x86_64` or `build/img/aarch64`. This commit changes the structure a to have a separate `build/artifacts` directory with subdirectories containing different versions of artifacts. The path to currently used artifacts will be placed in `build/current_artifacts` file. This make is easy to switch between multiple versions of artifacts without a need to delete/download them. This also opens a door for A/B testing of artifacts. 
Signed-off-by: Egor Lazarchuk --- tests/README.md | 6 +++--- tests/conftest.py | 5 ++--- tests/framework/defs.py | 13 ++++++++----- tools/devtool | 37 +++++++++++++++++++++++++++---------- tools/setup-ci-artifacts.sh | 2 +- tools/test.sh | 2 +- 6 files changed, 42 insertions(+), 23 deletions(-) diff --git a/tests/README.md b/tests/README.md index ae1a2d7756d..c4624448613 100644 --- a/tests/README.md +++ b/tests/README.md @@ -452,9 +452,9 @@ there. `pytest` will bring them into scope for all your tests. `Q4:` *I want to use more/other microvm test images, but I don't want to add them to the common s3 bucket.*\ -`A4:` Add your custom images to the `build/img` subdirectory in the Firecracker -source tree. This directory is bind-mounted in the container and used as a local -image cache. +`A4:` Add your custom images to the `build/artifacts` subdirectory in the +Firecracker source tree. This directory is bind-mounted in the container and +used as a local image cache. `Q5:` *How can I get live logger output from the tests?*\ `A5:` Accessing **pytest.ini** will allow you to modify logger settings. 
diff --git a/tests/conftest.py b/tests/conftest.py index 7c777eb6abd..00000ce05ee 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -22,7 +22,6 @@ import inspect import json import os -import platform import shutil import sys import tempfile @@ -33,7 +32,7 @@ import host_tools.cargo_build as build_tools from framework import defs, utils from framework.artifacts import disks, kernel_params -from framework.defs import DEFAULT_BINARY_DIR +from framework.defs import DEFAULT_BINARY_DIR, ARTIFACT_DIR from framework.microvm import HugePagesConfig, MicroVMFactory, SnapshotType from framework.properties import global_props from framework.utils_cpu_templates import ( @@ -399,7 +398,7 @@ def microvm_factory(request, record_property, results_dir, netns_factory): uvm_data.joinpath("host-dmesg.log").write_text( utils.run_cmd(["dmesg", "-dPx"]).stdout ) - shutil.copy(f"/firecracker/build/img/{platform.machine()}/id_rsa", uvm_data) + shutil.copy(ARTIFACT_DIR / "id_rsa", uvm_data) if Path(uvm.screen_log).exists(): shutil.copy(uvm.screen_log, uvm_data) diff --git a/tests/framework/defs.py b/tests/framework/defs.py index 827c9ee3caa..0a0e159c0e1 100644 --- a/tests/framework/defs.py +++ b/tests/framework/defs.py @@ -32,10 +32,13 @@ SUPPORTED_HOST_KERNELS = ["5.10", "6.1"] -IMG_DIR = Path(DEFAULT_TEST_SESSION_ROOT_PATH) / "img" +ARTIFACT_DIR = Path(DEFAULT_TEST_SESSION_ROOT_PATH) / "current_artifacts" # fall-back to the local directory -if not IMG_DIR.exists(): - IMG_DIR = LOCAL_BUILD_PATH / "img" - -ARTIFACT_DIR = IMG_DIR / platform.machine() +if not ARTIFACT_DIR.exists(): + current_artifacts_dir = ( + open(Path(LOCAL_BUILD_PATH) / "current_artifacts", "r", encoding="utf-8") + .read() + .strip() + ) + ARTIFACT_DIR = LOCAL_BUILD_PATH / current_artifacts_dir diff --git a/tools/devtool b/tools/devtool index 2e551187f1b..0469a3ef2d5 100755 --- a/tools/devtool +++ b/tools/devtool @@ -116,7 +116,11 @@ CTR_TEST_RESULTS_DIR="${CTR_FC_ROOT_DIR}/test_results" 
CTR_CARGO_TARGET_DIR="$CTR_FC_BUILD_DIR/cargo_target" # Path to the microVM images cache dir -MICROVM_IMAGES_DIR="build/img" +LOCAL_ARTIFACTS_DIR="build/artifacts" + +# File with a single line specifing the name of the +# currently used artifacts +LOCAL_ARTIFACTS_CURRENT_DIR_FILE="build/current_artifacts" # Full path to the public key mapping on the guest PUB_KEY_PATH=/root/.ssh/id_rsa.pub @@ -154,6 +158,13 @@ get_newest_s3_artifacts() { echo "$DEFAULT_ARTIFACTS_S3_BUCKET/$newest_dir" } +# Function to return local path to artifacts. Accepts the url from function above +# as an argument. +get_local_artifacts_path() { + local path=$1 + echo $LOCAL_ARTIFACTS_DIR/"${path//\//-}" +} + # Check if Docker is available and exit if it's not. # Upon returning from this call, the caller can be certain Docker is available. @@ -581,7 +592,7 @@ cmd_distclean() { cmd_download_ci_artifacts() { if [ "$1" = "--force" ]; then - rm -rf $FC_BUILD_DIR/img + rm -rf $LOCAL_ARTIFACTS_DIR fi ensure_ci_artifacts @@ -593,15 +604,21 @@ ensure_ci_artifacts() { fi # Fetch all the artifacts so they are local - S3_URL=$(get_newest_s3_artifacts)/$(uname -m) - say "Fetching CI artifacts from S3: " $S3_URL - ARTIFACTS=$MICROVM_IMAGES_DIR/$(uname -m) - if [ ! -d "$ARTIFACTS" ]; then - mkdir -pv $ARTIFACTS - aws s3 sync --no-sign-request "$S3_URL" "$ARTIFACTS" - ok_or_die "Failed to download CI artifacts using awscli!" - cmd_sh "./tools/setup-ci-artifacts.sh" + local artifacts_s3_url=$(get_newest_s3_artifacts) + local artifacts_s3_url_arch=$artifacts_s3_url/$(uname -m) + local artifacts_local_path=$(get_local_artifacts_path $artifacts_s3_url)/$(uname -m) + + if [ ! -d "$artifacts_local_path" ]; then + say "Fetching artifacts from S3: " $artifacts_s3_url_arch " into: " $artifacts_local_path + mkdir -pv $artifacts_local_path + aws s3 sync --no-sign-request "$artifacts_s3_url_arch" "$artifacts_local_path" + ok_or_die "Failed to download artifacts using awscli!" 
+ cmd_sh "./tools/setup-ci-artifacts.sh" $artifacts_local_path + else + say "Found existing artifacts " $artifacts_s3_url_arch " at: " $artifacts_local_path fi + + echo $artifacts_local_path > $LOCAL_ARTIFACTS_CURRENT_DIR_FILE } apply_linux_61_tweaks() { diff --git a/tools/setup-ci-artifacts.sh b/tools/setup-ci-artifacts.sh index 10fded08787..bddcafc83c2 100755 --- a/tools/setup-ci-artifacts.sh +++ b/tools/setup-ci-artifacts.sh @@ -9,7 +9,7 @@ TOOLS_DIR=$(dirname $0) source "$TOOLS_DIR/functions" say "Setup CI artifacts" -cd build/img/$(uname -m) +cd $1 say "Fix executable permissions" find "firecracker" -type f |xargs chmod -c 755 diff --git a/tools/test.sh b/tools/test.sh index 0bf67a65666..867576e5069 100755 --- a/tools/test.sh +++ b/tools/test.sh @@ -32,7 +32,7 @@ if [ -f $CGROUP/cgroup.controllers -a -e $CGROUP/cgroup.type ]; then fi say "Copy CI artifacts to /srv, so hardlinks work" -cp -ruvf build/img /srv +cp -ruvfL $(cat build/current_artifacts) /srv/current_artifacts cd tests export PYTEST_ADDOPTS="${PYTEST_ADDOPTS:-} --pdbcls=IPython.terminal.debugger:TerminalPdb" From 928ac6e5f4964a3541b192c52d299742dc30c78b Mon Sep 17 00:00:00 2001 From: Egor Lazarchuk Date: Thu, 18 Dec 2025 16:16:25 +0000 Subject: [PATCH 04/11] refactor: split downloading and setting current artifacts Downloading artifacts is a separate step that should not mess with currently selected artifacts. 
Signed-off-by: Egor Lazarchuk --- tools/devtool | 38 ++++++++++++++++++++++++-------------- 1 file changed, 24 insertions(+), 14 deletions(-) diff --git a/tools/devtool b/tools/devtool index 0469a3ef2d5..6da8c1a0c0d 100755 --- a/tools/devtool +++ b/tools/devtool @@ -594,11 +594,20 @@ cmd_download_ci_artifacts() { if [ "$1" = "--force" ]; then rm -rf $LOCAL_ARTIFACTS_DIR fi + download_ci_artifacts +} - ensure_ci_artifacts +ensure_current_artifacts_are_set_up() { + if [ -f $LOCAL_ARTIFACTS_CURRENT_DIR_FILE ]; then + say "Current artifacts already setup at " $(cat $LOCAL_ARTIFACTS_CURRENT_DIR_FILE) + else + download_ci_artifacts + echo $LOCAL_ARTIFACTS_PATH > $LOCAL_ARTIFACTS_CURRENT_DIR_FILE + say "Current artifacts downloaded and setup at " $LOCAL_ARTIFACTS_PATH + fi } -ensure_ci_artifacts() { +download_ci_artifacts() { if ! command -v aws >/dev/null; then die "AWS CLI not installed, which is required for downloading artifacts for integration tests." fi @@ -606,19 +615,19 @@ ensure_ci_artifacts() { # Fetch all the artifacts so they are local local artifacts_s3_url=$(get_newest_s3_artifacts) local artifacts_s3_url_arch=$artifacts_s3_url/$(uname -m) - local artifacts_local_path=$(get_local_artifacts_path $artifacts_s3_url)/$(uname -m) + local local_artifacts_path=$(get_local_artifacts_path $artifacts_s3_url)/$(uname -m) - if [ ! -d "$artifacts_local_path" ]; then - say "Fetching artifacts from S3: " $artifacts_s3_url_arch " into: " $artifacts_local_path - mkdir -pv $artifacts_local_path - aws s3 sync --no-sign-request "$artifacts_s3_url_arch" "$artifacts_local_path" + if [ ! -d "$local_artifacts_path" ]; then + say "Fetching artifacts from S3: " $artifacts_s3_url_arch " into: " $local_artifacts_path + mkdir -pv $local_artifacts_path + aws s3 sync --no-sign-request "$artifacts_s3_url_arch" "$local_artifacts_path" ok_or_die "Failed to download artifacts using awscli!" 
- cmd_sh "./tools/setup-ci-artifacts.sh" $artifacts_local_path + cmd_sh "./tools/setup-ci-artifacts.sh" $local_artifacts_path else - say "Found existing artifacts " $artifacts_s3_url_arch " at: " $artifacts_local_path + say "Found existing artifacts " $artifacts_s3_url_arch " at: " $local_artifacts_path fi - echo $artifacts_local_path > $LOCAL_ARTIFACTS_CURRENT_DIR_FILE + LOCAL_ARTIFACTS_PATH=$local_artifacts_path } apply_linux_61_tweaks() { @@ -763,7 +772,8 @@ cmd_test() { [ $do_kvm_check != 0 ] && ensure_kvm ensure_devctr ensure_build_dir - ensure_ci_artifacts + ensure_current_artifacts_are_set_up + if [ $do_build != 0 ]; then cmd_build --release if [ -n "$BUILDKITE_PULL_REQUEST_BASE_BRANCH" ]; then @@ -944,7 +954,7 @@ cmd_sh() { cmd_sandbox() { cmd_build --release - ensure_ci_artifacts + ensure_current_artifacts_are_set_up cmd_sh "tmux new env PYTEST_ADDOPTS=--pdbcls=IPython.terminal.debugger:TerminalPdb PYTHONPATH=tests IPYTHONDIR=\$PWD/.ipython ipython -i ./tools/sandbox.py $@" cmd_fix_perms ".ipython" } @@ -966,12 +976,12 @@ cmd_sandbox_native() { pip3.11 install ipython requests requests_unixsocket2 psutil tenacity filelock pip3.11 install jsonschema aws_embedded_metrics pip3.11 install packaging pytest - ensure_ci_artifacts + ensure_current_artifacts_are_set_up tmux neww sudo --preserve-env=HOME,PATH,TMUX env PYTHONPATH=tests IPYTHONDIR=\$PWD/.ipython ipython -i ./tools/sandbox.py $@ } cmd_test_debug() { - ensure_ci_artifacts + ensure_current_artifacts_are_set_up cmd_sh "tmux new ./tools/test.sh --pdb $@" } From 1e9cf29c769cbe5ab18416b4a4ea9fcb340a3db2 Mon Sep 17 00:00:00 2001 From: Egor Lazarchuk Date: Tue, 16 Dec 2025 14:24:18 +0000 Subject: [PATCH 05/11] feat: ability to download multiple artifacts Modify `download_ci_artifacts` to accept artifacts paths as input and download by passing these paths to `ensure_ci_artifacts`. `ensure_ci_artifacts` will default to latest s3 artifacts if no args were provided. 
`--force` still works and only deletes specified artifacts. Signed-off-by: Egor Lazarchuk --- tests/conftest.py | 2 +- tools/devtool | 61 +++++++++++++++++++++++++++++++++++++---------- 2 files changed, 49 insertions(+), 14 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 00000ce05ee..ec10d7cb80d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -32,7 +32,7 @@ import host_tools.cargo_build as build_tools from framework import defs, utils from framework.artifacts import disks, kernel_params -from framework.defs import DEFAULT_BINARY_DIR, ARTIFACT_DIR +from framework.defs import ARTIFACT_DIR, DEFAULT_BINARY_DIR from framework.microvm import HugePagesConfig, MicroVMFactory, SnapshotType from framework.properties import global_props from framework.utils_cpu_templates import ( diff --git a/tools/devtool b/tools/devtool index 6da8c1a0c0d..5450c337153 100755 --- a/tools/devtool +++ b/tools/devtool @@ -440,9 +440,17 @@ cmd_help() { echo " Builds the rootfs and guest kernel artifacts we use for our CI." echo " Run './tools/devtool build_ci_artifacts help' for more details about the available commands." echo "" - echo " download_ci_artifacts [--force]" - echo " Downloads the CI artifacts used for testing from our S3 bucket. If --force is passed, purges any existing" - echo " artifacts first. Useful for refreshing local artifacts after an update, or if something got messed up." + echo " download_ci_artifacts [--force] [s3_uri_1, s3_uri_2 ...]" + echo " Downloads artifacts from provided S3 URI (like s3://spec.ccfc.min/firecracker-ci/my_artifacts)" + echo " and runs ./tools/setup-ci-artifacts.sh. for each of them." 
+ echo " If no arguments are provided, pulls newest artifacts from $DEFAULT_ARTIFACTS_S3_BUCKET" + echo " If `--force` is specified, removes previous artifacts with same name" + echo "" + echo " set_current_artifacts s3_uri/directory name" + echo " Sets the $LOCAL_ARTIFACTS_CURRENT_DIR_FILE to contain a local path where the artifacts should be." + echo " Accepts some name used to generate the final directory name. Mainly used with S3 URI" + echo " like `download_ci_artifacts` cmd. Alternatively it is possible to manually write local " + echo " path to artifacts directory into $LOCAL_ARTIFACTS_CURRENT_DIR_FILE file" echo "" cat < $LOCAL_ARTIFACTS_CURRENT_DIR_FILE + say "Current artifacts setup at " $local_artifacts_path } ensure_current_artifacts_are_set_up() { @@ -608,23 +635,31 @@ ensure_current_artifacts_are_set_up() { } download_ci_artifacts() { - if ! command -v aws >/dev/null; then - die "AWS CLI not installed, which is required for downloading artifacts for integration tests." + local artifacts=$1 + + if [ -z $artifacts ]; then + local default_artifacts=$(get_newest_s3_artifacts) + say "No specific artifacts are defined. Using default artifacts: " $default_artifacts + artifacts=$default_artifacts fi # Fetch all the artifacts so they are local - local artifacts_s3_url=$(get_newest_s3_artifacts) - local artifacts_s3_url_arch=$artifacts_s3_url/$(uname -m) - local local_artifacts_path=$(get_local_artifacts_path $artifacts_s3_url)/$(uname -m) + local artifacts_arch=$artifacts/$(uname -m) + local local_artifacts_path=$(get_local_artifacts_path $artifacts)/$(uname -m) + + if [ ! -z $FORCE_ARTIFACT_DOWNLOAD ]; then + say "Removing " $local_artifacts_path + rm -r $local_artifacts_path + fi if [ ! 
-d "$local_artifacts_path" ]; then - say "Fetching artifacts from S3: " $artifacts_s3_url_arch " into: " $local_artifacts_path + say "Fetching artifacts from S3: " $artifacts_arch " into: " $local_artifacts_path mkdir -pv $local_artifacts_path - aws s3 sync --no-sign-request "$artifacts_s3_url_arch" "$local_artifacts_path" + aws s3 sync --no-sign-request "$artifacts_arch" "$local_artifacts_path" ok_or_die "Failed to download artifacts using awscli!" cmd_sh "./tools/setup-ci-artifacts.sh" $local_artifacts_path else - say "Found existing artifacts " $artifacts_s3_url_arch " at: " $local_artifacts_path + say "Found existing artifacts " $artifacts_arch " at: " $local_artifacts_path fi LOCAL_ARTIFACTS_PATH=$local_artifacts_path From 9cb87126662f4d771eec7f449a5987d9eff52e62 Mon Sep 17 00:00:00 2001 From: Egor Lazarchuk Date: Fri, 12 Dec 2025 15:54:28 +0000 Subject: [PATCH 06/11] refactor: specify explicit arguments for ab_test.py Add arguments names for binary paths for ab_test.py Signed-off-by: Egor Lazarchuk --- .buildkite/pipeline_perf.py | 2 +- tests/README.md | 2 +- tools/ab_test.py | 12 +++++++----- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/.buildkite/pipeline_perf.py b/.buildkite/pipeline_perf.py index feda974bd0d..8143a4374c3 100755 --- a/.buildkite/pipeline_perf.py +++ b/.buildkite/pipeline_perf.py @@ -134,7 +134,7 @@ test_script_opts = "" if REVISION_A: devtool_opts += " --ab" - test_script_opts = f'{ab_opts} run build/{REVISION_A}/ build/{REVISION_B} --pytest-opts "{test_selector}"' + test_script_opts = f'{ab_opts} run --binaries-a build/{REVISION_A}/ --binaries-b build/{REVISION_B} --pytest-opts "{test_selector}"' else: # Passing `-m ''` below instructs pytest to collect tests regardless of # their markers (e.g. 
it will collect both tests marked as nonci, and diff --git a/tests/README.md b/tests/README.md index c4624448613..0af6f3e4f2d 100644 --- a/tests/README.md +++ b/tests/README.md @@ -184,7 +184,7 @@ function to [`.buildkite/pipeline_perf.py`](../.buildkite/pipeline_perf.py). To manually run an A/B-Test, use ```sh -tools/devtool -y test --ab [optional arguments to ab_test.py] run --pytest-opts +tools/devtool -y test --ab [optional arguments to ab_test.py] run --binaries-a --binaries-b --pytest-opts ``` Here, _dir A_ and _dir B_ are directories containing firecracker and jailer diff --git a/tools/ab_test.py b/tools/ab_test.py index 8998f457f25..bb917ad5bc7 100755 --- a/tools/ab_test.py +++ b/tools/ab_test.py @@ -422,14 +422,16 @@ def ab_performance_test( help="Run an specific test of our test suite as an A/B-test across two specified commits", ) run_parser.add_argument( - "a_revision", + "--binaries-a", help="Directory containing firecracker and jailer binaries to be considered the performance baseline", type=Path, + required=True, ) run_parser.add_argument( - "b_revision", - help="Directory containing firecracker and jailer binaries whose performance we want to compare against the results from a_revision", + "--binaries-b", + help="Directory containing firecracker and jailer binaries whose performance we want to compare against the results from binaries-a", type=Path, + required=True, ) run_parser.add_argument( "--pytest-opts", @@ -472,8 +474,8 @@ def ab_performance_test( if args.command == "run": ab_performance_test( - args.a_revision, - args.b_revision, + args.binaries_a, + args.binaries_b, args.pytest_opts, args.significance, args.absolute_strength, From 2ab5a0b52ce8388157d0bef20f822a5095bb05b4 Mon Sep 17 00:00:00 2001 From: Egor Lazarchuk Date: Wed, 17 Dec 2025 14:38:25 +0000 Subject: [PATCH 07/11] feat(A/B): add customizable artifacts Update `ab_test.py` with ability to accept custom artifacts for A and B runs. Additionally update `pipeline_perf.py` as well. 
Now REVISION_A_ARTIFACTS and REVISION_B_ARTIFACTS environment variables specify custom artifacts which will be used in A/B test Signed-off-by: Egor Lazarchuk --- .buildkite/common.py | 6 ++++ .buildkite/pipeline_perf.py | 16 +++++++++- tools/ab_test.py | 63 ++++++++++++++++++++++++++++++------- 3 files changed, 73 insertions(+), 12 deletions(-) diff --git a/.buildkite/common.py b/.buildkite/common.py index 6c86e26487d..bedbe4d9517 100644 --- a/.buildkite/common.py +++ b/.buildkite/common.py @@ -376,6 +376,12 @@ def to_json(self): """Serialize the pipeline to JSON""" return json.dumps(self.to_dict(), indent=4, sort_keys=True, ensure_ascii=False) + def devtool_download_artifacts(self, artifacts): + """Generate a `devtool download_ci_artifacts` command""" + parts = ["./tools/devtool -y download_ci_artifacts"] + parts += artifacts + return " ".join(parts) + def devtool_test(self, devtool_opts=None, pytest_opts=None): """Generate a `devtool test` command""" cmds = [] diff --git a/.buildkite/pipeline_perf.py b/.buildkite/pipeline_perf.py index 8143a4374c3..a28848aae30 100755 --- a/.buildkite/pipeline_perf.py +++ b/.buildkite/pipeline_perf.py @@ -94,11 +94,16 @@ REVISION_A = os.environ.get("REVISION_A") REVISION_B = os.environ.get("REVISION_B") +REVISION_A_ARTIFACTS = os.environ.get("REVISION_A_ARTIFACTS") +REVISION_B_ARTIFACTS = os.environ.get("REVISION_B_ARTIFACTS") # Either both are specified or neither. Only doing either is a bug. If you want to # run performance tests _on_ a specific commit, specify neither and put your commit # into buildkite's "commit" field. 
assert (REVISION_A and REVISION_B) or (not REVISION_A and not REVISION_B) +assert (REVISION_A_ARTIFACTS and REVISION_B_ARTIFACTS) or ( + not REVISION_A_ARTIFACTS and not REVISION_B_ARTIFACTS +) BKPipeline.parser.add_argument( "--test", @@ -132,17 +137,26 @@ ab_opts = test.pop("ab_opts", "") devtool_opts += " --performance" test_script_opts = "" + artifacts = [] if REVISION_A: devtool_opts += " --ab" test_script_opts = f'{ab_opts} run --binaries-a build/{REVISION_A}/ --binaries-b build/{REVISION_B} --pytest-opts "{test_selector}"' + if REVISION_A_ARTIFACTS: + artifacts.append(REVISION_A_ARTIFACTS) + artifacts.append(REVISION_B_ARTIFACTS) + test_script_opts += f" --artifacts-a {REVISION_A_ARTIFACTS} --artifacts-b {REVISION_B_ARTIFACTS}" else: # Passing `-m ''` below instructs pytest to collect tests regardless of # their markers (e.g. it will collect both tests marked as nonci, and # tests without any markers). test_script_opts += f" -m '' {test_selector}" + command = [] + if artifacts: + command.append(pipeline.devtool_download_artifacts(artifacts)) + command.extend(pipeline.devtool_test(devtool_opts, test_script_opts)) pipeline.build_group( - command=pipeline.devtool_test(devtool_opts, test_script_opts), + command=command, # and the rest can be command arguments **test, ) diff --git a/tools/ab_test.py b/tools/ab_test.py index bb917ad5bc7..e6e64babaf5 100755 --- a/tools/ab_test.py +++ b/tools/ab_test.py @@ -24,7 +24,7 @@ import subprocess from collections import defaultdict from pathlib import Path -from typing import Callable, List, TypeVar +from typing import Callable, List, Optional, TypeVar import scipy @@ -194,16 +194,31 @@ def uninteresting_dimensions(data): return uninteresting -def collect_data(tag: str, binary_dir: Path, pytest_opts: str): +def collect_data( + tag: str, binary_dir: Path, artifacts: Optional[Path], pytest_opts: str +): """ Executes the specified test using the provided firecracker binaries and stores results into the `test_results/tag` 
directory """ binary_dir = binary_dir.resolve() - print(f"Collecting samples with {binary_dir}") + print( + f"Collecting samples | binaries path: {binary_dir}" + + f" | artifacts path: {artifacts}" + if artifacts + else "" + ) test_path = f"test_results/{tag}" test_report_path = f"{test_path}/test-report.json" + + # It is not possible to just download them here this script is usually run inside docker + # and artifacts downloading does not work inside it. + if artifacts: + subprocess.run( + f"./tools/devtool set_current_artifacts {artifacts}", check=True, shell=True + ) + subprocess.run( f"./tools/test.sh --binary-dir={binary_dir} {pytest_opts} -m '' --json-report-file=../{test_report_path}", env=os.environ @@ -371,25 +386,29 @@ def analyze_data( def binary_ab_test( - test_runner: Callable[[Path, bool], T], + test_runner: Callable[[Path, Optional[Path], bool], T], comparator: Callable[[T, T], U], *, a_directory: Path, b_directory: Path, + a_artifacts: Optional[Path], + b_artifacts: Optional[Path], ): """ Similar to `git_ab_test`, but instead of locally checking out different revisions, it operates on directories containing firecracker/jailer binaries """ - result_a = test_runner(a_directory, True) - result_b = test_runner(b_directory, False) + result_a = test_runner(a_directory, a_artifacts, True) + result_b = test_runner(b_directory, b_artifacts, False) return result_a, result_b, comparator(result_a, result_b) def ab_performance_test( - a_revision: Path, - b_revision: Path, + a_directory: Path, + b_directory: Path, + a_artifacts: Optional[Path], + b_artifacts: Optional[Path], pytest_opts, p_thresh, strength_abs_thresh, @@ -398,7 +417,9 @@ def ab_performance_test( """Does an A/B-test of the specified test with the given firecracker/jailer binaries""" return binary_ab_test( - lambda bin_dir, is_a: collect_data(is_a and "A" or "B", bin_dir, pytest_opts), + lambda bin_dir, art_dir, is_a: collect_data( + is_a and "A" or "B", bin_dir, art_dir, pytest_opts + ), lambda 
ah, be: analyze_data( ah, be, @@ -407,8 +428,10 @@ ab_performance_test( noise_threshold, n_resamples=int(100 / p_thresh), ), - a_directory=a_revision, - b_directory=b_revision, + a_directory=a_directory, + b_directory=b_directory, + a_artifacts=a_artifacts, + b_artifacts=b_artifacts, ) @@ -433,6 +456,22 @@ ab_performance_test( type=Path, required=True, ) + run_parser.add_argument( + "--artifacts-a", + help="Name of the artifacts directory in the build/artifacts to use for revision A test. If the directory does not exist, the name will be treated as S3 path and artifacts will be downloaded from there.", + # Type is string since it can be an s3 path which if passed to `Path` constructor + # will be incorrectly modified + type=str, + required=False, + ) + run_parser.add_argument( + "--artifacts-b", + help="Name of the artifacts directory in the build/artifacts to use for revision B test. If the directory does not exist, the name will be treated as S3 path and artifacts will be downloaded from there.", + # Type is string since it can be an s3 path which if passed to `Path` constructor + # will be incorrectly modified + type=str, + required=False, + ) run_parser.add_argument( "--pytest-opts", help="Parameters to pass through to pytest, for example for test selection", @@ -476,6 +515,8 @@ ab_performance_test( ab_performance_test( args.binaries_a, args.binaries_b, + args.artifacts_a, + args.artifacts_b, args.pytest_opts, args.significance, args.absolute_strength, From e25f29bff49e104e1b303c68243f41e8b8e04c76 Mon Sep 17 00:00:00 2001 From: Egor Lazarchuk Date: Thu, 18 Dec 2025 17:36:31 +0000 Subject: [PATCH 08/11] chore(test.sh): skip artifact setup if no current is specified Some tests like style checks do not require artifacts to be present.
Signed-off-by: Egor Lazarchuk --- tools/test.sh | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/tools/test.sh b/tools/test.sh index 867576e5069..367c89165cb 100755 --- a/tools/test.sh +++ b/tools/test.sh @@ -31,8 +31,14 @@ if [ -f $CGROUP/cgroup.controllers -a -e $CGROUP/cgroup.type ]; then > $CGROUP/cgroup.subtree_control fi -say "Copy CI artifacts to /srv, so hardlinks work" -cp -ruvfL $(cat build/current_artifacts) /srv/current_artifacts +if [ -f build/current_artifacts ]; then + say "Copy CI artifacts to /srv, so hardlinks work" + cp -ruvfL $(cat build/current_artifacts) /srv/current_artifacts +else + # The directory must exist for pytest to function + mkdir -p /srv/current_artifacts + say "Skipping setting up CI artifacts" +fi cd tests export PYTEST_ADDOPTS="${PYTEST_ADDOPTS:-} --pdbcls=IPython.terminal.debugger:TerminalPdb" From 82b17a948eb0b291c85ce627d1dba1bed20593af Mon Sep 17 00:00:00 2001 From: Egor Lazarchuk Date: Thu, 18 Dec 2025 17:09:33 +0000 Subject: [PATCH 09/11] chore(devtool): remove artifacts check from checkstyle command There is no reason for style check to download artifacts Signed-off-by: Egor Lazarchuk --- tools/devtool | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/tools/devtool b/tools/devtool index 5450c337153..5d1da23eecf 100755 --- a/tools/devtool +++ b/tools/devtool @@ -768,6 +768,7 @@ cmd_test() { do_build=1 do_archive=1 do_kvm_check=1 + do_artifacts_check=1 # Parse any command line args. while [ $# -gt 0 ]; do case "$1" in @@ -795,6 +796,9 @@ cmd_test() { "--no-kvm-check") do_kvm_check=0 ;; + "--no-artifacts-check") + do_artifacts_check=0 + ;; "--") { shift; break; } ;; *) die "Unknown argument: $1. Please use --help for help." 
@@ -807,7 +811,7 @@ cmd_test() { [ $do_kvm_check != 0 ] && ensure_kvm ensure_devctr ensure_build_dir - ensure_current_artifacts_are_set_up + [ $do_artifacts_check != 0 ] && ensure_current_artifacts_are_set_up if [ $do_build != 0 ]; then cmd_build --release @@ -1041,8 +1045,8 @@ cmd_checkstyle() { cmd_sh "git-secrets --register-aws && git-secrets --scan" fi - cmd_test --no-build --no-kvm-check -- -n 4 --dist worksteal integration_tests/style || exit 1 - cmd_test --no-build --no-kvm-check -- -n 4 --doctest-modules framework || exit 1 + cmd_test --no-build --no-kvm-check --no-artifacts-check -- -n 4 --dist worksteal integration_tests/style || exit 1 + cmd_test --no-build --no-kvm-check --no-artifacts-check -- -n 4 --doctest-modules framework || exit 1 } cmd_checkbuild() { From 2b3c8b9c04941466a6d059c2cf3b7273917c35c0 Mon Sep 17 00:00:00 2001 From: Egor Lazarchuk Date: Thu, 18 Dec 2025 17:45:02 +0000 Subject: [PATCH 10/11] chore(devtool): update cmd_test help Add missing descriptions for additional arguments Signed-off-by: Egor Lazarchuk --- tools/devtool | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/tools/devtool b/tools/devtool index 5d1da23eecf..55fb42c0a30 100755 --- a/tools/devtool +++ b/tools/devtool @@ -427,14 +427,22 @@ cmd_help() { echo " sh CMD..." echo " Launch the development container and run a command." echo "" - echo " test [-- []]" - echo " Run the Firecracker integration tests." + echo " test [args] [-- []]" + echo " Run the Firecracker integration or A/B tests." echo " The Firecracker testing system is based on pytest. All arguments after --" echo " will be passed through to pytest." + echo "" + echo " Args for the `test` itself:" + echo " -h, --help Print help" echo " -c, --cpuset-cpus cpulist Set a dedicated cpulist to be used by the tests." echo " -m, --cpuset-mems memlist Set a dedicated memlist to be used by the tests." 
echo "    --performance               Tweak various setting of the host running the tests (such as C- and P-states)" echo " to achieve consistent performance. Used for running performance tests in CI." + echo " --ab Run A/B test." + echo " --no-build Skip building step." + echo " --no-archive Skip archiving of `test_result` after the test is done." + echo " --no-kvm-check Skip checking for `/dev/kvm` presence." + echo " --no-artifacts-check Skip checking existing artifacts." echo "" echo " build_ci_artifacts [all|rootfs|kernels]" echo " Builds the rootfs and guest kernel artifacts we use for our CI." From e802451b64ab4652a1c3b4c466f25e792faa165d Mon Sep 17 00:00:00 2001 From: Egor Lazarchuk Date: Fri, 19 Dec 2025 17:07:29 +0000 Subject: [PATCH 11/11] feat: make ensure_current_artifacts_are_set_up a devtool command The command is used internally in the `devtool` to make sure there are at least some current artifacts set up. This behaviour is also required in the popular docker test, so convert it to a callable command. Signed-off-by: Egor Lazarchuk --- .buildkite/pipeline_docker_popular.py | 2 +- tools/devtool | 14 +++++++++----- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/.buildkite/pipeline_docker_popular.py b/.buildkite/pipeline_docker_popular.py index 2bcbe2eed56..ee0bf664fc4 100755 --- a/.buildkite/pipeline_docker_popular.py +++ b/.buildkite/pipeline_docker_popular.py @@ -28,7 +28,7 @@ pipeline.build_group( ":whale: Docker Popular Containers", [ - "./tools/devtool download_ci_artifacts", + "./tools/devtool ensure_current_artifacts_are_set_up", f'buildkite-agent artifact download "{ROOTFS_TAR}" .', f'tar xzf "{ROOTFS_TAR}" -C tools/test-popular-containers', './tools/devtool sh "cd ./tools/test-popular-containers; PYTHONPATH=../../tests ./test-docker-rootfs.py"', diff --git a/tools/devtool b/tools/devtool index 55fb42c0a30..5e510c09963 100755 --- a/tools/devtool +++ b/tools/devtool @@ -460,6 +460,10 @@ cmd_help() { echo " like `download_ci_artifacts` cmd.
Alternatively it is possible to manually write local " echo " path to artifacts directory into $LOCAL_ARTIFACTS_CURRENT_DIR_FILE file" echo "" + echo " ensure_current_artifacts_are_set_up" + echo " Makes sure there are some artifacts set up as current ones. If no current artifacts are set up" + echo " downloads the latest artifacts and sets them up as current." + echo "" cat <]] @@ -632,7 +636,7 @@ cmd_set_current_artifacts() { say "Current artifacts setup at " $local_artifacts_path } -ensure_current_artifacts_are_set_up() { +cmd_ensure_current_artifacts_are_set_up() { if [ -f $LOCAL_ARTIFACTS_CURRENT_DIR_FILE ]; then say "Current artifacts already setup at " $(cat $LOCAL_ARTIFACTS_CURRENT_DIR_FILE) else @@ -819,7 +823,7 @@ cmd_test() { [ $do_kvm_check != 0 ] && ensure_kvm ensure_devctr ensure_build_dir - [ $do_artifacts_check != 0 ] && ensure_current_artifacts_are_set_up + [ $do_artifacts_check != 0 ] && cmd_ensure_current_artifacts_are_set_up if [ $do_build != 0 ]; then cmd_build --release @@ -1001,7 +1005,7 @@ cmd_sh() { cmd_sandbox() { cmd_build --release - ensure_current_artifacts_are_set_up + cmd_ensure_current_artifacts_are_set_up cmd_sh "tmux new env PYTEST_ADDOPTS=--pdbcls=IPython.terminal.debugger:TerminalPdb PYTHONPATH=tests IPYTHONDIR=\$PWD/.ipython ipython -i ./tools/sandbox.py $@" cmd_fix_perms ".ipython" } @@ -1023,12 +1027,12 @@ cmd_sandbox_native() { pip3.11 install ipython requests requests_unixsocket2 psutil tenacity filelock pip3.11 install jsonschema aws_embedded_metrics pip3.11 install packaging pytest - ensure_current_artifacts_are_set_up + cmd_ensure_current_artifacts_are_set_up tmux neww sudo --preserve-env=HOME,PATH,TMUX env PYTHONPATH=tests IPYTHONDIR=\$PWD/.ipython ipython -i ./tools/sandbox.py $@ } cmd_test_debug() { - ensure_current_artifacts_are_set_up + cmd_ensure_current_artifacts_are_set_up cmd_sh "tmux new ./tools/test.sh --pdb $@" }