diff --git a/.buildkite/common.py b/.buildkite/common.py
index 6c86e26487d..bedbe4d9517 100644
--- a/.buildkite/common.py
+++ b/.buildkite/common.py
@@ -376,6 +376,12 @@ def to_json(self):
"""Serialize the pipeline to JSON"""
return json.dumps(self.to_dict(), indent=4, sort_keys=True, ensure_ascii=False)
+ def devtool_download_artifacts(self, artifacts):
+ """Generate a `devtool download_ci_artifacts` command"""
+ parts = ["./tools/devtool -y download_ci_artifacts"]
+ parts += artifacts
+ return " ".join(parts)
+
def devtool_test(self, devtool_opts=None, pytest_opts=None):
"""Generate a `devtool test` command"""
cmds = []
diff --git a/.buildkite/pipeline_docker_popular.py b/.buildkite/pipeline_docker_popular.py
index 2bcbe2eed56..ee0bf664fc4 100755
--- a/.buildkite/pipeline_docker_popular.py
+++ b/.buildkite/pipeline_docker_popular.py
@@ -28,7 +28,7 @@
pipeline.build_group(
":whale: Docker Popular Containers",
[
- "./tools/devtool download_ci_artifacts",
+ "./tools/devtool ensure_current_artifacts_are_set_up",
f'buildkite-agent artifact download "{ROOTFS_TAR}" .',
f'tar xzf "{ROOTFS_TAR}" -C tools/test-popular-containers',
'./tools/devtool sh "cd ./tools/test-popular-containers; PYTHONPATH=../../tests ./test-docker-rootfs.py"',
diff --git a/.buildkite/pipeline_perf.py b/.buildkite/pipeline_perf.py
index feda974bd0d..a28848aae30 100755
--- a/.buildkite/pipeline_perf.py
+++ b/.buildkite/pipeline_perf.py
@@ -94,11 +94,16 @@
REVISION_A = os.environ.get("REVISION_A")
REVISION_B = os.environ.get("REVISION_B")
+REVISION_A_ARTIFACTS = os.environ.get("REVISION_A_ARTIFACTS")
+REVISION_B_ARTIFACTS = os.environ.get("REVISION_B_ARTIFACTS")
# Either both are specified or neither. Only doing either is a bug. If you want to
# run performance tests _on_ a specific commit, specify neither and put your commit
# into buildkite's "commit" field.
assert (REVISION_A and REVISION_B) or (not REVISION_A and not REVISION_B)
+assert (REVISION_A_ARTIFACTS and REVISION_B_ARTIFACTS) or (
+ not REVISION_A_ARTIFACTS and not REVISION_B_ARTIFACTS
+)
BKPipeline.parser.add_argument(
"--test",
@@ -132,17 +137,26 @@
ab_opts = test.pop("ab_opts", "")
devtool_opts += " --performance"
test_script_opts = ""
+ artifacts = []
if REVISION_A:
devtool_opts += " --ab"
- test_script_opts = f'{ab_opts} run build/{REVISION_A}/ build/{REVISION_B} --pytest-opts "{test_selector}"'
+ test_script_opts = f'{ab_opts} run --binaries-a build/{REVISION_A}/ --binaries-b build/{REVISION_B} --pytest-opts "{test_selector}"'
+ if REVISION_A_ARTIFACTS:
+ artifacts.append(REVISION_A_ARTIFACTS)
+ artifacts.append(REVISION_B_ARTIFACTS)
+ test_script_opts += f" --artifacts-a {REVISION_A_ARTIFACTS} --artifacts-b {REVISION_B_ARTIFACTS}"
else:
# Passing `-m ''` below instructs pytest to collect tests regardless of
# their markers (e.g. it will collect both tests marked as nonci, and
# tests without any markers).
test_script_opts += f" -m '' {test_selector}"
+ command = []
+ if artifacts:
+ command.append(pipeline.devtool_download_artifacts(artifacts))
+ command.extend(pipeline.devtool_test(devtool_opts, test_script_opts))
pipeline.build_group(
- command=pipeline.devtool_test(devtool_opts, test_script_opts),
+ command=command,
# and the rest can be command arguments
**test,
)
diff --git a/tests/README.md b/tests/README.md
index ae1a2d7756d..0af6f3e4f2d 100644
--- a/tests/README.md
+++ b/tests/README.md
@@ -184,7 +184,7 @@ function to [`.buildkite/pipeline_perf.py`](../.buildkite/pipeline_perf.py). To
manually run an A/B-Test, use
```sh
-tools/devtool -y test --ab [optional arguments to ab_test.py] run
--pytest-opts
+tools/devtool -y test --ab [optional arguments to ab_test.py] run --binaries-a <dir A> --binaries-b <dir B> --pytest-opts <pytest options>
```
Here, _dir A_ and _dir B_ are directories containing firecracker and jailer
@@ -452,9 +452,9 @@ there. `pytest` will bring them into scope for all your tests.
`Q4:` *I want to use more/other microvm test images, but I don't want to add
them to the common s3 bucket.*\
-`A4:` Add your custom images to the `build/img` subdirectory in the Firecracker
-source tree. This directory is bind-mounted in the container and used as a local
-image cache.
+`A4:` Add your custom images to the `build/artifacts` subdirectory in the
+Firecracker source tree. This directory is bind-mounted in the container and
+used as a local image cache.
`Q5:` *How can I get live logger output from the tests?*\
`A5:` Accessing **pytest.ini** will allow you to modify logger settings.
diff --git a/tests/conftest.py b/tests/conftest.py
index 7c777eb6abd..ec10d7cb80d 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -22,7 +22,6 @@
import inspect
import json
import os
-import platform
import shutil
import sys
import tempfile
@@ -33,7 +32,7 @@
import host_tools.cargo_build as build_tools
from framework import defs, utils
from framework.artifacts import disks, kernel_params
-from framework.defs import DEFAULT_BINARY_DIR
+from framework.defs import ARTIFACT_DIR, DEFAULT_BINARY_DIR
from framework.microvm import HugePagesConfig, MicroVMFactory, SnapshotType
from framework.properties import global_props
from framework.utils_cpu_templates import (
@@ -399,7 +398,7 @@ def microvm_factory(request, record_property, results_dir, netns_factory):
uvm_data.joinpath("host-dmesg.log").write_text(
utils.run_cmd(["dmesg", "-dPx"]).stdout
)
- shutil.copy(f"/firecracker/build/img/{platform.machine()}/id_rsa", uvm_data)
+ shutil.copy(ARTIFACT_DIR / "id_rsa", uvm_data)
if Path(uvm.screen_log).exists():
shutil.copy(uvm.screen_log, uvm_data)
diff --git a/tests/framework/artifacts.py b/tests/framework/artifacts.py
index 701f26cdd40..4e1a980a4b1 100644
--- a/tests/framework/artifacts.py
+++ b/tests/framework/artifacts.py
@@ -4,16 +4,12 @@
"""Define classes for interacting with CI artifacts"""
import re
-from dataclasses import dataclass
from pathlib import Path
from typing import Iterator
import pytest
from framework.defs import ARTIFACT_DIR
-from framework.utils import check_output, get_firecracker_version_from_toml
-from framework.with_filelock import with_filelock
-from host_tools.cargo_build import get_binary
def select_supported_kernels():
@@ -55,101 +51,3 @@ def kernel_params(
"""Return supported kernels"""
for kernel in select(glob, artifact_dir):
yield pytest.param(kernel, id=kernel.name)
-
-
-@dataclass(frozen=True, repr=True)
-class FirecrackerArtifact:
- """Utility class for Firecracker binary artifacts."""
-
- path: Path
-
- @property
- def name(self):
- """Get the Firecracker name."""
- return self.path.name
-
- @property
- def jailer(self):
- """Get the jailer with the same version."""
- return self.path.with_name(f"jailer-v{self.version}")
-
- @property
- def version(self):
- """Return Firecracker's version: `X.Y.Z-prerelase`."""
- # Get the filename, split on the first '-' and trim the leading 'v'.
- # sample: firecracker-v1.2.0-alpha
- return self.path.name.split("-", 1)[1][1:]
-
- @property
- def version_tuple(self):
- """Return the artifact's version as a tuple `(X, Y, Z)`."""
- return tuple(int(x) for x in self.version.split("."))
-
- @property
- def snapshot_version_tuple(self):
- """Return the artifact's snapshot version as a tuple: `X.Y.0`."""
-
- # Starting from Firecracker v1.7.0, snapshots have their own version that is
- # independent of Firecracker versions. For these Firecracker versions, use
- # the --snapshot-version Firecracker flag, to figure out which snapshot version
- # it supports.
-
- return (
- check_output([self.path, "--snapshot-version"])
- .stdout.strip()
- .split("\n")[0]
- .split(".")
- )
-
- @property
- def snapshot_version(self):
- """Return the artifact's snapshot version: `X.Y.0`.
-
- Due to how Firecracker maps release versions to snapshot versions, we
- have to request the minor version instead of the actual version.
- """
- return ".".join(str(x) for x in self.snapshot_version_tuple)
-
-
-@with_filelock
-def current_release(version):
- """Massage this working copy Firecracker binary to look like a normal
- release, so it can run the same tests.
- """
- binaries = []
- for binary in ["firecracker", "jailer"]:
- bin_path1 = get_binary(binary)
- bin_path2 = bin_path1.with_name(f"{binary}-v{version}")
- if not bin_path2.exists():
- bin_path2.unlink(missing_ok=True)
- bin_path2.hardlink_to(bin_path1)
- binaries.append(bin_path2)
- return binaries
-
-
-def working_version_as_artifact():
- """
- Return working copy of Firecracker as a release artifact
- """
- cargo_version = get_firecracker_version_from_toml()
- return FirecrackerArtifact(current_release(str(cargo_version))[0])
-
-
-def firecracker_artifacts():
- """Return all supported firecracker binaries."""
- cargo_version = get_firecracker_version_from_toml()
- # until the next minor version (but *not* including)
- max_version = (cargo_version.major, cargo_version.minor + 1, 0)
- prefix = "firecracker/firecracker-*"
- for firecracker in sorted(ARTIFACT_DIR.glob(prefix)):
- match = re.match(r"firecracker-v(\d+)\.(\d+)\.(\d+)", firecracker.name)
- if not match:
- continue
- fc = FirecrackerArtifact(firecracker)
- version = fc.version_tuple
- if version >= max_version:
- continue
- yield pytest.param(fc, id=fc.name)
-
- fc = working_version_as_artifact()
- yield pytest.param(fc, id=fc.name)
diff --git a/tests/framework/defs.py b/tests/framework/defs.py
index 827c9ee3caa..0a0e159c0e1 100644
--- a/tests/framework/defs.py
+++ b/tests/framework/defs.py
@@ -32,10 +32,13 @@
SUPPORTED_HOST_KERNELS = ["5.10", "6.1"]
-IMG_DIR = Path(DEFAULT_TEST_SESSION_ROOT_PATH) / "img"
+ARTIFACT_DIR = Path(DEFAULT_TEST_SESSION_ROOT_PATH) / "current_artifacts"
# fall-back to the local directory
-if not IMG_DIR.exists():
- IMG_DIR = LOCAL_BUILD_PATH / "img"
-
-ARTIFACT_DIR = IMG_DIR / platform.machine()
+if not ARTIFACT_DIR.exists():
+    current_artifacts_dir = (
+        (Path(LOCAL_BUILD_PATH) / "current_artifacts")
+        .read_text(encoding="utf-8")
+        .strip()
+    )
+ ARTIFACT_DIR = LOCAL_BUILD_PATH / current_artifacts_dir
diff --git a/tests/integration_tests/functional/conftest.py b/tests/integration_tests/functional/conftest.py
deleted file mode 100644
index cb4426a5982..00000000000
--- a/tests/integration_tests/functional/conftest.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-# SPDX-License-Identifier: Apache-2.0
-
-"""Imported by pytest when running tests in this directory"""
-
-import pytest
-
-from framework.artifacts import firecracker_artifacts
-
-
-# This fixture forces a Firecracker build, even if it doesn't get used.
-# By placing it here instead of the top-level conftest.py we skip the build.
-@pytest.fixture(params=firecracker_artifacts())
-def firecracker_release(request, record_property):
- """Return all supported firecracker binaries."""
- firecracker = request.param
- record_property("firecracker_release", firecracker.name)
- return firecracker
diff --git a/tests/integration_tests/functional/test_cmd_line_parameters.py b/tests/integration_tests/functional/test_cmd_line_parameters.py
index 59eebf5d42e..acacea7e339 100644
--- a/tests/integration_tests/functional/test_cmd_line_parameters.py
+++ b/tests/integration_tests/functional/test_cmd_line_parameters.py
@@ -11,9 +11,7 @@
from host_tools.fcmetrics import validate_fc_metrics
-def test_describe_snapshot_all_versions(
- microvm_factory, guest_kernel, rootfs, firecracker_release
-):
+def test_describe_snapshot(uvm_plain):
"""
Test `--describe-snapshot` correctness for all snapshot versions.
@@ -21,29 +19,23 @@ def test_describe_snapshot_all_versions(
snapshot state file.
"""
- target_version = firecracker_release.snapshot_version
- vm = microvm_factory.build(
- guest_kernel,
- rootfs,
- fc_binary_path=firecracker_release.path,
- jailer_binary_path=firecracker_release.jailer,
- )
- # FIXME: Once only FC versions >= 1.12 are supported, drop log_level="warn"
- vm.spawn(log_level="warn", serial_out_path=None)
+ vm = uvm_plain
+ fc_binary = vm.fc_binary_path
+
+ cmd = [fc_binary, "--snapshot-version"]
+ snap_version_tuple = check_output(cmd).stdout.strip().split("\n")[0].split(".")
+ snap_version = ".".join(str(x) for x in snap_version_tuple)
+
+ vm.spawn()
vm.basic_config(track_dirty_pages=True)
vm.start()
snapshot = vm.snapshot_diff()
- print("========== Firecracker create snapshot log ==========")
- print(vm.log_data)
vm.kill()
- # Fetch Firecracker binary
- fc_binary = microvm_factory.fc_binary_path
- # Verify the output of `--describe-snapshot` command line parameter
- cmd = [fc_binary] + ["--describe-snapshot", snapshot.vmstate]
+ cmd = [fc_binary, "--describe-snapshot", snapshot.vmstate]
_, stdout, stderr = check_output(cmd)
assert stderr == ""
- assert target_version in stdout
+ assert snap_version in stdout
def test_cli_metrics_path(uvm_plain):
diff --git a/tools/ab_test.py b/tools/ab_test.py
index 8998f457f25..e6e64babaf5 100755
--- a/tools/ab_test.py
+++ b/tools/ab_test.py
@@ -24,7 +24,7 @@
import subprocess
from collections import defaultdict
from pathlib import Path
-from typing import Callable, List, TypeVar
+from typing import Callable, List, Optional, TypeVar
import scipy
@@ -194,16 +194,31 @@ def uninteresting_dimensions(data):
return uninteresting
-def collect_data(tag: str, binary_dir: Path, pytest_opts: str):
+def collect_data(
+ tag: str, binary_dir: Path, artifacts: Optional[Path], pytest_opts: str
+):
"""
Executes the specified test using the provided firecracker binaries and
stores results into the `test_results/tag` directory
"""
binary_dir = binary_dir.resolve()
- print(f"Collecting samples with {binary_dir}")
+    print(
+        f"Collecting samples | binaries path: {binary_dir}"
+        + (f" | artifacts path: {artifacts}"
+           if artifacts
+           else "")
+    )
test_path = f"test_results/{tag}"
test_report_path = f"{test_path}/test-report.json"
+
+    # It is not possible to just download them here because this script is usually run inside docker
+ # and artifacts downloading does not work inside it.
+ if artifacts:
+ subprocess.run(
+ f"./tools/devtool set_current_artifacts {artifacts}", check=True, shell=True
+ )
+
subprocess.run(
f"./tools/test.sh --binary-dir={binary_dir} {pytest_opts} -m '' --json-report-file=../{test_report_path}",
env=os.environ
@@ -371,25 +386,29 @@ def analyze_data(
def binary_ab_test(
- test_runner: Callable[[Path, bool], T],
+ test_runner: Callable[[Path, Optional[Path], bool], T],
comparator: Callable[[T, T], U],
*,
a_directory: Path,
b_directory: Path,
+ a_artifacts: Optional[Path],
+ b_artifacts: Optional[Path],
):
"""
Similar to `git_ab_test`, but instead of locally checking out different revisions, it operates on
directories containing firecracker/jailer binaries
"""
- result_a = test_runner(a_directory, True)
- result_b = test_runner(b_directory, False)
+ result_a = test_runner(a_directory, a_artifacts, True)
+ result_b = test_runner(b_directory, b_artifacts, False)
return result_a, result_b, comparator(result_a, result_b)
def ab_performance_test(
- a_revision: Path,
- b_revision: Path,
+ a_directory: Path,
+ b_directory: Path,
+ a_artifacts: Optional[Path],
+ b_artifacts: Optional[Path],
pytest_opts,
p_thresh,
strength_abs_thresh,
@@ -398,7 +417,9 @@ def ab_performance_test(
"""Does an A/B-test of the specified test with the given firecracker/jailer binaries"""
return binary_ab_test(
- lambda bin_dir, is_a: collect_data(is_a and "A" or "B", bin_dir, pytest_opts),
+ lambda bin_dir, art_dir, is_a: collect_data(
+ is_a and "A" or "B", bin_dir, art_dir, pytest_opts
+ ),
lambda ah, be: analyze_data(
ah,
be,
@@ -407,8 +428,10 @@ def ab_performance_test(
noise_threshold,
n_resamples=int(100 / p_thresh),
),
- a_directory=a_revision,
- b_directory=b_revision,
+ a_directory=a_directory,
+ b_directory=b_directory,
+ a_artifacts=a_artifacts,
+ b_artifacts=b_artifacts,
)
@@ -422,14 +445,32 @@ def ab_performance_test(
help="Run an specific test of our test suite as an A/B-test across two specified commits",
)
run_parser.add_argument(
- "a_revision",
+ "--binaries-a",
help="Directory containing firecracker and jailer binaries to be considered the performance baseline",
type=Path,
+ required=True,
)
run_parser.add_argument(
- "b_revision",
- help="Directory containing firecracker and jailer binaries whose performance we want to compare against the results from a_revision",
+ "--binaries-b",
+ help="Directory containing firecracker and jailer binaries whose performance we want to compare against the results from binaries-a",
type=Path,
+ required=True,
+ )
+ run_parser.add_argument(
+ "--artifacts-a",
+ help="Name of the artifacts directory in the build/artifacts to use for revision A test. If the directory does not exist, the name will be treated as S3 path and artifacts will be downloaded from there.",
+        # Type is string since it can be an s3 path which if passed to `Path` constructor
+ # will be incorrectly modified
+ type=str,
+ required=False,
+ )
+ run_parser.add_argument(
+ "--artifacts-b",
+ help="Name of the artifacts directory in the build/artifacts to use for revision B test. If the directory does not exist, the name will be treated as S3 path and artifacts will be downloaded from there.",
+        # Type is string since it can be an s3 path which if passed to `Path` constructor
+ # will be incorrectly modified
+ type=str,
+ required=False,
)
run_parser.add_argument(
"--pytest-opts",
@@ -472,8 +513,10 @@ def ab_performance_test(
if args.command == "run":
ab_performance_test(
- args.a_revision,
- args.b_revision,
+ args.binaries_a,
+ args.binaries_b,
+ args.artifacts_a,
+ args.artifacts_b,
args.pytest_opts,
args.significance,
args.absolute_strength,
diff --git a/tools/devtool b/tools/devtool
index 9fc5ae441b8..5e510c09963 100755
--- a/tools/devtool
+++ b/tools/devtool
@@ -116,7 +116,11 @@ CTR_TEST_RESULTS_DIR="${CTR_FC_ROOT_DIR}/test_results"
CTR_CARGO_TARGET_DIR="$CTR_FC_BUILD_DIR/cargo_target"
# Path to the microVM images cache dir
-MICROVM_IMAGES_DIR="build/img"
+LOCAL_ARTIFACTS_DIR="build/artifacts"
+
+# File with a single line specifying the name of the
+# currently used artifacts
+LOCAL_ARTIFACTS_CURRENT_DIR_FILE="build/current_artifacts"
# Full path to the public key mapping on the guest
PUB_KEY_PATH=/root/.ssh/id_rsa.pub
@@ -133,6 +137,35 @@ TARGET_PREFIX="$(uname -m)-unknown-linux-"
# Container path to directory where we store built CI artifacts.
CTR_CI_ARTIFACTS_PATH="${CTR_FC_ROOT_DIR}/resources/$(uname -m)"
+DEFAULT_ARTIFACTS_S3_BUCKET=s3://spec.ccfc.min/firecracker-ci
+
+# Query default S3 bucket with artifacts and return the most recent path
+get_newest_s3_artifacts() {
+ local bucket="spec.ccfc.min"
+ local base_prefix="firecracker-ci/"
+
+ # Query all files in the `firecracker-ci` directory, sort them by the `LastModified` date
+ # and return the first one.
+ # Reasons for such complex operation:
+ # - since S3 does not store `LastModified` info for directories, the next best approach of
+ # determining the `newest` directory is to check which directory contains the most recently
+ # modified file
+    #   - also awscli is paginated, so the command below can return multiple values, one for each page.
+ # The very first entry though should be the correct one
+    #   - the end of the command strips everything but the top-level directory name from the output
+ local newest_dir=$(aws s3api list-objects-v2 --bucket "$bucket" --prefix "$base_prefix" --no-sign-request --query 'reverse(sort_by(Contents, &LastModified))[0].Key' --output text | head -1 | sed "s|^$base_prefix||" | cut -d'/' -f1)
+
+ echo "$DEFAULT_ARTIFACTS_S3_BUCKET/$newest_dir"
+}
+
+# Function to return local path to artifacts. Accepts the url from function above
+# as an argument.
+get_local_artifacts_path() {
+ local path=$1
+ echo $LOCAL_ARTIFACTS_DIR/"${path//\//-}"
+}
+
+
# Check if Docker is available and exit if it's not.
# Upon returning from this call, the caller can be certain Docker is available.
#
@@ -394,22 +427,42 @@ cmd_help() {
echo " sh CMD..."
echo " Launch the development container and run a command."
echo ""
- echo " test [-- []]"
- echo " Run the Firecracker integration tests."
+ echo " test [args] [-- []]"
+ echo " Run the Firecracker integration or A/B tests."
echo " The Firecracker testing system is based on pytest. All arguments after --"
echo " will be passed through to pytest."
+ echo ""
+    echo "      Args for the \`test\` itself:"
+ echo " -h, --help Print help"
echo " -c, --cpuset-cpus cpulist Set a dedicated cpulist to be used by the tests."
echo " -m, --cpuset-mems memlist Set a dedicated memlist to be used by the tests."
echo " --performance Tweak various setting of the host running the tests (such as C- and P-states)"
echo " to achieve consistent performance. Used for running performance tests in CI."
+ echo " --ab Run A/B test."
+ echo " --no-build Skip building step."
+    echo "        --no-archive          Skip archiving of \`test_result\` after the test is done."
+    echo "        --no-kvm-check        Skip checking for \`/dev/kvm\` presence."
+ echo " --no-artifacts-check Skip checking existing artifacts."
echo ""
echo " build_ci_artifacts [all|rootfs|kernels]"
echo " Builds the rootfs and guest kernel artifacts we use for our CI."
echo " Run './tools/devtool build_ci_artifacts help' for more details about the available commands."
echo ""
- echo " download_ci_artifacts [--force]"
- echo " Downloads the CI artifacts used for testing from our S3 bucket. If --force is passed, purges any existing"
- echo " artifacts first. Useful for refreshing local artifacts after an update, or if something got messed up."
+ echo " download_ci_artifacts [--force] [s3_uri_1, s3_uri_2 ...]"
+ echo " Downloads artifacts from provided S3 URI (like s3://spec.ccfc.min/firecracker-ci/my_artifacts)"
+ echo " and runs ./tools/setup-ci-artifacts.sh. for each of them."
+ echo " If no arguments are provided, pulls newest artifacts from $DEFAULT_ARTIFACTS_S3_BUCKET"
+    echo "        If \`--force\` is specified, removes previous artifacts with same name"
+ echo ""
+ echo " set_current_artifacts s3_uri/directory name"
+ echo " Sets the $LOCAL_ARTIFACTS_CURRENT_DIR_FILE to contain a local path where the artifacts should be."
+ echo " Accepts some name used to generate the final directory name. Mainly used with S3 URI"
+    echo "        like \`download_ci_artifacts\` cmd. Alternatively it is possible to manually write local "
+ echo " path to artifacts directory into $LOCAL_ARTIFACTS_CURRENT_DIR_FILE file"
+ echo ""
+ echo " ensure_current_artifacts_are_set_up"
+ echo " Makes sure there are some artifacts setup as current ones. If no current artifacts are setup"
+ echo " downloads latest artifacts and sets them up as current."
echo ""
cat < $LOCAL_ARTIFACTS_CURRENT_DIR_FILE
+ say "Current artifacts setup at " $local_artifacts_path
+}
- ensure_ci_artifacts
+cmd_ensure_current_artifacts_are_set_up() {
+ if [ -f $LOCAL_ARTIFACTS_CURRENT_DIR_FILE ]; then
+ say "Current artifacts already setup at " $(cat $LOCAL_ARTIFACTS_CURRENT_DIR_FILE)
+ else
+ download_ci_artifacts
+ echo $LOCAL_ARTIFACTS_PATH > $LOCAL_ARTIFACTS_CURRENT_DIR_FILE
+ say "Current artifacts downloaded and setup at " $LOCAL_ARTIFACTS_PATH
+ fi
}
-ensure_ci_artifacts() {
- if ! command -v aws >/dev/null; then
- die "AWS CLI not installed, which is required for downloading artifacts for integration tests."
+download_ci_artifacts() {
+ local artifacts=$1
+
+    if [ -z "$artifacts" ]; then
+ local default_artifacts=$(get_newest_s3_artifacts)
+ say "No specific artifacts are defined. Using default artifacts: " $default_artifacts
+ artifacts=$default_artifacts
fi
# Fetch all the artifacts so they are local
- say "Fetching CI artifacts from S3"
- FC_VERSION=$(cmd_sh "cd src/firecracker/src; cargo pkgid | cut -d# -f2 | cut -d. -f1-2")
- S3_URL=s3://spec.ccfc.min/firecracker-ci/v$FC_VERSION/$(uname -m)
- ARTIFACTS=$MICROVM_IMAGES_DIR/$(uname -m)
- if [ ! -d "$ARTIFACTS" ]; then
- mkdir -pv $ARTIFACTS
- aws s3 sync --no-sign-request "$S3_URL" "$ARTIFACTS"
- ok_or_die "Failed to download CI artifacts using awscli!"
- cmd_sh "./tools/setup-ci-artifacts.sh"
+ local artifacts_arch=$artifacts/$(uname -m)
+ local local_artifacts_path=$(get_local_artifacts_path $artifacts)/$(uname -m)
+
+    if [ -n "$FORCE_ARTIFACT_DOWNLOAD" ]; then
+        say "Removing " "$local_artifacts_path"
+        rm -r "$local_artifacts_path"
fi
+
+ if [ ! -d "$local_artifacts_path" ]; then
+ say "Fetching artifacts from S3: " $artifacts_arch " into: " $local_artifacts_path
+ mkdir -pv $local_artifacts_path
+ aws s3 sync --no-sign-request "$artifacts_arch" "$local_artifacts_path"
+ ok_or_die "Failed to download artifacts using awscli!"
+ cmd_sh "./tools/setup-ci-artifacts.sh" $local_artifacts_path
+ else
+ say "Found existing artifacts " $artifacts_arch " at: " $local_artifacts_path
+ fi
+
+ LOCAL_ARTIFACTS_PATH=$local_artifacts_path
}
apply_linux_61_tweaks() {
@@ -686,6 +780,7 @@ cmd_test() {
do_build=1
do_archive=1
do_kvm_check=1
+ do_artifacts_check=1
# Parse any command line args.
while [ $# -gt 0 ]; do
case "$1" in
@@ -713,6 +808,9 @@ cmd_test() {
"--no-kvm-check")
do_kvm_check=0
;;
+ "--no-artifacts-check")
+ do_artifacts_check=0
+ ;;
"--") { shift; break; } ;;
*)
die "Unknown argument: $1. Please use --help for help."
@@ -725,7 +823,8 @@ cmd_test() {
[ $do_kvm_check != 0 ] && ensure_kvm
ensure_devctr
ensure_build_dir
- ensure_ci_artifacts
+ [ $do_artifacts_check != 0 ] && cmd_ensure_current_artifacts_are_set_up
+
if [ $do_build != 0 ]; then
cmd_build --release
if [ -n "$BUILDKITE_PULL_REQUEST_BASE_BRANCH" ]; then
@@ -906,7 +1005,7 @@ cmd_sh() {
cmd_sandbox() {
cmd_build --release
- ensure_ci_artifacts
+ cmd_ensure_current_artifacts_are_set_up
cmd_sh "tmux new env PYTEST_ADDOPTS=--pdbcls=IPython.terminal.debugger:TerminalPdb PYTHONPATH=tests IPYTHONDIR=\$PWD/.ipython ipython -i ./tools/sandbox.py $@"
cmd_fix_perms ".ipython"
}
@@ -928,12 +1027,12 @@ cmd_sandbox_native() {
pip3.11 install ipython requests requests_unixsocket2 psutil tenacity filelock
pip3.11 install jsonschema aws_embedded_metrics
pip3.11 install packaging pytest
- ensure_ci_artifacts
+ cmd_ensure_current_artifacts_are_set_up
tmux neww sudo --preserve-env=HOME,PATH,TMUX env PYTHONPATH=tests IPYTHONDIR=\$PWD/.ipython ipython -i ./tools/sandbox.py $@
}
cmd_test_debug() {
- ensure_ci_artifacts
+ cmd_ensure_current_artifacts_are_set_up
cmd_sh "tmux new ./tools/test.sh --pdb $@"
}
@@ -958,8 +1057,8 @@ cmd_checkstyle() {
cmd_sh "git-secrets --register-aws && git-secrets --scan"
fi
- cmd_test --no-build --no-kvm-check -- -n 4 --dist worksteal integration_tests/style || exit 1
- cmd_test --no-build --no-kvm-check -- -n 4 --doctest-modules framework || exit 1
+ cmd_test --no-build --no-kvm-check --no-artifacts-check -- -n 4 --dist worksteal integration_tests/style || exit 1
+ cmd_test --no-build --no-kvm-check --no-artifacts-check -- -n 4 --doctest-modules framework || exit 1
}
cmd_checkbuild() {
diff --git a/tools/setup-ci-artifacts.sh b/tools/setup-ci-artifacts.sh
index 10fded08787..bddcafc83c2 100755
--- a/tools/setup-ci-artifacts.sh
+++ b/tools/setup-ci-artifacts.sh
@@ -9,7 +9,7 @@ TOOLS_DIR=$(dirname $0)
source "$TOOLS_DIR/functions"
say "Setup CI artifacts"
-cd build/img/$(uname -m)
+cd $1
say "Fix executable permissions"
find "firecracker" -type f |xargs chmod -c 755
diff --git a/tools/test.sh b/tools/test.sh
index 0bf67a65666..367c89165cb 100755
--- a/tools/test.sh
+++ b/tools/test.sh
@@ -31,8 +31,14 @@ if [ -f $CGROUP/cgroup.controllers -a -e $CGROUP/cgroup.type ]; then
> $CGROUP/cgroup.subtree_control
fi
-say "Copy CI artifacts to /srv, so hardlinks work"
-cp -ruvf build/img /srv
+if [ -f build/current_artifacts ]; then
+ say "Copy CI artifacts to /srv, so hardlinks work"
+ cp -ruvfL $(cat build/current_artifacts) /srv/current_artifacts
+else
+ # The directory must exist for pytest to function
+ mkdir -p /srv/current_artifacts
+ say "Skipping setting up CI artifacts"
+fi
cd tests
export PYTEST_ADDOPTS="${PYTEST_ADDOPTS:-} --pdbcls=IPython.terminal.debugger:TerminalPdb"