Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 4 additions & 3 deletions .github/workflows/rust.yml
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,10 @@ jobs:
- uses: actions/setup-go@v5
with:
go-version: "^1.18"
# Install `patch`, needed to build `krane-bundle`
- run: sudo apt-get install -y patch
- run: make build
# Install `patch`, needed to build `krane-bundle`, and tools for linking against musl
- run: sudo apt-get install -y patch musl-tools musl-dev
- run: rustup target add x86_64-unknown-linux-musl
- run: CARGO_BUILD_TARGET=x86_64-unknown-linux-musl make build

cross-build:
runs-on:
Expand Down
19 changes: 18 additions & 1 deletion clarify.toml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ license-files = [
{ path = "COPYING", hash = 0x278afbcf },
{ path = "LICENSE-APACHE", hash = 0x24b54f4b },
{ path = "LICENSE-MIT", hash = 0x462dee44 },
{ path = "src/unicode/data/LICENSE-UNICODE", hash = 0x70f7339 },
{ path = "src/unicode/data/LICENSE-UNICODE", hash = 0x70f7339 },
]

[clarify.petgraph]
Expand All @@ -27,6 +27,23 @@ license-files = [
{ path = "src/unicode_tables/LICENSE-UNICODE", hash = 0xa7f28b93 },
]

[clarify.rust-fuzzy-search]
expression = "MIT OR Apache-2.0"
license-files = [
{ path = "LICENSE-APACHE", hash = 0xbde481e5 },
{ path = "LICENSE-MIT", hash = 0xb5a90d39 },
]
skip-files = [
# these licenses apply to documentation
"target/doc/FiraSans-LICENSE.txt",
"target/doc/COPYRIGHT.txt",
"target/doc/LICENSE-APACHE.txt",
"target/doc/LICENSE-MIT.txt",
"target/doc/SourceCodePro-LICENSE.txt",
"target/doc/SourceSerif4-LICENSE.md",
]


[clarify.typenum]
expression = "MIT OR Apache-2.0"
license-files = [
Expand Down
8 changes: 3 additions & 5 deletions deny.toml
Original file line number Diff line number Diff line change
Expand Up @@ -78,8 +78,6 @@ skip = [
{ name = "tabled", version = "0.15.0" },
# multiple deps are using an older version of tabled_derive
{ name = "tabled_derive", version = "0.7.0" },
# multiple deps are using an older version of zerocopy
{ name = "zerocopy", version = "0.7.35" },
]

skip-tree = [
Expand All @@ -90,9 +88,9 @@ skip-tree = [
{ name = "windows-sys" },
]

[bans.workspace-dependencies]
duplicates = "deny"
include-path-dependencies = true
[bans.workspace-dependencies]
duplicates = "deny"
include-path-dependencies = true
unused = "deny"

[sources]
Expand Down
2 changes: 1 addition & 1 deletion rust-toolchain.toml
Original file line number Diff line number Diff line change
Expand Up @@ -2,5 +2,5 @@
# particular date of the nightly compiler, but we want builds to be reproducible, so we lock to a
# specific, recent instance of nightly.
[toolchain]
channel = "nightly-2024-07-11"
channel = "nightly-2025-02-28"
profile = "default"
2 changes: 1 addition & 1 deletion tools/buildsys/src/cache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -159,7 +159,7 @@ impl LookasideCache {
let name = parsed
.path_segments()
.context(error::ExternalFileNameSnafu { path: url })?
.last()
.next_back()
.context(error::ExternalFileNameSnafu { path: url })?;
Ok(name.into())
}
Expand Down
2 changes: 1 addition & 1 deletion tools/buildsys/src/gomod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -162,7 +162,7 @@ fn extract_file_name(url: &str) -> Result<PathBuf> {
let name = parsed
.path_segments()
.context(error::InputFileBadSnafu { path: url })?
.last()
.next_back()
.context(error::InputFileBadSnafu { path: url })?;
Ok(name.into())
}
Expand Down
14 changes: 7 additions & 7 deletions tools/oci-cli-wrapper/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,14 @@
//!
//! Currently, two tools are supported:
//! * crane, gcrane, krane
//! Crane provides a more direct interaction with the container registry,
//! allowing us to query image information in the registry without having to pull the full image to
//! disk. It also does not require a daemon to operate and has optimizations for pulling large images to disk
//! Crane provides a more direct interaction with the container registry,
//! allowing us to query image information in the registry without having to pull the full image to
//! disk. It also does not require a daemon to operate and has optimizations for pulling large images to disk
//! * docker
//! Docker can perform all interactions we need with several caveats that make it less efficient than
//! crane. The image needs to be pulled locally in order for docker to inspect the manifest and extract
//! metadata. In addition, in order to operate with OCI image format, the containerd-snapshotter
//! feature has to be enabled in the docker daemon
//! Docker can perform all interactions we need with several caveats that make it less efficient than
//! crane. The image needs to be pulled locally in order for docker to inspect the manifest and extract
//! metadata. In addition, in order to operate with OCI image format, the containerd-snapshotter
//! feature has to be enabled in the docker daemon
use std::fmt::{Display, Formatter};
use std::{collections::HashMap, path::Path};

Expand Down
8 changes: 4 additions & 4 deletions tools/testsys/src/aws_resources.rs
Original file line number Diff line number Diff line change
Expand Up @@ -114,8 +114,8 @@ pub(crate) struct AmiImage {
}

/// Create a CRD to launch Bottlerocket instances on an EKS or ECS cluster.
pub(crate) async fn ec2_crd<'a>(
bottlerocket_input: BottlerocketInput<'a>,
pub(crate) async fn ec2_crd(
bottlerocket_input: BottlerocketInput<'_>,
cluster_type: ClusterType,
region: &str,
) -> Result<Resource> {
Expand Down Expand Up @@ -231,8 +231,8 @@ pub(crate) async fn ec2_crd<'a>(
}

/// Create a CRD to launch Bottlerocket instances on an EKS or ECS cluster.
pub(crate) async fn ec2_karpenter_crd<'a>(
bottlerocket_input: BottlerocketInput<'a>,
pub(crate) async fn ec2_karpenter_crd(
bottlerocket_input: BottlerocketInput<'_>,
region: &str,
) -> Result<Resource> {
let cluster_name = bottlerocket_input
Expand Down
2 changes: 1 addition & 1 deletion tools/testsys/src/crds.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ pub struct CrdInput<'a> {
pub images: TestsysImages,
}

impl<'a> CrdInput<'a> {
impl CrdInput<'_> {
/// Retrieve the TUF repo information from `Infra.toml`
pub fn tuf_repo_config(&self) -> Option<TufRepoConfig> {
if let (Some(metadata_base_url), Some(targets_url)) = (
Expand Down
2 changes: 1 addition & 1 deletion tools/update-metadata/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -311,7 +311,7 @@ impl Update {
.waves
.range((Included(0), Excluded(seed)))
.map(|(k, v)| (*k, *v))
.last();
.next_back();
let end_wave = self
.waves
.range((Included(seed), Included(MAX_SEED)))
Expand Down
56 changes: 56 additions & 0 deletions twoliter/embedded/Makefile.toml
Original file line number Diff line number Diff line change
Expand Up @@ -416,6 +416,7 @@ done
[tasks.check]
dependencies = [
"check-cargo-version",
"check-advisories",
"unit-tests",
"check-fmt",
"check-lints",
Expand Down Expand Up @@ -540,6 +541,61 @@ fi
'''
]

# Task to lint Bottlerocket Security Advisories by checking for valid CVE or GHSA IDs
# and verifying that each versioned directory under "advisories" has a corresponding
# tag on the Twoliter project this task runs against.
[tasks.check-advisories]
script_runner = "bash"
script = [
'''
if find advisories -name '*.toml' -type f >/dev/null 2>&1 ; then
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
if find advisories -name '*.toml' -type f >/dev/null 2>&1 ; then
if find ${BUILDSYS_ROOT_DIR}/advisories -name '*.toml' -type f >/dev/null 2>&1 ; then

We should use full filepaths in case the current working directory is not set as expected. There are a few cases in here that refer to the advisories directory that require this.


# Ensure each versioned advisories directory has a corresponding release tag.
for version in $(find advisories/* -type d -not -path advisories/staging); do
set +e; grep -q v$(basename ${version})$ <(PAGER= git tag); rc="$?"; set -e;
if [ "${rc}" -ne 0 ]; then
echo "error: no corresponding tag found for ${version} advisories directory" >&2
exit 1
fi
done


# Not all BRSAs might have a CVE; there can be cases where an advisory
# is for a GHSA, for example, but no corresponding CVE, and vice versa.
# Check separately for GHSA ID regex when a 'ghsa' is included and for
# CVE ID regex when a 'cve' is included.

# 1. If ghsa line exists and GHSA ID does not match regex, error

# Regex to strictly match a GHSA identifier in an advisory
# https://github.com/github/advisory-database?tab=readme-ov-file#ghsa-ids
ghsa_regex="^ghsa\s+=\s+\"GHSA(-[23456789cfghjmpqrvwx]{4}){3}\""

# Find all non-matching GHSA lines and report
ghsa_found="$(grep -L --include '*.toml' -R -P ${ghsa_regex} advisories | xargs awk '/ghsa/')"
if [ ! -z "${ghsa_found}" ] ; then
echo "error: advisory GHSA ID did not match expression '${ghsa_regex}' and may contain non-ASCII characters" >&2
echo "${ghsa_found}" >&2
exit 1
fi

# 2. If cve line exists and CVE ID does not match regex, error

# Regex to strictly match CVE identifier in an advisory
# https://cve.mitre.org/cve/identifiers/tech-guidance.html#input_format
cve_regex="^cve\s+=\s+\"CVE-\d{4}-(0\d{3}|[1-9]\d{3,})\""

# Find all non-matching CVE lines and report
cve_found="$(grep -L --include '*.toml' -R -P ${cve_regex} advisories | xargs awk '/cve/')"
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

If I understand right, these checks will fail if an advisory document contains the string cve or ghsa but does not contain a valid CVE or GHSA line.

I think what we actually want to do is semantically parse the advisory TOML document and check that the CVE and GHSA lines are valid, right? Parsing with regexes like this creates a lot of potential for surprising behavior, with valid inputs getting caught in the crossfire.

Can we at least constrain the mismatch checks to lines that start with cve/ghsa rather than the entirety of the document? That way we don't misfire on freeform text e.g. in an advisory description that happens to have the character string cve in it.


In general I want to see code like this in Twoliter hoisted into Rust so that we can get unit test coverage and a higher ability to take advantage of software libraries like a toml parser. We don't really have a "recommended" way to do that. In a pinch we could probably have Twoliter provide a helper binary (oneliter?) under tools/ to run some of these tasks, but in the longer term we probably want to express the task graph currently in Makefile.toml inside Twoliter and drop cargo-make as a dependency.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can we at least constrain the mismatch checks to lines that start with cve/ghsa rather than the entirety of the document? That way we don't misfire on freeform text e.g. in an advisory description that happens to have the character string cve in it.

You're right, the awk half of this pipeline would break down on any inclusion of "cve"/"ghsa", including advisory descriptions. I'll modify those respective checks to match on line start.

In general I want to see code like this in Twoliter hoisted into Rust so that we can get unit test coverage and a higher ability to take advantage of software libraries like a toml parser.

I totally agree; that along with including advisory types, tools, and models in twoliter are a more robust long term effort

if [ ! -z "${cve_found}" ] ; then
echo "error: advisory CVE ID did not match expression '${cve_regex}' and may contain non-ASCII characters" >&2
echo "${cve_found}" >&2
exit 1
fi
fi
'''
]

[tasks.check-golangci-lint]
script = [
'''
Expand Down
8 changes: 4 additions & 4 deletions twoliter/src/project/lock/mod.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
/// Covers the functionality and implementation of Twoliter.lock which is generated using
/// `twoliter update`. It acts similarly to Cargo.lock as a flattened out representation of all kit
/// and sdk image dependencies with associated digests so twoliter can validate that contents of a kit
/// do not mutate unexpectedly.
//! Covers the functionality and implementation of Twoliter.lock which is generated using
//! `twoliter update`. It acts similarly to Cargo.lock as a flattened out representation of all kit
//! and sdk image dependencies with associated digests so twoliter can validate that contents of a kit
//! do not mutate unexpectedly.

/// Contains operations for working with an OCI Archive
mod archive;
Expand Down
3 changes: 0 additions & 3 deletions twoliter/src/project/lock/verification.rs
Original file line number Diff line number Diff line change
Expand Up @@ -101,9 +101,6 @@ impl LockfileVerifier for Lock {
}
}

/// A `LockfileVerifier` can return a set of `VerifyTag` structs, claiming that those artifacts
/// have been resolved and verified against the lockfile.

/// Writes marker files indicating which artifacts have been resolved and verified against the lock
#[derive(Debug)]
pub(crate) struct VerificationTagger {
Expand Down
2 changes: 1 addition & 1 deletion twoliter/src/project/vendor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ impl OverriddenVendor {
.unwrap_or(&self.original_vendor.registry)
}

pub(crate) fn repo_for<'a, V: VendedArtifact>(&'a self, image: &'a V) -> &str {
pub(crate) fn repo_for<'a, V: VendedArtifact>(&'a self, image: &'a V) -> &'a str {
self.override_
.name
.as_deref()
Expand Down
Loading