2 changes: 1 addition & 1 deletion .gitattributes
@@ -1,2 +1,2 @@
testdata/perf_map/* filter=lfs diff=lfs merge=lfs -text
src/run/runner/wall_time/perf/snapshots/*.snap filter=lfs diff=lfs merge=lfs -text
*.snap filter=lfs diff=lfs merge=lfs -text
86 changes: 78 additions & 8 deletions Cargo.lock

Some generated files are not rendered by default.

22 changes: 15 additions & 7 deletions Cargo.toml
@@ -11,8 +11,8 @@ path = "src/main.rs"


[dependencies]
anyhow = "1.0.75"
clap = { version = "4.4.8", features = ["derive", "env", "color"] }
anyhow = { workspace = true }
clap = { workspace = true }
itertools = "0.11.0"
lazy_static = "1.4.0"
log = "0.4.20"
@@ -26,8 +26,8 @@ reqwest = { version = "0.11.22", features = [
] }
reqwest-middleware = "0.2.4"
reqwest-retry = "0.3.0"
serde = { version = "1.0.192", features = ["derive"] }
serde_json = { version = "1.0.108", features = ["preserve_order"] }
serde = { workspace = true }
serde_json = { workspace = true }
url = "2.4.1"
sha256 = "1.4.0"
tokio = { version = "1", features = ["macros", "rt"] }
@@ -51,10 +51,11 @@ async-trait = "0.1.82"
libc = "0.2.171"
bincode = "1.3.3"
object = "0.36.7"
linux-perf-data = "0.11.0"
# TODO: Make this repo public
linux-perf-data = { git = "ssh://[email protected]/CodSpeedHQ/linux-perf-data.git", branch = "feat/support-perf-pipe-data-parsing" }
debugid = "0.8.0"
memmap2 = "0.9.5"
nix = { version = "0.29.0", features = ["fs", "time", "user"] }
nix = { workspace = true, features = ["fs", "time", "user"] }
futures = "0.3.31"
runner-shared = { path = "crates/runner-shared" }
shellexpand = { version = "3.1.1", features = ["tilde"] }
@@ -76,7 +77,14 @@ rstest_reuse = "0.7.0"
shell-quote = "0.7.2"

[workspace]
members = ["crates/runner-shared"]
members = ["crates/exec-harness", "crates/runner-shared"]

[workspace.dependencies]
anyhow = "1.0.75"
clap = { version = "4.4.8", features = ["derive", "env", "color"] }
nix = "0.29.0"
serde = { version = "1.0.192", features = ["derive"] }
serde_json = { version = "1.0.108", features = ["preserve_order"] }

[workspace.metadata.release]
sign-tag = true
12 changes: 12 additions & 0 deletions crates/exec-harness/Cargo.toml
@@ -0,0 +1,12 @@
[package]
name = "exec-harness"
version = "4.4.1"
edition = "2024"

[dependencies]
anyhow = { workspace = true }
codspeed = "4.1.0"
clap = { workspace = true }
serde_json = { workspace = true }
serde = { workspace = true }
nix = { workspace = true, features = ["signal"] }
116 changes: 116 additions & 0 deletions crates/exec-harness/src/main.rs
@@ -0,0 +1,116 @@
use crate::walltime::WalltimeResults;
use clap::Parser;
use codspeed::instrument_hooks::InstrumentHooks;
use codspeed::walltime_results::WalltimeBenchmark;
use std::path::PathBuf;
use std::process;

mod walltime;

#[derive(Parser, Debug)]
#[command(name = "exec-harness")]
#[command(about = "CodSpeed exec harness - wraps commands with performance instrumentation")]
struct Args {
    /// Optional benchmark name (defaults to command filename)
    #[arg(long)]
    name: Option<String>,

    /// The command and arguments to execute
    command: Vec<String>,
}

fn main() {
    let args = Args::parse();

    if args.command.is_empty() {
        eprintln!("Error: No command provided");
        process::exit(1);
    }

    // Derive benchmark name from command if not provided
    let bench_name = args.name.unwrap_or_else(|| {
        // Extract filename from command path
        let cmd = &args.command[0];
        std::path::Path::new(cmd)
            .file_name()
            .and_then(|n| n.to_str())
            .map(|s| s.to_string())
            .unwrap_or_else(|| "exec_benchmark".to_string())
    });
    // TODO: Better URI generation
    let bench_uri = format!("standalone_run::{bench_name}");

    let hooks = InstrumentHooks::instance();

    // TODO: Change this to avoid impersonating `codspeed-rust`
    hooks
        .set_integration("codspeed-rust", env!("CARGO_PKG_VERSION"))
        .unwrap();

    const NUM_ITERATIONS: usize = 1;
    let mut times_per_round_ns = Vec::with_capacity(NUM_ITERATIONS);

    hooks.start_benchmark().unwrap();
    for _ in 0..NUM_ITERATIONS {
        // Spawn the command
        let mut child = match process::Command::new(&args.command[0])
            .args(&args.command[1..])
            .spawn()
        {
            Ok(child) => child,
            Err(e) => {
                eprintln!("Failed to spawn command: {e}");
                process::exit(1);
            }
        };
        // Start monotonic timer for this iteration
        let bench_start = InstrumentHooks::current_timestamp();

        // Wait for the process to complete
        let status = match child.wait() {
            Ok(status) => status,
            Err(e) => {
                eprintln!("Failed to wait for command: {e}");
                process::exit(1);
            }
        };

        // Measure elapsed time
        let bench_end = InstrumentHooks::current_timestamp();
        hooks.add_benchmark_timestamps(bench_start, bench_end);

        // Exit immediately if any iteration fails
        if !status.success() {
            eprintln!("Command failed with exit code: {:?}", status.code());
            process::exit(status.code().unwrap_or(1));
        }

        // Calculate and store the elapsed time in nanoseconds
        let elapsed_ns = (bench_end - bench_start) as u128;
        times_per_round_ns.push(elapsed_ns);
    }

    hooks.stop_benchmark().unwrap();
    hooks.set_executed_benchmark(&bench_uri).unwrap();

    // Collect walltime results
    let max_time_ns = times_per_round_ns.iter().copied().max();
    let walltime_benchmark = WalltimeBenchmark::from_runtime_data(
        bench_name.clone(),
        bench_uri.clone(),
        vec![1; NUM_ITERATIONS],
        times_per_round_ns,
        max_time_ns,
    );

    let walltime_results = WalltimeResults::from_benchmarks(vec![walltime_benchmark])
        .expect("Failed to create walltime results");

    walltime_results
        .save_to_file(
            std::env::var("CODSPEED_PROFILE_FOLDER")
                .map(PathBuf::from)
                .unwrap_or_else(|_| std::env::current_dir().unwrap().join(".codspeed")),
        )
        .expect("Failed to save walltime results");
}
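
For context, a rough usage sketch of the harness added above; the binary path, benchmark name, and profile folder shown here are illustrative assumptions, not part of this PR:

# Explicit benchmark name; `--` separates harness flags from the wrapped command
exec-harness --name my_bench -- ./target/release/my_binary --input data.bin

# Without --name, the benchmark is named after the command's file name
# ("my_binary" here, reported under the URI "standalone_run::my_binary"),
# and results are written to $CODSPEED_PROFILE_FOLDER, falling back to ./.codspeed.
CODSPEED_PROFILE_FOLDER=/tmp/profile exec-harness ./target/release/my_binary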