
Commit 986a48d

run: add --details option
Closes: #58
1 parent 97babdc commit 986a48d

File tree: 6 files changed, +254 −44 lines

crates/cargo-codspeed/README.md

Lines changed: 20 additions & 0 deletions
@@ -48,6 +48,26 @@ Options:
   -V, --version  Print version information
 ```
 
+### Running benchmarks with details
+
+Use the `--details` flag to see timing information for each benchmark:
+
+```bash
+cargo codspeed run --details
+```
+
+This will show execution times for each benchmark:
+```
+Checked: benches/example.rs::fibonacci (5.6 us)
+Checked: benches/example.rs::factorial (368 ns)
+```
+
+The output also shows the total number of benchmarks executed:
+```
+Done running benchmark_suite (5 benchmarks)
+Finished running 2 benchmark suite(s) (10 benchmarks total)
+```
+
 ## Development
 
 ### Troubleshooting

crates/cargo-codspeed/src/app.rs

Lines changed: 6 additions & 0 deletions
@@ -85,6 +85,10 @@ enum Commands {
 
         #[command(flatten)]
         bench_target_filters: BenchTargetFilters,
+
+        /// Print per-benchmark duration details
+        #[arg(long)]
+        details: bool,
     },
 }
 
@@ -141,12 +145,14 @@ pub fn run(args: impl Iterator<Item = OsString>) -> Result<()> {
             benchname,
             package_filters,
            bench_target_filters,
+            details,
        } => run_benches(
            &metadata,
            benchname,
            package_filters,
            bench_target_filters,
            measurement_mode,
+            details,
        ),
    };
 
crates/cargo-codspeed/src/run.rs

Lines changed: 73 additions & 28 deletions
@@ -10,6 +10,7 @@ use codspeed::walltime_results::WalltimeResults;
 use std::{
     io::{self, Write},
     path::{Path, PathBuf},
+    process::Stdio,
 };
 
 #[cfg(unix)]
@@ -99,6 +100,7 @@ pub fn run_benches(
     package_filters: PackageFilters,
     bench_target_filters: BenchTargetFilters,
     measurement_mode: MeasurementMode,
+    show_details: bool,
 ) -> Result<()> {
     let codspeed_target_dir = get_codspeed_target_dir(metadata, measurement_mode);
     let workspace_root = metadata.workspace_root.as_std_path();
@@ -113,16 +115,29 @@
 
     eprintln!("Collected {} benchmark suite(s) to run", benches.len());
 
+    let mut total_benchmark_count = 0;
+
     for bench in benches.iter() {
         let bench_target_name = &bench.bench_target_name;
         // workspace_root is needed since file! returns the path relatively to the workspace root
         // while CARGO_MANIFEST_DIR returns the path to the sub package
         let workspace_root = metadata.workspace_root.clone();
-        eprintln!("Running {} {}", &bench.package_name, bench_target_name);
+        eprintln!(
+            "Running {} {} ({})",
+            &bench.package_name,
+            bench_target_name,
+            bench.bench_path.display()
+        );
         let mut command = std::process::Command::new(&bench.bench_path);
         command
             .env("CODSPEED_CARGO_WORKSPACE_ROOT", workspace_root)
-            .current_dir(&bench.working_directory);
+            .current_dir(&bench.working_directory)
+            .stdout(Stdio::piped())
+            .stderr(Stdio::inherit());
+
+        if show_details {
+            command.env("CODSPEED_SHOW_DETAILS", "1");
+        }
 
         if measurement_mode == MeasurementMode::Walltime {
             command.arg("--bench"); // Walltime targets need this additional argument (inherited from running them with `cargo bench`)
@@ -132,33 +147,63 @@
             command.arg(bench_name_filter);
         }
 
-        command
-            .status()
-            .map_err(|e| anyhow!("failed to execute the benchmark process: {}", e))
-            .and_then(|status| {
-                if status.success() {
-                    Ok(())
-                } else {
-                    #[cfg(unix)]
-                    {
-                        let code = status
-                            .code()
-                            .or(status.signal().map(|s| 128 + s)) // 128+N indicates that a command was interrupted by signal N (see: https://tldp.org/LDP/abs/html/exitcodes.html)
-                            .unwrap_or(1);
-
-                        eprintln!("failed to execute the benchmark process, exit code: {code}");
-
-                        std::process::exit(code);
-                    }
-                    #[cfg(not(unix))]
-                    {
-                        bail!("failed to execute the benchmark process: {}", status)
-                    }
-                }
-            })?;
-        eprintln!("Done running {bench_target_name}");
+        let output = command
+            .output()
+            .map_err(|e| anyhow!("failed to execute the benchmark process: {}", e))?;
+
+        // Count benchmarks by looking for "Measured:" or "Checked:" lines
+        let stdout = String::from_utf8_lossy(&output.stdout);
+        let benchmark_count = stdout
+            .lines()
+            .filter(|line| {
+                line.trim_start().starts_with("Measured:")
+                    || line.trim_start().starts_with("Checked:")
+                    || (show_details && line.trim_start().starts_with(" Checked:"))
+                    || (show_details && line.trim_start().starts_with(" Measured:"))
+            })
+            .count();
+        total_benchmark_count += benchmark_count;
+
+        // Print captured output
+        print!("{stdout}");
+        io::stdout().flush().unwrap();
+
+        if !output.status.success() {
+            #[cfg(unix)]
+            {
+                let code = output
+                    .status
+                    .code()
+                    .or(output.status.signal().map(|s| 128 + s)) // 128+N indicates that a command was interrupted by signal N (see: https://tldp.org/LDP/abs/html/exitcodes.html)
+                    .unwrap_or(1);
+
+                eprintln!("failed to execute the benchmark process, exit code: {code}");
+
+                std::process::exit(code);
+            }
+            #[cfg(not(unix))]
+            {
+                bail!("failed to execute the benchmark process: {}", output.status)
+            }
+        }
+
+        if benchmark_count == 0 && !stdout.is_empty() {
+            eprintln!("Warning: No benchmarks detected in output for {bench_target_name}");
+        }
+        if show_details {
+            eprintln!("Done running {bench_target_name} ({benchmark_count} benchmarks)");
+        } else {
+            eprintln!("Done running {bench_target_name}");
+        }
+    }
+    if show_details {
+        eprintln!(
+            "Finished running {} benchmark suite(s) ({total_benchmark_count} benchmarks total)",
+            benches.len()
+        );
+    } else {
+        eprintln!("Finished running {} benchmark suite(s)", benches.len());
     }
-    eprintln!("Finished running {} benchmark suite(s)", benches.len());
 
     if measurement_mode == MeasurementMode::Walltime {
         aggregate_raw_walltime_data(workspace_root)?;
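Since the bench process's stdout is now piped, the suite-level count is just a line filter over the captured text. Here is a minimal sketch of that counting idea in isolation; `count_benchmarks` is a hypothetical helper, not a function from the crate.

```rust
// Hypothetical helper mirroring the filter in run.rs: count stdout lines that
// report a finished benchmark ("Measured:" in instrumented mode, "Checked:"
// otherwise), ignoring any leading indentation.
fn count_benchmarks(stdout: &str) -> usize {
    stdout
        .lines()
        .filter(|line| {
            let line = line.trim_start();
            line.starts_with("Measured:") || line.starts_with("Checked:")
        })
        .count()
}

fn main() {
    let captured = "\
Checked: benches/example.rs::fibonacci (5.6 us)
  Checked: benches/example.rs::factorial (368 ns)
some unrelated output line
";
    assert_eq!(count_benchmarks(captured), 2);
    println!("counted {} benchmark(s)", count_benchmarks(captured));
}
```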

crates/codspeed/src/codspeed.rs

Lines changed: 91 additions & 16 deletions
@@ -1,6 +1,7 @@
-use crate::measurement;
+use crate::{measurement, utils};
 use colored::Colorize;
 use std::ffi::CString;
+use std::time::{Duration, Instant};
 
 pub use std::hint::black_box;
 
@@ -15,11 +16,14 @@ pub struct CodSpeed {
     current_benchmark: CString,
     group_stack: Vec<String>,
     is_instrumented: bool,
+    start_time: Option<Instant>,
+    show_details: bool,
 }
 
 impl CodSpeed {
     pub fn new() -> Self {
         let is_instrumented = measurement::is_instrumented();
+        let show_details = utils::show_details();
         if !is_instrumented {
             println!(
                 "{} codspeed is enabled, but no performance measurement will be made since it's running in an unknown environment.",
@@ -32,6 +36,8 @@ impl CodSpeed {
             current_benchmark: CString::new("").expect("CString::new failed"),
             group_stack: Vec::new(),
             is_instrumented,
+            start_time: None,
+            show_details,
         }
     }
 
@@ -47,29 +53,56 @@ impl CodSpeed {
     pub fn start_benchmark(&mut self, name: &str) {
         self.current_benchmark = CString::new(name).expect("CString::new failed");
         measurement::start();
+        if self.show_details && !self.is_instrumented {
+            self.start_time = Some(Instant::now());
+        } else {
+            self.start_time = None;
+        }
     }
 
     #[inline(always)]
     pub fn end_benchmark(&mut self) {
         measurement::stop(&self.current_benchmark);
         self.benchmarked
             .push(self.current_benchmark.to_str().unwrap().to_string());
-        let action_str = if self.is_instrumented {
-            "Measured"
-        } else {
-            "Checked"
-        };
-        let group_str = if self.group_stack.is_empty() {
-            "".to_string()
+        if self.show_details {
+            if !self.is_instrumented {
+                let elapsed = self
+                    .start_time
+                    .take()
+                    .map(|start| start.elapsed())
+                    .unwrap_or_default();
+                let group_str = if self.group_stack.is_empty() {
+                    "".to_string()
+                } else {
+                    format!(" (group: {})", self.group_stack.join("/"))
+                };
+                let formatted_duration = format_duration(elapsed);
+                println!(
+                    " Checked: {}{} ({})",
+                    self.current_benchmark.to_string_lossy(),
+                    group_str,
+                    formatted_duration
+                );
+            }
         } else {
-            format!(" (group: {})", self.group_stack.join("/"))
-        };
-        println!(
-            "{}: {}{}",
-            action_str,
-            self.current_benchmark.to_string_lossy(),
-            group_str
-        );
+            let action_str = if self.is_instrumented {
+                "Measured"
+            } else {
+                "Checked"
+            };
+            let group_str = if self.group_stack.is_empty() {
+                "".to_string()
+            } else {
+                format!(" (group: {})", self.group_stack.join("/"))
+            };
+            println!(
+                "{}: {}{}",
+                action_str,
+                self.current_benchmark.to_string_lossy(),
+                group_str
+            );
+        }
     }
 }
 
@@ -78,3 +111,45 @@ impl Default for CodSpeed {
         Self::new()
     }
 }
+
+fn format_duration(duration: Duration) -> String {
+    let nanos = duration.as_nanos();
+    if nanos == 0 {
+        return "<1 ns".to_string();
+    }
+
+    if nanos < 1_000 {
+        format!("{nanos} ns")
+    } else if nanos < 1_000_000 {
+        format!("{:.1} us", nanos as f64 / 1_000.0)
+    } else if nanos < 1_000_000_000 {
+        format!("{:.1} ms", nanos as f64 / 1_000_000.0)
+    } else {
+        format!("{:.2} s", nanos as f64 / 1_000_000_000.0)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn format_duration_handles_zero() {
+        assert_eq!(format_duration(Duration::from_nanos(0)), "<1 ns");
+    }
+
+    #[test]
+    fn format_duration_scales_to_microseconds() {
+        assert_eq!(format_duration(Duration::from_nanos(1_500)), "1.5 us");
+    }
+
+    #[test]
+    fn format_duration_scales_to_milliseconds() {
+        assert_eq!(format_duration(Duration::from_micros(2_345)), "2.3 ms");
+    }
+
+    #[test]
+    fn format_duration_scales_to_seconds() {
+        assert_eq!(format_duration(Duration::from_millis(2_500)), "2.50 s");
+    }
+}
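The per-benchmark timing hinges on an `Option<Instant>` that is set in `start_benchmark` and consumed with `take()` in `end_benchmark`, so a missing or stale start falls back to a zero duration. A standalone sketch of just that pattern follows; the `Timer` type is illustrative, not crate code.

```rust
// Illustrative Option<Instant> timing pattern, mirroring the start_time
// handling in CodSpeed::start_benchmark / end_benchmark; `Timer` is not a
// type from the crate.
use std::time::{Duration, Instant};

struct Timer {
    start_time: Option<Instant>,
}

impl Timer {
    fn start(&mut self) {
        self.start_time = Some(Instant::now());
    }

    fn stop(&mut self) -> Duration {
        // take() clears the stored Instant, so calling stop() without a
        // matching start() falls back to a zero duration.
        self.start_time
            .take()
            .map(|start| start.elapsed())
            .unwrap_or_default()
    }
}

fn main() {
    let mut timer = Timer { start_time: None };
    timer.start();
    std::thread::sleep(Duration::from_millis(5)); // stand-in for a benchmark body
    println!("elapsed: {:?}", timer.stop());
}
```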

crates/codspeed/src/utils.rs

Lines changed: 4 additions & 0 deletions
@@ -46,6 +46,10 @@ pub fn is_perf_enabled() -> bool {
     std::env::var("CODSPEED_PERF_ENABLED").is_ok()
 }
 
+pub fn show_details() -> bool {
+    std::env::var("CODSPEED_SHOW_DETAILS").is_ok()
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
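Note that the gate only tests whether `CODSPEED_SHOW_DETAILS` is set; its value is ignored. A standalone sketch of how that reads in practice (nothing here beyond the one-liner above is crate code):

```rust
// Standalone illustration of the env-var gate: any value, even "0", counts
// as "details on" because only the variable's presence is tested.
fn show_details() -> bool {
    std::env::var("CODSPEED_SHOW_DETAILS").is_ok()
}

fn main() {
    // Try: `CODSPEED_SHOW_DETAILS=1 cargo run` vs a plain `cargo run`.
    if show_details() {
        println!("per-benchmark details enabled");
    } else {
        println!("per-benchmark details disabled");
    }
}
```

This matches how `cargo codspeed run --details` enables it: the parent process sets the variable to "1" before spawning each bench target.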
