Commit 97eba4c

run: add --details option
Closes: #58
Parent: 97babdc

File tree

6 files changed (+259, -44 lines)


crates/cargo-codspeed/README.md

Lines changed: 20 additions & 0 deletions
````diff
@@ -48,6 +48,26 @@ Options:
   -V, --version  Print version information
 ```
 
+### Running benchmarks with details
+
+Use the `--details` flag to see timing information for each benchmark:
+
+```bash
+cargo codspeed run --details
+```
+
+This will show execution times for each benchmark:
+```
+Checked: benches/example.rs::fibonacci (5.6 us)
+Checked: benches/example.rs::factorial (368 ns)
+```
+
+The output also shows the total number of benchmarks executed:
+```
+Done running benchmark_suite (5 benchmarks)
+Finished running 2 benchmark suite(s) (10 benchmarks total)
+```
+
 ## Development
 
 ### Troubleshooting
````
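The run.rs and utils.rs diffs below show that the flag is forwarded to each benchmark binary through the `CODSPEED_SHOW_DETAILS` environment variable (any value enables it). As a hedged aside, not something the README documents, setting that variable by hand should therefore produce the per-benchmark timing lines too, while the suite totals stay tied to the `--details` flag itself:

```bash
# Assumption drawn from the diffs below: CODSPEED_SHOW_DETAILS is what the
# benchmark process actually checks, so exporting it has a similar effect.
CODSPEED_SHOW_DETAILS=1 cargo codspeed run
```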

crates/cargo-codspeed/src/app.rs

Lines changed: 6 additions & 0 deletions
```diff
@@ -85,6 +85,10 @@ enum Commands {
 
         #[command(flatten)]
         bench_target_filters: BenchTargetFilters,
+
+        /// Print per-benchmark duration details
+        #[arg(long)]
+        details: bool,
     },
 }
 
@@ -141,12 +145,14 @@ pub fn run(args: impl Iterator<Item = OsString>) -> Result<()> {
             benchname,
             package_filters,
             bench_target_filters,
+            details,
         } => run_benches(
             &metadata,
             benchname,
             package_filters,
             bench_target_filters,
             measurement_mode,
+            details,
         ),
     };
 
```
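For readers unfamiliar with clap's derive syntax used above, here is a minimal stand-alone sketch (hypothetical `Cli` struct, not the crate's real command tree) of how a bare `#[arg(long)]` boolean flag is parsed:

```rust
use clap::Parser; // clap v4 derive API, matching the attribute style in the diff above

/// Hypothetical stand-alone CLI, used only to illustrate the flag wiring;
/// the real field lives inside the `Commands::Run` variant shown above.
#[derive(Parser, Debug)]
struct Cli {
    /// Print per-benchmark duration details
    #[arg(long)]
    details: bool,
}

fn main() {
    // Passing `--details` sets the field to true; omitting it leaves it false.
    let cli = Cli::parse();
    println!("details = {}", cli.details);
}
```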

crates/cargo-codspeed/src/run.rs

Lines changed: 73 additions & 28 deletions
```diff
@@ -10,6 +10,7 @@ use codspeed::walltime_results::WalltimeResults;
 use std::{
     io::{self, Write},
     path::{Path, PathBuf},
+    process::Stdio,
 };
 
 #[cfg(unix)]
@@ -99,6 +100,7 @@ pub fn run_benches(
     package_filters: PackageFilters,
     bench_target_filters: BenchTargetFilters,
     measurement_mode: MeasurementMode,
+    show_details: bool,
 ) -> Result<()> {
     let codspeed_target_dir = get_codspeed_target_dir(metadata, measurement_mode);
     let workspace_root = metadata.workspace_root.as_std_path();
@@ -113,16 +115,29 @@
 
     eprintln!("Collected {} benchmark suite(s) to run", benches.len());
 
+    let mut total_benchmark_count = 0;
+
     for bench in benches.iter() {
         let bench_target_name = &bench.bench_target_name;
         // workspace_root is needed since file! returns the path relatively to the workspace root
         // while CARGO_MANIFEST_DIR returns the path to the sub package
         let workspace_root = metadata.workspace_root.clone();
-        eprintln!("Running {} {}", &bench.package_name, bench_target_name);
+        eprintln!(
+            "Running {} {} ({})",
+            &bench.package_name,
+            bench_target_name,
+            bench.bench_path.display()
+        );
         let mut command = std::process::Command::new(&bench.bench_path);
         command
             .env("CODSPEED_CARGO_WORKSPACE_ROOT", workspace_root)
-            .current_dir(&bench.working_directory);
+            .current_dir(&bench.working_directory)
+            .stdout(Stdio::piped())
+            .stderr(Stdio::inherit());
+
+        if show_details {
+            command.env("CODSPEED_SHOW_DETAILS", "1");
+        }
 
         if measurement_mode == MeasurementMode::Walltime {
             command.arg("--bench"); // Walltime targets need this additional argument (inherited from running them with `cargo bench`)
@@ -132,33 +147,63 @@
             command.arg(bench_name_filter);
         }
 
-        command
-            .status()
-            .map_err(|e| anyhow!("failed to execute the benchmark process: {}", e))
-            .and_then(|status| {
-                if status.success() {
-                    Ok(())
-                } else {
-                    #[cfg(unix)]
-                    {
-                        let code = status
-                            .code()
-                            .or(status.signal().map(|s| 128 + s)) // 128+N indicates that a command was interrupted by signal N (see: https://tldp.org/LDP/abs/html/exitcodes.html)
-                            .unwrap_or(1);
-
-                        eprintln!("failed to execute the benchmark process, exit code: {code}");
-
-                        std::process::exit(code);
-                    }
-                    #[cfg(not(unix))]
-                    {
-                        bail!("failed to execute the benchmark process: {}", status)
-                    }
-                }
-            })?;
-        eprintln!("Done running {bench_target_name}");
+        let output = command
+            .output()
+            .map_err(|e| anyhow!("failed to execute the benchmark process: {}", e))?;
+
+        // Count benchmarks by looking for "Measured:" or "Checked:" lines
+        let stdout = String::from_utf8_lossy(&output.stdout);
+        let benchmark_count = stdout
+            .lines()
+            .filter(|line| {
+                line.trim_start().starts_with("Measured:")
+                    || line.trim_start().starts_with("Checked:")
+                    || (show_details && line.trim_start().starts_with(" Checked:"))
+                    || (show_details && line.trim_start().starts_with(" Measured:"))
+            })
+            .count();
+        total_benchmark_count += benchmark_count;
+
+        // Print captured output
+        print!("{stdout}");
+        io::stdout().flush().unwrap();
+
+        if !output.status.success() {
+            #[cfg(unix)]
+            {
+                let code = output
+                    .status
+                    .code()
+                    .or(output.status.signal().map(|s| 128 + s)) // 128+N indicates that a command was interrupted by signal N (see: https://tldp.org/LDP/abs/html/exitcodes.html)
+                    .unwrap_or(1);
+
+                eprintln!("failed to execute the benchmark process, exit code: {code}");
+
+                std::process::exit(code);
+            }
+            #[cfg(not(unix))]
+            {
+                bail!("failed to execute the benchmark process: {}", output.status)
+            }
+        }
+
+        if benchmark_count == 0 && !stdout.is_empty() {
+            eprintln!("Warning: No benchmarks detected in output for {bench_target_name}");
+        }
+        if show_details {
+            eprintln!("Done running {bench_target_name} ({benchmark_count} benchmarks)");
+        } else {
+            eprintln!("Done running {bench_target_name}");
+        }
     }
+    if show_details {
+        eprintln!(
+            "Finished running {} benchmark suite(s) ({total_benchmark_count} benchmarks total)",
+            benches.len()
+        );
+    } else {
+        eprintln!("Finished running {} benchmark suite(s)", benches.len());
     }
-    eprintln!("Finished running {} benchmark suite(s)", benches.len());
 
     if measurement_mode == MeasurementMode::Walltime {
         aggregate_raw_walltime_data(workspace_root)?;
```
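A simplified, self-contained sketch of the counting step introduced above, run against canned output instead of a spawned benchmark process (the `show_details`-specific clauses collapse into the same check once the line is trimmed, so they are omitted here):

```rust
// Count benchmark result lines by matching "Measured:"/"Checked:" prefixes
// after stripping leading whitespace, mirroring the filter in run_benches.
fn count_benchmarks(stdout: &str) -> usize {
    stdout
        .lines()
        .filter(|line| {
            let trimmed = line.trim_start();
            trimmed.starts_with("Measured:") || trimmed.starts_with("Checked:")
        })
        .count()
}

fn main() {
    // Sample output in the shape the README section above shows.
    let sample = "\
  Checked: benches/example.rs::fibonacci (5.6 us)
  Checked: benches/example.rs::factorial (368 ns)
some unrelated log line
";
    assert_eq!(count_benchmarks(sample), 2);
    println!("counted {} benchmarks", count_benchmarks(sample));
}
```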

crates/codspeed/src/codspeed.rs

Lines changed: 49 additions & 16 deletions
```diff
@@ -1,6 +1,7 @@
-use crate::measurement;
+use crate::{measurement, utils};
 use colored::Colorize;
 use std::ffi::CString;
+use std::time::Instant;
 
 pub use std::hint::black_box;
 
@@ -15,11 +16,14 @@ pub struct CodSpeed {
     current_benchmark: CString,
     group_stack: Vec<String>,
     is_instrumented: bool,
+    start_time: Option<Instant>,
+    show_details: bool,
 }
 
 impl CodSpeed {
     pub fn new() -> Self {
         let is_instrumented = measurement::is_instrumented();
+        let show_details = utils::show_details();
         if !is_instrumented {
             println!(
                 "{} codspeed is enabled, but no performance measurement will be made since it's running in an unknown environment.",
@@ -32,6 +36,8 @@ impl CodSpeed {
             current_benchmark: CString::new("").expect("CString::new failed"),
             group_stack: Vec::new(),
             is_instrumented,
+            start_time: None,
+            show_details,
         }
     }
 
@@ -47,29 +53,56 @@ impl CodSpeed {
     pub fn start_benchmark(&mut self, name: &str) {
         self.current_benchmark = CString::new(name).expect("CString::new failed");
         measurement::start();
+        if self.show_details && !self.is_instrumented {
+            self.start_time = Some(Instant::now());
+        } else {
+            self.start_time = None;
+        }
     }
 
     #[inline(always)]
     pub fn end_benchmark(&mut self) {
         measurement::stop(&self.current_benchmark);
         self.benchmarked
             .push(self.current_benchmark.to_str().unwrap().to_string());
-        let action_str = if self.is_instrumented {
-            "Measured"
-        } else {
-            "Checked"
-        };
-        let group_str = if self.group_stack.is_empty() {
-            "".to_string()
+        if self.show_details {
+            if !self.is_instrumented {
+                let elapsed = self
+                    .start_time
+                    .take()
+                    .map(|start| start.elapsed())
+                    .unwrap_or_default();
+                let group_str = if self.group_stack.is_empty() {
+                    "".to_string()
+                } else {
+                    format!(" (group: {})", self.group_stack.join("/"))
+                };
+                let formatted_duration = crate::utils::format_duration_nanos(elapsed.as_nanos());
+                println!(
+                    " Checked: {}{} ({})",
+                    self.current_benchmark.to_string_lossy(),
+                    group_str,
+                    formatted_duration
+                );
+            }
         } else {
-            format!(" (group: {})", self.group_stack.join("/"))
-        };
-        println!(
-            "{}: {}{}",
-            action_str,
-            self.current_benchmark.to_string_lossy(),
-            group_str
-        );
+            let action_str = if self.is_instrumented {
+                "Measured"
+            } else {
+                "Checked"
+            };
+            let group_str = if self.group_stack.is_empty() {
+                "".to_string()
+            } else {
+                format!(" (group: {})", self.group_stack.join("/"))
+            };
+            println!(
+                "{}: {}{}",
+                action_str,
+                self.current_benchmark.to_string_lossy(),
+                group_str
+            );
+        }
     }
 }
 
```
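A minimal sketch of the same timing path in isolation, assuming only the `CODSPEED_SHOW_DETAILS` convention from these diffs (the benchmark body and name are invented for the example, and `Duration`'s `Debug` output stands in for `utils::format_duration_nanos`):

```rust
use std::time::Instant;

fn main() {
    // Same environment-variable gate the harness uses; set CODSPEED_SHOW_DETAILS=1 to enable it.
    let show_details = std::env::var("CODSPEED_SHOW_DETAILS").is_ok();

    // Start the clock only when details were requested, mirroring start_benchmark().
    let start = show_details.then(Instant::now);

    let _sum: u64 = (0..1_000_000u64).sum(); // stand-in for a benchmark body

    if let Some(start) = start {
        // end_benchmark() would format this via utils::format_duration_nanos instead.
        println!(" Checked: demo::sum ({:?})", start.elapsed());
    }
}
```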

crates/codspeed/src/utils.rs

Lines changed: 87 additions & 0 deletions
```diff
@@ -46,6 +46,32 @@ pub fn is_perf_enabled() -> bool {
     std::env::var("CODSPEED_PERF_ENABLED").is_ok()
 }
 
+pub fn show_details() -> bool {
+    std::env::var("CODSPEED_SHOW_DETAILS").is_ok()
+}
+
+/// Format a duration value (as f64 nanoseconds) into a human-readable string with appropriate units
+pub fn format_duration_nanos_f64(value: f64) -> String {
+    if value <= 0.0 {
+        return "<1 ns".to_string();
+    }
+
+    if value < 1_000.0 {
+        format!("{value:.0} ns")
+    } else if value < 1_000_000.0 {
+        format!("{:.1} us", value / 1_000.0)
+    } else if value < 1_000_000_000.0 {
+        format!("{:.1} ms", value / 1_000_000.0)
+    } else {
+        format!("{:.2} s", value / 1_000_000_000.0)
+    }
+}
+
+/// Format a duration value into a human-readable string with appropriate units
+pub fn format_duration_nanos(nanos: u128) -> String {
+    format_duration_nanos_f64(nanos as f64)
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -94,4 +120,65 @@ mod tests {
         let expected_output = "std::vec::Vec::new".to_string();
         assert_eq!(get_formated_function_path(input), expected_output);
     }
+
+    #[test]
+    fn test_format_duration_nanos_handles_zero() {
+        assert_eq!(format_duration_nanos(0), "<1 ns");
+    }
+
+    #[test]
+    fn test_format_duration_nanos_scales_to_microseconds() {
+        assert_eq!(format_duration_nanos(1_500), "1.5 us");
+    }
+
+    #[test]
+    fn test_format_duration_nanos_scales_to_milliseconds() {
+        assert_eq!(format_duration_nanos(2_345_000), "2.3 ms");
+    }
+
+    #[test]
+    fn test_format_duration_nanos_scales_to_seconds() {
+        assert_eq!(format_duration_nanos(2_500_000_000), "2.50 s");
+    }
+
+    #[test]
+    fn test_format_duration_nanos_f64_handles_zero() {
+        assert_eq!(format_duration_nanos_f64(0.0), "<1 ns");
+    }
+
+    #[test]
+    fn test_format_duration_nanos_f64_scales_to_microseconds() {
+        assert_eq!(format_duration_nanos_f64(1_500.0), "1.5 us");
+    }
+
+    #[test]
+    fn test_format_duration_nanos_f64_scales_to_milliseconds() {
+        assert_eq!(format_duration_nanos_f64(2_345_000.0), "2.3 ms");
+    }
+
+    #[test]
+    fn test_format_duration_nanos_f64_scales_to_seconds() {
+        assert_eq!(format_duration_nanos_f64(2_500_000_000.0), "2.50 s");
+    }
+
+    // Additional format_duration_nanos_f64 tests (moved from walltime_results.rs)
+    #[test]
+    fn test_format_duration_nanos_f64_formats_small_values() {
+        assert_eq!(format_duration_nanos_f64(420.0), "420 ns");
+    }
+
+    #[test]
+    fn test_format_duration_nanos_f64_formats_microseconds() {
+        assert_eq!(format_duration_nanos_f64(12_500.0), "12.5 us");
+    }
+
+    #[test]
+    fn test_format_duration_nanos_f64_formats_milliseconds() {
+        assert_eq!(format_duration_nanos_f64(3_250_000.0), "3.2 ms");
+    }
+
+    #[test]
+    fn test_format_duration_nanos_f64_formats_seconds() {
+        assert_eq!(format_duration_nanos_f64(1_750_000_000.0), "1.75 s");
+    }
 }
```
