@@ -10,6 +10,7 @@ use codspeed::walltime_results::WalltimeResults;
 use std::{
     io::{self, Write},
     path::{Path, PathBuf},
+    process::Stdio,
 };
 
 #[cfg(unix)]
@@ -99,6 +100,7 @@ pub fn run_benches(
     package_filters: PackageFilters,
     bench_target_filters: BenchTargetFilters,
     measurement_mode: MeasurementMode,
+    show_details: bool,
 ) -> Result<()> {
     let codspeed_target_dir = get_codspeed_target_dir(metadata, measurement_mode);
     let workspace_root = metadata.workspace_root.as_std_path();
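Note: the new `show_details` parameter has to be supplied by the caller. A minimal, hypothetical sketch of how such a value could come from a command-line flag using clap's derive API; the struct, flag name, and wiring below are illustrative assumptions and not part of this diff:

use clap::Parser;

/// Hypothetical CLI fragment; it only illustrates where a `show_details`
/// value could come from before being forwarded to `run_benches`.
#[derive(Parser)]
struct BenchArgs {
    /// Print per-benchmark details and a total benchmark count.
    #[arg(long, default_value_t = false)]
    show_details: bool,
}

fn main() {
    let args = BenchArgs::parse();
    // `args.show_details` would then be passed as the new parameter.
    println!("show_details = {}", args.show_details);
}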
@@ -113,16 +115,29 @@ pub fn run_benches(
 
     eprintln!("Collected {} benchmark suite(s) to run", benches.len());
 
+    let mut total_benchmark_count = 0;
+
     for bench in benches.iter() {
         let bench_target_name = &bench.bench_target_name;
         // workspace_root is needed since file! returns the path relatively to the workspace root
         // while CARGO_MANIFEST_DIR returns the path to the sub package
         let workspace_root = metadata.workspace_root.clone();
-        eprintln!("Running {} {}", &bench.package_name, bench_target_name);
+        eprintln!(
+            "Running {} {} ({})",
+            &bench.package_name,
+            bench_target_name,
+            bench.bench_path.display()
+        );
         let mut command = std::process::Command::new(&bench.bench_path);
         command
             .env("CODSPEED_CARGO_WORKSPACE_ROOT", workspace_root)
-            .current_dir(&bench.working_directory);
+            .current_dir(&bench.working_directory)
+            .stdout(Stdio::piped())
+            .stderr(Stdio::inherit());
+
+        if show_details {
+            command.env("CODSPEED_SHOW_DETAILS", "1");
+        }
 
         if measurement_mode == MeasurementMode::Walltime {
             command.arg("--bench"); // Walltime targets need this additional argument (inherited from running them with `cargo bench`)
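The command above is now configured with `Stdio::piped()` for stdout and `Stdio::inherit()` for stderr, so the suite's standard output can be captured and inspected (next hunk) while its progress messages still reach the terminal directly. A small standalone sketch of that pattern with `std::process::Command`; the `echo` invocation is a placeholder, not taken from this diff:

use std::io::{self, Write};
use std::process::{Command, Stdio};

fn main() -> io::Result<()> {
    // Capture stdout for post-processing, but let stderr flow straight
    // through to the parent process's terminal.
    let output = Command::new("echo")
        .arg("Measured: example_bench")
        .stdout(Stdio::piped())
        .stderr(Stdio::inherit())
        .output()?;

    // Inspect, then re-emit, the captured stdout.
    let stdout = String::from_utf8_lossy(&output.stdout);
    print!("{stdout}");
    io::stdout().flush()?;
    Ok(())
}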
@@ -132,33 +147,61 @@ pub fn run_benches(
             command.arg(bench_name_filter);
         }
 
-        command
-            .status()
-            .map_err(|e| anyhow!("failed to execute the benchmark process: {}", e))
-            .and_then(|status| {
-                if status.success() {
-                    Ok(())
-                } else {
-                    #[cfg(unix)]
-                    {
-                        let code = status
-                            .code()
-                            .or(status.signal().map(|s| 128 + s)) // 128+N indicates that a command was interrupted by signal N (see: https://tldp.org/LDP/abs/html/exitcodes.html)
-                            .unwrap_or(1);
-
-                        eprintln!("failed to execute the benchmark process, exit code: {code}");
-
-                        std::process::exit(code);
-                    }
-                    #[cfg(not(unix))]
-                    {
-                        bail!("failed to execute the benchmark process: {}", status)
-                    }
-                }
-            })?;
-        eprintln!("Done running {bench_target_name}");
+        let output = command
+            .output()
+            .map_err(|e| anyhow!("failed to execute the benchmark process: {}", e))?;
+
+        // Count benchmarks by looking for "Measured:" or "Checked:" lines
+        let stdout = String::from_utf8_lossy(&output.stdout);
+        let benchmark_count = stdout
+            .lines()
+            .filter(|line| {
+                // trim_start() already strips indentation, so these two
+                // prefixes also match indented detail lines
+                line.trim_start().starts_with("Measured:")
+                    || line.trim_start().starts_with("Checked:")
+            })
+            .count();
+        total_benchmark_count += benchmark_count;
+
+        // Print captured output
+        print!("{stdout}");
+        io::stdout().flush().unwrap();
+
+        if !output.status.success() {
+            #[cfg(unix)]
+            {
+                let code = output
+                    .status
+                    .code()
+                    .or(output.status.signal().map(|s| 128 + s)) // 128+N indicates that a command was interrupted by signal N (see: https://tldp.org/LDP/abs/html/exitcodes.html)
+                    .unwrap_or(1);
+
+                eprintln!("failed to execute the benchmark process, exit code: {code}");
+
+                std::process::exit(code);
+            }
+            #[cfg(not(unix))]
+            {
+                bail!("failed to execute the benchmark process: {}", output.status)
+            }
+        }
+
+        if benchmark_count == 0 && !stdout.is_empty() {
+            eprintln!("Warning: No benchmarks detected in output for {bench_target_name}");
+        }
+        if show_details {
+            eprintln!("Done running {bench_target_name} ({benchmark_count} benchmarks)");
+        } else {
+            eprintln!("Done running {bench_target_name}");
+        }
+    }
+    if show_details {
+        eprintln!(
+            "Finished running {} benchmark suite(s) ({total_benchmark_count} benchmarks total)",
+            benches.len()
+        );
+    } else {
+        eprintln!("Finished running {} benchmark suite(s)", benches.len());
     }
-    eprintln!("Finished running {} benchmark suite(s)", benches.len());
 
     if measurement_mode == MeasurementMode::Walltime {
         aggregate_raw_walltime_data(workspace_root)?;
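For reference, the benchmark counting added above reduces to scanning the captured stdout for lines whose trimmed form starts with "Measured:" or "Checked:". A self-contained sketch of just that step; the sample output and names are made up for illustration:

/// Count benchmark result lines in captured harness output, mirroring
/// the filter used in the diff above.
fn count_benchmarks(stdout: &str) -> usize {
    stdout
        .lines()
        .filter(|line| {
            let line = line.trim_start();
            line.starts_with("Measured:") || line.starts_with("Checked:")
        })
        .count()
}

fn main() {
    // Made-up sample output, for illustration only.
    let sample = "\
Running benches...
  Measured: my_crate::bench_parse
  Checked: my_crate::bench_serialize
Done.
";
    assert_eq!(count_benchmarks(sample), 2);
    println!("counted {} benchmark(s)", count_benchmarks(sample));
}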