@@ -29,7 +29,7 @@ fn generate_duplicate_heavy_data(num_groups: usize, duplicates_per_group: usize)
2929
3030/// Benchmark 1: Heavy duplicates - the main optimization target
3131/// Many consecutive duplicate lines that stress the line comparison optimization
32- #[divan::bench(args = [10_000_000])]
32+ #[divan::bench(args = [10_000])]
3333fn uniq_heavy_duplicates(bencher: Bencher, num_lines: usize) {
3434    // Create 1000 groups with ~10,000 duplicates each
3535    // NOTE(review): these counts reflect the old 10_000_000 arg; with args = [10_000] they look stale — confirm the intended group sizing. This maximizes the benefit of PR #8703's optimization
@@ -46,7 +46,7 @@ fn uniq_heavy_duplicates(bencher: Bencher, num_lines: usize) {
4646
4747/// Benchmark 2: Mixed duplicates with counting
4848/// Tests the -c flag with a mix of duplicate groups
49- #[divan::bench(args = [5_000_000])]
49+ #[divan::bench(args = [10_000])]
5050fn uniq_with_count(bencher: Bencher, num_lines: usize) {
5151 // Create more groups with fewer duplicates for varied counting
5252    let num_groups = num_lines / 100;
@@ -61,7 +61,7 @@ fn uniq_with_count(bencher: Bencher, num_lines: usize) {
6161
6262/// Benchmark 3: Case-insensitive comparison with duplicates
6363/// Tests the -i flag which requires case folding during comparison
64- #[divan::bench(args = [2_000_000])]
64+ #[divan::bench(args = [10_000])]
6565fn uniq_case_insensitive(bencher: Bencher, num_lines: usize) {
6666    let mut data = Vec::new();
6767 let words = [
0 commit comments