-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathfunction_parse.rs
More file actions
280 lines (231 loc) · 8.43 KB
/
function_parse.rs
File metadata and controls
280 lines (231 loc) · 8.43 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
use std::time::{Duration, Instant};
use criterion::{BenchmarkId, Criterion, Throughput, criterion_group, criterion_main};
use rayon::prelude::*;
use std::hint::black_box;
use parser::{ParserError, parse};
// Non-image test assets: one representative file per supported document format.
// Used by the per-filetype and threshold benchmarks.
// NOTE(review): "FILESNAMES" looks like a typo for "FILENAMES"; renaming would
// touch every use site in this file, so it is left as-is here.
const TEST_FILESNAMES_BASE: &[&str] = &[
    "test_csv_1.csv",
    "test_docx_1.docx",
    "test_json_1.json",
    "test_pdf_1.pdf",
    "test_pptx_1.pptx",
    "test_txt_1.txt",
    "test_xlsx_1.xlsx",
];
// Image test assets, benchmarked separately (see benchmark_per_filetype, which
// uses a reduced sample size for this group).
const TEST_FILESNAMES_IMAGES: &[&str] = &["test_jpg_1.jpg", "test_png_1.png", "test_webp_1.webp"];
// Union of the two lists above; currently unused (leading underscore) but kept
// for benchmarks that want every supported format in one pass.
const _TEST_FILESNAMES_FULL: &[&str] = &[
    "test_csv_1.csv",
    "test_docx_1.docx",
    "test_jpg_1.jpg",
    "test_json_1.json",
    "test_pdf_1.pdf",
    "test_png_1.png",
    "test_pptx_1.pptx",
    "test_txt_1.txt",
    "test_webp_1.webp",
    "test_xlsx_1.xlsx",
];
/// Reads a benchmark asset from `tests/assets/<filename>` (relative to the
/// crate root) fully into memory.
///
/// # Panics
/// Panics with the offending path in the message if the file cannot be read,
/// so a missing or unreadable asset is reported clearly instead of via a bare
/// `unwrap` backtrace.
fn read_test_file(filename: &str) -> Vec<u8> {
    let path = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
        .join("tests/assets")
        .join(filename);
    std::fs::read(&path)
        .unwrap_or_else(|e| panic!("failed to read test asset {}: {e}", path.display()))
}
/// Compares rayon parallel parsing against plain sequential parsing on a
/// workload of one identical PDF per logical CPU, reporting throughput in
/// files (elements) per second.
fn benchmark_sequential_vs_parallel(c: &mut Criterion) {
    // One borrowed copy of the same PDF per logical CPU; references avoid
    // duplicating the file bytes.
    let pdf_bytes = read_test_file("test_pdf_1.pdf");
    let workload: Vec<&[u8]> = std::iter::repeat(pdf_bytes.as_slice())
        .take(num_cpus::get())
        .collect();

    let mut group = c.benchmark_group("Sequential vs Parallel Parsing");
    group.throughput(Throughput::Elements(workload.len() as u64));

    // Parallel variant: rayon fans the files out across the thread pool.
    group.bench_function("parallel", |b| {
        b.iter(|| {
            workload
                .par_iter()
                .map(|bytes| parse(black_box(bytes)))
                .collect::<Result<Vec<String>, ParserError>>()
        })
    });

    // Sequential baseline: same work, single thread.
    group.bench_function("sequential", |b| {
        b.iter(|| {
            workload
                .iter()
                .map(|bytes| parse(black_box(bytes)))
                .collect::<Result<Vec<String>, ParserError>>()
        })
    });

    group.finish();
}
/// Measures how parallel parsing throughput scales with the number of files,
/// sweeping workload sizes from a quarter of the core count up to four times
/// the core count. Throughput is reported per element (per file).
fn benchmark_parallel_efficiency(c: &mut Criterion) {
    let file_data = read_test_file("test_pdf_1.pdf");
    let cpu_count = num_cpus::get();
    // Candidate workload sizes around the core count. Integer division yields
    // 0 for cpu_count < 4 (or < 2), which would benchmark an empty vector, so
    // clamp every count to at least one file. The list stays non-decreasing,
    // so dedup() below removes all duplicates the clamping creates.
    let mut counts = vec![
        (cpu_count / 4).max(1),
        (cpu_count / 2).max(1),
        cpu_count,
        cpu_count * 2,
        cpu_count * 4,
    ];
    counts.dedup();
    let mut group = c.benchmark_group("Parallel Efficiency");
    group.throughput(Throughput::Elements(1));
    for &count in &counts {
        // Borrow the same file data `count` times; no byte copies.
        let files: Vec<&[u8]> = vec![&file_data; count];
        group.bench_function(BenchmarkId::new("files", count), |b| {
            b.iter(|| {
                files
                    .par_iter()
                    .map(|d| parse(black_box(d)))
                    .collect::<Result<Vec<String>, ParserError>>()
            })
        });
    }
    group.finish();
}
/// Benchmarks parallel parsing throughput per file type: one criterion group
/// for the "base" document formats and a second group (with a reduced sample
/// size) for the slower-to-parse image formats.
fn benchmark_per_filetype(c: &mut Criterion) {
    let worker_count = num_cpus::get();

    // Base document formats, default criterion sample size.
    let mut base_group = c.benchmark_group("Per Filetype Parsing, Base");
    base_group.throughput(Throughput::Elements(worker_count as u64));
    for &name in TEST_FILESNAMES_BASE {
        let data = read_test_file(name);
        // One borrowed slice per logical CPU so the pool is fully loaded.
        let batch: Vec<&[u8]> = vec![data.as_slice(); worker_count];
        base_group.bench_function(name, |b| {
            b.iter(|| {
                batch
                    .par_iter()
                    .map(|bytes| parse(black_box(bytes)))
                    .collect::<Result<Vec<String>, ParserError>>()
            })
        });
    }
    base_group.finish();

    // Image formats: only 10 samples each, since these runs are long.
    let mut image_group = c.benchmark_group("Per Filetype Parsing, Images");
    image_group.sample_size(10);
    image_group.throughput(Throughput::Elements(worker_count as u64));
    for &name in TEST_FILESNAMES_IMAGES {
        let data = read_test_file(name);
        let batch: Vec<&[u8]> = vec![data.as_slice(); worker_count];
        image_group.bench_function(name, |b| {
            b.iter(|| {
                batch
                    .par_iter()
                    .map(|bytes| parse(black_box(bytes)))
                    .collect::<Result<Vec<String>, ParserError>>()
            })
        });
    }
    image_group.finish();
}
// For each base file type, finds the largest number of files that can be
// parsed in parallel within a 16 ms budget (roughly one 60 FPS frame), then
// benchmarks counts just below, at, and just above that threshold.
fn benchmark_parallel_threshold(c: &mut Criterion) {
    // Timed measurements per candidate count; the median of these is used.
    const SAMPLE_COUNT: usize = 5;
    // Time budget a workload must fit inside to count as "under threshold".
    let max_time_threshold = Duration::from_millis(16);
    for &filename in TEST_FILESNAMES_BASE {
        // Derive the extension for group naming; next_back() takes the last
        // '.'-separated segment ("csv" from "test_csv_1.csv").
        let file_extension = filename.split('.').next_back().unwrap_or("unknown");
        let group_name = format!("Parallel {} Processing", file_extension.to_uppercase());
        let mut group = c.benchmark_group(&group_name);
        // Read the file once; all measurements borrow this buffer.
        let file_data = read_test_file(filename);
        // Measures the median wall-clock time to parse `count` copies of the
        // file in parallel. Used by the searches below, outside criterion.
        let measure_time = |count: usize| -> Duration {
            // Build the reference vector outside the timed region.
            let files: Vec<&[u8]> = vec![&file_data; count];
            // Untimed warm-up runs to stabilize caches, the rayon pool, and
            // any lazy initialization inside parse().
            for _ in 0..3 {
                black_box(
                    files
                        .par_iter()
                        .map(|d| parse(black_box(d)))
                        .collect::<Result<Vec<String>, ParserError>>()
                        .unwrap(),
                );
            }
            // Take several measurements; the median is robust to outliers.
            let mut durations = Vec::with_capacity(SAMPLE_COUNT);
            for _ in 0..SAMPLE_COUNT {
                // NOTE(review): black_box(()) is an optimizer fence on a unit
                // value — it does NOT clear CPU caches between runs.
                black_box(());
                let start = Instant::now();
                black_box(
                    files
                        .par_iter()
                        .map(|d| parse(black_box(d)))
                        .collect::<Result<Vec<String>, ParserError>>()
                        .unwrap(),
                );
                durations.push(start.elapsed());
            }
            // Median of the sorted samples.
            durations.sort();
            durations[SAMPLE_COUNT / 2]
        };
        // Search for the largest count whose median time fits the budget.
        let mut low = 1;
        let mut high = 1;
        // Phase 1: exponential search for an upper bound — double `high`
        // until it exceeds the budget; `low` trails as the last passing count.
        while measure_time(high) <= max_time_threshold {
            low = high;
            high *= 2;
        }
        // Phase 2: binary search in (low, high) for the exact boundary.
        // Invariant: low passes the budget, high fails it.
        while high - low > 1 {
            let mid = low + (high - low) / 2;
            if measure_time(mid) <= max_time_threshold {
                low = mid;
            } else {
                high = mid;
            }
        }
        // `low` is the largest passing count.
        // NOTE(review): if even a single file exceeds the budget, phase 1
        // never iterates and the threshold is still reported as 1 — confirm
        // that is the intended floor.
        let threshold_count = low;
        // Permille factors bracketing the threshold:
        // 99.0%, 99.9%, 100.0%, 100.1%, 101.0%.
        let permille_values: [usize; 5] = [990, 999, 1000, 1001, 1010];
        // Scale the threshold by each factor using integer math only;
        // div_ceil rounds up and max(1) guards against a zero count.
        let mut test_points: Vec<usize> = permille_values
            .iter()
            .map(|&p| {
                let product = threshold_count.saturating_mul(p);
                product.div_ceil(1000).max(1)
            })
            .collect();
        // Monotone mapping of a sorted list, so dedup() removes duplicates
        // (common for small thresholds where adjacent permilles collide).
        test_points.dedup();
        // Benchmark each point near the threshold under criterion proper.
        for &count in &test_points {
            // Per-point throughput so reports are in files per second.
            group.throughput(Throughput::Elements(count as u64));
            let files: Vec<&[u8]> = vec![&file_data; count];
            group.bench_with_input(BenchmarkId::new("files", count), &count, |b, &_| {
                b.iter(|| {
                    files
                        .par_iter()
                        .map(|d| parse(black_box(d)))
                        .collect::<Result<Vec<String>, ParserError>>()
                });
            });
        }
        // Emit the discovered threshold alongside criterion's own output.
        println!(
            "Threshold for {}: {} files within {}ms",
            file_extension,
            threshold_count,
            max_time_threshold.as_millis()
        );
        group.finish();
    }
}
// Register every benchmark function under the `benches` group using
// criterion's default configuration, and generate the benchmark `main`.
criterion_group!(
    benches,
    benchmark_sequential_vs_parallel,
    benchmark_parallel_efficiency,
    benchmark_per_filetype,
    benchmark_parallel_threshold
);
criterion_main!(benches);