Skip to content

Commit b5229d8

Browse files
committed
chore: clean up
1 parent c29cf6d commit b5229d8

File tree

2 files changed

+18
-15
lines changed

2 files changed

+18
-15
lines changed

crates/derivation-pipeline/benches/pipeline.rs

Lines changed: 16 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,10 @@ use scroll_derivation_pipeline::DerivationPipeline;
1919
use std::{collections::HashMap, future::Future, path::PathBuf, pin::Pin, sync::Arc};
2020
use tokio::runtime::{Handle, Runtime};
2121

22+
const BATCHES_START_INDEX: u64 = 414261;
23+
const BATCHES_STOP_INDEX: u64 = 414513;
24+
25+
/// Set up a mock provider instance.
2226
fn setup_mock_provider(
2327
db: Arc<Database>,
2428
) -> Pin<Box<dyn Future<Output = MockL1Provider<Arc<Database>>> + Send>> {
@@ -40,6 +44,7 @@ fn setup_mock_provider(
4044
})
4145
}
4246

47+
/// Set up a full provider instance.
4348
fn setup_full_provider(
4449
db: Arc<Database>,
4550
) -> Pin<Box<dyn Future<Output = FullL1Provider<Arc<Database>, S3BlobProvider>> + Send>> {
@@ -52,8 +57,9 @@ fn setup_full_provider(
5257
})
5358
}
5459

60+
/// Returns a pipeline with a provider initiated from the factory function.
5561
async fn setup_pipeline<P: L1Provider + Clone + Send + Sync + 'static>(
56-
setup: Box<dyn Fn(Arc<Database>) -> Pin<Box<dyn Future<Output = P> + Send>> + Send>,
62+
factory: Box<dyn Fn(Arc<Database>) -> Pin<Box<dyn Future<Output = P> + Send>> + Send>,
5763
) -> DerivationPipeline<P> {
5864
// load batch data in the db.
5965
let db = Arc::new(setup_test_db().await);
@@ -63,7 +69,7 @@ async fn setup_pipeline<P: L1Provider + Clone + Send + Sync + 'static>(
6369
.unwrap();
6470

6571
let tx = db.tx_mut().await.unwrap();
66-
for (index, hash) in (414261..=414513).zip(blob_hashes.into_iter()) {
72+
for (index, hash) in (BATCHES_START_INDEX..=BATCHES_STOP_INDEX).zip(blob_hashes.into_iter()) {
6773
let raw_calldata =
6874
std::fs::read(format!("./benches/testdata/calldata/calldata_batch_{index}.bin"))
6975
.unwrap();
@@ -95,14 +101,14 @@ async fn setup_pipeline<P: L1Provider + Clone + Send + Sync + 'static>(
95101
tx.commit().await.unwrap();
96102

97103
// construct the pipeline.
98-
let l1_provider = setup(db.clone()).await;
104+
let l1_provider = factory(db.clone()).await;
99105
DerivationPipeline::new(l1_provider, db, u64::MAX)
100106
}
101107

108+
/// Benchmark the derivation pipeline with blobs fetched from file. This does not bench the network
109+
/// call to the AWS S3 blob storage.
102110
fn benchmark_pipeline_derivation_in_file_blobs(c: &mut Criterion) {
103111
let rt = Runtime::new().unwrap();
104-
let start_index = 414261;
105-
let end_index = 414513;
106112

107113
c.bench_function("pipeline_derive_in_file_blobs", |b| {
108114
b.to_async(&rt).iter_batched(
@@ -113,7 +119,7 @@ fn benchmark_pipeline_derivation_in_file_blobs(c: &mut Criterion) {
113119
let mut pipeline = setup_pipeline(Box::new(setup_mock_provider)).await;
114120

115121
// commit 253 batches.
116-
for index in start_index..=end_index {
122+
for index in BATCHES_START_INDEX..=BATCHES_STOP_INDEX {
117123
let batch_info = BatchInfo { index, hash: Default::default() };
118124
pipeline.push_batch(batch_info, 0);
119125
}
@@ -124,7 +130,7 @@ fn benchmark_pipeline_derivation_in_file_blobs(c: &mut Criterion) {
124130
},
125131
|mut pipeline| async move {
126132
// measured work.
127-
for _ in start_index..=end_index {
133+
for _ in BATCHES_START_INDEX..=BATCHES_STOP_INDEX {
128134
let _ = pipeline.next().await;
129135
}
130136
},
@@ -133,12 +139,9 @@ fn benchmark_pipeline_derivation_in_file_blobs(c: &mut Criterion) {
133139
});
134140
}
135141

142+
/// Benchmark the derivation pipeline with blobs fetched from S3.
136143
fn benchmark_pipeline_derivation_s3_blobs(c: &mut Criterion) {
137144
let rt = Runtime::new().unwrap();
138-
// Bench 15 batches.
139-
let start_index = 414261;
140-
let end_index = 414276;
141-
init_test_tracing();
142145
let mut group = c.benchmark_group("pipeline_derive_s3_blobs");
143146
group.sample_size(10);
144147
group.measurement_time(std::time::Duration::from_secs(20));
@@ -152,7 +155,7 @@ fn benchmark_pipeline_derivation_s3_blobs(c: &mut Criterion) {
152155
let mut pipeline = setup_pipeline(Box::new(setup_full_provider)).await;
153156

154157
// commit 16 batches (inclusive range BATCHES_START_INDEX..=BATCHES_START_INDEX + 15).
155-
for index in start_index..=end_index {
158+
for index in BATCHES_START_INDEX..=BATCHES_START_INDEX + 15 {
156159
let batch_info = BatchInfo { index, hash: Default::default() };
157160
pipeline.push_batch(batch_info, 0);
158161
}
@@ -163,7 +166,7 @@ fn benchmark_pipeline_derivation_s3_blobs(c: &mut Criterion) {
163166
},
164167
|mut pipeline| async move {
165168
// measured work.
166-
for _ in start_index..=end_index {
169+
for _ in BATCHES_START_INDEX..=BATCHES_START_INDEX + 15 {
167170
let _ = pipeline.next().await;
168171
}
169172
},

crates/providers/src/test_utils.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -24,12 +24,12 @@ impl<P: L1MessageProvider + Sync> BlobProvider for MockL1Provider<P> {
2424
hash: B256,
2525
) -> Result<Option<Arc<Blob>>, L1ProviderError> {
2626
let blob = self.blobs.get(&hash).map(|path| {
27-
let arr = std::fs::read(path)
27+
let blob = std::fs::read(path)
2828
.expect("failed to read blob file")
2929
.as_slice()
3030
.try_into()
3131
.expect("failed to convert bytes to blob");
32-
Arc::new(arr)
32+
Arc::new(blob)
3333
});
3434
Ok(blob)
3535
}

0 commit comments

Comments
 (0)