diff --git a/Cargo.lock b/Cargo.lock
index 5b86b2f718..f2039e9497 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2656,6 +2656,7 @@ dependencies = [
  "ark-serialize",
  "ark-std",
  "clap 4.4.18",
+ "criterion",
  "ctor",
  "hex",
  "kimchi",
diff --git a/saffron/Cargo.toml b/saffron/Cargo.toml
index 4b0914caea..76c93ce43c 100644
--- a/saffron/Cargo.toml
+++ b/saffron/Cargo.toml
@@ -46,6 +46,7 @@ tracing-subscriber = { version = "0.3", features = [
 
 [dev-dependencies]
 ark-std.workspace = true
+criterion = { workspace = true, features = ["html_reports"] }
 ctor = "0.2"
 proptest.workspace = true
 once_cell.workspace = true
@@ -53,3 +54,7 @@ once_cell.workspace = true
 [[bin]]
 name = "saffron-og-flow"
 path = "og-flow/main.rs"
+
+[[bench]]
+name = "read_proof_bench"
+harness = false
diff --git a/saffron/benches/read_proof_bench.rs b/saffron/benches/read_proof_bench.rs
new file mode 100644
index 0000000000..950a4fb67e
--- /dev/null
+++ b/saffron/benches/read_proof_bench.rs
@@ -0,0 +1,117 @@
+//! Run this bench using `cargo criterion -p saffron --bench read_proof_bench`
+
+use ark_ff::{One, UniformRand, Zero};
+use ark_poly::{univariate::DensePolynomial, Evaluations};
+use criterion::{black_box, criterion_group, criterion_main, BatchSize, Criterion};
+use kimchi::{circuits::domains::EvaluationDomains, groupmap::GroupMap};
+use mina_curves::pasta::{Fp, Vesta};
+use once_cell::sync::Lazy;
+use poly_commitment::{commitment::CommitmentCurve, ipa::SRS, SRS as _};
+use rand::rngs::OsRng;
+use saffron::{
+    env,
+    read_proof::{prove, verify},
+    ScalarField, SRS_SIZE,
+};
+
+// Set up static resources to avoid re-computation during benchmarks
+static SRS: Lazy<SRS<Vesta>> = Lazy::new(|| {
+    if let Ok(srs) = std::env::var("SRS_FILEPATH") {
+        env::get_srs_from_cache(srs)
+    } else {
+        SRS::create(SRS_SIZE)
+    }
+});
+
+static DOMAIN: Lazy<EvaluationDomains<Fp>> =
+    Lazy::new(|| EvaluationDomains::<Fp>::create(SRS_SIZE).unwrap());
+
+static GROUP_MAP: Lazy<<Vesta as CommitmentCurve>::Map> =
+    Lazy::new(<Vesta as CommitmentCurve>::Map::setup);
+
+fn generate_test_data(
+    size: usize,
+) -> (Vec<ScalarField>, Vec<ScalarField>, Vec<ScalarField>, Vesta) {
+    let mut rng = o1_utils::tests::make_test_rng(None);
+
+    // Generate data with specified size
+    let data: Vec<ScalarField> = (0..size).map(|_| Fp::rand(&mut rng)).collect();
+
+    // Create data commitment
+    let data_poly: DensePolynomial<Fp> =
+        Evaluations::from_vec_and_domain(data.clone(), DOMAIN.d1).interpolate();
+    let data_comm: Vesta = SRS.commit_non_hiding(&data_poly, 1).chunks[0];
+
+    // Generate query (about 10% of positions will be queried)
+    let query: Vec<ScalarField> = (0..size)
+        .map(|_| {
+            if rand::random::<f64>() < 0.1 {
+                Fp::one()
+            } else {
+                Fp::zero()
+            }
+        })
+        .collect();
+
+    // Compute answer as data * query
+    let answer: Vec<ScalarField> = data.iter().zip(query.iter()).map(|(d, q)| *d * q).collect();
+
+    (data, query, answer, data_comm)
+}
+
+fn bench_read_proof_prove(c: &mut Criterion) {
+    let (data, query, answer, data_comm) = generate_test_data(SRS_SIZE);
+
+    let description = format!("prove size {}", SRS_SIZE);
+    c.bench_function(description.as_str(), |b| {
+        b.iter_batched(
+            || OsRng,
+            |mut rng| {
+                black_box(prove(
+                    *DOMAIN,
+                    &SRS,
+                    &GROUP_MAP,
+                    &mut rng,
+                    data.as_slice(),
+                    query.as_slice(),
+                    answer.as_slice(),
+                    &data_comm,
+                ))
+            },
+            BatchSize::NumIterations(10),
+        )
+    });
+}
+
+fn bench_read_proof_verify(c: &mut Criterion) {
+    let (data, query, answer, data_comm) = generate_test_data(SRS_SIZE);
+
+    // Create proof first
+    let mut rng = OsRng;
+    let proof = prove(
+        *DOMAIN,
+        &SRS,
+        &GROUP_MAP,
+        &mut rng,
+        data.as_slice(),
+        query.as_slice(),
+        answer.as_slice(),
+        &data_comm,
+    );
+
+    let description = format!("verify size {}", SRS_SIZE);
+    c.bench_function(description.as_str(), |b| {
+        b.iter_batched(
+            || OsRng,
+            |mut rng| {
+                black_box(verify(
+                    *DOMAIN, &SRS, &GROUP_MAP, &mut rng, &data_comm, &proof,
+                ))
+            },
+            BatchSize::SmallInput,
+        )
+    });
+}
+
+criterion_group!(benches, bench_read_proof_prove, bench_read_proof_verify);
+criterion_main!(benches);