24 changes: 15 additions & 9 deletions crates/stwo/src/prover/pcs/mod.rs
@@ -101,6 +101,7 @@ impl<'a, B: BackendForChannel<MC>, MC: MerkleChannel> CommitmentSchemeProver<'a,
     pub fn build_weights_hash_map(
         &self,
         sampled_points: &TreeVec<ColumnVec<Vec<CirclePoint<SecureField>>>>,
+        max_log_size: u32,
     ) -> WeightsHashMap<B>
     where
         Col<B, SecureField>: Send + Sync,
@@ -120,16 +121,20 @@ impl<'a, B: BackendForChannel<MC>, MC: MerkleChannel> CommitmentSchemeProver<'a,
             };

             let log_size = poly.evals.domain.log_size();

+            // For each sample point, compute the weights needed to evaluate the polynomial at
+            // the folded sample point.
+            // TODO(Leo): the computation `point.repeated_double(max_log_size - log_size)` is
+            // likely repeated a bunch of times in a typical flat air. Consider moving it
+            // outside the loop.
             #[cfg(not(feature = "parallel"))]
-            points
-                .iter()
-                .for_each(|&point| compute_weights((log_size, point)));
+            points.iter().for_each(|&point| {
+                compute_weights((log_size, point.repeated_double(max_log_size - log_size)))
+            });

             #[cfg(feature = "parallel")]
-            points
-                .par_iter()
-                .for_each(|&point| compute_weights((log_size, point)));
+            points.par_iter().for_each(|&point| {
+                compute_weights((log_size, point.repeated_double(max_log_size - log_size)))
+            });
         });

         weights_dashmap
@@ -147,12 +152,13 @@ impl<'a, B: BackendForChannel<MC>, MC: MerkleChannel> CommitmentSchemeProver<'a,
             class = "EvaluateOutOfDomain"
         )
         .entered();
+
+        let max_log_size = self.trees.last().unwrap().commitment.layers.len() as u32 - 1;
         let weights_hash_map = if self.store_polynomials_coefficients {
             None
         } else {
-            Some(self.build_weights_hash_map(&sampled_points))
+            Some(self.build_weights_hash_map(&sampled_points, max_log_size))
         };
-        let max_log_size = self.trees.last().unwrap().commitment.layers.len() as u32 - 1;
         let samples: TreeVec<Vec<Vec<PointSample>>> = self
             .polynomials()
             .zip_cols(&sampled_points)
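In `mod.rs`, each out-of-domain sample point is now folded by `repeated_double(max_log_size - log_size)` before its weights are computed, and `max_log_size` (taken from the last commitment tree's Merkle layer count) is threaded into `build_weights_hash_map` as a parameter. The sketch below illustrates the resulting caching pattern with a toy circle group over M31 and made-up sizes; the `Point` type, the column sizes, and the map payload are hypothetical stand-ins, not stwo's `CirclePoint` or `WeightsHashMap`.

```rust
use std::collections::HashMap;

/// Toy circle point over the Mersenne prime M31 = 2^31 - 1 (illustration only,
/// not stwo's `CirclePoint` type).
const P: u64 = (1 << 31) - 1;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Point {
    x: u64,
    y: u64,
}

impl Point {
    /// Group doubling on the circle x^2 + y^2 = 1: (x, y) -> (2x^2 - 1, 2xy).
    fn double(self) -> Point {
        Point {
            x: (2 * self.x % P * self.x % P + P - 1) % P,
            y: 2 * self.x % P * self.y % P,
        }
    }

    /// Double `n` times, mirroring the `repeated_double` call in the diff above.
    fn repeated_double(self, n: u32) -> Point {
        (0..n).fold(self, |p, _| p.double())
    }
}

fn main() {
    // Hypothetical trace layout: the largest committed domain has log size 10,
    // and three columns have log sizes 8, 8 and 10.
    let max_log_size = 10u32;
    let column_log_sizes = [8u32, 8, 10];

    // One sample point, expressed on the largest domain. (0, 1) lies on the
    // circle and keeps the arithmetic easy to check by hand.
    let sample = Point { x: 0, y: 1 };

    // Weights are cached per (column log size, folded point), so the two columns
    // of log size 8 hit the same entry instead of recomputing anything.
    let mut weights: HashMap<(u32, Point), usize> = HashMap::new();
    for log_size in column_log_sizes {
        let folded = sample.repeated_double(max_log_size - log_size);
        *weights.entry((log_size, folded)).or_insert(0) += 1;
    }

    // Two distinct keys: one shared by the log-size-8 columns, one for log size 10.
    assert_eq!(weights.len(), 2);
    println!("{weights:?}");
}
```

The idea, consistent with the diff's own comment, is presumably that doubling a point `max_log_size - log_size` times maps it onto the smaller column domain, so equally sized columns sampled at the same point can share one weights entry, which is also what the TODO about hoisting the repeated computation refers to.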
18 changes: 13 additions & 5 deletions crates/stwo/src/prover/pcs/quotient_ops.rs
@@ -218,8 +218,10 @@ mod tests {
         polys
     }

-    fn prove_and_verify_pcs<B: BackendForChannel<Blake2sMerkleChannel>>(
-    ) -> Result<(), VerificationError> {
+    fn prove_and_verify_pcs<
+        B: BackendForChannel<Blake2sMerkleChannel>,
+        const STORE_COEFFS: bool,
+    >() -> Result<(), VerificationError> {
         const N_COLS: usize = 10;
         const LIFTING_LOG_SIZE: u32 = 8;

@@ -231,7 +233,9 @@
         );
         let mut commitment_scheme =
             CommitmentSchemeProver::<B, Blake2sMerkleChannel>::new(config, &twiddles);
-        commitment_scheme.set_store_polynomials_coefficients();
+        if STORE_COEFFS {
+            commitment_scheme.set_store_polynomials_coefficients();
+        }
         let polys = prepare_polys::<B, N_COLS, LIFTING_LOG_SIZE>();
         let sizes = polys.iter().map(|poly| poly.log_size()).collect_vec();

@@ -261,10 +265,14 @@

     #[test]
     fn test_pcs_prove_and_verify_cpu() {
-        assert!(prove_and_verify_pcs::<CpuBackend>().is_ok());
+        assert!(prove_and_verify_pcs::<CpuBackend, true>().is_ok());
     }
     #[test]
     fn test_pcs_prove_and_verify_simd() {
-        assert!(prove_and_verify_pcs::<SimdBackend>().is_ok());
+        assert!(prove_and_verify_pcs::<SimdBackend, true>().is_ok());
     }
+    #[test]
+    fn test_pcs_prove_and_verify_simd_with_barycentric() {
+        assert!(prove_and_verify_pcs::<SimdBackend, false>().is_ok());
+    }
 }
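In `quotient_ops.rs`, the test helper becomes generic over a `const STORE_COEFFS: bool`, so the same prove-and-verify path covers both the stored-coefficients configuration and the new barycentric case exercised by `test_pcs_prove_and_verify_simd_with_barycentric`. A minimal standalone sketch of that const-generic toggle pattern, using hypothetical `Scheme`/`run_pipeline` names rather than the real prover types:

```rust
/// Hypothetical stand-in for the commitment scheme whose behaviour is toggled.
#[derive(Default)]
struct Scheme {
    store_polynomials_coefficients: bool,
}

impl Scheme {
    fn set_store_polynomials_coefficients(&mut self) {
        self.store_polynomials_coefficients = true;
    }
}

/// One helper drives both configurations; the flag is fixed at compile time,
/// mirroring `prove_and_verify_pcs::<B, STORE_COEFFS>` in the diff above.
fn run_pipeline<const STORE_COEFFS: bool>() -> Result<(), String> {
    let mut scheme = Scheme::default();
    if STORE_COEFFS {
        scheme.set_store_polynomials_coefficients();
    }
    // ... committing, opening, and verifying would happen here ...
    assert_eq!(scheme.store_polynomials_coefficients, STORE_COEFFS);
    Ok(())
}

fn main() {
    // Coefficient-storing path and the barycentric (no stored coefficients) path.
    run_pipeline::<true>().unwrap();
    run_pipeline::<false>().unwrap();
}
```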