diff --git a/Cargo.lock b/Cargo.lock index 6658d5d1843..c443639dafe 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "addr2line" @@ -2979,6 +2979,7 @@ dependencies = [ "ark-poly", "ark-serialize", "arkworks", + "base64 0.21.7", "getrandom 0.2.15", "kimchi", "libc", diff --git a/plonk-napi/Cargo.toml b/plonk-napi/Cargo.toml index 74ee5691f19..c06aac89909 100644 --- a/plonk-napi/Cargo.toml +++ b/plonk-napi/Cargo.toml @@ -26,9 +26,10 @@ arkworks.workspace = true mina-curves = { path = "../curves" } mina-poseidon = { path = "../poseidon" } o1-utils = { path = "../utils" } -poly-commitment = { path = "../poly-commitment" } plonk_wasm.workspace = true +poly-commitment = { path = "../poly-commitment" } +base64.workspace = true getrandom.workspace = true kimchi.workspace = true libc.workspace = true diff --git a/plonk-napi/src/lib.rs b/plonk-napi/src/lib.rs index 8d520f3db46..e46f862eab1 100644 --- a/plonk-napi/src/lib.rs +++ b/plonk-napi/src/lib.rs @@ -1,6 +1,7 @@ mod build_info; mod circuit; mod gate_vector; +mod oracles; mod pasta_fp_plonk_index; mod pasta_fq_plonk_index; mod plonk_verifier_index; @@ -30,32 +31,71 @@ pub use gate_vector::{ NapiFpGateVector as WasmFpGateVector, NapiFqGate as WasmFqGate, NapiFqGateVector as WasmFqGateVector, }; +pub use oracles::{ + fp::{fp_oracles_create, fp_oracles_deep_copy, fp_oracles_dummy}, + fq::{fq_oracles_create, fq_oracles_deep_copy, fq_oracles_dummy}, +}; pub use pasta_fp_plonk_index::{ - prover_index_fp_from_bytes, prover_index_fp_to_bytes, WasmPastaFpPlonkIndex, + caml_pasta_fp_plonk_index_create, caml_pasta_fp_plonk_index_decode, + caml_pasta_fp_plonk_index_domain_d1_size, caml_pasta_fp_plonk_index_domain_d4_size, + caml_pasta_fp_plonk_index_domain_d8_size, caml_pasta_fp_plonk_index_encode, + caml_pasta_fp_plonk_index_max_degree, caml_pasta_fp_plonk_index_public_inputs, + caml_pasta_fp_plonk_index_read, caml_pasta_fp_plonk_index_write, prover_index_fp_deserialize, + prover_index_fp_serialize, WasmPastaFpPlonkIndex, }; pub use pasta_fq_plonk_index::{ - prover_index_fq_from_bytes, prover_index_fq_to_bytes, WasmPastaFqPlonkIndex, + caml_pasta_fq_plonk_index_create, caml_pasta_fq_plonk_index_decode, + caml_pasta_fq_plonk_index_domain_d1_size, caml_pasta_fq_plonk_index_domain_d4_size, + caml_pasta_fq_plonk_index_domain_d8_size, caml_pasta_fq_plonk_index_encode, + caml_pasta_fq_plonk_index_max_degree, caml_pasta_fq_plonk_index_public_inputs, + caml_pasta_fq_plonk_index_read, caml_pasta_fq_plonk_index_write, prover_index_fq_deserialize, + prover_index_fq_serialize, WasmPastaFqPlonkIndex, }; pub use plonk_verifier_index::{ - caml_pasta_fp_plonk_verifier_index_shifts, caml_pasta_fq_plonk_verifier_index_shifts, + fp::{ + caml_pasta_fp_plonk_verifier_index_create, caml_pasta_fp_plonk_verifier_index_deep_copy, + caml_pasta_fp_plonk_verifier_index_deserialize, caml_pasta_fp_plonk_verifier_index_dummy, + caml_pasta_fp_plonk_verifier_index_read, caml_pasta_fp_plonk_verifier_index_serialize, + caml_pasta_fp_plonk_verifier_index_shifts, caml_pasta_fp_plonk_verifier_index_write, + NapiFpLookupVerifierIndex, NapiFpPlonkVerifierIndex, + }, + fq::{ + caml_pasta_fq_plonk_verifier_index_create, caml_pasta_fq_plonk_verifier_index_deep_copy, + caml_pasta_fq_plonk_verifier_index_deserialize, caml_pasta_fq_plonk_verifier_index_dummy, + caml_pasta_fq_plonk_verifier_index_read, caml_pasta_fq_plonk_verifier_index_serialize, + 
caml_pasta_fq_plonk_verifier_index_shifts, caml_pasta_fq_plonk_verifier_index_write, + NapiFqLookupVerifierIndex, NapiFqPlonkVerifierIndex, + }, }; + pub use poly_comm::{ pallas::NapiFqPolyComm as WasmFqPolyComm, vesta::NapiFpPolyComm as WasmFpPolyComm, }; pub use poseidon::{caml_pasta_fp_poseidon_block_cipher, caml_pasta_fq_poseidon_block_cipher}; -pub use proof::caml_pasta_fp_plonk_proof_create; +pub use proof::{ + fp::{ + caml_pasta_fp_plonk_proof_batch_verify, caml_pasta_fp_plonk_proof_create, + caml_pasta_fp_plonk_proof_deep_copy, caml_pasta_fp_plonk_proof_dummy, + caml_pasta_fp_plonk_proof_verify, NapiFpLookupCommitments, NapiFpOpeningProof, + NapiFpProofEvaluations, NapiFpProverCommitments, NapiFpProverProof, + }, + fq::{ + caml_pasta_fq_plonk_proof_batch_verify, caml_pasta_fq_plonk_proof_create, + caml_pasta_fq_plonk_proof_deep_copy, caml_pasta_fq_plonk_proof_dummy, + caml_pasta_fq_plonk_proof_verify, NapiFqLookupCommitments, NapiFqOpeningProof, + NapiFqProofEvaluations, NapiFqProverCommitments, NapiFqProverProof, + }, +}; pub use srs::{ caml_fp_srs_from_bytes, caml_fp_srs_from_bytes_external, caml_fp_srs_to_bytes, - caml_fq_srs_from_bytes, caml_fq_srs_from_bytes_external, caml_fq_srs_to_bytes, - fp::NapiFpSrs as WasmFpSrs, fq::NapiFqSrs as WasmFqSrs, *, + caml_fp_srs_to_bytes_external, caml_fq_srs_from_bytes, caml_fq_srs_from_bytes_external, + caml_fq_srs_to_bytes, caml_fq_srs_to_bytes_external, fp::NapiFpSrs as WasmFpSrs, + fq::NapiFqSrs as WasmFqSrs, *, }; pub use tables::{JsLookupTableFp, JsLookupTableFq, JsRuntimeTableCfgFp, JsRuntimeTableCfgFq}; -pub use vector::{ - fp::NapiVecVecFp as WasmVecVecFp, fq::NapiVecVecFq as WasmVecVecFq, NapiFlatVector, -}; +pub use vector::{fp::WasmVecVecFp, fq::WasmVecVecFq, NapiFlatVector}; pub use wrappers::{ field::{NapiPastaFp as WasmPastaFp, NapiPastaFq as WasmPastaFq}, group::{NapiGPallas as WasmGPallas, NapiGVesta as WasmGVesta}, + lookups::{NapiFpRuntimeTable, NapiFqRuntimeTable}, }; - -pub use plonk_verifier_index::{fp::*, fq::*}; diff --git a/plonk-napi/src/plonk_verifier_index/fp.rs b/plonk-napi/src/old_plonk_verifier_index/fp.rs similarity index 76% rename from plonk-napi/src/plonk_verifier_index/fp.rs rename to plonk-napi/src/old_plonk_verifier_index/fp.rs index 6832d47e28b..356892558ab 100644 --- a/plonk-napi/src/plonk_verifier_index/fp.rs +++ b/plonk-napi/src/old_plonk_verifier_index/fp.rs @@ -2,6 +2,7 @@ use crate::{ poly_comm::vesta::NapiFpPolyComm, srs::fp::NapiFpSrs, wrappers::{field::NapiPastaFp, lookups::NapiLookupInfo}, + WasmPastaFpPlonkIndex, }; use ark_poly::{EvaluationDomain, Radix2EvaluationDomain as Domain}; use kimchi::{ @@ -16,15 +17,12 @@ use kimchi::{ }, }, linearization::expr_linearization, - verifier_index::{LookupVerifierIndex, VerifierIndex as DlogVerifierIndex}, + verifier_index::{LookupVerifierIndex, VerifierIndex}, }; use mina_curves::pasta::{Fp, Pallas as GAffineOther, Vesta as GAffine}; -use napi::bindgen_prelude::{Error, Status}; +use napi::bindgen_prelude::{Error, External, Status}; use napi_derive::napi; -use poly_commitment::{ - commitment::PolyComm, - ipa::{OpeningProof, SRS}, -}; +use poly_commitment::{commitment::PolyComm, ipa::OpeningProof, SRS}; use serde::{Deserialize, Serialize}; use std::sync::Arc; @@ -35,6 +33,22 @@ pub struct NapiFpDomain { pub group_gen: NapiPastaFp, } +impl From for Domain { + fn from(domain: NapiFpDomain) -> Self { + let size = 1 << domain.log_size_of_group; + Domain::::new(size).expect("Failed to create evaluation domain") + } +} + +impl From<&Domain> for NapiFpDomain { 
+ fn from(domain: &Domain) -> Self { + Self { + log_size_of_group: domain.log_size_of_group as i32, + group_gen: domain.group_gen.into(), + } + } +} + #[napi(object, js_name = "WasmFpShifts")] #[derive(Clone, Debug, Serialize, Deserialize, Default)] pub struct NapiFpShifts { @@ -47,6 +61,20 @@ pub struct NapiFpShifts { pub s6: NapiPastaFp, } +impl From<&[Fp; 7]> for NapiFpShifts { + fn from(shifts: &[Fp; 7]) -> Self { + Self { + s0: shifts[0].into(), + s1: shifts[1].into(), + s2: shifts[2].into(), + s3: shifts[3].into(), + s4: shifts[4].into(), + s5: shifts[5].into(), + s6: shifts[6].into(), + } + } +} + #[napi(object, js_name = "WasmFpLookupSelectors")] #[derive(Clone, Debug, Serialize, Deserialize, Default)] pub struct NapiFpLookupSelectors { @@ -182,6 +210,27 @@ pub struct NapiFpPlonkVerificationEvals { pub rot_comm: Option, } +impl From<&VerifierIndex>> for NapiFpPlonkVerificationEvals { + fn from(index: &VerifierIndex>) -> Self { + Self { + sigma_comm: index.sigma_comm.iter().map(Into::into).collect(), + coefficients_comm: index.coefficients_comm.iter().map(Into::into).collect(), + generic_comm: index.generic_comm.clone().into(), + psm_comm: index.psm_comm.clone().into(), + complete_add_comm: index.complete_add_comm.clone().into(), + mul_comm: index.mul_comm.clone().into(), + emul_comm: index.emul_comm.clone().into(), + endomul_scalar_comm: index.endomul_scalar_comm.clone().into(), + xor_comm: index.xor_comm.clone().map(Into::into), + range_check0_comm: index.range_check0_comm.clone().map(Into::into), + range_check1_comm: index.range_check1_comm.clone().map(Into::into), + foreign_field_add_comm: index.foreign_field_add_comm.clone().map(Into::into), + foreign_field_mul_comm: index.foreign_field_mul_comm.clone().map(Into::into), + rot_comm: index.rot_comm.clone().map(Into::into), + } + } +} + #[napi(object, js_name = "WasmFpPlonkVerifierIndex")] #[derive(Clone, Debug, Default)] pub struct NapiFpPlonkVerifierIndex { @@ -196,6 +245,28 @@ pub struct NapiFpPlonkVerifierIndex { pub zk_rows: i32, } +#[napi(js_name = "caml_pasta_fp_plonk_verifier_index_create")] +pub fn caml_pasta_fp_plonk_verifier_index_create( + index: &External, +) -> NapiFpPlonkVerifierIndex { + index + .0 + .srs + .get_lagrange_basis(index.0.as_ref().cs.domain.d1); + let verifier_index = index.0.as_ref().verifier_index(); + NapiFpPlonkVerifierIndex::from(&verifier_index) +} + +#[napi(js_name = "caml_pasta_fp_plonk_verifier_index_read")] +pub fn caml_pasta_fp_plonk_verifier_index_read( + offset: Option, + srs: &External, + path: String, +) -> NapiFpPlonkVerifierIndex { + let vi = read_raw(offset, srs, path)?; + Ok(to_wasm(srs, vi.into())) +} + #[napi(js_name = "caml_pasta_fp_plonk_verifier_index_shifts")] pub fn caml_pasta_fp_plonk_verifier_index_shifts( log2_size: i32, @@ -223,7 +294,7 @@ pub fn caml_pasta_fp_plonk_verifier_index_shifts( }) } -impl From for DlogVerifierIndex> { +impl From for VerifierIndex> { fn from(index: NapiFpPlonkVerifierIndex) -> Self { let max_poly_size = index.max_poly_size; let public_ = index.public_; @@ -242,7 +313,7 @@ impl From for DlogVerifierIndex for DlogVerifierIndex>> for NapiFpPlonkVerifierIndex { + fn from(index: &VerifierIndex>) -> Self { + Self { + domain: (&index.domain).into(), + max_poly_size: index.max_poly_size as i32, + public_: index.public as i32, + prev_challenges: index.prev_challenges as i32, + srs: (&index.srs).into(), + evals: index.into(), + shifts: (&index.shift).into(), + lookup_index: index.lookup_index.as_ref().map(Into::into), + zk_rows: index.zk_rows as i32, + } + 
} +} + fn compute_feature_flags(index: &NapiFpPlonkVerifierIndex) -> FeatureFlags { let xor = index.evals.xor_comm.is_some(); let range_check0 = index.evals.range_check0_comm.is_some(); diff --git a/plonk-napi/src/plonk_verifier_index/fq.rs b/plonk-napi/src/old_plonk_verifier_index/fq.rs similarity index 78% rename from plonk-napi/src/plonk_verifier_index/fq.rs rename to plonk-napi/src/old_plonk_verifier_index/fq.rs index bd59e7bc356..222f71e4763 100644 --- a/plonk-napi/src/plonk_verifier_index/fq.rs +++ b/plonk-napi/src/old_plonk_verifier_index/fq.rs @@ -2,6 +2,7 @@ use crate::{ poly_comm::pallas::NapiFqPolyComm, srs::fq::NapiFqSrs, wrappers::{field::NapiPastaFq, lookups::NapiLookupInfo}, + WasmPastaFqPlonkIndex, }; use ark_poly::{EvaluationDomain, Radix2EvaluationDomain as Domain}; use kimchi::{ @@ -16,15 +17,12 @@ use kimchi::{ }, }, linearization::expr_linearization, - verifier_index::{LookupVerifierIndex, VerifierIndex as DlogVerifierIndex}, + verifier_index::{LookupVerifierIndex, VerifierIndex}, }; use mina_curves::pasta::{Fq, Pallas as GAffine, Vesta as GAffineOther}; -use napi::bindgen_prelude::{Error, Status}; +use napi::bindgen_prelude::{Error, External, Status}; use napi_derive::napi; -use poly_commitment::{ - commitment::PolyComm, - ipa::{OpeningProof, SRS}, -}; +use poly_commitment::{commitment::PolyComm, ipa::OpeningProof, SRS}; use serde::{Deserialize, Serialize}; use std::sync::Arc; @@ -35,6 +33,22 @@ pub struct NapiFqDomain { pub group_gen: NapiPastaFq, } +impl From for Domain { + fn from(domain: NapiFqDomain) -> Self { + let size = 1 << domain.log_size_of_group; + Domain::::new(size).expect("Failed to create evaluation domain") + } +} + +impl From<&Domain> for NapiFqDomain { + fn from(domain: &Domain) -> Self { + Self { + log_size_of_group: domain.log_size_of_group as i32, + group_gen: domain.group_gen.into(), + } + } +} + #[napi(object, js_name = "WasmFqShifts")] #[derive(Clone, Debug, Serialize, Deserialize, Default)] pub struct NapiFqShifts { @@ -47,6 +61,20 @@ pub struct NapiFqShifts { pub s6: NapiPastaFq, } +impl From<&[Fq; 7]> for NapiFqShifts { + fn from(shifts: &[Fq; 7]) -> Self { + Self { + s0: shifts[0].into(), + s1: shifts[1].into(), + s2: shifts[2].into(), + s3: shifts[3].into(), + s4: shifts[4].into(), + s5: shifts[5].into(), + s6: shifts[6].into(), + } + } +} + #[napi(object, js_name = "WasmFqLookupSelectors")] #[derive(Clone, Debug, Serialize, Deserialize, Default)] pub struct NapiFqLookupSelectors { @@ -182,6 +210,27 @@ pub struct NapiFqPlonkVerificationEvals { pub rot_comm: Option, } +impl From<&VerifierIndex>> for NapiFqPlonkVerificationEvals { + fn from(index: &VerifierIndex>) -> Self { + Self { + sigma_comm: index.sigma_comm.iter().map(Into::into).collect(), + coefficients_comm: index.coefficients_comm.iter().map(Into::into).collect(), + generic_comm: index.generic_comm.clone().into(), + psm_comm: index.psm_comm.clone().into(), + complete_add_comm: index.complete_add_comm.clone().into(), + mul_comm: index.mul_comm.clone().into(), + emul_comm: index.emul_comm.clone().into(), + endomul_scalar_comm: index.endomul_scalar_comm.clone().into(), + xor_comm: index.xor_comm.clone().map(Into::into), + range_check0_comm: index.range_check0_comm.clone().map(Into::into), + range_check1_comm: index.range_check1_comm.clone().map(Into::into), + foreign_field_add_comm: index.foreign_field_add_comm.clone().map(Into::into), + foreign_field_mul_comm: index.foreign_field_mul_comm.clone().map(Into::into), + rot_comm: index.rot_comm.clone().map(Into::into), + } + } +} + 
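// Illustrative sketch (not part of the diff): the `From` conversions added in this file let the
// kimchi verifier index round-trip through its NAPI wrapper. Names follow the fq module shown here;
// the usage itself is an assumption for illustration, not code taken from the repository.
//   let vi = prover_index.0.as_ref().verifier_index();
//   let napi_vi = NapiFqPlonkVerifierIndex::from(&vi);                            // kimchi -> NAPI wrapper
//   let vi_back: VerifierIndex<GAffine, OpeningProof<GAffine>> = napi_vi.into();  // NAPI wrapper -> kimchi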
#[napi(object, js_name = "WasmFqPlonkVerifierIndex")] #[derive(Clone, Debug, Default)] pub struct NapiFqPlonkVerifierIndex { @@ -196,6 +245,18 @@ pub struct NapiFqPlonkVerifierIndex { pub zk_rows: i32, } +#[napi(js_name = "caml_pasta_fq_plonk_verifier_index_create")] +pub fn caml_pasta_fq_plonk_verifier_index_create( + index: &External, +) -> NapiFqPlonkVerifierIndex { + index + .0 + .srs + .get_lagrange_basis(index.0.as_ref().cs.domain.d1); + let verifier_index = index.0.as_ref().verifier_index(); + NapiFqPlonkVerifierIndex::from(&verifier_index) +} + #[napi(js_name = "caml_pasta_fq_plonk_verifier_index_shifts")] pub fn caml_pasta_fq_plonk_verifier_index_shifts( log2_size: i32, @@ -223,7 +284,7 @@ pub fn caml_pasta_fq_plonk_verifier_index_shifts( }) } -impl From for DlogVerifierIndex> { +impl From for VerifierIndex> { fn from(index: NapiFqPlonkVerifierIndex) -> Self { let max_poly_size = index.max_poly_size; let public_ = index.public_; @@ -242,7 +303,7 @@ impl From for DlogVerifierIndex for DlogVerifierIndex>> for NapiFqPlonkVerifierIndex { + fn from(index: &VerifierIndex>) -> Self { + Self { + domain: (&index.domain).into(), + max_poly_size: index.max_poly_size as i32, + public_: index.public as i32, + prev_challenges: index.prev_challenges as i32, + srs: (&index.srs).into(), + evals: index.into(), + shifts: (&index.shift).into(), + lookup_index: index.lookup_index.as_ref().map(Into::into), + zk_rows: index.zk_rows as i32, + } + } +} + fn compute_feature_flags(index: &NapiFqPlonkVerifierIndex) -> FeatureFlags { let xor = index.evals.xor_comm.is_some(); let range_check0 = index.evals.range_check0_comm.is_some(); diff --git a/plonk-napi/src/plonk_verifier_index/mod.rs b/plonk-napi/src/old_plonk_verifier_index/mod.rs similarity index 100% rename from plonk-napi/src/plonk_verifier_index/mod.rs rename to plonk-napi/src/old_plonk_verifier_index/mod.rs diff --git a/plonk-napi/src/oracles.rs b/plonk-napi/src/oracles.rs new file mode 100644 index 00000000000..3c46822d030 --- /dev/null +++ b/plonk-napi/src/oracles.rs @@ -0,0 +1,331 @@ +use crate::{ + vector::{NapiFlatVector, NapiVector}, + wrappers::field::{NapiPastaFp, NapiPastaFq}, +}; +use ark_ff::{One, Zero}; +use kimchi::{ + circuits::scalars::RandomOracles, proof::ProverProof, + verifier_index::VerifierIndex as DlogVerifierIndex, +}; +use mina_poseidon::{ + self, + constants::PlonkSpongeConstantsKimchi, + sponge::{DefaultFqSponge, DefaultFrSponge}, + FqSponge, +}; +use napi::{bindgen_prelude::*, Error as NapiError, Status}; +use napi_derive::napi; +use paste::paste; +use poly_commitment::{ + commitment::{shift_scalar, PolyComm}, + ipa::OpeningProof, + SRS, +}; + +macro_rules! impl_oracles { + ($NapiF: ty, + $F: ty, + $NapiG: ty, + $G: ty, + $NapiPolyComm: ty, + $NapiProverProof: ty, + $index: ty, + $curve_params: ty, + $field_name: ident) => { + + paste! 
{ + use mina_poseidon::sponge::ScalarChallenge; + + #[napi(js_name = [])] + #[derive(Clone, Copy)] + pub struct [] { + pub joint_combiner_chal: Option<$NapiF>, + pub joint_combiner: Option<$NapiF>, + pub beta: $NapiF, + pub gamma: $NapiF, + pub alpha_chal: $NapiF, + pub alpha: $NapiF, + pub zeta: $NapiF, + pub v: $NapiF, + pub u: $NapiF, + pub zeta_chal: $NapiF, + pub v_chal: $NapiF, + pub u_chal: $NapiF, + } + type NapiRandomOracles = []; + + #[napi] + impl [] { + #[napi(constructor)] + #[allow(clippy::too_many_arguments)] + pub fn new( + joint_combiner_chal: Option<$NapiF>, + joint_combiner: Option<$NapiF>, + beta: $NapiF, + gamma: $NapiF, + alpha_chal: $NapiF, + alpha: $NapiF, + zeta: $NapiF, + v: $NapiF, + u: $NapiF, + zeta_chal: $NapiF, + v_chal: $NapiF, + u_chal: $NapiF) -> Self { + Self { + joint_combiner_chal, + joint_combiner, + beta, + gamma, + alpha_chal, + alpha, + zeta, + v, + u, + zeta_chal, + v_chal, + u_chal, + } + } + } + + impl From> for NapiRandomOracles + { + fn from(ro: RandomOracles<$F>) -> Self { + Self { + joint_combiner_chal: ro.joint_combiner.as_ref().map(|x| x.0.0.into()), + joint_combiner: ro.joint_combiner.as_ref().map(|x| x.1.into()), + beta: ro.beta.into(), + gamma: ro.gamma.into(), + alpha_chal: ro.alpha_chal.0.into(), + alpha: ro.alpha.into(), + zeta: ro.zeta.into(), + v: ro.v.into(), + u: ro.u.into(), + zeta_chal: ro.zeta_chal.0.into(), + v_chal: ro.v_chal.0.into(), + u_chal: ro.u_chal.0.into(), + } + } + } + + impl From for RandomOracles<$F> + { + fn from(ro: NapiRandomOracles) -> Self { + Self { + joint_combiner: ro.joint_combiner_chal.and_then(|x| { + ro.joint_combiner.map(|y| (ScalarChallenge(x.into()), y.into())) + }), + beta: ro.beta.into(), + gamma: ro.gamma.into(), + alpha_chal: ScalarChallenge(ro.alpha_chal.into()), + alpha: ro.alpha.into(), + zeta: ro.zeta.into(), + v: ro.v.into(), + u: ro.u.into(), + zeta_chal: ScalarChallenge(ro.zeta_chal.into()), + v_chal: ScalarChallenge(ro.v_chal.into()), + u_chal: ScalarChallenge(ro.u_chal.into()), + } + } + } + + impl FromNapiValue for [] { + unsafe fn from_napi_value( + env: sys::napi_env, + napi_val: sys::napi_value, + ) -> Result { + let instance = ]> as FromNapiValue>::from_napi_value(env, napi_val)?; + Ok((*instance).clone()) + } + } + + impl<'a> ToNapiValue for &'a mut [] { + unsafe fn to_napi_value( + env: sys::napi_env, + val: Self, + ) -> Result { + <[] as ToNapiValue>::to_napi_value(env, val.clone()) + } + } + + #[napi(js_name = [])] + #[derive(Clone)] + pub struct [] { + pub o: [], + pub p_eval0: $NapiF, + pub p_eval1: $NapiF, + #[napi(skip)] + pub opening_prechallenges: NapiFlatVector<$NapiF>, + pub digest_before_evaluations: $NapiF, + } + + #[napi] + impl [] { + #[napi(constructor)] + pub fn new( + o: NapiRandomOracles, + p_eval0: $NapiF, + p_eval1: $NapiF, + opening_prechallenges: NapiFlatVector<$NapiF>, + digest_before_evaluations: $NapiF) -> Self { + Self {o, p_eval0, p_eval1, opening_prechallenges, digest_before_evaluations} + } + + #[napi(getter, js_name="opening_prechallenges")] + pub fn opening_prechallenges(&self) -> NapiFlatVector<$NapiF> { + self.opening_prechallenges.clone() + } + + #[napi(setter, js_name="set_opening_prechallenges")] + pub fn set_opening_prechallenges(&mut self, x: NapiFlatVector<$NapiF>) { + self.opening_prechallenges = x; + } + } + + #[napi(js_name = [<$F:snake _oracles_create>])] + pub fn [<$F:snake _oracles_create>]( + lgr_comm: NapiVector<$NapiPolyComm>, // the bases to commit polynomials + index: $index, // parameters + proof: $NapiProverProof, // the final 
proof (contains public elements at the beginning) + ) -> Result<[]> { + // conversions + let result: Result<(RandomOracles<$F>, [Vec<$F>; 2], NapiFlatVector<$NapiF>, $F), String> = { + let index: DlogVerifierIndex<$G, OpeningProof<$G>> = index.into(); + + let lgr_comm: Vec> = lgr_comm + .into_iter() + .take(proof.public.len()) + .map(Into::into) + .collect(); + let lgr_comm_refs: Vec<_> = lgr_comm.iter().collect(); + + let p_comm = PolyComm::<$G>::multi_scalar_mul( + &lgr_comm_refs, + &proof + .public + .iter() + .map(|a| a.clone().into()) + .map(|s: $F| -s) + .collect::>(), + ); + let p_comm = { + index + .srs() + .mask_custom( + p_comm.clone(), + &p_comm.map(|_| $F::one()), + ) + .unwrap() + .commitment + }; + + let (proof, public_input): (ProverProof<$G, OpeningProof<$G>>, Vec<$F>) = proof.into(); + + let oracles_result = + proof.oracles::< + DefaultFqSponge<$curve_params, PlonkSpongeConstantsKimchi>, + DefaultFrSponge<$F, PlonkSpongeConstantsKimchi> + >(&index, &p_comm, Some(&public_input)); + let oracles_result = match oracles_result { + Err(e) => { + return Err(NapiError::new(Status::GenericFailure, format!("oracles_create: {}", e))); + } + Ok(cs) => cs, + }; + + let (mut sponge, combined_inner_product, p_eval, digest, oracles) = ( + oracles_result.fq_sponge, + oracles_result.combined_inner_product, + oracles_result.public_evals, + oracles_result.digest, + oracles_result.oracles, + ); + + sponge.absorb_fr(&[shift_scalar::<$G>(combined_inner_product)]); + + let opening_prechallenges = proof + .proof + .prechallenges(&mut sponge) + .into_iter() + .map(|x| x.0.into()) + .collect(); + + Ok((oracles, p_eval, opening_prechallenges, digest)) + }; + + match result { + Ok((oracles, p_eval, opening_prechallenges, digest)) => Ok([] { + o: oracles.into(), + p_eval0: p_eval[0][0].into(), + p_eval1: p_eval[1][0].into(), + opening_prechallenges, + digest_before_evaluations: digest.into() + }), + Err(err) => Err(NapiError::new(Status::GenericFailure, err)), + } + } + + #[napi(js_name = [<$F:snake _oracles_dummy>])] + pub fn [<$F:snake _oracles_dummy>]() -> [] { + [] { + o: RandomOracles::<$F>::default().into(), + p_eval0: $F::zero().into(), + p_eval1: $F::zero().into(), + opening_prechallenges: vec![].into(), + digest_before_evaluations: $F::zero().into(), + } + } + + #[napi(js_name = [<$F:snake _oracles_deep_copy>])] + pub fn [<$F:snake _oracles_deep_copy>]( + x: $NapiProverProof, + ) -> $NapiProverProof { + x + } + } + } +} + +pub mod fp { + use super::*; + use crate::{ + plonk_verifier_index::fp::NapiFpPlonkVerifierIndex as WasmPlonkVerifierIndex, + poly_comm::vesta::NapiFpPolyComm as WasmPolyComm, + proof::fp::NapiFpProverProof as WasmProverProof, + }; + use mina_curves::pasta::{Fp, Vesta as GAffine, VestaParameters}; + + impl_oracles!( + NapiPastaFp, + Fp, + WasmGVesta, + GAffine, + WasmPolyComm, + WasmProverProof, + WasmPlonkVerifierIndex, + VestaParameters, + Fp + ); +} + +pub mod fq { + use super::*; + use crate::{ + plonk_verifier_index::fq::NapiFqPlonkVerifierIndex as WasmPlonkVerifierIndex, + poly_comm::pallas::NapiFqPolyComm as WasmPolyComm, + proof::fq::NapiFqProverProof as WasmProverProof, + }; + use mina_curves::pasta::{Fq, Pallas as GAffine, PallasParameters}; + + impl_oracles!( + NapiPastaFq, + Fq, + WasmGPallas, + GAffine, + WasmPolyComm, + WasmProverProof, + WasmPlonkVerifierIndex, + PallasParameters, + Fq + ); +} diff --git a/plonk-napi/src/pasta_fp_plonk_index.rs b/plonk-napi/src/pasta_fp_plonk_index.rs index 7aac5dba3dd..58b02ebc45d 100644 --- 
a/plonk-napi/src/pasta_fp_plonk_index.rs +++ b/plonk-napi/src/pasta_fp_plonk_index.rs @@ -26,7 +26,11 @@ use std::{ use crate::tables::{ lookup_table_fp_from_js, runtime_table_cfg_fp_from_js, JsLookupTableFp, JsRuntimeTableCfgFp, }; -pub struct WasmPastaFpPlonkIndex(pub Box>>); + +#[napi(js_name = "WasmPastaFpPlonkIndex")] +pub struct WasmPastaFpPlonkIndex( + #[napi(skip)] pub Box>>, +); #[derive(Serialize, Deserialize)] struct SerializedProverIndex { @@ -78,21 +82,9 @@ impl WasmPastaFpPlonkIndex { } } -// TOOD: remove incl all dependencies when no longer needed and we only pass napi objects around -#[napi(js_name = "prover_index_fp_from_bytes")] -pub fn prover_index_fp_from_bytes( - bytes: Uint8Array, -) -> napi::bindgen_prelude::Result<External<WasmPastaFpPlonkIndex>> { - report_native_call(); - - let index = WasmPastaFpPlonkIndex::deserialize_inner(bytes.as_ref()) - .map_err(|e| Error::new(Status::InvalidArg, e))?; - Ok(External::new(index)) -} - -// TOOD: remove incl all dependencies when no longer needed and we only pass napi objects around -#[napi(js_name = "prover_index_fp_to_bytes")] -pub fn prover_index_fp_to_bytes( +// TODO: remove incl all dependencies when no longer needed and we only pass napi objects around +#[napi(js_name = "prover_index_fp_serialize")] +pub fn prover_index_fp_serialize( index: &External<WasmPastaFpPlonkIndex>, ) -> napi::bindgen_prelude::Result<Uint8Array> { report_native_call(); @@ -103,6 +95,18 @@ pub fn prover_index_fp_to_bytes( Ok(Uint8Array::from(bytes)) } +// TODO: remove incl all dependencies when no longer needed and we only pass napi objects around +#[napi(js_name = "prover_index_fp_deserialize")] +pub fn prover_index_fp_deserialize( + bytes: Uint8Array, +) -> napi::bindgen_prelude::Result<External<WasmPastaFpPlonkIndex>> { + report_native_call(); + + let index = WasmPastaFpPlonkIndex::deserialize_inner(bytes.as_ref()) + .map_err(|e| Error::new(Status::InvalidArg, e))?; + Ok(External::new(index)) +} + #[napi(js_name = "caml_pasta_fp_plonk_index_max_degree")] pub fn caml_pasta_fp_plonk_index_max_degree(index: &External<WasmPastaFpPlonkIndex>) -> i32 { index.0.srs.max_poly_size() as i32 diff --git a/plonk-napi/src/pasta_fq_plonk_index.rs b/plonk-napi/src/pasta_fq_plonk_index.rs index c1fee9f6b1b..dd59c84e537 100644 --- a/plonk-napi/src/pasta_fq_plonk_index.rs +++ b/plonk-napi/src/pasta_fq_plonk_index.rs @@ -22,7 +22,10 @@ use std::{ use crate::tables::{ lookup_table_fq_from_js, runtime_table_cfg_fq_from_js, JsLookupTableFq, JsRuntimeTableCfgFq, }; -pub struct WasmPastaFqPlonkIndex(pub Box>>); +#[napi(js_name = "WasmPastaFqPlonkIndex")] +pub struct WasmPastaFqPlonkIndex( + #[napi(skip)] pub Box>>, +); #[derive(Serialize, Deserialize)] struct SerializedProverIndex { @@ -74,21 +77,9 @@ impl WasmPastaFqPlonkIndex { } } -// TOOD: remove incl all dependencies when no longer needed and we only pass napi objects around -#[napi(js_name = "prover_index_fq_from_bytes")] -pub fn prover_index_fq_from_bytes( - bytes: Uint8Array, -) -> napi::bindgen_prelude::Result<External<WasmPastaFqPlonkIndex>> { - report_native_call(); - - let index = WasmPastaFqPlonkIndex::deserialize_inner(bytes.as_ref()) - .map_err(|e| Error::new(Status::InvalidArg, e))?; - Ok(External::new(index)) -} - -// TOOD: remove incl all dependencies when no longer needed and we only pass napi objects around -#[napi(js_name = "prover_index_fq_to_bytes")] -pub fn prover_index_fq_to_bytes( +// TODO: remove incl all dependencies when no longer needed and we only pass napi objects around +#[napi(js_name = "prover_index_fq_serialize")] +pub fn prover_index_fq_serialize( index: &External<WasmPastaFqPlonkIndex>, ) -> napi::bindgen_prelude::Result<Uint8Array> { report_native_call(); @@ -99,6
+90,18 @@ pub fn prover_index_fq_to_bytes( Ok(Uint8Array::from(bytes)) } +// TODO: remove incl all dependencies when no longer needed and we only pass napi objects around +#[napi(js_name = "prover_index_fq_deserialize")] +pub fn prover_index_fq_deserialize( + bytes: Uint8Array, +) -> napi::bindgen_prelude::Result> { + report_native_call(); + + let index = WasmPastaFqPlonkIndex::deserialize_inner(bytes.as_ref()) + .map_err(|e| Error::new(Status::InvalidArg, e))?; + Ok(External::new(index)) +} + #[napi(js_name = "caml_pasta_fq_plonk_index_max_degree")] pub fn caml_pasta_fq_plonk_index_max_degree(index: &External) -> i32 { index.0.srs.max_poly_size() as i32 diff --git a/plonk-napi/src/plonk_verifier_index.rs b/plonk-napi/src/plonk_verifier_index.rs new file mode 100644 index 00000000000..c966814bcef --- /dev/null +++ b/plonk-napi/src/plonk_verifier_index.rs @@ -0,0 +1,637 @@ +use crate::{vector::NapiVector, wrappers::lookups::NapiLookupInfo}; +use ark_ec::AffineRepr; +use ark_ff::One; +use ark_poly::{EvaluationDomain, Radix2EvaluationDomain as Domain}; +use kimchi::{ + circuits::{ + constraints::FeatureFlags, + lookup::{ + index::LookupSelectors, + lookups::{LookupFeatures, LookupPatterns}, + }, + polynomials::permutation::{permutation_vanishing_polynomial, zk_w, Shifts}, + wires::{COLUMNS, PERMUTS}, + }, + linearization::expr_linearization, + verifier_index::{LookupVerifierIndex, VerifierIndex}, +}; +use napi::{bindgen_prelude::*, Error, Status}; +use napi_derive::napi; +use paste::paste; +use poly_commitment::{ + commitment::PolyComm, + ipa::{OpeningProof, SRS}, + SRS as _, +}; +use serde::{Deserialize, Serialize}; +use std::{path::Path, sync::Arc}; + +macro_rules! impl_verification_key { + ( + $NapiG: ty, + $G: ty, + $NapiF: ty, + $F: ty, + $NapiPolyComm: ty, + $NapiSrs: ty, + $GOther: ty, + $FrSpongeParams: path, + $FqSpongeParams: path, + $NapiIndex: ty, + $field_name: ident + ) => { + paste! 
{ + #[napi(object, js_name = [])] + #[derive(Clone, Debug, Serialize, Deserialize, Default)] + pub struct [] { + #[napi(js_name = "log_size_of_group")] + pub log_size_of_group: i32, + #[napi(js_name = "group_gen")] + pub group_gen: $NapiF, + } + type NapiDomain = []; + + impl From for Domain<[<$F>]> { + fn from(domain: NapiDomain) -> Self { + let size = 1 << domain.log_size_of_group; + Domain::<[<$F>]>::new(size).expect("Failed to create evaluation domain") + } + } + + impl From<&Domain<$F>> for NapiDomain { + fn from(domain: &Domain<$F>) -> Self { + Self { + log_size_of_group: domain.log_size_of_group as i32, + group_gen: domain.group_gen.into(), + } + } + } + + #[napi(object, js_name = [])] + #[derive(Clone, Debug, Serialize, Deserialize, Default)] + pub struct [] { + #[napi(skip, js_name = "sigma_comm")] + pub sigma_comm: NapiVector<$NapiPolyComm>, + #[napi(skip, js_name = "coefficients_comm")] + pub coefficients_comm: NapiVector<$NapiPolyComm>, + #[napi(skip, js_name = "generic_comm")] + pub generic_comm: $NapiPolyComm, + #[napi(skip, js_name = "psm_comm")] + pub psm_comm: $NapiPolyComm, + #[napi(skip, js_name = "complete_add_comm")] + pub complete_add_comm: $NapiPolyComm, + #[napi(skip, js_name = "mul_comm")] + pub mul_comm: $NapiPolyComm, + #[napi(skip, js_name = "emul_comm")] + pub emul_comm: $NapiPolyComm, + #[napi(skip, js_name = "endomul_scalar_comm")] + pub endomul_scalar_comm: $NapiPolyComm, + #[napi(skip, js_name = "xor_comm")] + pub xor_comm: Option<$NapiPolyComm>, + #[napi(skip, js_name = "range_check0_comm")] + pub range_check0_comm: Option<$NapiPolyComm>, + #[napi(skip, js_name = "range_check1_comm")] + pub range_check1_comm: Option<$NapiPolyComm>, + #[napi(skip, js_name = "foreign_field_add_comm")] + pub foreign_field_add_comm: Option<$NapiPolyComm>, + #[napi(skip, js_name = "foreign_field_mul_comm")] + pub foreign_field_mul_comm: Option<$NapiPolyComm>, + #[napi(skip, js_name = "rot_comm")] + pub rot_comm: Option<$NapiPolyComm>, + } + type NapiPlonkVerificationEvals = []; + + impl From<&VerifierIndex<$G, OpeningProof<$G>>> for NapiPlonkVerificationEvals { + fn from(index: &VerifierIndex<$G, OpeningProof<$G>>) -> Self { + Self { + sigma_comm: index.sigma_comm.iter().map(Into::into).collect(), + coefficients_comm: index.coefficients_comm.iter().map(Into::into).collect(), + generic_comm: index.generic_comm.clone().into(), + psm_comm: index.psm_comm.clone().into(), + complete_add_comm: index.complete_add_comm.clone().into(), + mul_comm: index.mul_comm.clone().into(), + emul_comm: index.emul_comm.clone().into(), + endomul_scalar_comm: index.endomul_scalar_comm.clone().into(), + xor_comm: index.xor_comm.clone().map(Into::into), + range_check0_comm: index.range_check0_comm.clone().map(Into::into), + range_check1_comm: index.range_check1_comm.clone().map(Into::into), + foreign_field_add_comm: index.foreign_field_add_comm.clone().map(Into::into), + foreign_field_mul_comm: index.foreign_field_mul_comm.clone().map(Into::into), + rot_comm: index.rot_comm.clone().map(Into::into), + } + } + } + + #[derive(Clone, Debug, Serialize, Deserialize, Default)] + #[napi(object, js_name = [])] + pub struct [] { + pub s0: $NapiF, + pub s1: $NapiF, + pub s2: $NapiF, + pub s3: $NapiF, + pub s4: $NapiF, + pub s5: $NapiF, + pub s6: $NapiF, + } + type NapiShifts = []; + + impl From<&[$F; 7]> for NapiShifts { + fn from(shifts: &[$F; 7]) -> Self { + Self { + s0: shifts[0].into(), + s1: shifts[1].into(), + s2: shifts[2].into(), + s3: shifts[3].into(), + s4: shifts[4].into(), + s5: shifts[5].into(), + s6: 
shifts[6].into(), + } + } + } + + #[derive(Clone, Debug, Serialize, Deserialize, Default)] + #[napi(object, js_name = [])] + pub struct [] { + #[napi(skip)] + pub xor: Option<$NapiPolyComm>, + #[napi(skip)] + pub lookup: Option<$NapiPolyComm>, + #[napi(skip, js_name = "range_check")] + pub range_check: Option<$NapiPolyComm>, + #[napi(skip)] + pub ffmul: Option<$NapiPolyComm>, + } + type NapiLookupSelectors = []; + + impl From for LookupSelectors> { + fn from(x: NapiLookupSelectors) -> Self { + Self { + xor: x.xor.map(Into::into), + lookup: x.lookup.map(Into::into), + range_check: x.range_check.map(Into::into), + ffmul: x.ffmul.map(Into::into), + } + } + } + + impl From<&NapiLookupSelectors> for LookupSelectors> { + fn from(x: &NapiLookupSelectors) -> Self { + Self { + xor: x.xor.clone().map(Into::into), + lookup: x.lookup.clone().map(Into::into), + range_check: x.range_check.clone().map(Into::into), + ffmul: x.ffmul.clone().map(Into::into), + } + } + } + + impl From<&LookupSelectors>> for NapiLookupSelectors { + fn from(x: &LookupSelectors>) -> Self { + Self { + xor: x.xor.clone().map(Into::into), + lookup: x.lookup.clone().map(Into::into), + range_check: x.range_check.clone().map(Into::into), + ffmul: x.ffmul.clone().map(Into::into), + } + } + } + + impl From>> for NapiLookupSelectors { + fn from(x: LookupSelectors>) -> Self { + Self { + xor: x.xor.clone().map(Into::into), + lookup: x.lookup.clone().map(Into::into), + range_check: x.range_check.clone().map(Into::into), + ffmul: x.ffmul.clone().map(Into::into), + } + } + } + + #[napi(object, js_name = [])] + #[derive(Clone, Debug, Serialize, Deserialize, Default)] + pub struct [] { + pub joint_lookup_used: bool, + + #[napi(skip)] + pub lookup_table: NapiVector<$NapiPolyComm>, + + #[napi(skip, js_name = "lookup_selectors")] + pub lookup_selectors: NapiLookupSelectors, + + #[napi(skip)] + pub table_ids: Option<$NapiPolyComm>, + + #[napi(skip)] + pub lookup_info: NapiLookupInfo, + + #[napi(skip)] + pub runtime_tables_selector: Option<$NapiPolyComm>, + } + type NapiLookupVerifierIndex = []; + + impl From<&LookupVerifierIndex<$G>> for NapiLookupVerifierIndex { + fn from(x: &LookupVerifierIndex<$G>) -> Self { + Self { + joint_lookup_used: x.joint_lookup_used.into(), + lookup_table: x.lookup_table.clone().iter().map(Into::into).collect(), + lookup_selectors: x.lookup_selectors.clone().into(), + table_ids: x.table_ids.clone().map(Into::into), + lookup_info: x.lookup_info.into(), + runtime_tables_selector: x.runtime_tables_selector.clone().map(Into::into) + } + } + } + + impl From> for NapiLookupVerifierIndex { + fn from(x: LookupVerifierIndex<$G>) -> Self { + Self { + joint_lookup_used: x.joint_lookup_used.into(), + lookup_table: x.lookup_table.iter().map(Into::into).collect(), + lookup_selectors: x.lookup_selectors.into(), + table_ids: x.table_ids.map(Into::into), + lookup_info: x.lookup_info.into(), + runtime_tables_selector: x.runtime_tables_selector.map(Into::into) + } + } + } + + impl From<&NapiLookupVerifierIndex> for LookupVerifierIndex<$G> { + fn from(x: &NapiLookupVerifierIndex) -> Self { + Self { + joint_lookup_used: x.joint_lookup_used.into(), + lookup_table: x.lookup_table.clone().iter().map(Into::into).collect(), + lookup_selectors: x.lookup_selectors.clone().into(), + table_ids: x.table_ids.clone().map(Into::into), + lookup_info: x.lookup_info.clone().into(), + runtime_tables_selector: x.runtime_tables_selector.clone().map(Into::into) + } + } + } + + impl From for LookupVerifierIndex<$G> { + fn from(x: NapiLookupVerifierIndex) -> 
Self { + Self { + joint_lookup_used: x.joint_lookup_used.into(), + lookup_table: x.lookup_table.iter().map(Into::into).collect(), + lookup_selectors: x.lookup_selectors.into(), + table_ids: x.table_ids.map(Into::into), + lookup_info: x.lookup_info.into(), + runtime_tables_selector: x.runtime_tables_selector.map(Into::into) + } + } + } + + #[napi(object, js_name = [])] + #[derive(Clone, Debug, Default)] + pub struct [] { + pub domain: NapiDomain, + #[napi(js_name = "max_poly_size")] + pub max_poly_size: i32, + pub public_: i32, + pub prev_challenges: i32, + #[napi(skip)] + pub srs: $NapiSrs, + #[napi(skip)] + pub evals: NapiPlonkVerificationEvals, + pub shifts: NapiShifts, + #[napi(skip)] + pub lookup_index: Option, + pub zk_rows: i32, + } + type NapiPlonkVerifierIndex = []; + + fn compute_feature_flags(index: &NapiPlonkVerifierIndex) -> FeatureFlags { + let xor = index.evals.xor_comm.is_some(); + let range_check0 = index.evals.range_check0_comm.is_some(); + let range_check1 = index.evals.range_check1_comm.is_some(); + let foreign_field_add = index.evals.foreign_field_add_comm.is_some(); + let foreign_field_mul = index.evals.foreign_field_mul_comm.is_some(); + let rot = index.evals.rot_comm.is_some(); + + let lookup = index + .lookup_index.as_ref() + .map_or(false, |li| li.lookup_info.features.patterns.lookup); + + let runtime_tables = index + .lookup_index.as_ref() + .map_or(false, |li| li.runtime_tables_selector.is_some()); + + let patterns = LookupPatterns { + xor, + lookup, + range_check: range_check0 || range_check1 || rot, + foreign_field_mul, + }; + + FeatureFlags { + range_check0, + range_check1, + foreign_field_add, + foreign_field_mul, + xor, + rot, + lookup_features: LookupFeatures { + patterns, + joint_lookup_used: patterns.joint_lookups_used(), + uses_runtime_tables: runtime_tables, + }, + } + } + + impl From for VerifierIndex<$G, OpeningProof<$G>> { + fn from(index: NapiPlonkVerifierIndex) -> Self { + let max_poly_size = index.max_poly_size; + let public_ = index.public_; + let prev_challenges = index.prev_challenges; + let log_size_of_group = index.domain.log_size_of_group; + let srs = &index.srs; + let evals = &index.evals; + let shifts = &index.shifts; + + let (endo_q, _endo_r) = poly_commitment::ipa::endos::(); + let domain = Domain::<$F>::new(1 << log_size_of_group).unwrap(); + + let feature_flags = compute_feature_flags(&index); + let (linearization, powers_of_alpha) = expr_linearization(Some(&feature_flags), true); + + let index = { + let zk_rows = index.zk_rows as u64; + + VerifierIndex { + domain, + + sigma_comm: core::array::from_fn(|i| (&evals.sigma_comm[i]).into()), + generic_comm: (&evals.generic_comm).into(), + coefficients_comm: core::array::from_fn(|i| (&evals.coefficients_comm[i]).into()), + + psm_comm: (&evals.psm_comm).into(), + + complete_add_comm: (&evals.complete_add_comm).into(), + mul_comm: (&evals.mul_comm).into(), + emul_comm: (&evals.emul_comm).into(), + + endomul_scalar_comm: (&evals.endomul_scalar_comm).into(), + xor_comm: (&evals.xor_comm).as_ref().map(Into::into), + range_check0_comm: (&evals.range_check0_comm).as_ref().map(Into::into), + range_check1_comm: (&evals.range_check1_comm).as_ref().map(Into::into), + foreign_field_add_comm: (&evals.foreign_field_add_comm).as_ref().map(Into::into), + foreign_field_mul_comm: (&evals.foreign_field_mul_comm).as_ref().map(Into::into), + rot_comm: (&evals.rot_comm).as_ref().map(Into::into), + + w: { + let res = once_cell::sync::OnceCell::new(); + res.set(zk_w(domain, zk_rows)).unwrap(); + res + }, + endo: 
endo_q, + max_poly_size: max_poly_size as usize, + public: public_ as usize, + prev_challenges: prev_challenges as usize, + permutation_vanishing_polynomial_m: { + let res = once_cell::sync::OnceCell::new(); + res.set(permutation_vanishing_polynomial(domain, zk_rows)) + .unwrap(); + res + }, + shift: [ + shifts.s0.into(), + shifts.s1.into(), + shifts.s2.into(), + shifts.s3.into(), + shifts.s4.into(), + shifts.s5.into(), + shifts.s6.into(), + ], + srs: { Arc::clone(&srs.0) }, + + zk_rows, + + linearization, + powers_of_alpha, + lookup_index: index.lookup_index.map(Into::into), + } + }; + (index, srs.0.clone()).0 + } + } + + impl From<&VerifierIndex<$G, OpeningProof<$G>>> for NapiPlonkVerifierIndex { + fn from(index: &VerifierIndex<$G, OpeningProof<$G>>) -> Self { + Self { + domain: (&index.domain).into(), + max_poly_size: index.max_poly_size as i32, + public_: index.public as i32, + prev_challenges: index.prev_challenges as i32, + srs: (&index.srs).into(), + evals: index.into(), + shifts: (&index.shift).into(), + lookup_index: index.lookup_index.as_ref().map(Into::into), + zk_rows: index.zk_rows as i32, + } + } + } + + pub fn read_raw( + offset: Option, + srs: &$NapiSrs, + path: String, + ) -> napi::Result>> { + let path = Path::new(&path); + let (endo_q, _endo_r) = poly_commitment::ipa::endos::<$GOther>(); + VerifierIndex::<$G, OpeningProof<$G>>::from_file( + srs.0.clone(), + path, + offset.map(|x| x as u64), + endo_q, + ).map_err(|e| Error::new(Status::GenericFailure, format!("read_raw: {}", e))) + } + + #[napi(js_name = [])] + pub fn []( + offset: Option, + srs: &$NapiSrs, + path: String, + ) -> napi::Result { + let vi = read_raw(offset, srs, path)?; + Ok(NapiPlonkVerifierIndex::from(&vi)) + } + + #[napi(js_name = [])] + pub fn []( + append: Option, + index: NapiPlonkVerifierIndex, + path: String, + ) -> napi::Result<()> { + let index: VerifierIndex<$G, OpeningProof<$G>> = index.into(); + let path = Path::new(&path); + index + .to_file(path, append) + .map_err(|e| Error::new(Status::GenericFailure, format!("plonk_verifier_index_write: {}", e))) + } + + #[napi(js_name = [])] + pub fn []( + index: NapiPlonkVerifierIndex, + ) -> String { + let index: VerifierIndex<$G, OpeningProof<$G>> = index.into(); + serde_json::to_string(&index).unwrap() + } + + #[napi(js_name = [])] + pub fn []( + _srs: &$NapiSrs, + index: String, + ) -> napi::Result { + match serde_json::from_str::>>(&index) { + Ok(vi) => Ok(NapiPlonkVerifierIndex::from(&vi)), + Err(e) => Err(Error::new(Status::GenericFailure, e.to_string())), + } + } + + #[napi(js_name = [])] + pub fn []( + index: &External<$NapiIndex>, + ) -> NapiPlonkVerifierIndex { + index.0.srs.get_lagrange_basis(index.0.as_ref().cs.domain.d1); + let verifier_index = index.0.as_ref().verifier_index(); + NapiPlonkVerifierIndex::from(&verifier_index) + } + + #[napi(js_name = [])] + pub fn [](log2_size: i32) -> napi::bindgen_prelude::Result { + println!( + "from napi! 
caml_pasta_plonk_verifier_index_shifts with log2_size {}", + log2_size + ); + + let size = 1usize << (log2_size as u32); + let domain = Domain::<$F>::new(size) + .ok_or_else(|| Error::new(Status::InvalidArg, "failed to create evaluation domain"))?; + + let shifts = Shifts::new(&domain); + let s = shifts.shifts(); + + Ok(NapiShifts { + s0: s[0].clone().into(), + s1: s[1].clone().into(), + s2: s[2].clone().into(), + s3: s[3].clone().into(), + s4: s[4].clone().into(), + s5: s[5].clone().into(), + s6: s[6].clone().into(), + }) + } + + #[napi(js_name = [])] + pub fn []() -> NapiPlonkVerifierIndex { + fn comm() -> $NapiPolyComm { + let g: $NapiG = $G::generator().into(); + $NapiPolyComm { + shifted: None, + unshifted: vec![g].into(), + } + } + fn vec_comm(num: usize) -> NapiVector<$NapiPolyComm> { + (0..num).map(|_| comm()).collect() + } + + NapiPlonkVerifierIndex { + domain: NapiDomain { + log_size_of_group: 1, + group_gen: $F::one().into(), + }, + max_poly_size: 0, + public_: 0, + prev_challenges: 0, + srs: $NapiSrs(Arc::new(SRS::create(0))), + evals: NapiPlonkVerificationEvals { + sigma_comm: vec_comm(PERMUTS), + coefficients_comm: vec_comm(COLUMNS), + generic_comm: comm(), + psm_comm: comm(), + complete_add_comm: comm(), + mul_comm: comm(), + emul_comm: comm(), + endomul_scalar_comm: comm(), + xor_comm: None, + range_check0_comm: None, + range_check1_comm: None, + foreign_field_add_comm: None, + foreign_field_mul_comm: None, + rot_comm: None, + }, + shifts: + NapiShifts { + s0: $F::one().into(), + s1: $F::one().into(), + s2: $F::one().into(), + s3: $F::one().into(), + s4: $F::one().into(), + s5: $F::one().into(), + s6: $F::one().into(), + }, + lookup_index: None, + zk_rows: 3, + } + } + + #[napi(js_name = [])] + pub fn []( + x: NapiPlonkVerifierIndex, + ) -> NapiPlonkVerifierIndex { + x.clone() + } + + } + } +} + +pub mod fp { + use super::*; + use crate::{ + pasta_fp_plonk_index::WasmPastaFpPlonkIndex as NapiPastaFpPlonkIndex, + poly_comm::vesta::NapiFpPolyComm as NapiPolyComm, + srs::fp::NapiFpSrs, + wrappers::{field::NapiPastaFp, group::NapiGVesta}, + }; + use mina_curves::pasta::{Fp, Pallas as GAffineOther, Vesta as GAffine}; + + impl_verification_key!( + NapiGVesta, + GAffine, + NapiPastaFp, + Fp, + NapiPolyComm, + NapiFpSrs, + GAffineOther, + mina_poseidon::pasta::fp_kimchi, + mina_poseidon::pasta::fq_kimchi, + NapiPastaFpPlonkIndex, + fp + ); +} + +pub mod fq { + use super::*; + use crate::{ + pasta_fq_plonk_index::WasmPastaFqPlonkIndex as NapiPastaFqPlonkIndex, + poly_comm::pallas::NapiFqPolyComm as NapiPolyComm, + srs::fq::NapiFqSrs, + wrappers::{field::NapiPastaFq, group::NapiGPallas}, + }; + use mina_curves::pasta::{Fq, Pallas as GAffine, Vesta as GAffineOther}; + + impl_verification_key!( + NapiGPallas, + GAffine, + NapiPastaFq, + Fq, + NapiPolyComm, + NapiFqSrs, + GAffineOther, + mina_poseidon::pasta::fq_kimchi, + mina_poseidon::pasta::fp_kimchi, + NapiPastaFqPlonkIndex, + fq + ); +} diff --git a/plonk-napi/src/proof.rs b/plonk-napi/src/proof.rs index ec89df3bd15..e9c8b487b6f 100644 --- a/plonk-napi/src/proof.rs +++ b/plonk-napi/src/proof.rs @@ -1,117 +1,790 @@ -use crate::{ - tables::JsRuntimeTableFp, - vector::NapiVector, - wrappers::{field::NapiPastaFp, group::NapiGVesta}, - NapiFlatVector, WasmVecVecFp, +use crate::vector::{ + fp::WasmVecVecFp as NapiVecVecFp, fq::WasmVecVecFq as NapiVecVecFq, NapiFlatVector, NapiVector, }; +use ark_ec::AffineRepr; +use ark_ff::One; +use core::array; use kimchi::{ circuits::{lookup::runtime_tables::RuntimeTable, wires::COLUMNS}, 
groupmap::GroupMap, - proof::{ProverProof, RecursionChallenge}, + proof::{ + LookupCommitments, PointEvaluations, ProofEvaluations, ProverCommitments, ProverProof, + RecursionChallenge, + }, prover_index::ProverIndex, + verifier::{batch_verify, Context}, }; -use mina_curves::pasta::{Fp, Vesta as GAffine}; use mina_poseidon::{ constants::PlonkSpongeConstantsKimchi, sponge::{DefaultFqSponge, DefaultFrSponge}, }; use napi::{ - bindgen_prelude::{External, Result}, + bindgen_prelude::{sys, ClassInstance, External, FromNapiValue, Result}, Error as NapiError, Status, }; use napi_derive::napi; -use plonk_wasm::pasta_fp_plonk_index::WasmPastaFpPlonkIndex; +use paste::paste; +use poly_commitment::commitment::CommitmentCurve; // Import CommitmentCurve trait use poly_commitment::{ipa::OpeningProof, PolyComm, SRS}; -pub struct Proof { - pub proof: ProverProof>, - pub public_input: Vec, -} +macro_rules! impl_proof { + ( + $NapiG: ty, + $G: ty, + $NapiF: ty, + $F: ty, + $NapiPolyComm: ty, + $NapiSrs: ty, + $NapiIndex: ty, + $NapiVerifierIndex: ty, + $NapiVecVec: ty, + $field_name: ident, + $NapiRuntimeTable: ty, + ) => { + paste! { + // type NapiVecVecF = []; + + #[napi(js_name = [])] + #[derive(Clone)] + pub struct []( + ProofEvaluations>> + ); + + type NapiProofEvaluations = []; + + impl From for ProofEvaluations>> { + fn from(x: NapiProofEvaluations) -> Self { + x.0 + } + } + + impl From>>> for NapiProofEvaluations { + fn from(x: ProofEvaluations>>) -> Self { + Self(x) + } + } + + impl FromNapiValue for [] { + unsafe fn from_napi_value( + env: sys::napi_env, + napi_val: sys::napi_value, + ) -> Result { + let instance = ]> as FromNapiValue>::from_napi_value(env, napi_val)?; + Ok((*instance).clone()) + } + } + + #[napi(js_name = [])] + #[derive(Clone)] + pub struct [] + { + #[napi(skip)] + pub sorted: NapiVector<$NapiPolyComm>, + #[napi(skip)] + pub aggreg: $NapiPolyComm, + #[napi(skip)] + pub runtime: Option<$NapiPolyComm>, + } + + type NapiLookupCommitments = []; + + #[napi] + impl [] { + #[napi(constructor)] + pub fn new( + sorted: NapiVector<$NapiPolyComm>, + aggreg: $NapiPolyComm, + runtime: Option<$NapiPolyComm>) -> Self { + NapiLookupCommitments { sorted, aggreg, runtime } + } + + #[napi(getter)] + pub fn sorted(&self) -> NapiVector<$NapiPolyComm> { + self.sorted.clone() + } + + #[napi(getter)] + pub fn aggreg(&self) -> $NapiPolyComm { + self.aggreg.clone() + } + + #[napi(getter)] + pub fn runtime(&self) -> Option<$NapiPolyComm> { + self.runtime.clone() + } + + #[napi(setter, js_name="set_sorted")] + pub fn set_sorted(&mut self, s: NapiVector<$NapiPolyComm>) { + self.sorted = s + } + + #[napi(setter, js_name="set_aggreg")] + pub fn set_aggreg(&mut self, a: $NapiPolyComm) { + self.aggreg = a + } + + #[napi(setter, js_name="set_runtime")] + pub fn set_runtime(&mut self, r: Option<$NapiPolyComm>) { + self.runtime = r + } + } + + impl From> for NapiLookupCommitments { + fn from(x: LookupCommitments<$G>) -> Self { + NapiLookupCommitments { + sorted: x.sorted.into_iter().map(Into::into).collect(), + aggreg: x.aggreg.into(), + runtime: x.runtime.map(Into::into) + } + } + } + + impl From for LookupCommitments<$G> { + fn from(x: NapiLookupCommitments) -> Self { + LookupCommitments { + sorted: x.sorted.into_iter().map(Into::into).collect(), + aggreg: x.aggreg.into(), + runtime: x.runtime.map(Into::into) + } + } + } + + impl FromNapiValue for [] { + unsafe fn from_napi_value( + env: sys::napi_env, + napi_val: sys::napi_value, + ) -> Result { + let instance = ]> as FromNapiValue>::from_napi_value(env, 
napi_val)?; + Ok((*instance).clone()) + } + } -#[napi] -pub fn caml_pasta_fp_plonk_proof_create( - index: &External, - witness: WasmVecVecFp, - runtime_tables: NapiVector, - prev_challenges: NapiFlatVector, - prev_sgs: NapiVector, -) -> Result> { - let (maybe_proof, public_input) = { - index - .0 - .srs - .as_ref() - .get_lagrange_basis(index.0.as_ref().cs.domain.d1); - let prev: Vec> = { - if prev_challenges.is_empty() { - Vec::new() - } else { - let challenges_per_sg = prev_challenges.len() / prev_sgs.len(); - let d = prev_sgs - .into_iter() - .map(Into::::into) - .enumerate() - .map(|(i, sg)| { - let chals = prev_challenges - [(i * challenges_per_sg)..(i + 1) * challenges_per_sg] - .iter() - .cloned() - .map(Into::into) - .collect(); - let comm = PolyComm:: { chunks: vec![sg] }; - RecursionChallenge { chals, comm } + #[napi(js_name = [])] + #[derive(Clone)] + pub struct [] + { + #[napi(skip)] + pub w_comm: NapiVector<$NapiPolyComm>, + #[napi(skip)] + pub z_comm: $NapiPolyComm, + #[napi(skip)] + pub t_comm: $NapiPolyComm, + #[napi(skip)] + pub lookup: Option, + } + + type NapiProverCommitments = []; + + #[napi] + impl [] { + #[napi(constructor)] + pub fn new( + w_comm: NapiVector<$NapiPolyComm>, + z_comm: $NapiPolyComm, + t_comm: $NapiPolyComm, + lookup: Option + ) -> Self { + NapiProverCommitments { w_comm, z_comm, t_comm, lookup } + } + + #[napi(getter, js_name="w_comm")] + pub fn w_comm(&self) -> NapiVector<$NapiPolyComm> { + self.w_comm.clone() + } + #[napi(getter, js_name="z_comm")] + pub fn z_comm(&self) -> $NapiPolyComm { + self.z_comm.clone() + } + #[napi(getter, js_name="t_comm")] + pub fn t_comm(&self) -> $NapiPolyComm { + self.t_comm.clone() + } + + #[napi(getter)] + pub fn lookup(&self) -> Option { + self.lookup.clone() + } + + #[napi(setter, js_name="set_w_comm")] + pub fn set_w_comm(&mut self, x: NapiVector<$NapiPolyComm>) { + self.w_comm = x + } + #[napi(setter, js_name="set_z_comm")] + pub fn set_z_comm(&mut self, x: $NapiPolyComm) { + self.z_comm = x + } + #[napi(setter, js_name="set_t_comm")] + pub fn set_t_comm(&mut self, x: $NapiPolyComm) { + self.t_comm = x + } + + #[napi(setter, js_name="set_lookup")] + pub fn set_lookup(&mut self, l: Option) { + self.lookup = l + } + } + + impl From> for NapiProverCommitments { + fn from(x: ProverCommitments<$G>) -> Self { + NapiProverCommitments { + w_comm: x.w_comm.iter().map(Into::into).collect(), + z_comm: x.z_comm.into(), + t_comm: x.t_comm.into(), + lookup: x.lookup.map(Into::into), + } + } + } + + impl From for ProverCommitments<$G> { + fn from(x: NapiProverCommitments) -> Self { + ProverCommitments { + w_comm: core::array::from_fn(|i| (&x.w_comm[i]).into()), + z_comm: x.z_comm.into(), + t_comm: x.t_comm.into(), + lookup: x.lookup.map(Into::into), + } + } + } + + impl FromNapiValue for [] { + unsafe fn from_napi_value( + env: sys::napi_env, + napi_val: sys::napi_value, + ) -> Result { + let instance = ]> as FromNapiValue>::from_napi_value(env, napi_val)?; + Ok((*instance).clone()) + } + } + + #[napi(js_name = [] )] + #[derive(Clone, Debug)] + pub struct [] { + #[napi(skip)] + pub lr_0: NapiVector<$NapiG>, // vector of rounds of L commitments + #[napi(skip)] + pub lr_1: NapiVector<$NapiG>, // vector of rounds of R commitments + #[napi(skip)] + pub delta: $NapiG, + pub z1: $NapiF, + pub z2: $NapiF, + #[napi(skip)] + pub sg: $NapiG, + } + + type NapiOpeningProof = []; + + #[napi] + impl [] { + #[napi(constructor)] + pub fn new( + lr_0: NapiVector<$NapiG>, + lr_1: NapiVector<$NapiG>, + delta: $NapiG, + z1: $NapiF, + z2: $NapiF, 
+ sg: $NapiG) -> Self { + NapiOpeningProof { lr_0, lr_1, delta, z1, z2, sg } + } + + #[napi(getter, js_name="lr_0")] + pub fn lr_0(&self) -> NapiVector<$NapiG> { + self.lr_0.clone() + } + #[napi(getter, js_name="lr_1")] + pub fn lr_1(&self) -> NapiVector<$NapiG> { + self.lr_1.clone() + } + #[napi(getter)] + pub fn delta(&self) -> $NapiG { + self.delta.clone() + } + #[napi(getter)] + pub fn sg(&self) -> $NapiG { + self.sg.clone() + } + + #[napi(setter, js_name="set_lr_0")] + pub fn set_lr_0(&mut self, lr_0: NapiVector<$NapiG>) { + self.lr_0 = lr_0 + } + #[napi(setter, js_name="set_lr_1")] + pub fn set_lr_1(&mut self, lr_1: NapiVector<$NapiG>) { + self.lr_1 = lr_1 + } + #[napi(setter, js_name="set_delta")] + pub fn set_delta(&mut self, delta: $NapiG) { + self.delta = delta + } + #[napi(setter, js_name="set_sg")] + pub fn set_sg(&mut self, sg: $NapiG) { + self.sg = sg + } + } + + impl From for OpeningProof<$G> { + fn from(x: NapiOpeningProof) -> Self { + let NapiOpeningProof {lr_0, lr_1, delta, z1, z2, sg} = x; + OpeningProof { + lr: lr_0.into_iter().zip(lr_1.into_iter()).map(|(x, y)| (x.into(), y.into())).collect(), + delta: delta.into(), + z1: z1.into(), + z2: z2.into(), + sg: sg.into(), + } + } + } + + impl From> for NapiOpeningProof { + fn from(x: OpeningProof<$G>) -> Self { + let (lr_0, lr_1) = x.lr.clone().into_iter().map(|(x, y)| (x.into(), y.into())).unzip(); + NapiOpeningProof { + lr_0, + lr_1, + delta: x.delta.clone().into(), + z1: x.z1.into(), + z2: x.z2.into(), + sg: x.sg.clone().into(), + } + } + } + + impl FromNapiValue for [] { + unsafe fn from_napi_value( + env: sys::napi_env, + napi_val: sys::napi_value, + ) -> Result { + let instance = ]> as FromNapiValue>::from_napi_value(env, napi_val)?; + Ok((*instance).clone()) + } + } + + #[napi(js_name = [])] + #[derive(Clone)] + pub struct [] { + #[napi(skip)] + pub commitments: NapiProverCommitments, + #[napi(skip)] + pub proof: NapiOpeningProof, + // OCaml doesn't have sized arrays, so we have to convert to a tuple.. 
+ #[napi(skip)] + pub evals: NapiProofEvaluations, + pub ft_eval1: $NapiF, + #[napi(skip)] + pub public: NapiFlatVector<$NapiF>, + #[napi(skip)] + pub prev_challenges_scalars: Vec>, + #[napi(skip)] + pub prev_challenges_comms:NapiVector<$NapiPolyComm>, + } + + type NapiProverProof = []; + + impl From<&NapiProverProof> for (ProverProof<$G, OpeningProof<$G>>, Vec<$F>) { + fn from(x: &NapiProverProof) -> Self { + let proof = ProverProof { + commitments: x.commitments.clone().into(), + proof: x.proof.clone().into(), + evals: x.evals.clone().into(), + prev_challenges: + (&x.prev_challenges_scalars) + .into_iter() + .zip((&x.prev_challenges_comms).into_iter()) + .map(|(chals, comm)| { + RecursionChallenge { + chals: chals.clone(), + comm: comm.into(), + } + }) + .collect(), + ft_eval1: x.ft_eval1.clone().into() + }; + let public = x.public.clone().into_iter().map(Into::into).collect(); + (proof, public) + } + } + + impl From for (ProverProof<$G, OpeningProof<$G>>, Vec<$F>) { + fn from(x: NapiProverProof) -> Self { + let proof = ProverProof { + commitments: x.commitments.into(), + proof: x.proof.into(), + evals: x.evals.into(), + prev_challenges: + (x.prev_challenges_scalars) + .into_iter() + .zip((x.prev_challenges_comms).into_iter()) + .map(|(chals, comm)| { + RecursionChallenge { + chals: chals.into(), + comm: comm.into(), + } + }) + .collect(), + ft_eval1: x.ft_eval1.into() + }; + let public = x.public.into_iter().map(Into::into).collect(); + (proof, public) + } + } + + impl FromNapiValue for [] { + unsafe fn from_napi_value( + env: sys::napi_env, + napi_val: sys::napi_value, + ) -> Result { + let instance = ]> as FromNapiValue>::from_napi_value(env, napi_val)?; + Ok((*instance).clone()) + } + } + + #[napi] + impl [] { + #[napi(constructor)] + pub fn new( + commitments: NapiProverCommitments, // maybe remove FromNapiValue trait implementation and wrap it in External instead + proof: NapiOpeningProof, + evals: NapiProofEvaluations, // maybe remove FromNapiValue trait implementation and wrap it in External instead + ft_eval1: $NapiF, + public_: NapiFlatVector<$NapiF>, + prev_challenges_scalars: $NapiVecVec, + prev_challenges_comms: NapiVector<$NapiPolyComm>) -> Self { + NapiProverProof { + commitments, + proof, + evals, + ft_eval1, + public: public_, + prev_challenges_scalars: prev_challenges_scalars.0, + prev_challenges_comms, + } + } + + #[napi(getter)] + pub fn commitments(&self) -> NapiProverCommitments { + self.commitments.clone() + } + #[napi(getter)] + pub fn proof(&self) -> NapiOpeningProof { + self.proof.clone() + } + #[napi(getter)] + pub fn evals(&self) -> NapiProofEvaluations { + self.evals.clone() + } + #[napi(getter, js_name="public_")] + pub fn public_(&self) -> NapiFlatVector<$NapiF> { + self.public.clone() + } + #[napi(getter, js_name="prev_challenges_scalars")] + pub fn prev_challenges_scalars(&self) -> $NapiVecVec { + [](self.prev_challenges_scalars.clone()) + } + #[napi(getter, js_name="prev_challenges_comms")] + pub fn prev_challenges_comms(&self) -> NapiVector<$NapiPolyComm> { + self.prev_challenges_comms.clone() + } + + #[napi(setter, js_name="set_commitments")] + pub fn set_commitments(&mut self, commitments: NapiProverCommitments) { + self.commitments = commitments + } + #[napi(setter, js_name="set_proof")] + pub fn set_proof(&mut self, proof: NapiOpeningProof) { + self.proof = proof + } + #[napi(setter, js_name="set_evals")] + pub fn set_evals(&mut self, evals: NapiProofEvaluations) { + self.evals = evals + } + #[napi(setter, js_name="set_public_")] + pub fn 
set_public_(&mut self, public_: NapiFlatVector<$NapiF>) { + self.public = public_ + } + #[napi(setter, js_name="set_prev_challenges_scalars")] + pub fn set_prev_challenges_scalars(&mut self, prev_challenges_scalars: $NapiVecVec) { + self.prev_challenges_scalars = prev_challenges_scalars.0 + } + #[napi(setter, js_name="set_prev_challenges_comms")] + pub fn set_prev_challenges_comms(&mut self, prev_challenges_comms: NapiVector<$NapiPolyComm>) { + self.prev_challenges_comms = prev_challenges_comms + } + + #[napi] + #[allow(deprecated)] + pub fn serialize(&self) -> String { + let (proof, _public_input) = self.into(); + let serialized = rmp_serde::to_vec(&proof).unwrap(); + // Deprecated used on purpose: updating this leads to a bug in o1js + base64::encode(serialized) + } + } + + #[derive(Clone)] + pub struct [] { + pub proof: ProverProof<$G, OpeningProof<$G>>, + pub public_input: Vec<$F>, + } + + type NapiProofF = []; + // type JsRuntimeTableF = []; + + #[napi(js_name = [<"caml_pasta_" $field_name:snake "_plonk_proof_create">])] + pub fn []( + index: &External<$NapiIndex>, + witness: $NapiVecVec, + runtime_tables: NapiVector<$NapiRuntimeTable>, + prev_challenges: NapiFlatVector<$NapiF>, + prev_sgs: NapiVector<$NapiG>, + ) -> Result> { + let (maybe_proof, public_input) = { + index + .0 + .srs + .as_ref() + .get_lagrange_basis(index.0.as_ref().cs.domain.d1); + let prev: Vec> = { + if prev_challenges.is_empty() { + Vec::new() + } else { + let challenges_per_sg = prev_challenges.len() / prev_sgs.len(); + let d = prev_sgs + .into_iter() + .map(Into::<$G>::into) + .enumerate() + .map(|(i, sg)| { + let chals = prev_challenges + [(i * challenges_per_sg)..(i + 1) * challenges_per_sg] + .iter() + .cloned() + .map(Into::into) + .collect(); + let comm = PolyComm::<$G> { chunks: vec![sg] }; + RecursionChallenge { chals, comm } + }) + .collect(); + d + } + }; + let rust_runtime_tables: Vec> = runtime_tables.into_iter().map(Into::into).collect(); + + let witness: [Vec<_>; COLUMNS] = witness + .0 + .try_into() + .expect("the witness should be a column of 15 vectors"); + + let index: &ProverIndex<$G, OpeningProof<$G>> = &index.0.as_ref(); + + let public_input = witness[0][0..index.cs.public].to_vec(); + + // Release the runtime lock so that other threads can run using it while we generate the proof. 
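// NOTE (editor): the "runtime lock" comment above appears to be carried over from the
// wasm bindings. The napi path below simply sets up the group map and calls
// `ProverProof::create_recursive` with the default Fq/Fr Poseidon sponges
// (`PlonkSpongeConstantsKimchi`) and `OsRng` as the randomness source.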
+ let group_map = GroupMap::<_>::setup(); + let maybe_proof = ProverProof::create_recursive::< + DefaultFqSponge<_, PlonkSpongeConstantsKimchi>, + DefaultFrSponge<_, PlonkSpongeConstantsKimchi>, + _, + >( + &group_map, + witness, + &rust_runtime_tables, + index, + prev, + None, + &mut rand::rngs::OsRng, + ); + (maybe_proof, public_input) + }; + + match maybe_proof { + Ok(proof) => Ok(External::new([] { + proof, + public_input, + })), + Err(err) => Err(NapiError::new(Status::GenericFailure, err.to_string())), + } + } + + #[napi(js_name = [<"caml_pasta_" $field_name:snake "_plonk_proof_verify">])] + pub fn []( + index: $NapiVerifierIndex, + proof: &External, + ) -> bool { + let group_map = <$G as CommitmentCurve>::Map::setup(); + let verifier_index = &index.into(); + let (proof, public_input) = (&proof.as_ref().proof, &proof.as_ref().public_input); + batch_verify::< + $G, + DefaultFqSponge<_, PlonkSpongeConstantsKimchi>, + DefaultFrSponge<_, PlonkSpongeConstantsKimchi>, + OpeningProof<$G> + >( + &group_map, + &[Context { verifier_index, proof, public_input }] + ).is_ok() + } + + + #[napi(js_name = [<"caml_pasta_" $field_name:snake "_plonk_proof_batch_verify">])] + pub fn []( + indexes: NapiVector<$NapiVerifierIndex>, + proofs: &External>, + ) -> bool { + let indexes: Vec<_> = indexes.into_iter().map(Into::into).collect(); + let proofs_ref = proofs.as_ref(); + + if indexes.len() != proofs_ref.len() { + return false; + } + + let contexts: Vec<_> = indexes + .iter() + .zip(proofs_ref.iter()) + .map(|(index, proof)| Context { + verifier_index: index, + proof: &proof.proof, + public_input: &proof.public_input, }) .collect(); - d - } - }; - - let rust_runtime_tables: Vec> = runtime_tables - .into_iter() - .flat_map(|table| { - let JsRuntimeTableFp { id, data } = table; - data.into_iter().map(move |column| { - let values = NapiFlatVector::::from_bytes(column.to_vec()) - .into_iter() - .map(Into::into) - .collect(); - RuntimeTable { id, data: values } - }) - }) - .collect(); - - let witness: [Vec<_>; COLUMNS] = witness - .0 - .try_into() - .expect("the witness should be a column of 15 vectors"); - - let index: &ProverIndex> = &index.0.as_ref(); - - let public_input = witness[0][0..index.cs.public].to_vec(); - - // Release the runtime lock so that other threads can run using it while we generate the proof. 
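// NOTE (editor): in the new bindings above, single-proof verification is implemented as
// `batch_verify` over a one-element `Context` slice, and the batch entry point returns
// `false` up front when the number of verifier indexes and proofs differ, so the later
// `zip` can never silently drop unpaired proofs.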
- let group_map = GroupMap::<_>::setup(); - let maybe_proof = ProverProof::create_recursive::< - DefaultFqSponge<_, PlonkSpongeConstantsKimchi>, - DefaultFrSponge<_, PlonkSpongeConstantsKimchi>, - _, - >( - &group_map, - witness, - &rust_runtime_tables, - index, - prev, - None, - &mut rand::rngs::OsRng, - ); - (maybe_proof, public_input) + + let group_map = GroupMap::<_>::setup(); + + batch_verify::< + $G, + DefaultFqSponge<_, PlonkSpongeConstantsKimchi>, + DefaultFrSponge<_, PlonkSpongeConstantsKimchi>, + OpeningProof<$G> + >(&group_map, &contexts) + .is_ok() + } + + #[napi(js_name = [<"caml_pasta_" $field_name:snake "_plonk_proof_dummy">])] + pub fn []() -> External { + fn comm() -> PolyComm<$G> { + let g = $G::generator(); + PolyComm { + chunks: vec![g, g, g], + } + } + + let prev = RecursionChallenge { + chals: vec![$F::one(), $F::one()], + comm: comm(), + }; + let prev_challenges = vec![prev.clone(), prev.clone(), prev.clone()]; + + let g = $G::generator(); + let proof = OpeningProof { + lr: vec![(g, g), (g, g), (g, g)], + z1: $F::one(), + z2: $F::one(), + delta: g, + sg: g, + }; + let eval = || PointEvaluations { + zeta: vec![$F::one()], + zeta_omega: vec![$F::one()], + }; + let evals = ProofEvaluations { + w: core::array::from_fn(|_| eval()), + coefficients: core::array::from_fn(|_| eval()), + z: eval(), + s: core::array::from_fn(|_| eval()), + generic_selector: eval(), + poseidon_selector: eval(), + complete_add_selector: eval(), + mul_selector: eval(), + emul_selector: eval(), + endomul_scalar_selector: eval(), + range_check0_selector: None, + range_check1_selector: None, + foreign_field_add_selector: None, + foreign_field_mul_selector: None, + xor_selector: None, + rot_selector: None, + lookup_aggregation: None, + lookup_table: None, + lookup_sorted: array::from_fn(|_| None), + runtime_lookup_table: None, + runtime_lookup_table_selector: None, + xor_lookup_selector: None, + lookup_gate_lookup_selector: None, + range_check_lookup_selector: None, + foreign_field_mul_lookup_selector: None, + public: None, + }; + + let dlogproof = ProverProof { + commitments: ProverCommitments { + w_comm: core::array::from_fn(|_| comm()), + z_comm: comm(), + t_comm: comm(), + lookup: None, + }, + proof, + evals, + ft_eval1: $F::one(), + prev_challenges, + }; + + let public = vec![$F::one(), $F::one()]; + External::new(NapiProofF{proof: dlogproof, public_input: public}) + } + + #[napi(js_name = [<"caml_pasta_" $field_name:snake "_plonk_proof_deep_copy">])] + pub fn []( + x: &External + ) -> External { + External::new(x.as_ref().clone()) + } + } + }; +} + +pub mod fp { + use super::*; + use crate::{ + pasta_fp_plonk_index::WasmPastaFpPlonkIndex as NapiPastaFpPlonkIndex, + plonk_verifier_index::fp::NapiFpPlonkVerifierIndex, + poly_comm::vesta::NapiFpPolyComm, + wrappers::{field::NapiPastaFp, group::NapiGVesta, lookups::NapiFpRuntimeTable}, + }; + use mina_curves::pasta::{Fp, Vesta}; + + impl_proof!( + NapiGVesta, + Vesta, + NapiPastaFp, + Fp, + NapiFpPolyComm, + NapiSrs, + NapiPastaFpPlonkIndex, + NapiFpPlonkVerifierIndex, + NapiVecVecFp, + Fp, + NapiFpRuntimeTable, + ); +} + +pub mod fq { + use super::*; + use crate::{ + pasta_fq_plonk_index::WasmPastaFqPlonkIndex as NapiPastaFqPlonkIndex, + plonk_verifier_index::fq::NapiFqPlonkVerifierIndex, + poly_comm::pallas::NapiFqPolyComm, + wrappers::{field::NapiPastaFq, group::NapiGPallas, lookups::NapiFqRuntimeTable}, }; + use mina_curves::pasta::{Fq, Pallas}; - match maybe_proof { - Ok(proof) => Ok(External::new(Proof { - proof, - public_input, - })), - 
Err(err) => Err(NapiError::new(Status::GenericFailure, err.to_string())), - } + impl_proof!( + NapiGPallas, + Pallas, + NapiPastaFq, + Fq, + NapiFqPolyComm, + NapiSrs, + NapiPastaFqPlonkIndex, + NapiFqPlonkVerifierIndex, + NapiVecVecFq, + Fq, + NapiFqRuntimeTable, + ); } diff --git a/plonk-napi/src/srs.rs b/plonk-napi/src/srs.rs index dcfe743447a..3646d6fd92f 100644 --- a/plonk-napi/src/srs.rs +++ b/plonk-napi/src/srs.rs @@ -109,15 +109,15 @@ macro_rules! impl_srs { } #[napi(js_name = [<"caml_" $name:snake "_srs_create">])] - pub fn [](depth: i32) -> External<[]> { + pub fn [](depth: i32) -> [] { println!("Creating SRS with napi"); - External::new(Arc::new(SRS::<$G>::create(depth as usize)).into()) + Arc::new(SRS::<$G>::create(depth as usize)).into() } #[napi(js_name = [<"caml_" $name:snake "_srs_create_parallel">])] - pub fn [](depth: i32) -> External<[]> { + pub fn [](depth: i32) -> [] { println!("Creating SRS in parallel with napi"); - External::new(Arc::new(SRS::<$G>::create_parallel(depth as usize)).into()) + Arc::new(SRS::<$G>::create_parallel(depth as usize)).into() } #[napi(js_name = [<"caml_" $name:snake "_srs_add_lagrange_basis">])] @@ -164,6 +164,32 @@ macro_rules! impl_srs { } } + #[napi(js_name = [<"caml_" $name:snake "_srs_lagrange_commitment">])] + pub fn []( + srs: &[], + domain_size: i32, + i: i32, + ) -> Result<[<$NapiPolyComm>]> { + let x_domain = EvaluationDomain::<$F>::new(domain_size as usize) + .ok_or_else(invalid_domain_error)?; + let basis = srs.get_lagrange_basis(x_domain); + Ok(basis[i as usize].clone().into()) + } + + // Fake overwrite of the plonk_wasm equivalent, but without pointers. + // In the srs bindings, the same symbol will be used to either provide + // the pointer for wasm, or the actual data for napi + #[napi(js_name = [<"caml_" $name:snake "_srs_lagrange_commitments_whole_domain_ptr">])] + pub fn []( + srs: &External<[]>, + domain_size: i32, + ) -> Result> { + let domain = EvaluationDomain::<$F>::new(domain_size as usize) + .ok_or_else(invalid_domain_error)?; + let basis = srs.0.get_lagrange_basis(domain); + Ok(basis.iter().cloned().map(Into::into).collect()) + } + #[napi(js_name = [<"caml_" $name:snake "_srs_get">])] pub fn [](srs: &External<[]>) -> Vec<$NapiG> { println!("Getting SRS with napi"); @@ -173,14 +199,14 @@ macro_rules! impl_srs { } #[napi(js_name = [<"caml_" $name:snake "_srs_set">])] - pub fn [](h_and_gs: Vec<$NapiG>) -> External<[]> { + pub fn [](h_and_gs: Vec<$NapiG>) -> [] { println!("Setting SRS with napi"); let mut h_and_gs: Vec<$G> = h_and_gs.into_iter().map(Into::into).collect(); let h = h_and_gs.remove(0); let g = h_and_gs; let srs = SRS::<$G> { h, g, lagrange_bases: HashMapCache::new() }; - External::new(Arc::new(srs).into()) + Arc::new(srs).into() } #[napi(js_name = [<"caml_" $name:snake "_srs_maybe_lagrange_commitment">])] @@ -189,7 +215,7 @@ macro_rules! impl_srs { domain_size: i32, i: i32, ) -> Option<$NapiPolyComm> { - println!("Getting maybe lagrange commitment with napi"); + println!("Getting maybe lagrange commitment with napi rust"); if !srs .0 .lagrange_bases @@ -197,9 +223,10 @@ macro_rules! impl_srs { { return None; } + println!("Lagrange basis found in cache"); let basis = srs .get_lagrange_basis_from_domain_size(domain_size as usize); - Some(basis[i as usize].clone().into()) + basis.get(i as usize).map(Into::into) } #[napi(js_name = [<"caml_" $name:snake "_srs_set_lagrange_basis">])] @@ -223,15 +250,6 @@ macro_rules! 
impl_srs { Ok(basis.iter().cloned().map(Into::into).collect()) } - #[napi(js_name = [<"caml_" $name:snake "_srs_to_bytes">])] - pub fn [](srs: &[]) -> Result { - srs.serialize() - } - - #[napi(js_name = [<"caml_" $name:snake "_srs_from_bytes">])] - pub fn [](bytes: Uint8Array) -> Result<[]> { - []::deserialize(bytes) - } #[napi(js_name = [<"caml_" $name:snake "_srs_commit_evaluations">])] pub fn [](srs: &[], @@ -308,8 +326,8 @@ macro_rules! impl_srs { Ok(points.into_iter().map(Into::into).collect()) } - #[napi(js_name = [<"caml_" $name:snake "_srs_get_h">])] - pub fn h(srs: &[]) -> $NapiG { + #[napi(js_name = [<"caml_" $name:snake "_srs_h">])] + pub fn [](srs: &[]) -> $NapiG { println!("Getting h point with napi"); srs.h.into() } @@ -322,6 +340,11 @@ pub fn caml_fp_srs_to_bytes(srs: &fp::NapiFpSrs) -> Result { srs.serialize() } +#[napi(js_name = "caml_fp_srs_to_bytes_external")] +pub fn caml_fp_srs_to_bytes_external(srs: &External) -> Uint8Array { + caml_fp_srs_to_bytes(srs).expect("failed to serialize external fp srs") +} + #[napi(js_name = "caml_fp_srs_from_bytes")] pub fn caml_fp_srs_from_bytes(bytes: Uint8Array) -> Result { fp::NapiFpSrs::deserialize(bytes) @@ -338,6 +361,11 @@ pub fn caml_fq_srs_to_bytes(srs: &fq::NapiFqSrs) -> Result { srs.serialize() } +#[napi(js_name = "caml_fq_srs_to_bytes_external")] +pub fn caml_fq_srs_to_bytes_external(srs: &External) -> Uint8Array { + caml_fq_srs_to_bytes(srs).expect("failed to serialize external fq srs") +} + #[napi(js_name = "caml_fq_srs_from_bytes")] pub fn caml_fq_srs_from_bytes(bytes: Uint8Array) -> Result { fq::NapiFqSrs::deserialize(bytes) diff --git a/plonk-napi/src/vector.rs b/plonk-napi/src/vector.rs index 6883ccbe697..d429dc693fe 100644 --- a/plonk-napi/src/vector.rs +++ b/plonk-napi/src/vector.rs @@ -305,8 +305,9 @@ macro_rules! impl_vec_vec_fp { #[napi] impl $name { #[napi(constructor)] - pub fn create(capacity: i32) -> Self { - Self(Vec::with_capacity(capacity as usize)) + pub fn new(capacity: i32) -> Self { + println!("Creating napi VecVec"); + $name(Vec::with_capacity(capacity as usize)) } #[napi] @@ -373,18 +374,6 @@ macro_rules! impl_vec_vec_fp { value.0 } } - /* - impl FromNapiValue for $name { - unsafe fn from_napi_value( - env: sys::napi_env, - napi_val: sys::napi_value, - ) -> Result { - let instance = - as FromNapiValue>::from_napi_value(env, napi_val)?; - Ok((*instance).clone()) - } - } - */ }; } @@ -394,7 +383,7 @@ pub mod fp { use mina_curves::pasta::Fp; use napi_derive::napi; - impl_vec_vec_fp!(NapiVecVecFp, Fp, NapiPastaFp); + impl_vec_vec_fp!(WasmVecVecFp, Fp, NapiPastaFp); } pub mod fq { @@ -403,5 +392,5 @@ pub mod fq { use mina_curves::pasta::Fq; use napi_derive::napi; - impl_vec_vec_fp!(NapiVecVecFq, Fq, NapiPastaFq); + impl_vec_vec_fp!(WasmVecVecFq, Fq, NapiPastaFq); } diff --git a/plonk-napi/src/wrappers/field.rs b/plonk-napi/src/wrappers/field.rs index 022636f39d1..7ebc27763ef 100644 --- a/plonk-napi/src/wrappers/field.rs +++ b/plonk-napi/src/wrappers/field.rs @@ -113,6 +113,13 @@ macro_rules! 
impl_field_wrapper { ::to_napi_value(env, buffer) } } + + impl<'a> ToNapiValue for &'a mut $name { + unsafe fn to_napi_value(env: sys::napi_env, val: Self) -> Result { + let buffer = Buffer::from(val.to_bytes()); + ::to_napi_value(env, buffer) + } + } }; } diff --git a/plonk-napi/src/wrappers/lookups.rs b/plonk-napi/src/wrappers/lookups.rs index a33fcb4f617..50cb7a82a9c 100644 --- a/plonk-napi/src/wrappers/lookups.rs +++ b/plonk-napi/src/wrappers/lookups.rs @@ -1,5 +1,5 @@ use crate::{ - vector::{fp::NapiVecVecFp, fq::NapiVecVecFq}, + vector::{fp::WasmVecVecFp as NapiVecVecFp, fq::WasmVecVecFq as NapiVecVecFq, NapiFlatVector}, wrappers::field::{NapiPastaFp, NapiPastaFq}, }; use kimchi::circuits::lookup::{ @@ -110,17 +110,17 @@ impl From for LookupInfo { // ----------------- macro_rules! impl_lookup_wrappers { - ($name:ident, $field:ty, $NapiF:ty, $vec_vec:ty) => { + ($field_name:ident, $field:ty, $NapiF:ty, $vec_vec:ty) => { paste! { - #[napi(js_name = [<"WasmPasta" $name:camel "LookupTable">])] + #[napi(js_name = [<"WasmPasta" $field_name:camel "LookupTable">])] #[derive(Clone)] - pub struct [] { + pub struct [] { id: i32, data: $vec_vec, } #[napi] - impl [] { + impl [] { #[napi(constructor)] pub fn new(id: i32, data: $vec_vec) -> Self { Self { @@ -150,7 +150,7 @@ macro_rules! impl_lookup_wrappers { } } - impl From> for [] { + impl From> for [] { fn from(value: LookupTable<$field>) -> Self { Self { id: value.id, @@ -159,8 +159,8 @@ macro_rules! impl_lookup_wrappers { } } - impl From<[]> for LookupTable<$field> { - fn from(value: []) -> Self { + impl From<[]> for LookupTable<$field> { + fn from(value: []) -> Self { Self { id: value.id, data: value.data.into(), @@ -168,15 +168,15 @@ macro_rules! impl_lookup_wrappers { } } - #[napi(js_name = [<"WasmPasta" $name:camel "RuntimeTableCfg">])] + #[napi(js_name = [<"WasmPasta" $field_name:camel "RuntimeTableCfg">])] #[derive(Clone)] - pub struct [] { + pub struct [] { id: i32, first_column: Vec<$field>, } #[napi] - impl [] { + impl [] { #[napi(constructor)] pub fn new(id: i32, first_column: Uint8Array) -> Result { let bytes = first_column.as_ref().to_vec(); @@ -208,7 +208,7 @@ macro_rules! impl_lookup_wrappers { } } - impl From> for [] { + impl From> for [] { fn from(value: RuntimeTableCfg<$field>) -> Self { Self { id: value.id, @@ -217,8 +217,8 @@ macro_rules! impl_lookup_wrappers { } } - impl From<[]> for RuntimeTableCfg<$field> { - fn from(value: []) -> Self { + impl From<[]> for RuntimeTableCfg<$field> { + fn from(value: []) -> Self { Self { id: value.id, first_column: value.first_column, @@ -226,60 +226,27 @@ macro_rules! 
impl_lookup_wrappers { } } - #[napi(js_name = [<"Wasm" $name:camel "RuntimeTable">])] + #[napi(object, js_name = [<"Wasm" $field_name:camel "RuntimeTable">])] #[derive(Clone)] - pub struct [] { - id: i32, - data: Vec<$field>, - } - - #[napi] - impl [] { - #[napi(constructor)] - pub fn new(id: i32, data: Uint8Array) -> Result { - let bytes = data.as_ref().to_vec(); - let elements: Vec<$field> = FlatVector::<$NapiF>::from_bytes(bytes) - .into_iter() - .map(Into::into) - .collect(); - Ok(Self { id, data: elements }) - } - - #[napi(getter)] - pub fn id(&self) -> i32 { - self.id - } - - #[napi(setter)] - pub fn set_id(&mut self, id: i32) { - self.id = id; - } - - #[napi(getter)] - pub fn data(&self) -> Result { - let mut bytes = Vec::with_capacity(self.data.len() * <$NapiF>::FLATTENED_SIZE); - for value in &self.data { - let element = <$NapiF>::from(*value); - bytes.extend(element.flatten()); - } - Ok(Uint8Array::from(bytes)) - } + pub struct [] { + pub id: i32, + pub data: NapiFlatVector<$NapiF>, } - impl From> for [] { + impl From> for [] { fn from(value: RuntimeTable<$field>) -> Self { Self { id: value.id, - data: value.data, + data: value.data.into_iter().map(Into::into).collect(), } } } - impl From<[]> for RuntimeTable<$field> { - fn from(value: []) -> Self { + impl From<[]> for RuntimeTable<$field> { + fn from(value: []) -> Self { Self { id: value.id, - data: value.data, + data: value.data.into_iter().map(Into::into).collect(), } } } diff --git a/plonk-napi/src/wrappers/wires.rs b/plonk-napi/src/wrappers/wires.rs index 9a2f67ea11a..399de6ba9e7 100644 --- a/plonk-napi/src/wrappers/wires.rs +++ b/plonk-napi/src/wrappers/wires.rs @@ -1,4 +1,4 @@ -use kimchi::circuits::wires::Wire as KimchiWire; +use kimchi::circuits::wires::Wire; use napi_derive::napi; #[napi(object)] @@ -8,17 +8,17 @@ pub struct NapiWire { pub col: u32, } -impl From for KimchiWire { +impl From for Wire { fn from(value: NapiWire) -> Self { - KimchiWire { + Wire { row: value.row as usize, col: value.col as usize, } } } -impl From for NapiWire { - fn from(value: KimchiWire) -> Self { +impl From for NapiWire { + fn from(value: Wire) -> Self { Self { row: value.row as u32, col: value.col as u32,
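// ---------------------------------------------------------------------------
// Editor's sketch (not part of the patch): a minimal smoke test for the fp proof
// entry points generated by `impl_proof!` above. The crate name `plonk_napi` and the
// re-exported function names follow lib.rs; treat the exact import paths as assumptions.
#[cfg(test)]
mod proof_smoke_test {
    use plonk_napi::{caml_pasta_fp_plonk_proof_deep_copy, caml_pasta_fp_plonk_proof_dummy};

    #[test]
    fn dummy_proof_deep_copy_round_trip() {
        // Build a throwaway proof entirely on the Rust side (no SRS or prover index needed).
        let dummy = caml_pasta_fp_plonk_proof_dummy();
        // Deep copy returns an independent External handle wrapping a clone of the data.
        let copy = caml_pasta_fp_plonk_proof_deep_copy(&dummy);
        assert_eq!(dummy.public_input, copy.public_input);
    }
}
// ---------------------------------------------------------------------------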