
Commit 7030ea6

feat: migrate to lambdaworks merkle tree in zkvms

1 parent: 99b447d
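
Both aggregation guest programs follow the same pattern after this change: the leaf type (Risc0ImageIdAndPubInputs / SP1VkAndPubInputs) implements lambdaworks' IsMerkleTreeBackend trait, and the hand-rolled combine_hashes / compute_merkle_root helpers are replaced by MerkleTree::build. A minimal sketch of the pattern, using the RISC0 types from the diffs below (illustrative only, not part of the commit):

// Sketch only: how the migrated guests derive the root they commit.
// Assumes the risc0_aggregation_program crate from this repo is available.
use lambdaworks_crypto::merkle_tree::merkle::MerkleTree;
use risc0_aggregation_program::Risc0ImageIdAndPubInputs;

fn aggregated_root(leaves: &[Risc0ImageIdAndPubInputs]) -> [u8; 32] {
    // The leaf type doubles as the tree backend via its IsMerkleTreeBackend impl,
    // so build() hashes each leaf with commitment() and combines siblings with keccak256.
    let tree: MerkleTree<Risc0ImageIdAndPubInputs> = MerkleTree::build(leaves).unwrap();
    tree.root
}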

7 files changed: +163 −74 lines

aggregation_mode/aggregation_programs/Cargo.lock

Lines changed: 114 additions & 1 deletion
(Generated lockfile; diff not rendered.)

aggregation_mode/aggregation_programs/risc0/Cargo.toml

Lines changed: 1 addition & 0 deletions
@@ -10,6 +10,7 @@ serde_json = "1.0.117"
 # Unstable feature is necessary for tiny-keccak patch, see: https://dev.risczero.com/api/zkvm/precompiles#stability
 risc0-zkvm = { git = "https://github.com/risc0/risc0", tag="v2.0.0", default-features = false, features = ["unstable", "std"] }
 tiny-keccak = { version = "2.0.2", features = ["keccak"] }
+lambdaworks-crypto = { version = "0.12.0", features = ["serde"]}

 [lib]
 path = "./src/lib.rs"

aggregation_mode/aggregation_programs/risc0/src/lib.rs

Lines changed: 21 additions & 1 deletion
@@ -1,7 +1,8 @@
+use lambdaworks_crypto::merkle_tree::traits::IsMerkleTreeBackend;
 use serde::{Deserialize, Serialize};
 use tiny_keccak::{Hasher, Keccak};

-#[derive(Serialize, Deserialize)]
+#[derive(Serialize, Deserialize, Default)]
 pub struct Risc0ImageIdAndPubInputs {
     pub image_id: [u8; 32],
     pub public_inputs: Vec<u8>,
@@ -21,6 +22,25 @@ impl Risc0ImageIdAndPubInputs {
     }
 }

+impl IsMerkleTreeBackend for Risc0ImageIdAndPubInputs {
+    type Data = Risc0ImageIdAndPubInputs;
+    type Node = [u8; 32];
+
+    fn hash_data(leaf: &Self::Data) -> Self::Node {
+        leaf.commitment()
+    }
+
+    fn hash_new_parent(child_1: &Self::Node, child_2: &Self::Node) -> Self::Node {
+        let mut hasher = Keccak::v256();
+        hasher.update(child_1);
+        hasher.update(child_2);
+
+        let mut hash = [0u8; 32];
+        hasher.finalize(&mut hash);
+        hash
+    }
+}
+
 #[derive(Serialize, Deserialize)]
 pub struct Input {
     pub proofs_image_id_and_pub_inputs: Vec<Risc0ImageIdAndPubInputs>,
Lines changed: 4 additions & 37 deletions
@@ -1,46 +1,12 @@
 #![no_main]

+use lambdaworks_crypto::merkle_tree::merkle::MerkleTree;
 use risc0_aggregation_program::{Input, Risc0ImageIdAndPubInputs};
 use risc0_zkvm::guest::env;
 use tiny_keccak::{Hasher, Keccak};

 risc0_zkvm::guest::entry!(main);

-fn combine_hashes(hash_a: &[u8; 32], hash_b: &[u8; 32]) -> [u8; 32] {
-    let mut hasher = Keccak::v256();
-    hasher.update(hash_a);
-    hasher.update(hash_b);
-
-    let mut hash = [0u8; 32];
-    hasher.finalize(&mut hash);
-    hash
-}
-
-/// Computes the merkle root for the given proofs
-fn compute_merkle_root(proofs: &[Risc0ImageIdAndPubInputs]) -> [u8; 32] {
-    let mut leaves: Vec<[u8; 32]> = proofs
-        .chunks(2)
-        .map(|chunk| match chunk {
-            [a, b] => combine_hashes(&a.commitment(), &b.commitment()),
-            [a] => combine_hashes(&a.commitment(), &a.commitment()),
-            _ => panic!("Unexpected chunk leaves"),
-        })
-        .collect();
-
-    while leaves.len() > 1 {
-        leaves = leaves
-            .chunks(2)
-            .map(|chunk| match chunk {
-                [a, b] => combine_hashes(&a, &b),
-                [a] => combine_hashes(&a, &a),
-                _ => panic!("Unexpected chunk size in leaves"),
-            })
-            .collect()
-    }
-
-    leaves[0]
-}
-
 fn main() {
     let input = env::read::<Input>();

@@ -52,7 +18,8 @@ fn main() {
         env::verify(image_id.clone(), &public_inputs).expect("proof to be verified correctly");
     }

-    let merkle_root = compute_merkle_root(&input.proofs_image_id_and_pub_inputs);
+    let merkle_tree: MerkleTree<Risc0ImageIdAndPubInputs> =
+        MerkleTree::build(&input.proofs_image_id_and_pub_inputs).unwrap();

-    env::commit_slice(&merkle_root);
+    env::commit_slice(&merkle_tree.root);
 }
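
The removed combine_hashes helper and the new backend's hash_new_parent both compute keccak256(child_1 || child_2). A small equivalence check against the types in this diff (illustrative only; whether full roots also match for odd leaf counts depends on how lambdaworks pads the leaf set, which is not shown here):

// Illustrative check (not in the commit): the backend's parent hash reproduces
// the removed combine_hashes helper, i.e. keccak256(child_1 || child_2).
use lambdaworks_crypto::merkle_tree::traits::IsMerkleTreeBackend;
use risc0_aggregation_program::Risc0ImageIdAndPubInputs;
use tiny_keccak::{Hasher, Keccak};

fn old_combine_hashes(hash_a: &[u8; 32], hash_b: &[u8; 32]) -> [u8; 32] {
    let mut hasher = Keccak::v256();
    hasher.update(hash_a);
    hasher.update(hash_b);
    let mut hash = [0u8; 32];
    hasher.finalize(&mut hash);
    hash
}

#[test]
fn parent_hash_matches_old_helper() {
    let (a, b) = ([1u8; 32], [2u8; 32]);
    assert_eq!(
        Risc0ImageIdAndPubInputs::hash_new_parent(&a, &b),
        old_combine_hashes(&a, &b)
    );
}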

aggregation_mode/aggregation_programs/sp1/Cargo.toml

Lines changed: 1 addition & 0 deletions
@@ -10,6 +10,7 @@ sha2 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", tag = "sha2-v
 sha3 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", tag = "sha3-v0.10.8-patch-v1" }
 serde = { version = "1.0.203", features = ["derive"] }
 serde_json = "1.0.117"
+lambdaworks-crypto = { version = "0.12.0", features = ["serde"]}

 [lib]
 path = "./src/lib.rs"

aggregation_mode/aggregation_programs/sp1/src/lib.rs

Lines changed: 18 additions & 1 deletion
@@ -1,7 +1,8 @@
+use lambdaworks_crypto::merkle_tree::traits::IsMerkleTreeBackend;
 use serde::{Deserialize, Serialize};
 use sha3::{Digest, Keccak256};

-#[derive(Serialize, Deserialize)]
+#[derive(Serialize, Deserialize, Default)]
 pub struct SP1VkAndPubInputs {
     pub vk: [u32; 8],
     pub public_inputs: Vec<u8>,
@@ -18,6 +19,22 @@ impl SP1VkAndPubInputs {
     }
 }

+impl IsMerkleTreeBackend for SP1VkAndPubInputs {
+    type Data = SP1VkAndPubInputs;
+    type Node = [u8; 32];
+
+    fn hash_data(leaf: &Self::Data) -> Self::Node {
+        leaf.hash()
+    }
+
+    fn hash_new_parent(child_1: &Self::Node, child_2: &Self::Node) -> Self::Node {
+        let mut hasher = Keccak256::new();
+        hasher.update(child_1);
+        hasher.update(child_2);
+        hasher.finalize().into()
+    }
+}
+
 #[derive(Serialize, Deserialize)]
 pub struct Input {
     pub proofs_vk_and_pub_inputs: Vec<SP1VkAndPubInputs>,
Lines changed: 4 additions & 34 deletions
@@ -1,42 +1,11 @@
 #![no_main]
 sp1_zkvm::entrypoint!(main);

+use lambdaworks_crypto::merkle_tree::merkle::MerkleTree;
 use sha2::{Digest, Sha256};
 use sha3::Keccak256;
 use sp1_aggregation_program::{Input, SP1VkAndPubInputs};

-fn combine_hashes(hash_a: &[u8; 32], hash_b: &[u8; 32]) -> [u8; 32] {
-    let mut hasher = Keccak256::new();
-    hasher.update(hash_a);
-    hasher.update(hash_b);
-    hasher.finalize().into()
-}
-
-/// Computes the merkle root for the given proofs using the vk
-fn compute_merkle_root(proofs: &[SP1VkAndPubInputs]) -> [u8; 32] {
-    let mut leaves: Vec<[u8; 32]> = proofs
-        .chunks(2)
-        .map(|chunk| match chunk {
-            [a, b] => combine_hashes(&a.hash(), &b.hash()),
-            [a] => combine_hashes(&a.hash(), &a.hash()),
-            _ => panic!("Unexpected chunk leaves"),
-        })
-        .collect();
-
-    while leaves.len() > 1 {
-        leaves = leaves
-            .chunks(2)
-            .map(|chunk| match chunk {
-                [a, b] => combine_hashes(&a, &b),
-                [a] => combine_hashes(&a, &a),
-                _ => panic!("Unexpected chunk size in leaves"),
-            })
-            .collect()
-    }
-
-    leaves[0]
-}
-
 pub fn main() {
     let input = sp1_zkvm::io::read::<Input>();

@@ -48,7 +17,8 @@ pub fn main() {
        sp1_zkvm::lib::verify::verify_sp1_proof(&vkey, &public_values_digest.into());
     }

-    let merkle_root = compute_merkle_root(&input.proofs_vk_and_pub_inputs);
+    let merkle_tree: MerkleTree<SP1VkAndPubInputs> =
+        MerkleTree::build(&input.proofs_vk_and_pub_inputs).unwrap();

-    sp1_zkvm::io::commit_slice(&merkle_root);
+    sp1_zkvm::io::commit_slice(&merkle_tree.root);
 }
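
As in the RISC0 guest, the SP1 guest now commits merkle_tree.root, a 32-byte keccak256 root. Anything holding the same SP1VkAndPubInputs leaves in the same order can rebuild the tree with the same backend and compare against the committed bytes. A hedged host-side sketch; committed_root is a hypothetical stand-in for however the guest's public values are decoded:

// Sketch only: recompute the root from the same leaves and compare it with the
// 32 bytes the guest committed via sp1_zkvm::io::commit_slice.
use lambdaworks_crypto::merkle_tree::merkle::MerkleTree;
use sp1_aggregation_program::SP1VkAndPubInputs;

fn root_matches(leaves: &[SP1VkAndPubInputs], committed_root: &[u8; 32]) -> bool {
    let tree: MerkleTree<SP1VkAndPubInputs> =
        MerkleTree::build(leaves).expect("tree should build from non-empty leaves");
    tree.root == *committed_root
}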
