
Commit b39a038

MarcosNicolau, MauroToscano, and JuArce authored
feat(aggregation_mode): aggregate proofs in chunks (#1896)
Co-authored-by: MauroFab <[email protected]>
Co-authored-by: Julian Arce <[email protected]>
1 parent 12e0642 commit b39a038

38 files changed (+1053, -591 lines)

Makefile

Lines changed: 1 addition & 3 deletions
@@ -218,9 +218,7 @@ install_aggregation_mode: ## Install the aggregation mode with proving enabled
 	cargo install --path aggregation_mode --features prove,gpu --bin proof_aggregator
 
 agg_mode_write_program_ids: ## Write proof aggregator zkvm programs ids
-	@cd aggregation_mode && \
-	cargo run --release --bin write_program_image_id_vk_hash
-
+	@cd aggregation_mode && ./scripts/build_programs.sh
 
 _AGGREGATOR_:

aggregation_mode/README.md

Lines changed: 14 additions & 3 deletions
@@ -61,11 +61,22 @@ make start_proof_aggregator_gpu AGGREGATOR="sp1|risc0"
 1. Get latest aggregated proof:
 
 ```shell
-cast call 0xc351628EB244ec633d5f21fBD6621e1a683B1181 "currentAggregatedProofNumber()" --rpc-url http://localhost:8545
+cast logs 0xc351628EB244ec633d5f21fBD6621e1a683B1181 'AggregatedProofVerified(bytes32,bytes32)' --from-block 0 --to-block latest --rpc-url http://localhost:8545
 ```
 
-2. Get aggregated proof info:
+## Compiling programs
+
+Whenever any of the programs change, you must recompile them and update their corresponding program ids in `aggregation_mode/program_ids.json`. To do this, run the following command:
 
 ```shell
-cast call 0xc351628EB244ec633d5f21fBD6621e1a683B1181 "getAggregatedProof(uint64)(uint8,bytes32,bytes32)" <AGG_PROOF_NUMBER> --rpc-url http://localhost:8545
+make agg_mode_write_program_ids
 ```
+
+We use Docker to produce deterministic builds, so the program ids are the same on all systems.
+
+### Updating the program id in the `AlignedProofAggregationService` contract
+
+If the program ids have changed, you will also need to update them in the `AlignedProofAggregationService` contract:
+
+- Risc0: call the `setRisc0AggregatorProgramImageId` method with the value of `risc0_root_aggregator_image_id` from `aggregation_mode/program_ids.json`.
+- SP1: call the `setSP1AggregatorProgramVKHash` method with the value of `sp1_root_aggregator_vk_hash` from `aggregation_mode/program_ids.json`.
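
For reference, a minimal sketch of pulling those two values out of `aggregation_mode/program_ids.json` before making the contract calls. It assumes the file is a flat JSON object with string values (the exact layout is not shown in this diff) and requires the `serde_json` crate:

// Illustrative helper: print the program ids referenced in the README so they
// can be pasted into the AlignedProofAggregationService setter calls.
// Assumes program_ids.json is a flat JSON object; adjust the keys if the real
// layout differs.
use std::fs;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let raw = fs::read_to_string("aggregation_mode/program_ids.json")?;
    let ids: serde_json::Value = serde_json::from_str(&raw)?;

    for key in ["risc0_root_aggregator_image_id", "sp1_root_aggregator_vk_hash"] {
        println!("{key} = {}", ids[key]);
    }
    Ok(())
}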

aggregation_mode/aggregation_programs/Cargo.toml

Lines changed: 0 additions & 1 deletion
@@ -8,4 +8,3 @@ members = ["sp1", "risc0"]
 [patch.crates-io]
 # Adding RISC Zero keccak precompile support
 tiny-keccak = { git = "https://github.com/risc0/tiny-keccak", tag = "tiny-keccak/v2.0.2-risczero.0" }
-

aggregation_mode/aggregation_programs/risc0/Cargo.toml

Lines changed: 6 additions & 2 deletions
@@ -16,5 +16,9 @@ lambdaworks-crypto = { git = "https://github.com/lambdaclass/lambdaworks.git", r
 path = "./src/lib.rs"
 
 [[bin]]
-name = "risc0_aggregator_program"
-path = "./src/main.rs"
+name = "risc0_user_proofs_aggregator_program"
+path = "./src/user_proofs_aggregator_main.rs"
+
+[[bin]]
+name = "risc0_chunk_aggregator_program"
+path = "./src/chunk_aggregator_main.rs"

aggregation_mode/aggregation_programs/risc0/src/chunk_aggregator_main.rs

Lines changed: 50 additions & 0 deletions
#![no_main]

use lambdaworks_crypto::merkle_tree::merkle::MerkleTree;
use risc0_aggregation_program::{ChunkAggregatorInput, Hash32};
use risc0_zkvm::guest::env;

risc0_zkvm::guest::entry!(main);

// Generated with `make agg_mode_write_program_ids` and copied from program_ids.json
pub const USER_PROOFS_AGGREGATOR_PROGRAM_IMAGE_ID: [u8; 32] = [
    83, 145, 39, 254, 127, 217, 146, 127, 63, 217, 69, 190, 11, 204, 170, 138, 215, 35, 175, 246,
    209, 154, 52, 243, 85, 37, 177, 147, 22, 153, 155, 156,
];

fn main() {
    let input = env::read::<ChunkAggregatorInput>();

    let mut leaves: Vec<Hash32> = vec![];

    for (proof, leaves_commitment) in input.proofs_and_leaves_commitment {
        let image_id = proof.image_id;

        // Ensure the aggregated chunk originates from the user proofs aggregator program.
        // This validation step guarantees that the proof was genuinely verified
        // by this program. Without this check, a different program using the
        // same public inputs could bypass verification.
        assert!(image_id == USER_PROOFS_AGGREGATOR_PROGRAM_IMAGE_ID);

        // Ensure the committed root matches the root of the provided leaves
        let merkle_root: [u8; 32] = proof
            .public_inputs
            .clone()
            .try_into()
            .expect("Public input to be the chunk merkle root");

        let leaves_commitment: Vec<Hash32> =
            leaves_commitment.into_iter().map(|el| Hash32(el)).collect();
        let merkle_tree = MerkleTree::<Hash32>::build(&leaves_commitment).unwrap();
        assert!(merkle_root == merkle_tree.root);

        leaves.extend(leaves_commitment);

        // Finally, verify the proof
        env::verify(image_id, &proof.public_inputs).expect("proof to be verified correctly");
    }

    let merkle_tree = MerkleTree::<Hash32>::build(&leaves).unwrap();

    env::commit_slice(&merkle_tree.root);
}
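
To make the proof flow concrete: the chunk receipts that `env::verify` consumes inside this guest must be supplied by the host as assumptions. The following is a rough host-side sketch only, assuming the guest types above are exported by `risc0_aggregation_program` with public `image_id`/`public_inputs` fields, the standard `risc0_zkvm` 1.x host API, and `anyhow` for errors; the repository's actual aggregator code may differ.

// Hypothetical host-side driver for the chunk aggregator guest above (illustrative only).
use risc0_aggregation_program::{ChunkAggregatorInput, Risc0ImageIdAndPubInputs};
use risc0_zkvm::{default_prover, ExecutorEnv, Prover, Receipt};

fn prove_chunk_aggregation(
    user_proofs_image_id: [u8; 32],
    chunk_receipts: Vec<(Receipt, Vec<[u8; 32]>)>,
    chunk_aggregator_elf: &[u8],
) -> anyhow::Result<Receipt> {
    // Pair each chunk proof's (image id, journal bytes) with the leaf commitments
    // of the user proofs it aggregated, mirroring ChunkAggregatorInput.
    let input = ChunkAggregatorInput {
        proofs_and_leaves_commitment: chunk_receipts
            .iter()
            .map(|(receipt, leaves)| {
                (
                    Risc0ImageIdAndPubInputs {
                        image_id: user_proofs_image_id,
                        public_inputs: receipt.journal.bytes.clone(),
                    },
                    leaves.clone(),
                )
            })
            .collect(),
    };

    let mut builder = ExecutorEnv::builder();
    for (receipt, _) in chunk_receipts {
        // env::verify inside the guest resolves against these assumptions.
        builder.add_assumption(receipt);
    }
    builder.write(&input)?;
    let env = builder.build()?;

    Ok(default_prover().prove(env, chunk_aggregator_elf)?.receipt)
}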

aggregation_mode/aggregation_programs/risc0/src/lib.rs

Lines changed: 53 additions & 1 deletion
@@ -67,6 +67,58 @@ impl IsMerkleTreeBackend for Risc0ImageIdAndPubInputs {
 }
 
 #[derive(Serialize, Deserialize)]
-pub struct Input {
+pub struct Hash32(pub [u8; 32]);
+
+// Note: this MerkleTreeBackend is defined in three locations:
+// - aggregation_mode/src/aggregators/mod.rs
+// - aggregation_mode/src/aggregators/risc0_aggregator.rs
+// - aggregation_mode/src/aggregators/sp1_aggregator.rs
+// All 3 implementations should match.
+// The definition in aggregators/mod.rs supports taking proofs from both Risc0 and SP1.
+// Additionally, a version that takes the leaves as already hashed data is defined in:
+// - batcher/aligned-sdk/src/sdk/aggregation.rs
+// That one is used in the SDK, since the user may not have access to the proofs
+// that they didn't submit.
+impl IsMerkleTreeBackend for Hash32 {
+    type Data = Hash32;
+    type Node = [u8; 32];
+
+    fn hash_data(leaf: &Self::Data) -> Self::Node {
+        leaf.0
+    }
+
+    /// Computes a commutative Keccak256 hash, ensuring H(a, b) == H(b, a).
+    ///
+    /// See: https://docs.openzeppelin.com/contracts/5.x/api/utils#Hashes
+    ///
+    /// Source: https://github.com/OpenZeppelin/openzeppelin-contracts/blob/master/contracts/utils/cryptography/Hashes.sol#L17-L19
+    ///
+    /// Compliant with OpenZeppelin's `processProofCalldata` function from MerkleProof.sol.
+    ///
+    /// See: https://docs.openzeppelin.com/contracts/5.x/api/utils#MerkleProof
+    ///
+    /// Source: https://github.com/OpenZeppelin/openzeppelin-contracts/blob/master/contracts/utils/cryptography/MerkleProof.sol#L114-L128
+    fn hash_new_parent(child_1: &Self::Node, child_2: &Self::Node) -> Self::Node {
+        let mut hasher = Keccak::v256();
+        if child_1 < child_2 {
+            hasher.update(child_1);
+            hasher.update(child_2);
+        } else {
+            hasher.update(child_2);
+            hasher.update(child_1);
+        }
+        let mut hash = [0u8; 32];
+        hasher.finalize(&mut hash);
+        hash
+    }
+}
+
+#[derive(Serialize, Deserialize)]
+pub struct UserProofsAggregatorInput {
     pub proofs_image_id_and_pub_inputs: Vec<Risc0ImageIdAndPubInputs>,
 }
+
+#[derive(Serialize, Deserialize)]
+pub struct ChunkAggregatorInput {
+    pub proofs_and_leaves_commitment: Vec<(Risc0ImageIdAndPubInputs, Vec<[u8; 32]>)>,
+}
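
Because the backend above uses the same sorted-pair Keccak256 as OpenZeppelin's `Hashes.commutativeKeccak256`, an inclusion proof against the committed root can be checked by folding the sibling path exactly as `MerkleProof.processProof` does. A minimal standalone sketch of that check, using the `tiny-keccak` crate directly (illustrative only, not the SDK's actual helper):

// Commutative Keccak256 Merkle-proof check, mirroring hash_new_parent above and
// OpenZeppelin's MerkleProof.processProof. Illustrative only.
use tiny_keccak::{Hasher, Keccak};

fn commutative_keccak(a: &[u8; 32], b: &[u8; 32]) -> [u8; 32] {
    let mut hasher = Keccak::v256();
    // Hash the pair in sorted order so H(a, b) == H(b, a).
    let (lo, hi) = if a < b { (a, b) } else { (b, a) };
    hasher.update(lo);
    hasher.update(hi);
    let mut out = [0u8; 32];
    hasher.finalize(&mut out);
    out
}

/// Folds the sibling hashes over the leaf and compares against the expected root.
fn verify_inclusion(leaf: [u8; 32], path: &[[u8; 32]], root: [u8; 32]) -> bool {
    let computed = path
        .iter()
        .fold(leaf, |acc, sibling| commutative_keccak(&acc, sibling));
    computed == root
}

fn main() {
    // Tiny two-leaf tree: root = H(leaf_a, leaf_b), so leaf_a's proof is [leaf_b].
    let (leaf_a, leaf_b) = ([1u8; 32], [2u8; 32]);
    let root = commutative_keccak(&leaf_a, &leaf_b);
    assert!(verify_inclusion(leaf_a, &[leaf_b], root));
    assert!(verify_inclusion(leaf_b, &[leaf_a], root));
    println!("inclusion proofs verified");
}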

aggregation_mode/aggregation_programs/risc0/src/main.rs

Lines changed: 0 additions & 24 deletions
This file was deleted.

aggregation_mode/aggregation_programs/risc0/src/user_proofs_aggregator_main.rs

Lines changed: 22 additions & 0 deletions
#![no_main]

use lambdaworks_crypto::merkle_tree::merkle::MerkleTree;
use risc0_aggregation_program::{Risc0ImageIdAndPubInputs, UserProofsAggregatorInput};
use risc0_zkvm::guest::env;

risc0_zkvm::guest::entry!(main);

fn main() {
    let input = env::read::<UserProofsAggregatorInput>();

    for proof in &input.proofs_image_id_and_pub_inputs {
        env::verify(proof.image_id.clone(), &proof.public_inputs)
            .expect("proof to be verified correctly");
    }

    let merkle_tree =
        MerkleTree::<Risc0ImageIdAndPubInputs>::build(&input.proofs_image_id_and_pub_inputs)
            .unwrap();

    env::commit_slice(&merkle_tree.root);
}
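
The `Risc0ImageIdAndPubInputs` type consumed above (and used as the Merkle leaf type) is defined in the crate's `lib.rs`, outside the hunk shown earlier. Judging only from how its fields are used in the two guest programs, its shape is roughly the following; the exact field types are an inference, not the crate's verbatim definition:

use serde::{Deserialize, Serialize};

// Rough shape inferred from usage: image_id is compared against a [u8; 32]
// constant and passed to env::verify, while public_inputs holds the journal
// bytes and is converted into a 32-byte merkle root by the chunk aggregator.
#[derive(Serialize, Deserialize)]
pub struct Risc0ImageIdAndPubInputs {
    pub image_id: [u8; 32],
    pub public_inputs: Vec<u8>,
}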

aggregation_mode/aggregation_programs/sp1/Cargo.toml

Lines changed: 6 additions & 2 deletions
@@ -16,5 +16,9 @@ lambdaworks-crypto = { git = "https://github.com/lambdaclass/lambdaworks.git", r
 path = "./src/lib.rs"
 
 [[bin]]
-name = "sp1_aggregator_program"
-path = "./src/main.rs"
+name = "sp1_user_proofs_aggregator_program"
+path = "./src/user_proofs_aggregator_main.rs"
+
+[[bin]]
+name = "sp1_chunk_aggregator_program"
+path = "./src/chunk_aggregator_main.rs"

aggregation_mode/aggregation_programs/sp1/src/chunk_aggregator_main.rs

Lines changed: 50 additions & 0 deletions
#![no_main]
sp1_zkvm::entrypoint!(main);

use lambdaworks_crypto::merkle_tree::merkle::MerkleTree;
use sha2::{Digest, Sha256};
use sp1_aggregation_program::{ChunkAggregatorInput, Hash32};

// Generated with `make agg_mode_write_program_ids` and copied from program_ids.json
pub const USER_PROOFS_AGGREGATOR_PROGRAM_VK_HASH: [u32; 8] = [
    684911098, 272834847, 1514192666, 1104122402, 1853418149, 488480116, 2005139814, 1901405498,
];

pub fn main() {
    let input = sp1_zkvm::io::read::<ChunkAggregatorInput>();

    let mut leaves = vec![];

    // Verify the proofs.
    for (proof, leaves_commitment) in input.proofs_and_leaves_commitment {
        let vkey = proof.vk;
        let public_values_digest = Sha256::digest(&proof.public_inputs);

        // Ensure the aggregated chunk originates from the user proofs aggregation program.
        // This validation step guarantees that the proof was genuinely verified
        // by this program. Without this check, a different program using the
        // same public inputs could bypass verification.
        assert!(proof.vk == USER_PROOFS_AGGREGATOR_PROGRAM_VK_HASH);

        let merkle_root: [u8; 32] = proof
            .public_inputs
            .clone()
            .try_into()
            .expect("Public input to be the hash of the chunk tree");

        // Reconstruct the merkle tree and verify that the roots match
        let leaves_commitment: Vec<Hash32> =
            leaves_commitment.into_iter().map(|el| Hash32(el)).collect();
        let merkle_tree: MerkleTree<Hash32> = MerkleTree::build(&leaves_commitment).unwrap();
        assert!(merkle_tree.root == merkle_root);

        leaves.extend(leaves_commitment);

        sp1_zkvm::lib::verify::verify_sp1_proof(&vkey, &public_values_digest.into());
    }

    // Finally, compute the final merkle root with all the leaves
    let merkle_tree: MerkleTree<Hash32> = MerkleTree::build(&leaves).unwrap();

    sp1_zkvm::io::commit_slice(&merkle_tree.root);
}
