
Commit 3c052e3 (parent 9f2ba58)

starknet_committer,starknet_patricia: layout-dependent FilledTree serialization
File tree: 7 files changed, +133 -57 lines
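
Across the diffs below, FilledTree serialization becomes layout-dependent: serialize is now called with the leaf type and node layout as explicit generic parameters plus a layout-specific key context, and the test helpers in external_test_utils.rs gain a Layout type parameter and a TrieType argument from which that context is derived. A condensed sketch of the two call shapes, copied from the database writers below (error handling and surrounding code elided):

    // Facts layout: every trie serializes under the empty key context.
    tree.serialize::<StarknetStorageValue, FactsNodeLayout>(&EmptyKeyContext)?;

    // Index layout: the key context identifies the trie being written.
    filled_forest
        .contracts_trie
        .serialize::<IndexLayoutContractState, IndexNodeLayout>(&TrieType::ContractsTrie)?;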

crates/starknet_committer/src/db/external_test_utils.rs

Lines changed: 23 additions & 12 deletions
@@ -2,6 +2,7 @@ use std::collections::HashMap;
 
 use serde_json::json;
 use starknet_api::hash::HashOutput;
+use starknet_patricia::db_layout::{NodeLayout, TrieType};
 use starknet_patricia::patricia_merkle_tree::filled_tree::tree::{FilledTree, FilledTreeImpl};
 use starknet_patricia::patricia_merkle_tree::node_data::leaf::{
     Leaf,
@@ -15,32 +16,32 @@ use starknet_patricia::patricia_merkle_tree::updated_skeleton_tree::tree::{
     UpdatedSkeletonTree,
     UpdatedSkeletonTreeImpl,
 };
-use starknet_patricia_storage::db_object::{EmptyKeyContext, HasStaticPrefix};
+use starknet_patricia_storage::db_object::HasStaticPrefix;
 use starknet_patricia_storage::map_storage::MapStorage;
 
-use crate::db::facts_db::db::FactsNodeLayout;
 use crate::db::trie_traversal::create_original_skeleton_tree;
 
-// TODO(Ariel, 14/12/2025): make this generic over the layout.
-pub async fn tree_computation_flow<L, TH>(
+pub async fn tree_computation_flow<L, Layout, TH>(
     leaf_modifications: LeafModifications<L>,
     storage: &mut MapStorage,
     root_hash: HashOutput,
     config: impl OriginalSkeletonTreeConfig,
+    key_context: &<L as HasStaticPrefix>::KeyContext,
 ) -> FilledTreeImpl<L>
 where
     TH: TreeHashFunction<L> + 'static,
-    L: Leaf + HasStaticPrefix<KeyContext = EmptyKeyContext> + 'static,
+    L: Leaf + 'static,
+    Layout: for<'a> NodeLayout<'a, L> + 'static,
 {
     let mut sorted_leaf_indices: Vec<NodeIndex> = leaf_modifications.keys().copied().collect();
     let sorted_leaf_indices = SortedLeafIndices::new(&mut sorted_leaf_indices);
-    let mut original_skeleton = create_original_skeleton_tree::<L, FactsNodeLayout>(
+    let mut original_skeleton = create_original_skeleton_tree::<L, Layout>(
         storage,
         root_hash,
         sorted_leaf_indices,
         &config,
         &leaf_modifications,
-        &EmptyKeyContext,
+        key_context,
     )
     .await
     .expect("Failed to create the original skeleton tree");
@@ -62,28 +63,38 @@ where
     )
     .expect("Failed to create the updated skeleton tree");
 
-    FilledTreeImpl::<L>::create_with_existing_leaves::<TH>(updated_skeleton, leaf_modifications)
+    FilledTreeImpl::create_with_existing_leaves::<TH>(updated_skeleton, leaf_modifications)
         .await
         .expect("Failed to create the filled tree")
 }
 
 pub async fn single_tree_flow_test<
-    L: Leaf + HasStaticPrefix<KeyContext = EmptyKeyContext> + 'static,
+    L: Leaf + 'static,
+    Layout: for<'a> NodeLayout<'a, L> + 'static,
     TH: TreeHashFunction<L> + 'static,
 >(
     leaf_modifications: LeafModifications<L>,
     storage: &mut MapStorage,
     root_hash: HashOutput,
     config: impl OriginalSkeletonTreeConfig,
+    // TODO(Ariel): Move trie_type to the config trait or to the leaf type.
+    trie_type: TrieType,
 ) -> String {
     // Move from leaf number to actual index.
     let leaf_modifications = leaf_modifications
         .into_iter()
         .map(|(k, v)| (NodeIndex::FIRST_LEAF + k, v))
        .collect::<LeafModifications<L>>();
 
-    let filled_tree =
-        tree_computation_flow::<L, TH>(leaf_modifications, storage, root_hash, config).await;
+    let key_context = Layout::generate_key_context(trie_type);
+    let filled_tree = tree_computation_flow::<L, Layout, TH>(
+        leaf_modifications,
+        storage,
+        root_hash,
+        config,
+        &key_context,
+    )
+    .await;
 
     let hash_result = filled_tree.get_root_hash();
 
@@ -92,7 +103,7 @@ pub async fn single_tree_flow_test<
     let json_hash = &json!(hash_result.0.to_hex_string());
     result_map.insert("root_hash", json_hash);
     // Serlialize the storage modifications.
-    let json_storage = &json!(filled_tree.serialize(&EmptyKeyContext).unwrap());
+    let json_storage = &json!(filled_tree.serialize::<L, Layout>(&key_context).unwrap());
     result_map.insert("storage_changes", json_storage);
     serde_json::to_string(&result_map).expect("serialization failed")
 }
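
The helpers above reduce the layout dependence to two steps: the layout derives the leaf's key context from the trie being written, and the filled tree is then serialized for that layout and context. A minimal sketch of that pair of calls, reusing the imports and bounds visible in this file; the wrapper function itself is illustrative and not part of the commit, and since serialize's return type is not shown in the diff the result is simply unwrapped and dropped:

    // Illustrative sketch only; bounds copied from tree_computation_flow above.
    fn serialize_for_layout<L, Layout>(filled_tree: &FilledTreeImpl<L>, trie_type: TrieType)
    where
        L: Leaf + 'static,
        Layout: for<'a> NodeLayout<'a, L> + 'static,
    {
        // The layout maps the trie identity to the leaf's key context...
        let key_context = Layout::generate_key_context(trie_type);
        // ...and the filled tree serializes its storage changes under that layout and context.
        let _storage_changes = filled_tree.serialize::<L, Layout>(&key_context).unwrap();
    }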

crates/starknet_committer/src/db/facts_db/db.rs

Lines changed: 12 additions & 3 deletions
@@ -152,12 +152,21 @@ impl<S: Storage> ForestWriter for FactsDb<S> {
 
         // Storage tries.
         for tree in filled_forest.storage_tries.values() {
-            serialized_forest.extend(tree.serialize(&EmptyKeyContext)?);
+            serialized_forest
+                .extend(tree.serialize::<StarknetStorageValue, FactsNodeLayout>(&EmptyKeyContext)?);
         }
 
         // Contracts and classes tries.
-        serialized_forest.extend(filled_forest.contracts_trie.serialize(&EmptyKeyContext)?);
-        serialized_forest.extend(filled_forest.classes_trie.serialize(&EmptyKeyContext)?);
+        serialized_forest.extend(
+            filled_forest
+                .contracts_trie
+                .serialize::<ContractState, FactsNodeLayout>(&EmptyKeyContext)?,
+        );
+        serialized_forest.extend(
+            filled_forest
+                .classes_trie
+                .serialize::<CompiledClassHash, FactsNodeLayout>(&EmptyKeyContext)?,
+        );
 
         Ok(serialized_forest)
     }

crates/starknet_committer/src/db/index_db/db.rs

Lines changed: 16 additions & 4 deletions
@@ -137,13 +137,25 @@ impl<S: Storage> ForestWriter for IndexDb<S> {
         let mut serialized_forest = DbHashMap::new();
 
         // TODO(Ariel): use a different key context when FilledForest is generic over leaf types.
-        for tree in filled_forest.storage_tries.values() {
-            serialized_forest.extend(tree.serialize(&EmptyKeyContext)?);
+        for (contract_address, tree) in &filled_forest.storage_tries {
+            serialized_forest.extend(
+                tree.serialize::<IndexLayoutStarknetStorageValue, IndexNodeLayout>(
+                    &TrieType::StorageTrie(*contract_address),
+                )?,
+            );
         }
 
         // Contracts and classes tries.
-        serialized_forest.extend(filled_forest.contracts_trie.serialize(&EmptyKeyContext)?);
-        serialized_forest.extend(filled_forest.classes_trie.serialize(&EmptyKeyContext)?);
+        serialized_forest.extend(
+            filled_forest
+                .contracts_trie
+                .serialize::<IndexLayoutContractState, IndexNodeLayout>(&TrieType::ContractsTrie)?,
+        );
+        serialized_forest.extend(
+            filled_forest.classes_trie.serialize::<IndexLayoutCompiledClassHash, IndexNodeLayout>(
+                &TrieType::ClassesTrie,
+            )?,
+        );
 
         Ok(serialized_forest)
     }
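
As the calls above show, under the index layout the key context passed to serialize is the trie identity itself, whereas the facts-layout writer passes &EmptyKeyContext for every trie. The three index-layout contexts used in this writer, with contract_address standing in as a placeholder for the storage trie's owning contract:

    // One context per contract storage trie, plus one each for the global tries.
    let storage_trie_context = TrieType::StorageTrie(contract_address);
    let contracts_trie_context = TrieType::ContractsTrie;
    let classes_trie_context = TrieType::ClassesTrie;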

crates/starknet_committer_and_os_cli/benches/main.rs

Lines changed: 12 additions & 8 deletions
@@ -12,6 +12,7 @@ use std::collections::HashMap;
 use criterion::{criterion_group, criterion_main, BatchSize, Criterion};
 use starknet_committer::block_committer::input::StarknetStorageValue;
 use starknet_committer::db::external_test_utils::tree_computation_flow;
+use starknet_committer::db::facts_db::db::FactsNodeLayout;
 use starknet_committer::hash_function::hash::TreeHashFunctionImpl;
 use starknet_committer::patricia_merkle_tree::tree::OriginalSkeletonTrieConfig;
 use starknet_committer_and_os_cli::committer_cli::commands::commit;
@@ -43,14 +44,17 @@ pub fn single_tree_flow_benchmark(criterion: &mut Criterion) {
     benchmark.iter_batched(
         || leaf_modifications.clone(),
         |leaf_modifications_input| {
-            runtime.block_on(
-                tree_computation_flow::<StarknetStorageValue, TreeHashFunctionImpl>(
-                    leaf_modifications_input,
-                    &mut storage,
-                    root_hash,
-                    OriginalSkeletonTrieConfig::new(false),
-                ),
-            );
+            runtime.block_on(tree_computation_flow::<
+                StarknetStorageValue,
+                FactsNodeLayout,
+                TreeHashFunctionImpl,
+            >(
+                leaf_modifications_input,
+                &mut storage,
+                root_hash,
+                OriginalSkeletonTrieConfig::new(false),
+                &EmptyKeyContext,
+            ));
         },
         BatchSize::LargeInput,
     )

crates/starknet_committer_and_os_cli/src/committer_cli/tests/python_tests.rs

Lines changed: 8 additions & 1 deletion
@@ -12,11 +12,13 @@ use starknet_committer::block_committer::input::{
 };
 use starknet_committer::block_committer::random_structs::DummyRandomValue;
 use starknet_committer::db::external_test_utils::single_tree_flow_test;
+use starknet_committer::db::facts_db::db::FactsNodeLayout;
 use starknet_committer::forest::filled_forest::FilledForest;
 use starknet_committer::hash_function::hash::{TreeHashFunctionImpl, CONTRACT_STATE_HASH_VERSION};
 use starknet_committer::patricia_merkle_tree::leaf::leaf_impl::ContractState;
 use starknet_committer::patricia_merkle_tree::tree::OriginalSkeletonTrieConfig;
 use starknet_committer::patricia_merkle_tree::types::CompiledClassHash;
+use starknet_patricia::db_layout::TrieType;
 use starknet_patricia::patricia_merkle_tree::filled_tree::node::FactDbFilledNode;
 use starknet_patricia::patricia_merkle_tree::node_data::inner_node::{
     BinaryData,
@@ -159,11 +161,16 @@ impl PythonTestRunner for CommitterPythonTestRunner {
             let TreeFlowInput { leaf_modifications, mut storage, root_hash } =
                 serde_json::from_str(Self::non_optional_input(input)?)?;
             // 2. Run the test.
-            let output = single_tree_flow_test::<StarknetStorageValue, TreeHashFunctionImpl>(
+            let output = single_tree_flow_test::<
+                StarknetStorageValue,
+                FactsNodeLayout,
+                TreeHashFunctionImpl,
+            >(
                leaf_modifications,
                &mut storage,
                root_hash,
                OriginalSkeletonTrieConfig::new(false),
+               &TrieType::StorageTrie(ContractAddress::from(1_u128)),
            )
            .await;
            // 3. Serialize and return output.

crates/starknet_committer_and_os_cli/src/committer_cli/tests/regression_tests.rs

Lines changed: 12 additions & 7 deletions
@@ -4,10 +4,13 @@ use std::fs;
 use clap::Error;
 use serde::{Deserialize, Deserializer};
 use serde_json::{Map, Value};
+use starknet_api::core::ContractAddress;
 use starknet_committer::block_committer::input::StarknetStorageValue;
 use starknet_committer::db::external_test_utils::single_tree_flow_test;
+use starknet_committer::db::facts_db::db::FactsNodeLayout;
 use starknet_committer::hash_function::hash::TreeHashFunctionImpl;
 use starknet_committer::patricia_merkle_tree::tree::OriginalSkeletonTrieConfig;
+use starknet_patricia::db_layout::TrieType;
 use tempfile::NamedTempFile;
 
 use crate::committer_cli::commands::commit;
@@ -106,13 +109,15 @@ pub async fn test_regression_single_tree() {
 
     let start = std::time::Instant::now();
     // Benchmark the single tree flow test.
-    let output = single_tree_flow_test::<StarknetStorageValue, TreeHashFunctionImpl>(
-        leaf_modifications,
-        &mut storage,
-        root_hash,
-        OriginalSkeletonTrieConfig::new(false),
-    )
-    .await;
+    let output =
+        single_tree_flow_test::<StarknetStorageValue, FactsNodeLayout, TreeHashFunctionImpl>(
+            leaf_modifications,
+            &mut storage,
+            root_hash,
+            OriginalSkeletonTrieConfig::new(false),
+            &TrieType::StorageTrie(ContractAddress::from(1_u128)),
+        )
+        .await;
     let execution_time = std::time::Instant::now() - start;
 
     // Assert correctness of the output of the single tree flow test.
