Skip to content

Commit 2b8a848

Browse files
committed
starknet_committer: add index db
1 parent 1218f54 commit 2b8a848

File tree

6 files changed

+186
-31
lines changed

6 files changed

+186
-31
lines changed

crates/starknet_committer/src/db/facts_db/db.rs

Lines changed: 6 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -19,9 +19,8 @@ use starknet_patricia_storage::storage_trait::{DbHashMap, Storage};
1919
use crate::block_committer::input::{ReaderConfig, StarknetStorageValue};
2020
use crate::db::db_layout::NodeLayout;
2121
use crate::db::facts_db::types::{FactsDbInitialRead, FactsSubTree};
22-
use crate::db::forest_trait::{ForestReader, ForestWriter};
22+
use crate::db::forest_trait::{read_forest, ForestReader, ForestWriter};
2323
use crate::db::index_db::leaves::TrieType;
24-
use crate::db::trie_traversal::{create_classes_trie, create_contracts_trie, create_storage_tries};
2524
use crate::forest::filled_forest::FilledForest;
2625
use crate::forest::forest_errors::ForestResult;
2726
use crate::forest::original_skeleton_forest::{ForestSortedIndices, OriginalSkeletonForest};
@@ -80,34 +79,15 @@ impl<S: Storage> ForestReader<FactsDbInitialRead> for FactsDb<S> {
8079
forest_sorted_indices: &'a ForestSortedIndices<'a>,
8180
config: ReaderConfig,
8281
) -> ForestResult<(OriginalSkeletonForest<'a>, HashMap<NodeIndex, ContractState>)> {
83-
let (contracts_trie, original_contracts_trie_leaves) =
84-
create_contracts_trie::<ContractState, FactsNodeLayout>(
85-
&mut self.storage,
86-
context.0.contracts_trie_root_hash,
87-
forest_sorted_indices.contracts_trie_sorted_indices,
88-
)
89-
.await?;
90-
let storage_tries = create_storage_tries::<StarknetStorageValue, FactsNodeLayout>(
82+
read_forest::<S, StarknetStorageValue, ContractState, CompiledClassHash, FactsNodeLayout>(
9183
&mut self.storage,
84+
context,
9285
storage_updates,
93-
&original_contracts_trie_leaves,
94-
&config,
95-
&forest_sorted_indices.storage_tries_sorted_indices,
96-
)
97-
.await?;
98-
let classes_trie = create_classes_trie::<CompiledClassHash, FactsNodeLayout>(
99-
&mut self.storage,
10086
classes_updates,
101-
context.0.classes_trie_root_hash,
102-
&config,
103-
forest_sorted_indices.classes_trie_sorted_indices,
87+
forest_sorted_indices,
88+
config,
10489
)
105-
.await?;
106-
107-
Ok((
108-
OriginalSkeletonForest { classes_trie, contracts_trie, storage_tries },
109-
original_contracts_trie_leaves,
110-
))
90+
.await
11191
}
11292
}
11393

crates/starknet_committer/src/db/forest_trait.rs

Lines changed: 53 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,13 +3,16 @@ use std::collections::HashMap;
33
use async_trait::async_trait;
44
use serde::{Deserialize, Serialize};
55
use starknet_api::core::ContractAddress;
6-
use starknet_patricia::patricia_merkle_tree::node_data::leaf::LeafModifications;
6+
use starknet_patricia::patricia_merkle_tree::node_data::leaf::{Leaf, LeafModifications};
77
use starknet_patricia::patricia_merkle_tree::types::NodeIndex;
88
use starknet_patricia_storage::errors::SerializationResult;
9-
use starknet_patricia_storage::storage_trait::{DbHashMap, DbKey, DbValue};
9+
use starknet_patricia_storage::storage_trait::{DbHashMap, DbKey, DbValue, Storage};
1010

1111
use crate::block_committer::input::{InputContext, ReaderConfig, StarknetStorageValue};
12+
use crate::db::db_layout::NodeLayout;
13+
use crate::db::facts_db::types::FactsDbInitialRead;
1214
use crate::db::serde_db_utils::DbBlockNumber;
15+
use crate::db::trie_traversal::{create_classes_trie, create_contracts_trie, create_storage_tries};
1316
use crate::forest::filled_forest::FilledForest;
1417
use crate::forest::forest_errors::ForestResult;
1518
use crate::forest::original_skeleton_forest::{ForestSortedIndices, OriginalSkeletonForest};
@@ -65,6 +68,54 @@ pub trait ForestReader<I: InputContext> {
6568
) -> ForestResult<(OriginalSkeletonForest<'a>, HashMap<NodeIndex, ContractState>)>;
6669
}
6770

71+
/// Helper function containing layout-common read logic.
72+
pub(crate) async fn read_forest<'a, S, StorageLeaf, ContractStateLeaf, ClassesLeaf, Layout>(
73+
storage: &mut S,
74+
context: FactsDbInitialRead,
75+
storage_updates: &'a HashMap<ContractAddress, LeafModifications<StarknetStorageValue>>,
76+
classes_updates: &'a LeafModifications<CompiledClassHash>,
77+
forest_sorted_indices: &'a ForestSortedIndices<'a>,
78+
config: ReaderConfig,
79+
) -> ForestResult<(OriginalSkeletonForest<'a>, HashMap<NodeIndex, ContractState>)>
80+
where
81+
S: Storage,
82+
ContractStateLeaf: Leaf + Into<ContractState>,
83+
StorageLeaf: Leaf + From<StarknetStorageValue>,
84+
ClassesLeaf: Leaf + From<CompiledClassHash>,
85+
Layout: NodeLayout<'a, StorageLeaf>
86+
+ NodeLayout<'a, ContractStateLeaf>
87+
+ NodeLayout<'a, ClassesLeaf>,
88+
{
89+
let (contracts_trie, original_contracts_trie_leaves) =
90+
create_contracts_trie::<ContractStateLeaf, Layout>(
91+
storage,
92+
context.0.contracts_trie_root_hash,
93+
forest_sorted_indices.contracts_trie_sorted_indices,
94+
)
95+
.await?;
96+
let storage_tries = create_storage_tries::<StorageLeaf, Layout>(
97+
storage,
98+
storage_updates,
99+
&original_contracts_trie_leaves,
100+
&config,
101+
&forest_sorted_indices.storage_tries_sorted_indices,
102+
)
103+
.await?;
104+
let classes_trie = create_classes_trie::<ClassesLeaf, Layout>(
105+
storage,
106+
classes_updates,
107+
context.0.classes_trie_root_hash,
108+
&config,
109+
forest_sorted_indices.classes_trie_sorted_indices,
110+
)
111+
.await?;
112+
113+
Ok((
114+
OriginalSkeletonForest { classes_trie, contracts_trie, storage_tries },
115+
original_contracts_trie_leaves,
116+
))
117+
}
118+
68119
#[async_trait]
69120
pub trait ForestWriter: Send {
70121
/// Serializes a filled forest into a hash map.
Lines changed: 121 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,121 @@
1+
use std::collections::HashMap;
2+
3+
use async_trait::async_trait;
4+
use starknet_api::core::ContractAddress;
5+
use starknet_patricia::patricia_merkle_tree::filled_tree::tree::FilledTree;
6+
use starknet_patricia::patricia_merkle_tree::node_data::leaf::{Leaf, LeafModifications};
7+
use starknet_patricia::patricia_merkle_tree::types::NodeIndex;
8+
use starknet_patricia::patricia_merkle_tree::updated_skeleton_tree::hash_function::TreeHashFunction;
9+
use starknet_patricia_storage::db_object::{EmptyKeyContext, HasStaticPrefix};
10+
use starknet_patricia_storage::errors::SerializationResult;
11+
use starknet_patricia_storage::storage_trait::{DbHashMap, Storage};
12+
13+
use crate::block_committer::input::{ReaderConfig, StarknetStorageValue};
14+
use crate::db::db_layout::NodeLayout;
15+
use crate::db::facts_db::types::FactsDbInitialRead;
16+
use crate::db::forest_trait::{read_forest, ForestReader, ForestWriter};
17+
use crate::db::index_db::leaves::{
18+
IndexLayoutCompiledClassHash,
19+
IndexLayoutContractState,
20+
IndexLayoutStarknetStorageValue,
21+
TrieType,
22+
};
23+
use crate::db::index_db::types::{
24+
EmptyNodeData,
25+
IndexFilledNode,
26+
IndexLayoutSubTree,
27+
IndexNodeContext,
28+
};
29+
use crate::forest::filled_forest::FilledForest;
30+
use crate::forest::forest_errors::ForestResult;
31+
use crate::forest::original_skeleton_forest::{ForestSortedIndices, OriginalSkeletonForest};
32+
use crate::hash_function::hash::TreeHashFunctionImpl;
33+
use crate::patricia_merkle_tree::leaf::leaf_impl::ContractState;
34+
use crate::patricia_merkle_tree::types::CompiledClassHash;
35+
36+
/// Committer forest database using the index node layout, backed by a
/// generic [`Storage`] implementation.
pub struct IndexDb<S: Storage> {
    // Underlying key-value storage backend.
    storage: S,
}

impl<S: Storage> IndexDb<S> {
    /// Wraps the given storage backend in an `IndexDb`.
    pub fn new(storage: S) -> Self {
        Self { storage }
    }
}
45+
46+
/// Node layout in which filled nodes carry no auxiliary data
/// ([`EmptyNodeData`]) and the DB key context is derived from the trie type
/// alone.
pub struct IndexNodeLayout {}

impl<'a, L> NodeLayout<'a, L> for IndexNodeLayout
where
    // Leaves must be keyed by trie type and hashable with the committer's
    // tree hash function.
    L: Leaf + HasStaticPrefix<KeyContext = TrieType>,
    TreeHashFunctionImpl: TreeHashFunction<L>,
{
    type NodeData = EmptyNodeData;
    type NodeDbObject = IndexFilledNode<L>;
    type DeserializationContext = IndexNodeContext;
    type SubTree = IndexLayoutSubTree<'a>;

    // In the index layout, the key context is simply the trie the node
    // belongs to.
    fn generate_key_context(trie_type: TrieType) -> <L as HasStaticPrefix>::KeyContext {
        trie_type
    }
}
62+
63+
// TODO(Ariel): define an IndexDbInitialRead empty type, and check whether each tree is empty inside
64+
// create_xxx_trie.
65+
#[async_trait]
66+
impl<S: Storage> ForestReader<FactsDbInitialRead> for IndexDb<S> {
67+
/// Creates an original skeleton forest that includes the storage tries of the modified
68+
/// contracts, the classes trie and the contracts trie. Additionally, returns the original
69+
/// contract states that are needed to compute the contract state tree.
70+
async fn read<'a>(
71+
&mut self,
72+
context: FactsDbInitialRead,
73+
storage_updates: &'a HashMap<ContractAddress, LeafModifications<StarknetStorageValue>>,
74+
classes_updates: &'a LeafModifications<CompiledClassHash>,
75+
forest_sorted_indices: &'a ForestSortedIndices<'a>,
76+
config: ReaderConfig,
77+
) -> ForestResult<(OriginalSkeletonForest<'a>, HashMap<NodeIndex, ContractState>)> {
78+
read_forest::<
79+
S,
80+
IndexLayoutStarknetStorageValue,
81+
IndexLayoutContractState,
82+
IndexLayoutCompiledClassHash,
83+
IndexNodeLayout,
84+
>(
85+
&mut self.storage,
86+
context,
87+
storage_updates,
88+
classes_updates,
89+
forest_sorted_indices,
90+
config,
91+
)
92+
.await
93+
}
94+
}
95+
96+
#[async_trait]
97+
impl<S: Storage> ForestWriter for IndexDb<S> {
98+
fn serialize_forest(filled_forest: &FilledForest) -> SerializationResult<DbHashMap> {
99+
let mut serialized_forest = DbHashMap::new();
100+
101+
// TODO(Ariel): use a different key context when FilledForest is generic over leaf types.
102+
for tree in filled_forest.storage_tries.values() {
103+
serialized_forest.extend(tree.serialize(&EmptyKeyContext)?);
104+
}
105+
106+
// Contracts and classes tries.
107+
serialized_forest.extend(filled_forest.contracts_trie.serialize(&EmptyKeyContext)?);
108+
serialized_forest.extend(filled_forest.classes_trie.serialize(&EmptyKeyContext)?);
109+
110+
Ok(serialized_forest)
111+
}
112+
113+
async fn write_updates(&mut self, updates: DbHashMap) -> usize {
114+
let n_updates = updates.len();
115+
self.storage
116+
.mset(updates)
117+
.await
118+
.unwrap_or_else(|_| panic!("Write of {n_updates} new updates to storage failed"));
119+
n_updates
120+
}
121+
}

crates/starknet_committer/src/db/index_db/leaves.rs

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,9 @@ use crate::patricia_merkle_tree::types::CompiledClassHash;
1919

2020
// Wrap the leaves types so that we can implement the [DBObject] trait differently in index
2121
// layout.
22-
#[derive(Clone, Debug, Default, Eq, PartialEq, derive_more::AsRef, derive_more::From)]
22+
#[derive(
23+
Clone, Debug, Default, Eq, PartialEq, derive_more::AsRef, derive_more::From, derive_more::Into,
24+
)]
2325
pub struct IndexLayoutContractState(pub ContractState);
2426

2527
#[derive(Clone, Debug, Default, Eq, PartialEq, derive_more::AsRef, derive_more::From)]

crates/starknet_committer/src/db/index_db/mod.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
pub mod db;
12
pub mod leaves;
23
#[cfg(test)]
34
pub mod serde_tests;

crates/starknet_committer/src/db/index_db/types.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ impl From<HashOutput> for EmptyNodeData {
3737
}
3838
}
3939

40-
#[derive(PartialEq, Debug)]
40+
#[derive(PartialEq, Debug, derive_more::Into)]
4141
pub struct IndexFilledNode<L: Leaf>(pub FilledNode<L, EmptyNodeData>);
4242

4343
pub struct IndexNodeContext {
@@ -148,7 +148,7 @@ pub struct IndexLayoutSubTree<'a> {
148148
}
149149

150150
impl<'a> SubTreeTrait<'a> for IndexLayoutSubTree<'a> {
151-
type NodeData = ();
151+
type NodeData = EmptyNodeData;
152152
type NodeDeserializeContext = IndexNodeContext;
153153

154154
fn create(

0 commit comments

Comments (0)