Skip to content

Commit f51d69b

Browse files
committed
starknet_committer,starknet_patricia: add serde errors
1 parent f08a6b1 commit f51d69b

File tree

16 files changed

+139
-60
lines changed

16 files changed

+139
-60
lines changed

crates/starknet_committer/src/db/external_test_utils.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -91,7 +91,7 @@ pub async fn single_tree_flow_test<
9191
let json_hash = &json!(hash_result.0.to_hex_string());
9292
result_map.insert("root_hash", json_hash);
9393
// Serialize the storage modifications.
94-
let json_storage = &json!(filled_tree.serialize(&EmptyKeyContext));
94+
let json_storage = &json!(filled_tree.serialize(&EmptyKeyContext).unwrap());
9595
result_map.insert("storage_changes", json_storage);
9696
serde_json::to_string(&result_map).expect("serialization failed")
9797
}

crates/starknet_committer/src/db/facts_db/create_facts_tree_test.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -226,7 +226,7 @@ async fn test_create_tree(
226226

227227
pub(crate) fn create_mock_leaf_entry(val: u128) -> (DbKey, DbValue) {
228228
let leaf = MockLeaf(Felt::from(val));
229-
(leaf.get_db_key(&EmptyKeyContext, &leaf.0.to_bytes_be()), leaf.serialize())
229+
(leaf.get_db_key(&EmptyKeyContext, &leaf.0.to_bytes_be()), leaf.serialize().unwrap())
230230
}
231231

232232
fn create_mock_leaf_modifications(

crates/starknet_committer/src/db/facts_db/db.rs

Lines changed: 14 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@ use starknet_patricia::patricia_merkle_tree::node_data::leaf::{Leaf, LeafModific
99
use starknet_patricia::patricia_merkle_tree::original_skeleton_tree::tree::OriginalSkeletonTreeImpl;
1010
use starknet_patricia::patricia_merkle_tree::types::{NodeIndex, SortedLeafIndices};
1111
use starknet_patricia_storage::db_object::EmptyKeyContext;
12+
use starknet_patricia_storage::errors::SerializationResult;
1213
use starknet_patricia_storage::map_storage::MapStorage;
1314
use starknet_patricia_storage::storage_trait::{
1415
create_db_key,
@@ -189,14 +190,19 @@ impl<S: Storage> ForestReader<FactsDbInitialRead> for FactsDb<S> {
189190

190191
#[async_trait]
191192
impl<S: Storage> ForestWriter for FactsDb<S> {
192-
fn serialize_forest(filled_forest: &FilledForest) -> DbHashMap {
193-
filled_forest
194-
.storage_tries
195-
.values()
196-
.flat_map(|tree| tree.serialize(&EmptyKeyContext).into_iter())
197-
.chain(filled_forest.contracts_trie.serialize(&EmptyKeyContext))
198-
.chain(filled_forest.classes_trie.serialize(&EmptyKeyContext))
199-
.collect()
193+
fn serialize_forest(filled_forest: &FilledForest) -> SerializationResult<DbHashMap> {
194+
let mut serialized_forest = DbHashMap::new();
195+
196+
// Storage tries.
197+
for tree in filled_forest.storage_tries.values() {
198+
serialized_forest.extend(tree.serialize(&EmptyKeyContext)?);
199+
}
200+
201+
// Contracts and classes tries.
202+
serialized_forest.extend(filled_forest.contracts_trie.serialize(&EmptyKeyContext)?);
203+
serialized_forest.extend(filled_forest.classes_trie.serialize(&EmptyKeyContext)?);
204+
205+
Ok(serialized_forest)
200206
}
201207

202208
async fn write_updates(&mut self, updates: DbHashMap) -> usize {

crates/starknet_committer/src/db/forest_trait.rs

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ use starknet_api::block::BlockNumber;
55
use starknet_api::core::ContractAddress;
66
use starknet_patricia::patricia_merkle_tree::node_data::leaf::LeafModifications;
77
use starknet_patricia::patricia_merkle_tree::types::NodeIndex;
8+
use starknet_patricia_storage::errors::SerializationResult;
89
use starknet_patricia_storage::storage_trait::{DbHashMap, DbKey, DbValue, Storage};
910

1011
use crate::block_committer::input::{InputContext, ReaderConfig, StarknetStorageValue};
@@ -63,28 +64,28 @@ pub trait ForestReader<I: InputContext> {
6364
#[async_trait]
6465
pub trait ForestWriter: ForestMetadata + Send {
6566
/// Serializes a filled forest into a hash map.
66-
fn serialize_forest(filled_forest: &FilledForest) -> DbHashMap;
67+
fn serialize_forest(filled_forest: &FilledForest) -> SerializationResult<DbHashMap>;
6768

6869
/// Writes the updates map to storage. Returns the number of new updates written to storage.
6970
async fn write_updates(&mut self, updates: DbHashMap) -> usize;
7071

7172
/// Writes the serialized filled forest to storage. Returns the number of new updates written to
7273
/// storage.
73-
async fn write(&mut self, filled_forest: &FilledForest) -> usize {
74-
let updates = Self::serialize_forest(filled_forest);
75-
self.write_updates(updates).await
74+
async fn write(&mut self, filled_forest: &FilledForest) -> SerializationResult<usize> {
75+
let updates = Self::serialize_forest(filled_forest)?;
76+
Ok(self.write_updates(updates).await)
7677
}
7778

7879
async fn write_with_metadata(
7980
&mut self,
8081
filled_forest: &FilledForest,
8182
metadata: HashMap<ForestMetadataType, DbValue>,
82-
) -> usize {
83-
let mut updates = Self::serialize_forest(filled_forest);
83+
) -> SerializationResult<usize> {
84+
let mut updates = Self::serialize_forest(filled_forest)?;
8485
for (metadata_type, value) in metadata {
8586
Self::insert_metadata(&mut updates, metadata_type, value);
8687
}
87-
self.write_updates(updates).await
88+
Ok(self.write_updates(updates).await)
8889
}
8990
}
9091

crates/starknet_committer/src/forest/skeleton_forest_test.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -51,12 +51,12 @@ macro_rules! compare_skeleton_tree {
5151

5252
pub(crate) fn create_storage_leaf_entry(val: u128) -> (DbKey, DbValue) {
5353
let leaf = StarknetStorageValue(Felt::from(val));
54-
(leaf.get_db_key(&EmptyKeyContext, &leaf.0.to_bytes_be()), leaf.serialize())
54+
(leaf.get_db_key(&EmptyKeyContext, &leaf.0.to_bytes_be()), leaf.serialize().unwrap())
5555
}
5656

5757
pub(crate) fn create_compiled_class_leaf_entry(val: u128) -> (DbKey, DbValue) {
5858
let leaf = CompiledClassHash(Felt::from(val));
59-
(leaf.get_db_key(&EmptyKeyContext, &leaf.0.to_bytes_be()), leaf.serialize())
59+
(leaf.get_db_key(&EmptyKeyContext, &leaf.0.to_bytes_be()), leaf.serialize().unwrap())
6060
}
6161

6262
pub(crate) fn create_contract_state_leaf_entry(val: u128) -> (DbKey, DbValue) {
@@ -66,7 +66,7 @@ pub(crate) fn create_contract_state_leaf_entry(val: u128) -> (DbKey, DbValue) {
6666
storage_root_hash: HashOutput(felt),
6767
class_hash: ClassHash(felt),
6868
};
69-
(leaf.get_db_key(&EmptyKeyContext, &felt.to_bytes_be()), leaf.serialize())
69+
(leaf.get_db_key(&EmptyKeyContext, &felt.to_bytes_be()), leaf.serialize().unwrap())
7070
}
7171

7272
// This test uses addition hash for simplicity (i.e hash(a,b) = a + b).

crates/starknet_committer/src/patricia_merkle_tree/leaf/leaf_serde.rs

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ use starknet_api::core::{ClassHash, Nonce};
55
use starknet_api::hash::HashOutput;
66
use starknet_patricia::patricia_merkle_tree::types::SubTreeHeight;
77
use starknet_patricia_storage::db_object::{DBObject, EmptyDeserializationContext};
8-
use starknet_patricia_storage::errors::DeserializationError;
8+
use starknet_patricia_storage::errors::{DeserializationError, SerializationResult};
99
use starknet_patricia_storage::storage_trait::{DbKeyPrefix, DbValue};
1010
use starknet_types_core::felt::Felt;
1111

@@ -34,8 +34,8 @@ impl DBObject for StarknetStorageValue {
3434
type DeserializeContext = EmptyDeserializationContext;
3535

3636
/// Serializes the value into a 32-byte vector.
37-
fn serialize(&self) -> DbValue {
38-
DbValue(self.0.to_bytes_be().to_vec())
37+
fn serialize(&self) -> SerializationResult<DbValue> {
38+
Ok(DbValue(self.0.to_bytes_be().to_vec()))
3939
}
4040

4141
fn deserialize(
@@ -50,9 +50,9 @@ impl DBObject for CompiledClassHash {
5050
type DeserializeContext = EmptyDeserializationContext;
5151

5252
/// Creates a json string describing the leaf and casts it into a byte vector.
53-
fn serialize(&self) -> DbValue {
53+
fn serialize(&self) -> SerializationResult<DbValue> {
5454
let json_string = format!(r#"{{"compiled_class_hash": "{}"}}"#, self.0.to_hex_string());
55-
DbValue(json_string.into_bytes())
55+
Ok(DbValue(json_string.into_bytes()))
5656
}
5757

5858
fn deserialize(
@@ -72,15 +72,15 @@ impl DBObject for ContractState {
7272
type DeserializeContext = EmptyDeserializationContext;
7373

7474
/// Creates a json string describing the leaf and casts it into a byte vector.
75-
fn serialize(&self) -> DbValue {
75+
fn serialize(&self) -> SerializationResult<DbValue> {
7676
let json_string = format!(
7777
r#"{{"contract_hash": "{}", "storage_commitment_tree": {{"root": "{}", "height": {}}}, "nonce": "{}"}}"#,
7878
fixed_hex_string_no_prefix(&self.class_hash.0),
7979
fixed_hex_string_no_prefix(&self.storage_root_hash.0),
8080
SubTreeHeight::ACTUAL_HEIGHT,
8181
self.nonce.0.to_hex_string(),
8282
);
83-
DbValue(json_string.into_bytes())
83+
Ok(DbValue(json_string.into_bytes()))
8484
}
8585

8686
fn deserialize(

crates/starknet_committer/src/patricia_merkle_tree/leaf/leaf_serde_test.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@ use crate::patricia_merkle_tree::types::CompiledClassHash;
3131
})
3232
]
3333
fn test_leaf_serde<L: Leaf + Eq + Debug>(#[case] leaf: L) {
34-
let serialized = leaf.serialize();
34+
let serialized = leaf.serialize().unwrap();
3535
let deserialized = L::deserialize(&serialized, &EmptyDeserializationContext).unwrap();
3636
assert_eq!(deserialized, leaf);
3737
}

crates/starknet_committer_and_os_cli/src/committer_cli/commands.rs

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,10 @@ pub async fn commit(input: FactsDbInputImpl, output_path: String, storage: MapSt
3838
let serialized_filled_forest = SerializedForest(
3939
commit_block(input, &mut facts_db, None).await.expect("Failed to commit the given block."),
4040
);
41-
let output = serialized_filled_forest.forest_to_output().await;
41+
let output = serialized_filled_forest
42+
.forest_to_output()
43+
.await
44+
.expect("Failed to serialize filled forest");
4245
write_to_file(&output_path, &output);
4346
info!(
4447
"Successfully committed given block. Updated Contracts Trie Root Hash: {:?},

crates/starknet_committer_and_os_cli/src/committer_cli/filled_tree_output/filled_forest.rs

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@ use serde::Serialize;
22
use starknet_committer::db::facts_db::db::FactsDb;
33
use starknet_committer::db::forest_trait::ForestWriter;
44
use starknet_committer::forest::filled_forest::FilledForest;
5+
use starknet_patricia_storage::errors::SerializationResult;
56
use starknet_patricia_storage::map_storage::MapStorage;
67

78
pub struct SerializedForest(pub FilledForest);
@@ -19,16 +20,16 @@ pub struct Output {
1920
}
2021

2122
impl SerializedForest {
22-
pub async fn forest_to_output(&self) -> Output {
23+
pub async fn forest_to_output(&self) -> SerializationResult<Output> {
2324
// Create an empty storage for the new facts.
2425
let mut output_facts_db = FactsDb::new(MapStorage::default());
25-
output_facts_db.write(&self.0).await;
26+
output_facts_db.write(&self.0).await?;
2627
let contract_storage_root_hash = self.0.get_contract_root_hash().0;
2728
let compiled_class_root_hash = self.0.get_compiled_class_root_hash().0;
28-
Output {
29+
Ok(Output {
2930
storage: output_facts_db.storage,
3031
contract_storage_root_hash: contract_storage_root_hash.to_hex_string(),
3132
compiled_class_root_hash: compiled_class_root_hash.to_hex_string(),
32-
}
33+
})
3334
}
3435
}

crates/starknet_committer_and_os_cli/src/committer_cli/tests/python_tests.rs

Lines changed: 58 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ use starknet_patricia::patricia_merkle_tree::node_data::inner_node::{
2727
};
2828
use starknet_patricia::patricia_merkle_tree::types::SubTreeHeight;
2929
use starknet_patricia_storage::db_object::{DBObject, EmptyKeyContext};
30-
use starknet_patricia_storage::errors::DeserializationError;
30+
use starknet_patricia_storage::errors::{DeserializationError, SerializationError};
3131
use starknet_patricia_storage::map_storage::MapStorage;
3232
use starknet_patricia_storage::storage_trait::{DbKey, DbValue, Storage};
3333
use starknet_types_core::felt::Felt;
@@ -77,6 +77,8 @@ pub enum CommitterSpecificTestError {
7777
InvalidCastError(#[from] std::num::TryFromIntError),
7878
#[error(transparent)]
7979
DeserializationTestFailure(#[from] DeserializationError),
80+
#[error(transparent)]
81+
SerializationError(#[from] SerializationError),
8082
}
8183

8284
/// Implements conversion from a string to the test runner.
@@ -191,8 +193,17 @@ fn serialize_for_rust_committer_flow_test(input: HashMap<String, String>) -> Str
191193
let TreeFlowInput { leaf_modifications, storage, root_hash } =
192194
parse_input_single_storage_tree_flow_test(&input);
193195
// Serialize the leaf modifications to an object that can be JSON-serialized.
194-
let leaf_modifications_to_print: HashMap<String, Vec<u8>> =
195-
leaf_modifications.into_iter().map(|(k, v)| (k.0.to_string(), v.serialize().0)).collect();
196+
let leaf_modifications_to_print: HashMap<String, Vec<u8>> = leaf_modifications
197+
.into_iter()
198+
.map(|(k, v)| {
199+
(
200+
k.0.to_string(),
201+
v.serialize()
202+
.unwrap_or_else(|error| panic!("Failed to serialize storage leaf: {error}"))
203+
.0,
204+
)
205+
})
206+
.collect();
196207

197208
// Create a json string to compare with the expected string in python.
198209
serde_json::to_string(&json!(
@@ -273,7 +284,9 @@ pub(crate) fn test_binary_serialize_test(binary_input: HashMap<String, u128>) ->
273284
FactDbFilledNode { data: NodeData::Binary(binary_data), hash: HashOutput(Felt::ZERO) };
274285

275286
// Serialize the binary node and insert it into the map under the key "value".
276-
let value = filled_node.serialize();
287+
let value = filled_node
288+
.serialize()
289+
.unwrap_or_else(|error| panic!("Failed to serialize filled node: {error}"));
277290
map.insert("value".to_string(), value.0);
278291

279292
// Serialize the map to a JSON string and handle serialization errors.
@@ -519,7 +532,16 @@ async fn test_storage_node(data: HashMap<String, String>) -> CommitterPythonTest
519532
};
520533

521534
// Store the binary node in the storage.
522-
rust_fact_storage.set(binary_rust.db_key(&EmptyKeyContext), binary_rust.serialize()).await?;
535+
rust_fact_storage
536+
.set(
537+
binary_rust.db_key(&EmptyKeyContext),
538+
binary_rust.serialize().map_err(|error| {
539+
PythonTestError::SpecificError(CommitterSpecificTestError::SerializationError(
540+
error,
541+
))
542+
})?,
543+
)
544+
.await?;
523545

524546
// Parse the edge node data from the input.
525547
let edge_json = get_or_key_not_found(&data, "edge")?;
@@ -546,7 +568,16 @@ async fn test_storage_node(data: HashMap<String, String>) -> CommitterPythonTest
546568
};
547569

548570
// Store the edge node in the storage.
549-
rust_fact_storage.set(edge_rust.db_key(&EmptyKeyContext), edge_rust.serialize()).await?;
571+
rust_fact_storage
572+
.set(
573+
edge_rust.db_key(&EmptyKeyContext),
574+
edge_rust.serialize().map_err(|error| {
575+
PythonTestError::SpecificError(CommitterSpecificTestError::SerializationError(
576+
error,
577+
))
578+
})?,
579+
)
580+
.await?;
550581

551582
// Parse the storage leaf data from the input.
552583
let storage_leaf_json = get_or_key_not_found(&data, "storage")?;
@@ -563,7 +594,14 @@ async fn test_storage_node(data: HashMap<String, String>) -> CommitterPythonTest
563594

564595
// Store the storage leaf node in the storage.
565596
rust_fact_storage
566-
.set(storage_leaf_rust.db_key(&EmptyKeyContext), storage_leaf_rust.serialize())
597+
.set(
598+
storage_leaf_rust.db_key(&EmptyKeyContext),
599+
storage_leaf_rust.serialize().map_err(|error| {
600+
PythonTestError::SpecificError(CommitterSpecificTestError::SerializationError(
601+
error,
602+
))
603+
})?,
604+
)
567605
.await?;
568606

569607
// Parse the contract state leaf data from the input.
@@ -592,7 +630,11 @@ async fn test_storage_node(data: HashMap<String, String>) -> CommitterPythonTest
592630
rust_fact_storage
593631
.set(
594632
contract_state_leaf_rust.db_key(&EmptyKeyContext),
595-
contract_state_leaf_rust.serialize(),
633+
contract_state_leaf_rust.serialize().map_err(|error| {
634+
PythonTestError::SpecificError(CommitterSpecificTestError::SerializationError(
635+
error,
636+
))
637+
})?,
596638
)
597639
.await?;
598640

@@ -614,7 +656,11 @@ async fn test_storage_node(data: HashMap<String, String>) -> CommitterPythonTest
614656
rust_fact_storage
615657
.set(
616658
compiled_class_leaf_rust.db_key(&EmptyKeyContext),
617-
compiled_class_leaf_rust.serialize(),
659+
compiled_class_leaf_rust.serialize().map_err(|error| {
660+
PythonTestError::SpecificError(CommitterSpecificTestError::SerializationError(
661+
error,
662+
))
663+
})?,
618664
)
619665
.await?;
620666

@@ -625,7 +671,9 @@ async fn test_storage_node(data: HashMap<String, String>) -> CommitterPythonTest
625671
/// Generates a dummy random filled forest and serializes it to a JSON string.
626672
pub(crate) async fn filled_forest_output_test() -> CommitterPythonTestResult {
627673
let dummy_forest = SerializedForest(FilledForest::dummy_random(&mut rand::thread_rng(), None));
628-
let output = dummy_forest.forest_to_output().await;
674+
let output = dummy_forest.forest_to_output().await.map_err(|error| {
675+
PythonTestError::SpecificError(CommitterSpecificTestError::SerializationError(error))
676+
})?;
629677
let output_string = serde_json::to_string(&output).expect("Failed to serialize");
630678
Ok(output_string)
631679
}

0 commit comments

Comments (0)