
Commit 77ddcc6

danielpapp-trilitech authored and jakab922 committed
refactor(data): refactor hash module
- Renamed methods on the Hash struct
- Added the Hasher struct
- Added tests and documentation for both
1 parent bbe853e commit 77ddcc6
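
At existing call sites, the renames are one-for-one substitutions. A minimal caller sketch, assuming the module is in scope as `crate::hash` and that `value` implements `bincode::Encode` (the caller and its inputs are hypothetical; only the method names come from this commit):

    use crate::hash::Hash;

    fn digest_parts(bytes: &[u8], value: impl bincode::Encode) -> Hash {
        // Previously: Hash::blake3_hash_bytes, Hash::blake3_hash, Hash::combine
        let from_bytes = Hash::hash_bytes(bytes);
        let from_value = Hash::hash_encodable(value).expect("serialisation should succeed");
        Hash::combine_hashes([from_bytes, from_value])
    }

Note that `Hash::try_combine` is removed outright in this commit; callers that need error handling can collect their `Result`s before calling `Hash::combine_hashes`, or drive the new `Hasher` directly.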

11 files changed: +257 -127 lines changed


data/src/hash.rs

Lines changed: 152 additions & 27 deletions
@@ -18,7 +18,7 @@ use crate::foldable::Foldable;
 use crate::foldable::NodeFold;
 use crate::merkle_proof::proof_tree::MerkleProof;
 use crate::merkle_proof::proof_tree::MerkleProofLeaf;
-use crate::serialisation as binary;
+use crate::serialisation::serialise_into;
 use crate::tree::Tree;

 /// Errors that can occur during hashing operations
@@ -60,27 +60,26 @@ pub struct Hash {
 }

 impl Hash {
-    /// Hash a slice of bytes
-    pub fn blake3_hash_bytes(bytes: &[u8]) -> Self {
+    /// Hashes a byte slice into a [`Hash`] object.
+    pub fn hash_bytes(bytes: &[u8]) -> Self {
         let digest = blake3::hash(bytes).into();
         Hash { digest }
     }

-    /// Get the hash of a value that can be serialised by hashing its serialisation
-    pub fn blake3_hash<T: Encode>(data: T) -> Result<Self, EncodeError> {
+    /// Creates a [`Hash`] object from something that implements the
+    /// [`bincode::enc::Encode`] trait.
+    pub fn hash_encodable<T: Encode>(data: T) -> Result<Self, EncodeError> {
         let mut hasher = blake3::Hasher::new();
-        binary::serialise_into(&data, &mut hasher)?;
+        serialise_into(&data, &mut hasher)?;

         let digest = hasher.finalize().into();
         Ok(Hash { digest })
     }

-    /// Combine multiple [`struct@Hash`] values into a single one.
-    ///
-    /// The hashes are combined by concatenating them, then hashing the result.
-    /// Pre-image resistance is not compromised because the concatenation is not
-    /// ambiguous, with hashes having a fixed size ([`DIGEST_SIZE`]).
-    pub fn combine<H: Borrow<Hash>, HS: IntoIterator<Item = H>>(hashes: HS) -> Hash {
+    /// Creates a [`Hash`] object from a collection of iterables
+    /// that can be [`Borrow`]ed as a [`Hash`]. Note that this
+    /// method is rehashing the hashes!
+    pub fn combine_hashes<H: Borrow<Hash>, HS: IntoIterator<Item = H>>(hashes: HS) -> Hash {
         let mut hasher = blake3::Hasher::new();

         for hash in hashes {
@@ -92,21 +91,6 @@ impl Hash {
         Hash { digest }
     }

-    /// Like [`Self::combine`], but the iterator can yield errors.
-    pub fn try_combine<H: Borrow<Hash>, E, HS: IntoIterator<Item = Result<H, E>>>(
-        hashes: HS,
-    ) -> Result<Hash, E> {
-        let mut hasher = blake3::Hasher::new();
-
-        for hash in hashes {
-            let hash = hash?;
-            hasher.update(hash.borrow().as_ref());
-        }
-
-        let digest = hasher.finalize().into();
-        Ok(Hash { digest })
-    }
-
     /// Hash the underlying state of a foldable structure.
     pub fn from_foldable(foldable: &impl Foldable<HashFold>) -> Self {
         foldable.fold(HashFold)
@@ -137,6 +121,36 @@ impl Foldable<HashFold> for Hash {
     }
 }

+pub struct Hasher {
+    hasher: blake3::Hasher,
+}
+
+impl Hasher {
+    /// Creates a new [`Hasher`] object
+    pub fn new() -> Self {
+        Self {
+            hasher: blake3::Hasher::new(),
+        }
+    }
+
+    /// Updates the [`Hasher`] with some bytes
+    pub fn update_with_bytes(&mut self, bytes: &[u8]) {
+        self.hasher.update(bytes);
+    }
+
+    /// Updates the [`Hasher`] with the digest of a [`Hash`]
+    pub fn update_with_hash(&mut self, hash: Hash) {
+        let digest: [u8; DIGEST_SIZE] = hash.into();
+        self.hasher.update(digest.as_slice());
+    }
+
+    /// Turns the [`Hasher`] into a [`Hash`]
+    pub fn to_hash(self) -> Hash {
+        let digest: [u8; DIGEST_SIZE] = self.hasher.finalize().into();
+        Hash { digest }
+    }
+}
+
 /// [`Fold`] implementation producing a [`struct@Hash`]
 pub struct HashFold;

@@ -374,3 +388,114 @@ impl<'tree> NodeFold for PartialHashNodeFold<'tree> {
         PartialHash::Present(hash)
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::DIGEST_SIZE;
+    use super::Hash;
+    use super::Hasher;
+    use crate::serialisation::bincode_default_config;
+    use bincode::Encode;
+    use std::borrow::Borrow;
+
+    #[derive(Clone, Encode)]
+    struct Encodable {
+        a: u32,
+    }
+
+    impl Encodable {
+        fn new(a: u32) -> Self {
+            Self { a }
+        }
+    }
+
+    #[derive(Clone)]
+    struct Borrowable {
+        hash: Hash,
+    }
+
+    impl Borrow<Hash> for Borrowable {
+        fn borrow(&self) -> &Hash {
+            &self.hash
+        }
+    }
+
+    impl Borrowable {
+        fn new(hash: Hash) -> Self {
+            Self { hash }
+        }
+    }
+
+    #[test]
+    fn hash_bytes_works_as_blake3_hashing() {
+        let bytes = [1, 2, 3];
+        let hash = Hash::hash_bytes(&bytes);
+        let hash_digest: [u8; DIGEST_SIZE] = hash.into();
+        let blake3_digest: [u8; DIGEST_SIZE] = blake3::hash(&bytes).into();
+        assert_eq!(hash_digest, blake3_digest);
+    }
+
+    #[test]
+    fn hash_encodable_can_hash_encodable_objects() {
+        let object = Encodable::new(12);
+        let bytes =
+            bincode::encode_to_vec(object.clone(), bincode_default_config()).expect("Should work");
+        let object_hash_digest: [u8; DIGEST_SIZE] = blake3::hash(bytes.as_slice()).into();
+        let hash_digest: [u8; DIGEST_SIZE] =
+            Hash::hash_encodable(object).expect("Should work").into();
+        assert_eq!(object_hash_digest, hash_digest);
+    }
+
+    #[test]
+    fn hash_combines_can_combine_hashes_to_a_new_hash() {
+        let coll = vec![
+            Borrowable::new(Hash::hash_bytes(&[1, 2, 3])),
+            Borrowable::new(Hash::hash_bytes(&[4, 5, 6])),
+        ];
+        let hash = Hash::combine_hashes(coll.clone());
+        let hash_digest: [u8; DIGEST_SIZE] = hash.into();
+
+        let mut hasher = blake3::Hasher::new();
+        let mut borrowed_hash: &Hash = coll[0].borrow();
+        hasher.update(borrowed_hash.as_ref());
+        borrowed_hash = coll[1].borrow();
+        hasher.update(borrowed_hash.as_ref());
+        let hasher_digest: [u8; DIGEST_SIZE] = hasher.finalize().into();
+
+        assert_eq!(hash_digest, hasher_digest);
+
+        let hash_from_combined = Hash::hash_bytes(&[1, 2, 3, 4, 5, 6]);
+
+        assert_ne!(hash, hash_from_combined);
+    }
+
+    #[test]
+    fn hasher_update_with_bytes_is_the_same_as_hash_bytes() {
+        let elems: Vec<Vec<u8>> = vec![vec![1, 2, 3], vec![4, 5, 6]];
+        let mut hasher: Hasher = Hasher::new();
+
+        for elem in elems.iter() {
+            hasher.update_with_bytes(elem.as_slice());
+        }
+
+        let flattened_elems: Vec<u8> = elems.into_iter().flatten().collect();
+        let hash = Hash::hash_bytes(flattened_elems.as_slice());
+
+        assert_eq!(hasher.to_hash(), hash);
+    }
+
+    #[test]
+    fn combine_hashes_is_the_same_as_update_with_hash() {
+        let elems: Vec<Hash> = vec![Hash::hash_bytes(&[1, 2, 3]), Hash::hash_bytes(&[4, 5, 6])];
+
+        let hash = Hash::combine_hashes(elems.clone());
+
+        let mut hasher = Hasher::new();
+
+        for elem in elems.into_iter() {
+            hasher.update_with_hash(elem);
+        }
+
+        assert_eq!(hash, hasher.to_hash());
+    }
+}
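
The tests above pin down how the streaming API relates to the one-shot methods: chunks fed through `Hasher::update_with_bytes` hash to the same digest as `Hash::hash_bytes` over the concatenated input, while `Hash::combine_hashes` matches a `Hasher` driven by `update_with_hash` (and, because digests are rehashed, differs from hashing the concatenated raw bytes). A minimal sketch of the incremental style, assuming the types are in scope as `crate::hash::{Hash, Hasher}` (the chunked-input scenario is hypothetical):

    use crate::hash::{Hash, Hasher};

    /// Hashes any number of chunks without concatenating them first;
    /// equivalent to Hash::hash_bytes over the joined input.
    fn hash_chunks<'a>(chunks: impl IntoIterator<Item = &'a [u8]>) -> Hash {
        let mut hasher = Hasher::new();
        for chunk in chunks {
            hasher.update_with_bytes(chunk);
        }
        hasher.to_hash()
    }

For example, `hash_chunks([b"abc".as_slice(), b"def".as_slice()])` yields the same value as `Hash::hash_bytes(b"abcdef")`, mirroring the `hasher_update_with_bytes_is_the_same_as_hash_bytes` test.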

data/src/merkle_proof/proof_tree.rs

Lines changed: 5 additions & 5 deletions
@@ -98,9 +98,9 @@ impl MerkleProof {
             },
             |leaf| match leaf {
                 MerkleProofLeaf::Blind(hash) => *hash,
-                MerkleProofLeaf::Read(data) => Hash::blake3_hash_bytes(data.as_slice()),
+                MerkleProofLeaf::Read(data) => Hash::hash_bytes(data.as_slice()),
             },
-            |(), leaves| Hash::combine(leaves),
+            |(), leaves| Hash::combine_hashes(leaves),
         )
     }
 }
@@ -127,11 +127,11 @@ mod tests {
     fn merkle_proofs_can_be_encoded() {
         let merkle_proofs = [
             MerkleProof::leaf_read([1, 2, 3].to_vec()),
-            MerkleProof::leaf_blind(Hash::blake3_hash_bytes(&[1, 3, 4])),
+            MerkleProof::leaf_blind(Hash::hash_bytes(&[1, 3, 4])),
             Tree::Node(
                 [
                     MerkleProof::leaf_read([1, 2, 3].to_vec()),
-                    MerkleProof::leaf_blind(Hash::blake3_hash_bytes(&[1, 3, 4])),
+                    MerkleProof::leaf_blind(Hash::hash_bytes(&[1, 3, 4])),
                 ]
                 .to_vec(),
             ),
@@ -147,7 +147,7 @@ mod tests {
         let node = Tree::Node(
             [
                 MerkleProof::leaf_read([1, 2, 3].to_vec()),
-                MerkleProof::leaf_blind(Hash::blake3_hash_bytes(&[1, 3, 4])),
+                MerkleProof::leaf_blind(Hash::hash_bytes(&[1, 3, 4])),
             ]
             .to_vec(),
         );

data/src/merkle_tree.rs

Lines changed: 5 additions & 5 deletions
@@ -31,7 +31,7 @@ impl MerkleTree {
     /// let data = vec![1, 2, 3];
     /// let merkle_tree = MerkleTree::make_merkle_leaf(data.clone(), false);
     /// let root_hash = merkle_tree.root_hash();
-    /// let hash = Hash::blake3_hash_bytes(&data);
+    /// let hash = Hash::hash_bytes(&data);
     ///
     /// assert_eq!(hash, root_hash);
     /// ```
@@ -44,15 +44,15 @@ impl MerkleTree {

     /// Creates a merkle tree which is a single leaf
     pub fn make_merkle_leaf(data: Vec<u8>, access_info: bool) -> Self {
-        let hash = Hash::blake3_hash_bytes(&data);
+        let hash = Hash::hash_bytes(&data);
         MerkleTree::Leaf(hash, access_info, data)
     }

     /// Takes a list of children nodes and creates a
     /// new parent node on top of them.
     pub fn make_merkle_node(children: Vec<Self>) -> Self {
         let children_hashes = children.iter().map(|t| t.root_hash());
-        let node_hash = Hash::combine(children_hashes);
+        let node_hash = Hash::combine_hashes(children_hashes);
         MerkleTree::Node(node_hash, children)
     }

@@ -65,7 +65,7 @@ impl MerkleTree {

         while let Some(node) = deque.pop_front() {
             let is_valid_hash = match node {
-                Self::Leaf(hash, _, data) => &Hash::blake3_hash_bytes(data) == hash,
+                Self::Leaf(hash, _, data) => &Hash::hash_bytes(data) == hash,
                 Self::Node(hash, children) => {
                     let children_hashes: Vec<Hash> = children
                         .iter()
@@ -75,7 +75,7 @@ impl MerkleTree {
                         })
                         .collect();

-                    &Hash::combine(children_hashes) == hash
+                    &Hash::combine_hashes(children_hashes) == hash
                 }
             };
             if !is_valid_hash {
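
The rename does not change the invariant this code checks: a leaf's stored hash must equal `Hash::hash_bytes` of its data, and a node's stored hash must equal `Hash::combine_hashes` of its children's root hashes. A small sketch of that relationship, assuming the types are importable as `crate::merkle_tree::MerkleTree` and `crate::hash::Hash` (module paths and the sample data are assumptions):

    #[test]
    fn node_hash_is_combination_of_child_hashes() {
        use crate::hash::Hash;
        use crate::merkle_tree::MerkleTree;

        let left = MerkleTree::make_merkle_leaf(vec![1, 2, 3], false);
        let right = MerkleTree::make_merkle_leaf(vec![4, 5, 6], false);

        // Capture the children's root hashes before moving them into the node.
        let expected = Hash::combine_hashes([left.root_hash(), right.root_hash()]);
        let node = MerkleTree::make_merkle_node(vec![left, right]);

        assert_eq!(node.root_hash(), expected);
    }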

data/src/serialisation.rs

Lines changed: 5 additions & 5 deletions
@@ -16,7 +16,7 @@ use bincode::error::EncodeError;

 /// Constructs the default options for bincode serialisation and deserialisation.
 #[inline]
-const fn bincode_default() -> impl Config {
+pub const fn bincode_default_config() -> impl Config {
     standard()
         .with_limit::<{ 1024 * 1024 * 1024 }>()
         .with_little_endian()
@@ -25,21 +25,21 @@ const fn bincode_default() -> impl Config {

 /// Deserialise a slice of bytes into a value of type `T`.
 pub fn deserialise<T: Decode<()>>(data: &[u8]) -> Result<T, DecodeError> {
-    let (value, _) = bincode::decode_from_slice(data, bincode_default())?;
+    let (value, _) = bincode::decode_from_slice(data, bincode_default_config())?;
     Ok(value)
 }

 /// Deserialise a value of type `T` from a byte source.
 pub fn deserialise_from<T: Decode<()>, R: Read>(source: &mut R) -> Result<T, DecodeError> {
-    bincode::decode_from_std_read(source, bincode_default())
+    bincode::decode_from_std_read(source, bincode_default_config())
 }

 /// Serialize `T` into a vector of bytes.
 pub fn serialise<T: Encode>(value: T) -> Result<Vec<u8>, EncodeError> {
-    bincode::encode_to_vec(value, bincode_default())
+    bincode::encode_to_vec(value, bincode_default_config())
 }

 /// Serialize `T` into a sink.
 pub fn serialise_into<T: Encode, W: Write>(value: T, sink: &mut W) -> Result<usize, EncodeError> {
-    bincode::encode_into_std_write(value, sink, bincode_default())
+    bincode::encode_into_std_write(value, sink, bincode_default_config())
 }
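
The only observable change here is that the configuration helper is renamed and made `pub`, so other modules, such as the new hash tests above, can encode with exactly the same bincode settings. A round-trip sketch using this module's helpers, assuming a small `Pair` type that derives the bincode traits (the type and its values are hypothetical):

    use bincode::{Decode, Encode};

    use crate::serialisation::{deserialise, serialise};

    #[derive(Debug, PartialEq, Encode, Decode)]
    struct Pair {
        a: u32,
        b: u64,
    }

    fn round_trip_example() {
        let value = Pair { a: 1, b: 2 };

        // Both helpers use bincode_default_config() internally.
        let bytes = serialise(&value).expect("encoding should succeed");
        let decoded: Pair = deserialise(&bytes).expect("decoding should succeed");

        assert_eq!(decoded, value);
    }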

src/riscv/lib/src/jit.rs

Lines changed: 1 addition & 1 deletion
@@ -172,7 +172,7 @@ impl JIT {
             return None;
         }

-        let Ok(hash) = Hash::blake3_hash((instr_for_hash.as_slice(), program_counter)) else {
+        let Ok(hash) = Hash::hash_encodable((instr_for_hash.as_slice(), program_counter)) else {
             builder.abandon();
             self.clear();
             return None;

0 commit comments
