Commit be89d59

dlachaume and Alenar committed
refactor: improve CardanoImmutableDigester maintainability and modularity by isolating responsibilities into smaller scopes and functions
Co-authored-by: DJO <[email protected]>
1 parent 60db5ce commit be89d59

File tree: 1 file changed (+100, -75 lines)

mithril-common/src/digesters/cardano_immutable_digester.rs

Lines changed: 100 additions & 75 deletions
@@ -3,18 +3,22 @@ use crate::{
         cache::ImmutableFileDigestCacheProvider, ImmutableDigester, ImmutableDigesterError,
         ImmutableFile,
     },
-    entities::{CardanoDbBeacon, HexEncodedDigest, ImmutableFileName},
+    entities::{CardanoDbBeacon, HexEncodedDigest, ImmutableFileName, ImmutableFileNumber},
     logging::LoggerExtensions,
 };
 use async_trait::async_trait;
 use sha2::{Digest, Sha256};
 use slog::{debug, info, warn, Logger};
 use std::{collections::BTreeMap, io, path::Path, sync::Arc};

-/// Result of a cache computation, contains the digest and the list of new entries to add
+/// Result of a cache computation, contains the list of immutable digests and the list of new entries to add
 /// to the [ImmutableFileDigestCacheProvider].
-type CacheComputationResult =
-    Result<([u8; 32], Vec<(ImmutableFileName, HexEncodedDigest)>), io::Error>;
+type ComputedImmutablesDigestsResult = Result<ComputedImmutablesDigests, io::Error>;
+
+struct ComputedImmutablesDigests {
+    digests: Vec<HexEncodedDigest>,
+    new_cached_entries: Vec<(ImmutableFileName, HexEncodedDigest)>,
+}

 /// A digester working directly on a Cardano DB immutables files
 pub struct CardanoImmutableDigester {
@@ -40,6 +44,36 @@ impl CardanoImmutableDigester {
             logger: logger.new_with_component_name::<Self>(),
         }
     }
+
+    async fn process_immutables(
+        &self,
+        immutables: Vec<ImmutableFile>,
+    ) -> Result<ComputedImmutablesDigests, ImmutableDigesterError> {
+        let cached_values = match self.cache_provider.as_ref() {
+            None => BTreeMap::from_iter(immutables.into_iter().map(|i| (i, None))),
+            Some(cache_provider) => match cache_provider.get(immutables.clone()).await {
+                Ok(values) => values,
+                Err(error) => {
+                    warn!(
+                        self.logger, "Error while getting cached immutable files digests";
+                        "error" => ?error
+                    );
+                    BTreeMap::from_iter(immutables.into_iter().map(|i| (i, None)))
+                }
+            },
+        };
+
+        // The computation of immutable files digests is done in a separate thread because it is blocking the whole task
+        let logger = self.logger.clone();
+        let computed_digests =
+            tokio::task::spawn_blocking(move || -> ComputedImmutablesDigestsResult {
+                compute_immutables_digests(logger, cached_values)
+            })
+            .await
+            .map_err(|e| ImmutableDigesterError::DigestComputationError(e.into()))??;
+
+        Ok(computed_digests)
+    }
 }

 #[async_trait]
@@ -49,99 +83,87 @@ impl ImmutableDigester for CardanoImmutableDigester {
         dirpath: &Path,
         beacon: &CardanoDbBeacon,
     ) -> Result<String, ImmutableDigesterError> {
-        let up_to_file_number = beacon.immutable_file_number;
-        let immutables = ImmutableFile::list_completed_in_dir(dirpath)?
-            .into_iter()
-            .filter(|f| f.number <= up_to_file_number)
-            .collect::<Vec<_>>();
-        info!(self.logger, ">> compute_digest"; "beacon" => #?beacon, "nb_of_immutables" => immutables.len());
-
-        match immutables.last() {
-            None => Err(ImmutableDigesterError::NotEnoughImmutable {
-                expected_number: up_to_file_number,
-                found_number: None,
-                db_dir: dirpath.to_owned(),
-            }),
-            Some(last_immutable_file) if last_immutable_file.number < up_to_file_number => {
-                Err(ImmutableDigesterError::NotEnoughImmutable {
-                    expected_number: up_to_file_number,
-                    found_number: Some(last_immutable_file.number),
-                    db_dir: dirpath.to_owned(),
-                })
+        let immutables_to_process =
+            list_immutable_files_to_process(dirpath, beacon.immutable_file_number)?;
+        info!(self.logger, ">> compute_digest"; "beacon" => #?beacon, "nb_of_immutables" => immutables_to_process.len());
+        let computed_immutables_digests = self.process_immutables(immutables_to_process).await?;
+
+        let digest = {
+            let mut hasher = Sha256::new();
+            hasher.update(compute_beacon_hash(&self.cardano_network, beacon).as_bytes());
+            for digest in computed_immutables_digests.digests {
+                hasher.update(digest);
             }
-            Some(_) => {
-                let cached_values = match self.cache_provider.as_ref() {
-                    None => BTreeMap::from_iter(immutables.into_iter().map(|i| (i, None))),
-                    Some(cache_provider) => match cache_provider.get(immutables.clone()).await {
-                        Ok(values) => values,
-                        Err(error) => {
-                            warn!(
-                                self.logger, "Error while getting cached immutable files digests";
-                                "error" => ?error
-                            );
-                            BTreeMap::from_iter(immutables.into_iter().map(|i| (i, None)))
-                        }
-                    },
-                };
-
-                // digest is done in a separate thread because it is blocking the whole task
-                let logger = self.logger.clone();
-                let thread_cardano_network = self.cardano_network.clone();
-                let thread_beacon = beacon.clone();
-                let (hash, new_cache_entries) =
-                    tokio::task::spawn_blocking(move || -> CacheComputationResult {
-                        compute_hash(
-                            logger,
-                            thread_cardano_network,
-                            &thread_beacon,
-                            cached_values,
-                        )
-                    })
-                    .await
-                    .map_err(|e| ImmutableDigesterError::DigestComputationError(e.into()))??;
-                let digest = hex::encode(hash);
-
-                debug!(self.logger, "Computed digest: {digest:?}");
-
-                if let Some(cache_provider) = self.cache_provider.as_ref() {
-                    if let Err(error) = cache_provider.store(new_cache_entries).await {
-                        warn!(
-                            self.logger, "Error while storing new immutable files digests to cache";
-                            "error" => ?error
-                        );
-                    }
-                }
+            let hash: [u8; 32] = hasher.finalize().into();
+
+            hex::encode(hash)
+        };

-                Ok(digest)
+        debug!(self.logger, "Computed digest: {digest:?}");
+
+        if let Some(cache_provider) = self.cache_provider.as_ref() {
+            if let Err(error) = cache_provider
+                .store(computed_immutables_digests.new_cached_entries)
+                .await
+            {
+                warn!(
+                    self.logger, "Error while storing new immutable files digests to cache";
+                    "error" => ?error
+                );
             }
         }
+
+        Ok(digest)
     }
 }

-fn compute_hash(
+fn list_immutable_files_to_process(
+    dirpath: &Path,
+    up_to_file_number: ImmutableFileNumber,
+) -> Result<Vec<ImmutableFile>, ImmutableDigesterError> {
+    let immutables: Vec<ImmutableFile> = ImmutableFile::list_completed_in_dir(dirpath)?
+        .into_iter()
+        .filter(|f| f.number <= up_to_file_number)
+        .collect();
+
+    match immutables.last() {
+        None => Err(ImmutableDigesterError::NotEnoughImmutable {
+            expected_number: up_to_file_number,
+            found_number: None,
+            db_dir: dirpath.to_owned(),
+        }),
+        Some(last_immutable_file) if last_immutable_file.number < up_to_file_number => {
+            Err(ImmutableDigesterError::NotEnoughImmutable {
+                expected_number: up_to_file_number,
+                found_number: Some(last_immutable_file.number),
+                db_dir: dirpath.to_owned(),
+            })
+        }
+        Some(_) => Ok(immutables),
+    }
+}
+
+fn compute_immutables_digests(
     logger: Logger,
-    cardano_network: String,
-    beacon: &CardanoDbBeacon,
     entries: BTreeMap<ImmutableFile, Option<HexEncodedDigest>>,
-) -> CacheComputationResult {
-    let mut hasher = Sha256::new();
+) -> ComputedImmutablesDigestsResult {
     let mut new_cached_entries = Vec::new();
     let mut progress = Progress {
         index: 0,
         total: entries.len(),
     };

-    hasher.update(compute_beacon_hash(&cardano_network, beacon).as_bytes());
+    let mut digests = Vec::with_capacity(entries.len());

     for (ix, (entry, cache)) in entries.iter().enumerate() {
         match cache {
             None => {
                 let data = hex::encode(entry.compute_raw_hash::<Sha256>()?);
-                hasher.update(&data);
+                digests.push(data.clone());
                 new_cached_entries.push((entry.filename.clone(), data));
             }
             Some(digest) => {
-                hasher.update(digest);
+                digests.push(digest.to_string());
             }
         };

@@ -150,7 +172,10 @@ fn compute_hash(
         }
     }

-    Ok((hasher.finalize().into(), new_cached_entries))
+    Ok(ComputedImmutablesDigests {
+        digests,
+        new_cached_entries,
+    })
 }

 fn compute_beacon_hash(network: &str, cardano_db_beacon: &CardanoDbBeacon) -> String {
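
To make the shape of the refactor easier to follow outside the diff, below is a small, self-contained Rust sketch of the same pattern: per-file digests and the not-yet-cached entries are bundled into one result struct, and the CPU-bound hashing runs off the async executor via tokio::task::spawn_blocking. The struct and function names, the file names, and the hashing of file names instead of file contents are illustrative stand-ins (assuming the sha2, hex, and tokio crates), not mithril-common's actual API.

use sha2::{Digest, Sha256};

// Stand-in for ComputedImmutablesDigests: the per-file digests plus the
// entries that were missing from the cache and should be stored afterwards.
struct ComputedDigests {
    digests: Vec<String>,
    new_cached_entries: Vec<(String, String)>, // (file name, hex digest)
}

// Stand-in for compute_immutables_digests: reuse a cached digest when present,
// otherwise compute one (here, hashing the file name as a placeholder for
// hashing the immutable file's contents).
fn compute_digests(files: Vec<(String, Option<String>)>) -> ComputedDigests {
    let mut digests = Vec::with_capacity(files.len());
    let mut new_cached_entries = Vec::new();
    for (name, cached) in files {
        match cached {
            Some(digest) => digests.push(digest),
            None => {
                let digest = hex::encode(Sha256::digest(name.as_bytes()));
                digests.push(digest.clone());
                new_cached_entries.push((name, digest));
            }
        }
    }
    ComputedDigests { digests, new_cached_entries }
}

#[tokio::main]
async fn main() {
    let files = vec![
        ("00001.chunk".to_string(), Some("cached-digest".to_string())),
        ("00002.chunk".to_string(), None),
    ];

    // Hashing is blocking work, so it is moved to a dedicated thread,
    // mirroring the spawn_blocking call in process_immutables.
    let computed = tokio::task::spawn_blocking(move || compute_digests(files))
        .await
        .expect("blocking task panicked");

    // Chain the per-file digests into a final digest, as compute_digest does
    // (the real code also seeds the hasher with a beacon hash first).
    let mut hasher = Sha256::new();
    for digest in &computed.digests {
        hasher.update(digest.as_bytes());
    }
    println!("final digest: {}", hex::encode(hasher.finalize()));
    println!("new cache entries: {:?}", computed.new_cached_entries);
}

Splitting the per-file digest collection from the final hash is what lets the beacon-dependent hashing stay inside compute_digest, while compute_immutables_digests becomes a cache-aware helper that, unlike the old compute_hash, no longer needs the Cardano network or the beacon as parameters.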
