Skip to content

Commit c0cd4c3

Browse files
dlachaume and Alenar committed
refactor: enhance ComputedImmutablesDigests struct and isolate update_cache responsibility to reduce code duplication
Co-authored-by: DJO <[email protected]>
1 parent 0b08c6f commit c0cd4c3

File tree

1 file changed

+33
-33
lines changed

1 file changed

+33
-33
lines changed

mithril-common/src/digesters/cardano_immutable_digester.rs

Lines changed: 33 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ type ComputedImmutablesDigestsResult = Result<ComputedImmutablesDigests, io::Err
1818

1919
struct ComputedImmutablesDigests {
2020
entries: BTreeMap<ImmutableFile, HexEncodedDigest>,
21-
new_cached_entries: Vec<(ImmutableFileName, HexEncodedDigest)>,
21+
new_cached_entries: Vec<ImmutableFileName>,
2222
}
2323

2424
/// A digester working directly on a Cardano DB immutables files
@@ -75,6 +75,28 @@ impl CardanoImmutableDigester {
7575

7676
Ok(computed_digests)
7777
}
78+
79+
async fn update_cache(&self, computed_immutables_digests: &ComputedImmutablesDigests) {
80+
if let Some(cache_provider) = self.cache_provider.as_ref() {
81+
let new_cached_entries = computed_immutables_digests
82+
.entries
83+
.iter()
84+
.filter(|(file, _hash)| {
85+
computed_immutables_digests
86+
.new_cached_entries
87+
.contains(&file.filename)
88+
})
89+
.map(|(file, hash)| (file.filename.clone(), hash.clone()))
90+
.collect();
91+
92+
if let Err(error) = cache_provider.store(new_cached_entries).await {
93+
warn!(
94+
self.logger, "Error while storing new immutable files digests to cache";
95+
"error" => ?error
96+
);
97+
}
98+
}
99+
}
78100
}
79101

80102
#[async_trait]
@@ -89,6 +111,8 @@ impl ImmutableDigester for CardanoImmutableDigester {
89111
info!(self.logger, ">> compute_digest"; "beacon" => #?beacon, "nb_of_immutables" => immutables_to_process.len());
90112
let computed_immutables_digests = self.process_immutables(immutables_to_process).await?;
91113

114+
self.update_cache(&computed_immutables_digests).await;
115+
92116
let digest = {
93117
let mut hasher = Sha256::new();
94118
hasher.update(compute_beacon_hash(&self.cardano_network, beacon).as_bytes());
@@ -102,18 +126,6 @@ impl ImmutableDigester for CardanoImmutableDigester {
102126

103127
debug!(self.logger, "Computed digest: {digest:?}");
104128

105-
if let Some(cache_provider) = self.cache_provider.as_ref() {
106-
if let Err(error) = cache_provider
107-
.store(computed_immutables_digests.new_cached_entries)
108-
.await
109-
{
110-
warn!(
111-
self.logger, "Error while storing new immutable files digests to cache";
112-
"error" => ?error
113-
);
114-
}
115-
}
116-
117129
Ok(digest)
118130
}
119131

@@ -127,6 +139,8 @@ impl ImmutableDigester for CardanoImmutableDigester {
127139
info!(self.logger, ">> compute_merkle_tree"; "beacon" => #?beacon, "nb_of_immutables" => immutables_to_process.len());
128140
let computed_immutables_digests = self.process_immutables(immutables_to_process).await?;
129141

142+
self.update_cache(&computed_immutables_digests).await;
143+
130144
let digests: Vec<HexEncodedDigest> =
131145
computed_immutables_digests.entries.into_values().collect();
132146
let mktree =
@@ -136,18 +150,6 @@ impl ImmutableDigester for CardanoImmutableDigester {
136150
self.logger,
137151
"Successfully computed Merkle tree for Cardano database"; "beacon" => #?beacon);
138152

139-
if let Some(cache_provider) = self.cache_provider.as_ref() {
140-
if let Err(error) = cache_provider
141-
.store(computed_immutables_digests.new_cached_entries)
142-
.await
143-
{
144-
warn!(
145-
self.logger, "Error while storing new immutable files digests to cache";
146-
"error" => ?error
147-
);
148-
}
149-
}
150-
151153
Ok(mktree)
152154
}
153155
}
@@ -190,17 +192,15 @@ fn compute_immutables_digests(
190192

191193
let mut digests = BTreeMap::new();
192194

193-
for (ix, (entry, cache)) in entries.iter().enumerate() {
194-
match cache {
195+
for (ix, (entry, cache)) in entries.into_iter().enumerate() {
196+
let hash = match cache {
195197
None => {
196-
let data = hex::encode(entry.compute_raw_hash::<Sha256>()?);
197-
digests.insert(entry.clone(), data.clone());
198-
new_cached_entries.push((entry.filename.clone(), data));
199-
}
200-
Some(digest) => {
201-
digests.insert(entry.clone(), digest.to_string());
198+
new_cached_entries.push(entry.filename.clone());
199+
hex::encode(entry.compute_raw_hash::<Sha256>()?)
202200
}
201+
Some(digest) => digest,
203202
};
203+
digests.insert(entry, hash);
204204

205205
if progress.report(ix) {
206206
info!(logger, "Hashing: {progress}");

0 commit comments

Comments (0)