@@ -16,7 +16,7 @@ use slog::{error, Logger};
 
 use crate::{
     file_uploaders::{GcpUploader, LocalUploader},
-    services::{CompressedArchiveSnapshotter, Snapshotter, SnapshotterCompressionAlgorithm},
+    services::Snapshotter,
     tools::url_sanitizer::SanitizedUrlWithTrailingSlash,
     DumbUploader, FileUploader, ImmutableFileDigestMapper,
 };
@@ -94,12 +94,12 @@ pub struct DigestArtifactBuilder {
     uploaders: Vec<Arc<dyn DigestFileUploader>>,
 
     snapshotter: Arc<dyn Snapshotter>,
+
     compression_algorithm: CompressionAlgorithm,
 
     network: CardanoNetwork,
 
     digests_dir: PathBuf,
-    archive_dir: PathBuf,
 
     immutable_file_digest_mapper: Arc<dyn ImmutableFileDigestMapper>,
 
@@ -109,47 +109,6 @@ pub struct DigestArtifactBuilder {
 impl DigestArtifactBuilder {
     /// Creates a new [DigestArtifactBuilder].
     pub fn new(
-        aggregator_url_prefix: SanitizedUrlWithTrailingSlash,
-        uploaders: Vec<Arc<dyn DigestFileUploader>>,
-        compression_algorithm: CompressionAlgorithm,
-        network: CardanoNetwork,
-        digests_dir: PathBuf,
-        immutable_file_digest_mapper: Arc<dyn ImmutableFileDigestMapper>,
-        logger: Logger,
-    ) -> StdResult<Self> {
-        // TODO We have to pass SnapshotterCompressionAlgorithm ?
-        // TODO CompressedArchiveSnapshotter use one temporary folder and should not be use by another one !!!
-        let algorithm = match compression_algorithm {
-            CompressionAlgorithm::Gzip => SnapshotterCompressionAlgorithm::Gzip,
-            _ => todo!(),
-            // Probably pass the SnappshotterCompressionAlgorithm to the struct or the snapshotter ?
-            // CompressionAlgorithm::Zstandard => self
-            //     .configuration
-            //     .zstandard_parameters
-            //     .unwrap_or_default()
-            //     .into(),
-        };
-
-        let snapshotter = CompressedArchiveSnapshotter::new(
-            digests_dir.clone(),
-            PathBuf::from("/tmp/mithril_test/unpack"),
-            algorithm,
-            logger.clone(),
-        )?;
-
-        Self::new_with_snapshotter(
-            aggregator_url_prefix,
-            uploaders,
-            Arc::new(snapshotter),
-            compression_algorithm,
-            network,
-            digests_dir,
-            immutable_file_digest_mapper,
-            logger,
-        )
-    }
-
-    pub fn new_with_snapshotter(
         aggregator_url_prefix: SanitizedUrlWithTrailingSlash,
         uploaders: Vec<Arc<dyn DigestFileUploader>>,
         snapshotter: Arc<dyn Snapshotter>,
@@ -165,7 +124,6 @@ impl DigestArtifactBuilder {
             snapshotter,
             compression_algorithm,
             network,
-            archive_dir: digests_dir.clone(),
             digests_dir,
             immutable_file_digest_mapper,
             logger: logger.new_with_component_name::<Self>(),
@@ -183,12 +141,14 @@ impl DigestArtifactBuilder {
         fs::remove_file(&digest_path).with_context(|| {
             format!("Could not remove digest file: '{}'", digest_path.display())
         })?;
-        fs::remove_file(&digest_archive_file_path).with_context(|| {
-            format!(
-                "Could not remove digest archive file: '{}'",
-                digest_archive_file_path.display()
-            )
-        })?;
+        if digest_archive_file_path.exists() {
+            fs::remove_file(&digest_archive_file_path).with_context(|| {
+                format!(
+                    "Could not remove digest archive file: '{}'",
+                    digest_archive_file_path.display()
+                )
+            })?;
+        }
 
         let size = file_metadata
             .with_context(|| {
@@ -206,12 +166,11 @@ impl DigestArtifactBuilder {
     }
 
     fn create_archive_file(&self, beacon: &CardanoDbBeacon) -> Result<PathBuf, anyhow::Error> {
-        let digest_archive_file_path = Self::get_digests_file_path(
-            &self.archive_dir,
+        let digest_archive_file_path = PathBuf::from(Self::get_digests_file_name(
             &self.network,
             beacon,
             &self.compression_algorithm.tar_file_extension(),
-        );
+        ));
         let digest_file_name =
             PathBuf::from(Self::get_digests_file_name(&self.network, beacon, "json"));
         self.snapshotter
@@ -326,7 +285,11 @@ mod tests {
     };
 
     use crate::{
-        immutable_file_digest_mapper::MockImmutableFileDigestMapper, test_tools::TestLogger,
+        immutable_file_digest_mapper::MockImmutableFileDigestMapper,
+        services::{
+            CompressedArchiveSnapshotter, DumbSnapshotter, SnapshotterCompressionAlgorithm,
+        },
+        test_tools::TestLogger,
     };
     use anyhow::anyhow;
     use flate2::read::GzDecoder;
@@ -338,6 +301,7 @@ mod tests {
     };
     use mockall::predicate::eq;
     use tar::Archive;
+    use uuid::Uuid;
 
     use super::*;
 
@@ -378,6 +342,7 @@ mod tests {
         let builder = DigestArtifactBuilder::new(
             SanitizedUrlWithTrailingSlash::parse("https://aggregator/").unwrap(),
             vec![],
+            Arc::new(DumbSnapshotter::new()),
             CompressionAlgorithm::Gzip,
             CardanoNetwork::DevNet(123),
             temp_dir,
@@ -407,6 +372,7 @@ mod tests {
         let builder = DigestArtifactBuilder::new(
             SanitizedUrlWithTrailingSlash::parse("https://aggregator/").unwrap(),
             vec![],
+            Arc::new(DumbSnapshotter::new()),
             CompressionAlgorithm::Gzip,
             CardanoNetwork::DevNet(123),
             temp_dir,
@@ -438,6 +404,7 @@ mod tests {
         let builder = DigestArtifactBuilder::new(
             SanitizedUrlWithTrailingSlash::parse("https://aggregator/").unwrap(),
             vec![Arc::new(uploader)],
+            Arc::new(DumbSnapshotter::new()),
             CompressionAlgorithm::Gzip,
             CardanoNetwork::DevNet(123),
             PathBuf::from("/tmp/whatever"),
@@ -460,6 +427,7 @@ mod tests {
         let builder = DigestArtifactBuilder::new(
             SanitizedUrlWithTrailingSlash::parse("https://aggregator/").unwrap(),
             vec![Arc::new(uploader)],
+            Arc::new(DumbSnapshotter::new()),
             CompressionAlgorithm::Gzip,
             CardanoNetwork::DevNet(123),
             PathBuf::from("/tmp/whatever"),
@@ -491,6 +459,7 @@ mod tests {
         let builder = DigestArtifactBuilder::new(
             SanitizedUrlWithTrailingSlash::parse("https://aggregator/").unwrap(),
             uploaders,
+            Arc::new(DumbSnapshotter::new()),
             CompressionAlgorithm::Gzip,
             CardanoNetwork::DevNet(123),
             PathBuf::from("/tmp/whatever"),
@@ -529,6 +498,7 @@ mod tests {
         let builder = DigestArtifactBuilder::new(
             SanitizedUrlWithTrailingSlash::parse("https://aggregator/").unwrap(),
             uploaders,
+            Arc::new(DumbSnapshotter::new()),
             CompressionAlgorithm::Gzip,
             CardanoNetwork::DevNet(123),
             PathBuf::from("/tmp/whatever"),
@@ -576,6 +546,7 @@ mod tests {
         let builder = DigestArtifactBuilder::new(
             SanitizedUrlWithTrailingSlash::parse("https://aggregator/").unwrap(),
             vec![],
+            Arc::new(DumbSnapshotter::new()),
             CompressionAlgorithm::Gzip,
             CardanoNetwork::DevNet(123),
             temp_dir,
@@ -603,55 +574,11 @@ mod tests {
         );
     }
 
-    // #[tokio::test]
-    // async fn upload_should_call_upload_with_created_digest_file_and_delete_the_file() {
-    //     let digests_dir = TempDir::create("digest", current_function!());
-    //     let mut immutable_file_digest_mapper = MockImmutableFileDigestMapper::new();
-    //     immutable_file_digest_mapper
-    //         .expect_get_immutable_file_digest_map()
-    //         .returning(|| Ok(BTreeMap::new()));
-
-    //     let mut digest_file_uploader = MockDigestFileUploader::new();
-
-    //     let beacon = CardanoDbBeacon::new(3, 456);
-    //     let network = CardanoNetwork::DevNet(24);
-    //     let digest_file =
-    //         DigestArtifactBuilder::get_digests_file_path(&digests_dir, &network, &beacon, "json");
-
-    //     let digest_file_clone = digest_file.clone();
-    //     digest_file_uploader
-    //         .expect_upload()
-    //         .withf(move |path, algorithm| {
-    //             path == digest_file_clone && path.exists() && algorithm.is_none()
-    //         })
-    //         .times(1)
-    //         .return_once(|_, _| {
-    //             Ok(DigestLocation::CloudStorage {
-    //                 uri: "an_uri".to_string(),
-    //                 compression_algorithm: None,
-    //             })
-    //         });
-
-    //     let builder = DigestArtifactBuilder::new(
-    //         SanitizedUrlWithTrailingSlash::parse("https://aggregator/").unwrap(),
-    //         vec![Arc::new(digest_file_uploader)],
-    //         CompressionAlgorithm::Gzip,
-    //         network,
-    //         digests_dir,
-    //         Arc::new(immutable_file_digest_mapper),
-    //         TestLogger::stdout(),
-    //     )
-    //     .unwrap();
-
-    //     let _locations = builder.upload(&beacon).await.unwrap();
-
-    //     assert!(!digest_file.exists());
-    // }
-
     #[tokio::test]
     async fn upload_should_call_upload_with_created_digest_archive_file_and_delete_the_file() {
         let tmp_dir = TempDir::create("digest", current_function!());
         let digests_dir = tmp_dir.join("digest");
+        let digests_archive_dir = tmp_dir.join("archive");
         let mut immutable_file_digest_mapper = MockImmutableFileDigestMapper::new();
         immutable_file_digest_mapper
             .expect_get_immutable_file_digest_map()
@@ -665,26 +592,31 @@ mod tests {
         let network = CardanoNetwork::DevNet(24);
         let digest_file_name =
             DigestArtifactBuilder::get_digests_file_name(&network, &beacon, "json");
-        let digest_archive_file = DigestArtifactBuilder::get_digests_file_path(
-            &digests_dir,
+        let archive_path = DigestArtifactBuilder::get_digests_file_path(
+            &digests_archive_dir,
             &network,
             &beacon,
             &compression_algorithm.tar_file_extension(),
         );
+        let digest_archive_file = PathBuf::from(DigestArtifactBuilder::get_digests_file_name(
+            &network,
+            &beacon,
+            &compression_algorithm.tar_file_extension(),
+        ));
 
         digest_file_uploader
             .expect_upload()
             .with(eq(digest_archive_file), eq(None))
             .times(1)
-            .return_once(move |archive_path, _| {
+            .return_once(move |_, _| {
                 assert!(
                     archive_path.exists(),
                     "Path to upload should exist: {}",
                     archive_path.display()
                 );
 
                 let unpack_dir = tmp_dir.join("unpack");
-                unpack_archive(archive_path, &unpack_dir);
+                unpack_archive(&archive_path, &unpack_dir);
 
                 let unpack_digest_file = unpack_dir.join(digest_file_name);
                 assert!(unpack_digest_file.is_file());
@@ -695,9 +627,19 @@ mod tests {
                 })
             });
 
+        let mut snapshotter = CompressedArchiveSnapshotter::new(
+            digests_dir.clone(),
+            digests_archive_dir,
+            SnapshotterCompressionAlgorithm::Gzip,
+            TestLogger::stdout(),
+        )
+        .unwrap();
+        snapshotter.set_sub_temp_dir(Uuid::new_v4().to_string());
+
         let builder = DigestArtifactBuilder::new(
             SanitizedUrlWithTrailingSlash::parse("https://aggregator/").unwrap(),
             vec![Arc::new(digest_file_uploader)],
+            Arc::new(snapshotter),
             compression_algorithm,
             network,
             digests_dir.clone(),