@@ -185,14 +185,6 @@ func (rs GSUtilRemoteCache) ExistingPackages(pkgs []*Package) (map[*Package]stru
 	return existingPackages, nil
 }
 
-// Helper function to get all possible artifact URLs for a package
-func getPackageArtifactURLs(bucketName, version string) []string {
-	return []string{
-		fmt.Sprintf("gs://%s/%s.tar.gz", bucketName, version),
-		fmt.Sprintf("gs://%s/%s.tar", bucketName, version),
-	}
-}
-
 // Download makes a best-effort attempt at downloading previously cached build artifacts
 func (rs GSUtilRemoteCache) Download(dst Cache, pkgs []*Package) error {
 	fmt.Printf("☁️ downloading %d cached build artifacts\n", len(pkgs))
@@ -360,27 +352,22 @@ func (rs *S3RemoteCache) ExistingPackages(pkgs []*Package) (map[*Package]struct{
 			defer wg.Done()
 
 			// Check for .tar.gz first
-			if stat, err := rs.hasObject(ctx, keys.gzKey); err != nil {
-				log.WithField("bucket", rs.BucketName).WithField("key", keys.gzKey).
-					Debugf("Failed to check for remote cached object: %s", err)
-			} else if stat {
+			if rs.checkObjectExists(ctx, keys.gzKey) {
 				mu.Lock()
 				existingPackages[pkg] = struct{}{}
 				mu.Unlock()
 				return
 			}
 
 			// If .tar.gz doesn't exist, check for .tar
-			if stat, err := rs.hasObject(ctx, keys.tarKey); err != nil {
-				log.WithField("bucket", rs.BucketName).WithField("key", keys.tarKey).
-					Debugf("Failed to check for remote cached object: %s", err)
-			} else if stat {
+			if rs.checkObjectExists(ctx, keys.tarKey) {
 				mu.Lock()
 				existingPackages[pkg] = struct{}{}
 				mu.Unlock()
 			}
 		}(pkg, keys)
 	}
+
 	wg.Wait()
 
 	log.WithField("bucket", rs.BucketName).
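
Note on the replacement call: checkObjectExists is not defined in this hunk, so the following is only a minimal sketch of what such a helper could look like, assuming it lives alongside S3RemoteCache and wraps the existing hasObject method while keeping the debug logging the removed branches had. The receiver type, the hasObject signature, and the log fields are taken from the diff above; the body itself is an assumption, not the PR's actual implementation.

// Hypothetical sketch only: assumes checkObjectExists wraps hasObject in the
// same package as S3RemoteCache.
func (rs *S3RemoteCache) checkObjectExists(ctx context.Context, key string) bool {
	exists, err := rs.hasObject(ctx, key)
	if err != nil {
		// Mirror the removed branches: log at debug level and treat the
		// object as missing so the caller falls back to building the package.
		log.WithField("bucket", rs.BucketName).WithField("key", key).
			Debugf("Failed to check for remote cached object: %s", err)
		return false
	}
	return exists
}

Folding the error case into a plain false keeps the existence check best-effort: a failed lookup is treated as a cache miss rather than interrupting the package scan, which matches the behaviour of the code being replaced.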