@@ -7,6 +7,7 @@ module Spago.Command.Fetch
   , getTransitiveDeps
   , getTransitiveDepsFromRegistry
   , getWorkspacePackageDeps
+  , fetchPackagesToLocalCache
   , run
   , toAllDependencies
   , writeNewLockfile
@@ -83,7 +84,7 @@ run :: forall a. FetchOpts -> Spago (FetchEnv a) PackageTransitiveDeps
 run { packages: packagesRequestedToInstall, ensureRanges, isTest, isRepl } = do
   logDebug $ "Requested to install these packages: " <> printJson (CJ.array PackageName.codec) packagesRequestedToInstall
 
-  { workspace: currentWorkspace, offline } <- ask
+  { workspace: currentWorkspace } <- ask
 
   let
     getPackageConfigPath errorMessageEnd = do
@@ -192,95 +193,103 @@ run { packages: packagesRequestedToInstall, ensureRanges, isTest, isRepl } = do
 
   -- then for every package we have we try to download it, and copy it in the local cache
   logInfo "Downloading dependencies..."
-
-  parallelise $ (flip map) (Map.toUnfoldable depsToFetch :: Array (Tuple PackageName Package)) \(Tuple name package) -> do
-    let localPackageLocation = Config.getPackageLocation name package
-    -- first of all, we check if we have the package in the local cache. If so, we don't even do the work
-    unlessM (FS.exists localPackageLocation) case package of
-      GitPackage gitPackage -> getGitPackageInLocalCache name gitPackage
-      RegistryVersion v -> do
-        -- if the version comes from the registry then we have a longer list of things to do
-        let versionString = Registry.Version.print v
-        let packageVersion = PackageName.print name <> "@" <> versionString
-        -- get the metadata for the package, so we have access to the hash and other info
-        metadata <- Registry.getMetadata name
-        case (metadata >>= (\(Metadata meta) -> Either.note "Didn't find version in the metadata file" $ Map.lookup v meta.published)) of
-          Left err -> die $ "Couldn't read metadata, reason:\n" <> err
-          Right versionMetadata -> do
-            logDebug $ "Metadata read: " <> printJson Metadata.publishedMetadataCodec versionMetadata
-            -- then check if we have a tarball cached. If not, download it
-            let globalCachePackagePath = Path.concat [ Paths.globalCachePath, "packages", PackageName.print name ]
-            let archivePath = Path.concat [ globalCachePackagePath, versionString <> ".tar.gz" ]
-            FS.mkdirp globalCachePackagePath
-            -- We need to see if the tarball is there, and if we can decompress it.
-            -- This is because if Spago is killed while it's writing the tar, then it might leave it corrupted.
-            -- By checking that it's broken we can try to redownload it here.
-            tarExists <- FS.exists archivePath
-            -- unpack the tars in a temp folder, then move to local cache
-            let tarInnerFolder = PackageName.print name <> "-" <> Version.print v
-            tempDir <- mkTemp
-            FS.mkdirp tempDir
-            tarIsGood <-
-              if tarExists then do
-                logDebug $ "Trying to unpack archive to temp folder: " <> tempDir
-                map (either (const false) (const true)) $ liftEffect $ Tar.extract { filename: archivePath, cwd: tempDir }
-              else
-                pure false
-            case tarExists, tarIsGood, offline of
-              true, true, _ -> pure unit -- Tar exists and is good, and we already unpacked it. Happy days!
-              _, _, Offline -> die $ "Package " <> packageVersion <> " is not in the local cache, and Spago is running in offline mode - can't make progress."
-              _, _, Online -> do
-                let packageUrl = "https://packages.registry.purescript.org/" <> PackageName.print name <> "/" <> versionString <> ".tar.gz"
-                logInfo $ "Fetching package " <> packageVersion
-                response <- liftAff $ withBackoff' do
-                  res <- Http.request
-                    ( Http.defaultRequest
-                        { method = Left Method.GET
-                        , responseFormat = Response.arrayBuffer
-                        , url = packageUrl
-                        }
-                    )
-                  -- If we get a 503, we want the backoff to kick in, so we wait here and we'll eventually be retried
-                  case res of
-                    Right { status } | status == StatusCode 503 -> Aff.delay (Aff.Milliseconds 30_000.0)
-                    _ -> pure unit
-                  pure res
-                case response of
-                  Nothing -> die $ "Couldn't reach the registry at " <> packageUrl
-                  Just (Left err) -> die $ "Couldn't fetch package " <> packageVersion <> ":\n" <> Http.printError err
-                  Just (Right { status, body }) | status /= StatusCode 200 -> do
-                    (buf :: Buffer) <- liftEffect $ Buffer.fromArrayBuffer body
-                    bodyString <- liftEffect $ Buffer.toString Encoding.UTF8 buf
-                    die $ "Couldn't fetch package " <> packageVersion <> ", status was not ok " <> show status <> ", got answer:\n" <> bodyString
-                  Just (Right r@{ body: archiveArrayBuffer }) -> do
-                    logDebug $ "Got status: " <> show r.status
-                    -- check the size and hash of the tar against the metadata
-                    archiveBuffer <- liftEffect $ Buffer.fromArrayBuffer archiveArrayBuffer
-                    archiveSize <- liftEffect $ Buffer.size archiveBuffer
-                    archiveSha <- liftEffect $ Sha256.hashBuffer archiveBuffer
-                    unless (Int.toNumber archiveSize == versionMetadata.bytes) do
-                      die $ "Archive fetched for " <> packageVersion <> " has a different size (" <> show archiveSize <> ") than expected (" <> show versionMetadata.bytes <> ")"
-                    unless (archiveSha == versionMetadata.hash) do
-                      die $ "Archive fetched for " <> packageVersion <> " has a different hash (" <> Sha256.print archiveSha <> ") than expected (" <> Sha256.print versionMetadata.hash <> ")"
-                    -- if everything's alright we stash the tar in the global cache
-                    logDebug $ "Fetched archive for " <> packageVersion <> ", saving it in the global cache: " <> archivePath
-                    FS.writeFile archivePath archiveBuffer
-                    logDebug $ "Unpacking archive to temp folder: " <> tempDir
-                    (liftEffect $ Tar.extract { filename: archivePath, cwd: tempDir }) >>= case _ of
-                      Right _ -> pure unit
-                      Left err -> die [ "Failed to decode downloaded package " <> packageVersion <> ", error:", show err ]
-                    logDebug $ "Moving extracted file to local cache:" <> localPackageLocation
-                    FS.moveSync { src: (Path.concat [ tempDir, tarInnerFolder ]), dst: localPackageLocation }
-      -- Local package, no work to be done
-      LocalPackage _ -> pure unit
-      WorkspacePackage _ -> pure unit
+  fetchPackagesToLocalCache depsToFetch
 
   -- We return the dependencies, going through the lockfile write if we need to
   -- (we return them from inside there because we need to update the commit hashes)
   case workspace.packageSet.lockfile of
     Right _lockfile -> pure dependencies
     Left reason -> writeNewLockfile reason dependencies
 
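+-- | Download the given packages into the local cache, skipping those already
+-- | cached. Registry tarballs are checked against the metadata's size and hash.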
+fetchPackagesToLocalCache :: ∀ a. Map PackageName Package -> Spago (FetchEnv a) Unit
+fetchPackagesToLocalCache packages = do
+  { offline } <- ask
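+  -- downloads run in parallel; each package is written to its own local-cache folder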
+  parallelise $ packages # Map.toUnfoldable <#> \(Tuple name package) -> do
+    let localPackageLocation = Config.getPackageLocation name package
+    -- first of all, we check if we have the package in the local cache. If so, we don't even do the work
+    unlessM (FS.exists localPackageLocation) case package of
+      GitPackage gitPackage -> getGitPackageInLocalCache name gitPackage
+      RegistryVersion v -> do
+        -- if the version comes from the registry then we have a longer list of things to do
+        let versionString = Registry.Version.print v
+        let packageVersion = PackageName.print name <> "@" <> versionString
+        -- get the metadata for the package, so we have access to the hash and other info
+        metadata <- Registry.getMetadata name
+        case (metadata >>= (\(Metadata meta) -> Either.note "Didn't find version in the metadata file" $ Map.lookup v meta.published)) of
+          Left err -> die $ "Couldn't read metadata, reason:\n" <> err
+          Right versionMetadata -> do
+            logDebug $ "Metadata read: " <> printJson Metadata.publishedMetadataCodec versionMetadata
+            -- then check if we have a tarball cached. If not, download it
+            let globalCachePackagePath = Path.concat [ Paths.globalCachePath, "packages", PackageName.print name ]
+            let archivePath = Path.concat [ globalCachePackagePath, versionString <> ".tar.gz" ]
+            FS.mkdirp globalCachePackagePath
+            -- We need to see if the tarball is there, and if we can decompress it.
+            -- This is because if Spago is killed while it's writing the tar, then it might leave it corrupted.
+            -- By checking that it's broken we can try to redownload it here.
+            tarExists <- FS.exists archivePath
+            -- unpack the tars in a temp folder, then move to local cache
+            let tarInnerFolder = PackageName.print name <> "-" <> Version.print v
+            tempDir <- mkTemp
+            FS.mkdirp tempDir
+            tarIsGood <-
+              if tarExists then do
+                logDebug $ "Trying to unpack archive to temp folder: " <> tempDir
+                map (either (const false) (const true)) $ liftEffect $ Tar.extract { filename: archivePath, cwd: tempDir }
+              else
+                pure false
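+            -- three possibilities: the cached tar is good and already unpacked; no usable tar while offline; otherwise (re)download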
+            case tarExists, tarIsGood, offline of
+              true, true, _ -> pure unit -- Tar exists and is good, and we already unpacked it. Happy days!
+              _, _, Offline -> die $ "Package " <> packageVersion <> " is not in the local cache, and Spago is running in offline mode - can't make progress."
+              _, _, Online -> do
+                let packageUrl = "https://packages.registry.purescript.org/" <> PackageName.print name <> "/" <> versionString <> ".tar.gz"
+                logInfo $ "Fetching package " <> packageVersion
+                response <- liftAff $ withBackoff' do
+                  res <- Http.request
+                    ( Http.defaultRequest
+                        { method = Left Method.GET
+                        , responseFormat = Response.arrayBuffer
+                        , url = packageUrl
+                        }
+                    )
+                  -- If we get a 503, we want the backoff to kick in, so we wait here and we'll eventually be retried
+                  case res of
+                    Right { status } | status == StatusCode 503 -> Aff.delay (Aff.Milliseconds 30_000.0)
+                    _ -> pure unit
+                  pure res
+                case response of
+                  Nothing -> die $ "Couldn't reach the registry at " <> packageUrl
+                  Just (Left err) -> die $ "Couldn't fetch package " <> packageVersion <> ":\n" <> Http.printError err
+                  Just (Right { status, body }) | status /= StatusCode 200 -> do
+                    (buf :: Buffer) <- liftEffect $ Buffer.fromArrayBuffer body
+                    bodyString <- liftEffect $ Buffer.toString Encoding.UTF8 buf
+                    die $ "Couldn't fetch package " <> packageVersion <> ", status was not ok " <> show status <> ", got answer:\n" <> bodyString
+                  Just (Right r@{ body: archiveArrayBuffer }) -> do
+                    logDebug $ "Got status: " <> show r.status
+                    -- check the size and hash of the tar against the metadata
+                    archiveBuffer <- liftEffect $ Buffer.fromArrayBuffer archiveArrayBuffer
+                    archiveSize <- liftEffect $ Buffer.size archiveBuffer
+                    archiveSha <- liftEffect $ Sha256.hashBuffer archiveBuffer
+                    unless (Int.toNumber archiveSize == versionMetadata.bytes) do
+                      die $ "Archive fetched for " <> packageVersion <> " has a different size (" <> show archiveSize <> ") than expected (" <> show versionMetadata.bytes <> ")"
+                    unless (archiveSha == versionMetadata.hash) do
+                      die $ "Archive fetched for " <> packageVersion <> " has a different hash (" <> Sha256.print archiveSha <> ") than expected (" <> Sha256.print versionMetadata.hash <> ")"
+                    -- if everything's alright we stash the tar in the global cache
+                    logDebug $ "Fetched archive for " <> packageVersion <> ", saving it in the global cache: " <> archivePath
+                    FS.writeFile archivePath archiveBuffer
+                    logDebug $ "Unpacking archive to temp folder: " <> tempDir
+                    (liftEffect $ Tar.extract { filename: archivePath, cwd: tempDir }) >>= case _ of
+                      Right _ -> pure unit
+                      Left err -> die [ "Failed to decode downloaded package " <> packageVersion <> ", error:", show err ]
+                    logDebug $ "Moving extracted file to local cache:" <> localPackageLocation
+                    FS.moveSync { src: (Path.concat [ tempDir, tarInnerFolder ]), dst: localPackageLocation }
+      -- Local package, no work to be done
+      LocalPackage _ -> pure unit
+      WorkspacePackage _ -> pure unit
+
 lookupInCache :: ∀ a k v. Ord k => k -> Ref.Ref (Map k v) -> Spago a (Maybe v)
 lookupInCache key cacheRef = liftEffect $ Ref.read cacheRef >>= Map.lookup key >>> pure
 