
Commit e79c98b

In spago publish, when registry solver returns package versions not in local cache, install them before trying to build (#1283)
1 parent c4c9641 commit e79c98b

File tree

13 files changed: +262 -102 lines


CHANGELOG.md

Lines changed: 2 additions & 0 deletions

@@ -36,6 +36,8 @@ Other improvements:
   that command, not root help.
 - a new `spago init --subpackage foo` option to initialize a sub-project in the
   current workspace.
+- #1110: `spago publish` will now install packages returned by the registry solver
+  before trying to build with them.
 
 ## [0.21.0] - 2023-05-04
 

core/src/Prelude.purs

Lines changed: 1 addition & 1 deletion

@@ -34,7 +34,7 @@ import Data.Show.Generic (genericShow) as Extra
 import Data.Traversable (for, traverse) as Extra
 import Data.TraversableWithIndex (forWithIndex) as Extra
 import Data.Tuple (Tuple(..), fst, snd) as Extra
-import Data.Tuple.Nested ((/\)) as Extra
+import Data.Tuple.Nested (type (/\), (/\)) as Extra
 import Effect (Effect) as Extra
 import Effect.Aff (Aff, Error) as Extra
 import Effect.Aff.Class (class MonadAff, liftAff) as Extra
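For context, re-exporting `type (/\)` alongside the value-level `(/\)` lets modules that pull in this shared Prelude write nested-tuple types with the same operator they use for values. A minimal standalone sketch (not part of this commit, module and names made up):

```purescript
module TupleExample where

-- `type (/\)` is the type-level alias for Tuple; `(/\)` builds the value.
import Data.Tuple.Nested (type (/\), (/\))

pair :: Int /\ String -- equivalent to Tuple Int String
pair = 1 /\ "one"
```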

src/Spago/Command/Fetch.purs

Lines changed: 88 additions & 83 deletions

@@ -7,6 +7,7 @@ module Spago.Command.Fetch
   , getTransitiveDeps
   , getTransitiveDepsFromRegistry
   , getWorkspacePackageDeps
+  , fetchPackagesToLocalCache
   , run
   , toAllDependencies
   , writeNewLockfile

@@ -83,7 +84,7 @@ run :: forall a. FetchOpts -> Spago (FetchEnv a) PackageTransitiveDeps
 run { packages: packagesRequestedToInstall, ensureRanges, isTest, isRepl } = do
   logDebug $ "Requested to install these packages: " <> printJson (CJ.array PackageName.codec) packagesRequestedToInstall
 
-  { workspace: currentWorkspace, offline } <- ask
+  { workspace: currentWorkspace } <- ask
 
   let
     getPackageConfigPath errorMessageEnd = do

@@ -192,95 +193,99 @@ run { packages: packagesRequestedToInstall, ensureRanges, isTest, isRepl } = do
 
   -- then for every package we have we try to download it, and copy it in the local cache
   logInfo "Downloading dependencies..."
-
-  parallelise $ (flip map) (Map.toUnfoldable depsToFetch :: Array (Tuple PackageName Package)) \(Tuple name package) -> do
-    let localPackageLocation = Config.getPackageLocation name package
-    -- first of all, we check if we have the package in the local cache. If so, we don't even do the work
-    unlessM (FS.exists localPackageLocation) case package of
-      GitPackage gitPackage -> getGitPackageInLocalCache name gitPackage
-      RegistryVersion v -> do
-        -- if the version comes from the registry then we have a longer list of things to do
-        let versionString = Registry.Version.print v
-        let packageVersion = PackageName.print name <> "@" <> versionString
-        -- get the metadata for the package, so we have access to the hash and other info
-        metadata <- Registry.getMetadata name
-        case (metadata >>= (\(Metadata meta) -> Either.note "Didn't find version in the metadata file" $ Map.lookup v meta.published)) of
-          Left err -> die $ "Couldn't read metadata, reason:\n " <> err
-          Right versionMetadata -> do
-            logDebug $ "Metadata read: " <> printJson Metadata.publishedMetadataCodec versionMetadata
-            -- then check if we have a tarball cached. If not, download it
-            let globalCachePackagePath = Path.concat [ Paths.globalCachePath, "packages", PackageName.print name ]
-            let archivePath = Path.concat [ globalCachePackagePath, versionString <> ".tar.gz" ]
-            FS.mkdirp globalCachePackagePath
-            -- We need to see if the tarball is there, and if we can decompress it.
-            -- This is because if Spago is killed while it's writing the tar, then it might leave it corrupted.
-            -- By checking that it's broken we can try to redownload it here.
-            tarExists <- FS.exists archivePath
-            -- unpack the tars in a temp folder, then move to local cache
-            let tarInnerFolder = PackageName.print name <> "-" <> Version.print v
-            tempDir <- mkTemp
-            FS.mkdirp tempDir
-            tarIsGood <-
-              if tarExists then do
-                logDebug $ "Trying to unpack archive to temp folder: " <> tempDir
-                map (either (const false) (const true)) $ liftEffect $ Tar.extract { filename: archivePath, cwd: tempDir }
-              else
-                pure false
-            case tarExists, tarIsGood, offline of
-              true, true, _ -> pure unit -- Tar exists and is good, and we already unpacked it. Happy days!
-              _, _, Offline -> die $ "Package " <> packageVersion <> " is not in the local cache, and Spago is running in offline mode - can't make progress."
-              _, _, Online -> do
-                let packageUrl = "https://packages.registry.purescript.org/" <> PackageName.print name <> "/" <> versionString <> ".tar.gz"
-                logInfo $ "Fetching package " <> packageVersion
-                response <- liftAff $ withBackoff' do
-                  res <- Http.request
-                    ( Http.defaultRequest
-                        { method = Left Method.GET
-                        , responseFormat = Response.arrayBuffer
-                        , url = packageUrl
-                        }
-                    )
-                  -- If we get a 503, we want the backoff to kick in, so we wait here and we'll eventually be retried
-                  case res of
-                    Right { status } | status == StatusCode 503 -> Aff.delay (Aff.Milliseconds 30_000.0)
-                    _ -> pure unit
-                  pure res
-                case response of
-                  Nothing -> die $ "Couldn't reach the registry at " <> packageUrl
-                  Just (Left err) -> die $ "Couldn't fetch package " <> packageVersion <> ":\n " <> Http.printError err
-                  Just (Right { status, body }) | status /= StatusCode 200 -> do
-                    (buf :: Buffer) <- liftEffect $ Buffer.fromArrayBuffer body
-                    bodyString <- liftEffect $ Buffer.toString Encoding.UTF8 buf
-                    die $ "Couldn't fetch package " <> packageVersion <> ", status was not ok " <> show status <> ", got answer:\n " <> bodyString
-                  Just (Right r@{ body: archiveArrayBuffer }) -> do
-                    logDebug $ "Got status: " <> show r.status
-                    -- check the size and hash of the tar against the metadata
-                    archiveBuffer <- liftEffect $ Buffer.fromArrayBuffer archiveArrayBuffer
-                    archiveSize <- liftEffect $ Buffer.size archiveBuffer
-                    archiveSha <- liftEffect $ Sha256.hashBuffer archiveBuffer
-                    unless (Int.toNumber archiveSize == versionMetadata.bytes) do
-                      die $ "Archive fetched for " <> packageVersion <> " has a different size (" <> show archiveSize <> ") than expected (" <> show versionMetadata.bytes <> ")"
-                    unless (archiveSha == versionMetadata.hash) do
-                      die $ "Archive fetched for " <> packageVersion <> " has a different hash (" <> Sha256.print archiveSha <> ") than expected (" <> Sha256.print versionMetadata.hash <> ")"
-                    -- if everything's alright we stash the tar in the global cache
-                    logDebug $ "Fetched archive for " <> packageVersion <> ", saving it in the global cache: " <> archivePath
-                    FS.writeFile archivePath archiveBuffer
-                    logDebug $ "Unpacking archive to temp folder: " <> tempDir
-                    (liftEffect $ Tar.extract { filename: archivePath, cwd: tempDir }) >>= case _ of
-                      Right _ -> pure unit
-                      Left err -> die [ "Failed to decode downloaded package " <> packageVersion <> ", error:", show err ]
-                    logDebug $ "Moving extracted file to local cache:" <> localPackageLocation
-                    FS.moveSync { src: (Path.concat [ tempDir, tarInnerFolder ]), dst: localPackageLocation }
-      -- Local package, no work to be done
-      LocalPackage _ -> pure unit
-      WorkspacePackage _ -> pure unit
+  fetchPackagesToLocalCache depsToFetch
 
   -- We return the dependencies, going through the lockfile write if we need to
   -- (we return them from inside there because we need to update the commit hashes)
   case workspace.packageSet.lockfile of
     Right _lockfile -> pure dependencies
     Left reason -> writeNewLockfile reason dependencies
 
+fetchPackagesToLocalCache :: ∀ a. Map PackageName Package -> Spago (FetchEnv a) Unit
+fetchPackagesToLocalCache packages = do
+  { offline } <- ask
+  parallelise $ packages # Map.toUnfoldable <#> \(Tuple name package) -> do
+    let localPackageLocation = Config.getPackageLocation name package
+    -- first of all, we check if we have the package in the local cache. If so, we don't even do the work
+    unlessM (FS.exists localPackageLocation) case package of
+      GitPackage gitPackage -> getGitPackageInLocalCache name gitPackage
+      RegistryVersion v -> do
+        -- if the version comes from the registry then we have a longer list of things to do
+        let versionString = Registry.Version.print v
+        let packageVersion = PackageName.print name <> "@" <> versionString
+        -- get the metadata for the package, so we have access to the hash and other info
+        metadata <- Registry.getMetadata name
+        case (metadata >>= (\(Metadata meta) -> Either.note "Didn't find version in the metadata file" $ Map.lookup v meta.published)) of
+          Left err -> die $ "Couldn't read metadata, reason:\n " <> err
+          Right versionMetadata -> do
+            logDebug $ "Metadata read: " <> printJson Metadata.publishedMetadataCodec versionMetadata
+            -- then check if we have a tarball cached. If not, download it
+            let globalCachePackagePath = Path.concat [ Paths.globalCachePath, "packages", PackageName.print name ]
+            let archivePath = Path.concat [ globalCachePackagePath, versionString <> ".tar.gz" ]
+            FS.mkdirp globalCachePackagePath
+            -- We need to see if the tarball is there, and if we can decompress it.
+            -- This is because if Spago is killed while it's writing the tar, then it might leave it corrupted.
+            -- By checking that it's broken we can try to redownload it here.
+            tarExists <- FS.exists archivePath
+            -- unpack the tars in a temp folder, then move to local cache
+            let tarInnerFolder = PackageName.print name <> "-" <> Version.print v
+            tempDir <- mkTemp
+            FS.mkdirp tempDir
+            tarIsGood <-
+              if tarExists then do
+                logDebug $ "Trying to unpack archive to temp folder: " <> tempDir
+                map (either (const false) (const true)) $ liftEffect $ Tar.extract { filename: archivePath, cwd: tempDir }
+              else
+                pure false
+            case tarExists, tarIsGood, offline of
+              true, true, _ -> pure unit -- Tar exists and is good, and we already unpacked it. Happy days!
+              _, _, Offline -> die $ "Package " <> packageVersion <> " is not in the local cache, and Spago is running in offline mode - can't make progress."
+              _, _, Online -> do
+                let packageUrl = "https://packages.registry.purescript.org/" <> PackageName.print name <> "/" <> versionString <> ".tar.gz"
+                logInfo $ "Fetching package " <> packageVersion
+                response <- liftAff $ withBackoff' do
+                  res <- Http.request
+                    ( Http.defaultRequest
+                        { method = Left Method.GET
+                        , responseFormat = Response.arrayBuffer
+                        , url = packageUrl
+                        }
+                    )
+                  -- If we get a 503, we want the backoff to kick in, so we wait here and we'll eventually be retried
+                  case res of
+                    Right { status } | status == StatusCode 503 -> Aff.delay (Aff.Milliseconds 30_000.0)
+                    _ -> pure unit
+                  pure res
+                case response of
+                  Nothing -> die $ "Couldn't reach the registry at " <> packageUrl
+                  Just (Left err) -> die $ "Couldn't fetch package " <> packageVersion <> ":\n " <> Http.printError err
+                  Just (Right { status, body }) | status /= StatusCode 200 -> do
+                    (buf :: Buffer) <- liftEffect $ Buffer.fromArrayBuffer body
+                    bodyString <- liftEffect $ Buffer.toString Encoding.UTF8 buf
+                    die $ "Couldn't fetch package " <> packageVersion <> ", status was not ok " <> show status <> ", got answer:\n " <> bodyString
+                  Just (Right r@{ body: archiveArrayBuffer }) -> do
+                    logDebug $ "Got status: " <> show r.status
+                    -- check the size and hash of the tar against the metadata
+                    archiveBuffer <- liftEffect $ Buffer.fromArrayBuffer archiveArrayBuffer
+                    archiveSize <- liftEffect $ Buffer.size archiveBuffer
+                    archiveSha <- liftEffect $ Sha256.hashBuffer archiveBuffer
+                    unless (Int.toNumber archiveSize == versionMetadata.bytes) do
+                      die $ "Archive fetched for " <> packageVersion <> " has a different size (" <> show archiveSize <> ") than expected (" <> show versionMetadata.bytes <> ")"
+                    unless (archiveSha == versionMetadata.hash) do
+                      die $ "Archive fetched for " <> packageVersion <> " has a different hash (" <> Sha256.print archiveSha <> ") than expected (" <> Sha256.print versionMetadata.hash <> ")"
+                    -- if everything's alright we stash the tar in the global cache
+                    logDebug $ "Fetched archive for " <> packageVersion <> ", saving it in the global cache: " <> archivePath
+                    FS.writeFile archivePath archiveBuffer
+                    logDebug $ "Unpacking archive to temp folder: " <> tempDir
+                    (liftEffect $ Tar.extract { filename: archivePath, cwd: tempDir }) >>= case _ of
+                      Right _ -> pure unit
+                      Left err -> die [ "Failed to decode downloaded package " <> packageVersion <> ", error:", show err ]
+                    logDebug $ "Moving extracted file to local cache:" <> localPackageLocation
+                    FS.moveSync { src: (Path.concat [ tempDir, tarInnerFolder ]), dst: localPackageLocation }
+      -- Local package, no work to be done
+      LocalPackage _ -> pure unit
+      WorkspacePackage _ -> pure unit
+
 lookupInCache :: ∀ a k v. Ord k => k -> Ref.Ref (Map k v) -> Spago a (Maybe v)
 lookupInCache key cacheRef = liftEffect $ Ref.read cacheRef >>= Map.lookup key >>> pure
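One detail worth isolating from the block above: a tarball sitting in the global cache is not trusted just because it exists, since Spago may have been killed while writing it, so a trial extraction decides whether to re-download. A stripped-down sketch of that check (hypothetical helper name; it assumes the same `FS`, `Tar`, and `Spago` imports as the module above, with `FilePath` from Node.Path):

```purescript
-- True only if the archive exists AND extracts cleanly into tempDir.
-- A truncated or corrupted tarball fails the trial extraction and is
-- treated as missing, so the caller re-downloads it.
isCachedTarUsable :: ∀ a. FilePath -> FilePath -> Spago a Boolean
isCachedTarUsable archivePath tempDir = do
  tarExists <- FS.exists archivePath
  if tarExists then
    map (either (const false) (const true))
      $ liftEffect
      $ Tar.extract { filename: archivePath, cwd: tempDir }
  else
    pure false
```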

src/Spago/Command/Ls.purs

Lines changed: 0 additions & 1 deletion

@@ -18,7 +18,6 @@ import Data.Codec.JSON.Record as CJ.Record
 import Data.Foldable (elem)
 import Data.Map (filterKeys)
 import Data.Map as Map
-import Data.Tuple.Nested (type (/\))
 import Record as Record
 import Registry.Internal.Codec (packageMap)
 import Registry.PackageName as PackageName

src/Spago/Command/Publish.purs

Lines changed: 1 addition & 0 deletions

@@ -354,6 +354,7 @@ publish _args = do
       -- from the solver (this is because the build might terminate the process, and we shall output the errors first)
       logInfo "Building again with the build plan from the solver..."
       let buildPlanDependencies = map Config.RegistryVersion resolutions
+      Fetch.fetchPackagesToLocalCache buildPlanDependencies
       builtAgain <- runBuild { selected, dependencies: Map.singleton selected.package.name { core: buildPlanDependencies, test: Map.empty } }
         ( Build.run
             { depsOnly: false
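The rebuild at this point uses the registry solver's resolutions rather than the workspace package set, so the solved versions may never have been downloaded into `.spago/p/`; that is the gap the added call closes. A condensed sketch of the idea with a hypothetical helper name (only `Fetch.fetchPackagesToLocalCache` is new in this commit; the qualified imports are assumed to match what Publish.purs already uses):

```purescript
-- resolutions :: Map PackageName Version, as returned by the registry solver.
-- Turn each version into a RegistryVersion package and make sure it exists
-- on disk before the second build tries to read its sources.
fetchSolverPlan :: ∀ a. Map PackageName Version -> Spago (Fetch.FetchEnv a) (Map PackageName Package)
fetchSolverPlan resolutions = do
  let buildPlanDependencies = map Config.RegistryVersion resolutions
  Fetch.fetchPackagesToLocalCache buildPlanDependencies
  pure buildPlanDependencies
```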

Lines changed: 2 additions & 0 deletions

@@ -0,0 +1,2 @@
+.spago/
+output/

Lines changed: 30 additions & 0 deletions

@@ -0,0 +1,30 @@
+Reading Spago workspace configuration...
+
+✓ Selecting package to build: aaa
+
+Downloading dependencies...
+Building...
+           Src   Lib   All
+Warnings   0     0     0
+Errors     0     0     0
+
+✓ Build succeeded.
+
+Passed preliminary checks.
+‼ Spago is in offline mode - not pushing the git tag v0.0.1
+Building again with the build plan from the solver...
+Building...
+[1 of 3] Compiling Effect.Console
+[3 of 3] Compiling Lib
+[2 of 3] Compiling Effect.Class.Console
+           Src   Lib   All
+Warnings   0     0     0
+Errors     0     0     0
+
+✓ Build succeeded.
+
+
+✓ Ready for publishing. Calling the registry..
+
+
+✘ Spago is offline - not able to call the Registry.

Lines changed: 39 additions & 0 deletions

@@ -0,0 +1,39 @@
+Reading Spago workspace configuration...
+
+✓ Selecting package to build: aaa
+
+Downloading dependencies...
+Building...
+[1 of 3] Compiling Effect.Console
+[3 of 3] Compiling Lib
+[2 of 3] Compiling Effect.Class.Console
+           Src   Lib   All
+Warnings   0     0     0
+Errors     0     0     0
+
+✓ Build succeeded.
+
+Passed preliminary checks.
+‼ Spago is in offline mode - not pushing the git tag v0.0.1
+Building again with the build plan from the solver...
+Building...
+[1 of 3] Compiling Effect.Console
+[ERROR 1/1 MissingFFIModule] .spago/p/console-6.1.0/src/Effect/Console.purs:1:1
+
+  v
+  1  module Effect.Console where
+  2
+  3  import Control.Bind (discard, bind, pure)
+  ...
+  86  result <- inner
+  87  groupEnd
+  88  pure result
+  ^
+
+  The foreign module implementation for module Effect.Console is missing.
+
+           Src   Lib   All
+Warnings   0     0     0
+Errors     0     1     1
+
+✘ Failed to build.

Lines changed: 18 additions & 0 deletions

@@ -0,0 +1,18 @@
+package:
+  name: aaa
+  dependencies:
+    - console: ">=6.0.0 <7.0.0"
+    - effect: ">=4.0.0 <5.0.0"
+    - prelude: ">=6.0.1 <7.0.0"
+    - maybe: ">=6.0.0 <7.0.0"
+  publish:
+    version: 0.0.1
+    license: MIT
+    location:
+      githubOwner: purescript
+      githubRepo: aaa
+workspace:
+  packageSet:
+    registry: 58.0.0
+  extraPackages:
+    console: "6.0.0"

Lines changed: 10 additions & 0 deletions

@@ -0,0 +1,10 @@
+module Lib where
+
+import Prelude
+
+import Data.Maybe (Maybe(..), isNothing)
+import Effect (Effect)
+import Effect.Console (logShow)
+
+printNothing :: Effect Unit
+printNothing = logShow $ isNothing Nothing
