diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
new file mode 100644
index 0000000000..37459703e9
--- /dev/null
+++ b/.git-blame-ignore-revs
@@ -0,0 +1,2 @@
+# ran runic on the code base
+a84228360d6cff568a55911733e830cdf1c492da
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000000..d7a3ed5357
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,11 @@
+# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
+version: 2
+updates:
+ - package-ecosystem: "github-actions"
+ directory: "/" # Location of package manifests
+ schedule:
+ interval: "weekly"
+ groups:
+ all-actions:
+ patterns:
+ - "*"
diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml
new file mode 100644
index 0000000000..8376d060fb
--- /dev/null
+++ b/.github/workflows/check.yml
@@ -0,0 +1,30 @@
+name: Code checks
+
+on:
+ pull_request:
+ push:
+ branches: ["master"]
+
+jobs:
+
+ pre-commit:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # v3.0.1
+ env:
+ # Skip runic-pre-commit since we use runic-action below instead
+ SKIP: runic
+
+ runic:
+ name: "Runic"
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: julia-actions/setup-julia@v2
+ with:
+ version: '1.11'
+ - uses: julia-actions/cache@v2
+ - uses: fredrikekre/runic-action@v1
+ with:
+ version: "1.4" # Keep version in sync with .pre-commit-config.yaml
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 11f1643502..332c33f7a5 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -4,10 +4,12 @@ on:
branches:
- 'master'
- 'release-*'
+ - 'backports-release-*'
push:
branches:
- 'master'
- 'release-*'
+ - 'backports-release-*'
tags: '*'
defaults:
run:
@@ -71,6 +73,7 @@ jobs:
- uses: julia-actions/julia-runtest@v1
with:
coverage: true
+ depwarn: error
env:
JULIA_PKG_SERVER: ${{ matrix.pkg-server }}
JULIA_TEST_VERBOSE_LOGS_DIR: ${{ github.workspace }}
@@ -82,9 +85,10 @@ jobs:
- uses: julia-actions/julia-processcoverage@v1
env:
JULIA_PKG_SERVER: ${{ matrix.pkg-server }}
- - uses: codecov/codecov-action@v3
+ - uses: codecov/codecov-action@v5
with:
- file: lcov.info
+ files: lcov.info
+ token: ${{ secrets.CODECOV_TOKEN }}
docs:
runs-on: ubuntu-latest
timeout-minutes: 60
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000000..68066c2cc2
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,17 @@
+repos:
+ - repo: 'https://github.com/pre-commit/pre-commit-hooks'
+ rev: v5.0.0
+ hooks:
+ - id: check-added-large-files
+ - id: check-case-conflict
+ # - id: check-toml # we have tomls with invalid syntax for tests
+ - id: check-yaml
+ - id: end-of-file-fixer
+ - id: mixed-line-ending
+ - id: trailing-whitespace
+ - repo: 'https://github.com/fredrikekre/runic-pre-commit'
+ rev: v2.0.1
+ hooks:
+ - id: runic
+ additional_dependencies:
+ - 'Runic@1.4' # Keep version in sync with .github/workflows/check.yml
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 056a6f1f36..6b9c934159 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,9 +1,69 @@
+Pkg v1.13 Release Notes
+=======================
+
+- Project.toml environments now support a `readonly` field to mark environments as read-only, preventing modifications.
+ ([#4284])
+- `Pkg.build` now supports an `allow_reresolve` keyword argument to control whether the build process can re-resolve
+ package versions, similar to the existing option for `Pkg.test`. ([#3329])
+- Packages are now automatically added to `[sources]` when they are added by url or devved. ([#4225])
+- `update` now shows a helpful tip when trying to upgrade a specific package that can be upgraded but is held back
+ because it's part of a less optimal resolver solution ([#4266])
+- `Pkg.status` now displays yanked packages with a `[yanked]` indicator and shows a warning when yanked packages are
+ present. `Pkg.resolve` errors also display warnings about yanked packages that are not resolvable. ([#4310])
+- Added `pkg> compat --current` command to automatically populate missing compat entries with the currently resolved
+ package versions. Use `pkg> compat --current` for all packages or `pkg> compat Foo --current` for specific packages.
+ ([#3266])
+- Added `Pkg.precompile() do` block syntax to delay autoprecompilation until after multiple operations complete,
+ improving efficiency when performing several environment changes. ([#4262])
+- Added `Pkg.autoprecompilation_enabled(state::Bool)` to globally enable or disable automatic precompilation for Pkg
+ operations. ([#4262])
+- Implemented atomic TOML writes to prevent data corruption when Pkg operations are interrupted or multiple processes
+ write simultaneously. All TOML files are now written atomically using temporary files and atomic moves. ([#4293])
+- Implemented lazy loading for RegistryInstance to significantly improve startup performance for operations that don't
+ require full registry data. This reduces `Pkg.instantiate()` time by approximately 60% in many cases. ([#4304])
+- Added support for directly adding git submodules via `Pkg.add(path="/path/to/git-submodule.jl")`. ([#3344])
+- Enhanced REPL user experience by automatically detecting and stripping accidental leading `]` characters in commands.
+ ([#3122])
+- Improved tip messages to show REPL mode syntax when operating in REPL mode. ([#3854])
+- Enhanced error handling with more descriptive error messages when operations fail on empty URLs during git repository
+ installation or registry discovery. ([#4282])
+- Improved error messages for invalid compat entries to provide better guidance for fixing them. ([#4302])
+- Added warnings when attempting to add local paths that contain dirty git repositories. ([#4309])
+- Enhanced package parsing to better handle complex URLs and paths with branch/tag/subdir specifiers. ([#4299])
+- Improved artifact download behavior to only attempt downloads from the Pkg server when the package is registered on
+ that server's registries. ([#4297])
+- Added comprehensive documentation page about depots, including depot layouts and configuration. ([#2245])
+- Enhanced error handling for packages missing from registries or manifests with more informative messages. ([#4303])
+- Added more robust error handling when packages have revisions but no source information. ([#4311])
+- Enhanced registry status reporting with more detailed information. ([#4300])
+- Fixed various edge cases in package resolution and manifest handling. ([#4307], [#4308], [#4312])
+- Improved handling of path separators across different operating systems. ([#4305])
+- Added better error messages when accessing private PackageSpec.repo field. ([#4170])
+
Pkg v1.12 Release Notes
=======================
- Pkg now has support for "workspaces" which is a way to resolve multiple project files into a single manifest.
- The functions `Pkg.status`, `Pkg.why`, `Pkg.instantiate`, `Pkg.precompile` (and their REPL variants) have been updated
- to take a `workspace` option. Read more about this feature in the manual about the TOML-files.
+ The functions `Pkg.status`, `Pkg.why`, `Pkg.instantiate`, `Pkg.precompile` (and their REPL variants) have been
+ updated to take a `workspace` option, with fixes for workspace path collection and package resolution in workspace
+ environments. Read more about this feature in the manual about the TOML-files. ([#3841], [#4229])
+- Pkg now supports "apps" which are Julia packages that can be run directly from the terminal after installation.
+ Apps can be defined in a package's Project.toml and installed via Pkg. Apps now support multiple apps per package
+ via submodules, allowing packages to define multiple command-line applications, with enhanced functionality including
+ update capabilities and better handling of already installed apps. ([#3772], [#4277], [#4263])
+- `status` now shows when different versions/sources of dependencies are loaded than that which is expected by the
+ manifest ([#4109])
+- When adding or developing a package that exists in the `[weakdeps]` section, it is now automatically removed from
+ weak dependencies and added as a regular dependency. ([#3865])
+- Enhanced fuzzy matching algorithm for package name suggestions with improved multi-factor scoring for better package
+ name suggestions. ([#4287])
+- The Pkg REPL now supports GitHub pull request URLs, allowing direct package installation from PRs via
+ `pkg> add https://github.com/Org/Package.jl/pull/123` ([#4295])
+- Improved git repository cloning performance by changing from `refs/*` to `refs/heads/*` to speed up operations on
+ repositories with many branches. ([#2330])
+- Improved REPL command parsing to handle leading whitespace with comma-separated packages. ([#4274])
+- Improved error messages when providing incorrect package UUIDs. ([#4270])
+- Added confirmation prompts before removing compat entries to prevent accidental deletions. ([#4254])
Pkg v1.11 Release Notes
=======================
@@ -21,7 +81,7 @@ Pkg v1.10 Release Notes
=======================
Pkg v1.9 Release Notes
-=======================
+======================
- New functionality: `Pkg.why` and `pkg> why` to show why a package is inside the environment (shows all "paths" to a package starting at the direct dependencies).
- When code coverage tracking is enabled for `Pkg.test` the new path-specific code-coverage option is used to limit coverage
@@ -83,6 +143,16 @@ Pkg v1.7 Release Notes
- The `mode` keyword for `PackageSpec` has been removed ([#2454]).
+[#4225]: https://github.com/JuliaLang/Pkg.jl/issues/4225
+[#4284]: https://github.com/JuliaLang/Pkg.jl/issues/4284
+[#3526]: https://github.com/JuliaLang/Pkg.jl/issues/3526
+[#3708]: https://github.com/JuliaLang/Pkg.jl/issues/3708
+[#3732]: https://github.com/JuliaLang/Pkg.jl/issues/3732
+[#3772]: https://github.com/JuliaLang/Pkg.jl/issues/3772
+[#3783]: https://github.com/JuliaLang/Pkg.jl/issues/3783
+[#3841]: https://github.com/JuliaLang/Pkg.jl/issues/3841
+[#3865]: https://github.com/JuliaLang/Pkg.jl/issues/3865
+[#4109]: https://github.com/JuliaLang/Pkg.jl/issues/4109
[#2284]: https://github.com/JuliaLang/Pkg.jl/issues/2284
[#2431]: https://github.com/JuliaLang/Pkg.jl/issues/2431
[#2432]: https://github.com/JuliaLang/Pkg.jl/issues/2432
@@ -101,3 +171,36 @@ Pkg v1.7 Release Notes
[#2995]: https://github.com/JuliaLang/Pkg.jl/issues/2995
[#3002]: https://github.com/JuliaLang/Pkg.jl/issues/3002
[#3021]: https://github.com/JuliaLang/Pkg.jl/issues/3021
+[#3266]: https://github.com/JuliaLang/Pkg.jl/pull/3266
+[#4266]: https://github.com/JuliaLang/Pkg.jl/pull/4266
+[#4310]: https://github.com/JuliaLang/Pkg.jl/pull/4310
+[#3329]: https://github.com/JuliaLang/Pkg.jl/pull/3329
+[#4262]: https://github.com/JuliaLang/Pkg.jl/pull/4262
+[#4293]: https://github.com/JuliaLang/Pkg.jl/pull/4293
+[#4304]: https://github.com/JuliaLang/Pkg.jl/pull/4304
+[#3344]: https://github.com/JuliaLang/Pkg.jl/pull/3344
+[#2330]: https://github.com/JuliaLang/Pkg.jl/pull/2330
+[#3122]: https://github.com/JuliaLang/Pkg.jl/pull/3122
+[#3854]: https://github.com/JuliaLang/Pkg.jl/pull/3854
+[#4282]: https://github.com/JuliaLang/Pkg.jl/pull/4282
+[#4302]: https://github.com/JuliaLang/Pkg.jl/pull/4302
+[#4309]: https://github.com/JuliaLang/Pkg.jl/pull/4309
+[#4299]: https://github.com/JuliaLang/Pkg.jl/pull/4299
+[#4295]: https://github.com/JuliaLang/Pkg.jl/pull/4295
+[#4277]: https://github.com/JuliaLang/Pkg.jl/pull/4277
+[#4297]: https://github.com/JuliaLang/Pkg.jl/pull/4297
+[#2245]: https://github.com/JuliaLang/Pkg.jl/pull/2245
+[#4303]: https://github.com/JuliaLang/Pkg.jl/pull/4303
+[#4254]: https://github.com/JuliaLang/Pkg.jl/pull/4254
+[#4270]: https://github.com/JuliaLang/Pkg.jl/pull/4270
+[#4263]: https://github.com/JuliaLang/Pkg.jl/pull/4263
+[#4229]: https://github.com/JuliaLang/Pkg.jl/pull/4229
+[#4274]: https://github.com/JuliaLang/Pkg.jl/pull/4274
+[#4311]: https://github.com/JuliaLang/Pkg.jl/pull/4311
+[#4300]: https://github.com/JuliaLang/Pkg.jl/pull/4300
+[#4307]: https://github.com/JuliaLang/Pkg.jl/pull/4307
+[#4308]: https://github.com/JuliaLang/Pkg.jl/pull/4308
+[#4312]: https://github.com/JuliaLang/Pkg.jl/pull/4312
+[#4305]: https://github.com/JuliaLang/Pkg.jl/pull/4305
+[#4170]: https://github.com/JuliaLang/Pkg.jl/pull/4170
+[#4287]: https://github.com/JuliaLang/Pkg.jl/pull/4287
diff --git a/Project.toml b/Project.toml
index 4ddbbefd00..cdc10d1205 100644
--- a/Project.toml
+++ b/Project.toml
@@ -3,7 +3,7 @@ uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
keywords = ["package management"]
license = "MIT"
desc = "The next-generation Julia package manager."
-version = "1.12.0"
+version = "1.13.0"
[workspace]
projects = ["test", "docs"]
@@ -32,4 +32,20 @@ REPL = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb"
REPLExt = "REPL"
[compat]
+Artifacts = "1.11"
+Dates = "1.11"
+Downloads = "1.6"
+FileWatching = "1.11"
+LibGit2 = "1.11"
+Libdl = "1.11"
+Logging = "1.11"
+Markdown = "1.11"
+Printf = "1.11"
+Random = "1.11"
+REPL = "1.11"
+SHA = "0.7"
+TOML = "1"
+Tar = "1.10"
+UUIDs = "1.11"
julia = "1.12"
+p7zip_jll = "17.5"
diff --git a/README.md b/README.md
index 5cc370c4c9..ad4ec9f25c 100644
--- a/README.md
+++ b/README.md
@@ -13,12 +13,29 @@ If you want to develop this package do the following steps:
- Make a fork and then clone the repo locally on your computer
- Change the current directory to the Pkg repo you just cloned and start julia with `julia --project`.
- `import Pkg` will now load the files in the cloned repo instead of the Pkg stdlib.
-- To test your changes, simply do `include("test/runtests.jl")`.
+- To test your changes, simply do `Pkg.test()`.
If you need to build Julia from source with a Git checkout of Pkg, then instead use `make DEPS_GIT=Pkg` when building Julia. The `Pkg` repo is in `stdlib/Pkg`, and created initially with a detached `HEAD`. If you're doing this from a pre-existing Julia repository, you may need to `make clean` beforehand.
If you need to build Julia from source with Git checkouts of two or more stdlibs, please see the instructions in the [`Building Julia from source with a Git checkout of a stdlib`](https://github.com/JuliaLang/julia/blob/master/doc/src/devdocs/build/build.md#building-julia-from-source-with-a-git-checkout-of-a-stdlib) section of the [`doc/src/devdocs/build/build.md`](https://github.com/JuliaLang/julia/blob/master/doc/src/devdocs/build/build.md) file within the Julia devdocs.
+## Pre-commit hooks
+
+This repository uses pre-commit hooks to automatically check and format code before commits. The hooks perform various checks including:
+
+- File size and case conflict validation
+- YAML syntax checking
+- Trailing whitespace removal and line ending fixes
+- Julia code formatting with Runic
+
+To install and use the pre-commit hooks:
+
+1. Install pre-commit: `pip install pre-commit` (or use your system's package manager)
+2. Install the hooks: `pre-commit install` from the root of the repository
+3. Run on all files: `pre-commit run --all-files` from the root of the repository
+
+Once installed, the hooks will run automatically on each commit. You can also run them manually anytime with `pre-commit run`.
+
## Synchronization with the Julia repo
To check which commit julia master uses see [JuliaLang/julia/stdlib/Pkg.version](https://github.com/JuliaLang/julia/blob/master/stdlib/Pkg.version).
diff --git a/contrib/list_missing_pkg_tags.jl b/contrib/list_missing_pkg_tags.jl
new file mode 100644
index 0000000000..93309889aa
--- /dev/null
+++ b/contrib/list_missing_pkg_tags.jl
@@ -0,0 +1,89 @@
+using LibGit2
+
+const JULIA_REPO_URL = "https://github.com/JuliaLang/julia.git"
+const JULIA_REPO_DIR = "julia"
+const PKG_VERSION_PATH = "stdlib/Pkg.version"
+const PKG_REPO_URL = "https://github.com/JuliaLang/Pkg.jl.git"
+const PKG_REPO_DIR = "Pkg.jl"
+
+function checkout_or_update_repo(url, dir)
+ return if isdir(dir)
+ println("Updating existing repository: $dir")
+ repo = LibGit2.GitRepo(dir)
+ LibGit2.fetch(repo)
+ else
+ println("Cloning repository: $url")
+ LibGit2.clone(url, dir)
+ end
+end
+
+function get_tags(repo)
+ refs = LibGit2.ref_list(repo)
+ tags = filter(ref -> startswith(ref, "refs/tags/"), refs)
+ return sort!(replace.(tags, "refs/tags/" => ""))
+end
+
+function is_stable_v1_release(tag)
+ return occursin(r"^v\d+\.\d+\.\d+$", tag) && VersionNumber(tag) >= v"1.0.0"
+end
+
+function extract_pkg_sha1(text::AbstractString)
+ m = match(r"PKG_SHA1\s*=\s*([a-f0-9]{40})", text)
+ return m !== nothing ? m[1] : nothing
+end
+
+function get_commit_hash_for_pkg_version(repo, tag)
+ return try
+ tag_ref = LibGit2.GitReference(repo, "refs/tags/" * tag)
+ LibGit2.checkout!(repo, string(LibGit2.GitHash(LibGit2.peel(tag_ref))))
+ version_file = joinpath(JULIA_REPO_DIR, PKG_VERSION_PATH)
+ if isfile(version_file)
+ return extract_pkg_sha1(readchomp(version_file))
+ else
+ println("Warning: Pkg.version file missing for tag $tag")
+ return nothing
+ end
+ catch
+ println("Error processing tag $tag")
+ rethrow()
+ end
+end
+
+tempdir = mktempdir()
+cd(tempdir) do
+ # Update Julia repo
+ checkout_or_update_repo(JULIA_REPO_URL, JULIA_REPO_DIR)
+ julia_repo = LibGit2.GitRepo(JULIA_REPO_DIR)
+
+ # Get Julia tags, filtering only stable releases
+ julia_tags = filter(is_stable_v1_release, get_tags(julia_repo))
+ version_commit_map = Dict{String, String}()
+
+ for tag in julia_tags
+ println("Processing Julia tag: $tag")
+ commit_hash = get_commit_hash_for_pkg_version(julia_repo, tag)
+ if commit_hash !== nothing
+ version_commit_map[tag] = commit_hash
+ end
+ end
+
+ # Update Pkg.jl repo
+ checkout_or_update_repo(PKG_REPO_URL, PKG_REPO_DIR)
+ pkg_repo = LibGit2.GitRepo(PKG_REPO_DIR)
+
+ # Get existing tags in Pkg.jl
+ pkg_tags = Set(get_tags(pkg_repo))
+
+ # Filter out versions that already exist
+ missing_versions = filter(v -> v ∉ pkg_tags, collect(keys(version_commit_map)))
+
+ # Sort versions numerically
+ sort!(missing_versions, by = VersionNumber)
+
+ # Generate `git tag` commands
+ println("\nGit tag commands for missing Pkg.jl versions:")
+ for version in missing_versions
+ commit = version_commit_map[version]
+ println("git tag $version $commit")
+ end
+end
diff --git a/docs/NEWS-update.jl b/docs/NEWS-update.jl
index 3812e9e437..d0ca10d391 100644
--- a/docs/NEWS-update.jl
+++ b/docs/NEWS-update.jl
@@ -7,11 +7,11 @@ s = read(NEWS, String)
m = match(r"\[#[0-9]+\]:", s)
if m !== nothing
- s = s[1:m.offset-1]
+ s = s[1:(m.offset - 1)]
end
footnote(n) = "[#$n]: https://github.com/JuliaLang/Pkg.jl/issues/$n"
-N = map(m -> parse(Int,m.captures[1]), eachmatch(r"\[#([0-9]+)\]", s))
+N = map(m -> parse(Int, m.captures[1]), eachmatch(r"\[#([0-9]+)\]", s))
foots = join(map(footnote, sort!(unique(N))), "\n")
open(NEWS, "w") do f
diff --git a/docs/generate.jl b/docs/generate.jl
index fa4af617ef..3d227f1374 100644
--- a/docs/generate.jl
+++ b/docs/generate.jl
@@ -4,38 +4,42 @@
function generate(io, command)
cmd_nospace = replace(command, " " => "-")
- println(io, """
- ```@raw html
-
-
-
- ```
- ```@eval
- using Pkg
- Dict(Pkg.REPLMode.canonical_names())["$(command)"].help
- ```
- ```@raw html
-
-
- ```
- """)
+ return println(
+ io, """
+ ```@raw html
+
+
+
+ ```
+ ```@eval
+ using Pkg
+ Dict(Pkg.REPLMode.canonical_names())["$(command)"].help
+ ```
+ ```@raw html
+
+
+ ```
+ """
+ )
end
function generate()
io = IOBuffer()
- println(io, """
+ println(
+ io, """
# [**11.** REPL Mode Reference](@id REPL-Mode-Reference)
This section describes available commands in the Pkg REPL.
The Pkg REPL mode is mostly meant for interactive use,
and for non-interactive use it is recommended to use the
functional API, see [API Reference](@ref API-Reference).
- """)
+ """
+ )
# list commands
println(io, "## `package` commands")
foreach(command -> generate(io, command), ["add", "build", "compat", "develop", "free", "generate", "pin", "remove", "test", "update"])
diff --git a/docs/make.jl b/docs/make.jl
index be6905de5a..6b38dad0d7 100644
--- a/docs/make.jl
+++ b/docs/make.jl
@@ -9,7 +9,7 @@ const formats = Any[
Documenter.HTML(
prettyurls = get(ENV, "CI", nothing) == "true",
canonical = "https://julialang.github.io/Pkg.jl/v1/",
- assets = ["assets/custom.css"],
+ assets = ["assets/custom.css", "assets/favicon.ico"],
),
]
if "pdf" in ARGS
@@ -17,7 +17,7 @@ if "pdf" in ARGS
end
# setup for doctesting
-DocMeta.setdocmeta!(Pkg.BinaryPlatforms, :DocTestSetup, :(using Base.BinaryPlatforms); recursive=true)
+DocMeta.setdocmeta!(Pkg.BinaryPlatforms, :DocTestSetup, :(using Base.BinaryPlatforms); recursive = true)
# Run doctests first and disable them in makedocs
Documenter.doctest(joinpath(@__DIR__, "src"), [Pkg])
@@ -35,6 +35,7 @@ makedocs(
"managing-packages.md",
"environments.md",
"creating-packages.md",
+ "apps.md",
"compatibility.md",
"registries.md",
"artifacts.md",
@@ -42,6 +43,8 @@ makedocs(
"toml-files.md",
"repl.md",
"api.md",
+ "protocol.md",
+ "depots.md",
],
)
diff --git a/docs/src/api.md b/docs/src/api.md
index 61979453b9..d87169077f 100644
--- a/docs/src/api.md
+++ b/docs/src/api.md
@@ -1,4 +1,4 @@
-# [**12.** API Reference](@id API-Reference)
+# [**13.** API Reference](@id API-Reference)
This section describes the functional API for interacting with Pkg.jl.
It is recommended to use the functional API, rather than the Pkg REPL mode,
@@ -39,6 +39,7 @@ Pkg.gc
Pkg.status
Pkg.compat
Pkg.precompile
+Pkg.autoprecompilation_enabled
Pkg.offline
Pkg.why
Pkg.dependencies
@@ -79,3 +80,10 @@ Pkg.Artifacts.ensure_artifact_installed
Pkg.Artifacts.ensure_all_artifacts_installed
Pkg.Artifacts.archive_artifact
```
+
+## [Package Server Authentication Hooks](@id Package-Server-Authentication-Hooks)
+
+```@docs
+Pkg.PlatformEngines.register_auth_error_handler
+Pkg.PlatformEngines.deregister_auth_error_handler
+```
diff --git a/docs/src/apps.md b/docs/src/apps.md
new file mode 100644
index 0000000000..00b12cada9
--- /dev/null
+++ b/docs/src/apps.md
@@ -0,0 +1,100 @@
+# [**6.** Apps](@id Apps)
+
+!!! note
+ The app support in Pkg is currently considered experimental and some functionality and API may change.
+
+ Some inconveniences that can be encountered are:
+ - You need to manually make `~/.julia/bin` available on the PATH environment.
+ - The path to the julia executable used is the same as the one used to install the app. If this
+ julia installation gets removed, you might need to reinstall the app.
+
+Apps are Julia packages that are intended to be run as "standalone programs" (by e.g. typing the name of the app in the terminal possibly together with some arguments or flags/options).
+This is in contrast to most Julia packages that are used as "libraries" and are loaded by other files or in the Julia REPL.
+
+## Creating a Julia app
+
+A Julia app is structured similar to a standard Julia library with the following additions:
+
+- A `@main` entry point in the package module (see the [Julia help on `@main`](https://docs.julialang.org/en/v1/manual/command-line-interface/#The-Main.main-entry-point) for details)
+- An `[apps]` section in the `Project.toml` file listing the executable names that the package provides.
+
+A very simple example of an app that prints the reversed input arguments would be:
+
+```julia
+# src/MyReverseApp.jl
+module MyReverseApp
+
+function (@main)(ARGS)
+ for arg in ARGS
+ print(stdout, reverse(arg), " ")
+ end
+ return
+end
+
+end # module
+```
+
+```toml
+# Project.toml
+
+# standard fields here
+
+[apps]
+reverse = {}
+```
+The empty table `{}` is to allow for giving metadata about the app but it is currently unused.
+
+After installing this app one could run:
+
+```
+$ reverse some input string
+ emos tupni gnirts
+```
+
+directly in the terminal.
+
+## Multiple Apps per Package
+
+A single package can define multiple apps by using submodules. Each app can have its own entry point in a different submodule of the package.
+
+```julia
+# src/MyMultiApp.jl
+module MyMultiApp
+
+function (@main)(ARGS)
+ println("Main app: ", join(ARGS, " "))
+end
+
+include("CLI.jl")
+
+end # module
+```
+
+```julia
+# src/CLI.jl
+module CLI
+
+function (@main)(ARGS)
+ println("CLI submodule: ", join(ARGS, " "))
+end
+
+end # module CLI
+```
+
+```toml
+# Project.toml
+
+# standard fields here
+
+[apps]
+main-app = {}
+cli-app = { submodule = "CLI" }
+```
+
+This will create two executables:
+- `main-app` that runs `julia -m MyMultiApp`
+- `cli-app` that runs `julia -m MyMultiApp.CLI`
+
+## Installing Julia apps
+
+The installation of Julia apps is similar to [installing Julia libraries](@ref Managing-Packages) but instead of using e.g. `Pkg.add` or `pkg> add` one uses `Pkg.Apps.add` or `pkg> app add` (`develop` is also available).
diff --git a/docs/src/artifacts.md b/docs/src/artifacts.md
index 66a55f99f5..d5fe5f38b7 100644
--- a/docs/src/artifacts.md
+++ b/docs/src/artifacts.md
@@ -1,4 +1,4 @@
-# [**8.** Artifacts](@id Artifacts)
+# [**9.** Artifacts](@id Artifacts)
`Pkg` can install and manage containers of data that are not Julia packages. These containers can contain platform-specific binaries, datasets, text, or any other kind of data that would be convenient to place within an immutable, life-cycled datastore.
These containers, (called "Artifacts") can be created locally, hosted anywhere, and automatically downloaded and unpacked upon installation of your Julia package.
@@ -230,7 +230,7 @@ This is deduced automatically by the `artifacts""` string macro, however, if you
!!! compat "Julia 1.7"
Pkg's extended platform selection requires at least Julia 1.7, and is considered experimental.
-New in Julia 1.6, `Platform` objects can have extended attributes applied to them, allowing artifacts to be tagged with things such as CUDA driver version compatibility, microarchitectural compatibility, julia version compatibility and more!
+New in Julia 1.7, `Platform` objects can have extended attributes applied to them, allowing artifacts to be tagged with things such as CUDA driver version compatibility, microarchitectural compatibility, julia version compatibility and more!
Note that this feature is considered experimental and may change in the future.
If you as a package developer find yourself needing this feature, please get in contact with us so it can evolve for the benefit of the whole ecosystem.
In order to support artifact selection at `Pkg.add()` time, `Pkg` will run the specially-named file `/.pkg/select_artifacts.jl`, passing the current platform triplet as the first argument.
diff --git a/docs/src/assets/favicon.ico b/docs/src/assets/favicon.ico
new file mode 100644
index 0000000000..eeb1edd944
Binary files /dev/null and b/docs/src/assets/favicon.ico differ
diff --git a/docs/src/basedocs.md b/docs/src/basedocs.md
index 7d51728ffe..9e07aa4ca9 100644
--- a/docs/src/basedocs.md
+++ b/docs/src/basedocs.md
@@ -4,7 +4,7 @@ EditURL = "https://github.com/JuliaLang/Pkg.jl/blob/master/docs/src/basedocs.md"
# Pkg
-Pkg is Julia's builtin package manager, and handles operations
+Pkg is Julia's built-in package manager, and handles operations
such as installing, updating and removing packages.
!!! note
diff --git a/docs/src/compatibility.md b/docs/src/compatibility.md
index bc1c58e3e9..dee8b05841 100644
--- a/docs/src/compatibility.md
+++ b/docs/src/compatibility.md
@@ -1,4 +1,4 @@
-# [**6.** Compatibility](@id Compatibility)
+# [**7.** Compatibility](@id Compatibility)
Compatibility refers to the ability to restrict the versions of the dependencies that your project is compatible with.
If the compatibility for a dependency is not given, the project is assumed to be compatible with all versions of that dependency.
@@ -22,7 +22,7 @@ The format of the version specifier is described in detail below.
The rules below apply to the `Project.toml` file; for registries, see [Registry Compat.toml](@ref).
!!! info
- Note that registration into Julia's General Registry requires each dependency to have a `[compat`] entry with an upper bound.
+ Note that registration into Julia's General Registry requires each dependency to have a `[compat]` entry with an upper bound.
## Version specifier format
@@ -97,7 +97,7 @@ PkgA = "~1.2.3" # [1.2.3, 1.3.0)
PkgB = "~1.2" # [1.2.0, 1.3.0)
PkgC = "~1" # [1.0.0, 2.0.0)
PkgD = "~0.2.3" # [0.2.3, 0.3.0)
-PkgE = "~0.0.3" # [0.0.3, 0.0.4)
+PkgE = "~0.0.3" # [0.0.3, 0.1.0)
PkgF = "~0.0" # [0.0.0, 0.1.0)
PkgG = "~0" # [0.0.0, 1.0.0)
```
@@ -164,7 +164,7 @@ PkgA = "0.2 - 0" # 0.2.0 - 0.*.* = [0.2.0, 1.0.0)
```
-## Fixing conflicts
+## [Fixing conflicts](@id Fixing-conflicts)
Version conflicts were introduced previously with an [example](@ref conflicts)
of a conflict arising in a package `D` used by two other packages, `B` and `C`.
diff --git a/docs/src/creating-packages.md b/docs/src/creating-packages.md
index 7bb72c2e91..4e01f0c27a 100644
--- a/docs/src/creating-packages.md
+++ b/docs/src/creating-packages.md
@@ -11,7 +11,7 @@
To generate the bare minimum files for a new package, use `pkg> generate`.
```julia-repl
-(@v1.8) pkg> generate HelloWorld
+(@v1.10) pkg> generate HelloWorld
```
This creates a new project `HelloWorld` in a subdirectory by the same name, with the following files (visualized with the external [`tree` command](https://linux.die.net/man/1/tree)):
@@ -118,7 +118,7 @@ describe about public symbols. A public symbol is a symbol that is exported from
package with the `export` keyword or marked as public with the `public` keyword. When you
change the behavior of something that was previously public so that the new
version no longer conforms to the specifications provided in the old version, you should
-adjust your package version number according to [Julia's variant on SemVer](#Version-specifier-format).
+adjust your package version number according to [Julia's variant on SemVer](@ref Version-specifier-format).
If you would like to include a symbol in your public API without exporting it into the
global namespace of folks who call `using YourPackage`, you should mark that symbol as
public with `public that_symbol`. Symbols marked as public with the `public` keyword are
@@ -127,7 +127,7 @@ just as public as those marked as public with the `export` keyword, but when fol
`YourPackage.that_symbol`.
Let's say we would like our `greet` function to be part of the public API, but not the
-`greet_alien` function. We could the write the following and release it as version `1.0.0`.
+`greet_alien` function. We could then write the following and release it as version `1.0.0`.
```julia
module HelloWorld
@@ -562,10 +562,10 @@ duplicated into `[extras]`. This is an unfortunate duplication, but without
doing this the project verifier under older Julia versions will throw an error
if it finds packages under `[compat]` that is not listed in `[extras]`.
-## Package naming rules
+## Package naming guidelines
Package names should be sensible to most Julia users, *even to those who are not domain experts*.
-The following rules apply to the `General` registry but may be useful for other package
+The following guidelines apply to the `General` registry but may be useful for other package
registries as well.
Since the `General` registry belongs to the entire community, people may have opinions about
@@ -575,8 +575,10 @@ may fit your package better.
1. Avoid jargon. In particular, avoid acronyms unless there is minimal possibility of confusion.
- * It's ok to say `USA` if you're talking about the USA.
- * It's not ok to say `PMA`, even if you're talking about positive mental attitude.
+ * It's ok for package names to contain `DNA` if you're talking about the DNA, which has a universally agreed upon definition.
+ * It's more difficult to justify package names containing the acronym `CI` for instance, which may mean continuous integration, confidence interval, etc.
+ * If there is risk of confusion it may be best to disambiguate an acronym with additional words such as a lab group or field.
+ * If your acronym is unambiguous, easily searchable, and/or unlikely to be confused across domains a good justification is often enough for approval.
2. Avoid using `Julia` in your package name or prefixing it with `Ju`.
* It is usually clear from context and to your users that the package is a Julia package.
@@ -593,6 +595,7 @@ may fit your package better.
4. Err on the side of clarity, even if clarity seems long-winded to you.
* `RandomMatrices` is a less ambiguous name than `RndMat` or `RMT`, even though the latter are shorter.
+ * Generally, package names should be at least 5 characters long, not including the `.jl` extension.
5. A less systematic name may suit a package that implements one of several possible approaches to
its domain.
@@ -621,9 +624,12 @@ may fit your package better.
there's no copyright or trademark infringement etc.)
9. Packages should follow the [Stylistic Conventions](https://docs.julialang.org/en/v1/manual/variables/#Stylistic-Conventions).
- * The package name begin with a capital letter and word separation is shown with upper camel case
+ * The package name should begin with a capital letter and word separation is shown with upper camel case
* Packages that provide the functionality of a project from another language should use the Julia convention
- * Packages that [provide pre-built libraries and executables](https://docs.binarybuilder.org/stable/jll/) can keep orignal name, but should get `_jll`as a suffix. For example `pandoc_jll` wraps pandoc. However, note that the generation and release of most JLL packages is handled by the [Yggdrasil](https://github.com/JuliaPackaging/Yggdrasil) system.
+ * Packages that [provide pre-built libraries and executables](https://docs.binarybuilder.org/stable/jll/) can keep their original name, but should get `_jll` as a suffix. For example `pandoc_jll` wraps pandoc. However, note that the generation and release of most JLL packages is handled by the [Yggdrasil](https://github.com/JuliaPackaging/Yggdrasil) system.
+
+10. For the complete list of rules for automatic merging into the General registry, see [these guidelines](https://juliaregistries.github.io/RegistryCI.jl/stable/guidelines/).
+
## Registering packages
@@ -649,3 +655,10 @@ To support the various use cases in the Julia package ecosystem, the Pkg develop
* [`Preferences.jl`](https://github.com/JuliaPackaging/Preferences.jl) allows packages to read and write preferences to the top-level `Project.toml`.
These preferences can be read at runtime or compile-time, to enable or disable different aspects of package behavior.
Packages previously would write out files to their own package directories to record options set by the user or environment, but this is highly discouraged now that `Preferences` is available.
+
+## See Also
+
+- [Managing Packages](@ref Managing-Packages) - Learn how to add, update, and manage package dependencies
+- [Working with Environments](@ref Working-with-Environments) - Understand environments and reproducible development
+- [Compatibility](@ref Compatibility) - Specify version constraints for dependencies
+- [API Reference](@ref) - Functional API for non-interactive package management
diff --git a/docs/src/depots.md b/docs/src/depots.md
new file mode 100644
index 0000000000..1d854b9367
--- /dev/null
+++ b/docs/src/depots.md
@@ -0,0 +1,55 @@
+# **15.** Depots
+
+The packages installed for a particular environment, defined in the
+files `Project.toml` and `Manifest.toml` within the directory
+structure, are not actually installed within that directory but into a
+"depot". The locations of the depots are set by the variable
+[`DEPOT_PATH`](https://docs.julialang.org/en/v1/base/constants/#Base.DEPOT_PATH).
+
+For details on the default depot locations and how they vary by installation method,
+see the [`DEPOT_PATH`](https://docs.julialang.org/en/v1/base/constants/#Base.DEPOT_PATH) documentation.
+
+Packages which are installed by a user go into the first depot and the Julia
+standard library is in the last depot.
+
+You should not need to manage the user depot directly. Pkg will automatically clean up
+the depots when packages are removed after a delay. However, you may want to manually
+remove old `.julia/compiled/` subdirectories if you have any that reside for older Julia
+versions that you no longer use (hence have not been run to tidy themselves up).
+
+## Configuring the depot path with `JULIA_DEPOT_PATH`
+
+The depot path can be configured using the `JULIA_DEPOT_PATH` environment variable,
+which is used to populate the global Julia [`DEPOT_PATH`](https://docs.julialang.org/en/v1/base/constants/#Base.DEPOT_PATH) variable
+at startup. For complete details on the behavior of this environment variable,
+see the [environment variables documentation](https://docs.julialang.org/en/v1/manual/environment-variables/#JULIA_DEPOT_PATH).
+
+Unlike the shell `PATH` variable, empty entries in `JULIA_DEPOT_PATH`
+have special behavior for easy overriding of the user depot while retaining access to system resources.
+For example, to switch the user depot to `/custom/depot` while still accessing bundled
+resources, use a trailing path separator:
+
+```bash
+export JULIA_DEPOT_PATH="/custom/depot:"
+```
+
+!!! note
+ The trailing path separator (`:` on Unix, `;` on Windows) is crucial for including
+ the default system depots, which contain the standard library and other bundled
+ resources. Without it, Julia will only use the specified depot and will have to precompile
+ standard library packages, which can be time-consuming and inefficient.
+
+## Shared depots for distributed computing
+
+When using Julia in distributed computing environments, such as high-performance computing
+(HPC) clusters, it's recommended to use a shared depot via `JULIA_DEPOT_PATH`. This allows
+multiple Julia processes to share precompiled packages and reduces redundant compilation.
+
+Since Julia v1.10, multiple processes using the same depot coordinate via pidfile locks
+to ensure only one process precompiles a package while others wait. However, due to
+the caching of native code in pkgimages since v1.9, you may need to set the `JULIA_CPU_TARGET`
+environment variable appropriately to ensure cache compatibility across different
+worker nodes with varying CPU capabilities.
+
+For more details, see the [FAQ section on distributed computing](https://docs.julialang.org/en/v1/manual/faq/#Computing-cluster)
+and the [environment variables documentation](https://docs.julialang.org/en/v1/manual/environment-variables/#JULIA_CPU_TARGET).
diff --git a/docs/src/environments.md b/docs/src/environments.md
index 54fa4e9fe9..1464f5db60 100644
--- a/docs/src/environments.md
+++ b/docs/src/environments.md
@@ -1,16 +1,16 @@
-# [**4.** Working with Environment](@id Working-with-Environments)
+# [**4.** Working with Environments](@id Working-with-Environments)
The following discusses Pkg's interaction with environments. For more on the role, environments play in code loading, including the "stack" of environments from which code can be loaded, see [this section in the Julia manual](https://docs.julialang.org/en/v1/manual/code-loading/#Environments-1).
## Creating your own environments
-So far we have added packages to the default environment at `~/.julia/environments/v1.9`. It is however easy to create other, independent, projects.
+So far we have added packages to the default environment at `~/.julia/environments/v1.10`. It is however easy to create other, independent, projects.
This approach has the benefit of allowing you to check in a `Project.toml`, and even a `Manifest.toml` if you wish, into version control (e.g. git) alongside your code.
It should be pointed out that when two projects use the same package at the same version, the content of this package is not duplicated.
In order to create a new project, create a directory for it and then activate that directory to make it the "active project", which package operations manipulate:
```julia-repl
-(@v1.9) pkg> activate MyProject
+(@v1.10) pkg> activate MyProject
Activating new environment at `~/MyProject/Project.toml`
(MyProject) pkg> st
@@ -28,7 +28,7 @@ false
Installed Example ─ v0.5.3
Updating `~/MyProject/Project.toml`
[7876af07] + Example v0.5.3
- Updating `~~/MyProject/Manifest.toml`
+ Updating `~/MyProject/Manifest.toml`
[7876af07] + Example v0.5.3
Precompiling environment...
1 dependency successfully precompiled in 2 seconds
@@ -45,7 +45,7 @@ Example = "7876af07-990d-54b4-ab0e-23690620f79a"
julia> print(read(joinpath("MyProject", "Manifest.toml"), String))
# This file is machine-generated - editing it directly is not advised
-julia_version = "1.9.4"
+julia_version = "1.10.0"
manifest_format = "2.0"
project_hash = "2ca1c6c58cb30e79e021fb54e5626c96d05d5fdc"
@@ -66,7 +66,7 @@ shell> git clone https://github.com/JuliaLang/Example.jl.git
Cloning into 'Example.jl'...
...
-(@v1.12) pkg> activate Example.jl
+(@v1.10) pkg> activate Example.jl
Activating project at `~/Example.jl`
(Example) pkg> instantiate
@@ -82,7 +82,7 @@ If you only have a `Project.toml`, a `Manifest.toml` must be generated by "resol
If you already have a resolved `Manifest.toml`, then you will still need to ensure that the packages are installed and with the correct versions. Again `instantiate` does this for you.
-In short, `instantiate` is your friend to make sure an environment is ready to use. If there's nothing to do, `instantiate` does nothing.
+In short, [`instantiate`](@ref Pkg.instantiate) is your friend to make sure an environment is ready to use. If there's nothing to do, `instantiate` does nothing.
!!! note "Specifying project on startup"
Instead of using `activate` from within Julia, you can specify the project on startup using
@@ -103,7 +103,7 @@ also want a scratch space to try out a new package, or a sandbox to resolve vers
between several incompatible packages.
```julia-repl
-(@v1.9) pkg> activate --temp # requires Julia 1.5 or later
+(@v1.10) pkg> activate --temp # requires Julia 1.5 or later
Activating new environment at `/var/folders/34/km3mmt5930gc4pzq1d08jvjw0000gn/T/jl_a31egx/Project.toml`
(jl_a31egx) pkg> add Example
@@ -117,18 +117,18 @@ between several incompatible packages.
## Shared environments
-A "shared" environment is simply an environment that exists in `~/.julia/environments`. The default `v1.9` environment is
+A "shared" environment is simply an environment that exists in `~/.julia/environments`. The default `v1.10` environment is
therefore a shared environment:
```julia-repl
-(@v1.9) pkg> st
-Status `~/.julia/environments/v1.9/Project.toml`
+(@v1.10) pkg> st
+Status `~/.julia/environments/v1.10/Project.toml`
```
Shared environments can be activated with the `--shared` flag to `activate`:
```julia-repl
-(@v1.9) pkg> activate --shared mysharedenv
+(@v1.10) pkg> activate --shared mysharedenv
Activating project at `~/.julia/environments/mysharedenv`
(@mysharedenv) pkg>
@@ -151,7 +151,7 @@ or using Pkg's precompile option, which can precompile the entire environment, o
which can be significantly faster than the code-load route above.
```julia-repl
-(@v1.9) pkg> precompile
+(@v1.10) pkg> precompile
Precompiling environment...
23 dependencies successfully precompiled in 36 seconds
```
@@ -165,11 +165,11 @@ By default, any package that is added to a project or updated in a Pkg action wi
with its dependencies.
```julia-repl
-(@v1.9) pkg> add Images
+(@v1.10) pkg> add Images
Resolving package versions...
- Updating `~/.julia/environments/v1.9/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
[916415d5] + Images v0.25.2
- Updating `~/.julia/environments/v1.9/Manifest.toml`
+ Updating `~/.julia/environments/v1.10/Manifest.toml`
...
Precompiling environment...
Progress [===================> ] 45/97
@@ -190,16 +190,53 @@ If a given package version errors during auto-precompilation, Pkg will remember
automatically tries and will skip that package with a brief warning. Manual precompilation can be used to
force these packages to be retried, as `pkg> precompile` will always retry all packages.
-To disable the auto-precompilation, set `ENV["JULIA_PKG_PRECOMPILE_AUTO"]=0`.
-
The indicators next to the package names displayed during precompilation
-indicate the status of that package's precompilation.
+indicate the status of that package's precompilation.
- `[◐, ◓, ◑, ◒]` Animated "clock" characters indicate that the package is currently being precompiled.
- `✓` A green checkmark indicates that the package has been successfully precompiled (after which that package will disappear from the list). If the checkmark is yellow it means that the package is currently loaded so the session will need to be restarted to access the version that was just precompiled.
- `?` A question mark character indicates that a `PrecompilableError` was thrown, indicating that precompilation was disallowed, i.e. `__precompile__(false)` in that package.
- `✗` A cross indicates that the package failed to precompile.
+#### Controlling Auto-precompilation
+
+Auto-precompilation can be controlled in several ways:
+
+- **Environment variable**: Set `ENV["JULIA_PKG_PRECOMPILE_AUTO"]=0` to disable auto-precompilation globally.
+- **Programmatically**: Use `Pkg.autoprecompilation_enabled(false)` to disable auto-precompilation for the current session, or `Pkg.autoprecompilation_enabled(true)` to re-enable it.
+- **Scoped control**: Use `Pkg.precompile(f, args...; kwargs...)` to execute a function `f` with auto-precompilation temporarily disabled, then automatically trigger precompilation afterward if any packages were modified during the execution.
+
+!!! compat "Julia 1.13"
+ The `Pkg.autoprecompilation_enabled()` function and `Pkg.precompile()` do-block syntax require at least Julia 1.13.
+
+For example, to add multiple packages without triggering precompilation after each one:
+
+```julia-repl
+julia> Pkg.precompile() do
+ Pkg.add("Example")
+ Pkg.dev("JSON")
+ Pkg.update("HTTP")
+ end
+ Resolving package versions...
+ ...
+Precompiling environment...
+ 14 dependencies successfully precompiled in 25 seconds
+```
+
+Or to temporarily disable auto-precompilation:
+
+```julia-repl
+julia> Pkg.autoprecompilation_enabled(false)
+false
+
+julia> Pkg.add("Example") # No precompilation happens
+ Resolving package versions...
+ ...
+
+julia> Pkg.autoprecompilation_enabled(true)
+true
+```
+
### Precompiling new versions of loaded packages
If a package that has been updated is already loaded in the session, the precompilation process will go ahead and precompile
diff --git a/docs/src/getting-started.md b/docs/src/getting-started.md
index 58693bc583..d822ebd3fa 100644
--- a/docs/src/getting-started.md
+++ b/docs/src/getting-started.md
@@ -22,18 +22,18 @@ To get back to the Julia REPL, press `Ctrl+C` or backspace (when the REPL cursor
Upon entering the Pkg REPL, you should see the following prompt:
```julia-repl
-(@v1.9) pkg>
+(@v1.10) pkg>
```
To add a package, use `add`:
```julia-repl
-(@v1.9) pkg> add Example
+(@v1.10) pkg> add Example
Resolving package versions...
Installed Example ─ v0.5.3
- Updating `~/.julia/environments/v1.9/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
[7876af07] + Example v0.5.3
- Updating `~/.julia/environments/v1.9/Manifest.toml`
+ Updating `~/.julia/environments/v1.10/Manifest.toml`
[7876af07] + Example v0.5.3
```
@@ -49,14 +49,14 @@ julia> Example.hello("friend")
We can also specify multiple packages at once to install:
```julia-repl
-(@v1.9) pkg> add JSON StaticArrays
+(@v1.10) pkg> add JSON StaticArrays
```
The `status` command (or the shorter `st` command) can be used to see installed packages.
```julia-repl
-(@v1.9) pkg> st
-Status `~/.julia/environments/v1.6/Project.toml`
+(@v1.10) pkg> st
+Status `~/.julia/environments/v1.10/Project.toml`
[7876af07] Example v0.5.3
[682c06a0] JSON v0.21.3
[90137ffa] StaticArrays v1.5.9
@@ -68,13 +68,13 @@ Status `~/.julia/environments/v1.6/Project.toml`
To remove packages, use `rm` (or `remove`):
```julia-repl
-(@v1.9) pkg> rm JSON StaticArrays
+(@v1.10) pkg> rm JSON StaticArrays
```
Use `up` (or `update`) to update the installed packages
```julia-repl
-(@v1.9) pkg> up
+(@v1.10) pkg> up
```
If you have been following this guide it is likely that the packages installed are at the latest version
@@ -82,13 +82,13 @@ so `up` will not do anything. Below we show the status output in the case where
an old version of the Example package and then upgrade it:
```julia-repl
-(@v1.9) pkg> st
-Status `~/.julia/environments/v1.9/Project.toml`
+(@v1.10) pkg> st
+Status `~/.julia/environments/v1.10/Project.toml`
⌃ [7876af07] Example v0.5.1
Info Packages marked with ⌃ have new versions available and may be upgradable.
-(@v1.9) pkg> up
- Updating `~/.julia/environments/v1.9/Project.toml`
+(@v1.10) pkg> up
+ Updating `~/.julia/environments/v1.10/Project.toml`
[7876af07] ↑ Example v0.5.1 ⇒ v0.5.3
```
@@ -101,8 +101,8 @@ For more information about managing packages, see the [Managing Packages](@ref M
Up to this point, we have covered basic package management: adding, updating, and removing packages.
-You may have noticed the `(@v1.9)` in the REPL prompt.
-This lets us know that `v1.9` is the **active environment**.
+You may have noticed the `(@v1.10)` in the REPL prompt.
+This lets us know that `v1.10` is the **active environment**.
Different environments can have totally different packages and versions installed from another environment.
The active environment is the environment that will be modified by Pkg commands such as `add`, `rm` and `update`.
@@ -110,7 +110,7 @@ Let's set up a new environment so we may experiment.
To set the active environment, use `activate`:
```julia-repl
-(@v1.9) pkg> activate tutorial
+(@v1.10) pkg> activate tutorial
[ Info: activating new environment at `~/tutorial/Project.toml`.
```
@@ -166,16 +166,16 @@ For more information about environments, see the [Working with Environments](@re
If you are ever stuck, you can ask `Pkg` for help:
```julia-repl
-(@v1.9) pkg> ?
+(@v1.10) pkg> ?
```
You should see a list of available commands along with short descriptions.
You can ask for more detailed help by specifying a command:
```julia-repl
-(@v1.9) pkg> ?develop
+(@v1.10) pkg> ?develop
```
This guide should help you get started with `Pkg`.
-`Pkg` has much more to offer in terms of powerful package management,
-read the full manual to learn more!
+`Pkg` has much more to offer in terms of powerful package management.
+For more advanced topics, see [Managing Packages](@ref Managing-Packages), [Working with Environments](@ref Working-with-Environments), and [Creating Packages](@ref creating-packages-tutorial).
diff --git a/docs/src/glossary.md b/docs/src/glossary.md
index 60e0546039..4914150ff1 100644
--- a/docs/src/glossary.md
+++ b/docs/src/glossary.md
@@ -1,4 +1,4 @@
-# [**9.** Glossary](@id Glossary)
+# [**10.** Glossary](@id Glossary)
**Project:** a source tree with a standard layout, including a `src` directory
for the main body of Julia code, a `test` directory for testing the project,
@@ -14,8 +14,8 @@ may optionally have a manifest file:
- **Manifest file:** a file in the root directory of a project, named
`Manifest.toml` (or `JuliaManifest.toml`), describing a complete dependency graph
and exact versions of each package and library used by a project. The file name may
- also be suffixed by `-v{major}.{minor}.toml` which julia will prefer if the version
- matches `VERSION`, allowing multiple environments to be maintained for different julia
+ also be suffixed by `-v{major}.{minor}.toml` which Julia will prefer if the version
+ matches `VERSION`, allowing multiple environments to be maintained for different Julia
versions.
**Package:** a project which provides reusable functionality that can be used by
@@ -46,7 +46,7 @@ since that could conflict with the configuration of the main application.
**Environment:** the combination of the top-level name map provided by a project
file combined with the dependency graph and map from packages to their entry points
-provided by a manifest file. For more detail see the manual section on code loading.
+provided by a manifest file. For more detail see the [manual section on code loading](https://docs.julialang.org/en/v1/manual/code-loading/).
- **Explicit environment:** an environment in the form of an explicit project
file and an optional corresponding manifest file together in a directory. If the
diff --git a/docs/src/managing-packages.md b/docs/src/managing-packages.md
index b5889221cf..3295080ef7 100644
--- a/docs/src/managing-packages.md
+++ b/docs/src/managing-packages.md
@@ -10,14 +10,14 @@ The most frequently used is `add` and its usage is described first.
In the Pkg REPL, packages can be added with the `add` command followed by the name of the package, for example:
```julia-repl
-(@v1.8) pkg> add JSON
+(@v1.10) pkg> add JSON
Installing known registries into `~/`
Resolving package versions...
Installed Parsers ─ v2.4.0
Installed JSON ──── v0.21.3
- Updating `~/.julia/environments/v1.8/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
[682c06a0] + JSON v0.21.3
- Updating `~/environments/v1.9/Manifest.toml`
+ Updating `~/.julia/environments/v1.10/Manifest.toml`
[682c06a0] + JSON v0.21.3
[69de0a69] + Parsers v2.4.0
[ade2ca70] + Dates
@@ -28,7 +28,7 @@ Precompiling environment...
2 dependencies successfully precompiled in 2 seconds
```
-Here we added the package `JSON` to the current environment (which is the default `@v1.8` environment).
+Here we added the package `JSON` to the current environment (which is the default `@v1.10` environment).
In this example, we are using a fresh Julia installation,
and this is our first time adding a package using Pkg. By default, Pkg installs the General registry
and uses this registry to look up packages requested for inclusion in the current environment.
@@ -40,16 +40,16 @@ It is possible to add multiple packages in one command as `pkg> add A B C`.
The status output contains the packages you have added yourself, in this case, `JSON`:
```julia-repl
-(@v1.11) pkg> st
- Status `~/.julia/environments/v1.8/Project.toml`
+(@v1.10) pkg> st
+ Status `~/.julia/environments/v1.10/Project.toml`
[682c06a0] JSON v0.21.3
```
The manifest status shows all the packages in the environment, including recursive dependencies:
```julia-repl
-(@v1.11) pkg> st -m
-Status `~/environments/v1.9/Manifest.toml`
+(@v1.10) pkg> st -m
+Status `~/.julia/environments/v1.10/Manifest.toml`
[682c06a0] JSON v0.21.3
[69de0a69] Parsers v2.4.0
[ade2ca70] Dates
@@ -64,18 +64,18 @@ To specify that you want a particular version (or set of versions) of a package,
to require any patch release of the v0.21 series of JSON after v0.21.4, call `compat JSON 0.21.4`:
```julia-repl
-(@1.11) pkg> compat JSON 0.21.4
+(@v1.10) pkg> compat JSON 0.21.4
Compat entry set:
JSON = "0.21.4"
Resolve checking for compliance with the new compat rules...
Error empty intersection between JSON@0.21.3 and project compatibility 0.21.4 - 0.21
Suggestion Call `update` to attempt to meet the compatibility requirements.
-(@1.11) pkg> update
+(@v1.10) pkg> update
Updating registry at `~/.julia/registries/General.toml`
- Updating `~/.julia/environments/1.11/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
[682c06a0] ↑ JSON v0.21.3 ⇒ v0.21.4
- Updating `~/.julia/environments/1.11/Manifest.toml`
+ Updating `~/.julia/environments/v1.10/Manifest.toml`
[682c06a0] ↑ JSON v0.21.3 ⇒ v0.21.4
```
@@ -96,11 +96,11 @@ julia> JSON.json(Dict("foo" => [1, "bar"])) |> print
A specific version of a package can be installed by appending a version after a `@` symbol to the package name:
```julia-repl
-(@v1.8) pkg> add JSON@0.21.1
+(@v1.10) pkg> add JSON@0.21.1
Resolving package versions...
- Updating `~/.julia/environments/v1.8/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
⌃ [682c06a0] + JSON v0.21.1
- Updating `~/environments/v1.9/Manifest.toml`
+ Updating `~/.julia/environments/v1.10/Manifest.toml`
⌃ [682c06a0] + JSON v0.21.1
⌅ [69de0a69] + Parsers v1.1.2
[ade2ca70] + Dates
@@ -118,12 +118,12 @@ If a branch (or a certain commit) of `Example` has a hotfix that is not yet incl
we can explicitly track that branch (or commit) by appending `#branchname` (or `#commitSHA1`) to the package name:
```julia-repl
-(@v1.8) pkg> add Example#master
+(@v1.10) pkg> add Example#master
Cloning git-repo `https://github.com/JuliaLang/Example.jl.git`
Resolving package versions...
- Updating `~/.julia/environments/v1.8/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
[7876af07] + Example v0.5.4 `https://github.com/JuliaLang/Example.jl.git#master`
- Updating `~/environments/v1.9/Manifest.toml`
+ Updating `~/.julia/environments/v1.10/Manifest.toml`
[7876af07] + Example v0.5.4 `https://github.com/JuliaLang/Example.jl.git#master`
```
@@ -139,12 +139,12 @@ When updating packages, updates are pulled from that branch.
To go back to tracking the registry version of `Example`, the command `free` is used:
```julia-repl
-(@v1.8) pkg> free Example
+(@v1.10) pkg> free Example
Resolving package versions...
Installed Example ─ v0.5.3
- Updating `~/.julia/environments/v1.8/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
[7876af07] ~ Example v0.5.4 `https://github.com/JuliaLang/Example.jl.git#master` ⇒ v0.5.3
- Updating `~/environments/v1.9/Manifest.toml`
+ Updating `~/.julia/environments/v1.10/Manifest.toml`
[7876af07] ~ Example v0.5.4 `https://github.com/JuliaLang/Example.jl.git#master` ⇒ v0.5.3
```
@@ -153,12 +153,12 @@ To go back to tracking the registry version of `Example`, the command `free` is
If a package is not in a registry, it can be added by specifying a URL to the Git repository:
```julia-repl
-(@v1.8) pkg> add https://github.com/fredrikekre/ImportMacros.jl
+(@v1.10) pkg> add https://github.com/fredrikekre/ImportMacros.jl
Cloning git-repo `https://github.com/fredrikekre/ImportMacros.jl`
Resolving package versions...
- Updating `~/.julia/environments/v1.8/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
[92a963f6] + ImportMacros v1.0.0 `https://github.com/fredrikekre/ImportMacros.jl#master`
- Updating `~/environments/v1.9/Manifest.toml`
+ Updating `~/.julia/environments/v1.10/Manifest.toml`
[92a963f6] + ImportMacros v1.0.0 `https://github.com/fredrikekre/ImportMacros.jl#master`
```
@@ -167,7 +167,7 @@ For unregistered packages, we could have given a branch name (or commit SHA1) to
If you want to add a package using the SSH-based `git` protocol, you have to use quotes because the URL contains a `@`. For example,
```julia-repl
-(@v1.8) pkg> add "git@github.com:fredrikekre/ImportMacros.jl.git"
+(@v1.10) pkg> add "git@github.com:fredrikekre/ImportMacros.jl.git"
Cloning git-repo `git@github.com:fredrikekre/ImportMacros.jl.git`
Updating registry at `~/.julia/registries/General`
Resolving package versions...
@@ -188,7 +188,7 @@ repository:
pkg> add https://github.com/timholy/SnoopCompile.jl.git:SnoopCompileCore
Cloning git-repo `https://github.com/timholy/SnoopCompile.jl.git`
Resolving package versions...
- Updating `~/.julia/environments/v1.8/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
[e2b509da] + SnoopCompileCore v2.9.0 `https://github.com/timholy/SnoopCompile.jl.git:SnoopCompileCore#master`
Updating `~/.julia/environments/v1.8/Manifest.toml`
[e2b509da] + SnoopCompileCore v2.9.0 `https://github.com/timholy/SnoopCompile.jl.git:SnoopCompileCore#master`
@@ -214,15 +214,15 @@ from that local repo are pulled when packages are updated.
By only using `add` your environment always has a "reproducible state", in other words, as long as the repositories and registries used are still accessible
it is possible to retrieve the exact state of all the dependencies in the environment. This has the advantage that you can send your environment (`Project.toml`
and `Manifest.toml`) to someone else and they can [`Pkg.instantiate`](@ref) that environment in the same state as you had it locally.
-However, when you are developing a package, it is more convenient to load packages at their current state at some path. For this reason, the `dev` command exists.
+However, when you are [developing a package](@ref developing), it is more convenient to load packages at their current state at some path. For this reason, the `dev` command exists.
Let's try to `dev` a registered package:
```julia-repl
-(@v1.8) pkg> dev Example
+(@v1.10) pkg> dev Example
Updating git-repo `https://github.com/JuliaLang/Example.jl.git`
Resolving package versions...
- Updating `~/.julia/environments/v1.8/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
[7876af07] + Example v0.5.4 `~/.julia/dev/Example`
Updating `~/.julia/environments/v1.8/Manifest.toml`
[7876af07] + Example v0.5.4 `~/.julia/dev/Example`
@@ -263,9 +263,9 @@ julia> Example.plusone(1)
To stop tracking a path and use the registered version again, use `free`:
```julia-repl
-(@v1.8) pkg> free Example
+(@v1.10) pkg> free Example
Resolving package versions...
- Updating `~/.julia/environments/v1.8/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
[7876af07] ~ Example v0.5.4 `~/.julia/dev/Example` ⇒ v0.5.3
Updating `~/.julia/environments/v1.8/Manifest.toml`
[7876af07] ~ Example v0.5.4 `~/.julia/dev/Example` ⇒ v0.5.3
@@ -300,29 +300,29 @@ When new versions of packages are released, it is a good idea to update. Simply
to the latest compatible version. Sometimes this is not what you want. You can specify a subset of the dependencies to upgrade by giving them as arguments to `up`, e.g:
```julia-repl
-(@v1.8) pkg> up Example
+(@v1.10) pkg> up Example
```
This will only allow Example do upgrade. If you also want to allow dependencies of Example to upgrade (with the exception of packages that are in the project) you can pass the `--preserve=direct` flag.
```julia-repl
-(@v1.8) pkg> up --preserve=direct Example
+(@v1.10) pkg> up --preserve=direct Example
```
And if you also want to allow dependencies of Example that are also in the project to upgrade, you can use `--preserve=none`:
```julia-repl
-(@v1.8) pkg> up --preserve=none Example
+(@v1.10) pkg> up --preserve=none Example
```
## Pinning a package
A pinned package will never be updated. A package can be pinned using `pin`, for example:
```julia-repl
-(@v1.8) pkg> pin Example
+(@v1.10) pkg> pin Example
Resolving package versions...
- Updating `~/.julia/environments/v1.8/Project.toml`
+ Updating `~/.julia/environments/v1.10/Project.toml`
[7876af07] ~ Example v0.5.3 ⇒ v0.5.3 ⚲
Updating `~/.julia/environments/v1.8/Manifest.toml`
[7876af07] ~ Example v0.5.3 ⇒ v0.5.3 ⚲
@@ -331,8 +331,8 @@ A pinned package will never be updated. A package can be pinned using `pin`, for
Note the pin symbol `⚲` showing that the package is pinned. Removing the pin is done using `free`
```julia-repl
-(@v1.8) pkg> free Example
- Updating `~/.julia/environments/v1.8/Project.toml`
+(@v1.10) pkg> free Example
+ Updating `~/.julia/environments/v1.10/Project.toml`
[7876af07] ~ Example v0.5.3 ⚲ ⇒ v0.5.3
Updating `~/.julia/environments/v1.8/Manifest.toml`
[7876af07] ~ Example v0.5.3 ⚲ ⇒ v0.5.3
@@ -343,7 +343,7 @@ Note the pin symbol `⚲` showing that the package is pinned. Removing the pin i
The tests for a package can be run using `test` command:
```julia-repl
-(@v1.8) pkg> test Example
+(@v1.10) pkg> test Example
...
Testing Example
Testing Example tests passed
@@ -356,7 +356,7 @@ The output of the build process is directed to a file.
To explicitly run the build step for a package, the `build` command is used:
```julia-repl
-(@v1.8) pkg> build IJulia
+(@v1.10) pkg> build IJulia
Building Conda ─→ `~/.julia/scratchspaces/44cfe95a-1eb2-52ea-b672-e2afdf69b78f/6e47d11ea2776bc5627421d59cdcc1296c058071/build.log`
Building IJulia → `~/.julia/scratchspaces/44cfe95a-1eb2-52ea-b672-e2afdf69b78f/98ab633acb0fe071b671f6c1785c46cd70bb86bd/build.log`
@@ -486,7 +486,31 @@ To fix such errors, you have a number of options:
- remove either `A` or `B` from your environment. Perhaps `B` is left over from something you were previously working on, and you don't need it anymore. If you don't need `A` and `B` at the same time, this is the easiest way to fix the problem.
- try reporting your conflict. In this case, we were able to deduce that `B` requires an outdated version of `D`. You could thus report an issue in the development repository of `B.jl` asking for an updated version.
- try fixing the problem yourself.
- This becomes easier once you understand `Project.toml` files and how they declare their compatibility requirements. We'll return to this example in [Fixing conflicts](@ref).
+ This becomes easier once you understand `Project.toml` files and how they declare their compatibility requirements. We'll return to this example in [Fixing conflicts](@ref Fixing-conflicts).
+
+## Yanked packages
+
+Package registries can mark specific versions of packages as "yanked". A yanked package version
+is one that should no longer be used, typically because it contains serious bugs, security
+vulnerabilities, or other critical issues. When a package version is yanked, it becomes
+unavailable for new installations but remains accessible (e.g. via `instantiate`) to maintain reproducibility
+of existing environments.
+
+When you run `pkg> status`, yanked packages are clearly marked with a warning symbol:
+
+```julia-repl
+(@v1.13) pkg> status
+ Status `~/.julia/environments/v1.13/Project.toml`
+ [682c06a0] JSON v0.21.3
+ [f4259836] Example v1.2.0 [yanked]
+```
+
+The `[yanked]` annotation indicates that version `v1.2.0` of the `Example` package
+has been yanked and should be updated or replaced.
+
+When resolving dependencies, Pkg will warn you if yanked packages are present and may provide
+guidance on how to resolve the situation. It's important to address yanked packages promptly
+to ensure the security and stability of your Julia environment.
## Garbage collecting old, unused packages
@@ -502,7 +526,7 @@ If you are short on disk space and want to clean out as many unused packages and
To run a typical garbage collection with default arguments, simply use the `gc` command at the `pkg>` REPL:
```julia-repl
-(@v1.8) pkg> gc
+(@v1.10) pkg> gc
Active manifests at:
`~/BinaryProvider/Manifest.toml`
...
diff --git a/docs/src/protocol.md b/docs/src/protocol.md
new file mode 100644
index 0000000000..211b1caf46
--- /dev/null
+++ b/docs/src/protocol.md
@@ -0,0 +1,190 @@
+# [**14.** Package and Storage Server Protocol Reference](@id Pkg-Server-Protocols)
+
+The Julia Package Server Protocol (Pkg Protocol) and the Package Storage Server Protocol (Storage Protocol) define how Julia's package manager, Pkg, obtains and manages packages and their associated resources. They aim to enhance the Julia package ecosystem, making it more efficient, reliable, and user-friendly, avoiding potential points of failure, and ensuring the permanent availability of package versions and artifacts, which is paramount for the stability and reproducibility of Julia projects.
+
+The Pkg client, by default, gets all resources over HTTPS from a single open source service run by the Julia community. This service for serving packages is additionally backed by multiple independent storage services which interface with proprietary origin services (GitHub, etc.) and guarantee persistent availability of resources into the future.
+
+The protocols also aim to address some of the limitations that existed prior to their introduction.
+
+- **Vanishing Resources.** It is possible for authors to delete code repositories of registered Julia packages. Without some kind of package server, no one can install a package which has been deleted. If someone happens to have a current fork of a deleted package, that can be made the new official repository for the package, but the chances of them having no or outdated forks are high. An even worse situation could happen for artifacts since they tend not to be kept in version control and are much more likely to be served from "random" web servers at a fixed URL with content changing over time. Artifact publishers are unlikely to retain all past versions of artifacts, so old versions of packages that depend on specific artifact content will not be reproducible in the future unless we do something to ensure that they are kept around after the publisher has stopped hosting them. By storing all package versions and artifacts in a single place, we can ensure that they are available forever.
+- **Usage Insights.** It is valuable for the Julia community to know how many people are using Julia or what the relative popularity of different packages and operating systems is. Julia uses GitHub to host its ecosystem. GitHub - a commercial, proprietary service - has this information but does not make it available to the Julia community. We are of course using GitHub for free, so we can't complain, but it seems unfortunate that a commercial entity has this valuable information while the open source community remains in the dark. The Julia community really could use insight into who is using Julia and how, so that we can prioritize packages and platforms, and give real numbers when people ask "how many people are using Julia?"
+- **Decoupling from Git and GitHub.** Prior to this, the Julia package ecosystem was very deeply coupled to git and was even specialized on GitHub specifically in many ways. The Pkg and Storage Protocols allowed us to decouple ourselves from git as the primary mechanism for getting packages. Now Julia continues to support using git, but does not require it just to install packages from the default public registry anymore. This decoupling also paves the way for supporting other version control systems in the future, making git no longer so special. Special treatment of GitHub will also go away since we get the benefits of specializing for GitHub (fast tarball downloads) directly from the Pkg protocols.
+- **Firewall problems.** Prior to this, Pkg's need to connect to arbitrary servers using a miscellany of protocols caused several problems with firewalls. A large set of protocols and an unbounded list of servers needed to be whitelisted just to support default Pkg operation. If Pkg only needed to talk to a single service over a single, secure protocol (i.e. HTTPS), then whitelisting Pkg for standard use would be dead simple.
+
+## Protocols & Services
+
+1. **Pkg Protocol:** what Julia Pkg Clients speak to Pkg Servers. The Pkg Server serves all resources that Pkg Clients need to install and use registered packages, including registry data, packages and artifacts. It is designed to be easily horizontally scalable and not to have any hard operational requirements: if service is slow, just start more servers; if a Pkg Server crashes, forget it and boot up a new one.
+2. **Storage Protocol:** what Pkg Servers speak to get resources from Storage Services. Julia clients do not interact with Storage services directly and multiple independent Storage Services can symmetrically (all are treated equally) provide their service to a given Pkg Server. Since Pkg Servers cache what they serve to Clients and handle convenient content presentation, Storage Services can expose a much simpler protocol: all they do is serve up complete versions of registries, packages and artifacts, while guaranteeing persistence and completeness. Persistence means: once a version of a resource has been served, that version can be served forever. Completeness means: if the service serves a registry, it can serve all package versions referenced by that registry; if it serves a package version, it can serve all artifacts used by that package.
+
+Both protocols work over HTTPS, using only GET and HEAD requests. As is normal for HTTP, HEAD requests are used to get information about a resource, including whether it would be served, without actually downloading it. As described in what follows, the Pkg Protocol is client-to-server and may be unauthenticated, use basic auth, or OpenID; the Storage Protocol is server-to-server only and uses mutual authentication with TLS certificates.
+
+The following diagram shows how these services interact with each other and with external services such as GitHub, GitLab and BitBucket for source control, and S3 and HDFS for long-term persistence:
+
+ ┌───────────┐
+
+ │ Amazon S3 │
+
+ │ Storage │
+
+ └───────────┘
+
+ ▲
+
+ ║
+
+ ▼
+
+ Storage ╔═══════════╗ ┌───────────┐
+
+ Pkg Protocol ║ Storage ║ ┌──▶│ GitHub │
+
+ Protocol ┌──▶║ Service A ║───┤ └───────────┘
+
+ ┏━━━━━━━━━━━━┓ ┏━━━━━━━━━━━━┓ │ ╚═══════════╝ │ ┌───────────┐
+
+ ┃ Pkg Client ┃────▶┃ Pkg Server ┃───┤ ╔═══════════╗ ├──▶│ GitLab │
+
+ ┗━━━━━━━━━━━━┛ ┗━━━━━━━━━━━━┛ │ ║ Storage ║ │ └───────────┘
+
+ └──▶║ Service B ║───┤ ┌───────────┐
+
+ ╚═══════════╝ └──▶│ BitBucket │
+
+ ▲ └───────────┘
+
+ ║
+
+ ▼
+
+ ┌───────────┐
+
+ │ HDFS │
+
+ │ Cluster │
+
+ └───────────┘
+
+Each Julia Pkg Client is configured to talk to a Pkg Server. By default, they talk to `pkg.julialang.org`, a public, unauthenticated Pkg Server. If the environment variable `JULIA_PKG_SERVER` is set, the Pkg Client connects to that host instead. For example, if `JULIA_PKG_SERVER` is set to `pkg.company.com` then the Pkg Client will connect to `https://pkg.company.com`. So in typical operation, a Pkg Client will no longer rely on `libgit2` or a git command-line client, both of which have been an ongoing headache, especially behind firewalls and on Windows. In fact, git will only be necessary when working with git-hosted registries and unregistered packages - those will continue to work as they have previously, fetched using git.
+
+While the default Pkg Server at `pkg.julialang.org` is unauthenticated, other parties may host Pkg Server instances elsewhere, authenticated or unauthenticated, public or private, as they wish. People can connect to those servers by setting the `JULIA_PKG_SERVER` variable. There will be a configuration file for providing authentication information to Pkg Servers using either basic auth or OpenID. The Pkg Server implementation will be open source and have minimal operational requirements. Specifically, it needs:
+
+1. The ability to accept incoming connections on port 443;
+2. The ability to connect to a configurable set of Storage Services;
+3. Temporary disk storage for caching resources (registries, packages, artifacts).
+
+A Pkg Service may be backed by more than one actual server, as is typical for web services. The Pkg Service is stateless, so this kind of horizontal scaling is straightforward. Each Pkg Server serves registry, package and artifact resources to Pkg Clients and caches whatever it serves. Each Pkg Server, in turn, gets those resources from one or more Storage Services. Storage services are responsible for fetching resources from code hosting sites like GitHub, GitLab and BitBucket, and for persisting everything that they have ever served to long-term storage systems like Amazon S3, hosted HDFS clusters - or whatever an implementor wants to use. If the original copies of resources vanish, Pkg Servers must always serve up all previously served versions of resources.
+
+The Storage Protocol is designed to be extremely simple so that multiple independent implementations can coexist, and each Pkg Server may be symmetrically backed by multiple different Storage Services, providing both redundant backup and ensuring that no single implementation has a "choke hold" on the ecosystem - anyone can implement a new Storage Service and add it to the set of services backing the default Pkg Server at `pkg.julialang.org`. The simplest possible version of a Storage Service is a static HTTPS site serving files generated from a snapshot of a registry. Although this does not provide adequate long-term backup capabilities, and would need to be regenerated whenever a registry changes, it may be sufficient for some private uses. Having multiple independently operated Storage Services helps ensure that even if one Storage Service becomes unavailable or unreliable - for technical, financial, or political reasons - others will keep operating and so will the Pkg ecosystem.
+
+## The Pkg Protocol
+
+This section describes the protocol used by Pkg Clients to get resources from Pkg Servers, including the latest versions of registries, package source trees, and artifacts. There is also a standard system for asking for diffs of all of these from previous versions, to minimize how much data the client needs to download in order to update itself. There is additionally a bundle mechanism for requesting and receiving a set of resources in a single request.
+
+### Authentication
+
+The authentication scheme between a Pkg client and server will be HTTP authorization with bearer tokens, as standardized in RFC6750. This means that authenticated access is accomplished by the client by making an HTTPS request including an `Authorization: Bearer $access_token` header.
+
+The format of the token, its contents and validation mechanism are not specified by the Pkg Protocol. They are left to the server to define. The server is expected to validate the token and determine whether the client is authorized to access the requested resource. Similarly at the client side, the implementation of the token acquisition is not specified by the Pkg Protocol. However Pkg provides [hooks](#Authentication-Hooks) that can be implemented at the client side to trigger the token acquisition process. Tokens thus acquired are expected to be stored in a local file, the format of which is specified by the Pkg Protocol. Pkg will be able to read the token from this file and include it in the request to the server. Pkg can also, optionally, detect when the token is about to expire and trigger a refresh. The Pkg client also supports automatic token refresh, since bearer tokens are recommended to be short-lived (no more than a day).
+
+The authorization information is saved locally in `$(DEPOT_PATH[1])/servers/$server/auth.toml` which is a TOML file with the following fields:
+
+- `access_token` (REQUIRED): the bearer token used to authorize normal requests
+- `expires_at` (OPTIONAL): an absolute expiration time
+- `expires_in` (OPTIONAL): a relative expiration time
+- `refresh_token` (OPTIONAL): bearer token used to authorize refresh requests
+- `refresh_url` (OPTIONAL): URL to fetch a new token from
+
+The `auth.toml` file may contain other fields (e.g. user name, user email), but they are ignored by Pkg. The two other fields mentioned in RFC6750 are `token_type` and `scope`: these are omitted since only tokens of type `Bearer` are supported currently and the scope is always implicitly to provide access to Pkg protocol URLs. Pkg servers should, however, not send `auth.toml` files with `token_type` or `scope` fields, as these names may be used in the future, e.g. to support other kinds of tokens or to limit the scope of an authorization to a subset of Pkg protocol URLs.
+
+Initially, the user or user agent (IDE) must acquire an `auth.toml` file and save it to the correct location. After that, Pkg will determine whether the access token needs to be refreshed by examining the `expires_at` and/or `expires_in` fields of the auth file. The expiration time is the minimum of `expires_at` and `mtime(auth_file) + expires_in`. When the Pkg client downloads a new `auth.toml` file, if there is a relative `expires_in` field, an absolute `expires_at` value is computed based on the client's current clock time. This combination of policies allows expiration to work gracefully even in the presence of clock skew between the server and the client.
+
+If the access token is expired and there are `refresh_token` and `refresh_url` fields in `auth.toml`, a new auth file is requested by making a request to `refresh_url` with an `Authorization: Bearer $refresh_token` header. Pkg will refuse to make a refresh request unless `refresh_url` is an HTTPS URL. Note that `refresh_url` need not be a URL on the Pkg server: token refresh can be handled by a separate server. If the request is successful and the returned `auth.toml` file is a well-formed TOML file with at least an `access_token` field, it is saved to `$(DEPOT_PATH[1])/servers/$server/auth.toml`.
+
+Checking for access token expiry and refreshing `auth.toml` is done before each Pkg client request to a Pkg server, and if the auth file is updated the new access token is used, so the token should in theory always be up to date. Practice is different from theory, of course, and if the Pkg server considers the access token expired, it may return an HTTP 401 Unauthorized response, and the Pkg client should attempt to refresh the auth token. If, after attempting to refresh the access token, the server still returns HTTP 401 Unauthorized, the Pkg client will present the body of the error response to the user or user agent (IDE).
+
+## Authentication Hooks
+A mechanism to register a hook at the client is provided to allow the user agent to handle an auth failure. It can, for example, present a login page and take the user through the necessary authentication flow to get a new auth token and store it in `auth.toml`.
+
+- A handler can also be registered using [`register_auth_error_handler`](@ref Pkg.PlatformEngines.register_auth_error_handler). It returns a function that can be called to deregister the handler.
+- A handler can also be deregistered using [`deregister_auth_error_handler`](@ref Pkg.PlatformEngines.deregister_auth_error_handler).
+
+Example:
+
+```julia
+# register a handler
+dispose = Pkg.PlatformEngines.register_auth_error_handler((url, svr, err) -> begin
+ PkgAuth.authenticate(svr*"/auth")
+ return true, true
+end)
+
+# ... client code ...
+
+# deregister the handler
+dispose()
+# or
+Pkg.PlatformEngines.deregister_auth_error_handler(url, svr)
+```
+
+### Resources
+
+The client can make GET or HEAD requests to the following resources:
+
+- `/registries`: map of registry uuids at this server to their current tree hashes, each line of the response data is of the form `/registry/$uuid/$hash` representing a resource pointing to a particular version of a registry
+- `/registry/$uuid/$hash`: tarball of registry uuid at the given tree hash
+- `/package/$uuid/$hash`: tarball of package uuid at the given tree hash
+- `/artifact/$hash`: tarball of an artifact with the given tree hash
+
+Only the `/registries` resource changes - all other resources can be cached forever and the server will indicate this with the appropriate HTTP headers.
+
+### Reference Implementation
+
+A reference implementation of the Pkg Server protocol is available at [PkgServer.jl](https://github.com/JuliaPackaging/PkgServer.jl).
+
+## The Storage Protocol
+
+This section describes the protocol used by Pkg Servers to get resources from Storage Servers, including the latest versions of registries, package source trees, and artifacts. The Pkg Server requests each type of resource when it needs it and caches it for as long as it can, so Storage Services should not have to serve the same resources to the same Pkg Server instance many times.
+
+### Authentication
+
+Since the Storage protocol is a server-to-server protocol, it uses certificate-based mutual authentication: each side of the connection presents certificates of identity to the other. The operator of a Storage Service must issue a client certificate to the operator of a Pkg Service certifying that it is authorized to use the Storage Service.
+
+### Resources
+
+The Storage Protocol is similar to the Pkg Protocol:
+
+- `/registries`: map of registry uuids at this server to their current tree hashes
+- `/registry/$uuid/$hash`: tarball of registry uuid at the given tree hash
+- `/package/$uuid/$hash`: tarball of package uuid at the given tree hash
+- `/artifact/$hash`: tarball of an artifact with the given tree hash
+
+As is the case with the Pkg Server protocol, only the `/registries` resource changes over time—all other resources are permanently cacheable and Pkg Servers are expected to cache resources indefinitely, only deleting them if they need to reclaim storage space.
+
+### Interaction
+
+Fetching resources from a single Storage Server is straightforward: the Pkg Server asks for a version of a registry by UUID and hash and the Storage Server returns a tarball of that registry tree if it knows about that registry and version, or an HTTP 404 error if it doesn't.
+
+Each Pkg Server may use multiple Storage Services for availability and depth of backup. For a given resource, the Pkg Server makes a HEAD request to each Storage Service requesting the resource, and then makes a GET request for the resource to the first Storage Server that replies to the HEAD request with a 200 OK. If no Storage Service responds with a 200 OK in enough time, the Pkg Server should respond to the request for the corresponding resource with a 404 error. Each Storage Service which responds with a 200 OK must behave as if it had served the resource, regardless of whether it does so or not - i.e. persist the resource to long-term storage.
+
+One subtlety is how the Pkg Server determines what the latest version of each registry is. It can get a map from registry UUIDs to version hashes from each Storage Server, but hashes are unordered - if multiple Storage Servers reply with different hashes, which one should the Pkg Server use? When Storage Servers disagree on the latest hash of a registry, the Pkg Server should ask each Storage Server about the hashes that the other servers returned: if Service A knows about Service B's hash but B doesn't know about A's hash, then A's hash is more recent and should be used. If each server doesn't know about the other's hash, then neither hash is strictly newer than the other one and either could be used. The Pkg Server can break the tie any way it wants, e.g. randomly or by using the lexicographically earlier hash.
+
+### Guarantees
+
+The primary guarantee that a Storage Server makes is that if it has ever successfully served a resource—registry tree, package source tree, artifact tree — it must be able to serve that same resource version forever.
+
+It's tempting to also require it to guarantee that if a Storage Server serves a registry tree, it can also serve every package source tree referred to within that registry tree. Similarly, it is tempting to require that if a Storage Server can serve a package source tree that it should be able to serve any artifacts referenced by that version of the package. However, this could fail for reasons entirely beyond the control of the server: what if the registry is published with wrong package hashes? What if someone registers a package version, doesn't git tag it, then force pushes the branch that the version was on? In both of these cases, the Storage Server may not be able to fetch a version of a package through no fault of its own. Similarly, artifact hashes in packages might be incorrect or vanish before the Storage Server can retrieve them.
+
+Therefore, we don't strictly require that Storage Servers guarantee this kind of closure under resource references. We do, however, recommend that Storage Servers proactively fetch resources referred to by other resources as soon as possible. When a new version of a registry is available, the Storage Server should fetch all the new package versions in the registry immediately. When a package version is fetched—for any reason, whether because it was included in a new registry snapshot or because an upstream Pkg Server requested it by hash—all artifacts that it references should be fetched immediately.
+
+## Verification
+
+Since all resources are content addressed, the Pkg Clients and Pkg Server can and should verify that resources that they receive from upstream have the correct content hash. If a resource does not have the right hash, it should not be used and not be served further downstream. Pkg Servers should try to fetch the resource from other Storage Services and serve one that has the correct content. Pkg Clients should error if they get a resource with an incorrect content hash.
+
+Git uses SHA1 for content hashing. There is a pure Julia implementation of git's content hashing algorithm, which is being used to verify artifacts in Julia 1.3 (among other things). The SHA1 hashing algorithm is considered to be cryptographically compromised at this point, and while it's not completely broken, git is already starting to plan how to move away from using SHA1 hashes. To that end, we should consider getting ahead of this problem by using a stronger hash like SHA3-256 in these protocols. Having control over these protocols actually makes this considerably easier than if we were continuing to rely on git for resource acquisition.
+
+The first step to using SHA3-256 instead of SHA1 is to populate registries with additional hashes for package versions. Currently each package version is identified by a git-tree-sha1 entry. We would add git-tree-sha3-256 entries that give the SHA3-256 hashes computed using the same git tree hashing logic. From this origin, the Pkg Client, Pkg Server and Storage Servers all just need to use SHA3-256 hashes rather than SHA1 hashes.
+
+## References
+
+1. Pkg & Storage Protocols [https://github.com/JuliaLang/Pkg.jl/issues/1377](https://github.com/JuliaLang/Pkg.jl/issues/1377)
+2. Authenticated Pkg Client Support: [https://github.com/JuliaLang/Pkg.jl/pull/1538](https://github.com/JuliaLang/Pkg.jl/pull/1538)
+3. Authentication Hooks: [https://github.com/JuliaLang/Pkg.jl/pull/1630](https://github.com/JuliaLang/Pkg.jl/pull/1630)
diff --git a/docs/src/registries.md b/docs/src/registries.md
index 7c50727204..cada0bdadf 100644
--- a/docs/src/registries.md
+++ b/docs/src/registries.md
@@ -1,4 +1,4 @@
-# **7.** Registries
+# **8.** Registries
Registries contain information about packages, such as
available releases and dependencies, and where they can be downloaded.
diff --git a/docs/src/toml-files.md b/docs/src/toml-files.md
index 79496e0321..a6363d2dc3 100644
--- a/docs/src/toml-files.md
+++ b/docs/src/toml-files.md
@@ -1,4 +1,4 @@
-# [**10.** `Project.toml` and `Manifest.toml`](@id Project-and-Manifest)
+# [**11.** `Project.toml` and `Manifest.toml`](@id Project-and-Manifest)
Two files that are central to Pkg are `Project.toml` and `Manifest.toml`. `Project.toml`
and `Manifest.toml` are written in [TOML](https://github.com/toml-lang/toml) (hence the
@@ -22,13 +22,38 @@ are described below.
### The `authors` field
-For a package, the optional `authors` field is a list of strings describing the
-package authors, in the form `NAME `. For example:
+For a package, the optional `authors` field is a TOML array describing the package authors.
+Entries in the array can either be a string in the form `"NAME"` or `"NAME "`, or a table with keys following the [Citation File Format schema](https://github.com/citation-file-format/citation-file-format/blob/main/schema-guide.md) for either a
+[`person`](https://github.com/citation-file-format/citation-file-format/blob/main/schema-guide.md#definitionsperson) or an [`entity`](https://github.com/citation-file-format/citation-file-format/blob/main/schema-guide.md#definitionsentity).
+
+For example:
```toml
-authors = ["Some One ",
- "Foo Bar "]
+authors = [
+ "Some One ",
+ "Foo Bar ",
+ {given-names = "Baz", family-names = "Qux", email = "bazqux@example.com", orcid = "https://orcid.org/0000-0000-0000-0000", website = "https://github.com/bazqux"},
+]
```
+If all authors are specified by tables, it is possible to use [the TOML Array of Tables syntax](https://toml.io/en/v1.0.0#array-of-tables)
+```toml
+[[authors]]
+given-names = "Some"
+family-names = "One"
+email = "someone@email.com"
+
+[[authors]]
+given-names = "Foo"
+family-names = "Bar"
+email = "foo@bar.com"
+
+[[authors]]
+given-names = "Baz"
+family-names = "Qux"
+email = "bazqux@example.com"
+orcid = "https://orcid.org/0000-0000-0000-0000"
+website = "https://github.com/bazqux"
+```
### The `name` field
@@ -39,7 +64,7 @@ name = "Example"
The name must be a valid [identifier](https://docs.julialang.org/en/v1/base/base/#Base.isidentifier)
(a sequence of Unicode characters that does not start with a number and is neither `true` nor `false`).
For packages, it is recommended to follow the
-[package naming rules](@ref Package-naming-rules). The `name` field is mandatory
+[package naming rules](@ref Package-naming-guidelines). The `name` field is mandatory
for packages.
@@ -77,6 +102,18 @@ Note that Pkg.jl deviates from the SemVer specification when it comes to version
the section on [pre-1.0 behavior](@ref compat-pre-1.0) for more details.
+### The `readonly` field
+
+The `readonly` field is a boolean that, when set to `true`, marks the environment as read-only. This prevents any modifications to the environment, including adding, removing, or updating packages. For example:
+
+```toml
+readonly = true
+```
+
+When an environment is marked as readonly, Pkg will throw an error if any operation that would modify the environment is attempted.
+If the `readonly` field is not present or set to `false` (the default), the environment can be modified normally.
+
+
### The `[deps]` section
All dependencies of the package/project are listed in the `[deps]` section. Each dependency
@@ -93,13 +130,23 @@ handled by Pkg operations such as `add`.
### The `[sources]` section
-Specifiying a path or repo (+ branch) for a dependency is done in the `[sources]` section.
+Specifying a path or repo (+ branch) for a dependency is done in the `[sources]` section.
These are especially useful for controlling unregistered dependencies without having to bundle a
corresponding manifest file.
+Each entry in the `[sources]` section supports the following keys:
+
+- **`url`**: The URL of the Git repository. Cannot be used with `path`.
+- **`rev`**: The Git revision (branch name, tag, or commit hash) to use. Only valid with `url`.
+- **`subdir`**: A subdirectory within the repository containing the package.
+- **`path`**: A local filesystem path to the package. Cannot be used with `url` or `rev`.
+
+This might in practice look something like:
+
```toml
[sources]
Example = {url = "https://github.com/JuliaLang/Example.jl", rev = "custom_branch"}
+WithinMonorepo = {url = "https://github.org/author/BigProject", subdir = "SubPackage"}
SomeDependency = {path = "deps/SomeDependency.jl"}
```
@@ -161,7 +208,7 @@ For the details, see [`Pkg.instantiate`](@ref).
### Different Manifests for Different Julia versions
-Starting from Julia v1.11, there is an option to name manifest files in the format `Manifest-v{major}.{minor}.toml`.
+Starting from Julia v1.10.8, there is an option to name manifest files in the format `Manifest-v{major}.{minor}.toml`.
Julia will then preferentially use the version-specific manifest file if available.
For example, if both `Manifest-v1.11.toml` and `Manifest.toml` exist, Julia 1.11 will prioritize using `Manifest-v1.11.toml`.
However, Julia versions 1.10, 1.12, and all others will default to using `Manifest.toml`.
diff --git a/ext/REPLExt/REPLExt.jl b/ext/REPLExt/REPLExt.jl
index 92e636cd2f..3b0ade22e3 100644
--- a/ext/REPLExt/REPLExt.jl
+++ b/ext/REPLExt/REPLExt.jl
@@ -1,18 +1,25 @@
module REPLExt
+if Base.get_bool_env("JULIA_PKG_DISALLOW_PKG_PRECOMPILATION", false) == true
+ error("Precompililing Pkg extension REPLExt is disallowed. JULIA_PKG_DISALLOW_PKG_PRECOMPILATION=$(ENV["JULIA_PKG_DISALLOW_PKG_PRECOMPILATION"])")
+end
+
using Markdown, UUIDs, Dates
import REPL
import .REPL: LineEdit, REPLCompletions, TerminalMenus
import Pkg
-import .Pkg: linewrap, pathrepr, compat, can_fancyprint, printpkgstyle, PKGMODE_PROJECT
+import .Pkg: linewrap, pathrepr, can_fancyprint, printpkgstyle, PKGMODE_PROJECT
using .Pkg: Types, Operations, API, Registry, Resolve, REPLMode, safe_realpath
using .REPLMode: Statement, CommandSpec, Command, prepare_cmd, tokenize, core_parse, SPECS, api_options, parse_option, api_options, is_opt, wrap_option
using .Types: Context, PkgError, pkgerror, EnvCache
+using .API: set_current_compat
+import .API: _compat
+
include("completions.jl")
include("compat.jl")
@@ -23,7 +30,7 @@ include("compat.jl")
struct PkgCompletionProvider <: LineEdit.CompletionProvider end
-function LineEdit.complete_line(c::PkgCompletionProvider, s; hint::Bool=false)
+function LineEdit.complete_line(c::PkgCompletionProvider, s; hint::Bool = false)
partial = REPL.beforecursor(s.input_buffer)
full = LineEdit.input_string(s)
ret, range, should_complete = completions(full, lastindex(partial); hint)
@@ -113,16 +120,18 @@ function on_done(s, buf, ok, repl)
do_cmds(repl, input)
REPL.prepare_next(repl)
REPL.reset_state(s)
- s.current_mode.sticky || REPL.transition(s, main)
+ return s.current_mode.sticky || REPL.transition(s, main)
end
# Set up the repl Pkg REPLMode
function create_mode(repl::REPL.AbstractREPL, main::LineEdit.Prompt)
- pkg_mode = LineEdit.Prompt(promptf;
+ pkg_mode = LineEdit.Prompt(
+ promptf;
prompt_prefix = repl.options.hascolor ? Base.text_colors[:blue] : "",
prompt_suffix = "",
complete = PkgCompletionProvider(),
- sticky = true)
+ sticky = true
+ )
pkg_mode.repl = repl
hp = main.hist
@@ -145,24 +154,24 @@ function create_mode(repl::REPL.AbstractREPL, main::LineEdit.Prompt)
repl_keymap = Dict()
if shell_mode !== nothing
- let shell_mode=shell_mode
- repl_keymap[';'] = function (s,o...)
- if isempty(s) || position(LineEdit.buffer(s)) == 0
+ let shell_mode = shell_mode
+ repl_keymap[';'] = function (s, o...)
+ return if isempty(s) || position(LineEdit.buffer(s)) == 0
buf = copy(LineEdit.buffer(s))
LineEdit.transition(s, shell_mode) do
LineEdit.state(s, shell_mode).input_buffer = buf
end
else
LineEdit.edit_insert(s, ';')
- LineEdit.check_for_hint(s) && LineEdit.refresh_line(s)
+ LineEdit.check_show_hint(s)
end
end
end
end
- b = Dict{Any,Any}[
+ b = Dict{Any, Any}[
skeymap, repl_keymap, mk, prefix_keymap, LineEdit.history_keymap,
- LineEdit.default_keymap, LineEdit.escape_defaults
+ LineEdit.default_keymap, LineEdit.escape_defaults,
]
pkg_mode.keymap_dict = LineEdit.keymap(b)
return pkg_mode
@@ -172,16 +181,16 @@ function repl_init(repl::REPL.LineEditREPL)
main_mode = repl.interface.modes[1]
pkg_mode = create_mode(repl, main_mode)
push!(repl.interface.modes, pkg_mode)
- keymap = Dict{Any,Any}(
- ']' => function (s,args...)
- if isempty(s) || position(LineEdit.buffer(s)) == 0
+ keymap = Dict{Any, Any}(
+ ']' => function (s, args...)
+ return if isempty(s) || position(LineEdit.buffer(s)) == 0
buf = copy(LineEdit.buffer(s))
LineEdit.transition(s, pkg_mode) do
LineEdit.state(s, pkg_mode).input_buffer = buf
end
else
LineEdit.edit_insert(s, ']')
- LineEdit.check_for_hint(s) && LineEdit.refresh_line(s)
+ LineEdit.check_show_hint(s)
end
end
)
@@ -201,9 +210,9 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol})
end
if isempty(ctx.registries)
if !REG_WARNED[]
- printstyled(ctx.io, " │ "; color=:green)
+ printstyled(ctx.io, " │ "; color = :green)
printstyled(ctx.io, "Attempted to find missing packages in package registries but no registries are installed.\n")
- printstyled(ctx.io, " └ "; color=:green)
+ printstyled(ctx.io, " └ "; color = :green)
printstyled(ctx.io, "Use package mode to install a registry. `pkg> registry add` will install the default registries.\n\n")
REG_WARNED[] = true
end
@@ -223,22 +232,22 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol})
available_pkg_list = length(available_pkgs) == 1 ? String(available_pkgs[1]) : "[$(join(available_pkgs, ", "))]"
msg1 = "Package$(plural1) $(missing_pkg_list) not found, but $(plural2) named $(available_pkg_list) $(plural3) available from a registry."
for line in linewrap(msg1, io = ctx.io, padding = length(" │ "))
- printstyled(ctx.io, " │ "; color=:green)
+ printstyled(ctx.io, " │ "; color = :green)
println(ctx.io, line)
end
- printstyled(ctx.io, " │ "; color=:green)
+ printstyled(ctx.io, " │ "; color = :green)
println(ctx.io, "Install package$(plural4)?")
msg2 = string("add ", join(available_pkgs, ' '))
for (i, line) in pairs(linewrap(msg2; io = ctx.io, padding = length(string(" | ", promptf()))))
- printstyled(ctx.io, " │ "; color=:green)
+ printstyled(ctx.io, " │ "; color = :green)
if i == 1
- printstyled(ctx.io, promptf(); color=:blue)
+ printstyled(ctx.io, promptf(); color = :blue)
else
print(ctx.io, " "^length(promptf()))
end
println(ctx.io, line)
end
- printstyled(ctx.io, " └ "; color=:green)
+ printstyled(ctx.io, " └ "; color = :green)
Base.prompt(stdin, ctx.io, "(y/n/o)", default = "y")
catch err
if err isa InterruptException # if ^C is entered
@@ -254,7 +263,7 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol})
resp = strip(resp)
lower_resp = lowercase(resp)
if lower_resp in ["y", "yes"]
- API.add(string.(available_pkgs); allow_autoprecomp=false)
+ API.add(string.(available_pkgs); allow_autoprecomp = false)
elseif lower_resp in ["o"]
editable_envs = filter(v -> v != "@stdlib", LOAD_PATH)
option_list = String[]
@@ -273,16 +282,16 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol})
push!(keybindings, only("$n"))
push!(shown_envs, expanded_env)
end
- menu = TerminalMenus.RadioMenu(option_list, keybindings=keybindings, pagesize=length(option_list))
+ menu = TerminalMenus.RadioMenu(option_list; keybindings = keybindings, pagesize = length(option_list), charset = :ascii)
default = something(
# select the first non-default env by default, if possible
findfirst(!=(Base.active_project()), shown_envs),
1
)
print(ctx.io, "\e[1A\e[1G\e[0J") # go up one line, to the start, and clear it
- printstyled(ctx.io, " └ "; color=:green)
+ printstyled(ctx.io, " └ "; color = :green)
choice = try
- TerminalMenus.request("Select environment:", menu, cursor=default)
+ TerminalMenus.request("Select environment:", menu, cursor = default)
catch err
if err isa InterruptException # if ^C is entered
println(ctx.io)
@@ -292,7 +301,7 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol})
end
choice == -1 && return false
API.activate(shown_envs[choice]) do
- API.add(string.(available_pkgs); allow_autoprecomp=false)
+ API.add(string.(available_pkgs); allow_autoprecomp = false)
end
elseif (lower_resp in ["n"])
return false
@@ -308,7 +317,6 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol})
end
-
function __init__()
if isdefined(Base, :active_repl)
if Base.active_repl isa REPL.LineEditREPL
@@ -326,7 +334,7 @@ function __init__()
end
end
end
- if !in(try_prompt_pkg_add, REPL.install_packages_hooks)
+ return if !in(try_prompt_pkg_add, REPL.install_packages_hooks)
push!(REPL.install_packages_hooks, try_prompt_pkg_add)
end
end
diff --git a/ext/REPLExt/compat.jl b/ext/REPLExt/compat.jl
index a9a537cf0f..b46ada6c72 100644
--- a/ext/REPLExt/compat.jl
+++ b/ext/REPLExt/compat.jl
@@ -1,7 +1,9 @@
# TODO: Overload
-function compat(ctx::Context; io = nothing)
+function _compat(ctx::Context; io = nothing, input_io = stdin)
io = something(io, ctx.io)
- can_fancyprint(io) || pkgerror("Pkg.compat cannot be run interactively in this terminal")
+ if input_io isa Base.TTY # testing uses IOBuffer
+ can_fancyprint(io) || pkgerror("Pkg.compat cannot be run interactively in this terminal")
+ end
printpkgstyle(io, :Compat, pathrepr(ctx.env.project_file))
longest_dep_len = max(5, length.(collect(keys(ctx.env.project.deps)))...)
opt_strs = String[]
@@ -9,14 +11,14 @@ function compat(ctx::Context; io = nothing)
compat_str = Operations.get_compat_str(ctx.env.project, "julia")
push!(opt_strs, Operations.compat_line(io, "julia", nothing, compat_str, longest_dep_len, indent = ""))
push!(opt_pkgs, "julia")
- for (dep, uuid) in sort(collect(ctx.env.project.deps); by = x->x.first)
+ for (dep, uuid) in sort(collect(ctx.env.project.deps); by = x -> x.first)
compat_str = Operations.get_compat_str(ctx.env.project, dep)
push!(opt_strs, Operations.compat_line(io, dep, uuid, compat_str, longest_dep_len, indent = ""))
push!(opt_pkgs, dep)
end
- menu = TerminalMenus.RadioMenu(opt_strs, pagesize=length(opt_strs))
+ menu = TerminalMenus.RadioMenu(opt_strs; pagesize = length(opt_strs), charset = :ascii)
choice = try
- TerminalMenus.request(" Select an entry to edit:", menu)
+ TerminalMenus.request(TerminalMenus.default_terminal(in = input_io, out = io), " Select an entry to edit:", menu)
catch err
if err isa InterruptException # if ^C is entered
println(io)
@@ -35,10 +37,12 @@ function compat(ctx::Context; io = nothing)
start_pos = length(prompt) + 2
move_start = "\e[$(start_pos)G"
clear_to_end = "\e[0J"
- ccall(:jl_tty_set_mode, Int32, (Ptr{Cvoid},Int32), stdin.handle, true)
+ if input_io isa Base.TTY
+ ccall(:jl_tty_set_mode, Int32, (Ptr{Cvoid}, Int32), input_io.handle, true)
+ end
while true
print(io, move_start, clear_to_end, buffer, "\e[$(start_pos + cursor)G")
- inp = TerminalMenus._readkey(stdin)
+ inp = TerminalMenus._readkey(input_io)
if inp == '\r' # Carriage return
println(io)
break
@@ -65,9 +69,9 @@ function compat(ctx::Context; io = nothing)
if cursor == 1
buffer = buffer[2:end]
elseif cursor == length(buffer)
- buffer = buffer[1:end - 1]
+ buffer = buffer[1:(end - 1)]
elseif cursor > 0
- buffer = buffer[1:(cursor-1)] * buffer[(cursor + 1):end]
+ buffer = buffer[1:(cursor - 1)] * buffer[(cursor + 1):end]
else
continue
end
@@ -85,9 +89,11 @@ function compat(ctx::Context; io = nothing)
end
buffer
finally
- ccall(:jl_tty_set_mode, Int32, (Ptr{Cvoid},Int32), stdin.handle, false)
+ if input_io isa Base.TTY
+ ccall(:jl_tty_set_mode, Int32, (Ptr{Cvoid}, Int32), input_io.handle, false)
+ end
end
new_entry = strip(resp)
- compat(ctx, dep, string(new_entry))
+ API._compat(ctx, dep, string(new_entry))
return
end
diff --git a/ext/REPLExt/completions.jl b/ext/REPLExt/completions.jl
index eca5e11218..8d41302d85 100644
--- a/ext/REPLExt/completions.jl
+++ b/ext/REPLExt/completions.jl
@@ -34,11 +34,11 @@ function complete_local_dir(s, i1, i2)
end
function complete_expanded_local_dir(s, i1, i2, expanded_user, oldi2)
- cmp = REPL.REPLCompletions.complete_path(s, i2, shell_escape=true)
+ cmp = REPL.REPLCompletions.complete_path(s, i2, shell_escape = true)
cmp2 = cmp[2]
completions = [REPL.REPLCompletions.completion_text(p) for p in cmp[1]]
completions = filter!(completions) do x
- Base.isaccessibledir(s[1:prevind(s, first(cmp2)-i1+1)]*x)
+ Base.isaccessibledir(s[1:prevind(s, first(cmp2) - i1 + 1)] * x)
end
if expanded_user
if length(completions) == 1 && endswith(joinpath(homedir(), ""), first(completions))
@@ -54,9 +54,9 @@ end
const JULIA_UUID = UUID("1222c4b2-2114-5bfd-aeef-88e4692bbb3e")
-function complete_remote_package(partial; hint::Bool)
- found_match = false
- isempty(partial) && return String[]
+function complete_remote_package!(comps, partial; hint::Bool)
+ isempty(partial) && return true # true means returned early
+ found_match = !isempty(comps)
cmp = Set{String}()
for reg in Registry.reachable_registries()
for (uuid, regpkg) in reg
@@ -80,9 +80,9 @@ function complete_remote_package(partial; hint::Bool)
if is_julia_compat === nothing || is_julia_compat
push!(cmp, name)
# In hint mode the result is only used if there is a single matching entry
- # so we abort the search
+ # so we can return no matches in case of more than one match
if hint && found_match
- return sort!(collect(cmp))
+ return true # true means returned early
end
found_match = true
break
@@ -91,19 +91,21 @@ function complete_remote_package(partial; hint::Bool)
end
end
end
- return sort!(collect(cmp))
+ append!(comps, sort!(collect(cmp)))
+ return false # false means performed full search
end
function complete_help(options, partial; hint::Bool)
names = String[]
for cmds in values(SPECS)
- append!(names, [spec.canonical_name for spec in values(cmds)])
+ append!(names, [spec.canonical_name for spec in values(cmds)])
end
return sort!(unique!(append!(names, collect(keys(SPECS)))))
end
function complete_installed_packages(options, partial; hint::Bool)
- env = try EnvCache()
+ env = try
+ EnvCache()
catch err
err isa PkgError || rethrow()
return String[]
@@ -115,7 +117,8 @@ function complete_installed_packages(options, partial; hint::Bool)
end
function complete_all_installed_packages(options, partial; hint::Bool)
- env = try EnvCache()
+ env = try
+ EnvCache()
catch err
err isa PkgError || rethrow()
return String[]
@@ -124,7 +127,8 @@ function complete_all_installed_packages(options, partial; hint::Bool)
end
function complete_installed_packages_and_compat(options, partial; hint::Bool)
- env = try EnvCache()
+ env = try
+ EnvCache()
catch err
err isa PkgError || rethrow()
return String[]
@@ -136,7 +140,8 @@ function complete_installed_packages_and_compat(options, partial; hint::Bool)
end
function complete_fixed_packages(options, partial; hint::Bool)
- env = try EnvCache()
+ env = try
+ EnvCache()
catch err
err isa PkgError || rethrow()
return String[]
@@ -149,13 +154,31 @@ function complete_add_dev(options, partial, i1, i2; hint::Bool)
if occursin(Base.Filesystem.path_separator_re, partial)
return comps, idx, !isempty(comps)
end
- comps = vcat(comps, sort(complete_remote_package(partial; hint)))
- if !isempty(partial)
+ returned_early = complete_remote_package!(comps, partial; hint)
+ # returning early means that no further search should be done here
+ if !returned_early
append!(comps, filter!(startswith(partial), [info.name for info in values(Types.stdlib_infos())]))
end
return comps, idx, !isempty(comps)
end
+# TODO: Move
+import Pkg: Operations, Types, Apps
+function complete_installed_apps(options, partial; hint)
+ manifest = try
+ Types.read_manifest(joinpath(Apps.app_env_folder(), "AppManifest.toml"))
+ catch err
+ err isa PkgError || rethrow()
+ return String[]
+ end
+ apps = String[]
+ for (uuid, entry) in manifest.deps
+ append!(apps, keys(entry.apps))
+ push!(apps, entry.name)
+ end
+ return unique!(apps)
+end
+
########################
# COMPLETION INTERFACE #
########################
@@ -179,13 +202,23 @@ function complete_command(statement::Statement, final::Bool, on_sub::Bool)
end
complete_opt(opt_specs) =
- unique(sort(map(wrap_option,
- map(x -> getproperty(x, :name),
- collect(values(opt_specs))))))
+ unique(
+ sort(
+ map(
+ wrap_option,
+ map(
+ x -> getproperty(x, :name),
+ collect(values(opt_specs))
+ )
+ )
+ )
+)
-function complete_argument(spec::CommandSpec, options::Vector{String},
- partial::AbstractString, offset::Int,
- index::Int; hint::Bool)
+function complete_argument(
+ spec::CommandSpec, options::Vector{String},
+ partial::AbstractString, offset::Int,
+ index::Int; hint::Bool
+ )
if spec.completions isa Symbol
# if completions is a symbol, it is a function in REPLExt that needs to be forwarded
# to REPLMode (couldn't be linked there because REPLExt is not a dependency of REPLMode)
@@ -195,11 +228,11 @@ function complete_argument(spec::CommandSpec, options::Vector{String},
@error "REPLMode indicates a completion function called :$(spec.completions) that cannot be found in REPLExt"
rethrow()
end
- spec.completions = function(opts, partial, offset, index; hint::Bool)
- applicable(completions, opts, partial, offset, index) ?
- completions(opts, partial, offset, index; hint) :
- completions(opts, partial; hint)
- end
+ spec.completions = function (opts, partial, offset, index; hint::Bool)
+ return applicable(completions, opts, partial, offset, index) ?
+ completions(opts, partial, offset, index; hint) :
+ completions(opts, partial; hint)
+ end
end
spec.completions === nothing && return String[]
# finish parsing opts
@@ -216,7 +249,7 @@ end
function _completions(input, final, offset, index; hint::Bool)
statement, word_count, partial = nothing, nothing, nothing
try
- words = tokenize(input)[end]
+ words = tokenize(input; rm_leading_bracket = false)[end]
word_count = length(words)
statement, partial = core_parse(words)
if final
@@ -255,7 +288,7 @@ function _completions(input, final, offset, index; hint::Bool)
end
end
-function completions(full, index; hint::Bool=false)::Tuple{Vector{String},UnitRange{Int},Bool}
+function completions(full, index; hint::Bool = false)::Tuple{Vector{String}, UnitRange{Int}, Bool}
pre = full[1:index]
isempty(pre) && return default_commands(), 0:-1, false # empty input -> complete commands
offset_adjust = 0
@@ -264,8 +297,8 @@ function completions(full, index; hint::Bool=false)::Tuple{Vector{String},UnitRa
pre = string(pre[1], " ", pre[2:end])
offset_adjust = -1
end
- last = split(pre, ' ', keepempty=true)[end]
- offset = isempty(last) ? index+1+offset_adjust : last.offset+1+offset_adjust
- final = isempty(last) # is the cursor still attached to the final token?
+ last = split(pre, ' ', keepempty = true)[end]
+ offset = isempty(last) ? index + 1 + offset_adjust : last.offset + 1 + offset_adjust
+ final = isempty(last) # is the cursor still attached to the final token?
return _completions(pre, final, offset, index; hint)
end
diff --git a/ext/REPLExt/precompile.jl b/ext/REPLExt/precompile.jl
index 2deb9b84f0..fbfdf14baf 100644
--- a/ext/REPLExt/precompile.jl
+++ b/ext/REPLExt/precompile.jl
@@ -32,7 +32,7 @@ let
Base.precompile(Tuple{typeof(REPL.LineEdit.complete_line), REPLExt.PkgCompletionProvider, REPL.LineEdit.PromptState})
Base.precompile(Tuple{typeof(REPL.REPLCompletions.completion_text), REPL.REPLCompletions.PackageCompletion})
Base.precompile(Tuple{typeof(REPLExt.on_done), REPL.LineEdit.MIState, Base.GenericIOBuffer{Memory{UInt8}}, Bool, REPL.LineEditREPL})
- Base.precompile(Tuple{typeof(Core.kwcall), NamedTuple{(:hint,), Tuple{Bool}}, typeof(REPL.LineEdit.complete_line), REPLExt.PkgCompletionProvider, REPL.LineEdit.PromptState})
+ return Base.precompile(Tuple{typeof(Core.kwcall), NamedTuple{(:hint,), Tuple{Bool}}, typeof(REPL.LineEdit.complete_line), REPLExt.PkgCompletionProvider, REPL.LineEdit.PromptState})
end
if Base.generating_output()
diff --git a/src/API.jl b/src/API.jl
index 5e5723a8b7..f1abc6cb6b 100644
--- a/src/API.jl
+++ b/src/API.jl
@@ -12,9 +12,9 @@ import FileWatching
import Base: StaleCacheKey
-import ..depots, ..depots1, ..logdir, ..devdir, ..printpkgstyle
+import ..depots, ..depots1, ..logdir, ..devdir, ..printpkgstyle, .._autoprecompilation_enabled_scoped
import ..Operations, ..GitTools, ..Pkg, ..Registry
-import ..can_fancyprint, ..pathrepr, ..isurl, ..PREV_ENV_PATH
+import ..can_fancyprint, ..pathrepr, ..isurl, ..PREV_ENV_PATH, ..atomic_toml_write
using ..Types, ..TOML
using ..Types: VersionTypes
using Base.BinaryPlatforms
@@ -27,17 +27,17 @@ include("generate.jl")
Base.@kwdef struct PackageInfo
name::String
- version::Union{Nothing,VersionNumber}
- tree_hash::Union{Nothing,String}
+ version::Union{Nothing, VersionNumber}
+ tree_hash::Union{Nothing, String}
is_direct_dep::Bool
is_pinned::Bool
is_tracking_path::Bool
is_tracking_repo::Bool
is_tracking_registry::Bool
- git_revision::Union{Nothing,String}
- git_source::Union{Nothing,String}
+ git_revision::Union{Nothing, String}
+ git_source::Union{Nothing, String}
source::String
- dependencies::Dict{String,UUID}
+ dependencies::Dict{String, UUID}
end
function Base.:(==)(a::PackageInfo, b::PackageInfo)
@@ -53,29 +53,36 @@ end
function package_info(env::EnvCache, pkg::PackageSpec)::PackageInfo
entry = manifest_info(env.manifest, pkg.uuid)
if entry === nothing
- pkgerror("expected package $(err_rep(pkg)) to exist in the manifest",
- " (use `resolve` to populate the manifest)")
+ pkgerror(
+ "expected package $(err_rep(pkg)) to exist in the manifest",
+ " (use `resolve` to populate the manifest)"
+ )
end
- package_info(env, pkg, entry)
+ return package_info(env, pkg, entry)
end
function package_info(env::EnvCache, pkg::PackageSpec, entry::PackageEntry)::PackageInfo
git_source = pkg.repo.source === nothing ? nothing :
isurl(pkg.repo.source::String) ? pkg.repo.source::String :
Operations.project_rel_path(env, pkg.repo.source::String)
+ _source_path = Operations.source_path(env.manifest_file, pkg)
+ if _source_path === nothing
+ @debug "Manifest file $(env.manifest_file) contents:\n$(read(env.manifest_file, String))"
+ pkgerror("could not find source path for package $(err_rep(pkg)) based on $(env.manifest_file)")
+ end
info = PackageInfo(
- name = pkg.name,
- version = pkg.version != VersionSpec() ? pkg.version : nothing,
- tree_hash = pkg.tree_hash === nothing ? nothing : string(pkg.tree_hash), # TODO or should it just be a SHA?
- is_direct_dep = pkg.uuid in values(env.project.deps),
- is_pinned = pkg.pinned,
- is_tracking_path = pkg.path !== nothing,
- is_tracking_repo = pkg.repo.rev !== nothing || pkg.repo.source !== nothing,
+ name = pkg.name,
+ version = pkg.version != VersionSpec() ? pkg.version : nothing,
+ tree_hash = pkg.tree_hash === nothing ? nothing : string(pkg.tree_hash), # TODO or should it just be a SHA?
+ is_direct_dep = pkg.uuid in values(env.project.deps),
+ is_pinned = pkg.pinned,
+ is_tracking_path = pkg.path !== nothing,
+ is_tracking_repo = pkg.repo.rev !== nothing || pkg.repo.source !== nothing,
is_tracking_registry = Operations.is_tracking_registry(pkg),
- git_revision = pkg.repo.rev,
- git_source = git_source,
- source = Operations.project_rel_path(env, Operations.source_path(env.manifest_file, pkg)),
- dependencies = copy(entry.deps), #TODO is copy needed?
+ git_revision = pkg.repo.rev,
+ git_source = git_source,
+ source = Operations.project_rel_path(env, _source_path),
+ dependencies = copy(entry.deps), #TODO is copy needed?
)
return info
end
@@ -90,17 +97,17 @@ function dependencies(fn::Function, uuid::UUID)
if dep === nothing
pkgerror("dependency with UUID `$uuid` does not exist")
end
- fn(dep)
+ return fn(dep)
end
Base.@kwdef struct ProjectInfo
- name::Union{Nothing,String}
- uuid::Union{Nothing,UUID}
- version::Union{Nothing,VersionNumber}
+ name::Union{Nothing, String}
+ uuid::Union{Nothing, UUID}
+ version::Union{Nothing, VersionNumber}
ispackage::Bool
- dependencies::Dict{String,UUID}
- sources::Dict{String,Dict{String,String}}
+ dependencies::Dict{String, UUID}
+ sources::Dict{String, Dict{String, String}}
path::String
end
@@ -108,26 +115,28 @@ project() = project(EnvCache())
function project(env::EnvCache)::ProjectInfo
pkg = env.pkg
return ProjectInfo(
- name = pkg === nothing ? nothing : pkg.name,
- uuid = pkg === nothing ? nothing : pkg.uuid,
- version = pkg === nothing ? nothing : pkg.version::VersionNumber,
- ispackage = pkg !== nothing,
+ name = pkg === nothing ? nothing : pkg.name,
+ uuid = pkg === nothing ? nothing : pkg.uuid,
+ version = pkg === nothing ? nothing : pkg.version::VersionNumber,
+ ispackage = pkg !== nothing,
dependencies = env.project.deps,
- sources = env.project.sources,
- path = env.project_file
+ sources = env.project.sources,
+ path = env.project_file
)
end
-function check_package_name(x::AbstractString, mode::Union{Nothing,String,Symbol}=nothing)
+function check_package_name(x::AbstractString, mode::Union{Nothing, String, Symbol} = nothing)
if !Base.isidentifier(x)
message = sprint() do iostr
print(iostr, "`$x` is not a valid package name")
if endswith(lowercase(x), ".jl")
- print(iostr, ". Perhaps you meant `$(chop(x; tail=3))`")
+ print(iostr, ". Perhaps you meant `$(chop(x; tail = 3))`")
end
- if mode !== nothing && any(occursin.(['\\','/'], x)) # maybe a url or a path
- print(iostr, "\nThe argument appears to be a URL or path, perhaps you meant ",
- "`Pkg.$mode(url=\"...\")` or `Pkg.$mode(path=\"...\")`.")
+ if mode !== nothing && any(occursin.(['\\', '/'], x)) # maybe a url or a path
+ print(
+ iostr, "\nThe argument appears to be a URL or path, perhaps you meant ",
+ "`Pkg.$mode(url=\"...\")` or `Pkg.$mode(path=\"...\")`."
+ )
end
end
pkgerror(message)
@@ -137,15 +146,15 @@ end
check_package_name(::Nothing, ::Any) = nothing
function require_not_empty(pkgs, f::Symbol)
- isempty(pkgs) && pkgerror("$f requires at least one package")
+ return isempty(pkgs) && pkgerror("$f requires at least one package")
end
# Provide some convenience calls
for f in (:develop, :add, :rm, :up, :pin, :free, :test, :build, :status, :why, :precompile)
@eval begin
$f(pkg::Union{AbstractString, PackageSpec}; kwargs...) = $f([pkg]; kwargs...)
- $f(pkgs::Vector{<:AbstractString}; kwargs...) = $f([PackageSpec(pkg) for pkg in pkgs]; kwargs...)
- function $f(pkgs::Vector{PackageSpec}; io::IO=$(f === :status ? :stdout_f : :stderr_f)(), kwargs...)
+ $f(pkgs::Vector{<:AbstractString}; kwargs...) = $f([PackageSpec(pkg) for pkg in pkgs]; kwargs...)
+ function $f(pkgs::Vector{PackageSpec}; io::IO = $(f === :status ? :stdout_f : :stderr_f)(), kwargs...)
$(f != :precompile) && Registry.download_default_registries(io)
ctx = Context()
# Save initial environment for undo/redo functionality
@@ -153,7 +162,7 @@ for f in (:develop, :add, :rm, :up, :pin, :free, :test, :build, :status, :why, :
add_snapshot_to_undo(ctx.env)
saved_initial_snapshot[] = true
end
- kwargs = merge((;kwargs...), (:io => io,))
+ kwargs = merge((; kwargs...), (:io => io,))
pkgs = deepcopy(pkgs) # don't mutate input
foreach(handle_package_input!, pkgs)
ret = $f(ctx, pkgs; kwargs...)
@@ -162,53 +171,85 @@ for f in (:develop, :add, :rm, :up, :pin, :free, :test, :build, :status, :why, :
return ret
end
$f(ctx::Context; kwargs...) = $f(ctx, PackageSpec[]; kwargs...)
- function $f(; name::Union{Nothing,AbstractString}=nothing, uuid::Union{Nothing,String,UUID}=nothing,
- version::Union{VersionNumber, String, VersionSpec, Nothing}=nothing,
- url=nothing, rev=nothing, path=nothing, mode=PKGMODE_PROJECT, subdir=nothing, kwargs...)
+ function $f(;
+ name::Union{Nothing, AbstractString} = nothing, uuid::Union{Nothing, String, UUID} = nothing,
+ version::Union{VersionNumber, String, VersionSpec, Nothing} = nothing,
+ url = nothing, rev = nothing, path = nothing, mode = PKGMODE_PROJECT, subdir = nothing, kwargs...
+ )
pkg = PackageSpec(; name, uuid, version, url, rev, path, subdir)
if $f === status || $f === rm || $f === up
- kwargs = merge((;kwargs...), (:mode => mode,))
+ kwargs = merge((; kwargs...), (:mode => mode,))
end
# Handle $f() case
- if all(isnothing, [name,uuid,version,url,rev,path,subdir])
+ return if all(isnothing, [name, uuid, version, url, rev, path, subdir])
$f(PackageSpec[]; kwargs...)
else
$f(pkg; kwargs...)
end
end
function $f(pkgs::Vector{<:NamedTuple}; kwargs...)
- $f([PackageSpec(;pkg...) for pkg in pkgs]; kwargs...)
+ return $f([PackageSpec(; pkg...) for pkg in pkgs]; kwargs...)
end
end
end
-function update_source_if_set(project, pkg)
+function update_source_if_set(env, pkg)
+ project = env.project
source = get(project.sources, pkg.name, nothing)
- source === nothing && return
- # This should probably not modify the dicts directly...
- if pkg.repo.source !== nothing
- source["url"] = pkg.repo.source
- end
- if pkg.repo.rev !== nothing
- source["rev"] = pkg.repo.rev
- end
- if pkg.path !== nothing
- source["path"] = pkg.path
- end
- path, repo = get_path_repo(project, pkg.name)
- if path !== nothing
- pkg.path = path
- end
- if repo.source !== nothing
- pkg.repo.source = repo.source
+ if source !== nothing
+ if pkg.repo == GitRepo()
+ delete!(project.sources, pkg.name)
+ else
+ # This should probably not modify the dicts directly...
+ if pkg.repo.source !== nothing
+ source["url"] = pkg.repo.source
+ delete!(source, "path")
+ end
+ if pkg.repo.rev !== nothing
+ source["rev"] = pkg.repo.rev
+ delete!(source, "path")
+ end
+ if pkg.repo.subdir !== nothing
+ source["subdir"] = pkg.repo.subdir
+ end
+ if pkg.path !== nothing
+ source["path"] = pkg.path
+ delete!(source, "url")
+ delete!(source, "rev")
+ end
+ end
+ if pkg.subdir !== nothing
+ source["subdir"] = pkg.subdir
+ end
+ path, repo = get_path_repo(project, pkg.name)
+ if path !== nothing
+ pkg.path = path
+ end
+ if repo.source !== nothing
+ pkg.repo.source = repo.source
+ end
+ if repo.rev !== nothing
+ pkg.repo.rev = repo.rev
+ end
+ if repo.subdir !== nothing
+ pkg.repo.subdir = repo.subdir
+ end
end
- if repo.rev !== nothing
- pkg.repo.rev = repo.rev
+
+ # Packages in manifest should have their paths set to the path in the manifest
+ for (path, wproj) in env.workspace
+ if wproj.uuid == pkg.uuid
+ pkg.path = Types.relative_project_path(env.manifest_file, dirname(path))
+ break
+ end
end
+ return
end
-function develop(ctx::Context, pkgs::Vector{PackageSpec}; shared::Bool=true,
- preserve::PreserveLevel=Operations.default_preserve(), platform::AbstractPlatform=HostPlatform(), kwargs...)
+function develop(
+ ctx::Context, pkgs::Vector{PackageSpec}; shared::Bool = true,
+ preserve::PreserveLevel = Operations.default_preserve(), platform::AbstractPlatform = HostPlatform(), kwargs...
+ )
require_not_empty(pkgs, :develop)
Context!(ctx; kwargs...)
@@ -224,8 +265,10 @@ function develop(ctx::Context, pkgs::Vector{PackageSpec}; shared::Bool=true,
pkgerror("rev argument not supported by `develop`; consider using `add` instead")
end
if pkg.version != VersionSpec()
- pkgerror("version specification invalid when calling `develop`:",
- " `$(pkg.version)` specified for package $(err_rep(pkg))")
+ pkgerror(
+ "version specification invalid when calling `develop`:",
+ " `$(pkg.version)` specified for package $(err_rep(pkg))"
+ )
end
# not strictly necessary to check these fields early, but it is more efficient
if pkg.name !== nothing && (length(findall(x -> x.name == pkg.name, pkgs)) > 1)
@@ -238,6 +281,7 @@ function develop(ctx::Context, pkgs::Vector{PackageSpec}; shared::Bool=true,
new_git = handle_repos_develop!(ctx, pkgs, shared)
+ Operations.update_registries(ctx; force = false, update_cooldown = Day(1))
for pkg in pkgs
if Types.collides_with_project(ctx.env, pkg)
@@ -246,15 +290,17 @@ function develop(ctx::Context, pkgs::Vector{PackageSpec}; shared::Bool=true,
if length(findall(x -> x.uuid == pkg.uuid, pkgs)) > 1
pkgerror("it is invalid to specify multiple packages with the same UUID: $(err_rep(pkg))")
end
- update_source_if_set(ctx.env.project, pkg)
+ update_source_if_set(ctx.env, pkg)
end
- Operations.develop(ctx, pkgs, new_git; preserve=preserve, platform=platform)
+ Operations.develop(ctx, pkgs, new_git; preserve = preserve, platform = platform)
return
end
-function add(ctx::Context, pkgs::Vector{PackageSpec}; preserve::PreserveLevel=Operations.default_preserve(),
- platform::AbstractPlatform=HostPlatform(), target::Symbol=:deps, allow_autoprecomp::Bool=true, kwargs...)
+function add(
+ ctx::Context, pkgs::Vector{PackageSpec}; preserve::PreserveLevel = Operations.default_preserve(),
+ platform::AbstractPlatform = HostPlatform(), target::Symbol = :deps, allow_autoprecomp::Bool = true, kwargs...
+ )
require_not_empty(pkgs, :add)
Context!(ctx; kwargs...)
@@ -268,8 +314,10 @@ function add(ctx::Context, pkgs::Vector{PackageSpec}; preserve::PreserveLevel=Op
end
if pkg.repo.source !== nothing || pkg.repo.rev !== nothing
if pkg.version != VersionSpec()
- pkgerror("version specification invalid when tracking a repository:",
- " `$(pkg.version)` specified for package $(err_rep(pkg))")
+ pkgerror(
+ "version specification invalid when tracking a repository:",
+ " `$(pkg.version)` specified for package $(err_rep(pkg))"
+ )
end
end
# not strictly necessary to check these fields early, but it is more efficient
@@ -286,12 +334,12 @@ function add(ctx::Context, pkgs::Vector{PackageSpec}; preserve::PreserveLevel=Op
# repo + unpinned -> name, uuid, repo.rev, repo.source, tree_hash
# repo + pinned -> name, uuid, tree_hash
- Operations.update_registries(ctx; force=false, update_cooldown=Day(1))
+ Operations.update_registries(ctx; force = false, update_cooldown = Day(1))
project_deps_resolve!(ctx.env, pkgs)
registry_resolve!(ctx.registries, pkgs)
stdlib_resolve!(pkgs)
- ensure_resolved(ctx, ctx.env.manifest, pkgs, registry=true)
+ ensure_resolved(ctx, ctx.env.manifest, pkgs, registry = true)
for pkg in pkgs
if Types.collides_with_project(ctx.env, pkg)
@@ -300,14 +348,14 @@ function add(ctx::Context, pkgs::Vector{PackageSpec}; preserve::PreserveLevel=Op
if length(findall(x -> x.uuid == pkg.uuid, pkgs)) > 1
pkgerror("it is invalid to specify multiple packages with the same UUID: $(err_rep(pkg))")
end
- update_source_if_set(ctx.env.project, pkg)
+ update_source_if_set(ctx.env, pkg)
end
Operations.add(ctx, pkgs, new_git; allow_autoprecomp, preserve, platform, target)
return
end
-function rm(ctx::Context, pkgs::Vector{PackageSpec}; mode=PKGMODE_PROJECT, all_pkgs::Bool=false, kwargs...)
+function rm(ctx::Context, pkgs::Vector{PackageSpec}; mode = PKGMODE_PROJECT, all_pkgs::Bool = false, kwargs...)
Context!(ctx; kwargs...)
if all_pkgs
!isempty(pkgs) && pkgerror("cannot specify packages when operating on all packages")
@@ -320,9 +368,11 @@ function rm(ctx::Context, pkgs::Vector{PackageSpec}; mode=PKGMODE_PROJECT, all_p
if pkg.name === nothing && pkg.uuid === nothing
pkgerror("name or UUID specification required when calling `rm`")
end
- if !(pkg.version == VersionSpec() && pkg.pinned == false &&
- pkg.tree_hash === nothing && pkg.repo.source === nothing &&
- pkg.repo.rev === nothing && pkg.path === nothing)
+ if !(
+ pkg.version == VersionSpec() && pkg.pinned == false &&
+ pkg.tree_hash === nothing && pkg.repo.source === nothing &&
+ pkg.repo.rev === nothing && pkg.path === nothing
+ )
pkgerror("packages may only be specified by name or UUID when calling `rm`")
end
end
@@ -341,24 +391,26 @@ function append_all_pkgs!(pkgs, ctx, mode)
if mode == PKGMODE_PROJECT || mode == PKGMODE_COMBINED
for (name::String, uuid::UUID) in ctx.env.project.deps
path, repo = get_path_repo(ctx.env.project, name)
- push!(pkgs, PackageSpec(name=name, uuid=uuid, path=path, repo=repo))
+ push!(pkgs, PackageSpec(name = name, uuid = uuid, path = path, repo = repo))
end
end
if mode == PKGMODE_MANIFEST || mode == PKGMODE_COMBINED
for (uuid, entry) in ctx.env.manifest
path, repo = get_path_repo(ctx.env.project, entry.name)
- push!(pkgs, PackageSpec(name=entry.name, uuid=uuid, path=path, repo=repo))
+ push!(pkgs, PackageSpec(name = entry.name, uuid = uuid, path = path, repo = repo))
end
end
return
end
-function up(ctx::Context, pkgs::Vector{PackageSpec};
- level::UpgradeLevel=UPLEVEL_MAJOR, mode::PackageMode=PKGMODE_PROJECT,
- preserve::Union{Nothing,PreserveLevel}= isempty(pkgs) ? nothing : PRESERVE_ALL,
- update_registry::Bool=true,
- skip_writing_project::Bool=false,
- kwargs...)
+function up(
+ ctx::Context, pkgs::Vector{PackageSpec};
+ level::UpgradeLevel = UPLEVEL_MAJOR, mode::PackageMode = PKGMODE_PROJECT,
+ preserve::Union{Nothing, PreserveLevel} = isempty(pkgs) ? nothing : PRESERVE_ALL,
+ update_registry::Bool = true,
+ skip_writing_project::Bool = false,
+ kwargs...
+ )
Context!(ctx; kwargs...)
if Operations.is_fully_pinned(ctx)
printpkgstyle(ctx.io, :Update, "All dependencies are pinned - nothing to update.", color = Base.info_color())
@@ -366,7 +418,7 @@ function up(ctx::Context, pkgs::Vector{PackageSpec};
end
if update_registry
Registry.download_default_registries(ctx.io)
- Operations.update_registries(ctx; force=true)
+ Operations.update_registries(ctx; force = true)
end
Operations.prune_manifest(ctx.env)
if isempty(pkgs)
@@ -378,18 +430,20 @@ function up(ctx::Context, pkgs::Vector{PackageSpec};
manifest_resolve!(ctx.env.manifest, pkgs)
ensure_resolved(ctx, ctx.env.manifest, pkgs)
end
-
+ for pkg in pkgs
+ update_source_if_set(ctx.env, pkg)
+ end
Operations.up(ctx, pkgs, level; skip_writing_project, preserve)
return
end
-resolve(; io::IO=stderr_f(), kwargs...) = resolve(Context(;io); kwargs...)
-function resolve(ctx::Context; skip_writing_project::Bool=false, kwargs...)
- up(ctx; level=UPLEVEL_FIXED, mode=PKGMODE_MANIFEST, update_registry=false, skip_writing_project, kwargs...)
+resolve(; io::IO = stderr_f(), kwargs...) = resolve(Context(; io); kwargs...)
+function resolve(ctx::Context; skip_writing_project::Bool = false, kwargs...)
+ up(ctx; level = UPLEVEL_FIXED, mode = PKGMODE_MANIFEST, update_registry = false, skip_writing_project, kwargs...)
return nothing
end
-function pin(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwargs...)
+function pin(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool = false, kwargs...)
Context!(ctx; kwargs...)
if all_pkgs
!isempty(pkgs) && pkgerror("cannot specify packages when operating on all packages")
@@ -403,12 +457,16 @@ function pin(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwar
pkgerror("name or UUID specification required when calling `pin`")
end
if pkg.repo.source !== nothing
- pkgerror("repository specification invalid when calling `pin`:",
- " `$(pkg.repo.source)` specified for package $(err_rep(pkg))")
+ pkgerror(
+ "repository specification invalid when calling `pin`:",
+ " `$(pkg.repo.source)` specified for package $(err_rep(pkg))"
+ )
end
if pkg.repo.rev !== nothing
- pkgerror("git revision specification invalid when calling `pin`:",
- " `$(pkg.repo.rev)` specified for package $(err_rep(pkg))")
+ pkgerror(
+ "git revision specification invalid when calling `pin`:",
+ " `$(pkg.repo.rev)` specified for package $(err_rep(pkg))"
+ )
end
version = pkg.version
if version isa VersionSpec
@@ -416,6 +474,7 @@ function pin(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwar
pkgerror("pinning a package requires a single version, not a versionrange")
end
end
+ update_source_if_set(ctx.env, pkg)
end
project_deps_resolve!(ctx.env, pkgs)
@@ -424,7 +483,7 @@ function pin(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwar
return
end
-function free(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwargs...)
+function free(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool = false, kwargs...)
Context!(ctx; kwargs...)
if all_pkgs
!isempty(pkgs) && pkgerror("cannot specify packages when operating on all packages")
@@ -437,9 +496,11 @@ function free(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwa
if pkg.name === nothing && pkg.uuid === nothing
pkgerror("name or UUID specification required when calling `free`")
end
- if !(pkg.version == VersionSpec() && pkg.pinned == false &&
- pkg.tree_hash === nothing && pkg.repo.source === nothing &&
- pkg.repo.rev === nothing && pkg.path === nothing)
+ if !(
+ pkg.version == VersionSpec() && pkg.pinned == false &&
+ pkg.tree_hash === nothing && pkg.repo.source === nothing &&
+ pkg.repo.rev === nothing && pkg.path === nothing
+ )
pkgerror("packages may only be specified by name or UUID when calling `free`")
end
end
@@ -451,14 +512,16 @@ function free(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwa
return
end
-function test(ctx::Context, pkgs::Vector{PackageSpec};
- coverage=false, test_fn=nothing,
- julia_args::Union{Cmd, AbstractVector{<:AbstractString}}=``,
- test_args::Union{Cmd, AbstractVector{<:AbstractString}}=``,
- force_latest_compatible_version::Bool=false,
- allow_earlier_backwards_compatible_versions::Bool=true,
- allow_reresolve::Bool=true,
- kwargs...)
+function test(
+ ctx::Context, pkgs::Vector{PackageSpec};
+ coverage = false, test_fn = nothing,
+ julia_args::Union{Cmd, AbstractVector{<:AbstractString}} = ``,
+ test_args::Union{Cmd, AbstractVector{<:AbstractString}} = ``,
+ force_latest_compatible_version::Bool = false,
+ allow_earlier_backwards_compatible_versions::Bool = true,
+ allow_reresolve::Bool = true,
+ kwargs...
+ )
julia_args = Cmd(julia_args)
test_args = Cmd(test_args)
Context!(ctx; kwargs...)
@@ -496,8 +559,8 @@ function is_manifest_current(path::AbstractString)
return Operations.is_manifest_current(env)
end
-const UsageDict = Dict{String,DateTime}
-const UsageByDepotDict = Dict{String,UsageDict}
+const UsageDict = Dict{String, DateTime}
+const UsageByDepotDict = Dict{String, UsageDict}
"""
gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, kwargs...)
@@ -515,7 +578,7 @@ admin privileges depending on the setup).
Use verbose mode (`verbose=true`) for detailed output.
"""
-function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, force=false, kwargs...)
+function gc(ctx::Context = Context(); collect_delay::Period = Day(7), verbose = false, force = false, kwargs...)
Context!(ctx; kwargs...)
env = ctx.env
@@ -549,6 +612,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
for (filename, infos) in parse_toml(usage_filepath)
f.(Ref(filename), infos)
end
+ return
end
# Extract usage data from this depot, (taking only the latest state for each
@@ -556,7 +620,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
# into the overall list across depots to create a single, coherent view across
# all depots.
usage = UsageDict()
- let usage=usage
+ let usage = usage
reduce_usage!(joinpath(logdir(depot), "manifest_usage.toml")) do filename, info
# For Manifest usage, store only the last DateTime for each filename found
usage[filename] = max(get(usage, filename, DateTime(0)), DateTime(info["time"])::DateTime)
@@ -565,7 +629,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
manifest_usage_by_depot[depot] = usage
usage = UsageDict()
- let usage=usage
+ let usage = usage
reduce_usage!(joinpath(logdir(depot), "artifact_usage.toml")) do filename, info
# For Artifact usage, store only the last DateTime for each filename found
usage[filename] = max(get(usage, filename, DateTime(0)), DateTime(info["time"])::DateTime)
@@ -576,7 +640,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
# track last-used
usage = UsageDict()
parents = Dict{String, Set{String}}()
- let usage=usage
+ let usage = usage
reduce_usage!(joinpath(logdir(depot), "scratch_usage.toml")) do filename, info
# For Artifact usage, store only the last DateTime for each filename found
usage[filename] = max(get(usage, filename, DateTime(0)), DateTime(info["time"])::DateTime)
@@ -617,21 +681,20 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
# Write out the TOML file for this depot
usage_path = joinpath(logdir(depot), fname)
if !(isempty(usage)::Bool) || isfile(usage_path)
- let usage=usage
- open(usage_path, "w") do io
- TOML.print(io, usage, sorted=true)
- end
+ let usage = usage
+ atomic_toml_write(usage_path, usage, sorted = true)
end
end
end
+ return
end
# Write condensed Manifest usage
- let all_manifest_tomls=all_manifest_tomls
+ let all_manifest_tomls = all_manifest_tomls
write_condensed_toml(manifest_usage_by_depot, "manifest_usage.toml") do depot, usage
# Keep only manifest usage markers that are still existent
- let usage=usage
- filter!(((k,v),) -> k in all_manifest_tomls, usage)
+ let usage = usage
+ filter!(((k, v),) -> k in all_manifest_tomls, usage)
# Expand it back into a dict-of-dicts
return Dict(k => [Dict("time" => v)] for (k, v) in usage)
@@ -640,23 +703,23 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
end
# Write condensed Artifact usage
- let all_artifact_tomls=all_artifact_tomls
+ let all_artifact_tomls = all_artifact_tomls
write_condensed_toml(artifact_usage_by_depot, "artifact_usage.toml") do depot, usage
let usage = usage
- filter!(((k,v),) -> k in all_artifact_tomls, usage)
+ filter!(((k, v),) -> k in all_artifact_tomls, usage)
return Dict(k => [Dict("time" => v)] for (k, v) in usage)
end
end
end
# Write condensed scratch space usage
- let all_scratch_parents=all_scratch_parents, all_scratch_dirs=all_scratch_dirs
+ let all_scratch_parents = all_scratch_parents, all_scratch_dirs = all_scratch_dirs
write_condensed_toml(scratch_usage_by_depot, "scratch_usage.toml") do depot, usage
# Keep only scratch directories that still exist
- filter!(((k,v),) -> k in all_scratch_dirs, usage)
+ filter!(((k, v),) -> k in all_scratch_dirs, usage)
# Expand it back into a dict-of-dicts
- expanded_usage = Dict{String,Vector{Dict}}()
+ expanded_usage = Dict{String, Vector{Dict}}()
for (k, v) in usage
# Drop scratch spaces whose parents are all non-existent
parents = scratch_parents_by_depot[depot][k]
@@ -665,10 +728,12 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
continue
end
- expanded_usage[k] = [Dict(
- "time" => v,
- "parent_projects" => collect(parents),
- )]
+ expanded_usage[k] = [
+ Dict(
+ "time" => v,
+ "parent_projects" => collect(parents),
+ ),
+ ]
end
return expanded_usage
end
@@ -756,7 +821,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
end
# Mark packages/artifacts as active or not by calling the appropriate user function
- function mark(process_func::Function, index_files, ctx::Context; do_print=true, verbose=false, file_str=nothing)
+ function mark(process_func::Function, index_files, ctx::Context; do_print = true, verbose = false, file_str = nothing)
marked_paths = String[]
active_index_files = Set{String}()
for index_file in index_files
@@ -807,13 +872,16 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
push!(deletion_list, path)
end
end
+ return
end
# Scan manifests, parse them, read in all UUIDs listed and mark those as active
# printpkgstyle(ctx.io, :Active, "manifests:")
- packages_to_keep = mark(process_manifest_pkgs, all_manifest_tomls, ctx,
- verbose=verbose, file_str="manifest files")
+ packages_to_keep = mark(
+ process_manifest_pkgs, all_manifest_tomls, ctx,
+ verbose = verbose, file_str = "manifest files"
+ )
# Do an initial scan of our depots to get a preliminary `packages_to_delete`.
packages_to_delete = String[]
@@ -842,15 +910,19 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
# `packages_to_delete`, as `process_artifacts_toml()` uses it internally to discount
# `Artifacts.toml` files that will be deleted by the future culling operation.
# printpkgstyle(ctx.io, :Active, "artifacts:")
- artifacts_to_keep = let packages_to_delete=packages_to_delete
- mark(x -> process_artifacts_toml(x, packages_to_delete),
- all_artifact_tomls, ctx; verbose=verbose, file_str="artifact files")
+ artifacts_to_keep = let packages_to_delete = packages_to_delete
+ mark(
+ x -> process_artifacts_toml(x, packages_to_delete),
+ all_artifact_tomls, ctx; verbose = verbose, file_str = "artifact files"
+ )
end
- repos_to_keep = mark(process_manifest_repos, all_manifest_tomls, ctx; do_print=false)
+ repos_to_keep = mark(process_manifest_repos, all_manifest_tomls, ctx; do_print = false)
# printpkgstyle(ctx.io, :Active, "scratchspaces:")
- spaces_to_keep = let packages_to_delete=packages_to_delete
- mark(x -> process_scratchspace(x, packages_to_delete),
- all_scratch_dirs, ctx; verbose=verbose, file_str="scratchspaces")
+ spaces_to_keep = let packages_to_delete = packages_to_delete
+ mark(
+ x -> process_scratchspace(x, packages_to_delete),
+ all_scratch_dirs, ctx; verbose = verbose, file_str = "scratchspaces"
+ )
end
# Collect all orphaned paths (packages, artifacts and repos that are not reachable). These
@@ -922,8 +994,8 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
end
elseif uuid == Operations.PkgUUID && isfile(space_dir_or_file)
# special cleanup for the precompile cache files that Pkg saves
- if any(prefix->startswith(basename(space_dir_or_file), prefix), ("suspend_cache_", "pending_cache_"))
- if mtime(space_dir_or_file) < (time() - (24*60*60))
+ if any(prefix -> startswith(basename(space_dir_or_file), prefix), ("suspend_cache_", "pending_cache_"))
+ if mtime(space_dir_or_file) < (time() - (24 * 60 * 60))
push!(depot_orphaned_scratchspaces, space_dir_or_file)
end
end
@@ -950,9 +1022,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
# Write out the `new_orphanage` for this depot
mkpath(dirname(orphanage_file))
- open(orphanage_file, "w") do io
- TOML.print(io, new_orphanage, sorted=true)
- end
+ atomic_toml_write(orphanage_file, new_orphanage, sorted = true)
end
function recursive_dir_size(path)
@@ -964,12 +1034,12 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
try
size += lstat(path).size
catch ex
- @error("Failed to calculate size of $path", exception=ex)
+ @error("Failed to calculate size of $path", exception = ex)
end
end
end
catch ex
- @error("Failed to calculate size of $path", exception=ex)
+ @error("Failed to calculate size of $path", exception = ex)
end
return size
end
@@ -980,7 +1050,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
try
lstat(path).size
catch ex
- @error("Failed to calculate size of $path", exception=ex)
+ @error("Failed to calculate size of $path", exception = ex)
0
end
else
@@ -988,14 +1058,16 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
end
try
Base.Filesystem.prepare_for_deletion(path)
- Base.rm(path; recursive=true, force=true)
+ Base.rm(path; recursive = true, force = true)
catch e
- @warn("Failed to delete $path", exception=e)
+ @warn("Failed to delete $path", exception = e)
return 0
end
if verbose
- printpkgstyle(ctx.io, :Deleted, pathrepr(path) * " (" *
- Base.format_bytes(path_size) * ")")
+ printpkgstyle(
+ ctx.io, :Deleted, pathrepr(path) * " (" *
+ Base.format_bytes(path_size) * ")"
+ )
end
return path_size
end
@@ -1049,12 +1121,12 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
# Do this silently because it's out of scope for Pkg.gc() but it's helpful to use this opportunity to do it
if isdefined(Base.Filesystem, :delayed_delete_dir)
if isdir(Base.Filesystem.delayed_delete_dir())
- for p in readdir(Base.Filesystem.delayed_delete_dir(), join=true)
+ for p in readdir(Base.Filesystem.delayed_delete_dir(), join = true)
try
Base.Filesystem.prepare_for_deletion(p)
- Base.rm(p; recursive=true, force=true, allow_delayed_delete=false)
+ Base.rm(p; recursive = true, force = true, allow_delayed_delete = false)
catch e
- @debug "Failed to delete $p" exception=e
+ @debug "Failed to delete $p" exception = e
end
end
end
@@ -1072,7 +1144,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
s = ndel == 1 ? "" : "s"
bytes_saved_string = Base.format_bytes(freed)
- printpkgstyle(ctx.io, :Deleted, "$(ndel) $(name)$(s) ($bytes_saved_string)")
+ return printpkgstyle(ctx.io, :Deleted, "$(ndel) $(name)$(s) ($bytes_saved_string)")
end
print_deleted(ndel_pkg, package_space_freed, "package installation")
print_deleted(ndel_repo, repo_space_freed, "repo")
@@ -1086,7 +1158,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false,
return
end
-function build(ctx::Context, pkgs::Vector{PackageSpec}; verbose=false, kwargs...)
+function build(ctx::Context, pkgs::Vector{PackageSpec}; verbose = false, allow_reresolve::Bool = true, kwargs...)
Context!(ctx; kwargs...)
if isempty(pkgs)
@@ -1101,7 +1173,7 @@ function build(ctx::Context, pkgs::Vector{PackageSpec}; verbose=false, kwargs...
project_resolve!(ctx.env, pkgs)
manifest_resolve!(ctx.env.manifest, pkgs)
ensure_resolved(ctx, ctx.env.manifest, pkgs)
- Operations.build(ctx, Set{UUID}(pkg.uuid for pkg in pkgs), verbose)
+ return Operations.build(ctx, Set{UUID}(pkg.uuid for pkg in pkgs), verbose; allow_reresolve)
end
function get_or_make_pkgspec(pkgspecs::Vector{PackageSpec}, ctx::Context, uuid)
@@ -1123,13 +1195,15 @@ function get_or_make_pkgspec(pkgspecs::Vector{PackageSpec}, ctx::Context, uuid)
end
end
-function precompile(ctx::Context, pkgs::Vector{PackageSpec}; internal_call::Bool=false,
- strict::Bool=false, warn_loaded = true, already_instantiated = false, timing::Bool = false,
- _from_loading::Bool=false, configs::Union{Base.Precompilation.Config,Vector{Base.Precompilation.Config}}=(``=>Base.CacheFlags()),
- workspace::Bool=false, kwargs...)
+function precompile(
+ ctx::Context, pkgs::Vector{PackageSpec}; internal_call::Bool = false,
+ strict::Bool = false, warn_loaded = true, already_instantiated = false, timing::Bool = false,
+ _from_loading::Bool = false, configs::Union{Base.Precompilation.Config, Vector{Base.Precompilation.Config}} = (`` => Base.CacheFlags()),
+ workspace::Bool = false, kwargs...
+ )
Context!(ctx; kwargs...)
if !already_instantiated
- instantiate(ctx; allow_autoprecomp=false, kwargs...)
+ instantiate(ctx; allow_autoprecomp = false, kwargs...)
@debug "precompile: instantiated"
end
@@ -1140,16 +1214,25 @@ function precompile(ctx::Context, pkgs::Vector{PackageSpec}; internal_call::Bool
end
io = ctx.io
- if io isa IOContext{IO}
+ if io isa IOContext{IO} && !isa(io.io, Base.PipeEndpoint)
# precompile does quite a bit of output and using the IOContext{IO} can cause
# some slowdowns, the important part here is to not specialize the whole
- # precompile function on the io
+ # precompile function on the io.
+ # But don't unwrap the IOContext if it is a PipeEndpoint, as that would
+ # cause the output to lose color.
io = io.io
end
- activate(dirname(ctx.env.project_file)) do
+ return activate(dirname(ctx.env.project_file)) do
pkgs_name = String[pkg.name for pkg in pkgs]
- return Base.Precompilation.precompilepkgs(pkgs_name; internal_call, strict, warn_loaded, timing, _from_loading, configs, manifest=workspace, io)
+ return Base.Precompilation.precompilepkgs(pkgs_name; internal_call, strict, warn_loaded, timing, _from_loading, configs, manifest = workspace, io)
+ end
+end
+
+function precompile(f, args...; kwargs...)
+ return Base.ScopedValues.@with _autoprecompilation_enabled_scoped => false begin
+ f()
+ Pkg.precompile(args...; kwargs...)
end
end
@@ -1163,10 +1246,12 @@ function tree_hash(repo::LibGit2.GitRepo, tree_hash::String)
end
instantiate(; kwargs...) = instantiate(Context(); kwargs...)
-function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing,
- update_registry::Bool=true, verbose::Bool=false,
- platform::AbstractPlatform=HostPlatform(), allow_build::Bool=true, allow_autoprecomp::Bool=true,
- workspace::Bool=false, julia_version_strict::Bool=false, kwargs...)
+function instantiate(
+ ctx::Context; manifest::Union{Bool, Nothing} = nothing,
+ update_registry::Bool = true, verbose::Bool = false,
+ platform::AbstractPlatform = HostPlatform(), allow_build::Bool = true, allow_autoprecomp::Bool = true,
+ workspace::Bool = false, julia_version_strict::Bool = false, kwargs...
+ )
Context!(ctx; kwargs...)
if Registry.download_default_registries(ctx.io)
copy!(ctx.registries, Registry.reachable_registries())
@@ -1174,7 +1259,7 @@ function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing,
if !isfile(ctx.env.project_file) && isfile(ctx.env.manifest_file)
_manifest = Pkg.Types.read_manifest(ctx.env.manifest_file)
Types.check_manifest_julia_version_compat(_manifest, ctx.env.manifest_file; julia_version_strict)
- deps = Dict{String,String}()
+ deps = Dict{String, String}()
for (uuid, pkg) in _manifest
if pkg.name in keys(deps)
# TODO, query what package to put in Project when in interactive mode?
@@ -1183,7 +1268,7 @@ function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing,
deps[pkg.name] = string(uuid)
end
Types.write_project(Dict("deps" => deps), ctx.env.project_file)
- return instantiate(Context(); manifest=manifest, update_registry=update_registry, allow_autoprecomp=allow_autoprecomp, verbose=verbose, platform=platform, kwargs...)
+ return instantiate(Context(); manifest = manifest, update_registry = update_registry, allow_autoprecomp = allow_autoprecomp, verbose = verbose, platform = platform, kwargs...)
end
if (!isfile(ctx.env.manifest_file) && manifest === nothing) || manifest == false
# given no manifest exists, only allow invoking a registry update if there are project deps
@@ -1198,17 +1283,24 @@ function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing,
Types.check_manifest_julia_version_compat(ctx.env.manifest, ctx.env.manifest_file; julia_version_strict)
if Operations.is_manifest_current(ctx.env) === false
+ resolve_cmd = Pkg.in_repl_mode() ? "pkg> resolve" : "Pkg.resolve()"
+ update_cmd = Pkg.in_repl_mode() ? "pkg> update" : "Pkg.update()"
@warn """The project dependencies or compat requirements have changed since the manifest was last resolved.
- It is recommended to `Pkg.resolve()` or consider `Pkg.update()` if necessary."""
+ It is recommended to `$resolve_cmd` or consider `$update_cmd` if necessary."""
end
Operations.prune_manifest(ctx.env)
for (name, uuid) in ctx.env.project.deps
get(ctx.env.manifest, uuid, nothing) === nothing || continue
- pkgerror("`$name` is a direct dependency, but does not appear in the manifest.",
- " If you intend `$name` to be a direct dependency, run `Pkg.resolve()` to populate the manifest.",
- " Otherwise, remove `$name` with `Pkg.rm(\"$name\")`.",
- " Finally, run `Pkg.instantiate()` again.")
+ resolve_cmd = Pkg.in_repl_mode() ? "pkg> resolve" : "Pkg.resolve()"
+ rm_cmd = Pkg.in_repl_mode() ? "pkg> rm $name" : "Pkg.rm(\"$name\")"
+ instantiate_cmd = Pkg.in_repl_mode() ? "pkg> instantiate" : "Pkg.instantiate()"
+ pkgerror(
+ "`$name` is a direct dependency, but does not appear in the manifest.",
+ " If you intend `$name` to be a direct dependency, run `$resolve_cmd` to populate the manifest.",
+ " Otherwise, remove `$name` with `$rm_cmd`.",
+ " Finally, run `$instantiate_cmd` again."
+ )
end
# check if all source code and artifacts are downloaded to exit early
if Operations.is_instantiated(ctx.env, workspace; platform)
@@ -1228,7 +1320,7 @@ function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing,
if !(e isa PkgError) || update_registry == false
rethrow(e)
end
- Operations.update_registries(ctx; force=false)
+ Operations.update_registries(ctx; force = false)
Operations.check_registered(ctx.registries, pkgs)
end
new_git = UUID[]
@@ -1247,12 +1339,12 @@ function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing,
pkgerror("Did not find path `$(repo_source)` for $(err_rep(pkg))")
end
repo_path = Types.add_repo_cache_path(repo_source)
- let repo_source=repo_source
- LibGit2.with(GitTools.ensure_clone(ctx.io, repo_path, repo_source; isbare=true)) do repo
+ let repo_source = repo_source
+ LibGit2.with(GitTools.ensure_clone(ctx.io, repo_path, repo_source; isbare = true)) do repo
# We only update the clone if the tree hash can't be found
tree_hash_object = tree_hash(repo, string(pkg.tree_hash))
if tree_hash_object === nothing
- GitTools.fetch(ctx.io, repo, repo_source; refspecs=Types.refspecs)
+ GitTools.fetch(ctx.io, repo, repo_source; refspecs = Types.refspecs)
tree_hash_object = tree_hash(repo, string(pkg.tree_hash))
end
if tree_hash_object === nothing
@@ -1270,35 +1362,35 @@ function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing,
# Install all artifacts
Operations.download_artifacts(ctx; platform, verbose)
# Run build scripts
- allow_build && Operations.build_versions(ctx, union(new_apply, new_git); verbose=verbose)
+ allow_build && Operations.build_versions(ctx, union(new_apply, new_git); verbose = verbose)
- allow_autoprecomp && Pkg._auto_precompile(ctx, already_instantiated = true)
+ return allow_autoprecomp && Pkg._auto_precompile(ctx, already_instantiated = true)
end
-@deprecate status(mode::PackageMode) status(mode=mode)
+@deprecate status(mode::PackageMode) status(mode = mode)
-function status(ctx::Context, pkgs::Vector{PackageSpec}; diff::Bool=false, mode=PKGMODE_PROJECT, workspace::Bool=false, outdated::Bool=false, compat::Bool=false, extensions::Bool=false, io::IO=stdout_f())
+function status(ctx::Context, pkgs::Vector{PackageSpec}; diff::Bool = false, mode = PKGMODE_PROJECT, workspace::Bool = false, outdated::Bool = false, compat::Bool = false, extensions::Bool = false, io::IO = stdout_f())
if compat
diff && pkgerror("Compat status has no `diff` mode")
outdated && pkgerror("Compat status has no `outdated` mode")
extensions && pkgerror("Compat status has no `extensions` mode")
Operations.print_compat(ctx, pkgs; io)
else
- Operations.status(ctx.env, ctx.registries, pkgs; mode, git_diff=diff, io, outdated, extensions, workspace)
+ Operations.status(ctx.env, ctx.registries, pkgs; mode, git_diff = diff, io, outdated, extensions, workspace)
end
return nothing
end
-function activate(;temp=false, shared=false, prev=false, io::IO=stderr_f())
+function activate(; temp = false, shared = false, prev = false, io::IO = stderr_f())
shared && pkgerror("Must give a name for a shared environment")
- temp && return activate(mktempdir(); io=io)
+ temp && return activate(mktempdir(); io = io)
if prev
if isempty(PREV_ENV_PATH[])
pkgerror("No previously active environment found")
else
- return activate(PREV_ENV_PATH[]; io=io)
+ return activate(PREV_ENV_PATH[]; io = io)
end
end
if !isnothing(Base.active_project())
@@ -1320,14 +1412,14 @@ function _activate_dep(dep_name::AbstractString)
return
end
uuid = get(ctx.env.project.deps, dep_name, nothing)
- if uuid !== nothing
+ return if uuid !== nothing
entry = manifest_info(ctx.env.manifest, uuid)
if entry.path !== nothing
return joinpath(dirname(ctx.env.manifest_file), entry.path::String)
end
end
end
-function activate(path::AbstractString; shared::Bool=false, temp::Bool=false, io::IO=stderr_f())
+function activate(path::AbstractString; shared::Bool = false, temp::Bool = false, io::IO = stderr_f())
temp && pkgerror("Can not give `path` argument when creating a temporary environment")
if !shared
# `pkg> activate path`/`Pkg.activate(path)` does the following
@@ -1374,23 +1466,39 @@ end
function activate(f::Function, new_project::AbstractString)
old = Base.ACTIVE_PROJECT[]
Base.ACTIVE_PROJECT[] = new_project
- try
+ return try
f()
finally
Base.ACTIVE_PROJECT[] = old
end
end
-function compat(ctx::Context, pkg::String, compat_str::Union{Nothing,String}; io = nothing, kwargs...)
+function _compat(ctx::Context, pkg::String, compat_str::Union{Nothing, String}; current::Bool = false, io = nothing, kwargs...)
+ if current
+ if compat_str !== nothing
+ pkgerror("`current` is true, but `compat_str` is not nothing. This is not allowed.")
+ end
+ return set_current_compat(ctx, pkg; io = io)
+ end
io = something(io, ctx.io)
pkg = pkg == "Julia" ? "julia" : pkg
isnothing(compat_str) || (compat_str = string(strip(compat_str, '"')))
+ existing_compat = Operations.get_compat_str(ctx.env.project, pkg)
+ # Double check before deleting a compat entry issue/3567
+ if isinteractive() && (isnothing(compat_str) || isempty(compat_str))
+ if !isnothing(existing_compat)
+ ans = Base.prompt(stdin, ctx.io, "No compat string was given. Delete existing compat entry `$pkg = $(repr(existing_compat))`? [y]/n", default = "y")
+ if lowercase(ans) !== "y"
+ return
+ end
+ end
+ end
if haskey(ctx.env.project.deps, pkg) || pkg == "julia"
success = Operations.set_compat(ctx.env.project, pkg, isnothing(compat_str) ? nothing : isempty(compat_str) ? nothing : compat_str)
success === false && pkgerror("invalid compat version specifier \"$(compat_str)\"")
write_env(ctx.env)
if isnothing(compat_str) || isempty(compat_str)
- printpkgstyle(io, :Compat, "entry removed for $(pkg)")
+ printpkgstyle(io, :Compat, "entry removed:\n $pkg = $(repr(existing_compat))")
else
printpkgstyle(io, :Compat, "entry set:\n $(pkg) = $(repr(compat_str))")
end
@@ -1410,15 +1518,81 @@ function compat(ctx::Context, pkg::String, compat_str::Union{Nothing,String}; io
pkgerror("No package named $pkg in current Project")
end
end
-compat(pkg::String; kwargs...) = compat(pkg, nothing; kwargs...)
-compat(pkg::String, compat_str::Union{Nothing,String}; kwargs...) = compat(Context(), pkg, compat_str; kwargs...)
-compat(;kwargs...) = compat(Context(); kwargs...)
+function compat(ctx::Context = Context(); current::Bool = false, kwargs...)
+ if current
+ return set_current_compat(ctx; kwargs...)
+ end
+ return _compat(ctx; kwargs...)
+end
+compat(pkg::String, compat_str::Union{Nothing, String} = nothing; kwargs...) = _compat(Context(), pkg, compat_str; kwargs...)
+
+
+function set_current_compat(ctx::Context, target_pkg::Union{Nothing, String} = nothing; io = nothing)
+ io = something(io, ctx.io)
+ updated_deps = String[]
+
+ deps_to_process = if target_pkg !== nothing
+ # Process only the specified package
+ if haskey(ctx.env.project.deps, target_pkg)
+ [(target_pkg, ctx.env.project.deps[target_pkg])]
+ else
+ pkgerror("Package $(target_pkg) not found in project dependencies")
+ end
+ else
+ # Process all packages (existing behavior)
+ collect(ctx.env.project.deps)
+ end
+
+ # Process regular package dependencies
+ for (dep, uuid) in deps_to_process
+ compat_str = Operations.get_compat_str(ctx.env.project, dep)
+ if target_pkg !== nothing || isnothing(compat_str)
+ entry = get(ctx.env.manifest, uuid, nothing)
+ entry === nothing && continue
+ v = entry.version
+ v === nothing && continue
+ pkgversion = string(Base.thispatch(v))
+ Operations.set_compat(ctx.env.project, dep, pkgversion) ||
+ pkgerror("invalid compat version specifier \"$(pkgversion)\"")
+ push!(updated_deps, dep)
+ end
+ end
+
+ # Also handle Julia compat entry when processing all packages (not when targeting a specific package)
+ if target_pkg === nothing
+ julia_compat_str = Operations.get_compat_str(ctx.env.project, "julia")
+ if isnothing(julia_compat_str)
+ # Set julia compat to current running version
+ julia_version = string(Base.thispatch(VERSION))
+ Operations.set_compat(ctx.env.project, "julia", julia_version) ||
+ pkgerror("invalid compat version specifier \"$(julia_version)\"")
+ push!(updated_deps, "julia")
+ end
+ end
+
+ # Update messaging
+ if isempty(updated_deps)
+ if target_pkg !== nothing
+ printpkgstyle(io, :Info, "$(target_pkg) already has a compat entry or is not in manifest. No changes made.", color = Base.info_color())
+ else
+ printpkgstyle(io, :Info, "no missing compat entries found. No changes made.", color = Base.info_color())
+ end
+ elseif length(updated_deps) == 1
+ printpkgstyle(io, :Info, "new entry set for $(only(updated_deps)) based on its current version", color = Base.info_color())
+ else
+ printpkgstyle(io, :Info, "new entries set for $(join(updated_deps, ", ", " and ")) based on their current versions", color = Base.info_color())
+ end
+
+ write_env(ctx.env)
+ return Operations.print_compat(ctx; io)
+end
+set_current_compat(; kwargs...) = set_current_compat(Context(); kwargs...)
#######
# why #
#######
-function why(ctx::Context, pkgs::Vector{PackageSpec}; io::IO, workspace::Bool=false, kwargs...)
+function why(ctx::Context, pkgs::Vector{PackageSpec}; io::IO, workspace::Bool = false, kwargs...)
require_not_empty(pkgs, :why)
manifest_resolve!(ctx.env.manifest, pkgs)
@@ -1456,6 +1630,7 @@ function why(ctx::Context, pkgs::Vector{PackageSpec}; io::IO, workspace::Bool=fa
end
find_paths!(final_paths, p, copy(path))
end
+ return
end
first = true
@@ -1467,11 +1642,12 @@ function why(ctx::Context, pkgs::Vector{PackageSpec}; io::IO, workspace::Bool=fa
foreach(reverse!, final_paths)
final_paths_names = map(x -> [ctx.env.manifest[uuid].name for uuid in x], collect(final_paths))
sort!(final_paths_names, by = x -> (x, length(x)))
- delimiter = sprint((io, args) -> printstyled(io, args...; color=:light_green), "→", context=io)
+ delimiter = sprint((io, args) -> printstyled(io, args...; color = :light_green), "→", context = io)
for path in final_paths_names
println(io, " ", join(path, " $delimiter "))
end
end
+ return
end
@@ -1493,7 +1669,7 @@ const undo_entries = Dict{String, UndoState}()
const max_undo_limit = 50
const saved_initial_snapshot = Ref(false)
-function add_snapshot_to_undo(env=nothing)
+function add_snapshot_to_undo(env = nothing)
# only attempt to take a snapshot if there is
# an active project to be found
if env === nothing
@@ -1511,14 +1687,14 @@ function add_snapshot_to_undo(env=nothing)
return
end
snapshot = UndoSnapshot(now(), env.project, env.manifest)
- deleteat!(state.entries, 1:(state.idx-1))
+ deleteat!(state.entries, 1:(state.idx - 1))
pushfirst!(state.entries, snapshot)
state.idx = 1
- resize!(state.entries, min(length(state.entries), max_undo_limit))
+ return resize!(state.entries, min(length(state.entries), max_undo_limit))
end
-undo(ctx = Context()) = redo_undo(ctx, :undo, 1)
+undo(ctx = Context()) = redo_undo(ctx, :undo, 1)
redo(ctx = Context()) = redo_undo(ctx, :redo, -1)
function redo_undo(ctx, mode::Symbol, direction::Int)
@assert direction == 1 || direction == -1
@@ -1529,16 +1705,16 @@ function redo_undo(ctx, mode::Symbol, direction::Int)
state.idx += direction
snapshot = state.entries[state.idx]
ctx.env.manifest, ctx.env.project = snapshot.manifest, snapshot.project
- write_env(ctx.env; update_undo=false)
- Operations.show_update(ctx.env, ctx.registries; io=ctx.io)
+ write_env(ctx.env; update_undo = false)
+ return Operations.show_update(ctx.env, ctx.registries; io = ctx.io)
end
function setprotocol!(;
- domain::AbstractString="github.com",
- protocol::Union{Nothing, AbstractString}=nothing
-)
- GitTools.setprotocol!(domain=domain, protocol=protocol)
+ domain::AbstractString = "github.com",
+ protocol::Union{Nothing, AbstractString} = nothing
+ )
+ GitTools.setprotocol!(domain = domain, protocol = protocol)
return nothing
end
@@ -1546,10 +1722,15 @@ end
function handle_package_input!(pkg::PackageSpec)
if pkg.path !== nothing && pkg.url !== nothing
- pkgerror("`path` and `url` are conflicting specifications")
+ pkgerror("Conflicting `path` and `url` in PackageSpec")
end
- pkg.repo = Types.GitRepo(rev = pkg.rev, source = pkg.url !== nothing ? pkg.url : pkg.path,
- subdir = pkg.subdir)
+ if pkg.repo.source !== nothing || pkg.repo.rev !== nothing || pkg.repo.subdir !== nothing
+ pkgerror("`repo` is a private field of PackageSpec and should not be set directly")
+ end
+ pkg.repo = Types.GitRepo(
+ rev = pkg.rev, source = pkg.url !== nothing ? pkg.url : pkg.path,
+ subdir = pkg.subdir
+ )
pkg.path = nothing
pkg.tree_hash = nothing
if pkg.version === nothing
@@ -1558,7 +1739,7 @@ function handle_package_input!(pkg::PackageSpec)
if !(pkg.version isa VersionNumber)
pkg.version = VersionSpec(pkg.version)
end
- pkg.uuid = pkg.uuid isa String ? UUID(pkg.uuid) : pkg.uuid
+ return pkg.uuid = pkg.uuid isa String ? UUID(pkg.uuid) : pkg.uuid
end
function upgrade_manifest(man_path::String)
@@ -1567,7 +1748,7 @@ function upgrade_manifest(man_path::String)
Pkg.activate(dir) do
Pkg.upgrade_manifest()
end
- mv(joinpath(dir, "Manifest.toml"), man_path, force = true)
+ return mv(joinpath(dir, "Manifest.toml"), man_path, force = true)
end
function upgrade_manifest(ctx::Context = Context())
before_format = ctx.env.manifest.manifest_format
diff --git a/src/Apps/Apps.jl b/src/Apps/Apps.jl
new file mode 100644
index 0000000000..e5cfd8b10f
--- /dev/null
+++ b/src/Apps/Apps.jl
@@ -0,0 +1,523 @@
+module Apps
+
+using Pkg
+using Pkg: atomic_toml_write
+using Pkg.Versions
+using Pkg.Types: AppInfo, PackageSpec, Context, EnvCache, PackageEntry, Manifest, handle_repo_add!, handle_repo_develop!, write_manifest, write_project,
+ pkgerror, projectfile_path, manifestfile_path
+using Pkg.Operations: print_single, source_path, update_package_add
+using Pkg.API: handle_package_input!
+using TOML, UUIDs
+import Pkg.Registry
+
+app_env_folder() = joinpath(first(DEPOT_PATH), "environments", "apps")
+app_manifest_file() = joinpath(app_env_folder(), "AppManifest.toml")
+julia_bin_path() = joinpath(first(DEPOT_PATH), "bin")
+
+app_context() = Context(env = EnvCache(joinpath(app_env_folder(), "Project.toml")))
+
+function validate_app_name(name::AbstractString)
+ if isempty(name)
+ error("App name cannot be empty")
+ end
+ if !occursin(r"^[a-zA-Z][a-zA-Z0-9_-]*$", name)
+ error("App name must start with a letter and contain only letters, numbers, underscores, and hyphens")
+ end
+ return if occursin(r"\.\.", name) || occursin(r"[/\\]", name)
+ error("App name cannot contain path traversal sequences or path separators")
+ end
+end
+
+function validate_package_name(name::AbstractString)
+ if isempty(name)
+ error("Package name cannot be empty")
+ end
+ return if !occursin(r"^[a-zA-Z][a-zA-Z0-9_]*$", name)
+ error("Package name must start with a letter and contain only letters, numbers, and underscores")
+ end
+end
+
+function validate_submodule_name(name::Union{AbstractString, Nothing})
+ return if name !== nothing
+ if isempty(name)
+ error("Submodule name cannot be empty")
+ end
+ if !occursin(r"^[a-zA-Z][a-zA-Z0-9_]*$", name)
+ error("Submodule name must start with a letter and contain only letters, numbers, and underscores")
+ end
+ end
+end
+
+
+function rm_shim(name; kwargs...)
+ validate_app_name(name)
+ return Base.rm(joinpath(julia_bin_path(), name * (Sys.iswindows() ? ".bat" : "")); kwargs...)
+end
+
+function get_project(sourcepath)
+ project_file = projectfile_path(sourcepath)
+
+ isfile(project_file) || error("Project file not found: $project_file")
+
+ project = Pkg.Types.read_project(project_file)
+ isempty(project.apps) && error("No apps found in Project.toml for package $(project.name) at version $(project.version)")
+ return project
+end
+
+
+function overwrite_file_if_different(file, content)
+ return if !isfile(file) || read(file, String) != content
+ mkpath(dirname(file))
+ write(file, content)
+ end
+end
+
+function check_apps_in_path(apps)
+ for app_name in keys(apps)
+ which_result = Sys.which(app_name)
+ if which_result === nothing
+ @warn """
+ App '$app_name' was installed but is not available in PATH.
+ Consider adding '$(julia_bin_path())' to your PATH environment variable.
+ """ maxlog = 1
+ break # Only show warning once per installation
+ else
+ # Check for collisions
+ expected_path = joinpath(julia_bin_path(), app_name * (Sys.iswindows() ? ".bat" : ""))
+ if which_result != expected_path
+ @warn """
+ App '$app_name' collision detected:
+ Expected: $expected_path
+ Found: $which_result
+ Another application with the same name exists in PATH.
+ """
+ end
+ end
+ end
+ return
+end
+
+function get_max_version_register(pkg::PackageSpec, regs)
+ max_v = nothing
+ tree_hash = nothing
+ for reg in regs
+ if get(reg, pkg.uuid, nothing) !== nothing
+ reg_pkg = get(reg, pkg.uuid, nothing)
+ reg_pkg === nothing && continue
+ pkg_info = Registry.registry_info(reg_pkg)
+ for (version, info) in pkg_info.version_info
+ info.yanked && continue
+ if pkg.version isa VersionNumber
+ pkg.version == version || continue
+ else
+ version in pkg.version || continue
+ end
+ if max_v === nothing || version > max_v
+ max_v = version
+ tree_hash = info.git_tree_sha1
+ end
+ end
+ end
+ end
+ if max_v === nothing
+ error("Suitable package version for $(pkg.name) not found in any registries.")
+ end
+ return (max_v, tree_hash)
+end
+
+
+##################
+# Main Functions #
+##################
+
+function _resolve(manifest::Manifest, pkgname = nothing)
+ for (uuid, pkg) in manifest.deps
+ if pkgname !== nothing && pkg.name !== pkgname
+ continue
+ end
+
+ # TODO: Add support for existing manifest
+
+ projectfile = joinpath(app_env_folder(), pkg.name, "Project.toml")
+
+ sourcepath = source_path(app_manifest_file(), pkg)
+ original_project_file = projectfile_path(sourcepath)
+
+ mkpath(dirname(projectfile))
+
+ if isfile(original_project_file)
+ cp(original_project_file, projectfile; force = true)
+ chmod(projectfile, 0o644) # Make the copied project file writable
+
+ # Add entryfile stanza pointing to the package entry file
+ # TODO: What if project file has its own entryfile?
+ project_data = TOML.parsefile(projectfile)
+ project_data["entryfile"] = joinpath(sourcepath, "src", "$(pkg.name).jl")
+ atomic_toml_write(projectfile, project_data)
+ else
+ error("could not find project file for package $pkg")
+ end
+
+ # Create a manifest with the manifest entry
+ Pkg.activate(joinpath(app_env_folder(), pkg.name)) do
+ ctx = Context()
+ ctx.env.manifest.deps[uuid] = pkg
+ Pkg.resolve(ctx)
+ end
+
+ # TODO: Julia path
+ generate_shims_for_apps(pkg.name, pkg.apps, dirname(projectfile), joinpath(Sys.BINDIR, "julia"))
+ end
+ return write_manifest(manifest, app_manifest_file())
+end
+
+
+function add(pkg::Vector{PackageSpec})
+ for p in pkg
+ add(p)
+ end
+ return
+end
+
+
+function add(pkg::PackageSpec)
+ handle_package_input!(pkg)
+
+ ctx = app_context()
+ manifest = ctx.env.manifest
+ new = false
+
+ # Download package
+ if pkg.repo.source !== nothing || pkg.repo.rev !== nothing
+ entry = Pkg.API.manifest_info(ctx.env.manifest, pkg.uuid)
+ pkg = update_package_add(ctx, pkg, entry, false)
+ new = handle_repo_add!(ctx, pkg)
+ else
+ pkgs = [pkg]
+ Pkg.Operations.registry_resolve!(ctx.registries, pkgs)
+ Pkg.Operations.ensure_resolved(ctx, manifest, pkgs, registry = true)
+
+ pkg.version, pkg.tree_hash = get_max_version_register(pkg, ctx.registries)
+
+ new = Pkg.Operations.download_source(ctx, pkgs)
+ end
+
+ # Run Pkg.build()?
+
+ Base.rm(joinpath(app_env_folder(), pkg.name); force = true, recursive = true)
+ sourcepath = source_path(ctx.env.manifest_file, pkg)
+ project = get_project(sourcepath)
+ # TODO: Wrong if package itself has a sourcepath?
+ entry = PackageEntry(; apps = project.apps, name = pkg.name, version = project.version, tree_hash = pkg.tree_hash, path = pkg.path, repo = pkg.repo, uuid = pkg.uuid)
+ manifest.deps[pkg.uuid] = entry
+
+ _resolve(manifest, pkg.name)
+ precompile(pkg.name)
+
+ @info "For package: $(pkg.name) installed apps $(join(keys(project.apps), ","))"
+ return check_apps_in_path(project.apps)
+end
+
+function develop(pkg::Vector{PackageSpec})
+ for p in pkg
+ develop(p)
+ end
+ return
+end
+
+function develop(pkg::PackageSpec)
+    if pkg.path !== nothing
+        pkg.path = abspath(pkg.path) # normalize to an absolute path (was `==`, a no-op comparison)
+    end
+    handle_package_input!(pkg)
+    ctx = app_context()
+    handle_repo_develop!(ctx, pkg, #=shared =# true)
+    Base.rm(joinpath(app_env_folder(), pkg.name); force = true, recursive = true)
+    sourcepath = abspath(source_path(ctx.env.manifest_file, pkg))
+    project = get_project(sourcepath)
+
+    # Seems like the `.repo.source` field is not cleared.
+    # At least repo-url is still in the manifest after doing a dev with a path
+    # Figure out why for normal dev this is not needed.
+    # XXX: Why needed?
+    if pkg.path !== nothing
+        pkg.repo.source = nothing
+    end
+
+
+    entry = PackageEntry(; apps = project.apps, name = pkg.name, version = project.version, tree_hash = pkg.tree_hash, path = sourcepath, repo = pkg.repo, uuid = pkg.uuid)
+    manifest = ctx.env.manifest
+    manifest.deps[pkg.uuid] = entry
+
+    _resolve(manifest, pkg.name)
+    precompile(pkg.name)
+    @info "For package: $(pkg.name) installed apps: $(join(keys(project.apps), ","))"
+    return check_apps_in_path(project.apps)
+end
+
+
+update(pkgs_or_apps::String) = update([pkgs_or_apps])
+function update(pkgs_or_apps::Vector)
+ for pkg_or_app in pkgs_or_apps
+ if pkg_or_app isa String
+ pkg_or_app = PackageSpec(pkg_or_app)
+ end
+ update(pkg_or_app)
+ end
+ return
+end
+
+# XXX: Is updating an app ever different from rm-ing and adding it from scratch?
+# XXX: Is updating an app ever different from rm-ing and adding it from scratch?
+function update(pkg::Union{PackageSpec, Nothing} = nothing)
+    ctx = app_context()
+    manifest = ctx.env.manifest
+    deps = Pkg.Operations.load_manifest_deps(manifest)
+    for dep in deps
+        info = manifest.deps[dep.uuid]
+        # Skip only deps that don't match a requested package; `pkg === nothing` means update all.
+        # (Previously `pkg === nothing || ...`, which skipped every dep for a no-arg `update()`.)
+        if pkg !== nothing && info.name !== pkg.name
+            continue
+        end
+        Pkg.activate(joinpath(app_env_folder(), info.name)) do
+            # precompile only after updating all apps?
+            if pkg !== nothing
+                Pkg.update(pkg)
+            else
+                Pkg.update()
+            end
+        end
+        sourcepath = abspath(source_path(ctx.env.manifest_file, info))
+        project = get_project(sourcepath)
+        # Get the tree hash from the project file
+        manifest_file = manifestfile_path(joinpath(app_env_folder(), info.name))
+        manifest_app = Pkg.Types.read_manifest(manifest_file)
+        manifest_entry = manifest_app.deps[info.uuid]
+
+        entry = PackageEntry(;
+            apps = project.apps, name = manifest_entry.name, version = manifest_entry.version, tree_hash = manifest_entry.tree_hash,
+            path = manifest_entry.path, repo = manifest_entry.repo, uuid = manifest_entry.uuid
+        )
+
+        manifest.deps[dep.uuid] = entry
+        Pkg.Types.write_manifest(manifest, app_manifest_file())
+    end
+    return
+end
+
+function status(pkgs_or_apps::Vector)
+ return if isempty(pkgs_or_apps)
+ status()
+ else
+ for pkg_or_app in pkgs_or_apps
+ if pkg_or_app isa String
+ pkg_or_app = PackageSpec(pkg_or_app)
+ end
+ status(pkg_or_app)
+ end
+ end
+end
+
+function status(pkg_or_app::Union{PackageSpec, Nothing} = nothing)
+ # TODO: Sort.
+ pkg_or_app = pkg_or_app === nothing ? nothing : pkg_or_app.name
+ manifest = Pkg.Types.read_manifest(joinpath(app_env_folder(), "AppManifest.toml"))
+ deps = Pkg.Operations.load_manifest_deps(manifest)
+
+ is_pkg = pkg_or_app !== nothing && any(dep -> dep.name == pkg_or_app, values(manifest.deps))
+
+ for dep in deps
+ info = manifest.deps[dep.uuid]
+ if is_pkg && dep.name !== pkg_or_app
+ continue
+ end
+ if !is_pkg && pkg_or_app !== nothing
+ if !(pkg_or_app in keys(info.apps))
+ continue
+ end
+ end
+
+ printstyled("[", string(dep.uuid)[1:8], "] "; color = :light_black)
+ print_single(stdout, dep)
+ println()
+ for (appname, appinfo) in info.apps
+ if !is_pkg && pkg_or_app !== nothing && appname !== pkg_or_app
+ continue
+ end
+ julia_cmd = contractuser(appinfo.julia_command)
+ printstyled(" $(appname)", color = :green)
+ printstyled(" $(julia_cmd) \n", color = :gray)
+ end
+ end
+ return
+end
+
+function precompile(pkg::Union{Nothing, String} = nothing)
+ manifest = Pkg.Types.read_manifest(joinpath(app_env_folder(), "AppManifest.toml"))
+ deps = Pkg.Operations.load_manifest_deps(manifest)
+ for dep in deps
+ # TODO: Parallel app compilation..?
+ info = manifest.deps[dep.uuid]
+ if pkg !== nothing && info.name !== pkg
+ continue
+ end
+ Pkg.activate(joinpath(app_env_folder(), info.name)) do
+ Pkg.instantiate()
+ Pkg.precompile()
+ end
+ end
+ return
+end
+
+
+function require_not_empty(pkgs, f::Symbol)
+ return if pkgs === nothing || isempty(pkgs)
+ pkgerror("app $f requires at least one package")
+ end
+end
+
+rm(pkgs_or_apps::String) = rm([pkgs_or_apps])
+function rm(pkgs_or_apps::Vector)
+ for pkg_or_app in pkgs_or_apps
+ if pkg_or_app isa String
+ pkg_or_app = PackageSpec(pkg_or_app)
+ end
+ rm(pkg_or_app)
+ end
+ return
+end
+
+function rm(pkg_or_app::Union{PackageSpec, Nothing} = nothing)
+ pkg_or_app = pkg_or_app === nothing ? nothing : pkg_or_app.name
+
+ require_not_empty(pkg_or_app, :rm)
+
+ manifest = Pkg.Types.read_manifest(joinpath(app_env_folder(), "AppManifest.toml"))
+ dep_idx = findfirst(dep -> dep.name == pkg_or_app, manifest.deps)
+ if dep_idx !== nothing
+ dep = manifest.deps[dep_idx]
+ @info "Deleting all apps for package $(dep.name)"
+ delete!(manifest.deps, dep.uuid)
+ for (appname, appinfo) in dep.apps
+ @info "Deleted $(appname)"
+ rm_shim(appname; force = true)
+ end
+ if dep.path === nothing
+ Base.rm(joinpath(app_env_folder(), dep.name); recursive = true)
+ end
+ else
+ for (uuid, pkg) in manifest.deps
+ app_idx = findfirst(app -> app.name == pkg_or_app, pkg.apps)
+ if app_idx !== nothing
+ app = pkg.apps[app_idx]
+ @info "Deleted app $(app.name)"
+ delete!(pkg.apps, app.name)
+ rm_shim(app.name; force = true)
+ end
+ if isempty(pkg.apps)
+ delete!(manifest.deps, uuid)
+ Base.rm(joinpath(app_env_folder(), pkg.name); recursive = true)
+ end
+ end
+ end
+ # XXX: What happens if something fails above and we do not write out the updated manifest?
+ Pkg.Types.write_manifest(manifest, app_manifest_file())
+ return
+end
+
+for f in (:develop, :add)
+ @eval begin
+ $f(pkg::Union{AbstractString, PackageSpec}; kwargs...) = $f([pkg]; kwargs...)
+ $f(pkgs::Vector{<:AbstractString}; kwargs...) = $f([PackageSpec(pkg) for pkg in pkgs]; kwargs...)
+ function $f(;
+ name::Union{Nothing, AbstractString} = nothing, uuid::Union{Nothing, String, UUID} = nothing,
+ version::Union{VersionNumber, String, VersionSpec, Nothing} = nothing,
+ url = nothing, rev = nothing, path = nothing, subdir = nothing, kwargs...
+ )
+ pkg = PackageSpec(; name, uuid, version, url, rev, path, subdir)
+ return if all(isnothing, [name, uuid, version, url, rev, path, subdir])
+ $f(PackageSpec[]; kwargs...)
+ else
+ $f(pkg; kwargs...)
+ end
+ end
+ function $f(pkgs::Vector{<:NamedTuple}; kwargs...)
+ return $f([PackageSpec(; pkg...) for pkg in pkgs]; kwargs...)
+ end
+ end
+end
+
+
+#########
+# Shims #
+#########
+
+const SHIM_COMMENT = Sys.iswindows() ? "REM " : "#"
+const SHIM_VERSION = 1.0
+const SHIM_HEADER = """$SHIM_COMMENT This file is generated by the Julia package manager.
+$SHIM_COMMENT Shim version: $SHIM_VERSION"""
+
+function generate_shims_for_apps(pkgname, apps, env, julia)
+ for (_, app) in apps
+ generate_shim(pkgname, app, env, julia)
+ end
+ return
+end
+
+function generate_shim(pkgname, app::AppInfo, env, julia)
+ validate_package_name(pkgname)
+ validate_app_name(app.name)
+ validate_submodule_name(app.submodule)
+
+ module_spec = app.submodule === nothing ? pkgname : "$(pkgname).$(app.submodule)"
+
+ filename = app.name * (Sys.iswindows() ? ".bat" : "")
+ julia_bin_filename = joinpath(julia_bin_path(), filename)
+ mkpath(dirname(julia_bin_filename))
+ content = if Sys.iswindows()
+ julia_escaped = "\"$(Base.shell_escape_wincmd(julia))\""
+ module_spec_escaped = "\"$(Base.shell_escape_wincmd(module_spec))\""
+ windows_shim(julia_escaped, module_spec_escaped, env)
+ else
+ julia_escaped = Base.shell_escape(julia)
+ module_spec_escaped = Base.shell_escape(module_spec)
+ shell_shim(julia_escaped, module_spec_escaped, env)
+ end
+ overwrite_file_if_different(julia_bin_filename, content)
+ return if Sys.isunix()
+ chmod(julia_bin_filename, 0o755)
+ end
+end
+
+
+function shell_shim(julia_escaped::String, module_spec_escaped::String, env)
+ return """
+ #!/bin/sh
+
+ $SHIM_HEADER
+
+ export JULIA_LOAD_PATH=$(repr(env))
+ export JULIA_DEPOT_PATH=$(repr(join(DEPOT_PATH, ':')))
+ exec $julia_escaped \\
+ --startup-file=no \\
+ -m $module_spec_escaped \\
+ "\$@"
+ """
+end
+
+function windows_shim(julia_escaped::String, module_spec_escaped::String, env)
+ return """
+ @echo off
+
+ $SHIM_HEADER
+
+ setlocal
+ set JULIA_LOAD_PATH=$env
+ set JULIA_DEPOT_PATH=$(join(DEPOT_PATH, ';'))
+
+ $julia_escaped ^
+ --startup-file=no ^
+ -m $module_spec_escaped ^
+ %*
+ """
+end
+
+end
diff --git a/src/Artifacts.jl b/src/Artifacts.jl
index 957d14aab9..d6f2c948f4 100644
--- a/src/Artifacts.jl
+++ b/src/Artifacts.jl
@@ -1,647 +1,685 @@
-module Artifacts
-
-using Artifacts, Base.BinaryPlatforms, SHA
-using ..MiniProgressBars, ..PlatformEngines
-using Tar: can_symlink
-
-import ..set_readonly, ..GitTools, ..TOML, ..pkg_server, ..can_fancyprint,
- ..stderr_f, ..printpkgstyle
-
-import Base: get, SHA1
-import Artifacts: artifact_names, ARTIFACTS_DIR_OVERRIDE, ARTIFACT_OVERRIDES, artifact_paths,
- artifacts_dirs, pack_platform!, unpack_platform, load_artifacts_toml,
- query_override, with_artifacts_directory, load_overrides
-import ..Types: write_env_usage, parse_toml
-
-
-export create_artifact, artifact_exists, artifact_path, remove_artifact, verify_artifact,
- artifact_meta, artifact_hash, bind_artifact!, unbind_artifact!, download_artifact,
- find_artifacts_toml, ensure_artifact_installed, @artifact_str, archive_artifact,
- select_downloadable_artifacts
-
-"""
- create_artifact(f::Function)
-
-Creates a new artifact by running `f(artifact_path)`, hashing the result, and moving it
-to the artifact store (`~/.julia/artifacts` on a typical installation). Returns the
-identifying tree hash of this artifact.
-"""
-function create_artifact(f::Function)
- # Ensure the `artifacts` directory exists in our default depot
- artifacts_dir = first(artifacts_dirs())
- mkpath(artifacts_dir)
-
- # Temporary directory where we'll do our creation business
- temp_dir = mktempdir(artifacts_dir)
-
- try
- # allow the user to do their work inside the temporary directory
- f(temp_dir)
-
- # Calculate the tree hash for this temporary directory
- artifact_hash = SHA1(GitTools.tree_hash(temp_dir))
-
- # If we created a dupe, just let the temp directory get destroyed. It's got the
- # same contents as whatever already exists after all, so it doesn't matter. Only
- # move its contents if it actually contains new contents. Note that we explicitly
- # set `honor_overrides=false` here, as we wouldn't want to drop things into the
- # system directory by accidentally creating something with the same content-hash
- # as something that was foolishly overridden. This should be virtually impossible
- # unless the user has been very unwise, but let's be cautious.
- new_path = artifact_path(artifact_hash; honor_overrides=false)
- _mv_temp_artifact_dir(temp_dir, new_path)
-
- # Give the people what they want
- return artifact_hash
- finally
- # Always attempt to cleanup
- rm(temp_dir; recursive=true, force=true)
+module PkgArtifacts
+
+ using Artifacts, Base.BinaryPlatforms, SHA
+ using ..MiniProgressBars, ..PlatformEngines
+ using Tar: can_symlink
+ using FileWatching: FileWatching
+
+ import ..set_readonly, ..GitTools, ..TOML, ..pkg_server, ..can_fancyprint,
+ ..stderr_f, ..printpkgstyle, ..mv_temp_dir_retries, ..atomic_toml_write
+
+ import Base: get, SHA1
+ import Artifacts: artifact_names, ARTIFACTS_DIR_OVERRIDE, ARTIFACT_OVERRIDES, artifact_paths,
+ artifacts_dirs, pack_platform!, unpack_platform, load_artifacts_toml,
+ query_override, with_artifacts_directory, load_overrides
+ import ..Types: write_env_usage, parse_toml
+
+    const Artifacts = PkgArtifacts # This is to preserve compatibility for folks who depend on the internals of this module
+ export Artifacts, create_artifact, artifact_exists, artifact_path, remove_artifact, verify_artifact,
+ artifact_meta, artifact_hash, bind_artifact!, unbind_artifact!, download_artifact,
+ find_artifacts_toml, ensure_artifact_installed, @artifact_str, archive_artifact,
+ select_downloadable_artifacts, ArtifactDownloadInfo
+
+ """
+ create_artifact(f::Function)
+
+ Creates a new artifact by running `f(artifact_path)`, hashing the result, and moving it
+ to the artifact store (`~/.julia/artifacts` on a typical installation). Returns the
+ identifying tree hash of this artifact.
+ """
+ function create_artifact(f::Function)
+ # Ensure the `artifacts` directory exists in our default depot
+ artifacts_dir = first(artifacts_dirs())
+ mkpath(artifacts_dir)
+
+ # Temporary directory where we'll do our creation business
+ temp_dir = mktempdir(artifacts_dir)
+
+ try
+ # allow the user to do their work inside the temporary directory
+ f(temp_dir)
+
+ # Calculate the tree hash for this temporary directory
+ artifact_hash = SHA1(GitTools.tree_hash(temp_dir))
+
+ # If we created a dupe, just let the temp directory get destroyed. It's got the
+ # same contents as whatever already exists after all, so it doesn't matter. Only
+ # move its contents if it actually contains new contents. Note that we explicitly
+ # set `honor_overrides=false` here, as we wouldn't want to drop things into the
+ # system directory by accidentally creating something with the same content-hash
+ # as something that was foolishly overridden. This should be virtually impossible
+ # unless the user has been very unwise, but let's be cautious.
+ new_path = artifact_path(artifact_hash; honor_overrides = false)
+ mv_temp_dir_retries(temp_dir, new_path)
+
+ # Give the people what they want
+ return artifact_hash
+ finally
+ # Always attempt to cleanup
+ rm(temp_dir; recursive = true, force = true)
+ end
end
-end
-
-"""
- _mv_temp_artifact_dir(temp_dir::String, new_path::String)::Nothing
-Either rename the directory at `temp_dir` to `new_path` and set it to read-only
-or if `new_path` artifact already exists try to do nothing.
-"""
-function _mv_temp_artifact_dir(temp_dir::String, new_path::String)::Nothing
- # Sometimes a rename can fail because the temp_dir is locked by
- # anti-virus software scanning the new files.
- # In this case we want to sleep and try again.
- # I am using the list of error codes to retry from:
- # https://github.com/isaacs/node-graceful-fs/blob/234379906b7d2f4c9cfeb412d2516f42b0fb4953/polyfills.js#L87
- # Retry for up to about 60 seconds by retrying 20 times with exponential backoff.
- retry = 0
- max_num_retries = 20 # maybe this should be configurable?
- sleep_amount = 0.01 # seconds
- max_sleep_amount = 5.0 # seconds
- while true
- isdir(new_path) && return
- # This next step is like
- # `mv(temp_dir, new_path)`.
- # However, `mv` defaults to `cp` if `rename` returns an error.
- # `cp` is not atomic, so avoid the potential of calling it.
- err = ccall(:jl_fs_rename, Int32, (Cstring, Cstring), temp_dir, new_path)
- if err ≥ 0
- # rename worked
- chmod(new_path, filemode(dirname(new_path)))
- set_readonly(new_path)
+
+ """
+ remove_artifact(hash::SHA1; honor_overrides::Bool=false)
+
+ Removes the given artifact (identified by its SHA1 git tree hash) from disk. Note that
+ if an artifact is installed in multiple depots, it will be removed from all of them. If
+ an overridden artifact is requested for removal, it will be silently ignored; this method
+ will never attempt to remove an overridden artifact.
+
+ In general, we recommend that you use `Pkg.gc()` to manage artifact installations and do
+ not use `remove_artifact()` directly, as it can be difficult to know if an artifact is
+ being used by another package.
+ """
+ function remove_artifact(hash::SHA1)
+ if query_override(hash) !== nothing
+ # We never remove overridden artifacts.
return
- else
- # Ignore rename error if `new_path` exists.
- isdir(new_path) && return
- if retry < max_num_retries && err ∈ (Base.UV_EACCES, Base.UV_EPERM, Base.UV_EBUSY)
- sleep(sleep_amount)
- sleep_amount = min(sleep_amount*2.0, max_sleep_amount)
- retry += 1
- else
- Base.uv_error("rename of $(repr(temp_dir)) to $(repr(new_path))", err)
+ end
+
+ # Get all possible paths (rooted in all depots)
+ possible_paths = artifacts_dirs(bytes2hex(hash.bytes))
+ for path in possible_paths
+ if isdir(path)
+ rm(path; recursive = true, force = true)
end
end
- end
-end
-
-"""
- remove_artifact(hash::SHA1; honor_overrides::Bool=false)
-
-Removes the given artifact (identified by its SHA1 git tree hash) from disk. Note that
-if an artifact is installed in multiple depots, it will be removed from all of them. If
-an overridden artifact is requested for removal, it will be silently ignored; this method
-will never attempt to remove an overridden artifact.
-
-In general, we recommend that you use `Pkg.gc()` to manage artifact installations and do
-not use `remove_artifact()` directly, as it can be difficult to know if an artifact is
-being used by another package.
-"""
-function remove_artifact(hash::SHA1)
- if query_override(hash) !== nothing
- # We never remove overridden artifacts.
return
end
- # Get all possible paths (rooted in all depots)
- possible_paths = artifacts_dirs(bytes2hex(hash.bytes))
- for path in possible_paths
- if isdir(path)
- rm(path; recursive=true, force=true)
+ """
+ verify_artifact(hash::SHA1; honor_overrides::Bool=false)
+
+ Verifies that the given artifact (identified by its SHA1 git tree hash) is installed on-
+ disk, and retains its integrity. If the given artifact is overridden, skips the
+ verification unless `honor_overrides` is set to `true`.
+ """
+ function verify_artifact(hash::SHA1; honor_overrides::Bool = false)
+ # Silently skip overridden artifacts unless we really ask for it
+ if !honor_overrides
+ if query_override(hash) !== nothing
+ return true
+ end
end
- end
-end
-
-"""
- verify_artifact(hash::SHA1; honor_overrides::Bool=false)
-
-Verifies that the given artifact (identified by its SHA1 git tree hash) is installed on-
-disk, and retains its integrity. If the given artifact is overridden, skips the
-verification unless `honor_overrides` is set to `true`.
-"""
-function verify_artifact(hash::SHA1; honor_overrides::Bool=false)
- # Silently skip overridden artifacts unless we really ask for it
- if !honor_overrides
- if query_override(hash) !== nothing
- return true
+
+ # If it doesn't even exist, then skip out
+ if !artifact_exists(hash)
+ return false
end
- end
- # If it doesn't even exist, then skip out
- if !artifact_exists(hash)
- return false
+ # Otherwise actually run the verification
+ return all(hash.bytes .== GitTools.tree_hash(artifact_path(hash)))
end
- # Otherwise actually run the verification
- return all(hash.bytes .== GitTools.tree_hash(artifact_path(hash)))
-end
+ """
+ archive_artifact(hash::SHA1, tarball_path::String; honor_overrides::Bool=false)
-"""
- archive_artifact(hash::SHA1, tarball_path::String; honor_overrides::Bool=false)
+ Archive an artifact into a tarball stored at `tarball_path`, returns the SHA256 of the
+ resultant tarball as a hexadecimal string. Throws an error if the artifact does not
+ exist. If the artifact is overridden, throws an error unless `honor_overrides` is set.
+ """
+ function archive_artifact(hash::SHA1, tarball_path::String; honor_overrides::Bool = false)
+ if !honor_overrides
+ if query_override(hash) !== nothing
+ error("Will not archive an overridden artifact unless `honor_overrides` is set!")
+ end
+ end
-Archive an artifact into a tarball stored at `tarball_path`, returns the SHA256 of the
-resultant tarball as a hexadecimal string. Throws an error if the artifact does not
-exist. If the artifact is overridden, throws an error unless `honor_overrides` is set.
-"""
-function archive_artifact(hash::SHA1, tarball_path::String; honor_overrides::Bool=false)
- if !honor_overrides
- if query_override(hash) !== nothing
- error("Will not archive an overridden artifact unless `honor_overrides` is set!")
+ if !artifact_exists(hash)
+ error("Unable to archive artifact $(bytes2hex(hash.bytes)): does not exist!")
end
- end
- if !artifact_exists(hash)
- error("Unable to archive artifact $(bytes2hex(hash.bytes)): does not exist!")
+ # Package it up
+ package(artifact_path(hash), tarball_path)
+
+ # Calculate its sha256 and return that
+ return open(tarball_path, "r") do io
+ return bytes2hex(sha256(io))
+ end
end
- # Package it up
- package(artifact_path(hash), tarball_path)
+ """
+ ArtifactDownloadInfo
- # Calculate its sha256 and return that
- return open(tarball_path, "r") do io
- return bytes2hex(sha256(io))
- end
-end
-
-"""
- bind_artifact!(artifacts_toml::String, name::String, hash::SHA1;
- platform::Union{AbstractPlatform,Nothing} = nothing,
- download_info::Union{Vector{Tuple},Nothing} = nothing,
- lazy::Bool = false,
- force::Bool = false)
-
-Writes a mapping of `name` -> `hash` within the given `(Julia)Artifacts.toml` file. If
-`platform` is not `nothing`, this artifact is marked as platform-specific, and will be
-a multi-mapping. It is valid to bind multiple artifacts with the same name, but
-different `platform`s and `hash`'es within the same `artifacts_toml`. If `force` is set
-to `true`, this will overwrite a pre-existant mapping, otherwise an error is raised.
-
-`download_info` is an optional vector that contains tuples of URLs and a hash. These
-URLs will be listed as possible locations where this artifact can be obtained. If `lazy`
-is set to `true`, even if download information is available, this artifact will not be
-downloaded until it is accessed via the `artifact"name"` syntax, or
-`ensure_artifact_installed()` is called upon it.
-"""
-function bind_artifact!(artifacts_toml::String, name::String, hash::SHA1;
- platform::Union{AbstractPlatform,Nothing} = nothing,
- download_info::Union{Vector{<:Tuple},Nothing} = nothing,
- lazy::Bool = false,
- force::Bool = false)
- # First, check to see if this artifact is already bound:
- if isfile(artifacts_toml)
- artifact_dict = parse_toml(artifacts_toml)
+ Auxiliary information about an artifact to be used with `bind_artifact!()` to give
+ a download location for that artifact, as well as the hash and size of that artifact.
+ """
+ struct ArtifactDownloadInfo
+ # URL the artifact is available at as a gzip-compressed tarball
+ url::String
- if !force && haskey(artifact_dict, name)
- meta = artifact_dict[name]
- if !isa(meta, Vector)
- error("Mapping for '$name' within $(artifacts_toml) already exists!")
- elseif any(isequal(platform), unpack_platform(x, name, artifacts_toml) for x in meta)
- error("Mapping for '$name'/$(triplet(platform)) within $(artifacts_toml) already exists!")
+ # SHA256 hash of the tarball
+ hash::Vector{UInt8}
+
+ # Size in bytes of the tarball. `size <= 0` means unknown.
+ size::Int64
+
+ function ArtifactDownloadInfo(url, hash::AbstractVector, size = 0)
+ valid_hash_len = SHA.digestlen(SHA256_CTX)
+ hash_len = length(hash)
+ if hash_len != valid_hash_len
+ throw(ArgumentError("Invalid hash length '$(hash_len)', must be $(valid_hash_len)"))
end
+ return new(
+ String(url),
+ Vector{UInt8}(hash),
+ Int64(size),
+ )
end
- else
- artifact_dict = Dict{String, Any}()
end
- # Otherwise, the new piece of data we're going to write out is this dict:
- meta = Dict{String,Any}(
- "git-tree-sha1" => bytes2hex(hash.bytes),
- )
+ # Convenience constructor for string hashes
+ ArtifactDownloadInfo(url, hash::AbstractString, args...) = ArtifactDownloadInfo(url, hex2bytes(hash), args...)
+
+ # Convenience constructor for legacy Tuple representation
+ ArtifactDownloadInfo(args::Tuple) = ArtifactDownloadInfo(args...)
+
+ ArtifactDownloadInfo(adi::ArtifactDownloadInfo) = adi
- # If we're set to be lazy, then lazy we shall be
- if lazy
- meta["lazy"] = true
+ # Make the dict that will be embedded in the TOML
+ function make_dict(adi::ArtifactDownloadInfo)
+ ret = Dict{String, Any}(
+ "url" => adi.url,
+ "sha256" => bytes2hex(adi.hash),
+ )
+ if adi.size > 0
+ ret["size"] = adi.size
+ end
+ return ret
end
- # Integrate download info, if it is given. We represent the download info as a
- # vector of dicts, each with its own `url` and `sha256`, since different tarballs can
- # expand to the same tree hash.
- if download_info !== nothing
- meta["download"] = [
- Dict("url" => dl[1],
- "sha256" => dl[2],
- ) for dl in download_info
- ]
+ """
+ bind_artifact!(artifacts_toml::String, name::String, hash::SHA1;
+ platform::Union{AbstractPlatform,Nothing} = nothing,
+ download_info::Union{Vector{Tuple},Nothing} = nothing,
+ lazy::Bool = false,
+ force::Bool = false)
+
+ Writes a mapping of `name` -> `hash` within the given `(Julia)Artifacts.toml` file. If
+ `platform` is not `nothing`, this artifact is marked as platform-specific, and will be
+ a multi-mapping. It is valid to bind multiple artifacts with the same name, but
+ different `platform`s and `hash`es within the same `artifacts_toml`. If `force` is set
+ to `true`, this will overwrite a pre-existing mapping, otherwise an error is raised.
+
+ `download_info` is an optional vector that contains tuples of URLs and a hash. These
+ URLs will be listed as possible locations where this artifact can be obtained. If `lazy`
+ is set to `true`, even if download information is available, this artifact will not be
+ downloaded until it is accessed via the `artifact"name"` syntax, or
+ `ensure_artifact_installed()` is called upon it.
+ """
+ function bind_artifact!(
+ artifacts_toml::String, name::String, hash::SHA1;
+ platform::Union{AbstractPlatform, Nothing} = nothing,
+ download_info::Union{Vector{<:Tuple}, Vector{<:ArtifactDownloadInfo}, Nothing} = nothing,
+ lazy::Bool = false,
+ force::Bool = false
+ )
+ # First, check to see if this artifact is already bound:
+ if isfile(artifacts_toml)
+ artifact_dict = parse_toml(artifacts_toml)
+
+ if !force && haskey(artifact_dict, name)
+ meta = artifact_dict[name]
+ if !isa(meta, Vector)
+ error("Mapping for '$name' within $(artifacts_toml) already exists!")
+ elseif any(p -> platforms_match(platform, p), unpack_platform(x, name, artifacts_toml) for x in meta)
+ error("Mapping for '$name'/$(triplet(platform)) within $(artifacts_toml) already exists!")
+ end
+ end
+ else
+ artifact_dict = Dict{String, Any}()
+ end
+
+ # Otherwise, the new piece of data we're going to write out is this dict:
+ meta = Dict{String, Any}(
+ "git-tree-sha1" => bytes2hex(hash.bytes),
+ )
+
+ # If we're set to be lazy, then lazy we shall be
+ if lazy
+ meta["lazy"] = true
+ end
+
+ # Integrate download info, if it is given. Note that there can be multiple
+ # download locations, each with its own tarball with its own hash, but which
+ # expands to the same content/treehash.
+ if download_info !== nothing
+ meta["download"] = make_dict.(ArtifactDownloadInfo.(download_info))
+ end
+
+ if platform === nothing
+ artifact_dict[name] = meta
+ else
+ # Add platform-specific keys to our `meta` dict
+ pack_platform!(meta, platform)
+
+ # Insert this entry into the list of artifacts
+ if !haskey(artifact_dict, name)
+ artifact_dict[name] = [meta]
+ else
+ # Delete any entries that contain identical platforms
+ artifact_dict[name] = filter(
+ x -> unpack_platform(x, name, artifacts_toml) != platform,
+ artifact_dict[name]
+ )
+ push!(artifact_dict[name], meta)
+ end
+ end
+
+ # Spit it out onto disk
+ let artifact_dict = artifact_dict
+ parent_dir = dirname(artifacts_toml)
+ atomic_toml_write(artifacts_toml, artifact_dict, sorted = true)
+ end
+
+ # Mark that we have used this Artifact.toml
+ write_env_usage(artifacts_toml, "artifact_usage.toml")
+ return
end
- if platform === nothing
- artifact_dict[name] = meta
- else
- # Add platform-specific keys to our `meta` dict
- pack_platform!(meta, platform)
- # Insert this entry into the list of artifacts
+ """
+ unbind_artifact!(artifacts_toml::String, name::String; platform = nothing)
+
+ Unbind the given `name` from an `(Julia)Artifacts.toml` file.
+ Silently fails if no such binding exists within the file.
+ """
+ function unbind_artifact!(
+ artifacts_toml::String, name::String;
+ platform::Union{AbstractPlatform, Nothing} = nothing
+ )
+ artifact_dict = parse_toml(artifacts_toml)
if !haskey(artifact_dict, name)
- artifact_dict[name] = [meta]
+ return
+ end
+
+ if platform === nothing
+ delete!(artifact_dict, name)
else
- # Delete any entries that contain identical platforms
artifact_dict[name] = filter(
x -> unpack_platform(x, name, artifacts_toml) != platform,
artifact_dict[name]
)
- push!(artifact_dict[name], meta)
end
- end
- # Spit it out onto disk
- let artifact_dict = artifact_dict
- parent_dir = dirname(artifacts_toml)
- temp_artifacts_toml = isempty(parent_dir) ? tempname(pwd()) : tempname(parent_dir)
- open(temp_artifacts_toml, "w") do io
- TOML.print(io, artifact_dict, sorted=true)
- end
- mv(temp_artifacts_toml, artifacts_toml; force=true)
+ atomic_toml_write(artifacts_toml, artifact_dict, sorted = true)
+ return
end
- # Mark that we have used this Artifact.toml
- write_env_usage(artifacts_toml, "artifact_usage.toml")
- return
-end
+ """
+ download_artifact(tree_hash::SHA1, tarball_url::String, tarball_hash::String;
+ verbose::Bool = false, io::IO=stderr)
+ Download/install an artifact into the artifact store. Returns `true` on success,
+ returns an error object on failure.
+
+ !!! compat "Julia 1.8"
+ As of Julia 1.8 this function returns the error object rather than `false` when
+ failure occurs
+ """
+ function download_artifact(
+ tree_hash::SHA1,
+ tarball_url::String,
+ tarball_hash::Union{String, Nothing} = nothing;
+ verbose::Bool = false,
+ quiet_download::Bool = false,
+ io::IO = stderr_f(),
+ progress::Union{Function, Nothing} = nothing,
+ )
+ _artifact_paths = artifact_paths(tree_hash)
+ pidfile = _artifact_paths[1] * ".pid"
+ mkpath(dirname(pidfile))
+ t_wait_msg = Timer(2) do t
+ if progress === nothing
+ @info "downloading $tarball_url ($hex) in another process"
+ else
+ progress(0, 0; status = "downloading in another process")
+ end
+ end
+ ret = FileWatching.mkpidlock(pidfile, stale_age = 20) do
+ close(t_wait_msg)
+ if artifact_exists(tree_hash)
+ return true
+ end
-"""
- unbind_artifact!(artifacts_toml::String, name::String; platform = nothing)
+ # Ensure the `artifacts` directory exists in our default depot
+ artifacts_dir = first(artifacts_dirs())
+ mkpath(artifacts_dir)
+ # expected artifact path
+ dst = joinpath(artifacts_dir, bytes2hex(tree_hash.bytes))
+
+ # We download by using a temporary directory. We do this because the download may
+ # be corrupted or even malicious; we don't want to clobber someone else's artifact
+ # by trusting the tree hash that has been given to us; we will instead download it
+ # to a temporary directory, calculate the true tree hash, then move it to the proper
+ # location only after knowing what it is, and if something goes wrong in the process,
+ # everything should be cleaned up.
+
+ # Temporary directory where we'll do our creation business
+ temp_dir = mktempdir(artifacts_dir)
+
+ try
+ download_verify_unpack(
+ tarball_url, tarball_hash, temp_dir;
+ ignore_existence = true, verbose, quiet_download, io, progress
+ )
+ isnothing(progress) || progress(10000, 10000; status = "verifying")
+ calc_hash = SHA1(GitTools.tree_hash(temp_dir))
+
+ # Did we get what we expected? If not, freak out.
+ if calc_hash.bytes != tree_hash.bytes
+ msg = """
+ Tree Hash Mismatch!
+ Expected git-tree-sha1: $(bytes2hex(tree_hash.bytes))
+ Calculated git-tree-sha1: $(bytes2hex(calc_hash.bytes))
+ """
+ # Since tree hash calculation is rather fragile and file system dependent,
+ # we allow setting JULIA_PKG_IGNORE_HASHES=1 to ignore the error and move
+ # the artifact to the expected location and return true
+ ignore_hash_env_set = get(ENV, "JULIA_PKG_IGNORE_HASHES", "") != ""
+ if ignore_hash_env_set
+ ignore_hash = Base.get_bool_env("JULIA_PKG_IGNORE_HASHES", false)
+ ignore_hash === nothing && @error(
+ "Invalid ENV[\"JULIA_PKG_IGNORE_HASHES\"] value",
+ ENV["JULIA_PKG_IGNORE_HASHES"],
+ )
+ ignore_hash = something(ignore_hash, false)
+ else
+ # default: false except Windows users who can't symlink
+ ignore_hash = Sys.iswindows() &&
+ !mktempdir(can_symlink, artifacts_dir)
+ end
+ if ignore_hash
+ desc = ignore_hash_env_set ?
+ "Environment variable \$JULIA_PKG_IGNORE_HASHES is true" :
+ "System is Windows and user cannot create symlinks"
+ msg *= "\n$desc: \
+ ignoring hash mismatch and moving \
+ artifact to the expected location"
+ @error(msg)
+ else
+ error(msg)
+ end
+ end
+ # Move it to the location we expected
+ isnothing(progress) || progress(10000, 10000; status = "moving to artifact store")
+ mv_temp_dir_retries(temp_dir, dst)
+ catch err
+ @debug "download_artifact error" tree_hash tarball_url tarball_hash err
+ if isa(err, InterruptException)
+ rethrow(err)
+ end
+ # If something went wrong during download, return the error
+ return err
+ finally
+ # Always attempt to cleanup
+ try
+ rm(temp_dir; recursive = true, force = true)
+ catch e
+ e isa InterruptException && rethrow()
+ @warn("Failed to clean up temporary directory $(repr(temp_dir))", exception = e)
+ end
+ end
+ return true
+ end
-Unbind the given `name` from an `(Julia)Artifacts.toml` file.
-Silently fails if no such binding exists within the file.
-"""
-function unbind_artifact!(artifacts_toml::String, name::String;
- platform::Union{AbstractPlatform,Nothing} = nothing)
- artifact_dict = parse_toml(artifacts_toml)
- if !haskey(artifact_dict, name)
- return
+ return ret
end
- if platform === nothing
- delete!(artifact_dict, name)
- else
- artifact_dict[name] = filter(
- x -> unpack_platform(x, name, artifacts_toml) != platform,
- artifact_dict[name]
+ """
+ ensure_artifact_installed(name::String, artifacts_toml::String;
+ platform::AbstractPlatform = HostPlatform(),
+ pkg_uuid::Union{Base.UUID,Nothing}=nothing,
+ verbose::Bool = false,
+ quiet_download::Bool = false,
+ io::IO=stderr)
+
+ Ensures an artifact is installed, downloading it via the download information stored in
+ `artifacts_toml` if necessary. Throws an error if unable to install.
+ """
+ function ensure_artifact_installed(
+ name::String, artifacts_toml::String;
+ platform::AbstractPlatform = HostPlatform(),
+ pkg_uuid::Union{Base.UUID, Nothing} = nothing,
+ pkg_server_eligible::Bool = true,
+ verbose::Bool = false,
+ quiet_download::Bool = false,
+ progress::Union{Function, Nothing} = nothing,
+ io::IO = stderr_f()
)
- end
+ meta = artifact_meta(name, artifacts_toml; pkg_uuid = pkg_uuid, platform = platform)
+ if meta === nothing
+ error("Cannot locate artifact '$(name)' in '$(artifacts_toml)'")
+ end
- open(artifacts_toml, "w") do io
- TOML.print(io, artifact_dict, sorted=true)
- end
- return
-end
-
-"""
- download_artifact(tree_hash::SHA1, tarball_url::String, tarball_hash::String;
- verbose::Bool = false, io::IO=stderr)
-
-Download/install an artifact into the artifact store. Returns `true` on success,
-returns an error object on failure.
-
-!!! compat "Julia 1.8"
- As of Julia 1.8 this function returns the error object rather than `false` when
- failure occurs
-"""
-function download_artifact(
- tree_hash::SHA1,
- tarball_url::String,
- tarball_hash::Union{String, Nothing} = nothing;
- verbose::Bool = false,
- quiet_download::Bool = false,
- io::IO=stderr_f(),
- progress::Union{Function, Nothing} = nothing,
-)
- if artifact_exists(tree_hash)
- return true
+ return ensure_artifact_installed(
+ name, meta, artifacts_toml;
+ pkg_server_eligible, platform, verbose, quiet_download, progress, io
+ )
end
- # Ensure the `artifacts` directory exists in our default depot
- artifacts_dir = first(artifacts_dirs())
- mkpath(artifacts_dir)
- # expected artifact path
- dst = joinpath(artifacts_dir, bytes2hex(tree_hash.bytes))
-
- # We download by using a temporary directory. We do this because the download may
- # be corrupted or even malicious; we don't want to clobber someone else's artifact
- # by trusting the tree hash that has been given to us; we will instead download it
- # to a temporary directory, calculate the true tree hash, then move it to the proper
- # location only after knowing what it is, and if something goes wrong in the process,
- # everything should be cleaned up.
-
- # Temporary directory where we'll do our creation business
- temp_dir = mktempdir(artifacts_dir)
-
- try
- download_verify_unpack(tarball_url, tarball_hash, temp_dir;
- ignore_existence=true, verbose, quiet_download, io, progress)
- isnothing(progress) || progress(10000, 10000; status="verifying")
- calc_hash = SHA1(GitTools.tree_hash(temp_dir))
-
- # Did we get what we expected? If not, freak out.
- if calc_hash.bytes != tree_hash.bytes
- msg = """
- Tree Hash Mismatch!
- Expected git-tree-sha1: $(bytes2hex(tree_hash.bytes))
- Calculated git-tree-sha1: $(bytes2hex(calc_hash.bytes))
- """
- # Since tree hash calculation is rather fragile and file system dependent,
- # we allow setting JULIA_PKG_IGNORE_HASHES=1 to ignore the error and move
- # the artifact to the expected location and return true
- ignore_hash_env_set = get(ENV, "JULIA_PKG_IGNORE_HASHES", "") != ""
- if ignore_hash_env_set
- ignore_hash = Base.get_bool_env("JULIA_PKG_IGNORE_HASHES", false)
- ignore_hash === nothing && @error(
- "Invalid ENV[\"JULIA_PKG_IGNORE_HASHES\"] value",
- ENV["JULIA_PKG_IGNORE_HASHES"],
- )
- ignore_hash = something(ignore_hash, false)
- else
- # default: false except Windows users who can't symlink
- ignore_hash = Sys.iswindows() &&
- !mktempdir(can_symlink, artifacts_dir)
- end
- if ignore_hash
- desc = ignore_hash_env_set ?
- "Environment variable \$JULIA_PKG_IGNORE_HASHES is true" :
- "System is Windows and user cannot create symlinks"
- msg *= "\n$desc: \
- ignoring hash mismatch and moving \
- artifact to the expected location"
- @error(msg)
+ function ensure_artifact_installed(
+ name::String, meta::Dict, artifacts_toml::String;
+ pkg_server_eligible::Bool = true,
+ platform::AbstractPlatform = HostPlatform(),
+ verbose::Bool = false,
+ quiet_download::Bool = false,
+ progress::Union{Function, Nothing} = nothing,
+ io::IO = stderr_f()
+ )
+ hash = SHA1(meta["git-tree-sha1"])
+ if !artifact_exists(hash)
+ if isnothing(progress) || verbose == true
+ return try_artifact_download_sources(name, hash, meta, artifacts_toml; pkg_server_eligible, platform, verbose, quiet_download, io)
else
- error(msg)
+ # if a custom progress handler is given it is taken to mean the caller wants to handle the download scheduling
+ return () -> try_artifact_download_sources(name, hash, meta, artifacts_toml; pkg_server_eligible, platform, quiet_download = true, io, progress)
end
- end
- # Move it to the location we expected
- isnothing(progress) || progress(10000, 10000; status="moving to artifact store")
- _mv_temp_artifact_dir(temp_dir, dst)
- catch err
- @debug "download_artifact error" tree_hash tarball_url tarball_hash err
- if isa(err, InterruptException)
- rethrow(err)
- end
- # If something went wrong during download, return the error
- return err
- finally
- # Always attempt to cleanup
- try
- rm(temp_dir; recursive=true, force=true)
- catch e
- e isa InterruptException && rethrow()
- @warn("Failed to clean up temporary directory $(repr(temp_dir))", exception=e)
- end
- end
- return true
-end
-
-"""
- ensure_artifact_installed(name::String, artifacts_toml::String;
- platform::AbstractPlatform = HostPlatform(),
- pkg_uuid::Union{Base.UUID,Nothing}=nothing,
- verbose::Bool = false,
- quiet_download::Bool = false,
- io::IO=stderr)
-
-Ensures an artifact is installed, downloading it via the download information stored in
-`artifacts_toml` if necessary. Throws an error if unable to install.
-"""
-function ensure_artifact_installed(name::String, artifacts_toml::String;
- platform::AbstractPlatform = HostPlatform(),
- pkg_uuid::Union{Base.UUID,Nothing}=nothing,
- verbose::Bool = false,
- quiet_download::Bool = false,
- progress::Union{Function,Nothing} = nothing,
- io::IO=stderr_f())
- meta = artifact_meta(name, artifacts_toml; pkg_uuid=pkg_uuid, platform=platform)
- if meta === nothing
- error("Cannot locate artifact '$(name)' in '$(artifacts_toml)'")
- end
-
- return ensure_artifact_installed(name, meta, artifacts_toml;
- platform, verbose, quiet_download, progress, io)
-end
-
-function ensure_artifact_installed(name::String, meta::Dict, artifacts_toml::String;
- platform::AbstractPlatform = HostPlatform(),
- verbose::Bool = false,
- quiet_download::Bool = false,
- progress::Union{Function,Nothing} = nothing,
- io::IO=stderr_f())
-
- hash = SHA1(meta["git-tree-sha1"])
- if !artifact_exists(hash)
- if isnothing(progress) || verbose == true
- return try_artifact_download_sources(name, hash, meta, artifacts_toml; platform, verbose, quiet_download, io)
else
- # if a custom progress handler is given it is taken to mean the caller wants to handle the download scheduling
- return () -> try_artifact_download_sources(name, hash, meta, artifacts_toml; platform, quiet_download=true, io, progress)
+ return artifact_path(hash)
end
- else
- return artifact_path(hash)
end
-end
-function try_artifact_download_sources(
+ function try_artifact_download_sources(
name::String, hash::SHA1, meta::Dict, artifacts_toml::String;
- platform::AbstractPlatform=HostPlatform(),
- verbose::Bool=false,
- quiet_download::Bool=false,
- io::IO=stderr_f(),
- progress::Union{Function,Nothing}=nothing)
-
- errors = Any[]
- # first try downloading from Pkg server
- # TODO: only do this if Pkg server knows about this package
- if (server = pkg_server()) !== nothing
- url = "$server/artifact/$hash"
- download_success = let url = url
- @debug "Downloading artifact from Pkg server" name artifacts_toml platform url
- with_show_download_info(io, name, quiet_download) do
- download_artifact(hash, url; verbose, quiet_download, io, progress)
+ pkg_server_eligible::Bool = true,
+ platform::AbstractPlatform = HostPlatform(),
+ verbose::Bool = false,
+ quiet_download::Bool = false,
+ io::IO = stderr_f(),
+ progress::Union{Function, Nothing} = nothing
+ )
+
+ errors = Any[]
+ # first try downloading from Pkg server if the Pkg server knows about this package
+ if pkg_server_eligible && (server = pkg_server()) !== nothing
+ url = "$server/artifact/$hash"
+ download_success = let url = url
+ @debug "Downloading artifact from Pkg server" name artifacts_toml platform url
+ with_show_download_info(io, name, quiet_download) do
+ download_artifact(hash, url; verbose, quiet_download, io, progress)
+ end
+ end
+ # download_success is either `true` or an error object
+ if download_success === true
+ return artifact_path(hash)
+ else
+ @debug "Failed to download artifact from Pkg server" download_success
+ push!(errors, (url, download_success))
end
end
- # download_success is either `true` or an error object
- if download_success === true
- return artifact_path(hash)
- else
- @debug "Failed to download artifact from Pkg server" download_success
- push!(errors, (url, download_success))
- end
- end
- # If this artifact does not exist on-disk already, ensure it has download
- # information, then download it!
- if !haskey(meta, "download")
- error("Cannot automatically install '$(name)'; no download section in '$(artifacts_toml)'")
- end
+ # If this artifact does not exist on-disk already, ensure it has download
+ # information, then download it!
+ if !haskey(meta, "download")
+ error("Cannot automatically install '$(name)'; no download section in '$(artifacts_toml)'")
+ end
- # Attempt to download from all sources
- for entry in meta["download"]
- url = entry["url"]
- tarball_hash = entry["sha256"]
- download_success = let url = url
- @debug "Downloading artifact" name artifacts_toml platform url
- with_show_download_info(io, name, quiet_download) do
- download_artifact(hash, url, tarball_hash; verbose, quiet_download, io, progress)
+ # Attempt to download from all sources
+ for entry in meta["download"]
+ url = entry["url"]
+ tarball_hash = entry["sha256"]
+ download_success = let url = url
+ @debug "Downloading artifact" name artifacts_toml platform url
+ with_show_download_info(io, name, quiet_download) do
+ download_artifact(hash, url, tarball_hash; verbose, quiet_download, io, progress)
+ end
+ end
+ # download_success is either `true` or an error object
+ if download_success === true
+ return artifact_path(hash)
+ else
+ @debug "Failed to download artifact" download_success
+ push!(errors, (url, download_success))
end
end
- # download_success is either `true` or an error object
- if download_success === true
- return artifact_path(hash)
- else
- @debug "Failed to download artifact" download_success
- push!(errors, (url, download_success))
+ errmsg = """
+ Unable to automatically download/install artifact '$(name)' from sources listed in '$(artifacts_toml)'.
+ Sources attempted:
+ """
+ for (url, err) in errors
+ errmsg *= "- $(url)\n"
+ errmsg *= " Error: $(sprint(showerror, err))\n"
end
+ error(errmsg)
end
- errmsg = """
- Unable to automatically download/install artifact '$(name)' from sources listed in '$(artifacts_toml)'.
- Sources attempted:
- """
- for (url, err) in errors
- errmsg *= "- $(url)\n"
- errmsg *= " Error: $(sprint(showerror, err))\n"
- end
- error(errmsg)
-end
-function with_show_download_info(f, io, name, quiet_download)
- fancyprint = can_fancyprint(io)
- if !quiet_download
- fancyprint && print_progress_bottom(io)
- printpkgstyle(io, :Downloading, "artifact: $name")
- end
- success = false
- try
- result = f()
- success = result === true
- return result
- finally
+ function with_show_download_info(f, io, name, quiet_download)
+ fancyprint = can_fancyprint(io)
if !quiet_download
- fancyprint && print(io, "\033[1A") # move cursor up one line
- fancyprint && print(io, "\033[2K") # clear line
- if success
- fancyprint && printpkgstyle(io, :Downloaded, "artifact: $name")
- else
- printpkgstyle(io, :Failure, "artifact: $name", color = :red)
+ fancyprint && print_progress_bottom(io)
+ printpkgstyle(io, :Downloading, "artifact: $name")
+ end
+ success = false
+ try
+ result = f()
+ success = result === true
+ return result
+ finally
+ if !quiet_download
+ fancyprint && print(io, "\033[1A") # move cursor up one line
+ fancyprint && print(io, "\033[2K") # clear line
+ if success
+ fancyprint && printpkgstyle(io, :Downloaded, "artifact: $name")
+ else
+ printpkgstyle(io, :Failure, "artifact: $name", color = :red)
+ end
end
end
end
-end
-"""
- ensure_all_artifacts_installed(artifacts_toml::String;
- platform = HostPlatform(),
- pkg_uuid = nothing,
- include_lazy = false,
- verbose = false,
- quiet_download = false,
- io::IO=stderr)
+ """
+ ensure_all_artifacts_installed(artifacts_toml::String;
+ platform = HostPlatform(),
+ pkg_uuid = nothing,
+ include_lazy = false,
+ verbose = false,
+ quiet_download = false,
+ io::IO=stderr)
-Installs all non-lazy artifacts from a given `(Julia)Artifacts.toml` file. `package_uuid` must
-be provided to properly support overrides from `Overrides.toml` entries in depots.
+ Installs all non-lazy artifacts from a given `(Julia)Artifacts.toml` file. `pkg_uuid` must
+ be provided to properly support overrides from `Overrides.toml` entries in depots.
-If `include_lazy` is set to `true`, then lazy packages will be installed as well.
+ If `include_lazy` is set to `true`, then lazy packages will be installed as well.
-This function is deprecated and should be replaced with the following snippet:
+ This function is deprecated and should be replaced with the following snippet:
- artifacts = select_downloadable_artifacts(artifacts_toml; platform, include_lazy)
- for name in keys(artifacts)
- ensure_artifact_installed(name, artifacts[name], artifacts_toml; platform=platform)
- end
+ artifacts = select_downloadable_artifacts(artifacts_toml; platform, include_lazy)
+ for name in keys(artifacts)
+ ensure_artifact_installed(name, artifacts[name], artifacts_toml; platform=platform)
+ end
-!!! warning
- This function is deprecated in Julia 1.6 and will be removed in a future version.
- Use `select_downloadable_artifacts()` and `ensure_artifact_installed()` instead.
-"""
-function ensure_all_artifacts_installed(artifacts_toml::String;
- platform::AbstractPlatform = HostPlatform(),
- pkg_uuid::Union{Nothing,Base.UUID} = nothing,
- include_lazy::Bool = false,
- verbose::Bool = false,
- quiet_download::Bool = false,
- io::IO=stderr_f())
- # This function should not be called anymore; use `select_downloadable_artifacts()` directly.
- Base.depwarn("`ensure_all_artifacts_installed()` is deprecated; iterate over `select_downloadable_artifacts()` output with `ensure_artifact_installed()`.", :ensure_all_artifacts_installed)
- # Collect all artifacts we're supposed to install
- artifacts = select_downloadable_artifacts(artifacts_toml; platform, include_lazy, pkg_uuid)
- for name in keys(artifacts)
- # Otherwise, let's try and install it!
- ensure_artifact_installed(name, artifacts[name], artifacts_toml; platform=platform,
- verbose=verbose, quiet_download=quiet_download, io=io)
- end
-end
-
-"""
- extract_all_hashes(artifacts_toml::String;
- platform = HostPlatform(),
- pkg_uuid = nothing,
- include_lazy = false)
-
-Extract all hashes from a given `(Julia)Artifacts.toml` file. `package_uuid` must
-be provided to properly support overrides from `Overrides.toml` entries in depots.
-
-If `include_lazy` is set to `true`, then lazy packages will be installed as well.
-"""
-function extract_all_hashes(artifacts_toml::String;
- platform::AbstractPlatform = HostPlatform(),
- pkg_uuid::Union{Nothing,Base.UUID} = nothing,
- include_lazy::Bool = false)
- hashes = Base.SHA1[]
- if !isfile(artifacts_toml)
- return hashes
+ !!! warning
+ This function is deprecated in Julia 1.6 and will be removed in a future version.
+ Use `select_downloadable_artifacts()` and `ensure_artifact_installed()` instead.
+ """
+ function ensure_all_artifacts_installed(
+ artifacts_toml::String;
+ platform::AbstractPlatform = HostPlatform(),
+ pkg_uuid::Union{Nothing, Base.UUID} = nothing,
+ include_lazy::Bool = false,
+ verbose::Bool = false,
+ quiet_download::Bool = false,
+ io::IO = stderr_f()
+ )
+ # This function should not be called anymore; use `select_downloadable_artifacts()` directly.
+ Base.depwarn("`ensure_all_artifacts_installed()` is deprecated; iterate over `select_downloadable_artifacts()` output with `ensure_artifact_installed()`.", :ensure_all_artifacts_installed)
+ # Collect all artifacts we're supposed to install
+ artifacts = select_downloadable_artifacts(artifacts_toml; platform, include_lazy, pkg_uuid)
+ for name in keys(artifacts)
+ # Otherwise, let's try and install it!
+ ensure_artifact_installed(
+ name, artifacts[name], artifacts_toml; platform = platform,
+ verbose = verbose, quiet_download = quiet_download, io = io
+ )
+ end
+ return
end
- artifact_dict = load_artifacts_toml(artifacts_toml; pkg_uuid=pkg_uuid)
+ """
+ extract_all_hashes(artifacts_toml::String;
+ platform = HostPlatform(),
+ pkg_uuid = nothing,
+ include_lazy = false)
- for name in keys(artifact_dict)
- # Get the metadata about this name for the requested platform
- meta = artifact_meta(name, artifact_dict, artifacts_toml; platform=platform)
+ Extract all hashes from a given `(Julia)Artifacts.toml` file. `pkg_uuid` must
+ be provided to properly support overrides from `Overrides.toml` entries in depots.
- # If there are no instances of this name for the desired platform, skip it
- meta === nothing && continue
+ If `include_lazy` is set to `true`, then the hashes of lazy artifacts will be extracted as well.
+ """
+ function extract_all_hashes(
+ artifacts_toml::String;
+ platform::AbstractPlatform = HostPlatform(),
+ pkg_uuid::Union{Nothing, Base.UUID} = nothing,
+ include_lazy::Bool = false
+ )
+ hashes = Base.SHA1[]
+ if !isfile(artifacts_toml)
+ return hashes
+ end
+
+ artifact_dict = load_artifacts_toml(artifacts_toml; pkg_uuid = pkg_uuid)
+
+ for name in keys(artifact_dict)
+ # Get the metadata about this name for the requested platform
+ meta = artifact_meta(name, artifact_dict, artifacts_toml; platform = platform)
- # If it's a lazy one and we aren't including lazy ones, skip
- if get(meta, "lazy", false) && !include_lazy
- continue
+ # If there are no instances of this name for the desired platform, skip it
+ meta === nothing && continue
+
+ # If it's a lazy one and we aren't including lazy ones, skip
+ if get(meta, "lazy", false) && !include_lazy
+ continue
+ end
+
+ # Otherwise, add it to the list!
+ push!(hashes, Base.SHA1(meta["git-tree-sha1"]))
end
- # Otherwise, add it to the list!
- push!(hashes, Base.SHA1(meta["git-tree-sha1"]))
+ return hashes
end
- return hashes
-end
-
-# Support `AbstractString`s, but avoid compilers needing to track backedges for callers
-# of these functions in case a user defines a new type that is `<: AbstractString`
-archive_artifact(hash::SHA1, tarball_path::AbstractString; kwargs...) =
- archive_artifact(hash, string(tarball_path)::String; kwargs...)
-bind_artifact!(artifacts_toml::AbstractString, name::AbstractString, hash::SHA1; kwargs...) =
- bind_artifact!(string(artifacts_toml)::String, string(name)::String, hash; kwargs...)
-unbind_artifact!(artifacts_toml::AbstractString, name::AbstractString) =
- unbind_artifact!(string(artifacts_toml)::String, string(name)::String)
-download_artifact(tree_hash::SHA1, tarball_url::AbstractString, args...; kwargs...) =
- download_artifact(tree_hash, string(tarball_url)::String, args...; kwargs...)
-ensure_artifact_installed(name::AbstractString, artifacts_toml::AbstractString; kwargs...) =
- ensure_artifact_installed(string(name)::String, string(artifacts_toml)::String; kwargs...)
-ensure_artifact_installed(name::AbstractString, meta::Dict, artifacts_toml::AbstractString; kwargs...) =
- ensure_artifact_installed(string(name)::String, meta, string(artifacts_toml)::String; kwargs...)
-ensure_all_artifacts_installed(artifacts_toml::AbstractString; kwargs...) =
- ensure_all_artifacts_installed(string(artifacts_toml)::String; kwargs...)
-extract_all_hashes(artifacts_toml::AbstractString; kwargs...) =
- extract_all_hashes(string(artifacts_toml)::String; kwargs...)
-
-end # module Artifacts
+ # Support `AbstractString`s, but avoid compilers needing to track backedges for callers
+ # of these functions in case a user defines a new type that is `<: AbstractString`
+ archive_artifact(hash::SHA1, tarball_path::AbstractString; kwargs...) =
+ archive_artifact(hash, string(tarball_path)::String; kwargs...)
+ bind_artifact!(artifacts_toml::AbstractString, name::AbstractString, hash::SHA1; kwargs...) =
+ bind_artifact!(string(artifacts_toml)::String, string(name)::String, hash; kwargs...)
+ unbind_artifact!(artifacts_toml::AbstractString, name::AbstractString) =
+ unbind_artifact!(string(artifacts_toml)::String, string(name)::String)
+ download_artifact(tree_hash::SHA1, tarball_url::AbstractString, args...; kwargs...) =
+ download_artifact(tree_hash, string(tarball_url)::String, args...; kwargs...)
+ ensure_artifact_installed(name::AbstractString, artifacts_toml::AbstractString; kwargs...) =
+ ensure_artifact_installed(string(name)::String, string(artifacts_toml)::String; kwargs...)
+ ensure_artifact_installed(name::AbstractString, meta::Dict, artifacts_toml::AbstractString; kwargs...) =
+ ensure_artifact_installed(string(name)::String, meta, string(artifacts_toml)::String; kwargs...)
+ ensure_all_artifacts_installed(artifacts_toml::AbstractString; kwargs...) =
+ ensure_all_artifacts_installed(string(artifacts_toml)::String; kwargs...)
+ extract_all_hashes(artifacts_toml::AbstractString; kwargs...) =
+ extract_all_hashes(string(artifacts_toml)::String; kwargs...)
+
+end # module PkgArtifacts
+
+const Artifacts = PkgArtifacts
diff --git a/src/BinaryPlatformsCompat.jl b/src/BinaryPlatformsCompat.jl
new file mode 100644
index 0000000000..93403e05bd
--- /dev/null
+++ b/src/BinaryPlatformsCompat.jl
@@ -0,0 +1,155 @@
+module BinaryPlatformsCompat
+
+ export platform_key_abi, platform_dlext, valid_dl_path, arch, libc,
+ libgfortran_version, libstdcxx_version, cxxstring_abi, parse_dl_name_version,
+ detect_libgfortran_version, detect_libstdcxx_version, detect_cxxstring_abi,
+ call_abi, wordsize, triplet, select_platform, platforms_match,
+ CompilerABI, Platform, UnknownPlatform, Linux, MacOS, Windows, FreeBSD
+
+ using Base.BinaryPlatforms: parse_dl_name_version,
+ detect_libgfortran_version, detect_libstdcxx_version, detect_cxxstring_abi,
+ os, call_abi, select_platform, platforms_match,
+ AbstractPlatform, Platform, HostPlatform
+
+ import Base.BinaryPlatforms: libgfortran_version, libstdcxx_version, platform_name,
+ wordsize, platform_dlext, tags, arch, libc, call_abi,
+ cxxstring_abi
+
+ struct UnknownPlatform <: AbstractPlatform
+ UnknownPlatform(args...; kwargs...) = new()
+ end
+ tags(::UnknownPlatform) = Dict{String, String}("os" => "unknown")
+
+
+ struct CompilerABI
+ libgfortran_version::Union{Nothing, VersionNumber}
+ libstdcxx_version::Union{Nothing, VersionNumber}
+ cxxstring_abi::Union{Nothing, Symbol}
+
+ function CompilerABI(;
+ libgfortran_version::Union{Nothing, VersionNumber} = nothing,
+ libstdcxx_version::Union{Nothing, VersionNumber} = nothing,
+ cxxstring_abi::Union{Nothing, Symbol} = nothing
+ )
+ return new(libgfortran_version, libstdcxx_version, cxxstring_abi)
+ end
+ end
+
+ # Easy replacement constructor
+ function CompilerABI(
+ cabi::CompilerABI; libgfortran_version = nothing,
+ libstdcxx_version = nothing,
+ cxxstring_abi = nothing
+ )
+ return CompilerABI(;
+ libgfortran_version = something(libgfortran_version, Some(cabi.libgfortran_version)),
+ libstdcxx_version = something(libstdcxx_version, Some(cabi.libstdcxx_version)),
+ cxxstring_abi = something(cxxstring_abi, Some(cabi.cxxstring_abi)),
+ )
+ end
+
+ libgfortran_version(cabi::CompilerABI) = cabi.libgfortran_version
+ libstdcxx_version(cabi::CompilerABI) = cabi.libstdcxx_version
+ cxxstring_abi(cabi::CompilerABI) = cabi.cxxstring_abi
+
+ for T in (:Linux, :Windows, :MacOS, :FreeBSD)
+ @eval begin
+ struct $(T) <: AbstractPlatform
+ p::Platform
+ function $(T)(arch::Symbol; compiler_abi = nothing, kwargs...)
+ if compiler_abi !== nothing
+ kwargs = (;
+ kwargs...,
+ :libgfortran_version => libgfortran_version(compiler_abi),
+ :libstdcxx_version => libstdcxx_version(compiler_abi),
+ :cxxstring_abi => cxxstring_abi(compiler_abi),
+ )
+ end
+ return new(Platform(string(arch), $(string(T)); kwargs..., validate_strict = true))
+ end
+ end
+ end
+ end
+
+ const PlatformUnion = Union{Linux, MacOS, Windows, FreeBSD}
+
+ # First, methods we need to coerce to Symbol for backwards-compatibility
+ for f in (:arch, :libc, :call_abi, :cxxstring_abi)
+ @eval begin
+ function $(f)(p::PlatformUnion)
+ str = $(f)(p.p)
+ if str === nothing
+ return nothing
+ end
+ return Symbol(str)
+ end
+ end
+ end
+
+ # Next, things we don't need to coerce
+ for f in (:libgfortran_version, :libstdcxx_version, :platform_name, :wordsize, :platform_dlext, :tags, :triplet)
+ @eval begin
+ $(f)(p::PlatformUnion) = $(f)(p.p)
+ end
+ end
+
+ # Finally, add equality testing between these wrapper types and other AbstractPlatforms
+ @eval begin
+ Base.:(==)(a::PlatformUnion, b::AbstractPlatform) = b == a.p
+ end
+
+ # Add one-off functions
+ MacOS(; kwargs...) = MacOS(:x86_64; kwargs...)
+ FreeBSD(; kwargs...) = FreeBSD(:x86_64; kwargs...)
+
+ function triplet(p::AbstractPlatform)
+ # We are going to sub off to `Base.BinaryPlatforms.triplet()` here,
+ # with the important exception that we override `os_version` to better
+ # mimic the old behavior of `triplet()`
+ if Sys.isfreebsd(p)
+ p = deepcopy(p)
+ p["os_version"] = "11.1.0"
+ elseif Sys.isapple(p)
+ p = deepcopy(p)
+ p["os_version"] = "14.0.0"
+ end
+ return Base.BinaryPlatforms.triplet(p)
+ end
+
+ """
+ platform_key_abi(machine::AbstractString)
+
 + Returns the platform key for the current platform, or any other through
 + the use of the `machine` parameter.
+
+ This method is deprecated, import `Base.BinaryPlatforms` and use either `HostPlatform()`
+ to get the current host platform, or `parse(Base.BinaryPlatforms.Platform, triplet)`
+ to parse the triplet for some other platform instead.
+ """
+ platform_key_abi() = HostPlatform()
+ platform_key_abi(triplet::AbstractString) = parse(Platform, triplet)
+
+ """
+ valid_dl_path(path::AbstractString, platform::Platform)
+
+ Return `true` if the given `path` ends in a valid dynamic library filename.
+ E.g. returns `true` for a path like `"usr/lib/libfoo.so.3.5"`, but returns
+ `false` for a path like `"libbar.so.f.a"`.
+
+ This method is deprecated and will be removed in Julia 2.0.
+ """
+ function valid_dl_path(path::AbstractString, platform::AbstractPlatform)
+ try
+ parse_dl_name_version(path, string(os(platform))::String)
+ return true
+ catch e
+ if isa(e, ArgumentError)
+ return false
+ end
+ rethrow(e)
+ end
+ end
+
+end # module BinaryPlatformsCompat
+
+const BinaryPlatforms = BinaryPlatformsCompat
diff --git a/src/BinaryPlatforms_compat.jl b/src/BinaryPlatforms_compat.jl
deleted file mode 100644
index 879dcc0c83..0000000000
--- a/src/BinaryPlatforms_compat.jl
+++ /dev/null
@@ -1,148 +0,0 @@
-module BinaryPlatforms
-
-export platform_key_abi, platform_dlext, valid_dl_path, arch, libc,
- libgfortran_version, libstdcxx_version, cxxstring_abi, parse_dl_name_version,
- detect_libgfortran_version, detect_libstdcxx_version, detect_cxxstring_abi,
- call_abi, wordsize, triplet, select_platform, platforms_match,
- CompilerABI, Platform, UnknownPlatform, Linux, MacOS, Windows, FreeBSD
-
-using Base.BinaryPlatforms: parse_dl_name_version,
- detect_libgfortran_version, detect_libstdcxx_version, detect_cxxstring_abi,
- os, call_abi, select_platform, platforms_match,
- AbstractPlatform, Platform, HostPlatform
-
-import Base.BinaryPlatforms: libgfortran_version, libstdcxx_version, platform_name,
- wordsize, platform_dlext, tags, arch, libc, call_abi,
- cxxstring_abi
-
-struct UnknownPlatform <: AbstractPlatform
- UnknownPlatform(args...; kwargs...) = new()
-end
-tags(::UnknownPlatform) = Dict{String,String}("os"=>"unknown")
-
-
-struct CompilerABI
- libgfortran_version::Union{Nothing,VersionNumber}
- libstdcxx_version::Union{Nothing,VersionNumber}
- cxxstring_abi::Union{Nothing,Symbol}
-
- function CompilerABI(;libgfortran_version::Union{Nothing, VersionNumber} = nothing,
- libstdcxx_version::Union{Nothing, VersionNumber} = nothing,
- cxxstring_abi::Union{Nothing, Symbol} = nothing)
- return new(libgfortran_version, libstdcxx_version, cxxstring_abi)
- end
-end
-
-# Easy replacement constructor
-function CompilerABI(cabi::CompilerABI; libgfortran_version=nothing,
- libstdcxx_version=nothing,
- cxxstring_abi=nothing)
- return CompilerABI(;
- libgfortran_version=something(libgfortran_version, Some(cabi.libgfortran_version)),
- libstdcxx_version=something(libstdcxx_version, Some(cabi.libstdcxx_version)),
- cxxstring_abi=something(cxxstring_abi, Some(cabi.cxxstring_abi)),
- )
-end
-
-libgfortran_version(cabi::CompilerABI) = cabi.libgfortran_version
-libstdcxx_version(cabi::CompilerABI) = cabi.libstdcxx_version
-cxxstring_abi(cabi::CompilerABI) = cabi.cxxstring_abi
-
-for T in (:Linux, :Windows, :MacOS, :FreeBSD)
- @eval begin
- struct $(T) <: AbstractPlatform
- p::Platform
- function $(T)(arch::Symbol; compiler_abi=nothing, kwargs...)
- if compiler_abi !== nothing
- kwargs = (; kwargs...,
- :libgfortran_version => libgfortran_version(compiler_abi),
- :libstdcxx_version => libstdcxx_version(compiler_abi),
- :cxxstring_abi => cxxstring_abi(compiler_abi)
- )
- end
- return new(Platform(string(arch), $(string(T)); kwargs..., validate_strict=true))
- end
- end
- end
-end
-
-const PlatformUnion = Union{Linux,MacOS,Windows,FreeBSD}
-
-# First, methods we need to coerce to Symbol for backwards-compatibility
-for f in (:arch, :libc, :call_abi, :cxxstring_abi)
- @eval begin
- function $(f)(p::PlatformUnion)
- str = $(f)(p.p)
- if str === nothing
- return nothing
- end
- return Symbol(str)
- end
- end
-end
-
-# Next, things we don't need to coerce
-for f in (:libgfortran_version, :libstdcxx_version, :platform_name, :wordsize, :platform_dlext, :tags, :triplet)
- @eval begin
- $(f)(p::PlatformUnion) = $(f)(p.p)
- end
-end
-
-# Finally, add equality testing between these wrapper types and other AbstractPlatforms
-@eval begin
- Base.:(==)(a::PlatformUnion, b::AbstractPlatform) = b == a.p
-end
-
-# Add one-off functions
-MacOS(; kwargs...) = MacOS(:x86_64; kwargs...)
-FreeBSD(; kwargs...) = FreeBSD(:x86_64; kwargs...)
-
-function triplet(p::AbstractPlatform)
- # We are going to sub off to `Base.BinaryPlatforms.triplet()` here,
- # with the important exception that we override `os_version` to better
- # mimic the old behavior of `triplet()`
- if Sys.isfreebsd(p)
- p = deepcopy(p)
- p["os_version"] = "11.1.0"
- elseif Sys.isapple(p)
- p = deepcopy(p)
- p["os_version"] = "14.0.0"
- end
- return Base.BinaryPlatforms.triplet(p)
-end
-
-"""
- platform_key_abi(machine::AbstractString)
-
-Returns the platform key for the current platform, or any other though the
-the use of the `machine` parameter.
-
-This method is deprecated, import `Base.BinaryPlatforms` and use either `HostPlatform()`
-to get the current host platform, or `parse(Base.BinaryPlatforms.Platform, triplet)`
-to parse the triplet for some other platform instead.
-"""
-platform_key_abi() = HostPlatform()
-platform_key_abi(triplet::AbstractString) = parse(Platform, triplet)
-
-"""
- valid_dl_path(path::AbstractString, platform::Platform)
-
-Return `true` if the given `path` ends in a valid dynamic library filename.
-E.g. returns `true` for a path like `"usr/lib/libfoo.so.3.5"`, but returns
-`false` for a path like `"libbar.so.f.a"`.
-
-This method is deprecated and will be removed in Julia 2.0.
-"""
-function valid_dl_path(path::AbstractString, platform::AbstractPlatform)
- try
- parse_dl_name_version(path, string(os(platform))::String)
- return true
- catch e
- if isa(e, ArgumentError)
- return false
- end
- rethrow(e)
- end
-end
-
-end # module BinaryPlatforms
diff --git a/src/GitTools.jl b/src/GitTools.jl
index 02fae614ea..ddce9aac46 100644
--- a/src/GitTools.jl
+++ b/src/GitTools.jl
@@ -41,13 +41,13 @@ const GIT_USERS = Dict{String, Union{Nothing, String}}()
@deprecate setprotocol!(proto::Union{Nothing, AbstractString}) setprotocol!(protocol = proto) false
function setprotocol!(;
- domain::AbstractString="github.com",
- protocol::Union{Nothing, AbstractString}=nothing,
- user::Union{Nothing, AbstractString}=(protocol == "ssh" ? "git" : nothing)
-)
+ domain::AbstractString = "github.com",
+ protocol::Union{Nothing, AbstractString} = nothing,
+ user::Union{Nothing, AbstractString} = (protocol == "ssh" ? "git" : nothing)
+ )
domain = lowercase(domain)
GIT_PROTOCOLS[domain] = protocol
- GIT_USERS[domain] = user
+ return GIT_USERS[domain] = user
end
function normalize_url(url::AbstractString)
@@ -61,7 +61,7 @@ function normalize_url(url::AbstractString)
proto = get(GIT_PROTOCOLS, lowercase(host), nothing)
- if proto === nothing
+ return if proto === nothing
url
else
user = get(GIT_USERS, lowercase(host), nothing)
@@ -80,57 +80,59 @@ function ensure_clone(io::IO, target_path, url; kwargs...)
end
function checkout_tree_to_path(repo::LibGit2.GitRepo, tree::LibGit2.GitObject, path::String)
- GC.@preserve path begin
+ return GC.@preserve path begin
opts = LibGit2.CheckoutOptions(
checkout_strategy = LibGit2.Consts.CHECKOUT_FORCE,
target_directory = Base.unsafe_convert(Cstring, path)
)
- LibGit2.checkout_tree(repo, tree, options=opts)
+ LibGit2.checkout_tree(repo, tree, options = opts)
end
end
-function clone(io::IO, url, source_path; header=nothing, credentials=nothing, kwargs...)
+function clone(io::IO, url, source_path; header = nothing, credentials = nothing, isbare = false, kwargs...)
url = String(url)::String
source_path = String(source_path)::String
@assert !isdir(source_path) || isempty(readdir(source_path))
url = normalize_url(url)
printpkgstyle(io, :Cloning, header === nothing ? "git-repo `$url`" : header)
- bar = MiniProgressBar(header = "Fetching:", color = Base.info_color())
+ bar = MiniProgressBar(header = "Cloning:", color = Base.info_color())
fancyprint = can_fancyprint(io)
- callbacks = if fancyprint
- LibGit2.Callbacks(
- :transfer_progress => (
- @cfunction(transfer_progress, Cint, (Ptr{LibGit2.TransferProgress}, Any)),
- bar,
- )
- )
- else
- LibGit2.Callbacks()
- end
fancyprint && start_progress(io, bar)
if credentials === nothing
credentials = LibGit2.CachedCredentials()
end
- try
+ return try
if use_cli_git()
- cmd = `git clone --quiet $url $source_path`
+ args = ["--quiet", url, source_path]
+ isbare && pushfirst!(args, "--bare")
+ cmd = `git clone $args`
try
- run(pipeline(cmd; stdout=devnull))
+ run(pipeline(cmd; stdout = devnull))
catch err
Pkg.Types.pkgerror("The command $(cmd) failed, error: $err")
end
return LibGit2.GitRepo(source_path)
else
+ callbacks = if fancyprint
+ LibGit2.Callbacks(
+ :transfer_progress => (
+ @cfunction(transfer_progress, Cint, (Ptr{LibGit2.TransferProgress}, Any)),
+ bar,
+ )
+ )
+ else
+ LibGit2.Callbacks()
+ end
mkpath(source_path)
- return LibGit2.clone(url, source_path; callbacks=callbacks, credentials=credentials, kwargs...)
+ return LibGit2.clone(url, source_path; callbacks, credentials, isbare, kwargs...)
end
catch err
- rm(source_path; force=true, recursive=true)
+ rm(source_path; force = true, recursive = true)
err isa LibGit2.GitError || err isa InterruptException || rethrow()
if err isa InterruptException
Pkg.Types.pkgerror("git clone of `$url` interrupted")
elseif (err.class == LibGit2.Error.Net && err.code == LibGit2.Error.EINVALIDSPEC) ||
- (err.class == LibGit2.Error.Repository && err.code == LibGit2.Error.ENOTFOUND)
+ (err.class == LibGit2.Error.Repository && err.code == LibGit2.Error.ENOTFOUND)
Pkg.Types.pkgerror("git repository not found at `$(url)`")
else
Pkg.Types.pkgerror("failed to clone from $(url), error: $err")
@@ -141,17 +143,20 @@ function clone(io::IO, url, source_path; header=nothing, credentials=nothing, kw
end
end
-function fetch(io::IO, repo::LibGit2.GitRepo, remoteurl=nothing; header=nothing, credentials=nothing, refspecs=[""], kwargs...)
+function geturl(repo)
+ return LibGit2.with(LibGit2.get(LibGit2.GitRemote, repo, "origin")) do remote
+ LibGit2.url(remote)
+ end
+end
+
+function fetch(io::IO, repo::LibGit2.GitRepo, remoteurl = nothing; header = nothing, credentials = nothing, refspecs = [""], kwargs...)
if remoteurl === nothing
- remoteurl = LibGit2.with(LibGit2.get(LibGit2.GitRemote, repo, "origin")) do remote
- LibGit2.url(remote)
- end
+ remoteurl = geturl(repo)
end
fancyprint = can_fancyprint(io)
remoteurl = normalize_url(remoteurl)
printpkgstyle(io, :Updating, header === nothing ? "git-repo `$remoteurl`" : header)
bar = MiniProgressBar(header = "Fetching:", color = Base.info_color())
- fancyprint = can_fancyprint(io)
callbacks = if fancyprint
LibGit2.Callbacks(
:transfer_progress => (
@@ -166,20 +171,18 @@ function fetch(io::IO, repo::LibGit2.GitRepo, remoteurl=nothing; header=nothing,
if credentials === nothing
credentials = LibGit2.CachedCredentials()
end
- try
+ return try
if use_cli_git()
- let remoteurl=remoteurl
- cd(LibGit2.path(repo)) do
- cmd = `git fetch -q $remoteurl $(only(refspecs))`
- try
- run(pipeline(cmd; stdout=devnull))
- catch err
- Pkg.Types.pkgerror("The command $(cmd) failed, error: $err")
- end
+ let remoteurl = remoteurl
+ cmd = `git -C $(LibGit2.path(repo)) fetch -q $remoteurl $(only(refspecs))`
+ try
+ run(pipeline(cmd; stdout = devnull))
+ catch err
+ Pkg.Types.pkgerror("The command $(cmd) failed, error: $err")
end
end
else
- return LibGit2.fetch(repo; remoteurl=remoteurl, callbacks=callbacks, refspecs=refspecs, kwargs...)
+ return LibGit2.fetch(repo; remoteurl, callbacks, credentials, refspecs, kwargs...)
end
catch err
err isa LibGit2.GitError || rethrow()
@@ -196,8 +199,8 @@ end
# This code gratefully adapted from https://github.com/simonbyrne/GitX.jl
-@enum GitMode mode_dir=0o040000 mode_normal=0o100644 mode_executable=0o100755 mode_symlink=0o120000 mode_submodule=0o160000
-Base.string(mode::GitMode) = string(UInt32(mode); base=8)
+@enum GitMode mode_dir = 0o040000 mode_normal = 0o100644 mode_executable = 0o100755 mode_symlink = 0o120000 mode_submodule = 0o160000
+Base.string(mode::GitMode) = string(UInt32(mode); base = 8)
Base.print(io::IO, mode::GitMode) = print(io, string(mode))
function gitmode(path::AbstractString)
@@ -227,7 +230,7 @@ end
Calculate the git blob hash of a given path.
"""
-function blob_hash(::Type{HashType}, path::AbstractString) where HashType
+function blob_hash(::Type{HashType}, path::AbstractString) where {HashType}
ctx = HashType()
if islink(path)
datalen = length(readlink(path))
@@ -239,7 +242,7 @@ function blob_hash(::Type{HashType}, path::AbstractString) where HashType
SHA.update!(ctx, Vector{UInt8}("blob $(datalen)\0"))
# Next, read data in in chunks of 4KB
- buff = Vector{UInt8}(undef, 4*1024)
+ buff = Vector{UInt8}(undef, 4 * 1024)
try
if islink(path)
@@ -287,9 +290,9 @@ end
Calculate the git tree hash of a given path.
"""
-function tree_hash(::Type{HashType}, root::AbstractString; debug_out::Union{IO,Nothing} = nothing, indent::Int=0) where HashType
+function tree_hash(::Type{HashType}, root::AbstractString; debug_out::Union{IO, Nothing} = nothing, indent::Int = 0) where {HashType}
entries = Tuple{String, Vector{UInt8}, GitMode}[]
- for f in sort(readdir(root; join=true); by = f -> gitmode(f) == mode_dir ? f*"/" : f)
+ for f in sort(readdir(root; join = true); by = f -> gitmode(f) == mode_dir ? f * "/" : f)
# Skip `.git` directories
if basename(f) == ".git"
continue
@@ -306,7 +309,7 @@ function tree_hash(::Type{HashType}, root::AbstractString; debug_out::Union{IO,N
if debug_out !== nothing
child_stream = IOBuffer()
end
- hash = tree_hash(HashType, filepath; debug_out=child_stream, indent=indent+1)
+ hash = tree_hash(HashType, filepath; debug_out = child_stream, indent = indent + 1)
if debug_out !== nothing
indent_str = "| "^indent
println(debug_out, "$(indent_str)+ [D] $(basename(filepath)) - $(bytes2hex(hash))")
@@ -326,7 +329,7 @@ function tree_hash(::Type{HashType}, root::AbstractString; debug_out::Union{IO,N
content_size = 0
for (n, h, m) in entries
- content_size += ndigits(UInt32(m); base=8) + 1 + sizeof(n) + 1 + sizeof(h)
+ content_size += ndigits(UInt32(m); base = 8) + 1 + sizeof(n) + 1 + sizeof(h)
end
# Return the hash of these entries
@@ -338,17 +341,24 @@ function tree_hash(::Type{HashType}, root::AbstractString; debug_out::Union{IO,N
end
return SHA.digest!(ctx)
end
-tree_hash(root::AbstractString; debug_out::Union{IO,Nothing} = nothing) = tree_hash(SHA.SHA1_CTX, root; debug_out)
+tree_hash(root::AbstractString; debug_out::Union{IO, Nothing} = nothing) = tree_hash(SHA.SHA1_CTX, root; debug_out)
function check_valid_HEAD(repo)
- try LibGit2.head(repo)
+ return try
+ LibGit2.head(repo)
catch err
- Pkg.Types.pkgerror("invalid git HEAD ($(err.msg))")
+ url = try
+ geturl(repo)
+ catch
+ "(unknown url)"
+ end
+ Pkg.Types.pkgerror("invalid git HEAD in $url ($(err.msg))")
end
end
-function git_file_stream(repo::LibGit2.GitRepo, spec::String; fakeit::Bool=false)::IO
- blob = try LibGit2.GitBlob(repo, spec)
+function git_file_stream(repo::LibGit2.GitRepo, spec::String; fakeit::Bool = false)::IO
+ blob = try
+ LibGit2.GitBlob(repo, spec)
catch err
err isa LibGit2.GitError && err.code == LibGit2.Error.ENOTFOUND || rethrow()
fakeit && return devnull
diff --git a/src/HistoricalStdlibs.jl b/src/HistoricalStdlibs.jl
index d5b4ad5049..6867d1e832 100644
--- a/src/HistoricalStdlibs.jl
+++ b/src/HistoricalStdlibs.jl
@@ -5,13 +5,13 @@ struct StdlibInfo
uuid::UUID
# This can be `nothing` if it's an unregistered stdlib
- version::Union{Nothing,VersionNumber}
+ version::Union{Nothing, VersionNumber}
deps::Vector{UUID}
weakdeps::Vector{UUID}
end
-const DictStdLibs = Dict{UUID,StdlibInfo}
+const DictStdLibs = Dict{UUID, StdlibInfo}
# Julia standard libraries with duplicate entries removed so as to store only the
# first release in a set of releases that all contain the same set of stdlibs.
diff --git a/src/MiniProgressBars.jl b/src/MiniProgressBars.jl
index c0a487d6b6..5682fc04a4 100644
--- a/src/MiniProgressBars.jl
+++ b/src/MiniProgressBars.jl
@@ -5,12 +5,12 @@ export MiniProgressBar, start_progress, end_progress, show_progress, print_progr
using Printf
# Until Base.format_bytes supports sigdigits
-function pkg_format_bytes(bytes; binary=true, sigdigits::Integer=3)
+function pkg_format_bytes(bytes; binary = true, sigdigits::Integer = 3)
units = binary ? Base._mem_units : Base._cnt_units
factor = binary ? 1024 : 1000
bytes, mb = Base.prettyprint_getunits(bytes, length(units), Int64(factor))
if mb == 1
- return string(Int(bytes), " ", Base._mem_units[mb], bytes==1 ? "" : "s")
+ return string(Int(bytes), " ", Base._mem_units[mb], bytes == 1 ? "" : "s")
else
return string(Base.Ryu.writefixed(Float64(bytes), sigdigits), binary ? " $(units[mb])" : "$(units[mb])B")
end
@@ -37,10 +37,10 @@ const PROGRESS_BAR_PERCENTAGE_GRANULARITY = Ref(0.1)
function start_progress(io::IO, _::MiniProgressBar)
ansi_disablecursor = "\e[?25l"
- print(io, ansi_disablecursor)
+ return print(io, ansi_disablecursor)
end
-function show_progress(io::IO, p::MiniProgressBar; termwidth=nothing, carriagereturn=true)
+function show_progress(io::IO, p::MiniProgressBar; termwidth = nothing, carriagereturn = true)
if p.max == 0
perc = 0.0
prev_perc = 0.0
@@ -64,22 +64,22 @@ function show_progress(io::IO, p::MiniProgressBar; termwidth=nothing, carriagere
progress_text = if p.mode == :percentage
@sprintf "%2.1f %%" perc
elseif p.mode == :int
- string(p.current, "/", p.max)
+ string(p.current, "/", p.max)
elseif p.mode == :data
- lpad(string(pkg_format_bytes(p.current; sigdigits=1), "/", pkg_format_bytes(p.max; sigdigits=1)), 20)
+ lpad(string(pkg_format_bytes(p.current; sigdigits = 1), "/", pkg_format_bytes(p.max; sigdigits = 1)), 20)
else
error("Unknown mode $(p.mode)")
end
termwidth = @something termwidth displaysize(io)[2]
- max_progress_width = max(0, min(termwidth - textwidth(p.header) - textwidth(progress_text) - 10 , p.width))
+ max_progress_width = max(0, min(termwidth - textwidth(p.header) - textwidth(progress_text) - 10, p.width))
n_filled = floor(Int, max_progress_width * perc / 100)
partial_filled = (max_progress_width * perc / 100) - n_filled
n_left = max_progress_width - n_filled
headers = split(p.header)
- to_print = sprint(; context=io) do io
+ to_print = sprint(; context = io) do io
print(io, " "^p.indent)
if p.main
- printstyled(io, headers[1], " "; color=:green, bold=true)
+ printstyled(io, headers[1], " "; color = :green, bold = true)
length(headers) > 1 && printstyled(io, join(headers[2:end], ' '), " ")
else
print(io, p.header, " ")
@@ -88,31 +88,31 @@ function show_progress(io::IO, p::MiniProgressBar; termwidth=nothing, carriagere
print(io, p.status)
else
hascolor = get(io, :color, false)::Bool
- printstyled(io, "━"^n_filled; color=p.color)
+ printstyled(io, "━"^n_filled; color = p.color)
if n_left > 0
if hascolor
if partial_filled > 0.5
- printstyled(io, "╸"; color=p.color) # More filled, use ╸
+ printstyled(io, "╸"; color = p.color) # More filled, use ╸
else
- printstyled(io, "╺"; color=:light_black) # Less filled, use ╺
+ printstyled(io, "╺"; color = :light_black) # Less filled, use ╺
end
end
c = hascolor ? "━" : " "
- printstyled(io, c^(n_left-1+!hascolor); color=:light_black)
+ printstyled(io, c^(n_left - 1 + !hascolor); color = :light_black)
end
- printstyled(io, " "; color=:light_black)
+ printstyled(io, " "; color = :light_black)
print(io, progress_text)
end
carriagereturn && print(io, "\r")
end
# Print everything in one call
- print(io, to_print)
+ return print(io, to_print)
end
function end_progress(io, p::MiniProgressBar)
ansi_enablecursor = "\e[?25h"
ansi_clearline = "\e[2K"
- print(io, ansi_enablecursor * ansi_clearline)
+ return print(io, ansi_enablecursor * ansi_clearline)
end
# Useful when writing a progress bar in the bottom
@@ -130,7 +130,7 @@ function print_progress_bottom(io::IO)
ansi_clearline = "\e[2K"
ansi_movecol1 = "\e[1G"
ansi_moveup(n::Int) = string("\e[", n, "A")
- print(io, "\e[S" * ansi_moveup(1) * ansi_clearline * ansi_movecol1)
+ return print(io, "\e[S" * ansi_moveup(1) * ansi_clearline * ansi_movecol1)
end
end
diff --git a/src/Operations.jl b/src/Operations.jl
index 6d6bc94558..a16355d4ef 100644
--- a/src/Operations.jl
+++ b/src/Operations.jl
@@ -2,6 +2,7 @@
module Operations
+using FileWatching: FileWatching
using UUIDs
using Random: randstring
import LibGit2, Dates, TOML
@@ -9,19 +10,47 @@ import LibGit2, Dates, TOML
using ..Types, ..Resolve, ..PlatformEngines, ..GitTools, ..MiniProgressBars
import ..depots, ..depots1, ..devdir, ..set_readonly, ..Types.PackageEntry
import ..Artifacts: ensure_artifact_installed, artifact_names, extract_all_hashes,
- artifact_exists, select_downloadable_artifacts
+ artifact_exists, select_downloadable_artifacts, mv_temp_dir_retries
using Base.BinaryPlatforms
import ...Pkg
import ...Pkg: pkg_server, Registry, pathrepr, can_fancyprint, printpkgstyle, stderr_f, OFFLINE_MODE
import ...Pkg: UPDATED_REGISTRY_THIS_SESSION, RESPECT_SYSIMAGE_VERSIONS, should_autoprecompile
-import ...Pkg: usable_io
+import ...Pkg: usable_io, discover_repo
#########
# Utils #
#########
+# Helper functions for yanked package checking
+function is_pkgversion_yanked(uuid::UUID, version::VersionNumber, registries::Vector{Registry.RegistryInstance} = Registry.reachable_registries())
+ for reg in registries
+ reg_pkg = get(reg, uuid, nothing)
+ if reg_pkg !== nothing
+ info = Registry.registry_info(reg_pkg)
+ if haskey(info.version_info, version) && Registry.isyanked(info, version)
+ return true
+ end
+ end
+ end
+ return false
+end
+
+function is_pkgversion_yanked(pkg::PackageSpec, registries::Vector{Registry.RegistryInstance} = Registry.reachable_registries())
+ if pkg.uuid === nothing || pkg.version === nothing || !(pkg.version isa VersionNumber)
+ return false
+ end
+ return is_pkgversion_yanked(pkg.uuid, pkg.version, registries)
+end
+
+function is_pkgversion_yanked(entry::PackageEntry, registries::Vector{Registry.RegistryInstance} = Registry.reachable_registries())
+ if entry.version === nothing || !(entry.version isa VersionNumber)
+ return false
+ end
+ return is_pkgversion_yanked(entry.uuid, entry.version, registries)
+end
+
function default_preserve()
- if Base.get_bool_env("JULIA_PKG_PRESERVE_TIERED_INSTALLED", false)
+ return if Base.get_bool_env("JULIA_PKG_PRESERVE_TIERED_INSTALLED", false)
PRESERVE_TIERED_INSTALLED
else
PRESERVE_TIERED
@@ -42,14 +71,14 @@ end
# more accurate name is `should_be_tracking_registered_version`
# the only way to know for sure is to key into the registries
-tracking_registered_version(pkg::Union{PackageSpec, PackageEntry}, julia_version=VERSION) =
+tracking_registered_version(pkg::Union{PackageSpec, PackageEntry}, julia_version = VERSION) =
!is_stdlib(pkg.uuid, julia_version) && pkg.path === nothing && pkg.repo.source === nothing
function source_path(manifest_file::String, pkg::Union{PackageSpec, PackageEntry}, julia_version = VERSION)
- pkg.tree_hash !== nothing ? find_installed(pkg.name, pkg.uuid, pkg.tree_hash) :
- pkg.path !== nothing ? joinpath(dirname(manifest_file), pkg.path) :
- is_or_was_stdlib(pkg.uuid, julia_version) ? Types.stdlib_path(pkg.name) :
- nothing
+ return pkg.tree_hash !== nothing ? find_installed(pkg.name, pkg.uuid, pkg.tree_hash) :
+ pkg.path !== nothing ? joinpath(dirname(manifest_file), pkg.path) :
+ is_or_was_stdlib(pkg.uuid, julia_version) ? Types.stdlib_path(pkg.name) :
+ nothing
end
#TODO rename
@@ -67,8 +96,10 @@ function load_version(version, fixed, preserve::PreserveLevel)
end
end
-function load_direct_deps(env::EnvCache, pkgs::Vector{PackageSpec}=PackageSpec[];
- preserve::PreserveLevel=PRESERVE_DIRECT)
+function load_direct_deps(
+ env::EnvCache, pkgs::Vector{PackageSpec} = PackageSpec[];
+ preserve::PreserveLevel = PRESERVE_DIRECT
+ )
pkgs_direct = load_project_deps(env.project, env.project_file, env.manifest, env.manifest_file, pkgs; preserve)
for (path, project) in env.workspace
@@ -103,12 +134,14 @@ function load_direct_deps(env::EnvCache, pkgs::Vector{PackageSpec}=PackageSpec[]
return vcat(pkgs, pkgs_direct)
end
-function load_project_deps(project::Project, project_file::String, manifest::Manifest, manifest_file::String, pkgs::Vector{PackageSpec}=PackageSpec[];
- preserve::PreserveLevel=PRESERVE_DIRECT)
+function load_project_deps(
+ project::Project, project_file::String, manifest::Manifest, manifest_file::String, pkgs::Vector{PackageSpec} = PackageSpec[];
+ preserve::PreserveLevel = PRESERVE_DIRECT
+ )
pkgs_direct = PackageSpec[]
if project.name !== nothing && project.uuid !== nothing && findfirst(pkg -> pkg.uuid == project.uuid, pkgs) === nothing
path = Types.relative_project_path(manifest_file, dirname(project_file))
- pkg = PackageSpec(;name=project.name, uuid=project.uuid, version=project.version, path)
+ pkg = PackageSpec(; name = project.name, uuid = project.uuid, version = project.version, path)
push!(pkgs_direct, pkg)
end
@@ -116,43 +149,51 @@ function load_project_deps(project::Project, project_file::String, manifest::Man
findfirst(pkg -> pkg.uuid == uuid, pkgs) === nothing || continue # do not duplicate packages
path, repo = get_path_repo(project, name)
entry = manifest_info(manifest, uuid)
- push!(pkgs_direct, entry === nothing ?
- PackageSpec(;uuid, name, path, repo) :
- PackageSpec(;
- uuid = uuid,
- name = name,
- path = path === nothing ? entry.path : path,
- repo = repo == GitRepo() ? entry.repo : repo,
- pinned = entry.pinned,
- tree_hash = entry.tree_hash, # TODO should tree_hash be changed too?
- version = load_version(entry.version, isfixed(entry), preserve),
- ))
+ push!(
+ pkgs_direct, entry === nothing ?
+ PackageSpec(; uuid, name, path, repo) :
+ PackageSpec(;
+ uuid = uuid,
+ name = name,
+ path = path === nothing ? entry.path : path,
+ repo = repo == GitRepo() ? entry.repo : repo,
+ pinned = entry.pinned,
+ tree_hash = entry.tree_hash, # TODO should tree_hash be changed too?
+ version = load_version(entry.version, isfixed(entry), preserve),
+ )
+ )
end
return pkgs_direct
end
-function load_manifest_deps(manifest::Manifest, pkgs::Vector{PackageSpec}=PackageSpec[];
- preserve::PreserveLevel=PRESERVE_ALL)
+function load_manifest_deps(
+ manifest::Manifest, pkgs::Vector{PackageSpec} = PackageSpec[];
+ preserve::PreserveLevel = PRESERVE_ALL
+ )
pkgs = copy(pkgs)
for (uuid, entry) in manifest
findfirst(pkg -> pkg.uuid == uuid, pkgs) === nothing || continue # do not duplicate packages
- push!(pkgs, PackageSpec(
- uuid = uuid,
- name = entry.name,
- path = entry.path,
- pinned = entry.pinned,
- repo = entry.repo,
- tree_hash = entry.tree_hash, # TODO should tree_hash be changed too?
- version = load_version(entry.version, isfixed(entry), preserve),
- ))
+ push!(
+ pkgs, PackageSpec(
+ uuid = uuid,
+ name = entry.name,
+ path = entry.path,
+ pinned = entry.pinned,
+ repo = entry.repo,
+ tree_hash = entry.tree_hash, # TODO should tree_hash be changed too?
+ version = load_version(entry.version, isfixed(entry), preserve),
+ )
+ )
end
return pkgs
end
-function load_all_deps(env::EnvCache, pkgs::Vector{PackageSpec}=PackageSpec[];
- preserve::PreserveLevel=PRESERVE_ALL)
- pkgs = load_manifest_deps(env.manifest, pkgs; preserve=preserve)
+function load_all_deps(
+ env::EnvCache, pkgs::Vector{PackageSpec} = PackageSpec[];
+ preserve::PreserveLevel = PRESERVE_ALL
+ )
+ pkgs = load_manifest_deps(env.manifest, pkgs; preserve = preserve)
# Sources takes presedence over the manifest...
for pkg in pkgs
path, repo = get_path_repo(env.project, pkg.name)
@@ -166,7 +207,7 @@ function load_all_deps(env::EnvCache, pkgs::Vector{PackageSpec}=PackageSpec[];
pkg.repo.rev = repo.rev
end
end
- return load_direct_deps(env, pkgs; preserve=preserve)
+ return load_direct_deps(env, pkgs; preserve = preserve)
end
function load_all_deps_loadable(env::EnvCache)
@@ -178,7 +219,7 @@ function load_all_deps_loadable(env::EnvCache)
end
-function is_instantiated(env::EnvCache, workspace::Bool=false; platform = HostPlatform())::Bool
+function is_instantiated(env::EnvCache, workspace::Bool = false; platform = HostPlatform())::Bool
# Load everything
if workspace
pkgs = Operations.load_all_deps(env)
@@ -191,7 +232,7 @@ function is_instantiated(env::EnvCache, workspace::Bool=false; platform = HostPl
# so only add it if it isn't there
idx = findfirst(x -> x.uuid == env.pkg.uuid, pkgs)
if idx === nothing
- push!(pkgs, Types.PackageSpec(name=env.pkg.name, uuid=env.pkg.uuid, version=env.pkg.version, path=dirname(env.project_file)))
+ push!(pkgs, Types.PackageSpec(name = env.pkg.name, uuid = env.pkg.uuid, version = env.pkg.version, path = dirname(env.project_file)))
end
else
# Make sure artifacts for project exist even if it is not a package
@@ -206,8 +247,10 @@ function update_manifest!(env::EnvCache, pkgs::Vector{PackageSpec}, deps_map, ju
empty!(manifest)
for pkg in pkgs
- entry = PackageEntry(;name = pkg.name, version = pkg.version, pinned = pkg.pinned,
- tree_hash = pkg.tree_hash, path = pkg.path, repo = pkg.repo, uuid=pkg.uuid)
+ entry = PackageEntry(;
+ name = pkg.name, version = pkg.version, pinned = pkg.pinned,
+ tree_hash = pkg.tree_hash, path = pkg.path, repo = pkg.repo, uuid = pkg.uuid
+ )
if is_stdlib(pkg.uuid, julia_version)
# Only set stdlib versions for versioned (external) stdlibs
entry.version = stdlib_version(pkg.uuid, julia_version)
@@ -216,30 +259,51 @@ function update_manifest!(env::EnvCache, pkgs::Vector{PackageSpec}, deps_map, ju
env.manifest[pkg.uuid] = entry
end
prune_manifest(env)
- record_project_hash(env)
+ return record_project_hash(env)
end
# This has to be done after the packages have been downloaded
# since we need access to the Project file to read the information
# about extensions
-function fixups_from_projectfile!(env::EnvCache)
+function fixups_from_projectfile!(ctx::Context)
+ env = ctx.env
for pkg in values(env.manifest)
- # isfile_casesenstive within locate_project_file used to error on Windows if given a
- # relative path so abspath it to be extra safe https://github.com/JuliaLang/julia/pull/55220
- project_file = Base.locate_project_file(abspath(source_path(env.manifest_file, pkg)))
- if project_file isa String && isfile(project_file)
- p = Types.read_project(project_file)
- pkg.weakdeps = p.weakdeps
- pkg.exts = p.exts
- pkg.entryfile = p.entryfile
- for (name, _) in p.weakdeps
- if !haskey(p.deps, name)
+ if ctx.julia_version !== VERSION && is_stdlib(pkg.uuid, ctx.julia_version)
+ # Special handling for non-current julia_version resolving given the source for historical stdlibs
+ # isn't available at this stage as Pkg thinks it should not be needed, so rely on STDLIBS_BY_VERSION
+ stdlibs = Types.get_last_stdlibs(ctx.julia_version)
+ p = stdlibs[pkg.uuid]
+ pkg.weakdeps = Dict{String, Base.UUID}(stdlibs[uuid].name => uuid for uuid in p.weakdeps)
+ # pkg.exts = p.exts # TODO: STDLIBS_BY_VERSION doesn't record this
+ # pkg.entryfile = p.entryfile # TODO: STDLIBS_BY_VERSION doesn't record this
+ for (name, _) in pkg.weakdeps
+ if !(name in p.deps)
delete!(pkg.deps, name)
end
end
+ else
+ # normal mode based on project files.
+ # isfile_casesensitive within locate_project_file used to error on Windows if given a
+ # relative path so abspath it to be extra safe https://github.com/JuliaLang/julia/pull/55220
+ sourcepath = source_path(env.manifest_file, pkg)
+ if sourcepath === nothing
+ pkgerror("could not find source path for package $(pkg.name) based on manifest $(env.manifest_file)")
+ end
+ project_file = Base.locate_project_file(abspath(sourcepath))
+ if project_file isa String && isfile(project_file)
+ p = Types.read_project(project_file)
+ pkg.weakdeps = p.weakdeps
+ pkg.exts = p.exts
+ pkg.entryfile = p.entryfile
+ for (name, _) in p.weakdeps
+ if !haskey(p.deps, name)
+ delete!(pkg.deps, name)
+ end
+ end
+ end
end
end
- prune_manifest(env)
+ return prune_manifest(env)
end
####################
@@ -301,8 +365,8 @@ end
function collect_project(pkg::Union{PackageSpec, Nothing}, path::String)
deps = PackageSpec[]
weakdeps = Set{UUID}()
- project_file = projectfile_path(path; strict=true)
- project = project_file === nothing ? Project() : read_project(project_file)
+ project_file = projectfile_path(path; strict = true)
+ project = project_file === nothing ? Project() : read_project(project_file)
julia_compat = get_compat(project, "julia")
if !isnothing(julia_compat) && !(VERSION in julia_compat)
pkgerror("julia version requirement from Project.toml's compat section not satisfied for package at `$path`")
@@ -310,7 +374,7 @@ function collect_project(pkg::Union{PackageSpec, Nothing}, path::String)
for (name, uuid) in project.deps
path, repo = get_path_repo(project, name)
vspec = get_compat(project, name)
- push!(deps, PackageSpec(name=name, uuid=uuid, version=vspec, path=path, repo=repo))
+ push!(deps, PackageSpec(name = name, uuid = uuid, version = vspec, path = path, repo = repo))
end
for (name, uuid) in project.weakdeps
vspec = get_compat(project, name)
@@ -329,7 +393,7 @@ function collect_project(pkg::Union{PackageSpec, Nothing}, path::String)
end
is_tracking_path(pkg) = pkg.path !== nothing
-is_tracking_repo(pkg) = pkg.repo.source !== nothing
+is_tracking_repo(pkg) = (pkg.repo.source !== nothing || pkg.repo.rev !== nothing)
is_tracking_registry(pkg) = !is_tracking_path(pkg) && !is_tracking_repo(pkg)
isfixed(pkg) = !is_tracking_registry(pkg) || pkg.pinned
@@ -337,19 +401,28 @@ function collect_developed!(env::EnvCache, pkg::PackageSpec, developed::Vector{P
source = project_rel_path(env, source_path(env.manifest_file, pkg))
source_env = EnvCache(projectfile_path(source))
pkgs = load_project_deps(source_env.project, source_env.project_file, source_env.manifest, source_env.manifest_file)
- for pkg in filter(is_tracking_path, pkgs)
+ for pkg in pkgs
if any(x -> x.uuid == pkg.uuid, developed)
continue
end
- # normalize path
- # TODO: If path is collected from project, it is relative to the project file
- # otherwise relative to manifest file....
- pkg.path = Types.relative_project_path(env.manifest_file,
- project_rel_path(source_env,
- source_path(source_env.manifest_file, pkg)))
- push!(developed, pkg)
- collect_developed!(env, pkg, developed)
+ if is_tracking_path(pkg)
+ # normalize path
+ # TODO: If path is collected from project, it is relative to the project file
+ # otherwise relative to manifest file....
+ pkg.path = Types.relative_project_path(
+ env.manifest_file,
+ project_rel_path(
+ source_env,
+ source_path(source_env.manifest_file, pkg)
+ )
+ )
+ push!(developed, pkg)
+ collect_developed!(env, pkg, developed)
+ elseif is_tracking_repo(pkg)
+ push!(developed, pkg)
+ end
end
+ return
end
function collect_developed(env::EnvCache, pkgs::Vector{PackageSpec})
@@ -361,8 +434,8 @@ function collect_developed(env::EnvCache, pkgs::Vector{PackageSpec})
end
function collect_fixed!(env::EnvCache, pkgs::Vector{PackageSpec}, names::Dict{UUID, String})
- deps_map = Dict{UUID,Vector{PackageSpec}}()
- weak_map = Dict{UUID,Set{UUID}}()
+ deps_map = Dict{UUID, Vector{PackageSpec}}()
+ weak_map = Dict{UUID, Set{UUID}}()
uuid = Types.project_uuid(env)
deps, weakdeps = collect_project(env.pkg, dirname(env.project_file))
@@ -372,7 +445,7 @@ function collect_fixed!(env::EnvCache, pkgs::Vector{PackageSpec}, names::Dict{UU
for (path, project) in env.workspace
uuid = Types.project_uuid(project, path)
- pkg = project.name === nothing ? nothing : PackageSpec(name=project.name, uuid=uuid)
+ pkg = project.name === nothing ? nothing : PackageSpec(name = project.name, uuid = uuid)
deps, weakdeps = collect_project(pkg, path)
deps_map[Types.project_uuid(env)] = deps
weak_map[Types.project_uuid(env)] = weakdeps
@@ -386,19 +459,37 @@ function collect_fixed!(env::EnvCache, pkgs::Vector{PackageSpec}, names::Dict{UU
if (path === nothing || !isdir(path)) && (pkg.repo.rev !== nothing || pkg.repo.source !== nothing)
# ensure revved package is installed
# pkg.tree_hash is set in here
- Types.handle_repo_add!(Types.Context(env=env), pkg)
+ Types.handle_repo_add!(Types.Context(env = env), pkg)
# Recompute path
path = project_rel_path(env, source_path(env.manifest_file, pkg))
end
if !isdir(path)
- pkgerror("expected package $(err_rep(pkg)) to exist at path `$path`")
+ # Find which packages depend on this missing package for better error reporting
+ dependents = String[]
+ for (dep_uuid, dep_entry) in env.manifest.deps
+ if pkg.uuid in values(dep_entry.deps) || pkg.uuid in values(dep_entry.weakdeps)
+ push!(dependents, dep_entry.name === nothing ? "unknown package [$dep_uuid]" : dep_entry.name)
+ end
+ end
+
+ error_msg = "expected package $(err_rep(pkg)) to exist at path `$path`"
+ error_msg *= "\n\nThis package is referenced in the manifest file: $(env.manifest_file)"
+
+ if !isempty(dependents)
+ if length(dependents) == 1
+ error_msg *= "\nIt is required by: $(dependents[1])"
+ else
+ error_msg *= "\nIt is required by:\n$(join([" - $dep" for dep in dependents], "\n"))"
+ end
+ end
+ pkgerror(error_msg)
end
deps, weakdeps = collect_project(pkg, path)
deps_map[pkg.uuid] = deps
weak_map[pkg.uuid] = weakdeps
end
- fixed = Dict{UUID,Resolve.Fixed}()
+ fixed = Dict{UUID, Resolve.Fixed}()
# Collect the dependencies for the fixed packages
for (uuid, deps) in deps_map
q = Dict{UUID, VersionSpec}()
@@ -438,8 +529,10 @@ end
# sets version to a VersionNumber
# adds any other packages which may be in the dependency graph
# all versioned packages should have a `tree_hash`
-function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}, julia_version,
- installed_only::Bool)
+function resolve_versions!(
+ env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}, julia_version,
+ installed_only::Bool
+ )
installed_only = installed_only || OFFLINE_MODE[]
# compatibility
if julia_version !== nothing
@@ -447,7 +540,7 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn
env.manifest.julia_version = dropbuild(VERSION)
v = intersect(julia_version, get_compat_workspace(env, "julia"))
if isempty(v)
- @warn "julia version requirement for project not satisfied" _module=nothing _file=nothing
+ @warn "julia version requirement for project not satisfied" _module = nothing _file = nothing
end
end
@@ -480,8 +573,11 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn
compat = get_compat_workspace(env, pkg.name)
v = intersect(pkg.version, compat)
if isempty(v)
- throw(Resolve.ResolverError(
- "empty intersection between $(pkg.name)@$(pkg.version) and project compatibility $(compat)"))
+ throw(
+ Resolve.ResolverError(
+ "empty intersection between $(pkg.name)@$(pkg.version) and project compatibility $(compat)"
+ )
+ )
end
# Work around not clobbering 0.x.y+ for checked out old type of packages
if !(pkg.version isa VersionNumber)
@@ -507,6 +603,13 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn
old_v = get(jll_fix, uuid, nothing)
# We only fixup a JLL if the old major/minor/patch matches the new major/minor/patch
if old_v !== nothing && Base.thispatch(old_v) == Base.thispatch(vers_fix[uuid])
+ new_v = vers_fix[uuid]
+ if old_v != new_v && haskey(compat_map[uuid], old_v)
+ compat_map[uuid][old_v] = compat_map[uuid][new_v]
+ # Note that we don't delete!(compat_map[uuid], old_v) because we want to keep the compat info around
+ # in case there's JLL version confusion between the sysimage pkgorigins version and manifest
+ # but that issue hasn't been fully specified, so keep it to be cautious
+ end
vers_fix[uuid] = old_v
end
end
@@ -521,7 +624,7 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn
pkg.version = vers[pkg.uuid]
else
name = is_stdlib(uuid) ? stdlib_infos()[uuid].name : registered_name(registries, uuid)
- push!(pkgs, PackageSpec(;name=name, uuid=uuid, version=ver))
+ push!(pkgs, PackageSpec(; name = name, uuid = uuid, version = ver))
end
end
final_deps_map = Dict{UUID, Dict{String, UUID}}()
@@ -536,8 +639,12 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn
deps_fixed
else
d = Dict{String, UUID}()
+ if !haskey(compat_map[pkg.uuid], pkg.version)
+ available_versions = sort!(collect(keys(compat_map[pkg.uuid])))
+ pkgerror("version $(pkg.version) of package $(pkg.name) is not available. Available versions: $(join(available_versions, ", "))")
+ end
for (uuid, _) in compat_map[pkg.uuid][pkg.version]
- d[names[uuid]] = uuid
+ d[names[uuid]] = uuid
end
d
end
@@ -549,17 +656,21 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn
return final_deps_map
end
-get_or_make!(d::Dict{K,V}, k::K) where {K,V} = get!(d, k) do; V() end
+get_or_make!(d::Dict{K, V}, k::K) where {K, V} = get!(d, k) do;
+ V()
+end
const JULIA_UUID = UUID("1222c4b2-2114-5bfd-aeef-88e4692bbb3e")
const PKGORIGIN_HAVE_VERSION = :version in fieldnames(Base.PkgOrigin)
-function deps_graph(env::EnvCache, registries::Vector{Registry.RegistryInstance}, uuid_to_name::Dict{UUID,String},
- reqs::Resolve.Requires, fixed::Dict{UUID,Resolve.Fixed}, julia_version,
- installed_only::Bool)
+function deps_graph(
+ env::EnvCache, registries::Vector{Registry.RegistryInstance}, uuid_to_name::Dict{UUID, String},
+ reqs::Resolve.Requires, fixed::Dict{UUID, Resolve.Fixed}, julia_version,
+ installed_only::Bool
+ )
uuids = Set{UUID}()
union!(uuids, keys(reqs))
union!(uuids, keys(fixed))
- for fixed_uuids in map(fx->keys(fx.requires), values(fixed))
+ for fixed_uuids in map(fx -> keys(fx.requires), values(fixed))
union!(uuids, fixed_uuids)
end
@@ -567,11 +678,11 @@ function deps_graph(env::EnvCache, registries::Vector{Registry.RegistryInstance}
seen = Set{UUID}()
# pkg -> version -> (dependency => compat):
- all_compat = Dict{UUID,Dict{VersionNumber,Dict{UUID,VersionSpec}}}()
- weak_compat = Dict{UUID,Dict{VersionNumber,Set{UUID}}}()
+ all_compat = Dict{UUID, Dict{VersionNumber, Dict{UUID, VersionSpec}}}()
+ weak_compat = Dict{UUID, Dict{VersionNumber, Set{UUID}}}()
for (fp, fx) in fixed
- all_compat[fp] = Dict(fx.version => Dict{UUID,VersionSpec}())
+ all_compat[fp] = Dict(fx.version => Dict{UUID, VersionSpec}())
end
while true
@@ -619,7 +730,7 @@ function deps_graph(env::EnvCache, registries::Vector{Registry.RegistryInstance}
# TODO, pull this into a function
Registry.isyanked(info, v) && continue
if installed_only
- pkg_spec = PackageSpec(name=pkg.name, uuid=pkg.uuid, version=v, tree_hash=Registry.treehash(info, v))
+ pkg_spec = PackageSpec(name = pkg.name, uuid = pkg.uuid, version = v, tree_hash = Registry.treehash(info, v))
is_package_downloaded(env.manifest_file, pkg_spec) || continue
end
@@ -639,13 +750,14 @@ function deps_graph(env::EnvCache, registries::Vector{Registry.RegistryInstance}
merge!(dv, compat_info)
union!(uuids, keys(compat_info))
end
+ return
end
add_compat!(all_compat_u, Registry.compat_info(info))
weak_compat_info = Registry.weak_compat_info(info)
if weak_compat_info !== nothing
add_compat!(all_compat_u, weak_compat_info)
# Version to Set
- for (v, compat_info) in weak_compat_info
+ for (v, compat_info) in weak_compat_info
weak_compat_u[v] = keys(compat_info)
end
end
@@ -667,7 +779,7 @@ function deps_graph(env::EnvCache, registries::Vector{Registry.RegistryInstance}
end
return Resolve.Graph(all_compat, weak_compat, uuid_to_name, reqs, fixed, false, julia_version),
- all_compat
+ all_compat
end
########################
@@ -683,11 +795,16 @@ end
# Returns if archive successfully installed
function install_archive(
- urls::Vector{Pair{String,Bool}},
- hash::SHA1,
- version_path::String;
- io::IO=stderr_f()
-)::Bool
+ urls::Vector{Pair{String, Bool}},
+ hash::SHA1,
+ version_path::String;
+ io::IO = stderr_f()
+ )::Bool
+ # Because we use `mv_temp_dir_retries` which uses `rename` not `mv` it can fail if the temp
+ # files are on a different fs. So use a temp dir in the same depot dir as some systems might
+ # be serving different parts of the depot on different filesystems via links i.e. pkgeval does this.
+ depot_temp = mkpath(joinpath(dirname(dirname(version_path)), "temp")) # .julia/packages/temp
+
tmp_objects = String[]
url_success = false
for (url, top) in urls
@@ -695,19 +812,21 @@ function install_archive(
push!(tmp_objects, path) # for cleanup
url_success = true
try
- PlatformEngines.download(url, path; verbose=false, io=io)
+ PlatformEngines.download(url, path; verbose = false, io = io)
catch e
e isa InterruptException && rethrow()
url_success = false
end
url_success || continue
- dir = joinpath(tempdir(), randstring(12))
+ # the temp dir should be in the same depot because the `rename` operation in `mv_temp_dir_retries`
+ # is possible only if the source and destination are on the same filesystem
+ dir = tempname(depot_temp) * randstring(6)
push!(tmp_objects, dir) # for cleanup
# Might fail to extract an archive (https://github.com/JuliaPackaging/PkgServer.jl/issues/126)
try
- unpack(path, dir; verbose=false)
+ unpack(path, dir; verbose = false)
catch e
- e isa InterruptException && rethrow()
+ e isa ProcessFailedException || rethrow()
@warn "failed to extract archive downloaded from $(url)"
url_success = false
end
@@ -722,34 +841,43 @@ function install_archive(
unpacked = joinpath(dir, dirs[1])
end
# Assert that the tarball unpacked to the tree sha we wanted
- # TODO: Enable on Windows when tree_hash handles
- # executable bits correctly, see JuliaLang/julia #33212.
- if !Sys.iswindows()
- if SHA1(GitTools.tree_hash(unpacked)) != hash
- @warn "tarball content does not match git-tree-sha1"
- url_success = false
- end
- url_success || continue
+ if SHA1(GitTools.tree_hash(unpacked)) != hash
+ @warn "tarball content does not match git-tree-sha1"
+ url_success = false
end
+ url_success || continue
+
# Move content to version path
- !isdir(version_path) && mkpath(version_path)
- mv(unpacked, version_path; force=true)
+ !isdir(dirname(version_path)) && mkpath(dirname(version_path))
+ mv_temp_dir_retries(unpacked, version_path; set_permissions = false)
+
break # successful install
end
# Clean up and exit
- foreach(x -> Base.rm(x; force=true, recursive=true), tmp_objects)
+ foreach(x -> Base.rm(x; force = true, recursive = true), tmp_objects)
return url_success
end
const refspecs = ["+refs/*:refs/remotes/cache/*"]
function install_git(
- io::IO,
- uuid::UUID,
- name::String,
- hash::SHA1,
- urls::Set{String},
- version_path::String
-)::Nothing
+ io::IO,
+ uuid::UUID,
+ name::String,
+ hash::SHA1,
+ urls::Set{String},
+ version_path::String
+ )::Nothing
+ if isempty(urls)
+ pkgerror(
+ "Package $name [$uuid] has no repository URL available. This could happen if:\n" *
+ " - The package is not registered in any configured registry\n" *
+ " - The package exists in a registry but lacks repository information\n" *
+ " - Registry files are corrupted or incomplete\n" *
+ " - Network issues prevented registry updates\n" *
+ "Please check that the package name is correct and that your registries are up to date."
+ )
+ end
+
repo = nothing
tree = nothing
# TODO: Consolidate this with some of the repo handling in Types.jl
@@ -757,17 +885,21 @@ function install_git(
clones_dir = joinpath(depots1(), "clones")
ispath(clones_dir) || mkpath(clones_dir)
repo_path = joinpath(clones_dir, string(uuid))
- repo = GitTools.ensure_clone(io, repo_path, first(urls); isbare=true,
- header = "[$uuid] $name from $(first(urls))")
+ first_url = first(urls)
+ repo = GitTools.ensure_clone(
+ io, repo_path, first_url; isbare = true,
+ header = "[$uuid] $name from $first_url"
+ )
git_hash = LibGit2.GitHash(hash.bytes)
for url in urls
- try LibGit2.with(LibGit2.GitObject, repo, git_hash) do g
+ try
+ LibGit2.with(LibGit2.GitObject, repo, git_hash) do g
end
break # object was found, we can stop
catch err
err isa LibGit2.GitError && err.code == LibGit2.Error.ENOTFOUND || rethrow()
end
- GitTools.fetch(io, repo, url, refspecs=refspecs)
+ GitTools.fetch(io, repo, url, refspecs = refspecs)
end
tree = try
LibGit2.GitObject(repo, git_hash)
@@ -786,9 +918,9 @@ function install_git(
end
end
-function collect_artifacts(pkg_root::String; platform::AbstractPlatform=HostPlatform())
+function collect_artifacts(pkg_root::String; platform::AbstractPlatform = HostPlatform(), include_lazy::Bool = false)
# Check to see if this package has an (Julia)Artifacts.toml
- artifacts_tomls = Tuple{String,Base.TOML.TOMLDict}[]
+ artifacts_tomls = Tuple{String, Base.TOML.TOMLDict}[]
for f in artifact_names
artifacts_toml = joinpath(pkg_root, f)
if isfile(artifacts_toml)
@@ -803,14 +935,14 @@ function collect_artifacts(pkg_root::String; platform::AbstractPlatform=HostPlat
meta_toml = String(read(select_cmd))
res = TOML.tryparse(meta_toml)
if res isa TOML.ParserError
- errstr = sprint(showerror, res; context=stderr)
+ errstr = sprint(showerror, res; context = stderr)
pkgerror("failed to parse TOML output from running $(repr(selector_path)), got: \n$errstr")
else
push!(artifacts_tomls, (artifacts_toml, TOML.parse(meta_toml)))
end
else
# Otherwise, use the standard selector from `Artifacts`
- artifacts = select_downloadable_artifacts(artifacts_toml; platform)
+ artifacts = select_downloadable_artifacts(artifacts_toml; platform, include_lazy)
push!(artifacts_tomls, (artifacts_toml, artifacts))
end
break
@@ -827,28 +959,40 @@ mutable struct DownloadState
const bar::MiniProgressBar
end
-function download_artifacts(ctx::Context;
- platform::AbstractPlatform=HostPlatform(),
- julia_version = VERSION,
- verbose::Bool=false)
+function download_artifacts(
+ ctx::Context;
+ platform::AbstractPlatform = HostPlatform(),
+ julia_version = VERSION,
+ verbose::Bool = false,
+ io::IO = stderr_f(),
+ include_lazy::Bool = false
+ )
env = ctx.env
io = ctx.io
fancyprint = can_fancyprint(io)
- pkg_roots = String[]
+ pkg_info = Tuple{String, Union{Base.UUID, Nothing}}[]
for (uuid, pkg) in env.manifest
pkg = manifest_info(env.manifest, uuid)
pkg_root = source_path(env.manifest_file, pkg, julia_version)
- pkg_root === nothing || push!(pkg_roots, pkg_root)
+ pkg_root === nothing || push!(pkg_info, (pkg_root, uuid))
end
- push!(pkg_roots, dirname(env.project_file))
+ push!(pkg_info, (dirname(env.project_file), env.pkg !== nothing ? env.pkg.uuid : nothing))
download_jobs = Dict{SHA1, Function}()
+ # Check what registries the current pkg server tracks
+ # Disable if precompiling to not access internet
+ server_registry_info = if Base.JLOptions().incremental == 0
+ Registry.pkg_server_registry_info()
+ else
+ nothing
+ end
+
print_lock = Base.ReentrantLock() # for non-fancyprint printing
download_states = Dict{SHA1, DownloadState}()
errors = Channel{Any}(Inf)
- is_done = false
+ is_done = Ref{Bool}(false)
ansi_moveup(n::Int) = string("\e[", n, "A")
ansi_movecol1 = "\e[1G"
ansi_cleartoend = "\e[0J"
@@ -856,49 +1000,59 @@ function download_artifacts(ctx::Context;
ansi_enablecursor = "\e[?25h"
ansi_disablecursor = "\e[?25l"
- all_collected_artifacts = reduce(vcat, map(pkg_root -> collect_artifacts(pkg_root; platform), pkg_roots))
- used_artifact_tomls = Set{String}(map(first, all_collected_artifacts))
- longest_name_length = maximum(all_collected_artifacts; init=0) do (artifacts_toml, artifacts)
- maximum(textwidth, keys(artifacts); init=0)
+ all_collected_artifacts = reduce(
+ vcat, map(
+ ((pkg_root, pkg_uuid),) ->
+ map(ca -> (ca[1], ca[2], pkg_uuid), collect_artifacts(pkg_root; platform, include_lazy)), pkg_info
+ )
+ )
+ used_artifact_tomls = Set{String}(map(ca -> ca[1], all_collected_artifacts))
+ longest_name_length = maximum(all_collected_artifacts; init = 0) do (artifacts_toml, artifacts, pkg_uuid)
+ maximum(textwidth, keys(artifacts); init = 0)
end
- for (artifacts_toml, artifacts) in all_collected_artifacts
+ for (artifacts_toml, artifacts, pkg_uuid) in all_collected_artifacts
# For each Artifacts.toml, install each artifact we've collected from it
for name in keys(artifacts)
local rname = rpad(name, longest_name_length)
- local hash = SHA1(artifacts[name]["git-tree-sha1"])
- local bar = MiniProgressBar(;header=rname, main=false, indent=2, color = Base.info_color(), mode=:data, always_reprint=true)
+ local hash = SHA1(artifacts[name]["git-tree-sha1"]::String)
+ local bar = MiniProgressBar(; header = rname, main = false, indent = 2, color = Base.info_color()::Symbol, mode = :data, always_reprint = true)
local dstate = DownloadState(:ready, "", time_ns(), Base.ReentrantLock(), bar)
- function progress(total, current; status="")
+ function progress(total, current; status = "")
local t = time_ns()
if isempty(status)
dstate.bar.max = total
dstate.bar.current = current
end
- lock(dstate.status_lock) do
+ return lock(dstate.status_lock) do
dstate.status = status
dstate.status_update_time = t
end
end
+ # Check if the current package is eligible for PkgServer artifact downloads
+ local pkg_server_eligible = pkg_uuid !== nothing && Registry.is_pkg_in_pkgserver_registry(pkg_uuid, server_registry_info, ctx.registries)
+
# returns a string if exists, or function that downloads the artifact if not
- local ret = ensure_artifact_installed(name, artifacts[name], artifacts_toml;
- verbose, quiet_download=!(usable_io(io)), io, progress)
+ local ret = ensure_artifact_installed(
+ name, artifacts[name], artifacts_toml;
+ pkg_server_eligible, verbose, quiet_download = !(usable_io(io)), io, progress
+ )
if ret isa Function
download_states[hash] = dstate
download_jobs[hash] =
() -> begin
- try
- dstate.state = :running
- ret()
- if !fancyprint
- @lock print_lock printpkgstyle(io, :Installed, "artifact $rname $(MiniProgressBars.pkg_format_bytes(dstate.bar.max; sigdigits=1))")
- end
- catch
- dstate.state = :failed
- rethrow()
- else
- dstate.state = :done
+ try
+ dstate.state = :running
+ ret()
+ if !fancyprint && dstate.bar.max > 1 # if another process downloaded, then max is never set greater than 1
+ @lock print_lock printpkgstyle(io, :Installed, "artifact $rname $(MiniProgressBars.pkg_format_bytes(dstate.bar.max; sigdigits = 1))")
end
+ catch
+ dstate.state = :failed
+ rethrow()
+ else
+ dstate.state = :done
end
+ end
end
end
end
@@ -909,30 +1063,30 @@ function download_artifacts(ctx::Context;
try
print(io, ansi_disablecursor)
first = true
- timer = Timer(0, interval=1/10)
+ timer = Timer(0, interval = 1 / 10)
# TODO: Implement as a new MiniMultiProgressBar
- main_bar = MiniProgressBar(; indent=2, header = "Installing artifacts", color = :green, mode = :int, always_reprint=true)
+ main_bar = MiniProgressBar(; indent = 2, header = "Installing artifacts", color = :green, mode = :int, always_reprint = true)
main_bar.max = length(download_states)
- while !is_done
+ while !is_done[]
main_bar.current = count(x -> x.state == :done, values(download_states))
- str = sprint(context=io) do iostr
+ local str = sprint(context = io) do iostr
first || print(iostr, ansi_cleartoend)
n_printed = 1
- show_progress(iostr, main_bar; carriagereturn=false)
+ show_progress(iostr, main_bar; carriagereturn = false)
println(iostr)
- for dstate in sort!(collect(values(download_states)), by=v->v.bar.max, rev=true)
- local status, status_update_time = lock(()->(dstate.status, dstate.status_update_time), dstate.status_lock)
+ for dstate in sort!(collect(values(download_states)), by = v -> v.bar.max, rev = true)
+ local status, status_update_time = lock(() -> (dstate.status, dstate.status_update_time), dstate.status_lock)
# only update the bar's status message if it is stalled for at least 0.5 s.
# If the new status message is empty, go back to showing the bar without waiting.
if isempty(status) || time_ns() - status_update_time > UInt64(500_000_000)
dstate.bar.status = status
end
dstate.state == :running && (dstate.bar.max > 1000 || !isempty(dstate.bar.status)) || continue
- show_progress(iostr, dstate.bar; carriagereturn=false)
+ show_progress(iostr, dstate.bar; carriagereturn = false)
println(iostr)
n_printed += 1
end
- is_done || print(iostr, ansi_moveup(n_printed), ansi_movecol1)
+ is_done[] || print(iostr, ansi_moveup(n_printed), ansi_movecol1)
first = false
end
print(io, str)
@@ -940,7 +1094,7 @@ function download_artifacts(ctx::Context;
end
print(io, ansi_cleartoend)
main_bar.current = count(x -> x[2].state == :done, download_states)
- show_progress(io, main_bar; carriagereturn=false)
+ show_progress(io, main_bar; carriagereturn = false)
println(io)
catch e
e isa InterruptException || rethrow()
@@ -953,26 +1107,26 @@ function download_artifacts(ctx::Context;
printpkgstyle(io, :Installing, "$(length(download_jobs)) artifacts")
end
sema = Base.Semaphore(ctx.num_concurrent_downloads)
- interrupted = false
+ interrupted = Ref{Bool}(false)
@sync for f in values(download_jobs)
- interrupted && break
+ interrupted[] && break
Base.acquire(sema)
Threads.@spawn try
f()
catch e
- e isa InterruptException && (interrupted = true)
+ e isa InterruptException && (interrupted[] = true)
put!(errors, e)
finally
Base.release(sema)
end
end
- is_done = true
+ is_done[] = true
fancyprint && wait(t_print)
close(errors)
if !isempty(errors)
all_errors = collect(errors)
- str = sprint(context=io) do iostr
+ local str = sprint(context = io) do iostr
for e in all_errors
Base.showerror(iostr, e)
length(all_errors) > 1 && println(iostr)
@@ -982,12 +1136,11 @@ function download_artifacts(ctx::Context;
end
end
- for f in used_artifact_tomls
- write_env_usage(f, "artifact_usage.toml")
- end
+
+ return write_env_usage(used_artifact_tomls, "artifact_usage.toml")
end
-function check_artifacts_downloaded(pkg_root::String; platform::AbstractPlatform=HostPlatform())
+function check_artifacts_downloaded(pkg_root::String; platform::AbstractPlatform = HostPlatform())
for (artifacts_toml, artifacts) in collect_artifacts(pkg_root; platform)
for name in keys(artifacts)
if !artifact_exists(Base.SHA1(artifacts[name]["git-tree-sha1"]))
@@ -1014,15 +1167,30 @@ function find_urls(registries::Vector{Registry.RegistryInstance}, uuid::UUID)
end
-function download_source(ctx::Context; readonly=true)
- pkgs_to_install = NamedTuple{(:pkg, :urls, :path), Tuple{PackageEntry, Set{String}, String}}[]
- for pkg in values(ctx.env.manifest)
+download_source(ctx::Context; readonly = true) = download_source(ctx, values(ctx.env.manifest); readonly)
+
+function download_source(ctx::Context, pkgs; readonly = true)
+ pidfile_stale_age = 10 # recommended value is about 3-5x an estimated normal download time (i.e. 2-3s)
+ pkgs_to_install = NamedTuple{(:pkg, :urls, :path), Tuple{eltype(pkgs), Set{String}, String}}[]
+ for pkg in pkgs
tracking_registered_version(pkg, ctx.julia_version) || continue
path = source_path(ctx.env.manifest_file, pkg, ctx.julia_version)
path === nothing && continue
- ispath(path) && continue
+ if ispath(path) && iswritable(path)
+ pidfile = path * ".pid"
+ else
+ # If the path is not writable, we cannot create a pidfile there so use one in the first depot.
+ # (pidlocking probably isn't needed as in this case the package source logically is alredy installed
+ # in the readonly depot, but keep the pidfile logic for consistency)
+ dir = joinpath(depots1(), "packages", pkg.name)
+ mkpath(dir)
+ iswritable(dir) || pkgerror("The primary depot is not writable")
+ pidfile = joinpath(dir, basename(path) * ".pid")
+ end
+
+ FileWatching.mkpidlock(() -> ispath(path), pidfile, stale_age = pidfile_stale_age) && continue
urls = find_urls(ctx.registries, pkg.uuid)
- push!(pkgs_to_install, (;pkg, urls, path))
+ push!(pkgs_to_install, (; pkg, urls, path))
end
length(pkgs_to_install) == 0 && return Set{UUID}()
@@ -1033,7 +1201,7 @@ function download_source(ctx::Context; readonly=true)
missed_packages = eltype(pkgs_to_install)[]
widths = [textwidth(pkg.name) for (pkg, _) in pkgs_to_install]
- max_name = maximum(widths; init=0)
+ max_name = maximum(widths; init = 0)
# Check what registries the current pkg server tracks
# Disable if precompiling to not access internet
@@ -1043,7 +1211,8 @@ function download_source(ctx::Context; readonly=true)
nothing
end
- @sync begin
+ # use eager throw version
+ Base.Experimental.@sync begin
jobs = Channel{eltype(pkgs_to_install)}(ctx.num_concurrent_downloads)
results = Channel(ctx.num_concurrent_downloads)
@@ -1053,61 +1222,68 @@ function download_source(ctx::Context; readonly=true)
end
end
- for i in 1:ctx.num_concurrent_downloads
+ for i in 1:ctx.num_concurrent_downloads # (default 8)
@async begin
for (pkg, urls, path) in jobs
- if ctx.use_git_for_all_downloads
- put!(results, (pkg, false, (urls, path)))
- continue
- end
- try
- archive_urls = Pair{String,Bool}[]
+ mkpath(dirname(path)) # the `packages/Package` dir needs to exist for the pidfile to be created
+ FileWatching.mkpidlock(path * ".pid", stale_age = pidfile_stale_age) do
+ if ispath(path)
+ put!(results, (pkg, nothing, (urls, path)))
+ return
+ end
+ if ctx.use_git_for_all_downloads
+ put!(results, (pkg, false, (urls, path)))
+ return
+ end
+ archive_urls = Pair{String, Bool}[]
# Check if the current package is available in one of the registries being tracked by the pkg server
# In that case, download from the package server
- if server_registry_info !== nothing
+ if Registry.is_pkg_in_pkgserver_registry(pkg.uuid, server_registry_info, ctx.registries)
server, registry_info = server_registry_info
- for reg in ctx.registries
- if reg.uuid in keys(registry_info)
- if haskey(reg, pkg.uuid)
- url = "$server/package/$(pkg.uuid)/$(pkg.tree_hash)"
- push!(archive_urls, url => true)
- break
- end
- end
- end
+ url = "$server/package/$(pkg.uuid)/$(pkg.tree_hash)"
+ push!(archive_urls, url => true)
end
for repo_url in urls
url = get_archive_url_for_version(repo_url, pkg.tree_hash)
url !== nothing && push!(archive_urls, url => false)
end
- success = install_archive(archive_urls, pkg.tree_hash, path, io=ctx.io)
- if success && readonly
- set_readonly(path) # In add mode, files should be read-only
- end
- if ctx.use_only_tarballs_for_downloads && !success
- pkgerror("failed to get tarball from $(urls)")
+ try
+ success = install_archive(archive_urls, pkg.tree_hash, path, io = ctx.io)
+ if success && readonly
+ set_readonly(path) # In add mode, files should be read-only
+ end
+ if ctx.use_only_tarballs_for_downloads && !success
+ pkgerror("failed to get tarball from $(urls)")
+ end
+ put!(results, (pkg, success, (urls, path)))
+ catch err
+ put!(results, (pkg, err, catch_backtrace()))
end
- put!(results, (pkg, success, (urls, path)))
- catch err
- put!(results, (pkg, err, catch_backtrace()))
end
end
end
end
- bar = MiniProgressBar(; indent=1, header = "Downloading packages", color = Base.info_color(),
- mode=:int, always_reprint=true)
+ bar = MiniProgressBar(;
+ indent = 1, header = "Downloading packages", color = Base.info_color(),
+ mode = :int, always_reprint = true
+ )
bar.max = length(pkgs_to_install)
fancyprint = can_fancyprint(ctx.io)
try
for i in 1:length(pkgs_to_install)
- pkg::PackageEntry, exc_or_success, bt_or_pathurls = take!(results)
- exc_or_success isa Exception && pkgerror("Error when installing package $(pkg.name):\n",
- sprint(Base.showerror, exc_or_success, bt_or_pathurls))
- success, (urls, path) = exc_or_success, bt_or_pathurls
+ pkg::eltype(pkgs), exc_or_success_or_nothing, bt_or_pathurls = take!(results)
+ if exc_or_success_or_nothing isa Exception
+ exc = exc_or_success_or_nothing
+ pkgerror("Error when installing package $(pkg.name):\n", sprint(Base.showerror, exc, bt_or_pathurls))
+ end
+ if exc_or_success_or_nothing === nothing
+ continue # represents when another process did the install
+ end
+ success, (urls, path) = exc_or_success_or_nothing, bt_or_pathurls
success || push!(missed_packages, (; pkg, urls, path))
bar.current = i
- str = sprint(; context=ctx.io) do io
+ str = sprint(; context = ctx.io) do io
if success
fancyprint && print_progress_bottom(io)
vstr = if pkg.version !== nothing
@@ -1132,16 +1308,18 @@ function download_source(ctx::Context; readonly=true)
# Use LibGit2 to download any remaining packages #
##################################################
for (pkg, urls, path) in missed_packages
- uuid = pkg.uuid
- install_git(ctx.io, pkg.uuid, pkg.name, pkg.tree_hash, urls, path)
- readonly && set_readonly(path)
- vstr = if pkg.version !== nothing
- "v$(pkg.version)"
- else
- short_treehash = string(pkg.tree_hash)[1:16]
- "[$short_treehash]"
+ FileWatching.mkpidlock(path * ".pid", stale_age = pidfile_stale_age) do
+ ispath(path) && return
+ install_git(ctx.io, pkg.uuid, pkg.name, pkg.tree_hash, urls, path)
+ readonly && set_readonly(path)
+ vstr = if pkg.version !== nothing
+ "v$(pkg.version)"
+ else
+ short_treehash = string(pkg.tree_hash)[1:16]
+ "[$short_treehash]"
+ end
+ printpkgstyle(ctx.io, :Installed, string(rpad(pkg.name * " ", max_name + 2, "─"), " ", vstr))
end
- printpkgstyle(ctx.io, :Installed, string(rpad(pkg.name * " ", max_name + 2, "─"), " ", vstr))
end
return Set{UUID}(entry.pkg.uuid for entry in pkgs_to_install)
@@ -1192,10 +1370,11 @@ function prune_deps(iterator, keep::Set{UUID})
end
clean && break
end
+ return
end
function record_project_hash(env::EnvCache)
- env.manifest.other["project_hash"] = Types.workspace_resolve_hash(env)
+ return env.manifest.other["project_hash"] = Types.workspace_resolve_hash(env)
end
#########
@@ -1232,16 +1411,16 @@ function any_package_not_installed(manifest::Manifest)
return false
end
-function build(ctx::Context, uuids::Set{UUID}, verbose::Bool)
+function build(ctx::Context, uuids::Set{UUID}, verbose::Bool; allow_reresolve::Bool = true)
if any_package_not_installed(ctx.env.manifest) || !isfile(ctx.env.manifest_file)
Pkg.instantiate(ctx, allow_build = false, allow_autoprecomp = false)
end
all_uuids = get_deps(ctx.env, uuids)
- build_versions(ctx, all_uuids; verbose)
+ return build_versions(ctx, all_uuids; verbose, allow_reresolve)
end
-function dependency_order_uuids(env::EnvCache, uuids::Vector{UUID})::Dict{UUID,Int}
- order = Dict{UUID,Int}()
+function dependency_order_uuids(env::EnvCache, uuids::Vector{UUID})::Dict{UUID, Int}
+ order = Dict{UUID, Int}()
seen = UUID[]
k::Int = 0
function visit(uuid::UUID)
@@ -1257,7 +1436,7 @@ function dependency_order_uuids(env::EnvCache, uuids::Vector{UUID})::Dict{UUID,I
end
foreach(visit, deps)
pop!(seen)
- order[uuid] = k += 1
+ return order[uuid] = k += 1
end
visit(uuid::String) = visit(UUID(uuid))
foreach(visit, uuids)
@@ -1266,26 +1445,26 @@ end
function gen_build_code(build_file::String; inherit_project::Bool = false)
code = """
- $(Base.load_path_setup_code(false))
- cd($(repr(dirname(build_file))))
- include($(repr(build_file)))
- """
+ $(Base.load_path_setup_code(false))
+ cd($(repr(dirname(build_file))))
+ include($(repr(build_file)))
+ """
# This will make it so that running Pkg.build runs the build in a session with --startup=no
# *unless* the parent julia session is started with --startup=yes explicitly.
startup_flag = Base.JLOptions().startupfile == 1 ? "yes" : "no"
return ```
- $(Base.julia_cmd()) -O0 --color=no --history-file=no
- --startup-file=$startup_flag
- $(inherit_project ? `--project=$(Base.active_project())` : ``)
- --eval $code
- ```
+ $(Base.julia_cmd()) -O0 --color=no --history-file=no
+ --startup-file=$startup_flag
+ $(inherit_project ? `--project=$(Base.active_project())` : ``)
+ --eval $code
+ ```
end
with_load_path(f::Function, new_load_path::String) = with_load_path(f, [new_load_path])
function with_load_path(f::Function, new_load_path::Vector{String})
old_load_path = copy(Base.LOAD_PATH)
copy!(Base.LOAD_PATH, new_load_path)
- try
+ return try
f()
finally
copy!(LOAD_PATH, old_load_path)
@@ -1297,9 +1476,9 @@ pkg_scratchpath() = joinpath(depots1(), "scratchspaces", PkgUUID)
builddir(source_path::String) = joinpath(source_path, "deps")
buildfile(source_path::String) = joinpath(builddir(source_path), "build.jl")
-function build_versions(ctx::Context, uuids::Set{UUID}; verbose=false)
+function build_versions(ctx::Context, uuids::Set{UUID}; verbose = false, allow_reresolve::Bool = true)
# collect builds for UUIDs with `deps/build.jl` files
- builds = Tuple{UUID,String,String,VersionNumber}[]
+ builds = Tuple{UUID, String, String, VersionNumber}[]
for uuid in uuids
is_stdlib(uuid) && continue
if Types.is_project_uuid(ctx.env, uuid)
@@ -1324,84 +1503,94 @@ function build_versions(ctx::Context, uuids::Set{UUID}; verbose=false)
# toposort builds by dependencies
order = dependency_order_uuids(ctx.env, UUID[first(build) for build in builds])
sort!(builds, by = build -> order[first(build)])
- max_name = maximum(build->textwidth(build[2]), builds; init=0)
+ max_name = maximum(build -> textwidth(build[2]), builds; init = 0)
- bar = MiniProgressBar(; indent=2, header = "Building packages", color = Base.info_color(),
- mode=:int, always_reprint=true)
+ bar = MiniProgressBar(;
+ indent = 2, header = "Building packages", color = Base.info_color(),
+ mode = :int, always_reprint = true
+ )
bar.max = length(builds)
fancyprint = can_fancyprint(ctx.io)
fancyprint && start_progress(ctx.io, bar)
# build each package versions in a child process
try
- for (n, (uuid, name, source_path, version)) in enumerate(builds)
- pkg = PackageSpec(;uuid=uuid, name=name, version=version)
- build_file = buildfile(source_path)
- # compatibility shim
- local build_project_override, build_project_preferences
- if isfile(projectfile_path(builddir(source_path)))
- build_project_override = nothing
- with_load_path([builddir(source_path), Base.LOAD_PATH...]) do
- build_project_preferences = Base.get_preferences()
- end
- else
- build_project_override = gen_target_project(ctx, pkg, source_path, "build")
- with_load_path([something(projectfile_path(source_path)), Base.LOAD_PATH...]) do
- build_project_preferences = Base.get_preferences()
+ for (n, (uuid, name, source_path, version)) in enumerate(builds)
+ pkg = PackageSpec(; uuid = uuid, name = name, version = version)
+ build_file = buildfile(source_path)
+ # compatibility shim
+ local build_project_override, build_project_preferences
+ if isfile(projectfile_path(builddir(source_path)))
+ build_project_override = nothing
+ with_load_path([builddir(source_path), Base.LOAD_PATH...]) do
+ build_project_preferences = Base.get_preferences()
+ end
+ else
+ build_project_override = gen_target_project(ctx, pkg, source_path, "build")
+ with_load_path([something(projectfile_path(source_path)), Base.LOAD_PATH...]) do
+ build_project_preferences = Base.get_preferences()
+ end
end
- end
- # Put log output in Pkg's scratchspace if the package is content addressed
- # by tree sha and in the build directory if it is tracked by path etc.
- entry = manifest_info(ctx.env.manifest, uuid)
- if entry !== nothing && entry.tree_hash !== nothing
- key = string(entry.tree_hash)
- scratch = joinpath(pkg_scratchpath(), key)
- mkpath(scratch)
- log_file = joinpath(scratch, "build.log")
- # Associate the logfile with the package being built
- dict = Dict{String,Any}(scratch => [
- Dict{String,Any}("time" => Dates.now(), "parent_projects" => [projectfile_path(source_path)])
- ])
- open(joinpath(depots1(), "logs", "scratch_usage.toml"), "a") do io
- TOML.print(io, dict)
+ # Put log output in Pkg's scratchspace if the package is content addressed
+ # by tree sha and in the build directory if it is tracked by path etc.
+ entry = manifest_info(ctx.env.manifest, uuid)
+ if entry !== nothing && entry.tree_hash !== nothing
+ key = string(entry.tree_hash)
+ scratch = joinpath(pkg_scratchpath(), key)
+ mkpath(scratch)
+ log_file = joinpath(scratch, "build.log")
+ # Associate the logfile with the package being built
+ dict = Dict{String, Any}(
+ scratch => [
+ Dict{String, Any}("time" => Dates.now(), "parent_projects" => [projectfile_path(source_path)]),
+ ]
+ )
+ open(joinpath(depots1(), "logs", "scratch_usage.toml"), "a") do io
+ TOML.print(io, dict)
+ end
+ else
+ log_file = splitext(build_file)[1] * ".log"
end
- else
- log_file = splitext(build_file)[1] * ".log"
- end
-
- fancyprint && print_progress_bottom(ctx.io)
- printpkgstyle(ctx.io, :Building,
- rpad(name * " ", max_name + 1, "─") * "→ " * pathrepr(log_file))
- bar.current = n-1
+ fancyprint && print_progress_bottom(ctx.io)
- fancyprint && show_progress(ctx.io, bar)
-
- let log_file=log_file
- sandbox(ctx, pkg, builddir(source_path), build_project_override; preferences=build_project_preferences) do
- flush(ctx.io)
- ok = open(log_file, "w") do log
- std = verbose ? ctx.io : log
- success(pipeline(gen_build_code(buildfile(source_path)),
- stdout=std, stderr=std))
- end
- ok && return
- n_lines = isinteractive() ? 100 : 5000
- # TODO: Extract last n lines more efficiently
- log_lines = readlines(log_file)
- log_show = join(log_lines[max(1, length(log_lines) - n_lines):end], '\n')
- full_log_at, last_lines =
- if length(log_lines) > n_lines
- "\n\nFull log at $log_file",
- ", showing the last $n_lines of log"
- else
- "", ""
+ printpkgstyle(
+ ctx.io, :Building,
+ rpad(name * " ", max_name + 1, "─") * "→ " * pathrepr(log_file)
+ )
+ bar.current = n - 1
+
+ fancyprint && show_progress(ctx.io, bar)
+
+ let log_file = log_file
+ sandbox(ctx, pkg, builddir(source_path), build_project_override; preferences = build_project_preferences, allow_reresolve) do
+ flush(ctx.io)
+ ok = open(log_file, "w") do log
+ std = verbose ? ctx.io : log
+ success(
+ pipeline(
+ gen_build_code(buildfile(source_path)),
+ stdout = std, stderr = std
+ )
+ )
+ end
+ ok && return
+ n_lines = isinteractive() ? 100 : 5000
+ # TODO: Extract last n lines more efficiently
+ log_lines = readlines(log_file)
+ log_show = join(log_lines[max(1, length(log_lines) - n_lines):end], '\n')
+ full_log_at, last_lines =
+ if length(log_lines) > n_lines
+ "\n\nFull log at $log_file",
+ ", showing the last $n_lines of log"
+ else
+ "", ""
+ end
+ pkgerror("Error building `$(pkg.name)`$last_lines: \n$log_show$full_log_at")
end
- pkgerror("Error building `$(pkg.name)`$last_lines: \n$log_show$full_log_at")
end
end
- end
finally
fancyprint && end_progress(ctx.io, bar)
end
@@ -1482,47 +1671,53 @@ function rm(ctx::Context, pkgs::Vector{PackageSpec}; mode::PackageMode)
record_project_hash(ctx.env)
# update project & manifest
write_env(ctx.env)
- show_update(ctx.env, ctx.registries; io=ctx.io)
+ return show_update(ctx.env, ctx.registries; io = ctx.io)
end
-update_package_add(ctx::Context, pkg::PackageSpec, ::Nothing, source_path, source_repo, is_dep::Bool) = pkg
-function update_package_add(ctx::Context, pkg::PackageSpec, entry::PackageEntry, source_path, source_repo, is_dep::Bool)
+update_package_add(ctx::Context, pkg::PackageSpec, ::Nothing, is_dep::Bool) = pkg
+function update_package_add(ctx::Context, pkg::PackageSpec, entry::PackageEntry, is_dep::Bool)
if entry.pinned
if pkg.version == VersionSpec()
println(ctx.io, "`$(pkg.name)` is pinned at `v$(entry.version)`: maintaining pinned version")
end
- return PackageSpec(; uuid=pkg.uuid, name=pkg.name, pinned=true,
- version=entry.version, tree_hash=entry.tree_hash,
- path=entry.path, repo=entry.repo)
+ return PackageSpec(;
+ uuid = pkg.uuid, name = pkg.name, pinned = true,
+ version = entry.version, tree_hash = entry.tree_hash,
+ path = entry.path, repo = entry.repo
+ )
end
if entry.path !== nothing || entry.repo.source !== nothing || pkg.repo.source !== nothing
return pkg # overwrite everything, nothing to copy over
end
if is_stdlib(pkg.uuid)
return pkg # stdlibs are not versioned like other packages
- elseif is_dep && ((isa(pkg.version, VersionNumber) && entry.version == pkg.version) ||
- (!isa(pkg.version, VersionNumber) && entry.version ∈ pkg.version))
+ elseif is_dep && (
+ (isa(pkg.version, VersionNumber) && entry.version == pkg.version) ||
+ (!isa(pkg.version, VersionNumber) && entry.version ∈ pkg.version)
+ )
# leave the package as is at the installed version
- return PackageSpec(; uuid=pkg.uuid, name=pkg.name, version=entry.version,
- tree_hash=entry.tree_hash)
+ return PackageSpec(;
+ uuid = pkg.uuid, name = pkg.name, version = entry.version,
+ tree_hash = entry.tree_hash
+ )
end
# adding a new version not compatible with the old version, so we just overwrite
return pkg
end
# Update registries AND read them back in.
-function update_registries(ctx::Context; force::Bool=true, kwargs...)
+function update_registries(ctx::Context; force::Bool = true, kwargs...)
OFFLINE_MODE[] && return
!force && UPDATED_REGISTRY_THIS_SESSION[] && return
- Registry.update(; io=ctx.io, kwargs...)
+ Registry.update(; io = ctx.io, kwargs...)
copy!(ctx.registries, Registry.reachable_registries())
- UPDATED_REGISTRY_THIS_SESSION[] = true
+ return UPDATED_REGISTRY_THIS_SESSION[] = true
end
function is_all_registered(registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec})
pkgs = filter(tracking_registered_version, pkgs)
for pkg in pkgs
- if !any(r->haskey(r, pkg.uuid), registries)
+ if !any(r -> haskey(r, pkg.uuid), registries)
return pkg
end
end
@@ -1530,9 +1725,32 @@ function is_all_registered(registries::Vector{Registry.RegistryInstance}, pkgs::
end
function check_registered(registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec})
+ if isempty(registries) && !isempty(pkgs)
+ registry_pkgs = filter(tracking_registered_version, pkgs)
+ if !isempty(registry_pkgs)
+ pkgerror("no registries have been installed. Cannot resolve the following packages:\n$(join(map(pkg -> " " * err_rep(pkg), registry_pkgs), "\n"))")
+ end
+ end
pkg = is_all_registered(registries, pkgs)
if pkg isa PackageSpec
- pkgerror("expected package $(err_rep(pkg)) to be registered")
+ msg = "expected package $(err_rep(pkg)) to be registered"
+ # check if the name exists in the registry with a different uuid
+ if pkg.name !== nothing
+ reg_uuid = Pair{String, Vector{UUID}}[]
+ for reg in registries
+ uuids = Registry.uuids_from_name(reg, pkg.name)
+ if !isempty(uuids)
+ push!(reg_uuid, reg.name => uuids)
+ end
+ end
+ if !isempty(reg_uuid)
+ msg *= "\n You may have provided the wrong UUID for package $(pkg.name).\n Found the following UUIDs for that name:"
+ for (reg, uuids) in reg_uuid
+ msg *= "\n - $(join(uuids, ", ")) from registry: $reg"
+ end
+ end
+ end
+ pkgerror(msg)
end
return nothing
end
@@ -1544,29 +1762,38 @@ function assert_can_add(ctx::Context, pkgs::Vector{PackageSpec})
# package with the same name exist in the project: assert that they have the same uuid
existing_uuid = get(ctx.env.project.deps, pkg.name, pkg.uuid)
existing_uuid == pkg.uuid ||
- pkgerror("""Refusing to add package $(err_rep(pkg)).
- Package `$(pkg.name)=$(existing_uuid)` with the same name already exists as a direct dependency.
- To remove the existing package, use `import Pkg; Pkg.rm("$(pkg.name)")`.
- """)
+ pkgerror(
+ """Refusing to add package $(err_rep(pkg)).
+ Package `$(pkg.name)=$(existing_uuid)` with the same name already exists as a direct dependency.
+ To remove the existing package, use `$(Pkg.in_repl_mode() ? """pkg> rm $(pkg.name)""" : """import Pkg; Pkg.rm("$(pkg.name)")""")`.
+ """
+ )
# package with the same uuid exist in the project: assert they have the same name
name = findfirst(==(pkg.uuid), ctx.env.project.deps)
name === nothing || name == pkg.name ||
- pkgerror("""Refusing to add package $(err_rep(pkg)).
- Package `$name=$(pkg.uuid)` with the same UUID already exists as a direct dependency.
- To remove the existing package, use `import Pkg; Pkg.rm("$name")`.
- """)
+ pkgerror(
+ """Refusing to add package $(err_rep(pkg)).
+ Package `$name=$(pkg.uuid)` with the same UUID already exists as a direct dependency.
+ To remove the existing package, use `$(Pkg.in_repl_mode() ? """pkg> rm $name""" : """import Pkg; Pkg.rm("$name")""")`.
+ """
+ )
# package with the same uuid exist in the manifest: assert they have the same name
entry = get(ctx.env.manifest, pkg.uuid, nothing)
entry === nothing || entry.name == pkg.name ||
- pkgerror("""Refusing to add package $(err_rep(pkg)).
- Package `$(entry.name)=$(pkg.uuid)` with the same UUID already exists in the manifest.
- To remove the existing package, use `import Pkg; Pkg.rm(Pkg.PackageSpec(uuid="$(pkg.uuid)"); mode=Pkg.PKGMODE_MANIFEST)`.
- """)
+ pkgerror(
+ """Refusing to add package $(err_rep(pkg)).
+ Package `$(entry.name)=$(pkg.uuid)` with the same UUID already exists in the manifest.
+ To remove the existing package, use `$(Pkg.in_repl_mode() ? """pkg> rm --manifest $(entry.name)=$(pkg.uuid)""" : """import Pkg; Pkg.rm(Pkg.PackageSpec(uuid="$(pkg.uuid)"); mode=Pkg.PKGMODE_MANIFEST)""")`.
+ """
+ )
end
+ return
end
-function tiered_resolve(env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}, julia_version,
- try_all_installed::Bool)
+function tiered_resolve(
+ env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}, julia_version,
+ try_all_installed::Bool
+ )
if try_all_installed
try # do not modify existing subgraph and only add installed versions of the new packages
@debug "tiered_resolve: trying PRESERVE_ALL_INSTALLED"
@@ -1609,29 +1836,47 @@ function targeted_resolve(env::EnvCache, registries::Vector{Registry.RegistryIns
return pkgs, deps_map
end
-function _resolve(io::IO, env::EnvCache, registries::Vector{Registry.RegistryInstance},
- pkgs::Vector{PackageSpec}, preserve::PreserveLevel, julia_version)
- printpkgstyle(io, :Resolving, "package versions...")
- if preserve == PRESERVE_TIERED_INSTALLED
- tiered_resolve(env, registries, pkgs, julia_version, true)
- elseif preserve == PRESERVE_TIERED
- tiered_resolve(env, registries, pkgs, julia_version, false)
- else
- targeted_resolve(env, registries, pkgs, preserve, julia_version)
+function _resolve(
+ io::IO, env::EnvCache, registries::Vector{Registry.RegistryInstance},
+ pkgs::Vector{PackageSpec}, preserve::PreserveLevel, julia_version
+ )
+ usingstrategy = preserve != PRESERVE_TIERED ? " using $preserve" : ""
+ printpkgstyle(io, :Resolving, "package versions$(usingstrategy)...")
+ return try
+ if preserve == PRESERVE_TIERED_INSTALLED
+ tiered_resolve(env, registries, pkgs, julia_version, true)
+ elseif preserve == PRESERVE_TIERED
+ tiered_resolve(env, registries, pkgs, julia_version, false)
+ else
+ targeted_resolve(env, registries, pkgs, preserve, julia_version)
+ end
+ catch err
+
+ if err isa Resolve.ResolverError
+ yanked_pkgs = filter(pkg -> is_pkgversion_yanked(pkg, registries), load_all_deps(env))
+ if !isempty(yanked_pkgs)
+ indent = " "^(Pkg.pkgstyle_indent)
+ yanked_str = join(map(pkg -> indent * " - " * err_rep(pkg, quotes = false) * " " * string(pkg.version), yanked_pkgs), "\n")
+ printpkgstyle(io, :Warning, """The following package versions were yanked from their registry and \
+ are not resolvable:\n$yanked_str""", color = Base.warn_color())
+ end
+ end
+ rethrow()
end
end
-function add(ctx::Context, pkgs::Vector{PackageSpec}, new_git=Set{UUID}();
- allow_autoprecomp::Bool=true, preserve::PreserveLevel=default_preserve(), platform::AbstractPlatform=HostPlatform(),
- target::Symbol=:deps)
+function add(
+ ctx::Context, pkgs::Vector{PackageSpec}, new_git = Set{UUID}();
+ allow_autoprecomp::Bool = true, preserve::PreserveLevel = default_preserve(), platform::AbstractPlatform = HostPlatform(),
+ target::Symbol = :deps
+ )
assert_can_add(ctx, pkgs)
# load manifest data
for (i, pkg) in pairs(pkgs)
delete!(ctx.env.project.weakdeps, pkg.name)
entry = manifest_info(ctx.env.manifest, pkg.uuid)
is_dep = any(uuid -> uuid == pkg.uuid, [uuid for (name, uuid) in ctx.env.project.deps])
- source_path, source_repo = get_path_repo(ctx.env.project, pkg.name)
- pkgs[i] = update_package_add(ctx, pkg, entry, source_path, source_repo, is_dep)
+ pkgs[i] = update_package_add(ctx, pkg, entry, is_dep)
end
names = (p.name for p in pkgs)
@@ -1652,11 +1897,11 @@ function add(ctx::Context, pkgs::Vector{PackageSpec}, new_git=Set{UUID}();
man_pkgs, deps_map = _resolve(ctx.io, ctx.env, ctx.registries, pkgs, preserve, ctx.julia_version)
update_manifest!(ctx.env, man_pkgs, deps_map, ctx.julia_version)
new_apply = download_source(ctx)
- fixups_from_projectfile!(ctx.env)
+ fixups_from_projectfile!(ctx)
# After downloading resolutionary packages, search for (Julia)Artifacts.toml files
# and ensure they are all downloaded and unpacked as well:
- download_artifacts(ctx, platform=platform, julia_version=ctx.julia_version)
+ download_artifacts(ctx, platform = platform, julia_version = ctx.julia_version)
# if env is a package add compat entries
if ctx.env.project.name !== nothing && ctx.env.project.uuid !== nothing
@@ -1674,7 +1919,7 @@ function add(ctx::Context, pkgs::Vector{PackageSpec}, new_git=Set{UUID}();
record_project_hash(ctx.env) # compat entries changed the hash after it was last recorded in update_manifest!
write_env(ctx.env) # write env before building
- show_update(ctx.env, ctx.registries; io=ctx.io)
+ show_update(ctx.env, ctx.registries; io = ctx.io)
build_versions(ctx, union(new_apply, new_git))
allow_autoprecomp && Pkg._auto_precompile(ctx)
else
@@ -1687,8 +1932,10 @@ function add(ctx::Context, pkgs::Vector{PackageSpec}, new_git=Set{UUID}();
end
# Input: name, uuid, and path
-function develop(ctx::Context, pkgs::Vector{PackageSpec}, new_git::Set{UUID};
- preserve::PreserveLevel=default_preserve(), platform::AbstractPlatform=HostPlatform())
+function develop(
+ ctx::Context, pkgs::Vector{PackageSpec}, new_git::Set{UUID};
+ preserve::PreserveLevel = default_preserve(), platform::AbstractPlatform = HostPlatform()
+ )
assert_can_add(ctx, pkgs)
# no need to look at manifest.. dev will just nuke whatever is there before
for pkg in pkgs
@@ -1699,11 +1946,11 @@ function develop(ctx::Context, pkgs::Vector{PackageSpec}, new_git::Set{UUID};
pkgs, deps_map = _resolve(ctx.io, ctx.env, ctx.registries, pkgs, preserve, ctx.julia_version)
update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version)
new_apply = download_source(ctx)
- fixups_from_projectfile!(ctx.env)
- download_artifacts(ctx; platform=platform, julia_version=ctx.julia_version)
+ fixups_from_projectfile!(ctx)
+ download_artifacts(ctx; platform = platform, julia_version = ctx.julia_version)
write_env(ctx.env) # write env before building
- show_update(ctx.env, ctx.registries; io=ctx.io)
- build_versions(ctx, union(new_apply, new_git))
+ show_update(ctx.env, ctx.registries; io = ctx.io)
+ return build_versions(ctx, union(new_apply, new_git))
end
# load version constraint
@@ -1714,7 +1961,9 @@ function up_load_versions!(ctx::Context, pkg::PackageSpec, entry::PackageEntry,
entry.version !== nothing || return false # no version to set
if entry.pinned || level == UPLEVEL_FIXED
pkg.version = entry.version
- pkg.tree_hash = entry.tree_hash
+ if pkg.path === nothing
+ pkg.tree_hash = entry.tree_hash
+ end
elseif entry.repo.source !== nothing || source_repo.source !== nothing # repo packages have a version but are treated specially
if source_repo.source !== nothing
pkg.repo = source_repo
@@ -1739,7 +1988,7 @@ function up_load_versions!(ctx::Context, pkg::PackageSpec, entry::PackageEntry,
r = level == UPLEVEL_PATCH ? VersionRange(ver.major, ver.minor) :
level == UPLEVEL_MINOR ? VersionRange(ver.major) :
level == UPLEVEL_MAJOR ? VersionRange() :
- error("unexpected upgrade level: $level")
+ error("unexpected upgrade level: $level")
pkg.version = VersionSpec(r)
end
return false
@@ -1754,13 +2003,15 @@ function up_load_manifest_info!(pkg::PackageSpec, entry::PackageEntry)
if pkg.path === nothing
pkg.path = entry.path
end
- pkg.pinned = entry.pinned
+ return pkg.pinned = entry.pinned
# `pkg.version` and `pkg.tree_hash` is set by `up_load_versions!`
end
-function load_manifest_deps_up(env::EnvCache, pkgs::Vector{PackageSpec}=PackageSpec[];
- preserve::PreserveLevel=PRESERVE_ALL)
+function load_manifest_deps_up(
+ env::EnvCache, pkgs::Vector{PackageSpec} = PackageSpec[];
+ preserve::PreserveLevel = PRESERVE_ALL
+ )
manifest = env.manifest
project = env.project
explicit_upgraded = Set(pkg.uuid for pkg in pkgs)
@@ -1795,28 +2046,35 @@ function load_manifest_deps_up(env::EnvCache, pkgs::Vector{PackageSpec}=PackageS
end
# The rest of the packages get fixed
- push!(pkgs, PackageSpec(
- uuid = uuid,
- name = entry.name,
- path = entry.path,
- pinned = entry.pinned,
- repo = entry.repo,
- tree_hash = entry.tree_hash, # TODO should tree_hash be changed too?
- version = something(entry.version, VersionSpec())
- ))
+ push!(
+ pkgs, PackageSpec(
+ uuid = uuid,
+ name = entry.name,
+ path = entry.path,
+ pinned = entry.pinned,
+ repo = entry.repo,
+ tree_hash = entry.tree_hash, # TODO should tree_hash be changed too?
+ version = something(entry.version, VersionSpec())
+ )
+ )
end
return pkgs
end
function targeted_resolve_up(env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}, preserve::PreserveLevel, julia_version)
- pkgs = load_manifest_deps_up(env, pkgs; preserve=preserve)
+ pkgs = load_manifest_deps_up(env, pkgs; preserve = preserve)
check_registered(registries, pkgs)
deps_map = resolve_versions!(env, registries, pkgs, julia_version, preserve == PRESERVE_ALL_INSTALLED)
return pkgs, deps_map
end
-function up(ctx::Context, pkgs::Vector{PackageSpec}, level::UpgradeLevel;
- skip_writing_project::Bool=false, preserve::Union{Nothing,PreserveLevel}=nothing)
+function up(
+ ctx::Context, pkgs::Vector{PackageSpec}, level::UpgradeLevel;
+ skip_writing_project::Bool = false, preserve::Union{Nothing, PreserveLevel} = nothing
+ )
+
+ requested_pkgs = pkgs
+
new_git = Set{UUID}()
# TODO check all pkg.version == VersionSpec()
# set version constraints according to `level`
@@ -1840,16 +2098,44 @@ function up(ctx::Context, pkgs::Vector{PackageSpec}, level::UpgradeLevel;
end
update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version)
new_apply = download_source(ctx)
- fixups_from_projectfile!(ctx.env)
- download_artifacts(ctx, julia_version=ctx.julia_version)
+ fixups_from_projectfile!(ctx)
+ download_artifacts(ctx, julia_version = ctx.julia_version)
write_env(ctx.env; skip_writing_project) # write env before building
- show_update(ctx.env, ctx.registries; io=ctx.io, hidden_upgrades_info = true)
- build_versions(ctx, union(new_apply, new_git))
+ show_update(ctx.env, ctx.registries; io = ctx.io, hidden_upgrades_info = true)
+
+ if length(requested_pkgs) == 1
+ pkg = only(requested_pkgs)
+ entry = manifest_info(ctx.env.manifest, pkg.uuid)
+ if entry === nothing || (entry.path === nothing && entry.repo.source === nothing)
+ # Get current version after the update
+ current_version = entry !== nothing ? entry.version : nothing
+ original_entry = manifest_info(ctx.env.original_manifest, pkg.uuid)
+ original_version = original_entry !== nothing ? original_entry.version : nothing
+
+ # Check if version didn't change and there's a newer version available
+ if current_version == original_version && current_version !== nothing
+ temp_pkg = PackageSpec(name = pkg.name, uuid = pkg.uuid, version = current_version)
+ cinfo = status_compat_info(temp_pkg, ctx.env, ctx.registries)
+ if cinfo !== nothing
+ packages_holding_back, max_version, max_version_compat = cinfo
+ if current_version < max_version
+ printpkgstyle(
+ ctx.io, :Info, "$(pkg.name) can be updated but at the cost of downgrading other packages. " *
+ "To force upgrade to the latest version, try `add $(pkg.name)@$(max_version)`", color = Base.info_color()
+ )
+ end
+ end
+ end
+ end
+ end
+
+ return build_versions(ctx, union(new_apply, new_git))
end
function update_package_pin!(registries::Vector{Registry.RegistryInstance}, pkg::PackageSpec, entry::Union{Nothing, PackageEntry})
if entry === nothing
- pkgerror("package $(err_rep(pkg)) not found in the manifest, run `Pkg.resolve()` and retry.")
+ cmd = Pkg.in_repl_mode() ? "pkg> resolve" : "Pkg.resolve()"
+ pkgerror("package $(err_rep(pkg)) not found in the manifest, run `$cmd` and retry.")
end
#if entry.pinned && pkg.version == VersionSpec()
@@ -1886,11 +2172,11 @@ function pin(ctx::Context, pkgs::Vector{PackageSpec})
update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version)
new = download_source(ctx)
- fixups_from_projectfile!(ctx.env)
- download_artifacts(ctx; julia_version=ctx.julia_version)
+ fixups_from_projectfile!(ctx)
+ download_artifacts(ctx; julia_version = ctx.julia_version)
write_env(ctx.env) # write env before building
- show_update(ctx.env, ctx.registries; io=ctx.io)
- build_versions(ctx, new)
+ show_update(ctx.env, ctx.registries; io = ctx.io)
+ return build_versions(ctx, new)
end
function update_package_free!(registries::Vector{Registry.RegistryInstance}, pkg::PackageSpec, entry::PackageEntry, err_if_free::Bool)
@@ -1910,22 +2196,24 @@ function update_package_free!(registries::Vector{Registry.RegistryInstance}, pkg
return # -> name, uuid
end
if err_if_free
- pkgerror("expected package $(err_rep(pkg)) to be pinned, tracking a path,",
- " or tracking a repository")
+ pkgerror(
+ "expected package $(err_rep(pkg)) to be pinned, tracking a path,",
+ " or tracking a repository"
+ )
end
return
end
# TODO: this is two technically different operations with the same name
# split into two subfunctions ...
-function free(ctx::Context, pkgs::Vector{PackageSpec}; err_if_free=true)
+function free(ctx::Context, pkgs::Vector{PackageSpec}; err_if_free = true)
for pkg in pkgs
entry = manifest_info(ctx.env.manifest, pkg.uuid)
delete!(ctx.env.project.sources, pkg.name)
update_package_free!(ctx.registries, pkg, entry, err_if_free)
end
- if any(pkg -> pkg.version == VersionSpec(), pkgs)
+ return if any(pkg -> pkg.version == VersionSpec(), pkgs)
pkgs = load_direct_deps(ctx.env, pkgs)
check_registered(ctx.registries, pkgs)
@@ -1934,38 +2222,45 @@ function free(ctx::Context, pkgs::Vector{PackageSpec}; err_if_free=true)
update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version)
new = download_source(ctx)
- fixups_from_projectfile!(ctx.env)
+ fixups_from_projectfile!(ctx)
download_artifacts(ctx)
write_env(ctx.env) # write env before building
- show_update(ctx.env, ctx.registries; io=ctx.io)
+ show_update(ctx.env, ctx.registries; io = ctx.io)
build_versions(ctx, new)
else
foreach(pkg -> manifest_info(ctx.env.manifest, pkg.uuid).pinned = false, pkgs)
write_env(ctx.env)
- show_update(ctx.env, ctx.registries; io=ctx.io)
+ show_update(ctx.env, ctx.registries; io = ctx.io)
end
end
function gen_test_code(source_path::String; test_args::Cmd)
test_file = testfile(source_path)
return """
- $(Base.load_path_setup_code(false))
- cd($(repr(dirname(test_file))))
- append!(empty!(ARGS), $(repr(test_args.exec)))
- include($(repr(test_file)))
- """
+ $(Base.load_path_setup_code(false))
+ cd($(repr(dirname(test_file))))
+ append!(empty!(ARGS), $(repr(test_args.exec)))
+ include($(repr(test_file)))
+ """
end
function get_threads_spec()
- if Threads.nthreads(:interactive) > 0
+ return if haskey(ENV, "JULIA_NUM_THREADS")
+ if isempty(ENV["JULIA_NUM_THREADS"])
+ throw(ArgumentError("JULIA_NUM_THREADS is set to an empty string. It is not clear what Pkg.test should set for `-t` on the test worker."))
+ end
+ # if set, prefer JULIA_NUM_THREADS because this is passed to the test worker via --threads
+ # which takes precedence in the worker
+ ENV["JULIA_NUM_THREADS"]
+ elseif Threads.nthreads(:interactive) > 0
"$(Threads.nthreads(:default)),$(Threads.nthreads(:interactive))"
else
"$(Threads.nthreads(:default))"
end
end
-function gen_subprocess_flags(source_path::String; coverage, julia_args)
+function gen_subprocess_flags(source_path::String; coverage, julia_args::Cmd)
coverage_arg = if coverage isa Bool
# source_path is the package root, not "src" so "ext" etc. is included
coverage ? string("@", source_path) : "none"
@@ -1990,7 +2285,7 @@ end
function with_temp_env(fn::Function, temp_env::String)
load_path = copy(LOAD_PATH)
active_project = Base.ACTIVE_PROJECT[]
- try
+ return try
push!(empty!(LOAD_PATH), "@", temp_env)
Base.ACTIVE_PROJECT[] = nothing
fn()
@@ -2005,8 +2300,10 @@ function sandbox_preserve(env::EnvCache, target::PackageSpec, test_project::Stri
env = deepcopy(env)
# include root in manifest (in case any dependencies point back to it)
if env.pkg !== nothing
- env.manifest[env.pkg.uuid] = PackageEntry(;name=env.pkg.name, path=dirname(env.project_file),
- deps=env.project.deps)
+ env.manifest[env.pkg.uuid] = PackageEntry(;
+ name = env.pkg.name, path = dirname(env.project_file),
+ deps = env.project.deps
+ )
end
# if the source manifest is an old format, upgrade the manifest_format so
# that warnings aren't thrown for the temp sandbox manifest
@@ -2041,16 +2338,18 @@ function abspath!(env::EnvCache, project::Project)
end
# ctx + pkg used to compute parent dep graph
-function sandbox(fn::Function, ctx::Context, target::PackageSpec,
- sandbox_path::String, sandbox_project_override;
- preferences::Union{Nothing,Dict{String,Any}} = nothing,
- force_latest_compatible_version::Bool=false,
- allow_earlier_backwards_compatible_versions::Bool=true,
- allow_reresolve::Bool=true)
+function sandbox(
+ fn::Function, ctx::Context, target::PackageSpec,
+ sandbox_path::String, sandbox_project_override;
+ preferences::Union{Nothing, Dict{String, Any}} = nothing,
+ force_latest_compatible_version::Bool = false,
+ allow_earlier_backwards_compatible_versions::Bool = true,
+ allow_reresolve::Bool = true
+ )
sandbox_project = projectfile_path(sandbox_path)
- mktempdir() do tmp
- tmp_project = projectfile_path(tmp)
+ return mktempdir() do tmp
+ tmp_project = projectfile_path(tmp)
tmp_manifest = manifestfile_path(tmp)
tmp_preferences = joinpath(tmp, first(Base.preferences_names))
@@ -2118,14 +2417,22 @@ function sandbox(fn::Function, ctx::Context, target::PackageSpec,
end
try
- Pkg.resolve(temp_ctx; io=devnull, skip_writing_project=true)
+ Pkg.resolve(temp_ctx; io = devnull, skip_writing_project = true)
@debug "Using _parent_ dep graph"
- catch err# TODO
+ catch err # TODO
err isa Resolve.ResolverError || rethrow()
allow_reresolve || rethrow()
@debug err
- printpkgstyle(ctx.io, :Test, "Could not use exact versions of packages in manifest. Re-resolving dependencies", color=Base.warn_color())
- Pkg.update(temp_ctx; skip_writing_project=true, update_registry=false, io=ctx.io)
+ msg = string(
+ "Could not use exact versions of packages in manifest, re-resolving. ",
+ "Note: if you do not check your manifest file into source control, ",
+ "then you can probably ignore this message. ",
+ "However, if you do check your manifest file into source control, ",
+ "then you probably want to pass the `allow_reresolve = false` kwarg ",
+ "when calling the `Pkg.test` function.",
+ )
+ printpkgstyle(ctx.io, :Test, msg, color = Base.warn_color())
+ Pkg.update(temp_ctx; skip_writing_project = true, update_registry = false, io = ctx.io)
printpkgstyle(ctx.io, :Test, "Successfully re-resolved")
@debug "Using _clean_ dep graph"
end
@@ -2164,7 +2471,7 @@ function gen_target_project(ctx::Context, pkg::PackageSpec, source_path::String,
env = ctx.env
registries = ctx.registries
test_project = Types.Project()
- if projectfile_path(source_path; strict=true) === nothing
+ if projectfile_path(source_path; strict = true) === nothing
# no project file, assuming this is an old REQUIRE package
test_project.deps = copy(env.manifest[pkg.uuid].deps)
if target == "test"
@@ -2172,10 +2479,10 @@ function gen_target_project(ctx::Context, pkg::PackageSpec, source_path::String,
if isfile(test_REQUIRE_path)
@warn "using test/REQUIRE files is deprecated and current support is lacking in some areas"
test_pkgs = parse_REQUIRE(test_REQUIRE_path)
- package_specs = [PackageSpec(name=pkg) for pkg in test_pkgs]
+ package_specs = [PackageSpec(name = pkg) for pkg in test_pkgs]
registry_resolve!(registries, package_specs)
stdlib_resolve!(package_specs)
- ensure_resolved(ctx, env.manifest, package_specs, registry=true)
+ ensure_resolved(ctx, env.manifest, package_specs, registry = true)
for spec in package_specs
test_project.deps[spec.name] = spec.uuid
end
@@ -2211,12 +2518,14 @@ end
testdir(source_path::String) = joinpath(source_path, "test")
testfile(source_path::String) = joinpath(testdir(source_path), "runtests.jl")
-function test(ctx::Context, pkgs::Vector{PackageSpec};
- coverage=false, julia_args::Cmd=``, test_args::Cmd=``,
- test_fn=nothing,
- force_latest_compatible_version::Bool=false,
- allow_earlier_backwards_compatible_versions::Bool=true,
- allow_reresolve::Bool=true)
+function test(
+ ctx::Context, pkgs::Vector{PackageSpec};
+ coverage = false, julia_args::Cmd = ``, test_args::Cmd = ``,
+ test_fn = nothing,
+ force_latest_compatible_version::Bool = false,
+ allow_earlier_backwards_compatible_versions::Bool = true,
+ allow_reresolve::Bool = true
+ )
Pkg.instantiate(ctx; allow_autoprecomp = false) # do precomp later within sandbox
# load manifest data
@@ -2237,16 +2546,18 @@ function test(ctx::Context, pkgs::Vector{PackageSpec};
# See if we can find the test files for all packages
missing_runtests = String[]
- source_paths = String[] # source_path is the package root (not /src)
+ source_paths = String[] # source_path is the package root (not /src)
for pkg in pkgs
sourcepath = project_rel_path(ctx.env, source_path(ctx.env.manifest_file, pkg, ctx.julia_version)) # TODO
!isfile(testfile(sourcepath)) && push!(missing_runtests, pkg.name)
push!(source_paths, sourcepath)
end
if !isempty(missing_runtests)
- pkgerror(length(missing_runtests) == 1 ? "Package " : "Packages ",
- join(missing_runtests, ", "),
- " did not provide a `test/runtests.jl` file")
+ pkgerror(
+ length(missing_runtests) == 1 ? "Package " : "Packages ",
+ join(missing_runtests, ", "),
+ " did not provide a `test/runtests.jl` file"
+ )
end
# sandbox
@@ -2258,13 +2569,15 @@ function test(ctx::Context, pkgs::Vector{PackageSpec};
proj = Base.locate_project_file(abspath(testdir(source_path)))
env = EnvCache(proj)
# Instantiate test env
- Pkg.instantiate(Context(env=env); allow_autoprecomp = false)
- status(env, ctx.registries; mode=PKGMODE_COMBINED, io=ctx.io, ignore_indent = false, show_usagetips = false)
+ Pkg.instantiate(Context(env = env); allow_autoprecomp = false)
+ status(env, ctx.registries; mode = PKGMODE_COMBINED, io = ctx.io, ignore_indent = false, show_usagetips = false)
flags = gen_subprocess_flags(source_path; coverage, julia_args)
if should_autoprecompile()
cacheflags = Base.CacheFlags(parse(UInt8, read(`$(Base.julia_cmd()) $(flags) --eval 'show(ccall(:jl_cache_flags, UInt8, ()))'`, String)))
- Pkg.precompile(; io=ctx.io, configs = flags => cacheflags)
+ # Don't warn about already loaded packages, since we are going to run tests in a new
+ # subprocess anyway.
+ Pkg.precompile(; io = ctx.io, warn_loaded = false, configs = flags => cacheflags)
end
printpkgstyle(ctx.io, :Testing, "Running tests...")
@@ -2299,21 +2612,21 @@ function test(ctx::Context, pkgs::Vector{PackageSpec};
end
# now we sandbox
printpkgstyle(ctx.io, :Testing, pkg.name)
- sandbox(ctx, pkg, testdir(source_path), test_project_override; preferences=test_project_preferences, force_latest_compatible_version, allow_earlier_backwards_compatible_versions, allow_reresolve) do
+ sandbox(ctx, pkg, testdir(source_path), test_project_override; preferences = test_project_preferences, force_latest_compatible_version, allow_earlier_backwards_compatible_versions, allow_reresolve) do
test_fn !== nothing && test_fn()
- sandbox_ctx = Context(;io=ctx.io)
- status(sandbox_ctx.env, sandbox_ctx.registries; mode=PKGMODE_COMBINED, io=sandbox_ctx.io, ignore_indent = false, show_usagetips = false)
- flags = gen_subprocess_flags(source_path; coverage,julia_args)
+ sandbox_ctx = Context(; io = ctx.io)
+ status(sandbox_ctx.env, sandbox_ctx.registries; mode = PKGMODE_COMBINED, io = sandbox_ctx.io, ignore_indent = false, show_usagetips = false)
+ flags = gen_subprocess_flags(source_path; coverage, julia_args)
if should_autoprecompile()
cacheflags = Base.CacheFlags(parse(UInt8, read(`$(Base.julia_cmd()) $(flags) --eval 'show(ccall(:jl_cache_flags, UInt8, ()))'`, String)))
- Pkg.precompile(sandbox_ctx; io=sandbox_ctx.io, configs = flags => cacheflags)
+ Pkg.precompile(sandbox_ctx; io = sandbox_ctx.io, configs = flags => cacheflags)
end
printpkgstyle(ctx.io, :Testing, "Running tests...")
flush(ctx.io)
code = gen_test_code(source_path; test_args)
- cmd = `$(Base.julia_cmd()) $(flags) --threads=$(get_threads_spec()) --eval $code`
+ cmd = `$(Base.julia_cmd()) --threads=$(get_threads_spec()) $(flags) --eval $code`
p, interrupted = subprocess_handler(cmd, ctx.io, "Tests interrupted. Exiting the test process")
if success(p)
printpkgstyle(ctx.io, :Testing, pkg.name * " tests passed ")
@@ -2325,7 +2638,7 @@ function test(ctx::Context, pkgs::Vector{PackageSpec};
# TODO: Should be included in Base
function signal_name(signal::Integer)
- if signal == Base.SIGHUP
+ return if signal == Base.SIGHUP
"HUP"
elseif signal == Base.SIGINT
"INT"
@@ -2343,9 +2656,9 @@ function test(ctx::Context, pkgs::Vector{PackageSpec};
end
# report errors
- if !isempty(pkgs_errored)
+ return if !isempty(pkgs_errored)
function reason(p)
- if Base.process_signaled(p)
+ return if Base.process_signaled(p)
" (received signal: " * signal_name(p.termsignal) * ")"
elseif Base.process_exited(p) && p.exitcode != 1
" (exit code: " * string(p.exitcode) * ")"
@@ -2394,7 +2707,7 @@ end
# Display
-function stat_rep(x::PackageSpec; name=true)
+function stat_rep(x::PackageSpec; name = true)
name = name ? "$(x.name)" : ""
version = x.version == VersionSpec() ? "" : "v$(x.version)"
rev = ""
@@ -2405,7 +2718,7 @@ function stat_rep(x::PackageSpec; name=true)
repo = Operations.is_tracking_repo(x) ? "`$(x.repo.source)$(subdir_str)#$(rev)`" : ""
path = Operations.is_tracking_path(x) ? "$(pathrepr(x.path))" : ""
pinned = x.pinned ? "⚲" : ""
- return join(filter(!isempty, [name,version,repo,path,pinned]), " ")
+ return join(filter(!isempty, [name, version, repo, path, pinned]), " ")
end
print_single(io::IO, pkg::PackageSpec) = print(io, stat_rep(pkg))
@@ -2413,20 +2726,20 @@ print_single(io::IO, pkg::PackageSpec) = print(io, stat_rep(pkg))
is_instantiated(::Nothing) = false
is_instantiated(x::PackageSpec) = x.version != VersionSpec() || is_stdlib(x.uuid)
# Compare an old and new node of the dependency graph and print a single line to summarize the change
-function print_diff(io::IO, old::Union{Nothing,PackageSpec}, new::Union{Nothing,PackageSpec})
- if !is_instantiated(old) && is_instantiated(new)
- printstyled(io, "+ $(stat_rep(new))"; color=:light_green)
+function print_diff(io::IO, old::Union{Nothing, PackageSpec}, new::Union{Nothing, PackageSpec})
+ return if !is_instantiated(old) && is_instantiated(new)
+ printstyled(io, "+ $(stat_rep(new))"; color = :light_green)
elseif !is_instantiated(new)
- printstyled(io, "- $(stat_rep(old))"; color=:light_red)
+ printstyled(io, "- $(stat_rep(old))"; color = :light_red)
elseif is_tracking_registry(old) && is_tracking_registry(new) &&
- new.version isa VersionNumber && old.version isa VersionNumber && new.version != old.version
+ new.version isa VersionNumber && old.version isa VersionNumber && new.version != old.version
if new.version > old.version
- printstyled(io, "↑ $(stat_rep(old)) ⇒ $(stat_rep(new; name=false))"; color=:light_yellow)
+ printstyled(io, "↑ $(stat_rep(old)) ⇒ $(stat_rep(new; name = false))"; color = :light_yellow)
else
- printstyled(io, "↓ $(stat_rep(old)) ⇒ $(stat_rep(new; name=false))"; color=:light_magenta)
+ printstyled(io, "↓ $(stat_rep(old)) ⇒ $(stat_rep(new; name = false))"; color = :light_magenta)
end
else
- printstyled(io, "~ $(stat_rep(old)) ⇒ $(stat_rep(new; name=false))"; color=:light_yellow)
+ printstyled(io, "~ $(stat_rep(old)) ⇒ $(stat_rep(new; name = false))"; color = :light_yellow)
end
end
@@ -2442,11 +2755,11 @@ function status_compat_info(pkg::PackageSpec, env::EnvCache, regs::Vector{Regist
reg_compat_info = Registry.compat_info(info)
versions = keys(reg_compat_info)
versions = filter(v -> !Registry.isyanked(info, v), versions)
- max_version_reg = maximum(versions; init=v"0")
+ max_version_reg = maximum(versions; init = v"0")
max_version = max(max_version, max_version_reg)
compat_spec = get_compat_workspace(env, pkg.name)
versions_in_compat = filter(in(compat_spec), keys(reg_compat_info))
- max_version_in_compat = max(max_version_in_compat, maximum(versions_in_compat; init=v"0"))
+ max_version_in_compat = max(max_version_in_compat, maximum(versions_in_compat; init = v"0"))
end
max_version == v"0" && return nothing
pkg.version >= max_version && return nothing
@@ -2515,7 +2828,7 @@ function status_compat_info(pkg::PackageSpec, env::EnvCache, regs::Vector{Regist
return sort!(unique!(packages_holding_back)), max_version, max_version_in_compat
end
-function diff_array(old_env::Union{EnvCache,Nothing}, new_env::EnvCache; manifest=true, workspace=false)
+function diff_array(old_env::Union{EnvCache, Nothing}, new_env::EnvCache; manifest = true, workspace = false)
function index_pkgs(pkgs, uuid)
idx = findfirst(pkg -> pkg.uuid == uuid, pkgs)
return idx === nothing ? nothing : pkgs[idx]
@@ -2527,9 +2840,9 @@ function diff_array(old_env::Union{EnvCache,Nothing}, new_env::EnvCache; manifes
new = manifest ? load_all_deps_loadable(new_env) : load_project_deps(new_env.project, new_env.project_file, new_env.manifest, new_env.manifest_file)
end
- T, S = Union{UUID,Nothing}, Union{PackageSpec,Nothing}
+ T, S = Union{UUID, Nothing}, Union{PackageSpec, Nothing}
if old_env === nothing
- return Tuple{T,S,S}[(pkg.uuid, nothing, pkg)::Tuple{T,S,S} for pkg in new]
+ return Tuple{T, S, S}[(pkg.uuid, nothing, pkg)::Tuple{T, S, S} for pkg in new]
end
if workspace
old = manifest ? load_all_deps(old_env) : load_direct_deps(old_env)
@@ -2538,13 +2851,12 @@ function diff_array(old_env::Union{EnvCache,Nothing}, new_env::EnvCache; manifes
end
# merge old and new into single array
all_uuids = union(T[pkg.uuid for pkg in old], T[pkg.uuid for pkg in new])
- return Tuple{T,S,S}[(uuid, index_pkgs(old, uuid), index_pkgs(new, uuid))::Tuple{T,S,S} for uuid in all_uuids]
+ return Tuple{T, S, S}[(uuid, index_pkgs(old, uuid), index_pkgs(new, uuid))::Tuple{T, S, S} for uuid in all_uuids]
end
-function is_package_downloaded(manifest_file::String, pkg::PackageSpec; platform=HostPlatform())
+function is_package_downloaded(manifest_file::String, pkg::PackageSpec; platform = HostPlatform())
sourcepath = source_path(manifest_file, pkg)
- identifier = pkg.name !== nothing ? pkg.name : pkg.uuid
- (sourcepath === nothing) && pkgerror("Could not locate the source code for the $(identifier) package. Are you trying to use a manifest generated by a different version of Julia?")
+ sourcepath === nothing && return false
isdir(sourcepath) || return false
check_artifacts_downloaded(sourcepath; platform) || return false
return true
@@ -2564,11 +2876,13 @@ function status_ext_info(pkg::PackageSpec, env::EnvCache)
# Note: `get_extension` returns nothing for stdlibs that are loaded via `require_stdlib`
ext_loaded = (Base.get_extension(Base.PkgId(pkg.uuid, pkg.name), Symbol(ext)) !== nothing)
# Check if deps are loaded
- extdeps_info= Tuple{String, Bool}[]
+ extdeps_info = Tuple{String, Bool}[]
for extdep in extdeps
if !(haskey(weakdepses, extdep) || haskey(depses, extdep))
- pkgerror(isnothing(pkg.name) ? "M" : "$(pkg.name) has a malformed Project.toml, ",
- "the extension package $extdep is not listed in [weakdeps] or [deps]")
+ pkgerror(
+ isnothing(pkg.name) ? "M" : "$(pkg.name) has a malformed Project.toml, ",
+ "the extension package $extdep is not listed in [weakdeps] or [deps]"
+ )
end
uuid = get(weakdepses, extdep, nothing)
if uuid === nothing
@@ -2600,29 +2914,39 @@ struct PackageStatusData
extinfo::Union{Nothing, Vector{ExtInfo}}
end
-function print_status(env::EnvCache, old_env::Union{Nothing,EnvCache}, registries::Vector{Registry.RegistryInstance}, header::Symbol,
- uuids::Vector, names::Vector; manifest=true, diff=false, ignore_indent::Bool, workspace::Bool, outdated::Bool, extensions::Bool, io::IO,
- mode::PackageMode, hidden_upgrades_info::Bool, show_usagetips::Bool=true)
- not_installed_indicator = sprint((io, args) -> printstyled(io, args...; color=Base.error_color()), "→", context=io)
- upgradable_indicator = sprint((io, args) -> printstyled(io, args...; color=:green), "⌃", context=io)
- heldback_indicator = sprint((io, args) -> printstyled(io, args...; color=Base.warn_color()), "⌅", context=io)
+function print_status(
+ env::EnvCache, old_env::Union{Nothing, EnvCache}, registries::Vector{Registry.RegistryInstance}, header::Symbol,
+ uuids::Vector, names::Vector; manifest = true, diff = false, ignore_indent::Bool, workspace::Bool, outdated::Bool, extensions::Bool, io::IO,
+ mode::PackageMode, hidden_upgrades_info::Bool, show_usagetips::Bool = true
+ )
+ not_installed_indicator = sprint((io, args) -> printstyled(io, args...; color = Base.error_color()), "→", context = io)
+ upgradable_indicator = sprint((io, args) -> printstyled(io, args...; color = :green), "⌃", context = io)
+ heldback_indicator = sprint((io, args) -> printstyled(io, args...; color = Base.warn_color()), "⌅", context = io)
filter = !isempty(uuids) || !isempty(names)
# setup
xs = diff_array(old_env, env; manifest, workspace)
# filter and return early if possible
if isempty(xs) && !diff
- printpkgstyle(io, header, "$(pathrepr(manifest ? env.manifest_file : env.project_file)) (empty " *
- (manifest ? "manifest" : "project") * ")", ignore_indent)
+ printpkgstyle(
+ io, header, "$(pathrepr(manifest ? env.manifest_file : env.project_file)) (empty " *
+ (manifest ? "manifest" : "project") * ")", ignore_indent
+ )
return nothing
end
- no_changes = all(p-> p[2] == p[3], xs)
+ no_changes = all(p -> p[2] == p[3], xs)
if no_changes
- printpkgstyle(io, Symbol("No packages added to or removed from"), "$(pathrepr(manifest ? env.manifest_file : env.project_file))", ignore_indent)
+ if manifest
+ printpkgstyle(io, :Manifest, "No packages added to or removed from $(pathrepr(env.manifest_file))", ignore_indent; color = Base.info_color())
+ else
+ printpkgstyle(io, :Project, "No packages added to or removed from $(pathrepr(env.project_file))", ignore_indent; color = Base.info_color())
+ end
else
xs = !filter ? xs : eltype(xs)[(id, old, new) for (id, old, new) in xs if (id in uuids || something(new, old).name in names)]
if isempty(xs)
- printpkgstyle(io, Symbol("No Matches"),
- "in $(diff ? "diff for " : "")$(pathrepr(manifest ? env.manifest_file : env.project_file))", ignore_indent)
+ printpkgstyle(
+ io, Symbol("No Matches"),
+ "in $(diff ? "diff for " : "")$(pathrepr(manifest ? env.manifest_file : env.project_file))", ignore_indent
+ )
return nothing
end
# main print
@@ -2681,8 +3005,8 @@ function print_status(env::EnvCache, old_env::Union{Nothing,EnvCache}, registrie
pkg_downloaded = !is_instantiated(new) || is_package_downloaded(env.manifest_file, new)
new_ver_avail = !latest_version && !Operations.is_tracking_repo(new) && !Operations.is_tracking_path(new)
- pkg_upgradable = new_ver_avail && isempty(cinfo[1])
- pkg_heldback = new_ver_avail && !isempty(cinfo[1])
+ pkg_upgradable = new_ver_avail && cinfo !== nothing && isempty(cinfo[1])
+ pkg_heldback = new_ver_avail && cinfo !== nothing && !isempty(cinfo[1])
if !pkg_downloaded && (pkg_upgradable || pkg_heldback)
# allow space in the gutter for two icons on a single line
@@ -2720,20 +3044,45 @@ function print_status(env::EnvCache, old_env::Union{Nothing,EnvCache}, registrie
diff ? print_diff(io, pkg.old, pkg.new) : print_single(io, pkg.new)
+ # show if package is yanked
+ pkg_spec = something(pkg.new, pkg.old)
+ if is_pkgversion_yanked(pkg_spec, registries)
+ printstyled(io, " [yanked]"; color = :yellow)
+ end
+
if outdated && !diff && pkg.compat_data !== nothing
packages_holding_back, max_version, max_version_compat = pkg.compat_data
if pkg.new.version !== max_version_compat && max_version_compat != max_version
- printstyled(io, " [ is_pkgversion_yanked(something(pkg.new, pkg.old), registries), package_statuses)
+
+ # Add warning for yanked packages
+ if any_yanked_packages
+ yanked_str = sprint((io, args) -> printstyled(io, args...; color = :yellow), "[yanked]", context = io)
+ printpkgstyle(io, :Warning, """Package versions marked with $yanked_str have been pulled from their registry. \
+ It is recommended to update them to resolve a valid version.""", color = Base.warn_color(), ignore_indent)
+ end
+
return nothing
end
@@ -2791,8 +3150,8 @@ function git_head_env(env, project_dir)
git_path = LibGit2.path(repo)
project_path = relpath(env.project_file, git_path)
manifest_path = relpath(env.manifest_file, git_path)
- new_env.project = read_project(GitTools.git_file_stream(repo, "HEAD:$project_path", fakeit=true))
- new_env.manifest = read_manifest(GitTools.git_file_stream(repo, "HEAD:$manifest_path", fakeit=true))
+ new_env.project = read_project(GitTools.git_file_stream(repo, "HEAD:$project_path", fakeit = true))
+ new_env.manifest = read_manifest(GitTools.git_file_stream(repo, "HEAD:$manifest_path", fakeit = true))
return new_env
end
catch err
@@ -2805,26 +3164,29 @@ function show_update(env::EnvCache, registries::Vector{Registry.RegistryInstance
old_env = EnvCache()
old_env.project = env.original_project
old_env.manifest = env.original_manifest
- status(env, registries; header=:Updating, mode=PKGMODE_COMBINED, env_diff=old_env, ignore_indent=false, io=io, hidden_upgrades_info)
+ status(env, registries; header = :Updating, mode = PKGMODE_COMBINED, env_diff = old_env, ignore_indent = false, io = io, hidden_upgrades_info)
return nothing
end
-function status(env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}=PackageSpec[];
- header=nothing, mode::PackageMode=PKGMODE_PROJECT, git_diff::Bool=false, env_diff=nothing, ignore_indent=true,
- io::IO, workspace::Bool=false, outdated::Bool=false, extensions::Bool=false, hidden_upgrades_info::Bool=false, show_usagetips::Bool=true)
+function status(
+ env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec} = PackageSpec[];
+ header = nothing, mode::PackageMode = PKGMODE_PROJECT, git_diff::Bool = false, env_diff = nothing, ignore_indent = true,
+ io::IO, workspace::Bool = false, outdated::Bool = false, extensions::Bool = false, hidden_upgrades_info::Bool = false, show_usagetips::Bool = true
+ )
io == Base.devnull && return
# if a package, print header
if header === nothing && env.pkg !== nothing
- printpkgstyle(io, :Project, string(env.pkg.name, " v", env.pkg.version), true; color=Base.info_color())
+ printpkgstyle(io, :Project, string(env.pkg.name, " v", env.pkg.version), true; color = Base.info_color())
end
# load old env
old_env = nothing
if git_diff
project_dir = dirname(env.project_file)
- if !ispath(joinpath(project_dir, ".git"))
+ git_repo_dir = discover_repo(project_dir)
+ if git_repo_dir === nothing
@warn "diff option only available for environments in git repositories, ignoring."
else
- old_env = git_head_env(env, project_dir)
+ old_env = git_head_env(env, git_repo_dir)
if old_env === nothing
@warn "could not read project from HEAD, displaying absolute status instead."
end
@@ -2839,15 +3201,25 @@ function status(env::EnvCache, registries::Vector{Registry.RegistryInstance}, pk
diff = old_env !== nothing
header = something(header, diff ? :Diff : :Status)
if mode == PKGMODE_PROJECT || mode == PKGMODE_COMBINED
- print_status(env, old_env, registries, header, filter_uuids, filter_names; manifest=false, diff, ignore_indent, io, workspace, outdated, extensions, mode, hidden_upgrades_info, show_usagetips)
+ print_status(env, old_env, registries, header, filter_uuids, filter_names; manifest = false, diff, ignore_indent, io, workspace, outdated, extensions, mode, hidden_upgrades_info, show_usagetips)
end
if mode == PKGMODE_MANIFEST || mode == PKGMODE_COMBINED
print_status(env, old_env, registries, header, filter_uuids, filter_names; diff, ignore_indent, io, workspace, outdated, extensions, mode, hidden_upgrades_info, show_usagetips)
end
- if is_manifest_current(env) === false
- tip = show_usagetips ? " It is recommended to `Pkg.resolve()` or consider `Pkg.update()` if necessary." : ""
- printpkgstyle(io, :Warning, "The project dependencies or compat requirements have changed since the manifest was last resolved.$tip",
- ignore_indent; color=Base.warn_color())
+ return if is_manifest_current(env) === false
+ tip = if show_usagetips
+ if Pkg.in_repl_mode()
+ " It is recommended to `pkg> resolve` or consider `pkg> update` if necessary."
+ else
+ " It is recommended to `Pkg.resolve()` or consider `Pkg.update()` if necessary."
+ end
+ else
+ ""
+ end
+ printpkgstyle(
+ io, :Warning, "The project dependencies or compat requirements have changed since the manifest was last resolved.$tip",
+ ignore_indent; color = Base.warn_color()
+ )
end
end
@@ -2884,7 +3256,7 @@ function print_compat(ctx::Context, pkgs_in::Vector{PackageSpec} = PackageSpec[]
printpkgstyle(io, :Compat, pathrepr(ctx.env.project_file))
names = [pkg.name for pkg in pkgs_in]
pkgs = isempty(pkgs_in) ? ctx.env.project.deps : filter(pkg -> in(first(pkg), names), ctx.env.project.deps)
- add_julia = isempty(pkgs_in) || any(p->p.name == "julia", pkgs_in)
+ add_julia = isempty(pkgs_in) || any(p -> p.name == "julia", pkgs_in)
longest_dep_len = isempty(pkgs) ? length("julia") : max(reduce(max, map(length, collect(keys(pkgs)))), length("julia"))
if add_julia
println(io, compat_line(io, "julia", nothing, get_compat_str(ctx.env.project, "julia"), longest_dep_len))
@@ -2892,13 +3264,16 @@ function print_compat(ctx::Context, pkgs_in::Vector{PackageSpec} = PackageSpec[]
for (dep, uuid) in pkgs
println(io, compat_line(io, dep, uuid, get_compat_str(ctx.env.project, dep), longest_dep_len))
end
+ return
end
print_compat(pkg::String; kwargs...) = print_compat(Context(), pkg; kwargs...)
print_compat(; kwargs...) = print_compat(Context(); kwargs...)
-function apply_force_latest_compatible_version!(ctx::Types.Context;
- target_name = nothing,
- allow_earlier_backwards_compatible_versions::Bool = true)
+function apply_force_latest_compatible_version!(
+ ctx::Types.Context;
+ target_name = nothing,
+ allow_earlier_backwards_compatible_versions::Bool = true
+ )
deps_from_env = load_direct_deps(ctx.env)
deps = [(; name = x.name, uuid = x.uuid) for x in deps_from_env]
for dep in deps
@@ -2914,10 +3289,12 @@ function apply_force_latest_compatible_version!(ctx::Types.Context;
return nothing
end
-function apply_force_latest_compatible_version!(ctx::Types.Context,
- dep::NamedTuple{(:name, :uuid), Tuple{String, Base.UUID}};
- target_name = nothing,
- allow_earlier_backwards_compatible_versions::Bool = true)
+function apply_force_latest_compatible_version!(
+ ctx::Types.Context,
+ dep::NamedTuple{(:name, :uuid), Tuple{String, Base.UUID}};
+ target_name = nothing,
+ allow_earlier_backwards_compatible_versions::Bool = true
+ )
name, uuid = dep
has_compat = haskey(ctx.env.project.compat, name)
if !has_compat
@@ -2957,17 +3334,21 @@ function get_earliest_backwards_compatible_version(ver::Base.VersionNumber)
return Base.VersionNumber(0, 0, ver.patch)
end
-function get_latest_compatible_version(ctx::Types.Context,
- uuid::Base.UUID,
- compat_spec::VersionSpec)
+function get_latest_compatible_version(
+ ctx::Types.Context,
+ uuid::Base.UUID,
+ compat_spec::VersionSpec
+ )
all_registered_versions = get_all_registered_versions(ctx, uuid)
compatible_versions = filter(in(compat_spec), all_registered_versions)
latest_compatible_version = maximum(compatible_versions)
return latest_compatible_version
end
-function get_all_registered_versions(ctx::Types.Context,
- uuid::Base.UUID)
+function get_all_registered_versions(
+ ctx::Types.Context,
+ uuid::Base.UUID
+ )
versions = Set{VersionNumber}()
for reg in ctx.registries
pkg = get(reg, uuid, nothing)
diff --git a/src/Pkg.jl b/src/Pkg.jl
index d6260607dd..087ee05af4 100644
--- a/src/Pkg.jl
+++ b/src/Pkg.jl
@@ -2,6 +2,11 @@
module Pkg
+# In Pkg tests we want to avoid Pkg being re-precompiled by subprocesses, so this is enabled in the test suite
+if Base.get_bool_env("JULIA_PKG_DISALLOW_PKG_PRECOMPILATION", false) == true
+ error("Precompiling Pkg is disallowed. JULIA_PKG_DISALLOW_PKG_PRECOMPILATION=$(ENV["JULIA_PKG_DISALLOW_PKG_PRECOMPILATION"])")
+end
+
if isdefined(Base, :Experimental) && isdefined(Base.Experimental, Symbol("@max_methods"))
@eval Base.Experimental.@max_methods 1
end
@@ -18,13 +23,17 @@ export PreserveLevel, PRESERVE_TIERED_INSTALLED, PRESERVE_TIERED, PRESERVE_ALL_I
export Registry, RegistrySpec
public activate, add, build, compat, develop, free, gc, generate, instantiate,
- pin, precompile, redo, rm, resolve, status, test, undo, update, why
+ pin, precompile, redo, rm, resolve, status, test, undo, update, why
depots() = Base.DEPOT_PATH
-function depots1()
- d = depots()
- isempty(d) && Pkg.Types.pkgerror("no depots found in DEPOT_PATH")
- return d[1]
+function depots1(depot_list::Union{String, Vector{String}} = depots())
+ # Get the first depot from a list, with proper error handling
+ if depot_list isa String
+ return depot_list
+ else
+ isempty(depot_list) && Pkg.Types.pkgerror("no depots provided")
+ return depot_list[1]
+ end
end
function pkg_server()
@@ -41,15 +50,18 @@ const UPDATED_REGISTRY_THIS_SESSION = Ref(false)
const OFFLINE_MODE = Ref(false)
const RESPECT_SYSIMAGE_VERSIONS = Ref(true)
# For globally overriding in e.g. tests
-const DEFAULT_IO = Ref{Union{IO,Nothing}}(nothing)
+const DEFAULT_IO = Ref{Union{IO, Nothing}}(nothing)
+
+# ScopedValue to track whether we're currently in REPL mode
+const IN_REPL_MODE = Base.ScopedValues.ScopedValue{Bool}()
# See discussion in https://github.com/JuliaLang/julia/pull/52249
function unstableio(@nospecialize(io::IO))
# Needed to prevent specialization https://github.com/JuliaLang/julia/pull/52249#discussion_r1401199265
_io = Base.inferencebarrier(io)
- IOContext{IO}(
+ return IOContext{IO}(
_io,
- get(_io,:color,false) ? Base.ImmutableDict{Symbol,Any}(:color, true) : Base.ImmutableDict{Symbol,Any}()
+ get(_io, :color, false) ? Base.ImmutableDict{Symbol, Any}(:color, true) : Base.ImmutableDict{Symbol, Any}()
)
end
stderr_f() = something(DEFAULT_IO[], unstableio(stderr))
@@ -58,7 +70,28 @@ const PREV_ENV_PATH = Ref{String}("")
usable_io(io) = (io isa Base.TTY) || (io isa IOContext{IO} && io.io isa Base.TTY)
can_fancyprint(io::IO) = (usable_io(io)) && (get(ENV, "CI", nothing) != "true")
-should_autoprecompile() = Base.JLOptions().use_compiled_modules == 1 && Base.get_bool_env("JULIA_PKG_PRECOMPILE_AUTO", true)
+
+_autoprecompilation_enabled::Bool = true
+const _autoprecompilation_enabled_scoped = Base.ScopedValues.ScopedValue{Bool}(true)
+autoprecompilation_enabled(state::Bool) = (global _autoprecompilation_enabled = state)
+function should_autoprecompile()
+ if Base.JLOptions().use_compiled_modules == 1 &&
+ _autoprecompilation_enabled &&
+ _autoprecompilation_enabled_scoped[] &&
+ Base.get_bool_env("JULIA_PKG_PRECOMPILE_AUTO", true)
+ return true
+ else
+ return false
+ end
+end
+
+"""
+ in_repl_mode()
+
+Check if we're currently executing in REPL mode. This is used to determine
+whether to show tips in REPL format (`pkg> add Foo`) or API format (`Pkg.add("Foo")`).
+"""
+in_repl_mode() = @something(Base.ScopedValues.get(IN_REPL_MODE), false)
include("utils.jl")
include("MiniProgressBars.jl")
@@ -68,10 +101,11 @@ include("Versions.jl")
include("Registry/Registry.jl")
include("Resolve/Resolve.jl")
include("Types.jl")
-include("BinaryPlatforms_compat.jl")
+include("BinaryPlatformsCompat.jl")
include("Artifacts.jl")
include("Operations.jl")
include("API.jl")
+include("Apps/Apps.jl")
include("REPLMode/REPLMode.jl")
import .REPLMode: @pkg_str
@@ -185,11 +219,21 @@ const add = API.add
Pkg.precompile(; strict::Bool=false, timing::Bool=false)
Pkg.precompile(pkg; strict::Bool=false, timing::Bool=false)
Pkg.precompile(pkgs; strict::Bool=false, timing::Bool=false)
+ Pkg.precompile(f, args...; kwargs...)
Precompile all or specific dependencies of the project in parallel.
Set `timing=true` to show the duration of the precompilation of each dependency.
+To delay autoprecompilation of multiple Pkg actions until the end, use the do-block syntax.
+This may be most efficient while manipulating the environment in various ways.
+
+```julia
+Pkg.precompile() do
+ # Pkg actions here
+end
+```
+
!!! note
Errors will only throw when precompiling the top-level dependencies, given that
not all manifest dependencies may be loaded by the top-level dependencies on the given system.
@@ -207,6 +251,9 @@ Set `timing=true` to show the duration of the precompilation of each dependency.
!!! compat "Julia 1.9"
Timing mode requires at least Julia 1.9.
+!!! compat "Julia 1.13"
+ The `Pkg.precompile(f, args...; kwargs...)` do-block syntax requires at least Julia 1.13.
+
# Examples
```julia
Pkg.precompile()
@@ -216,6 +263,39 @@ Pkg.precompile(["Foo", "Bar"])
"""
const precompile = API.precompile
+"""
+ Pkg.autoprecompilation_enabled(state::Bool)
+
+Enable or disable automatic precompilation for Pkg operations.
+
+When `state` is `true` (default), Pkg operations that modify the project environment
+will automatically trigger precompilation of affected packages. When `state` is `false`,
+automatic precompilation is disabled and packages will only be precompiled when
+explicitly requested via [`Pkg.precompile`](@ref).
+
+This setting affects the global state and persists across Pkg operations in the same
+Julia session. It can be used in combination with [`Pkg.precompile`](@ref) do-syntax
+for more fine-grained control over when precompilation occurs.
+
+!!! compat "Julia 1.13"
+ This function requires at least Julia 1.13.
+
+# Examples
+```julia
+# Disable automatic precompilation
+Pkg.autoprecompilation_enabled(false)
+Pkg.add("Example") # Will not trigger auto-precompilation
+Pkg.precompile() # Manual precompilation
+
+# Re-enable automatic precompilation
+Pkg.autoprecompilation_enabled(true)
+Pkg.add("AnotherPackage") # Will trigger auto-precompilation
+```
+
+See also [`Pkg.precompile`](@ref).
+"""
+autoprecompilation_enabled
+
"""
Pkg.rm(pkg::Union{String, Vector{String}}; mode::PackageMode = PKGMODE_PROJECT)
Pkg.rm(pkg::Union{PackageSpec, Vector{PackageSpec}}; mode::PackageMode = PKGMODE_PROJECT)
@@ -279,19 +359,18 @@ const update = API.up
!!! compat "Julia 1.9"
Passing a string to `coverage` requires at least Julia 1.9.
-Run the tests for package `pkg`, or for the current project (which thus needs to be a package) if no
-positional argument is given to `Pkg.test`. A package is tested by running its
-`test/runtests.jl` file.
+Run the tests for the given package(s), or for the current project if no positional argument is given to `Pkg.test`
+(the current project would need to be a package). The package is tested by running its `test/runtests.jl` file.
-The tests are run by generating a temporary environment with only the `pkg` package
-and its (recursive) dependencies in it. If a manifest file exists and the `allow_reresolve`
-keyword argument is set to `false`, the versions in the manifest file are used.
-Otherwise a feasible set of packages is resolved and installed.
+The tests are run in a temporary environment that also includes the test specific dependencies
+of the package. The versions of dependencies in the current project are used for the
+test environment unless there is a compatibility conflict between the version of the dependencies and
+the test-specific dependencies. In that case, if `allow_reresolve` is `false` an error is thrown and
+if `allow_reresolve` is `true` a feasible set of versions of the dependencies is resolved and used.
-During the tests, test-specific dependencies are active, which are
-given in the project file as e.g.
+Test-specific dependencies are declared in the project file as:
-```
+```toml
[extras]
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
@@ -301,24 +380,25 @@ test = ["Test"]
The tests are executed in a new process with `check-bounds=yes` and by default `startup-file=no`.
If using the startup file (`~/.julia/config/startup.jl`) is desired, start julia with `--startup-file=yes`.
+
Inlining of functions during testing can be disabled (for better coverage accuracy)
by starting julia with `--inline=no`. The tests can be run as if different command line arguments were
passed to julia by passing the arguments instead to the `julia_args` keyword argument, e.g.
-```
+```julia
Pkg.test("foo"; julia_args=["--inline"])
```
To pass some command line arguments to be used in the tests themselves, pass the arguments to the
`test_args` keyword argument. These could be used to control the code being tested, or to control the
tests in some way. For example, the tests could have optional additional tests:
-```
+```julia
if "--extended" in ARGS
@test some_function()
end
```
which could be enabled by testing with
-```
+```julia
Pkg.test("foo"; test_args=["--extended"])
```
"""
@@ -345,14 +425,22 @@ const gc = API.gc
Pkg.build(pkg::Union{String, Vector{String}}; verbose = false, io::IO=stderr)
Pkg.build(pkgs::Union{PackageSpec, Vector{PackageSpec}}; verbose = false, io::IO=stderr)
+**Keyword arguments:**
+ - `verbose::Bool=false`: print the build output to `stdout`/`stderr` instead of redirecting to the `build.log` file.
+ - `allow_reresolve::Bool=true`: allow Pkg to reresolve the package versions in the build environment
+
+!!! compat "Julia 1.13"
+ `allow_reresolve` requires at least Julia 1.13.
+
Run the build script in `deps/build.jl` for `pkg` and all of its dependencies in
depth-first recursive order.
If no argument is given to `build`, the current project is built, which thus needs
to be a package.
This function is called automatically on any package that gets installed
for the first time.
-`verbose = true` prints the build output to `stdout`/`stderr` instead of
-redirecting to the `build.log` file.
+
+The build takes place in a new process matching the current process with default of `startup-file=no`.
+If using the startup file (`~/.julia/config/startup.jl`) is desired, start julia with an explicit `--startup-file=yes`.
"""
const build = API.build
@@ -370,8 +458,13 @@ To get updates from the origin path or remote repository the package must first
# Examples
```julia
+# Pin a package to its current version
Pkg.pin("Example")
+
+# Pin a package to a specific version
Pkg.pin(name="Example", version="0.3.1")
+
+# Pin all packages in the project
Pkg.pin(all_pkgs = true)
```
"""
@@ -390,7 +483,13 @@ To free all dependencies set `all_pkgs=true`.
# Examples
```julia
+# Free a single package (remove pin or stop tracking path)
Pkg.free("Package")
+
+# Free multiple packages
+Pkg.free(["PackageA", "PackageB"])
+
+# Free all packages in the project
Pkg.free(all_pkgs = true)
```
@@ -473,14 +572,14 @@ Request a `ProjectInfo` struct which contains information about the active proje
# `ProjectInfo` fields
-| Field | Description |
-|:-------------|:--------------------------------------------------------------------------------------------|
-| name | The project's name |
-| uuid | The project's UUID |
-| version | The project's version |
-| ispackage | Whether the project is a package (has a name and uuid) |
-| dependencies | The project's direct dependencies as a `Dict` which maps dependency name to dependency UUID |
-| path | The location of the project file which defines the active project |
+| Field | Description |
+|:---------------|:--------------------------------------------------------------------------------------------|
+| `name` | The project's name |
+| `uuid` | The project's UUID |
+| `version` | The project's version |
+| `ispackage` | Whether the project is a package (has a name and uuid) |
+| `dependencies` | The project's direct dependencies as a `Dict` which maps dependency name to dependency UUID |
+| `path` | The location of the project file which defines the active project |
"""
const project = API.project
@@ -499,10 +598,11 @@ dependencies in the manifest and instantiate the resulting project.
`julia_version_strict=true` will turn manifest version check failures into errors instead of logging warnings.
After packages have been installed the project will be precompiled.
-See more at [Environment Precompilation](@ref).
+See [Environment Precompilation](@ref) for more details, including how to disable auto-precompilation.
!!! compat "Julia 1.12"
The `julia_version_strict` keyword argument requires at least Julia 1.12.
+
"""
const instantiate = API.instantiate
@@ -524,6 +624,7 @@ Print out the status of the project/manifest.
Packages marked with `⌃` have new versions that can be installed, e.g. via [`Pkg.update`](@ref).
Those marked with `⌅` have new versions available, but cannot be installed due to compatibility conflicts with other packages. To see why, set the
keyword argument `outdated=true`.
+Packages marked with `[yanked]` are yanked versions that should be updated or replaced as they may contain bugs or security vulnerabilities.
Setting `outdated=true` will only show packages that are not on the latest version,
their maximum version and why they are not on the latest version (either due to other
@@ -604,7 +705,7 @@ If no argument is given to `activate`, then use the first project found in `LOAD
`@v#.#` environment.
# Examples
-```
+```julia
Pkg.activate()
Pkg.activate("local/path")
Pkg.activate("MyDependency")
@@ -627,7 +728,7 @@ versions that are already downloaded in version resolution.
To work in offline mode across Julia sessions you can set the environment
variable `JULIA_PKG_OFFLINE` to `"true"` before starting Julia.
"""
-offline(b::Bool=true) = (OFFLINE_MODE[] = b; nothing)
+offline(b::Bool = true) = (OFFLINE_MODE[] = b; nothing)
"""
Pkg.respect_sysimage_versions(b::Bool=true)
@@ -640,7 +741,7 @@ If this option is enabled, Pkg will only install packages that have been put int
Also, trying to add a package at a URL or `develop` a package that is in the sysimage
will error.
"""
-respect_sysimage_versions(b::Bool=true) = (RESPECT_SYSIMAGE_VERSIONS[] = b; nothing)
+respect_sysimage_versions(b::Bool = true) = (RESPECT_SYSIMAGE_VERSIONS[] = b; nothing)
"""
PackageSpec(name::String, [uuid::UUID, version::VersionNumber])
@@ -701,7 +802,17 @@ Other choices for `protocol` are `"https"` or `"git"`.
```julia-repl
julia> Pkg.setprotocol!(domain = "github.com", protocol = "ssh")
+# Use HTTPS for GitHub (default, good for most users)
+julia> Pkg.setprotocol!(domain = "github.com", protocol = "https")
+
+# Reset to default (let package developer decide)
+julia> Pkg.setprotocol!(domain = "github.com", protocol = nothing)
+
+# Set protocol for custom domain without specifying protocol
julia> Pkg.setprotocol!(domain = "gitlab.mycompany.com")
+
+# Use Git protocol for a custom domain
+julia> Pkg.setprotocol!(domain = "gitlab.mycompany.com", protocol = "git")
```
"""
const setprotocol! = API.setprotocol!
@@ -776,19 +887,15 @@ If the manifest doesn't have the project hash recorded, or if there is no manife
This function can be used in tests to verify that the manifest is synchronized with the project file:
- using Pkg, Test, Package
- @test Pkg.is_manifest_current(pkgdir(Package))
+```julia
+using Pkg, Test
+@test Pkg.is_manifest_current(pwd()) # Check current project
+@test Pkg.is_manifest_current("/path/to/project") # Check specific project
+```
"""
const is_manifest_current = API.is_manifest_current
function __init__()
- DEFAULT_IO[] = nothing
- Pkg.UPDATED_REGISTRY_THIS_SESSION[] = false
- if !isassigned(Base.PKG_PRECOMPILE_HOOK)
- # allows Base to use Pkg.precompile during loading
- # disable via `Base.PKG_PRECOMPILE_HOOK[] = Returns(nothing)`
- Base.PKG_PRECOMPILE_HOOK[] = precompile
- end
OFFLINE_MODE[] = Base.get_bool_env("JULIA_PKG_OFFLINE", false)
_auto_gc_enabled[] = Base.get_bool_env("JULIA_PKG_GC_AUTO", true)
return nothing
@@ -799,7 +906,7 @@ end
################
function installed()
- @warn "Pkg.installed() is deprecated"
+ @warn "`Pkg.installed()` is deprecated. Use `Pkg.dependencies()` instead." maxlog = 1
deps = dependencies()
installs = Dict{String, VersionNumber}()
for (uuid, dep) in deps
@@ -811,7 +918,7 @@ function installed()
end
function dir(pkg::String, paths::AbstractString...)
- @warn "`Pkg.dir(pkgname, paths...)` is deprecated; instead, do `import $pkg; joinpath(dirname(pathof($pkg)), \"..\", paths...)`." maxlog=1
+ @warn "`Pkg.dir(pkgname, paths...)` is deprecated; instead, do `import $pkg; joinpath(dirname(pathof($pkg)), \"..\", paths...)`." maxlog = 1
pkgid = Base.identify_package(pkg)
pkgid === nothing && return nothing
path = Base.locate_package(pkgid)
@@ -823,7 +930,7 @@ end
# AUTO GC #
###########
-const DEPOT_ORPHANAGE_TIMESTAMPS = Dict{String,Float64}()
+const DEPOT_ORPHANAGE_TIMESTAMPS = Dict{String, Float64}()
const _auto_gc_enabled = Ref{Bool}(true)
function _auto_gc(ctx::Types.Context; collect_delay::Period = Day(7))
if !_auto_gc_enabled[]
@@ -842,13 +949,13 @@ function _auto_gc(ctx::Types.Context; collect_delay::Period = Day(7))
DEPOT_ORPHANAGE_TIMESTAMPS[depots1()] = mtime(orphanage_path)
end
- if curr_time - DEPOT_ORPHANAGE_TIMESTAMPS[depots1()] > delay_secs
+ return if curr_time - DEPOT_ORPHANAGE_TIMESTAMPS[depots1()] > delay_secs
printpkgstyle(ctx.io, :Info, "We haven't cleaned this depot up for a bit, running Pkg.gc()...", color = Base.info_color())
try
Pkg.gc(ctx; collect_delay)
DEPOT_ORPHANAGE_TIMESTAMPS[depots1()] = curr_time
catch ex
- @error("GC failed", exception=ex)
+ @error("GC failed", exception = ex)
end
end
end
@@ -858,12 +965,18 @@ end
# Precompilation #
##################
-function _auto_precompile(ctx::Types.Context, pkgs::Vector{PackageSpec}=PackageSpec[]; warn_loaded = true, already_instantiated = false)
- if should_autoprecompile()
- Pkg.precompile(ctx, pkgs; internal_call=true, warn_loaded = warn_loaded, already_instantiated = already_instantiated)
+function _auto_precompile(ctx::Types.Context, pkgs::Vector{PackageSpec} = PackageSpec[]; warn_loaded = true, already_instantiated = false)
+ return if should_autoprecompile()
+ Pkg.precompile(ctx, pkgs; internal_call = true, warn_loaded = warn_loaded, already_instantiated = already_instantiated)
end
end
include("precompile.jl")
+# Reset globals that might have been mutated during precompilation.
+DEFAULT_IO[] = nothing
+Pkg.UPDATED_REGISTRY_THIS_SESSION[] = false
+PREV_ENV_PATH[] = ""
+Types.STDLIB[] = nothing
+
end # module
diff --git a/src/PlatformEngines.jl b/src/PlatformEngines.jl
index 98f1934559..22967bdf1a 100644
--- a/src/PlatformEngines.jl
+++ b/src/PlatformEngines.jl
@@ -5,7 +5,7 @@
module PlatformEngines
using SHA, Downloads, Tar
-import ...Pkg: Pkg, TOML, pkg_server, depots1, can_fancyprint, stderr_f
+import ...Pkg: Pkg, TOML, pkg_server, depots1, can_fancyprint, stderr_f, atomic_toml_write
using ..MiniProgressBars
using Base.BinaryPlatforms, p7zip_jll
@@ -20,7 +20,7 @@ function exe7z()
return p7zip_jll.p7zip()
end
- lock(EXE7Z_LOCK) do
+ return lock(EXE7Z_LOCK) do
if !isassigned(EXE7Z)
EXE7Z[] = find7z()
end
@@ -44,9 +44,9 @@ is_secure_url(url::AbstractString) =
occursin(r"^(https://|\w+://(127\.0\.0\.1|localhost)(:\d+)?($|/))"i, url)
function get_server_dir(
- url :: AbstractString,
- server :: Union{AbstractString, Nothing} = pkg_server(),
-)
+ url::AbstractString,
+ server::Union{AbstractString, Nothing} = pkg_server(),
+ )
server === nothing && return
url == server || startswith(url, "$server/") || return
m = match(r"^\w+://([^\\/]+)(?:$|/)", server)
@@ -60,7 +60,7 @@ function get_server_dir(
return joinpath(depots1(), "servers", dir)
end
-const AUTH_ERROR_HANDLERS = Pair{Union{String, Regex},Any}[]
+const AUTH_ERROR_HANDLERS = Pair{Union{String, Regex}, Any}[]
function handle_auth_error(url, err; verbose::Bool = false)
handled, should_retry = false, false
@@ -111,23 +111,23 @@ function get_auth_header(url::AbstractString; verbose::Bool = false)
server_dir = get_server_dir(url)
server_dir === nothing && return
auth_file = joinpath(server_dir, "auth.toml")
- isfile(auth_file) || return handle_auth_error(url, "no-auth-file"; verbose=verbose)
+ isfile(auth_file) || return handle_auth_error(url, "no-auth-file"; verbose = verbose)
# TODO: check for insecure auth file permissions
if !is_secure_url(url)
- @warn "refusing to send auth info over insecure connection" url=url
- return handle_auth_error(url, "insecure-connection"; verbose=verbose)
+ @warn "refusing to send auth info over insecure connection" url = url
+ return handle_auth_error(url, "insecure-connection"; verbose = verbose)
end
# parse the auth file
auth_info = try
TOML.parsefile(auth_file)
catch err
- @error "malformed auth file" file=auth_file err=err
- return handle_auth_error(url, "malformed-file"; verbose=verbose)
+ @error "malformed auth file" file = auth_file err = err
+ return handle_auth_error(url, "malformed-file"; verbose = verbose)
end
# check for an auth token
if !haskey(auth_info, "access_token")
- @warn "auth file without access_token field" file=auth_file
- return handle_auth_error(url, "no-access-token"; verbose=verbose)
+ @warn "auth file without access_token field" file = auth_file
+ return handle_auth_error(url, "no-access-token"; verbose = verbose)
end
auth_token = auth_info["access_token"]::String
auth_header = "Authorization" => "Bearer $auth_token"
@@ -141,44 +141,46 @@ function get_auth_header(url::AbstractString; verbose::Bool = false)
end
# if token is good until ten minutes from now, use it
time_now = time()
- if expires_at ≥ time_now + 10*60 # ten minutes
+ if expires_at ≥ time_now + 10 * 60 # ten minutes
return auth_header
end
if !haskey(auth_info, "refresh_url") || !haskey(auth_info, "refresh_token")
- if expires_at ≤ time_now
- @warn "expired auth without refresh keys" file=auth_file
+ if expires_at ≤ time_now
+ @warn "expired auth without refresh keys" file = auth_file
end
# try it anyway since we can't refresh
- return something(handle_auth_error(url, "no-refresh-key"; verbose=verbose), auth_header)
+ return something(handle_auth_error(url, "no-refresh-key"; verbose = verbose), auth_header)
end
refresh_url = auth_info["refresh_url"]::String
if !is_secure_url(refresh_url)
- @warn "ignoring insecure auth refresh URL" url=refresh_url
- return something(handle_auth_error(url, "insecure-refresh-url"; verbose=verbose), auth_header)
+ @warn "ignoring insecure auth refresh URL" url = refresh_url
+ return something(handle_auth_error(url, "insecure-refresh-url"; verbose = verbose), auth_header)
end
- verbose && @info "Refreshing expired auth token..." file=auth_file
+ verbose && @info "Refreshing expired auth token..." file = auth_file
tmp = tempname()
refresh_token = auth_info["refresh_token"]::String
refresh_auth = "Authorization" => "Bearer $refresh_token"
- try download(refresh_url, tmp, auth_header=refresh_auth, verbose=verbose)
+ try
+ download(refresh_url, tmp, auth_header = refresh_auth, verbose = verbose)
catch err
- @warn "token refresh failure" file=auth_file url=refresh_url err=err
- rm(tmp, force=true)
- return handle_auth_error(url, "token-refresh-failed"; verbose=verbose)
+ @warn "token refresh failure" file = auth_file url = refresh_url err = err
+ rm(tmp, force = true)
+ return handle_auth_error(url, "token-refresh-failed"; verbose = verbose)
end
- auth_info = try TOML.parsefile(tmp)
+ auth_info = try
+ TOML.parsefile(tmp)
catch err
- @warn "discarding malformed auth file" url=refresh_url err=err
- rm(tmp, force=true)
- return something(handle_auth_error(url, "malformed-file"; verbose=verbose), auth_header)
+ @warn "discarding malformed auth file" url = refresh_url err = err
+ rm(tmp, force = true)
+ return something(handle_auth_error(url, "malformed-file"; verbose = verbose), auth_header)
end
if !haskey(auth_info, "access_token")
if haskey(auth_info, "refresh_token")
auth_info["refresh_token"] = "*"^64
end
- @warn "discarding auth file without access token" auth=auth_info
- rm(tmp, force=true)
- return something(handle_auth_error(url, "no-access-token"; verbose=verbose), auth_header)
+ @warn "discarding auth file without access token" auth = auth_info
+ rm(tmp, force = true)
+ return something(handle_auth_error(url, "no-access-token"; verbose = verbose), auth_header)
end
if haskey(auth_info, "expires_in")
expires_in = auth_info["expires_in"]
@@ -188,12 +190,7 @@ function get_auth_header(url::AbstractString; verbose::Bool = false)
auth_info["expires_at"] = expires_at
end
end
- let auth_info = auth_info
- open(tmp, write=true) do io
- TOML.print(io, auth_info, sorted=true)
- end
- end
- mv(tmp, auth_file, force=true)
+ atomic_toml_write(auth_file, auth_info, sorted = true)
access_token = auth_info["access_token"]::String
return "Authorization" => "Bearer $access_token"
end
@@ -216,7 +213,7 @@ const CI_VARIABLES = [
]
function get_metadata_headers(url::AbstractString)
- headers = Pair{String,String}[]
+ headers = Pair{String, String}[]
server = pkg_server()
server_dir = get_server_dir(url, server)
server_dir === nothing && return headers
@@ -240,7 +237,7 @@ function get_metadata_headers(url::AbstractString)
m === nothing && continue
val = strip(val)
isempty(val) && continue
- words = split(m.captures[1], '_', keepempty=false)
+ words = split(m.captures[1], '_', keepempty = false)
isempty(words) && continue
hdr = "Julia-" * join(map(titlecase, words), '-')
any(hdr == k for (k, v) in headers) && continue
@@ -250,16 +247,16 @@ function get_metadata_headers(url::AbstractString)
end
function download(
- url::AbstractString,
- dest::AbstractString;
- verbose::Bool = false,
- headers::Vector{Pair{String,String}} = Pair{String,String}[],
- auth_header::Union{Pair{String,String}, Nothing} = nothing,
- io::IO=stderr_f(),
- progress::Union{Nothing,Function} = nothing, # (total, now) -> nothing
-)
+ url::AbstractString,
+ dest::AbstractString;
+ verbose::Bool = false,
+ headers::Vector{Pair{String, String}} = Pair{String, String}[],
+ auth_header::Union{Pair{String, String}, Nothing} = nothing,
+ io::IO = stderr_f(),
+ progress::Union{Nothing, Function} = nothing, # (total, now) -> nothing
+ )
if auth_header === nothing
- auth_header = get_auth_header(url, verbose=verbose)
+ auth_header = get_auth_header(url, verbose = verbose)
end
if auth_header !== nothing
push!(headers, auth_header)
@@ -272,9 +269,9 @@ function download(
progress = if !isnothing(progress)
progress
elseif do_fancy
- bar = MiniProgressBar(header="Downloading", color=Base.info_color())
+ bar = MiniProgressBar(header = "Downloading", color = Base.info_color())
start_progress(io, bar)
- let bar=bar
+ let bar = bar
(total, now) -> begin
bar.max = total
bar.current = now
@@ -288,7 +285,7 @@ function download(
else
nothing
end
- try
+ return try
Downloads.download(url, dest; headers, progress)
finally
do_fancy && end_progress(io, bar)
@@ -323,14 +320,14 @@ set to `false`) the downloading process will be completely silent. If
printed in addition to messages regarding downloading.
"""
function download_verify(
- url::AbstractString,
- hash::Union{AbstractString, Nothing},
- dest::AbstractString;
- verbose::Bool = false,
- force::Bool = false,
- quiet_download::Bool = false,
- progress::Union{Nothing,Function} = nothing, # (total, now) -> nothing
-)
+ url::AbstractString,
+ hash::Union{AbstractString, Nothing},
+ dest::AbstractString;
+ verbose::Bool = false,
+ force::Bool = false,
+ quiet_download::Bool = false,
+ progress::Union{Nothing, Function} = nothing, # (total, now) -> nothing
+ )
# Whether the file existed in the first place
file_existed = false
@@ -342,7 +339,7 @@ function download_verify(
# verify download, if it passes, return happy. If it fails, (and
# `force` is `true`, re-download!)
- if hash !== nothing && verify(dest, hash; verbose=verbose)
+ if hash !== nothing && verify(dest, hash; verbose = verbose)
return true
elseif !force
error("Verification failed, not overwriting $(dest)")
@@ -356,7 +353,7 @@ function download_verify(
attempts = 3
for i in 1:attempts
try
- download(url, dest; verbose=verbose || !quiet_download, progress)
+ download(url, dest; verbose = verbose || !quiet_download, progress)
break
catch err
@debug "download and verify failed on attempt $i/$attempts" url dest err
@@ -377,10 +374,10 @@ function download_verify(
if verbose
@info("Continued download didn't work, restarting from scratch")
end
- Base.rm(dest; force=true)
+ Base.rm(dest; force = true)
# Download and verify from scratch
- download(url, dest; verbose=verbose || !quiet_download)
+ download(url, dest; verbose = verbose || !quiet_download)
if hash !== nothing && !verify(dest, hash; verbose, details)
@goto verification_failed
end
@@ -402,16 +399,16 @@ end
# TODO: can probably delete this, only affects tests
function copy_symlinks()
var = get(ENV, "BINARYPROVIDER_COPYDEREF", "")
- lowercase(var) in ("true", "t", "yes", "y", "1") ? true :
- lowercase(var) in ("false", "f", "no", "n", "0") ? false : nothing
+ return lowercase(var) in ("true", "t", "yes", "y", "1") ? true :
+ lowercase(var) in ("false", "f", "no", "n", "0") ? false : nothing
end
function unpack(
- tarball_path::AbstractString,
- dest::AbstractString;
- verbose::Bool = false,
-)
- Tar.extract(`$(exe7z()) x $tarball_path -so`, dest, copy_symlinks = copy_symlinks())
+ tarball_path::AbstractString,
+ dest::AbstractString;
+ verbose::Bool = false,
+ )
+ return Tar.extract(`$(exe7z()) x $tarball_path -so`, dest, copy_symlinks = copy_symlinks())
end
"""
@@ -419,10 +416,10 @@ end
Compress `src_dir` into a tarball located at `tarball_path`.
"""
-function package(src_dir::AbstractString, tarball_path::AbstractString; io=stderr_f())
- rm(tarball_path, force=true)
+function package(src_dir::AbstractString, tarball_path::AbstractString; io = stderr_f())
+ rm(tarball_path, force = true)
cmd = `$(exe7z()) a -si -tgzip -mx9 $tarball_path`
- open(pipeline(cmd, stdout=devnull, stderr=io), write=true) do io
+ return open(pipeline(cmd, stdout = devnull, stderr = io), write = true) do io
Tar.create(src_dir, io)
end
end
@@ -464,17 +461,17 @@ Returns `true` if a tarball was actually unpacked, `false` if nothing was
changed in the destination prefix.
"""
function download_verify_unpack(
- url::AbstractString,
- hash::Union{AbstractString, Nothing},
- dest::AbstractString;
- tarball_path = nothing,
- ignore_existence::Bool = false,
- force::Bool = false,
- verbose::Bool = false,
- quiet_download::Bool = false,
- io::IO=stderr_f(),
- progress::Union{Nothing,Function} = nothing, # (total, now) -> nothing
-)
+ url::AbstractString,
+ hash::Union{AbstractString, Nothing},
+ dest::AbstractString;
+ tarball_path = nothing,
+ ignore_existence::Bool = false,
+ force::Bool = false,
+ verbose::Bool = false,
+ quiet_download::Bool = false,
+ io::IO = stderr_f(),
+ progress::Union{Nothing, Function} = nothing, # (total, now) -> nothing
+ )
# First, determine whether we should keep this tarball around
remove_tarball = false
if tarball_path === nothing
@@ -495,7 +492,7 @@ function download_verify_unpack(
return nothing
end
- return url[dot_idx+1:end]
+ return url[(dot_idx + 1):end]
end
# If extension of url contains a recognized extension, use it, otherwise use ".gz"
@@ -523,7 +520,7 @@ function download_verify_unpack(
if verbose
@info("Removing dest directory $(dest) as source tarball changed")
end
- Base.rm(dest; recursive=true, force=true)
+ Base.rm(dest; recursive = true, force = true)
end
# If the destination path already exists, don't bother to unpack
@@ -540,7 +537,7 @@ function download_verify_unpack(
if verbose
@info("Unpacking $(tarball_path) into $(dest)...")
end
- isnothing(progress) || progress(10000, 10000; status="unpacking")
+ isnothing(progress) || progress(10000, 10000; status = "unpacking")
open(`$(exe7z()) x $tarball_path -so`) do io
Tar.extract(io, dest, copy_symlinks = copy_symlinks())
end
@@ -548,7 +545,7 @@ function download_verify_unpack(
if remove_tarball
Base.rm(tarball_path)
# Remove cached tarball hash, if it exists.
- Base.rm(string(tarball_path, ".sha256"); force=true)
+ Base.rm(string(tarball_path, ".sha256"); force = true)
end
end
@@ -579,9 +576,11 @@ successfully.
If `details` is provided, any pertinent detail will be pushed to it rather than logged.
"""
-function verify(path::AbstractString, hash::AbstractString; verbose::Bool = false,
- report_cache_status::Bool = false, hash_path::AbstractString="$(path).sha256",
- details::Union{Vector{String},Nothing} = nothing)
+function verify(
+ path::AbstractString, hash::AbstractString; verbose::Bool = false,
+ report_cache_status::Bool = false, hash_path::AbstractString = "$(path).sha256",
+ details::Union{Vector{String}, Nothing} = nothing
+ )
# Check hash string format
if !occursin(r"^[0-9a-f]{64}$"i, hash)
@@ -648,7 +647,7 @@ function verify(path::AbstractString, hash::AbstractString; verbose::Bool = fals
end
if calc_hash != hash
- msg = "Hash Mismatch!\n"
+ msg = "Hash Mismatch!\n"
msg *= " Expected sha256: $hash\n"
msg *= " Calculated sha256: $calc_hash"
if isnothing(details)
@@ -693,7 +692,7 @@ function verify_archive_tree_hash(tar_gz::AbstractString, expected_hash::Base.SH
calc_hash = try
Base.SHA1(open(Tar.tree_hash, `$(exe7z()) x $tar_gz -so`))
catch err
- @warn "unable to decompress and read archive" exception=err
+ @warn "unable to decompress and read archive" exception = err
return false
end
if calc_hash != expected_hash
diff --git a/src/REPLMode/REPLMode.jl b/src/REPLMode/REPLMode.jl
index aba9ef4dd8..d3b196aae3 100644
--- a/src/REPLMode/REPLMode.jl
+++ b/src/REPLMode/REPLMode.jl
@@ -6,8 +6,8 @@ module REPLMode
using Markdown, UUIDs, Dates
-import ..casesensitive_isdir, ..OFFLINE_MODE, ..linewrap, ..pathrepr
-using ..Types, ..Operations, ..API, ..Registry, ..Resolve
+import ..OFFLINE_MODE, ..linewrap, ..pathrepr, ..IN_REPL_MODE
+using ..Types, ..Operations, ..API, ..Registry, ..Resolve, ..Apps
import ..stdout_f, ..stderr_f
@@ -21,10 +21,10 @@ const PRINTED_REPL_WARNING = Ref{Bool}(false)
#---------#
# Options #
#---------#
-const OptionDeclaration = Vector{Pair{Symbol,Any}}
+const OptionDeclaration = Vector{Pair{Symbol, Any}}
struct OptionSpec
name::String
- short_name::Union{Nothing,String}
+ short_name::Union{Nothing, String}
api::Pair{Symbol, Any}
takes_arg::Bool
end
@@ -32,10 +32,12 @@ end
# TODO assert names matching lex regex
# assert now so that you don't fail at user time
# see function `REPLMode.api_options`
-function OptionSpec(;name::String,
- short_name::Union{Nothing,String}=nothing,
- takes_arg::Bool=false,
- api::Pair{Symbol,<:Any})::OptionSpec
+function OptionSpec(;
+ name::String,
+ short_name::Union{Nothing, String} = nothing,
+ takes_arg::Bool = false,
+ api::Pair{Symbol, <:Any}
+ )::OptionSpec
takes_arg && @assert hasmethod(api.second, Tuple{String})
return OptionSpec(name, short_name, api, takes_arg)
end
@@ -43,7 +45,7 @@ end
function OptionSpecs(decs::Vector{OptionDeclaration})
specs = Dict{String, OptionSpec}()
for x in decs
- opt_spec = OptionSpec(;x...)
+ opt_spec = OptionSpec(; x...)
@assert !haskey(specs, opt_spec.name) # don't overwrite
specs[opt_spec.name] = opt_spec
if opt_spec.short_name !== nothing
@@ -65,43 +67,46 @@ end
#----------#
# Commands #
#----------#
-const CommandDeclaration = Vector{Pair{Symbol,Any}}
+const CommandDeclaration = Vector{Pair{Symbol, Any}}
mutable struct CommandSpec
const canonical_name::String
- const short_name::Union{Nothing,String}
+ const short_name::Union{Nothing, String}
const api::Function
const should_splat::Bool
const argument_spec::ArgSpec
- const option_specs::Dict{String,OptionSpec}
- completions::Union{Nothing,Symbol,Function} # Symbol is used as a marker for REPLExt to assign the function of that name
+ const option_specs::Dict{String, OptionSpec}
+ completions::Union{Nothing, Symbol, Function} # Symbol is used as a marker for REPLExt to assign the function of that name
const description::String
- const help::Union{Nothing,Markdown.MD}
+ const help::Union{Nothing, Markdown.MD}
end
default_parser(xs, options) = unwrap(xs)
-function CommandSpec(;name::Union{Nothing,String} = nothing,
- short_name::Union{Nothing,String} = nothing,
- api::Union{Nothing,Function} = nothing,
- should_splat::Bool = true,
- option_spec::Vector{OptionDeclaration} = OptionDeclaration[],
- help::Union{Nothing,Markdown.MD} = nothing,
- description::Union{Nothing,String} = nothing,
- completions::Union{Nothing,Symbol,Function} = nothing,
- arg_count::Pair = (0=>0),
- arg_parser::Function = default_parser,
- )::CommandSpec
+function CommandSpec(;
+ name::Union{Nothing, String} = nothing,
+ short_name::Union{Nothing, String} = nothing,
+ api::Union{Nothing, Function} = nothing,
+ should_splat::Bool = true,
+ option_spec::Vector{OptionDeclaration} = OptionDeclaration[],
+ help::Union{Nothing, Markdown.MD} = nothing,
+ description::Union{Nothing, String} = nothing,
+ completions::Union{Nothing, Symbol, Function} = nothing,
+ arg_count::Pair = (0 => 0),
+ arg_parser::Function = default_parser,
+ )::CommandSpec
name === nothing && error("Supply a canonical name")
description === nothing && error("Supply a description")
api === nothing && error("Supply API dispatch function for `$(name)`")
# TODO assert isapplicable completions dict, string
- return CommandSpec(name, short_name, api, should_splat, ArgSpec(arg_count, arg_parser),
- OptionSpecs(option_spec), completions, description, help)
+ return CommandSpec(
+ name, short_name, api, should_splat, ArgSpec(arg_count, arg_parser),
+ OptionSpecs(option_spec), completions, description, help
+ )
end
function CommandSpecs(declarations::Vector{CommandDeclaration})
- specs = Dict{String,CommandSpec}()
+ specs = Dict{String, CommandSpec}()
for dec in declarations
- spec = CommandSpec(;dec...)
+ spec = CommandSpec(; dec...)
@assert !haskey(specs, spec.canonical_name) "duplicate spec entry"
specs[spec.canonical_name] = spec
if spec.short_name !== nothing
@@ -113,7 +118,7 @@ function CommandSpecs(declarations::Vector{CommandDeclaration})
end
function CompoundSpecs(compound_declarations)
- compound_specs = Dict{String,Dict{String,CommandSpec}}()
+ compound_specs = Dict{String, Dict{String, CommandSpec}}()
for (name, command_declarations) in compound_declarations
specs = CommandSpecs(command_declarations)
@assert !haskey(compound_specs, name) "duplicate super spec entry"
@@ -138,19 +143,19 @@ unwrap(xs::Vector{QString}) = map(x -> x.raw, xs)
#---------#
struct Option
val::String
- argument::Union{Nothing,String}
+ argument::Union{Nothing, String}
Option(val::AbstractString) = new(val, nothing)
- Option(val::AbstractString, arg::Union{Nothing,String}) = new(val, arg)
+ Option(val::AbstractString, arg::Union{Nothing, String}) = new(val, arg)
end
Base.show(io::IO, opt::Option) = print(io, "--$(opt.val)", opt.argument === nothing ? "" : "=$(opt.argument)")
-wrap_option(option::String) = length(option) == 1 ? "-$option" : "--$option"
+wrap_option(option::String) = length(option) == 1 ? "-$option" : "--$option"
is_opt(word::AbstractString) = first(word) == '-' && word != "-"
function parse_option(word::AbstractString)::Option
m = match(r"^(?: -([a-z]) | --((?:[a-z]{1,}-?)*)(?:\s*=\s*(\S*))? )$"ix, word)
m === nothing && pkgerror("malformed option: ", repr(word))
option_name = m.captures[1] !== nothing ? something(m.captures[1]) : something(m.captures[2])
- option_arg = m.captures[3] === nothing ? nothing : String(something(m.captures[3]))
+ option_arg = m.captures[3] === nothing ? nothing : String(something(m.captures[3]))
return Option(option_name, option_arg)
end
@@ -159,14 +164,14 @@ end
#-----------#
# Statement: text-based representation of a command
Base.@kwdef mutable struct Statement
- super::Union{Nothing,String} = nothing
- spec::Union{Nothing,CommandSpec} = nothing
- options::Union{Vector{Option},Vector{String}} = String[]
- arguments::Vector{QString} = QString[]
+ super::Union{Nothing, String} = nothing
+ spec::Union{Nothing, CommandSpec} = nothing
+ options::Union{Vector{Option}, Vector{String}} = String[]
+ arguments::Vector{QString} = QString[]
end
function lex(cmd::String)::Vector{QString}
- replace_comma = (nothing!=match(r"^(add|dev|develop|rm|remove|status|precompile)+\s", cmd))
+ replace_comma = (nothing != match(r"^(add|dev|develop|rm|remove|status|precompile)+\s", cmd))
in_doublequote = false
in_singlequote = false
qstrings = QString[]
@@ -220,13 +225,16 @@ function lex(cmd::String)::Vector{QString}
(in_doublequote || in_singlequote) ? pkgerror("unterminated quote") : push_token!(false)
# to avoid complexity in the main loop, empty tokens are allowed above and
# filtered out before returning
- return filter(x->!isempty(x.raw), qstrings)
+ return filter(x -> !isempty(x.raw), qstrings)
end
-function tokenize(cmd::String)
+function tokenize(cmd::AbstractString; rm_leading_bracket::Bool = true)
cmd = replace(replace(cmd, "\r\n" => "; "), "\n" => "; ") # for multiline commands
+ if rm_leading_bracket && startswith(cmd, ']')
+ cmd = string(lstrip(cmd, ']'))
+ end
qstrings = lex(cmd)
- statements = foldl(qstrings; init=[QString[]]) do collection, next
+ statements = foldl(qstrings; init = [QString[]]) do collection, next
(next.raw == ";" && !next.isquoted) ?
push!(collection, QString[]) :
push!(collection[end], next)
@@ -235,9 +243,9 @@ function tokenize(cmd::String)
return statements
end
-function core_parse(words::Vector{QString}; only_cmd=false)
+function core_parse(words::Vector{QString}; only_cmd = false)
statement = Statement()
- word::Union{Nothing,QString} = nothing
+ word::Union{Nothing, QString} = nothing
function next_word!()
isempty(words) && return false
word = popfirst!(words)
@@ -248,8 +256,8 @@ function core_parse(words::Vector{QString}; only_cmd=false)
next_word!() || return statement, ((word === nothing) ? nothing : word.raw)
# handle `?` alias for help
# It is special in that it requires no space between command and args
- if word.raw[1]=='?' && !word.isquoted
- length(word.raw) > 1 && pushfirst!(words, QString(word.raw[2:end],false))
+ if word.raw[1] == '?' && !word.isquoted
+ length(word.raw) > 1 && pushfirst!(words, QString(word.raw[2:end], false))
word = QString("?", false)
end
# determine command
@@ -282,12 +290,12 @@ function core_parse(words::Vector{QString}; only_cmd=false)
end
parse(input::String) =
- map(Base.Iterators.filter(!isempty, tokenize(input))) do words
- statement, input_word = core_parse(words)
- statement.spec === nothing && pkgerror("`$input_word` is not a recognized command. Type ? for help with available commands")
- statement.options = map(parse_option, statement.options)
- statement
- end
+ map(Base.Iterators.filter(!isempty, tokenize(strip(input)))) do words
+ statement, input_word = core_parse(words)
+ statement.spec === nothing && pkgerror("`$input_word` is not a recognized command. Type ? for help with available commands")
+ statement.options = map(parse_option, statement.options)
+ statement
+end
#------------#
# APIOptions #
@@ -296,8 +304,10 @@ parse(input::String) =
# Do NOT introduce a constructor for APIOptions
# as long as it's an alias for Dict
const APIOptions = Dict{Symbol, Any}
-function api_options(options::Vector{Option},
- specs::Dict{String, OptionSpec})
+function api_options(
+ options::Vector{Option},
+ specs::Dict{String, OptionSpec}
+ )
api_opts = APIOptions()
enforce_option(options, specs)
for option in options
@@ -315,15 +325,15 @@ Context!(ctx::APIOptions)::Context = Types.Context!(collect(ctx))
# Command #
#---------#
Base.@kwdef struct Command
- spec::Union{Nothing,CommandSpec} = nothing
- options::APIOptions = APIOptions()
- arguments::Vector = []
+ spec::Union{Nothing, CommandSpec} = nothing
+ options::APIOptions = APIOptions()
+ arguments::Vector = []
end
-function enforce_option(option::Option, specs::Dict{String,OptionSpec})
+function enforce_option(option::Option, specs::Dict{String, OptionSpec})
spec = get(specs, option.val, nothing)
spec !== nothing || pkgerror("option '$(option.val)' is not a valid option")
- if spec.takes_arg
+ return if spec.takes_arg
option.argument !== nothing ||
pkgerror("option '$(option.val)' expects an argument, but no argument given")
else # option is a switch
@@ -339,22 +349,23 @@ checks:
- options which take an argument are given arguments
- options which do not take arguments are not given arguments
"""
-function enforce_option(options::Vector{Option}, specs::Dict{String,OptionSpec})
+function enforce_option(options::Vector{Option}, specs::Dict{String, OptionSpec})
unique_keys = Symbol[]
get_key(opt::Option) = specs[opt.val].api.first
# per option checking
- foreach(x->enforce_option(x,specs), options)
+ foreach(x -> enforce_option(x, specs), options)
# checking for compatible options
for opt in options
key = get_key(opt)
if key in unique_keys
- conflicting = filter(opt->get_key(opt) == key, options)
+ conflicting = filter(opt -> get_key(opt) == key, options)
pkgerror("Conflicting options: $conflicting")
else
push!(unique_keys, key)
end
end
+ return
end
"""
@@ -382,7 +393,7 @@ function prepare_cmd(input)
return commands
end
-do_cmds(input::String, io=stdout_f()) = do_cmds(prepare_cmd(input), io)
+do_cmds(input::String, io = stdout_f()) = do_cmds(prepare_cmd(input), io)
function do_cmds(commands::Vector{Command}, io)
@@ -398,24 +409,27 @@ function do_cmds(commands::Vector{Command}, io)
end
function do_cmd(command::Command, io)
- # REPL specific commands
- command.spec === SPECS["package"]["help"] && return Base.invokelatest(do_help!, command, io)
- # API commands
- if command.spec.should_splat
- TEST_MODE[] && return command.spec.api, command.arguments..., command.options
- command.spec.api(command.arguments...; collect(command.options)...) # TODO is invokelatest still needed?
- else
- TEST_MODE[] && return command.spec.api, command.arguments, command.options
- command.spec.api(command.arguments; collect(command.options)...)
+ # Set the scoped value to indicate we're in REPL mode
+ return Base.ScopedValues.@with IN_REPL_MODE => true begin
+ # REPL specific commands
+ command.spec === SPECS["package"]["help"] && return Base.invokelatest(do_help!, command, io)
+ # API commands
+ if command.spec.should_splat
+ TEST_MODE[] && return command.spec.api, command.arguments..., command.options
+ command.spec.api(command.arguments...; collect(command.options)...) # TODO is invokelatest still needed?
+ else
+ TEST_MODE[] && return command.spec.api, command.arguments, command.options
+ command.spec.api(command.arguments; collect(command.options)...)
+ end
end
end
function parse_command(words::Vector{QString})
- statement, word = core_parse(words; only_cmd=true)
+ statement, word = core_parse(words; only_cmd = true)
if statement.super === nothing && statement.spec === nothing
pkgerror("invalid input: `$word` is not a command")
end
- return statement.spec === nothing ? statement.super : statement.spec
+ return statement.spec === nothing ? statement.super : statement.spec
end
function do_help!(command::Command, io)
@@ -428,8 +442,10 @@ function do_help!(command::Command, io)
cmd = parse_command(command.arguments)
if cmd isa String
# gather all helps for super spec `cmd`
- all_specs = sort!(unique(values(SPECS[cmd]));
- by=(spec->spec.canonical_name))
+ all_specs = sort!(
+ unique(values(SPECS[cmd]));
+ by = (spec -> spec.canonical_name)
+ )
for spec in all_specs
isempty(help_md.content) || push!(help_md.content, md"---")
push!(help_md.content, spec.help)
@@ -438,14 +454,14 @@ function do_help!(command::Command, io)
push!(help_md.content, cmd.help)
end
!isempty(command.arguments) && @warn "More than one command specified, only rendering help for first"
- show(io, MIME("text/plain"), help_md)
+ return show(io, MIME("text/plain"), help_md)
end
# Provide a string macro pkg"cmd" that can be used in the same way
# as the REPLMode `pkg> cmd`. Useful for testing and in environments
# where we do not have a REPL, e.g. IJulia.
macro pkg_str(str::String)
- :(pkgstr($str))
+ return :(pkgstr($str))
end
function pkgstr(str::String)
@@ -465,32 +481,32 @@ const SPECS = CompoundSpecs(compound_declarations)
function canonical_names()
# add "package" commands
xs = [(spec.canonical_name => spec) for spec in unique(values(SPECS["package"]))]
- sort!(xs, by=first)
+ sort!(xs, by = first)
# add other super commands, e.g. "registry"
for (super, specs) in SPECS
super != "package" || continue # skip "package"
temp = [(join([super, spec.canonical_name], " ") => spec) for spec in unique(values(specs))]
- append!(xs, sort!(temp, by=first))
+ append!(xs, sort!(temp, by = first))
end
return xs
end
function gen_help()
help = md"""
-**Welcome to the Pkg REPL-mode**. To return to the `julia>` prompt, either press
-backspace when the input line is empty or press Ctrl+C.
+ **Welcome to the Pkg REPL-mode**. To return to the `julia>` prompt, either press
+ backspace when the input line is empty or press Ctrl+C.
-Full documentation available at https://pkgdocs.julialang.org/
+ Full documentation available at https://pkgdocs.julialang.org/
-**Synopsis**
+ **Synopsis**
- pkg> cmd [opts] [args]
+ pkg> cmd [opts] [args]
-Multiple commands can be given on the same line by interleaving a `;` between the commands.
-Some commands have an alias, indicated below.
+ Multiple commands can be given on the same line by interleaving a `;` between the commands.
+ Some commands have an alias, indicated below.
-**Commands**
-"""
+ **Commands**
+ """
for (command, spec) in canonical_names()
short_name = spec.short_name === nothing ? "" : ", `" * spec.short_name::String * '`'
push!(help.content, Markdown.parse("`$command`$short_name: $(spec.description)"))
diff --git a/src/REPLMode/argument_parsers.jl b/src/REPLMode/argument_parsers.jl
index c0f284a4b0..f569b8614c 100644
--- a/src/REPLMode/argument_parsers.jl
+++ b/src/REPLMode/argument_parsers.jl
@@ -1,4 +1,5 @@
import ..isdir_nothrow, ..Registry.RegistrySpec, ..isurl
+using UUIDs
struct PackageIdentifier
val::String
@@ -16,112 +17,396 @@ struct Subdir
dir::String
end
-const PackageToken = Union{PackageIdentifier,
- VersionToken,
- Rev,
- Subdir}
-
-packagetoken(word::String)::PackageToken =
- first(word) == '@' ? VersionToken(word[2:end]) :
- first(word) == '#' ? Rev(word[2:end]) :
- first(word) == ':' ? Subdir(word[2:end]) :
- PackageIdentifier(word)
-
-###############
-# PackageSpec #
-###############
-"""
-Parser for PackageSpec objects.
-"""
-function parse_package(args::Vector{QString}, options; add_or_dev=false)::Vector{PackageSpec}
- words′ = package_lex(args)
- words = String[]
- for word in words′
- if (m = match(r"https://github.com/(.*?)/(.*?)/(?:tree|commit)/(.*?)$", word)) !== nothing
- push!(words, "https://github.com/$(m.captures[1])/$(m.captures[2])")
- push!(words, "#$(m.captures[3])")
- else
- push!(words, word)
+const PackageToken = Union{
+ PackageIdentifier,
+ VersionToken,
+ Rev,
+ Subdir,
+}
+
+# Check if a string is a valid UUID
+function is_valid_uuid(str::String)
+ try
+ UUID(str)
+ return true
+ catch
+ return false
+ end
+end
+
+# Simple URL detection
+function looks_like_url(str::String)
+ return startswith(str, "http://") || startswith(str, "https://") ||
+ startswith(str, "git@") || startswith(str, "ssh://") ||
+ contains(str, ".git")
+end
+
+# Simple path detection
+function looks_like_path(str::String)
+ return contains(str, '/') || contains(str, '\\') || str == "." || str == ".." ||
+ (length(str) >= 2 && isletter(str[1]) && str[2] == ':') # Windows drive letters
+end
+
+# Check if a string looks like a complete URL
+function looks_like_complete_url(str::String)
+ return (
+ startswith(str, "http://") || startswith(str, "https://") ||
+ startswith(str, "git@") || startswith(str, "ssh://")
+ ) &&
+ (contains(str, '.') || contains(str, '/'))
+end
+
+# Check if a colon at given position is part of a Windows drive letter
+function is_windows_drive_colon(input::String, colon_pos::Int)
+ # Windows drive letters are single letters followed by colon at beginning
+ # Examples: "C:", "D:", etc.
+ if colon_pos == 2 && length(input) >= 2
+ first_char = input[1]
+ return isletter(first_char) && input[2] == ':'
+ end
+ return false
+end
+
+# Extract subdir specifier from the end of input (rightmost : that's not a Windows drive letter)
+function extract_subdir(input::String)
+ colon_pos = findlast(':', input)
+ if colon_pos === nothing
+ return input, nothing
+ end
+
+ # Skip Windows drive letters (e.g., C:, D:)
+ if is_windows_drive_colon(input, colon_pos)
+ return input, nothing
+ end
+
+ subdir_part = input[nextind(input, colon_pos):end]
+ remaining = input[1:prevind(input, colon_pos)]
+ return remaining, subdir_part
+end
+
+# Extract revision specifier from input (first # that separates base from revision)
+function extract_revision(input::String)
+ hash_pos = findfirst('#', input)
+ if hash_pos === nothing
+ return input, nothing
+ end
+
+ rev_part = input[nextind(input, hash_pos):end]
+ remaining = input[1:prevind(input, hash_pos)]
+ return remaining, rev_part
+end
+
+# Extract version specifier from the end of input (rightmost @)
+function extract_version(input::String)
+ at_pos = findlast('@', input)
+ if at_pos === nothing
+ return input, nothing
+ end
+
+ version_part = input[nextind(input, at_pos):end]
+ remaining = input[1:prevind(input, at_pos)]
+ return remaining, version_part
+end
+
+function preprocess_github_url(input::String)
+ # Handle GitHub tree/commit URLs
+ if (m = match(r"https://github.com/(.*?)/(.*?)/(?:tree|commit)/(.*?)$", input)) !== nothing
+ return [PackageIdentifier("https://github.com/$(m.captures[1])/$(m.captures[2])"), Rev(m.captures[3])]
+ # Handle GitHub pull request URLs
+ elseif (m = match(r"https://github.com/(.*?)/(.*?)/pull/(\d+)$", input)) !== nothing
+ return [PackageIdentifier("https://github.com/$(m.captures[1])/$(m.captures[2])"), Rev("pull/$(m.captures[3])/head")]
+ else
+ return nothing
+ end
+end
+
+# Check if a colon in a URL string is part of URL structure (not a subdir separator)
+function is_url_structure_colon(input::String, colon_pos::Int)
+ after_colon = input[nextind(input, colon_pos):end]
+
+ # Check for git@host:path syntax
+ if startswith(input, "git@")
+ at_pos = findfirst('@', input)
+ if at_pos !== nothing
+ between_at_colon = input[nextind(input, at_pos):prevind(input, colon_pos)]
+ if !contains(between_at_colon, '/')
+ return true
+ end
end
end
- args = PackageToken[packagetoken(pkgword) for pkgword in words]
- return parse_package_args(args; add_or_dev=add_or_dev)
+ # Check for protocol:// syntax
+ if colon_pos <= lastindex(input) - 2
+ next_pos = nextind(input, colon_pos)
+ if next_pos <= lastindex(input) - 1 &&
+ input[colon_pos:nextind(input, nextind(input, colon_pos))] == "://"
+ return true
+ end
+ end
+
+ # Check for user:password@ syntax (: followed by text then @)
+ if contains(after_colon, '@')
+ at_in_after = findfirst('@', after_colon)
+ if at_in_after !== nothing
+ text_before_at = after_colon[1:prevind(after_colon, at_in_after)]
+ if !contains(text_before_at, '/')
+ return true
+ end
+ end
+ end
+
+ # Check for port numbers (: followed by digits then /)
+ if occursin(r"^\d+(/|$)", after_colon)
+ return true
+ end
+
+ return false
+end
+
+# Extract subdir from URL, being careful about URL structure
+function extract_url_subdir(input::String)
+ colon_pos = findlast(':', input)
+ if colon_pos === nothing
+ return input, nothing
+ end
+
+ # Check if this colon is part of URL structure
+ if is_url_structure_colon(input, colon_pos)
+ return input, nothing
+ end
+
+ after_colon = input[nextind(input, colon_pos):end]
+ before_colon = input[1:prevind(input, colon_pos)]
+
+ # Only treat as subdir if it looks like one and the part before looks like a URL
+ if (contains(after_colon, '/') || (!contains(after_colon, '@') && !contains(after_colon, '#'))) &&
+ (contains(before_colon, "://") || contains(before_colon, ".git") || contains(before_colon, '@'))
+ return before_colon, after_colon
+ end
+
+ return input, nothing
+end
+
+# Extract revision from URL, only after a complete URL
+function extract_url_revision(input::String)
+ hash_pos = findfirst('#', input)
+ if hash_pos === nothing
+ return input, nothing
+ end
+
+ before_hash = input[1:prevind(input, hash_pos)]
+ after_hash = input[nextind(input, hash_pos):end]
+
+ if looks_like_complete_url(before_hash)
+ return before_hash, after_hash
+ end
+
+ return input, nothing
end
- # Match a git repository URL. This includes uses of `@` and `:` but
- # requires that it has `.git` at the end.
-let url = raw"((git|ssh|http(s)?)|(git@[\w\-\.]+))(:(//)?)([\w\.@\:/\-~]+)(\.git$)(/)?",
+# Parse URLs with specifiers
+# URLs can only have revisions (#) and subdirs (:), NOT versions (@)
+function parse_url_with_specifiers(input::String)
+ tokens = PackageToken[]
+ remaining = input
- # Match a `NAME=UUID` package specifier.
- name_uuid = raw"[^@\#\s:]+\s*=\s*[^@\#\s:]+",
+ # Extract subdir if present (rightmost : that looks like a subdir)
+ remaining, subdir_part = extract_url_subdir(remaining)
- # Match a `#BRANCH` branch or tag specifier.
- branch = raw"\#\s*[^@\#\s]*",
+ # Extract revision (first # that comes after a complete URL)
+ remaining, rev_part = extract_url_revision(remaining)
- # Match an `@VERSION` version specifier.
- version = raw"@\s*[^@\#\s]*",
+ # What's left is the base URL
+ push!(tokens, PackageIdentifier(remaining))
- # Match a `:SUBDIR` subdir specifier.
- subdir = raw":[^@\#\s]+",
+ # Add the specifiers in the correct order
+ if rev_part !== nothing
+ push!(tokens, Rev(rev_part))
+ end
+ if subdir_part !== nothing
+ push!(tokens, Subdir(subdir_part))
+ end
+
+ return tokens
+end
+
+function parse_path_with_specifiers(input::String)
+ tokens = PackageToken[]
+ remaining = input
+
+ # Extract subdir if present (rightmost :)
+ remaining, subdir_part = extract_subdir(remaining)
+
+ # Extract revision if present (rightmost #)
+ remaining, rev_part = extract_revision(remaining)
+
+ # What's left is the base path
+ push!(tokens, PackageIdentifier(remaining))
+
+ # Add specifiers in correct order
+ if rev_part !== nothing
+ push!(tokens, Rev(rev_part))
+ end
+ if subdir_part !== nothing
+ push!(tokens, Subdir(subdir_part))
+ end
+
+ return tokens
+end
+
+# Parse package names with specifiers
+function parse_name_with_specifiers(input::String)
+ tokens = PackageToken[]
+ remaining = input
+
+ # Extract subdir if present (rightmost :)
+ remaining, subdir_part = extract_subdir(remaining)
- # Match any other way to specify a package. This includes package
- # names, local paths, and URLs that don't match the `url` part. In
- # order not to clash with the branch, version, and subdir
- # specifiers, these cannot include `@` or `#`, and `:` is only
- # allowed if followed by `/` or `\`. For URLs matching this part
- # of the regex, that means that `@` (e.g. user names) and `:`
- # (e.g. port) cannot be used but it doesn't have to end with
- # `.git`.
- other = raw"([^@\#\s:] | :(/|\\))+"
+ # Extract version if present (rightmost @)
+ remaining, version_part = extract_version(remaining)
- # Combine all of the above.
- global const package_id_re = Regex(
- "$url | $name_uuid | $branch | $version | $subdir | $other", "x")
+ # Extract revision if present (rightmost #)
+ remaining, rev_part = extract_revision(remaining)
+
+ # What's left is the base name
+ push!(tokens, PackageIdentifier(remaining))
+
+ # Add specifiers in correct order
+ if rev_part !== nothing
+ push!(tokens, Rev(rev_part))
+ end
+ if version_part !== nothing
+ push!(tokens, VersionToken(version_part))
+ end
+ if subdir_part !== nothing
+ push!(tokens, Subdir(subdir_part))
+ end
+
+ return tokens
end
-function package_lex(qwords::Vector{QString})::Vector{String}
- words = String[]
- for qword in qwords
- qword.isquoted ?
- push!(words, qword.raw) :
- append!(words, map(m->m.match, eachmatch(package_id_re, qword.raw)))
+# Parse a single package specification
+function parse_package_spec_new(input::String)
+ # Handle quoted strings
+ if (startswith(input, '"') && endswith(input, '"')) ||
+ (startswith(input, '\'') && endswith(input, '\''))
+ input = input[2:(end - 1)]
+ end
+
+ # Handle GitHub tree/commit URLs first (special case)
+ github_result = preprocess_github_url(input)
+ if github_result !== nothing
+ return github_result
+ end
+
+ # Handle name=uuid format
+ if contains(input, '=')
+ parts = split(input, '=', limit = 2)
+ if length(parts) == 2
+ name = String(strip(parts[1]))
+ uuid_str = String(strip(parts[2]))
+ if is_valid_uuid(uuid_str)
+ return [PackageIdentifier("$name=$uuid_str")]
+ end
+ end
+ end
+
+ # Check what type of input this is and parse accordingly
+ if looks_like_url(input)
+ return parse_url_with_specifiers(input)
+ elseif looks_like_path(input)
+ return parse_path_with_specifiers(input)
+ else
+ return parse_name_with_specifiers(input)
end
- return words
end
-function parse_package_args(args::Vector{PackageToken}; add_or_dev=false)::Vector{PackageSpec}
+function parse_package(args::Vector{QString}, options; add_or_dev = false)::Vector{PackageSpec}
+ tokens = PackageToken[]
+
+ i = 1
+ while i <= length(args)
+ arg = args[i]
+ input = arg.isquoted ? arg.raw : arg.raw
+
+ # Check if this argument is a standalone modifier (like #dev, @v1.0, :subdir)
+ if !arg.isquoted && (startswith(input, '#') || startswith(input, '@') || startswith(input, ':'))
+ # This is a standalone modifier - it should be treated as a token
+ if startswith(input, '#')
+ push!(tokens, Rev(input[2:end]))
+ elseif startswith(input, '@')
+ push!(tokens, VersionToken(input[2:end]))
+ elseif startswith(input, ':')
+ push!(tokens, Subdir(input[2:end]))
+ end
+ else
+ # Parse this argument normally
+ if arg.isquoted
+ # For quoted arguments, treat as literal without specifier extraction
+ arg_tokens = [PackageIdentifier(input)]
+ else
+ arg_tokens = parse_package_spec_new(input)
+ end
+ append!(tokens, arg_tokens)
+ end
+
+ i += 1
+ end
+
+ return parse_package_args(tokens; add_or_dev = add_or_dev)
+end
+
+
+function parse_package_args(args::Vector{PackageToken}; add_or_dev = false)::Vector{PackageSpec}
# check for and apply PackageSpec modifier (e.g. `#foo` or `@v1.0.2`)
function apply_modifier!(pkg::PackageSpec, args::Vector{PackageToken})
(isempty(args) || args[1] isa PackageIdentifier) && return
- modifier = popfirst!(args)
- if modifier isa Subdir
- pkg.subdir = modifier.dir
- (isempty(args) || args[1] isa PackageIdentifier) && return
+ parsed_subdir = false
+ parsed_version = false
+ parsed_rev = false
+ while !isempty(args)
modifier = popfirst!(args)
+ if modifier isa PackageIdentifier
+ pushfirst!(args, modifier)
+ return
+ elseif modifier isa Subdir
+ if parsed_subdir
+ pkgerror("Multiple subdir specifiers `$args` found.")
+ end
+ pkg.subdir = modifier.dir
+ parsed_subdir = true
+ (isempty(args) || args[1] isa PackageIdentifier) && return
+ elseif modifier isa VersionToken
+ if parsed_version
+ pkgerror("Multiple version specifiers `$args` found.")
+ end
+ pkg.version = modifier.version
+ parsed_version = true
+ elseif modifier isa Rev
+ if parsed_rev
+ pkgerror("Multiple revision specifiers `$args` found.")
+ end
+ pkg.rev = modifier.rev
+ parsed_rev = true
+ else
+ pkgerror("Package name/uuid must precede subdir specifier `$args`.")
+ end
end
-
- if modifier isa VersionToken
- pkg.version = modifier.version
- elseif modifier isa Rev
- pkg.rev = modifier.rev
- else
- pkgerror("Package name/uuid must precede subdir specifier `$args`.")
- end
+ return
end
pkgs = PackageSpec[]
while !isempty(args)
arg = popfirst!(args)
if arg isa PackageIdentifier
- pkg = parse_package_identifier(arg; add_or_develop=add_or_dev)
+ pkg = parse_package_identifier(arg; add_or_develop = add_or_dev)
apply_modifier!(pkg, args)
push!(pkgs, pkg)
- # Modifiers without a corresponding package identifier -- this is a user error
+ # Modifiers without a corresponding package identifier -- this is a user error
else
arg isa VersionToken ?
pkgerror("Package name/uuid must precede version specifier `@$arg`.") :
- arg isa Rev ?
+ arg isa Rev ?
pkgerror("Package name/uuid must precede revision specifier `#$(arg.rev)`.") :
pkgerror("Package name/uuid must precede subdir specifier `[$arg]`.")
end
@@ -130,31 +415,27 @@ function parse_package_args(args::Vector{PackageToken}; add_or_dev=false)::Vecto
end
let uuid = raw"(?i)[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}(?-i)",
- name = raw"(\w+)(?:\.jl)?"
+ name = raw"(\w+)(?:\.jl)?"
global const name_re = Regex("^$name\$")
global const uuid_re = Regex("^$uuid\$")
global const name_uuid_re = Regex("^$name\\s*=\\s*($uuid)\$")
end
# packages can be identified through: uuid, name, or name+uuid
# additionally valid for add/develop are: local path, url
-function parse_package_identifier(pkg_id::PackageIdentifier; add_or_develop=false)::PackageSpec
+function parse_package_identifier(pkg_id::PackageIdentifier; add_or_develop = false)::PackageSpec
word = pkg_id.val
if add_or_develop
- if isurl(word)
- return PackageSpec(; url=word)
- elseif any(occursin.(['\\','/'], word)) || word == "." || word == ".."
- if casesensitive_isdir(expanduser(word))
- return PackageSpec(; path=normpath(expanduser(word)))
- else
- pkgerror("`$word` appears to be a local path, but directory does not exist")
- end
- end
- if occursin(name_re, word) && casesensitive_isdir(expanduser(word))
+ if occursin(name_re, word) && isdir(expanduser(word))
@info "Use `./$word` to add or develop the local directory at `$(Base.contractuser(abspath(word)))`."
end
+ if isurl(word)
+ return PackageSpec(; url = word)
+ elseif any(occursin.(['\\', '/'], word)) || word == "." || word == ".."
+ return PackageSpec(; path = normpath(expanduser(word)))
+ end
end
if occursin(uuid_re, word)
- return PackageSpec(;uuid=UUID(word))
+ return PackageSpec(; uuid = UUID(word))
elseif occursin(name_re, word)
m = match(name_re, word)
return PackageSpec(String(something(m.captures[1])))
@@ -169,18 +450,18 @@ end
################
# RegistrySpec #
################
-function parse_registry(raw_args::Vector{QString}, options; add=false)
+function parse_registry(raw_args::Vector{QString}, options; add = false)
regs = RegistrySpec[]
- foreach(x -> push!(regs, parse_registry(x; add=add)), unwrap(raw_args))
+ foreach(x -> push!(regs, parse_registry(x; add = add)), unwrap(raw_args))
return regs
end
# Registries can be identified through: uuid, name, or name+uuid
# when updating/removing. When adding we can accept a local path or url.
-function parse_registry(word::AbstractString; add=false)::RegistrySpec
+function parse_registry(word::AbstractString; add = false)::RegistrySpec
word = expanduser(word)
registry = RegistrySpec()
- if add && isdir_nothrow(word) # TODO: Should be casesensitive_isdir
+ if add && isdir_nothrow(word)
if isdir(joinpath(word, ".git")) # add path as url and clone it from there
registry.url = abspath(word)
else # put the path
@@ -204,6 +485,14 @@ function parse_registry(word::AbstractString; add=false)::RegistrySpec
return registry
end
+#
+# # Apps
+#
+function parse_app_add(raw_args::Vector{QString}, options)
+ return parse_package(raw_args, options; add_or_dev = true)
+end
+
+
#
# # Other
#
diff --git a/src/REPLMode/command_declarations.jl b/src/REPLMode/command_declarations.jl
index cb00dfb260..c6bf19db3a 100644
--- a/src/REPLMode/command_declarations.jl
+++ b/src/REPLMode/command_declarations.jl
@@ -1,585 +1,726 @@
-const PSA = Pair{Symbol,Any}
+const PSA = Pair{Symbol, Any}
compound_declarations = [
-"package" => CommandDeclaration[
-PSA[:name => "test",
- :api => API.test,
- :should_splat => false,
- :arg_count => 0 => Inf,
- :arg_parser => parse_package,
- :option_spec => [
- PSA[:name => "coverage", :api => :coverage => true],
+ "package" => CommandDeclaration[
+ PSA[
+ :name => "test",
+ :api => API.test,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :arg_parser => parse_package,
+ :option_spec => [
+ PSA[:name => "coverage", :api => :coverage => true],
+ ],
+ :completions => :complete_installed_packages,
+ :description => "run tests for packages",
+ :help => md"""
+ test [--coverage] [pkg[=uuid]] ...
+
+ Run the tests for package `pkg`, or for the current project (which thus needs to be
+ a package) if `pkg` is omitted. This is done by running the file `test/runtests.jl`
+ in the package directory. The option `--coverage` can be used to run the tests with
+ coverage enabled. The `startup.jl` file is disabled during testing unless
+ julia is started with `--startup-file=yes`.
+ """,
+ ],
+ PSA[
+ :name => "help",
+ :short_name => "?",
+ :api => identity, # dummy API function
+ :arg_count => 0 => Inf,
+ :arg_parser => ((x, y) -> x),
+ :completions => :complete_help,
+ :description => "show this message",
+ :help => md"""
+ [?|help]
+
+ List available commands along with short descriptions.
+
+ [?|help] cmd
+
+ If `cmd` is a partial command, display help for all subcommands.
+ If `cmd` is a full command, display help for `cmd`.
+ """,
+ ],
+ PSA[
+ :name => "instantiate",
+ :api => API.instantiate,
+ :option_spec => [
+ PSA[:name => "project", :short_name => "p", :api => :manifest => false],
+ PSA[:name => "manifest", :short_name => "m", :api => :manifest => true],
+ PSA[:name => "verbose", :short_name => "v", :api => :verbose => true],
+ PSA[:name => "workspace", :api => :workspace => true],
+ PSA[:name => "julia_version_strict", :api => :julia_version_strict => false],
+ ],
+ :description => "downloads all the dependencies for the project",
+ :help => md"""
+ instantiate [-v|--verbose] [--workspace] [--julia_version_strict]
+ instantiate [-v|--verbose] [--workspace] [--julia_version_strict] [-m|--manifest]
+ instantiate [-v|--verbose] [--workspace] [--julia_version_strict] [-p|--project]
+
+ Download all the dependencies for the current project at the version given by the project's manifest.
+ If no manifest exists or the `--project` option is given, resolve and download the dependencies compatible with the project.
+ If `--workspace` is given, all dependencies in the workspace will be downloaded.
+ If `--julia_version_strict` is given, manifest version check failures will error instead of log warnings.
+
+ After packages have been installed the project will be precompiled. For more information see `pkg> ?precompile`.
+ """,
+ ],
+ PSA[
+ :name => "remove",
+ :short_name => "rm",
+ :api => API.rm,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :arg_parser => parse_package,
+ :option_spec => [
+ PSA[:name => "project", :short_name => "p", :api => :mode => PKGMODE_PROJECT],
+ PSA[:name => "manifest", :short_name => "m", :api => :mode => PKGMODE_MANIFEST],
+ PSA[:name => "all", :api => :all_pkgs => true],
+ ],
+ :completions => :complete_installed_packages,
+ :description => "remove packages from project or manifest",
+ :help => md"""
+ [rm|remove] [-p|--project] pkg[=uuid] ...
+ [rm|remove] [-p|--project] [--all]
+
+ Remove package `pkg` from the project file. Since the name `pkg` can only
+ refer to one package in a project this is unambiguous, but you can specify
+ a `uuid` anyway, and the command is ignored, with a warning, if package name
+ and UUID do not match. When a package is removed from the project file, it
+ may still remain in the manifest if it is required by some other package in
+ the project. Project mode operation is the default, so passing `-p` or
+ `--project` is optional unless it is preceded by the `-m` or `--manifest`
+ options at some earlier point. All packages can be removed by passing `--all`.
+
+ [rm|remove] [-m|--manifest] pkg[=uuid] ...
+ [rm|remove] [-m|--manifest] [--all]
+
+ Remove package `pkg` from the manifest file. If the name `pkg` refers to
+ multiple packages in the manifest, `uuid` disambiguates it. Removing a package
+ from the manifest forces the removal of all packages that depend on it, as well
+ as any no-longer-necessary manifest packages due to project package removals.
+ All packages can be removed by passing `--all`.
+ """,
+ ],
+ PSA[
+ :name => "add",
+ :api => API.add,
+ :should_splat => false,
+ :arg_count => 1 => Inf,
+ :arg_parser => ((x, y) -> parse_package(x, y; add_or_dev = true)),
+ :option_spec => [
+ PSA[:name => "preserve", :takes_arg => true, :api => :preserve => do_preserve],
+ PSA[:name => "weak", :short_name => "w", :api => :target => :weakdeps],
+ PSA[:name => "extra", :short_name => "e", :api => :target => :extras],
+ ],
+ :completions => :complete_add_dev,
+ :description => "add packages to project",
+ :help => md"""
+ add [--preserve=] [-w|--weak] [-e|--extra] pkg[=uuid] [@version] [#rev] ...
+
+ Add package `pkg` to the current project file. If `pkg` could refer to
+ multiple different packages, specifying `uuid` allows you to disambiguate.
+ `@version` optionally allows specifying which versions of packages to add. Version specifications
+ are of the form `@1`, `@1.2` or `@1.2.3`, allowing any version with a prefix
+ that matches, or ranges thereof, such as `@1.2-3.4.5`. A git revision can be
+ specified by `#branch` or `#commit`.
+
+ If the active environment is a package (the Project has both `name` and `uuid` fields) compat entries will be
+ added automatically with a lower bound of the added version.
+
+ If a local path is used as an argument to `add`, the path needs to be a git repository.
+ The project will then track that git repository just like it would track a remote repository online.
+ If the package is not located at the top of the git repository, a subdirectory can be specified with
+ `path:subdir/path`.
+
+ `Pkg` resolves the set of packages in your environment using a tiered approach.
+ The `--preserve` command line option allows you to key into a specific tier in the resolve algorithm.
+ The following table describes the command line arguments to `--preserve` (in order of strictness).
+
+ | Argument | Description |
+ |:-------------------|:-----------------------------------------------------------------------------------|
+ | `installed` | Like `all` except also only add versions that are already installed |
+ | `all` | Preserve the state of all existing dependencies (including recursive dependencies) |
+ | `direct` | Preserve the state of all existing direct dependencies |
+ | `semver` | Preserve semver-compatible versions of direct dependencies |
+ | `none` | Do not attempt to preserve any version information |
+ | `tiered_installed` | Like `tiered` except first try to add only installed versions |
+ | **`tiered`** | Use the tier that will preserve the most version information while |
+ | | allowing version resolution to succeed (this is the default) |
+
+ Note: To make the default strategy `tiered_installed` set the env var `JULIA_PKG_PRESERVE_TIERED_INSTALLED` to
+ true.
+
+ After the installation of new packages the project will be precompiled. For more information see `pkg> ?precompile`.
+
+ With the `installed` strategy the newly added packages will likely already be precompiled, but if not this may be
+ because either the combination of package versions resolved in this environment has not been resolved and
+ precompiled before, or the precompile cache has been deleted by the LRU cache storage
+ (see `JULIA_MAX_NUM_PRECOMPILE_FILES`).
+
+ **Examples**
+ ```
+ pkg> add Example
+ pkg> add --preserve=all Example
+ pkg> add --weak Example
+ pkg> add --extra Example
+ pkg> add Example@0.5
+ pkg> add Example#master
+ pkg> add Example#c37b675
+ pkg> add https://github.com/JuliaLang/Example.jl#master
+ pkg> add git@github.com:JuliaLang/Example.jl.git
+ pkg> add "git@github.com:JuliaLang/Example.jl.git"#master
+ pkg> add https://github.com/Company/MonoRepo:juliapkgs/Package.jl
+ pkg> add Example=7876af07-990d-54b4-ab0e-23690620f79a
+ ```
+ """,
+ ],
+ PSA[
+ :name => "develop",
+ :short_name => "dev",
+ :api => API.develop,
+ :should_splat => false,
+ :arg_count => 1 => Inf,
+ :arg_parser => ((x, y) -> parse_package(x, y; add_or_dev = true)),
+ :option_spec => [
+ PSA[:name => "strict", :api => :strict => true],
+ PSA[:name => "local", :api => :shared => false],
+ PSA[:name => "shared", :api => :shared => true],
+ PSA[:name => "preserve", :takes_arg => true, :api => :preserve => do_preserve],
+ ],
+ :completions => :complete_add_dev,
+ :description => "clone the full package repo locally for development",
+ :help => md"""
+ [dev|develop] [--preserve=] [--shared|--local] pkg[=uuid] ...
+ [dev|develop] [--preserve=] path
+
+ Make a package available for development. If `pkg` is an existing local path, that path will be recorded in
+ the manifest and used. Otherwise, a full git clone of `pkg` is made. The location of the clone is
+ controlled by the `--shared` (default) and `--local` arguments. The `--shared` location defaults to
+ `~/.julia/dev`, but can be controlled with the `JULIA_PKG_DEVDIR` environment variable.
+
+ When `--local` is given, the clone is placed in a `dev` folder in the current project. This
+ is not supported for paths, only registered packages.
+
+ This operation is undone by `free`.
+
+ The preserve strategies offered by `add` are also available via the `preserve` argument.
+ See `add` for more information.
+
+ **Examples**
+ ```jl
+ pkg> develop Example
+ pkg> develop https://github.com/JuliaLang/Example.jl
+ pkg> develop ~/mypackages/Example
+ pkg> develop --local Example
+ ```
+ """,
+ ],
+ PSA[
+ :name => "free",
+ :api => API.free,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :option_spec => [
+ PSA[:name => "all", :api => :all_pkgs => true],
+ ],
+ :arg_parser => parse_package,
+ :completions => :complete_fixed_packages,
+ :description => "undoes a `pin`, `develop`, or stops tracking a repo",
+ :help => md"""
+ free pkg[=uuid] ...
+ free [--all]
+
+            Free pinned packages, which allows them to be upgraded or downgraded again. If the package is checked out (see `help develop`) then this command
+ makes the package no longer being checked out. Specifying `--all` will free all dependencies (direct and indirect).
+ """,
+ ],
+ PSA[
+ :name => "why",
+ :api => API.why,
+ :should_splat => false,
+ :arg_count => 1 => 1,
+ :option_spec => [
+ PSA[:name => "workspace", :api => :workspace => true],
+ ],
+ :arg_parser => parse_package,
+ :completions => :complete_all_installed_packages,
+ :description => "shows why a package is in the manifest",
+ :help => md"""
+ why [--workspace] pkg[=uuid] ...
+
+ Show the reason why packages are in the manifest, printed as a path through the
+ dependency graph starting at the direct dependencies.
+ The `workspace` option can be used to show the path from any dependency of a project in
+ the workspace.
+
+ !!! compat "Julia 1.9"
+ The `why` function is added in Julia 1.9
+ """,
+ ],
+ PSA[
+ :name => "pin",
+ :api => API.pin,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :option_spec => [
+ PSA[:name => "all", :api => :all_pkgs => true],
+ ],
+ :arg_parser => parse_package,
+ :completions => :complete_installed_packages,
+ :description => "pins the version of packages",
+ :help => md"""
+ pin pkg[=uuid] ...
+ pin [--all]
+
+ Pin packages to given versions, or the current version if no version is specified. A pinned package has its version fixed and will not be upgraded or downgraded.
+            A pinned package has the symbol `⚲` next to its version in the status list. Specifying `--all` will pin all dependencies (direct and indirect).
+
+ **Examples**
+ ```
+ pkg> pin Example
+ pkg> pin Example@0.5.0
+ pkg> pin Example=7876af07-990d-54b4-ab0e-23690620f79a@0.5.0
+ pkg> pin --all
+ ```
+ """,
+ ],
+ PSA[
+ :name => "build",
+ :api => API.build,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :arg_parser => parse_package,
+ :option_spec => [
+ PSA[:name => "verbose", :short_name => "v", :api => :verbose => true],
+ ],
+ :completions => :complete_installed_packages,
+ :description => "run the build script for packages",
+ :help => md"""
+ build [-v|--verbose] pkg[=uuid] ...
+
+ Run the build script in `deps/build.jl` for `pkg` and all of its dependencies in depth-first recursive order.
+ If no packages are given, run the build scripts for all packages in the manifest.
+ The `-v`/`--verbose` option redirects build output to `stdout`/`stderr` instead of the `build.log` file.
+ The `startup.jl` file is disabled during building unless julia is started with `--startup-file=yes`.
+ """,
+ ],
+ PSA[
+ :name => "resolve",
+ :api => API.resolve,
+ :description => "resolves to update the manifest from changes in dependencies of developed packages",
+ :help => md"""
+ resolve
+
+ Resolve the project i.e. run package resolution and update the Manifest. This is useful in case the dependencies of developed
+ packages have changed causing the current Manifest to be out of sync.
+ """,
+ ],
+ PSA[
+ :name => "activate",
+ :api => API.activate,
+ :arg_count => 0 => 1,
+ :arg_parser => parse_activate,
+ :option_spec => [
+ PSA[:name => "shared", :api => :shared => true],
+ PSA[:name => "temp", :api => :temp => true],
+ ],
+ :completions => :complete_activate,
+ :description => "set the primary environment the package manager manipulates",
+ :help => md"""
+ activate
+ activate [--shared] path
+ activate --temp
+ activate - (activates the previously active environment)
+
+ Activate the environment at the given `path`, or use the first project found in
+ `LOAD_PATH` (ignoring `"@"`) if no `path` is specified.
+ In the latter case, for the default value of `LOAD_PATH`, the result is to activate the
+ `@v#.#` environment.
+ The active environment is the environment that is modified by executing package commands.
+ When the option `--shared` is given, `path` will be assumed to be a directory name and searched for in the
+ `environments` folders of the depots in the depot stack. In case no such environment exists in any of the depots,
+ it will be placed in the first depot of the stack.
+ Use the `--temp` option to create temporary environments which are removed when the julia
+ process is exited.
+ Use a single `-` to activate the previously active environment.
+ """,
+ ],
+ PSA[
+ :name => "update",
+ :short_name => "up",
+ :api => API.up,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :arg_parser => parse_package,
+ :option_spec => [
+ PSA[:name => "project", :short_name => "p", :api => :mode => PKGMODE_PROJECT],
+ PSA[:name => "manifest", :short_name => "m", :api => :mode => PKGMODE_MANIFEST],
+ PSA[:name => "major", :api => :level => UPLEVEL_MAJOR],
+ PSA[:name => "minor", :api => :level => UPLEVEL_MINOR],
+ PSA[:name => "patch", :api => :level => UPLEVEL_PATCH],
+ PSA[:name => "fixed", :api => :level => UPLEVEL_FIXED],
+ PSA[:name => "preserve", :takes_arg => true, :api => :preserve => do_preserve],
+ ],
+ :completions => :complete_installed_packages,
+ :description => "update packages in manifest",
+ :help => md"""
+ [up|update] [-p|--project] [opts] pkg[=uuid] [@version] ...
+ [up|update] [-m|--manifest] [opts] pkg[=uuid] [@version] ...
+
+ opts: --major | --minor | --patch | --fixed
+ --preserve=
+
+ Update `pkg` within the constraints of the indicated version
+ specifications. These specifications are of the form `@1`, `@1.2` or `@1.2.3`, allowing
+ any version with a prefix that matches, or ranges thereof, such as `@1.2-3.4.5`.
+ In `--project` mode, package specifications only match project packages, while
+ in `--manifest` mode they match any manifest package. Bound level options force
+ the following packages to be upgraded only within the current major, minor,
+ patch version; if the `--fixed` upgrade level is given, then the following
+ packages will not be upgraded at all.
+
+ After any package updates the project will be precompiled. For more information see `pkg> ?precompile`.
+ """,
+ ],
+ PSA[
+ :name => "generate",
+ :api => API.generate,
+ :arg_count => 1 => 1,
+ :arg_parser => ((x, y) -> map(expanduser, unwrap(x))),
+ :description => "generate files for a new project",
+ :help => md"""
+ generate pkgname
+
+ Create a minimal project called `pkgname` in the current folder. For more featureful package creation, please see `PkgTemplates.jl`.
+ """,
+ ],
+ PSA[
+ :name => "precompile",
+ :api => API.precompile,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :completions => :complete_installed_packages,
+ :description => "precompile all the project dependencies",
+ :option_spec => [
+ PSA[:name => "workspace", :api => :workspace => true],
+ ],
+ :help => md"""
+ precompile [--workspace]
+ precompile [--workspace] pkgs...
+
+ Precompile all or specified dependencies of the project in parallel.
+ The `startup.jl` file is disabled during precompilation unless julia is started with `--startup-file=yes`.
+ The `workspace` option will precompile all packages in the workspace and not only the active project.
+
+ Errors will only throw when precompiling the top-level dependencies, given that
+ not all manifest dependencies may be loaded by the top-level dependencies on the given system.
+
+ This method is called automatically after any Pkg action that changes the manifest.
+ Any packages that have previously errored during precompilation won't be retried in auto mode
+ until they have changed. To disable automatic precompilation set the environment variable `JULIA_PKG_PRECOMPILE_AUTO=0`.
+ To manually control the number of tasks used set the environment variable `JULIA_NUM_PRECOMPILE_TASKS`.
+ """,
+ ],
+ PSA[
+ :name => "status",
+ :short_name => "st",
+ :api => API.status,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :arg_parser => parse_package,
+ :option_spec => [
+ PSA[:name => "project", :short_name => "p", :api => :mode => PKGMODE_PROJECT],
+ PSA[:name => "manifest", :short_name => "m", :api => :mode => PKGMODE_MANIFEST],
+ PSA[:name => "diff", :short_name => "d", :api => :diff => true],
+ PSA[:name => "outdated", :short_name => "o", :api => :outdated => true],
+ PSA[:name => "compat", :short_name => "c", :api => :compat => true],
+ PSA[:name => "extensions", :short_name => "e", :api => :extensions => true],
+ PSA[:name => "workspace", :api => :workspace => true],
+ ],
+ :completions => :complete_installed_packages,
+ :description => "summarize contents of and changes to environment",
+ :help => md"""
+ [st|status] [-d|--diff] [--workspace] [-o|--outdated] [pkgs...]
+ [st|status] [-d|--diff] [--workspace] [-o|--outdated] [-p|--project] [pkgs...]
+ [st|status] [-d|--diff] [--workspace] [-o|--outdated] [-m|--manifest] [pkgs...]
+ [st|status] [-d|--diff] [--workspace] [-e|--extensions] [-p|--project] [pkgs...]
+ [st|status] [-d|--diff] [--workspace] [-e|--extensions] [-m|--manifest] [pkgs...]
+ [st|status] [-c|--compat] [pkgs...]
+
+ Show the status of the current environment. Packages marked with `⌃` have new
+ versions that may be installed, e.g. via `pkg> up`. Those marked with `⌅` have
+ new versions available, but cannot be installed due to compatibility
+ constraints. To see why use `pkg> status --outdated` which shows any packages
+ that are not at their latest version and if any packages are holding them back.
+ Packages marked with `[yanked]` have been yanked from the registry and should be
+ updated or removed.
+
+            Use `pkg> status --extensions` to show dependencies with extensions and which of
+            their extension dependencies are currently loaded.
+
+ In `--project` mode (default), the status of the project file is summarized. In `--manifest`
+ mode the output also includes the recursive dependencies of added packages given in the manifest.
+ If there are any packages listed as arguments the output will be limited to those packages.
+ The `--diff` option will, if the environment is in a git repository, limit
+ the output to the difference as compared to the last git commit.
+ The `--compat` option alone shows project compat entries.
+ The `--workspace` option shows the (merged) status of packages in the workspace.
+
+ !!! compat "Julia 1.8"
+ The `⌃` and `⌅` indicators were added in Julia 1.8.
+ The `--outdated` and `--compat` options require at least Julia 1.8.
+ """,
+ ],
+ PSA[
+ :name => "compat",
+ :api => API.compat,
+ :arg_count => 0 => 2,
+ :completions => :complete_installed_packages_and_compat,
+ :option_spec => [
+ PSA[:name => "current", :api => :current => true],
+ ],
+ :description => "edit compat entries in the current Project and re-resolve",
+ :help => md"""
+ compat [pkg] [compat_string]
+ compat
+ compat --current
+                compat --current pkg
+
+ Edit project [compat] entries directly, or via an interactive menu by not specifying any arguments.
+ Use --current flag to automatically populate missing compat entries with currently resolved versions.
+ When used alone, applies to all packages missing compat entries.
+ When combined with a package name, applies only to that package.
+ When directly editing use tab to complete the package name and any existing compat entry.
+ Specifying a package with a blank compat entry will remove the entry.
+ After changing compat entries a `resolve` will be attempted to check whether the current
+ environment is compliant with the new compat rules.
+ """,
+ ],
+ PSA[
+ :name => "gc",
+ :api => API.gc,
+ :option_spec => [
+ PSA[:name => "all", :api => :collect_delay => Hour(0)],
+ PSA[:name => "verbose", :short_name => "v", :api => :verbose => true],
+ ],
+ :description => "garbage collect packages not used for a significant time",
+ :help => md"""
+ gc [-v|--verbose] [--all]
+
+ Free disk space by garbage collecting packages not used for a significant time.
+ The `--all` option will garbage collect all packages which can not be immediately
+ reached from any existing project.
+ Use verbose mode for detailed output.
+ """,
+ ],
+ PSA[
+ :name => "undo",
+ :api => API.undo,
+ :description => "undo the latest change to the active project",
+ :help => md"""
+ undo
+
+ Undoes the latest change to the active project.
+ """,
+ ],
+ PSA[
+ :name => "redo",
+ :api => API.redo,
+ :description => "redo the latest change to the active project",
+ :help => md"""
+ redo
+
+ Redoes the changes from the latest [`undo`](@ref).
+ """,
+ ],
+ ], #package
+ "registry" => CommandDeclaration[
+ PSA[
+ :name => "add",
+ :api => Registry.add,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :arg_parser => ((x, y) -> parse_registry(x, y; add = true)),
+ :description => "add package registries",
+ :help => md"""
+ registry add reg...
+
+ Add package registries `reg...` to the user depot. Without arguments
+ it adds known registries, i.e. the General registry and registries
+ served by the configured package server.
+
+ **Examples**
+ ```
+ pkg> registry add General
+ pkg> registry add https://www.my-custom-registry.com
+ pkg> registry add
+ ```
+ """,
+ ],
+ PSA[
+ :name => "remove",
+ :short_name => "rm",
+ :api => Registry.rm,
+ :should_splat => false,
+ :arg_count => 1 => Inf,
+ :arg_parser => parse_registry,
+ :description => "remove package registries",
+ :help => md"""
+ registry [rm|remove] reg...
+
+ Remove package registries `reg...`.
+
+ **Examples**
+ ```
+ pkg> registry [rm|remove] General
+ ```
+ """,
+ ],
+ PSA[
+ :name => "update",
+ :short_name => "up",
+ :api => Registry.update,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :arg_parser => parse_registry,
+ :description => "update package registries",
+ :help => md"""
+ registry [up|update]
+ registry [up|update] reg...
+
+ Update package registries `reg...`. If no registries are specified
+ all registries will be updated.
+
+ **Examples**
+ ```
+ pkg> registry up
+ pkg> registry up General
+ ```
+ """,
+ ],
+ PSA[
+ :name => "status",
+ :short_name => "st",
+ :api => Registry.status,
+ :description => "information about installed registries",
+ :help => md"""
+ registry [st|status]
+
+ Display information about installed registries.
+
+ **Examples**
+ ```
+ pkg> registry status
+ ```
+ """,
+ ],
+ ], #registry
+ "app" => CommandDeclaration[
+ PSA[
+ :name => "status",
+ :short_name => "st",
+ :api => Apps.status,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :arg_parser => parse_package,
+ :completions => :complete_installed_apps,
+ :description => "show status of apps",
+ :help => md"""
+ app status [pkg[=uuid]] ...
+
+ Show the status of installed apps. If packages are specified, only show
+ apps for those packages.
+ """,
+ ],
+ PSA[
+ :name => "add",
+ :api => Apps.add,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :arg_parser => parse_app_add,
+ :completions => :complete_add_dev,
+ :description => "add app",
+ :help => md"""
+ app add pkg[=uuid] ...
+
+ Add apps provided by packages `pkg...`. This will make the apps available
+ as executables in `~/.julia/bin` (which should be added to PATH).
+
+ **Examples**
+ ```
+ pkg> app add Example
+ pkg> app add Example@0.5.0
+ ```
+ """,
+ ],
+ PSA[
+ :name => "remove",
+ :short_name => "rm",
+ :api => Apps.rm,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :arg_parser => parse_package,
+ :completions => :complete_installed_apps,
+ :description => "remove apps",
+ :help => md"""
+ app [rm|remove] pkg[=uuid] ...
+
+ Remove apps provided by packages `pkg...`. This will remove the executables
+ from `~/.julia/bin`.
+
+ **Examples**
+ ```
+ pkg> app rm Example
+ ```
+ """,
+ ],
+ PSA[
+ :name => "develop",
+ :short_name => "dev",
+ :api => Apps.develop,
+ :should_splat => false,
+ :arg_count => 1 => Inf,
+ :arg_parser => (x, y) -> parse_package(x, y; add_or_dev = true),
+ :completions => :complete_add_dev,
+ :description => "develop a package and install all the apps in it",
+ :help => md"""
+ app [dev|develop] pkg[=uuid] ...
+ app [dev|develop] path
+
+ Same as `develop` but also installs all the apps in the package.
+ This allows one to edit their app and have the changes immediately be reflected in the app.
+
+ **Examples**
+ ```jl
+ pkg> app develop Example
+ pkg> app develop https://github.com/JuliaLang/Example.jl
+ pkg> app develop ~/mypackages/Example
+ pkg> app develop --local Example
+ ```
+ """,
+ ],
+ PSA[
+ :name => "update",
+ :short_name => "up",
+ :api => Apps.update,
+ :completions => :complete_installed_apps,
+ :should_splat => false,
+ :arg_count => 0 => Inf,
+ :arg_parser => parse_package,
+ :description => "update app",
+ :help => md"""
+ app [up|update] [pkg[=uuid]] ...
+
+ Update apps for packages `pkg...`. If no packages are specified, all apps will be updated.
+
+ **Examples**
+ ```
+ pkg> app update
+ pkg> app update Example
+ ```
+ """,
+ ], # app
],
- :completions => :complete_installed_packages,
- :description => "run tests for packages",
- :help => md"""
- test [--coverage] [pkg[=uuid]] ...
-
-Run the tests for package `pkg`, or for the current project (which thus needs to be
-a package) if `pkg` is ommitted. This is done by running the file `test/runtests.jl`
-in the package directory. The option `--coverage` can be used to run the tests with
-coverage enabled. The `startup.jl` file is disabled during testing unless
-julia is started with `--startup-file=yes`.
-""",
-],
-PSA[:name => "help",
- :short_name => "?",
- :api => identity, # dummy API function
- :arg_count => 0 => Inf,
- :arg_parser => ((x,y) -> x),
- :completions => :complete_help,
- :description => "show this message",
- :help => md"""
- [?|help]
-
-List available commands along with short descriptions.
-
- [?|help] cmd
-
-If `cmd` is a partial command, display help for all subcommands.
-If `cmd` is a full command, display help for `cmd`.
-""",
-],
-PSA[:name => "instantiate",
- :api => API.instantiate,
- :option_spec => [
- PSA[:name => "project", :short_name => "p", :api => :manifest => false],
- PSA[:name => "manifest", :short_name => "m", :api => :manifest => true],
- PSA[:name => "verbose", :short_name => "v", :api => :verbose => true],
- PSA[:name => "workspace", :api => :workspace => true],
- PSA[:name => "julia_version_strict", :api => :julia_version_strict => false],
- ],
- :description => "downloads all the dependencies for the project",
- :help => md"""
- instantiate [-v|--verbose] [--workspace] [--julia_version_strict]
- instantiate [-v|--verbose] [--workspace] [--julia_version_strict] [-m|--manifest]
- instantiate [-v|--verbose] [--workspace] [--julia_version_strict] [-p|--project]
-
-Download all the dependencies for the current project at the version given by the project's manifest.
-If no manifest exists or the `--project` option is given, resolve and download the dependencies compatible with the project.
-If `--workspace` is given, all dependencies in the workspace will be downloaded.
-If `--julia_version_strict` is given, manifest version check failures will error instead of log warnings.
-
-After packages have been installed the project will be precompiled. For more information see `pkg> ?precompile`.
-""",
-],
-PSA[:name => "remove",
- :short_name => "rm",
- :api => API.rm,
- :should_splat => false,
- :arg_count => 0 => Inf,
- :arg_parser => parse_package,
- :option_spec => [
- PSA[:name => "project", :short_name => "p", :api => :mode => PKGMODE_PROJECT],
- PSA[:name => "manifest", :short_name => "m", :api => :mode => PKGMODE_MANIFEST],
- PSA[:name => "all", :api => :all_pkgs => true],
- ],
- :completions => :complete_installed_packages,
- :description => "remove packages from project or manifest",
- :help => md"""
- [rm|remove] [-p|--project] pkg[=uuid] ...
- [rm|remove] [-p|--project] [--all]
-
-Remove package `pkg` from the project file. Since the name `pkg` can only
-refer to one package in a project this is unambiguous, but you can specify
-a `uuid` anyway, and the command is ignored, with a warning, if package name
-and UUID do not match. When a package is removed from the project file, it
-may still remain in the manifest if it is required by some other package in
-the project. Project mode operation is the default, so passing `-p` or
-`--project` is optional unless it is preceded by the `-m` or `--manifest`
-options at some earlier point. All packages can be removed by passing `--all`.
-
- [rm|remove] [-m|--manifest] pkg[=uuid] ...
- [rm|remove] [-m|--manifest] [--all]
-
-Remove package `pkg` from the manifest file. If the name `pkg` refers to
-multiple packages in the manifest, `uuid` disambiguates it. Removing a package
-from the manifest forces the removal of all packages that depend on it, as well
-as any no-longer-necessary manifest packages due to project package removals.
-All packages can be removed by passing `--all`.
-""",
-],
-PSA[:name => "add",
- :api => API.add,
- :should_splat => false,
- :arg_count => 1 => Inf,
- :arg_parser => ((x,y) -> parse_package(x,y; add_or_dev=true)),
- :option_spec => [
- PSA[:name => "preserve", :takes_arg => true, :api => :preserve => do_preserve],
- PSA[:name => "weak", :short_name => "w", :api => :target => :weakdeps],
- PSA[:name => "extra", :short_name => "e", :api => :target => :extras],
- ],
- :completions => :complete_add_dev,
- :description => "add packages to project",
- :help => md"""
- add [--preserve=] [-w|--weak] [-e|--extra] pkg[=uuid] [@version] [#rev] ...
-
-Add package `pkg` to the current project file. If `pkg` could refer to
-multiple different packages, specifying `uuid` allows you to disambiguate.
-`@version` optionally allows specifying which versions of packages to add. Version specifications
-are of the form `@1`, `@1.2` or `@1.2.3`, allowing any version with a prefix
-that matches, or ranges thereof, such as `@1.2-3.4.5`. A git revision can be
-specified by `#branch` or `#commit`.
-
-If the active environment is a package (the Project has both `name` and `uuid` fields) compat entries will be
-added automatically with a lower bound of the added version.
-
-If a local path is used as an argument to `add`, the path needs to be a git repository.
-The project will then track that git repository just like it would track a remote repository online.
-If the package is not located at the top of the git repository, a subdirectory can be specified with
-`path:subdir/path`.
-
-`Pkg` resolves the set of packages in your environment using a tiered approach.
-The `--preserve` command line option allows you to key into a specific tier in the resolve algorithm.
-The following table describes the command line arguments to `--preserve` (in order of strictness).
-
-| Argument | Description |
-|:-------------------|:-----------------------------------------------------------------------------------|
-| `installed` | Like `all` except also only add versions that are already installed |
-| `all` | Preserve the state of all existing dependencies (including recursive dependencies) |
-| `direct` | Preserve the state of all existing direct dependencies |
-| `semver` | Preserve semver-compatible versions of direct dependencies |
-| `none` | Do not attempt to preserve any version information |
-| `tiered_installed` | Like `tiered` except first try to add only installed versions |
-| **`tiered`** | Use the tier that will preserve the most version information while |
-| | allowing version resolution to succeed (this is the default) |
-
-Note: To make the default strategy `tiered_installed` set the env var `JULIA_PKG_PRESERVE_TIERED_INSTALLED` to
-true.
-
-After the installation of new packages the project will be precompiled. For more information see `pkg> ?precompile`.
-
-With the `installed` strategy the newly added packages will likely already be precompiled, but if not this may be
-because either the combination of package versions resolved in this environment has not been resolved and
-precompiled before, or the precompile cache has been deleted by the LRU cache storage
-(see `JULIA_MAX_NUM_PRECOMPILE_FILES`).
-
-**Examples**
-```
-pkg> add Example
-pkg> add --preserve=all Example
-pkg> add --weak Example
-pkg> add --extra Example
-pkg> add Example@0.5
-pkg> add Example#master
-pkg> add Example#c37b675
-pkg> add https://github.com/JuliaLang/Example.jl#master
-pkg> add git@github.com:JuliaLang/Example.jl.git
-pkg> add "git@github.com:JuliaLang/Example.jl.git"#master
-pkg> add https://github.com/Company/MonoRepo:juliapkgs/Package.jl
-pkg> add Example=7876af07-990d-54b4-ab0e-23690620f79a
-```
-""",
-],
-PSA[:name => "develop",
- :short_name => "dev",
- :api => API.develop,
- :should_splat => false,
- :arg_count => 1 => Inf,
- :arg_parser => ((x,y) -> parse_package(x,y; add_or_dev=true)),
- :option_spec => [
- PSA[:name => "strict", :api => :strict => true],
- PSA[:name => "local", :api => :shared => false],
- PSA[:name => "shared", :api => :shared => true],
- PSA[:name => "preserve", :takes_arg => true, :api => :preserve => do_preserve],
- ],
- :completions => :complete_add_dev,
- :description => "clone the full package repo locally for development",
- :help => md"""
- [dev|develop] [--preserve=] [--shared|--local] pkg[=uuid] ...
- [dev|develop] [--preserve=] path
-
-Make a package available for development. If `pkg` is an existing local path, that path will be recorded in
-the manifest and used. Otherwise, a full git clone of `pkg` is made. The location of the clone is
-controlled by the `--shared` (default) and `--local` arguments. The `--shared` location defaults to
-`~/.julia/dev`, but can be controlled with the `JULIA_PKG_DEVDIR` environment variable.
-
-When `--local` is given, the clone is placed in a `dev` folder in the current project. This
-is not supported for paths, only registered packages.
-
-This operation is undone by `free`.
-
-The preserve strategies offered by `add` are also available via the `preserve` argument.
-See `add` for more information.
-
-**Examples**
-```jl
-pkg> develop Example
-pkg> develop https://github.com/JuliaLang/Example.jl
-pkg> develop ~/mypackages/Example
-pkg> develop --local Example
-```
-""",
-],
-PSA[:name => "free",
- :api => API.free,
- :should_splat => false,
- :arg_count => 0 => Inf,
- :option_spec => [
- PSA[:name => "all", :api => :all_pkgs => true],
- ],
- :arg_parser => parse_package,
- :completions => :complete_fixed_packages,
- :description => "undoes a `pin`, `develop`, or stops tracking a repo",
- :help => md"""
- free pkg[=uuid] ...
- free [--all]
-
-Free pinned packages, which allows it to be upgraded or downgraded again. If the package is checked out (see `help develop`) then this command
-makes the package no longer being checked out. Specifying `--all` will free all dependencies (direct and indirect).
-""",
-],
-PSA[:name => "why",
- :api => API.why,
- :should_splat => false,
- :arg_count => 1 => 1,
- :option_spec => [
- PSA[:name => "workspace", :api => :workspace => true],
- ],
- :arg_parser => parse_package,
- :completions => :complete_all_installed_packages,
- :description => "shows why a package is in the manifest",
- :help => md"""
- why [--workspace] pkg[=uuid] ...
-
-Show the reason why packages are in the manifest, printed as a path through the
-dependency graph starting at the direct dependencies.
-The `workspace` option can be used to show the path from any dependency of a project in
-the workspace.
-
-!!! compat "Julia 1.9"
- The `why` function is added in Julia 1.9
-""",
-],
-PSA[:name => "pin",
- :api => API.pin,
- :should_splat => false,
- :arg_count => 0 => Inf,
- :option_spec => [
- PSA[:name => "all", :api => :all_pkgs => true],
- ],
- :arg_parser => parse_package,
- :completions => :complete_installed_packages,
- :description => "pins the version of packages",
- :help => md"""
- pin pkg[=uuid] ...
- pin [--all]
-
-Pin packages to given versions, or the current version if no version is specified. A pinned package has its version fixed and will not be upgraded or downgraded.
-A pinned package has the symbol `⚲` next to its version in the status list.. Specifying `--all` will pin all dependencies (direct and indirect).
-
-**Examples**
-```
-pkg> pin Example
-pkg> pin Example@0.5.0
-pkg> pin Example=7876af07-990d-54b4-ab0e-23690620f79a@0.5.0
-pkg> pin --all
-```
-""",
-],
-PSA[:name => "build",
- :api => API.build,
- :should_splat => false,
- :arg_count => 0 => Inf,
- :arg_parser => parse_package,
- :option_spec => [
- PSA[:name => "verbose", :short_name => "v", :api => :verbose => true],
- ],
- :completions => :complete_installed_packages,
- :description => "run the build script for packages",
- :help => md"""
- build [-v|--verbose] pkg[=uuid] ...
-
-Run the build script in `deps/build.jl` for `pkg` and all of its dependencies in depth-first recursive order.
-If no packages are given, run the build scripts for all packages in the manifest.
-The `-v`/`--verbose` option redirects build output to `stdout`/`stderr` instead of the `build.log` file.
-The `startup.jl` file is disabled during building unless julia is started with `--startup-file=yes`.
-""",
-],
-PSA[:name => "resolve",
- :api => API.resolve,
- :description => "resolves to update the manifest from changes in dependencies of developed packages",
- :help => md"""
- resolve
-
-Resolve the project i.e. run package resolution and update the Manifest. This is useful in case the dependencies of developed
-packages have changed causing the current Manifest to be out of sync.
-""",
-],
-PSA[:name => "activate",
- :api => API.activate,
- :arg_count => 0 => 1,
- :arg_parser => parse_activate,
- :option_spec => [
- PSA[:name => "shared", :api => :shared => true],
- PSA[:name => "temp", :api => :temp => true],
- ],
- :completions => :complete_activate,
- :description => "set the primary environment the package manager manipulates",
- :help => md"""
- activate
- activate [--shared] path
- activate --temp
- activate - (activates the previously active environment)
-
-Activate the environment at the given `path`, or use the first project found in
-`LOAD_PATH` (ignoring `"@"`) if no `path` is specified.
-In the latter case, for the default value of `LOAD_PATH`, the result is to activate the
-`@v#.#` environment.
-The active environment is the environment that is modified by executing package commands.
-When the option `--shared` is given, `path` will be assumed to be a directory name and searched for in the
-`environments` folders of the depots in the depot stack. In case no such environment exists in any of the depots,
-it will be placed in the first depot of the stack.
-Use the `--temp` option to create temporary environments which are removed when the julia
-process is exited.
-Use a single `-` to activate the previously active environment.
-""" ,
-],
-PSA[:name => "update",
- :short_name => "up",
- :api => API.up,
- :should_splat => false,
- :arg_count => 0 => Inf,
- :arg_parser => parse_package,
- :option_spec => [
- PSA[:name => "project", :short_name => "p", :api => :mode => PKGMODE_PROJECT],
- PSA[:name => "manifest", :short_name => "m", :api => :mode => PKGMODE_MANIFEST],
- PSA[:name => "major", :api => :level => UPLEVEL_MAJOR],
- PSA[:name => "minor", :api => :level => UPLEVEL_MINOR],
- PSA[:name => "patch", :api => :level => UPLEVEL_PATCH],
- PSA[:name => "fixed", :api => :level => UPLEVEL_FIXED],
- PSA[:name => "preserve", :takes_arg => true, :api => :preserve => do_preserve],
- ],
- :completions => :complete_installed_packages,
- :description => "update packages in manifest",
- :help => md"""
- [up|update] [-p|--project] [opts] pkg[=uuid] [@version] ...
- [up|update] [-m|--manifest] [opts] pkg[=uuid] [@version] ...
-
- opts: --major | --minor | --patch | --fixed
- --preserve=
-
-Update `pkg` within the constraints of the indicated version
-specifications. These specifications are of the form `@1`, `@1.2` or `@1.2.3`, allowing
-any version with a prefix that matches, or ranges thereof, such as `@1.2-3.4.5`.
-In `--project` mode, package specifications only match project packages, while
-in `--manifest` mode they match any manifest package. Bound level options force
-the following packages to be upgraded only within the current major, minor,
-patch version; if the `--fixed` upgrade level is given, then the following
-packages will not be upgraded at all.
-
-After any package updates the project will be precompiled. For more information see `pkg> ?precompile`.
-""",
-],
-PSA[:name => "generate",
- :api => API.generate,
- :arg_count => 1 => 1,
- :arg_parser => ((x,y) -> map(expanduser, unwrap(x))),
- :description => "generate files for a new project",
- :help => md"""
- generate pkgname
-
-Create a minimal project called `pkgname` in the current folder. For more featureful package creation, please see `PkgTemplates.jl`.
-""",
-],
-PSA[:name => "precompile",
- :api => API.precompile,
- :should_splat => false,
- :arg_count => 0 => Inf,
- :completions => :complete_installed_packages,
- :description => "precompile all the project dependencies",
- :option_spec => [
- PSA[:name => "workspace", :api => :workspace => true],
- ],
- :help => md"""
- precompile [--workspace]
- precompile [--workspace] pkgs...
-
-Precompile all or specified dependencies of the project in parallel.
-The `startup.jl` file is disabled during precompilation unless julia is started with `--startup-file=yes`.
-The `workspace` option will precompile all packages in the workspace and not only the active project.
-
-Errors will only throw when precompiling the top-level dependencies, given that
-not all manifest dependencies may be loaded by the top-level dependencies on the given system.
-
-This method is called automatically after any Pkg action that changes the manifest.
-Any packages that have previously errored during precompilation won't be retried in auto mode
-until they have changed. To disable automatic precompilation set the environment variable `JULIA_PKG_PRECOMPILE_AUTO=0`.
-To manually control the number of tasks used set the environment variable `JULIA_NUM_PRECOMPILE_TASKS`.
-""",
-],
-PSA[:name => "status",
- :short_name => "st",
- :api => API.status,
- :should_splat => false,
- :arg_count => 0 => Inf,
- :arg_parser => parse_package,
- :option_spec => [
- PSA[:name => "project", :short_name => "p", :api => :mode => PKGMODE_PROJECT],
- PSA[:name => "manifest", :short_name => "m", :api => :mode => PKGMODE_MANIFEST],
- PSA[:name => "diff", :short_name => "d", :api => :diff => true],
- PSA[:name => "outdated", :short_name => "o", :api => :outdated => true],
- PSA[:name => "compat", :short_name => "c", :api => :compat => true],
- PSA[:name => "extensions", :short_name => "e", :api => :extensions => true],
- PSA[:name => "workspace", :api => :workspace => true],
- ],
- :completions => :complete_installed_packages,
- :description => "summarize contents of and changes to environment",
- :help => md"""
- [st|status] [-d|--diff] [--workspace] [-o|--outdated] [pkgs...]
- [st|status] [-d|--diff] [--workspace] [-o|--outdated] [-p|--project] [pkgs...]
- [st|status] [-d|--diff] [--workspace] [-o|--outdated] [-m|--manifest] [pkgs...]
- [st|status] [-d|--diff] [--workspace] [-e|--extensions] [-p|--project] [pkgs...]
- [st|status] [-d|--diff] [--workspace] [-e|--extensions] [-m|--manifest] [pkgs...]
- [st|status] [-c|--compat] [pkgs...]
-
-Show the status of the current environment. Packages marked with `⌃` have new
-versions that may be installed, e.g. via `pkg> up`. Those marked with `⌅` have
-new versions available, but cannot be installed due to compatibility
-constraints. To see why use `pkg> status --outdated` which shows any packages
-that are not at their latest version and if any packages are holding them back.
-
-Use `pkg> status --extensions` to show dependencies with extensions and what extension dependencies
-of those that are currently loaded.
-
-In `--project` mode (default), the status of the project file is summarized. In `--manifest`
-mode the output also includes the recursive dependencies of added packages given in the manifest.
-If there are any packages listed as arguments the output will be limited to those packages.
-The `--diff` option will, if the environment is in a git repository, limit
-the output to the difference as compared to the last git commit.
-The `--compat` option alone shows project compat entries.
-The `--workspace` option shows the (merged) status of packages in the workspace.
-
-!!! compat "Julia 1.8"
- The `⌃` and `⌅` indicators were added in Julia 1.8.
- The `--outdated` and `--compat` options require at least Julia 1.8.
-""",
-],
-PSA[:name => "compat",
- :api => API.compat,
- :arg_count => 0 => 2,
- :completions => :complete_installed_packages_and_compat,
- :description => "edit compat entries in the current Project and re-resolve",
- :help => md"""
- compat [pkg] [compat_string]
-
-Edit project [compat] entries directly, or via an interactive menu by not specifying any arguments.
-When directly editing use tab to complete the package name and any existing compat entry.
-Specifying a package with a blank compat entry will remove the entry.
-After changing compat entries a `resolve` will be attempted to check whether the current
-environment is compliant with the new compat rules.
-""",
-],
-PSA[:name => "gc",
- :api => API.gc,
- :option_spec => [
- PSA[:name => "all", :api => :collect_delay => Hour(0)],
- PSA[:name => "verbose", :short_name => "v", :api => :verbose => true],
- ],
- :description => "garbage collect packages not used for a significant time",
- :help => md"""
- gc [-v|--verbose] [--all]
-
-Free disk space by garbage collecting packages not used for a significant time.
-The `--all` option will garbage collect all packages which can not be immediately
-reached from any existing project.
-Use verbose mode for detailed output.
-""",
-],
-PSA[:name => "undo",
- :api => API.undo,
- :description => "undo the latest change to the active project",
- :help => md"""
- undo
-
-Undoes the latest change to the active project.
-""",
-],
-PSA[:name => "redo",
- :api => API.redo,
- :description => "redo the latest change to the active project",
- :help => md"""
- redo
-
-Redoes the changes from the latest [`undo`](@ref).
-""",
-],
-], #package
-"registry" => CommandDeclaration[
-PSA[:name => "add",
- :api => Registry.add,
- :should_splat => false,
- :arg_count => 0 => Inf,
- :arg_parser => ((x,y) -> parse_registry(x,y; add = true)),
- :description => "add package registries",
- :help => md"""
- registry add reg...
-
-Add package registries `reg...` to the user depot. Without arguments
-it adds known registries, i.e. the General registry and registries
-served by the configured package server.
-
-**Examples**
-```
-pkg> registry add General
-pkg> registry add https://www.my-custom-registry.com
-pkg> registry add
-```
-""",
-],
-PSA[:name => "remove",
- :short_name => "rm",
- :api => Registry.rm,
- :should_splat => false,
- :arg_count => 1 => Inf,
- :arg_parser => parse_registry,
- :description => "remove package registries",
- :help => md"""
- registry [rm|remove] reg...
-
-Remove package registries `reg...`.
-
-**Examples**
-```
-pkg> registry [rm|remove] General
-```
-""",
-],
-PSA[:name => "update",
- :short_name => "up",
- :api => Registry.update,
- :should_splat => false,
- :arg_count => 0 => Inf,
- :arg_parser => parse_registry,
- :description => "update package registries",
- :help => md"""
- registry [up|update]
- registry [up|update] reg...
-
-Update package registries `reg...`. If no registries are specified
-all registries will be updated.
-
-**Examples**
-```
-pkg> registry up
-pkg> registry up General
-```
-""",
-],
-PSA[:name => "status",
- :short_name => "st",
- :api => Registry.status,
- :description => "information about installed registries",
- :help => md"""
- registry [st|status]
-
-Display information about installed registries.
-
-**Examples**
-```
-pkg> registry status
-```
-""",
-]
-], #registry
] #command_declarations
diff --git a/src/Registry/Registry.jl b/src/Registry/Registry.jl
index d5e938baa1..b9319d5347 100644
--- a/src/Registry/Registry.jl
+++ b/src/Registry/Registry.jl
@@ -1,8 +1,8 @@
module Registry
import ..Pkg
-using ..Pkg: depots1, printpkgstyle, stderr_f, isdir_nothrow, pathrepr, pkg_server,
- GitTools
+using ..Pkg: depots, depots1, printpkgstyle, stderr_f, isdir_nothrow, pathrepr, pkg_server,
+ GitTools, atomic_toml_write
using ..Pkg.PlatformEngines: download_verify_unpack, download, download_verify, exe7z, verify_archive_tree_hash
using UUIDs, LibGit2, TOML, Dates
import FileWatching
@@ -12,17 +12,19 @@ public add, rm, status, update
include("registry_instance.jl")
mutable struct RegistrySpec
- name::Union{String,Nothing}
- uuid::Union{UUID,Nothing}
- url::Union{String,Nothing}
+ name::Union{String, Nothing}
+ uuid::Union{UUID, Nothing}
+ url::Union{String, Nothing}
# the path field can be a local source when adding a registry
# otherwise it is the path where the registry is installed
- path::Union{String,Nothing}
- linked::Union{Bool,Nothing}
+ path::Union{String, Nothing}
+ linked::Union{Bool, Nothing}
end
RegistrySpec(name::String) = RegistrySpec(name = name)
-RegistrySpec(;name::Union{String,Nothing}=nothing, uuid::Union{String,UUID,Nothing}=nothing,
-url::Union{String,Nothing}=nothing, path::Union{String,Nothing}=nothing, linked::Union{Bool,Nothing}=nothing) =
+RegistrySpec(;
+ name::Union{String, Nothing} = nothing, uuid::Union{String, UUID, Nothing} = nothing,
+ url::Union{String, Nothing} = nothing, path::Union{String, Nothing} = nothing, linked::Union{Bool, Nothing} = nothing
+) =
RegistrySpec(name, isa(uuid, String) ? UUID(uuid) : uuid, url, path, linked)
"""
@@ -39,27 +41,31 @@ Pkg.Registry.add(uuid = "23338594-aafe-5451-b93e-139f81909106")
Pkg.Registry.add(url = "https://github.com/JuliaRegistries/General.git")
```
"""
-add(reg::Union{String,RegistrySpec}; kwargs...) = add([reg]; kwargs...)
+add(reg::Union{String, RegistrySpec}; kwargs...) = add([reg]; kwargs...)
add(regs::Vector{String}; kwargs...) = add(RegistrySpec[RegistrySpec(name = name) for name in regs]; kwargs...)
-function add(; name=nothing, uuid=nothing, url=nothing, path=nothing, linked=nothing, kwargs...)
- if all(isnothing, (name, uuid, url, path, linked))
+function add(; name = nothing, uuid = nothing, url = nothing, path = nothing, linked = nothing, kwargs...)
+ return if all(isnothing, (name, uuid, url, path, linked))
add(RegistrySpec[]; kwargs...)
else
add([RegistrySpec(; name, uuid, url, path, linked)]; kwargs...)
end
end
-function add(regs::Vector{RegistrySpec}; io::IO=stderr_f(), depot=depots1())
- if isempty(regs)
- download_default_registries(io, only_if_empty = false; depot)
+function add(regs::Vector{RegistrySpec}; io::IO = stderr_f(), depots::Union{String, Vector{String}} = depots())
+ return if isempty(regs)
+ download_default_registries(io, only_if_empty = false; depots = depots)
else
- download_registries(io, regs, depot)
+ download_registries(io, regs, depots)
end
end
const DEFAULT_REGISTRIES =
- RegistrySpec[RegistrySpec(name = "General",
- uuid = UUID("23338594-aafe-5451-b93e-139f81909106"),
- url = "https://github.com/JuliaRegistries/General.git")]
+ RegistrySpec[
+ RegistrySpec(
+ name = "General",
+ uuid = UUID("23338594-aafe-5451-b93e-139f81909106"),
+ url = "https://github.com/JuliaRegistries/General.git"
+ ),
+]
function pkg_server_registry_info()
registry_info = Dict{UUID, Base.SHA1}()
@@ -69,12 +75,12 @@ function pkg_server_registry_info()
download_ok = false
try
f = retry(delays = fill(1.0, 3)) do
- download("$server/registries", tmp_path, verbose=false)
+ download("$server/registries", tmp_path, verbose = false)
end
f()
download_ok = true
catch err
- @warn "could not download $server/registries" exception=err
+ @warn "could not download $server/registries" exception = err
end
download_ok || return nothing
open(tmp_path) do io
@@ -86,7 +92,7 @@ function pkg_server_registry_info()
end
end
end
- Base.rm(tmp_path, force=true)
+ Base.rm(tmp_path, force = true)
return server, registry_info
end
@@ -103,12 +109,36 @@ end
pkg_server_url_hash(url::String) = Base.SHA1(split(url, '/')[end])
-function download_default_registries(io::IO; only_if_empty::Bool = true, depot=depots1())
- installed_registries = reachable_registries()
+"""
+ is_pkg_in_pkgserver_registry(pkg_uuid::Base.UUID, server_registry_info, registries)
+
+Check if a package UUID is tracked by the PkgServer by verifying it exists in
+a registry that is known to the PkgServer.
+"""
+function is_pkg_in_pkgserver_registry(pkg_uuid::Base.UUID, server_registry_info, registries)
+ server_registry_info === nothing && return false
+ registries === nothing && return false
+
+ server, registry_info = server_registry_info
+ for reg in registries
+ if reg.uuid in keys(registry_info)
+ if haskey(reg, pkg_uuid)
+ return true
+ end
+ end
+ end
+ return false
+end
+
+function download_default_registries(io::IO; only_if_empty::Bool = true, depots::Union{String, Vector{String}} = depots())
+ # Check the specified depots for installed registries
+ installed_registries = reachable_registries(; depots)
# Only clone if there are no installed registries, unless called
# with false keyword argument.
if isempty(installed_registries) || !only_if_empty
- printpkgstyle(io, :Installing, "known registries into $(pathrepr(depot))")
+ # Install to the first depot in the list
+ target_depot = depots1(depots)
+ printpkgstyle(io, :Installing, "known registries into $(pathrepr(target_depot))")
registries = copy(DEFAULT_REGISTRIES)
for uuid in keys(pkg_server_registry_urls())
if !(uuid in (reg.uuid for reg in registries))
@@ -116,7 +146,7 @@ function download_default_registries(io::IO; only_if_empty::Bool = true, depot=d
end
end
filter!(reg -> !(reg.uuid in installed_registries), registries)
- download_registries(io, registries, depot)
+ download_registries(io, registries, depots)
return true
end
return false
@@ -135,7 +165,9 @@ function populate_known_registries_with_urls!(registries::Vector{RegistrySpec})
elseif reg.name !== nothing
if reg.name == known.name
named_regs = filter(r -> r.name == reg.name, known_registries)
- if !all(r -> r.uuid == first(named_regs).uuid, named_regs)
+ if isempty(named_regs)
+ Pkg.Types.pkgerror("registry with name `$(reg.name)` not found in known registries.")
+ elseif !all(r -> r.uuid == first(named_regs).uuid, named_regs)
Pkg.Types.pkgerror("multiple registries with name `$(reg.name)`, please specify with uuid.")
end
reg.uuid = known.uuid
@@ -145,10 +177,11 @@ function populate_known_registries_with_urls!(registries::Vector{RegistrySpec})
end
end
end
+ return
end
function registry_use_pkg_server()
- get(ENV, "JULIA_PKG_SERVER", nothing) !== ""
+ return get(ENV, "JULIA_PKG_SERVER", nothing) !== ""
end
registry_read_from_tarball() =
@@ -158,125 +191,147 @@ function check_registry_state(reg)
reg_currently_uses_pkg_server = reg.tree_info !== nothing
reg_should_use_pkg_server = registry_use_pkg_server()
if reg_currently_uses_pkg_server && !reg_should_use_pkg_server
+ pkg_cmd = Pkg.in_repl_mode() ? "pkg> registry rm $(reg.name); registry add $(reg.name)" : "using Pkg; Pkg.Registry.rm(\"$(reg.name)\"); Pkg.Registry.add(\"$(reg.name)\")"
msg = string(
"Your registry may be outdated. We recommend that you run the ",
"following command: ",
- "using Pkg; Pkg.Registry.rm(\"$(reg.name)\"); Pkg.Registry.add(\"$(reg.name)\")",
+ pkg_cmd,
)
@warn(msg)
end
return nothing
end
-function download_registries(io::IO, regs::Vector{RegistrySpec}, depot::String=depots1())
+function download_registries(io::IO, regs::Vector{RegistrySpec}, depots::Union{String, Vector{String}} = depots())
+ # Use the first depot as the target
+ target_depot = depots1(depots)
populate_known_registries_with_urls!(regs)
- regdir = joinpath(depot, "registries")
+ registry_update_log = get_registry_update_log()
+ regdir = joinpath(target_depot, "registries")
isdir(regdir) || mkpath(regdir)
# only allow one julia process to download and install registries at a time
FileWatching.mkpidlock(joinpath(regdir, ".pid"), stale_age = 10) do
- registry_urls = pkg_server_registry_urls()
- for reg in regs
- if reg.path !== nothing && reg.url !== nothing
- Pkg.Types.pkgerror("""
- ambiguous registry specification; both `url` and `path` are set:
- url=\"$(reg.url)\"
- path=\"$(reg.path)\"
- """
- )
- end
- url = get(registry_urls, reg.uuid, nothing)
- if url !== nothing && registry_read_from_tarball()
- tmp = tempname()
- try
- download_verify(url, nothing, tmp)
- catch err
- Pkg.Types.pkgerror("could not download $url \nException: $(sprint(showerror, err))")
- end
- _hash = pkg_server_url_hash(url)
- if !verify_archive_tree_hash(tmp, _hash)
- Pkg.Types.pkgerror("unable to verify download from $url")
- end
- if reg.name === nothing
- # Need to look up the registry name here
- reg_unc = uncompress_registry(tmp)
- reg.name = TOML.parse(reg_unc["Registry.toml"])["name"]::String
- end
- mv(tmp, joinpath(regdir, reg.name * ".tar.gz"); force=true)
- reg_info = Dict("uuid" => string(reg.uuid), "git-tree-sha1" => string(_hash), "path" => reg.name * ".tar.gz")
- open(joinpath(regdir, reg.name * ".toml"), "w") do io
- TOML.print(io, reg_info)
+ # once we're pidlocked check if another process has installed any of the registries
+ reachable_uuids = map(r -> r.uuid, reachable_registries(; depots))
+ filter!(r -> !in(r.uuid, reachable_uuids), regs)
+
+ registry_urls = pkg_server_registry_urls()
+ for reg in regs
+ if reg.path !== nothing && reg.url !== nothing
+ Pkg.Types.pkgerror(
+ """
+ ambiguous registry specification; both `url` and `path` are set:
+ url=\"$(reg.url)\"
+ path=\"$(reg.path)\"
+ """
+ )
end
- printpkgstyle(io, :Added, "`$(reg.name)` registry to $(Base.contractuser(regdir))")
- else
- mktempdir() do tmp
- if reg.path !== nothing && reg.linked == true # symlink to local source
- registry = Registry.RegistryInstance(reg.path)
- regpath = joinpath(regdir, registry.name)
- printpkgstyle(io, :Symlinking, "registry from `$(Base.contractuser(reg.path))`")
- isdir(dirname(regpath)) || mkpath(dirname(regpath))
- symlink(reg.path, regpath)
- isfile(joinpath(regpath, "Registry.toml")) || Pkg.Types.pkgerror("no `Registry.toml` file in linked registry.")
- registry = Registry.RegistryInstance(regpath)
- printpkgstyle(io, :Symlinked, "registry `$(Base.contractuser(registry.name))` to `$(Base.contractuser(regpath))`")
- return
- elseif reg.url !== nothing && reg.linked == true
- Pkg.Types.pkgerror("""
- A symlinked registry was requested but `path` was not set and `url` was set to `$url`.
- Set only `path` and `linked = true` to use registry symlinking.
- """)
- elseif url !== nothing && registry_use_pkg_server()
- # download from Pkg server
- try
- download_verify_unpack(url, nothing, tmp, ignore_existence = true, io = io)
- catch err
- Pkg.Types.pkgerror("could not download $url \nException: $(sprint(showerror, err))")
- end
- tree_info_file = joinpath(tmp, ".tree_info.toml")
- hash = pkg_server_url_hash(url)
- write(tree_info_file, "git-tree-sha1 = " * repr(string(hash)))
- elseif reg.path !== nothing # copy from local source
- printpkgstyle(io, :Copying, "registry from `$(Base.contractuser(reg.path))`")
- isfile(joinpath(reg.path, "Registry.toml")) || Pkg.Types.pkgerror("no `Registry.toml` file in source directory.")
- registry = Registry.RegistryInstance(reg.path)
- regpath = joinpath(regdir, registry.name)
- cp(reg.path, regpath; force=true) # has to be cp given we're copying
- printpkgstyle(io, :Copied, "registry `$(Base.contractuser(registry.name))` to `$(Base.contractuser(regpath))`")
- return
- elseif reg.url !== nothing # clone from url
- # retry to help spurious connection issues, particularly on CI
- repo = retry(GitTools.clone, delays = fill(1.0, 5), check=(s,e)->isa(e, LibGit2.GitError))(io, reg.url, tmp; header = "registry from $(repr(reg.url))")
- LibGit2.close(repo)
- else
- Pkg.Types.pkgerror("no path or url specified for registry")
+ url = get(registry_urls, reg.uuid, nothing)
+ if url !== nothing && registry_read_from_tarball()
+ tmp = tempname()
+ try
+ download_verify(url, nothing, tmp)
+ catch err
+ Pkg.Types.pkgerror("could not download $url \nException: $(sprint(showerror, err))")
+ end
+ _hash = pkg_server_url_hash(url)
+ if !verify_archive_tree_hash(tmp, _hash)
+ Pkg.Types.pkgerror("unable to verify download from $url")
end
- # verify that the clone looks like a registry
- if !isfile(joinpath(tmp, "Registry.toml"))
- Pkg.Types.pkgerror("no `Registry.toml` file in cloned registry.")
+ if reg.name === nothing
+ # Need to look up the registry name here
+ reg_unc = uncompress_registry(tmp)
+ reg.name = TOML.parse(reg_unc["Registry.toml"])["name"]::String
end
- registry = Registry.RegistryInstance(tmp)
- regpath = joinpath(regdir, registry.name)
- # copy to `depot`
- ispath(dirname(regpath)) || mkpath(dirname(regpath))
- if isfile(joinpath(regpath, "Registry.toml"))
- existing_registry = Registry.RegistryInstance(regpath)
- if registry.uuid == existing_registry.uuid
- println(io,
- "Registry `$(registry.name)` already exists in `$(Base.contractuser(regpath))`.")
+ mv(tmp, joinpath(regdir, reg.name * ".tar.gz"); force = true)
+ reg_info = Dict("uuid" => string(reg.uuid), "git-tree-sha1" => string(_hash), "path" => reg.name * ".tar.gz")
+ atomic_toml_write(joinpath(regdir, reg.name * ".toml"), reg_info)
+ registry_update_log[string(reg.uuid)] = now()
+ printpkgstyle(io, :Added, "`$(reg.name)` registry to $(Base.contractuser(regdir))")
+ else
+ mktempdir() do tmp
+ if reg.path !== nothing && reg.linked == true # symlink to local source
+ registry = Registry.RegistryInstance(reg.path)
+ regpath = joinpath(regdir, registry.name)
+ printpkgstyle(io, :Symlinking, "registry from `$(Base.contractuser(reg.path))`")
+ isdir(dirname(regpath)) || mkpath(dirname(regpath))
+ symlink(reg.path, regpath)
+ isfile(joinpath(regpath, "Registry.toml")) || Pkg.Types.pkgerror("no `Registry.toml` file in linked registry.")
+ registry = Registry.RegistryInstance(regpath)
+ printpkgstyle(io, :Symlinked, "registry `$(Base.contractuser(registry.name))` to `$(Base.contractuser(regpath))`")
+ registry_update_log[string(reg.uuid)] = now()
+ save_registry_update_log(registry_update_log)
+ return
+ elseif reg.url !== nothing && reg.linked == true
+ Pkg.Types.pkgerror(
+ """
+ A symlinked registry was requested but `path` was not set and `url` was set to `$url`.
+ Set only `path` and `linked = true` to use registry symlinking.
+ """
+ )
+ elseif url !== nothing && registry_use_pkg_server()
+ # download from Pkg server
+ try
+ download_verify_unpack(url, nothing, tmp, ignore_existence = true, io = io)
+ catch err
+ Pkg.Types.pkgerror("could not download $url \nException: $(sprint(showerror, err))")
+ end
+ tree_info_file = joinpath(tmp, ".tree_info.toml")
+ hash = pkg_server_url_hash(url)
+ write(tree_info_file, "git-tree-sha1 = " * repr(string(hash)))
+ elseif reg.path !== nothing # copy from local source
+ printpkgstyle(io, :Copying, "registry from `$(Base.contractuser(reg.path))`")
+ isfile(joinpath(reg.path, "Registry.toml")) || Pkg.Types.pkgerror("no `Registry.toml` file in source directory.")
+ registry = Registry.RegistryInstance(reg.path)
+ regpath = joinpath(regdir, registry.name)
+ cp(reg.path, regpath; force = true) # has to be cp given we're copying
+ printpkgstyle(io, :Copied, "registry `$(Base.contractuser(registry.name))` to `$(Base.contractuser(regpath))`")
+ registry_update_log[string(reg.uuid)] = now()
+ save_registry_update_log(registry_update_log)
+ return
+ elseif reg.url !== nothing # clone from url
+ # retry to help spurious connection issues, particularly on CI
+ repo = retry(GitTools.clone, delays = fill(1.0, 5), check = (s, e) -> isa(e, LibGit2.GitError))(io, reg.url, tmp; header = "registry from $(repr(reg.url))")
+ LibGit2.close(repo)
else
- throw(Pkg.Types.PkgError("registry `$(registry.name)=\"$(registry.uuid)\"` conflicts with " *
- "existing registry `$(existing_registry.name)=\"$(existing_registry.uuid)\"`. " *
- "To install it you can clone it manually into e.g. " *
- "`$(Base.contractuser(joinpath(regdir, registry.name*"-2")))`."))
+ Pkg.Types.pkgerror("no path or url specified for registry")
+ end
+ # verify that the clone looks like a registry
+ if !isfile(joinpath(tmp, "Registry.toml"))
+ Pkg.Types.pkgerror("no `Registry.toml` file in cloned registry.")
+ end
+ registry = Registry.RegistryInstance(tmp)
+ regpath = joinpath(regdir, registry.name)
+ # copy to `depot`
+ ispath(dirname(regpath)) || mkpath(dirname(regpath))
+ if isfile(joinpath(regpath, "Registry.toml"))
+ existing_registry = Registry.RegistryInstance(regpath)
+ if registry.uuid == existing_registry.uuid
+ println(
+ io,
+ "Registry `$(registry.name)` already exists in `$(Base.contractuser(regpath))`."
+ )
+ else
+ throw(
+ Pkg.Types.PkgError(
+ "registry `$(registry.name)=\"$(registry.uuid)\"` conflicts with " *
+ "existing registry `$(existing_registry.name)=\"$(existing_registry.uuid)\"`. " *
+ "To install it you can clone it manually into e.g. " *
+ "`$(Base.contractuser(joinpath(regdir, registry.name * "-2")))`."
+ )
+ )
+ end
+ elseif (url !== nothing && registry_use_pkg_server()) || reg.linked !== true
+ # if the dir doesn't exist, or exists but doesn't contain a Registry.toml
+ mv(tmp, regpath, force = true)
+ registry_update_log[string(reg.uuid)] = now()
+ printpkgstyle(io, :Added, "registry `$(registry.name)` to `$(Base.contractuser(regpath))`")
end
- elseif (url !== nothing && registry_use_pkg_server()) || reg.linked !== true
- # if the dir doesn't exist, or exists but doesn't contain a Registry.toml
- mv(tmp, regpath, force=true)
- printpkgstyle(io, :Added, "registry `$(registry.name)` to `$(Base.contractuser(regpath))`")
end
end
end
- end
end # mkpidlock
+ save_registry_update_log(registry_update_log)
return nothing
end
@@ -292,29 +347,31 @@ Pkg.Registry.rm("General")
Pkg.Registry.rm(uuid = "23338594-aafe-5451-b93e-139f81909106")
```
"""
-rm(reg::Union{String,RegistrySpec}; kwargs...) = rm([reg]; kwargs...)
+rm(reg::Union{String, RegistrySpec}; kwargs...) = rm([reg]; kwargs...)
rm(regs::Vector{String}; kwargs...) = rm([RegistrySpec(name = name) for name in regs]; kwargs...)
-function rm(; name=nothing, uuid=nothing, url=nothing, path=nothing, linked=nothing, kwargs...)
- rm([RegistrySpec(; name, uuid, url, path, linked)]; kwargs...)
+function rm(; name = nothing, uuid = nothing, url = nothing, path = nothing, linked = nothing, kwargs...)
+ return rm([RegistrySpec(; name, uuid, url, path, linked)]; kwargs...)
end
-function rm(regs::Vector{RegistrySpec}; io::IO=stderr_f())
- for registry in find_installed_registries(io, regs; depots=first(Base.DEPOT_PATH))
+function rm(regs::Vector{RegistrySpec}; io::IO = stderr_f())
+ for registry in find_installed_registries(io, regs; depots = first(Base.DEPOT_PATH))
printpkgstyle(io, :Removing, "registry `$(registry.name)` from $(Base.contractuser(registry.path))")
if isfile(registry.path)
d = TOML.parsefile(registry.path)
if haskey(d, "path")
- Base.rm(joinpath(dirname(registry.path), d["path"]); force=true)
+ Base.rm(joinpath(dirname(registry.path), d["path"]); force = true)
end
end
- Base.rm(registry.path; force=true, recursive=true)
+ Base.rm(registry.path; force = true, recursive = true)
end
return nothing
end
# Search for the input registries among installed ones
-function find_installed_registries(io::IO,
- needles::Union{Vector{Registry.RegistryInstance}, Vector{RegistrySpec}};
- depots=Base.DEPOT_PATH)
+function find_installed_registries(
+ io::IO,
+ needles::Union{Vector{Registry.RegistryInstance}, Vector{RegistrySpec}};
+ depots = Base.DEPOT_PATH
+ )
haystack = reachable_registries(; depots)
output = Registry.RegistryInstance[]
for needle in needles
@@ -331,7 +388,9 @@ function find_installed_registries(io::IO,
elseif needle.name !== nothing
if needle.name == candidate.name
named_regs = filter(r -> r.name == needle.name, haystack)
- if !all(r -> r.uuid == first(named_regs).uuid, named_regs)
+ if isempty(named_regs)
+ Pkg.Types.pkgerror("registry with name `$(needle.name)` not found in reachable registries.")
+ elseif !all(r -> r.uuid == first(named_regs).uuid, named_regs)
Pkg.Types.pkgerror("multiple registries with name `$(needle.name)`, please specify with uuid.")
end
push!(output, candidate)
@@ -340,9 +399,13 @@ function find_installed_registries(io::IO,
end
end
if !found
- println(io, "registry `$(needle.name === nothing ? needle.uuid :
- needle.uuid === nothing ? needle.name :
- "$(needle.name)=$(needle.uuid)")` not found.")
+ println(
+ io, "registry `$(
+ needle.name === nothing ? needle.uuid :
+ needle.uuid === nothing ? needle.name :
+ "$(needle.name)=$(needle.uuid)"
+ )` not found."
+ )
end
end
return output
@@ -359,9 +422,7 @@ function save_registry_update_log(d::Dict)
pkg_scratch_space = joinpath(DEPOT_PATH[1], "scratchspaces", "44cfe95a-1eb2-52ea-b672-e2afdf69b78f")
mkpath(pkg_scratch_space)
pkg_reg_updated_file = joinpath(pkg_scratch_space, "registry_updates.toml")
- open(pkg_reg_updated_file, "w") do io
- TOML.print(io, d)
- end
+ return atomic_toml_write(pkg_reg_updated_file, d)
end
"""
@@ -379,172 +440,177 @@ Pkg.Registry.update("General")
Pkg.Registry.update(uuid = "23338594-aafe-5451-b93e-139f81909106")
```
"""
-update(reg::Union{String,RegistrySpec}; kwargs...) = update([reg]; kwargs...)
+update(reg::Union{String, RegistrySpec}; kwargs...) = update([reg]; kwargs...)
update(regs::Vector{String}; kwargs...) = update([RegistrySpec(name = name) for name in regs]; kwargs...)
-function update(; name=nothing, uuid=nothing, url=nothing, path=nothing, linked=nothing, kwargs...)
- if all(isnothing, (name, uuid, url, path, linked))
+function update(; name = nothing, uuid = nothing, url = nothing, path = nothing, linked = nothing, kwargs...)
+ return if all(isnothing, (name, uuid, url, path, linked))
update(RegistrySpec[]; kwargs...)
else
update([RegistrySpec(; name, uuid, url, path, linked)]; kwargs...)
end
end
-function update(regs::Vector{RegistrySpec}; io::IO=stderr_f(), force::Bool=true, depots = [depots1()], update_cooldown = Second(1))
+function update(regs::Vector{RegistrySpec}; io::IO = stderr_f(), force::Bool = true, depots = [depots1()], update_cooldown = Second(1))
registry_update_log = get_registry_update_log()
for depot in depots
- depot_regs = isempty(regs) ? reachable_registries(; depots=depot) : regs
+ depot_regs = isempty(regs) ? reachable_registries(; depots = depot) : regs
regdir = joinpath(depot, "registries")
isdir(regdir) || mkpath(regdir)
# only allow one julia process to update registries in this depot at a time
FileWatching.mkpidlock(joinpath(regdir, ".pid"), stale_age = 10) do
- errors = Tuple{String, String}[]
- registry_urls = pkg_server_registry_urls()
- for reg in unique(r -> r.uuid, find_installed_registries(io, depot_regs; depots=[depot]); seen=Set{UUID}())
- prev_update = get(registry_update_log, string(reg.uuid), nothing)::Union{Nothing, DateTime}
- if prev_update !== nothing
- diff = now() - prev_update
- if diff < update_cooldown
- @debug "Skipping updating registry $(reg.name) since it is on cooldown: $(Dates.canonicalize(Millisecond(update_cooldown) - diff)) left"
- continue
+ errors = Tuple{String, String}[]
+ registry_urls = pkg_server_registry_urls()
+ for reg in unique(r -> r.uuid, find_installed_registries(io, depot_regs; depots = [depot]); seen = Set{UUID}())
+ prev_update = get(registry_update_log, string(reg.uuid), nothing)::Union{Nothing, DateTime}
+ if prev_update !== nothing
+ diff = now() - prev_update
+ if diff < update_cooldown
+ @debug "Skipping updating registry $(reg.name) since it is on cooldown: $(Dates.canonicalize(Millisecond(update_cooldown) - diff)) left"
+ continue
+ end
end
- end
- let reg=reg, errors=errors
- regpath = pathrepr(reg.path)
- let regpath=regpath
- if reg.tree_info !== nothing
- printpkgstyle(io, :Updating, "registry at " * regpath)
- old_hash = reg.tree_info
- url = get(registry_urls, reg.uuid, nothing)
- if url !== nothing
- check_registry_state(reg)
- end
- if url !== nothing && (new_hash = pkg_server_url_hash(url)) != old_hash
- # TODO: update faster by using a diff, if available
- # TODO: DRY with the code in `download_default_registries`
- let new_hash = new_hash, url = url
- if registry_read_from_tarball()
- tmp = tempname()
- try
- download_verify(url, nothing, tmp)
- catch err
- push!(errors, (reg.path, "failed to download from $(url). Exception: $(sprint(showerror, err))"))
- @goto done_tarball_read
- end
- hash = pkg_server_url_hash(url)
- if !verify_archive_tree_hash(tmp, hash)
- push!(errors, (reg.path, "failed to verify download from $(url)"))
- @goto done_tarball_read
- end
- # If we have an uncompressed Pkg server registry, remove it and get the compressed version
- if isdir(reg.path)
- Base.rm(reg.path; recursive=true, force=true)
- end
- registry_path = dirname(reg.path)
- mv(tmp, joinpath(registry_path, reg.name * ".tar.gz"); force=true)
- reg_info = Dict("uuid" => string(reg.uuid), "git-tree-sha1" => string(hash), "path" => reg.name * ".tar.gz")
- open(joinpath(registry_path, reg.name * ".toml"), "w") do io
- TOML.print(io, reg_info)
- end
- registry_update_log[string(reg.uuid)] = now()
- @label done_tarball_read
- else
- if reg.name == "General" && Base.get_bool_env("JULIA_PKG_GEN_REG_FMT_CHECK", true)
- @info """
+ let reg = reg, errors = errors
+ regpath = pathrepr(reg.path)
+ let regpath = regpath
+ if reg.tree_info !== nothing
+ printpkgstyle(io, :Updating, "registry at " * regpath)
+ old_hash = reg.tree_info
+ url = get(registry_urls, reg.uuid, nothing)
+ if url !== nothing
+ check_registry_state(reg)
+ end
+ if url !== nothing && (new_hash = pkg_server_url_hash(url)) != old_hash
+ # TODO: update faster by using a diff, if available
+ # TODO: DRY with the code in `download_default_registries`
+ let new_hash = new_hash, url = url
+ if registry_read_from_tarball()
+ tmp = tempname()
+ try
+ download_verify(url, nothing, tmp)
+ catch err
+ push!(errors, (reg.path, "failed to download from $(url). Exception: $(sprint(showerror, err))"))
+ @goto done_tarball_read
+ end
+ hash = pkg_server_url_hash(url)
+ if !verify_archive_tree_hash(tmp, hash)
+ push!(errors, (reg.path, "failed to verify download from $(url)"))
+ @goto done_tarball_read
+ end
+ # If we have an uncompressed Pkg server registry, remove it and get the compressed version
+ if isdir(reg.path)
+ Base.rm(reg.path; recursive = true, force = true)
+ end
+ registry_path = dirname(reg.path)
+ mv(tmp, joinpath(registry_path, reg.name * ".tar.gz"); force = true)
+ reg_info = Dict("uuid" => string(reg.uuid), "git-tree-sha1" => string(hash), "path" => reg.name * ".tar.gz")
+ atomic_toml_write(joinpath(registry_path, reg.name * ".toml"), reg_info)
+ registry_update_log[string(reg.uuid)] = now()
+ @label done_tarball_read
+ else
+ if reg.name == "General" &&
+ Base.get_bool_env("JULIA_PKG_GEN_REG_FMT_CHECK", true) &&
+ get(ENV, "JULIA_PKG_SERVER", nothing) != ""
+ # warn if JULIA_PKG_SERVER is set to a non-empty string or not set
+ @info """
The General registry is installed via unpacked tarball.
Consider reinstalling it via the newer faster direct from
tarball format by running:
pkg> registry rm General; registry add General
- """ maxlog=1
- end
- mktempdir() do tmp
- try
- download_verify_unpack(url, nothing, tmp, ignore_existence = true, io=io)
- registry_update_log[string(reg.uuid)] = now()
- catch err
- push!(errors, (reg.path, "failed to download and unpack from $(url). Exception: $(sprint(showerror, err))"))
- @goto done_tarball_unpack
+ """ maxlog = 1
+ end
+ mktempdir() do tmp
+ try
+ download_verify_unpack(url, nothing, tmp, ignore_existence = true, io = io)
+ registry_update_log[string(reg.uuid)] = now()
+ catch err
+ push!(errors, (reg.path, "failed to download and unpack from $(url). Exception: $(sprint(showerror, err))"))
+ @goto done_tarball_unpack
+ end
+ tree_info_file = joinpath(tmp, ".tree_info.toml")
+ write(tree_info_file, "git-tree-sha1 = " * repr(string(new_hash)))
+ mv(tmp, reg.path, force = true)
+ @label done_tarball_unpack
end
- tree_info_file = joinpath(tmp, ".tree_info.toml")
- write(tree_info_file, "git-tree-sha1 = " * repr(string(new_hash)))
- mv(tmp, reg.path, force=true)
- @label done_tarball_unpack
end
end
end
- end
- elseif isdir(joinpath(reg.path, ".git"))
- printpkgstyle(io, :Updating, "registry at " * regpath)
- if reg.name == "General" && Base.get_bool_env("JULIA_PKG_GEN_REG_FMT_CHECK", true)
- @info """
+ elseif isdir(joinpath(reg.path, ".git"))
+ printpkgstyle(io, :Updating, "registry at " * regpath)
+ if reg.name == "General" &&
+ Base.get_bool_env("JULIA_PKG_GEN_REG_FMT_CHECK", true) &&
+ get(ENV, "JULIA_PKG_SERVER", nothing) != ""
+ # warn if JULIA_PKG_SERVER is set to a non-empty string or not set
+ @info """
The General registry is installed via git. Consider reinstalling it via
the newer faster direct from tarball format by running:
pkg> registry rm General; registry add General
- """ maxlog=1
- end
- LibGit2.with(LibGit2.GitRepo(reg.path)) do repo
- if LibGit2.isdirty(repo)
- push!(errors, (regpath, "registry dirty"))
- @goto done_git
+ """ maxlog = 1
end
- if !LibGit2.isattached(repo)
- push!(errors, (regpath, "registry detached"))
- @goto done_git
- end
- if !("origin" in LibGit2.remotes(repo))
- push!(errors, (regpath, "origin not in the list of remotes"))
- @goto done_git
- end
- branch = LibGit2.headname(repo)
- try
- GitTools.fetch(io, repo; refspecs=["+refs/heads/$branch:refs/remotes/origin/$branch"])
- catch e
- e isa Pkg.Types.PkgError || rethrow()
- push!(errors, (reg.path, "failed to fetch from repo: $(e.msg)"))
- @goto done_git
- end
- attempts = 0
- @label merge
- ff_succeeded = try
- LibGit2.merge!(repo; branch="refs/remotes/origin/$branch", fastforward=true)
- catch e
- attempts += 1
- if e isa LibGit2.GitError && e.code == LibGit2.Error.ELOCKED && attempts <= 3
- @warn "Registry update attempt failed because repository is locked. Resetting and retrying." e
- LibGit2.reset!(repo, LibGit2.head_oid(repo), LibGit2.Consts.RESET_HARD)
- sleep(1)
- @goto merge
- elseif e isa LibGit2.GitError && e.code == LibGit2.Error.ENOTFOUND
- push!(errors, (reg.path, "branch origin/$branch not found"))
+ LibGit2.with(LibGit2.GitRepo(reg.path)) do repo
+ if LibGit2.isdirty(repo)
+ push!(errors, (regpath, "registry dirty"))
@goto done_git
- else
- rethrow()
end
-
- end
-
- if !ff_succeeded
- try LibGit2.rebase!(repo, "origin/$branch")
+ if !LibGit2.isattached(repo)
+ push!(errors, (regpath, "registry detached"))
+ @goto done_git
+ end
+ if !("origin" in LibGit2.remotes(repo))
+ push!(errors, (regpath, "origin not in the list of remotes"))
+ @goto done_git
+ end
+ branch = LibGit2.headname(repo)
+ try
+ GitTools.fetch(io, repo; refspecs = ["+refs/heads/$branch:refs/remotes/origin/$branch"])
catch e
- e isa LibGit2.GitError || rethrow()
- push!(errors, (reg.path, "registry failed to rebase on origin/$branch"))
+ e isa Pkg.Types.PkgError || rethrow()
+ push!(errors, (reg.path, "failed to fetch from repo: $(e.msg)"))
@goto done_git
end
+ attempts = 0
+ @label merge
+ ff_succeeded = try
+ LibGit2.merge!(repo; branch = "refs/remotes/origin/$branch", fastforward = true)
+ catch e
+ attempts += 1
+ if e isa LibGit2.GitError && e.code == LibGit2.Error.ELOCKED && attempts <= 3
+ @warn "Registry update attempt failed because repository is locked. Resetting and retrying." e
+ LibGit2.reset!(repo, LibGit2.head_oid(repo), LibGit2.Consts.RESET_HARD)
+ sleep(1)
+ @goto merge
+ elseif e isa LibGit2.GitError && e.code == LibGit2.Error.ENOTFOUND
+ push!(errors, (reg.path, "branch origin/$branch not found"))
+ @goto done_git
+ else
+ rethrow()
+ end
+
+ end
+
+ if !ff_succeeded
+ try
+ LibGit2.rebase!(repo, "origin/$branch")
+ catch e
+ e isa LibGit2.GitError || rethrow()
+ push!(errors, (reg.path, "registry failed to rebase on origin/$branch"))
+ @goto done_git
+ end
+ end
+ registry_update_log[string(reg.uuid)] = now()
+ @label done_git
end
- registry_update_log[string(reg.uuid)] = now()
- @label done_git
end
end
end
end
- end
- if !isempty(errors)
- warn_str = "Some registries failed to update:"
- for (reg, err) in errors
- warn_str *= "\n — $reg — $err"
+ if !isempty(errors)
+ warn_str = "Some registries failed to update:"
+ for (reg, err) in errors
+ warn_str *= "\n — $reg — $err"
+ end
+ @error warn_str
end
- @error warn_str
- end
end # mkpidlock
end
save_registry_update_log(registry_update_log)
@@ -562,20 +628,93 @@ Display information about available registries.
Pkg.Registry.status()
```
"""
-function status(io::IO=stderr_f())
+function status(io::IO = stderr_f())
regs = reachable_registries()
- regs = unique(r -> r.uuid, regs; seen=Set{Union{UUID,Nothing}}())
+ regs = unique(r -> r.uuid, regs; seen = Set{Union{UUID, Nothing}}())
printpkgstyle(io, Symbol("Registry Status"), "")
- if isempty(regs)
+ return if isempty(regs)
println(io, " (no registries found)")
else
+ registry_update_log = get_registry_update_log()
+ server_registry_info = Pkg.OFFLINE_MODE[] ? nothing : pkg_server_registry_info()
+ flavor = get(ENV, "JULIA_PKG_SERVER_REGISTRY_PREFERENCE", "")
for reg in regs
printstyled(io, " [$(string(reg.uuid)[1:8])]"; color = :light_black)
print(io, " $(reg.name)")
reg.repo === nothing || print(io, " ($(reg.repo))")
println(io)
+
+ registry_type = get_registry_type(reg)
+ if registry_type == :git
+ print(io, " git registry")
+ elseif registry_type == :unpacked
+ print(io, " unpacked registry with hash $(reg.tree_info)")
+ elseif registry_type == :packed
+ print(io, " packed registry with hash $(reg.tree_info)")
+ elseif registry_type == :bare
+ # We could try to detect a symlink but this is too
+ # rarely used to be worth the complexity.
+ print(io, " bare registry")
+ else
+ print(io, " unknown registry format")
+ end
+ update_time = get(registry_update_log, string(reg.uuid), nothing)
+ if !isnothing(update_time)
+ time_string = Dates.format(update_time, dateformat"yyyy-mm-dd HH:MM:SS")
+ print(io, ", last updated $(time_string)")
+ end
+ println(io)
+
+ if registry_type != :git && !isnothing(server_registry_info)
+ server_url, registries = server_registry_info
+ if haskey(registries, reg.uuid)
+ print(io, " served by $(server_url)")
+ if flavor != ""
+ print(io, " ($flavor flavor)")
+ end
+ if registries[reg.uuid] != reg.tree_info
+ print(io, " - update available")
+ end
+ println(io)
+ end
+ end
end
end
end
+# The registry can be installed in a number of different ways, for
+# evolutionary reasons.
+#
+# 1. A tarball that is not unpacked. In this case Pkg handles the
+# registry in memory. The tarball is distributed by a package server.
+# This is the preferred option, in particular for the General
+# registry.
+#
+# 2. A tarball that is unpacked. This only differs from above by
+# having the files on disk instead of in memory. In both cases Pkg
+# keeps track of the tarball's tree hash to know if it can be updated.
+#
+# 3. A clone of a git repository. This is characterized by the
+# presence of a .git directory. All updating is handled with git.
+# This is not preferred for the General registry but may be the only
+# practical option for private registries.
+#
+# 4. A bare registry with only the registry files and no metadata.
+# This can be installed by adding or symlinking from a local path but
+# there is no way to update it from Pkg.
+#
+# It is also possible for a packed/unpacked registry to coexist on
+# disk with a git/bare registry, in which case a new Julia may use the
+# former and a sufficiently old Julia the latter.
+function get_registry_type(reg)
+ isnothing(reg.in_memory_registry) || return :packed
+ isnothing(reg.tree_info) || return :unpacked
+ isdir(joinpath(reg.path, ".git")) && return :git
+ isfile(joinpath(reg.path, "Registry.toml")) && return :bare
+ # Indicates either that the registry data is corrupt or that it
+ # has been handled by a future Julia version with non-backwards
+ # compatible conventions.
+ return :unknown
+end
+
end # module
diff --git a/src/Registry/registry_instance.jl b/src/Registry/registry_instance.jl
index c5743fed4f..b60e7a15d0 100644
--- a/src/Registry/registry_instance.jl
+++ b/src/Registry/registry_instance.jl
@@ -24,7 +24,7 @@ function parsefile(in_memory_registry::Union{Dict, Nothing}, folder::AbstractStr
return _parsefile(joinpath(folder, file))
else
content = in_memory_registry[to_tar_path_format(file)]
- parser = Base.TOML.Parser{Dates}(content; filepath=file)
+ parser = Base.TOML.Parser{Dates}(content; filepath = file)
return Base.TOML.parse(parser)
end
end
@@ -109,8 +109,8 @@ function initialize_uncompressed!(pkg::PkgInfo, versions = keys(pkg.version_info
sort!(versions)
- uncompressed_compat = uncompress(pkg.compat, versions)
- uncompressed_deps = uncompress(pkg.deps, versions)
+ uncompressed_compat = uncompress(pkg.compat, versions)
+ uncompressed_deps = uncompress(pkg.deps, versions)
for v in versions
vinfo = pkg.version_info[v]
@@ -137,7 +137,7 @@ function initialize_weak_uncompressed!(pkg::PkgInfo, versions = keys(pkg.version
sort!(versions)
weak_uncompressed_compat = uncompress(pkg.weak_compat, versions)
- weak_uncompressed_deps = uncompress(pkg.weak_deps, versions)
+ weak_uncompressed_deps = uncompress(pkg.weak_deps, versions)
for v in versions
vinfo = pkg.version_info[v]
@@ -175,81 +175,89 @@ mutable struct PkgEntry
const uuid::UUID
const in_memory_registry::Union{Dict{String, String}, Nothing}
+ # Lock for thread-safe lazy loading
+ const info_lock::ReentrantLock
# Version.toml / (Compat.toml / Deps.toml):
info::PkgInfo # lazily initialized
- PkgEntry(path, registry_path, name, uuid, in_memory_registry) = new(path, registry_path, name, uuid, in_memory_registry, #= undef =#)
+ PkgEntry(path, registry_path, name, uuid, in_memory_registry) = new(path, registry_path, name, uuid, in_memory_registry, ReentrantLock() #= undef =#)
end
registry_info(pkg::PkgEntry) = init_package_info!(pkg)
function init_package_info!(pkg::PkgEntry)
- # Already uncompressed the info for this package, return early
- isdefined(pkg, :info) && return pkg.info
- path = pkg.registry_path
-
- d_p = parsefile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Package.toml"))
- name = d_p["name"]::String
- name != pkg.name && error("inconsistent name in Registry.toml ($(name)) and Package.toml ($(pkg.name)) for pkg at $(path)")
- repo = get(d_p, "repo", nothing)::Union{Nothing, String}
- subdir = get(d_p, "subdir", nothing)::Union{Nothing, String}
-
- # Versions.toml
- d_v = custom_isfile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Versions.toml")) ?
- parsefile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Versions.toml")) : Dict{String, Any}()
- version_info = Dict{VersionNumber, VersionInfo}(VersionNumber(k) =>
- VersionInfo(SHA1(v["git-tree-sha1"]::String), get(v, "yanked", false)::Bool) for (k, v) in d_v)
+ # Thread-safe lazy loading with double-check pattern
+ return @lock pkg.info_lock begin
+ # Double-check: if another thread loaded while we were waiting for the lock
+ isdefined(pkg, :info) && return pkg.info
+
+ path = pkg.registry_path
+
+ d_p = parsefile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Package.toml"))
+ name = d_p["name"]::String
+ name != pkg.name && error("inconsistent name in Registry.toml ($(name)) and Package.toml ($(pkg.name)) for pkg at $(path)")
+ repo = get(d_p, "repo", nothing)::Union{Nothing, String}
+ subdir = get(d_p, "subdir", nothing)::Union{Nothing, String}
+
+ # Versions.toml
+ d_v = custom_isfile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Versions.toml")) ?
+ parsefile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Versions.toml")) : Dict{String, Any}()
+ version_info = Dict{VersionNumber, VersionInfo}(
+ VersionNumber(k) =>
+ VersionInfo(SHA1(v["git-tree-sha1"]::String), get(v, "yanked", false)::Bool) for (k, v) in d_v
+ )
+
+ # Compat.toml
+ compat_data_toml = custom_isfile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Compat.toml")) ?
+ parsefile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Compat.toml")) : Dict{String, Any}()
+ compat = Dict{VersionRange, Dict{String, VersionSpec}}()
+ for (v, data) in compat_data_toml
+ data = data::Dict{String, Any}
+ vr = VersionRange(v)
+ d = Dict{String, VersionSpec}(dep => VersionSpec(vr_dep) for (dep, vr_dep::Union{String, Vector{String}}) in data)
+ compat[vr] = d
+ end
- # Compat.toml
- compat_data_toml = custom_isfile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Compat.toml")) ?
- parsefile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Compat.toml")) : Dict{String, Any}()
- compat = Dict{VersionRange, Dict{String, VersionSpec}}()
- for (v, data) in compat_data_toml
- data = data::Dict{String, Any}
- vr = VersionRange(v)
- d = Dict{String, VersionSpec}(dep => VersionSpec(vr_dep) for (dep, vr_dep::Union{String, Vector{String}}) in data)
- compat[vr] = d
- end
+ # Deps.toml
+ deps_data_toml = custom_isfile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Deps.toml")) ?
+ parsefile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Deps.toml")) : Dict{String, Any}()
+ deps = Dict{VersionRange, Dict{String, UUID}}()
+ for (v, data) in deps_data_toml
+ data = data::Dict{String, Any}
+ vr = VersionRange(v)
+ d = Dict{String, UUID}(dep => UUID(uuid) for (dep, uuid::String) in data)
+ deps[vr] = d
+ end
+ # All packages depend on julia
+ deps[VersionRange()] = Dict("julia" => JULIA_UUID)
+
+ # WeakCompat.toml
+ weak_compat_data_toml = custom_isfile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "WeakCompat.toml")) ?
+ parsefile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "WeakCompat.toml")) : Dict{String, Any}()
+ weak_compat = Dict{VersionRange, Dict{String, VersionSpec}}()
+ for (v, data) in weak_compat_data_toml
+ data = data::Dict{String, Any}
+ vr = VersionRange(v)
+ d = Dict{String, VersionSpec}(dep => VersionSpec(vr_dep) for (dep, vr_dep::Union{String, Vector{String}}) in data)
+ weak_compat[vr] = d
+ end
- # Deps.toml
- deps_data_toml = custom_isfile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Deps.toml")) ?
- parsefile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "Deps.toml")) : Dict{String, Any}()
- deps = Dict{VersionRange, Dict{String, UUID}}()
- for (v, data) in deps_data_toml
- data = data::Dict{String, Any}
- vr = VersionRange(v)
- d = Dict{String, UUID}(dep => UUID(uuid) for (dep, uuid::String) in data)
- deps[vr] = d
- end
- # All packages depend on julia
- deps[VersionRange()] = Dict("julia" => JULIA_UUID)
+ # WeakDeps.toml
+ weak_deps_data_toml = custom_isfile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "WeakDeps.toml")) ?
+ parsefile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "WeakDeps.toml")) : Dict{String, Any}()
+ weak_deps = Dict{VersionRange, Dict{String, UUID}}()
+ for (v, data) in weak_deps_data_toml
+ data = data::Dict{String, Any}
+ vr = VersionRange(v)
+ d = Dict{String, UUID}(dep => UUID(uuid) for (dep, uuid::String) in data)
+ weak_deps[vr] = d
+ end
- # WeakCompat.toml
- weak_compat_data_toml = custom_isfile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "WeakCompat.toml")) ?
- parsefile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "WeakCompat.toml")) : Dict{String, Any}()
- weak_compat = Dict{VersionRange, Dict{String, VersionSpec}}()
- for (v, data) in weak_compat_data_toml
- data = data::Dict{String, Any}
- vr = VersionRange(v)
- d = Dict{String, VersionSpec}(dep => VersionSpec(vr_dep) for (dep, vr_dep::Union{String, Vector{String}}) in data)
- weak_compat[vr] = d
- end
+ @assert !isdefined(pkg, :info)
+ pkg.info = PkgInfo(repo, subdir, version_info, compat, deps, weak_compat, weak_deps)
- # WeakDeps.toml
- weak_deps_data_toml = custom_isfile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "WeakDeps.toml")) ?
- parsefile(pkg.in_memory_registry, pkg.registry_path, joinpath(pkg.path, "WeakDeps.toml")) : Dict{String, Any}()
- weak_deps = Dict{VersionRange, Dict{String, UUID}}()
- for (v, data) in weak_deps_data_toml
- data = data::Dict{String, Any}
- vr = VersionRange(v)
- d = Dict{String, UUID}(dep => UUID(uuid) for (dep, uuid::String) in data)
- weak_deps[vr] = d
+ return pkg.info
end
-
- @assert !isdefined(pkg, :info)
- pkg.info = PkgInfo(repo, subdir, version_info, compat, deps, weak_compat, weak_deps)
-
- return pkg.info
end
@@ -261,9 +269,9 @@ function uncompress_registry(tar_gz::AbstractString)
buf = Vector{UInt8}(undef, Tar.DEFAULT_BUFFER_SIZE)
io = IOBuffer()
open(`$(exe7z()) x $tar_gz -so`) do tar
- Tar.read_tarball(x->true, tar; buf=buf) do hdr, _
+ Tar.read_tarball(x -> true, tar; buf = buf) do hdr, _
if hdr.type == :file
- Tar.read_data(tar, io; size=hdr.size, buf=buf)
+ Tar.read_data(tar, io; size = hdr.size, buf = buf)
data[hdr.path] = String(take!(io))
end
end
@@ -271,21 +279,81 @@ function uncompress_registry(tar_gz::AbstractString)
return data
end
-struct RegistryInstance
+mutable struct RegistryInstance
path::String
+ tree_info::Union{Base.SHA1, Nothing}
+ compressed_file::Union{String, Nothing}
+ const load_lock::ReentrantLock # Lock for thread-safe lazy loading
+
+ # Lazily loaded fields
name::String
uuid::UUID
repo::Union{String, Nothing}
description::Union{String, Nothing}
pkgs::Dict{UUID, PkgEntry}
- tree_info::Union{Base.SHA1, Nothing}
in_memory_registry::Union{Nothing, Dict{String, String}}
# various caches
name_to_uuids::Dict{String, Vector{UUID}}
+
+ # Inner constructor for lazy loading - leaves fields undefined
+ function RegistryInstance(path::String, tree_info::Union{Base.SHA1, Nothing}, compressed_file::Union{String, Nothing})
+ return new(path, tree_info, compressed_file, ReentrantLock())
+ end
+
+ # Full constructor for when all fields are known
+ function RegistryInstance(
+ path::String, tree_info::Union{Base.SHA1, Nothing}, compressed_file::Union{String, Nothing},
+ name::String, uuid::UUID, repo::Union{String, Nothing}, description::Union{String, Nothing},
+ pkgs::Dict{UUID, PkgEntry}, in_memory_registry::Union{Nothing, Dict{String, String}},
+ name_to_uuids::Dict{String, Vector{UUID}}
+ )
+ return new(path, tree_info, compressed_file, ReentrantLock(), name, uuid, repo, description, pkgs, in_memory_registry, name_to_uuids)
+ end
end
const REGISTRY_CACHE = Dict{String, Tuple{Base.SHA1, Bool, RegistryInstance}}()
+@noinline function _ensure_registry_loaded_slow!(r::RegistryInstance)
+ return @lock r.load_lock begin
+ # Double-check pattern: if another thread loaded while we were waiting for the lock
+ isdefined(r, :pkgs) && return r
+
+ if getfield(r, :compressed_file) !== nothing
+ r.in_memory_registry = uncompress_registry(joinpath(dirname(getfield(r, :path)), getfield(r, :compressed_file)))
+ else
+ r.in_memory_registry = nothing
+ end
+
+ d = parsefile(r.in_memory_registry, getfield(r, :path), "Registry.toml")
+ r.name = d["name"]::String
+ r.uuid = UUID(d["uuid"]::String)
+ r.repo = get(d, "repo", nothing)::Union{String, Nothing}
+ r.description = get(d, "description", nothing)::Union{String, Nothing}
+
+ r.pkgs = Dict{UUID, PkgEntry}()
+ for (uuid, info) in d["packages"]::Dict{String, Any}
+ uuid = UUID(uuid::String)
+ info::Dict{String, Any}
+ name = info["name"]::String
+ pkgpath = info["path"]::String
+ pkg = PkgEntry(pkgpath, getfield(r, :path), name, uuid, r.in_memory_registry)
+ r.pkgs[uuid] = pkg
+ end
+
+ r.name_to_uuids = Dict{String, Vector{UUID}}()
+
+ return r
+ end
+end
+
+# Property accessors that trigger lazy loading
+@inline function Base.getproperty(r::RegistryInstance, f::Symbol)
+ if f === :name || f === :uuid || f === :repo || f === :description || f === :pkgs || f === :name_to_uuids
+ _ensure_registry_loaded_slow!(r) # Takes a lock to ensure thread safety
+ end
+ return getfield(r, f)
+end
+
function get_cached_registry(path, tree_info::Base.SHA1, compressed::Bool)
if !ispath(path)
delete!(REGISTRY_CACHE, path)
@@ -326,33 +394,9 @@ function RegistryInstance(path::AbstractString)
end
end
- in_memory_registry = if compressed_file !== nothing
- uncompress_registry(joinpath(dirname(path), compressed_file))
- else
- nothing
- end
+ # Create partially initialized registry - defer expensive operations
+ reg = RegistryInstance(path, tree_info, compressed_file)
- d = parsefile(in_memory_registry, path, "Registry.toml")
- pkgs = Dict{UUID, PkgEntry}()
- for (uuid, info) in d["packages"]::Dict{String, Any}
- uuid = UUID(uuid::String)
- info::Dict{String, Any}
- name = info["name"]::String
- pkgpath = info["path"]::String
- pkg = PkgEntry(pkgpath, path, name, uuid, in_memory_registry)
- pkgs[uuid] = pkg
- end
- reg = RegistryInstance(
- path,
- d["name"]::String,
- UUID(d["uuid"]::String),
- get(d, "repo", nothing)::Union{String, Nothing},
- get(d, "description", nothing)::Union{String, Nothing},
- pkgs,
- tree_info,
- in_memory_registry,
- Dict{String, UUID}(),
- )
if tree_info !== nothing
REGISTRY_CACHE[path] = (tree_info, compressed_file !== nothing, reg)
end
@@ -366,8 +410,9 @@ function Base.show(io::IO, ::MIME"text/plain", r::RegistryInstance)
if r.tree_info !== nothing
println(io, " git-tree-sha1: ", r.tree_info)
end
- println(io, " packages: ", length(r.pkgs))
+ return println(io, " packages: ", length(r.pkgs))
end
+Base.show(io::IO, r::RegistryInstance) = Base.show(io, MIME"text/plain"(), r)
function uuids_from_name(r::RegistryInstance, name::String)
create_name_uuid_mapping!(r)
@@ -386,7 +431,7 @@ end
function verify_compressed_registry_toml(path::String)
d = TOML.tryparsefile(path)
if d isa TOML.ParserError
- @warn "Failed to parse registry TOML file at $(repr(path))" exception=d
+ @warn "Failed to parse registry TOML file at $(repr(path))" exception = d
return false
end
for key in ("git-tree-sha1", "uuid", "path")
@@ -403,7 +448,7 @@ function verify_compressed_registry_toml(path::String)
return true
end
-function reachable_registries(; depots::Union{String, Vector{String}}=Base.DEPOT_PATH)
+function reachable_registries(; depots::Union{String, Vector{String}} = Base.DEPOT_PATH)
# collect registries
if depots isa String
depots = [depots]
@@ -413,7 +458,7 @@ function reachable_registries(; depots::Union{String, Vector{String}}=Base.DEPOT
isdir(d) || continue
reg_dir = joinpath(d, "registries")
isdir(reg_dir) || continue
- reg_paths = readdir(reg_dir; join=true)
+ reg_paths = readdir(reg_dir; join = true)
candidate_registries = String[]
# All folders could be registries
append!(candidate_registries, filter(isdir, reg_paths))
diff --git a/src/Resolve/Resolve.jl b/src/Resolve/Resolve.jl
index dc11c7540f..93d0f036bd 100644
--- a/src/Resolve/Resolve.jl
+++ b/src/Resolve/Resolve.jl
@@ -14,7 +14,7 @@ export resolve, sanity_check, Graph, pkgID
####################
# Requires / Fixed #
####################
-const Requires = Dict{UUID,VersionSpec}
+const Requires = Dict{UUID, VersionSpec}
struct Fixed
version::VersionNumber
@@ -35,19 +35,19 @@ Base.show(io::IO, f::Fixed) = isempty(f.requires) ?
struct ResolverError <: Exception
msg::AbstractString
- ex::Union{Exception,Nothing}
+ ex::Union{Exception, Nothing}
end
ResolverError(msg::AbstractString) = ResolverError(msg, nothing)
struct ResolverTimeoutError <: Exception
msg::AbstractString
- ex::Union{Exception,Nothing}
+ ex::Union{Exception, Nothing}
end
ResolverTimeoutError(msg::AbstractString) = ResolverTimeoutError(msg, nothing)
function Base.showerror(io::IO, pkgerr::ResolverError)
print(io, pkgerr.msg)
- if pkgerr.ex !== nothing
+ return if pkgerr.ex !== nothing
pkgex = pkgerr.ex
if isa(pkgex, CompositeException)
for cex in pkgex
@@ -76,16 +76,16 @@ function resolve(graph::Graph)
return compute_output_dict(sol, graph)
end
-function _resolve(graph::Graph, lower_bound::Union{Vector{Int},Nothing}, previous_sol::Union{Vector{Int},Nothing})
+function _resolve(graph::Graph, lower_bound::Union{Vector{Int}, Nothing}, previous_sol::Union{Vector{Int}, Nothing})
np = graph.np
spp = graph.spp
gconstr = graph.gconstr
if lower_bound ≢ nothing
- for p0 = 1:np
+ for p0 in 1:np
v0 = lower_bound[p0]
@assert v0 ≠ spp[p0]
- gconstr[p0][1:(v0-1)] .= false
+ gconstr[p0][1:(v0 - 1)] .= false
end
end
@@ -114,11 +114,15 @@ function _resolve(graph::Graph, lower_bound::Union{Vector{Int},Nothing}, previou
else
@assert maxsum_result == :timedout
log_event_global!(graph, "maxsum solver timed out")
- throw(ResolverTimeoutError("""
- The resolution process timed out. This is likely due to unsatisfiable requirements.
- You can increase the maximum resolution time via the environment variable JULIA_PKG_RESOLVE_MAX_TIME
- (the current value is $(get(ENV, "JULIA_PKG_RESOLVE_MAX_TIME", DEFAULT_MAX_TIME))).
- """))
+ throw(
+ ResolverTimeoutError(
+ """
+ The resolution process timed out. This is likely due to unsatisfiable requirements.
+ You can increase the maximum resolution time via the environment variable JULIA_PKG_RESOLVE_MAX_TIME
+ (the current value is $(get(ENV, "JULIA_PKG_RESOLVE_MAX_TIME", DEFAULT_MAX_TIME))).
+ """
+ )
+ )
end
@@ -166,7 +170,7 @@ function sanity_check(graph::Graph, sources::Set{UUID} = Set{UUID}(), verbose::B
isempty(req_inds) || @warn("sanity check called on a graph with non-empty requirements")
if !any(is_julia(graph, fp0) for fp0 in fix_inds)
@warn("sanity check called on a graph without julia requirement, adding it")
- add_fixed!(graph, Dict(uuid_julia=>Fixed(VERSION)))
+ add_fixed!(graph, Dict(uuid_julia => Fixed(VERSION)))
end
if length(fix_inds) ≠ 1
@warn("sanity check called on a graph with extra fixed requirements (besides julia)")
@@ -190,22 +194,22 @@ function sanity_check(graph::Graph, sources::Set{UUID} = Set{UUID}(), verbose::B
pvers = data.pvers
eq_classes = data.eq_classes
- problematic = Tuple{String,VersionNumber}[]
+ problematic = Tuple{String, VersionNumber}[]
np == 0 && return problematic
- vers = [(pkgs[p0],pvers[p0][v0]) for p0 = 1:np for v0 = 1:(spp[p0]-1)]
- sort!(vers, by=pv->(-length(gadj[pdict[pv[1]]])))
+ vers = [(pkgs[p0], pvers[p0][v0]) for p0 in 1:np for v0 in 1:(spp[p0] - 1)]
+ sort!(vers, by = pv -> (-length(gadj[pdict[pv[1]]])))
nv = length(vers)
- svdict = Dict{Tuple{UUID,VersionNumber},Int}(vers[i] => i for i = 1:nv)
+ svdict = Dict{Tuple{UUID, VersionNumber}, Int}(vers[i] => i for i in 1:nv)
checked = falses(nv)
last_str_len = 0
- for (i,(p,vn)) in enumerate(vers)
+ for (i, (p, vn)) in enumerate(vers)
if verbose
frac_compl = i / nv
print("\r", " "^last_str_len, "\r")
@@ -249,8 +253,8 @@ function sanity_check(graph::Graph, sources::Set{UUID} = Set{UUID}(), verbose::B
else
@assert verify_solution(sol, graph)
sol_dict = compute_output_dict(sol, graph)
- for (sp,svn) in sol_dict
- j = svdict[sp,svn]
+ for (sp, svn) in sol_dict
+ j = svdict[sp, svn]
checked[j] = true
end
end
@@ -278,8 +282,8 @@ function compute_output_dict(sol::Vector{Int}, graph::Graph)
pvers = graph.data.pvers
pruned = graph.data.pruned
- want = Dict{UUID,VersionNumber}()
- for p0 = 1:np
+ want = Dict{UUID, VersionNumber}()
+ for p0 in 1:np
p0 ∈ fix_inds && continue
p = pkgs[p0]
s0 = sol[p0]
@@ -287,7 +291,7 @@ function compute_output_dict(sol::Vector{Int}, graph::Graph)
vn = pvers[p0][s0]
want[p] = vn
end
- for (p,vn) in pruned
+ for (p, vn) in pruned
@assert !haskey(want, p)
want[p] = vn
end
@@ -309,12 +313,12 @@ function greedysolver(graph::Graph)
gconstr = graph.gconstr
# initialize solution: all uninstalled
- sol = Int[spp[p0] for p0 = 1:np]
+ sol = Int[spp[p0] for p0 in 1:np]
# packages which are not allowed to be uninstalled
# (NOTE: this is potentially a superset of graph.req_inds,
# since it may include implicit requirements)
- req_inds = Set{Int}(p0 for p0 = 1:np if !gconstr[p0][end])
+ req_inds = Set{Int}(p0 for p0 in 1:np if !gconstr[p0][end])
# set up required packages to their highest allowed versions
for rp0 in req_inds
@@ -347,10 +351,10 @@ function greedysolver(graph::Graph)
@assert s0 < spp[p0]
# scan dependencies
- for (j1,p1) in enumerate(gadj[p0])
+ for (j1, p1) in enumerate(gadj[p0])
msk = gmsk[p0][j1]
# look for the highest version which satisfies the requirements
- v1 = findlast(msk[:,s0] .& gconstr[p1])
+ v1 = findlast(msk[:, s0] .& gconstr[p1])
v1 == spp[p1] && continue # p1 is not required by p0's current version
# if we found a version, and the package was uninstalled
# or the same version was already selected, we're ok;
@@ -374,7 +378,7 @@ function greedysolver(graph::Graph)
pop_snapshot!(graph)
- for p0 = 1:np
+ for p0 in 1:np
log_event_greedysolved!(graph, p0, sol[p0])
end
@@ -396,13 +400,13 @@ function verify_solution(sol::Vector{Int}, graph::Graph)
@assert all(sol .> 0)
# verify constraints and dependencies
- for p0 = 1:np
+ for p0 in 1:np
s0 = sol[p0]
gconstr[p0][s0] || (@warn("gconstr[$p0][$s0] fail"); return false)
- for (j1,p1) in enumerate(gadj[p0])
+ for (j1, p1) in enumerate(gadj[p0])
msk = gmsk[p0][j1]
s1 = sol[p1]
- msk[s1,s0] || (@warn("gmsk[$p0][$p1][$s1,$s0] fail"); return false)
+ msk[s1, s0] || (@warn("gmsk[$p0][$p1][$s1,$s0] fail"); return false)
end
end
return true
@@ -413,7 +417,7 @@ end
Uninstall unreachable packages:
start from the required ones and keep only the packages reachable from them along the graph.
"""
-function _uninstall_unreachable!(sol::Vector{Int}, why::Vector{Union{Symbol,Int}}, graph::Graph)
+function _uninstall_unreachable!(sol::Vector{Int}, why::Vector{Union{Symbol, Int}}, graph::Graph)
np = graph.np
spp = graph.spp
gadj = graph.gadj
@@ -421,8 +425,8 @@ function _uninstall_unreachable!(sol::Vector{Int}, why::Vector{Union{Symbol,Int}
gconstr = graph.gconstr
uninst = trues(np)
- staged = Set{Int}(p0 for p0 = 1:np if !gconstr[p0][end])
- seen = copy(staged) ∪ Set{Int}(p0 for p0 = 1:np if sol[p0] == spp[p0]) # we'll skip uninstalled packages
+ staged = Set{Int}(p0 for p0 in 1:np if !gconstr[p0][end])
+ seen = copy(staged) ∪ Set{Int}(p0 for p0 in 1:np if sol[p0] == spp[p0]) # we'll skip uninstalled packages
while !isempty(staged)
staged_next = Set{Int}()
@@ -430,9 +434,9 @@ function _uninstall_unreachable!(sol::Vector{Int}, why::Vector{Union{Symbol,Int}
s0 = sol[p0]
@assert s0 ≠ spp[p0]
uninst[p0] = false
- for (j1,p1) in enumerate(gadj[p0])
+ for (j1, p1) in enumerate(gadj[p0])
p1 ∈ seen && continue # we've already seen the package, or it is uninstalled
- gmsk[p0][j1][end,s0] && continue # the package is not required by p0 at version s0
+ gmsk[p0][j1][end, s0] && continue # the package is not required by p0 at version s0
push!(staged_next, p1)
end
end
@@ -444,6 +448,7 @@ function _uninstall_unreachable!(sol::Vector{Int}, why::Vector{Union{Symbol,Int}
sol[p0] = spp[p0]
why[p0] = :uninst
end
+ return
end
"""
@@ -463,7 +468,7 @@ function enforce_optimality!(sol::Vector{Int}, graph::Graph)
pkgs = graph.data.pkgs
# keep a track for the log
- why = Union{Symbol,Int}[0 for p0 = 1:np]
+ why = Union{Symbol, Int}[0 for p0 in 1:np]
# Strategy:
# There's a cycle in which first the unnecessary (unconnected) packages are removed,
@@ -510,10 +515,10 @@ function enforce_optimality!(sol::Vector{Int}, graph::Graph)
move_up .= sol .≠ spp
copy!(upperbound, spp)
let move_up = move_up
- lowerbound .= [move_up[p0] ? sol[p0] : 1 for p0 = 1:np]
+ lowerbound .= [move_up[p0] ? sol[p0] : 1 for p0 in 1:np]
end
- for p0 = 1:np
+ for p0 in 1:np
s0 = sol[p0]
s0 == spp[p0] && (why[p0] = :uninst; continue) # the package is not installed
move_up[p0] || continue # the package is only installed as a result of a previous bump, skip it
@@ -521,9 +526,9 @@ function enforce_optimality!(sol::Vector{Int}, graph::Graph)
@assert upperbound[p0] == spp[p0]
# pick the next version that doesn't violate a constraint (if any)
- bump_range = collect(s0+1:spp[p0])
+ bump_range = collect((s0 + 1):spp[p0])
bump = let gconstr = gconstr
- findfirst(v0->gconstr[p0][v0], bump_range)
+ findfirst(v0 -> gconstr[p0][v0], bump_range)
end
# no such version was found, skip this package
@@ -551,7 +556,7 @@ function enforce_optimality!(sol::Vector{Int}, graph::Graph)
while !isempty(staged)
for f0 in staged
- for (j1,f1) in enumerate(gadj[f0])
+ for (j1, f1) in enumerate(gadj[f0])
s1 = sol[f1]
msk = gmsk[f0][j1]
if f1 == p0 || try_uninstall
@@ -565,13 +570,13 @@ function enforce_optimality!(sol::Vector{Int}, graph::Graph)
s1 > lb1 && @assert s1 == spp[f1]
# the arrangement of the range gives precedence to improving the
# current situation, but allows reinstalling a package if needed
- bump_range = vcat(s1:ub1, s1-1:-1:lb1)
+ bump_range = vcat(s1:ub1, (s1 - 1):-1:lb1)
else
bump_range = collect(ub1:-1:lb1)
end
end
bump = let gconstr = gconstr
- findfirst(v1->(gconstr[f1][v1] && msk[v1, sol[f0]]), bump_range)
+ findfirst(v1 -> (gconstr[f1][v1] && msk[v1, sol[f0]]), bump_range)
end
if bump ≡ nothing
why[p0] = f1 # TODO: improve this? (ideally we might want the path from p0 to f1)
@@ -610,15 +615,16 @@ function enforce_optimality!(sol::Vector{Int}, graph::Graph)
@assert verify_solution(sol, graph)
- for p0 = 1:np
+ for p0 in 1:np
log_event_maxsumsolved!(graph, p0, sol[p0], why[p0])
end
+ return
end
function apply_maxsum_trace!(graph::Graph, sol::Vector{Int})
gconstr = graph.gconstr
- for (p0,s0) in enumerate(sol)
+ for (p0, s0) in enumerate(sol)
s0 == 0 && continue
gconstr0 = gconstr[p0]
old_constr = copy(gconstr0)
@@ -627,9 +633,10 @@ function apply_maxsum_trace!(graph::Graph, sol::Vector{Int})
gconstr0[s0] = true
gconstr0 ≠ old_constr && log_event_maxsumtrace!(graph, p0, s0)
end
+ return
end
-function trigger_failure!(graph::Graph, sol::Vector{Int}, staged::Tuple{Int,Int})
+function trigger_failure!(graph::Graph, sol::Vector{Int}, staged::Tuple{Int, Int})
apply_maxsum_trace!(graph, sol)
simplify_graph_soft!(graph, Set(findall(sol .> 0)), log_events = true) # this may throw an error...
@@ -643,8 +650,8 @@ function trigger_failure!(graph::Graph, sol::Vector{Int}, staged::Tuple{Int,Int}
log_event_maxsumtrace!(graph, p0, v0)
simplify_graph!(graph) # this may throw an error...
outdict = resolve(graph) # ...otherwise, this MUST throw an error
- open(io->showlog(io, graph, view=:chronological), "logchrono.errresolve.txt", "w")
- error("this is not supposed to happen... $(Dict(pkgID(p, graph) => vn for (p,vn) in outdict))")
+ open(io -> showlog(io, graph, view = :chronological), "logchrono.errresolve.txt", "w")
+ error("this is not supposed to happen... $(Dict(pkgID(p, graph) => vn for (p, vn) in outdict))")
end
end # module
diff --git a/src/Resolve/fieldvalues.jl b/src/Resolve/fieldvalues.jl
index 028d6c6036..a32929f14a 100644
--- a/src/Resolve/fieldvalues.jl
+++ b/src/Resolve/fieldvalues.jl
@@ -15,10 +15,12 @@ struct FieldValue
l1::VersionWeight
l2::VersionWeight
l3::Int64
- FieldValue(l0::Integer = 0,
- l1::VersionWeight = zero(VersionWeight),
- l2::VersionWeight = zero(VersionWeight),
- l3::Integer = 0) = new(l0, l1, l2, l3)
+ FieldValue(
+ l0::Integer = 0,
+ l1::VersionWeight = zero(VersionWeight),
+ l2::VersionWeight = zero(VersionWeight),
+ l3::Integer = 0
+ ) = new(l0, l1, l2, l3)
end
# This isn't nice, but it's for debugging only anyway
@@ -37,10 +39,10 @@ const Field = Vector{FieldValue}
Base.zero(::Type{FieldValue}) = FieldValue()
-Base.typemin(::Type{FieldValue}) = (x=typemin(Int64); y=typemin(VersionWeight); FieldValue(x, y, y, x))
+Base.typemin(::Type{FieldValue}) = (x = typemin(Int64); y = typemin(VersionWeight); FieldValue(x, y, y, x))
-Base.:-(a::FieldValue, b::FieldValue) = FieldValue(a.l0-b.l0, a.l1-b.l1, a.l2-b.l2, a.l3-b.l3)
-Base.:+(a::FieldValue, b::FieldValue) = FieldValue(a.l0+b.l0, a.l1+b.l1, a.l2+b.l2, a.l3+b.l3)
+Base.:-(a::FieldValue, b::FieldValue) = FieldValue(a.l0 - b.l0, a.l1 - b.l1, a.l2 - b.l2, a.l3 - b.l3)
+Base.:+(a::FieldValue, b::FieldValue) = FieldValue(a.l0 + b.l0, a.l1 + b.l1, a.l2 + b.l2, a.l3 + b.l3)
function Base.isless(a::FieldValue, b::FieldValue)
a.l0 < b.l0 && return true
@@ -59,7 +61,7 @@ validmax(a::FieldValue) = a.l0 >= 0
function Base.argmax(f::Field)
m = typemin(FieldValue)
mi = 0
- for j = length(f):-1:1
+ for j in length(f):-1:1
if f[j] > m
m = f[j]
mi = j
@@ -74,7 +76,7 @@ end
function secondmax(f::Field, msk::BitVector = trues(length(f)))
m = typemin(FieldValue)
m2 = typemin(FieldValue)
- for i = 1:length(f)
+ for i in 1:length(f)
msk[i] || continue
a = f[i]
if a > m
diff --git a/src/Resolve/graphtype.jl b/src/Resolve/graphtype.jl
index f2cffce50d..7365bd0b34 100644
--- a/src/Resolve/graphtype.jl
+++ b/src/Resolve/graphtype.jl
@@ -15,17 +15,17 @@
const UUID0 = UUID(UInt128(0))
-const ResolveJournal = Vector{Tuple{UUID,String}}
+const ResolveJournal = Vector{Tuple{UUID, String}}
mutable struct ResolveLogEntry
journal::ResolveJournal # shared with all other entries
pkg::UUID
header::String
- events::Vector{Tuple{Any,String}} # here Any should ideally be Union{ResolveLogEntry,Nothing}
+ events::Vector{Tuple{Any, String}} # here Any should ideally be Union{ResolveLogEntry,Nothing}
ResolveLogEntry(journal::ResolveJournal, pkg::UUID, header::String = "") = new(journal, pkg, header, [])
end
-function Base.push!(entry::ResolveLogEntry, reason::Tuple{Union{ResolveLogEntry,Nothing},String}, to_journal::Bool = true)
+function Base.push!(entry::ResolveLogEntry, reason::Tuple{Union{ResolveLogEntry, Nothing}, String}, to_journal::Bool = true)
push!(entry.events, reason)
to_journal && entry.pkg ≠ uuid_julia && push!(entry.journal, (entry.pkg, reason[2]))
return entry
@@ -41,7 +41,7 @@ mutable struct ResolveLog
globals::ResolveLogEntry
# pool: records entries associated to each package
- pool::Dict{UUID,ResolveLogEntry}
+ pool::Dict{UUID, ResolveLogEntry}
# journal: record all messages in order (shared between all entries)
journal::ResolveJournal
@@ -54,18 +54,18 @@ mutable struct ResolveLog
verbose::Bool
# UUID to names
- uuid_to_name::Dict{UUID,String}
+ uuid_to_name::Dict{UUID, String}
- function ResolveLog(uuid_to_name::Dict{UUID,String}, verbose::Bool = false)
+ function ResolveLog(uuid_to_name::Dict{UUID, String}, verbose::Bool = false)
journal = ResolveJournal()
init = ResolveLogEntry(journal, UUID0, "")
globals = ResolveLogEntry(journal, UUID0, "Global events:")
- return new(init, globals, Dict{UUID,ResolveLogEntry}(), journal, true, verbose, uuid_to_name)
+ return new(init, globals, Dict{UUID, ResolveLogEntry}(), journal, true, verbose, uuid_to_name)
end
end
# Installation state: either a version, or uninstalled
-const InstState = Union{VersionNumber,Nothing}
+const InstState = Union{VersionNumber, Nothing}
# GraphData is basically a part of Graph that collects data structures useful
@@ -82,7 +82,7 @@ mutable struct GraphData
spp::Vector{Int}
# package dict: associates an index to each package id
- pdict::Dict{UUID,Int}
+ pdict::Dict{UUID, Int}
# package versions: for each package, keep the list of the
# possible version numbers; this defines a
@@ -93,28 +93,28 @@ mutable struct GraphData
# versions dict: associates a version index to each package
# version; such that
# pvers[p0][vdict[p0][vn]] = vn
- vdict::Vector{Dict{VersionNumber,Int}}
+ vdict::Vector{Dict{VersionNumber, Int}}
# UUID to names
- uuid_to_name::Dict{UUID,String}
+ uuid_to_name::Dict{UUID, String}
# pruned packages: during graph simplification, packages that
# only have one allowed version are pruned.
# This keeps track of them, so that they may
# be returned in the solution (unless they
# were explicitly fixed)
- pruned::Dict{UUID,VersionNumber}
+ pruned::Dict{UUID, VersionNumber}
# equivalence classes: for each package and each of its possible
# states, keep track of other equivalent states
- eq_classes::Dict{UUID,Dict{InstState,Set{InstState}}}
+ eq_classes::Dict{UUID, Dict{InstState, Set{InstState}}}
# resolve log: keep track of the resolution process
rlog::ResolveLog
function GraphData(
- compat::Dict{UUID,Dict{VersionNumber,Dict{UUID,VersionSpec}}},
- uuid_to_name::Dict{UUID,String},
+ compat::Dict{UUID, Dict{VersionNumber, Dict{UUID, VersionSpec}}},
+ uuid_to_name::Dict{UUID, String},
verbose::Bool = false
)
# generate pkgs
@@ -122,28 +122,28 @@ mutable struct GraphData
np = length(pkgs)
# generate pdict
- pdict = Dict{UUID,Int}(pkgs[p0] => p0 for p0 = 1:np)
+ pdict = Dict{UUID, Int}(pkgs[p0] => p0 for p0 in 1:np)
# generate spp and pvers
- pvers = [sort!(collect(keys(compat[pkgs[p0]]))) for p0 = 1:np]
+ pvers = [sort!(collect(keys(compat[pkgs[p0]]))) for p0 in 1:np]
spp = length.(pvers) .+ 1
# generate vdict
- vdict = [Dict{VersionNumber,Int}(vn => i for (i,vn) in enumerate(pvers[p0])) for p0 = 1:np]
+ vdict = [Dict{VersionNumber, Int}(vn => i for (i, vn) in enumerate(pvers[p0])) for p0 in 1:np]
# nothing is pruned yet, of course
- pruned = Dict{UUID,VersionNumber}()
+ pruned = Dict{UUID, VersionNumber}()
# equivalence classes (at the beginning each state represents just itself)
eq_vn(v0, p0) = (v0 == spp[p0] ? nothing : pvers[p0][v0])
# Hot code, measure performance before changing
- eq_classes = Dict{UUID,Dict{InstState,Set{InstState}}}()
- for p0 = 1:np
+ eq_classes = Dict{UUID, Dict{InstState, Set{InstState}}}()
+ for p0 in 1:np
d = Dict{InstState, Set{InstState}}()
- for v0 = 1:spp[p0]
+ for v0 in 1:spp[p0]
let p0 = p0 # Due to https://github.com/JuliaLang/julia/issues/15276
- d[eq_vn(v0,p0)] = Set([eq_vn(v0,p0)])
+ d[eq_vn(v0, p0)] = Set([eq_vn(v0, p0)])
end
end
eq_classes[pkgs[p0]] = d
@@ -164,10 +164,10 @@ mutable struct GraphData
np = data.np
spp = copy(data.spp)
pdict = copy(data.pdict)
- pvers = [copy(data.pvers[p0]) for p0 = 1:np]
- vdict = [copy(data.vdict[p0]) for p0 = 1:np]
+ pvers = [copy(data.pvers[p0]) for p0 in 1:np]
+ vdict = [copy(data.vdict[p0]) for p0 in 1:np]
pruned = copy(data.pruned)
- eq_classes = Dict(p => copy(eq) for (p,eq) in data.eq_classes)
+ eq_classes = Dict(p => copy(eq) for (p, eq) in data.eq_classes)
rlog = deepcopy(data.rlog)
uuid_to_name = rlog.uuid_to_name
@@ -206,7 +206,7 @@ mutable struct Graph
# allows one to retrieve the indices in gadj, so that
# gadj[p0][adjdict[p1][p0]] = p1
# ("At which index does package p1 appear in gadj[p0]?")
- adjdict::Vector{Dict{Int,Int}}
+ adjdict::Vector{Dict{Int, Int}}
# indices of the packages that were *explicitly* required
# used to favor their versions at resolution
@@ -221,7 +221,7 @@ mutable struct Graph
# stack of constraints/ignored packages:
# allows to keep a sort of "versioning" of the constraints
# such that the solver can implement tentative solutions
- solve_stack::Vector{Tuple{Vector{BitVector},BitVector}}
+ solve_stack::Vector{Tuple{Vector{BitVector}, BitVector}}
# states per package: same as in GraphData
spp::Vector{Int}
@@ -235,50 +235,55 @@ mutable struct Graph
cavfld::Vector{FieldValue}
function Graph(
- compat::Dict{UUID,Dict{VersionNumber,Dict{UUID,VersionSpec}}},
- compat_weak::Dict{UUID,Dict{VersionNumber,Set{UUID}}},
- uuid_to_name::Dict{UUID,String},
+ compat::Dict{UUID, Dict{VersionNumber, Dict{UUID, VersionSpec}}},
+ compat_weak::Dict{UUID, Dict{VersionNumber, Set{UUID}}},
+ uuid_to_name::Dict{UUID, String},
reqs::Requires,
- fixed::Dict{UUID,Fixed},
+ fixed::Dict{UUID, Fixed},
verbose::Bool = false,
- julia_version::Union{VersionNumber,Nothing} = VERSION
+ julia_version::Union{VersionNumber, Nothing} = VERSION
)
# Tell the resolver about julia itself
uuid_to_name[uuid_julia] = "julia"
if julia_version !== nothing
fixed[uuid_julia] = Fixed(julia_version)
- compat[uuid_julia] = Dict(julia_version => Dict{VersionNumber,Dict{UUID,VersionSpec}}())
+ compat[uuid_julia] = Dict(julia_version => Dict{VersionNumber, Dict{UUID, VersionSpec}}())
else
- compat[uuid_julia] = Dict{VersionNumber,Dict{UUID,VersionSpec}}()
+ compat[uuid_julia] = Dict{VersionNumber, Dict{UUID, VersionSpec}}()
end
data = GraphData(compat, uuid_to_name, verbose)
pkgs, np, spp, pdict, pvers, vdict, rlog = data.pkgs, data.np, data.spp, data.pdict, data.pvers, data.vdict, data.rlog
extended_deps = let spp = spp # Due to https://github.com/JuliaLang/julia/issues/15276
- [Vector{Dict{Int,BitVector}}(undef, spp[p0]-1) for p0 = 1:np]
+ [Vector{Dict{Int, BitVector}}(undef, spp[p0] - 1) for p0 in 1:np]
end
- for p0 = 1:np, v0 = 1:(spp[p0]-1)
+ for p0 in 1:np, v0 in 1:(spp[p0] - 1)
vn = pvers[p0][v0]
- req = Dict{Int,VersionSpec}()
+ req = Dict{Int, VersionSpec}()
uuid0 = pkgs[p0]
- vnmap = get(Dict{UUID,VersionSpec}, compat[uuid0], vn)
+ vnmap = get(Dict{UUID, VersionSpec}, compat[uuid0], vn)
for (uuid1, vs) in vnmap
p1 = pdict[uuid1]
p1 == p0 && error("Package $(pkgID(pkgs[p0], uuid_to_name)) version $vn has a dependency with itself")
# check conflicts instead of intersecting?
# (intersecting is used by fixed packages though...)
- req_p1 = get!(VersionSpec, req, p1)
- req[p1] = req_p1 ∩ vs
+ req_p1 = get(req, p1, nothing)
+ if req_p1 == nothing
+ req[p1] = vs
+ else
+ req[p1] = req_p1 ∩ vs
+ end
end
# Translate the requirements into bit masks
# Hot code, measure performance before changing
- req_msk = Dict{Int,BitVector}()
+ req_msk = Dict{Int, BitVector}()
+ sizehint!(req_msk, length(req))
maybe_weak = haskey(compat_weak, uuid0) && haskey(compat_weak[uuid0], vn)
for (p1, vs) in req
pv = pvers[p1]
req_msk_p1 = BitVector(undef, spp[p1])
- @inbounds for i in 1:spp[p1] - 1
+ @inbounds for i in 1:(spp[p1] - 1)
req_msk_p1[i] = pv[i] ∈ vs
end
weak = maybe_weak && (pkgs[p1] ∈ compat_weak[uuid0][vn])
@@ -288,20 +293,20 @@ mutable struct Graph
extended_deps[p0][v0] = req_msk
end
- gadj = [Int[] for p0 = 1:np]
- gmsk = [BitMatrix[] for p0 = 1:np]
+ gadj = [Int[] for p0 in 1:np]
+ gmsk = [BitMatrix[] for p0 in 1:np]
gconstr = let spp = spp # Due to https://github.com/JuliaLang/julia/issues/15276
- [trues(spp[p0]) for p0 = 1:np]
+ [trues(spp[p0]) for p0 in 1:np]
end
- adjdict = [Dict{Int,Int}() for p0 = 1:np]
+ adjdict = [Dict{Int, Int}() for p0 in 1:np]
- for p0 = 1:np, v0 = 1:(spp[p0]-1), (p1,rmsk1) in extended_deps[p0][v0]
+ for p0 in 1:np, v0 in 1:(spp[p0] - 1), (p1, rmsk1) in extended_deps[p0][v0]
@assert p0 ≠ p1
j0 = get(adjdict[p1], p0, length(gadj[p0]) + 1)
j1 = get(adjdict[p0], p1, length(gadj[p1]) + 1)
@assert (j0 > length(gadj[p0]) && j1 > length(gadj[p1])) ||
- (j0 ≤ length(gadj[p0]) && j1 ≤ length(gadj[p1]))
+ (j0 ≤ length(gadj[p0]) && j1 ≤ length(gadj[p1]))
if j0 > length(gadj[p0])
push!(gadj[p0], p1)
@@ -322,7 +327,7 @@ mutable struct Graph
bmt = gmsk[p1][j1]
end
- for v1 = 1:spp[p1]
+ for v1 in 1:spp[p1]
rmsk1[v1] && continue
bm[v1, v0] = false
bmt[v0, v1] = false
@@ -333,10 +338,12 @@ mutable struct Graph
fix_inds = Set{Int}()
ignored = falses(np)
- solve_stack = Tuple{Vector{BitVector},BitVector}[]
+ solve_stack = Tuple{Vector{BitVector}, BitVector}[]
- graph = new(data, gadj, gmsk, gconstr, adjdict, req_inds, fix_inds, ignored, solve_stack, spp, np,
- FieldValue[], FieldValue[], FieldValue[])
+ graph = new(
+ data, gadj, gmsk, gconstr, adjdict, req_inds, fix_inds, ignored, solve_stack, spp, np,
+ FieldValue[], FieldValue[], FieldValue[]
+ )
_add_fixed!(graph, fixed)
_add_reqs!(graph, reqs, :explicit_requirement)
@@ -351,14 +358,14 @@ mutable struct Graph
data = copy(graph.data)
np = graph.np
spp = data.spp
- gadj = [copy(graph.gadj[p0]) for p0 = 1:np]
- gmsk = [[copy(graph.gmsk[p0][j0]) for j0 = 1:length(gadj[p0])] for p0 = 1:np]
- gconstr = [copy(graph.gconstr[p0]) for p0 = 1:np]
- adjdict = [copy(graph.adjdict[p0]) for p0 = 1:np]
+ gadj = [copy(graph.gadj[p0]) for p0 in 1:np]
+ gmsk = [[copy(graph.gmsk[p0][j0]) for j0 in 1:length(gadj[p0])] for p0 in 1:np]
+ gconstr = [copy(graph.gconstr[p0]) for p0 in 1:np]
+ adjdict = [copy(graph.adjdict[p0]) for p0 in 1:np]
req_inds = copy(graph.req_inds)
fix_inds = copy(graph.fix_inds)
ignored = copy(graph.ignored)
- solve_stack = [([copy(gc0) for gc0 in sav_gconstr],copy(sav_ignored)) for (sav_gconstr,sav_ignored) in graph.solve_stack]
+ solve_stack = [([copy(gc0) for gc0 in sav_gconstr], copy(sav_ignored)) for (sav_gconstr, sav_ignored) in graph.solve_stack]
return new(data, gadj, gmsk, gconstr, adjdict, req_inds, fix_inds, ignored, solve_stack, spp, np)
end
@@ -382,11 +389,11 @@ function _add_reqs!(graph::Graph, reqs::Requires, reason; weak_reqs::Set{UUID} =
pdict = graph.data.pdict
pvers = graph.data.pvers
- for (rp,rvs) in reqs
+ for (rp, rvs) in reqs
haskey(pdict, rp) || error("unknown required package $(pkgID(rp, graph))")
rp0 = pdict[rp]
new_constr = trues(spp[rp0])
- for rv0 = 1:(spp[rp0]-1)
+ for rv0 in 1:(spp[rp0] - 1)
rvn = pvers[rp0][rv0]
rvn ∈ rvs || (new_constr[rv0] = false)
end
@@ -401,21 +408,21 @@ function _add_reqs!(graph::Graph, reqs::Requires, reason; weak_reqs::Set{UUID} =
end
"Add fixed packages to the graph, and their requirements."
-function add_fixed!(graph::Graph, fixed::Dict{UUID,Fixed})
+function add_fixed!(graph::Graph, fixed::Dict{UUID, Fixed})
_add_fixed!(graph, fixed)
check_constraints(graph)
# TODO: add fixed to graph data?
return graph
end
-function _add_fixed!(graph::Graph, fixed::Dict{UUID,Fixed})
+function _add_fixed!(graph::Graph, fixed::Dict{UUID, Fixed})
gconstr = graph.gconstr
spp = graph.spp
fix_inds = graph.fix_inds
pdict = graph.data.pdict
vdict = graph.data.vdict
- for (fp,fx) in fixed
+ for (fp, fx) in fixed
haskey(pdict, fp) || error("unknown fixed package $(pkgID(fp, graph))")
fp0 = pdict[fp]
fv0 = vdict[fp0][fx.version]
@@ -424,7 +431,7 @@ function _add_fixed!(graph::Graph, fixed::Dict{UUID,Fixed})
gconstr[fp0] .&= new_constr
push!(fix_inds, fp0)
bkitem = log_event_fixed!(graph, fp, fx)
- _add_reqs!(graph, fx.requires, (fp, bkitem); weak_reqs=fx.weak)
+ _add_reqs!(graph, fx.requires, (fp, bkitem); weak_reqs = fx.weak)
end
return graph
end
@@ -435,7 +442,7 @@ pkgID(p0::Int, data::GraphData) = pkgID(data.pkgs[p0], data)
pkgID(p, graph::Graph) = pkgID(p, graph.data)
## user-friendly representation of package IDs ##
-function pkgID(p::UUID, uuid_to_name::Dict{UUID,String})
+function pkgID(p::UUID, uuid_to_name::Dict{UUID, String})
name = get(uuid_to_name, p, "(unknown)")
uuid_short = string(p)[1:8]
return "$name [$uuid_short]"
@@ -467,18 +474,18 @@ function check_consistency(graph::Graph)
for x in Any[spp, gadj, gmsk, gconstr, adjdict, ignored, rlog.pool, pkgs, pdict, pvers, vdict]
@assert length(x)::Int == np
end
- for p0 = 1:np
+ for p0 in 1:np
@assert pdict[pkgs[p0]] == p0
spp0 = spp[p0]
@assert spp0 ≥ 1
pvers0 = pvers[p0]
vdict0 = vdict[p0]
@assert length(pvers0) == spp0 - 1
- for v0 = 1:(spp0-1)
+ for v0 in 1:(spp0 - 1)
@assert vdict0[pvers0[v0]] == v0
end
- for (vn,v0) in vdict0
- @assert 1 ≤ v0 ≤ spp0-1
+ for (vn, v0) in vdict0
+ @assert 1 ≤ v0 ≤ spp0 - 1
@assert pvers0[v0] == vn
end
gconstr0 = gconstr[p0]
@@ -489,18 +496,18 @@ function check_consistency(graph::Graph)
adjdict0 = adjdict[p0]
@assert length(gmsk0) == length(gadj0)
@assert length(adjdict0) == length(gadj0)
- for (j0,p1) in enumerate(gadj0)
+ for (j0, p1) in enumerate(gadj0)
@assert p1 ≠ p0
@assert adjdict[p1][p0] == j0
spp1 = spp[p1]
- @assert size(gmsk0[j0]) == (spp1,spp0)
+ @assert size(gmsk0[j0]) == (spp1, spp0)
j1 = adjdict0[p1]
gmsk1 = gmsk[p1]
# This assert is a bit too expensive
# @assert gmsk1[j1] == permutedims(gmsk0[j0])
end
end
- for (p,p0) in pdict
+ for (p, p0) in pdict
@assert 1 ≤ p0 ≤ np
@assert pkgs[p0] == p
@assert !haskey(pruned, p)
@@ -515,14 +522,14 @@ function check_consistency(graph::Graph)
@assert count(gconstr[p0]) ≤ 1 # note: the 0 case should be handled by check_constraints
end
- for (p,eq_cl) in eq_classes, (rvn,rvs) in eq_cl
+ for (p, eq_cl) in eq_classes, (rvn, rvs) in eq_cl
@assert rvn ∈ rvs
end
- for (sav_gconstr,sav_ignored) in solve_stack
+ for (sav_gconstr, sav_ignored) in solve_stack
@assert length(sav_ignored) == np
@assert length(sav_gconstr) == np
- for p0 = 1:np
+ for p0 in 1:np
@assert length(sav_gconstr[p0]) == spp[p0]
end
end
@@ -570,8 +577,8 @@ pkgID_color(pkgID) = CONFLICT_COLORS[mod1(hash(pkgID), end)]
logstr(pkgID) = logstr(pkgID, pkgID)
function logstr(pkgID, args...)
# workout the string with the color codes, check stderr to decide if color is enabled
- return sprint(args; context=stderr::IO) do io, iargs
- printstyled(io, iargs...; color=pkgID_color(pkgID))
+ return sprint(args; context = stderr::IO) do io, iargs
+ printstyled(io, iargs...; color = pkgID_color(pkgID))
end
end
@@ -584,7 +591,7 @@ end
Finds a minimal collection of ranges as a `VersionSpec`, that permits everything in the
`subset`, but does not permit anything else from the `pool`.
"""
-function range_compressed_versionspec(pool, subset=pool)
+function range_compressed_versionspec(pool, subset = pool)
length(subset) == 1 && return VersionSpec(only(subset))
# PREM-OPT: we keep re-sorting these, probably not required.
sort!(pool)
@@ -597,7 +604,7 @@ function range_compressed_versionspec(pool, subset=pool)
pool_ii = findfirst(isequal(range_start), pool) + 1 # skip-forward til we have started
for s in @view subset[2:end]
if s != pool[pool_ii]
- range_end = pool[pool_ii-1] # previous element was last in this range
+ range_end = pool[pool_ii - 1] # previous element was last in this range
push!(contiguous_subsets, VersionRange(range_start, range_end))
range_start = s # start a new range
while (s != pool[pool_ii]) # advance til time to start
@@ -616,7 +623,7 @@ function init_log!(data::GraphData)
pkgs = data.pkgs
pvers = data.pvers
rlog = data.rlog
- for p0 = 1:np
+ for p0 in 1:np
p = pkgs[p0]
id = pkgID(p0, data)
versions = pvers[p0]
@@ -655,8 +662,8 @@ function log_event_fixed!(graph::Graph, fp::UUID, fx::Fixed)
end
function _vs_string(p0::Int, vmask::BitVector, id::String, pvers::Vector{Vector{VersionNumber}})
- if any(vmask[1:(end-1)])
- vspec = range_compressed_versionspec(pvers[p0], pvers[p0][vmask[1:(end-1)]])
+ if any(vmask[1:(end - 1)])
+ vspec = range_compressed_versionspec(pvers[p0], pvers[p0][vmask[1:(end - 1)]])
vns = logstr(id, vspec)
vmask[end] && (vns *= " or uninstalled")
else
@@ -678,7 +685,7 @@ function log_event_req!(graph::Graph, rp::UUID, rvs::VersionSpec, reason)
other_entry = nothing
msg *= "an explicit requirement"
else
- other_p, other_entry = reason::Tuple{UUID,ResolveLogEntry}
+ other_p, other_entry = reason::Tuple{UUID, ResolveLogEntry}
if other_p == uuid_julia
msg *= "julia compatibility requirements"
other_entry = nothing # don't propagate the log
@@ -739,7 +746,7 @@ end
function log_event_global!(graph::Graph, msg::String)
rlog = graph.data.rlog
rlog.verbose && @info(msg)
- push!(rlog.globals, (nothing, msg))
+ return push!(rlog.globals, (nothing, msg))
end
function log_event_implicit_req!(graph::Graph, p1::Int, vmask::BitVector, p0::Int)
@@ -754,7 +761,7 @@ function log_event_implicit_req!(graph::Graph, p1::Int, vmask::BitVector, p0::In
other_p, other_entry = pkgs[p0], rlog.pool[pkgs[p0]]
other_id = pkgID(other_p, rlog)
if any(vmask)
- if all(vmask[1:(end-1)]) # Check if all versions are allowed (except uninstalled)
+ if all(vmask[1:(end - 1)]) # Check if all versions are allowed (except uninstalled)
@assert other_p ≠ uuid_julia
msg = "required (without additional version restrictions) by $(logstr(other_id))"
else
@@ -847,7 +854,7 @@ function log_event_maxsumsolved!(graph::Graph, p0::Int, s0::Int, why::Symbol)
if s0 == spp[p0] - 1
msg = "set by the solver to its maximum version: $ver"
else
- xver = logstr(id, pvers[p0][s0+1])
+ xver = logstr(id, pvers[p0][s0 + 1])
msg = "set by the solver to version: $ver (version $xver would violate its constraints)"
end
end
@@ -870,7 +877,7 @@ function log_event_maxsumsolved!(graph::Graph, p0::Int, s0::Int, p1::Int)
if s0 == spp[p0] - 1
msg = "set by the solver to its maximum version: $ver (installation is required by $other_id)"
else
- xver = logstr(id, pvers[p0][s0+1])
+ xver = logstr(id, pvers[p0][s0 + 1])
msg = "set by the solver version: $ver (version $xver would violate a dependency relation with $other_id)"
end
other_entry = rlog.pool[pkgs[p1]]
@@ -890,8 +897,8 @@ function log_event_eq_classes!(graph::Graph, p0::Int)
id = pkgID(p, rlog)
msg = "versions reduced by equivalence to: "
- if any(gconstr[p0][1:(end-1)])
- vspec = range_compressed_versionspec(pvers[p0], pvers[p0][gconstr[p0][1:(end-1)]])
+ if any(gconstr[p0][1:(end - 1)])
+ vspec = range_compressed_versionspec(pvers[p0], pvers[p0][gconstr[p0][1:(end - 1)]])
msg *= logstr(id, vspec)
gconstr[p0][end] && (msg *= " or uninstalled")
elseif gconstr[p0][end]
@@ -945,11 +952,12 @@ function showlog(io::IO, rlog::ResolveLog; view::Symbol = :plain)
seen = IdDict()
recursive = (view === :tree)
_show(io, rlog, rlog.globals, _logindent, seen, false)
- initentries = Union{ResolveLogEntry,Nothing}[event[1]::Union{ResolveLogEntry,Nothing} for event in rlog.init.events]
- for entry in sort!(initentries, by=(entry->pkgID(entry.pkg, rlog)))
+ initentries = Union{ResolveLogEntry, Nothing}[event[1]::Union{ResolveLogEntry, Nothing} for event in rlog.init.events]
+ for entry in sort!(initentries, by = (entry -> pkgID(entry.pkg, rlog)))
seen[entry] = true
_show(io, rlog, entry, _logindent, seen, recursive)
end
+ return
end
ansi_length(s) = textwidth(replace(s, r"\e\[[0-9]+(?:;[0-9]+)*m" => ""))
@@ -957,13 +965,14 @@ ansi_length(s) = textwidth(replace(s, r"\e\[[0-9]+(?:;[0-9]+)*m" => ""))
function showlogjournal(io::IO, rlog::ResolveLog)
journal = rlog.journal
id(p) = p == UUID0 ? "[global event]" : logstr(pkgID(p, rlog))
- padding = maximum(ansi_length(id(p)) for (p,_) in journal; init=0)
- for (p,msg) in journal
+ padding = maximum(ansi_length(id(p)) for (p, _) in journal; init = 0)
+ for (p, msg) in journal
s = id(p)
l = ansi_length(s)
pad = max(0, padding - l)
println(io, ' ', s, ' '^pad, ": ", msg)
end
+ return
end
"""
@@ -974,16 +983,17 @@ the same as for `showlog(io, rlog)`); the default is `:tree`.
function showlog(io::IO, rlog::ResolveLog, p::UUID; view::Symbol = :tree)
view ∈ [:plain, :tree] || throw(ArgumentError("the view argument should be `:plain` or `:tree`"))
entry = rlog.pool[p]
- if view === :tree
- _show(io, rlog, entry, _logindent, IdDict{Any,Any}(entry=>true), true)
+ return if view === :tree
+ _show(io, rlog, entry, _logindent, IdDict{Any, Any}(entry => true), true)
else
entries = ResolveLogEntry[entry]
function getentries(entry)
- for (other_entry,_) in entry.events
+ for (other_entry, _) in entry.events
(other_entry ≡ nothing || other_entry ∈ entries) && continue
push!(entries, other_entry)
getentries(other_entry)
end
+ return
end
getentries(entry)
for entry in entries
@@ -999,11 +1009,11 @@ function _show(io::IO, rlog::ResolveLog, entry::ResolveLogEntry, indent::String,
pre = toplevel ? "" : " "
println(io, indent, firstglyph, entry.header)
l = length(entry.events)
- for (i,(otheritem,msg)) in enumerate(entry.events)
+ for (i, (otheritem, msg)) in enumerate(entry.events)
if !isempty(msg)
- print(io, indent * pre, (i==l ? '└' : '├'), '─')
+ print(io, indent * pre, (i == l ? '└' : '├'), '─')
println(io, msg)
- newindent = indent * pre * (i==l ? " " : "│ ")
+ newindent = indent * pre * (i == l ? " " : "│ ")
else
newindent = indent
end
@@ -1016,6 +1026,7 @@ function _show(io::IO, rlog::ResolveLog, entry::ResolveLogEntry, indent::String,
seen[otheritem] = true
_show(io, rlog, otheritem, newindent, seen, recursive)
end
+ return
end
is_julia(graph::Graph, p0::Int) = graph.data.pkgs[p0] == uuid_julia
@@ -1030,7 +1041,7 @@ function check_constraints(graph::Graph)
id(p0::Int) = pkgID(p0, graph)
- for p0 = 1:np
+ for p0 in 1:np
any(gconstr[p0]) && continue
if exact
err_msg = "Unsatisfiable requirements detected for package $(logstr(id(p0))):\n"
@@ -1068,7 +1079,7 @@ function propagate_constraints!(graph::Graph, sources::Set{Int} = Set{Int}(); lo
# unless otherwise specified, start from packages which
# are not allowed to be uninstalled
staged = isempty(sources) ?
- Set{Int}(p0 for p0 = 1:np if !gconstr[p0][end]) :
+ Set{Int}(p0 for p0 in 1:np if !gconstr[p0][end]) :
sources
seen = copy(staged)
@@ -1077,7 +1088,7 @@ function propagate_constraints!(graph::Graph, sources::Set{Int} = Set{Int}(); lo
staged_next = Set{Int}()
for p0 in staged
gconstr0 = gconstr[p0]
- for (j1,p1) in enumerate(gadj[p0])
+ for (j1, p1) in enumerate(gadj[p0])
# if p1 is ignored, the relation between it and all its neighbors
# has already been propagated
ignored[p1] && continue
@@ -1087,7 +1098,7 @@ function propagate_constraints!(graph::Graph, sources::Set{Int} = Set{Int}(); lo
msk = gmsk[p0][j1]
# consider the sub-mask with only allowed versions of p0
- sub_msk = msk[:,gconstr0]
+ sub_msk = msk[:, gconstr0]
# if an entire row of the sub-mask is false, that version of p1
# is effectively forbidden
# (this is just like calling `any` row-wise)
@@ -1139,15 +1150,15 @@ function disable_unreachable!(graph::Graph, sources::Set{Int} = Set{Int}())
log_event_global!(graph, "disabling unreachable nodes")
# 2nd argument are packages which are not allowed to be uninstalled
- staged = union(sources, Set{Int}(p0 for p0 = 1:np if !gconstr[p0][end]))
+ staged = union(sources, Set{Int}(p0 for p0 in 1:np if !gconstr[p0][end]))
seen = copy(staged)
while !isempty(staged)
staged_next = Set{Int}()
for p0 in staged
- gconstr0idx = findall(gconstr[p0][1:(end-1)])
- for (j1,p1) in enumerate(gadj[p0])
- all(gmsk[p0][j1][end,gconstr0idx]) && continue # the package is not required by any of the allowed versions of p0
+ gconstr0idx = findall(gconstr[p0][1:(end - 1)])
+ for (j1, p1) in enumerate(gadj[p0])
+ all(gmsk[p0][j1][end, gconstr0idx]) && continue # the package is not required by any of the allowed versions of p0
p1 ∈ seen || push!(staged_next, p1)
end
end
@@ -1156,7 +1167,7 @@ function disable_unreachable!(graph::Graph, sources::Set{Int} = Set{Int}())
end
# Force uninstalled state for all unseen packages
- for p0 = 1:np
+ for p0 in 1:np
p0 ∈ seen && continue
gconstr0 = gconstr[p0]
@assert gconstr0[end]
@@ -1196,10 +1207,10 @@ function validate_versions!(graph::Graph, sources::Set{Int} = Set{Int}(); skim::
log_event_global!(graph, "validating versions [mode=$(skim ? "skim" : "deep")]")
- sumspp = sum(count(gconstr[p0]) for p0 = 1:np)
+ sumspp = sum(count(gconstr[p0]) for p0 in 1:np)
# TODO: better data structure (need a FIFO queue with fast membership loopup)
- squeue = union(sources, Set{Int}(p0 for p0 = 1:np if !gconstr[p0][end]))
+ squeue = union(sources, Set{Int}(p0 for p0 in 1:np if !gconstr[p0][end]))
isempty(squeue) && (squeue = Set{Int}(1:np))
queue = collect(squeue)
@@ -1234,7 +1245,7 @@ function validate_versions!(graph::Graph, sources::Set{Int} = Set{Int}(); skim::
unsat = !any(gconstr0)
if unsat
# we'll trigger a failure by pinning the highest version
- v0 = findlast(old_gconstr0[1:(end-1)])
+ v0 = findlast(old_gconstr0[1:(end - 1)])
@assert v0 ≢ nothing # this should be ensured by a previous pruning
# @info "pinning $(logstr(id(p0))) to version $(pvers[p0][v0])"
log_event_pin!(graph, pkgs[p0], pvers[p0][v0])
@@ -1257,9 +1268,9 @@ function validate_versions!(graph::Graph, sources::Set{Int} = Set{Int}(); skim::
end
end
- sumspp_new = sum(count(gconstr[p0]) for p0 = 1:np)
+ sumspp_new = sum(count(gconstr[p0]) for p0 in 1:np)
- log_event_global!(graph, "versions validation completed, stats (total n. of states): before = $(sumspp) after = $(sumspp_new) diff = $(sumspp-sumspp_new)")
+ log_event_global!(graph, "versions validation completed, stats (total n. of states): before = $(sumspp) after = $(sumspp_new) diff = $(sumspp - sumspp_new)")
return graph, changed
end
@@ -1274,7 +1285,7 @@ function compute_eq_classes!(graph::Graph)
np = graph.np
sumspp = sum(graph.spp)
- for p0 = 1:np
+ for p0 in 1:np
build_eq_classes1!(graph, p0)
end
@@ -1302,7 +1313,7 @@ function build_eq_classes1!(graph::Graph, p0::Int)
# concatenate all the constraints; the columns of the
# result encode the behavior of each version
cmat = vcat(BitMatrix(permutedims(gconstr[p0])), gmsk[p0]...)
- cvecs = [cmat[:,v0] for v0 = 1:spp[p0]]
+ cvecs = [cmat[:, v0] for v0 in 1:spp[p0]]
# find unique behaviors
repr_vecs = unique(cvecs)
@@ -1314,7 +1325,7 @@ function build_eq_classes1!(graph::Graph, p0::Int)
# group versions into sets that behave identically
eq_sets = [Set{Int}(v0 for v0 in 1:spp[p0] if cvecs[v0] == rvec) for rvec in repr_vecs]
- sort!(eq_sets, by=maximum)
+ sort!(eq_sets, by = maximum)
# each set is represented by its highest-valued member
repr_vers = map(maximum, eq_sets)
@@ -1324,7 +1335,7 @@ function build_eq_classes1!(graph::Graph, p0::Int)
# update equivalence classes
eq_vn(v0) = (v0 == spp[p0] ? nothing : pvers[p0][v0])
eq_classes0 = eq_classes[pkgs[p0]]
- for (v0,rvs) in zip(repr_vers, eq_sets)
+ for (v0, rvs) in zip(repr_vers, eq_sets)
@assert v0 ∈ rvs
vn0 = eq_vn(v0)
for v1 in rvs
@@ -1339,16 +1350,16 @@ function build_eq_classes1!(graph::Graph, p0::Int)
# reduce the constraints and the interaction matrices
spp[p0] = neq
gconstr[p0] = gconstr[p0][repr_vers]
- for (j1,p1) in enumerate(gadj[p0])
- gmsk[p0][j1] = gmsk[p0][j1][:,repr_vers]
+ for (j1, p1) in enumerate(gadj[p0])
+ gmsk[p0][j1] = gmsk[p0][j1][:, repr_vers]
j0 = adjdict[p0][p1]
- gmsk[p1][j0] = gmsk[p1][j0][repr_vers,:]
+ gmsk[p1][j0] = gmsk[p1][j0][repr_vers, :]
end
# reduce/rebuild version dictionaries
- pvers[p0] = pvers[p0][repr_vers[1:(end-1)]]
- vdict[p0] = Dict(vn => i for (i,vn) in enumerate(pvers[p0]))
+ pvers[p0] = pvers[p0][repr_vers[1:(end - 1)]]
+ vdict[p0] = Dict(vn => i for (i, vn) in enumerate(pvers[p0]))
# put a record in the log
log_event_eq_classes!(graph, p0)
@@ -1365,14 +1376,14 @@ function compute_eq_classes_soft!(graph::Graph; log_events::Bool = true)
ignored = graph.ignored
gconstr = graph.gconstr
- sumspp = sum(count(gconstr[p0]) for p0 = 1:np)
- for p0 = 1:np
+ sumspp = sum(count(gconstr[p0]) for p0 in 1:np)
+ for p0 in 1:np
ignored[p0] && continue
build_eq_classes_soft1!(graph, p0)
end
- sumspp_new = sum(count(gconstr[p0]) for p0 = 1:np)
+ sumspp_new = sum(count(gconstr[p0]) for p0 in 1:np)
- log_events && log_event_global!(graph, "computed version equivalence classes, stats (total n. of states): before = $(sumspp) after = $(sumspp_new) diff = $(sumspp_new-sumspp)")
+ log_events && log_event_global!(graph, "computed version equivalence classes, stats (total n. of states): before = $(sumspp) after = $(sumspp_new) diff = $(sumspp_new - sumspp)")
@assert check_consistency(graph)
@@ -1393,7 +1404,7 @@ function build_eq_classes_soft1!(graph::Graph, p0::Int)
gmsk0 = gmsk[p0]
gconstr0 = gconstr[p0]
eff_spp0 = count(gconstr0)
- cvecs = BitVector[vcat(BitVector(), (gmsk0[j1][gconstr[gadj0[j1]],v0] for j1 = 1:length(gadj0) if !ignored[gadj0[j1]])...) for v0 in findall(gconstr0)]
+ cvecs = BitVector[vcat(BitVector(), (gmsk0[j1][gconstr[gadj0[j1]], v0] for j1 in 1:length(gadj0) if !ignored[gadj0[j1]])...) for v0 in findall(gconstr0)]
@assert length(cvecs) == eff_spp0
@@ -1407,7 +1418,7 @@ function build_eq_classes_soft1!(graph::Graph, p0::Int)
# group versions into sets that behave identically
# each set is represented by its highest-valued member
- repr_vers = sort!(Int[findlast(isequal(repr_vecs[w0]), cvecs) for w0 = 1:neq])
+ repr_vers = sort!(Int[findlast(isequal(repr_vecs[w0]), cvecs) for w0 in 1:neq])
@assert all(>(0), repr_vers)
@assert repr_vers[end] == eff_spp0
@@ -1428,7 +1439,7 @@ function update_ignored!(graph::Graph)
gconstr = graph.gconstr
ignored = graph.ignored
- for p0 = 1:np
+ for p0 in 1:np
ignored[p0] = (count(gconstr[p0]) == 1)
end
@@ -1459,15 +1470,15 @@ function prune_graph!(graph::Graph)
# We will remove all packages that only have one allowed state
# (includes fixed packages and forbidden packages)
- pkg_mask = BitVector(count(gconstr[p0]) ≠ 1 for p0 = 1:np)
+ pkg_mask = BitVector(count(gconstr[p0]) ≠ 1 for p0 in 1:np)
new_np = count(pkg_mask)
# a map that translates the new index ∈ 1:new_np into its
# corresponding old index ∈ 1:np
old_idx = findall(pkg_mask)
# the reverse of the above
- new_idx = Dict{Int,Int}()
- for new_p0 = 1:new_np
+ new_idx = Dict{Int, Int}()
+ for new_p0 in 1:new_np
new_idx[old_idx[new_p0]] = new_p0
end
@@ -1500,7 +1511,7 @@ function prune_graph!(graph::Graph)
# Update packages records
new_pkgs = pkgs[pkg_mask]
- new_pdict = Dict(new_pkgs[new_p0]=>new_p0 for new_p0 = 1:new_np)
+ new_pdict = Dict(new_pkgs[new_p0] => new_p0 for new_p0 in 1:new_np)
new_ignored = ignored[pkg_mask]
empty!(graph.solve_stack)
@@ -1508,26 +1519,26 @@ function prune_graph!(graph::Graph)
# versions that aren't allowed (but not the "uninstalled" state)
function keep_vers(new_p0)
p0 = old_idx[new_p0]
- return BitVector((v0 == spp[p0]) | gconstr[p0][v0] for v0 = 1:spp[p0])
+ return BitVector((v0 == spp[p0]) | gconstr[p0][v0] for v0 in 1:spp[p0])
end
- vers_mask = [keep_vers(new_p0) for new_p0 = 1:new_np]
+ vers_mask = [keep_vers(new_p0) for new_p0 in 1:new_np]
# Update number of states per package
- new_spp = Int[count(vers_mask[new_p0]) for new_p0 = 1:new_np]
+ new_spp = Int[count(vers_mask[new_p0]) for new_p0 in 1:new_np]
# Update versions maps
function compute_pvers(new_p0)
p0 = old_idx[new_p0]
pvers0 = pvers[p0]
vmsk0 = vers_mask[new_p0]
- return pvers0[vmsk0[1:(end-1)]]
+ return pvers0[vmsk0[1:(end - 1)]]
end
- new_pvers = [compute_pvers(new_p0) for new_p0 = 1:new_np]
+ new_pvers = [compute_pvers(new_p0) for new_p0 in 1:new_np]
# explicitly writing out the following loop since the generator equivalent caused type inference failure
new_vdict = Vector{Dict{VersionNumber, Int}}(undef, length(new_pvers))
for new_p0 in eachindex(new_vdict)
- new_vdict[new_p0] = Dict(vn => v0 for (v0,vn) in enumerate(new_pvers[new_p0]))
+ new_vdict[new_p0] = Dict(vn => v0 for (v0, vn) in enumerate(new_pvers[new_p0]))
end
# The new constraints are all going to be `true`, except possibly
@@ -1538,13 +1549,13 @@ function prune_graph!(graph::Graph)
new_gconstr0[end] = gconstr[p0][end]
return new_gconstr0
end
- new_gconstr = [compute_gconstr(new_p0) for new_p0 = 1:new_np]
+ new_gconstr = [compute_gconstr(new_p0) for new_p0 in 1:new_np]
# Recreate the graph adjacency list, skipping some packages
- new_gadj = [Int[] for new_p0 = 1:new_np]
- new_adjdict = [Dict{Int,Int}() for new_p0 = 1:new_np]
+ new_gadj = [Int[] for new_p0 in 1:new_np]
+ new_adjdict = [Dict{Int, Int}() for new_p0 in 1:new_np]
- for new_p0 = 1:new_np, (j1,p1) in enumerate(gadj[old_idx[new_p0]])
+ for new_p0 in 1:new_np, (j1, p1) in enumerate(gadj[old_idx[new_p0]])
pkg_mask[p1] || continue
new_p1 = new_idx[p1]
@@ -1552,7 +1563,7 @@ function prune_graph!(graph::Graph)
new_j1 = get(new_adjdict[new_p0], new_p1, length(new_gadj[new_p1]) + 1)
@assert (new_j0 > length(new_gadj[new_p0]) && new_j1 > length(new_gadj[new_p1])) ||
- (new_j0 ≤ length(new_gadj[new_p0]) && new_j1 ≤ length(new_gadj[new_p1]))
+ (new_j0 ≤ length(new_gadj[new_p0]) && new_j1 ≤ length(new_gadj[new_p1]))
new_j0 > length(new_gadj[new_p0]) || continue
push!(new_gadj[new_p0], new_p1)
@@ -1570,16 +1581,16 @@ function prune_graph!(graph::Graph)
new_p1 = new_gadj[new_p0][new_j0]
p1 = old_idx[new_p1]
j0 = adjdict[p1][p0]
- return gmsk[p0][j0][vers_mask[new_p1],vers_mask[new_p0]]
+ return gmsk[p0][j0][vers_mask[new_p1], vers_mask[new_p0]]
end
- new_gmsk = [[compute_gmsk(new_p0, new_j0) for new_j0 = 1:length(new_gadj[new_p0])] for new_p0 = 1:new_np]
+ new_gmsk = [[compute_gmsk(new_p0, new_j0) for new_j0 in 1:length(new_gadj[new_p0])] for new_p0 in 1:new_np]
# Reduce log pool (the other items are still reachable through rlog.init)
- rlog.pool = Dict(p=>rlog.pool[p] for p in new_pkgs)
+ rlog.pool = Dict(p => rlog.pool[p] for p in new_pkgs)
# Done
- log_event_global!(graph, "pruned graph — stats (n. of packages, mean connectivity): before = ($np,$(sum(spp)/length(spp))) after = ($new_np,$(sum(new_spp)/length(new_spp)))")
+ log_event_global!(graph, "pruned graph — stats (n. of packages, mean connectivity): before = ($np,$(sum(spp) / length(spp))) after = ($new_np,$(sum(new_spp) / length(new_spp)))")
# Replace old data with new
data.pkgs = new_pkgs
@@ -1619,7 +1630,7 @@ function simplify_graph!(graph::Graph, sources::Set{Int} = Set{Int}(); validate_
compute_eq_classes!(graph)
prune_graph!(graph)
if validate_versions
- _, changed = validate_versions!(graph, sources; skim=true)
+ _, changed = validate_versions!(graph, sources; skim = true)
if changed
compute_eq_classes!(graph)
prune_graph!(graph)
diff --git a/src/Resolve/maxsum.jl b/src/Resolve/maxsum.jl
index 1c683a6bbc..7fb71f12c7 100644
--- a/src/Resolve/maxsum.jl
+++ b/src/Resolve/maxsum.jl
@@ -9,10 +9,14 @@ mutable struct MaxSumParams
max_time::Float64 # maximum allowed time
function MaxSumParams()
- accuracy = parse(Int, get(ENV, "JULIA_PKG_RESOLVE_ACCURACY",
- # Allow for `JULIA_PKGRESOLVE_ACCURACY` for backward
- # compatibility with Julia v1.7-
- get(ENV, "JULIA_PKGRESOLVE_ACCURACY", "1")))
+ accuracy = parse(
+ Int, get(
+ ENV, "JULIA_PKG_RESOLVE_ACCURACY",
+ # Allow for `JULIA_PKGRESOLVE_ACCURACY` for backward
+ # compatibility with Julia v1.7-
+ get(ENV, "JULIA_PKGRESOLVE_ACCURACY", "1")
+ )
+ )
accuracy > 0 || error("JULIA_PKG_RESOLVE_ACCURACY must be > 0")
dec_interval = accuracy * 5
dec_fraction = 0.05 / accuracy
@@ -50,19 +54,19 @@ mutable struct Messages
pdict = graph.data.pdict
## generate wveights (v0 == spp[p0] is the "uninstalled" state)
- vweight = [[VersionWeight(v0 < spp[p0] ? pvers[p0][v0] : v"0") for v0 = 1:spp[p0]] for p0 = 1:np]
+ vweight = [[VersionWeight(v0 < spp[p0] ? pvers[p0][v0] : v"0") for v0 in 1:spp[p0]] for p0 in 1:np]
# external fields: favor newest versions over older, and no-version over all;
# explicit requirements use level l1 instead of l2
fv(p0, v0) = p0 ∈ req_inds ?
- FieldValue(0, vweight[p0][v0], zero(VersionWeight), (v0==spp[p0])) :
- FieldValue(0, zero(VersionWeight), vweight[p0][v0], (v0==spp[p0]))
- fld = [[fv(p0, v0) for v0 = 1:spp[p0]] for p0 = 1:np]
+ FieldValue(0, vweight[p0][v0], zero(VersionWeight), (v0 == spp[p0])) :
+ FieldValue(0, zero(VersionWeight), vweight[p0][v0], (v0 == spp[p0]))
+ fld = [[fv(p0, v0) for v0 in 1:spp[p0]] for p0 in 1:np]
initial_fld = [copy(f0) for f0 in fld]
# allocate cavity messages
- msg = [[Field(undef, spp[p0]) for j1 = 1:length(gadj[p0])] for p0 = 1:np]
+ msg = [[Field(undef, spp[p0]) for j1 in 1:length(gadj[p0])] for p0 in 1:np]
msgs = new(msg, fld, initial_fld)
@@ -82,12 +86,12 @@ function reset_messages!(msgs::Messages, graph::Graph)
spp = graph.spp
gconstr = graph.gconstr
ignored = graph.ignored
- for p0 = 1:np
+ for p0 in 1:np
ignored[p0] && continue
- map(m->fill!(m, zero(FieldValue)), msg[p0])
+ map(m -> fill!(m, zero(FieldValue)), msg[p0])
copyto!(fld[p0], initial_fld[p0])
gconstr0 = gconstr[p0]
- for v0 = 1:spp[p0]
+ for v0 in 1:spp[p0]
gconstr0[v0] || (fld[p0][v0] = FieldValue(-1))
end
fld[p0] .-= maximum(fld[p0])
@@ -101,7 +105,7 @@ mutable struct SolutionTrace
num_nondecimated::Int
best::Vector{Int}
- staged::Union{Tuple{Int,Int},Nothing}
+ staged::Union{Tuple{Int, Int}, Nothing}
function SolutionTrace(graph::Graph)
np = graph.np
@@ -190,13 +194,13 @@ function update!(p0::Int, graph::Graph, msgs::Messages)
# newmsg = [maximum(cavfld[bm1[:,v1]]) for v1 = 1:spp1]
# )
# This is hot code for the resolver
- @inbounds for v1 = 1:spp1, v0 = 1:spp0
+ @inbounds for v1 in 1:spp1, v0 in 1:spp0
bm1[v0, v1] || continue
newmsg[v1] = max(newmsg[v1], cavfld[v0])
end
m = maximum(newmsg)
validmax(m) || return Unsat(p0) # No state available without violating some
- # hard constraint
+ # hard constraint
# normalize the new message
@inbounds for i in 1:length(newmsg)
@@ -226,12 +230,13 @@ end
function Random.shuffle!(perm::NodePerm)
p = perm.p
- for j = length(p):-1:2
+ for j in length(p):-1:2
k = perm.step % j + 1
p[j], p[k] = p[k], p[j]
perm.step += isodd(j) ? 1 : k
end
#@assert isperm(p)
+ return
end
Base.iterate(perm::NodePerm, state...) = iterate(perm.p, state...)
@@ -271,7 +276,7 @@ function decimate1!(p0::Int, graph::Graph, strace::SolutionTrace, msgs::Messages
haskey(adjdict[p0], p1) || continue
s1 = solution[p1]
j1 = adjdict[p0][p1]
- gmsk[p1][j1][s0,s1] || return 0
+ gmsk[p1][j1][s0, s1] || return 0
end
solution[p0] = s0
strace.num_nondecimated -= 1
@@ -285,14 +290,14 @@ function decimate!(graph::Graph, strace::SolutionTrace, msgs::Messages, n::Integ
fld = msgs.fld
@assert n ≥ 1
- dtrace = Tuple{Int,Int}[]
+ dtrace = Tuple{Int, Int}[]
dec = 0
- fldorder = sort(findall(.!(ignored)), by=p0->secondmax(fld[p0], gconstr[p0]))
+ fldorder = sort(findall(.!(ignored)), by = p0 -> secondmax(fld[p0], gconstr[p0]))
for p0 in fldorder
s0 = decimate1!(p0, graph, strace, msgs)
s0 == 0 && continue
- push!(dtrace, (p0,s0))
+ push!(dtrace, (p0, s0))
dec += 1
dec == n && break
end
@@ -305,15 +310,15 @@ function clean_forbidden!(graph::Graph, msgs::Messages)
gconstr = graph.gconstr
ignored = graph.ignored
fld = msgs.fld
- affected = Tuple{Int,Int}[]
+ affected = Tuple{Int, Int}[]
- for p0 = 1:np
+ for p0 in 1:np
ignored[p0] && continue
fld0 = fld[p0]
gconstr0 = gconstr[p0]
for v0 in findall(gconstr0)
validmax(fld0[v0]) && continue
- push!(affected, (p0,v0))
+ push!(affected, (p0, v0))
end
end
return affected
@@ -377,7 +382,7 @@ function converge!(graph::Graph, msgs::Messages, strace::SolutionTrace, perm::No
# perform some maxsum iterations, then decimate one node.
# If failure happens during this process, we bail (return :unsat)
it = 0
- for it = 1:params.dec_interval
+ for it in 1:params.dec_interval
maxdiff = iterate!(graph, msgs, perm)
if maxdiff isa Unsat
if is_best_sofar
@@ -397,7 +402,7 @@ function converge!(graph::Graph, msgs::Messages, strace::SolutionTrace, perm::No
isempty(affected) && @goto decimate
sources = Set{Int}()
- for (p0,v0) in affected
+ for (p0, v0) in affected
graph.gconstr[p0][v0] = false
push!(sources, p0)
end
@@ -419,7 +424,7 @@ function converge!(graph::Graph, msgs::Messages, strace::SolutionTrace, perm::No
if is_best_sofar
# pick the first decimation candidate
smx(p1) = secondmax(msgs.fld[p1], graph.gconstr[p1])
- p0 = reduce((p1,p2)->(smx(p1)≤smx(p2) ? p1 : p2), findall(.!(graph.ignored)))
+ p0 = reduce((p1, p2) -> (smx(p1) ≤ smx(p2) ? p1 : p2), findall(.!(graph.ignored)))
s0 = argmax(fld[p0])
strace.staged = dec_firstcandidate(graph, msgs)
end
@@ -437,7 +442,7 @@ function converge!(graph::Graph, msgs::Messages, strace::SolutionTrace, perm::No
push_snapshot!(graph)
# info("setting dtrace=$dtrace")
- for (p0,s0) in dtrace
+ for (p0, s0) in dtrace
@assert !graph.ignored[p0]
@assert graph.gconstr[p0][s0]
fill!(graph.gconstr[p0], false)
@@ -467,7 +472,7 @@ function converge!(graph::Graph, msgs::Messages, strace::SolutionTrace, perm::No
lentr == 1 && break
# halve the dtrace
- deleteat!(dtrace, ((lentr÷2)+1):lentr)
+ deleteat!(dtrace, ((lentr ÷ 2) + 1):lentr)
end
@assert length(dtrace) == 1
diff --git a/src/Resolve/versionweights.jl b/src/Resolve/versionweights.jl
index 5113dd83c9..5af27881db 100644
--- a/src/Resolve/versionweights.jl
+++ b/src/Resolve/versionweights.jl
@@ -14,19 +14,19 @@ VersionWeight(vn::VersionNumber) = VersionWeight(vn.major, vn.minor, vn.patch)
Base.zero(::Type{VersionWeight}) = VersionWeight()
-Base.typemin(::Type{VersionWeight}) = (x=typemin(Int64); VersionWeight(x, x, x))
+Base.typemin(::Type{VersionWeight}) = (x = typemin(Int64); VersionWeight(x, x, x))
Base.:(-)(a::VersionWeight, b::VersionWeight) =
- VersionWeight(a.major-b.major, a.minor-b.minor, a.patch-b.patch)
+ VersionWeight(a.major - b.major, a.minor - b.minor, a.patch - b.patch)
Base.:(+)(a::VersionWeight, b::VersionWeight) =
- VersionWeight(a.major+b.major, a.minor+b.minor, a.patch+b.patch)
+ VersionWeight(a.major + b.major, a.minor + b.minor, a.patch + b.patch)
Base.:(-)(a::VersionWeight) =
VersionWeight(-a.major, -a.minor, -a.patch)
function Base.isless(a::VersionWeight, b::VersionWeight)
- (a.major, a.minor, a.patch) < (b.major, b.minor, b.patch)
+ return (a.major, a.minor, a.patch) < (b.major, b.minor, b.patch)
end
Base.abs(a::VersionWeight) =
@@ -40,5 +40,5 @@ function Base.show(io::IO, a::VersionWeight)
a == VersionWeight(a.major, a.minor) && @goto done
print(io, ".", a.patch)
@label done
- print(io, ")")
+ return print(io, ")")
end
diff --git a/src/Types.jl b/src/Types.jl
index 859b93221a..80f623548e 100644
--- a/src/Types.jl
+++ b/src/Types.jl
@@ -10,7 +10,7 @@ import Base.string
using TOML
import ..Pkg, ..Registry
-import ..Pkg: GitTools, depots, depots1, logdir, set_readonly, safe_realpath, pkg_server, stdlib_dir, stdlib_path, isurl, stderr_f, RESPECT_SYSIMAGE_VERSIONS
+import ..Pkg: GitTools, depots, depots1, logdir, set_readonly, safe_realpath, pkg_server, stdlib_dir, stdlib_path, isurl, stderr_f, RESPECT_SYSIMAGE_VERSIONS, atomic_toml_write
import Base.BinaryPlatforms: Platform
using ..Pkg.Versions
import FileWatching
@@ -75,25 +75,26 @@ Base.showerror(io::IO, err::PkgError) = print(io, err.msg)
@enum(PreserveLevel, PRESERVE_ALL_INSTALLED, PRESERVE_ALL, PRESERVE_DIRECT, PRESERVE_SEMVER, PRESERVE_TIERED, PRESERVE_TIERED_INSTALLED, PRESERVE_NONE)
@enum(PackageMode, PKGMODE_PROJECT, PKGMODE_MANIFEST, PKGMODE_COMBINED)
-const VersionTypes = Union{VersionNumber,VersionSpec,UpgradeLevel}
+const VersionTypes = Union{VersionNumber, VersionSpec, UpgradeLevel}
Base.@kwdef mutable struct GitRepo
- source::Union{Nothing,String} = nothing
- rev::Union{Nothing,String} = nothing
+ source::Union{Nothing, String} = nothing
+ rev::Union{Nothing, String} = nothing
subdir::Union{String, Nothing} = nothing
end
Base.:(==)(r1::GitRepo, r2::GitRepo) =
r1.source == r2.source && r1.rev == r2.rev && r1.subdir == r2.subdir
-
+Base.hash(r::GitRepo, h::UInt) =
+ foldr(hash, [r.source, r.rev, r.subdir], init = h)
mutable struct PackageSpec
- name::Union{Nothing,String}
- uuid::Union{Nothing,UUID}
- version::Union{Nothing,VersionTypes,String}
- tree_hash::Union{Nothing,SHA1}
- repo::GitRepo
- path::Union{Nothing,String}
+ name::Union{Nothing, String}
+ uuid::Union{Nothing, UUID}
+ version::Union{Nothing, VersionTypes, String}
+ tree_hash::Union{Nothing, SHA1}
+ repo::GitRepo # private
+ path::Union{Nothing, String}
pinned::Bool
# used for input only
url::Union{Nothing, String}
@@ -101,36 +102,42 @@ mutable struct PackageSpec
subdir::Union{Nothing, String}
end
-function PackageSpec(; name::Union{Nothing,AbstractString} = nothing,
- uuid::Union{Nothing,UUID,AbstractString} = nothing,
- version::Union{Nothing,VersionTypes,AbstractString} = VersionSpec(),
- tree_hash::Union{Nothing,SHA1} = nothing,
- repo::GitRepo = GitRepo(),
- path::Union{Nothing,AbstractString} = nothing,
- pinned::Bool = false,
- url = nothing,
- rev = nothing,
- subdir = nothing)
+function PackageSpec(;
+ name::Union{Nothing, AbstractString} = nothing,
+ uuid::Union{Nothing, UUID, AbstractString} = nothing,
+ version::Union{Nothing, VersionTypes, AbstractString} = VersionSpec(),
+ tree_hash::Union{Nothing, SHA1} = nothing,
+ repo::GitRepo = GitRepo(),
+ path::Union{Nothing, AbstractString} = nothing,
+ pinned::Bool = false,
+ url = nothing,
+ rev = nothing,
+ subdir = nothing
+ )
uuid = uuid === nothing ? nothing : UUID(uuid)
return PackageSpec(name, uuid, version, tree_hash, repo, path, pinned, url, rev, subdir)
end
-PackageSpec(name::AbstractString) = PackageSpec(;name=name)::PackageSpec
-PackageSpec(name::AbstractString, uuid::UUID) = PackageSpec(;name=name, uuid=uuid)::PackageSpec
-PackageSpec(name::AbstractString, version::VersionTypes) = PackageSpec(;name=name, version=version)::PackageSpec
-PackageSpec(n::AbstractString, u::UUID, v::VersionTypes) = PackageSpec(;name=n, uuid=u, version=v)::PackageSpec
+PackageSpec(name::AbstractString) = PackageSpec(; name = name)::PackageSpec
+PackageSpec(name::AbstractString, uuid::UUID) = PackageSpec(; name = name, uuid = uuid)::PackageSpec
+PackageSpec(name::AbstractString, version::VersionTypes) = PackageSpec(; name = name, version = version)::PackageSpec
+PackageSpec(n::AbstractString, u::UUID, v::VersionTypes) = PackageSpec(; name = n, uuid = u, version = v)::PackageSpec
+# XXX: These definitions are a bit fishy. It seems to be used in an `==` call in status printing
function Base.:(==)(a::PackageSpec, b::PackageSpec)
return a.name == b.name && a.uuid == b.uuid && a.version == b.version &&
- a.tree_hash == b.tree_hash && a.repo == b.repo && a.path == b.path &&
- a.pinned == b.pinned
+ a.tree_hash == b.tree_hash && a.repo == b.repo && a.path == b.path &&
+ a.pinned == b.pinned
+end
+function Base.hash(a::PackageSpec, h::UInt)
+ return foldr(hash, [a.name, a.uuid, a.version, a.tree_hash, a.repo, a.path, a.pinned], init = h)
end
-function err_rep(pkg::PackageSpec)
+function err_rep(pkg::PackageSpec; quotes::Bool = true)
x = pkg.name !== nothing && pkg.uuid !== nothing ? x = "$(pkg.name) [$(string(pkg.uuid)[1:8])]" :
pkg.name !== nothing ? pkg.name :
pkg.uuid !== nothing ? string(pkg.uuid)[1:8] :
pkg.repo.source
- return "`$x`"
+ return quotes ? "`$x`" : x
end
has_name(pkg::PackageSpec) = pkg.name !== nothing
@@ -163,14 +170,14 @@ function Base.show(io::IO, pkg::PackageSpec)
for (field, value) in f
print(io, " ", field, " = ", string(value)::String, "\n")
end
- print(io, ")")
+ return print(io, ")")
end
############
# EnvCache #
############
-function projectfile_path(env_path::String; strict=false)
+function projectfile_path(env_path::String; strict = false)
for name in Base.project_names
maybe_file = joinpath(env_path, name)
isfile(maybe_file) && return maybe_file
@@ -178,8 +185,8 @@ function projectfile_path(env_path::String; strict=false)
return strict ? nothing : joinpath(env_path, "Project.toml")
end
-function manifestfile_path(env_path::String; strict=false)
- for name in Base.manifest_names
+function manifestfile_path(env_path::String; strict = false)
+ for name in (Base.manifest_names..., "AppManifest.toml")
maybe_file = joinpath(env_path, name)
isfile(maybe_file) && return maybe_file
end
@@ -197,7 +204,7 @@ function manifestfile_path(env_path::String; strict=false)
end
end
-function find_project_file(env::Union{Nothing,String}=nothing)
+function find_project_file(env::Union{Nothing, String} = nothing)
project_file = nothing
if env isa Nothing
project_file = Base.active_project()
@@ -215,14 +222,18 @@ function find_project_file(env::Union{Nothing,String}=nothing)
end
end
if isfile(project_file) && !contains(basename(project_file), "Project")
- pkgerror("""
- The active project has been set to a file that isn't a Project file: $project_file
- The project path must be to a Project file or directory.
- """)
+ pkgerror(
+ """
+ The active project has been set to a file that isn't a Project file: $project_file
+ The project path must be to a Project file or directory.
+ """
+ )
end
@assert project_file isa String &&
- (isfile(project_file) || !ispath(project_file) ||
- isdir(project_file) && isempty(readdir(project_file)))
+ (
+ isfile(project_file) || !ispath(project_file) ||
+ isdir(project_file) && isempty(readdir(project_file))
+ )
return Pkg.safe_realpath(project_file)
end
@@ -233,8 +244,14 @@ end
Base.:(==)(t1::Compat, t2::Compat) = t1.val == t2.val
Base.hash(t::Compat, h::UInt) = hash(t.val, h)
+struct AppInfo
+ name::String
+ julia_command::Union{String, Nothing}
+ submodule::Union{String, Nothing}
+ other::Dict{String, Any}
+end
Base.@kwdef mutable struct Project
- other::Dict{String,Any} = Dict{String,Any}()
+ other::Dict{String, Any} = Dict{String, Any}()
# Fields
name::Union{String, Nothing} = nothing
uuid::Union{UUID, Nothing} = nothing
@@ -242,37 +259,39 @@ Base.@kwdef mutable struct Project
manifest::Union{String, Nothing} = nothing
entryfile::Union{String, Nothing} = nothing
# Sections
- deps::Dict{String,UUID} = Dict{String,UUID}()
+ deps::Dict{String, UUID} = Dict{String, UUID}()
# deps that are also in weakdeps for backwards compat
# we do not store them in deps because we want to ignore them
# but for writing out the project file we need to remember them:
- _deps_weak::Dict{String,UUID} = Dict{String,UUID}()
- weakdeps::Dict{String,UUID} = Dict{String,UUID}()
- exts::Dict{String,Union{Vector{String}, String}} = Dict{String,String}()
- extras::Dict{String,UUID} = Dict{String,UUID}()
- targets::Dict{String,Vector{String}} = Dict{String,Vector{String}}()
- compat::Dict{String,Compat} = Dict{String,Compat}()
- sources::Dict{String,Dict{String, String}} = Dict{String,Dict{String, String}}()
+ _deps_weak::Dict{String, UUID} = Dict{String, UUID}()
+ weakdeps::Dict{String, UUID} = Dict{String, UUID}()
+ exts::Dict{String, Union{Vector{String}, String}} = Dict{String, String}()
+ extras::Dict{String, UUID} = Dict{String, UUID}()
+ targets::Dict{String, Vector{String}} = Dict{String, Vector{String}}()
+ apps::Dict{String, AppInfo} = Dict{String, AppInfo}()
+ compat::Dict{String, Compat} = Dict{String, Compat}()
+ sources::Dict{String, Dict{String, String}} = Dict{String, Dict{String, String}}()
workspace::Dict{String, Any} = Dict{String, Any}()
+ readonly::Bool = false
end
Base.:(==)(t1::Project, t2::Project) = all(x -> (getfield(t1, x) == getfield(t2, x))::Bool, fieldnames(Project))
-Base.hash(t::Project, h::UInt) = foldr(hash, [getfield(t, x) for x in fieldnames(Project)], init=h)
-
+Base.hash(t::Project, h::UInt) = foldr(hash, [getfield(t, x) for x in fieldnames(Project)], init = h)
Base.@kwdef mutable struct PackageEntry
- name::Union{String,Nothing} = nothing
- version::Union{VersionNumber,Nothing} = nothing
- path::Union{String,Nothing} = nothing
- entryfile::Union{String,Nothing} = nothing
+ name::Union{String, Nothing} = nothing
+ version::Union{VersionNumber, Nothing} = nothing
+ path::Union{String, Nothing} = nothing
+ entryfile::Union{String, Nothing} = nothing
pinned::Bool = false
repo::GitRepo = GitRepo()
- tree_hash::Union{Nothing,SHA1} = nothing
- deps::Dict{String,UUID} = Dict{String,UUID}()
- weakdeps::Dict{String,UUID} = Dict{String,UUID}()
- exts::Dict{String,Union{Vector{String}, String}} = Dict{String,String}()
+ tree_hash::Union{Nothing, SHA1} = nothing
+ deps::Dict{String, UUID} = Dict{String, UUID}()
+ weakdeps::Dict{String, UUID} = Dict{String, UUID}()
+ exts::Dict{String, Union{Vector{String}, String}} = Dict{String, String}()
uuid::Union{Nothing, UUID} = nothing
- other::Union{Dict,Nothing} = nothing
+ apps::Dict{String, AppInfo} = Dict{String, AppInfo}() # used by AppManifest.toml
+ other::Union{Dict, Nothing} = nothing
end
Base.:(==)(t1::PackageEntry, t2::PackageEntry) = t1.name == t2.name &&
t1.version == t2.version &&
@@ -284,19 +303,20 @@ Base.:(==)(t1::PackageEntry, t2::PackageEntry) = t1.name == t2.name &&
t1.deps == t2.deps &&
t1.weakdeps == t2.weakdeps &&
t1.exts == t2.exts &&
- t1.uuid == t2.uuid
- # omits `other`
-Base.hash(x::PackageEntry, h::UInt) = foldr(hash, [x.name, x.version, x.path, x.entryfile, x.pinned, x.repo, x.tree_hash, x.deps, x.weakdeps, x.exts, x.uuid], init=h) # omits `other`
+ t1.uuid == t2.uuid &&
+ t1.apps == t2.apps
+# omits `other`
+Base.hash(x::PackageEntry, h::UInt) = foldr(hash, [x.name, x.version, x.path, x.entryfile, x.pinned, x.repo, x.tree_hash, x.deps, x.weakdeps, x.exts, x.uuid], init = h) # omits `other`
Base.@kwdef mutable struct Manifest
- julia_version::Union{Nothing,VersionNumber} = nothing # only set to VERSION when resolving
- project_hash::Union{Nothing,SHA1} = nothing
+ julia_version::Union{Nothing, VersionNumber} = nothing # only set to VERSION when resolving
+ project_hash::Union{Nothing, SHA1} = nothing
manifest_format::VersionNumber = v"2.0.0"
- deps::Dict{UUID,PackageEntry} = Dict{UUID,PackageEntry}()
- other::Dict{String,Any} = Dict{String,Any}()
+ deps::Dict{UUID, PackageEntry} = Dict{UUID, PackageEntry}()
+ other::Dict{String, Any} = Dict{String, Any}()
end
Base.:(==)(t1::Manifest, t2::Manifest) = all(x -> (getfield(t1, x) == getfield(t2, x))::Bool, fieldnames(Manifest))
-Base.hash(m::Manifest, h::UInt) = foldr(hash, [getfield(m, x) for x in fieldnames(Manifest)], init=h)
+Base.hash(m::Manifest, h::UInt) = foldr(hash, [getfield(m, x) for x in fieldnames(Manifest)], init = h)
Base.getindex(m::Manifest, i_or_key) = getindex(m.deps, i_or_key)
Base.get(m::Manifest, key, default) = get(m.deps, key, default)
Base.setindex!(m::Manifest, i_or_key, value) = setindex!(m.deps, i_or_key, value)
@@ -310,19 +330,19 @@ Base.haskey(m::Manifest, key) = haskey(m.deps, key)
function Base.show(io::IO, pkg::PackageEntry)
f = []
- pkg.name !== nothing && push!(f, "name" => pkg.name)
- pkg.version !== nothing && push!(f, "version" => pkg.version)
- pkg.tree_hash !== nothing && push!(f, "tree_hash" => pkg.tree_hash)
- pkg.path !== nothing && push!(f, "dev/path" => pkg.path)
- pkg.pinned && push!(f, "pinned" => pkg.pinned)
- pkg.repo.source !== nothing && push!(f, "url/path" => "`$(pkg.repo.source)`")
- pkg.repo.rev !== nothing && push!(f, "rev" => pkg.repo.rev)
- pkg.repo.subdir !== nothing && push!(f, "subdir" => pkg.repo.subdir)
+ pkg.name !== nothing && push!(f, "name" => pkg.name)
+ pkg.version !== nothing && push!(f, "version" => pkg.version)
+ pkg.tree_hash !== nothing && push!(f, "tree_hash" => pkg.tree_hash)
+ pkg.path !== nothing && push!(f, "dev/path" => pkg.path)
+ pkg.pinned && push!(f, "pinned" => pkg.pinned)
+ pkg.repo.source !== nothing && push!(f, "url/path" => "`$(pkg.repo.source)`")
+ pkg.repo.rev !== nothing && push!(f, "rev" => pkg.repo.rev)
+ pkg.repo.subdir !== nothing && push!(f, "subdir" => pkg.repo.subdir)
print(io, "PackageEntry(\n")
for (field, value) in f
print(io, " ", field, " = ", value, "\n")
end
- print(io, ")")
+ return print(io, ")")
end
function find_root_base_project(start_project::String)
@@ -332,14 +352,15 @@ function find_root_base_project(start_project::String)
base_project_file === nothing && return project_file
project_file = base_project_file
end
+ return
end
-function collect_workspace(base_project_file::String, d::Dict{String, Project}=Dict{String, Project}())
+function collect_workspace(base_project_file::String, d::Dict{String, Project} = Dict{String, Project}())
base_project = read_project(base_project_file)
d[base_project_file] = base_project
base_project_file_dir = dirname(base_project_file)
- projects = get(base_project.workspace, "projects", nothing)::Union{Nothing,Vector{String}}
+ projects = get(base_project.workspace, "projects", nothing)::Union{Nothing, Vector{String}}
projects === nothing && return d
project_paths = [abspath(base_project_file_dir, project) for project in projects]
for project_path in project_paths
@@ -353,7 +374,7 @@ end
mutable struct EnvCache
# environment info:
- env::Union{Nothing,String}
+ env::Union{Nothing, String}
# paths for files:
project_file::String
manifest_file::String
@@ -361,14 +382,14 @@ mutable struct EnvCache
pkg::Union{PackageSpec, Nothing}
# cache of metadata:
project::Project
- workspace::Dict{String,Project} # paths relative to base
+ workspace::Dict{String, Project} # paths relative to base
manifest::Manifest
# What these where at creation of the EnvCache
original_project::Project
original_manifest::Manifest
end
-function EnvCache(env::Union{Nothing,String}=nothing)
+function EnvCache(env::Union{Nothing, String} = nothing)
# @show env
project_file = find_project_file(env)
# @show project_file
@@ -405,7 +426,8 @@ function EnvCache(env::Union{Nothing,String}=nothing)
write_env_usage(manifest_file, "manifest_usage.toml")
manifest = read_manifest(manifest_file)
- env′ = EnvCache(env,
+ env′ = EnvCache(
+ env,
project_file,
manifest_file,
project_package,
@@ -414,7 +436,7 @@ function EnvCache(env::Union{Nothing,String}=nothing)
manifest,
deepcopy(project),
deepcopy(manifest),
- )
+ )
return env′
end
@@ -443,7 +465,7 @@ Base.@kwdef mutable struct Context
registries::Vector{Registry.RegistryInstance} = Registry.reachable_registries()
# The Julia Version to resolve with respect to
- julia_version::Union{VersionNumber,Nothing} = VERSION
+ julia_version::Union{VersionNumber, Nothing} = VERSION
end
project_uuid(env::EnvCache) = project_uuid(env.project, env.project_file)
@@ -460,25 +482,25 @@ is_project_uuid(env::EnvCache, uuid::UUID) = project_uuid(env) == uuid
# Context #
###########
-const FORMER_STDLIBS = ["DelimitedFiles", "Statistics"]
-const FORMER_STDLIBS_UUIDS = Set{UUID}()
-const STDLIB = Ref{DictStdLibs}()
+const UPGRADABLE_STDLIBS = ["DelimitedFiles", "Statistics"]
+const UPGRADABLE_STDLIBS_UUIDS = Set{UUID}()
+const STDLIB = Ref{Union{DictStdLibs, Nothing}}(nothing)
function load_stdlib()
stdlib = DictStdLibs()
for name in readdir(stdlib_dir())
- projfile = projectfile_path(stdlib_path(name); strict=true)
+ projfile = projectfile_path(stdlib_path(name); strict = true)
nothing === projfile && continue
project = parse_toml(projfile)
uuid = get(project, "uuid", nothing)::Union{String, Nothing}
v_str = get(project, "version", nothing)::Union{String, Nothing}
version = isnothing(v_str) ? nothing : VersionNumber(v_str)
nothing === uuid && continue
- if name in FORMER_STDLIBS
- push!(FORMER_STDLIBS_UUIDS, UUID(uuid))
+ if name in UPGRADABLE_STDLIBS
+ push!(UPGRADABLE_STDLIBS_UUIDS, UUID(uuid))
continue
end
- deps = UUID.(values(get(project, "deps", Dict{String,Any}())))
- weakdeps = UUID.(values(get(project, "weakdeps", Dict{String,Any}())))
+ deps = UUID.(values(get(project, "deps", Dict{String, Any}())))
+ weakdeps = UUID.(values(get(project, "weakdeps", Dict{String, Any}())))
stdlib[UUID(uuid)] = StdlibInfo(name, Base.UUID(uuid), version, deps, weakdeps)
end
return stdlib
@@ -491,7 +513,7 @@ function stdlibs()
return Dict(uuid => (info.name, info.version) for (uuid, info) in stdlib_infos())
end
function stdlib_infos()
- if !isassigned(STDLIB)
+ if STDLIB[] === nothing
STDLIB[] = load_stdlib()
end
return STDLIB[]
@@ -499,12 +521,12 @@ end
is_stdlib(uuid::UUID) = uuid in keys(stdlib_infos())
# Includes former stdlibs
function is_or_was_stdlib(uuid::UUID, julia_version::Union{VersionNumber, Nothing})
- return is_stdlib(uuid, julia_version) || uuid in FORMER_STDLIBS_UUIDS
+ return is_stdlib(uuid, julia_version) || uuid in UPGRADABLE_STDLIBS_UUIDS
end
function historical_stdlibs_check()
- if isempty(STDLIBS_BY_VERSION)
+ return if isempty(STDLIBS_BY_VERSION)
pkgerror("If you want to set `julia_version`, you must first populate the `STDLIBS_BY_VERSION` global constant. Try `using HistoricalStdlibVersions`")
end
end
@@ -551,7 +573,7 @@ end
# `nothing` if that stdlib is not versioned. We only store version numbers for
# stdlibs that are external and thus could be installed from their repositories,
# e.g. things like `GMP_jll`, `Tar`, etc...
-function stdlib_version(uuid::UUID, julia_version::Union{VersionNumber,Nothing})
+function stdlib_version(uuid::UUID, julia_version::Union{VersionNumber, Nothing})
last_stdlibs = get_last_stdlibs(julia_version)
if !(uuid in keys(last_stdlibs))
return nothing
@@ -564,12 +586,22 @@ function is_unregistered_stdlib(uuid::UUID)
return haskey(UNREGISTERED_STDLIBS, uuid)
end
-Context!(kw_context::Vector{Pair{Symbol,Any}})::Context =
+Context!(kw_context::Vector{Pair{Symbol, Any}})::Context =
Context!(Context(); kw_context...)
function Context!(ctx::Context; kwargs...)
for (k, v) in kwargs
setfield!(ctx, k, v)
end
+
+ # Highlight for logging purposes if julia_version is set to a different version than current VERSION
+ if haskey(kwargs, :julia_version) && ctx.julia_version !== nothing && ctx.julia_version != VERSION
+ Pkg.printpkgstyle(
+ ctx.io, :Context,
+ "Pkg is operating with julia_version set to `$(ctx.julia_version)`",
+ color = Base.warn_color()
+ )
+ end
+
return ctx
end
@@ -591,24 +623,29 @@ function workspace_resolve_hash(env::EnvCache)
alldeps = merge(deps, weakdeps)
compats = Dict(name => Pkg.Operations.get_compat_workspace(env, name) for (name, uuid) in alldeps)
iob = IOBuffer()
- for (name, uuid) in sort!(collect(deps); by=first)
+ for (name, uuid) in sort!(collect(deps); by = first)
println(iob, name, "=", uuid)
end
println(iob)
- for (name, uuid) in sort!(collect(weakdeps); by=first)
+ for (name, uuid) in sort!(collect(weakdeps); by = first)
println(iob, name, "=", uuid)
end
println(iob)
- for (name, compat) in sort!(collect(compats); by=first)
+ for (name, compat) in sort!(collect(compats); by = first)
println(iob, name, "=", compat)
end
str = String(take!(iob))
return bytes2hex(sha1(str))
end
-function write_env_usage(source_file::AbstractString, usage_filepath::AbstractString)
+
+write_env_usage(source_file::AbstractString, usage_filepath::AbstractString) =
+ write_env_usage([source_file], usage_filepath)
+
+function write_env_usage(source_files, usage_filepath::AbstractString)
# Don't record ghost usage
- !isfile(source_file) && return
+ source_files = filter(isfile, source_files)
+ isempty(source_files) && return
# Ensure that log dir exists
!ispath(logdir()) && mkpath(logdir())
@@ -630,7 +667,9 @@ function write_env_usage(source_file::AbstractString, usage_filepath::AbstractSt
end
# record new usage
- usage[source_file] = [Dict("time" => timestamp)]
+ for source_file in source_files
+ usage[source_file] = [Dict("time" => timestamp)]
+ end
# keep only latest usage info
for k in keys(usage)
@@ -646,15 +685,10 @@ function write_env_usage(source_file::AbstractString, usage_filepath::AbstractSt
usage[k] = [Dict("time" => maximum(times))]
end
- tempfile = tempname()
try
- open(tempfile, "w") do io
- TOML.print(io, usage, sorted=true)
- end
- TOML.parsefile(tempfile) # compare to `usage` ?
- mv(tempfile, usage_file; force=true) # only mv if parse succeeds
+ atomic_toml_write(usage_file, usage, sorted = true)
catch err
- @error "Failed to write valid usage file `$usage_file`" tempfile
+ @error "Failed to write valid usage file `$usage_file`" exception = err
end
end
return
@@ -677,13 +711,15 @@ function read_package(path::String)
return project
end
-const refspecs = ["+refs/*:refs/remotes/cache/*"]
+const refspecs = ["+refs/heads/*:refs/remotes/cache/heads/*"]
function relative_project_path(project_file::String, path::String)
# compute path relative the project
# realpath needed to expand symlinks before taking the relative path
- return relpath(Pkg.safe_realpath(abspath(path)),
- Pkg.safe_realpath(dirname(project_file)))
+ return relpath(
+ Pkg.safe_realpath(abspath(path)),
+ Pkg.safe_realpath(dirname(project_file))
+ )
end
function devpath(env::EnvCache, name::AbstractString, shared::Bool)
@@ -699,7 +735,7 @@ function error_if_in_sysimage(pkg::PackageSpec)
return false
end
pkgid = Base.PkgId(pkg.uuid, pkg.name)
- if Base.in_sysimage(pkgid)
+ return if Base.in_sysimage(pkgid)
pkgerror("Tried to develop or add by URL package $(pkgid) which is already in the sysimage, use `Pkg.respect_sysimage_versions(false)` to disable this check.")
end
end
@@ -758,13 +794,13 @@ function handle_repo_develop!(ctx::Context, pkg::PackageSpec, shared::Bool)
resolve_projectfile!(pkg, package_path)
end
if pkg.repo.subdir !== nothing
- repo_name = split(pkg.repo.source, '/', keepempty=false)[end]
+ repo_name = split(pkg.repo.source, '/', keepempty = false)[end]
# Make the develop path prettier.
if endswith(repo_name, ".git")
- repo_name = chop(repo_name, tail=4)
+ repo_name = chop(repo_name, tail = 4)
end
if endswith(repo_name, ".jl")
- repo_name = chop(repo_name, tail=3)
+ repo_name = chop(repo_name, tail = 3)
end
dev_path = devpath(ctx.env, repo_name, shared)
else
@@ -783,7 +819,7 @@ function handle_repo_develop!(ctx::Context, pkg::PackageSpec, shared::Bool)
new = true
end
if !has_uuid(pkg)
- resolve_projectfile!(pkg, dev_path)
+ resolve_projectfile!(pkg, joinpath(dev_path, pkg.repo.subdir === nothing ? "" : pkg.repo.subdir))
end
error_if_in_sysimage(pkg)
pkg.path = shared ? dev_path : relative_project_path(ctx.env.manifest_file, dev_path)
@@ -812,10 +848,10 @@ function set_repo_source_from_registry!(ctx, pkg)
registry_resolve!(ctx.registries, pkg)
# Didn't find the package in the registry, but maybe it exists in the updated registry
if !isresolved(pkg)
- Pkg.Operations.update_registries(ctx; force=false)
+ Pkg.Operations.update_registries(ctx; force = false)
registry_resolve!(ctx.registries, pkg)
end
- ensure_resolved(ctx, ctx.env.manifest, [pkg]; registry=true)
+ ensure_resolved(ctx, ctx.env.manifest, [pkg]; registry = true)
# We might have been given a name / uuid combo that does not have an entry in the registry
for reg in ctx.registries
regpkg = get(reg, pkg.uuid, nothing)
@@ -839,7 +875,7 @@ function handle_repo_add!(ctx::Context, pkg::PackageSpec)
@assert pkg.repo.rev !== nothing
# First, we try resolving against the manifest and current registry to avoid updating registries if at all possible.
# This also handles the case where we _only_ wish to switch the tracking branch for a package.
- manifest_resolve!(ctx.env.manifest, [pkg]; force=true)
+ manifest_resolve!(ctx.env.manifest, [pkg]; force = true)
if isresolved(pkg)
entry = manifest_info(ctx.env.manifest, pkg.uuid)
if entry !== nothing
@@ -857,7 +893,16 @@ function handle_repo_add!(ctx::Context, pkg::PackageSpec)
repo_source = pkg.repo.source
if !isurl(pkg.repo.source)
if isdir(pkg.repo.source)
- if !isdir(joinpath(pkg.repo.source, ".git"))
+ git_path = joinpath(pkg.repo.source, ".git")
+ if isfile(git_path)
+ # Git submodule: .git is a file containing path to actual git directory
+ git_ref_content = readline(git_path)
+ git_info_path = joinpath(dirname(git_path), last(split(git_ref_content)))
+ else
+ # Regular git repo: .git is a directory
+ git_info_path = git_path
+ end
+ if !isdir(git_info_path)
msg = "Did not find a git repository at `$(pkg.repo.source)`"
if isfile(joinpath(pkg.repo.source, "Project.toml")) || isfile(joinpath(pkg.repo.source, "JuliaProject.toml"))
msg *= ", perhaps you meant `Pkg.develop`?"
@@ -865,6 +910,11 @@ function handle_repo_add!(ctx::Context, pkg::PackageSpec)
pkgerror(msg)
end
LibGit2.with(GitTools.check_valid_HEAD, LibGit2.GitRepo(pkg.repo.source)) # check for valid git HEAD
+ LibGit2.with(LibGit2.GitRepo(pkg.repo.source)) do repo
+ if LibGit2.isdirty(repo)
+ @warn "The repository at `$(pkg.repo.source)` has uncommitted changes. Consider using `Pkg.develop` instead of `Pkg.add` if you want to work with the current state of the repository."
+ end
+ end
pkg.repo.source = isabspath(pkg.repo.source) ? safe_realpath(pkg.repo.source) : relative_project_path(ctx.env.manifest_file, pkg.repo.source)
repo_source = normpath(joinpath(dirname(ctx.env.manifest_file), pkg.repo.source))
else
@@ -872,10 +922,10 @@ function handle_repo_add!(ctx::Context, pkg::PackageSpec)
end
end
- let repo_source = repo_source
+ return let repo_source = repo_source
# The type-assertions below are necessary presumably due to julia#36454
- LibGit2.with(GitTools.ensure_clone(ctx.io, add_repo_cache_path(repo_source::Union{Nothing,String}), repo_source::Union{Nothing,String}; isbare=true)) do repo
- repo_source_typed = repo_source::Union{Nothing,String}
+ LibGit2.with(GitTools.ensure_clone(ctx.io, add_repo_cache_path(repo_source::Union{Nothing, String}), repo_source::Union{Nothing, String}; isbare = true)) do repo
+ repo_source_typed = repo_source::Union{Nothing, String}
GitTools.check_valid_HEAD(repo)
# If the user didn't specify rev, assume they want the default (master) branch if on a branch, otherwise the current commit
@@ -887,7 +937,14 @@ function handle_repo_add!(ctx::Context, pkg::PackageSpec)
fetched = false
if obj_branch === nothing
fetched = true
- GitTools.fetch(ctx.io, repo, repo_source_typed; refspecs=refspecs)
+ # For pull requests, fetch the specific PR ref
+ if startswith(rev_or_hash, "pull/") && endswith(rev_or_hash, "/head")
+ pr_number = rev_or_hash[6:(end - 5)] # Extract number from "pull/X/head"
+ pr_refspecs = ["+refs/pull/$(pr_number)/head:refs/remotes/cache/pull/$(pr_number)/head"]
+ GitTools.fetch(ctx.io, repo, repo_source_typed; refspecs = pr_refspecs)
+ else
+ GitTools.fetch(ctx.io, repo, repo_source_typed; refspecs = refspecs)
+ end
obj_branch = get_object_or_branch(repo, rev_or_hash)
if obj_branch === nothing
pkgerror("Did not find rev $(rev_or_hash) in repository")
@@ -899,7 +956,7 @@ function handle_repo_add!(ctx::Context, pkg::PackageSpec)
innerentry = manifest_info(ctx.env.manifest, pkg.uuid)
ispinned = innerentry !== nothing && innerentry.pinned
if isbranch && !fetched && !ispinned
- GitTools.fetch(ctx.io, repo, repo_source_typed; refspecs=refspecs)
+ GitTools.fetch(ctx.io, repo, repo_source_typed; refspecs = refspecs)
gitobject, isbranch = get_object_or_branch(repo, rev_or_hash)
end
@@ -934,7 +991,7 @@ function handle_repo_add!(ctx::Context, pkg::PackageSpec)
# Otherwise, move the temporary path into its correct place and set read only
mkpath(version_path)
- mv(temp_path, version_path; force=true)
+ mv(temp_path, version_path; force = true)
set_readonly(version_path)
return true
end
@@ -951,16 +1008,20 @@ function handle_repos_add!(ctx::Context, pkgs::AbstractVector{PackageSpec})
end
function resolve_projectfile!(pkg, project_path)
- project_file = projectfile_path(project_path; strict=true)
- project_file === nothing && pkgerror(string("could not find project file (Project.toml or JuliaProject.toml) in package at `",
- something(pkg.repo.source, pkg.path, project_path), "` maybe `subdir` needs to be specified"))
+ project_file = projectfile_path(project_path; strict = true)
+ project_file === nothing && pkgerror(
+ string(
+ "could not find project file (Project.toml or JuliaProject.toml) in package at `",
+ something(pkg.repo.source, pkg.path, project_path), "` maybe `subdir` needs to be specified"
+ )
+ )
project_data = read_package(project_file)
if pkg.uuid === nothing || pkg.uuid == project_data.uuid
pkg.uuid = project_data.uuid
else
pkgerror("UUID `$(project_data.uuid)` given by project file `$project_file` does not match given UUID `$(pkg.uuid)`")
end
- if pkg.name === nothing || pkg.name == project_data.name
+ return if pkg.name === nothing || pkg.name == project_data.name
pkg.name = project_data.name
else
pkgerror("name `$(project_data.name)` given by project file `$project_file` does not match given name `$(pkg.name)`")
@@ -972,6 +1033,16 @@ get_object_or_branch(repo, rev::SHA1) =
# Returns nothing if rev could not be found in repo
function get_object_or_branch(repo, rev)
+ # Handle pull request references
+ if startswith(rev, "pull/") && endswith(rev, "/head")
+ try
+ gitobject = LibGit2.GitObject(repo, "remotes/cache/" * rev)
+ return gitobject, true
+ catch err
+ err isa LibGit2.GitError && err.code == LibGit2.Error.ENOTFOUND || rethrow()
+ end
+ end
+
try
gitobject = LibGit2.GitObject(repo, "remotes/cache/heads/" * rev)
return gitobject, true
@@ -1006,6 +1077,7 @@ function project_resolve!(env::EnvCache, pkgs::AbstractVector{PackageSpec})
pkg.uuid = env.pkg.uuid
end
end
+ return
end
# Disambiguate name/uuid package specifications using project info.
@@ -1023,12 +1095,13 @@ function project_deps_resolve!(env::EnvCache, pkgs::AbstractVector{PackageSpec})
pkg.name = names[pkg.uuid]
end
end
+ return
end
# Disambiguate name/uuid package specifications using manifest info.
-function manifest_resolve!(manifest::Manifest, pkgs::AbstractVector{PackageSpec}; force=false)
- uuids = Dict{String,Vector{UUID}}()
- names = Dict{UUID,String}()
+function manifest_resolve!(manifest::Manifest, pkgs::AbstractVector{PackageSpec}; force = false)
+ uuids = Dict{String, Vector{UUID}}()
+ names = Dict{UUID, String}()
for (uuid, entry) in manifest
push!(get!(uuids, entry.name, UUID[]), uuid)
names[uuid] = entry.name # can be duplicate but doesn't matter
@@ -1041,6 +1114,7 @@ function manifest_resolve!(manifest::Manifest, pkgs::AbstractVector{PackageSpec}
pkg.name = names[pkg.uuid]
end
end
+ return
end
# Disambiguate name/uuid package specifications using registry info.
@@ -1077,20 +1151,23 @@ function stdlib_resolve!(pkgs::AbstractVector{PackageSpec})
end
end
end
+ return
end
include("fuzzysorting.jl")
# Ensure that all packages are fully resolved
-function ensure_resolved(ctx::Context, manifest::Manifest,
+function ensure_resolved(
+ ctx::Context, manifest::Manifest,
pkgs::AbstractVector{PackageSpec};
- registry::Bool=false,)::Nothing
- unresolved_uuids = Dict{String,Vector{UUID}}()
+ registry::Bool = false,
+ )::Nothing
+ unresolved_uuids = Dict{String, Vector{UUID}}()
for pkg in pkgs
has_uuid(pkg) && continue
!has_name(pkg) && pkgerror("Package $pkg has neither name nor uuid")
uuids = [uuid for (uuid, entry) in manifest if entry.name == pkg.name]
- sort!(uuids, by=uuid -> uuid.value)
+ sort!(uuids, by = uuid -> uuid.value)
unresolved_uuids[pkg.name] = uuids
end
unresolved_names = UUID[]
@@ -1102,7 +1179,7 @@ function ensure_resolved(ctx::Context, manifest::Manifest,
msg = sprint(context = ctx.io) do io
if !isempty(unresolved_uuids)
print(io, "The following package names could not be resolved:")
- for (name, uuids) in sort!(collect(unresolved_uuids), by=lowercase ∘ first)
+ for (name, uuids) in sort!(collect(unresolved_uuids), by = lowercase ∘ first)
print(io, "\n * $name (")
if length(uuids) == 0
what = ["project", "manifest"]
@@ -1157,7 +1234,7 @@ function registered_uuids(registries::Vector{Registry.RegistryInstance}, name::S
return uuids
end
# Determine a single UUID for a given name, prompting if needed
-function registered_uuid(registries::Vector{Registry.RegistryInstance}, name::String)::Union{Nothing,UUID}
+function registered_uuid(registries::Vector{Registry.RegistryInstance}, name::String)::Union{Nothing, UUID}
uuids = registered_uuids(registries, name)
length(uuids) == 0 && return nothing
length(uuids) == 1 && return first(uuids)
@@ -1177,7 +1254,7 @@ end
# Determine current name for a given package UUID
-function registered_name(registries::Vector{Registry.RegistryInstance}, uuid::UUID)::Union{Nothing,String}
+function registered_name(registries::Vector{Registry.RegistryInstance}, uuid::UUID)::Union{Nothing, String}
name = nothing
for reg in registries
regpkg = get(reg, uuid, nothing)
@@ -1193,20 +1270,21 @@ end
# Find package by UUID in the manifest file
manifest_info(::Manifest, uuid::Nothing) = nothing
-function manifest_info(manifest::Manifest, uuid::UUID)::Union{PackageEntry,Nothing}
+function manifest_info(manifest::Manifest, uuid::UUID)::Union{PackageEntry, Nothing}
return get(manifest, uuid, nothing)
end
-function write_env(env::EnvCache; update_undo=true,
- skip_writing_project::Bool=false)
+function write_env(
+ env::EnvCache; update_undo = true,
+ skip_writing_project::Bool = false
+ )
# Verify that the generated manifest is consistent with `sources`
for (pkg, uuid) in env.project.deps
path, repo = get_path_repo(env.project, pkg)
entry = manifest_info(env.manifest, uuid)
if path !== nothing
- @assert entry.path == path
+ @assert normpath(entry.path) == normpath(path)
end
if repo != GitRepo()
- @assert entry.repo.source == repo.source
if repo.rev !== nothing
@assert entry.repo.rev == repo.rev
end
@@ -1214,6 +1292,22 @@ function write_env(env::EnvCache; update_undo=true,
@assert entry.repo.subdir == repo.subdir
end
end
+ if entry !== nothing
+ if entry.path !== nothing
+ env.project.sources[pkg] = Dict("path" => entry.path)
+ elseif entry.repo != GitRepo()
+ d = Dict{String, String}()
+ entry.repo.source !== nothing && (d["url"] = entry.repo.source)
+ entry.repo.rev !== nothing && (d["rev"] = entry.repo.rev)
+ entry.repo.subdir !== nothing && (d["subdir"] = entry.repo.subdir)
+ env.project.sources[pkg] = d
+ end
+ end
+ end
+
+ # Check if the environment is readonly before attempting to write
+ if env.project.readonly
+ pkgerror("Cannot modify a readonly environment. The project at $(env.project_file) is marked as readonly.")
end
if (env.project != env.original_project) && (!skip_writing_project)
@@ -1222,9 +1316,8 @@ function write_env(env::EnvCache; update_undo=true,
if env.manifest != env.original_manifest
write_manifest(env)
end
- update_undo && Pkg.API.add_snapshot_to_undo(env)
+ return update_undo && Pkg.API.add_snapshot_to_undo(env)
end
-
end # module
diff --git a/src/Versions.jl b/src/Versions.jl
index 22d489fe1a..a572e198fa 100644
--- a/src/Versions.jl
+++ b/src/Versions.jl
@@ -8,13 +8,13 @@ export VersionBound, VersionRange, VersionSpec, semver_spec, isjoinable
# VersionBound #
################
struct VersionBound
- t::NTuple{3,UInt32}
+ t::NTuple{3, UInt32}
n::Int
- function VersionBound(tin::NTuple{n,Integer}) where n
+ function VersionBound(tin::NTuple{n, Integer}) where {n}
n <= 3 || throw(ArgumentError("VersionBound: you can only specify major, minor and patch versions"))
- n == 0 && return new((0, 0, 0), n)
- n == 1 && return new((tin[1], 0, 0), n)
- n == 2 && return new((tin[1], tin[2], 0), n)
+ n == 0 && return new((0, 0, 0), n)
+ n == 1 && return new((tin[1], 0, 0), n)
+ n == 2 && return new((tin[1], tin[2], 0), n)
n == 3 && return new((tin[1], tin[2], tin[3]), n)
error("invalid $n")
end
@@ -40,7 +40,7 @@ end
function isless_ll(a::VersionBound, b::VersionBound)
m, n = a.n, b.n
- for i = 1:min(m, n)
+ for i in 1:min(m, n)
a[i] < b[i] && return true
a[i] > b[i] && return false
end
@@ -52,7 +52,7 @@ stricterlower(a::VersionBound, b::VersionBound) = isless_ll(a, b) ? b : a
# Comparison between two upper bounds
function isless_uu(a::VersionBound, b::VersionBound)
m, n = a.n, b.n
- for i = 1:min(m, n)
+ for i in 1:min(m, n)
a[i] < b[i] && return true
a[i] > b[i] && return false
end
@@ -70,7 +70,7 @@ function isjoinable(up::VersionBound, lo::VersionBound)
up.n == 0 && lo.n == 0 && return true
if up.n == lo.n
n = up.n
- for i = 1:(n - 1)
+ for i in 1:(n - 1)
up[i] > lo[i] && return true
up[i] < lo[i] && return false
end
@@ -78,7 +78,7 @@ function isjoinable(up::VersionBound, lo::VersionBound)
return true
else
l = min(up.n, lo.n)
- for i = 1:l
+ for i in 1:l
up[i] > lo[i] && return true
up[i] < lo[i] && return false
end
@@ -96,19 +96,19 @@ function VersionBound(s::AbstractString)
l = lastindex(s)
p = findnext('.', s, 1)
- b = p === nothing ? l : (p-1)
+ b = p === nothing ? l : (p - 1)
i = parse(Int64, SubString(s, 1, b))
p === nothing && return VersionBound(i)
- a = p+1
+ a = p + 1
p = findnext('.', s, a)
- b = p === nothing ? l : (p-1)
+ b = p === nothing ? l : (p - 1)
j = parse(Int64, SubString(s, a, b))
p === nothing && return VersionBound(i, j)
- a = p+1
+ a = p + 1
p = findnext('.', s, a)
- b = p === nothing ? l : (p-1)
+ b = p === nothing ? l : (p - 1)
k = parse(Int64, SubString(s, a, b))
p === nothing && return VersionBound(i, j, k)
@@ -130,9 +130,9 @@ struct VersionRange
return new(lo, hi)
end
end
-VersionRange(b::VersionBound=VersionBound()) = VersionRange(b, b)
-VersionRange(t::Integer...) = VersionRange(VersionBound(t...))
-VersionRange(v::VersionNumber) = VersionRange(VersionBound(v))
+VersionRange(b::VersionBound = VersionBound()) = VersionRange(b, b)
+VersionRange(t::Integer...) = VersionRange(VersionBound(t...))
+VersionRange(v::VersionNumber) = VersionRange(VersionBound(v))
VersionRange(lo::VersionNumber, hi::VersionNumber) = VersionRange(VersionBound(lo), VersionBound(hi))
# The vast majority of VersionRanges are in practice equal to "1"
@@ -149,7 +149,7 @@ function VersionRange(s::AbstractString)
end
function Base.isempty(r::VersionRange)
- for i = 1:min(r.lower.n, r.upper.n)
+ for i in 1:min(r.lower.n, r.upper.n)
r.lower[i] > r.upper[i] && return true
r.lower[i] < r.upper[i] && return false
end
@@ -158,7 +158,7 @@ end
function Base.print(io::IO, r::VersionRange)
m, n = r.lower.n, r.upper.n
- if (m, n) == (0, 0)
+ return if (m, n) == (0, 0)
print(io, '*')
elseif m == 0
print(io, "0 -")
@@ -184,14 +184,14 @@ function Base.union!(ranges::Vector{<:VersionRange})
l = length(ranges)
l == 0 && return ranges
- sort!(ranges, lt=(a, b) -> (isless_ll(a.lower, b.lower) || (a.lower == b.lower && isless_uu(a.upper, b.upper))))
+ sort!(ranges, lt = (a, b) -> (isless_ll(a.lower, b.lower) || (a.lower == b.lower && isless_uu(a.upper, b.upper))))
k0 = 1
ks = findfirst(!isempty, ranges)
ks === nothing && return empty!(ranges)
lo, up, k0 = ranges[ks].lower, ranges[ks].upper, 1
- for k = (ks + 1):l
+ for k in (ks + 1):l
isempty(ranges[k]) && continue
lo1, up1 = ranges[k].lower, ranges[k].upper
if isjoinable(up, lo1)
@@ -253,7 +253,7 @@ function Base.intersect(A::VersionSpec, B::VersionSpec)
ranges[i] = intersect(a, b)
i += 1
end
- VersionSpec(ranges)
+ return VersionSpec(ranges)
end
Base.intersect(a::VersionNumber, B::VersionSpec) = a in B ? VersionSpec(a) : empty_versionspec
Base.intersect(A::VersionSpec, b::VersionNumber) = intersect(b, A)
@@ -273,11 +273,11 @@ function Base.print(io::IO, s::VersionSpec)
isempty(s) && return print(io, _empty_symbol)
length(s.ranges) == 1 && return print(io, s.ranges[1])
print(io, '[')
- for i = 1:length(s.ranges)
+ for i in 1:length(s.ranges)
1 < i && print(io, ", ")
print(io, s.ranges[i])
end
- print(io, ']')
+ return print(io, ']')
end
function Base.show(io::IO, s::VersionSpec)
@@ -286,13 +286,13 @@ function Base.show(io::IO, s::VersionSpec)
print(io, '"', s.ranges[1], '"')
else
print(io, "[")
- for i = 1:length(s.ranges)
+ for i in 1:length(s.ranges)
1 < i && print(io, ", ")
print(io, '"', s.ranges[i], '"')
end
print(io, ']')
end
- print(io, ")")
+ return print(io, ")")
end
@@ -328,7 +328,7 @@ function semver_interval(m::RegexMatch)
@assert length(m.captures) == 4
n_significant = count(x -> x !== nothing, m.captures) - 1
typ, _major, _minor, _patch = m.captures
- major = parse(Int, _major)
+ major = parse(Int, _major)
minor = (n_significant < 2) ? 0 : parse(Int, _minor)
patch = (n_significant < 3) ? 0 : parse(Int, _patch)
if n_significant == 3 && major == 0 && minor == 0 && patch == 0
@@ -337,7 +337,7 @@ function semver_interval(m::RegexMatch)
# Default type is :caret
vertyp = (typ == "" || typ == "^") ? :caret : :tilde
v0 = VersionBound((major, minor, patch))
- if vertyp === :caret
+ return if vertyp === :caret
if major != 0
return VersionRange(v0, VersionBound((v0[1],)))
elseif minor != 0
@@ -346,14 +346,14 @@ function semver_interval(m::RegexMatch)
if n_significant == 1
return VersionRange(v0, VersionBound((0,)))
elseif n_significant == 2
- return VersionRange(v0, VersionBound((0, 0,)))
+ return VersionRange(v0, VersionBound((0, 0)))
else
return VersionRange(v0, VersionBound((0, 0, v0[3])))
end
end
else
if n_significant == 3 || n_significant == 2
- return VersionRange(v0, VersionBound((v0[1], v0[2],)))
+ return VersionRange(v0, VersionBound((v0[1], v0[2])))
else
return VersionRange(v0, VersionBound((v0[1],)))
end
@@ -365,7 +365,7 @@ function inequality_interval(m::RegexMatch)
@assert length(m.captures) == 4
typ, _major, _minor, _patch = m.captures
n_significant = count(x -> x !== nothing, m.captures) - 1
- major = parse(Int, _major)
+ major = parse(Int, _major)
minor = (n_significant < 2) ? 0 : parse(Int, _minor)
patch = (n_significant < 3) ? 0 : parse(Int, _patch)
if n_significant == 3 && major == 0 && minor == 0 && patch == 0
@@ -376,18 +376,18 @@ function inequality_interval(m::RegexMatch)
nil = VersionBound(0, 0, 0)
if v[3] == 0
if v[2] == 0
- v1 = VersionBound(v[1]-1)
+ v1 = VersionBound(v[1] - 1)
else
- v1 = VersionBound(v[1], v[2]-1)
+ v1 = VersionBound(v[1], v[2] - 1)
end
else
- v1 = VersionBound(v[1], v[2], v[3]-1)
+ v1 = VersionBound(v[1], v[2], v[3] - 1)
end
return VersionRange(nil, v1)
elseif occursin(r"^=\s*$", typ)
return VersionRange(v)
elseif occursin(r"^>=\s*$", typ) || occursin(r"^≥\s*$", typ)
- return VersionRange(v, _inf)
+ return VersionRange(v, _inf)
else
error("invalid prefix $typ")
end
@@ -399,32 +399,40 @@ function hyphen_interval(m::RegexMatch)
if isnothing(_lower_minor)
lower_bound = VersionBound(parse(Int, _lower_major))
elseif isnothing(_lower_patch)
- lower_bound = VersionBound(parse(Int, _lower_major),
- parse(Int, _lower_minor))
+ lower_bound = VersionBound(
+ parse(Int, _lower_major),
+ parse(Int, _lower_minor)
+ )
else
- lower_bound = VersionBound(parse(Int, _lower_major),
- parse(Int, _lower_minor),
- parse(Int, _lower_patch))
+ lower_bound = VersionBound(
+ parse(Int, _lower_major),
+ parse(Int, _lower_minor),
+ parse(Int, _lower_patch)
+ )
end
if isnothing(_upper_minor)
upper_bound = VersionBound(parse(Int, _upper_major))
elseif isnothing(_upper_patch)
- upper_bound = VersionBound(parse(Int, _upper_major),
- parse(Int, _upper_minor))
+ upper_bound = VersionBound(
+ parse(Int, _upper_major),
+ parse(Int, _upper_minor)
+ )
else
- upper_bound = VersionBound(parse(Int, _upper_major),
- parse(Int, _upper_minor),
- parse(Int, _upper_patch))
+ upper_bound = VersionBound(
+ parse(Int, _upper_major),
+ parse(Int, _upper_minor),
+ parse(Int, _upper_patch)
+ )
end
return VersionRange(lower_bound, upper_bound)
end
const version = "v?([0-9]+?)(?:\\.([0-9]+?))?(?:\\.([0-9]+?))?"
const ver_regs =
-Pair{Regex,Any}[
- Regex("^([~^]?)?$version\$") => semver_interval, # 0.5 ^0.4 ~0.3.2
- Regex("^((?:≥\\s*)|(?:>=\\s*)|(?:=\\s*)|(?:<\\s*)|(?:=\\s*))v?$version\$") => inequality_interval,# < 0.2 >= 0.5,2
- Regex("^[\\s]*$version[\\s]*?\\s-\\s[\\s]*?$version[\\s]*\$") => hyphen_interval, # 0.7 - 1.3
+ Pair{Regex, Any}[
+ Regex("^([~^]?)?$version\$") => semver_interval, # 0.5 ^0.4 ~0.3.2
+ Regex("^((?:≥\\s*)|(?:>=\\s*)|(?:=\\s*)|(?:<\\s*)|(?:=\\s*))v?$version\$") => inequality_interval, # < 0.2 >= 0.5,2
+ Regex("^[\\s]*$version[\\s]*?\\s-\\s[\\s]*?$version[\\s]*\$") => hyphen_interval, # 0.7 - 1.3
]
end
diff --git a/src/fuzzysorting.jl b/src/fuzzysorting.jl
index 0d8d842b7f..b753a2b1c4 100644
--- a/src/fuzzysorting.jl
+++ b/src/fuzzysorting.jl
@@ -1,143 +1,300 @@
module FuzzySorting
-_displaysize(io::IO) = displaysize(io)::Tuple{Int,Int}
+_displaysize(io::IO) = displaysize(io)::Tuple{Int, Int}
-# This code is duplicated from REPL.jl
-# Considering breaking this into an independent package
+# Character confusion weights for fuzzy matching
+const CHARACTER_CONFUSIONS = Dict(
+ ('a', 'e') => 0.5, ('e', 'a') => 0.5,
+ ('i', 'y') => 0.5, ('y', 'i') => 0.5,
+ ('u', 'o') => 0.5, ('o', 'u') => 0.5,
+ ('c', 'k') => 0.3, ('k', 'c') => 0.3,
+ ('s', 'z') => 0.3, ('z', 's') => 0.3,
+ # Keyboard proximity (QWERTY layout)
+ ('q', 'w') => 0.4, ('w', 'q') => 0.4,
+ ('w', 'e') => 0.4, ('e', 'w') => 0.4,
+ ('e', 'r') => 0.4, ('r', 'e') => 0.4,
+ ('r', 't') => 0.4, ('t', 'r') => 0.4,
+ ('t', 'y') => 0.4, ('y', 't') => 0.4,
+ ('y', 'u') => 0.4, ('u', 'y') => 0.4,
+ ('u', 'i') => 0.4, ('i', 'u') => 0.4,
+ ('i', 'o') => 0.4, ('o', 'i') => 0.4,
+ ('o', 'p') => 0.4, ('p', 'o') => 0.4,
+ ('a', 's') => 0.4, ('s', 'a') => 0.4,
+ ('s', 'd') => 0.4, ('d', 's') => 0.4,
+ ('d', 'f') => 0.4, ('f', 'd') => 0.4,
+ ('f', 'g') => 0.4, ('g', 'f') => 0.4,
+ ('g', 'h') => 0.4, ('h', 'g') => 0.4,
+ ('h', 'j') => 0.4, ('j', 'h') => 0.4,
+ ('j', 'k') => 0.4, ('k', 'j') => 0.4,
+ ('k', 'l') => 0.4, ('l', 'k') => 0.4,
+ ('z', 'x') => 0.4, ('x', 'z') => 0.4,
+ ('x', 'c') => 0.4, ('c', 'x') => 0.4,
+ ('c', 'v') => 0.4, ('v', 'c') => 0.4,
+ ('v', 'b') => 0.4, ('b', 'v') => 0.4,
+ ('b', 'n') => 0.4, ('n', 'b') => 0.4,
+ ('n', 'm') => 0.4, ('m', 'n') => 0.4,
+)
-# Search & Rescue
-# Utilities for correcting user mistakes and (eventually)
-# doing full documentation searches from the repl.
+# Enhanced fuzzy scoring with multiple factors
+function fuzzyscore(needle::AbstractString, haystack::AbstractString)
+ needle_lower, haystack_lower = lowercase(needle), lowercase(haystack)
-# Fuzzy Search Algorithm
+ # Factor 1: Prefix matching bonus (highest priority)
+ prefix_score = prefix_match_score(needle_lower, haystack_lower)
-function matchinds(needle, haystack; acronym::Bool = false)
- chars = collect(needle)
- is = Int[]
- lastc = '\0'
- for (i, char) in enumerate(haystack)
- while !isempty(chars) && isspace(first(chars))
- popfirst!(chars) # skip spaces
- end
- isempty(chars) && break
- if lowercase(char) == lowercase(chars[1]) &&
- (!acronym || !isletter(lastc))
- push!(is, i)
- popfirst!(chars)
+ # Factor 2: Subsequence matching
+ subseq_score = subsequence_score(needle_lower, haystack_lower)
+
+ # Factor 3: Character-level similarity (improved edit distance)
+ char_score = character_similarity_score(needle_lower, haystack_lower)
+
+ # Factor 4: Case preservation bonus
+ case_score = case_preservation_score(needle, haystack)
+
+ # Factor 5: Length penalty for very long matches
+ length_penalty = length_penalty_score(needle, haystack)
+
+ # Weighted combination
+ base_score = 0.4 * prefix_score + 0.3 * subseq_score + 0.2 * char_score + 0.1 * case_score
+ final_score = base_score * length_penalty
+
+ return final_score
+end
+
+# Prefix matching: exact prefix gets maximum score
+function prefix_match_score(needle::AbstractString, haystack::AbstractString)
+ if startswith(haystack, needle)
+ return 1.0
+ elseif startswith(needle, haystack)
+ return 0.9 # Partial prefix match
+ else
+ # Check for prefix after common separators
+ for sep in ['_', '-', '.']
+ parts = split(haystack, sep)
+ for part in parts
+ if startswith(part, needle)
+ return 0.7 # Component prefix match
+ end
+ end
end
- lastc = char
+ return 0.0
end
- return is
end
-longer(x, y) = length(x) ≥ length(y) ? (x, true) : (y, false)
+# Subsequence matching with position weighting
+function subsequence_score(needle::AbstractString, haystack::AbstractString)
+ if isempty(needle)
+ return 1.0
+ end
-bestmatch(needle, haystack) =
- longer(matchinds(needle, haystack, acronym = true),
- matchinds(needle, haystack))
-
-# Optimal string distance: Counts the minimum number of insertions, deletions,
-# transpositions or substitutions to go from one string to the other.
-function string_distance(a::AbstractString, lena::Integer, b::AbstractString, lenb::Integer)
- if lena > lenb
- a, b = b, a
- lena, lenb = lenb, lena
+ needle_chars = collect(needle)
+ haystack_chars = collect(haystack)
+
+ matched_positions = Int[]
+ haystack_idx = 1
+
+ for needle_char in needle_chars
+ found = false
+ for i in haystack_idx:length(haystack_chars)
+ if haystack_chars[i] == needle_char
+ push!(matched_positions, i)
+ haystack_idx = i + 1
+ found = true
+ break
+ end
+ end
+ if !found
+ return 0.0
+ end
end
- start = 0
- for (i, j) in zip(a, b)
- if a == b
- start += 1
- else
- break
+
+ # Calculate score based on how clustered the matches are
+ if length(matched_positions) <= 1
+ return 1.0
+ end
+
+ # Penalize large gaps between matches
+ gaps = diff(matched_positions)
+ avg_gap = sum(gaps) / length(gaps)
+ gap_penalty = 1.0 / (1.0 + avg_gap / 3.0)
+
+ # Bonus for matches at word boundaries
+ boundary_bonus = 0.0
+ for pos in matched_positions
+ if pos == 1 || haystack_chars[pos - 1] in ['_', '-', '.']
+ boundary_bonus += 0.1
end
end
- start == lena && return lenb - start
- vzero = collect(1:(lenb - start))
- vone = similar(vzero)
- prev_a, prev_b = first(a), first(b)
- current = 0
- for (i, ai) in enumerate(a)
- i > start || (prev_a = ai; continue)
- left = i - start - 1
- current = i - start
- transition_next = 0
- for (j, bj) in enumerate(b)
- j > start || (prev_b = bj; continue)
- # No need to look beyond window of lower right diagonal
- above = current
- this_transition = transition_next
- transition_next = vone[j - start]
- vone[j - start] = current = left
- left = vzero[j - start]
- if ai != bj
- # Minimum between substitution, deletion and insertion
- current = min(current + 1, above + 1, left + 1)
- if i > start + 1 && j > start + 1 && ai == prev_b && prev_a == bj
- current = min(current, (this_transition += 1))
- end
+
+ coverage = length(needle) / length(haystack)
+ return min(1.0, gap_penalty + boundary_bonus) * coverage
+end
+
+# Improved character-level similarity
+function character_similarity_score(needle::AbstractString, haystack::AbstractString)
+ if isempty(needle) || isempty(haystack)
+ return 0.0
+ end
+
+ # Use Damerau-Levenshtein distance with character confusion weights
+ distance = weighted_edit_distance(needle, haystack)
+ max_len = max(length(needle), length(haystack))
+
+ return max(0.0, 1.0 - distance / max_len)
+end
+
+# Weighted edit distance accounting for common typos
+function weighted_edit_distance(s1::AbstractString, s2::AbstractString)
+
+ a, b = collect(s1), collect(s2)
+ m, n = length(a), length(b)
+
+ # Initialize distance matrix
+ d = Matrix{Float64}(undef, m + 1, n + 1)
+ d[1:(m + 1), 1] = 0:m
+ d[1, 1:(n + 1)] = 0:n
+
+ for i in 1:m, j in 1:n
+ if a[i] == b[j]
+ d[i + 1, j + 1] = d[i, j] # No cost for exact match
+ else
+ # Standard operations
+ insert_cost = d[i, j + 1] + 1.0
+ delete_cost = d[i + 1, j] + 1.0
+
+ # Check for repeated character deletion (common typo)
+ if i > 1 && a[i] == a[i - 1] && a[i - 1] == b[j]
+ delete_cost = d[i, j + 1] + 0.3 # Low cost for deleting repeated char
+ end
+
+ # Check for repeated character insertion (common typo)
+ if j > 1 && b[j] == b[j - 1] && a[i] == b[j - 1]
+ insert_cost = d[i, j + 1] + 0.3 # Low cost for inserting repeated char
+ end
+
+ # Substitution with confusion weighting
+ confusion_key = (a[i], b[j])
+ subst_cost = d[i, j] + get(CHARACTER_CONFUSIONS, confusion_key, 1.0)
+
+ d[i + 1, j + 1] = min(insert_cost, delete_cost, subst_cost)
+
+ # Transposition
+ if i > 1 && j > 1 && a[i] == b[j - 1] && a[i - 1] == b[j]
+ d[i + 1, j + 1] = min(d[i + 1, j + 1], d[i - 1, j - 1] + 1.0)
end
- vzero[j - start] = current
- prev_b = bj
end
- prev_a = ai
end
- current
-end
-function fuzzyscore(needle::AbstractString, haystack::AbstractString)
- lena, lenb = length(needle), length(haystack)
- 1 - (string_distance(needle, lena, haystack, lenb) / max(lena, lenb))
+ return d[m + 1, n + 1]
end
-function fuzzysort(search::String, candidates::Vector{String})
- scores = map(cand -> (FuzzySorting.fuzzyscore(search, cand), -Float64(FuzzySorting.levenshtein(search, cand))), candidates)
- candidates[sortperm(scores)] |> reverse, any(s -> s[1] >= print_score_threshold, scores)
+# Case preservation bonus
+function case_preservation_score(needle::AbstractString, haystack::AbstractString)
+ if isempty(needle) || isempty(haystack)
+ return 0.0
+ end
+
+ matches = 0
+ min_len = min(length(needle), length(haystack))
+
+ for i in 1:min_len
+ if needle[i] == haystack[i]
+ matches += 1
+ end
+ end
+
+ return matches / min_len
end
-# Levenshtein Distance
+# Length penalty for very long matches
+function length_penalty_score(needle::AbstractString, haystack::AbstractString)
+ needle_len = length(needle)
+ haystack_len = length(haystack)
-function levenshtein(s1, s2)
- a, b = collect(s1), collect(s2)
- m = length(a)
- n = length(b)
- d = Matrix{Int}(undef, m+1, n+1)
+ if needle_len == 0
+ return 0.0
+ end
+
+ # Strong preference for similar lengths
+ length_ratio = haystack_len / needle_len
+ length_diff = abs(haystack_len - needle_len)
- d[1:m+1, 1] = 0:m
- d[1, 1:n+1] = 0:n
+ # Bonus for very close lengths (within 1-2 characters)
+ if length_diff <= 1
+ return 1.1 # Small bonus for near-exact length
+ elseif length_diff <= 2
+ return 1.05
+ elseif length_ratio <= 1.5
+ return 1.0
+ elseif length_ratio <= 2.0
+ return 0.8
+ elseif length_ratio <= 3.0
+ return 0.6
+ else
+ return 0.4 # Heavy penalty for very long matches
+ end
+end
- for i = 1:m, j = 1:n
- d[i+1,j+1] = min(d[i , j+1] + 1,
- d[i+1, j ] + 1,
- d[i , j ] + (a[i] != b[j]))
+# Main sorting function with optional popularity weighting
+function fuzzysort(search::String, candidates::Vector{String}; popularity_weights::Dict{String, Float64} = Dict{String, Float64}())
+ scores = map(candidates) do cand
+ base_score = fuzzyscore(search, cand)
+ weight = get(popularity_weights, cand, 1.0)
+ score = base_score * weight
+ return (score, cand)
end
- return d[m+1, n+1]
+ # Sort by score descending, then by candidate name for ties
+ sorted_scores = sort(scores, by = x -> (-x[1], x[2]))
+
+ # Extract candidates and check if any meet threshold
+ result_candidates = [x[2] for x in sorted_scores]
+ has_good_matches = any(x -> x[1] >= print_score_threshold, sorted_scores)
+
+ return result_candidates, has_good_matches
end
-function levsort(search::String, candidates::Vector{String})
- scores = map(cand -> (Float64(levenshtein(search, cand)), -fuzzyscore(search, cand)), candidates)
- candidates = candidates[sortperm(scores)]
- i = 0
- for outer i = 1:length(candidates)
- levenshtein(search, candidates[i]) > 3 && break
+# Keep existing interface functions for compatibility
+function matchinds(needle, haystack; acronym::Bool = false)
+ chars = collect(needle)
+ is = Int[]
+ lastc = '\0'
+ for (i, char) in enumerate(haystack)
+ while !isempty(chars) && isspace(first(chars))
+ popfirst!(chars) # skip spaces
+ end
+ isempty(chars) && break
+ if lowercase(char) == lowercase(chars[1]) &&
+ (!acronym || !isletter(lastc))
+ push!(is, i)
+ popfirst!(chars)
+ end
+ lastc = char
end
- return candidates[1:i]
+ return is
end
-# Result printing
+longer(x, y) = length(x) ≥ length(y) ? (x, true) : (y, false)
+
+bestmatch(needle, haystack) =
+ longer(
+ matchinds(needle, haystack, acronym = true),
+ matchinds(needle, haystack)
+)
function printmatch(io::IO, word, match)
is, _ = bestmatch(word, match)
- for (i, char) = enumerate(match)
+ for (i, char) in enumerate(match)
if i in is
- printstyled(io, char, bold=true)
+ printstyled(io, char, bold = true)
else
print(io, char)
end
end
+ return
end
-const print_score_threshold = 0.5
+const print_score_threshold = 0.25
function printmatches(io::IO, word, matches; cols::Int = _displaysize(io)[2])
total = 0
@@ -148,29 +305,10 @@ function printmatches(io::IO, word, matches; cols::Int = _displaysize(io)[2])
printmatch(io, word, match)
total += length(match) + 1
end
+ return
end
printmatches(args...; cols::Int = _displaysize(stdout)[2]) = printmatches(stdout, args..., cols = cols)
-function print_joined_cols(io::IO, ss::Vector{String}, delim = "", last = delim; cols::Int = _displaysize(io)[2])
- i = 0
- total = 0
- for outer i = 1:length(ss)
- total += length(ss[i])
- total + max(i-2,0)*length(delim) + (i>1 ? 1 : 0)*length(last) > cols && (i-=1; break)
- end
- join(io, ss[1:i], delim, last)
-end
-
-print_joined_cols(args...; cols::Int = _displaysize(stdout)[2]) = print_joined_cols(stdout, args...; cols=cols)
-
-function print_correction(io::IO, word::String, mod::Module)
- cors = map(quote_spaces, levsort(word, accessible(mod)))
- pre = "Perhaps you meant "
- print(io, pre)
- print_joined_cols(io, cors, ", ", " or "; cols = _displaysize(io)[2] - length(pre))
- println(io)
- return
-end
end
diff --git a/src/generate.jl b/src/generate.jl
index 6134a1686c..8e317b039b 100644
--- a/src/generate.jl
+++ b/src/generate.jl
@@ -1,8 +1,8 @@
# This file is a part of Julia. License is MIT: https://julialang.org/license
-function generate(path::String; io::IO=stderr_f())
+function generate(path::String; io::IO = stderr_f())
base = basename(path)
- pkg = endswith(lowercase(base), ".jl") ? chop(base, tail=3) : base
+ pkg = endswith(lowercase(base), ".jl") ? chop(base, tail = 3) : base
Base.isidentifier(pkg) || pkgerror("$(repr(pkg)) is not a valid package name")
isdir(path) && pkgerror("$(abspath(path)) already exists")
printpkgstyle(io, :Generating, " project $pkg:")
@@ -38,7 +38,7 @@ function project(io::IO, pkg::AbstractString, dir::AbstractString)
name === nothing && (name = "Unknown")
if email === nothing
- for env in ["GIT_AUTHOR_EMAIL", "GIT_COMMITTER_EMAIL", "EMAIL"];
+ for env in ["GIT_AUTHOR_EMAIL", "GIT_COMMITTER_EMAIL", "EMAIL"]
email = get(ENV, env, nothing)
email !== nothing && break
end
@@ -48,23 +48,37 @@ function project(io::IO, pkg::AbstractString, dir::AbstractString)
uuid = UUIDs.uuid4()
genfile(io, dir, "Project.toml") do file_io
- toml = Dict{String,Any}("authors" => authors,
- "name" => pkg,
- "uuid" => string(uuid),
- "version" => "0.1.0",
- )
- TOML.print(file_io, toml, sorted=true, by=key -> (Types.project_key_order(key), key))
+ toml = Dict{String, Any}(
+ "authors" => authors,
+ "name" => pkg,
+ "uuid" => string(uuid),
+ "version" => "0.1.0",
+ )
+ TOML.print(file_io, toml, sorted = true, by = key -> (Types.project_key_order(key), key))
end
return uuid
end
function entrypoint(io::IO, pkg::AbstractString, dir)
- genfile(io, joinpath(dir, "src"), "$pkg.jl") do file_io
- print(file_io,
- """
+ return genfile(io, joinpath(dir, "src"), "$pkg.jl") do file_io
+ print(
+ file_io,
+ """
module $pkg
- greet() = print("Hello World!")
+ \"""
+ hello(who::String)
+
+ Return "Hello, `who`".
+ \"""
+ hello(who::String) = "Hello, \$who"
+
+ \"""
+ domath(x::Number)
+
+ Return `x + 5`.
+ \"""
+ domath(x::Number) = x + 5
end # module $pkg
"""
diff --git a/src/manifest.jl b/src/manifest.jl
index db04bdbe7f..7f35f4e0e4 100644
--- a/src/manifest.jl
+++ b/src/manifest.jl
@@ -19,7 +19,8 @@ function read_pinned(pinned)
end
function safe_SHA1(sha::String)
- try sha = SHA1(sha)
+ try
+ sha = SHA1(sha)
catch err
err isa ArgumentError || rethrow()
pkgerror("Could not parse `git-tree-sha1` field as a SHA.")
@@ -28,7 +29,8 @@ function safe_SHA1(sha::String)
end
function safe_uuid(uuid::String)::UUID
- try uuid = UUID(uuid)
+ try
+ uuid = UUID(uuid)
catch err
err isa ArgumentError || rethrow()
pkgerror("Could not parse `uuid` field as a UUID.")
@@ -37,7 +39,8 @@ function safe_uuid(uuid::String)::UUID
end
function safe_bool(bool::String)
- try bool = parse(Bool, bool)
+ try
+ bool = parse(Bool, bool)
catch err
err isa ArgumentError || rethrow()
pkgerror("Could not parse `pinned` field as a Bool.")
@@ -47,7 +50,8 @@ end
# note: returns raw version *not* parsed version
function safe_version(version::String)::VersionNumber
- try version = VersionNumber(version)
+ try
+ version = VersionNumber(version)
catch err
err isa ArgumentError || rethrow()
pkgerror("Could not parse `version` as a `VersionNumber`.")
@@ -73,52 +77,83 @@ function read_deps(deps::AbstractVector)
end
return ret
end
-function read_deps(raw::Dict{String, Any})::Dict{String,UUID}
- deps = Dict{String,UUID}()
+function read_deps(raw::Dict{String, Any})::Dict{String, UUID}
+ deps = Dict{String, UUID}()
for (name, uuid) in raw
deps[name] = safe_uuid(uuid)
end
return deps
end
+read_apps(::Nothing) = Dict{String, AppInfo}()
+read_apps(::Any) = pkgerror("Expected `apps` field to be a Dict")
+function read_apps(apps::Dict)
+ appinfos = Dict{String, AppInfo}()
+ for (appname, app) in apps
+ submodule = get(app, "submodule", nothing)
+ appinfo = AppInfo(
+ appname::String,
+ app["julia_command"]::String,
+ submodule,
+ app
+ )
+ appinfos[appinfo.name] = appinfo
+ end
+ return appinfos
+end
+
+read_exts(::Nothing) = Dict{String, Union{String, Vector{String}}}()
+function read_exts(raw::Dict{String, Any})
+ exts = Dict{String, Union{String, Vector{String}}}()
+ for (key, val) in raw
+ val isa Union{String, Vector{String}} || pkgerror("Expected `ext` entry to be a `Union{String, Vector{String}}`.")
+ exts[key] = val
+ end
+ return exts
+end
+
struct Stage1
uuid::UUID
entry::PackageEntry
- deps::Union{Vector{String}, Dict{String,UUID}}
- weakdeps::Union{Vector{String}, Dict{String,UUID}}
+ deps::Union{Vector{String}, Dict{String, UUID}}
+ weakdeps::Union{Vector{String}, Dict{String, UUID}}
end
-normalize_deps(name, uuid, deps, manifest; isext=false) = deps
-function normalize_deps(name, uuid, deps::Vector{String}, manifest::Dict{String,Vector{Stage1}}; isext=false)
+normalize_deps(name, uuid, deps, manifest; isext = false) = deps
+function normalize_deps(name, uuid, deps::Vector{String}, manifest::Dict{String, Vector{Stage1}}; isext = false)
if length(deps) != length(unique(deps))
pkgerror("Duplicate entry in `$name=$uuid`'s `deps` field.")
end
- final = Dict{String,UUID}()
+ final = Dict{String, UUID}()
for dep in deps
infos = get(manifest, dep, nothing)
if !isext
if infos === nothing
- pkgerror("`$name=$uuid` depends on `$dep`, ",
- "but no such entry exists in the manifest.")
+ pkgerror(
+ "`$name=$uuid` depends on `$dep`, ",
+ "but no such entry exists in the manifest."
+ )
end
end
# should have used dict format instead of vector format
if isnothing(infos) || length(infos) != 1
- pkgerror("Invalid manifest format. ",
- "`$name=$uuid`'s dependency on `$dep` is ambiguous.")
+ pkgerror(
+ "Invalid manifest format. ",
+ "`$name=$uuid`'s dependency on `$dep` is ambiguous."
+ )
end
final[dep] = infos[1].uuid
end
return final
end
-function validate_manifest(julia_version::Union{Nothing,VersionNumber}, project_hash::Union{Nothing,SHA1}, manifest_format::VersionNumber, stage1::Dict{String,Vector{Stage1}}, other::Dict{String, Any})
+function validate_manifest(julia_version::Union{Nothing, VersionNumber}, project_hash::Union{Nothing, SHA1}, manifest_format::VersionNumber, stage1::Dict{String, Vector{Stage1}}, other::Dict{String, Any})
# expand vector format deps
for (name, infos) in stage1, info in infos
info.entry.deps = normalize_deps(name, info.uuid, info.deps, stage1)
end
for (name, infos) in stage1, info in infos
- info.entry.weakdeps = normalize_deps(name, info.uuid, info.weakdeps, stage1; isext=true)
+ info.entry.weakdeps = normalize_deps(name, info.uuid, info.weakdeps, stage1; isext = true)
end
# invariant: all dependencies are now normalized to Dict{String,UUID}
deps = Dict{UUID, PackageEntry}()
@@ -132,12 +167,16 @@ function validate_manifest(julia_version::Union{Nothing,VersionNumber}, project_
dep_entry = get(deps, uuid, nothing)
if !isext
if dep_entry === nothing
- pkgerror("`$(entry.name)=$(entry_uuid)` depends on `$name=$uuid`, ",
- "but no such entry exists in the manifest.")
+ pkgerror(
+ "`$(entry.name)=$(entry_uuid)` depends on `$name=$uuid`, ",
+ "but no such entry exists in the manifest."
+ )
end
if dep_entry.name != name
- pkgerror("`$(entry.name)=$(entry_uuid)` depends on `$name=$uuid`, ",
- "but entry with UUID `$uuid` has name `$(dep_entry.name)`.")
+ pkgerror(
+ "`$(entry.name)=$(entry_uuid)` depends on `$name=$uuid`, ",
+ "but entry with UUID `$uuid` has name `$(dep_entry.name)`."
+ )
end
end
end
@@ -158,7 +197,7 @@ function Manifest(raw::Dict{String, Any}, f_or_io::Union{String, IO})::Manifest
@warn "Unknown Manifest.toml format version detected in file `$(f_or_io)`. Unexpected behavior may occur" manifest_format maxlog = 1 _id = Symbol(f_or_io)
end
end
- stage1 = Dict{String,Vector{Stage1}}()
+ stage1 = Dict{String, Vector{Stage1}}()
if haskey(raw, "deps") # deps field doesn't exist if there are no deps
deps_raw = raw["deps"]::Dict{String, Any}
for (name::String, infos) in deps_raw
@@ -171,18 +210,19 @@ function Manifest(raw::Dict{String, Any}, f_or_io::Union{String, IO})::Manifest
deps = nothing
weakdeps = nothing
try
- entry.pinned = read_pinned(get(info, "pinned", nothing))
- uuid = read_field("uuid", nothing, info, safe_uuid)::UUID
- entry.version = read_field("version", nothing, info, safe_version)
- entry.path = read_field("path", nothing, info, safe_path)
- entry.repo.source = read_field("repo-url", nothing, info, identity)
- entry.repo.rev = read_field("repo-rev", nothing, info, identity)
- entry.repo.subdir = read_field("repo-subdir", nothing, info, identity)
- entry.tree_hash = read_field("git-tree-sha1", nothing, info, safe_SHA1)
- entry.uuid = uuid
+ entry.pinned = read_pinned(get(info, "pinned", nothing))
+ uuid = read_field("uuid", nothing, info, safe_uuid)::UUID
+ entry.version = read_field("version", nothing, info, safe_version)
+ entry.path = read_field("path", nothing, info, safe_path)
+ entry.repo.source = read_field("repo-url", nothing, info, identity)
+ entry.repo.rev = read_field("repo-rev", nothing, info, identity)
+ entry.repo.subdir = read_field("repo-subdir", nothing, info, identity)
+ entry.tree_hash = read_field("git-tree-sha1", nothing, info, safe_SHA1)
+ entry.uuid = uuid
deps = read_deps(get(info::Dict, "deps", nothing)::Union{Nothing, Dict{String, Any}, Vector{String}})
weakdeps = read_deps(get(info::Dict, "weakdeps", nothing)::Union{Nothing, Dict{String, Any}, Vector{String}})
- entry.exts = get(Dict{String, String}, info, "extensions")
+ entry.apps = read_apps(get(info::Dict, "apps", nothing)::Union{Nothing, Dict{String, Any}})
+ entry.exts = read_exts(get(info, "extensions", nothing))
catch
# TODO: Should probably not unconditionally log something
# @debug "Could not parse manifest entry for `$name`" f_or_io
@@ -230,10 +270,10 @@ end
function convert_v1_format_manifest(old_raw_manifest::Dict)
new_raw_manifest = Dict{String, Any}(
- "deps" => old_raw_manifest,
- "manifest_format" => "1.0.0" # must be a string here to match raw dict
- # don't set julia_version as it is unknown in old manifests
- )
+ "deps" => old_raw_manifest,
+ "manifest_format" => "1.0.0" # must be a string here to match raw dict
+ # don't set julia_version as it is unknown in old manifests
+ )
return new_raw_manifest
end
@@ -241,24 +281,24 @@ end
# WRITING #
###########
function destructure(manifest::Manifest)::Dict
- function entry!(entry, key, value; default=nothing)
- if value == default
+ function entry!(entry, key, value; default = nothing)
+ return if value == default
delete!(entry, key)
else
entry[key] = value
end
end
- unique_name = Dict{String,Bool}()
+ unique_name = Dict{String, Bool}()
for (uuid, entry) in manifest
unique_name[entry.name] = !haskey(unique_name, entry.name)
end
# maintain the format of the manifest when writing
if manifest.manifest_format.major == 1
- raw = Dict{String,Vector{Dict{String,Any}}}()
+ raw = Dict{String, Vector{Dict{String, Any}}}()
elseif manifest.manifest_format.major == 2
- raw = Dict{String,Any}()
+ raw = Dict{String, Any}()
if !isnothing(manifest.julia_version)
raw["julia_version"] = manifest.julia_version
end
@@ -266,18 +306,21 @@ function destructure(manifest::Manifest)::Dict
raw["project_hash"] = manifest.project_hash
end
raw["manifest_format"] = string(manifest.manifest_format.major, ".", manifest.manifest_format.minor)
- raw["deps"] = Dict{String,Vector{Dict{String,Any}}}()
+ raw["deps"] = Dict{String, Vector{Dict{String, Any}}}()
for (k, v) in manifest.other
raw[k] = v
end
end
for (uuid, entry) in manifest
- new_entry = something(entry.other, Dict{String,Any}())
+ # https://github.com/JuliaLang/Pkg.jl/issues/4086
+ @assert !(entry.tree_hash !== nothing && entry.path !== nothing)
+
+ new_entry = something(entry.other, Dict{String, Any}())
new_entry["uuid"] = string(uuid)
entry!(new_entry, "version", entry.version)
entry!(new_entry, "git-tree-sha1", entry.tree_hash)
- entry!(new_entry, "pinned", entry.pinned; default=false)
+ entry!(new_entry, "pinned", entry.pinned; default = false)
path = entry.path
if path !== nothing && Sys.iswindows() && !isabspath(path)
path = join(splitpath(path), "/")
@@ -298,7 +341,7 @@ function destructure(manifest::Manifest)::Dict
if all(dep -> haskey(unique_name, first(dep)), deptype) && all(dep -> unique_name[first(dep)], deptype)
new_entry[depname] = sort(collect(keys(deptype)))
else
- new_entry[depname] = Dict{String,String}()
+ new_entry[depname] = Dict{String, String}()
for (name, uuid) in deptype
new_entry[depname][name] = string(uuid)
end
@@ -310,24 +353,39 @@ function destructure(manifest::Manifest)::Dict
if !isempty(entry.exts)
entry!(new_entry, "extensions", entry.exts)
end
+
+ if !isempty(entry.apps)
+ new_entry["apps"] = Dict{String, Any}()
+ for (appname, appinfo) in entry.apps
+ julia_command = @something appinfo.julia_command joinpath(Sys.BINDIR, "julia" * (Sys.iswindows() ? ".exe" : ""))
+ app_dict = Dict{String, Any}("julia_command" => julia_command)
+ if appinfo.submodule !== nothing
+ app_dict["submodule"] = appinfo.submodule
+ end
+ new_entry["apps"][appname] = app_dict
+ end
+ end
if manifest.manifest_format.major == 1
- push!(get!(raw, entry.name, Dict{String,Any}[]), new_entry)
+ push!(get!(raw, entry.name, Dict{String, Any}[]), new_entry)
elseif manifest.manifest_format.major == 2
- push!(get!(raw["deps"], entry.name, Dict{String,Any}[]), new_entry)
+ push!(get!(raw["deps"], entry.name, Dict{String, Any}[]), new_entry)
end
end
return raw
end
function write_manifest(env::EnvCache)
+ if env.project.readonly
+ pkgerror("Cannot write to readonly manifest file at $(env.manifest_file)")
+ end
mkpath(dirname(env.manifest_file))
- write_manifest(env.manifest, env.manifest_file)
+ return write_manifest(env.manifest, env.manifest_file)
end
function write_manifest(manifest::Manifest, manifest_file::AbstractString)
if manifest.manifest_format.major == 1
@warn """The active manifest file at `$(manifest_file)` has an old format that is being maintained.
- To update to the new format, which is supported by Julia versions ≥ 1.6.2, run `import Pkg; Pkg.upgrade_manifest()` which will upgrade the format without re-resolving.
- To then record the julia version re-resolve with `Pkg.resolve()` and if there are resolve conflicts consider `Pkg.update()`.""" maxlog = 1 _id = Symbol(manifest_file)
+ To update to the new format, which is supported by Julia versions ≥ 1.6.2, run `import Pkg; Pkg.upgrade_manifest()` which will upgrade the format without re-resolving.
+ To then record the julia version re-resolve with `Pkg.resolve()` and if there are resolve conflicts consider `Pkg.update()`.""" maxlog = 1 _id = Symbol(manifest_file)
end
return write_manifest(destructure(manifest), manifest_file)
end
@@ -336,7 +394,7 @@ function write_manifest(io::IO, manifest::Manifest)
end
function write_manifest(io::IO, raw_manifest::Dict)
print(io, "# This file is machine-generated - editing it directly is not advised\n\n")
- TOML.print(io, raw_manifest, sorted=true) do x
+ TOML.print(io, raw_manifest, sorted = true) do x
(typeof(x) in [String, Nothing, UUID, SHA1, VersionNumber]) && return string(x)
error("unhandled type `$(typeof(x))`")
end
@@ -344,7 +402,8 @@ function write_manifest(io::IO, raw_manifest::Dict)
end
function write_manifest(raw_manifest::Dict, manifest_file::AbstractString)
str = sprint(write_manifest, raw_manifest)
- write(manifest_file, str)
+ mkpath(dirname(manifest_file))
+ return write(manifest_file, str)
end
############
@@ -374,7 +433,7 @@ function check_manifest_julia_version_compat(manifest::Manifest, manifest_file::
return
end
end
- if Base.thisminor(v) != Base.thisminor(VERSION)
+ return if Base.thisminor(v) != Base.thisminor(VERSION)
msg = """The active manifest file has dependencies that were resolved with a different julia \
version ($(manifest.julia_version)). Unexpected behavior may occur."""
if julia_version_strict
diff --git a/src/precompile.jl b/src/precompile.jl
index 761f64efda..c95eda5ab8 100644
--- a/src/precompile.jl
+++ b/src/precompile.jl
@@ -150,8 +150,8 @@ let
end
Base.precompile(Tuple{typeof(Pkg.API.status)})
- Base.precompile(Tuple{typeof(Pkg.Types.read_project_compat),Base.Dict{String,Any},Pkg.Types.Project,},)
- Base.precompile(Tuple{typeof(Pkg.Versions.semver_interval),Base.RegexMatch})
+ Base.precompile(Tuple{typeof(Pkg.Types.read_project_compat), Base.Dict{String, Any}, Pkg.Types.Project})
+ Base.precompile(Tuple{typeof(Pkg.Versions.semver_interval), Base.RegexMatch})
Base.precompile(Tuple{typeof(Pkg.REPLMode.do_cmds), Array{Pkg.REPLMode.Command, 1}, Base.TTY})
@@ -159,6 +159,55 @@ let
Base.precompile(Tuple{Type{Pkg.REPLMode.QString}, String, Bool})
Base.precompile(Tuple{typeof(Pkg.REPLMode.parse_package), Array{Pkg.REPLMode.QString, 1}, Base.Dict{Symbol, Any}})
Base.precompile(Tuple{Type{Pkg.REPLMode.Command}, Pkg.REPLMode.CommandSpec, Base.Dict{Symbol, Any}, Array{Pkg.Types.PackageSpec, 1}})
+
+ # Manually added from trace compiling Pkg.status.
+ Base.precompile(Tuple{typeof(Core.kwcall), NamedTuple{(:color,), Tuple{Symbol}}, typeof(Base.printstyled), Base.IOContext{Base.GenericIOBuffer{Memory{UInt8}}}, Char})
+ Base.precompile(Tuple{typeof(Base.join), Base.GenericIOBuffer{Memory{UInt8}}, Tuple{UInt64}, Char})
+ Base.precompile(Tuple{typeof(Base.empty), Base.Dict{Any, Any}, Type{String}, Type{Base.UUID}})
+ Base.precompile(Tuple{typeof(Base.join), Base.GenericIOBuffer{Memory{UInt8}}, Tuple{UInt32}, Char})
+ Base.precompile(Tuple{typeof(Base.unsafe_read), Base.PipeEndpoint, Ptr{UInt8}, UInt64})
+ Base.precompile(Tuple{typeof(Base.readbytes!), Base.PipeEndpoint, Array{UInt8, 1}, Int64})
+ Base.precompile(Tuple{typeof(Base.closewrite), Base.PipeEndpoint})
+ Base.precompile(Tuple{typeof(Base.convert), Type{Base.Dict{String, Union{Array{String, 1}, String}}}, Base.Dict{String, Any}})
+ Base.precompile(Tuple{typeof(Base.map), Function, Array{Any, 1}})
+ Base.precompile(Tuple{Type{Array{Dates.DateTime, 1}}, UndefInitializer, Tuple{Int64}})
+ Base.precompile(Tuple{typeof(Base.maximum), Array{Dates.DateTime, 1}})
+ Base.precompile(Tuple{Type{Pair{A, B} where {B} where {A}}, String, Dates.DateTime})
+ Base.precompile(Tuple{typeof(Base.map), Function, Array{Base.Dict{String, Dates.DateTime}, 1}})
+ Base.precompile(Tuple{typeof(TOML.Internals.Printer.is_array_of_tables), Array{Base.Dict{String, Dates.DateTime}, 1}})
+ Base.precompile(Tuple{typeof(Core.kwcall), NamedTuple{(:indent, :sorted, :by, :inline_tables), Tuple{Int64, Bool, typeof(Base.identity), Base.IdSet{Base.Dict{String, V} where {V}}}}, typeof(TOML.Internals.Printer.print_table), Nothing, Base.IOStream, Base.Dict{String, Dates.DateTime}, Array{String, 1}})
+ Base.precompile(Tuple{typeof(Base.deepcopy_internal), Base.Dict{String, Base.UUID}, Base.IdDict{Any, Any}})
+ Base.precompile(Tuple{typeof(Base.deepcopy_internal), Base.Dict{String, Union{Array{String, 1}, String}}, Base.IdDict{Any, Any}})
+ Base.precompile(Tuple{typeof(Base.deepcopy_internal), Base.Dict{String, Array{String, 1}}, Base.IdDict{Any, Any}})
+ Base.precompile(Tuple{typeof(Base.deepcopy_internal), Base.Dict{String, Base.Dict{String, String}}, Base.IdDict{Any, Any}})
+ Base.precompile(Tuple{typeof(Base.deepcopy_internal), Tuple{String}, Base.IdDict{Any, Any}})
+ Base.precompile(Tuple{Type{Memory{Pkg.Types.PackageSpec}}, UndefInitializer, Int64})
+
+ # Manually added from trace compiling Pkg.add
+ # Why needed? Something with constant prop overspecialization?
+ Base.precompile(Tuple{typeof(Core.kwcall), NamedTuple{(:io, :update_cooldown), Tuple{Base.IOContext{IO}, Dates.Day}}, typeof(Pkg.Registry.update)})
+
+ Base.precompile(Tuple{Type{Memory{Pkg.Types.PackageSpec}}, UndefInitializer, Int64})
+ Base.precompile(Tuple{typeof(Base.hash), Tuple{String, UInt64}, UInt64})
+ Base.precompile(Tuple{typeof(Core.kwcall), NamedTuple{(:context,), Tuple{Base.TTY}}, typeof(Base.sprint), Function, Tuple{Pkg.Versions.VersionSpec}})
+ Base.precompile(Tuple{typeof(Core.kwcall), NamedTuple{(:context,), Tuple{Base.TTY}}, typeof(Base.sprint), Function, Tuple{String}})
+ Base.precompile(Tuple{typeof(Core.kwcall), NamedTuple{(:context,), Tuple{Base.TTY}}, typeof(Base.sprint), Function, Tuple{Base.VersionNumber}})
+ Base.precompile(Tuple{typeof(Base.join), Base.IOContext{Base.GenericIOBuffer{Memory{UInt8}}}, Tuple{String, UInt64}, Char})
+ Base.precompile(Tuple{typeof(Base.vcat), Base.BitArray{2}, Base.BitArray{2}})
+ Base.precompile(Tuple{typeof(Base.vcat), Base.BitArray{2}})
+ Base.precompile(Tuple{typeof(Base.vcat), Base.BitArray{2}, Base.BitArray{2}, Base.BitArray{2}})
+ Base.precompile(Tuple{typeof(Base.vcat), Base.BitArray{2}, Base.BitArray{2}, Base.BitArray{2}, Vararg{Base.BitArray{2}}})
+ Base.precompile(Tuple{typeof(Base.vcat), Base.BitArray{1}, Base.BitArray{1}})
+ Base.precompile(Tuple{typeof(Base.vcat), Base.BitArray{1}, Base.BitArray{1}, Base.BitArray{1}, Vararg{Base.BitArray{1}}})
+ Base.precompile(Tuple{typeof(Base.:(==)), Base.Dict{String, Any}, Base.Dict{String, Any}})
+ Base.precompile(Tuple{typeof(Base.join), Base.GenericIOBuffer{Memory{UInt8}}, Tuple{String}, Char})
+ Base.precompile(Tuple{typeof(Base.values), Base.Dict{String, Array{Base.Dict{String, Any}, 1}}})
+ Base.precompile(Tuple{typeof(Base.all), Base.Generator{Base.ValueIterator{Base.Dict{String, Array{Base.Dict{String, Any}, 1}}}, TOML.Internals.Printer.var"#5#6"}})
+ Base.precompile(Tuple{typeof(TOML.Internals.Printer.is_array_of_tables), Array{Base.Dict{String, Any}, 1}})
+ Base.precompile(Tuple{Type{Array{Dates.DateTime, 1}}, UndefInitializer, Tuple{Int64}})
+ Base.precompile(Tuple{Type{Pair{A, B} where {B} where {A}}, String, Dates.DateTime})
+ Base.precompile(Tuple{typeof(Core.kwcall), NamedTuple{(:internal_call, :strict, :warn_loaded, :timing, :_from_loading, :configs, :manifest, :io), Tuple{Bool, Bool, Bool, Bool, Bool, Pair{Base.Cmd, Base.CacheFlags}, Bool, Base.TTY}}, typeof(Base.Precompilation.precompilepkgs), Array{String, 1}})
+ ################
end
copy!(DEPOT_PATH, original_depot_path)
copy!(LOAD_PATH, original_load_path)
@@ -175,6 +224,7 @@ let
end
check_edges(child)
end
+ return
end
if Base.generating_output() && Base.JLOptions().use_pkgimages != 0
diff --git a/src/project.jl b/src/project.jl
index f7a7e83757..5250d66111 100644
--- a/src/project.jl
+++ b/src/project.jl
@@ -9,9 +9,9 @@ function get_path_repo(project::Project, name::String)
if source === nothing
return nothing, GitRepo()
end
- path = get(source, "path", nothing)::Union{String, Nothing}
- url = get(source, "url", nothing)::Union{String, Nothing}
- rev = get(source, "rev", nothing)::Union{String, Nothing}
+ path = get(source, "path", nothing)::Union{String, Nothing}
+ url = get(source, "url", nothing)::Union{String, Nothing}
+ rev = get(source, "rev", nothing)::Union{String, Nothing}
subdir = get(source, "subdir", nothing)::Union{String, Nothing}
if path !== nothing && url !== nothing
pkgerror("`path` and `url` are conflicting specifications")
@@ -25,7 +25,8 @@ end
###########
read_project_uuid(::Nothing) = nothing
function read_project_uuid(uuid::String)
- try uuid = UUID(uuid)
+ try
+ uuid = UUID(uuid)
catch err
err isa ArgumentError || rethrow()
pkgerror("Could not parse project UUID as a UUID")
@@ -36,7 +37,8 @@ read_project_uuid(uuid) = pkgerror("Expected project UUID to be a string")
read_project_version(::Nothing) = nothing
function read_project_version(version::String)
- try version = VersionNumber(version)
+ return try
+ version = VersionNumber(version)
catch err
err isa ArgumentError || rethrow()
pkgerror("Could not parse project version as a version")
@@ -44,9 +46,9 @@ function read_project_version(version::String)
end
read_project_version(version) = pkgerror("Expected project version to be a string")
-read_project_deps(::Nothing, section::String) = Dict{String,UUID}()
-function read_project_deps(raw::Dict{String,Any}, section_name::String)
- deps = Dict{String,UUID}()
+read_project_deps(::Nothing, section::String) = Dict{String, UUID}()
+function read_project_deps(raw::Dict{String, Any}, section_name::String)
+ deps = Dict{String, UUID}()
for (name, uuid) in raw
try
uuid = UUID(uuid)
@@ -62,38 +64,59 @@ function read_project_deps(raw, section_name::String)
pkgerror("Expected `$(section_name)` section to be a key-value list")
end
-read_project_targets(::Nothing, project::Project) = Dict{String,Any}()
-function read_project_targets(raw::Dict{String,Any}, project::Project)
+read_project_targets(::Nothing, project::Project) = Dict{String, Vector{String}}()
+function read_project_targets(raw::Dict{String, Any}, project::Project)
+ targets = Dict{String, Vector{String}}()
for (target, deps) in raw
- deps isa Vector{String} || pkgerror("""
- Expected value for target `$target` to be a list of dependency names.
- """)
+ deps isa Vector{String} || pkgerror(
+ """
+ Expected value for target `$target` to be a list of dependency names.
+ """
+ )
+ targets[target] = deps
end
- return raw
+ return targets
end
read_project_targets(raw, project::Project) =
pkgerror("Expected `targets` section to be a key-value list")
-read_project_compat(::Nothing, project::Project) = Dict{String,Compat}()
-function read_project_compat(raw::Dict{String,Any}, project::Project)
- compat = Dict{String,Compat}()
+read_project_apps(::Nothing, project::Project) = Dict{String, AppInfo}()
+function read_project_apps(raw::Dict{String, Any}, project::Project)
+ other = raw
+ appinfos = Dict{String, AppInfo}()
+ for (name, info) in raw
+ info isa Dict{String, Any} || pkgerror(
+ """
+ Expected value for app `$name` to be a dictionary.
+ """
+ )
+ submodule = get(info, "submodule", nothing)
+ appinfos[name] = AppInfo(name, nothing, submodule, other)
+ end
+ return appinfos
+end
+
+read_project_compat(::Nothing, project::Project; file = nothing) = Dict{String, Compat}()
+function read_project_compat(raw::Dict{String, Any}, project::Project; file = nothing)
+ compat = Dict{String, Compat}()
+ location_string = file === nothing ? "" : " in $(repr(file))"
for (name, version) in raw
version = version::String
try
compat[name] = Compat(semver_spec(version), version)
catch err
- pkgerror("Could not parse compatibility version for dependency `$name`")
+ pkgerror("Could not parse compatibility version spec $(repr(version)) for dependency `$name`$location_string")
end
end
return compat
end
-read_project_compat(raw, project::Project) =
- pkgerror("Expected `compat` section to be a key-value list")
+read_project_compat(raw, project::Project; file = nothing) =
+ pkgerror("Expected `compat` section to be a key-value list" * (file === nothing ? "" : " in $(repr(file))"))
-read_project_sources(::Nothing, project::Project) = Dict{String,Any}()
-function read_project_sources(raw::Dict{String,Any}, project::Project)
- valid_keys = ("path", "url", "rev")
- sources = Dict{String,Any}()
+read_project_sources(::Nothing, project::Project) = Dict{String, Dict{String, String}}()
+function read_project_sources(raw::Dict{String, Any}, project::Project)
+ valid_keys = ("path", "url", "rev", "subdir")
+ sources = Dict{String, Dict{String, String}}()
for (name, source) in raw
if !(source isa AbstractDict)
pkgerror("Expected `source` section to be a table")
@@ -109,9 +132,9 @@ function read_project_sources(raw::Dict{String,Any}, project::Project)
return sources
end
-read_project_workspace(::Nothing, project::Project) = Dict{String,Any}()
+read_project_workspace(::Nothing, project::Project) = Dict{String, Any}()
function read_project_workspace(raw::Dict, project::Project)
- workspace_table = Dict{String,Any}()
+ workspace_table = Dict{String, Any}()
for (key, val) in raw
if key == "projects"
for path in val
@@ -128,7 +151,7 @@ read_project_workspace(raw, project::Project) =
pkgerror("Expected `workspace` section to be a key-value list")
-function validate(project::Project; file=nothing)
+function validate(project::Project; file = nothing)
# deps
location_string = file === nothing ? "" : " at $(repr(file))."
dep_uuids = collect(values(project.deps))
@@ -154,14 +177,16 @@ function validate(project::Project; file=nothing)
end
=#
# targets
- listed = listed_deps(project; include_weak=true)
+ listed = listed_deps(project; include_weak = true)
for (target, deps) in project.targets, dep in deps
if length(deps) != length(unique(deps))
pkgerror("A dependency was named twice in target `$target`")
end
- dep in listed || pkgerror("""
+ dep in listed || pkgerror(
+ """
Dependency `$dep` in target `$target` not listed in `deps`, `weakdeps` or `extras` section
- """ * location_string)
+ """ * location_string
+ )
end
# compat
for name in keys(project.compat)
@@ -169,37 +194,40 @@ function validate(project::Project; file=nothing)
name in listed ||
pkgerror("Compat `$name` not listed in `deps`, `weakdeps` or `extras` section" * location_string)
end
- # sources
- listed_nonweak = listed_deps(project; include_weak=false)
- for name in keys(project.sources)
+ # sources
+ listed_nonweak = listed_deps(project; include_weak = false)
+ for name in keys(project.sources)
name in listed_nonweak ||
pkgerror("Sources for `$name` not listed in `deps` or `extras` section" * location_string)
end
+ return
end
-function Project(raw::Dict; file=nothing)
+function Project(raw::Dict; file = nothing)
project = Project()
- project.other = raw
- project.name = get(raw, "name", nothing)::Union{String, Nothing}
+ project.other = raw
+ project.name = get(raw, "name", nothing)::Union{String, Nothing}
project.manifest = get(raw, "manifest", nothing)::Union{String, Nothing}
- project.entryfile = get(raw, "path", nothing)::Union{String, Nothing}
+ project.entryfile = get(raw, "path", nothing)::Union{String, Nothing}
if project.entryfile === nothing
project.entryfile = get(raw, "entryfile", nothing)::Union{String, Nothing}
end
- project.uuid = read_project_uuid(get(raw, "uuid", nothing))
- project.version = read_project_version(get(raw, "version", nothing))
- project.deps = read_project_deps(get(raw, "deps", nothing), "deps")
+ project.uuid = read_project_uuid(get(raw, "uuid", nothing))
+ project.version = read_project_version(get(raw, "version", nothing))
+ project.deps = read_project_deps(get(raw, "deps", nothing), "deps")
project.weakdeps = read_project_deps(get(raw, "weakdeps", nothing), "weakdeps")
- project.exts = get(Dict{String, String}, raw, "extensions")
- project.sources = read_project_sources(get(raw, "sources", nothing), project)
- project.extras = read_project_deps(get(raw, "extras", nothing), "extras")
- project.compat = read_project_compat(get(raw, "compat", nothing), project)
- project.targets = read_project_targets(get(raw, "targets", nothing), project)
+ project.exts = get(Dict{String, String}, raw, "extensions")
+ project.sources = read_project_sources(get(raw, "sources", nothing), project)
+ project.extras = read_project_deps(get(raw, "extras", nothing), "extras")
+ project.compat = read_project_compat(get(raw, "compat", nothing), project; file)
+ project.targets = read_project_targets(get(raw, "targets", nothing), project)
project.workspace = read_project_workspace(get(raw, "workspace", nothing), project)
+ project.apps = read_project_apps(get(raw, "apps", nothing), project)
+ project.readonly = get(raw, "readonly", false)::Bool
# Handle deps in both [deps] and [weakdeps]
project._deps_weak = Dict(intersect(project.deps, project.weakdeps))
- filter!(p->!haskey(project._deps_weak, p.first), project.deps)
+ filter!(p -> !haskey(project._deps_weak, p.first), project.deps)
validate(project; file)
return project
end
@@ -217,7 +245,7 @@ function read_project(f_or_io::Union{String, IO})
end
pkgerror("Errored when reading $f_or_io, got: ", sprint(showerror, e))
end
- return Project(raw; file= f_or_io isa IO ? nothing : f_or_io)
+ return Project(raw; file = f_or_io isa IO ? nothing : f_or_io)
end
@@ -237,32 +265,42 @@ function destructure(project::Project)::Dict
# if a field is set to its default value, don't include it in the write
function entry!(key::String, src)
should_delete(x::Dict) = isempty(x)
- should_delete(x) = x === nothing
- should_delete(src) ? delete!(raw, key) : (raw[key] = src)
+ should_delete(x) = x === nothing
+ return should_delete(src) ? delete!(raw, key) : (raw[key] = src)
end
- entry!("name", project.name)
- entry!("uuid", project.uuid)
- entry!("version", project.version)
+ entry!("name", project.name)
+ entry!("uuid", project.uuid)
+ entry!("version", project.version)
entry!("workspace", project.workspace)
entry!("manifest", project.manifest)
- entry!("entryfile", project.entryfile)
- entry!("deps", merge(project.deps, project._deps_weak))
+ entry!("entryfile", project.entryfile)
+ entry!("deps", merge(project.deps, project._deps_weak))
entry!("weakdeps", project.weakdeps)
- entry!("sources", project.sources)
- entry!("extras", project.extras)
- entry!("compat", Dict(name => x.str for (name, x) in project.compat))
- entry!("targets", project.targets)
+ entry!("sources", project.sources)
+ entry!("extras", project.extras)
+ entry!("compat", Dict(name => x.str for (name, x) in project.compat))
+ entry!("targets", project.targets)
+
+ # Only write readonly if it's true (not the default false)
+ if project.readonly
+ raw["readonly"] = true
+ else
+ delete!(raw, "readonly")
+ end
+
return raw
end
-const _project_key_order = ["name", "uuid", "keywords", "license", "desc", "version", "workspace", "deps", "weakdeps", "sources", "extensions", "compat"]
+const _project_key_order = ["name", "uuid", "keywords", "license", "desc", "version", "readonly", "workspace", "deps", "weakdeps", "sources", "extensions", "compat"]
project_key_order(key::String) =
something(findfirst(x -> x == key, _project_key_order), length(_project_key_order) + 1)
function write_project(env::EnvCache)
- mkpath(dirname(env.project_file))
- write_project(env.project, env.project_file)
+ if env.project.readonly
+ pkgerror("Cannot write to readonly project file at $(env.project_file)")
+ end
+ return write_project(env.project, env.project_file)
end
write_project(project::Project, project_file::AbstractString) =
write_project(destructure(project), project_file)
@@ -274,7 +312,7 @@ function write_project(io::IO, project::Dict)
push!(inline_tables, source)
end
end
- TOML.print(io, project; inline_tables, sorted=true, by=key -> (project_key_order(key), key)) do x
+ TOML.print(io, project; inline_tables, sorted = true, by = key -> (project_key_order(key), key)) do x
x isa UUID || x isa VersionNumber || pkgerror("unhandled type `$(typeof(x))`")
return string(x)
end
@@ -282,5 +320,6 @@ function write_project(io::IO, project::Dict)
end
function write_project(project::Dict, project_file::AbstractString)
str = sprint(write_project, project)
- write(project_file, str)
+ mkpath(dirname(project_file))
+ return write(project_file, str)
end
diff --git a/src/utils.jl b/src/utils.jl
index 12826de397..b4b9054ef2 100644
--- a/src/utils.jl
+++ b/src/utils.jl
@@ -1,9 +1,12 @@
+# "Precompiling" is the longest operation
+const pkgstyle_indent = textwidth(string(:Precompiling))
-function printpkgstyle(io::IO, cmd::Symbol, text::String, ignore_indent::Bool=false; color=:green)
- indent = textwidth(string(:Precompiling)) # "Precompiling" is the longest operation
- ignore_indent && (indent = 0)
- printstyled(io, lpad(string(cmd), indent), color=color, bold=true)
- println(io, " ", text)
+function printpkgstyle(io::IO, cmd::Symbol, text::String, ignore_indent::Bool = false; color = :green)
+ indent = ignore_indent ? 0 : pkgstyle_indent
+ return @lock io begin
+ printstyled(io, lpad(string(cmd), indent), color = color, bold = true)
+ println(io, " ", text)
+ end
end
function linewrap(str::String; io = stdout_f(), padding = 0, width = Base.displaysize(io)[2])
@@ -61,9 +64,62 @@ function set_readonly(path)
end
set_readonly(::Nothing) = nothing
+"""
+ mv_temp_dir_retries(temp_dir::String, new_path::String; set_permissions::Bool=true)::Nothing
+
+Either rename the directory at `temp_dir` to `new_path` and set it to read-only
+or if `new_path` already exists try to do nothing. Both `temp_dir` and `new_path` must
+be on the same filesystem.
+"""
+function mv_temp_dir_retries(temp_dir::String, new_path::String; set_permissions::Bool = true)::Nothing
+ # Sometimes a rename can fail because the temp_dir is locked by
+ # anti-virus software scanning the new files.
+ # In this case we want to sleep and try again.
+ # I am using the list of error codes to retry from:
+ # https://github.com/isaacs/node-graceful-fs/blob/234379906b7d2f4c9cfeb412d2516f42b0fb4953/polyfills.js#L87
+ # Retry for up to about 60 seconds by retrying 20 times with exponential backoff.
+ retry = 0
+ max_num_retries = 20 # maybe this should be configurable?
+ sleep_amount = 0.01 # seconds
+ max_sleep_amount = 5.0 # seconds
+ while true
+ isdir(new_path) && return
+ # This next step is like
+ # `mv(temp_dir, new_path)`.
+ # However, `mv` defaults to `cp` if `rename` returns an error.
+ # `cp` is not atomic, so avoid the potential of calling it.
+ err = ccall(:jl_fs_rename, Int32, (Cstring, Cstring), temp_dir, new_path)
+ if err ≥ 0
+ if set_permissions
+ # rename worked
+ new_path_mode = filemode(dirname(new_path))
+ if Sys.iswindows()
+ # If this is Windows, ensure the directory mode is executable,
+ # as `filemode()` is incomplete. Some day, that may not be the
+ # case, there exists a test that will fail if this is changes.
+ new_path_mode |= 0o111
+ end
+ chmod(new_path, new_path_mode)
+ set_readonly(new_path)
+ end
+ return
+ else
+ # Ignore rename error if `new_path` exists.
+ isdir(new_path) && return
+ if retry < max_num_retries && err ∈ (Base.UV_EACCES, Base.UV_EPERM, Base.UV_EBUSY)
+ sleep(sleep_amount)
+ sleep_amount = min(sleep_amount * 2.0, max_sleep_amount)
+ retry += 1
+ else
+ Base.uv_error("rename of $(repr(temp_dir)) to $(repr(new_path))", err)
+ end
+ end
+ end
+ return
+end
+
# try to call realpath on as much as possible
function safe_realpath(path)
- isempty(path) && return path
if ispath(path)
try
return realpath(path)
@@ -72,31 +128,73 @@ function safe_realpath(path)
end
end
a, b = splitdir(path)
+ # path cannot be reduced at the root or drive, avoid stack overflow
+ isempty(b) && return path
return joinpath(safe_realpath(a), b)
end
# Windows sometimes throw on `isdir`...
function isdir_nothrow(path::String)
- try isdir(path)
+ return try
+ isdir(path)
catch e
false
end
end
function isfile_nothrow(path::String)
- try isfile(path)
+ return try
+ isfile(path)
catch e
false
end
end
-function casesensitive_isdir(dir::String)
- dir = abspath(dir)
- lastdir = splitpath(dir)[end]
- isdir_nothrow(dir) && lastdir in readdir(joinpath(dir, ".."))
+
+"""
+ atomic_toml_write(path::String, data; kws...)
+
+Write TOML data to a file atomically by first writing to a temporary file and then moving it into place.
+This prevents "teared" writes if the process is interrupted or if multiple processes write to the same file.
+
+The `kws` are passed to `TOML.print`.
+"""
+function atomic_toml_write(path::String, data; kws...)
+ dir = dirname(path)
+ isempty(dir) && (dir = pwd())
+
+ temp_path, temp_io = mktemp(dir)
+ return try
+ TOML.print(temp_io, data; kws...)
+ close(temp_io)
+ mv(temp_path, path; force = true)
+ catch
+ close(temp_io)
+ rm(temp_path; force = true)
+ rethrow()
+ end
end
## ordering of UUIDs ##
if VERSION < v"1.2.0-DEV.269" # Defined in Base as of #30947
Base.isless(a::UUID, b::UUID) = a.value < b.value
end
+
+function discover_repo(path::AbstractString)
+ dir = abspath(path)
+ stop_dir = homedir()
+ depot = Pkg.depots1()
+
+ while true
+ dir == depot && return nothing
+ gitdir = joinpath(dir, ".git")
+ if isdir(gitdir) || isfile(gitdir)
+ return dir
+ end
+ dir == stop_dir && return nothing
+ parent = dirname(dir)
+ parent == dir && return nothing
+ dir = parent
+ end
+ return
+end
diff --git a/test/FakeTerminals.jl b/test/FakeTerminals.jl
index 1fe587b144..c359142c21 100644
--- a/test/FakeTerminals.jl
+++ b/test/FakeTerminals.jl
@@ -8,8 +8,8 @@ mutable struct FakeTerminal <: REPL.Terminals.UnixTerminal
err_stream::Base.IO
hascolor::Bool
raw::Bool
- FakeTerminal(stdin,stdout,stderr,hascolor=true) =
- new(stdin,stdout,stderr,hascolor,false)
+ FakeTerminal(stdin, stdout, stderr, hascolor = true) =
+ new(stdin, stdout, stderr, hascolor, false)
end
REPL.Terminals.hascolor(t::FakeTerminal) = t.hascolor
diff --git a/test/NastyGenerator.jl b/test/NastyGenerator.jl
index 16d52beee1..06fd22a635 100644
--- a/test/NastyGenerator.jl
+++ b/test/NastyGenerator.jl
@@ -29,34 +29,35 @@ Note that the "problematic" output assumes that all non-planted versions will be
uninstallable, which is only the case for some regimes of the parameters (e.g. large
enough d).
"""
-function generate_nasty(n::Int, # size of planted solutions
- m::Int; # size of the graph
- k::Int = 10, # version number limit
- q::Int = 10, # versions per package (upper bound)
- d::Int = 10, # neighbors per package
- seed::Integer = 32524,
- sat::Bool = true # create a satisfiable problem?
- )
+function generate_nasty(
+ n::Int, # size of planted solutions
+ m::Int; # size of the graph
+ k::Int = 10, # version number limit
+ q::Int = 10, # versions per package (upper bound)
+ d::Int = 10, # neighbors per package
+ seed::Integer = 32524,
+ sat::Bool = true # create a satisfiable problem?
+ )
@assert m ≥ n
- d ≤ m-1 || @warn "d=$d, should be ≤ m-1=$(m-1)"
+ d ≤ m - 1 || @warn "d=$d, should be ≤ m-1=$(m - 1)"
Random.seed!(seed)
- allvers = [sort(unique(randvers(k) for j = 1:q)) for i = 1:m]
+ allvers = [sort(unique(randvers(k) for j in 1:q)) for i in 1:m]
- planted1 = [rand(2:length(allvers[i])) for i = 1:n]
+ planted1 = [rand(2:length(allvers[i])) for i in 1:n]
- planted2 = [rand(1:(planted1[i]-1)) for i = 1:n]
+ planted2 = [rand(1:(planted1[i] - 1)) for i in 1:n]
deps = []
problematic = []
# random dependencies
- for i = 1:m, j = 1:length(allvers[i])
+ for i in 1:m, j in 1:length(allvers[i])
if i ≤ n && (planted1[i] == j || planted2[i] == j)
if j == planted1[i]
if i < n
- push!(deps, [pn(i), allvers[i][j], pn(i+1), "$(allvers[i+1][planted1[i+1]])-*"])
+ push!(deps, [pn(i), allvers[i][j], pn(i + 1), "$(allvers[i + 1][planted1[i + 1]])-*"])
else
if !sat
push!(deps, [pn(i), allvers[i][j], pn(1), "0-$(allvers[1][planted2[1]])"])
@@ -66,7 +67,7 @@ function generate_nasty(n::Int, # size of planted solutions
end
else # j == planted2[i]
if i < n
- push!(deps, [pn(i), allvers[i][j], pn(i+1), "0-$(allvers[i+1][planted2[i+1]])"])
+ push!(deps, [pn(i), allvers[i][j], pn(i + 1), "0-$(allvers[i + 1][planted2[i + 1]])"])
else
if !sat
push!(deps, [pn(i), allvers[i][j], pn(1), "$(allvers[1][planted1[1]])-*"])
@@ -79,7 +80,7 @@ function generate_nasty(n::Int, # size of planted solutions
continue
end
- s = shuffle([1:(i-1); (i+1):m])[1:min(d,m-1)]
+ s = shuffle([1:(i - 1); (i + 1):m])[1:min(d, m - 1)]
for a in s
push!(deps, [pn(i), allvers[i][j], pn(a), randvspec(k)])
end
@@ -92,7 +93,7 @@ function generate_nasty(n::Int, # size of planted solutions
# info("SOLUTION: $([(i,planted1[i]) for i = 1:n])")
# info("REST: $([(i,length(allvers[i])+1) for i = (n+1):m])")
- want = Dict(pn(i) => allvers[i][planted1[i]] for i = 1:n)
+ want = Dict(pn(i) => allvers[i][planted1[i]] for i in 1:n)
return deps, reqs, want, problematic
end
diff --git a/test/Project.toml b/test/Project.toml
index 0922624374..4760acba31 100644
--- a/test/Project.toml
+++ b/test/Project.toml
@@ -1,4 +1,5 @@
[deps]
+Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595"
Dates = "ade2ca70-3891-5945-98fb-dc099432e06a"
HistoricalStdlibVersions = "6df8b67a-e8a0-4029-b4b7-ac196fe72102"
LibGit2 = "76f85450-5226-5b5a-8eaa-529ad045b433"
@@ -15,4 +16,5 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
UUIDs = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
[compat]
+Aqua = "0.8.10"
HistoricalStdlibVersions = "2"
diff --git a/test/api.jl b/test/api.jl
index 242b5af2e5..95ad2b4169 100644
--- a/test/api.jl
+++ b/test/api.jl
@@ -10,37 +10,39 @@ using UUIDs
using ..Utils
@testset "Pkg.activate" begin
- isolate() do; cd_tempdir() do tmp
- path = pwd()
- Pkg.activate(".")
- mkdir("Foo")
- cd(mkdir("modules")) do
- Pkg.generate("Foo")
+ isolate() do;
+ cd_tempdir() do tmp
+ path = pwd()
+ Pkg.activate(".")
+ mkdir("Foo")
+ cd(mkdir("modules")) do
+ Pkg.generate("Foo")
+ end
+ Pkg.develop(Pkg.PackageSpec(path = "modules/Foo")) # to avoid issue #542
+ Pkg.activate("Foo") # activate path Foo over deps Foo
+ @test Base.active_project() == joinpath(path, "Foo", "Project.toml")
+ Pkg.activate(".")
+ rm("Foo"; force = true, recursive = true)
+ Pkg.activate("Foo") # activate path from developed Foo
+ @test Base.active_project() == joinpath(path, "modules", "Foo", "Project.toml")
+ Pkg.activate(".")
+ Pkg.activate("./Foo") # activate empty directory Foo (sidestep the developed Foo)
+ @test Base.active_project() == joinpath(path, "Foo", "Project.toml")
+ Pkg.activate(".")
+ Pkg.activate("Bar") # activate empty directory Bar
+ @test Base.active_project() == joinpath(path, "Bar", "Project.toml")
+ Pkg.activate(".")
+ Pkg.add("Example") # non-deved deps should not be activated
+ Pkg.activate("Example")
+ @test Base.active_project() == joinpath(path, "Example", "Project.toml")
+ Pkg.activate(".")
+ cd(mkdir("tests"))
+ Pkg.activate("Foo") # activate developed Foo from another directory
+ @test Base.active_project() == joinpath(path, "modules", "Foo", "Project.toml")
+ Pkg.activate() # activate LOAD_PATH project
+ @test Base.ACTIVE_PROJECT[] === nothing
end
- Pkg.develop(Pkg.PackageSpec(path="modules/Foo")) # to avoid issue #542
- Pkg.activate("Foo") # activate path Foo over deps Foo
- @test Base.active_project() == joinpath(path, "Foo", "Project.toml")
- Pkg.activate(".")
- rm("Foo"; force=true, recursive=true)
- Pkg.activate("Foo") # activate path from developed Foo
- @test Base.active_project() == joinpath(path, "modules", "Foo", "Project.toml")
- Pkg.activate(".")
- Pkg.activate("./Foo") # activate empty directory Foo (sidestep the developed Foo)
- @test Base.active_project() == joinpath(path, "Foo", "Project.toml")
- Pkg.activate(".")
- Pkg.activate("Bar") # activate empty directory Bar
- @test Base.active_project() == joinpath(path, "Bar", "Project.toml")
- Pkg.activate(".")
- Pkg.add("Example") # non-deved deps should not be activated
- Pkg.activate("Example")
- @test Base.active_project() == joinpath(path, "Example", "Project.toml")
- Pkg.activate(".")
- cd(mkdir("tests"))
- Pkg.activate("Foo") # activate developed Foo from another directory
- @test Base.active_project() == joinpath(path, "modules", "Foo", "Project.toml")
- Pkg.activate() # activate LOAD_PATH project
- @test Base.ACTIVE_PROJECT[] === nothing
- end end
+ end
end
include("FakeTerminals.jl")
@@ -48,219 +50,290 @@ import .FakeTerminals.FakeTerminal
@testset "Pkg.precompile" begin
# sequential precompile, depth-first
- isolate() do; cd_tempdir() do tmp
- Pkg.activate(".")
- cd(mkdir("packages")) do
- Pkg.generate("Dep1")
- Pkg.generate("Dep2")
- Pkg.generate("Dep3")
- Pkg.generate("Dep4")
- Pkg.generate("Dep5")
- Pkg.generate("Dep6")
- Pkg.generate("Dep7")
- Pkg.generate("Dep8")
- Pkg.generate("NoVersion")
- open(joinpath("NoVersion","Project.toml"), "w") do io
- write(io, "name = \"NoVersion\"\nuuid = \"$(UUIDs.uuid4())\"")
- end
- Pkg.generate("BrokenDep")
- open(joinpath("BrokenDep","src","BrokenDep.jl"), "w") do io
- write(io, "module BrokenDep\nerror()\nend")
- end
- Pkg.generate("TrailingTaskDep")
- open(joinpath("TrailingTaskDep","src","TrailingTaskDep.jl"), "w") do io
- write(io, """
- module TrailingTaskDep
- println(stderr, "waiting for IO to finish") # pretend to be a warning
- sleep(2)
- end""")
- end
- Pkg.generate("SlowPrecompile")
- open(joinpath("SlowPrecompile","src","SlowPrecompile.jl"), "w") do io
- write(io, """
- module SlowPrecompile
- sleep(10)
- end""")
- end
- end
- Pkg.develop(Pkg.PackageSpec(path="packages/Dep1"))
-
- Pkg.activate("Dep1")
- Pkg.develop(Pkg.PackageSpec(path="packages/Dep2"))
- Pkg.activate("Dep2")
- Pkg.develop(Pkg.PackageSpec(path="packages/Dep3"))
-
- Pkg.activate(".")
- Pkg.resolve()
- Pkg.precompile()
-
- iob = IOBuffer()
- ENV["JULIA_PKG_PRECOMPILE_AUTO"]=1
- @info "Auto precompilation enabled"
- Pkg.develop(Pkg.PackageSpec(path="packages/Dep4"))
- Pkg.develop(Pkg.PackageSpec(path="packages/NoVersion")) # a package with no version number
- Pkg.build(io=iob) # should trigger auto-precomp
- @test occursin("Precompiling", String(take!(iob)))
- Pkg.precompile(io=iob)
- @test !occursin("Precompiling", String(take!(iob))) # test that the previous precompile was a no-op
-
- Pkg.precompile("Dep4", io=iob)
- @test !occursin("Precompiling", String(take!(iob))) # should be a no-op
- Pkg.precompile(["Dep4", "NoVersion"], io=iob)
- @test !occursin("Precompiling", String(take!(iob))) # should be a no-op
-
- Pkg.precompile(Pkg.PackageSpec(name="Dep4"))
- @test !occursin("Precompiling", String(take!(iob))) # should be a no-op
- Pkg.precompile([Pkg.PackageSpec(name="Dep4"), Pkg.PackageSpec(name="NoVersion")])
- @test !occursin("Precompiling", String(take!(iob))) # should be a no-op
-
- ENV["JULIA_PKG_PRECOMPILE_AUTO"]=0
- @info "Auto precompilation disabled"
- Pkg.develop(Pkg.PackageSpec(path="packages/Dep5"))
- Pkg.precompile(io=iob)
- @test occursin("Precompiling", String(take!(iob)))
-
- ENV["JULIA_PKG_PRECOMPILE_AUTO"]=1
- Pkg.develop(Pkg.PackageSpec(path="packages/BrokenDep"))
- Pkg.build(io=iob) # should trigger auto-precomp and soft-error
- @test occursin("Precompiling", String(take!(iob)))
-
- ptoml = joinpath("packages","BrokenDep","Project.toml")
- lines = readlines(ptoml)
- open(joinpath("packages","BrokenDep","src","BrokenDep.jl"), "w") do io
- write(io, "module BrokenDep\n\nend") # remove error
- end
- open(ptoml, "w") do io
- for line in lines
- if startswith(line, "version = \"0.1.0\"")
- println(io, replace(line, "version = \"0.1.0\"" => "version = \"0.1.1\"", count=1)) # up version
- else
- println(io, line)
+ isolate() do;
+ cd_tempdir() do tmp
+ Pkg.activate(".")
+ cd(mkdir("packages")) do
+ Pkg.generate("Dep1")
+ Pkg.generate("Dep2")
+ Pkg.generate("Dep3")
+ Pkg.generate("Dep4")
+ Pkg.generate("Dep5")
+ Pkg.generate("Dep6")
+ Pkg.generate("Dep7")
+ Pkg.generate("Dep8")
+ Pkg.generate("NoVersion")
+ open(joinpath("NoVersion", "Project.toml"), "w") do io
+ write(io, "name = \"NoVersion\"\nuuid = \"$(UUIDs.uuid4())\"")
+ end
+ Pkg.generate("BrokenDep")
+ open(joinpath("BrokenDep", "src", "BrokenDep.jl"), "w") do io
+ write(io, "module BrokenDep\nerror()\nend")
+ end
+ Pkg.generate("TrailingTaskDep")
+ open(joinpath("TrailingTaskDep", "src", "TrailingTaskDep.jl"), "w") do io
+ write(
+ io, """
+ module TrailingTaskDep
+ println(stderr, "waiting for IO to finish") # pretend to be a warning
+ sleep(2)
+ end"""
+ )
+ end
+ Pkg.generate("SlowPrecompile")
+ open(joinpath("SlowPrecompile", "src", "SlowPrecompile.jl"), "w") do io
+ write(
+ io, """
+ module SlowPrecompile
+ sleep(10)
+ end"""
+ )
end
end
- end
- Pkg.update("BrokenDep") # should trigger auto-precomp including the fixed BrokenDep
- Pkg.precompile(io=iob)
- @test !occursin("Precompiling", String(take!(iob))) # test that the previous precompile was a no-op
+ Pkg.develop(Pkg.PackageSpec(path = "packages/Dep1"))
- # https://github.com/JuliaLang/Pkg.jl/pull/2142
- Pkg.build(; verbose=true)
+ Pkg.activate("Dep1")
+ Pkg.develop(Pkg.PackageSpec(path = "packages/Dep2"))
+ Pkg.activate("Dep2")
+ Pkg.develop(Pkg.PackageSpec(path = "packages/Dep3"))
+
+ Pkg.activate(".")
+ Pkg.resolve()
+ Pkg.precompile()
- @testset "timing mode" begin
iob = IOBuffer()
- Pkg.develop(Pkg.PackageSpec(path="packages/Dep6"))
- Pkg.precompile(io=iob, timing=true)
- str = String(take!(iob))
- @test occursin("Precompiling", str)
- @test occursin(" ms", str)
- @test occursin("Dep6", str)
- Pkg.precompile(io=iob)
+ ENV["JULIA_PKG_PRECOMPILE_AUTO"] = 1
+ @info "Auto precompilation enabled"
+ Pkg.develop(Pkg.PackageSpec(path = "packages/Dep4"))
+ Pkg.develop(Pkg.PackageSpec(path = "packages/NoVersion")) # a package with no version number
+ Pkg.build(io = iob) # should trigger auto-precomp
+ @test occursin("Precompiling", String(take!(iob)))
+ Pkg.precompile(io = iob)
@test !occursin("Precompiling", String(take!(iob))) # test that the previous precompile was a no-op
- end
- @testset "instantiate" begin
- iob = IOBuffer()
- Pkg.activate("packages/Dep7")
- Pkg.resolve()
- @test isfile("packages/Dep7/Project.toml")
- @test isfile("packages/Dep7/Manifest.toml")
- Pkg.instantiate(io=iob) # with a Project.toml and Manifest.toml
+ Pkg.precompile("Dep4", io = iob)
+ @test !occursin("Precompiling", String(take!(iob))) # should be a no-op
+ Pkg.precompile(["Dep4", "NoVersion"], io = iob)
+ @test !occursin("Precompiling", String(take!(iob))) # should be a no-op
+
+ Pkg.precompile(Pkg.PackageSpec(name = "Dep4"))
+ @test !occursin("Precompiling", String(take!(iob))) # should be a no-op
+ Pkg.precompile([Pkg.PackageSpec(name = "Dep4"), Pkg.PackageSpec(name = "NoVersion")])
+ @test !occursin("Precompiling", String(take!(iob))) # should be a no-op
+
+ ENV["JULIA_PKG_PRECOMPILE_AUTO"] = 0
+ @info "Auto precompilation disabled"
+ Pkg.develop(Pkg.PackageSpec(path = "packages/Dep5"))
+ Pkg.precompile(io = iob)
@test occursin("Precompiling", String(take!(iob)))
- Pkg.activate("packages/Dep8")
- @test isfile("packages/Dep8/Project.toml")
- @test !isfile("packages/Dep8/Manifest.toml")
- Pkg.instantiate(io=iob) # with only a Project.toml
+ ENV["JULIA_PKG_PRECOMPILE_AUTO"] = 1
+ Pkg.develop(Pkg.PackageSpec(path = "packages/BrokenDep"))
+ Pkg.build(io = iob) # should trigger auto-precomp and soft-error
@test occursin("Precompiling", String(take!(iob)))
- end
- ENV["JULIA_PKG_PRECOMPILE_AUTO"]=0
+ ptoml = joinpath("packages", "BrokenDep", "Project.toml")
+ lines = readlines(ptoml)
+ open(joinpath("packages", "BrokenDep", "src", "BrokenDep.jl"), "w") do io
+ write(io, "module BrokenDep\n\nend") # remove error
+ end
+ open(ptoml, "w") do io
+ for line in lines
+ if startswith(line, "version = \"0.1.0\"")
+ println(io, replace(line, "version = \"0.1.0\"" => "version = \"0.1.1\"", count = 1)) # up version
+ else
+ println(io, line)
+ end
+ end
+ end
+ Pkg.update("BrokenDep") # should trigger auto-precomp including the fixed BrokenDep
+ Pkg.precompile(io = iob)
+ @test !occursin("Precompiling", String(take!(iob))) # test that the previous precompile was a no-op
- @testset "waiting for trailing tasks" begin
- Pkg.activate("packages/TrailingTaskDep")
- iob = IOBuffer()
- Pkg.precompile(io=iob)
- str = String(take!(iob))
- @test occursin("Precompiling", str)
- @test occursin("Waiting for background task / IO / timer.", str)
- end
+ # https://github.com/JuliaLang/Pkg.jl/pull/2142
+ Pkg.build(; verbose = true)
+
+ @testset "timing mode" begin
+ iob = IOBuffer()
+ Pkg.develop(Pkg.PackageSpec(path = "packages/Dep6"))
+ Pkg.precompile(io = iob, timing = true)
+ str = String(take!(iob))
+ @test occursin("Precompiling", str)
+ @test occursin(" ms", str)
+ @test occursin("Dep6", str)
+ Pkg.precompile(io = iob)
+ @test !occursin("Precompiling", String(take!(iob))) # test that the previous precompile was a no-op
+ end
+
+ dep8_path = git_init_package(tmp, joinpath("packages", "Dep8"))
+ function clear_dep8_cache()
+ rm(joinpath(Pkg.depots1(), "compiled", "v$(VERSION.major).$(VERSION.minor)", "Dep8"), force = true, recursive = true)
+ end
+ @testset "delayed precompilation with do-syntax" begin
+ iob = IOBuffer()
+ # Test that operations inside Pkg.precompile() do block don't trigger auto-precompilation
+ Pkg.precompile(io = iob) do
+ Pkg.add(Pkg.PackageSpec(path = dep8_path))
+ Pkg.rm("Dep8")
+ clear_dep8_cache()
+ Pkg.add(Pkg.PackageSpec(path = dep8_path))
+ end
- @testset "pidlocked precompile" begin
- proj = joinpath(pwd(), "packages", "SlowPrecompile")
- cmd = addenv(`$(Base.julia_cmd()) --color=no --startup-file=no --project="$(pkgdir(Pkg))" -e "
+ # The precompile should happen once at the end
+ @test count(r"Precompiling", String(take!(iob))) == 1 # should only precompile once
+
+ # Verify it was precompiled by checking a second call is a no-op
+ Pkg.precompile(io = iob)
+ @test !occursin("Precompiling", String(take!(iob)))
+ end
+
+ Pkg.rm("Dep8")
+
+ @testset "autoprecompilation_enabled global control" begin
+ iob = IOBuffer()
+ withenv("JULIA_PKG_PRECOMPILE_AUTO" => nothing) do
+ original_state = Pkg._autoprecompilation_enabled
+ try
+ Pkg.autoprecompilation_enabled(false)
+ @test Pkg._autoprecompilation_enabled == false
+
+ # Operations should not trigger autoprecompilation when globally disabled
+ clear_dep8_cache()
+ Pkg.add(Pkg.PackageSpec(path = dep8_path), io = iob)
+ @test !occursin("Precompiling", String(take!(iob)))
+
+ # Manual precompile should still work
+ @test Base.isprecompiled(Base.identify_package("Dep8")) == false
+ Pkg.precompile(io = iob)
+ @test occursin("Precompiling", String(take!(iob)))
+ @test Base.isprecompiled(Base.identify_package("Dep8"))
+
+ # Re-enable autoprecompilation
+ Pkg.autoprecompilation_enabled(true)
+ @test Pkg._autoprecompilation_enabled == true
+
+ # Operations should now trigger autoprecompilation again
+ Pkg.rm("Dep8", io = iob)
+ clear_dep8_cache()
+ Pkg.add(Pkg.PackageSpec(path = dep8_path), io = iob)
+ @test Base.isprecompiled(Base.identify_package("Dep8"))
+ @test occursin("Precompiling", String(take!(iob)))
+
+ finally
+ # Restore original state
+ Pkg.autoprecompilation_enabled(original_state)
+ end
+ end
+ end
+
+ @testset "instantiate" begin
+ iob = IOBuffer()
+ Pkg.activate("packages/Dep7")
+ Pkg.resolve()
+ @test isfile("packages/Dep7/Project.toml")
+ @test isfile("packages/Dep7/Manifest.toml")
+ Pkg.instantiate(io = iob) # with a Project.toml and Manifest.toml
+ @test occursin("Precompiling", String(take!(iob)))
+
+ Pkg.activate("packages/Dep8")
+ @test isfile("packages/Dep8/Project.toml")
+ @test !isfile("packages/Dep8/Manifest.toml")
+ Pkg.instantiate(io = iob) # with only a Project.toml
+ @test occursin("Precompiling", String(take!(iob)))
+ end
+
+ ENV["JULIA_PKG_PRECOMPILE_AUTO"] = 0
+
+ @testset "waiting for trailing tasks" begin
+ Pkg.activate("packages/TrailingTaskDep")
+ iob = IOBuffer()
+ Pkg.precompile(io = iob)
+ str = String(take!(iob))
+ @test occursin("Precompiling", str)
+ @test occursin("Waiting for background task / IO / timer.", str)
+ end
+
+ @testset "pidlocked precompile" begin
+ proj = joinpath(pwd(), "packages", "SlowPrecompile")
+ cmd = addenv(
+ `$(Base.julia_cmd()) --color=no --startup-file=no --project="$(pkgdir(Pkg))" -e "
using Pkg
Pkg.activate(\"$(escape_string(proj))\")
Pkg.precompile()
"`,
- "JULIA_PKG_PRECOMPILE_AUTO" => "0")
- iob1 = IOBuffer()
- iob2 = IOBuffer()
- try
- Base.Experimental.@sync begin
- @async run(pipeline(cmd, stderr=iob1, stdout=iob1))
- @async run(pipeline(cmd, stderr=iob2, stdout=iob2))
+ "JULIA_PKG_PRECOMPILE_AUTO" => "0"
+ )
+ iob1 = IOBuffer()
+ iob2 = IOBuffer()
+ try
+ Base.Experimental.@sync begin
+ @async run(pipeline(cmd, stderr = iob1, stdout = iob1))
+ @async run(pipeline(cmd, stderr = iob2, stdout = iob2))
+ end
+ catch
+ println("pidlocked precompile tests failed:")
+ println("process 1:\n", String(take!(iob1)))
+ println("process 2:\n", String(take!(iob2)))
+ rethrow()
end
- catch
- println("pidlocked precompile tests failed:")
- println("process 1:\n", String(take!(iob1)))
- println("process 2:\n", String(take!(iob2)))
- rethrow()
+ s1 = String(take!(iob1))
+ s2 = String(take!(iob2))
+ @test occursin("Precompiling", s1)
+ @test occursin("Precompiling", s2)
+ @test any(contains("Being precompiled by another process (pid: "), (s1, s2))
end
- s1 = String(take!(iob1))
- s2 = String(take!(iob2))
- @test occursin("Precompiling", s1)
- @test occursin("Precompiling", s2)
- @test any(contains("Being precompiled by another process (pid: "), (s1, s2))
- end
- end end
+ end
+ end
# ignoring circular deps, to avoid deadlock
- isolate() do; cd_tempdir() do tmp
- Pkg.activate(".")
- cd(mkdir("packages")) do
- Pkg.generate("CircularDep1")
- Pkg.generate("CircularDep2")
- Pkg.generate("CircularDep3")
+ isolate() do;
+ cd_tempdir() do tmp
+ Pkg.activate(".")
+ cd(mkdir("packages")) do
+ Pkg.generate("CircularDep1")
+ Pkg.generate("CircularDep2")
+ Pkg.generate("CircularDep3")
+ end
+ Pkg.develop(Pkg.PackageSpec(path = "packages/CircularDep1"))
+ Pkg.develop(Pkg.PackageSpec(path = "packages/CircularDep2"))
+ Pkg.develop(Pkg.PackageSpec(path = "packages/CircularDep3"))
+
+ Pkg.activate("CircularDep1")
+ Pkg.develop(Pkg.PackageSpec(path = "packages/CircularDep2"))
+ Pkg.activate("CircularDep2")
+ Pkg.develop(Pkg.PackageSpec(path = "packages/CircularDep3"))
+ Pkg.activate("CircularDep3")
+ Pkg.develop(Pkg.PackageSpec(path = "packages/CircularDep1"))
+
+ Pkg.activate(".")
+ Pkg.resolve()
+
+ ## Tests when circularity is in dependencies
+ @test_logs (:warn, r"Circular dependency detected") Pkg.precompile()
+
+ ## Tests when circularity goes through the active project
+ Pkg.activate("CircularDep1")
+ Pkg.resolve() # necessary because resolving in `Pkg.precompile` has been removed
+ @test_logs (:warn, r"Circular dependency detected") Pkg.precompile()
+ Pkg.activate(".")
+ Pkg.activate("CircularDep2")
+ Pkg.resolve() # necessary because resolving in `Pkg.precompile` has been removed
+ @test_logs (:warn, r"Circular dependency detected") Pkg.precompile()
+ Pkg.activate(".")
+ Pkg.activate("CircularDep3")
+ Pkg.resolve() # necessary because resolving in `Pkg.precompile` has been removed
+ @test_logs (:warn, r"Circular dependency detected") Pkg.precompile()
+
+ Pkg.activate(temp = true)
+ Pkg.precompile() # precompile an empty env should be a no-op
+ # TODO: Reenable
+ #@test_throws ErrorException Pkg.precompile("DoesNotExist") # fail to find a nonexistant dep in an empty env
+
+ Pkg.add("Random")
+ #@test_throws ErrorException Pkg.precompile("DoesNotExist")
+ Pkg.precompile() # should be a no-op
end
- Pkg.develop(Pkg.PackageSpec(path="packages/CircularDep1"))
- Pkg.develop(Pkg.PackageSpec(path="packages/CircularDep2"))
- Pkg.develop(Pkg.PackageSpec(path="packages/CircularDep3"))
-
- Pkg.activate("CircularDep1")
- Pkg.develop(Pkg.PackageSpec(path="packages/CircularDep2"))
- Pkg.activate("CircularDep2")
- Pkg.develop(Pkg.PackageSpec(path="packages/CircularDep3"))
- Pkg.activate("CircularDep3")
- Pkg.develop(Pkg.PackageSpec(path="packages/CircularDep1"))
-
- Pkg.activate(".")
- Pkg.resolve()
-
- ## Tests when circularity is in dependencies
- @test_logs (:warn, r"Circular dependency detected") Pkg.precompile()
-
- ## Tests when circularity goes through the active project
- Pkg.activate("CircularDep1")
- Pkg.resolve() # necessary because resolving in `Pkg.precompile` has been removed
- @test_logs (:warn, r"Circular dependency detected") Pkg.precompile()
- Pkg.activate(".")
- Pkg.activate("CircularDep2")
- Pkg.resolve() # necessary because resolving in `Pkg.precompile` has been removed
- @test_logs (:warn, r"Circular dependency detected") Pkg.precompile()
- Pkg.activate(".")
- Pkg.activate("CircularDep3")
- Pkg.resolve() # necessary because resolving in `Pkg.precompile` has been removed
- @test_logs (:warn, r"Circular dependency detected") Pkg.precompile()
-
- Pkg.activate(temp=true)
- Pkg.precompile() # precompile an empty env should be a no-op
- # TODO: Reenable
- #@test_throws ErrorException Pkg.precompile("DoesNotExist") # fail to find a nonexistant dep in an empty env
-
- Pkg.add("Random")
- #@test_throws ErrorException Pkg.precompile("DoesNotExist")
- Pkg.precompile() # should be a no-op
- end end
+ end
end
@testset "Pkg.API.check_package_name: Error message if package name ends in .jl" begin
@@ -296,21 +369,83 @@ end
@testset "set number of concurrent requests" begin
@test Pkg.Types.num_concurrent_downloads() == 8
- withenv("JULIA_PKG_CONCURRENT_DOWNLOADS"=>"5") do
+ withenv("JULIA_PKG_CONCURRENT_DOWNLOADS" => "5") do
@test Pkg.Types.num_concurrent_downloads() == 5
end
- withenv("JULIA_PKG_CONCURRENT_DOWNLOADS"=>"0") do
+ withenv("JULIA_PKG_CONCURRENT_DOWNLOADS" => "0") do
@test_throws ErrorException Pkg.Types.num_concurrent_downloads()
end
end
@testset "`[compat]` entries for `julia`" begin
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- pathf = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "FarFuture"))
- pathp = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "FarPast"))
- @test_throws "julia version requirement from Project.toml's compat section not satisfied for package" Pkg.add(path=pathf)
- @test_throws "julia version requirement from Project.toml's compat section not satisfied for package" Pkg.add(path=pathp)
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ pathf = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "FarFuture"))
+ pathp = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "FarPast"))
+ @test_throws "julia version requirement from Project.toml's compat section not satisfied for package" Pkg.add(path = pathf)
+ @test_throws "julia version requirement from Project.toml's compat section not satisfied for package" Pkg.add(path = pathp)
+ end
+ end
+end
+
+@testset "allow_reresolve parameter" begin
+ isolate(loaded_depot = false) do;
+ mktempdir() do tempdir
+ Pkg.Registry.add(url = "https://github.com/JuliaRegistries/Test")
+ # AllowReresolveTest has Example v0.5.1 which is yanked in the test registry.
+ test_dir = joinpath(tempdir, "AllowReresolveTest")
+
+ # Test that we can build and test with allow_reresolve=true
+ copy_test_package(tempdir, "AllowReresolveTest")
+ Pkg.activate(joinpath(tempdir, "AllowReresolveTest"))
+ @test Pkg.build(; allow_reresolve = true) == nothing
+
+ rm(test_dir, force = true, recursive = true)
+ copy_test_package(tempdir, "AllowReresolveTest")
+ Pkg.activate(joinpath(tempdir, "AllowReresolveTest"))
+ @test Pkg.test(; allow_reresolve = true) == nothing
+
+ # Test that allow_reresolve=false fails with the broken manifest
+ rm(test_dir, force = true, recursive = true)
+ copy_test_package(tempdir, "AllowReresolveTest")
+ Pkg.activate(joinpath(tempdir, "AllowReresolveTest"))
+ @test_throws Pkg.Resolve.ResolverError Pkg.build(; allow_reresolve = false)
+
+ rm(test_dir, force = true, recursive = true)
+ copy_test_package(tempdir, "AllowReresolveTest")
+ Pkg.activate(joinpath(tempdir, "AllowReresolveTest"))
+ @test_throws Pkg.Resolve.ResolverError Pkg.test(; allow_reresolve = false)
+ end
+ end
+end
+
+@testset "Yanked package handling" begin
+ isolate() do;
+ mktempdir() do tempdir
+ # Copy the yanked test environment
+ test_env_dir = joinpath(tempdir, "yanked_test")
+ cp(joinpath(@__DIR__, "manifest", "yanked"), test_env_dir)
+ Pkg.activate(test_env_dir)
+
+ @testset "status shows yanked packages" begin
+ iob = IOBuffer()
+ Pkg.status(io = iob)
+ status_output = String(take!(iob))
+
+ @test occursin("Mocking v0.7.4 [yanked]", status_output)
+ @test occursin("Package versions marked with [yanked] have been pulled from their registry.", status_output)
+ end
+ @testset "resolve error shows yanked packages warning" begin
+ # Try to add a package that will cause resolve conflicts with yanked package
+ iob = IOBuffer()
+ @test_throws Pkg.Resolve.ResolverError Pkg.add("Example"; preserve = Pkg.PRESERVE_ALL, io = iob)
+ error_output = String(take!(iob))
+
+ @test occursin("The following package versions were yanked from their registry and are not resolvable:", error_output)
+ @test occursin("Mocking [78c3b35d] 0.7.4", error_output)
+ end
+ end
+ end
end
end # module APITests
diff --git a/test/apps.jl b/test/apps.jl
new file mode 100644
index 0000000000..3eed8672ca
--- /dev/null
+++ b/test/apps.jl
@@ -0,0 +1,65 @@
+module AppsTests
+
+import ..Pkg # ensure we are using the correct Pkg
+using ..Utils
+
+using Test
+
+@testset "Apps" begin
+
+ isolate(loaded_depot = true) do
+ sep = Sys.iswindows() ? ';' : ':'
+ Pkg.Apps.develop(path = joinpath(@__DIR__, "test_packages", "Rot13.jl"))
+ current_path = ENV["PATH"]
+ exename = Sys.iswindows() ? "juliarot13.bat" : "juliarot13"
+ cliexename = Sys.iswindows() ? "juliarot13cli.bat" : "juliarot13cli"
+ withenv("PATH" => string(joinpath(first(DEPOT_PATH), "bin"), sep, current_path)) do
+ # Test original app
+ @test contains(Sys.which("$exename"), first(DEPOT_PATH))
+ @test read(`$exename test`, String) == "grfg\n"
+
+ # Test submodule app
+ @test contains(Sys.which("$cliexename"), first(DEPOT_PATH))
+ @test read(`$cliexename test`, String) == "CLI: grfg\n"
+
+ Pkg.Apps.rm("Rot13")
+ @test Sys.which(exename) == nothing
+ @test Sys.which(cliexename) == nothing
+ end
+ end
+
+ isolate(loaded_depot = true) do
+ mktempdir() do tmpdir
+ sep = Sys.iswindows() ? ';' : ':'
+ path = git_init_package(tmpdir, joinpath(@__DIR__, "test_packages", "Rot13.jl"))
+ Pkg.Apps.add(path = path)
+ exename = Sys.iswindows() ? "juliarot13.bat" : "juliarot13"
+ cliexename = Sys.iswindows() ? "juliarot13cli.bat" : "juliarot13cli"
+ current_path = ENV["PATH"]
+ withenv("PATH" => string(joinpath(first(DEPOT_PATH), "bin"), sep, current_path)) do
+ # Test original app
+ @test contains(Sys.which(exename), first(DEPOT_PATH))
+ @test read(`$exename test`, String) == "grfg\n"
+
+ # Test submodule app
+ @test contains(Sys.which(cliexename), first(DEPOT_PATH))
+ @test read(`$cliexename test`, String) == "CLI: grfg\n"
+
+ Pkg.Apps.rm("Rot13")
+ @test Sys.which(exename) == nothing
+ @test Sys.which(cliexename) == nothing
+ end
+
+ # https://github.com/JuliaLang/Pkg.jl/issues/4258
+ Pkg.Apps.add(path = path)
+ Pkg.Apps.develop(path = path)
+ mv(joinpath(path, "src", "Rot13_edited.jl"), joinpath(path, "src", "Rot13.jl"); force = true)
+ withenv("PATH" => string(joinpath(first(DEPOT_PATH), "bin"), sep, current_path)) do
+ @test read(`$exename test`, String) == "Updated!\n"
+ end
+ end
+ end
+
+end
+
+end # module
diff --git a/test/aqua.jl b/test/aqua.jl
new file mode 100644
index 0000000000..c5aeb90392
--- /dev/null
+++ b/test/aqua.jl
@@ -0,0 +1,2 @@
+using Aqua
+Aqua.test_all(Pkg)
diff --git a/test/artifacts.jl b/test/artifacts.jl
index 605c3b26f8..4c6e213eb7 100644
--- a/test/artifacts.jl
+++ b/test/artifacts.jl
@@ -19,7 +19,7 @@ using ..Utils
# important to keep hashes stable across platforms that have different umasks, changing
# the permissions within a tree hash, breaking our tests.
function create_artifact_chmod(f::Function)
- create_artifact() do path
+ return create_artifact() do path
f(path)
# Change all files to have 644 permissions, leave directories alone
@@ -36,51 +36,59 @@ end
# We're going to ensure that our artifact creation does in fact give git-tree-sha1's.
creators = [
# First test the empty artifact
- (path -> begin
- # add no contents
- end, "4b825dc642cb6eb9a060e54bf8d69288fbee4904"),
+ (
+ path -> begin
+ # add no contents
+ end, "4b825dc642cb6eb9a060e54bf8d69288fbee4904",
+ ),
# Next test creating a single file
- (path -> begin
- open(joinpath(path, "foo"), "w") do io
- print(io, "Hello, world!")
- end
- end, "339aad93c0f854604248ea3b7c5b7edea20625a9"),
+ (
+ path -> begin
+ open(joinpath(path, "foo"), "w") do io
+ print(io, "Hello, world!")
+ end
+ end, "339aad93c0f854604248ea3b7c5b7edea20625a9",
+ ),
# Next we will test creating multiple files
- (path -> begin
- open(joinpath(path, "foo1"), "w") do io
- print(io, "Hello")
- end
- open(joinpath(path, "foo2"), "w") do io
- print(io, "world!")
- end
- end, "98cda294312216b19e2a973e9c291c0f5181c98c"),
+ (
+ path -> begin
+ open(joinpath(path, "foo1"), "w") do io
+ print(io, "Hello")
+ end
+ open(joinpath(path, "foo2"), "w") do io
+ print(io, "world!")
+ end
+ end, "98cda294312216b19e2a973e9c291c0f5181c98c",
+ ),
# Finally, we will have nested directories and all that good stuff
- (path -> begin
- mkpath(joinpath(path, "bar", "bar"))
- open(joinpath(path, "bar", "bar", "foo1"), "w") do io
- print(io, "Hello")
- end
- open(joinpath(path, "bar", "foo2"), "w") do io
- print(io, "world!")
- end
- open(joinpath(path, "foo3"), "w") do io
- print(io, "baz!")
- end
+ (
+ path -> begin
+ mkpath(joinpath(path, "bar", "bar"))
+ open(joinpath(path, "bar", "bar", "foo1"), "w") do io
+ print(io, "Hello")
+ end
+ open(joinpath(path, "bar", "foo2"), "w") do io
+ print(io, "world!")
+ end
+ open(joinpath(path, "foo3"), "w") do io
+ print(io, "baz!")
+ end
- # Empty directories do nothing to effect the hash, so we create one with a
- # random name to prove that it does not get hashed into the rest. Also, it
- # turns out that life is cxomplex enough that we need to test the nested
- # empty directories case as well.
- rand_dir = joinpath(path, Random.randstring(8), "inner")
- mkpath(rand_dir)
-
- # Symlinks are not followed, even if they point to directories
- symlink("foo3", joinpath(path, "foo3_link"))
- symlink("../bar", joinpath(path, "bar", "infinite_link"))
- end, "86a1ce580587d5851fdfa841aeb3c8d55663f6f9"),
+ # Empty directories do nothing to affect the hash, so we create one with a
+ # random name to prove that it does not get hashed into the rest. Also, it
+ # turns out that life is complex enough that we need to test the nested
+ # empty directories case as well.
+ rand_dir = joinpath(path, Random.randstring(8), "inner")
+ mkpath(rand_dir)
+
+ # Symlinks are not followed, even if they point to directories
+ symlink("foo3", joinpath(path, "foo3_link"))
+ symlink("../bar", joinpath(path, "bar", "infinite_link"))
+ end, "86a1ce580587d5851fdfa841aeb3c8d55663f6f9",
+ ),
]
# Enable the following code snippet to figure out the correct gitsha's:
@@ -145,7 +153,7 @@ end
@test !iszero(filemode(joinpath(artifact_dir, dir_link)) & 0o222)
# Make sure we can delete the artifact directory without having
# to manually change permissions
- rm(artifact_dir; recursive=true)
+ rm(artifact_dir; recursive = true)
end
end
end
@@ -166,10 +174,10 @@ end
# First, let's test our ability to find Artifacts.toml files;
ATS = joinpath(@__DIR__, "test_packages", "ArtifactTOMLSearch")
test_modules = [
- joinpath(ATS, "pkg.jl") => joinpath(ATS, "Artifacts.toml"),
- joinpath(ATS, "sub_module", "pkg.jl") => joinpath(ATS, "Artifacts.toml"),
- joinpath(ATS, "sub_package", "pkg.jl") => joinpath(ATS, "sub_package", "Artifacts.toml"),
- joinpath(ATS, "julia_artifacts_test", "pkg.jl") => joinpath(ATS, "julia_artifacts_test", "JuliaArtifacts.toml"),
+ joinpath(ATS, "pkg.jl") => joinpath(ATS, "Artifacts.toml"),
+ joinpath(ATS, "sub_module", "pkg.jl") => joinpath(ATS, "Artifacts.toml"),
+ joinpath(ATS, "sub_package", "pkg.jl") => joinpath(ATS, "sub_package", "Artifacts.toml"),
+ joinpath(ATS, "julia_artifacts_test", "pkg.jl") => joinpath(ATS, "julia_artifacts_test", "JuliaArtifacts.toml"),
joinpath(@__DIR__, "test_packages", "BasicSandbox", "src", "Foo.jl") => nothing,
]
for (test_src, artifacts_toml) in test_modules
@@ -229,7 +237,7 @@ end
end
@test_throws ErrorException bind_artifact!(artifacts_toml, "foo_txt", hash2)
@test artifact_hash("foo_txt", artifacts_toml) == hash
- bind_artifact!(artifacts_toml, "foo_txt", hash2; force=true)
+ bind_artifact!(artifacts_toml, "foo_txt", hash2; force = true)
@test artifact_hash("foo_txt", artifacts_toml) == hash2
# Test that we can un-bind
@@ -238,28 +246,36 @@ end
# Test platform-specific binding and providing download_info
download_info = [
- ("http://google.com/hello_world", "0"^64),
- ("http://microsoft.com/hello_world", "a"^64),
+ ArtifactDownloadInfo("http://google.com/hello_world", "0"^64),
+ ArtifactDownloadInfo("http://microsoft.com/hello_world", "a"^64, 1),
]
# First, test the binding of things with various platforms and overwriting and such works properly
linux64 = Platform("x86_64", "linux")
win32 = Platform("i686", "windows")
- bind_artifact!(artifacts_toml, "foo_txt", hash; download_info=download_info, platform=linux64)
- @test artifact_hash("foo_txt", artifacts_toml; platform=linux64) == hash
- @test artifact_hash("foo_txt", artifacts_toml; platform=Platform("x86_64", "macos")) == nothing
- @test_throws ErrorException bind_artifact!(artifacts_toml, "foo_txt", hash2; download_info=download_info, platform=linux64)
- bind_artifact!(artifacts_toml, "foo_txt", hash2; download_info=download_info, platform=linux64, force=true)
- bind_artifact!(artifacts_toml, "foo_txt", hash; download_info=download_info, platform=win32)
- @test artifact_hash("foo_txt", artifacts_toml; platform=linux64) == hash2
- @test artifact_hash("foo_txt", artifacts_toml; platform=win32) == hash
- @test ensure_artifact_installed("foo_txt", artifacts_toml; platform=linux64) == artifact_path(hash2)
- @test ensure_artifact_installed("foo_txt", artifacts_toml; platform=win32) == artifact_path(hash)
+ bind_artifact!(artifacts_toml, "foo_txt", hash; download_info = download_info, platform = linux64)
+ @test artifact_hash("foo_txt", artifacts_toml; platform = linux64) == hash
+ @test artifact_hash("foo_txt", artifacts_toml; platform = Platform("x86_64", "macos")) == nothing
+ @test_throws ErrorException bind_artifact!(artifacts_toml, "foo_txt", hash2; download_info = download_info, platform = linux64)
+ bind_artifact!(artifacts_toml, "foo_txt", hash; download_info = download_info, platform = win32)
+ bind_artifact!(artifacts_toml, "foo_txt", hash2; download_info = download_info, platform = linux64, force = true)
+ @test artifact_hash("foo_txt", artifacts_toml; platform = linux64) == hash2
+ @test artifact_hash("foo_txt", artifacts_toml; platform = win32) == hash
+ @test ensure_artifact_installed("foo_txt", artifacts_toml; platform = linux64) == artifact_path(hash2)
+ @test ensure_artifact_installed("foo_txt", artifacts_toml; platform = win32) == artifact_path(hash)
+
+ # Default HostPlatform() adds a compare_strategy key that doesn't get picked up from
+ # the Artifacts.toml
+ testhost = Platform("x86_64", "linux", Dict("libstdcxx_version" => "1.2.3"))
+ BinaryPlatforms.set_compare_strategy!(testhost, "libstdcxx_version", BinaryPlatforms.compare_version_cap)
+ @test_throws ErrorException bind_artifact!(artifacts_toml, "foo_txt", hash; download_info = download_info, platform = testhost)
# Next, check that we can get the download_info properly:
- meta = artifact_meta("foo_txt", artifacts_toml; platform=win32)
+ meta = artifact_meta("foo_txt", artifacts_toml; platform = win32)
@test meta["download"][1]["url"] == "http://google.com/hello_world"
+ @test !haskey(meta["download"][1], "size")
@test meta["download"][2]["sha256"] == "a"^64
+ @test meta["download"][2]["size"] == 1
rm(artifacts_toml)
@@ -290,20 +306,24 @@ end
@test_logs (:error, r"malformed, must be array or dict!") artifact_meta("broken_artifact", joinpath(badifact_dir, "not_a_table.toml"))
# Next, test incorrect download errors
- for ignore_hash in (false, true); withenv("JULIA_PKG_IGNORE_HASHES" => ignore_hash ? "1" : nothing) do; mktempdir() do dir
- with_artifacts_directory(dir) do
- @test artifact_meta("broken_artifact", joinpath(badifact_dir, "incorrect_gitsha.toml")) != nothing
- if !ignore_hash
- @test_throws ErrorException ensure_artifact_installed("broken_artifact", joinpath(badifact_dir, "incorrect_gitsha.toml"))
- else
- @test_logs (:error, r"Tree Hash Mismatch!") match_mode=:any begin
- path = ensure_artifact_installed("broken_artifact", joinpath(badifact_dir, "incorrect_gitsha.toml"))
- @test endswith(path, "0000000000000000000000000000000000000000")
- @test isdir(path)
+ for ignore_hash in (false, true)
+ withenv("JULIA_PKG_IGNORE_HASHES" => ignore_hash ? "1" : nothing) do;
+ mktempdir() do dir
+ with_artifacts_directory(dir) do
+ @test artifact_meta("broken_artifact", joinpath(badifact_dir, "incorrect_gitsha.toml")) != nothing
+ if !ignore_hash
+ @test_throws ErrorException ensure_artifact_installed("broken_artifact", joinpath(badifact_dir, "incorrect_gitsha.toml"))
+ else
+ @test_logs (:error, r"Tree Hash Mismatch!") match_mode = :any begin
+ path = ensure_artifact_installed("broken_artifact", joinpath(badifact_dir, "incorrect_gitsha.toml"))
+ @test endswith(path, "0000000000000000000000000000000000000000")
+ @test isdir(path)
+ end
end
end
end
- end end end
+ end
+ end
mktempdir() do dir
with_artifacts_directory(dir) do
@@ -340,20 +360,24 @@ end
with_pkg_env(project_path) do
path = git_init_package(project_path, joinpath(@__DIR__, "test_packages", "ArtifactInstallation"))
add_this_pkg()
- Pkg.add(Pkg.Types.PackageSpec(
- name="ArtifactInstallation",
- uuid=Base.UUID("02111abe-2050-1119-117e-b30112b5bdc4"),
- path=path,
- ))
+ Pkg.add(
+ Pkg.Types.PackageSpec(
+ name = "ArtifactInstallation",
+ uuid = Base.UUID("02111abe-2050-1119-117e-b30112b5bdc4"),
+ path = path,
+ )
+ )
# Run test harness
Pkg.test("ArtifactInstallation")
# Also manually do it
- Core.eval(Module(:__anon__), quote
- using ArtifactInstallation
- do_test()
- end)
+ Core.eval(
+ Module(:__anon__), quote
+ using ArtifactInstallation
+ do_test()
+ end
+ )
end
end
@@ -362,7 +386,7 @@ end
copy_test_package(project_path, "ArtifactInstallation")
Pkg.activate(joinpath(project_path, "ArtifactInstallation"))
add_this_pkg()
- Pkg.instantiate(; verbose=true)
+ Pkg.instantiate(; verbose = true)
# Manual test that artifact is installed by instantiate()
artifacts_toml = joinpath(project_path, "ArtifactInstallation", "Artifacts.toml")
@@ -378,21 +402,21 @@ end
# Try to install all artifacts for the given platform, knowing full well that
# HelloWorldC will fail to match any artifact to this bogus platform
bogus_platform = Platform("bogus", "linux")
- artifacts = select_downloadable_artifacts(artifacts_toml; platform=bogus_platform)
+ artifacts = select_downloadable_artifacts(artifacts_toml; platform = bogus_platform)
for name in keys(artifacts)
- ensure_artifact_installed(name, artifacts[name], artifacts_toml; platform=bogus_platform)
+ ensure_artifact_installed(name, artifacts[name], artifacts_toml; platform = bogus_platform)
end
# Test that HelloWorldC doesn't even show up
- hwc_hash = artifact_hash("HelloWorldC", artifacts_toml; platform=bogus_platform)
+ hwc_hash = artifact_hash("HelloWorldC", artifacts_toml; platform = bogus_platform)
@test hwc_hash === nothing
# Test that socrates shows up, but is not installed, because it's lazy
- socrates_hash = artifact_hash("socrates", artifacts_toml; platform=bogus_platform)
+ socrates_hash = artifact_hash("socrates", artifacts_toml; platform = bogus_platform)
@test !artifact_exists(socrates_hash)
# Test that collapse_the_symlink is installed
- cts_hash = artifact_hash("collapse_the_symlink", artifacts_toml; platform=bogus_platform)
+ cts_hash = artifact_hash("collapse_the_symlink", artifacts_toml; platform = bogus_platform)
@test artifact_exists(cts_hash)
end
@@ -419,11 +443,12 @@ end
)
disengaged_platform = HostPlatform()
disengaged_platform["flooblecrank"] = "disengaged"
+ disengaged_adi = ArtifactDownloadInfo(disengaged_url, disengaged_sha256)
Pkg.Artifacts.bind_artifact!(
artifacts_toml,
"gooblebox",
disengaged_hash;
- download_info = [(disengaged_url, disengaged_sha256)],
+ download_info = [disengaged_adi],
platform = disengaged_platform,
)
end
@@ -463,22 +488,24 @@ end
artifacts_toml = joinpath(ap_path, "Artifacts.toml")
p = HostPlatform()
p["flooblecrank"] = flooblecrank_status
- flooblecrank_hash = artifact_hash("gooblebox", artifacts_toml; platform=p)
+ flooblecrank_hash = artifact_hash("gooblebox", artifacts_toml; platform = p)
@test flooblecrank_hash == right_hash
@test artifact_exists(flooblecrank_hash)
# Test that if we load the package, it knows how to find its own artifact,
# because it feeds the right `Platform` object through to `@artifact_str()`
- cmd = addenv(`$(Base.julia_cmd()) --color=yes --project=$(ap_path) -e 'using AugmentedPlatform; print(get_artifact_dir("gooblebox"))'`,
- "JULIA_DEPOT_PATH" => join(Base.DEPOT_PATH, Sys.iswindows() ? ";" : ":"),
- "FLOOBLECRANK" => flooblecrank_status)
+ cmd = addenv(
+ `$(Base.julia_cmd()) --color=yes --project=$(ap_path) -e 'using AugmentedPlatform; print(get_artifact_dir("gooblebox"))'`,
+ "JULIA_DEPOT_PATH" => join(Base.DEPOT_PATH, Sys.iswindows() ? ";" : ":"),
+ "FLOOBLECRANK" => flooblecrank_status
+ )
using_output = chomp(String(read(cmd)))
@test success(cmd)
@test artifact_path(right_hash) == using_output
tmpdir = mktempdir()
mkpath("$tmpdir/foo/$(flooblecrank_status)")
- rm("$tmpdir/foo/$(flooblecrank_status)"; recursive=true, force=true)
+ rm("$tmpdir/foo/$(flooblecrank_status)"; recursive = true, force = true)
cp(project_path, "$tmpdir/foo/$(flooblecrank_status)")
cp(Base.DEPOT_PATH[1], "$tmpdir/foo/$(flooblecrank_status)/depot")
end
@@ -503,7 +530,7 @@ end
p = HostPlatform()
p["flooblecrank"] = "engaged"
- add_this_pkg(; platform=p)
+ add_this_pkg(; platform = p)
@test isdir(artifact_path(engaged_hash))
@test !isdir(artifact_path(disengaged_hash))
end
@@ -529,7 +556,7 @@ end
p = HostPlatform()
p["flooblecrank"] = "engaged"
- Pkg.API.instantiate(; platform=p)
+ Pkg.API.instantiate(; platform = p)
@test isdir(artifact_path(engaged_hash))
@test isdir(artifact_path(disengaged_hash))
@@ -593,7 +620,7 @@ end
# This should reap the `die_hash` immediately, as it has already been moved to
# the orphaned list.
sleep(0.2)
- Pkg.gc(;collect_delay=Millisecond(100))
+ Pkg.gc(; collect_delay = Millisecond(100))
@test artifact_exists(live_hash)
@test !artifact_exists(die_hash)
@@ -608,7 +635,7 @@ end
# Next, unbind the live_hash, then run with collect_delay=0, and ensure that
# things are cleaned up immediately.
unbind_artifact!(artifacts_toml, "live")
- Pkg.gc(;collect_delay=Second(0))
+ Pkg.gc(; collect_delay = Second(0))
@test !artifact_exists(live_hash)
@test !artifact_exists(die_hash)
end
@@ -684,7 +711,7 @@ end
end
# Force Pkg to reload what it knows about artifact overrides
- @inferred Union{Nothing,Dict{Symbol,Any}} Pkg.Artifacts.load_overrides(;force=true)
+ @inferred Union{Nothing, Dict{Symbol, Any}} Pkg.Artifacts.load_overrides(; force = true)
# Verify that the hash-based override worked
@test artifact_path(baz_hash) == artifact_path(bar_hash)
@@ -694,17 +721,21 @@ end
# loads overridden package artifacts.
Pkg.activate(depot_container) do
copy_test_package(depot_container, "ArtifactOverrideLoading")
- Pkg.develop(Pkg.Types.PackageSpec(
- name="ArtifactOverrideLoading",
- uuid=aol_uuid,
- path=joinpath(depot_container, "ArtifactOverrideLoading"),
- ))
-
- (arty_path, barty_path) = Core.eval(Module(:__anon__), quote
- # TODO: This causes a loading.jl warning, probably Pkg is clashing because of a different UUID??
- using ArtifactOverrideLoading
- arty_path, barty_path
- end)
+ Pkg.develop(
+ Pkg.Types.PackageSpec(
+ name = "ArtifactOverrideLoading",
+ uuid = aol_uuid,
+ path = joinpath(depot_container, "ArtifactOverrideLoading"),
+ )
+ )
+
+ (arty_path, barty_path) = Core.eval(
+ Module(:__anon__), quote
+ # TODO: This causes a loading.jl warning, probably Pkg is clashing because of a different UUID??
+ using ArtifactOverrideLoading
+ arty_path, barty_path
+ end
+ )
@test arty_path == artifact_path(bar_hash)
@test barty_path == barty_override_path
@@ -727,7 +758,7 @@ end
end
# Force Pkg to reload what it knows about artifact overrides
- Pkg.Artifacts.load_overrides(;force=true)
+ Pkg.Artifacts.load_overrides(; force = true)
# Force Julia to re-load ArtifactOverrideLoading from scratch
pkgid = Base.PkgId(aol_uuid, "ArtifactOverrideLoading")
@@ -742,10 +773,12 @@ end
# loads overridden package artifacts.
Pkg.activate(depot_container) do
# TODO: This causes a loading.jl warning, probably Pkg is clashing because of a different UUID??
- (arty_path, barty_path) = Core.eval(Module(:__anon__), quote
- using ArtifactOverrideLoading
- arty_path, barty_path
- end)
+ (arty_path, barty_path) = Core.eval(
+ Module(:__anon__), quote
+ using ArtifactOverrideLoading
+ arty_path, barty_path
+ end
+ )
@test arty_path == barty_override_path
@test barty_path == barty_override_path
@@ -756,7 +789,7 @@ end
open(joinpath(depot1, "artifacts", "Overrides.toml"), "w") do io
TOML.print(io, overrides)
end
- @test_logs (:error, msg) match_mode=:any Pkg.Artifacts.load_overrides(;force=true)
+ @test_logs (:error, msg) match_mode = :any Pkg.Artifacts.load_overrides(; force = true)
end
# Mapping to a non-absolute path or SHA1 hash
@@ -781,7 +814,7 @@ end
empty!(DEPOT_PATH)
append!(DEPOT_PATH, old_depot_path)
Base.append_bundled_depot_path!(DEPOT_PATH)
- Pkg.Artifacts.load_overrides(;force=true)
+ Pkg.Artifacts.load_overrides(; force = true)
end
end
@@ -800,22 +833,22 @@ end
@testset "installing artifacts when symlinks are copied" begin
# copy symlinks to simulate the typical Microsoft Windows user experience where
# developer mode is not enabled (no admin rights)
- withenv("BINARYPROVIDER_COPYDEREF"=>"true", "JULIA_PKG_IGNORE_HASHES"=>"true") do
+ withenv("BINARYPROVIDER_COPYDEREF" => "true", "JULIA_PKG_IGNORE_HASHES" => "true") do
temp_pkg_dir() do tmpdir
artifacts_toml = joinpath(tmpdir, "Artifacts.toml")
cp(joinpath(@__DIR__, "test_packages", "ArtifactInstallation", "Artifacts.toml"), artifacts_toml)
Pkg.activate(tmpdir)
cts_real_hash = create_artifact() do dir
- local meta = Artifacts.artifact_meta("collapse_the_symlink", artifacts_toml)
+ local meta = Pkg.Artifacts.artifact_meta("collapse_the_symlink", artifacts_toml)
local collapse_url = meta["download"][1]["url"]
local collapse_hash = meta["download"][1]["sha256"]
# Because "BINARYPROVIDER_COPYDEREF"=>"true", this will copy symlinks.
- download_verify_unpack(collapse_url, collapse_hash, dir; verbose=true, ignore_existence=true)
+ download_verify_unpack(collapse_url, collapse_hash, dir; verbose = true, ignore_existence = true)
end
cts_hash = artifact_hash("collapse_the_symlink", artifacts_toml)
@test !artifact_exists(cts_hash)
@test artifact_exists(cts_real_hash)
- @test_logs (:error, r"Tree Hash Mismatch!") match_mode=:any Pkg.instantiate()
+ @test_logs (:error, r"Tree Hash Mismatch!") match_mode = :any Pkg.instantiate()
@test artifact_exists(cts_hash)
# Make sure existing artifacts don't get deleted.
@test artifact_exists(cts_real_hash)
@@ -823,4 +856,16 @@ end
end
end
+if Sys.iswindows()
+ @testset "filemode(dir) non-executable on windows" begin
+ mktempdir() do dir
+ touch(joinpath(dir, "foo"))
+ @test !isempty(readdir(dir))
+ # This technically should be true, the fact that it's not is
+ # a wrinkle of libuv, it would be nice to fix it and so if we
+ # do, this test will let us know.
+ @test filemode(dir) & 0o001 == 0
+ end
+ end
+end
end # module
diff --git a/test/binaryplatforms.jl b/test/binaryplatforms.jl
index 3400f7ff2f..22482e60c4 100644
--- a/test/binaryplatforms.jl
+++ b/test/binaryplatforms.jl
@@ -9,39 +9,39 @@ const platform = @inferred Platform platform_key_abi()
# This is a compatibility test; once we've fully migrated away from Pkg.BinaryPlatforms
# to the new Base.BinaryPlatforms module, we can throw away the shim definitions in
-# `BinaryPlatforms_compat.jl` and drop these tests.
+# `BinaryPlatformsCompat.jl` and drop these tests.
@testset "Compat - PlatformNames" begin
# Ensure the platform type constructors are well behaved
@testset "Platform constructors" begin
@test_throws ArgumentError Linux(:not_a_platform)
- @test_throws ArgumentError Linux(:x86_64; libc=:crazy_libc)
- @test_throws ArgumentError Linux(:x86_64; libc=:glibc, call_abi=:crazy_abi)
- @test_throws ArgumentError Linux(:x86_64; libc=:glibc, call_abi=:eabihf)
- @test_throws ArgumentError Linux(:armv7l; libc=:glibc, call_abi=:kekeke)
+ @test_throws ArgumentError Linux(:x86_64; libc = :crazy_libc)
+ @test_throws ArgumentError Linux(:x86_64; libc = :glibc, call_abi = :crazy_abi)
+ @test_throws ArgumentError Linux(:x86_64; libc = :glibc, call_abi = :eabihf)
+ @test_throws ArgumentError Linux(:armv7l; libc = :glibc, call_abi = :kekeke)
@test_throws ArgumentError MacOS(:i686)
- @test_throws ArgumentError MacOS(:x86_64; libc=:glibc)
- @test_throws ArgumentError MacOS(:x86_64; call_abi=:eabihf)
- @test_throws ArgumentError Windows(:x86_64; libc=:glibc)
- @test_throws ArgumentError Windows(:x86_64; call_abi=:eabihf)
+ @test_throws ArgumentError MacOS(:x86_64; libc = :glibc)
+ @test_throws ArgumentError MacOS(:x86_64; call_abi = :eabihf)
+ @test_throws ArgumentError Windows(:x86_64; libc = :glibc)
+ @test_throws ArgumentError Windows(:x86_64; call_abi = :eabihf)
@test_throws ArgumentError FreeBSD(:not_a_platform)
- @test_throws ArgumentError FreeBSD(:x86_64; libc=:crazy_libc)
- @test_throws ArgumentError FreeBSD(:x86_64; call_abi=:crazy_abi)
- @test_throws ArgumentError FreeBSD(:x86_64; call_abi=:eabihf)
+ @test_throws ArgumentError FreeBSD(:x86_64; libc = :crazy_libc)
+ @test_throws ArgumentError FreeBSD(:x86_64; call_abi = :crazy_abi)
+ @test_throws ArgumentError FreeBSD(:x86_64; call_abi = :eabihf)
# Test copy constructor
cabi = CompilerABI(;
- libgfortran_version=v"3",
- libstdcxx_version=v"3.4.18",
- cxxstring_abi=:cxx03,
+ libgfortran_version = v"3",
+ libstdcxx_version = v"3.4.18",
+ cxxstring_abi = :cxx03,
)
- cabi2 = CompilerABI(cabi; cxxstring_abi=:cxx11)
+ cabi2 = CompilerABI(cabi; cxxstring_abi = :cxx11)
@test libgfortran_version(cabi) == libgfortran_version(cabi2)
@test libstdcxx_version(cabi) == libstdcxx_version(cabi2)
@test cxxstring_abi(cabi) != cxxstring_abi(cabi2)
# Explicitly test that we can pass arguments to UnknownPlatform,
# and it doesn't do anything.
- @test UnknownPlatform(:riscv; libc=:fuschia_libc) == UnknownPlatform()
+ @test UnknownPlatform(:riscv; libc = :fuschia_libc) == UnknownPlatform()
end
@testset "Platform properties" begin
@@ -51,7 +51,7 @@ const platform = @inferred Platform platform_key_abi()
end
# Test that we can get the arch of various platforms
- @test arch(Linux(:aarch64; libc=:musl)) == :aarch64
+ @test arch(Linux(:aarch64; libc = :musl)) == :aarch64
@test arch(Windows(:i686)) == :i686
@test arch(FreeBSD(:amd64)) == :x86_64
@test arch(FreeBSD(:i386)) == :i686
@@ -70,13 +70,13 @@ const platform = @inferred Platform platform_key_abi()
@test call_abi(Linux(:x86_64)) === nothing
@test call_abi(Linux(:armv6l)) == :eabihf
- @test call_abi(Linux(:armv7l; call_abi=:eabihf)) == :eabihf
- @test call_abi(UnknownPlatform(;call_abi=:eabihf)) === nothing
+ @test call_abi(Linux(:armv7l; call_abi = :eabihf)) == :eabihf
+ @test call_abi(UnknownPlatform(; call_abi = :eabihf)) === nothing
@test triplet(Windows(:i686)) == "i686-w64-mingw32"
- @test triplet(Linux(:x86_64; libc=:musl)) == "x86_64-linux-musl"
- @test triplet(Linux(:armv7l; libc=:musl)) == "armv7l-linux-musleabihf"
- @test triplet(Linux(:armv6l; libc=:musl, call_abi=:eabihf)) == "armv6l-linux-musleabihf"
+ @test triplet(Linux(:x86_64; libc = :musl)) == "x86_64-linux-musl"
+ @test triplet(Linux(:armv7l; libc = :musl)) == "armv7l-linux-musleabihf"
+ @test triplet(Linux(:armv6l; libc = :musl, call_abi = :eabihf)) == "armv6l-linux-musleabihf"
@test triplet(Linux(:x86_64)) == "x86_64-linux-gnu"
@test triplet(Linux(:armv6l)) == "armv6l-linux-gnueabihf"
@test triplet(MacOS()) == "x86_64-apple-darwin14"
@@ -100,20 +100,20 @@ const platform = @inferred Platform platform_key_abi()
@testset "platforms_match()" begin
# Just do a quick combinatorial sweep for completeness' sake for platform matching
for libgfortran_version in (nothing, v"3", v"5"),
- libstdcxx_version in (nothing, v"3.4.18", v"3.4.26"),
- cxxstring_abi in (nothing, :cxx03, :cxx11)
+ libstdcxx_version in (nothing, v"3.4.18", v"3.4.26"),
+ cxxstring_abi in (nothing, :cxx03, :cxx11)
cabi = CompilerABI(;
- libgfortran_version=libgfortran_version,
- libstdcxx_version=libstdcxx_version,
- cxxstring_abi=cxxstring_abi,
+ libgfortran_version = libgfortran_version,
+ libstdcxx_version = libstdcxx_version,
+ cxxstring_abi = cxxstring_abi,
)
- @test platforms_match(Linux(:x86_64), Linux(:x86_64, compiler_abi=cabi))
- @test platforms_match(Linux(:x86_64, compiler_abi=cabi), Linux(:x86_64))
+ @test platforms_match(Linux(:x86_64), Linux(:x86_64, compiler_abi = cabi))
+ @test platforms_match(Linux(:x86_64, compiler_abi = cabi), Linux(:x86_64))
# Also test auto-string-parsing
- @test platforms_match(triplet(Linux(:x86_64)), Linux(:x86_64, compiler_abi=cabi))
- @test platforms_match(Linux(:x86_64), triplet(Linux(:x86_64, compiler_abi=cabi)))
+ @test platforms_match(triplet(Linux(:x86_64)), Linux(:x86_64, compiler_abi = cabi))
+ @test platforms_match(Linux(:x86_64), triplet(Linux(:x86_64, compiler_abi = cabi)))
end
# Ensure many of these things do NOT match
@@ -124,18 +124,18 @@ const platform = @inferred Platform platform_key_abi()
# Make some explicitly non-matching cabi's
base_cabi = CompilerABI(;
- libgfortran_version=v"5",
- cxxstring_abi=:cxx11,
+ libgfortran_version = v"5",
+ cxxstring_abi = :cxx11,
)
for arch in (:x86_64, :i686, :aarch64, :armv6l, :armv7l),
- cabi in (
- CompilerABI(libgfortran_version=v"3"),
- CompilerABI(cxxstring_abi=:cxx03),
- CompilerABI(libgfortran_version=v"4", cxxstring_abi=:cxx11),
- CompilerABI(libgfortran_version=v"3", cxxstring_abi=:cxx03),
- )
-
- @test !platforms_match(Linux(arch, compiler_abi=base_cabi), Linux(arch, compiler_abi=cabi))
+ cabi in (
+ CompilerABI(libgfortran_version = v"3"),
+ CompilerABI(cxxstring_abi = :cxx03),
+ CompilerABI(libgfortran_version = v"4", cxxstring_abi = :cxx11),
+ CompilerABI(libgfortran_version = v"3", cxxstring_abi = :cxx03),
+ )
+
+ @test !platforms_match(Linux(arch, compiler_abi = base_cabi), Linux(arch, compiler_abi = cabi))
end
end
@@ -149,7 +149,7 @@ const platform = @inferred Platform platform_key_abi()
@test !Sys.isapple(Linux(:powerpc64le))
@test Sys.isbsd(MacOS())
@test Sys.isbsd(FreeBSD(:x86_64))
- @test !Sys.isbsd(Linux(:powerpc64le; libc=:musl))
+ @test !Sys.isbsd(Linux(:powerpc64le; libc = :musl))
end
end
diff --git a/test/extensions.jl b/test/extensions.jl
index f7d7ab26b9..5750e12ef2 100644
--- a/test/extensions.jl
+++ b/test/extensions.jl
@@ -1,43 +1,43 @@
-using .Utils
+using .Utils
using Test
using UUIDs
@testset "weak deps" begin
he_root = joinpath(@__DIR__, "test_packages", "ExtensionExamples", "HasExtensions.jl")
hdwe_root = joinpath(@__DIR__, "test_packages", "ExtensionExamples", "HasDepWithExtensions.jl")
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
# clean out any .cov files from previous test runs
recursive_rm_cov_files(he_root)
recursive_rm_cov_files(hdwe_root)
- Pkg.activate(; temp=true)
- Pkg.develop(path=he_root)
- Pkg.test("HasExtensions", julia_args=`--depwarn=no`) # OffsetArrays errors from depwarn
+ Pkg.activate(; temp = true)
+ Pkg.develop(path = he_root)
+ Pkg.test("HasExtensions", julia_args = `--depwarn=no`) # OffsetArrays errors from depwarn
@test !any(endswith(".cov"), readdir(joinpath(he_root, "src")))
@test !any(endswith(".cov"), readdir(joinpath(he_root, "ext")))
- Pkg.test("HasExtensions", coverage=true, julia_args=`--depwarn=no`) # OffsetArrays errors from depwarn
+ Pkg.test("HasExtensions", coverage = true, julia_args = `--depwarn=no`) # OffsetArrays errors from depwarn
@test any(endswith(".cov"), readdir(joinpath(he_root, "src")))
@test any(endswith(".cov"), readdir(joinpath(he_root, "ext")))
end
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
# clean out any .cov files from previous test runs
recursive_rm_cov_files(he_root)
recursive_rm_cov_files(hdwe_root)
- Pkg.activate(; temp=true)
- Pkg.develop(path=hdwe_root)
- Pkg.test("HasDepWithExtensions", julia_args=`--depwarn=no`) # OffsetArrays errors from depwarn
+ Pkg.activate(; temp = true)
+ Pkg.develop(path = hdwe_root)
+ Pkg.test("HasDepWithExtensions", julia_args = `--depwarn=no`) # OffsetArrays errors from depwarn
io = IOBuffer()
- Pkg.status(; extensions=true, mode=Pkg.PKGMODE_MANIFEST, io)
- # TODO: Test output when ext deps are loaded etc.
+ Pkg.status(; extensions = true, mode = Pkg.PKGMODE_MANIFEST, io)
+ # TODO: Test output when ext deps are loaded etc.
str = String(take!(io))
- @test contains(str, "└─ OffsetArraysExt [OffsetArrays]" )
+ @test contains(str, "└─ OffsetArraysExt [OffsetArrays]") || contains(str, "├─ OffsetArraysExt [OffsetArrays]")
@test !any(endswith(".cov"), readdir(joinpath(hdwe_root, "src")))
@test !any(endswith(".cov"), readdir(joinpath(he_root, "src")))
@test !any(endswith(".cov"), readdir(joinpath(he_root, "ext")))
- Pkg.test("HasDepWithExtensions", coverage=true, julia_args=`--depwarn=no`) # OffsetArrays errors from depwarn
+ Pkg.test("HasDepWithExtensions", coverage = true, julia_args = `--depwarn=no`) # OffsetArrays errors from depwarn
@test any(endswith(".cov"), readdir(joinpath(hdwe_root, "src")))
# No coverage files should be in HasExtensions even though it's used because coverage
@@ -46,61 +46,60 @@ using UUIDs
@test !any(endswith(".cov"), readdir(joinpath(he_root, "ext")))
end
- isolate(loaded_depot=true) do
- Pkg.activate(; temp=true)
- Pkg.develop(path=he_root)
+ isolate(loaded_depot = true) do
+ Pkg.activate(; temp = true)
+ Pkg.develop(path = he_root)
@test_throws Pkg.Resolve.ResolverError Pkg.add(; name = "OffsetArrays", version = "0.9.0")
end
- isolate(loaded_depot=false) do
+ isolate(loaded_depot = false) do
depot = mktempdir(); empty!(DEPOT_PATH); push!(DEPOT_PATH, depot); Base.append_bundled_depot_path!(DEPOT_PATH)
- Pkg.activate(; temp=true)
- Pkg.Registry.add(path=joinpath(@__DIR__, "test_packages", "ExtensionExamples", "ExtensionRegistry"))
+ Pkg.activate(; temp = true)
+ Pkg.Registry.add(path = joinpath(@__DIR__, "test_packages", "ExtensionExamples", "ExtensionRegistry"))
Pkg.Registry.add("General")
Pkg.add("HasExtensions")
- Pkg.test("HasExtensions", julia_args=`--depwarn=no`) # OffsetArrays errors from depwarn
+ Pkg.test("HasExtensions", julia_args = `--depwarn=no`) # OffsetArrays errors from depwarn
Pkg.add("HasDepWithExtensions")
- Pkg.test("HasDepWithExtensions", julia_args=`--depwarn=no`) # OffsetArrays errors from depwarn
+ Pkg.test("HasDepWithExtensions", julia_args = `--depwarn=no`) # OffsetArrays errors from depwarn
@test_throws Pkg.Resolve.ResolverError Pkg.add(; name = "OffsetArrays", version = "0.9.0")
end
- isolate(loaded_depot=false) do
+ isolate(loaded_depot = false) do
withenv("JULIA_PKG_PRECOMPILE_AUTO" => 0) do
depot = mktempdir(); empty!(DEPOT_PATH); push!(DEPOT_PATH, depot); Base.append_bundled_depot_path!(DEPOT_PATH)
- Pkg.activate(; temp=true)
- Pkg.Registry.add(path=joinpath(@__DIR__, "test_packages", "ExtensionExamples", "ExtensionRegistry"))
+ Pkg.activate(; temp = true)
+ Pkg.Registry.add(path = joinpath(@__DIR__, "test_packages", "ExtensionExamples", "ExtensionRegistry"))
Pkg.Registry.add("General")
Pkg.add("HasDepWithExtensions")
end
iob = IOBuffer()
- Pkg.precompile("HasDepWithExtensions", io=iob)
+ Pkg.precompile("HasDepWithExtensions", io = iob)
out = String(take!(iob))
@test occursin("Precompiling", out)
@test occursin("OffsetArraysExt", out)
@test occursin("HasExtensions", out)
@test occursin("HasDepWithExtensions", out)
end
- isolate(loaded_depot=false) do
+ isolate(loaded_depot = false) do
withenv("JULIA_PKG_PRECOMPILE_AUTO" => 0) do
- Pkg.activate(; temp=true)
- Pkg.add("Example", target=:weakdeps)
+ Pkg.activate(; temp = true)
+ Pkg.add("Example", target = :weakdeps)
proj = Pkg.Types.Context().env.project
@test isempty(proj.deps)
@test proj.weakdeps == Dict{String, Base.UUID}("Example" => Base.UUID("7876af07-990d-54b4-ab0e-23690620f79a"))
- Pkg.activate(; temp=true)
- Pkg.add("Example", target=:extras)
+ Pkg.activate(; temp = true)
+ Pkg.add("Example", target = :extras)
proj = Pkg.Types.Context().env.project
@test isempty(proj.deps)
@test proj.extras == Dict{String, Base.UUID}("Example" => Base.UUID("7876af07-990d-54b4-ab0e-23690620f79a"))
end
end
- isolate(loaded_depot=false) do
+ isolate(loaded_depot = false) do
mktempdir() do dir
Pkg.Registry.add("General")
- path = joinpath(@__DIR__, "test_packages", "TestWeakDepProject")
- cp(path, joinpath(dir, "TestWeakDepProject"))
- Pkg.activate(joinpath(dir, "TestWeakDepProject"))
+ path = copy_test_package(dir, "TestWeakDepProject")
+ Pkg.activate(path)
Pkg.resolve()
@test Pkg.dependencies()[UUID("2ab3a3ac-af41-5b50-aa03-7779005ae688")].version == v"0.3.26"
diff --git a/test/force_latest_compatible_version.jl b/test/force_latest_compatible_version.jl
index 9547c06adc..bc5402b956 100644
--- a/test/force_latest_compatible_version.jl
+++ b/test/force_latest_compatible_version.jl
@@ -297,7 +297,7 @@ const test_package_parent_dir = joinpath(
)
@test_logs(
(:warn, message_2),
- match_mode=:any,
+ match_mode = :any,
Pkg.test(;
force_latest_compatible_version = true,
),
@@ -314,7 +314,7 @@ const test_package_parent_dir = joinpath(
)
@test_logs(
(:warn, message_2),
- match_mode=:any,
+ match_mode = :any,
Pkg.test(;
force_latest_compatible_version = true,
allow_earlier_backwards_compatible_versions,
diff --git a/test/historical_stdlib_version.jl b/test/historical_stdlib_version.jl
new file mode 100644
index 0000000000..01eafacd92
--- /dev/null
+++ b/test/historical_stdlib_version.jl
@@ -0,0 +1,354 @@
+module HistoricalStdlibVersionsTests
+using ..Pkg
+using Pkg.Types: is_stdlib
+using Pkg.Artifacts: artifact_meta, artifact_path
+using Base.BinaryPlatforms: HostPlatform, Platform, platforms_match
+using Test
+using TOML
+
+ENV["HISTORICAL_STDLIB_VERSIONS_AUTO_REGISTER"] = "false"
+using HistoricalStdlibVersions
+
+include("utils.jl")
+using .Utils
+
+@testset "is_stdlib() across versions" begin
+ HistoricalStdlibVersions.register!()
+
+ networkoptions_uuid = Base.UUID("ca575930-c2e3-43a9-ace4-1e988b2c1908")
+ pkg_uuid = Base.UUID("44cfe95a-1eb2-52ea-b672-e2afdf69b78f")
+ mbedtls_jll_uuid = Base.UUID("c8ffd9c3-330d-5841-b78e-0817d7145fa1")
+
+ # Test NetworkOptions across multiple versions (it became a stdlib in v1.6+, and was registered)
+ @test is_stdlib(networkoptions_uuid)
+ @test is_stdlib(networkoptions_uuid, v"1.6")
+ @test !is_stdlib(networkoptions_uuid, v"1.5")
+ @test !is_stdlib(networkoptions_uuid, v"1.0.0")
+ @test !is_stdlib(networkoptions_uuid, v"0.7")
+ @test !is_stdlib(networkoptions_uuid, nothing)
+
+ # Pkg is an unregistered stdlib and has always been a stdlib
+ @test is_stdlib(pkg_uuid)
+ @test is_stdlib(pkg_uuid, v"1.0")
+ @test is_stdlib(pkg_uuid, v"1.6")
+ @test is_stdlib(pkg_uuid, v"999.999.999")
+ @test is_stdlib(pkg_uuid, v"0.7")
+ @test is_stdlib(pkg_uuid, nothing)
+
+ # MbedTLS_jll stopped being a stdlib in 1.12
+ @test !is_stdlib(mbedtls_jll_uuid)
+ @test !is_stdlib(mbedtls_jll_uuid, v"1.12")
+ @test is_stdlib(mbedtls_jll_uuid, v"1.11")
+ @test is_stdlib(mbedtls_jll_uuid, v"1.10")
+
+ HistoricalStdlibVersions.unregister!()
+ # Test that we can probe for stdlibs for the current version with no STDLIBS_BY_VERSION,
+ # but that we throw a PkgError if we ask for a particular julia version.
+ @test is_stdlib(networkoptions_uuid)
+ @test_throws Pkg.Types.PkgError is_stdlib(networkoptions_uuid, v"1.6")
+end
+
+
+@testset "Pkg.add() with julia_version" begin
+ HistoricalStdlibVersions.register!()
+
+ # A package with artifacts that went from normal package -> stdlib
+ gmp_jll_uuid = "781609d7-10c4-51f6-84f2-b8444358ff6d"
+ # A package that has only ever been a stdlib
+ linalg_uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
+ # A package that went from normal package -> stdlib
+ networkoptions_uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908"
+
+ function get_manifest_block(name)
+ manifest_path = joinpath(dirname(Base.active_project()), "Manifest.toml")
+ @test isfile(manifest_path)
+ deps = Base.get_deps(TOML.parsefile(manifest_path))
+ @test haskey(deps, name)
+ return only(deps[name])
+ end
+
+ isolate(loaded_depot = true) do
+ # Next, test that if we ask for `v1.5` it DOES have a version, and that GMP_jll installs v6.1.X
+ Pkg.add(["NetworkOptions", "GMP_jll"]; julia_version = v"1.5")
+ no_block = get_manifest_block("NetworkOptions")
+ @test haskey(no_block, "uuid")
+ @test no_block["uuid"] == networkoptions_uuid
+ @test haskey(no_block, "version")
+
+ gmp_block = get_manifest_block("GMP_jll")
+ @test haskey(gmp_block, "uuid")
+ @test gmp_block["uuid"] == gmp_jll_uuid
+ @test haskey(gmp_block, "version")
+ @test startswith(gmp_block["version"], "6.1.2")
+
+ # Test that the artifact of GMP_jll contains the right library
+ @test haskey(gmp_block, "git-tree-sha1")
+ gmp_jll_dir = Pkg.Operations.find_installed("GMP_jll", Base.UUID(gmp_jll_uuid), Base.SHA1(gmp_block["git-tree-sha1"]))
+ @test isdir(gmp_jll_dir)
+ artifacts_toml = joinpath(gmp_jll_dir, "Artifacts.toml")
+ @test isfile(artifacts_toml)
+ meta = artifact_meta("GMP", artifacts_toml)
+
+ # `meta` can be `nothing` on some of our newer platforms; we _know_ this should
+ # not be the case on the following platforms, so we check these explicitly to
+ # ensure that we haven't accidentally broken something, and then we gate some
+ # following tests on whether or not `meta` is `nothing`:
+ for arch in ("x86_64", "i686"), os in ("linux", "mac", "windows")
+ if platforms_match(HostPlatform(), Platform(arch, os))
+ @test meta !== nothing
+ end
+ end
+
+ # These tests require a matching platform artifact for this old version of GMP_jll,
+ # which is not the case on some of our newer platforms.
+ if meta !== nothing
+ gmp_artifact_path = artifact_path(Base.SHA1(meta["git-tree-sha1"]))
+ @test isdir(gmp_artifact_path)
+
+ # On linux, we can check the filename to ensure it's grabbing the correct library
+ if Sys.islinux()
+ libgmp_filename = joinpath(gmp_artifact_path, "lib", "libgmp.so.10.3.2")
+ @test isfile(libgmp_filename)
+ end
+ end
+ end
+
+ # Next, test that if we ask for `v1.6`, GMP_jll gets `v6.2.0`, and for `v1.7`, it gets `v6.2.1`
+ function do_gmp_test(julia_version, gmp_version)
+ isolate(loaded_depot = true) do
+ Pkg.add("GMP_jll"; julia_version)
+ gmp_block = get_manifest_block("GMP_jll")
+ @test haskey(gmp_block, "uuid")
+ @test gmp_block["uuid"] == gmp_jll_uuid
+ @test haskey(gmp_block, "version")
+ @test startswith(gmp_block["version"], string(gmp_version))
+ end
+ end
+ do_gmp_test(v"1.6", v"6.2.0")
+ do_gmp_test(v"1.7", v"6.2.1")
+
+ isolate(loaded_depot = true) do
+ # Next, test that if we ask for `nothing`, NetworkOptions has a `version` but `LinearAlgebra` does not.
+ Pkg.add(["LinearAlgebra", "NetworkOptions"]; julia_version = nothing)
+ no_block = get_manifest_block("NetworkOptions")
+ @test haskey(no_block, "uuid")
+ @test no_block["uuid"] == networkoptions_uuid
+ @test haskey(no_block, "version")
+ linalg_block = get_manifest_block("LinearAlgebra")
+ @test haskey(linalg_block, "uuid")
+ @test linalg_block["uuid"] == linalg_uuid
+ @test !haskey(linalg_block, "version")
+ end
+
+ isolate(loaded_depot = true) do
+ # Next, test that stdlibs do not get dependencies from the registry
+ # NOTE: this test depends on the fact that in Julia v1.6+ we added
+ # "fake" JLLs that do not depend on Pkg while the "normal" p7zip_jll does.
+ # A future p7zip_jll in the registry may not depend on Pkg, so be sure
+ # to verify your assumptions when updating this test.
+ Pkg.add("p7zip_jll")
+ p7zip_jll_uuid = Base.UUID("3f19e933-33d8-53b3-aaab-bd5110c3b7a0")
+ @test !("Pkg" in keys(Pkg.dependencies()[p7zip_jll_uuid].dependencies))
+ end
+
+ HistoricalStdlibVersions.unregister!()
+end
+
+@testset "Resolving for another version of Julia" begin
+ HistoricalStdlibVersions.register!()
+ temp_pkg_dir() do dir
+ function find_by_name(versions, name)
+ idx = findfirst(p -> p.name == name, versions)
+ if idx === nothing
+ return nothing
+ end
+ return versions[idx]
+ end
+
+ # First, we're going to resolve for specific versions of Julia, ensuring we get the right dep versions:
+ Pkg.Registry.download_default_registries(Pkg.stdout_f())
+ ctx = Pkg.Types.Context(; julia_version = v"1.5")
+ versions, deps = Pkg.Operations._resolve(
+ ctx.io, ctx.env, ctx.registries, [
+ Pkg.Types.PackageSpec(name = "MPFR_jll", uuid = Base.UUID("3a97d323-0669-5f0c-9066-3539efd106a3")),
+ ], Pkg.Types.PRESERVE_TIERED, ctx.julia_version
+ )
+ gmp = find_by_name(versions, "GMP_jll")
+ @test gmp !== nothing
+ @test gmp.version.major == 6 && gmp.version.minor == 1
+ ctx = Pkg.Types.Context(; julia_version = v"1.6")
+ versions, deps = Pkg.Operations._resolve(
+ ctx.io, ctx.env, ctx.registries, [
+ Pkg.Types.PackageSpec(name = "MPFR_jll", uuid = Base.UUID("3a97d323-0669-5f0c-9066-3539efd106a3")),
+ ], Pkg.Types.PRESERVE_TIERED, ctx.julia_version
+ )
+ gmp = find_by_name(versions, "GMP_jll")
+ @test gmp !== nothing
+ @test gmp.version.major == 6 && gmp.version.minor == 2
+
+ # We'll also test resolving an "impossible" manifest; one that requires two package versions that
+ # are not both loadable by the same Julia:
+ ctx = Pkg.Types.Context(; julia_version = nothing)
+ versions, deps = Pkg.Operations._resolve(
+ ctx.io, ctx.env, ctx.registries, [
+ # This version of GMP only works on Julia v1.6
+ Pkg.Types.PackageSpec(name = "GMP_jll", uuid = Base.UUID("781609d7-10c4-51f6-84f2-b8444358ff6d"), version = v"6.2.0"),
+ # This version of MPFR only works on Julia v1.5
+ Pkg.Types.PackageSpec(name = "MPFR_jll", uuid = Base.UUID("3a97d323-0669-5f0c-9066-3539efd106a3"), version = v"4.0.2"),
+ ], Pkg.Types.PRESERVE_TIERED, ctx.julia_version
+ )
+ gmp = find_by_name(versions, "GMP_jll")
+ @test gmp !== nothing
+ @test gmp.version.major == 6 && gmp.version.minor == 2
+ mpfr = find_by_name(versions, "MPFR_jll")
+ @test mpfr !== nothing
+ @test mpfr.version.major == 4 && mpfr.version.minor == 0
+ end
+ HistoricalStdlibVersions.unregister!()
+end
+
+HelloWorldC_jll_UUID = Base.UUID("dca1746e-5efc-54fc-8249-22745bc95a49")
+GMP_jll_UUID = Base.UUID("781609d7-10c4-51f6-84f2-b8444358ff6d")
+OpenBLAS_jll_UUID = Base.UUID("4536629a-c528-5b80-bd46-f80d51c5b363")
+libcxxwrap_julia_jll_UUID = Base.UUID("3eaa8342-bff7-56a5-9981-c04077f7cee7")
+libblastrampoline_jll_UUID = Base.UUID("8e850b90-86db-534c-a0d3-1478176c7d93")
+
+isolate(loaded_depot = true) do
+ @testset "Elliot and Mosè's mini Pkg test suite" begin # https://github.com/JuliaPackaging/JLLPrefixes.jl/issues/6
+ HistoricalStdlibVersions.register!()
+ @testset "Standard add" begin
+ Pkg.activate(temp = true)
+ # Standard add (non-stdlib, flexible version)
+ Pkg.add(; name = "HelloWorldC_jll")
+ @test haskey(Pkg.dependencies(), HelloWorldC_jll_UUID)
+
+ Pkg.activate(temp = true)
+ # Standard add (non-stdlib, url and rev)
+ Pkg.add(; name = "HelloWorldC_jll", url = "https://github.com/JuliaBinaryWrappers/HelloWorldC_jll.jl", rev = "0b4959a49385d4bb00efd281447dc19348ebac08")
+ @test Pkg.dependencies()[Base.UUID("dca1746e-5efc-54fc-8249-22745bc95a49")].git_revision === "0b4959a49385d4bb00efd281447dc19348ebac08"
+
+ Pkg.activate(temp = true)
+ # Standard add (non-stdlib, specified version)
+ Pkg.add(; name = "HelloWorldC_jll", version = v"1.0.10+1")
+ @test Pkg.dependencies()[Base.UUID("dca1746e-5efc-54fc-8249-22745bc95a49")].version === v"1.0.10+1"
+
+ Pkg.activate(temp = true)
+ # Standard add (non-stdlib, versionspec)
+ Pkg.add(; name = "HelloWorldC_jll", version = Pkg.Types.VersionSpec("1.0.10"))
+ @test Pkg.dependencies()[Base.UUID("dca1746e-5efc-54fc-8249-22745bc95a49")].version === v"1.0.10+1"
+ end
+
+ @testset "Julia-version-dependent add" begin
+ Pkg.activate(temp = true)
+ # Julia-version-dependent add (non-stdlib, flexible version)
+ Pkg.add(; name = "libcxxwrap_julia_jll", julia_version = v"1.7")
+ @test Pkg.dependencies()[libcxxwrap_julia_jll_UUID].version >= v"0.14.0+0"
+
+ Pkg.activate(temp = true)
+ # Julia-version-dependent add (non-stdlib, specified version)
+ Pkg.add(; name = "libcxxwrap_julia_jll", version = v"0.9.4+0", julia_version = v"1.7")
+ @test Pkg.dependencies()[libcxxwrap_julia_jll_UUID].version === v"0.9.4+0"
+
+ Pkg.activate(temp = true)
+ Pkg.add(; name = "libcxxwrap_julia_jll", version = v"0.8.8+1", julia_version = v"1.9")
+ # FIXME? Pkg.dependencies() complains here that mbedtls_jll isn't installed so can't be used here.
+ # Perhaps Pkg.dependencies() should just return state and not error if source isn't installed?
+ @test_skip Pkg.dependencies()[libcxxwrap_julia_jll_UUID].version === v"0.9.4+0"
+ for pkgspec in Pkg.Operations.load_all_deps_loadable(Pkg.Types.Context().env)
+ if pkgspec.uuid == libcxxwrap_julia_jll_UUID
+ @test pkgspec.version === v"0.8.8+1"
+ end
+ end
+ end
+
+ @testset "Old Pkg add regression" begin
+ Pkg.activate(temp = true)
+ Pkg.add(; name = "Pkg", julia_version = v"1.11")
+ end
+
+ @testset "Stdlib add" begin
+ Pkg.activate(temp = true)
+ # Stdlib add (current julia version)
+ Pkg.add(; name = "GMP_jll")
+ @test Pkg.dependencies()[GMP_jll_UUID].version >= v"6.3.0+2" # v1.13.0-DEV
+
+ Pkg.activate(temp = true)
+ # Make sure the source of GMP_jll is installed
+ Pkg.add([PackageSpec("GMP_jll")]; julia_version = v"1.6")
+ src = Pkg.Operations.find_installed(
+ "GMP_jll",
+ Base.UUID("781609d7-10c4-51f6-84f2-b8444358ff6d"),
+ Base.SHA1("40388878122d491a2e55b0e730196098595d8a90")
+ )
+ @test src isa String
+ # issue https://github.com/JuliaLang/Pkg.jl/issues/2930
+ @test_broken isdir(src)
+ @test_broken isfile(joinpath(src, "Artifacts.toml"))
+
+ Pkg.activate(temp = true)
+ # Stdlib add (other julia version)
+ Pkg.add(; name = "GMP_jll", julia_version = v"1.7")
+ @test Pkg.dependencies()[GMP_jll_UUID].version === v"6.2.1+1"
+
+ # Stdlib add (other julia version, with specific version bound)
+ # Note, this doesn't work properly, it adds but doesn't install any artifacts.
+ # Technically speaking, this is probably okay from Pkg's perspective, since
+ # we're asking Pkg to resolve according to what Julia v1.7 would do.... and
+ # Julia v1.7 would not install anything because it's a stdlib! However, we
+ # would sometimes like to resolve the latest version of GMP_jll for Julia v1.7
+ # then install that. If we have to manually work around that and look up what
+ # GMP_jll for Julia v1.7 is, then ask for that version explicitly, that's ok.
+
+ Pkg.activate(temp = true)
+ Pkg.add(; name = "GMP_jll", julia_version = v"1.7")
+
+ # This is expected to fail, that version can't live with `julia_version = v"1.7"`
+ @test_throws Pkg.Resolve.ResolverError Pkg.add(; name = "GMP_jll", version = v"6.2.0+5", julia_version = v"1.7")
+
+ Pkg.activate(temp = true)
+ # Stdlib add (julia_version == nothing)
+ # Note: this is currently known to be broken, we get the wrong GMP_jll!
+ Pkg.add(; name = "GMP_jll", version = v"6.2.1+1", julia_version = nothing)
+ @test_broken Pkg.dependencies()[GMP_jll_UUID].version === v"6.2.1+1"
+ end
+
+ @testset "julia_version = nothing" begin
+ @testset "stdlib add" begin
+ Pkg.activate(temp = true)
+ # Stdlib add (impossible constraints due to julia version compat, so
+ # must pass `julia_version=nothing`). In this case, we always fully
+ # specify versions, but if we don't, it's okay to just give us whatever
+ # the resolver prefers
+ Pkg.add(
+ [
+ PackageSpec(; name = "OpenBLAS_jll", version = v"0.3.13"),
+ PackageSpec(; name = "libblastrampoline_jll", version = v"5.1.1"),
+ ]; julia_version = nothing
+ )
+ @test v"0.3.14" > Pkg.dependencies()[OpenBLAS_jll_UUID].version >= v"0.3.13"
+ @test v"5.1.2" > Pkg.dependencies()[libblastrampoline_jll_UUID].version >= v"5.1.1"
+ end
+ @testset "non-stdlib JLL add" begin
+ platform = Platform("x86_64", "linux"; libc = "musl")
+ # specific version vs. compat spec
+ @testset for version in (v"3.24.3+0", "3.24.3")
+ dependencies = [PackageSpec(; name = "CMake_jll", version = version)]
+ @testset "with context (using private Pkg.add method)" begin
+ Pkg.activate(temp = true)
+ ctx = Pkg.Types.Context(; julia_version = nothing)
+ mydeps = deepcopy(dependencies)
+ foreach(Pkg.API.handle_package_input!, mydeps)
+ Pkg.add(ctx, mydeps; platform)
+ end
+ @testset "with julia_version" begin
+ Pkg.activate(temp = true)
+ Pkg.add(deepcopy(dependencies); platform, julia_version = nothing)
+ end
+ end
+ end
+ end
+ HistoricalStdlibVersions.unregister!()
+ end
+end
+
+end # module
diff --git a/test/manifest/formats/v2.0/Manifest.toml b/test/manifest/formats/v2.0/Manifest.toml
index 1156d8f6cc..da4bec6355 100644
--- a/test/manifest/formats/v2.0/Manifest.toml
+++ b/test/manifest/formats/v2.0/Manifest.toml
@@ -14,4 +14,3 @@ uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
[[deps.Serialization]]
uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b"
-
diff --git a/test/manifest/yanked/Manifest.toml b/test/manifest/yanked/Manifest.toml
new file mode 100644
index 0000000000..39261c8e24
--- /dev/null
+++ b/test/manifest/yanked/Manifest.toml
@@ -0,0 +1,62 @@
+# This file is machine-generated - editing it directly is not advised
+
+julia_version = "1.13.0-DEV"
+manifest_format = "2.0"
+project_hash = "8a91c3bdaf7537df6f842463e0505fb7c623875c"
+
+[[deps.Compat]]
+deps = ["TOML", "UUIDs"]
+git-tree-sha1 = "3a3dfb30697e96a440e4149c8c51bf32f818c0f3"
+uuid = "34da2185-b29b-5c13-b0c7-acf172513d20"
+version = "4.17.0"
+
+ [deps.Compat.extensions]
+ CompatLinearAlgebraExt = "LinearAlgebra"
+
+ [deps.Compat.weakdeps]
+ Dates = "ade2ca70-3891-5945-98fb-dc099432e06a"
+ LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
+
+[[deps.Dates]]
+deps = ["Printf"]
+uuid = "ade2ca70-3891-5945-98fb-dc099432e06a"
+version = "1.11.0"
+
+[[deps.ExprTools]]
+git-tree-sha1 = "27415f162e6028e81c72b82ef756bf321213b6ec"
+uuid = "e2ba6199-217a-4e67-a87a-7c52f15ade04"
+version = "0.1.10"
+
+[[deps.Mocking]]
+deps = ["Compat", "ExprTools"]
+git-tree-sha1 = "d5ca7901d59738132d6f9be9a18da50bc85c5115"
+uuid = "78c3b35d-d492-501b-9361-3d52fe80e533"
+version = "0.7.4"
+
+[[deps.Printf]]
+deps = ["Unicode"]
+uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7"
+version = "1.11.0"
+
+[[deps.Random]]
+deps = ["SHA"]
+uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
+version = "1.11.0"
+
+[[deps.SHA]]
+uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce"
+version = "0.7.0"
+
+[[deps.TOML]]
+deps = ["Dates"]
+uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76"
+version = "1.0.3"
+
+[[deps.UUIDs]]
+deps = ["Random", "SHA"]
+uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
+version = "1.11.0"
+
+[[deps.Unicode]]
+uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5"
+version = "1.11.0"
diff --git a/test/manifest/yanked/Project.toml b/test/manifest/yanked/Project.toml
new file mode 100644
index 0000000000..f61c7f288a
--- /dev/null
+++ b/test/manifest/yanked/Project.toml
@@ -0,0 +1,2 @@
+[deps]
+Mocking = "78c3b35d-d492-501b-9361-3d52fe80e533"
diff --git a/test/manifests.jl b/test/manifests.jl
index a1780673d1..c9fbd440d4 100644
--- a/test/manifests.jl
+++ b/test/manifests.jl
@@ -1,30 +1,36 @@
module ManifestTests
-using Test, UUIDs, Dates, TOML
+using Test, UUIDs, Dates, TOML
import ..Pkg, LibGit2
-using ..Utils
+using ..Utils
# used with the reference manifests in `test/manifest/formats`
# ensures the manifests are valid and restored after test
-function reference_manifest_isolated_test(f, dir::String; v1::Bool=false)
- env_dir = joinpath(@__DIR__, "manifest", "formats", dir)
- env_manifest = joinpath(env_dir, "Manifest.toml")
- env_project = joinpath(env_dir, "Project.toml")
- cp(env_manifest, string(env_manifest, "_backup"))
- cp(env_project, string(env_project, "_backup"))
- try
+function reference_manifest_isolated_test(f, dir::String; v1::Bool = false)
+ source_env_dir = joinpath(@__DIR__, "manifest", "formats", dir)
+ source_env_manifest = joinpath(source_env_dir, "Manifest.toml")
+ source_env_project = joinpath(source_env_dir, "Project.toml")
+
+ # Create a temporary directory for the test files
+ temp_base_dir = mktempdir()
+ return try
+ # Copy entire directory structure to preserve paths that tests expect
+ env_dir = joinpath(temp_base_dir, dir)
+ cp(source_env_dir, env_dir)
+
+ env_manifest = joinpath(env_dir, "Manifest.toml")
+ env_project = joinpath(env_dir, "Project.toml")
+
isfile(env_manifest) || error("Reference manifest is missing")
if Base.is_v1_format_manifest(Base.parsed_toml(env_manifest)) == !v1
error("Reference manifest file at $(env_manifest) is invalid")
end
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
f(env_dir, env_manifest)
end
finally
- cp(string(env_manifest, "_backup"), env_manifest, force = true)
- rm(string(env_manifest, "_backup"))
- cp(string(env_project, "_backup"), env_project, force = true)
- rm(string(env_project, "_backup"))
+ # Clean up temporary directory
+ rm(temp_base_dir, recursive = true)
end
end
@@ -32,9 +38,9 @@ end
@testset "Manifest.toml formats" begin
@testset "Default manifest format is v2" begin
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
io = IOBuffer()
- Pkg.activate(; io=io, temp=true)
+ Pkg.activate(; io = io, temp = true)
output = String(take!(io))
@test occursin(r"Activating.*project at.*", output)
Pkg.add("Profile")
@@ -45,12 +51,12 @@ end
end
@testset "Empty manifest file is automatically upgraded to v2" begin
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
io = IOBuffer()
d = mktempdir()
manifest = joinpath(d, "Manifest.toml")
touch(manifest)
- Pkg.activate(d; io=io)
+ Pkg.activate(d; io = io)
output = String(take!(io))
@test occursin(r"Activating.*project at.*", output)
env_manifest = Pkg.Types.Context().env.manifest_file
@@ -59,7 +65,7 @@ end
@test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest))
@test Pkg.Types.Context().env.manifest.manifest_format == v"2.0.0"
- Pkg.add("Profile"; io=io)
+ Pkg.add("Profile"; io = io)
env_manifest = Pkg.Types.Context().env.manifest_file
@test samefile(env_manifest, manifest)
@test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest)) == false
@@ -68,15 +74,15 @@ end
# check that having a Project with deps, and an empty manifest file doesn't error
rm(manifest)
touch(manifest)
- Pkg.activate(d; io=io)
- Pkg.add("Example"; io=io)
+ Pkg.activate(d; io = io)
+ Pkg.add("Example"; io = io)
end
end
@testset "v1.0: activate, change, maintain manifest format" begin
reference_manifest_isolated_test("v1.0", v1 = true) do env_dir, env_manifest
io = IOBuffer()
- Pkg.activate(env_dir; io=io)
+ Pkg.activate(env_dir; io = io)
output = String(take!(io))
@test occursin(r"Activating.*project at.*`.*v1.0`", output)
@test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest))
@@ -92,7 +98,7 @@ end
@testset "v2.0: activate, change, maintain manifest format" begin
reference_manifest_isolated_test("v2.0") do env_dir, env_manifest
io = IOBuffer()
- Pkg.activate(env_dir; io=io)
+ Pkg.activate(env_dir; io = io)
output = String(take!(io))
@test occursin(r"Activating.*project at.*`.*v2.0`", output)
@test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest)) == false
@@ -105,7 +111,7 @@ end
m = Pkg.Types.read_manifest(env_manifest)
@test m.other["some_other_field"] == "other"
- @test m.other["some_other_data"] == [1,2,3,4]
+ @test m.other["some_other_data"] == [1, 2, 3, 4]
mktemp() do path, io
Pkg.Types.write_manifest(io, m)
@@ -121,12 +127,12 @@ end
m.julia_version = v"1.5.0"
msg = r"The active manifest file has dependencies that were resolved with a different julia version"
@test_logs (:warn, msg) Pkg.Types.check_manifest_julia_version_compat(m, env_manifest)
- @test_throws Pkg.Types.PkgError Pkg.Types.check_manifest_julia_version_compat(m, env_manifest, julia_version_strict=true)
+ @test_throws Pkg.Types.PkgError Pkg.Types.check_manifest_julia_version_compat(m, env_manifest, julia_version_strict = true)
m.julia_version = nothing
msg = r"The active manifest file is missing a julia version entry"
@test_logs (:warn, msg) Pkg.Types.check_manifest_julia_version_compat(m, env_manifest)
- @test_throws Pkg.Types.PkgError Pkg.Types.check_manifest_julia_version_compat(m, env_manifest, julia_version_strict=true)
+ @test_throws Pkg.Types.PkgError Pkg.Types.check_manifest_julia_version_compat(m, env_manifest, julia_version_strict = true)
end
end
@@ -134,21 +140,21 @@ end
# the reference file here is not actually v3.0. It just represents an unknown manifest format
reference_manifest_isolated_test("v3.0_unknown") do env_dir, env_manifest
io = IOBuffer()
- @test_logs (:warn,) Pkg.activate(env_dir; io=io)
+ @test_logs (:warn,) Pkg.activate(env_dir; io = io)
end
end
@testset "Pkg.upgrade_manifest()" begin
reference_manifest_isolated_test("v1.0", v1 = true) do env_dir, env_manifest
io = IOBuffer()
- Pkg.activate(env_dir; io=io)
+ Pkg.activate(env_dir; io = io)
output = String(take!(io))
@test occursin(r"Activating.*project at.*`.*v1.0`", output)
@test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest))
Pkg.upgrade_manifest()
@test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest)) == false
- Pkg.activate(env_dir; io=io)
+ Pkg.activate(env_dir; io = io)
output = String(take!(io))
@test occursin(r"Activating.*project at.*`.*v1.0`", output)
@test Pkg.Types.Context().env.manifest.manifest_format == v"2.0.0"
@@ -157,14 +163,14 @@ end
@testset "Pkg.upgrade_manifest(manifest_path)" begin
reference_manifest_isolated_test("v1.0", v1 = true) do env_dir, env_manifest
io = IOBuffer()
- Pkg.activate(env_dir; io=io)
+ Pkg.activate(env_dir; io = io)
output = String(take!(io))
@test occursin(r"Activating.*project at.*`.*v1.0`", output)
@test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest))
Pkg.upgrade_manifest(env_manifest)
@test Base.is_v1_format_manifest(Base.parsed_toml(env_manifest)) == false
- Pkg.activate(env_dir; io=io)
+ Pkg.activate(env_dir; io = io)
output = String(take!(io))
@test occursin(r"Activating.*project at.*`.*v1.0`", output)
@test Pkg.Types.Context().env.manifest.manifest_format == v"2.0.0"
@@ -181,8 +187,8 @@ end
@test Pkg.Operations.dropbuild(v"1.2.3-rc1") == v"1.2.3-rc1"
end
@testset "new environment: value is `nothing`, then ~`VERSION` after resolve" begin
- isolate(loaded_depot=true) do
- Pkg.activate(; temp=true)
+ isolate(loaded_depot = true) do
+ Pkg.activate(; temp = true)
@test Pkg.Types.Context().env.manifest.julia_version == nothing
Pkg.add("Profile")
@test Pkg.Types.Context().env.manifest.julia_version == Pkg.Operations.dropbuild(VERSION)
@@ -212,10 +218,10 @@ end
end
end
@testset "project_hash for identifying out of sync manifest" begin
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
iob = IOBuffer()
- Pkg.activate(; temp=true)
+ Pkg.activate(; temp = true)
Pkg.add("Example")
@test Pkg.is_manifest_current(Pkg.Types.Context()) === true
diff --git a/test/misc.jl b/test/misc.jl
index e9b3d00ff6..49e8dfcae1 100644
--- a/test/misc.jl
+++ b/test/misc.jl
@@ -12,19 +12,40 @@ end
@testset "hashing" begin
@test hash(Pkg.Types.Project()) == hash(Pkg.Types.Project())
@test hash(Pkg.Types.VersionBound()) == hash(Pkg.Types.VersionBound())
- @test hash(Pkg.Resolve.Fixed(VersionNumber(0,1,0))) == hash(Pkg.Resolve.Fixed(VersionNumber(0,1,0)))
+ @test hash(Pkg.Resolve.Fixed(VersionNumber(0, 1, 0))) == hash(Pkg.Resolve.Fixed(VersionNumber(0, 1, 0)))
hash(Pkg.Types.VersionSpec()) # hash isn't stable
hash(Pkg.Types.PackageEntry()) # hash isn't stable because the internal `repo` field is a mutable struct
end
@testset "safe_realpath" begin
+ realpath(Sys.BINDIR) == Pkg.safe_realpath(Sys.BINDIR)
# issue #3085
- for p in ("", "some-non-existing-path")
+ for p in ("", "some-non-existing-path", "some-non-existing-drive:")
@test p == Pkg.safe_realpath(p)
end
end
@test eltype([PackageSpec(a) for a in []]) == PackageSpec
+@testset "PackageSpec version default" begin
+ # Test that PackageSpec without explicit version gets set to VersionSpec("*")
+ # This behavior is relied upon by BinaryBuilderBase.jl for dependency filtering
+ # See: https://github.com/JuliaPackaging/BinaryBuilderBase.jl/blob/master/src/Prefix.jl
+ ps = PackageSpec(name = "Example")
+ @test ps.version == Pkg.Types.VersionSpec("*")
+
+ # Test with UUID as well
+ ps_uuid = PackageSpec(name = "Example", uuid = Base.UUID("7876af07-990d-54b4-ab0e-23690620f79a"))
+ @test ps_uuid.version == Pkg.Types.VersionSpec("*")
+
+ # Test that explicitly set version is preserved
+ ps_versioned = PackageSpec(name = "Example", version = v"1.0.0")
+ @test ps_versioned.version == v"1.0.0"
+
+ # Test that explicitly set versionspec (string format) is preserved
+ ps_versioned = PackageSpec(name = "Example", version = "1.0.0")
+ @test ps_versioned.version == "1.0.0"
+end
+
end # module
diff --git a/test/new.jl b/test/new.jl
index 5b10b72965..cc6dfc1b1a 100644
--- a/test/new.jl
+++ b/test/new.jl
@@ -1,13 +1,12 @@
module NewTests
-using Test, UUIDs, Dates, TOML
+using Test, UUIDs, Dates, TOML
import ..Pkg, LibGit2
-using Pkg.Types: PkgError
-using Pkg.Resolve: ResolverError
+using Pkg.Types: PkgError
+using Pkg.Resolve: ResolverError
import Pkg.Artifacts: artifact_meta, artifact_path
import Base.BinaryPlatforms: HostPlatform, Platform, platforms_match
-using ..Utils
-import ..HistoricalStdlibVersions
+using ..Utils
using Logging
general_uuid = UUID("23338594-aafe-5451-b93e-139f81909106") # UUID for `General`
@@ -31,7 +30,7 @@ Pkg._auto_gc_enabled[] = false
@testset "Depot setup" begin
isolate() do
# Lets make sure we start with a clean slate.
- rm(LOADED_DEPOT; force=true, recursive=true)
+ rm(LOADED_DEPOT; force = true, recursive = true)
mkdir(LOADED_DEPOT)
# And set the loaded depot as our working depot.
empty!(DEPOT_PATH)
@@ -40,7 +39,7 @@ Pkg._auto_gc_enabled[] = false
# Now we double check we have a clean slate.
@test isempty(Pkg.dependencies())
# A simple `add` should set up some things for us:
- Pkg.add(name="Example", version="0.5.3")
+ Pkg.add(name = "Example", version = "0.5.3")
# - `General` should be initiated by default.
regs = Pkg.Registry.reachable_registries()
@test length(regs) == 1
@@ -58,7 +57,7 @@ Pkg._auto_gc_enabled[] = false
@test haskey(Pkg.project().dependencies, "Example")
@test length(Pkg.project().dependencies) == 1
# Now we install the same package at a different version:
- Pkg.add(name="Example", version="0.5.1")
+ Pkg.add(name = "Example", version = "0.5.1")
# - Check that the package was installed correctly.
Pkg.dependencies(exuuid) do pkg
@test pkg.version == v"0.5.1"
@@ -67,10 +66,10 @@ Pkg._auto_gc_enabled[] = false
@test pkg.source != source053
end
# Now a few more versions:
- Pkg.add(name="Example", version="0.5.0")
- Pkg.add(name="Example")
- Pkg.add(name="Example", version="0.3.0")
- Pkg.add(name="Example", version="0.3.3")
+ Pkg.add(name = "Example", version = "0.5.0")
+ Pkg.add(name = "Example")
+ Pkg.add(name = "Example", version = "0.3.0")
+ Pkg.add(name = "Example", version = "0.3.3")
# With similar checks
Pkg.dependencies(exuuid) do pkg
@test pkg.version == v"0.3.3"
@@ -78,42 +77,42 @@ Pkg._auto_gc_enabled[] = false
end
# Now we try adding a second dependency.
# We repeat the same class of tests.
- Pkg.add(name="JSON", version="0.18.0")
+ Pkg.add(name = "JSON", version = "0.18.0")
sourcej018 = nothing
Pkg.dependencies(json_uuid) do pkg
@test pkg.version == v"0.18.0"
@test isdir(pkg.source)
end
- Pkg.add(name="JSON", version="0.20.0")
+ Pkg.add(name = "JSON", version = "0.20.0")
Pkg.dependencies(json_uuid) do pkg
@test isdir(pkg.source)
@test pkg.source != sourcej018
end
# Now check packages which track repos instead of registered versions
- Pkg.add(url="https://github.com/JuliaLang/Example.jl", rev="v0.5.3")
+ Pkg.add(url = "https://github.com/JuliaLang/Example.jl", rev = "v0.5.3")
Pkg.dependencies(exuuid) do pkg
@test !pkg.is_tracking_registry
@test isdir(pkg.source)
@test isdir(Pkg.Types.add_repo_cache_path(pkg.git_source))
end
- Pkg.add(name="Example", rev="master")
+ Pkg.add(name = "Example", rev = "master")
Pkg.dependencies(exuuid) do pkg
@test !pkg.is_tracking_registry
@test isdir(pkg.source)
@test isdir(Pkg.Types.add_repo_cache_path(pkg.git_source))
end
# Also check that unregistered packages are installed properly.
- Pkg.add(url="https://github.com/00vareladavid/Unregistered.jl")
+ Pkg.add(url = "https://github.com/00vareladavid/Unregistered.jl")
Pkg.dependencies(unregistered_uuid) do pkg
@test isdir(pkg.source)
@test isdir(Pkg.Types.add_repo_cache_path(pkg.git_source))
end
# Check `develop`
- Pkg.develop(name="Example")
+ Pkg.develop(name = "Example")
Pkg.dependencies(exuuid) do pkg
@test isdir(pkg.source) # TODO check for full git clone, have to implement saving original URL first
end
- Pkg.develop(name="JSON")
+ Pkg.develop(name = "JSON")
Pkg.dependencies(json_uuid) do pkg
@test isdir(pkg.source) # TODO check for full git clone, have to implement saving original URL first
end
@@ -125,10 +124,10 @@ Pkg._auto_gc_enabled[] = false
@test reg.uuid == general_uuid
@test mtime(source053) == source053_time
# Now we clean up so that `isolate` can reuse the loaded depot properly
- rm(joinpath(LOADED_DEPOT, "environments"); force=true, recursive=true)
- rm(joinpath(LOADED_DEPOT, "clones"); force=true, recursive=true)
- rm(joinpath(LOADED_DEPOT, "logs"); force=true, recursive=true)
- rm(joinpath(LOADED_DEPOT, "dev"); force=true, recursive=true)
+ rm(joinpath(LOADED_DEPOT, "environments"); force = true, recursive = true)
+ rm(joinpath(LOADED_DEPOT, "clones"); force = true, recursive = true)
+ rm(joinpath(LOADED_DEPOT, "logs"); force = true, recursive = true)
+ rm(joinpath(LOADED_DEPOT, "dev"); force = true, recursive = true)
for (root, dirs, files) in walkdir(LOADED_DEPOT)
for file in files
filepath = joinpath(root, file)
@@ -142,17 +141,103 @@ Pkg._auto_gc_enabled[] = false
end
end
+function kill_with_info(p)
+ if Sys.islinux()
+ SIGINFO = 10
+ elseif Sys.isbsd()
+ SIGINFO = 29
+ end
+ if @isdefined(SIGINFO)
+ kill(p, SIGINFO)
+ timedwait(() -> process_exited(p), 20; pollint = 1.0) # Allow time for profile to collect and print before killing
+ end
+ kill(p)
+ wait(p)
+ return nothing
+end
+
+# This test tests that multiple julia processes can install within same depot concurrently without
+# corrupting the depot and being able to load the package. Only one process will do each of these, others will wait on
+# the specific action for the specific thing:
+# - Install the default registries
+# - Install source of package and deps
+# - Install artifacts
+# - Precompile package and deps
+# - Load & use package
+@testset "Concurrent setup/installation/precompilation across processes" begin
+ @testset for test in 1:1 # increase for stress testing
+ mktempdir() do tmp
+ copy_this_pkg_cache(tmp)
+ pathsep = Sys.iswindows() ? ";" : ":"
+ Pkg_dir = dirname(@__DIR__)
+ withenv("JULIA_DEPOT_PATH" => string(tmp, pathsep)) do
+ script = """
+ using Dates
+ t = Timer(t->println(stderr, Dates.now()), 4*60; interval = 10)
+ import Pkg
+ samefile(pkgdir(Pkg), $(repr(Pkg_dir))) || error("Using wrong Pkg")
+ Pkg.activate(temp=true)
+ Pkg.add(name="FFMPEG", version="0.4") # a package with a lot of deps but fast to load
+ using FFMPEG
+ @showtime FFMPEG.exe("-version")
+ @showtime FFMPEG.exe("-f", "lavfi", "-i", "testsrc=duration=1:size=128x128:rate=10", "-f", "null", "-") # more complete quick test (~10ms)
+ close(t)
+ """
+ cmd = `$(Base.julia_cmd()) --project=$(dirname(@__DIR__)) --startup-file=no --color=no -e $script`
+ did_install_package = Threads.Atomic{Int}(0)
+ did_install_artifact = Threads.Atomic{Int}(0)
+ any_failed = Threads.Atomic{Bool}(false)
+ outputs = fill("", 3)
+ t = @elapsed @sync begin
+ # All but 1 process should be waiting, so should be ok to run many
+ for i in 1:3
+ Threads.@spawn begin
+ iob = IOBuffer()
+ start = time()
+ p = run(pipeline(cmd, stdout = iob, stderr = iob), wait = false)
+ if timedwait(() -> process_exited(p), 5 * 60; pollint = 1.0) === :timed_out
+ kill_with_info(p)
+ end
+ if !success(p)
+ Threads.atomic_cas!(any_failed, false, true)
+ end
+ str = String(take!(iob))
+ if occursin(r"Installed FFMPEG ─", str)
+ Threads.atomic_add!(did_install_package, 1)
+ end
+ if occursin(r"Installed artifact FFMPEG ", str)
+ Threads.atomic_add!(did_install_artifact, 1)
+ end
+ outputs[i] = string("=== test $test, process $i. Took $(time() - start) seconds.\n", str)
+ end
+ end
+ end
+ if any_failed[] || did_install_package[] != 1 || did_install_artifact[] != 1
+ println("=== Concurrent Pkg.add test $test failed after $t seconds")
+ for i in 1:3
+ printstyled(stdout, outputs[i]; color = (:blue, :green, :yellow)[i])
+ end
+ end
+ # only 1 should have actually installed FFMPEG
+ @test !any_failed[]
+ @test did_install_package[] == 1
+ @test did_install_artifact[] == 1
+ end
+ end
+ end
+end
+
#
# ## Sandboxing
#
-inside_test_sandbox(fn, name; kwargs...) = Pkg.test(name; test_fn=fn, kwargs...)
-inside_test_sandbox(fn; kwargs...) = Pkg.test(;test_fn=fn, kwargs...)
+inside_test_sandbox(fn, name; kwargs...) = Pkg.test(name; test_fn = fn, kwargs...)
+inside_test_sandbox(fn; kwargs...) = Pkg.test(; test_fn = fn, kwargs...)
@testset "test: printing" begin
- isolate(loaded_depot=true) do
- Pkg.add(name="Example")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example")
io = Base.BufferStream()
- Pkg.test("Example"; io=io)
+ Pkg.test("Example"; io = io)
closewrite(io)
output = read(io, String)
@test occursin(r"Testing Example", output)
@@ -165,133 +250,155 @@ end
@testset "test: sandboxing" begin
# explicit test dependencies and the tested project are available within the test sandbox
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- foo_uuid = UUID("02250abe-2050-11e9-017e-b301a2b5bcc4")
- path = copy_test_package(tempdir, "BasicSandbox")
- # we set readonly here to simulate the permissions in the `$DEPOT/packages` directory
- Pkg.Types.set_readonly(path)
- Pkg.develop(path=path)
- inside_test_sandbox("BasicSandbox") do
- Pkg.dependencies(foo_uuid) do pkg
- @test length(pkg.dependencies) == 1
- @test haskey(pkg.dependencies, "Random")
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ foo_uuid = UUID("02250abe-2050-11e9-017e-b301a2b5bcc4")
+ path = copy_test_package(tempdir, "BasicSandbox")
+ # we set readonly here to simulate the permissions in the `$DEPOT/packages` directory
+ Pkg.Types.set_readonly(path)
+ Pkg.develop(path = path)
+ inside_test_sandbox("BasicSandbox") do
+ Pkg.dependencies(foo_uuid) do pkg
+ @test length(pkg.dependencies) == 1
+ @test haskey(pkg.dependencies, "Random")
+ end
+ @test haskey(Pkg.project().dependencies, "Test")
+ @test haskey(Pkg.project().dependencies, "BasicSandbox")
end
- @test haskey(Pkg.project().dependencies, "Test")
- @test haskey(Pkg.project().dependencies, "BasicSandbox")
end
- end end
+ end
# the active dependency graph is transferred to the test sandbox
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "TransferSubgraph")
- Pkg.activate(path)
- active_json_version = Pkg.dependencies()[json_uuid].version
- inside_test_sandbox("Unregistered") do
- @test Pkg.dependencies()[json_uuid].version == active_json_version
- end
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "TransferSubgraph")
+ Pkg.activate(path)
+ active_json_version = Pkg.dependencies()[json_uuid].version
+ inside_test_sandbox("Unregistered") do
+ @test Pkg.dependencies()[json_uuid].version == active_json_version
+ end
+ end
+ end
# the active dep graph is transferred to test sandbox, even when tracking unregistered repos
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "TestSubgraphTrackingRepo")
- Pkg.activate(path)
- inside_test_sandbox() do
- Pkg.dependencies(unregistered_uuid) do pkg
- @test pkg.git_source == "https://github.com/00vareladavid/Unregistered.jl"
- @test !pkg.is_tracking_registry
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "TestSubgraphTrackingRepo")
+ Pkg.activate(path)
+ inside_test_sandbox() do
+ Pkg.dependencies(unregistered_uuid) do pkg
+ @test pkg.git_source == "https://github.com/00vareladavid/Unregistered.jl"
+ @test !pkg.is_tracking_registry
+ end
end
end
- end end
+ end
# a test dependency can track a path
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "TestDepTrackingPath")
- Pkg.activate(path)
- inside_test_sandbox() do
- @test Pkg.dependencies()[unregistered_uuid].is_tracking_path
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "TestDepTrackingPath")
+ Pkg.activate(path)
+ inside_test_sandbox() do
+ @test Pkg.dependencies()[unregistered_uuid].is_tracking_path
+ end
end
- end end
+ end
# a test dependency can track a repo
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "TestDepTrackingRepo")
- Pkg.activate(path)
- inside_test_sandbox() do
- Pkg.dependencies(unregistered_uuid) do pkg
- @test !pkg.is_tracking_registry
- @test pkg.git_source == "https://github.com/00vareladavid/Unregistered.jl"
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "TestDepTrackingRepo")
+ Pkg.activate(path)
+ inside_test_sandbox() do
+ Pkg.dependencies(unregistered_uuid) do pkg
+ @test !pkg.is_tracking_registry
+ @test pkg.git_source == "https://github.com/00vareladavid/Unregistered.jl"
+ end
end
end
- end end
+ end
# `compat` for test dependencies is honored
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "TestDepCompat")
- Pkg.activate(path)
- inside_test_sandbox() do
- deps = Pkg.dependencies()
- @test deps[exuuid].version == v"0.3.0"
- @test deps[UUID("9cb9b0df-a8d1-4a6c-a371-7d2ae60a2f25")].version == v"0.1.0"
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "TestDepCompat")
+ Pkg.activate(path)
+ inside_test_sandbox() do
+ deps = Pkg.dependencies()
+ @test deps[exuuid].version == v"0.3.0"
+ @test deps[UUID("9cb9b0df-a8d1-4a6c-a371-7d2ae60a2f25")].version == v"0.1.0"
+ end
end
- end end
+ end
end
# These tests cover the original "targets" API for specifying test dependencies
@testset "test: 'targets' based testing" begin
# `Pkg.test` should work on dependency graphs with nodes sharing the same name but not the same UUID
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- Pkg.activate(joinpath(@__DIR__, "test_packages", "SameNameDifferentUUID"))
- inside_test_sandbox("Example") do
- Pkg.dependencies(UUID("6876af07-990d-54b4-ab0e-23690620f79a")) do pkg
- @test pkg.name == "Example"
- @test realpath(pkg.source) == realpath(joinpath(@__DIR__, "test_packages", "SameNameDifferentUUID", "dev", "Example"))
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ Pkg.activate(joinpath(@__DIR__, "test_packages", "SameNameDifferentUUID"))
+ inside_test_sandbox("Example") do
+ Pkg.dependencies(UUID("6876af07-990d-54b4-ab0e-23690620f79a")) do pkg
+ @test pkg.name == "Example"
+ @test realpath(pkg.source) == realpath(joinpath(@__DIR__, "test_packages", "SameNameDifferentUUID", "dev", "Example"))
+ end
end
end
- end end
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- basic_test_target = UUID("50adb811-5a1f-4be4-8146-2725c7f5d900")
- path = copy_test_package(tempdir, "BasicTestTarget")
- # we set readonly here to simulate the permissions in the `$DEPOT/packages` directory
- Pkg.Types.set_readonly(path)
- Pkg.develop(path=path)
- inside_test_sandbox("BasicTestTarget") do
- @test haskey(Pkg.project().dependencies, "Markdown")
- @test haskey(Pkg.project().dependencies, "Test")
- @test haskey(Pkg.project().dependencies, "BasicTestTarget")
- Pkg.dependencies(basic_test_target) do pkg
- @test pkg.is_tracking_path == true
- @test haskey(pkg.dependencies, "UUIDs")
- @test !haskey(pkg.dependencies, "Markdown")
- @test !haskey(pkg.dependencies, "Test")
+ end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ basic_test_target = UUID("50adb811-5a1f-4be4-8146-2725c7f5d900")
+ path = copy_test_package(tempdir, "BasicTestTarget")
+ # we set readonly here to simulate the permissions in the `$DEPOT/packages` directory
+ Pkg.Types.set_readonly(path)
+ Pkg.develop(path = path)
+ inside_test_sandbox("BasicTestTarget") do
+ @test haskey(Pkg.project().dependencies, "Markdown")
+ @test haskey(Pkg.project().dependencies, "Test")
+ @test haskey(Pkg.project().dependencies, "BasicTestTarget")
+ Pkg.dependencies(basic_test_target) do pkg
+ @test pkg.is_tracking_path == true
+ @test haskey(pkg.dependencies, "UUIDs")
+ @test !haskey(pkg.dependencies, "Markdown")
+ @test !haskey(pkg.dependencies, "Test")
+ end
end
end
- end end
+ end
# dependency of test dependency (#567)
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- for x in ["x1", "x2", "x3"]
- path = copy_test_package(tempdir, x)
- Pkg.develop(Pkg.PackageSpec(path = path))
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ for x in ["x1", "x2", "x3"]
+ path = copy_test_package(tempdir, x)
+ Pkg.develop(Pkg.PackageSpec(path = path))
+ end
+ Pkg.test("x3")
end
- Pkg.test("x3")
- end end
+ end
# preserve root of active project if it is a dependency (#1423)
- isolate(loaded_depot=false) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "ActiveProjectInTestSubgraph")
- Pkg.activate(path)
- inside_test_sandbox("B") do
- deps = Pkg.dependencies()
- @test deps[UUID("c86f0f68-174e-41db-bd5e-b032223de205")].version == v"1.2.3"
- end
- end end
+ isolate(loaded_depot = false) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "ActiveProjectInTestSubgraph")
+ Pkg.activate(path)
+ inside_test_sandbox("B") do
+ deps = Pkg.dependencies()
+ @test deps[UUID("c86f0f68-174e-41db-bd5e-b032223de205")].version == v"1.2.3"
+ end
+ end
+ end
# test targets should also honor compat
- isolate(loaded_depot=false) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "TestTargetCompat")
- Pkg.activate(path)
- inside_test_sandbox() do
- deps = Pkg.dependencies()
- @test deps[exuuid].version == v"0.3.0"
+ isolate(loaded_depot = false) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "TestTargetCompat")
+ Pkg.activate(path)
+ inside_test_sandbox() do
+ deps = Pkg.dependencies()
+ @test deps[exuuid].version == v"0.3.0"
+ end
end
- end end
+ end
end
@testset "test: fallback when no project file exists" begin
- isolate(loaded_depot=true) do
- Pkg.add(name="Permutations", version="0.3.2")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Permutations", version = "0.3.2")
if Sys.WORD_SIZE == 32
# The Permutations.jl v0.3.2 tests are known to fail on 32-bit Julia
@test_skip Pkg.test("Permutations")
@@ -303,7 +410,7 @@ end
@testset "using a test/REQUIRE file" begin
isolate() do
- Pkg.add(name="EnglishText", version="0.6.0")
+ Pkg.add(name = "EnglishText", version = "0.6.0")
Pkg.test("EnglishText")
end
end
@@ -312,7 +419,7 @@ end
# # Activate
#
@testset "activate: repl" begin
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.REPLMode.TEST_MODE[] = true
# - activate shared env
api, args, opts = first(Pkg.pkg"activate --shared Foo")
@@ -346,41 +453,52 @@ end
arg = args[1]
@test arg.url == "https://github.com/JuliaLang/Pkg.jl"
@test arg.rev == "aa/gitlab"
+
+ api, args, opts = first(Pkg.pkg"add https://github.com/JuliaPy/PythonCall.jl/pull/529")
+ arg = args[1]
+ @test arg.url == "https://github.com/JuliaPy/PythonCall.jl"
+ @test arg.rev == "pull/529/head"
+
+ api, args, opts = first(Pkg.pkg"add https://github.com/TimG1964/XLSX.jl#Bug-fixing-post-#289:subdir")
+ arg = args[1]
+ @test arg.url == "https://github.com/TimG1964/XLSX.jl"
+ @test arg.rev == "Bug-fixing-post-#289"
+ @test arg.subdir == "subdir"
end
end
@testset "activate" begin
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
io = IOBuffer()
- Pkg.activate("Foo"; io=io)
+ Pkg.activate("Foo"; io = io)
output = String(take!(io))
@test occursin(r"Activating.*project at.*`.*Foo`", output)
- Pkg.activate(; io=io, temp=true)
+ Pkg.activate(; io = io, temp = true)
output = String(take!(io))
@test occursin(r"Activating new project at `.*`", output)
prev_env = Base.active_project()
# - activating the previous project
- Pkg.activate(; temp=true)
+ Pkg.activate(; temp = true)
@test Base.active_project() != prev_env
- Pkg.activate(; prev=true)
+ Pkg.activate(; prev = true)
@test prev_env == Base.active_project()
- Pkg.activate(; temp=true)
+ Pkg.activate(; temp = true)
@test Base.active_project() != prev_env
- Pkg.activate(; prev=true)
+ Pkg.activate(; prev = true)
@test Base.active_project() == prev_env
Pkg.activate("")
@test Base.active_project() != prev_env
- Pkg.activate(; prev=true)
+ Pkg.activate(; prev = true)
@test Base.active_project() == prev_env
load_path_before = copy(LOAD_PATH)
try
empty!(LOAD_PATH) # unset active env
Pkg.activate() # shouldn't error
- Pkg.activate(; prev=true) # shouldn't error
+ Pkg.activate(; prev = true) # shouldn't error
finally
append!(empty!(LOAD_PATH), load_path_before)
end
@@ -397,41 +515,62 @@ end
# Here we check against invalid input.
@testset "add: input checking" begin
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
# Julia is not a valid package name.
- @test_throws PkgError("`julia` is not a valid package name") Pkg.add(name="julia")
+ @test_throws PkgError("`julia` is not a valid package name") Pkg.add(name = "julia")
# Package names must be valid Julia identifiers.
- @test_throws PkgError("`***` is not a valid package name") Pkg.add(name="***")
- @test_throws PkgError("`Foo Bar` is not a valid package name") Pkg.add(name="Foo Bar")
+ @test_throws PkgError("`***` is not a valid package name") Pkg.add(name = "***")
+ @test_throws PkgError("`Foo Bar` is not a valid package name") Pkg.add(name = "Foo Bar")
# Names which are invalid and are probably URLs or paths.
- @test_throws PkgError("""
- `https://github.com` is not a valid package name
- The argument appears to be a URL or path, perhaps you meant `Pkg.add(url="...")` or `Pkg.add(path="...")`.""") Pkg.add("https://github.com")
- @test_throws PkgError("""
- `./Foobar` is not a valid package name
- The argument appears to be a URL or path, perhaps you meant `Pkg.add(url="...")` or `Pkg.add(path="...")`.""") Pkg.add("./Foobar")
+ @test_throws PkgError(
+ """
+ `https://github.com` is not a valid package name
+ The argument appears to be a URL or path, perhaps you meant `Pkg.add(url="...")` or `Pkg.add(path="...")`."""
+ ) Pkg.add("https://github.com")
+ @test_throws PkgError(
+ """
+ `./Foobar` is not a valid package name
+ The argument appears to be a URL or path, perhaps you meant `Pkg.add(url="...")` or `Pkg.add(path="...")`."""
+ ) Pkg.add("./Foobar")
# An empty spec is invalid.
@test_throws PkgError(
"name, UUID, URL, or filesystem path specification required when calling `add`"
- ) Pkg.add(Pkg.PackageSpec())
+ ) Pkg.add(Pkg.PackageSpec())
# Versions imply that we are tracking a registered version.
@test_throws PkgError(
"version specification invalid when tracking a repository: `0.5.0` specified for package `Example`"
- ) Pkg.add(name="Example", rev="master", version="0.5.0")
+ ) Pkg.add(name = "Example", rev = "master", version = "0.5.0")
# Adding with a slight typo gives suggestions
try
- Pkg.add("Examplle")
+ io = IOBuffer()
+ Pkg.add("Examplle"; io)
@test false # to fail if add doesn't error
- catch err
+ catch err
@test err isa PkgError
@test occursin("The following package names could not be resolved:", err.msg)
@test occursin("Examplle (not found in project, manifest or registry)", err.msg)
- @test occursin("Suggestions:", err.msg)
- # @test occursin("Example", err.msg) # can't test this as each char in "Example" is individually colorized
+ @test occursin("Suggestions: Example", err.msg)
+ end
+ # Adding with lowercase suggests uppercase
+ try
+ io = IOBuffer()
+ Pkg.add("http"; io)
+ @test false # to fail if add doesn't error
+ catch err
+ @test err isa PkgError
+ @test occursin("Suggestions: HTTP", err.msg)
+ end
+ try
+ io = IOBuffer()
+ Pkg.add("Flix"; io)
+ @test false # to fail if add doesn't error
+ catch err
+ @test err isa PkgError
+ @test occursin("Suggestions: Flux", err.msg)
end
@test_throws PkgError(
"name, UUID, URL, or filesystem path specification required when calling `add`"
- ) Pkg.add(Pkg.PackageSpec())
+ ) Pkg.add(Pkg.PackageSpec())
# Adding an unregistered package
@test_throws PkgError Pkg.add("ThisIsHopefullyRandom012856014925701382")
# Wrong UUID
@@ -441,24 +580,29 @@ end
# Two packages with the same name
@test_throws PkgError(
"it is invalid to specify multiple packages with the same name: `Example`"
- ) Pkg.add([(;name="Example"), (;name="Example",version="0.5.0")])
+ ) Pkg.add([(; name = "Example"), (; name = "Example", version = "0.5.0")])
end
# Unregistered UUID in manifest
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- package_path = copy_test_package(tempdir, "UnregisteredUUID")
- Pkg.activate(package_path)
- @test_throws PkgError("expected package `Example [142fd7e7]` to be registered") Pkg.add("JSON")
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ package_path = copy_test_package(tempdir, "UnregisteredUUID")
+ Pkg.activate(package_path)
+ @test_throws PkgError Pkg.add("JSON")
+ end
+ end
# empty git repo (no commits)
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- close(LibGit2.init(tempdir))
- try Pkg.add(path=tempdir)
- @test false # to fail if add doesn't error
- catch err
- @test err isa PkgError
- @test match(r"^invalid git HEAD", err.msg) !== nothing
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ close(LibGit2.init(tempdir))
+ try
+ Pkg.add(path = tempdir)
+ @test false # to fail if add doesn't error
+ catch err
+ @test err isa PkgError
+ @test match(r"^invalid git HEAD", err.msg) !== nothing
+ end
end
- end end
+ end
end
#
@@ -470,7 +614,7 @@ end
# The package should be added as a direct dependency.
@testset "add: changes to the active project" begin
# Basic add
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.add(Pkg.PackageSpec("Example"))
Pkg.dependencies(exuuid) do ex
@test ex.is_tracking_registry
@@ -478,8 +622,8 @@ end
@test haskey(Pkg.project().dependencies, "Example")
end
# Basic add by version
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", version="0.5.0")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", version = "0.5.0")
Pkg.dependencies(exuuid) do ex
@test ex.is_tracking_registry
@test ex.version == v"0.5.0"
@@ -500,8 +644,8 @@ end
end
=#
# Basic add by URL
- isolate(loaded_depot=true) do
- Pkg.add(url="https://github.com/JuliaLang/Example.jl", rev="v0.5.3")
+ isolate(loaded_depot = true) do
+ Pkg.add(url = "https://github.com/JuliaLang/Example.jl", rev = "v0.5.3")
Pkg.dependencies(exuuid) do ex
@test !ex.is_tracking_registry
@test ex.git_source == "https://github.com/JuliaLang/Example.jl"
@@ -510,8 +654,8 @@ end
@test haskey(Pkg.project().dependencies, "Example")
end
# Basic add by git revision
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", rev="master")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", rev = "master")
Pkg.dependencies(exuuid) do ex
@test !ex.is_tracking_registry
@test ex.git_source == "https://github.com/JuliaLang/Example.jl.git"
@@ -520,7 +664,7 @@ end
@test haskey(Pkg.project().dependencies, "Example")
end
# Adding stdlibs should work.
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
profile_uuid = UUID("9abbd945-dff8-562f-b5e8-e1ebf5ef1b79")
# - Adding a stdlib by name.
Pkg.add("Markdown")
@@ -528,48 +672,52 @@ end
@test pkg.name == "Markdown"
end
# - Adding a stdlib by UUID.
- Pkg.add(uuid=profile_uuid)
+ Pkg.add(uuid = profile_uuid)
Pkg.dependencies(profile_uuid) do pkg
@test pkg.name == "Profile"
end
# - Adding a stdlib by name/UUID.
- Pkg.add(name="Markdown", uuid=markdown_uuid)
+ Pkg.add(name = "Markdown", uuid = markdown_uuid)
Pkg.dependencies(markdown_uuid) do pkg
@test pkg.name == "Markdown"
end
end
# Basic add by local path.
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "SimplePackage"))
- Pkg.add(path=path)
- Pkg.dependencies(simple_package_uuid) do pkg
- @test pkg.git_source == realpath(path)
- # We take care to check that the project file has been parsed correctly.
- @test pkg.name == "SimplePackage"
- @test pkg.version == v"0.2.0"
- @test haskey(pkg.dependencies, "Example")
- @test haskey(pkg.dependencies, "Markdown")
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "SimplePackage"))
+ Pkg.add(path = path)
+ Pkg.dependencies(simple_package_uuid) do pkg
+ @test pkg.git_source == realpath(path)
+ # We take care to check that the project file has been parsed correctly.
+ @test pkg.name == "SimplePackage"
+ @test pkg.version == v"0.2.0"
+ @test haskey(pkg.dependencies, "Example")
+ @test haskey(pkg.dependencies, "Markdown")
+ end
+ @test haskey(Pkg.project().dependencies, "SimplePackage")
+ @test length(Pkg.project().dependencies) == 1
end
- @test haskey(Pkg.project().dependencies, "SimplePackage")
- @test length(Pkg.project().dependencies) == 1
- end end
+ end
# add when depot does not exist should create the default project in the correct location
- isolate() do; mktempdir() do tempdir
- empty!(DEPOT_PATH)
- push!(DEPOT_PATH, tempdir)
- Base.append_bundled_depot_path!(DEPOT_PATH)
- rm(tempdir; force=true, recursive=true)
- @test !isdir(first(DEPOT_PATH))
- Pkg.add("JSON")
- @test dirname(dirname(Pkg.project().path)) == realpath(joinpath(tempdir, "environments"))
- end end
+ isolate() do;
+ mktempdir() do tempdir
+ empty!(DEPOT_PATH)
+ push!(DEPOT_PATH, tempdir)
+ Base.append_bundled_depot_path!(DEPOT_PATH)
+ rm(tempdir; force = true, recursive = true)
+ @test !isdir(first(DEPOT_PATH))
+ Pkg.add("JSON")
+ @test dirname(dirname(Pkg.project().path)) == realpath(joinpath(tempdir, "environments"))
+ end
+ end
end
# Here we can use a loaded depot because we are only checking changes to the active project.
@testset "add: package state changes" begin
# Check that `add` on an already added stdlib works.
# Stdlibs are special cased throughout the codebase.
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.add("Markdown")
Pkg.add("Markdown")
Pkg.dependencies(markdown_uuid) do pkg
@@ -578,9 +726,9 @@ end
@test haskey(Pkg.project().dependencies, "Markdown")
end
# Double add should not change state, this would be an unnecessary change.
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
@test !haskey(Pkg.Types.Context().env.project.compat, "Example")
- Pkg.add(name="Example", version="0.3.0")
+ Pkg.add(name = "Example", version = "0.3.0")
@test Pkg.dependencies()[exuuid].version == v"0.3.0"
@test !haskey(Pkg.Types.Context().env.project.compat, "Example")
Pkg.add("Example")
@@ -588,22 +736,22 @@ end
@test !haskey(Pkg.Types.Context().env.project.compat, "Example")
end
# Adding a new package should not alter the version of existing packages.
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", version="0.3.0")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", version = "0.3.0")
@test Pkg.dependencies()[exuuid].version == v"0.3.0"
Pkg.add("Test")
@test Pkg.dependencies()[exuuid].version == v"0.3.0"
end
# Add by version should not override pinned version.
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", version="0.3.0")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", version = "0.3.0")
Pkg.pin("Example")
Pkg.dependencies(exuuid) do ex
@test ex.version == v"0.3.0"
@test ex.is_tracking_registry
@test ex.is_pinned
end
- Pkg.add(name="Example", version="0.5.0")
+ Pkg.add(name = "Example", version = "0.5.0")
# We check that the package state is left unchanged.
Pkg.dependencies(exuuid) do ex
@test ex.version == v"0.3.0"
@@ -612,14 +760,14 @@ end
end
end
# Add by version should override add by repo.
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", rev="master")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", rev = "master")
# First we check that we are not tracking a registered version.
Pkg.dependencies(exuuid) do ex
@test ex.git_revision == "master"
@test !ex.is_tracking_registry
end
- Pkg.add(name="Example", version="0.3.0")
+ Pkg.add(name = "Example", version = "0.3.0")
# We should now be tracking a registered version.
Pkg.dependencies(exuuid) do ex
@test ex.version == v"0.3.0"
@@ -628,31 +776,33 @@ end
end
end
# Add by version should override add by repo, even for indirect dependencies.
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "DependsOnExample"))
- Pkg.add(path=path)
- Pkg.add(name="Example", rev="master")
- @test !Pkg.dependencies()[exuuid].is_tracking_registry
- # Now we remove the package as a direct dependency.
- # The package should still exist as an indirect dependency because `DependsOnExample` depends on it.
- Pkg.rm("Example")
- Pkg.add(name="Example", version="0.3.0")
- # Now we check that we are tracking a registered version.
- Pkg.dependencies(exuuid) do ex
- @test ex.version == v"0.3.0"
- @test ex.is_tracking_registry
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "DependsOnExample"))
+ Pkg.add(path = path)
+ Pkg.add(name = "Example", rev = "master")
+ @test !Pkg.dependencies()[exuuid].is_tracking_registry
+ # Now we remove the package as a direct dependency.
+ # The package should still exist as an indirect dependency because `DependsOnExample` depends on it.
+ Pkg.rm("Example")
+ Pkg.add(name = "Example", version = "0.3.0")
+ # Now we check that we are tracking a registered version.
+ Pkg.dependencies(exuuid) do ex
+ @test ex.version == v"0.3.0"
+ @test ex.is_tracking_registry
+ end
end
- end end
+ end
# Add by URL should not override pin.
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", version="0.3.0")
- Pkg.pin(name="Example")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", version = "0.3.0")
+ Pkg.pin(name = "Example")
Pkg.dependencies(exuuid) do ex
@test ex.is_pinned
@test ex.is_tracking_registry
@test ex.version == v"0.3.0"
end
- Pkg.add(url="https://github.com/JuliaLang/Example.jl")
+ Pkg.add(url = "https://github.com/JuliaLang/Example.jl")
Pkg.dependencies(exuuid) do ex
@test ex.is_pinned
@test ex.is_tracking_registry
@@ -660,8 +810,8 @@ end
end
end
# It should be possible to switch branches by reusing the URL.
- isolate(loaded_depot=true) do
- Pkg.add(url="https://github.com/00vareladavid/Unregistered.jl", rev="0.2.0")
+ isolate(loaded_depot = true) do
+ Pkg.add(url = "https://github.com/00vareladavid/Unregistered.jl", rev = "0.2.0")
Pkg.dependencies(unregistered_uuid) do pkg
@test pkg.git_source == "https://github.com/00vareladavid/Unregistered.jl"
@test !pkg.is_tracking_registry
@@ -670,7 +820,7 @@ end
@test haskey(pkg.dependencies, "Example")
end
# Now we refer to it by name so to check that we reuse the URL.
- Pkg.add(name="Unregistered", rev="0.1.0")
+ Pkg.add(name = "Unregistered", rev = "0.1.0")
Pkg.dependencies(unregistered_uuid) do pkg
@test pkg.git_source == "https://github.com/00vareladavid/Unregistered.jl"
@test !pkg.is_tracking_registry
@@ -680,168 +830,170 @@ end
end
end
# add should resolve the correct versions even when the manifest is out of sync with the project compat
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- Pkg.activate(copy_test_package(tempdir, "CompatOutOfSync"))
- Pkg.add("Libdl")
- Pkg.dependencies(exuuid) do pkg
- @test pkg.version == v"0.3.0"
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ Pkg.activate(copy_test_package(tempdir, "CompatOutOfSync"))
+ Pkg.add("Libdl")
+ Pkg.dependencies(exuuid) do pkg
+ @test pkg.version == v"0.3.0"
+ end
end
- end end
+ end
# Preserve syntax
# These tests mostly check the REPL side correctness.
# make sure the default behavior is invoked
withenv("JULIA_PKG_PRESERVE_TIERED_INSTALLED" => false) do
- # - Normal add should not change the existing version.
- isolate(loaded_depot=true) do
- Pkg.add(name="libpng_jll", version=v"1.6.37+4")
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- Pkg.add(name="Example", version="0.3.0")
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- Pkg.add(name="JSON", version="0.18.0")
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- end
- # - `tiered_installed`.
- isolate(loaded_depot=false) do
- Pkg.add(name="libpng_jll", version=v"1.6.37+4")
- Pkg.add(name="Example", version="0.3.0")
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
-
- @test_logs(
- (:debug, "tiered_resolve: trying PRESERVE_ALL_INSTALLED"),
- (:debug, "tiered_resolve: trying PRESERVE_ALL"),
- min_level=Logging.Debug,
- match_mode=:any,
- Pkg.add(Pkg.PackageSpec(;name="JSON", version="0.18.0"); preserve=Pkg.PRESERVE_TIERED_INSTALLED)
- )
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ # - Normal add should not change the existing version.
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "libpng_jll", version = v"1.6.37+4")
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ Pkg.add(name = "Example", version = "0.3.0")
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ Pkg.add(name = "JSON", version = "0.18.0")
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ end
+ # - `tiered_installed`.
+ isolate(loaded_depot = false) do
+ Pkg.add(name = "libpng_jll", version = v"1.6.37+4")
+ Pkg.add(name = "Example", version = "0.3.0")
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- Pkg.activate(temp=true)
- @test_logs(
- (:debug, "tiered_resolve: trying PRESERVE_ALL_INSTALLED"),
- min_level=Logging.Debug,
- match_mode=:any,
- Pkg.add("Example"; preserve=Pkg.PRESERVE_TIERED_INSTALLED) # should only add v0.3.0 as it was installed earlier
- )
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test_logs(
+ (:debug, "tiered_resolve: trying PRESERVE_ALL_INSTALLED"),
+ (:debug, "tiered_resolve: trying PRESERVE_ALL"),
+ min_level = Logging.Debug,
+ match_mode = :any,
+ Pkg.add(Pkg.PackageSpec(; name = "JSON", version = "0.18.0"); preserve = Pkg.PRESERVE_TIERED_INSTALLED)
+ )
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- withenv("JULIA_PKG_PRESERVE_TIERED_INSTALLED" => true) do
- Pkg.activate(temp=true)
+ Pkg.activate(temp = true)
@test_logs(
(:debug, "tiered_resolve: trying PRESERVE_ALL_INSTALLED"),
- min_level=Logging.Debug,
- match_mode=:any,
- Pkg.add(name="Example")
+ min_level = Logging.Debug,
+ match_mode = :any,
+ Pkg.add("Example"; preserve = Pkg.PRESERVE_TIERED_INSTALLED) # should only add v0.3.0 as it was installed earlier
)
@test Pkg.dependencies()[exuuid].version == v"0.3.0"
- end
- Pkg.activate(temp=true)
- @test_logs(
- (:debug, "tiered_resolve: trying PRESERVE_ALL"),
- min_level=Logging.Debug,
- match_mode=:any,
- Pkg.add(name="Example") # default 'add' should serve a newer version
- )
- @test Pkg.dependencies()[exuuid].version > v"0.3.0"
- end
- # - `tiered` is the default option.
- isolate(loaded_depot=false) do
- Pkg.add(name="libpng_jll", version=v"1.6.37+4")
- Pkg.add(name="Example", version="0.3.0")
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- Pkg.add(Pkg.PackageSpec(;name="JSON", version="0.18.0"); preserve=Pkg.PRESERVE_TIERED)
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- end
- # - `installed`.
- isolate(loaded_depot=false) do
- Pkg.add(name="libpng_jll", version=v"1.6.37+4")
- Pkg.add(name="Example", version="0.3.0")
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- @test_throws Pkg.Resolve.ResolverError Pkg.add(Pkg.PackageSpec(;name="JSON", version="0.18.0"); preserve=Pkg.PRESERVE_ALL_INSTALLED) # no installed version
- end
- # - `all` should succeed in the same way as `tiered`.
- isolate(loaded_depot=false) do
- Pkg.add(name="libpng_jll", version=v"1.6.37+4")
- Pkg.add(name="Example", version="0.3.0")
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- Pkg.add(Pkg.PackageSpec(;name="JSON", version="0.18.0"); preserve=Pkg.PRESERVE_ALL)
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ withenv("JULIA_PKG_PRESERVE_TIERED_INSTALLED" => true) do
+ Pkg.activate(temp = true)
+ @test_logs(
+ (:debug, "tiered_resolve: trying PRESERVE_ALL_INSTALLED"),
+ min_level = Logging.Debug,
+ match_mode = :any,
+ Pkg.add(name = "Example")
+ )
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ end
- Pkg.rm("JSON")
- Pkg.add(Pkg.PackageSpec(;name="JSON"); preserve=Pkg.PRESERVE_ALL_INSTALLED)
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- end
- # - `direct` should also succeed in the same way.
- isolate(loaded_depot=true) do
- Pkg.add(name="libpng_jll", version=v"1.6.37+4")
- Pkg.add(name="Example", version="0.3.0")
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- Pkg.add(Pkg.PackageSpec(;name="JSON", version="0.18.0"); preserve=Pkg.PRESERVE_DIRECT)
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- end
- # - `semver` should update `Example` and the jll to the highest semver compatible version.
- isolate(loaded_depot=true) do
- Pkg.add(name="libpng_jll", version=v"1.6.37+4")
- Pkg.add(name="Example", version="0.3.0")
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- Pkg.add(Pkg.PackageSpec(;name="JSON", version="0.18.0"); preserve=Pkg.PRESERVE_SEMVER)
- @test Pkg.dependencies()[exuuid].version == v"0.3.3"
- @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
- @test Pkg.dependencies()[pngjll_uuid].version > v"1.6.37+4"
- end
- #- `none` should update `Example` and the jll to the highest compatible version.
- isolate(loaded_depot=true) do
- Pkg.add(name="libpng_jll", version=v"1.6.37+4")
- Pkg.add(name="Example", version="0.3.0")
- @test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
- Pkg.add(Pkg.PackageSpec(;name="JSON", version="0.18.0"); preserve=Pkg.PRESERVE_NONE)
- @test Pkg.dependencies()[exuuid].version > v"0.3.0"
- @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
- @test Pkg.dependencies()[pngjll_uuid].version > v"1.6.37+4"
- end
- isolate(loaded_depot=true) do
- Pkg.add(name="libpng_jll", version=v"1.6.37+5")
- @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+5"
- end
- # Adding a new package to a package should add compat entries
- isolate(loaded_depot=true) do
- mktempdir() do tempdir
- Pkg.activate(tempdir)
- mkpath(joinpath(tempdir, "src"))
- touch(joinpath(tempdir, "src", "Foo.jl"))
- ctx = Pkg.Types.Context()
- ctx.env.project.name = "Foo"
- ctx.env.project.uuid = UUIDs.UUID(0)
- Pkg.Types.write_project(ctx.env)
- Pkg.add(name="Example", version="0.3.0")
+ Pkg.activate(temp = true)
+ @test_logs(
+ (:debug, "tiered_resolve: trying PRESERVE_ALL"),
+ min_level = Logging.Debug,
+ match_mode = :any,
+ Pkg.add(name = "Example") # default 'add' should serve a newer version
+ )
+ @test Pkg.dependencies()[exuuid].version > v"0.3.0"
+ end
+ # - `tiered` is the default option.
+ isolate(loaded_depot = false) do
+ Pkg.add(name = "libpng_jll", version = v"1.6.37+4")
+ Pkg.add(name = "Example", version = "0.3.0")
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ Pkg.add(Pkg.PackageSpec(; name = "JSON", version = "0.18.0"); preserve = Pkg.PRESERVE_TIERED)
@test Pkg.dependencies()[exuuid].version == v"0.3.0"
- @test Pkg.Types.Context().env.project.compat["Example"] == Pkg.Types.Compat(Pkg.Types.VersionSpec("0.3"), "0.3.0")
- Pkg.add(name="Example", version="0.3.1")
- @test Pkg.Types.Context().env.project.compat["Example"] == Pkg.Types.Compat(Pkg.Types.VersionSpec("0.3"), "0.3.0")
+ @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ end
+ # - `installed`.
+ isolate(loaded_depot = false) do
+ Pkg.add(name = "libpng_jll", version = v"1.6.37+4")
+ Pkg.add(name = "Example", version = "0.3.0")
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ @test_throws Pkg.Resolve.ResolverError Pkg.add(Pkg.PackageSpec(; name = "JSON", version = "0.18.0"); preserve = Pkg.PRESERVE_ALL_INSTALLED) # no installed version
+ end
+ # - `all` should succeed in the same way as `tiered`.
+ isolate(loaded_depot = false) do
+ Pkg.add(name = "libpng_jll", version = v"1.6.37+4")
+ Pkg.add(name = "Example", version = "0.3.0")
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ Pkg.add(Pkg.PackageSpec(; name = "JSON", version = "0.18.0"); preserve = Pkg.PRESERVE_ALL)
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+
+ Pkg.rm("JSON")
+ Pkg.add(Pkg.PackageSpec(; name = "JSON"); preserve = Pkg.PRESERVE_ALL_INSTALLED)
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ end
+ # - `direct` should also succeed in the same way.
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "libpng_jll", version = v"1.6.37+4")
+ Pkg.add(name = "Example", version = "0.3.0")
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ Pkg.add(Pkg.PackageSpec(; name = "JSON", version = "0.18.0"); preserve = Pkg.PRESERVE_DIRECT)
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ end
+ # - `semver` should update `Example` and the jll to the highest semver compatible version.
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "libpng_jll", version = v"1.6.37+4")
+ Pkg.add(name = "Example", version = "0.3.0")
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ Pkg.add(Pkg.PackageSpec(; name = "JSON", version = "0.18.0"); preserve = Pkg.PRESERVE_SEMVER)
+ @test Pkg.dependencies()[exuuid].version == v"0.3.3"
+ @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
+ @test Pkg.dependencies()[pngjll_uuid].version > v"1.6.37+4"
+ end
+ # - `none` should update `Example` and the jll to the highest compatible version.
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "libpng_jll", version = v"1.6.37+4")
+ Pkg.add(name = "Example", version = "0.3.0")
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+4"
+ Pkg.add(Pkg.PackageSpec(; name = "JSON", version = "0.18.0"); preserve = Pkg.PRESERVE_NONE)
+ @test Pkg.dependencies()[exuuid].version > v"0.3.0"
+ @test Pkg.dependencies()[json_uuid].version == v"0.18.0"
+ @test Pkg.dependencies()[pngjll_uuid].version > v"1.6.37+4"
+ end
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "libpng_jll", version = v"1.6.37+5")
+ @test Pkg.dependencies()[pngjll_uuid].version == v"1.6.37+5"
+ end
+ # Adding a new package to a package should add compat entries
+ isolate(loaded_depot = true) do
+ mktempdir() do tempdir
+ Pkg.activate(tempdir)
+ mkpath(joinpath(tempdir, "src"))
+ touch(joinpath(tempdir, "src", "Foo.jl"))
+ ctx = Pkg.Types.Context()
+ ctx.env.project.name = "Foo"
+ ctx.env.project.uuid = UUIDs.UUID(0)
+ Pkg.Types.write_project(ctx.env)
+ Pkg.add(name = "Example", version = "0.3.0")
+ @test Pkg.dependencies()[exuuid].version == v"0.3.0"
+ @test Pkg.Types.Context().env.project.compat["Example"] == Pkg.Types.Compat(Pkg.Types.VersionSpec("0.3"), "0.3.0")
+ Pkg.add(name = "Example", version = "0.3.1")
+ @test Pkg.Types.Context().env.project.compat["Example"] == Pkg.Types.Compat(Pkg.Types.VersionSpec("0.3"), "0.3.0")
+ end
end
- end
end # withenv
end
@@ -853,60 +1005,64 @@ end
# This tests shows that, packages added with an absolute path will not break
# if the project is moved to a new position.
# We can use the loaded depot here, it will help us avoid the original clone.
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- empty_package = UUID("26187899-7657-4a90-a2f6-e79e0214bedc")
- path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "EmptyPackage"))
- path = abspath(path)
- Pkg.add(path=path)
- # Now we try to find the package.
- rm(joinpath(DEPOT_PATH[1], "packages"); recursive=true)
- @test !isdir(Pkg.dependencies()[empty_package].source)
- Pkg.instantiate()
- @test isdir(Pkg.dependencies()[empty_package].source)
- # Now we move the project and should still be able to find the package.
- mktempdir() do other_dir
- cp(dirname(Base.active_project()), other_dir; force=true)
- Pkg.activate(other_dir)
- rm(joinpath(DEPOT_PATH[1], "packages"); recursive=true)
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ empty_package = UUID("26187899-7657-4a90-a2f6-e79e0214bedc")
+ path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "EmptyPackage"))
+ path = abspath(path)
+ Pkg.add(path = path)
+ # Now we try to find the package.
+ rm(joinpath(DEPOT_PATH[1], "packages"); recursive = true)
@test !isdir(Pkg.dependencies()[empty_package].source)
Pkg.instantiate()
+ @test isdir(Pkg.dependencies()[empty_package].source)
+ # Now we move the project and should still be able to find the package.
+ mktempdir() do other_dir
+ cp(dirname(Base.active_project()), other_dir; force = true)
+ Pkg.activate(other_dir)
+ rm(joinpath(DEPOT_PATH[1], "packages"); recursive = true)
+ @test !isdir(Pkg.dependencies()[empty_package].source)
+ Pkg.instantiate()
+ end
end
- end end
+ end
# Dependencies added with relative paths should be stored relative to the active project.
# This test shows that packages added with a relative path will not break
# as long as they maintain the same relative position to the project.
# We can use the loaded depot here, it will help us avoid the original clone.
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- empty_package = UUID("26187899-7657-4a90-a2f6-e79e0214bedc")
- path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "EmptyPackage"))
- # We add the package using a relative path.
- cd(path) do
- Pkg.add(path=".")
- manifest = Pkg.Types.read_manifest(joinpath(dirname(Base.active_project()), "Manifest.toml"))
- # Test that the relative path is canonicalized.
- repo = string("../../../", basename(tempdir), "/EmptyPackage")
- @test manifest[empty_package].repo.source == repo
- end
- # Now we try to find the package.
- rm(joinpath(DEPOT_PATH[1], "packages"); recursive=true)
- rm(joinpath(DEPOT_PATH[1], "clones"); recursive=true)
- Pkg.instantiate()
- # Test that Operations.is_instantiated works with relative path
- @test Pkg.Operations.is_instantiated(Pkg.Types.EnvCache())
- # Now we destroy the relative position and should not be able to find the package.
- rm(joinpath(DEPOT_PATH[1], "packages"); recursive=true)
- # Test that Operations.is_instantiated works with relative path
- @test !Pkg.Operations.is_instantiated(Pkg.Types.EnvCache())
- mktempdir() do other_dir
- cp(dirname(Base.active_project()), other_dir; force=true)
- Pkg.activate(other_dir)
- @test_throws PkgError Pkg.instantiate() # TODO is there a way to pattern match on just part of the err message?
- end
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ empty_package = UUID("26187899-7657-4a90-a2f6-e79e0214bedc")
+ path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "EmptyPackage"))
+ # We add the package using a relative path.
+ cd(path) do
+ Pkg.add(path = ".")
+ manifest = Pkg.Types.read_manifest(joinpath(dirname(Base.active_project()), "Manifest.toml"))
+ # Test that the relative path is canonicalized.
+ repo = string("../../../", basename(tempdir), "/EmptyPackage")
+ @test manifest[empty_package].repo.source == repo
+ end
+ # Now we try to find the package.
+ rm(joinpath(DEPOT_PATH[1], "packages"); recursive = true)
+ rm(joinpath(DEPOT_PATH[1], "clones"); recursive = true)
+ Pkg.instantiate()
+ # Test that Operations.is_instantiated works with relative path
+ @test Pkg.Operations.is_instantiated(Pkg.Types.EnvCache())
+ # Now we destroy the relative position and should not be able to find the package.
+ rm(joinpath(DEPOT_PATH[1], "packages"); recursive = true)
+ # Test that Operations.is_instantiated works with relative path
+ @test !Pkg.Operations.is_instantiated(Pkg.Types.EnvCache())
+ mktempdir() do other_dir
+ cp(dirname(Base.active_project()), other_dir; force = true)
+ Pkg.activate(other_dir)
+ @test_throws PkgError Pkg.instantiate() # TODO is there a way to pattern match on just part of the err message?
+ end
+ end
+ end
# Now we test packages added by URL.
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
# Details: `master` is past `0.1.0`
- Pkg.add(url="https://github.com/00vareladavid/Unregistered.jl", rev="0.1.0")
+ Pkg.add(url = "https://github.com/00vareladavid/Unregistered.jl", rev = "0.1.0")
Pkg.dependencies(unregistered_uuid) do pkg
@test pkg.name == "Unregistered"
@test isdir(pkg.source)
@@ -914,7 +1070,7 @@ end
@test haskey(Pkg.project().dependencies, "Unregistered")
# Now we remove the source so that we have to load it again.
# We should reuse the existing clone in this case.
- rm(joinpath(DEPOT_PATH[1], "packages"); recursive=true)
+ rm(joinpath(DEPOT_PATH[1], "packages"); recursive = true)
Pkg.instantiate()
Pkg.dependencies(unregistered_uuid) do pkg
@test pkg.name == "Unregistered"
@@ -923,8 +1079,8 @@ end
@test haskey(Pkg.project().dependencies, "Unregistered")
# Now we remove the source _and_ our cache, we have no choice to re-clone the remote.
# We should still be able to find the source.
- rm(joinpath(DEPOT_PATH[1], "packages"); recursive=true)
- rm(joinpath(DEPOT_PATH[1], "clones"); recursive=true)
+ rm(joinpath(DEPOT_PATH[1], "packages"); recursive = true)
+ rm(joinpath(DEPOT_PATH[1], "clones"); recursive = true)
Pkg.instantiate()
Pkg.dependencies(unregistered_uuid) do pkg
@test pkg.name == "Unregistered"
@@ -944,45 +1100,57 @@ end
# these tests.
registry_url = "https://github.com/JuliaRegistries/General.git"
registry_commit = "030d6dae0df2ad6c3b2f90d41749df3eedb8d1b1"
- Utils.isolate_and_pin_registry(; registry_url, registry_commit) do; mktempdir() do tmp
- # All
- copy_test_package(tmp, "ShouldPreserveAll"; use_pkg=false)
- Pkg.activate(joinpath(tmp, "ShouldPreserveAll"))
- parsers_uuid = UUID("69de0a69-1ddd-5017-9359-2bf0b02dc9f0")
- original_parsers_version = Pkg.dependencies()[parsers_uuid].version
- Pkg.add(name="Example", version="0.5.0")
- @test Pkg.dependencies()[parsers_uuid].version == original_parsers_version
- # Direct
- copy_test_package(tmp, "ShouldPreserveDirect"; use_pkg=false)
- Pkg.activate(joinpath(tmp, "ShouldPreserveDirect"))
- ordered_collections = UUID("bac558e1-5e72-5ebc-8fee-abe8a469f55d")
- Pkg.add(uuid=ordered_collections, version="1.0.1")
- lazy_json = UUID("fc18253b-5e1b-504c-a4a2-9ece4944c004")
- data_structures = UUID("864edb3b-99cc-5e75-8d2d-829cb0a9cfe8")
- @test Pkg.dependencies()[lazy_json].version == v"0.1.0" # stayed the same
- @test Pkg.dependencies()[data_structures].version == v"0.16.1" # forced to change
- @test Pkg.dependencies()[ordered_collections].version == v"1.0.1" # sanity check
- # SEMVER
- copy_test_package(tmp, "ShouldPreserveSemver"; use_pkg=false)
- Pkg.activate(joinpath(tmp, "ShouldPreserveSemver"))
- light_graphs = UUID("093fc24a-ae57-5d10-9952-331d41423f4d")
- meta_graphs = UUID("626554b9-1ddb-594c-aa3c-2596fe9399a5")
- light_graphs_version = Pkg.dependencies()[light_graphs].version
- Pkg.add(uuid=meta_graphs, version="0.6.4")
- @test Pkg.dependencies()[meta_graphs].version == v"0.6.4" # sanity check
- # did not break semver
- @test Pkg.dependencies()[light_graphs].version in Pkg.Types.semver_spec("$(light_graphs_version)")
- # did change version
- @test Pkg.dependencies()[light_graphs].version != light_graphs_version
- # NONE
- copy_test_package(tmp, "ShouldPreserveNone"; use_pkg=false)
- Pkg.activate(joinpath(tmp, "ShouldPreserveNone"))
- array_interface = UUID("4fba245c-0d91-5ea0-9b3e-6abc04ee57a9")
- diff_eq_diff_tools = UUID("01453d9d-ee7c-5054-8395-0335cb756afa")
- Pkg.add(uuid=diff_eq_diff_tools, version="1.0.0")
- @test Pkg.dependencies()[diff_eq_diff_tools].version == v"1.0.0" # sanity check
- @test Pkg.dependencies()[array_interface].version in Pkg.Types.semver_spec("1") # had to make breaking change
- end end
+ Utils.isolate_and_pin_registry(; registry_url, registry_commit) do;
+ mktempdir() do tmp
+ # All
+ copy_test_package(tmp, "ShouldPreserveAll"; use_pkg = false)
+ Pkg.activate(joinpath(tmp, "ShouldPreserveAll"))
+ parsers_uuid = UUID("69de0a69-1ddd-5017-9359-2bf0b02dc9f0")
+ original_parsers_version = Pkg.dependencies()[parsers_uuid].version
+ Pkg.add(name = "Example", version = "0.5.0")
+ @test Pkg.dependencies()[parsers_uuid].version == original_parsers_version
+ # Direct
+ copy_test_package(tmp, "ShouldPreserveDirect"; use_pkg = false)
+ Pkg.activate(joinpath(tmp, "ShouldPreserveDirect"))
+ ordered_collections = UUID("bac558e1-5e72-5ebc-8fee-abe8a469f55d")
+ Pkg.add(uuid = ordered_collections, version = "1.0.1")
+ lazy_json = UUID("fc18253b-5e1b-504c-a4a2-9ece4944c004")
+ data_structures = UUID("864edb3b-99cc-5e75-8d2d-829cb0a9cfe8")
+ @test Pkg.dependencies()[lazy_json].version == v"0.1.0" # stayed the same
+ @test Pkg.dependencies()[data_structures].version == v"0.16.1" # forced to change
+ @test Pkg.dependencies()[ordered_collections].version == v"1.0.1" # sanity check
+ # SEMVER
+ copy_test_package(tmp, "ShouldPreserveSemver"; use_pkg = false)
+
+ # Support Julia versions before & after the switch from MbedTLS to OpenSSL
+ OpenSSL_pkgid = Base.PkgId(Base.UUID("458c3c95-2e84-50aa-8efc-19380b2a3a95"), "OpenSSL_jll")
+ manifest_to_use = if Base.is_stdlib(OpenSSL_pkgid)
+ joinpath(tmp, "ShouldPreserveSemver", "Manifest_OpenSSL.toml")
+ else
+ joinpath(tmp, "ShouldPreserveSemver", "Manifest_MbedTLS.toml")
+ end
+ mv(manifest_to_use, joinpath(tmp, "ShouldPreserveSemver", "Manifest.toml"))
+
+ Pkg.activate(joinpath(tmp, "ShouldPreserveSemver"))
+ light_graphs = UUID("093fc24a-ae57-5d10-9952-331d41423f4d")
+ meta_graphs = UUID("626554b9-1ddb-594c-aa3c-2596fe9399a5")
+ light_graphs_version = Pkg.dependencies()[light_graphs].version
+ Pkg.add(uuid = meta_graphs, version = "0.6.4")
+ @test Pkg.dependencies()[meta_graphs].version == v"0.6.4" # sanity check
+ # did not break semver
+ @test Pkg.dependencies()[light_graphs].version in Pkg.Types.semver_spec("$(light_graphs_version)")
+ # did change version
+ @test Pkg.dependencies()[light_graphs].version != light_graphs_version
+ # NONE
+ copy_test_package(tmp, "ShouldPreserveNone"; use_pkg = false)
+ Pkg.activate(joinpath(tmp, "ShouldPreserveNone"))
+ array_interface = UUID("4fba245c-0d91-5ea0-9b3e-6abc04ee57a9")
+ diff_eq_diff_tools = UUID("01453d9d-ee7c-5054-8395-0335cb756afa")
+ Pkg.add(uuid = diff_eq_diff_tools, version = "1.0.0")
+ @test Pkg.dependencies()[diff_eq_diff_tools].version == v"1.0.0" # sanity check
+ @test Pkg.dependencies()[array_interface].version in Pkg.Types.semver_spec("1") # had to make breaking change
+ end
+ end
end
#
@@ -994,96 +1162,328 @@ end
# Add using UUID syntax
api, args, opts = first(Pkg.pkg"add 7876af07-990d-54b4-ab0e-23690620f79a")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;uuid=UUID("7876af07-990d-54b4-ab0e-23690620f79a"))]
+ @test args == [Pkg.PackageSpec(; uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a"))]
@test isempty(opts)
# Add using `name=UUID` syntax.
api, args, opts = first(Pkg.pkg"add Example=7876af07-990d-54b4-ab0e-23690620f79a")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example", uuid=UUID("7876af07-990d-54b4-ab0e-23690620f79a"))]
+ @test args == [Pkg.PackageSpec(; name = "Example", uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a"))]
@test isempty(opts)
# Add using git revision syntax.
api, args, opts = first(Pkg.pkg"add Example#master")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example", rev="master")]
+ @test args == [Pkg.PackageSpec(; name = "Example", rev = "master")]
@test isempty(opts)
# Add using git revision syntax.
- api,args, opt = first(Pkg.pkg"add Example#v0.5.3")
+ api, args, opt = first(Pkg.pkg"add Example#v0.5.3")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example", rev="v0.5.3")]
+ @test args == [Pkg.PackageSpec(; name = "Example", rev = "v0.5.3")]
@test isempty(opts)
# Add using registered version syntax.
api, args, opts = first(Pkg.pkg"add Example@0.5.0")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example", version="0.5.0")]
+ @test args == [Pkg.PackageSpec(; name = "Example", version = "0.5.0")]
+ @test isempty(opts)
+ # Add multiple packages with version specifier
+ api, args, opts = first(Pkg.pkg"add Example@0.5.5 Test")
+ @test api == Pkg.add
+ @test length(args) == 2
+ @test args[1].name == "Example"
+ @test args[1].version == "0.5.5"
+ @test args[2].name == "Test"
@test isempty(opts)
# Add as a weakdep.
api, args, opts = first(Pkg.pkg"add --weak Example")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:target => :weakdeps)
# Add as an extra.
api, args, opts = first(Pkg.pkg"add --extra Example")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:target => :extras)
# Add using direct URL syntax.
api, args, opts = first(Pkg.pkg"add https://github.com/00vareladavid/Unregistered.jl#0.1.0")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;url="https://github.com/00vareladavid/Unregistered.jl", rev="0.1.0")]
+ @test args == [Pkg.PackageSpec(; url = "https://github.com/00vareladavid/Unregistered.jl", rev = "0.1.0")]
@test isempty(opts)
+
+ api, args, opts = first(Pkg.pkg"add a/path/with/@/deal/with/it")
+ @test normpath(args[1].path) == normpath("a/path/with/@/deal/with/it")
+
+ # Test GitHub URLs with tree/commit paths
+ @testset "GitHub tree/commit URLs" begin
+ api, args, opts = first(Pkg.pkg"add https://github.com/user/repo/tree/feature-branch")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://github.com/user/repo"
+ @test args[1].rev == "feature-branch"
+
+ api, args, opts = first(Pkg.pkg"add https://github.com/user/repo/commit/abc123def")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://github.com/user/repo"
+ @test args[1].rev == "abc123def"
+ end
+
+ # Test Git URLs with branch specifiers
+ @testset "Git URLs with branch specifiers" begin
+ api, args, opts = first(Pkg.pkg"add https://github.com/user/repo.git#main")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://github.com/user/repo.git"
+ @test args[1].rev == "main"
+
+ api, args, opts = first(Pkg.pkg"add https://bitbucket.org/user/repo.git#develop")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://bitbucket.org/user/repo.git"
+ @test args[1].rev == "develop"
+
+ api, args, opts = first(Pkg.pkg"add git@github.com:user/repo.git#feature")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "git@github.com:user/repo.git"
+ @test args[1].rev == "feature"
+
+ api, args, opts = first(Pkg.pkg"add ssh://git@server.com/path/repo.git#branch-name")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "ssh://git@server.com/path/repo.git"
+ @test args[1].rev == "branch-name"
+ end
+
+
+ # Test Git URLs with subdir specifiers
+ @testset "Git URLs with subdir specifiers" begin
+ api, args, opts = first(Pkg.pkg"add https://github.com/user/monorepo.git:packages/MyPackage")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://github.com/user/monorepo.git"
+ @test args[1].subdir == "packages/MyPackage"
+
+ api, args, opts = first(Pkg.pkg"add ssh://git@server.com/repo.git:subdir/nested")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "ssh://git@server.com/repo.git"
+ @test args[1].subdir == "subdir/nested"
+ end
+
+ # Test complex URLs (with username in URL + branch/tag/subdir)
+ @testset "Complex Git URLs" begin
+ api, args, opts = first(Pkg.pkg"add https://username@bitbucket.org/org/repo.git#dev")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://username@bitbucket.org/org/repo.git"
+ @test args[1].rev == "dev"
+
+ api, args, opts = first(Pkg.pkg"add https://user:token@gitlab.company.com/group/project.git")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://user:token@gitlab.company.com/group/project.git"
+
+ api, args, opts = first(Pkg.pkg"add https://example.com:8080/git/repo.git:packages/core")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://example.com:8080/git/repo.git"
+ @test args[1].subdir == "packages/core"
+
+ # Test URLs with complex authentication and branch names containing #
+ api, args, opts = first(Pkg.pkg"add https://user:pass123@gitlab.example.com:8443/group/project.git#feature/fix-#42")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://user:pass123@gitlab.example.com:8443/group/project.git"
+ @test args[1].rev == "feature/fix-#42"
+
+ # Test URLs with complex authentication and subdirs
+ api, args, opts = first(Pkg.pkg"add https://api_key:secret@company.git.server.com/team/monorepo.git:libs/julia/pkg")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://api_key:secret@company.git.server.com/team/monorepo.git"
+ @test args[1].subdir == "libs/julia/pkg"
+
+ # Test URLs with authentication, branch with #, and subdir
+ api, args, opts = first(Pkg.pkg"add https://deploy:token123@internal.git.company.com/product/backend.git#hotfix/issue-#789:packages/core")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://deploy:token123@internal.git.company.com/product/backend.git"
+ @test args[1].rev == "hotfix/issue-#789"
+ @test args[1].subdir == "packages/core"
+
+ # Test SSH URLs with port numbers and subdirs
+ api, args, opts = first(Pkg.pkg"add ssh://git@custom.server.com:2222/path/to/repo.git:src/package")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "ssh://git@custom.server.com:2222/path/to/repo.git"
+ @test args[1].subdir == "src/package"
+
+ # Test URL with username in URL and multiple # in branch name
+ api, args, opts = first(Pkg.pkg"add https://ci_user@build.company.net/team/project.git#release/v2.0-#123-#456")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://ci_user@build.company.net/team/project.git"
+ @test args[1].rev == "release/v2.0-#123-#456"
+
+ # Test complex case: auth + port + branch with # + subdir
+ api, args, opts = first(Pkg.pkg"add https://robot:abc123@git.enterprise.com:9443/division/platform.git#bugfix/handle-#special-chars:modules/julia-pkg")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://robot:abc123@git.enterprise.com:9443/division/platform.git"
+ @test args[1].rev == "bugfix/handle-#special-chars"
+ @test args[1].subdir == "modules/julia-pkg"
+
+ # Test local paths with branch specifiers (paths can be repos)
+ api, args, opts = first(Pkg.pkg"add ./local/repo#feature-branch")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test normpath(args[1].path) == normpath("local/repo") # normpath removes "./"
+ @test args[1].rev == "feature-branch"
+
+ # Test local paths with subdir specifiers
+ api, args, opts = first(Pkg.pkg"add ./monorepo:packages/subpkg")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].path == "monorepo" # normpath removes "./"
+ @test args[1].subdir == "packages/subpkg"
+
+ # Test local paths with both branch and subdir
+ api, args, opts = first(Pkg.pkg"add ./project#develop:src/package")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].path == "project" # normpath removes "./"
+ @test args[1].rev == "develop"
+ @test args[1].subdir == "src/package"
+
+ # Test local paths with branch containing # characters
+ api, args, opts = first(Pkg.pkg"add ../workspace/repo#bugfix/issue-#123")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test normpath(args[1].path) == normpath("../workspace/repo")
+ @test args[1].rev == "bugfix/issue-#123"
+
+ # Test complex local path case: tilde (~, expands to absolute home) path + branch with # + subdir
+ if !Sys.iswindows()
+ api, args, opts = first(Pkg.pkg"add ~/projects/myrepo#feature/fix-#456:libs/core")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test startswith(args[1].path, "/") # ~ gets expanded to absolute path
+ @test endswith(normpath(args[1].path), normpath("/projects/myrepo"))
+ @test args[1].rev == "feature/fix-#456"
+ @test args[1].subdir == "libs/core"
+ end
+
+ # Test quoted URL with separate revision specifier (regression test)
+ api, args, opts = first(Pkg.pkg"add \"https://username@bitbucket.org/orgname/reponame.git\"#dev")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://username@bitbucket.org/orgname/reponame.git"
+ @test args[1].rev == "dev"
+
+ # Test quoted URL with separate version specifier
+ api, args, opts = first(Pkg.pkg"add \"https://company.git.server.com/project.git\"@v2.1.0")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://company.git.server.com/project.git"
+ @test args[1].version == "v2.1.0"
+
+ # Test quoted URL with separate subdir specifier
+ api, args, opts = first(Pkg.pkg"add \"https://gitlab.example.com/monorepo.git\":packages/core")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://gitlab.example.com/monorepo.git"
+ @test args[1].subdir == "packages/core"
+ end
+
+ # Test that regular URLs without .git still work
+ @testset "Non-.git URLs (unchanged behavior)" begin
+ api, args, opts = first(Pkg.pkg"add https://github.com/user/repo")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].url == "https://github.com/user/repo"
+ @test args[1].rev === nothing
+ @test args[1].subdir === nothing
+ end
+
+ @testset "Windows path handling" begin
+ # Test that Windows drive letters are not treated as subdir separators
+ api, args, opts = first(Pkg.pkg"add C:\\Users\\test\\project")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].path == normpath("C:\\\\Users\\\\test\\\\project")
+ @test args[1].subdir === nothing
+
+ # Test with forward slashes too
+ api, args, opts = first(Pkg.pkg"add C:/Users/test/project")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].path == normpath("C:/Users/test/project")
+ @test args[1].subdir === nothing
+
+ # Test that actual subdir syntax still works with Windows paths
+ api, args, opts = first(Pkg.pkg"add C:\\Users\\test\\project:subdir")
+ @test api == Pkg.add
+ @test length(args) == 1
+ @test args[1].path == normpath("C:\\\\Users\\\\test\\\\project")
+ @test args[1].subdir == "subdir"
+ end
+
# Add using preserve option
api, args, opts = first(Pkg.pkg"add --preserve=none Example")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:preserve => Pkg.PRESERVE_NONE)
api, args, opts = first(Pkg.pkg"add --preserve=semver Example")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:preserve => Pkg.PRESERVE_SEMVER)
api, args, opts = first(Pkg.pkg"add --preserve=tiered Example")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:preserve => Pkg.PRESERVE_TIERED)
api, args, opts = first(Pkg.pkg"add --preserve=all Example")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:preserve => Pkg.PRESERVE_ALL)
api, args, opts = first(Pkg.pkg"add --preserve=direct Example")
@test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:preserve => Pkg.PRESERVE_DIRECT)
end
# check casesensitive resolution of paths
- isolate() do; cd_tempdir() do dir
- Pkg.REPLMode.TEST_MODE[] = true
- mkdir("example")
- api, args, opts = first(Pkg.pkg"add Example")
- @test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="Example")]
- @test isempty(opts)
- api, args, opts = first(Pkg.pkg"add example")
- @test api == Pkg.add
- @test args == [Pkg.PackageSpec(;name="example")]
- @test isempty(opts)
- @test_throws PkgError Pkg.pkg"add ./Example"
- api, args, opts = first(Pkg.pkg"add ./example")
- @test api == Pkg.add
- @test args == [Pkg.PackageSpec(;path="example")]
- @test isempty(opts)
- cd("example")
- api, args, opts = first(Pkg.pkg"add .")
- @test api == Pkg.add
- @test args == [Pkg.PackageSpec(;path=".")]
- @test isempty(opts)
- end end
- isolate() do; cd_tempdir() do dir
- # adding a nonexistent directory
- @test_throws PkgError("`some/really/random/Dir` appears to be a local path, but directory does not exist"
- ) Pkg.pkg"add some/really/random/Dir"
- # warn if not explicit about adding directory
- mkdir("Example")
- @test_logs (:info, r"Use `./Example` to add or develop the local directory at `.*`.") match_mode=:any Pkg.pkg"add Example"
- end end
+ isolate() do;
+ cd_tempdir() do dir
+ Pkg.REPLMode.TEST_MODE[] = true
+ mkdir("example")
+ api, args, opts = first(Pkg.pkg"add Example")
+ @test api == Pkg.add
+ @test args == [Pkg.PackageSpec(; name = "Example")]
+ @test isempty(opts)
+ api, args, opts = first(Pkg.pkg"add example")
+ @test api == Pkg.add
+ @test args == [Pkg.PackageSpec(; name = "example")]
+ @test isempty(opts)
+ api, args, opts = first(Pkg.pkg"add ./example")
+ @test api == Pkg.add
+ @test args == [Pkg.PackageSpec(; path = "example")]
+ @test isempty(opts)
+ cd("example")
+ api, args, opts = first(Pkg.pkg"add .")
+ @test api == Pkg.add
+ @test args == [Pkg.PackageSpec(; path = ".")]
+ @test isempty(opts)
+ end
+ end
+ isolate() do;
+ cd_tempdir() do dir
+ # adding a nonexistent directory
+ @test_throws PkgError(
+ "Path `$(normpath("some/really/random/Dir"))` does not exist."
+ ) Pkg.pkg"add some/really/random/Dir"
+ # warn if not explicit about adding directory
+ mkdir("Example")
+ @test_logs (:info, r"Use `./Example` to add or develop the local directory at `.*`.") match_mode = :any Pkg.pkg"add Example"
+ end
+ end
end
#
@@ -1094,27 +1494,31 @@ end
# ## Input Checking
#
@testset "develop: input checking" begin
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
# Julia is not a valid package name.
- @test_throws PkgError("`julia` is not a valid package name") Pkg.develop(name="julia")
+ @test_throws PkgError("`julia` is not a valid package name") Pkg.develop(name = "julia")
# Package names must be valid Julia identifiers.
- @test_throws PkgError("`***` is not a valid package name") Pkg.develop(name="***")
- @test_throws PkgError("`Foo Bar` is not a valid package name") Pkg.develop(name="Foo Bar")
+ @test_throws PkgError("`***` is not a valid package name") Pkg.develop(name = "***")
+ @test_throws PkgError("`Foo Bar` is not a valid package name") Pkg.develop(name = "Foo Bar")
# Names which are invalid and are probably URLs or paths.
- @test_throws PkgError("""
- `https://github.com` is not a valid package name
- The argument appears to be a URL or path, perhaps you meant `Pkg.develop(url="...")` or `Pkg.develop(path="...")`.""") Pkg.develop("https://github.com")
- @test_throws PkgError("""
- `./Foobar` is not a valid package name
- The argument appears to be a URL or path, perhaps you meant `Pkg.develop(url="...")` or `Pkg.develop(path="...")`.""") Pkg.develop("./Foobar")
+ @test_throws PkgError(
+ """
+ `https://github.com` is not a valid package name
+ The argument appears to be a URL or path, perhaps you meant `Pkg.develop(url="...")` or `Pkg.develop(path="...")`."""
+ ) Pkg.develop("https://github.com")
+ @test_throws PkgError(
+ """
+ `./Foobar` is not a valid package name
+ The argument appears to be a URL or path, perhaps you meant `Pkg.develop(url="...")` or `Pkg.develop(path="...")`."""
+ ) Pkg.develop("./Foobar")
# An empty spec is invalid.
@test_throws PkgError(
"name, UUID, URL, or filesystem path specification required when calling `develop`"
- ) Pkg.develop(Pkg.PackageSpec())
+ ) Pkg.develop(Pkg.PackageSpec())
# git revisions imply that `develop` tracks a git repo.
@test_throws PkgError(
"rev argument not supported by `develop`; consider using `add` instead"
- ) Pkg.develop(name="Example", rev="master")
+ ) Pkg.develop(name = "Example", rev = "master")
# Adding an unregistered package by name.
@test_throws PkgError Pkg.develop("ThisIsHopefullyRandom012856014925701382")
# Wrong UUID
@@ -1124,7 +1528,7 @@ end
# Two packages with the same name
@test_throws PkgError(
"it is invalid to specify multiple packages with the same UUID: `Example [7876af07]`"
- ) Pkg.develop([(;name="Example"), (;uuid=exuuid)])
+ ) Pkg.develop([(; name = "Example"), (; uuid = exuuid)])
end
end
@@ -1133,7 +1537,7 @@ end
#
@testset "develop: changes to the active project" begin
# It is possible to `develop` by specifying a registered name.
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.develop("Example")
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@@ -1143,8 +1547,8 @@ end
@test haskey(Pkg.project().dependencies, "Example")
end
# Develop with shared=false
- isolate(loaded_depot=true) do
- Pkg.develop("Example"; shared=false)
+ isolate(loaded_depot = true) do
+ Pkg.develop("Example"; shared = false)
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@test Base.samefile(pkg.source, joinpath(dirname(Pkg.project().path), "dev", "Example"))
@@ -1153,8 +1557,8 @@ end
@test haskey(Pkg.project().dependencies, "Example")
end
# It is possible to develop by specifying a registered UUID.
- isolate(loaded_depot=true) do
- Pkg.develop(uuid=exuuid)
+ isolate(loaded_depot = true) do
+ Pkg.develop(uuid = exuuid)
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@test Base.samefile(pkg.source, joinpath(DEPOT_PATH[1], "dev", "Example"))
@@ -1163,8 +1567,8 @@ end
@test haskey(Pkg.project().dependencies, "Example")
end
# It is possible to develop by specifying a URL.
- isolate(loaded_depot=true) do
- Pkg.develop(url="https://github.com/JuliaLang/Example.jl")
+ isolate(loaded_depot = true) do
+ Pkg.develop(url = "https://github.com/JuliaLang/Example.jl")
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@test Base.samefile(pkg.source, joinpath(DEPOT_PATH[1], "dev", "Example"))
@@ -1173,22 +1577,24 @@ end
@test haskey(Pkg.project().dependencies, "Example")
end
# It is possible to develop by directly specifying a path.
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- copy_test_package(tempdir, "SimplePackage")
- path = joinpath(tempdir, "SimplePackage")
- Pkg.develop(path=path)
- Pkg.dependencies(simple_package_uuid) do pkg
- @test pkg.name == "SimplePackage"
- @test realpath(pkg.source) == realpath(path)
- @test !pkg.is_tracking_registry
- @test haskey(pkg.dependencies, "Example")
- @test haskey(pkg.dependencies, "Markdown")
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ copy_test_package(tempdir, "SimplePackage")
+ path = joinpath(tempdir, "SimplePackage")
+ Pkg.develop(path = path)
+ Pkg.dependencies(simple_package_uuid) do pkg
+ @test pkg.name == "SimplePackage"
+ @test realpath(pkg.source) == realpath(path)
+ @test !pkg.is_tracking_registry
+ @test haskey(pkg.dependencies, "Example")
+ @test haskey(pkg.dependencies, "Markdown")
+ end
+ @test haskey(Pkg.project().dependencies, "SimplePackage")
end
- @test haskey(Pkg.project().dependencies, "SimplePackage")
- end end
+ end
# recursive `dev`
- isolate(loaded_depot=true) do
- Pkg.develop(path=joinpath(@__DIR__, "test_packages", "A"))
+ isolate(loaded_depot = true) do
+ Pkg.develop(path = joinpath(@__DIR__, "test_packages", "A"))
Pkg.dependencies(UUID("0829fd7c-1e7e-4927-9afa-b8c61d5e0e42")) do pkg # dep A
@test haskey(pkg.dependencies, "B")
@test haskey(pkg.dependencies, "C")
@@ -1206,53 +1612,59 @@ end
end
end
# primary depot is a relative path
- isolate() do; cd_tempdir() do dir
- empty!(DEPOT_PATH)
- push!(DEPOT_PATH, "temp")
- Base.append_bundled_depot_path!(DEPOT_PATH)
- Pkg.develop("JSON")
- Pkg.dependencies(json_uuid) do pkg
- @test Base.samefile(pkg.source, abspath(joinpath("temp", "dev", "JSON")))
+ isolate() do;
+ cd_tempdir() do dir
+ empty!(DEPOT_PATH)
+ push!(DEPOT_PATH, "temp")
+ Base.append_bundled_depot_path!(DEPOT_PATH)
+ Pkg.develop("JSON")
+ Pkg.dependencies(json_uuid) do pkg
+ @test Base.samefile(pkg.source, abspath(joinpath("temp", "dev", "JSON")))
+ end
end
- end end
+ end
end
@testset "develop: interaction with `JULIA_PKG_DEVDIR`" begin
# A shared `develop` should obey `JULIA_PKG_DEVDIR`.
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- withenv("JULIA_PKG_DEVDIR" => tempdir) do
- Pkg.develop("Example")
- end
- Pkg.dependencies(exuuid) do pkg
- @test pkg.name == "Example"
- @test Base.samefile(pkg.source, joinpath(tempdir, "Example"))
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ withenv("JULIA_PKG_DEVDIR" => tempdir) do
+ Pkg.develop("Example")
+ end
+ Pkg.dependencies(exuuid) do pkg
+ @test pkg.name == "Example"
+ @test Base.samefile(pkg.source, joinpath(tempdir, "Example"))
+ end
+ @test haskey(Pkg.project().dependencies, "Example")
end
- @test haskey(Pkg.project().dependencies, "Example")
- end end
+ end
# A local `develop` should not be affected by `JULIA_PKG_DEVDIR`
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- withenv("JULIA_PKG_DEVDIR" => tempdir) do
- Pkg.develop("Example"; shared=false)
- end
- Pkg.dependencies(exuuid) do pkg
- @test pkg.name == "Example"
- @test Base.samefile(pkg.source, joinpath(dirname(Pkg.project().path), "dev", "Example"))
- @test !pkg.is_tracking_registry
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ withenv("JULIA_PKG_DEVDIR" => tempdir) do
+ Pkg.develop("Example"; shared = false)
+ end
+ Pkg.dependencies(exuuid) do pkg
+ @test pkg.name == "Example"
+ @test Base.samefile(pkg.source, joinpath(dirname(Pkg.project().path), "dev", "Example"))
+ @test !pkg.is_tracking_registry
+ end
+ @test haskey(Pkg.project().dependencies, "Example")
end
- @test haskey(Pkg.project().dependencies, "Example")
- end end
+ end
end
@testset "develop: path handling" begin
# Relative paths
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
project_path = dirname(Pkg.project().path)
mkpath(project_path)
copy_test_package(project_path, "SimplePackage")
package_path = joinpath(project_path, "SimplePackage")
# Now we `develop` using a relative path.
cd(project_path) do
- Pkg.develop(Pkg.PackageSpec(path="SimplePackage"))
+ Pkg.develop(Pkg.PackageSpec(path = "SimplePackage"))
end
# Check that everything went ok.
original_source = nothing
@@ -1264,7 +1676,7 @@ end
end
# Now we move the project, but preserve the relative structure.
mktempdir() do tempdir
- cp(project_path, tempdir; force=true)
+ cp(project_path, tempdir; force = true)
Pkg.activate(tempdir)
# We check that we can still find the source.
Pkg.dependencies(simple_package_uuid) do pkg
@@ -1274,70 +1686,78 @@ end
end
end
# Absolute paths
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- copy_test_package(tempdir, "SimplePackage")
- package_path = joinpath(tempdir, "SimplePackage")
- Pkg.activate(tempdir)
- Pkg.develop(path=package_path)
- original_source = nothing
- Pkg.dependencies(simple_package_uuid) do pkg
- @test pkg.name == "SimplePackage"
- @test isdir(pkg.source)
- @test realpath(pkg.source) == realpath(package_path)
- original_source = pkg.source
- end
- mktempdir() do tempdir2
- cp(joinpath(tempdir, "Project.toml"), joinpath(tempdir2, "Project.toml"))
- cp(joinpath(tempdir, "Manifest.toml"), joinpath(tempdir2, "Manifest.toml"))
- Pkg.activate(tempdir2)
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ copy_test_package(tempdir, "SimplePackage")
+ package_path = joinpath(tempdir, "SimplePackage")
+ Pkg.activate(tempdir)
+ Pkg.develop(path = package_path)
+ original_source = nothing
Pkg.dependencies(simple_package_uuid) do pkg
+ @test pkg.name == "SimplePackage"
@test isdir(pkg.source)
- @test Base.samefile(pkg.source, original_source)
+ @test realpath(pkg.source) == realpath(package_path)
+ original_source = pkg.source
+ end
+ mktempdir() do tempdir2
+ cp(joinpath(tempdir, "Project.toml"), joinpath(tempdir2, "Project.toml"))
+ cp(joinpath(tempdir, "Manifest.toml"), joinpath(tempdir2, "Manifest.toml"))
+ Pkg.activate(tempdir2)
+ Pkg.dependencies(simple_package_uuid) do pkg
+ @test isdir(pkg.source)
+ @test Base.samefile(pkg.source, original_source)
+ end
end
end
- end end
+ end
# ### Special casing on path handling
# "." style path
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "SimplePackage")
- cd(path) do
- Pkg.pkg"develop ."
- end
- Pkg.dependencies(simple_package_uuid) do pkg
- @test pkg.name == "SimplePackage"
- @test isdir(pkg.source)
- @test pkg.is_tracking_path
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "SimplePackage")
+ cd(path) do
+ Pkg.pkg"develop ."
+ end
+ Pkg.dependencies(simple_package_uuid) do pkg
+ @test pkg.name == "SimplePackage"
+ @test isdir(pkg.source)
+ @test pkg.is_tracking_path
+ end
end
- end end
+ end
# ".." style path
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "SimplePackage")
- cd(joinpath(path, "src")) do
- Pkg.pkg"develop .."
- end
- Pkg.dependencies(simple_package_uuid) do pkg
- @test pkg.name == "SimplePackage"
- @test isdir(pkg.source)
- @test pkg.is_tracking_path
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "SimplePackage")
+ cd(joinpath(path, "src")) do
+ Pkg.pkg"develop .."
+ end
+ Pkg.dependencies(simple_package_uuid) do pkg
+ @test pkg.name == "SimplePackage"
+ @test isdir(pkg.source)
+ @test pkg.is_tracking_path
+ end
end
- end end
+ end
# Local directory name. This must be prepended by "./".
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "SimplePackage")
- cd(dirname(path)) do
- Pkg.pkg"develop ./SimplePackage"
- end
- Pkg.dependencies(simple_package_uuid) do pkg
- @test pkg.name == "SimplePackage"
- @test isdir(pkg.source)
- @test pkg.is_tracking_path
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "SimplePackage")
+ cd(dirname(path)) do
+ Pkg.pkg"develop ./SimplePackage"
+ end
+ Pkg.dependencies(simple_package_uuid) do pkg
+ @test pkg.name == "SimplePackage"
+ @test isdir(pkg.source)
+ @test pkg.is_tracking_path
+ end
end
- end end
+ end
end
@testset "develop: package state changes" begin
# Developing an existing package which is tracking the registry should just override.
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.add("Example")
Pkg.develop("Example")
Pkg.dependencies(exuuid) do pkg
@@ -1349,8 +1769,8 @@ end
@test length(Pkg.project().dependencies) == 1
end
# Developing an existing package which is tracking a repo should just override.
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", rev="master")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", rev = "master")
Pkg.develop("Example")
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@@ -1361,9 +1781,9 @@ end
@test length(Pkg.project().dependencies) == 1
end
# Develop with different target path should override old path with target path.
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.develop("Example")
- Pkg.develop("Example"; shared=false)
+ Pkg.develop("Example"; shared = false)
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@test Base.samefile(pkg.source, joinpath(dirname(Pkg.project().path), "dev", "Example"))
@@ -1373,9 +1793,9 @@ end
@test length(Pkg.project().dependencies) == 1
end
# develop tries to resolve from the manifest
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
remote_url = "https://github.com/00vareladavid/Unregistered.jl"
- Pkg.add(Pkg.PackageSpec(url=remote_url))
+ Pkg.add(Pkg.PackageSpec(url = remote_url))
Pkg.develop("Unregistered")
Pkg.dependencies(unregistered_uuid) do pkg
@test pkg.name == "Unregistered"
@@ -1392,37 +1812,37 @@ end
# registered name
api, args, opts = first(Pkg.pkg"develop Example")
@test api == Pkg.develop
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test isempty(opts)
# registered uuid
api, args, opts = first(Pkg.pkg"develop 7876af07-990d-54b4-ab0e-23690620f79a")
@test api == Pkg.develop
- @test args == [Pkg.PackageSpec(;uuid=UUID("7876af07-990d-54b4-ab0e-23690620f79a"))]
+ @test args == [Pkg.PackageSpec(; uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a"))]
@test isempty(opts)
# name=uuid
api, args, opts = first(Pkg.pkg"develop Example=7876af07-990d-54b4-ab0e-23690620f79a")
@test api == Pkg.develop
- @test args == [Pkg.PackageSpec(;name="Example", uuid=UUID("7876af07-990d-54b4-ab0e-23690620f79a"))]
+ @test args == [Pkg.PackageSpec(; name = "Example", uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a"))]
@test isempty(opts)
# local flag
api, args, opts = first(Pkg.pkg"develop --local Example")
@test api == Pkg.develop
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:shared => false)
# shared flag
api, args, opts = first(Pkg.pkg"develop --shared Example")
@test api == Pkg.develop
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:shared => true)
# URL
api, args, opts = first(Pkg.pkg"develop https://github.com/JuliaLang/Example.jl")
@test api == Pkg.develop
- @test args == [Pkg.PackageSpec(;url="https://github.com/JuliaLang/Example.jl")]
+ @test args == [Pkg.PackageSpec(; url = "https://github.com/JuliaLang/Example.jl")]
@test isempty(opts)
# develop using preserve option
api, args, opts = first(Pkg.pkg"dev --preserve=none Example")
@test api == Pkg.develop
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:preserve => Pkg.PRESERVE_NONE)
end
end
@@ -1432,17 +1852,19 @@ end
#
@testset "instantiate: input checking" begin
# Unregistered UUID in manifest
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- package_path = copy_test_package(tempdir, "UnregisteredUUID")
- Pkg.activate(package_path)
- @test_throws PkgError("expected package `Example [142fd7e7]` to be registered") Pkg.update()
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ package_path = copy_test_package(tempdir, "UnregisteredUUID")
+ Pkg.activate(package_path)
+ @test_throws PkgError Pkg.update()
+ end
+ end
end
@testset "instantiate: changes to the active project" begin
# Instantiate should preserve tree hash for regularly versioned packages.
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", version="0.3.0")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", version = "0.3.0")
th = nothing
Pkg.dependencies(exuuid) do pkg
th = pkg.tree_hash
@@ -1450,8 +1872,8 @@ end
@test pkg.version == v"0.3.0"
@test isdir(pkg.source)
end
- rm(joinpath(DEPOT_PATH[1], "packages"); force=true, recursive=true)
- rm(joinpath(DEPOT_PATH[1], "clones"); force=true, recursive=true)
+ rm(joinpath(DEPOT_PATH[1], "packages"); force = true, recursive = true)
+ rm(joinpath(DEPOT_PATH[1], "clones"); force = true, recursive = true)
Pkg.instantiate()
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@@ -1461,16 +1883,16 @@ end
end
end
# `instantiate` should preserve tree hash for packages tracking repos.
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", rev="v0.5.3")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", rev = "v0.5.3")
th = nothing
Pkg.dependencies(exuuid) do pkg
th = pkg.tree_hash
@test pkg.name == "Example"
@test isdir(pkg.source)
end
- rm(joinpath(DEPOT_PATH[1], "packages"); force=true, recursive=true)
- rm(joinpath(DEPOT_PATH[1], "clones"); force=true, recursive=true)
+ rm(joinpath(DEPOT_PATH[1], "packages"); force = true, recursive = true)
+ rm(joinpath(DEPOT_PATH[1], "clones"); force = true, recursive = true)
Pkg.instantiate()
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@@ -1479,21 +1901,25 @@ end
end
# `instantiate` should check for a consistent dependency graph.
# Otherwise it is not clear what to instantiate.
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- copy_test_package(tempdir, "ExtraDirectDep")
- Pkg.activate(joinpath(tempdir, "ExtraDirectDep"))
- @test_throws PkgError Pkg.instantiate()
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ copy_test_package(tempdir, "ExtraDirectDep")
+ Pkg.activate(joinpath(tempdir, "ExtraDirectDep"))
+ @test_throws PkgError Pkg.instantiate()
+ end
+ end
# However, if `manifest=false`, we know to instantiate from the direct dependencies.
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- copy_test_package(tempdir, "ExtraDirectDep")
- Pkg.activate(joinpath(tempdir, "ExtraDirectDep"))
- Pkg.instantiate(;manifest=false)
- @test haskey(Pkg.project().dependencies, "Example")
- @test haskey(Pkg.project().dependencies, "Unicode")
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ copy_test_package(tempdir, "ExtraDirectDep")
+ Pkg.activate(joinpath(tempdir, "ExtraDirectDep"))
+ Pkg.instantiate(; manifest = false)
+ @test haskey(Pkg.project().dependencies, "Example")
+ @test haskey(Pkg.project().dependencies, "Unicode")
+ end
+ end
# `instantiate` lonely manifest
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
manifest_dir = joinpath(@__DIR__, "manifest", "noproject")
cd(manifest_dir) do
try
@@ -1503,12 +1929,12 @@ end
@test isinstalled("Example")
@test isinstalled("x1")
finally
- rm("Project.toml"; force=true)
+ rm("Project.toml"; force = true)
end
end
end
# instantiate old manifest
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
manifest_dir = joinpath(@__DIR__, "manifest", "old")
cd(manifest_dir) do
Pkg.activate(".")
@@ -1517,24 +1943,26 @@ end
end
end
# `instantiate` on a lonely manifest should detect duplicate names
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- simple_package_path = copy_test_package(tempdir, "SimplePackage")
- unregistered_example_path = copy_test_package(tempdir, "Example")
- Pkg.develop(path=simple_package_path)
- Pkg.develop(path=unregistered_example_path)
- rm(Pkg.project().path)
- @test_throws PkgError Pkg.instantiate()
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ simple_package_path = copy_test_package(tempdir, "SimplePackage")
+ unregistered_example_path = copy_test_package(tempdir, "Example")
+ Pkg.develop(path = simple_package_path)
+ Pkg.develop(path = unregistered_example_path)
+ rm(Pkg.project().path)
+ @test_throws PkgError Pkg.instantiate()
+ end
+ end
# verbose smoke test
- isolate(loaded_depot=true) do
- Pkg.instantiate(;verbose=true)
+ isolate(loaded_depot = true) do
+ Pkg.instantiate(; verbose = true)
end
end
@testset "instantiate: caching" begin
# Instantiate should not override existing source.
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", version="0.3.0")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", version = "0.3.0")
th, t1 = nothing, nothing
Pkg.dependencies(exuuid) do pkg
th = pkg.tree_hash
@@ -1585,7 +2013,7 @@ end
@testset "why" begin
isolate() do
- Pkg.add(name = "StaticArrays", version = "1.5.0")
+ Pkg.add(name = "StaticArrays", version = "1.5.20")
io = IOBuffer()
Pkg.why("StaticArrays"; io)
@@ -1594,14 +2022,14 @@ end
Pkg.why("StaticArraysCore"; io)
str = String(take!(io))
- @test str == " StaticArrays → StaticArraysCore\n"
+ @test str == " StaticArrays → StaticArraysCore\n"
Pkg.why("LinearAlgebra"; io)
str = String(take!(io))
@test str ==
- """ StaticArrays → LinearAlgebra
- StaticArrays → Statistics → LinearAlgebra
- """
+ """ StaticArrays → LinearAlgebra
+ StaticArrays → Statistics → LinearAlgebra
+ """
end
end
@@ -1610,21 +2038,23 @@ end
#
@testset "update: input checking" begin
# Unregistered UUID in manifest
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- package_path = copy_test_package(tempdir, "UnregisteredUUID")
- Pkg.activate(package_path)
- @test_throws PkgError("expected package `Example [142fd7e7]` to be registered") Pkg.update()
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ package_path = copy_test_package(tempdir, "UnregisteredUUID")
+ Pkg.activate(package_path)
+ @test_throws PkgError Pkg.update()
+ end
+ end
# package does not exist in the manifest
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
@test_throws PkgError Pkg.update("Example")
end
end
@testset "update: changes to the active project" begin
# Basic testing of UPLEVEL
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", version="0.3.0")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", version = "0.3.0")
@test Pkg.dependencies()[exuuid].version == v"0.3.0"
Pkg.update(; level = Pkg.UPLEVEL_FIXED)
@test Pkg.dependencies()[exuuid].version == v"0.3.0"
@@ -1634,34 +2064,38 @@ end
@test Pkg.dependencies()[exuuid].version.minor != 3
end
# `update` should prune manifest
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- copy_test_package(tempdir, "Unpruned")
- Pkg.activate(joinpath(tempdir, "Unpruned"))
- Pkg.update()
- @test haskey(Pkg.project().dependencies, "Example")
- Pkg.dependencies(exuuid) do pkg
- @test pkg.version > v"0.4.0"
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ copy_test_package(tempdir, "Unpruned")
+ Pkg.activate(joinpath(tempdir, "Unpruned"))
+ Pkg.update()
+ @test haskey(Pkg.project().dependencies, "Example")
+ Pkg.dependencies(exuuid) do pkg
+ @test pkg.version > v"0.4.0"
+ end
+ @test !haskey(Pkg.dependencies(), unicode_uuid)
end
- @test !haskey(Pkg.dependencies(), unicode_uuid)
- end end
+ end
# `up` should work without a manifest
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- copy_test_package(tempdir, "SimplePackage")
- Pkg.activate(joinpath(tempdir, "SimplePackage"))
- Pkg.update()
- @test haskey(Pkg.project().dependencies, "Example")
- @test haskey(Pkg.project().dependencies, "Markdown")
- Pkg.dependencies(exuuid) do pkg
- @test pkg.name == "Example"
- @test pkg.is_tracking_registry
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ copy_test_package(tempdir, "SimplePackage")
+ Pkg.activate(joinpath(tempdir, "SimplePackage"))
+ Pkg.update()
+ @test haskey(Pkg.project().dependencies, "Example")
+ @test haskey(Pkg.project().dependencies, "Markdown")
+ Pkg.dependencies(exuuid) do pkg
+ @test pkg.name == "Example"
+ @test pkg.is_tracking_registry
+ end
end
- end end
+ end
end
@testset "update: package state changes" begin
# basic update on old registered package
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", version="0.3.0")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", version = "0.3.0")
Pkg.update()
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@@ -1669,8 +2103,8 @@ end
end
end
# `update` should not update `pin`ed packages
- isolate(loaded_depot=true) do
- Pkg.add(name="Example",version="0.3.0")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", version = "0.3.0")
Pkg.pin("Example")
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@@ -1685,7 +2119,7 @@ end
end
end
# stdlib special casing
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.add("Markdown")
Pkg.update()
Pkg.dependencies(markdown_uuid) do pkg
@@ -1693,62 +2127,66 @@ end
end
end
# up should not affect `dev` packages
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "SimplePackage")
- Pkg.develop(path=path)
- state = Pkg.dependencies()[simple_package_uuid]
- Pkg.update()
- @test Pkg.dependencies()[simple_package_uuid] == state
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "SimplePackage")
+ Pkg.develop(path = path)
+ state = Pkg.dependencies()[simple_package_uuid]
+ Pkg.update()
+ @test Pkg.dependencies()[simple_package_uuid] == state
+ end
+ end
# up and packages tracking repos
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "SimplePackage"))
- Pkg.add(path=path)
- # test everything went ok
- Pkg.dependencies(simple_package_uuid) do pkg
- @test pkg.name == "SimplePackage"
- @test pkg.version == v"0.2.0"
- @test haskey(pkg.dependencies, "Example")
- @test haskey(pkg.dependencies, "Markdown")
- @test !haskey(pkg.dependencies, "Unicode")
- end
- simple_package_node = Pkg.dependencies()[simple_package_uuid]
- # now we bump the remote version
- mv(joinpath(path, "Project2.toml"), joinpath(path, "Project.toml"); force=true)
- new_commit = nothing
- LibGit2.with(LibGit2.GitRepo(path)) do repo
- LibGit2.add!(repo, "*")
- new_commit = string(LibGit2.commit(repo, "bump version"; author=TEST_SIG, committer=TEST_SIG))
- end
- # update with UPLEVEL != UPLEVEL_MAJOR should not update packages tracking repos
- Pkg.update(; level=Pkg.UPLEVEL_MINOR)
- @test simple_package_node == Pkg.dependencies()[simple_package_uuid]
- Pkg.update(; level=Pkg.UPLEVEL_PATCH)
- @test simple_package_node == Pkg.dependencies()[simple_package_uuid]
- Pkg.update(; level=Pkg.UPLEVEL_FIXED)
- @test simple_package_node == Pkg.dependencies()[simple_package_uuid]
- # Update should not modify pinned packages which are tracking repos
- Pkg.pin("SimplePackage")
- Pkg.update()
- Pkg.free("SimplePackage")
- @test simple_package_node == Pkg.dependencies()[simple_package_uuid]
- # update should update packages tracking repos if UPLEVEL_MAJOR
- Pkg.update()
- if !Sys.iswindows() # this test is very flaky on Windows, why?
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "SimplePackage"))
+ Pkg.add(path = path)
+ # test everything went ok
Pkg.dependencies(simple_package_uuid) do pkg
@test pkg.name == "SimplePackage"
- @test pkg.version == v"0.3.0"
- @test !haskey(pkg.dependencies, "Example")
+ @test pkg.version == v"0.2.0"
+ @test haskey(pkg.dependencies, "Example")
@test haskey(pkg.dependencies, "Markdown")
- @test haskey(pkg.dependencies, "Unicode")
+ @test !haskey(pkg.dependencies, "Unicode")
+ end
+ simple_package_node = Pkg.dependencies()[simple_package_uuid]
+ # now we bump the remote version
+ mv(joinpath(path, "Project2.toml"), joinpath(path, "Project.toml"); force = true)
+ new_commit = nothing
+ LibGit2.with(LibGit2.GitRepo(path)) do repo
+ LibGit2.add!(repo, "*")
+ new_commit = string(LibGit2.commit(repo, "bump version"; author = TEST_SIG, committer = TEST_SIG))
+ end
+ # update with UPLEVEL != UPLEVEL_MAJOR should not update packages tracking repos
+ Pkg.update(; level = Pkg.UPLEVEL_MINOR)
+ @test simple_package_node == Pkg.dependencies()[simple_package_uuid]
+ Pkg.update(; level = Pkg.UPLEVEL_PATCH)
+ @test simple_package_node == Pkg.dependencies()[simple_package_uuid]
+ Pkg.update(; level = Pkg.UPLEVEL_FIXED)
+ @test simple_package_node == Pkg.dependencies()[simple_package_uuid]
+ # Update should not modify pinned packages which are tracking repos
+ Pkg.pin("SimplePackage")
+ Pkg.update()
+ Pkg.free("SimplePackage")
+ @test simple_package_node == Pkg.dependencies()[simple_package_uuid]
+ # update should update packages tracking repos if UPLEVEL_MAJOR
+ Pkg.update()
+ if !Sys.iswindows() # this test is very flaky on Windows, why?
+ Pkg.dependencies(simple_package_uuid) do pkg
+ @test pkg.name == "SimplePackage"
+ @test pkg.version == v"0.3.0"
+ @test !haskey(pkg.dependencies, "Example")
+ @test haskey(pkg.dependencies, "Markdown")
+ @test haskey(pkg.dependencies, "Unicode")
+ end
end
end
- end end
+ end
# make sure that we preserve the state of packages which are not the target
- isolate(loaded_depot=true) do
- Pkg.add(url="https://github.com/00vareladavid/Unregistered.jl")
+ isolate(loaded_depot = true) do
+ Pkg.add(url = "https://github.com/00vareladavid/Unregistered.jl")
Pkg.develop("Example")
- Pkg.add(name="JSON", version="0.18.0")
+ Pkg.add(name = "JSON", version = "0.18.0")
Pkg.add("Markdown")
Pkg.add("Unicode")
Pkg.update("Unicode")
@@ -1768,8 +2206,8 @@ end
@test haskey(Pkg.project().dependencies, "Markdown")
@test haskey(Pkg.project().dependencies, "Unicode")
end
- isolate(loaded_depot=true) do
- Pkg.add([(;name="Example", version="0.3.0"), (;name="JSON", version="0.21.0"), (;name="Parsers", version="1.1.2")])
+ isolate(loaded_depot = true) do
+ Pkg.add([(; name = "Example", version = "0.3.0"), (; name = "JSON", version = "0.21.0"), (; name = "Parsers", version = "1.1.2")])
Pkg.update("JSON")
Pkg.dependencies(json_uuid) do pkg
@test pkg.version > v"0.21.0"
@@ -1781,8 +2219,8 @@ end
@test pkg.version == v"1.1.2"
end
- Pkg.add(name="JSON", version="0.21.0")
- Pkg.update("JSON"; preserve=Pkg.PRESERVE_DIRECT)
+ Pkg.add(name = "JSON", version = "0.21.0")
+ Pkg.update("JSON"; preserve = Pkg.PRESERVE_DIRECT)
Pkg.dependencies(json_uuid) do pkg
@test pkg.version > v"0.21.0"
end
@@ -1793,10 +2231,10 @@ end
@test pkg.version == v"1.1.2"
end
- Pkg.add(name="JSON", version="0.21.0")
+ Pkg.add(name = "JSON", version = "0.21.0")
Pkg.rm("Parsers")
- Pkg.update("JSON"; preserve=Pkg.PRESERVE_DIRECT)
+ Pkg.update("JSON"; preserve = Pkg.PRESERVE_DIRECT)
Pkg.dependencies(json_uuid) do pkg
@test pkg.version > v"0.21.0"
end
@@ -1807,8 +2245,8 @@ end
@test pkg.version > v"1.1.2"
end
- Pkg.add([(;name="Example", version="0.3.0"), (;name="JSON", version="0.21.0"), (;name="Parsers", version="1.1.2")])
- Pkg.update("JSON"; preserve=Pkg.PRESERVE_NONE)
+ Pkg.add([(; name = "Example", version = "0.3.0"), (; name = "JSON", version = "0.21.0"), (; name = "Parsers", version = "1.1.2")])
+ Pkg.update("JSON"; preserve = Pkg.PRESERVE_NONE)
Pkg.dependencies(json_uuid) do pkg
@test pkg.version > v"0.21.0"
end
@@ -1822,6 +2260,8 @@ end
Pkg.dependencies(exuuid) do pkg
@test pkg.version > v"0.3.0"
end
+
+ @test_throws PkgError("`repo` is a private field of PackageSpec and should not be set directly") Pkg.add([Pkg.PackageSpec(; repo = Pkg.Types.GitRepo(source = "someurl"))])
end
end
@@ -1836,12 +2276,14 @@ end
@testset "update: caching" begin
# `up` should detect broken local packages
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "SimplePackage"))
- Pkg.add(path=path)
- rm(joinpath(path, ".git"); force=true, recursive=true)
- @test_throws PkgError Pkg.update()
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = git_init_package(tempdir, joinpath(@__DIR__, "test_packages", "SimplePackage"))
+ Pkg.add(path = path)
+ rm(joinpath(path, ".git"); force = true, recursive = true)
+ @test_throws PkgError Pkg.update()
+ end
+ end
end
#
@@ -1849,26 +2291,27 @@ end
#
@testset "pin: input checking" begin
# a package must exist in the dep graph in order to be pinned
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
@test_throws PkgError Pkg.pin("Example")
end
# pinning to an arbitrary version should check for unregistered packages
- isolate(loaded_depot=true) do
- Pkg.add(url="https://github.com/00vareladavid/Unregistered.jl")
- @test_throws PkgError("unable to pin unregistered package `Unregistered [dcb67f36]` to an arbitrary version"
- ) Pkg.pin(name="Unregistered", version="0.1.0")
+ isolate(loaded_depot = true) do
+ Pkg.add(url = "https://github.com/00vareladavid/Unregistered.jl")
+ @test_throws PkgError(
+ "unable to pin unregistered package `Unregistered [dcb67f36]` to an arbitrary version"
+ ) Pkg.pin(name = "Unregistered", version = "0.1.0")
end
# pinning to an arbitrary version should check version exists
- isolate(loaded_depot=true) do
- Pkg.add(name="Example",rev="master")
- @test_throws ResolverError Pkg.pin(name="Example",version="100.0.0")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", rev = "master")
+ @test_throws ResolverError Pkg.pin(name = "Example", version = "100.0.0")
end
end
@testset "pin: package state changes" begin
# regular registered package
- isolate(loaded_depot=true) do
- Pkg.add( name="Example", version="0.3.3")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", version = "0.3.3")
Pkg.pin("Example")
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@@ -1876,8 +2319,8 @@ end
end
end
# package tracking repo
- isolate(loaded_depot=true) do
- Pkg.add(url="https://github.com/00vareladavid/Unregistered.jl")
+ isolate(loaded_depot = true) do
+ Pkg.add(url = "https://github.com/00vareladavid/Unregistered.jl")
Pkg.pin("Unregistered")
Pkg.dependencies(unregistered_uuid) do pkg
@test !pkg.is_tracking_registry
@@ -1885,18 +2328,18 @@ end
end
end
# versioned pin
- isolate(loaded_depot=true) do
- Pkg.add( name="Example", version="0.3.3")
- Pkg.pin( name="Example", version="0.5.1")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", version = "0.3.3")
+ Pkg.pin(name = "Example", version = "0.5.1")
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@test pkg.is_pinned
end
end
# pin should check for a valid version number
- isolate(loaded_depot=true) do
- Pkg.add(name="Example", rev="master")
- @test_throws ResolverError Pkg.pin(name="Example",version="100.0.0") # TODO maybe make a PkgError
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", rev = "master")
+ @test_throws ResolverError Pkg.pin(name = "Example", version = "100.0.0") # TODO maybe make a PkgError
end
end
@@ -1905,21 +2348,24 @@ end
#
@testset "free: input checking" begin
# free checks for existing package
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
@test_throws PkgError Pkg.free("Example")
end
# free checks for unpinned package
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.add("Unicode")
- @test_throws PkgError(string("expected package `Unicode [4ec0a83e]` to be",
- " pinned, tracking a path, or tracking a repository"
- )) Pkg.free("Unicode")
+ @test_throws PkgError(
+ string(
+ "expected package `Unicode [4ec0a83e]` to be",
+ " pinned, tracking a path, or tracking a repository"
+ )
+ ) Pkg.free("Unicode")
end
end
@testset "free: package state changes" begin
# free pinned package
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.add("Example")
Pkg.pin("Example")
Pkg.free("Example")
@@ -1929,8 +2375,8 @@ end
end
end
# free package tracking repo
- isolate(loaded_depot=true) do
- Pkg.add( name="Example", rev="master")
+ isolate(loaded_depot = true) do
+ Pkg.add(name = "Example", rev = "master")
Pkg.free("Example")
Pkg.dependencies(exuuid) do pkg
@test pkg.name == "Example"
@@ -1938,7 +2384,7 @@ end
end
end
# free developed package
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.develop("Example")
Pkg.free("Example")
Pkg.dependencies(exuuid) do pkg
@@ -1947,12 +2393,12 @@ end
end
end
# free should error when called on packages tracking unregistered packages
- isolate(loaded_depot=true) do
- Pkg.add(url="https://github.com/00vareladavid/Unregistered.jl")
+ isolate(loaded_depot = true) do
+ Pkg.add(url = "https://github.com/00vareladavid/Unregistered.jl")
@test_throws PkgError("unable to free unregistered package `Unregistered [dcb67f36]`") Pkg.free("Unregistered")
end
- isolate(loaded_depot=true) do
- Pkg.develop(url="https://github.com/00vareladavid/Unregistered.jl")
+ isolate(loaded_depot = true) do
+ Pkg.develop(url = "https://github.com/00vareladavid/Unregistered.jl")
@test_throws PkgError("unable to free unregistered package `Unregistered [dcb67f36]`") Pkg.free("Unregistered")
end
end
@@ -1965,7 +2411,7 @@ end
Pkg.REPLMode.TEST_MODE[] = true
api, args, opts = first(Pkg.pkg"free Example")
@test api == Pkg.free
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test isempty(opts)
end
end
@@ -1975,13 +2421,15 @@ end
#
@testset "resolve" begin
# resolve should ignore `extras`
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- package_path = copy_test_package(tempdir, "TestTarget")
- Pkg.activate(package_path)
- Pkg.resolve()
- @test !haskey(Pkg.dependencies(), markdown_uuid)
- @test !haskey(Pkg.dependencies(), test_stdlib_uuid)
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ package_path = copy_test_package(tempdir, "TestTarget")
+ Pkg.activate(package_path)
+ Pkg.resolve()
+ @test !haskey(Pkg.dependencies(), markdown_uuid)
+ @test !haskey(Pkg.dependencies(), test_stdlib_uuid)
+ end
+ end
end
#
@@ -1989,22 +2437,92 @@ end
#
@testset "test" begin
# stdlib special casing
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.add("UUIDs")
Pkg.test("UUIDs")
end
# test args smoketest
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- copy_test_package(tempdir, "TestArguments")
- Pkg.activate(joinpath(tempdir, "TestArguments"))
- # test the old code path (no test/Project.toml)
- Pkg.test("TestArguments"; test_args=`a b`, julia_args=`--quiet --check-bounds=no`)
- Pkg.test("TestArguments"; test_args=["a", "b"], julia_args=["--quiet", "--check-bounds=no"])
- # test new code path
- touch(joinpath(tempdir, "TestArguments", "test", "Project.toml"))
- Pkg.test("TestArguments"; test_args=`a b`, julia_args=`--quiet --check-bounds=no`)
- Pkg.test("TestArguments"; test_args=["a", "b"], julia_args=["--quiet", "--check-bounds=no"])
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ copy_test_package(tempdir, "TestArguments")
+ Pkg.activate(joinpath(tempdir, "TestArguments"))
+ # test the old code path (no test/Project.toml)
+ Pkg.test("TestArguments"; test_args = `a b`, julia_args = `--quiet --check-bounds=no`)
+ Pkg.test("TestArguments"; test_args = ["a", "b"], julia_args = ["--quiet", "--check-bounds=no"])
+ # test new code path
+ touch(joinpath(tempdir, "TestArguments", "test", "Project.toml"))
+ Pkg.test("TestArguments"; test_args = `a b`, julia_args = `--quiet --check-bounds=no`)
+ Pkg.test("TestArguments"; test_args = ["a", "b"], julia_args = ["--quiet", "--check-bounds=no"])
+ end
+ end
+
+ @testset "threads" begin
+ mktempdir() do dir
+ path = copy_test_package(dir, "TestThreads")
+ cd(path) do
+ # Do this all in a subprocess to protect against the parent having non-default threadpool sizes.
+ script = """
+ using Pkg, Test
+ @testset "JULIA_NUM_THREADS=1" begin
+ withenv(
+ "EXPECTED_NUM_THREADS_DEFAULT" => "1",
+ "EXPECTED_NUM_THREADS_INTERACTIVE" => "0", # https://github.com/JuliaLang/julia/pull/57454
+ "JULIA_NUM_THREADS" => "1",
+ ) do
+ Pkg.test("TestThreads")
+ end
+ end
+ @testset "JULIA_NUM_THREADS=2" begin
+ withenv(
+ "EXPECTED_NUM_THREADS_DEFAULT" => "2",
+ "EXPECTED_NUM_THREADS_INTERACTIVE" => "1",
+ "JULIA_NUM_THREADS" => "2",
+ ) do
+ Pkg.test("TestThreads")
+ end
+ end
+ @testset "JULIA_NUM_THREADS=2,0" begin
+ withenv(
+ "EXPECTED_NUM_THREADS_DEFAULT" => "2",
+ "EXPECTED_NUM_THREADS_INTERACTIVE" => "0",
+ "JULIA_NUM_THREADS" => "2,0",
+ ) do
+ Pkg.test("TestThreads")
+ end
+ end
+
+ @testset "--threads=1" begin
+ withenv(
+ "EXPECTED_NUM_THREADS_DEFAULT" => "1",
+ "EXPECTED_NUM_THREADS_INTERACTIVE" => "0", # https://github.com/JuliaLang/julia/pull/57454
+ "JULIA_NUM_THREADS" => nothing,
+ ) do
+ Pkg.test("TestThreads"; julia_args=`--threads=1`)
+ end
+ end
+ @testset "--threads=2" begin
+ withenv(
+ "EXPECTED_NUM_THREADS_DEFAULT" => "2",
+ "EXPECTED_NUM_THREADS_INTERACTIVE" => "1",
+ "JULIA_NUM_THREADS" => nothing,
+ ) do
+ Pkg.test("TestThreads"; julia_args=`--threads=2`)
+ end
+ end
+ @testset "--threads=2,0" begin
+ withenv(
+ "EXPECTED_NUM_THREADS_DEFAULT" => "2",
+ "EXPECTED_NUM_THREADS_INTERACTIVE" => "0",
+ "JULIA_NUM_THREADS" => nothing,
+ ) do
+ Pkg.test("TestThreads"; julia_args=`--threads=2,0`)
+ end
+ end
+ """
+ @test Utils.show_output_if_command_errors(`$(Base.julia_cmd()) --project=$(path) --startup-file=no -e "$script"`)
+ end
+ end
+ end
end
#
@@ -2012,66 +2530,76 @@ end
#
@testset "rm" begin
# simple rm
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.add("Example")
Pkg.rm("Example")
@test isempty(Pkg.project().dependencies)
@test isempty(Pkg.dependencies())
end
# remove should not alter other dependencies
- isolate(loaded_depot=true) do
- Pkg.add([(;name="Example"),
- (;name="JSON", version="0.18.0"),])
+ isolate(loaded_depot = true) do
+ Pkg.add(
+ [
+ (; name = "Example"),
+ (; name = "JSON", version = "0.18.0"),
+ ]
+ )
json = Pkg.dependencies()[json_uuid]
Pkg.rm("Example")
@test Pkg.dependencies()[json_uuid] == json
@test haskey(Pkg.project().dependencies, "JSON")
end
# rm should remove unused compat entries
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "BasicCompat")
- Pkg.activate(path)
- # TODO interface for `compat`
- @test haskey(Pkg.Types.Context().env.project.compat, "Example")
- @test haskey(Pkg.Types.Context().env.project.compat, "julia")
- Pkg.rm("Example")
- @test !haskey(Pkg.Types.Context().env.project.compat, "Example")
- @test haskey(Pkg.Types.Context().env.project.compat, "julia")
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "BasicCompat")
+ Pkg.activate(path)
+ # TODO interface for `compat`
+ @test haskey(Pkg.Types.Context().env.project.compat, "Example")
+ @test haskey(Pkg.Types.Context().env.project.compat, "julia")
+ Pkg.rm("Example")
+ @test !haskey(Pkg.Types.Context().env.project.compat, "Example")
+ @test haskey(Pkg.Types.Context().env.project.compat, "julia")
+ end
+ end
# rm should not unnecessarily remove compat entries
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "CompatExtras")
- Pkg.activate(path)
- @test haskey(Pkg.Types.Context().env.project.compat, "Aqua")
- @test haskey(Pkg.Types.Context().env.project.compat, "DataFrames")
- Pkg.rm("DataFrames")
- @test !haskey(Pkg.Types.Context().env.project.compat, "DataFrames")
- @test haskey(Pkg.Types.Context().env.project.compat, "Aqua")
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "CompatExtras")
+ Pkg.activate(path)
+ @test haskey(Pkg.Types.Context().env.project.compat, "Aqua")
+ @test haskey(Pkg.Types.Context().env.project.compat, "DataFrames")
+ Pkg.rm("DataFrames")
+ @test !haskey(Pkg.Types.Context().env.project.compat, "DataFrames")
+ @test haskey(Pkg.Types.Context().env.project.compat, "Aqua")
+ end
+ end
# rm removes unused recursive dependencies
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- path = copy_test_package(tempdir, "SimplePackage")
- Pkg.develop(path=path)
- Pkg.add(name="JSON", version="0.18.0")
- Pkg.rm("SimplePackage")
- @test haskey(Pkg.dependencies(), markdown_uuid)
- @test !haskey(Pkg.dependencies(), simple_package_uuid)
- @test !haskey(Pkg.dependencies(), exuuid)
- @test haskey(Pkg.dependencies(), json_uuid)
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ path = copy_test_package(tempdir, "SimplePackage")
+ Pkg.develop(path = path)
+ Pkg.add(name = "JSON", version = "0.18.0")
+ Pkg.rm("SimplePackage")
+ @test haskey(Pkg.dependencies(), markdown_uuid)
+ @test !haskey(Pkg.dependencies(), simple_package_uuid)
+ @test !haskey(Pkg.dependencies(), exuuid)
+ @test haskey(Pkg.dependencies(), json_uuid)
+ end
+ end
# rm manifest mode
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.add("Example")
- Pkg.add(name="JSON", version="0.18.0")
- Pkg.rm("Random"; mode=Pkg.PKGMODE_MANIFEST)
+ Pkg.add(name = "JSON", version = "0.18.0")
+ Pkg.rm("Random"; mode = Pkg.PKGMODE_MANIFEST)
@test haskey(Pkg.dependencies(), exuuid)
@test !haskey(Pkg.dependencies(), json_uuid)
end
# rm nonexistent packages warns but does not error
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.add("Example")
- @test_logs (:warn, r"not in project, ignoring") Pkg.rm(name="FooBar", uuid=UUIDs.UUID(0))
- @test_logs (:warn, r"not in manifest, ignoring") Pkg.rm(name="FooBar", uuid=UUIDs.UUID(0); mode=Pkg.PKGMODE_MANIFEST)
+ @test_logs (:warn, r"not in project, ignoring") Pkg.rm(name = "FooBar", uuid = UUIDs.UUID(0))
+ @test_logs (:warn, r"not in manifest, ignoring") Pkg.rm(name = "FooBar", uuid = UUIDs.UUID(0); mode = Pkg.PKGMODE_MANIFEST)
end
end
@@ -2080,15 +2608,15 @@ end
Pkg.REPLMode.TEST_MODE[] = true
api, args, opts = first(Pkg.pkg"rm Example")
@test api == Pkg.rm
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test isempty(opts)
api, args, opts = first(Pkg.pkg"rm --project Example")
@test api == Pkg.rm
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:mode => Pkg.PKGMODE_PROJECT)
api, args, opts = first(Pkg.pkg"rm --manifest Example")
@test api == Pkg.rm
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test opts == Dict(:mode => Pkg.PKGMODE_MANIFEST)
end
end
@@ -2098,7 +2626,7 @@ end
#
@testset "all" begin
# pin all, free all, rm all packages
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
Pkg.add(["Example", "JSON"])
Pkg.pin(all_pkgs = true)
@@ -2158,7 +2686,7 @@ end
@test isempty(opts)
api, args, opts = first(Pkg.pkg"build Example")
@test api == Pkg.build
- @test args == [Pkg.PackageSpec(;name="Example")]
+ @test args == [Pkg.PackageSpec(; name = "Example")]
@test isempty(opts)
api, args, opts = first(Pkg.pkg"build --verbose")
@test api == Pkg.build
@@ -2166,37 +2694,43 @@ end
@test opts == Dict(:verbose => true)
api, args, opts = first(Pkg.pkg"build -v Foo Bar")
@test api == Pkg.build
- @test args == [Pkg.PackageSpec(;name="Foo"), Pkg.PackageSpec(;name="Bar")]
+ @test args == [Pkg.PackageSpec(; name = "Foo"), Pkg.PackageSpec(; name = "Bar")]
@test opts == Dict(:verbose => true)
end
# Test package that fails build
- isolate(loaded_depot=true) do; mktempdir() do tempdir
- package_path = copy_test_package(tempdir, "FailBuild")
- Pkg.activate(package_path)
- @test_throws PkgError Pkg.build()
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ package_path = copy_test_package(tempdir, "FailBuild")
+ Pkg.activate(package_path)
+ @test_throws PkgError Pkg.build()
+ end
+ end
# Build log location
- isolate(loaded_depot=true) do; mktempdir() do tmp
- path = git_init_package(tmp, joinpath(@__DIR__, "test_packages", "FailBuild"))
- # Log file in the directory when it is deved
- Pkg.develop(path=path; io=devnull)
- log_file_dev = joinpath(path, "deps", "build.log")
- @test !isfile(log_file_dev)
- @test_throws PkgError Pkg.build("FailBuild"; io=devnull)
- @test isfile(log_file_dev)
- @test occursin("oops", read(log_file_dev, String))
- # Log file in scratchspace when added
- addpath = dirname(dirname(Base.find_package("FailBuild")))
- log_file_add = joinpath(path, "deps", "build.log")
- @test_throws PkgError Pkg.add(path=path; io=devnull)
- @test !isfile(joinpath(Base.find_package("FailBuild"), "..", "..", "deps", "build.log"))
- log_file_add = joinpath(DEPOT_PATH[1], "scratchspaces",
- "44cfe95a-1eb2-52ea-b672-e2afdf69b78f", "f99d57aad0e5eb2434491b47bac92bb88d463001", "build.log")
- @test isfile(log_file_add)
- @test occursin("oops", read(log_file_add, String))
- end end
+ isolate(loaded_depot = true) do;
+ mktempdir() do tmp
+ path = git_init_package(tmp, joinpath(@__DIR__, "test_packages", "FailBuild"))
+ # Log file in the directory when it is deved
+ Pkg.develop(path = path; io = devnull)
+ log_file_dev = joinpath(path, "deps", "build.log")
+ @test !isfile(log_file_dev)
+ @test_throws PkgError Pkg.build("FailBuild"; io = devnull)
+ @test isfile(log_file_dev)
+ @test occursin("oops", read(log_file_dev, String))
+ # Log file in scratchspace when added
+ addpath = dirname(dirname(Base.find_package("FailBuild")))
+ log_file_add = joinpath(path, "deps", "build.log")
+ @test_throws PkgError Pkg.add(path = path; io = devnull)
+ @test !isfile(joinpath(Base.find_package("FailBuild"), "..", "..", "deps", "build.log"))
+ log_file_add = joinpath(
+ DEPOT_PATH[1], "scratchspaces",
+ "44cfe95a-1eb2-52ea-b672-e2afdf69b78f", "f99d57aad0e5eb2434491b47bac92bb88d463001", "build.log"
+ )
+ @test isfile(log_file_add)
+ @test occursin("oops", read(log_file_add, String))
+ end
+ end
end
#
@@ -2274,55 +2808,55 @@ end
#
@testset "Pkg.status" begin
# other
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
@test_deprecated Pkg.status(Pkg.PKGMODE_MANIFEST)
- @test_logs (:warn, r"diff option only available") match_mode=:any Pkg.status(diff=true)
+ @test_logs (:warn, r"diff option only available") match_mode = :any Pkg.status(diff = true)
end
# State changes
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
io = IOBuffer()
# Basic Add
- Pkg.add(Pkg.PackageSpec(; name="Example", version="0.3.0"); io=io)
+ Pkg.add(Pkg.PackageSpec(; name = "Example", version = "0.3.0"); io = io)
output = String(take!(io))
@test occursin(r"Updating `.+Project\.toml`", output)
@test occursin(r"\[7876af07\] \+ Example v0\.3\.0", output)
@test occursin(r"Updating `.+Manifest\.toml`", output)
@test occursin(r"\[7876af07\] \+ Example v0\.3\.0", output)
# Double add should not claim "Updating"
- Pkg.add(Pkg.PackageSpec(; name="Example", version="0.3.0"); io=io)
+ Pkg.add(Pkg.PackageSpec(; name = "Example", version = "0.3.0"); io = io)
output = String(take!(io))
@test occursin(r"No packages added to or removed from `.+Project\.toml`", output)
@test occursin(r"No packages added to or removed from `.+Manifest\.toml`", output)
# From tracking registry to tracking repo
- Pkg.add(Pkg.PackageSpec(; name="Example", rev="master"); io=io)
+ Pkg.add(Pkg.PackageSpec(; name = "Example", rev = "master"); io = io)
output = String(take!(io))
@test occursin(r"Updating `.+Project\.toml`", output)
@test occursin(r"\[7876af07\] ~ Example v0\.3\.0 ⇒ v\d\.\d\.\d `https://github\.com/JuliaLang/Example\.jl\.git#master`", output)
@test occursin(r"Updating `.+Manifest\.toml`", output)
@test occursin(r"\[7876af07\] ~ Example v0\.3\.0 ⇒ v\d\.\d\.\d `https://github.com/JuliaLang/Example.jl.git#master`", output)
# From tracking repo to tracking path
- Pkg.develop("Example"; io=io)
+ Pkg.develop("Example"; io = io)
output = String(take!(io))
@test occursin(r"Updating `.+Project\.toml`", output)
@test occursin(r"\[7876af07\] ~ Example v\d\.\d\.\d `https://github\.com/JuliaLang/Example\.jl\.git#master` ⇒ v\d\.\d\.\d `.+`", output)
@test occursin(r"Updating `.+Manifest\.toml`", output)
@test occursin(r"\[7876af07\] ~ Example v\d\.\d\.\d `https://github\.com/JuliaLang/Example\.jl\.git#master` ⇒ v\d\.\d\.\d `.+`", output)
# From tracking path to tracking repo
- Pkg.add(Pkg.PackageSpec(; name="Example", rev="master"); io=io)
+ Pkg.add(Pkg.PackageSpec(; name = "Example", rev = "master"); io = io)
output = String(take!(io))
@test occursin(r"Updating `.+Project\.toml`", output)
@test occursin(r"\[7876af07\] ~ Example v\d\.\d\.\d `.+` ⇒ v\d\.\d\.\d `https://github.com/JuliaLang/Example.jl.git#master`", output)
@test occursin(r"Updating `.+Manifest\.toml`", output)
@test occursin(r"\[7876af07\] ~ Example v\d\.\d\.\d `.+` ⇒ v\d\.\d\.\d `https://github.com/JuliaLang/Example.jl.git#master`", output)
# From tracking repo to tracking registered version
- Pkg.free("Example"; io=io)
+ Pkg.free("Example"; io = io)
output = String(take!(io))
@test occursin(r"Updating `.+Project\.toml`", output)
@test occursin(r"\[7876af07\] ~ Example v\d\.\d\.\d `https://github.com/JuliaLang/Example.jl.git#master` ⇒ v\d\.\d\.\d", output)
@test occursin(r"Updating `.+Manifest\.toml`", output)
@test occursin(r"\[7876af07\] ~ Example v\d\.\d\.\d `https://github.com/JuliaLang/Example.jl.git#master` ⇒ v\d\.\d\.\d", output)
# Removing registered version
- Pkg.rm("Example"; io=io)
+ Pkg.rm("Example"; io = io)
output = String(take!(io))
@test occursin(r"Updating `.+Project.toml`", output)
@test occursin(r"\[7876af07\] - Example v\d\.\d\.\d", output)
@@ -2331,31 +2865,31 @@ end
# Pinning a registered package
Pkg.add("Example")
- Pkg.pin("Example"; io=io)
+ Pkg.pin("Example"; io = io)
output = String(take!(io))
@test occursin(r"Updating `.+Project.toml`", output)
@test occursin(r"\[7876af07\] ~ Example v\d\.\d\.\d ⇒ v\d\.\d\.\d ⚲", output)
@test occursin(r"Updating `.+Manifest.toml`", output)
# Free a pinned package
- Pkg.free("Example"; io=io)
+ Pkg.free("Example"; io = io)
output = String(take!(io))
@test occursin(r"Updating `.+Project.toml`", output)
@test occursin(r"\[7876af07\] ~ Example v\d\.\d\.\d ⚲ ⇒ v\d\.\d\.\d", output)
@test occursin(r"Updating `.+Manifest.toml`", output)
end
# Project Status API
- isolate(loaded_depot=true) do
- Pkg.Registry.add(Pkg.RegistrySpec[], io=devnull) # load reg before io capturing
+ isolate(loaded_depot = true) do
+ Pkg.Registry.add(Pkg.RegistrySpec[], io = devnull) # load reg before io capturing
io = PipeBuffer()
## empty project
- Pkg.status(;io=io)
+ Pkg.status(; io = io)
@test occursin(r"Status `.+Project.toml` \(empty project\)", readline(io))
## loaded project
Pkg.add("Markdown")
- Pkg.add( name="JSON", version="0.18.0")
+ Pkg.add(name = "JSON", version = "0.18.0")
Pkg.develop("Example")
- Pkg.add(url="https://github.com/00vareladavid/Unregistered.jl")
+ Pkg.add(url = "https://github.com/00vareladavid/Unregistered.jl")
Pkg.status(; io = io)
@test occursin(r"Status `.+Project\.toml`", readline(io))
@test occursin(r"\[7876af07\] Example\s*v\d\.\d\.\d\s*`.+`", readline(io))
@@ -2365,24 +2899,24 @@ end
end
## status warns when package not installed
isolate() do
- Pkg.Registry.add(Pkg.RegistrySpec[], io=devnull) # load reg before io capturing
+ Pkg.Registry.add(Pkg.RegistrySpec[], io = devnull) # load reg before io capturing
Pkg.activate(joinpath(@__DIR__, "test_packages", "Status"))
io = PipeBuffer()
- Pkg.status(; io=io)
+ Pkg.status(; io = io)
@test occursin(r"Status `.+Project.toml`", readline(io))
@test occursin(r"^→⌃ \[7876af07\] Example\s*v\d\.\d\.\d", readline(io))
@test occursin(r"^ \[d6f4376e\] Markdown", readline(io))
@test "Info Packages marked with → are not downloaded, use `instantiate` to download" == strip(readline(io))
@test "Info Packages marked with ⌃ have new versions available and may be upgradable." == strip(readline(io))
- Pkg.status(;io=io, mode=Pkg.PKGMODE_MANIFEST)
+ Pkg.status(; io = io, mode = Pkg.PKGMODE_MANIFEST)
@test occursin(r"Status `.+Manifest.toml`", readline(io))
@test occursin(r"^→⌃ \[7876af07\] Example\s*v\d\.\d\.\d", readline(io))
@test occursin(r"^ \[2a0f44e3\] Base64", readline(io))
@test occursin(r"^ \[d6f4376e\] Markdown", readline(io))
@test "Info Packages marked with → are not downloaded, use `instantiate` to download" == strip(readline(io))
@test "Info Packages marked with ⌃ have new versions available and may be upgradable." == strip(readline(io))
- Pkg.instantiate(;io=devnull) # download Example
- Pkg.status(;io=io, mode=Pkg.PKGMODE_MANIFEST)
+ Pkg.instantiate(; io = devnull) # download Example
+ Pkg.status(; io = io, mode = Pkg.PKGMODE_MANIFEST)
@test occursin(r"Status `.+Manifest.toml`", readline(io))
@test occursin(r"^⌃ \[7876af07\] Example\s*v\d\.\d\.\d", readline(io))
@test occursin(r"^ \[2a0f44e3\] Base64", readline(io))
@@ -2390,16 +2924,16 @@ end
@test "Info Packages marked with ⌃ have new versions available and may be upgradable." == strip(readline(io))
end
# Manifest Status API
- isolate(loaded_depot=true) do
- Pkg.Registry.add(Pkg.RegistrySpec[], io=devnull) # load reg before io capturing
+ isolate(loaded_depot = true) do
+ Pkg.Registry.add(Pkg.RegistrySpec[], io = devnull) # load reg before io capturing
io = PipeBuffer()
## empty manifest
- Pkg.status(;io=io, mode=Pkg.PKGMODE_MANIFEST)
+ Pkg.status(; io = io, mode = Pkg.PKGMODE_MANIFEST)
@test occursin(r"Status `.+Manifest\.toml` \(empty manifest\)", readline(io))
# loaded manifest
- Pkg.add( name="Example", version="0.3.0")
+ Pkg.add(name = "Example", version = "0.3.0")
Pkg.add("Markdown")
- Pkg.status(; io=io, mode=Pkg.PKGMODE_MANIFEST)
+ Pkg.status(; io = io, mode = Pkg.PKGMODE_MANIFEST)
statuslines = readlines(io)
@test occursin(r"Status `.+Manifest.toml`", first(statuslines))
@test any(l -> occursin(r"\[7876af07\] Example\s*v0\.3\.0", l), statuslines)
@@ -2407,41 +2941,41 @@ end
@test any(l -> occursin(r"\[d6f4376e\] Markdown", l), statuslines)
end
# Diff API
- isolate(loaded_depot=true) do
- Pkg.Registry.add(Pkg.RegistrySpec[], io=devnull) # load reg before io capturing
+ isolate(loaded_depot = true) do
+ Pkg.Registry.add(Pkg.RegistrySpec[], io = devnull) # load reg before io capturing
io = PipeBuffer()
projdir = dirname(Pkg.project().path)
mkpath(projdir)
git_init_and_commit(projdir)
## empty project + empty diff
- Pkg.status(; io=io, diff=true)
+ Pkg.status(; io = io, diff = true)
@test occursin(r"No packages added to or removed from `.+Project\.toml`", readline(io))
- Pkg.status(; io=io, mode=Pkg.PKGMODE_MANIFEST, diff=true)
+ Pkg.status(; io = io, mode = Pkg.PKGMODE_MANIFEST, diff = true)
@test occursin(r"No packages added to or removed from `.+Manifest\.toml`", readline(io))
### empty diff + filter
- Pkg.status("Example"; io=io, diff=true)
+ Pkg.status("Example"; io = io, diff = true)
@test occursin(r"No packages added to or removed from `.+Project\.toml`", readline(io))
## non-empty project but empty diff
Pkg.add("Markdown")
git_init_and_commit(dirname(Pkg.project().path))
- Pkg.status(; io=io, diff=true)
+ Pkg.status(; io = io, diff = true)
@test occursin(r"No packages added to or removed from `.+Project\.toml`", readline(io))
- Pkg.status(; io=io, mode=Pkg.PKGMODE_MANIFEST, diff=true)
+ Pkg.status(; io = io, mode = Pkg.PKGMODE_MANIFEST, diff = true)
@test occursin(r"No packages added to or removed from `.+Manifest\.toml`", readline(io))
### filter should still show "empty diff"
- Pkg.status("Example"; io=io, diff=true)
+ Pkg.status("Example"; io = io, diff = true)
@test occursin(r"No packages added to or removed from `.+Project\.toml`", readline(io))
## non-empty project + non-empty diff
Pkg.rm("Markdown")
- Pkg.add(name="Example", version="0.3.0")
+ Pkg.add(name = "Example", version = "0.3.0")
## diff project
- Pkg.status(; io=io, diff=true)
+ Pkg.status(; io = io, diff = true)
@test occursin(r"Diff `.+Project\.toml`", readline(io))
@test occursin(r"\[7876af07\] \+ Example\s*v0\.3\.0", readline(io))
@test occursin(r"\[d6f4376e\] - Markdown", readline(io))
@test occursin("Info Packages marked with ⌃ have new versions available and may be upgradable.", readline(io))
## diff manifest
- Pkg.status(; io=io, mode=Pkg.PKGMODE_MANIFEST, diff=true)
+ Pkg.status(; io = io, mode = Pkg.PKGMODE_MANIFEST, diff = true)
statuslines = readlines(io)
@test occursin(r"Diff `.+Manifest.toml`", first(statuslines))
@test any(l -> occursin(r"\[7876af07\] \+ Example\s*v0\.3\.0", l), statuslines)
@@ -2449,32 +2983,32 @@ end
@test any(l -> occursin(r"\[d6f4376e\] - Markdown", l), statuslines)
@test any(l -> occursin("Info Packages marked with ⌃ have new versions available and may be upgradable.", l), statuslines)
## diff project with filtering
- Pkg.status("Markdown"; io=io, diff=true)
+ Pkg.status("Markdown"; io = io, diff = true)
@test occursin(r"Diff `.+Project\.toml`", readline(io))
@test occursin(r"\[d6f4376e\] - Markdown", readline(io))
## empty diff + filter
- Pkg.status("Base64"; io=io, diff=true)
+ Pkg.status("Base64"; io = io, diff = true)
@test occursin(r"No Matches in diff for `.+Project\.toml`", readline(io))
## diff manifest with filtering
- Pkg.status("Base64"; io=io, mode=Pkg.PKGMODE_MANIFEST, diff=true)
+ Pkg.status("Base64"; io = io, mode = Pkg.PKGMODE_MANIFEST, diff = true)
@test occursin(r"Diff `.+Manifest.toml`", readline(io))
@test occursin(r"\[2a0f44e3\] - Base64", readline(io))
## manifest diff + empty filter
- Pkg.status("FooBar"; io=io, mode=Pkg.PKGMODE_MANIFEST, diff=true)
+ Pkg.status("FooBar"; io = io, mode = Pkg.PKGMODE_MANIFEST, diff = true)
@test occursin(r"No Matches in diff for `.+Manifest.toml`", readline(io))
end
# Outdated API
- isolate(loaded_depot=true) do
- Pkg.Registry.add(Pkg.RegistrySpec[], io=devnull) # load reg before io capturing
- Pkg.add("Example"; io=devnull)
+ isolate(loaded_depot = true) do
+ Pkg.Registry.add(Pkg.RegistrySpec[], io = devnull) # load reg before io capturing
+ Pkg.add("Example"; io = devnull)
v = Pkg.dependencies()[exuuid].version
io = IOBuffer()
- Pkg.add(Pkg.PackageSpec(name="Example", version="0.4.0"); io=devnull)
- Pkg.status(; outdated=true, io=io)
+ Pkg.add(Pkg.PackageSpec(name = "Example", version = "0.4.0"); io = devnull)
+ Pkg.status(; outdated = true, io = io)
str = String(take!(io))
@test occursin(Regex("⌃\\s*\\[7876af07\\] Example\\s*v0.4.0\\s*\\( tmp) do
+ withenv("JULIA_DEPOT_PATH" => tmp * (Sys.iswindows() ? ";" : ":")) do
Base.init_depot_path()
cp(joinpath(@__DIR__, "test_packages", "BasicSandbox"), joinpath(tmp, "BasicSandbox"))
git_init_and_commit(joinpath(tmp, "BasicSandbox"))
cd(tmp) do
- Pkg.add(path="BasicSandbox")
+ Pkg.add(path = "BasicSandbox")
end
end
end
end
end
-using Pkg.Types: is_stdlib
-@testset "is_stdlib() across versions" begin
- HistoricalStdlibVersions.register!()
-
- networkoptions_uuid = UUID("ca575930-c2e3-43a9-ace4-1e988b2c1908")
- pkg_uuid = UUID("44cfe95a-1eb2-52ea-b672-e2afdf69b78f")
-
- # Test NetworkOptions across multiple versions (It became an stdlib in v1.6+, and was registered)
- @test is_stdlib(networkoptions_uuid)
- @test is_stdlib(networkoptions_uuid, v"1.6")
- @test !is_stdlib(networkoptions_uuid, v"1.5")
- @test !is_stdlib(networkoptions_uuid, v"1.0.0")
- @test !is_stdlib(networkoptions_uuid, v"0.7")
- @test !is_stdlib(networkoptions_uuid, nothing)
-
- # Pkg is an unregistered stdlib and has always been an stdlib
- @test is_stdlib(pkg_uuid)
- @test is_stdlib(pkg_uuid, v"1.0")
- @test is_stdlib(pkg_uuid, v"1.6")
- @test is_stdlib(pkg_uuid, v"999.999.999")
- @test is_stdlib(pkg_uuid, v"0.7")
- @test is_stdlib(pkg_uuid, nothing)
-
- HistoricalStdlibVersions.unregister!()
- # Test that we can probe for stdlibs for the current version with no STDLIBS_BY_VERSION,
- # but that we throw a PkgError if we ask for a particular julia version.
- @test is_stdlib(networkoptions_uuid)
- @test_throws Pkg.Types.PkgError is_stdlib(networkoptions_uuid, v"1.6")
-end
-
-
-@testset "Pkg.add() with julia_version" begin
- HistoricalStdlibVersions.register!()
-
- # A package with artifacts that went from normal package -> stdlib
- gmp_jll_uuid = "781609d7-10c4-51f6-84f2-b8444358ff6d"
- # A package that has always only ever been an stdlib
- linalg_uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
- # A package that went from normal package - >stdlib
- networkoptions_uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908"
-
- function get_manifest_block(name)
- manifest_path = joinpath(dirname(Base.active_project()), "Manifest.toml")
- @test isfile(manifest_path)
- deps = Base.get_deps(TOML.parsefile(manifest_path))
- @test haskey(deps, name)
- return only(deps[name])
- end
-
- isolate(loaded_depot=true) do
- # Next, test that if we ask for `v1.5` it DOES have a version, and that GMP_jll installs v6.1.X
- Pkg.add(["NetworkOptions", "GMP_jll"]; julia_version=v"1.5")
- no_block = get_manifest_block("NetworkOptions")
- @test haskey(no_block, "uuid")
- @test no_block["uuid"] == networkoptions_uuid
- @test haskey(no_block, "version")
-
- gmp_block = get_manifest_block("GMP_jll")
- @test haskey(gmp_block, "uuid")
- @test gmp_block["uuid"] == gmp_jll_uuid
- @test haskey(gmp_block, "version")
- @test startswith(gmp_block["version"], "6.1.2")
-
- # Test that the artifact of GMP_jll contains the right library
- @test haskey(gmp_block, "git-tree-sha1")
- gmp_jll_dir = Pkg.Operations.find_installed("GMP_jll", Base.UUID(gmp_jll_uuid), Base.SHA1(gmp_block["git-tree-sha1"]))
- @test isdir(gmp_jll_dir)
- artifacts_toml = joinpath(gmp_jll_dir, "Artifacts.toml")
- @test isfile(artifacts_toml)
- meta = artifact_meta("GMP", artifacts_toml)
-
- # `meta` can be `nothing` on some of our newer platforms; we _know_ this should
- # not be the case on the following platforms, so we check these explicitly to
- # ensure that we haven't accidentally broken something, and then we gate some
- # following tests on whether or not `meta` is `nothing`:
- for arch in ("x86_64", "i686"), os in ("linux", "mac", "windows")
- if platforms_match(HostPlatform(), Platform(arch, os))
- @test meta !== nothing
- end
- end
-
- # These tests require a matching platform artifact for this old version of GMP_jll,
- # which is not the case on some of our newer platforms.
- if meta !== nothing
- gmp_artifact_path = artifact_path(Base.SHA1(meta["git-tree-sha1"]))
- @test isdir(gmp_artifact_path)
-
- # On linux, we can check the filename to ensure it's grabbing the correct library
- if Sys.islinux()
- libgmp_filename = joinpath(gmp_artifact_path, "lib", "libgmp.so.10.3.2")
- @test isfile(libgmp_filename)
- end
- end
- end
-
- # Next, test that if we ask for `v1.6`, GMP_jll gets `v6.2.0`, and for `v1.7`, it gets `v6.2.1`
- function do_gmp_test(julia_version, gmp_version)
- isolate(loaded_depot=true) do
- Pkg.add("GMP_jll"; julia_version)
- gmp_block = get_manifest_block("GMP_jll")
- @test haskey(gmp_block, "uuid")
- @test gmp_block["uuid"] == gmp_jll_uuid
- @test haskey(gmp_block, "version")
- @test startswith(gmp_block["version"], string(gmp_version))
- end
- end
- do_gmp_test(v"1.6", v"6.2.0")
- do_gmp_test(v"1.7", v"6.2.1")
-
- isolate(loaded_depot=true) do
- # Next, test that if we ask for `nothing`, NetworkOptions has a `version` but `LinearAlgebra` does not.
- Pkg.add(["LinearAlgebra", "NetworkOptions"]; julia_version=nothing)
- no_block = get_manifest_block("NetworkOptions")
- @test haskey(no_block, "uuid")
- @test no_block["uuid"] == networkoptions_uuid
- @test haskey(no_block, "version")
- linalg_block = get_manifest_block("LinearAlgebra")
- @test haskey(linalg_block, "uuid")
- @test linalg_block["uuid"] == linalg_uuid
- @test !haskey(linalg_block, "version")
- end
-
- isolate(loaded_depot=true) do
- # Next, test that stdlibs do not get dependencies from the registry
- # NOTE: this test depends on the fact that in Julia v1.6+ we added
- # "fake" JLLs that do not depend on Pkg while the "normal" p7zip_jll does.
- # A future p7zip_jll in the registry may not depend on Pkg, so be sure
- # to verify your assumptions when updating this test.
- Pkg.add("p7zip_jll")
- p7zip_jll_uuid = UUID("3f19e933-33d8-53b3-aaab-bd5110c3b7a0")
- @test !("Pkg" in keys(Pkg.dependencies()[p7zip_jll_uuid].dependencies))
- end
-
- HistoricalStdlibVersions.unregister!()
-end
-
-
@testset "Issue #2931" begin
- isolate(loaded_depot=false) do
+ isolate(loaded_depot = false) do
temp_pkg_dir() do path
name = "Example"
version = "0.5.3"
@@ -3163,7 +3646,7 @@ end
# Delete directory where the package would be installed
pkg_dir = Pkg.Operations.find_installed(name, exuuid, tree_hash)
- rm(pkg_dir; recursive=true, force=true)
+ rm(pkg_dir; recursive = true, force = true)
# (Re-)download sources
Pkg.Operations.download_source(ctx)
@@ -3174,49 +3657,86 @@ end
end
end
-if :version in fieldnames(Base.PkgOrigin)
-@testset "sysimage functionality" begin
- old_sysimage_modules = copy(Base._sysimage_modules)
- old_pkgorigins = copy(Base.pkgorigins)
- try
- # Fake having a packages in the sysimage.
- json_pkgid = Base.PkgId(json_uuid, "JSON")
- push!(Base._sysimage_modules, json_pkgid)
- Base.pkgorigins[json_pkgid] = Base.PkgOrigin(nothing, nothing, v"0.20.1")
- isolate(loaded_depot=true) do
- Pkg.add("JSON"; io=devnull)
- Pkg.dependencies(json_uuid) do pkg
- pkg.version == v"0.20.1"
+@testset "Issue #4345: pidfile in writable location when depot is readonly" begin
+ isolate(loaded_depot = false) do
+ mktempdir() do readonly_depot
+ mktempdir() do writable_depot
+ # Set up initial depot with a package
+ old_depot_path = copy(DEPOT_PATH)
+ try
+ empty!(DEPOT_PATH)
+ push!(DEPOT_PATH, readonly_depot)
+ Base.append_bundled_depot_path!(DEPOT_PATH)
+
+ Pkg.activate(temp = true)
+ # Install Example.jl in the initial depot
+ Pkg.add(name = "Example", version = "0.5.3")
+
+ # Make the depot read-only
+ run(`chmod -R -w $readonly_depot`)
+
+ # Add writable depot to front of DEPOT_PATH
+ pushfirst!(DEPOT_PATH, writable_depot)
+
+ # Create a new temporary environment and try to add a package
+ # that depends on something in the readonly depot
+ Pkg.activate(temp = true)
+ # This should not fail with permission denied on pidfile creation
+ # The fix ensures pidfiles are created in writable locations
+ @test_nowarn Pkg.add(name = "Example", version = "0.5.3")
+ finally
+ # Restore depot path and make readonly depot writable again for cleanup
+ empty!(DEPOT_PATH)
+ append!(DEPOT_PATH, old_depot_path)
+ run(`chmod -R +w $readonly_depot`)
+ end
end
- io = IOBuffer()
- Pkg.status(; outdated=true, io=io)
- str = String(take!(io))
- @test occursin("⌅ [682c06a0] JSON v0.20.1", str)
- @test occursin("[sysimage]", str)
-
- @test_throws PkgError Pkg.add(name="JSON", rev="master"; io=devnull)
- @test_throws PkgError Pkg.develop("JSON"; io=devnull)
+ end
+ end
+end
- Pkg.respect_sysimage_versions(false)
- Pkg.add("JSON"; io=devnull)
- Pkg.dependencies(json_uuid) do pkg
- pkg.version != v"0.20.1"
+if :version in fieldnames(Base.PkgOrigin)
+ @testset "sysimage functionality" begin
+ old_sysimage_modules = copy(Base._sysimage_modules)
+ old_pkgorigins = copy(Base.pkgorigins)
+ try
+ # Fake having a packages in the sysimage.
+ json_pkgid = Base.PkgId(json_uuid, "JSON")
+ push!(Base._sysimage_modules, json_pkgid)
+ Base.pkgorigins[json_pkgid] = Base.PkgOrigin(nothing, nothing, v"0.20.1")
+ isolate(loaded_depot = true) do
+ Pkg.add("JSON"; io = devnull)
+ Pkg.dependencies(json_uuid) do pkg
+ pkg.version == v"0.20.1"
+ end
+ io = IOBuffer()
+ Pkg.status(; outdated = true, io = io)
+ str = String(take!(io))
+ @test occursin("⌅ [682c06a0] JSON v0.20.1", str)
+ @test occursin("[sysimage]", str)
+
+ @test_throws PkgError Pkg.add(name = "JSON", rev = "master"; io = devnull)
+ @test_throws PkgError Pkg.develop("JSON"; io = devnull)
+
+ Pkg.respect_sysimage_versions(false)
+ Pkg.add("JSON"; io = devnull)
+ Pkg.dependencies(json_uuid) do pkg
+ pkg.version != v"0.20.1"
+ end
end
+ finally
+ copy!(Base._sysimage_modules, old_sysimage_modules)
+ copy!(Base.pkgorigins, old_pkgorigins)
+ Pkg.respect_sysimage_versions(true)
end
- finally
- copy!(Base._sysimage_modules, old_sysimage_modules)
- copy!(Base.pkgorigins, old_pkgorigins)
- Pkg.respect_sysimage_versions(true)
end
end
-end
temp_pkg_dir() do project_path
@testset "test entryfile entries" begin
mktempdir() do dir
- path = abspath(joinpath(dirname(pathof(Pkg)), "../test", "test_packages", "ProjectPath"))
- cp(path, joinpath(dir, "ProjectPath"))
- cd(joinpath(dir, "ProjectPath")) do
+ path = copy_test_package(dir, "ProjectPath")
+ cd(path) do
with_current_env() do
Pkg.resolve()
@test success(run(`$(Base.julia_cmd()) --startup-file=no --project -e 'using ProjectPath'`))
@@ -3227,18 +3747,95 @@ temp_pkg_dir() do project_path
end
end
@testset "test resolve with tree hash" begin
+ isolate() do
+ mktempdir() do dir
+ path = copy_test_package(dir, "ResolveWithRev")
+ cd(path) do
+ with_current_env() do
+ @test !isfile("Manifest.toml")
+ @test !isdir(joinpath(DEPOT_PATH[1], "packages", "Example"))
+ Pkg.resolve()
+ @test isdir(joinpath(DEPOT_PATH[1], "packages", "Example"))
+ rm(joinpath(DEPOT_PATH[1], "packages", "Example"); recursive = true)
+ Pkg.resolve()
+ end
+ end
+ end
+ end
+end
+
+@testset "status diff non-root" begin
+ isolate(loaded_depot = true) do
+ cd_tempdir() do dir
+ Pkg.generate("A")
+ git_init_and_commit(".")
+ Pkg.activate("A")
+ Pkg.add("Example")
+ io = IOBuffer()
+ Pkg.status(; io, diff = true)
+ str = String(take!(io))
+ @test occursin("+ Example", str)
+ end
+ end
+end
+
+@testset "test instantiate with sources with only rev" begin
+ isolate() do
+ mktempdir() do dir
+ cp(joinpath(@__DIR__, "test_packages", "sources_only_rev", "Project.toml"), joinpath(dir, "Project.toml"))
+ cd(dir) do
+ with_current_env() do
+ @test !isfile("Manifest.toml")
+ Pkg.instantiate()
+ uuid, info = only(Pkg.dependencies())
+ @test info.git_revision == "ba3d6704f09330ae973773496a4212f85e0ffe45"
+ @test info.git_source == "https://github.com/JuliaLang/Example.jl.git"
+ end
+ end
+ end
+ end
+end
+
+@testset "status showing incompatible loaded deps" begin
+ cmd = addenv(`$(Base.julia_cmd()) --color=no --startup-file=no -e "
+ using Pkg
+ Pkg.activate(temp=true)
+ Pkg.add(Pkg.PackageSpec(name=\"Example\", version=v\"0.5.4\"))
+ using Example
+ Pkg.activate(temp=true)
+ Pkg.add(Pkg.PackageSpec(name=\"Example\", version=v\"0.5.5\"))
+ "`)
+ iob = IOBuffer()
+ run(pipeline(cmd, stderr = iob, stdout = iob))
+ out = String(take!(iob))
+ @test occursin("[loaded: v0.5.4]", out)
+end
+
+@test allunique(unique([Pkg.PackageSpec(path = "foo"), Pkg.PackageSpec(path = "foo")]))
+
+# Test the readonly functionality
+@testset "Readonly Environment Tests" begin
mktempdir() do dir
- path = abspath(joinpath(@__DIR__, "../test", "test_packages", "ResolveWithRev"))
- cp(path, joinpath(dir, "ResolveWithRev"))
- cd(joinpath(dir, "ResolveWithRev")) do
- with_current_env() do
- @test !isfile("Manifest.toml")
- @test !isdir(joinpath(DEPOT_PATH[1], "packages", "Example"))
- Pkg.resolve()
- @test isdir(joinpath(DEPOT_PATH[1], "packages", "Example"))
- rm(joinpath(DEPOT_PATH[1], "packages", "Example"); recursive = true)
- Pkg.resolve()
+ project_file = joinpath(dir, "Project.toml")
+
+ # Test that normal environment works
+ cd(dir) do
+ # Activate the environment
+ Pkg.activate(".")
+
+ # This should work fine
+ Pkg.add("Test") # Add Test package
+
+ # Now make it readonly
+ project_data = Dict("readonly" => true)
+ open(project_file, "w") do io
+ TOML.print(io, project_data)
end
+
+ # Now these should fail
+ @test_throws Pkg.Types.PkgError Pkg.add("Dates")
+ @test_throws Pkg.Types.PkgError Pkg.rm("Test")
+ @test_throws Pkg.Types.PkgError Pkg.update()
end
end
end
diff --git a/test/pkg.jl b/test/pkg.jl
index 2793c246e2..7ad0649e3c 100644
--- a/test/pkg.jl
+++ b/test/pkg.jl
@@ -24,44 +24,44 @@ const PackageSpec = Pkg.Types.PackageSpec
import Pkg.Types: semver_spec, VersionSpec
@testset "semver notation" begin
@test semver_spec("^1.2.3") == VersionSpec("1.2.3-1")
- @test semver_spec("^1.2") == VersionSpec("1.2.0-1")
- @test semver_spec("^1") == VersionSpec("1.0.0-1")
+ @test semver_spec("^1.2") == VersionSpec("1.2.0-1")
+ @test semver_spec("^1") == VersionSpec("1.0.0-1")
@test semver_spec("^0.2.3") == VersionSpec("0.2.3-0.2")
@test semver_spec("^0.0.3") == VersionSpec("0.0.3-0.0.3")
- @test semver_spec("^0.0") == VersionSpec("0.0.0-0.0")
- @test semver_spec("^0") == VersionSpec("0.0.0-0")
+ @test semver_spec("^0.0") == VersionSpec("0.0.0-0.0")
+ @test semver_spec("^0") == VersionSpec("0.0.0-0")
@test semver_spec("~1.2.3") == VersionSpec("1.2.3-1.2")
- @test semver_spec("~1.2") == VersionSpec("1.2.0-1.2")
- @test semver_spec("~1") == VersionSpec("1.0.0-1")
- @test semver_spec("1.2.3") == semver_spec("^1.2.3")
- @test semver_spec("1.2") == semver_spec("^1.2")
- @test semver_spec("1") == semver_spec("^1")
- @test semver_spec("0.0.3") == semver_spec("^0.0.3")
- @test semver_spec("0") == semver_spec("^0")
+ @test semver_spec("~1.2") == VersionSpec("1.2.0-1.2")
+ @test semver_spec("~1") == VersionSpec("1.0.0-1")
+ @test semver_spec("1.2.3") == semver_spec("^1.2.3")
+ @test semver_spec("1.2") == semver_spec("^1.2")
+ @test semver_spec("1") == semver_spec("^1")
+ @test semver_spec("0.0.3") == semver_spec("^0.0.3")
+ @test semver_spec("0") == semver_spec("^0")
@test semver_spec("0.0.3, 1.2") == VersionSpec(["0.0.3-0.0.3", "1.2.0-1"])
@test semver_spec("~1.2.3, ~v1") == VersionSpec(["1.2.3-1.2", "1.0.0-1"])
- @test v"1.5.2" in semver_spec("1.2.3")
- @test v"1.2.3" in semver_spec("1.2.3")
- @test !(v"2.0.0" in semver_spec("1.2.3"))
- @test !(v"1.2.2" in semver_spec("1.2.3"))
+ @test v"1.5.2" in semver_spec("1.2.3")
+ @test v"1.2.3" in semver_spec("1.2.3")
+ @test !(v"2.0.0" in semver_spec("1.2.3"))
+ @test !(v"1.2.2" in semver_spec("1.2.3"))
@test v"1.2.99" in semver_spec("~1.2.3")
- @test v"1.2.3" in semver_spec("~1.2.3")
- @test !(v"1.3" in semver_spec("~1.2.3"))
- @test v"1.2.0" in semver_spec("1.2")
- @test v"1.9.9" in semver_spec("1.2")
- @test !(v"2.0.0" in semver_spec("1.2"))
- @test !(v"1.1.9" in semver_spec("1.2"))
- @test v"0.2.3" in semver_spec("0.2.3")
- @test !(v"0.3.0" in semver_spec("0.2.3"))
- @test !(v"0.2.2" in semver_spec("0.2.3"))
- @test v"0.0.0" in semver_spec("0")
- @test v"0.99.0" in semver_spec("0")
- @test !(v"1.0.0" in semver_spec("0"))
- @test v"0.0.0" in semver_spec("0.0")
- @test v"0.0.99" in semver_spec("0.0")
- @test !(v"0.1.0" in semver_spec("0.0"))
+ @test v"1.2.3" in semver_spec("~1.2.3")
+ @test !(v"1.3" in semver_spec("~1.2.3"))
+ @test v"1.2.0" in semver_spec("1.2")
+ @test v"1.9.9" in semver_spec("1.2")
+ @test !(v"2.0.0" in semver_spec("1.2"))
+ @test !(v"1.1.9" in semver_spec("1.2"))
+ @test v"0.2.3" in semver_spec("0.2.3")
+ @test !(v"0.3.0" in semver_spec("0.2.3"))
+ @test !(v"0.2.2" in semver_spec("0.2.3"))
+ @test v"0.0.0" in semver_spec("0")
+ @test v"0.99.0" in semver_spec("0")
+ @test !(v"1.0.0" in semver_spec("0"))
+ @test v"0.0.0" in semver_spec("0.0")
+ @test v"0.0.99" in semver_spec("0.0")
+ @test !(v"0.1.0" in semver_spec("0.0"))
@test semver_spec("<1.2.3") == VersionSpec("0.0.0 - 1.2.2")
@test semver_spec("<1.2") == VersionSpec("0.0.0 - 1.1")
@@ -147,8 +147,8 @@ import Pkg.Types: semver_spec, VersionSpec
@test_throws ErrorException semver_spec("0.0.0")
@test_throws ErrorException semver_spec("0.7 1.0")
- @test Pkg.Types.isjoinable(Pkg.Types.VersionBound((1,5)), Pkg.Types.VersionBound((1,6)))
- @test !(Pkg.Types.isjoinable(Pkg.Types.VersionBound((1,5)), Pkg.Types.VersionBound((1,6,0))))
+ @test Pkg.Types.isjoinable(Pkg.Types.VersionBound((1, 5)), Pkg.Types.VersionBound((1, 6)))
+ @test !(Pkg.Types.isjoinable(Pkg.Types.VersionBound((1, 5)), Pkg.Types.VersionBound((1, 6, 0))))
end
# TODO: Should rewrite these tests not to rely on internals like field names
@@ -188,25 +188,37 @@ temp_pkg_dir() do project_path
pkgdir = joinpath(Pkg.depots1(), "packages")
# Test to ensure that with a long enough collect_delay, nothing gets reaped
- Pkg.gc(;collect_delay=Day(1000))
+ Pkg.gc(; collect_delay = Day(1000))
@test !isempty(readdir(pkgdir))
# Setting collect_delay to zero causes it to be reaped immediately, however
- Pkg.gc(;collect_delay=Second(0))
+ Pkg.gc(; collect_delay = Second(0))
@test isempty(readdir(pkgdir))
clonedir = joinpath(Pkg.depots1(), "clones")
- Pkg.add(Pkg.PackageSpec(name=TEST_PKG.name, rev="master"))
+ Pkg.add(Pkg.PackageSpec(name = TEST_PKG.name, rev = "master"))
@test !isempty(readdir(clonedir))
Pkg.rm(TEST_PKG.name)
- Pkg.gc(;collect_delay=Day(1000))
+ Pkg.gc(; collect_delay = Day(1000))
@test !isempty(readdir(clonedir))
- Pkg.gc(;collect_delay=Second(0))
+ Pkg.gc(; collect_delay = Second(0))
@test isempty(readdir(clonedir))
end
@testset "package with wrong UUID" begin
@test_throws PkgError Pkg.add(PackageSpec(TEST_PKG.name, UUID(UInt128(1))))
+ @testset "package with wrong UUID but correct name" begin
+ try
+ Pkg.add(PackageSpec(name = "Example", uuid = UUID(UInt128(2))))
+ catch e
+ @test e isa PkgError
+ errstr = sprint(showerror, e)
+ @test occursin("expected package `Example [00000000]` to be registered", errstr)
+ @test occursin("You may have provided the wrong UUID for package Example.", errstr)
+ @test occursin("Found the following UUIDs for that name:", errstr)
+ @test occursin("- 7876af07-990d-54b4-ab0e-23690620f79a from registry: General", errstr)
+ end
+ end
# Missing uuid
@test_throws PkgError Pkg.add(PackageSpec(uuid = uuid4()))
end
@@ -240,7 +252,7 @@ temp_pkg_dir() do project_path
recursive_rm_cov_files(pkgdir) # clean out cov files from previous test runs
@test !any(endswith(".cov"), readdir(pkgdir)) # should be no cov files to start with
- Pkg.test(TEST_PKG.name; coverage=true)
+ Pkg.test(TEST_PKG.name; coverage = true)
@test any(endswith(".cov"), readdir(pkgdir))
Pkg.rm(TEST_PKG.name)
end
@@ -258,7 +270,7 @@ temp_pkg_dir() do project_path
@testset "pinning / freeing" begin
Pkg.add(TEST_PKG.name)
old_v = Pkg.dependencies()[TEST_PKG.uuid].version
- Pkg.pin(Pkg.PackageSpec(;name=TEST_PKG.name, version=v"0.2"))
+ Pkg.pin(Pkg.PackageSpec(; name = TEST_PKG.name, version = v"0.2"))
@test Pkg.dependencies()[TEST_PKG.uuid].version.minor == 2
Pkg.update(TEST_PKG.name)
@test Pkg.dependencies()[TEST_PKG.uuid].version.minor == 2
@@ -269,20 +281,21 @@ temp_pkg_dir() do project_path
end
@testset "develop / freeing" begin
- Pkg.add(name=TEST_PKG.name, version=v"0.5.3")
+ Pkg.add(name = TEST_PKG.name, version = v"0.5.3")
old_v = Pkg.dependencies()[TEST_PKG.uuid].version
@test old_v == v"0.5.3"
Pkg.rm(TEST_PKG.name)
mktempdir() do devdir
withenv("JULIA_PKG_DEVDIR" => devdir) do
- @test_throws PkgError Pkg.develop(Pkg.PackageSpec(url="bleh", rev="blurg"))
+ @test_throws PkgError Pkg.develop(Pkg.PackageSpec(url = "bleh", rev = "blurg"))
Pkg.develop(TEST_PKG.name)
@test isinstalled(TEST_PKG)
@test Pkg.dependencies()[TEST_PKG.uuid].version > old_v
test_pkg_main_file = joinpath(devdir, TEST_PKG.name, "src", TEST_PKG.name * ".jl")
@test isfile(test_pkg_main_file)
# Pkg #152
- write(test_pkg_main_file,
+ write(
+ test_pkg_main_file,
"""
module Example
export hello, domath
@@ -293,9 +306,11 @@ temp_pkg_dir() do project_path
hello(who::String) = "Hello, \$who"
domath(x::Number) = x + 5
end
- """)
+ """
+ )
mkpath(joinpath(devdir, TEST_PKG.name, "deps"))
- write(joinpath(devdir, TEST_PKG.name, "deps", "build.jl"),
+ write(
+ joinpath(devdir, TEST_PKG.name, "deps", "build.jl"),
"""
touch("deps.jl")
"""
@@ -304,14 +319,16 @@ temp_pkg_dir() do project_path
proj_str = read(exa_proj, String)
compat_onwards = split(proj_str, "[compat]")[2]
open(exa_proj, "w") do io
- println(io, """
- name = "Example"
- uuid = "$(TEST_PKG.uuid)"
- version = "100.0.0"
-
- [compat]
- $compat_onwards
- """)
+ println(
+ io, """
+ name = "Example"
+ uuid = "$(TEST_PKG.uuid)"
+ version = "100.0.0"
+
+ [compat]
+ $compat_onwards
+ """
+ )
end
Pkg.resolve()
@test Pkg.dependencies()[TEST_PKG.uuid].version == v"100.0.0"
@@ -337,7 +354,7 @@ temp_pkg_dir() do project_path
@testset "package name in resolver errors" begin
try
- Pkg.add(PackageSpec(;name = TEST_PKG.name, version = v"55"))
+ Pkg.add(PackageSpec(; name = TEST_PKG.name, version = v"55"))
catch e
@test occursin(TEST_PKG.name, sprint(showerror, e))
end
@@ -401,12 +418,14 @@ temp_pkg_dir() do project_path
Sys.CPU_THREADS == 1 && error("Cannot test for atomic usage log file interaction effectively with only Sys.CPU_THREADS=1")
# Precompile Pkg given we're in a different depot
# and make sure the General registry is installed
- Utils.show_output_if_command_errors(`$(Base.julia_cmd()[1]) --project="$(pkgdir(Pkg))" -e "import Pkg; isempty(Pkg.Registry.reachable_registries()) && Pkg.Registry.add()"`)
+ Utils.show_output_if_command_errors(`$(Base.julia_cmd()) --project="$(pkgdir(Pkg))" -e "import Pkg; isempty(Pkg.Registry.reachable_registries()) && Pkg.Registry.add()"`)
flag_start_dir = tempdir() # once n=Sys.CPU_THREADS files are in here, the processes can proceed to the concurrent test
flag_end_file = tempname() # use creating this file as a way to stop the processes early if an error happens
for i in 1:Sys.CPU_THREADS
iob = IOBuffer()
- t = @async run(pipeline(`$(Base.julia_cmd()[1]) --project="$(pkgdir(Pkg))"
+ t = @async run(
+ pipeline(
+ `$(Base.julia_cmd()) --project="$(pkgdir(Pkg))"
-e "import Pkg;
Pkg.UPDATED_REGISTRY_THIS_SESSION[] = true;
Pkg.activate(temp = true);
@@ -432,7 +451,9 @@ temp_pkg_dir() do project_path
end
yield()
end"`,
- stderr = iob, stdout = devnull))
+ stderr = iob, stdout = devnull
+ )
+ )
push!(tasks, t)
push!(iobs, iob)
end
@@ -482,7 +503,7 @@ end
temp_pkg_dir() do project_path
@testset "libgit2 downloads" begin
- Pkg.add(TEST_PKG.name; use_git_for_all_downloads=true)
+ Pkg.add(TEST_PKG.name; use_git_for_all_downloads = true)
@test haskey(Pkg.dependencies(), TEST_PKG.uuid)
@eval import $(Symbol(TEST_PKG.name))
@test_throws SystemError open(pathof(eval(Symbol(TEST_PKG.name))), "w") do io end # check read-only
@@ -504,12 +525,12 @@ end
temp_pkg_dir() do project_path
@testset "libgit2 downloads" begin
- Pkg.add(TEST_PKG.name; use_git_for_all_downloads=true)
+ Pkg.add(TEST_PKG.name; use_git_for_all_downloads = true)
@test haskey(Pkg.dependencies(), TEST_PKG.uuid)
Pkg.rm(TEST_PKG.name)
end
@testset "tarball downloads" begin
- Pkg.add("JSON"; use_only_tarballs_for_downloads=true)
+ Pkg.add("JSON"; use_only_tarballs_for_downloads = true)
@test "JSON" in [pkg.name for (uuid, pkg) in Pkg.dependencies()]
Pkg.rm("JSON")
end
@@ -546,27 +567,29 @@ temp_pkg_dir() do project_path
cd(project_path) do
target_dir = mktempdir()
uuid = nothing
- mktempdir() do tmp; cd(tmp) do
- pkg_name = "FooBar"
- # create a project and grab its uuid
- Pkg.generate(pkg_name)
- uuid = extract_uuid(joinpath(pkg_name, "Project.toml"))
- # activate project env
- Pkg.activate(abspath(pkg_name))
- # add an example project to populate manifest file
- Pkg.add("Example")
- # change away from default names
- ## note: this is written awkwardly because a `mv` here causes failures on AppVeyor
- cp(joinpath(pkg_name, "src"), joinpath(target_dir, "src"))
- cp(joinpath(pkg_name, "Project.toml"), joinpath(target_dir, "JuliaProject.toml"))
- cp(joinpath(pkg_name, "Manifest.toml"), joinpath(target_dir, "JuliaManifest.toml"))
- end end
+ mktempdir() do tmp
+ cd(tmp) do
+ pkg_name = "FooBar"
+ # create a project and grab its uuid
+ Pkg.generate(pkg_name)
+ uuid = extract_uuid(joinpath(pkg_name, "Project.toml"))
+ # activate project env
+ Pkg.activate(abspath(pkg_name))
+ # add an example project to populate manifest file
+ Pkg.add("Example")
+ # change away from default names
+ ## note: this is written awkwardly because a `mv` here causes failures on AppVeyor
+ cp(joinpath(pkg_name, "src"), joinpath(target_dir, "src"))
+ cp(joinpath(pkg_name, "Project.toml"), joinpath(target_dir, "JuliaProject.toml"))
+ cp(joinpath(pkg_name, "Manifest.toml"), joinpath(target_dir, "JuliaManifest.toml"))
+ end
+ end
Pkg.activate()
# make sure things still work
Pkg.REPLMode.pkgstr("dev $target_dir")
- @test isinstalled((name="FooBar", uuid=UUID(uuid)))
+ @test isinstalled((name = "FooBar", uuid = UUID(uuid)))
Pkg.rm("FooBar")
- @test !isinstalled((name="FooBar", uuid=UUID(uuid)))
+ @test !isinstalled((name = "FooBar", uuid = UUID(uuid)))
end # cd project_path
end # @testset
end
@@ -581,53 +604,57 @@ temp_pkg_dir() do project_path
end
end
-temp_pkg_dir() do project_path; cd(project_path) do
- tmp = mktempdir()
- depo1 = mktempdir()
- depo2 = mktempdir()
- cd(tmp) do; @testset "instantiating updated repo" begin
- empty!(DEPOT_PATH)
- pushfirst!(DEPOT_PATH, depo1)
- Base.append_bundled_depot_path!(DEPOT_PATH)
- LibGit2.close(LibGit2.clone(TEST_PKG.url, "Example.jl"))
- mkdir("machine1")
- cd("machine1")
- Pkg.activate(".")
- Pkg.add(Pkg.PackageSpec(path="../Example.jl"))
- cd("..")
- cp("machine1", "machine2")
- empty!(DEPOT_PATH)
- pushfirst!(DEPOT_PATH, depo2)
- Base.append_bundled_depot_path!(DEPOT_PATH)
- cd("machine2")
- Pkg.activate(".")
- Pkg.instantiate()
- cd("..")
- cd("Example.jl")
- open("README.md", "a") do io
- print(io, "Hello")
- end
- LibGit2.with(LibGit2.GitRepo(".")) do repo
- LibGit2.add!(repo, "*")
- LibGit2.commit(repo, "changes"; author=TEST_SIG, committer=TEST_SIG)
+temp_pkg_dir() do project_path
+ cd(project_path) do
+ tmp = mktempdir()
+ depo1 = mktempdir()
+ depo2 = mktempdir()
+ cd(tmp) do;
+ @testset "instantiating updated repo" begin
+ empty!(DEPOT_PATH)
+ pushfirst!(DEPOT_PATH, depo1)
+ Base.append_bundled_depot_path!(DEPOT_PATH)
+ LibGit2.close(LibGit2.clone(TEST_PKG.url, "Example.jl"))
+ mkdir("machine1")
+ cd("machine1")
+ Pkg.activate(".")
+ Pkg.add(Pkg.PackageSpec(path = "../Example.jl"))
+ cd("..")
+ cp("machine1", "machine2")
+ empty!(DEPOT_PATH)
+ pushfirst!(DEPOT_PATH, depo2)
+ Base.append_bundled_depot_path!(DEPOT_PATH)
+ cd("machine2")
+ Pkg.activate(".")
+ Pkg.instantiate()
+ cd("..")
+ cd("Example.jl")
+ open("README.md", "a") do io
+ print(io, "Hello")
+ end
+ LibGit2.with(LibGit2.GitRepo(".")) do repo
+ LibGit2.add!(repo, "*")
+ LibGit2.commit(repo, "changes"; author = TEST_SIG, committer = TEST_SIG)
+ end
+ cd("../machine1")
+ empty!(DEPOT_PATH)
+ pushfirst!(DEPOT_PATH, depo1)
+ Base.append_bundled_depot_path!(DEPOT_PATH)
+ Pkg.activate(".")
+ Pkg.update()
+ cd("..")
+ cp("machine1/Manifest.toml", "machine2/Manifest.toml"; force = true)
+ cd("machine2")
+ empty!(DEPOT_PATH)
+ pushfirst!(DEPOT_PATH, depo2)
+ Base.append_bundled_depot_path!(DEPOT_PATH)
+ Pkg.activate(".")
+ Pkg.instantiate()
+ end
end
- cd("../machine1")
- empty!(DEPOT_PATH)
- pushfirst!(DEPOT_PATH, depo1)
- Base.append_bundled_depot_path!(DEPOT_PATH)
- Pkg.activate(".")
- Pkg.update()
- cd("..")
- cp("machine1/Manifest.toml", "machine2/Manifest.toml"; force=true)
- cd("machine2")
- empty!(DEPOT_PATH)
- pushfirst!(DEPOT_PATH, depo2)
- Base.append_bundled_depot_path!(DEPOT_PATH)
- Pkg.activate(".")
- Pkg.instantiate()
- end end
- Base.rm.([tmp, depo1, depo2]; force = true, recursive = true)
-end end
+ Base.rm.([tmp, depo1, depo2]; force = true, recursive = true)
+ end
+end
@testset "printing of stdlib paths, issue #605" begin
path = Pkg.Types.stdlib_path("Test")
@@ -635,13 +662,13 @@ end end
end
@testset "stdlib_resolve!" begin
- a = Pkg.Types.PackageSpec(name="Markdown")
- b = Pkg.Types.PackageSpec(uuid=UUID("9abbd945-dff8-562f-b5e8-e1ebf5ef1b79"))
+ a = Pkg.Types.PackageSpec(name = "Markdown")
+ b = Pkg.Types.PackageSpec(uuid = UUID("9abbd945-dff8-562f-b5e8-e1ebf5ef1b79"))
Pkg.Types.stdlib_resolve!([a, b])
@test a.uuid == UUID("d6f4376e-aef5-505a-96c1-9c027394607a")
@test b.name == "Profile"
- x = Pkg.Types.PackageSpec(name="Markdown", uuid=UUID("d6f4376e-aef5-505a-96c1-9c027394607a"))
+ x = Pkg.Types.PackageSpec(name = "Markdown", uuid = UUID("d6f4376e-aef5-505a-96c1-9c027394607a"))
Pkg.Types.stdlib_resolve!([x])
@test x.name == "Markdown"
@test x.uuid == UUID("d6f4376e-aef5-505a-96c1-9c027394607a")
@@ -650,10 +677,10 @@ end
@testset "issue #913" begin
temp_pkg_dir() do project_path
Pkg.activate(project_path)
- Pkg.add(Pkg.PackageSpec(name="Example", rev = "master"))
+ Pkg.add(Pkg.PackageSpec(name = "Example", rev = "master"))
@test isinstalled(TEST_PKG)
- rm.(joinpath.(project_path, ["Project.toml","Manifest.toml"]))
- Pkg.add(Pkg.PackageSpec(name="Example", rev = "master")) # should not fail
+ rm.(joinpath.(project_path, ["Project.toml", "Manifest.toml"]))
+ Pkg.add(Pkg.PackageSpec(name = "Example", rev = "master")) # should not fail
@test isinstalled(TEST_PKG)
end
end
@@ -674,33 +701,39 @@ end
#issue #876
@testset "targets should survive add/rm" begin
- temp_pkg_dir() do project_path; cd_tempdir() do tmpdir
- cp(joinpath(@__DIR__, "project", "good", "pkg.toml"), "Project.toml")
- mkdir("src")
- touch("src/Pkg.jl")
- targets = deepcopy(Pkg.Types.read_project("Project.toml").targets)
- Pkg.activate(".")
- Pkg.add("Example")
- Pkg.rm("Example")
- @test targets == Pkg.Types.read_project("Project.toml").targets
- end end
+ temp_pkg_dir() do project_path
+ cd_tempdir() do tmpdir
+ cp(joinpath(@__DIR__, "project", "good", "pkg.toml"), "Project.toml")
+ mkdir("src")
+ touch("src/Pkg.jl")
+ targets = deepcopy(Pkg.Types.read_project("Project.toml").targets)
+ Pkg.activate(".")
+ Pkg.add("Example")
+ Pkg.rm("Example")
+ @test targets == Pkg.Types.read_project("Project.toml").targets
+ end
+ end
end
@testset "canonicalized relative paths in manifest" begin
- mktempdir() do tmp; cd(tmp) do
- write("Manifest.toml",
- """
- [[Foo]]
- path = "bar/Foo"
- uuid = "824dc81a-29a7-11e9-3958-fba342a32644"
- version = "0.1.0"
- """)
- manifest = Pkg.Types.read_manifest("Manifest.toml")
- package = manifest[Base.UUID("824dc81a-29a7-11e9-3958-fba342a32644")]
- @test package.path == (Sys.iswindows() ? "bar\\Foo" : "bar/Foo")
- Pkg.Types.write_manifest(manifest, "Manifest.toml")
- @test occursin("path = \"bar/Foo\"", read("Manifest.toml", String))
- end end
+ mktempdir() do tmp
+ cd(tmp) do
+ write(
+ "Manifest.toml",
+ """
+ [[Foo]]
+ path = "bar/Foo"
+ uuid = "824dc81a-29a7-11e9-3958-fba342a32644"
+ version = "0.1.0"
+ """
+ )
+ manifest = Pkg.Types.read_manifest("Manifest.toml")
+ package = manifest[Base.UUID("824dc81a-29a7-11e9-3958-fba342a32644")]
+ @test package.path == (Sys.iswindows() ? "bar\\Foo" : "bar/Foo")
+ Pkg.Types.write_manifest(manifest, "Manifest.toml")
+ @test occursin("path = \"bar/Foo\"", read("Manifest.toml", String))
+ end
+ end
end
@testset "building project should fix version of deps" begin
@@ -718,37 +751,76 @@ end
@test sprint(showerror, err) == "foobar"
end
-@testset "issue #1066: package with colliding name/uuid exists in project" begin
- temp_pkg_dir() do project_path; cd_tempdir() do tmpdir
- Pkg.activate(".")
- Pkg.generate("A")
- cd(mkdir("packages")) do
+@testset "issue #2191: better diagnostic for missing package" begin
+ temp_pkg_dir() do project_path
+ cd_tempdir() do tmpdir
+ Pkg.activate(".")
+
+ # Create a package A that depends on package B
Pkg.generate("A")
+ Pkg.generate("B")
git_init_and_commit("A")
+ git_init_and_commit("B")
+
+ # Add B as a dependency of A
+ cd("A") do
+ Pkg.develop(PackageSpec(path = "../B"))
+ end
+
+ # Now remove the B directory to simulate the missing package scenario
+ rm("B", recursive = true)
+
+ # Try to perform an operation that would trigger the missing package error
+ cd("A") do
+ try
+ Pkg.resolve()
+                @test false # a PkgError should be thrown
+ catch e
+ @test e isa PkgError
+ error_msg = sprint(showerror, e)
+ # Check that the improved error message contains helpful information
+ @test occursin("This package is referenced in the manifest file:", error_msg)
+ end
+ end
end
- Pkg.generate("B")
- project = Pkg.Types.read_project("A/Project.toml")
- project.name = "B"
- Pkg.Types.write_project(project, "B/Project.toml")
- git_init_and_commit("B")
- Pkg.develop(Pkg.PackageSpec(path = abspath("A")))
- # package with same name but different uuid exist in project
- @test_throws PkgError Pkg.develop(Pkg.PackageSpec(path = abspath("packages", "A")))
- @test_throws PkgError Pkg.add(Pkg.PackageSpec(path = abspath("packages", "A")))
- # package with same uuid but different name exist in project
- @test_throws PkgError Pkg.develop(Pkg.PackageSpec(path = abspath("B")))
- @test_throws PkgError Pkg.add(Pkg.PackageSpec(path = abspath("B")))
- end end
+ end
+end
+
+@testset "issue #1066: package with colliding name/uuid exists in project" begin
+ temp_pkg_dir() do project_path
+ cd_tempdir() do tmpdir
+ Pkg.activate(".")
+ Pkg.generate("A")
+ cd(mkdir("packages")) do
+ Pkg.generate("A")
+ git_init_and_commit("A")
+ end
+ Pkg.generate("B")
+ project = Pkg.Types.read_project("A/Project.toml")
+ project.name = "B"
+ Pkg.Types.write_project(project, "B/Project.toml")
+ git_init_and_commit("B")
+ Pkg.develop(Pkg.PackageSpec(path = abspath("A")))
+ # package with same name but different uuid exist in project
+ @test_throws PkgError Pkg.develop(Pkg.PackageSpec(path = abspath("packages", "A")))
+ @test_throws PkgError Pkg.add(Pkg.PackageSpec(path = abspath("packages", "A")))
+ # package with same uuid but different name exist in project
+ @test_throws PkgError Pkg.develop(Pkg.PackageSpec(path = abspath("B")))
+ @test_throws PkgError Pkg.add(Pkg.PackageSpec(path = abspath("B")))
+ end
+ end
end
@testset "issue #1180: broken toml-files in HEAD" begin
- temp_pkg_dir() do dir; cd(dir) do
- write("Project.toml", "[deps]\nExample = \n")
- git_init_and_commit(dir)
- write("Project.toml", "[deps]\nExample = \"7876af07-990d-54b4-ab0e-23690620f79a\"\n")
- Pkg.activate(dir)
- @test_logs (:warn, r"could not read project from HEAD") Pkg.status(diff=true)
- end end
+ temp_pkg_dir() do dir
+ cd(dir) do
+ write("Project.toml", "[deps]\nExample = \n")
+ git_init_and_commit(dir)
+ write("Project.toml", "[deps]\nExample = \"7876af07-990d-54b4-ab0e-23690620f79a\"\n")
+ Pkg.activate(dir)
+ @test_logs (:warn, r"could not read project from HEAD") Pkg.status(diff = true)
+ end
+ end
end
import Markdown
@@ -763,96 +835,102 @@ end
@testset "up should prune manifest" begin
example_uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a")
unicode_uuid = UUID("4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5")
- temp_pkg_dir() do project_path; mktempdir() do tmp
- copy_test_package(tmp, "Unpruned")
- Pkg.activate(joinpath(tmp, "Unpruned"))
- Pkg.update()
- manifest = Pkg.Types.Context().env.manifest
- package_example = get(manifest, example_uuid, nothing)
- @test package_example !== nothing
- @test package_example.version > v"0.4.0"
- @test get(manifest, unicode_uuid, nothing) === nothing
- end end
+ temp_pkg_dir() do project_path
+ mktempdir() do tmp
+ copy_test_package(tmp, "Unpruned")
+ Pkg.activate(joinpath(tmp, "Unpruned"))
+ Pkg.update()
+ manifest = Pkg.Types.Context().env.manifest
+ package_example = get(manifest, example_uuid, nothing)
+ @test package_example !== nothing
+ @test package_example.version > v"0.4.0"
+ @test get(manifest, unicode_uuid, nothing) === nothing
+ end
+ end
end
@testset "undo redo functionality" begin
unicode_uuid = UUID("4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5")
- temp_pkg_dir() do project_path; with_temp_env() do
- Pkg.activate(project_path)
- # Example
- Pkg.add(TEST_PKG.name)
- @test haskey(Pkg.dependencies(), TEST_PKG.uuid)
- #
- Pkg.undo()
- @test !haskey(Pkg.dependencies(), TEST_PKG.uuid)
- # Example
- Pkg.redo()
- # Example, Unicode
- Pkg.add("Unicode")
- @test haskey(Pkg.dependencies(), TEST_PKG.uuid)
- # Example
- Pkg.undo()
- @test !haskey(Pkg.dependencies(), unicode_uuid)
- #
- Pkg.undo()
- @test !haskey(Pkg.dependencies(), TEST_PKG.uuid)
- # Example, Unicode
- Pkg.redo()
- Pkg.redo()
- @test haskey(Pkg.dependencies(), TEST_PKG.uuid)
- @test haskey(Pkg.dependencies(), unicode_uuid)
- # Should not add states since they are nops
- Pkg.add("Unicode")
- Pkg.add("Unicode")
- # Example
- Pkg.undo()
- @test !haskey(Pkg.dependencies(), unicode_uuid)
- # Example, Unicode
- Pkg.redo()
- @test haskey(Pkg.dependencies(), unicode_uuid)
-
- # Example
- Pkg.undo()
-
- prev_project = Base.active_project()
- mktempdir() do tmp
- Pkg.activate(tmp)
- Pkg.add("Example")
+ temp_pkg_dir() do project_path
+ with_temp_env() do
+ Pkg.activate(project_path)
+ # Example
+ Pkg.add(TEST_PKG.name)
+ @test haskey(Pkg.dependencies(), TEST_PKG.uuid)
+ #
Pkg.undo()
@test !haskey(Pkg.dependencies(), TEST_PKG.uuid)
- end
- Pkg.activate(prev_project)
+ # Example
+ Pkg.redo()
+ # Example, Unicode
+ Pkg.add("Unicode")
+ @test haskey(Pkg.dependencies(), TEST_PKG.uuid)
+ # Example
+ Pkg.undo()
+ @test !haskey(Pkg.dependencies(), unicode_uuid)
+ #
+ Pkg.undo()
+ @test !haskey(Pkg.dependencies(), TEST_PKG.uuid)
+ # Example, Unicode
+ Pkg.redo()
+ Pkg.redo()
+ @test haskey(Pkg.dependencies(), TEST_PKG.uuid)
+ @test haskey(Pkg.dependencies(), unicode_uuid)
+ # Should not add states since they are nops
+ Pkg.add("Unicode")
+ Pkg.add("Unicode")
+ # Example
+ Pkg.undo()
+ @test !haskey(Pkg.dependencies(), unicode_uuid)
+ # Example, Unicode
+ Pkg.redo()
+ @test haskey(Pkg.dependencies(), unicode_uuid)
+
+ # Example
+ Pkg.undo()
- # Check that undo state persists after swapping projects
- # Example, Unicode
- Pkg.redo()
- @test haskey(Pkg.dependencies(), unicode_uuid)
+ prev_project = Base.active_project()
+ mktempdir() do tmp
+ Pkg.activate(tmp)
+ Pkg.add("Example")
+ Pkg.undo()
+ @test !haskey(Pkg.dependencies(), TEST_PKG.uuid)
+ end
+ Pkg.activate(prev_project)
+
+ # Check that undo state persists after swapping projects
+ # Example, Unicode
+ Pkg.redo()
+ @test haskey(Pkg.dependencies(), unicode_uuid)
- end end
+ end
+ end
end
@testset "subdir functionality" begin
- temp_pkg_dir() do project_path; with_temp_env() do
- mktempdir() do tmp
- repodir = git_init_package(tmp, joinpath(@__DIR__, "test_packages", "MainRepo"))
- # Add with subdir
- subdir_uuid = UUID("6fe4e069-dcb0-448a-be67-3a8bf3404c58")
- Pkg.add(url = repodir, subdir = "SubDir")
- pkgdir = abspath(joinpath(dirname(Base.find_package("SubDir")), ".."))
-
- # Update with subdir in manifest
- Pkg.update()
- # Test instantiate with subdir
- rm(pkgdir; recursive=true)
- Pkg.instantiate()
- @test isinstalled("SubDir")
- Pkg.rm("SubDir")
-
- # Dev of local path with subdir
- Pkg.develop(path=repodir, subdir="SubDir")
- @test Pkg.dependencies()[subdir_uuid].source == joinpath(repodir, "SubDir")
+ temp_pkg_dir() do project_path
+ with_temp_env() do
+ mktempdir() do tmp
+ repodir = git_init_package(tmp, joinpath(@__DIR__, "test_packages", "MainRepo"))
+ # Add with subdir
+ subdir_uuid = UUID("6fe4e069-dcb0-448a-be67-3a8bf3404c58")
+ Pkg.add(url = repodir, subdir = "SubDir")
+ pkgdir = abspath(joinpath(dirname(Base.find_package("SubDir")), ".."))
+
+ # Update with subdir in manifest
+ Pkg.update()
+ # Test instantiate with subdir
+ rm(pkgdir; recursive = true)
+ Pkg.instantiate()
+ @test isinstalled("SubDir")
+ Pkg.rm("SubDir")
+
+ # Dev of local path with subdir
+ Pkg.develop(path = repodir, subdir = "SubDir")
+ @test Pkg.dependencies()[subdir_uuid].source == joinpath(repodir, "SubDir")
+ end
end
- end end
+ end
end
# PR #1784 - Remove trailing slash from URL.
@@ -943,9 +1021,10 @@ end
import Pkg.Resolve.range_compressed_versionspec
@testset "range_compressed_versionspec" begin
pool = [v"1.0.0", v"1.1.0", v"1.2.0", v"1.2.1", v"2.0.0", v"2.0.1", v"3.0.0", v"3.1.0"]
- @test (range_compressed_versionspec(pool)
- == range_compressed_versionspec(pool, pool)
- == VersionSpec("1.0.0-3.1.0")
+ @test (
+ range_compressed_versionspec(pool)
+ == range_compressed_versionspec(pool, pool)
+ == VersionSpec("1.0.0-3.1.0")
)
@test isequal(
@@ -978,7 +1057,7 @@ end
end
@testset "Issue #3069" begin
- p = PackageSpec(; path="test_packages/Example")
+ p = PackageSpec(; path = "test_packages/Example")
@test_throws Pkg.Types.PkgError("Package PackageSpec(\n path = test_packages/Example\n version = *\n) has neither name nor uuid") ensure_resolved(Pkg.Types.Context(), Pkg.Types.Manifest(), [p])
end
@@ -989,7 +1068,7 @@ end
Pkg.activate(temp = true)
mktempdir() do tmp_dir
LibGit2.close(LibGit2.clone(TEST_PKG.url, tmp_dir))
- Pkg.develop(path=tmp_dir)
+ Pkg.develop(path = tmp_dir)
Pkg.pin("Example")
Pkg.add("Example")
info = Pkg.dependencies()[TEST_PKG.uuid]
@@ -1000,7 +1079,7 @@ end
end
Pkg.rm("Example")
- Pkg.add(url=TEST_PKG.url, rev="29aa1b4")
+ Pkg.add(url = TEST_PKG.url, rev = "29aa1b4")
Pkg.pin("Example")
Pkg.add("Example")
info = Pkg.dependencies()[TEST_PKG.uuid]
@@ -1020,7 +1099,7 @@ end
hash = string(LibGit2.target(tag))
LibGit2.checkout!(repo, hash)
LibGit2.close(repo)
- Pkg.develop(path=tmp_dir)
+ Pkg.develop(path = tmp_dir)
Pkg.pin("Example")
Pkg.update("Example") # pkg should remain pinned
info = Pkg.dependencies()[TEST_PKG.uuid]
@@ -1048,11 +1127,47 @@ end
@test info.is_tracking_path
@test !info.is_tracking_repo
@test info.version == dev_ver
- end
- Pkg.rm("Example")
+ end
+ Pkg.rm("Example")
end
Pkg.activate(prev_project)
end
+@testset "check_registered error paths" begin
+ # Test the "no registries have been installed" error path
+ isolate(loaded_depot = false, linked_reg = false) do
+ with_temp_env() do
+ # Ensure we have no registries available
+ @test isempty(Pkg.Registry.reachable_registries())
+
+ # Should install General registry automatically
+ Pkg.add("Example")
+
+ Pkg.Registry.rm("General")
+ @test isempty(Pkg.Registry.reachable_registries())
+
+ @test_throws r"no registries have been installed\. Cannot resolve the following packages:" begin
+ Pkg.resolve()
+ end
+ end
+ end
+
+ # Test the "expected package to be registered" error path with a custom unregistered package
+ isolate(loaded_depot = true) do;
+ mktempdir() do tempdir
+ with_temp_env() do
+ # Create a fake package with a manifest that references an unregistered UUID
+ fake_pkg_path = copy_test_package(tempdir, "UnregisteredUUID")
+ Pkg.activate(fake_pkg_path)
+
+ # This should fail with "expected package to be registered" error
+ @test_throws r"expected package.*to be registered" begin
+ Pkg.add("JSON") # This will fail because Example UUID in manifest is unregistered
+ end
+ end
+ end
+ end
+end
+
end # module
diff --git a/test/platformengines.jl b/test/platformengines.jl
index d2e57048f7..bfacc6f51d 100644
--- a/test/platformengines.jl
+++ b/test/platformengines.jl
@@ -31,7 +31,7 @@ using ..Utils: list_tarball_files
# Next, package it up as a .tar.gz file
mktempdir() do output_dir
- tarball_path = joinpath(output_dir, "foo.tar.gz")
+ tarball_path = joinpath(output_dir, "foo.tar.gz")
package(prefix, tarball_path)
@test isfile(tarball_path)
@@ -55,8 +55,8 @@ end
foo_hash = bytes2hex(sha256("test"))
# Check that verifying with the right hash works
- @test_logs (:info, r"No hash cache found") match_mode=:any begin
- ret, status = verify(foo_path, foo_hash; verbose=true, report_cache_status=true)
+ @test_logs (:info, r"No hash cache found") match_mode = :any begin
+ ret, status = verify(foo_path, foo_hash; verbose = true, report_cache_status = true)
@test ret == true
@test status == :hash_cache_missing
end
@@ -65,8 +65,8 @@ end
@test isfile("$(foo_path).sha256")
# Check that it verifies the second time around properly
- @test_logs (:info, r"Hash cache is consistent") match_mode=:any begin
- ret, status = verify(foo_path, foo_hash; verbose=true, report_cache_status=true)
+ @test_logs (:info, r"Hash cache is consistent") match_mode = :any begin
+ ret, status = verify(foo_path, foo_hash; verbose = true, report_cache_status = true)
@test ret == true
@test status == :hash_cache_consistent
end
@@ -76,29 +76,29 @@ end
# Get coverage of messing with different parts of the verification chain
touch(foo_path)
- @test_logs (:info, r"File has been modified") match_mode=:any begin
- ret, status = verify(foo_path, foo_hash; verbose=true, report_cache_status=true)
+ @test_logs (:info, r"File has been modified") match_mode = :any begin
+ ret, status = verify(foo_path, foo_hash; verbose = true, report_cache_status = true)
@test ret == true
@test status == :file_modified
end
# Ensure that we print an error when verification fails
- rm("$(foo_path).sha256"; force=true)
- @test_logs (:error, r"Hash Mismatch!") match_mode=:any begin
- @test !verify(foo_path, "0"^64; verbose=true)
+ rm("$(foo_path).sha256"; force = true)
+ @test_logs (:error, r"Hash Mismatch!") match_mode = :any begin
+ @test !verify(foo_path, "0"^64; verbose = true)
end
# Ensure that incorrect lengths cause an exception
- @test_throws ErrorException verify(foo_path, "0"^65; verbose=true)
+ @test_throws ErrorException verify(foo_path, "0"^65; verbose = true)
# Ensure that messing with the hash file works properly
touch(foo_path)
- @test verify(foo_path, foo_hash; verbose=true)
+ @test verify(foo_path, foo_hash; verbose = true)
open("$(foo_path).sha256", "w") do file
write(file, "this is not the right hash")
end
- @test_logs (:info, r"hash cache invalidated") match_mode=:any begin
- ret, status = verify(foo_path, foo_hash; verbose=true, report_cache_status=true)
+ @test_logs (:info, r"hash cache invalidated") match_mode = :any begin
+ ret, status = verify(foo_path, foo_hash; verbose = true, report_cache_status = true)
@test ret == true
@test status == :hash_cache_mismatch
end
@@ -109,9 +109,9 @@ end
end
# Delete hash cache file to force re-verification
- rm("$(foo_path).sha256"; force=true)
- @test_logs (:error, r"Hash Mismatch!") match_mode=:any begin
- ret, status = verify(foo_path, foo_hash; verbose=true, report_cache_status=true)
+ rm("$(foo_path).sha256"; force = true)
+ @test_logs (:error, r"Hash Mismatch!") match_mode = :any begin
+ ret, status = verify(foo_path, foo_hash; verbose = true, report_cache_status = true)
@test ret == false
@test status == :hash_mismatch
end
@@ -121,11 +121,11 @@ end
const socrates_urls = [
"https://github.com/staticfloat/small_bin/raw/f1a92f5eafbd30a0c6a8efb6947485b0f6d1bec3/socrates.tar.gz" =>
- "e65d2f13f2085f2c279830e863292312a72930fee5ba3c792b14c33ce5c5cc58",
+ "e65d2f13f2085f2c279830e863292312a72930fee5ba3c792b14c33ce5c5cc58",
"https://github.com/staticfloat/small_bin/raw/f1a92f5eafbd30a0c6a8efb6947485b0f6d1bec3/socrates.tar.bz2" =>
- "13fc17b97be41763b02cbb80e9d048302cec3bd3d446c2ed6e8210bddcd3ac76",
+ "13fc17b97be41763b02cbb80e9d048302cec3bd3d446c2ed6e8210bddcd3ac76",
"https://github.com/staticfloat/small_bin/raw/f1a92f5eafbd30a0c6a8efb6947485b0f6d1bec3/socrates.tar.xz" =>
- "61bcf109fcb749ee7b6a570a6057602c08c836b6f81091eab7aa5f5870ec6475",
+ "61bcf109fcb749ee7b6a570a6057602c08c836b6f81091eab7aa5f5870ec6475",
]
const socrates_hash = "adcbcf15674eafe8905093183d9ab997cbfba9056fc7dde8bfa5a22dfcfb4967"
@@ -135,16 +135,16 @@ const socrates_hash = "adcbcf15674eafe8905093183d9ab997cbfba9056fc7dde8bfa5a22df
tarball_path = joinpath(prefix, "download_target.tar$(splitext(url)[2])")
target_dir = joinpath(prefix, "target")
- download_verify_unpack(url, hash, target_dir; tarball_path=tarball_path, verbose=true)
+ download_verify_unpack(url, hash, target_dir; tarball_path = tarball_path, verbose = true)
# Test downloading a second time, to get the "already exists" path
- download_verify_unpack(url, hash, target_dir; tarball_path=tarball_path, verbose=true)
+ download_verify_unpack(url, hash, target_dir; tarball_path = tarball_path, verbose = true)
# And a third time, after corrupting it, to get the "redownloading" path
open(tarball_path, "w") do io
println(io, "corruptify")
end
- download_verify_unpack(url, hash, target_dir; tarball_path=tarball_path, verbose=true, force=true)
+ download_verify_unpack(url, hash, target_dir; tarball_path = tarball_path, verbose = true, force = true)
# Test that it has the contents we expect
socrates_path = joinpath(target_dir, "bin", "socrates")
@@ -163,7 +163,7 @@ const collapse_hash = "956c1201405f64d3465cc28cb0dec9d63c11a08cad28c381e13bb22e1
withenv("BINARYPROVIDER_COPYDEREF" => "true") do
mktempdir() do prefix
target_dir = joinpath(prefix, "target")
- download_verify_unpack(collapse_url, collapse_hash, target_dir; verbose=true)
+ download_verify_unpack(collapse_url, collapse_hash, target_dir; verbose = true)
# Test that we get the files we expect
@test isfile(joinpath(target_dir, "collapse_the_symlink", "foo"))
@@ -182,7 +182,7 @@ end
@testset "Download GitHub API #88" begin
mktempdir() do tmp
- PlatformEngines.download("https://api.github.com/repos/JuliaPackaging/BinaryProvider.jl/tarball/c2a4fc38f29eb81d66e3322e585d0199722e5d71", joinpath(tmp, "BinaryProvider"); verbose=true)
+ PlatformEngines.download("https://api.github.com/repos/JuliaPackaging/BinaryProvider.jl/tarball/c2a4fc38f29eb81d66e3322e585d0199722e5d71", joinpath(tmp, "BinaryProvider"); verbose = true)
@test isfile(joinpath(tmp, "BinaryProvider"))
end
end
@@ -241,10 +241,12 @@ end
end
called = 0
- dispose = PlatformEngines.register_auth_error_handler("https://foo.bar/baz", function (url, svr, err)
- called += 1
- return true, called < 3
- end)
+ dispose = PlatformEngines.register_auth_error_handler(
+ "https://foo.bar/baz", function (url, svr, err)
+ called += 1
+ return true, called < 3
+ end
+ )
@test PlatformEngines.get_auth_header("https://foo.bar/baz") == nothing
@test called == 0
diff --git a/test/project/bad/targets_not_a_table.toml b/test/project/bad/targets_not_a_table.toml
index 9af8b2b342..cbca90e7f4 100644
--- a/test/project/bad/targets_not_a_table.toml
+++ b/test/project/bad/targets_not_a_table.toml
@@ -17,4 +17,3 @@ UUIDs = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
[extras]
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
-
diff --git a/test/project_manifest.jl b/test/project_manifest.jl
index ab6a6cc99f..4f8e379f9c 100644
--- a/test/project_manifest.jl
+++ b/test/project_manifest.jl
@@ -7,11 +7,10 @@ using ..Utils
temp_pkg_dir() do project_path
@testset "test Project.toml manifest" begin
mktempdir() do dir
- path = abspath(joinpath(dirname(pathof(Pkg)), "../test", "test_packages", "monorepo"))
- cp(path, joinpath(dir, "monorepo"))
- cd(joinpath(dir, "monorepo")) do
+ path = copy_test_package(dir, "monorepo")
+ cd(path) do
with_current_env() do
- Pkg.develop(path="packages/B")
+ Pkg.develop(path = "packages/B")
end
end
# test subpackage instantiates/tests
@@ -25,7 +24,7 @@ temp_pkg_dir() do project_path
# to make those Manifest changes "stick" before adding Test.
cd(joinpath(dir, "monorepo", "packages", "C")) do
with_current_env() do
- Pkg.develop(path="../D") # add unregistered local dependency
+ Pkg.develop(path = "../D") # add unregistered local dependency
Pkg.test()
end
end
@@ -37,7 +36,7 @@ temp_pkg_dir() do project_path
@test haskey(pkgC.deps, "D")
cd(joinpath(dir, "monorepo")) do
with_current_env() do
- Pkg.develop(path="packages/C")
+ Pkg.develop(path = "packages/C")
Pkg.add("Test")
Pkg.test()
end
@@ -60,4 +59,4 @@ temp_pkg_dir() do project_path
end
end
-end # module
\ No newline at end of file
+end # module
diff --git a/test/registry.jl b/test/registry.jl
index 20d70ea038..ad30823bd9 100644
--- a/test/registry.jl
+++ b/test/registry.jl
@@ -5,6 +5,7 @@ using Pkg, UUIDs, LibGit2, Test
using Pkg: depots1
using Pkg.REPLMode: pkgstr
using Pkg.Types: PkgError, manifest_info, PackageSpec, EnvCache
+using Dates: Second
using ..Utils
@@ -16,39 +17,50 @@ function setup_test_registries(dir = pwd())
for i in 1:2
regpath = joinpath(dir, "RegistryFoo$(i)")
mkpath(joinpath(regpath, "Example"))
- write(joinpath(regpath, "Registry.toml"), """
+ write(
+ joinpath(regpath, "Registry.toml"), """
name = "RegistryFoo"
uuid = "$(reg_uuids[i])"
repo = "https://github.com"
[packages]
$(pkg_uuids[i]) = { name = "Example$(i)", path = "Example" }
- """)
- write(joinpath(regpath, "Example", "Package.toml"), """
+ """
+ )
+ write(
+ joinpath(regpath, "Example", "Package.toml"), """
name = "Example$(i)"
uuid = "$(pkg_uuids[i])"
repo = "https://github.com/JuliaLang/Example.jl.git"
- """)
- write(joinpath(regpath, "Example", "Versions.toml"), """
+ """
+ )
+ write(
+ joinpath(regpath, "Example", "Versions.toml"), """
["0.5.1"]
git-tree-sha1 = "8eb7b4d4ca487caade9ba3e85932e28ce6d6e1f8"
- """)
- write(joinpath(regpath, "Example", "Deps.toml"), """
+ """
+ )
+ write(
+ joinpath(regpath, "Example", "Deps.toml"), """
["0.5"]
julia = "0.6-1.0"
- """)
- write(joinpath(regpath, "Example", "Compat.toml"), """
+ """
+ )
+ write(
+ joinpath(regpath, "Example", "Compat.toml"), """
["0.5"]
julia = "0.6-1.0"
- """)
+ """
+ )
git_init_and_commit(regpath)
end
+ return
end
function test_installed(registries)
- @test setdiff(
+ return @test setdiff(
UUID[r.uuid for r in registries],
UUID[r.uuid for r in Pkg.Registry.reachable_registries()]
- ) == UUID[]
+ ) == UUID[]
end
function is_pkg_available(pkg::PackageSpec)
@@ -62,231 +74,274 @@ end
function with_depot2(f)
Base.DEPOT_PATH[1:2] .= Base.DEPOT_PATH[2:-1:1]
f()
- Base.DEPOT_PATH[1:2] .= Base.DEPOT_PATH[2:-1:1]
+ return Base.DEPOT_PATH[1:2] .= Base.DEPOT_PATH[2:-1:1]
end
@testset "registries" begin
- temp_pkg_dir() do depot; mktempdir() do depot2
- insert!(Base.DEPOT_PATH, 2, depot2)
- # set up registries
- regdir = mktempdir()
- setup_test_registries(regdir)
- general_url = Pkg.Registry.DEFAULT_REGISTRIES[1].url
- general_path = Pkg.Registry.DEFAULT_REGISTRIES[1].path
- general_linked = Pkg.Registry.DEFAULT_REGISTRIES[1].linked
- General = RegistrySpec(name = "General", uuid = "23338594-aafe-5451-b93e-139f81909106",
- url = general_url, path = general_path, linked = general_linked)
- Foo1 = RegistrySpec(name = "RegistryFoo", uuid = "e9fceed0-5623-4384-aff0-6db4c442647a",
- url = joinpath(regdir, "RegistryFoo1"))
- Foo2 = RegistrySpec(name = "RegistryFoo", uuid = "a8e078ad-b4bd-4e09-a52f-c464826eef9d",
- url = joinpath(regdir, "RegistryFoo2"))
-
- # Packages in registries
- Example = PackageSpec(name = "Example", uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a"))
- Example1 = PackageSpec(name = "Example1", uuid = UUID("c5f1542f-b8aa-45da-ab42-05303d706c66"))
- Example2 = PackageSpec(name = "Example2", uuid = UUID("d7897d3a-8e65-4b65-bdc8-28ce4e859565"))
-
- # Add General registry
- ## Pkg REPL
- for reg in ("General",
+ temp_pkg_dir() do depot
+ mktempdir() do depot2
+ insert!(Base.DEPOT_PATH, 2, depot2)
+ # set up registries
+ regdir = mktempdir()
+ setup_test_registries(regdir)
+ general_url = Pkg.Registry.DEFAULT_REGISTRIES[1].url
+ general_path = Pkg.Registry.DEFAULT_REGISTRIES[1].path
+ general_linked = Pkg.Registry.DEFAULT_REGISTRIES[1].linked
+ General = RegistrySpec(
+ name = "General", uuid = "23338594-aafe-5451-b93e-139f81909106",
+ url = general_url, path = general_path, linked = general_linked
+ )
+ Foo1 = RegistrySpec(
+ name = "RegistryFoo", uuid = "e9fceed0-5623-4384-aff0-6db4c442647a",
+ url = joinpath(regdir, "RegistryFoo1")
+ )
+ Foo2 = RegistrySpec(
+ name = "RegistryFoo", uuid = "a8e078ad-b4bd-4e09-a52f-c464826eef9d",
+ url = joinpath(regdir, "RegistryFoo2")
+ )
+
+ # Packages in registries
+ Example = PackageSpec(name = "Example", uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a"))
+ Example1 = PackageSpec(name = "Example1", uuid = UUID("c5f1542f-b8aa-45da-ab42-05303d706c66"))
+ Example2 = PackageSpec(name = "Example2", uuid = UUID("d7897d3a-8e65-4b65-bdc8-28ce4e859565"))
+
+ # Add General registry
+ ## Pkg REPL
+ for reg in (
+ "General",
"23338594-aafe-5451-b93e-139f81909106",
- "General=23338594-aafe-5451-b93e-139f81909106")
- pkgstr("registry add $(reg)")
- test_installed([General])
+ "General=23338594-aafe-5451-b93e-139f81909106",
+ )
+ pkgstr("registry add $(reg)")
+ test_installed([General])
+
+ pkgstr("registry up $(reg)")
+ test_installed([General])
+ pkgstr("registry rm $(reg)")
+ test_installed([])
+ end
- pkgstr("registry up $(reg)")
+ ## Pkg REPL without argument
+ pkgstr("registry add")
test_installed([General])
- pkgstr("registry rm $(reg)")
+ pkgstr("registry rm General")
test_installed([])
- end
-
- ## Pkg REPL without argument
- pkgstr("registry add")
- test_installed([General])
- pkgstr("registry rm General")
- test_installed([])
- ## Registry API
- for reg in ("General",
+ ## Registry API
+ for reg in (
+ "General",
RegistrySpec("General"),
RegistrySpec(name = "General"),
RegistrySpec(name = "General", path = general_path),
RegistrySpec(uuid = "23338594-aafe-5451-b93e-139f81909106"),
- RegistrySpec(name = "General", uuid = "23338594-aafe-5451-b93e-139f81909106"))
- Pkg.Registry.add(reg)
- test_installed([General])
+ RegistrySpec(name = "General", uuid = "23338594-aafe-5451-b93e-139f81909106"),
+ )
+ Pkg.Registry.add(reg)
+ test_installed([General])
+ @test is_pkg_available(Example)
+ Pkg.Registry.update(reg)
+ test_installed([General])
+ Pkg.Registry.rm(reg)
+ test_installed([])
+ @test !is_pkg_available(Example)
+ end
+
+ # Add registry from URL/local path.
+ pkgstr("registry add $(Foo1.url)")
+ test_installed([Foo1])
+ @test is_pkg_available(Example1)
+ @test !is_pkg_available(Example2)
+ with_depot2(() -> pkgstr("registry add $(Foo2.url)"))
+ test_installed([Foo1, Foo2])
+ @test is_pkg_available(Example1)
+ @test is_pkg_available(Example2)
+
+ # reset installed registries
+ rm.(joinpath.(Base.DEPOT_PATH[1:2], "registries"); force = true, recursive = true)
+
+ Registry.add(url = Foo1.url)
+ test_installed([Foo1])
+ @test is_pkg_available(Example1)
+ @test !is_pkg_available(Example2)
+ with_depot2(() -> Registry.add(url = Foo2.url))
+ test_installed([Foo1, Foo2])
+ @test is_pkg_available(Example1)
+ @test is_pkg_available(Example2)
+
+
+ pkgstr("registry up $(Foo1.uuid)")
+ pkgstr("registry update $(Foo1.name)=$(Foo1.uuid)")
+ Registry.update(uuid = Foo1.uuid)
+ Registry.update(name = Foo1.name, uuid = Foo1.uuid)
+
+ test_installed([Foo1, Foo2])
+ pkgstr("registry rm $(Foo1.uuid)")
+ test_installed([Foo2])
+ @test !is_pkg_available(Example1)
+ @test is_pkg_available(Example2)
+ Registry.add(url = Foo1.url)
+ test_installed([Foo1, Foo2])
+ @test is_pkg_available(Example1)
+ @test is_pkg_available(Example2)
+ pkgstr("registry rm $(Foo1.name)=$(Foo1.uuid)")
+ test_installed([Foo2])
+ @test !is_pkg_available(Example1)
+ @test is_pkg_available(Example2)
+ with_depot2() do
+ pkgstr("registry rm $(Foo2.name)")
+ end
+ test_installed([])
+ @test !is_pkg_available(Example1)
+ @test !is_pkg_available(Example2)
+
+ Registry.add(url = Foo1.url)
+ with_depot2(() -> Registry.add(url = Foo2.url))
+ test_installed([Foo1, Foo2])
+ @test is_pkg_available(Example1)
+ @test is_pkg_available(Example2)
+ Registry.rm(uuid = Foo1.uuid)
+ test_installed([Foo2])
+ @test !is_pkg_available(Example1)
+ @test is_pkg_available(Example2)
+ Registry.add(url = Foo1.url)
+ test_installed([Foo1, Foo2])
+ @test is_pkg_available(Example1)
+ @test is_pkg_available(Example2)
+ Registry.rm(name = Foo1.name, uuid = Foo1.uuid)
+ test_installed([Foo2])
+ @test !is_pkg_available(Example1)
+ @test is_pkg_available(Example2)
+ with_depot2() do
+ Registry.rm(Foo2.name)
+ end
+ test_installed([])
+ @test !is_pkg_available(Example1)
+ @test !is_pkg_available(Example2)
+
+ # multiple registries on the same time
+ pkgstr("registry add General $(Foo1.url)")
+ with_depot2(() -> pkgstr("registry add $(Foo2.url)"))
+ test_installed([General, Foo1, Foo2])
@test is_pkg_available(Example)
- Pkg.Registry.update(reg)
- test_installed([General])
- Pkg.Registry.rm(reg)
+ @test is_pkg_available(Example1)
+ @test is_pkg_available(Example2)
+ pkgstr("registry up General $(Foo1.uuid) $(Foo2.name)=$(Foo2.uuid)")
+ pkgstr("registry rm General $(Foo1.uuid)")
+ with_depot2() do
+ pkgstr("registry rm General $(Foo2.name)=$(Foo2.uuid)")
+ end
test_installed([])
@test !is_pkg_available(Example)
- end
+ @test !is_pkg_available(Example1)
+ @test !is_pkg_available(Example2)
- # Add registry from URL/local path.
- pkgstr("registry add $(Foo1.url)")
- test_installed([Foo1])
- @test is_pkg_available(Example1)
- @test !is_pkg_available(Example2)
- with_depot2(() -> pkgstr("registry add $(Foo2.url)"))
- test_installed([Foo1, Foo2])
- @test is_pkg_available(Example1)
- @test is_pkg_available(Example2)
-
- # reset installed registries
- rm.(joinpath.(Base.DEPOT_PATH[1:2], "registries"); force=true, recursive=true)
-
- Registry.add(url = Foo1.url)
- test_installed([Foo1])
- @test is_pkg_available(Example1)
- @test !is_pkg_available(Example2)
- with_depot2(() -> Registry.add(url = Foo2.url))
- test_installed([Foo1, Foo2])
- @test is_pkg_available(Example1)
- @test is_pkg_available(Example2)
-
-
- pkgstr("registry up $(Foo1.uuid)")
- pkgstr("registry update $(Foo1.name)=$(Foo1.uuid)")
- Registry.update(uuid = Foo1.uuid)
- Registry.update(name = Foo1.name, uuid = Foo1.uuid)
-
- test_installed([Foo1, Foo2])
- pkgstr("registry rm $(Foo1.uuid)")
- test_installed([Foo2])
- @test !is_pkg_available(Example1)
- @test is_pkg_available(Example2)
- Registry.add(url = Foo1.url)
- test_installed([Foo1, Foo2])
- @test is_pkg_available(Example1)
- @test is_pkg_available(Example2)
- pkgstr("registry rm $(Foo1.name)=$(Foo1.uuid)")
- test_installed([Foo2])
- @test !is_pkg_available(Example1)
- @test is_pkg_available(Example2)
- with_depot2() do
- pkgstr("registry rm $(Foo2.name)")
- end
- test_installed([])
- @test !is_pkg_available(Example1)
- @test !is_pkg_available(Example2)
-
- Registry.add(url = Foo1.url)
- with_depot2(() -> Registry.add(url = Foo2.url))
- test_installed([Foo1, Foo2])
- @test is_pkg_available(Example1)
- @test is_pkg_available(Example2)
- Registry.rm(uuid = Foo1.uuid)
- test_installed([Foo2])
- @test !is_pkg_available(Example1)
- @test is_pkg_available(Example2)
- Registry.add(url = Foo1.url)
- test_installed([Foo1, Foo2])
- @test is_pkg_available(Example1)
- @test is_pkg_available(Example2)
- Registry.rm(name = Foo1.name, uuid = Foo1.uuid)
- test_installed([Foo2])
- @test !is_pkg_available(Example1)
- @test is_pkg_available(Example2)
- with_depot2() do
- Registry.rm(Foo2.name)
- end
- test_installed([])
- @test !is_pkg_available(Example1)
- @test !is_pkg_available(Example2)
-
- # multiple registries on the same time
- pkgstr("registry add General $(Foo1.url)")
- with_depot2(() -> pkgstr("registry add $(Foo2.url)"))
- test_installed([General, Foo1, Foo2])
- @test is_pkg_available(Example)
- @test is_pkg_available(Example1)
- @test is_pkg_available(Example2)
- pkgstr("registry up General $(Foo1.uuid) $(Foo2.name)=$(Foo2.uuid)")
- pkgstr("registry rm General $(Foo1.uuid)")
- with_depot2() do
- pkgstr("registry rm General $(Foo2.name)=$(Foo2.uuid)")
- end
- test_installed([])
- @test !is_pkg_available(Example)
- @test !is_pkg_available(Example1)
- @test !is_pkg_available(Example2)
-
- Registry.add([RegistrySpec("General"),
- RegistrySpec(url = Foo1.url)])
- with_depot2(() -> Registry.add([RegistrySpec(url = Foo2.url)]))
- test_installed([General, Foo1, Foo2])
- @test is_pkg_available(Example)
- @test is_pkg_available(Example1)
- @test is_pkg_available(Example2)
- Registry.update([RegistrySpec("General"),
- RegistrySpec(uuid = Foo1.uuid),
- RegistrySpec(name = Foo2.name, uuid = Foo2.uuid)])
- Registry.rm([RegistrySpec("General"),
- RegistrySpec(uuid = Foo1.uuid),
- ])
- with_depot2() do
- Registry.rm(name = Foo2.name, uuid = Foo2.uuid)
- end
- test_installed([])
- @test !is_pkg_available(Example)
- @test !is_pkg_available(Example1)
- @test !is_pkg_available(Example2)
+ Registry.add(
+ [
+ RegistrySpec("General"),
+ RegistrySpec(url = Foo1.url),
+ ]
+ )
+ with_depot2(() -> Registry.add([RegistrySpec(url = Foo2.url)]))
+ test_installed([General, Foo1, Foo2])
+ @test is_pkg_available(Example)
+ @test is_pkg_available(Example1)
+ @test is_pkg_available(Example2)
+ Registry.update(
+ [
+ RegistrySpec("General"),
+ RegistrySpec(uuid = Foo1.uuid),
+ RegistrySpec(name = Foo2.name, uuid = Foo2.uuid),
+ ]
+ )
+ Registry.rm(
+ [
+ RegistrySpec("General"),
+ RegistrySpec(uuid = Foo1.uuid),
+ ]
+ )
+ with_depot2() do
+ Registry.rm(name = Foo2.name, uuid = Foo2.uuid)
+ end
+ test_installed([])
+ @test !is_pkg_available(Example)
+ @test !is_pkg_available(Example1)
+ @test !is_pkg_available(Example2)
- # Trying to add a registry with the same name as existing one
- pkgstr("registry add $(Foo1.url)")
- @test_throws PkgError pkgstr("registry add $(Foo2.url)")
- @test_throws PkgError Registry.add([RegistrySpec(url = Foo2.url)])
+ # Trying to add a registry with the same name as existing one
+ pkgstr("registry add $(Foo1.url)")
+ @test_throws PkgError pkgstr("registry add $(Foo2.url)")
+ @test_throws PkgError Registry.add([RegistrySpec(url = Foo2.url)])
- end end
+ end
+ end
# issue #711
- temp_pkg_dir() do depot; mktempdir() do depot2
- insert!(Base.DEPOT_PATH, 2, depot2)
- Registry.add("General")
- with_depot2(() -> Registry.add("General"))
- # This add should not error because depot/Example and depot2/Example have the same uuid
- Pkg.add("Example")
- @test isinstalled((name = "Example", uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a")))
- end end
+ temp_pkg_dir() do depot
+ mktempdir() do depot2
+ insert!(Base.DEPOT_PATH, 2, depot2)
+ Registry.add("General")
+ with_depot2(() -> Registry.add("General"))
+ # This add should not error because depot/Example and depot2/Example have the same uuid
+ Pkg.add("Example")
+ @test isinstalled((name = "Example", uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a")))
+ end
+ end
# Test Registry.add and Registry.update with explicit depot values
- temp_pkg_dir() do depot_on_path; mktempdir() do depot_off_path
- # No registries anywhere
- @test isempty(Registry.reachable_registries())
- @test isempty(Registry.reachable_registries(; depots=[depot_off_path]))
-
- # After this, we have depots only in the depot that's off the path
- Registry.add("General"; depot=depot_off_path)
- @test isempty(Registry.reachable_registries())
- @test length(Registry.reachable_registries(; depots=[depot_off_path])) == 1
-
- # Test that `update()` with `depots` runs
- io = Base.BufferStream()
- Registry.update(; depots=[depot_off_path], io)
- closewrite(io)
- output = read(io, String)
- @test occursin("registry at `$(depot_off_path)", output)
-
- # Show that we can install `Example` off of that depot
- empty!(Base.DEPOT_PATH)
- push!(Base.DEPOT_PATH, depot_off_path)
- Pkg.add("Example")
- @test isinstalled((name = "Example", uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a")))
- end end
+ temp_pkg_dir() do depot_on_path
+ mktempdir() do depot_off_path
+ # No registries anywhere
+ @test isempty(Registry.reachable_registries())
+ @test isempty(Registry.reachable_registries(; depots = [depot_off_path]))
+
+ # After this, we have depots only in the depot that's off the path
+ Registry.add("General"; depots = depot_off_path)
+ @test isempty(Registry.reachable_registries())
+ @test length(Registry.reachable_registries(; depots = [depot_off_path])) == 1
+
+ # Test that `update()` with `depots` runs
+ io = Base.BufferStream()
+ Registry.update(; depots = [depot_off_path], io, update_cooldown = Second(0))
+ closewrite(io)
+ output = read(io, String)
+ @test occursin("registry at `$(depot_off_path)", output)
+
+ # Show that we can install `Example` off of that depot
+ empty!(Base.DEPOT_PATH)
+ push!(Base.DEPOT_PATH, depot_off_path)
+ Pkg.add("Example")
+ @test isinstalled((name = "Example", uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a")))
+ end
+ end
+
+ # Registry status. Mostly verify that it runs without errors but
+ # also make some sanity checks on the output. We can't really know
+ # whether it was installed as a git clone or a tarball, so that
+ # limits how much information we are guaranteed to get from
+ # status.
+ temp_pkg_dir() do depot
+ Registry.add("General")
+ buf = IOBuffer()
+ Pkg.Registry.status(buf)
+ status = String(take!(buf))
+ @test contains(status, "[23338594] General (https://github.com/JuliaRegistries/General.git)")
+ @test contains(status, "last updated")
+ end
# only clone default registry if there are no registries installed at all
- temp_pkg_dir() do depot1; mktempdir() do depot2
- append!(empty!(DEPOT_PATH), [depot1, depot2])
- Base.append_bundled_depot_path!(DEPOT_PATH)
- @test length(Pkg.Registry.reachable_registries()) == 0
- Pkg.add("Example")
- @test length(Pkg.Registry.reachable_registries()) == 1
- Pkg.rm("Example")
- DEPOT_PATH[1:2] .= DEPOT_PATH[2:-1:1]
- Pkg.add("Example") # should not trigger a clone of default registries
- @test length(Pkg.Registry.reachable_registries()) == 1
- end end
+ temp_pkg_dir() do depot1
+ mktempdir() do depot2
+ append!(empty!(DEPOT_PATH), [depot1, depot2])
+ Base.append_bundled_depot_path!(DEPOT_PATH)
+ @test length(Pkg.Registry.reachable_registries()) == 0
+ Pkg.add("Example")
+ @test length(Pkg.Registry.reachable_registries()) == 1
+ Pkg.rm("Example")
+ DEPOT_PATH[1:2] .= DEPOT_PATH[2:-1:1]
+ Pkg.add("Example") # should not trigger a clone of default registries
+ @test length(Pkg.Registry.reachable_registries()) == 1
+ end
+ end
@testset "yanking" begin
uuid = Base.UUID("7876af07-990d-54b4-ab0e-23690620f79a") # Example
@@ -297,7 +352,7 @@ end
@test manifest_info(EnvCache().manifest, uuid).version == v"0.5.0"
Pkg.update() # should not update Example
@test manifest_info(EnvCache().manifest, uuid).version == v"0.5.0"
- @test_throws Pkg.Resolve.ResolverError Pkg.add(PackageSpec(name="Example", version=v"0.5.1"))
+ @test_throws Pkg.Resolve.ResolverError Pkg.add(PackageSpec(name = "Example", version = v"0.5.1"))
Pkg.rm("Example")
Pkg.add("JSON") # depends on Example
@test manifest_info(EnvCache().manifest, uuid).version == v"0.5.0"
@@ -307,27 +362,34 @@ end
# Test that Example@0.5.1 can be obtained from an existing manifest
temp_pkg_dir() do env
Pkg.Registry.add(url = "https://github.com/JuliaRegistries/Test")
- write(joinpath(env, "Project.toml"),"""
+ write(
+ joinpath(env, "Project.toml"), """
[deps]
Example = "7876af07-990d-54b4-ab0e-23690620f79a"
- """)
- write(joinpath(env, "Manifest.toml"),"""
+ """
+ )
+ write(
+ joinpath(env, "Manifest.toml"), """
[[Example]]
git-tree-sha1 = "8eb7b4d4ca487caade9ba3e85932e28ce6d6e1f8"
uuid = "7876af07-990d-54b4-ab0e-23690620f79a"
version = "0.5.1"
- """)
+ """
+ )
Pkg.activate(env)
Pkg.instantiate()
@test manifest_info(EnvCache().manifest, uuid).version == v"0.5.1"
end
temp_pkg_dir() do env
Pkg.Registry.add(url = "https://github.com/JuliaRegistries/Test")
- write(joinpath(env, "Project.toml"),"""
+ write(
+ joinpath(env, "Project.toml"), """
[deps]
JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
- """)
- write(joinpath(env, "Manifest.toml"),"""
+ """
+ )
+ write(
+ joinpath(env, "Manifest.toml"), """
[[Example]]
git-tree-sha1 = "8eb7b4d4ca487caade9ba3e85932e28ce6d6e1f8"
uuid = "7876af07-990d-54b4-ab0e-23690620f79a"
@@ -338,7 +400,8 @@ end
git-tree-sha1 = "1f7a25b53ec67f5e9422f1f551ee216503f4a0fa"
uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
version = "0.20.0"
- """)
+ """
+ )
Pkg.activate(env)
Pkg.instantiate()
@test manifest_info(EnvCache().manifest, uuid).version == v"0.5.1"
@@ -347,40 +410,44 @@ end
end
if Pkg.Registry.registry_use_pkg_server()
-@testset "compressed registry" begin
- for unpack in (true, nothing)
- withenv("JULIA_PKG_UNPACK_REGISTRY" => unpack) do
- temp_pkg_dir(;linked_reg=false) do depot
- # These get restored by temp_pkg_dir
- Pkg.Registry.DEFAULT_REGISTRIES[1].path = nothing
- Pkg.Registry.DEFAULT_REGISTRIES[1].url = "https://github.com/JuliaRegistries/General.git"
-
- # This should not uncompress the registry
- Registry.add(uuid = UUID("23338594-aafe-5451-b93e-139f81909106"))
- @test isfile(joinpath(DEPOT_PATH[1], "registries", "General.tar.gz")) != something(unpack, false)
- Pkg.add("Example")
-
- # Write some bad git-tree-sha1 here so that Pkg.update will have to update the registry
- if unpack == true
- write(joinpath(DEPOT_PATH[1], "registries", "General", ".tree_info.toml"),
- """
- git-tree-sha1 = "179182faa6a80b3cf24445e6f55c954938d57941"
- """)
- else
- write(joinpath(DEPOT_PATH[1], "registries", "General.toml"),
- """
- git-tree-sha1 = "179182faa6a80b3cf24445e6f55c954938d57941"
- uuid = "23338594-aafe-5451-b93e-139f81909106"
- path = "General.tar.gz"
- """)
+ @testset "compressed registry" begin
+ for unpack in (true, nothing)
+ withenv("JULIA_PKG_UNPACK_REGISTRY" => unpack) do
+ temp_pkg_dir(; linked_reg = false) do depot
+ # These get restored by temp_pkg_dir
+ Pkg.Registry.DEFAULT_REGISTRIES[1].path = nothing
+ Pkg.Registry.DEFAULT_REGISTRIES[1].url = "https://github.com/JuliaRegistries/General.git"
+
+ # This should not uncompress the registry
+ Registry.add(uuid = UUID("23338594-aafe-5451-b93e-139f81909106"))
+ @test isfile(joinpath(DEPOT_PATH[1], "registries", "General.tar.gz")) != something(unpack, false)
+ Pkg.add("Example")
+
+ # Write some bad git-tree-sha1 here so that Pkg.update will have to update the registry
+ if unpack == true
+ write(
+ joinpath(DEPOT_PATH[1], "registries", "General", ".tree_info.toml"),
+ """
+ git-tree-sha1 = "179182faa6a80b3cf24445e6f55c954938d57941"
+ """
+ )
+ else
+ write(
+ joinpath(DEPOT_PATH[1], "registries", "General.toml"),
+ """
+ git-tree-sha1 = "179182faa6a80b3cf24445e6f55c954938d57941"
+ uuid = "23338594-aafe-5451-b93e-139f81909106"
+ path = "General.tar.gz"
+ """
+ )
+ end
+ Pkg.update()
+ Pkg.Registry.rm(name = "General")
+ @test isempty(readdir(joinpath(DEPOT_PATH[1], "registries")))
end
- Pkg.update()
- Pkg.Registry.rm(name = "General")
- @test isempty(readdir(joinpath(DEPOT_PATH[1], "registries")))
end
end
end
end
-end
end # module
diff --git a/test/repl.jl b/test/repl.jl
index b0d729dd92..9bc6f3f4cc 100644
--- a/test/repl.jl
+++ b/test/repl.jl
@@ -25,8 +25,18 @@ using ..Utils
@test_throws PkgError pkg"helpadd"
end
+@testset "accidental" begin
+ pkg"]?"
+ pkg"] ?"
+ pkg"]st"
+ pkg"] st"
+ pkg"]st -m"
+ pkg"] st -m"
+ pkg"]" # noop
+end
+
temp_pkg_dir() do project_path
- with_pkg_env(project_path; change_dir=true) do;
+ with_pkg_env(project_path; change_dir = true) do;
pkg"generate HelloWorld"
LibGit2.close((LibGit2.init(".")))
cd("HelloWorld")
@@ -40,117 +50,124 @@ temp_pkg_dir() do project_path
@test_throws PkgError pkg"dev ./Foo"
###
mv(joinpath("Foo", "src", "Foo2.jl"), joinpath("Foo", "src", "Foo.jl"))
- write(joinpath("Foo", "Project.toml"), """
- name = "Foo"
- """
+ write(
+ joinpath("Foo", "Project.toml"), """
+ name = "Foo"
+ """
)
@test_throws PkgError pkg"dev ./Foo"
- write(joinpath("Foo", "Project.toml"), """
- uuid = "b7b78b08-812d-11e8-33cd-11188e330cbe"
- """
+ write(
+ joinpath("Foo", "Project.toml"), """
+ uuid = "b7b78b08-812d-11e8-33cd-11188e330cbe"
+ """
)
@test_throws PkgError pkg"dev ./Foo"
end
end
-temp_pkg_dir(;rm=false) do project_path; cd(project_path) do;
- tmp_pkg_path = mktempdir()
-
- pkg"activate ."
- pkg"add Example@0.5.3"
- @test isinstalled(TEST_PKG)
- v = Pkg.dependencies()[TEST_PKG.uuid].version
- @test v == v"0.5.3"
- pkg"rm Example"
- pkg"add Example, Random"
- pkg"rm Example Random"
- pkg"add Example,Random"
- pkg"rm Example,Random"
- pkg"add Example#master"
- pkg"rm Example"
- pkg"add https://github.com/JuliaLang/Example.jl#master"
-
- ## TODO: figure out how to test these in CI
- # pkg"rm Example"
- # pkg"add git@github.com:JuliaLang/Example.jl.git"
- # pkg"rm Example"
- # pkg"add \"git@github.com:JuliaLang/Example.jl.git\"#master"
- # pkg"rm Example"
-
- # Test upgrade --fixed doesn't change the tracking (https://github.com/JuliaLang/Pkg.jl/issues/434)
- entry = Pkg.Types.manifest_info(EnvCache().manifest, TEST_PKG.uuid)
- @test entry.repo.rev == "master"
- pkg"up --fixed"
- entry = Pkg.Types.manifest_info(EnvCache().manifest, TEST_PKG.uuid)
- @test entry.repo.rev == "master"
-
- pkg"test Example"
- @test isinstalled(TEST_PKG)
- @test Pkg.dependencies()[TEST_PKG.uuid].version > v
+temp_pkg_dir(; rm = false) do project_path
+ cd(project_path) do;
+ tmp_pkg_path = mktempdir()
- pkg2 = "UnregisteredWithProject"
- pkg2_uuid = UUID("58262bb0-2073-11e8-3727-4fe182c12249")
- p2 = git_init_package(tmp_pkg_path, joinpath(@__DIR__, "test_packages/$pkg2"))
- Pkg.REPLMode.pkgstr("add $p2")
- Pkg.REPLMode.pkgstr("pin $pkg2")
- # FIXME: this confuses the precompile logic to know what is going on with the user
- # FIXME: why isn't this testing the Pkg after importing, rather than after freeing it
- #@eval import Example
- #@eval import $(Symbol(pkg2))
- @test Pkg.dependencies()[pkg2_uuid].version == v"0.1.0"
- Pkg.REPLMode.pkgstr("free $pkg2")
- @test_throws PkgError Pkg.REPLMode.pkgstr("free $pkg2")
- Pkg.test("UnregisteredWithProject")
-
- write(joinpath(p2, "Project.toml"), """
- name = "UnregisteredWithProject"
- uuid = "58262bb0-2073-11e8-3727-4fe182c12249"
- version = "0.2.0"
- """
- )
- LibGit2.with(LibGit2.GitRepo, p2) do repo
- LibGit2.add!(repo, "*")
- LibGit2.commit(repo, "bump version"; author = TEST_SIG, committer=TEST_SIG)
- pkg"update"
- @test Pkg.dependencies()[pkg2_uuid].version == v"0.2.0"
- Pkg.REPLMode.pkgstr("rm $pkg2")
-
- c = LibGit2.commit(repo, "empty commit"; author = TEST_SIG, committer=TEST_SIG)
- c_hash = LibGit2.GitHash(c)
- Pkg.REPLMode.pkgstr("add $p2#$c")
- end
+ pkg"activate ."
+ pkg"add Example@0.5.3"
+ @test isinstalled(TEST_PKG)
+ v = Pkg.dependencies()[TEST_PKG.uuid].version
+ @test v == v"0.5.3"
+ pkg"rm Example"
+ pkg"add Example, Random"
+ pkg"rm Example Random"
+ pkg"add Example,Random"
+ pkg"rm Example,Random"
+ # Test leading whitespace handling (issue #4239)
+ pkg" add Example, Random"
+ pkg"rm Example Random"
+ pkg"add Example#master"
+ pkg"rm Example"
+ pkg"add https://github.com/JuliaLang/Example.jl#master"
+
+ ## TODO: figure out how to test these in CI
+ # pkg"rm Example"
+ # pkg"add git@github.com:JuliaLang/Example.jl.git"
+ # pkg"rm Example"
+ # pkg"add \"git@github.com:JuliaLang/Example.jl.git\"#master"
+ # pkg"rm Example"
+
+ # Test upgrade --fixed doesn't change the tracking (https://github.com/JuliaLang/Pkg.jl/issues/434)
+ entry = Pkg.Types.manifest_info(EnvCache().manifest, TEST_PKG.uuid)
+ @test entry.repo.rev == "master"
+ pkg"up --fixed"
+ entry = Pkg.Types.manifest_info(EnvCache().manifest, TEST_PKG.uuid)
+ @test entry.repo.rev == "master"
+
+ pkg"test Example"
+ @test isinstalled(TEST_PKG)
+ @test Pkg.dependencies()[TEST_PKG.uuid].version > v
+
+ pkg2 = "UnregisteredWithProject"
+ pkg2_uuid = UUID("58262bb0-2073-11e8-3727-4fe182c12249")
+ p2 = git_init_package(tmp_pkg_path, joinpath(@__DIR__, "test_packages/$pkg2"))
+ Pkg.REPLMode.pkgstr("add $p2")
+ Pkg.REPLMode.pkgstr("pin $pkg2")
+ # FIXME: this confuses the precompile logic to know what is going on with the user
+ # FIXME: why isn't this testing the Pkg after importing, rather than after freeing it
+ #@eval import Example
+ #@eval import $(Symbol(pkg2))
+ @test Pkg.dependencies()[pkg2_uuid].version == v"0.1.0"
+ Pkg.REPLMode.pkgstr("free $pkg2")
+ @test_throws PkgError Pkg.REPLMode.pkgstr("free $pkg2")
+ Pkg.test("UnregisteredWithProject")
+
+ write(
+ joinpath(p2, "Project.toml"), """
+ name = "UnregisteredWithProject"
+ uuid = "58262bb0-2073-11e8-3727-4fe182c12249"
+ version = "0.2.0"
+ """
+ )
+ LibGit2.with(LibGit2.GitRepo, p2) do repo
+ LibGit2.add!(repo, "*")
+ LibGit2.commit(repo, "bump version"; author = TEST_SIG, committer = TEST_SIG)
+ pkg"update"
+ @test Pkg.dependencies()[pkg2_uuid].version == v"0.2.0"
+ Pkg.REPLMode.pkgstr("rm $pkg2")
+
+ c = LibGit2.commit(repo, "empty commit"; author = TEST_SIG, committer = TEST_SIG)
+ c_hash = LibGit2.GitHash(c)
+ Pkg.REPLMode.pkgstr("add $p2#$c")
+ end
- mktempdir() do tmp_dev_dir
- withenv("JULIA_PKG_DEVDIR" => tmp_dev_dir) do
- pkg"develop Example"
- pkg"develop Example,PackageCompiler"
- pkg"develop Example PackageCompiler"
-
- # Copy the manifest + project and see that we can resolve it in a new environment
- # and get all the packages installed
- proj = read("Project.toml", String)
- manifest = read("Manifest.toml", String)
- cd_tempdir() do tmp
- old_depot = copy(DEPOT_PATH)
- try
- empty!(DEPOT_PATH)
- write("Project.toml", proj)
- write("Manifest.toml", manifest)
- mktempdir() do depot_dir
- pushfirst!(DEPOT_PATH, depot_dir)
- Base.append_bundled_depot_path!(DEPOT_PATH)
- pkg"instantiate"
- @test Pkg.dependencies()[pkg2_uuid].version == v"0.2.0"
- end
- finally
- empty!(DEPOT_PATH)
- append!(DEPOT_PATH, old_depot)
- Base.append_bundled_depot_path!(DEPOT_PATH)
- end
- end # cd_tempdir
- end # withenv
- end # mktempdir
-end # cd
+ mktempdir() do tmp_dev_dir
+ withenv("JULIA_PKG_DEVDIR" => tmp_dev_dir) do
+ pkg"develop Example"
+ pkg"develop Example,PackageCompiler"
+ pkg"develop Example PackageCompiler"
+
+ # Copy the manifest + project and see that we can resolve it in a new environment
+ # and get all the packages installed
+ proj = read("Project.toml", String)
+ manifest = read("Manifest.toml", String)
+ cd_tempdir() do tmp
+ old_depot = copy(DEPOT_PATH)
+ try
+ empty!(DEPOT_PATH)
+ write("Project.toml", proj)
+ write("Manifest.toml", manifest)
+ mktempdir() do depot_dir
+ pushfirst!(DEPOT_PATH, depot_dir)
+ Base.append_bundled_depot_path!(DEPOT_PATH)
+ pkg"instantiate"
+ @test Pkg.dependencies()[pkg2_uuid].version == v"0.2.0"
+ end
+ finally
+ empty!(DEPOT_PATH)
+ append!(DEPOT_PATH, old_depot)
+ Base.append_bundled_depot_path!(DEPOT_PATH)
+ end
+ end # cd_tempdir
+ end # withenv
+ end # mktempdir
+ end # cd
end # temp_pkg_dir
# issue #904: Pkg.status within a git repo
@@ -162,65 +179,66 @@ temp_pkg_dir() do path
Pkg.REPLMode.pkgstr("status") # should not throw
end
-temp_pkg_dir() do project_path; cd(project_path) do
- mktempdir() do tmp
- mktempdir() do depot_dir
- old_depot = copy(DEPOT_PATH)
- try
- empty!(DEPOT_PATH)
- pushfirst!(DEPOT_PATH, depot_dir)
- Base.append_bundled_depot_path!(DEPOT_PATH)
- withenv("JULIA_PKG_DEVDIR" => tmp) do
- # Test an unregistered package
- p1_path = joinpath(@__DIR__, "test_packages", "UnregisteredWithProject")
- p1_new_path = joinpath(tmp, "UnregisteredWithProject")
- cp(p1_path, p1_new_path)
- Pkg.REPLMode.pkgstr("develop $(p1_new_path)")
- Pkg.REPLMode.pkgstr("build; precompile")
- @test realpath(Base.find_package("UnregisteredWithProject")) == realpath(joinpath(p1_new_path, "src", "UnregisteredWithProject.jl"))
- @test Pkg.dependencies()[UUID("58262bb0-2073-11e8-3727-4fe182c12249")].version == v"0.1.0"
- Pkg.test("UnregisteredWithProject")
+temp_pkg_dir() do project_path
+ cd(project_path) do
+ mktempdir() do tmp
+ mktempdir() do depot_dir
+ old_depot = copy(DEPOT_PATH)
+ try
+ empty!(DEPOT_PATH)
+ pushfirst!(DEPOT_PATH, depot_dir)
+ Base.append_bundled_depot_path!(DEPOT_PATH)
+ withenv("JULIA_PKG_DEVDIR" => tmp) do
+ # Test an unregistered package
+ p1_path = joinpath(@__DIR__, "test_packages", "UnregisteredWithProject")
+ p1_new_path = joinpath(tmp, "UnregisteredWithProject")
+ cp(p1_path, p1_new_path)
+ Pkg.REPLMode.pkgstr("develop $(p1_new_path)")
+ Pkg.REPLMode.pkgstr("build; precompile")
+ @test realpath(Base.find_package("UnregisteredWithProject")) == realpath(joinpath(p1_new_path, "src", "UnregisteredWithProject.jl"))
+ @test Pkg.dependencies()[UUID("58262bb0-2073-11e8-3727-4fe182c12249")].version == v"0.1.0"
+ Pkg.test("UnregisteredWithProject")
+ end
+ finally
+ empty!(DEPOT_PATH)
+ append!(DEPOT_PATH, old_depot)
+ Base.append_bundled_depot_path!(DEPOT_PATH)
end
- finally
- empty!(DEPOT_PATH)
- append!(DEPOT_PATH, old_depot)
- Base.append_bundled_depot_path!(DEPOT_PATH)
- end
- end # withenv
- end # mktempdir
- # nested
- mktempdir() do other_dir
- mktempdir() do tmp;
- cd(tmp) do
- pkg"generate HelloWorld"
- cd("HelloWorld") do
+ end # withenv
+ end # mktempdir
+ # nested
+ mktempdir() do other_dir
+ mktempdir() do tmp
+ cd(tmp) do
+ pkg"generate HelloWorld"
+ cd("HelloWorld") do
+ with_current_env() do
+ uuid1 = Pkg.generate("SubModule1")["SubModule1"]
+ uuid2 = Pkg.generate("SubModule2")["SubModule2"]
+ pkg"develop ./SubModule1"
+ mkdir("tests")
+ cd("tests")
+ pkg"develop ../SubModule2"
+ @test Pkg.dependencies()[uuid1].version == v"0.1.0"
+ @test Pkg.dependencies()[uuid2].version == v"0.1.0"
+ # make sure paths to SubModule1 and SubModule2 are relative
+ manifest = Pkg.Types.Context().env.manifest
+ @test manifest[uuid1].path == "SubModule1"
+ @test manifest[uuid2].path == "SubModule2"
+ end
+ end
+ cp("HelloWorld", joinpath(other_dir, "HelloWorld"))
+ cd(joinpath(other_dir, "HelloWorld"))
with_current_env() do
- uuid1 = Pkg.generate("SubModule1")["SubModule1"]
- uuid2 = Pkg.generate("SubModule2")["SubModule2"]
- pkg"develop ./SubModule1"
- mkdir("tests")
- cd("tests")
- pkg"develop ../SubModule2"
- @test Pkg.dependencies()[uuid1].version == v"0.1.0"
- @test Pkg.dependencies()[uuid2].version == v"0.1.0"
- # make sure paths to SubModule1 and SubModule2 are relative
- manifest = Pkg.Types.Context().env.manifest
- @test manifest[uuid1].path == "SubModule1"
- @test manifest[uuid2].path == "SubModule2"
+ # Check that these didn't generate absolute paths in the Manifest by copying
+ # to another directory
+ @test Base.find_package("SubModule1") == joinpath(pwd(), "SubModule1", "src", "SubModule1.jl")
+ @test Base.find_package("SubModule2") == joinpath(pwd(), "SubModule2", "src", "SubModule2.jl")
end
end
- cp("HelloWorld", joinpath(other_dir, "HelloWorld"))
- cd(joinpath(other_dir, "HelloWorld"))
- with_current_env() do
- # Check that these didn't generate absolute paths in the Manifest by copying
- # to another directory
- @test Base.find_package("SubModule1") == joinpath(pwd(), "SubModule1", "src", "SubModule1.jl")
- @test Base.find_package("SubModule2") == joinpath(pwd(), "SubModule2", "src", "SubModule2.jl")
- end
end
end
- end
-end # cd
+ end # cd
end # temp_pkg_dir
# activate
@@ -247,7 +265,7 @@ temp_pkg_dir() do project_path
#=@test_logs (:info, r"activating new environment at ")))=# pkg"activate --shared Foo" # activate shared Foo
@test Base.active_project() == joinpath(Pkg.envdir(), "Foo", "Project.toml")
pkg"activate ."
- rm("Foo"; force=true, recursive=true)
+ rm("Foo"; force = true, recursive = true)
pkg"activate Foo" # activate path from developed Foo
@test Base.active_project() == joinpath(path, "modules", "Foo", "Project.toml")
pkg"activate ."
@@ -300,207 +318,215 @@ end
test_complete(s) = REPLExt.completions(s, lastindex(s))
apply_completion(str) = begin
c, r, s = test_complete(str)
- str[1:prevind(str, first(r))]*first(c)
+ str[1:prevind(str, first(r))] * first(c)
end
# Autocompletions
-temp_pkg_dir() do project_path; cd(project_path) do
- @testset "tab completion while offline" begin
- # No registry and no network connection
- Pkg.offline()
- pkg"activate ."
- c, r = test_complete("add Exam")
- @test isempty(c)
- Pkg.offline(false)
- # Existing registry but no network connection
- pkg"registry add General" # instantiate the `General` registry to complete remote package names
- Pkg.offline(true)
- c, r = test_complete("add Exam")
- @test "Example" in c
- Pkg.offline(false)
+temp_pkg_dir() do project_path
+ cd(project_path) do
+ @testset "tab completion while offline" begin
+ # No registry and no network connection
+ Pkg.offline()
+ pkg"activate ."
+ c, r = test_complete("add Exam")
+ @test isempty(c)
+ Pkg.offline(false)
+ # Existing registry but no network connection
+ pkg"registry add General" # instantiate the `General` registry to complete remote package names
+ Pkg.offline(true)
+ c, r = test_complete("add Exam")
+ @test "Example" in c
+ Pkg.offline(false)
+ end
end
-end end
+end
-temp_pkg_dir() do project_path; cd(project_path) do
- @testset "tab completion" begin
- pkg"registry add General" # instantiate the `General` registry to complete remote package names
- pkg"activate ."
- c, r = test_complete("add Exam")
- @test "Example" in c
- c, r = test_complete("rm Exam")
- @test isempty(c)
-
- Pkg.REPLMode.pkgstr("develop $(joinpath(@__DIR__, "test_packages", "PackageWithDependency"))")
-
- c, r = test_complete("rm PackageWithDep")
- @test "PackageWithDependency" in c
- c, r = test_complete("rm -p PackageWithDep")
- @test "PackageWithDependency" in c
- c, r = test_complete("rm --project PackageWithDep")
- @test "PackageWithDependency" in c
- c, r = test_complete("rm Exam")
- @test isempty(c)
- c, r = test_complete("rm -p Exam")
- @test isempty(c)
- c, r = test_complete("rm --project Exam")
- @test isempty(c)
- c, r = test_complete("free PackageWithDep")
- @test "PackageWithDependency" in c # given this was devved
-
- c, r = test_complete("rm -m PackageWithDep")
- @test "PackageWithDependency" in c
- c, r = test_complete("rm --manifest PackageWithDep")
- @test "PackageWithDependency" in c
- c, r = test_complete("rm -m Exam")
- @test "Example" in c
- c, r = test_complete("rm --manifest Exam")
- @test "Example" in c
- c, r = test_complete("why PackageWithDep")
- @test "PackageWithDependency" in c
-
- c, r = test_complete("rm PackageWithDep")
- @test "PackageWithDependency" in c
- c, r = test_complete("rm Exam")
- @test isempty(c)
- c, r = test_complete("rm -m Exam")
- c, r = test_complete("rm -m Exam")
- @test "Example" in c
-
- pkg"add Example"
- c, r = test_complete("rm Exam")
- @test "Example" in c
- c, r = test_complete("up --man")
- @test "--manifest" in c
- c, r = test_complete("rem")
- @test "remove" in c
- @test apply_completion("rm E") == "rm Example"
- @test apply_completion("add Exampl") == "add Example"
- c, r = test_complete("free Exa")
- @test isempty(c) # given this was added i.e. not fixed
- pkg"pin Example"
- c, r = test_complete("free Exa")
- @test "Example" in c
- pkg"free Example"
-
- # help mode
- @test apply_completion("?ad") == "?add"
- @test apply_completion("?act") == "?activate"
- @test apply_completion("? ad") == "? add"
- @test apply_completion("? act") == "? activate"
-
- # stdlibs
- c, r = test_complete("add Stat")
- @test "Statistics" in c
- c, r = test_complete("add Lib")
- @test "LibGit2" in c
- c, r = test_complete("add REPL")
- @test "REPL" in c
-
- # upper bounded
- c, r = test_complete("add Chu")
- @test !("Chunks" in c)
-
- # local paths
- mkpath("testdir/foo/bar")
- c, r = test_complete("add ")
- @test Sys.iswindows() ? ("testdir\\\\" in c) : ("testdir/" in c)
- @test apply_completion("add tes") == (Sys.iswindows() ? "add testdir\\\\" : "add testdir/")
- @test apply_completion("add ./tes") == (Sys.iswindows() ? "add ./testdir\\\\" : "add ./testdir/")
- c, r = test_complete("dev ./")
- @test (Sys.iswindows() ? ("testdir\\\\" in c) : ("testdir/" in c))
-
- # complete subdirs
- c, r = test_complete("add testdir/f")
- @test Sys.iswindows() ? ("foo\\\\" in c) : ("foo/" in c)
- @test apply_completion("add testdir/f") == (Sys.iswindows() ? "add testdir/foo\\\\" : "add testdir/foo/")
- # dont complete files
- touch("README.md")
- c, r = test_complete("add RE")
- @test !("README.md" in c)
-
- # Expand homedir and
- if !Sys.iswindows()
- dirname = "JuliaPkgTest744a757c-d313-11e9-1cac-118368d5977a"
- tildepath = "~/$dirname"
- try
- mkdir(expanduser(tildepath))
- c, r = test_complete("dev ~/JuliaPkgTest744a75")
- @test joinpath(homedir(), dirname, "") in c
- finally
- rm(expanduser(tildepath); force = true)
- end
- c, r = test_complete("dev ~")
- @test joinpath(homedir(), "") in c
-
- # nested directories
- nested_dirs = "foo/bar/baz"
- tildepath = "~/$nested_dirs"
- try
- mkpath(expanduser(tildepath))
- c, r = test_complete("dev ~/foo/bar/b")
- @test joinpath(homedir(), nested_dirs, "") in c
- finally
- rm(expanduser(tildepath); force = true)
+temp_pkg_dir() do project_path
+ cd(project_path) do
+ @testset "tab completion" begin
+ pkg"registry add General" # instantiate the `General` registry to complete remote package names
+ pkg"activate ."
+ c, r = test_complete("add Exam")
+ @test "Example" in c
+ c, r = test_complete("rm Exam")
+ @test isempty(c)
+
+ Pkg.REPLMode.pkgstr("develop $(joinpath(@__DIR__, "test_packages", "PackageWithDependency"))")
+
+ c, r = test_complete("rm PackageWithDep")
+ @test "PackageWithDependency" in c
+ c, r = test_complete("rm -p PackageWithDep")
+ @test "PackageWithDependency" in c
+ c, r = test_complete("rm --project PackageWithDep")
+ @test "PackageWithDependency" in c
+ c, r = test_complete("rm Exam")
+ @test isempty(c)
+ c, r = test_complete("rm -p Exam")
+ @test isempty(c)
+ c, r = test_complete("rm --project Exam")
+ @test isempty(c)
+ c, r = test_complete("free PackageWithDep")
+ @test "PackageWithDependency" in c # given this was devved
+
+ c, r = test_complete("rm -m PackageWithDep")
+ @test "PackageWithDependency" in c
+ c, r = test_complete("rm --manifest PackageWithDep")
+ @test "PackageWithDependency" in c
+ c, r = test_complete("rm -m Exam")
+ @test "Example" in c
+ c, r = test_complete("rm --manifest Exam")
+ @test "Example" in c
+ c, r = test_complete("why PackageWithDep")
+ @test "PackageWithDependency" in c
+
+ c, r = test_complete("rm PackageWithDep")
+ @test "PackageWithDependency" in c
+ c, r = test_complete("rm Exam")
+ @test isempty(c)
+ c, r = test_complete("rm -m Exam")
+ c, r = test_complete("rm -m Exam")
+ @test "Example" in c
+
+ pkg"add Example"
+ c, r = test_complete("rm Exam")
+ @test "Example" in c
+ c, r = test_complete("up --man")
+ @test "--manifest" in c
+ c, r = test_complete("rem")
+ @test "remove" in c
+ @test apply_completion("rm E") == "rm Example"
+ @test apply_completion("add Exampl") == "add Example"
+ c, r = test_complete("free Exa")
+ @test isempty(c) # given this was added i.e. not fixed
+ pkg"pin Example"
+ c, r = test_complete("free Exa")
+ @test "Example" in c
+ pkg"free Example"
+
+ # help mode
+ @test apply_completion("?ad") == "?add"
+ @test apply_completion("?act") == "?activate"
+ @test apply_completion("? ad") == "? add"
+ @test apply_completion("? act") == "? activate"
+
+ # stdlibs
+ c, r = test_complete("add Stat")
+ @test "Statistics" in c
+ c, r = test_complete("add Lib")
+ @test "LibGit2" in c
+ c, r = test_complete("add REPL")
+ @test "REPL" in c
+
+ # upper bounded
+ c, r = test_complete("add Chu")
+ @test !("Chunks" in c)
+
+ # local paths
+ mkpath("testdir/foo/bar")
+ c, r = test_complete("add ")
+ @test Sys.iswindows() ? ("testdir\\\\" in c) : ("testdir/" in c)
+ @test apply_completion("add tes") == (Sys.iswindows() ? "add testdir\\\\" : "add testdir/")
+ @test apply_completion("add ./tes") == (Sys.iswindows() ? "add ./testdir\\\\" : "add ./testdir/")
+ c, r = test_complete("dev ./")
+ @test (Sys.iswindows() ? ("testdir\\\\" in c) : ("testdir/" in c))
+
+ # complete subdirs
+ c, r = test_complete("add testdir/f")
+ @test Sys.iswindows() ? ("foo\\\\" in c) : ("foo/" in c)
+ @test apply_completion("add testdir/f") == (Sys.iswindows() ? "add testdir/foo\\\\" : "add testdir/foo/")
+ # don't complete files
+ touch("README.md")
+ c, r = test_complete("add RE")
+ @test !("README.md" in c)
+
+ # Expand homedir
+ if !Sys.iswindows()
+ dirname = "JuliaPkgTest744a757c-d313-11e9-1cac-118368d5977a"
+ tildepath = "~/$dirname"
+ try
+ mkdir(expanduser(tildepath))
+ c, r = test_complete("dev ~/JuliaPkgTest744a75")
+ @test joinpath(homedir(), dirname, "") in c
+ finally
+ rm(expanduser(tildepath); force = true)
+ end
+ c, r = test_complete("dev ~")
+ @test joinpath(homedir(), "") in c
+
+ # nested directories
+ nested_dirs = "foo/bar/baz"
+ tildepath = "~/$nested_dirs"
+ try
+ mkpath(expanduser(tildepath))
+ c, r = test_complete("dev ~/foo/bar/b")
+ @test joinpath(homedir(), nested_dirs, "") in c
+ finally
+ rm(expanduser(tildepath); force = true)
+ end
end
- end
- # activate
- pkg"activate --shared FooBar"
- pkg"add Example"
- pkg"activate ."
- c, r = test_complete("activate --shared ")
- @test "FooBar" in c
-
- # invalid options
- c, r = test_complete("rm -rf ")
- @test isempty(c)
-
- # parse errors should not throw
- _ = test_complete("add \"Foo")
- # invalid option should not throw
- _ = test_complete("add -z Foo")
- _ = test_complete("add --dontexist Foo")
- end # testset
-end end
-
-temp_pkg_dir() do project_path; cd(project_path) do
- mktempdir() do tmp
- cp(joinpath(@__DIR__, "test_packages", "BigProject"), joinpath(tmp, "BigProject"))
- cd(joinpath(tmp, "BigProject"))
- with_current_env() do
- # the command below also tests multiline input
- pkg"""
- dev ./RecursiveDep2
- dev ./RecursiveDep
- dev ./SubModule
- dev ./SubModule2
- add Random
- add Example
- add JSON
- build
- """
- @eval using BigProject
- pkg"build BigProject"
- @test_throws PkgError pkg"add BigProject"
- # the command below also tests multiline input
- Pkg.REPLMode.pkgstr("""
- test SubModule
- test SubModule2
- test BigProject
- test
- """)
- json_uuid = Pkg.project().dependencies["JSON"]
- current_json = Pkg.dependencies()[json_uuid].version
- old_project = read("Project.toml", String)
- Pkg.compat("JSON", "0.18.0")
- pkg"up"
- @test Pkg.dependencies()[json_uuid].version.minor == 18
- write("Project.toml", old_project)
- pkg"up"
- @test Pkg.dependencies()[json_uuid].version == current_json
+ # activate
+ pkg"activate --shared FooBar"
+ pkg"add Example"
+ pkg"activate ."
+ c, r = test_complete("activate --shared ")
+ @test "FooBar" in c
+
+ # invalid options
+ c, r = test_complete("rm -rf ")
+ @test isempty(c)
+
+ # parse errors should not throw
+ _ = test_complete("add \"Foo")
+ # invalid option should not throw
+ _ = test_complete("add -z Foo")
+ _ = test_complete("add --dontexist Foo")
+ end # testset
+ end
+end
+
+temp_pkg_dir() do project_path
+ cd(project_path) do
+ mktempdir() do tmp
+ cp(joinpath(@__DIR__, "test_packages", "BigProject"), joinpath(tmp, "BigProject"))
+ cd(joinpath(tmp, "BigProject"))
+ with_current_env() do
+ # the command below also tests multiline input
+ pkg"""
+ dev ./RecursiveDep2
+ dev ./RecursiveDep
+ dev ./SubModule
+ dev ./SubModule2
+ add Random
+ add Example
+ add JSON
+ build
+ """
+ @eval using BigProject
+ pkg"build BigProject"
+ @test_throws PkgError pkg"add BigProject"
+ # the command below also tests multiline input
+ Pkg.REPLMode.pkgstr(
+ """
+ test SubModule
+ test SubModule2
+ test BigProject
+ test
+ """
+ )
+ json_uuid = Pkg.project().dependencies["JSON"]
+ current_json = Pkg.dependencies()[json_uuid].version
+ old_project = read("Project.toml", String)
+ Pkg.compat("JSON", "0.18.0")
+ pkg"up"
+ @test Pkg.dependencies()[json_uuid].version.minor == 18
+ write("Project.toml", old_project)
+ pkg"up"
+ @test Pkg.dependencies()[json_uuid].version == current_json
+ end
end
end
-end; end
+end
temp_pkg_dir() do project_path
cd(project_path) do
@@ -537,9 +563,9 @@ temp_pkg_dir() do project_path
setup_package(dir_name, pkg_name)
uuid = extract_uuid("$dir_name/$pkg_name/Project.toml")
Pkg.REPLMode.pkgstr("add \"$dir_name/$pkg_name\"")
- @test isinstalled((name=pkg_name, uuid = UUID(uuid)))
+ @test isinstalled((name = pkg_name, uuid = UUID(uuid)))
Pkg.REPLMode.pkgstr("remove \"$pkg_name\"")
- @test !isinstalled((name=pkg_name, uuid = UUID(uuid)))
+ @test !isinstalled((name = pkg_name, uuid = UUID(uuid)))
# testing dir name with significant characters
dir_name = "some@d;ir#"
@@ -547,9 +573,9 @@ temp_pkg_dir() do project_path
setup_package(dir_name, pkg_name)
uuid = extract_uuid("$dir_name/$pkg_name/Project.toml")
Pkg.REPLMode.pkgstr("add \"$dir_name/$pkg_name\"")
- @test isinstalled((name=pkg_name, uuid = UUID(uuid)))
+ @test isinstalled((name = pkg_name, uuid = UUID(uuid)))
Pkg.REPLMode.pkgstr("remove '$pkg_name'")
- @test !isinstalled((name=pkg_name, uuid = UUID(uuid)))
+ @test !isinstalled((name = pkg_name, uuid = UUID(uuid)))
# more complicated input
## pkg1
@@ -565,35 +591,28 @@ temp_pkg_dir() do project_path
uuid2 = extract_uuid("$dir2/$pkg_name2/Project.toml")
Pkg.REPLMode.pkgstr("add '$dir1/$pkg_name1' \"$dir2/$pkg_name2\"")
- @test isinstalled((name=pkg_name1, uuid = UUID(uuid1)))
- @test isinstalled((name=pkg_name2, uuid = UUID(uuid2)))
+ @test isinstalled((name = pkg_name1, uuid = UUID(uuid1)))
+ @test isinstalled((name = pkg_name2, uuid = UUID(uuid2)))
Pkg.REPLMode.pkgstr("remove '$pkg_name1' $pkg_name2")
- @test !isinstalled((name=pkg_name1, uuid = UUID(uuid1)))
- @test !isinstalled((name=pkg_name2, uuid = UUID(uuid2)))
+ @test !isinstalled((name = pkg_name1, uuid = UUID(uuid1)))
+ @test !isinstalled((name = pkg_name2, uuid = UUID(uuid2)))
Pkg.REPLMode.pkgstr("add '$dir1/$pkg_name1' \"$dir2/$pkg_name2\"")
- @test isinstalled((name=pkg_name1, uuid = UUID(uuid1)))
- @test isinstalled((name=pkg_name2, uuid = UUID(uuid2)))
+ @test isinstalled((name = pkg_name1, uuid = UUID(uuid1)))
+ @test isinstalled((name = pkg_name2, uuid = UUID(uuid2)))
Pkg.REPLMode.pkgstr("remove '$pkg_name1' \"$pkg_name2\"")
- @test !isinstalled((name=pkg_name1, uuid = UUID(uuid1)))
- @test !isinstalled((name=pkg_name2, uuid = UUID(uuid2)))
+ @test !isinstalled((name = pkg_name1, uuid = UUID(uuid1)))
+ @test !isinstalled((name = pkg_name2, uuid = UUID(uuid2)))
end
end
end
@testset "parse package url win" begin
pkg_id = Pkg.REPLMode.PackageIdentifier("https://github.com/abc/ABC.jl")
- pkg_spec = Pkg.REPLMode.parse_package_identifier(pkg_id; add_or_develop=true)
+ pkg_spec = Pkg.REPLMode.parse_package_identifier(pkg_id; add_or_develop = true)
@test typeof(pkg_spec) == Pkg.Types.PackageSpec
end
-@testset "parse git url (issue #1935) " begin
- urls = ["https://github.com/abc/ABC.jl.git", "https://abc.github.io/ABC.jl"]
- for url in urls
- @test Pkg.REPLMode.package_lex([Pkg.REPLMode.QString((url), false)]) == [url]
- end
-end
-
@testset "unit test for REPLMode.promptf" begin
function set_name(projfile_path, newname)
sleep(1.1)
@@ -635,26 +654,32 @@ end
end
@testset "test" begin
- temp_pkg_dir() do project_path; cd_tempdir() do tmpdir; with_temp_env() do;
- Pkg.add("Example")
- @test_throws PkgError Pkg.REPLMode.pkgstr("test --project Example")
- Pkg.REPLMode.pkgstr("test --coverage Example")
- Pkg.REPLMode.pkgstr("test Example")
- end
- end
+ temp_pkg_dir() do project_path
+ cd_tempdir() do tmpdir
+ with_temp_env() do
+ Pkg.add("Example")
+ @test_throws PkgError Pkg.REPLMode.pkgstr("test --project Example")
+ Pkg.REPLMode.pkgstr("test --coverage Example")
+ Pkg.REPLMode.pkgstr("test Example")
+ end
+ end
end
end
@testset "activate" begin
- temp_pkg_dir() do project_path; cd_tempdir() do tmpdir; with_temp_env() do;
- mkdir("Foo")
- pkg"activate"
- default = Base.active_project()
- pkg"activate Foo"
- @test Base.active_project() == joinpath(pwd(), "Foo", "Project.toml")
- pkg"activate"
- @test Base.active_project() == default
- end end end
+ temp_pkg_dir() do project_path
+ cd_tempdir() do tmpdir
+ with_temp_env() do
+ mkdir("Foo")
+ pkg"activate"
+ default = Base.active_project()
+ pkg"activate Foo"
+ @test Base.active_project() == joinpath(pwd(), "Foo", "Project.toml")
+ pkg"activate"
+ @test Base.active_project() == default
+ end
+ end
+ end
end
@testset "status" begin
@@ -684,31 +709,37 @@ end
end
@testset "subcommands" begin
- temp_pkg_dir() do project_path; cd_tempdir() do tmpdir; with_temp_env() do
- Pkg.REPLMode.pkg"package add Example"
- @test isinstalled(TEST_PKG)
- Pkg.REPLMode.pkg"package rm Example"
- @test !isinstalled(TEST_PKG)
- end end end
+ temp_pkg_dir() do project_path
+ cd_tempdir() do tmpdir
+ with_temp_env() do
+ Pkg.REPLMode.pkg"package add Example"
+ @test isinstalled(TEST_PKG)
+ Pkg.REPLMode.pkg"package rm Example"
+ @test !isinstalled(TEST_PKG)
+ end
+ end
+ end
end
@testset "REPL API `up`" begin
# errors
- temp_pkg_dir() do project_path; with_temp_env() do;
- @test_throws PkgError Pkg.REPLMode.pkgstr("up --major --minor")
- end end
+ temp_pkg_dir() do project_path
+ with_temp_env() do
+ @test_throws PkgError Pkg.REPLMode.pkgstr("up --major --minor")
+ end
+ end
end
@testset "Inference" begin
@inferred Pkg.REPLMode.OptionSpecs(Pkg.REPLMode.OptionDeclaration[])
@inferred Pkg.REPLMode.CommandSpecs(Pkg.REPLMode.CommandDeclaration[])
- @inferred Pkg.REPLMode.CompoundSpecs(Pair{String,Vector{Pkg.REPLMode.CommandDeclaration}}[])
+ @inferred Pkg.REPLMode.CompoundSpecs(Pair{String, Vector{Pkg.REPLMode.CommandDeclaration}}[])
end
# To be used to reply to a prompt
function withreply(f, ans)
p = Pipe()
- try
+ return try
redirect_stdin(p) do
@async println(p, ans)
f()
@@ -719,7 +750,7 @@ function withreply(f, ans)
end
@testset "REPL missing package install hook" begin
- isolate(loaded_depot=true) do
+ isolate(loaded_depot = true) do
@test REPLExt.try_prompt_pkg_add(Symbol[:notapackage]) == false
# don't offer to install the dummy "julia" entry that's in General
@@ -735,12 +766,74 @@ end
end
@testset "JuliaLang/julia #55850" begin
- tmp_55850 = mktempdir()
- tmp_sym_link = joinpath(tmp_55850, "sym")
- symlink(tmp_55850, tmp_sym_link; dir_target=true)
- withenv("JULIA_DEPOT_PATH" => tmp_sym_link * (Sys.iswindows() ? ";" : ":"), "JULIA_LOAD_PATH" => nothing) do
- prompt = readchomp(`$(Base.julia_cmd()[1]) --project=$(dirname(@__DIR__)) --startup-file=no -e "using Pkg, REPL; Pkg.activate(io=devnull); REPLExt = Base.get_extension(Pkg, :REPLExt); print(REPLExt.promptf())"`)
- @test prompt == "(@v$(VERSION.major).$(VERSION.minor)) pkg> "
+ mktempdir() do tmp
+ copy_this_pkg_cache(tmp)
+ tmp_sym_link = joinpath(tmp, "sym")
+ symlink(tmp, tmp_sym_link; dir_target = true)
+ depot_path = tmp_sym_link * (Sys.iswindows() ? ";" : ":")
+ # include the symlink in the depot path and include the regular default depot so we don't precompile this Pkg again
+ withenv("JULIA_DEPOT_PATH" => depot_path, "JULIA_LOAD_PATH" => nothing) do
+ prompt = readchomp(`$(Base.julia_cmd()) --project=$(dirname(@__DIR__)) --startup-file=no -e "using Pkg, REPL; Pkg.activate(io=devnull); REPLExt = Base.get_extension(Pkg, :REPLExt); print(REPLExt.promptf())"`)
+ @test prompt == "(@v$(VERSION.major).$(VERSION.minor)) pkg> "
+ end
+ end
+end
+
+@testset "in_repl_mode" begin
+ # Test that in_repl_mode() returns false by default (API mode)
+ @test Pkg.in_repl_mode() == false
+
+ # Test that in_repl_mode() returns true when running REPL commands
+ # This is tested indirectly by running a simple REPL command
+ temp_pkg_dir() do project_path
+ cd(project_path) do
+ # The pkg"" macro should set IN_REPL_MODE => true during execution
+ # We can't directly test the scoped value here, but we can test
+ # that REPL commands work correctly
+ pkg"status"
+ # The fact that this doesn't error confirms REPL mode is working
+ @test true
+ end
+ end
+
+ # Test manual scoped value setting (for completeness)
+ Base.ScopedValues.@with Pkg.IN_REPL_MODE => true begin
+ @test Pkg.in_repl_mode() == true
+ end
+
+ # Verify we're back to false after the scoped block
+ @test Pkg.in_repl_mode() == false
+end
+
+@testset "compat REPL mode" begin
+ temp_pkg_dir() do project_path
+ with_pkg_env(project_path; change_dir = true) do
+
+ pkg"add Example JSON"
+
+ test_ctx = Pkg.Types.Context()
+ test_ctx.io = IOBuffer()
+
+ @test Pkg.Operations.get_compat_str(test_ctx.env.project, "Example") === nothing
+ @test Pkg.Operations.get_compat_str(test_ctx.env.project, "JSON") === nothing
+
+ input_io = Base.BufferStream()
+ # Send input to stdin before starting the _compat function
+ # This simulates the user typing in the REPL
+ write(input_io, "\e[B") # Down arrow once to select Example
+ write(input_io, "\r") # Enter to confirm selection
+ # now editing Example compat
+ write(input_io, "0.4") # Set compat to 0.4
+ write(input_io, "\r") # Enter to confirm input
+ close(input_io)
+
+ Pkg.API._compat(test_ctx; input_io)
+
+ str = String(take!(test_ctx.io))
+ @test occursin("Example = \"0.4\"", str)
+ @test occursin("checking for compliance with the new compat rules..", str)
+ @test occursin("Error empty intersection between", str) # Latest Example is at least 0.5.5
+ end
end
end
diff --git a/test/resolve.jl b/test/resolve.jl
index 91907e2d10..f2c0e0c6e9 100644
--- a/test/resolve.jl
+++ b/test/resolve.jl
@@ -9,7 +9,6 @@ using Pkg.Types: VersionBound
using UUIDs
using Pkg.Resolve
import Pkg.Resolve: VersionWeight, add_reqs!, simplify_graph!, ResolverError, ResolverTimeoutError, Fixed, Requires
-import ..HistoricalStdlibVersions
include("utils.jl")
using .Utils
@@ -26,8 +25,8 @@ vlst = [
v"1.0.0",
v"1.0.1",
v"1.1.0",
- v"1.1.1"
- ]
+ v"1.1.1",
+]
for v1 in vlst, v2 in vlst
vw1 = VersionWeight(v1)
@@ -45,28 +44,28 @@ end
["A", v"1", "B", "1-*"],
["A", v"2", "B", "2-*"],
["B", v"1"],
- ["B", v"2"]
+ ["B", v"2"],
]
@test sanity_tst(deps_data)
- @test sanity_tst(deps_data, pkgs=["A", "B"])
- @test sanity_tst(deps_data, pkgs=["B"])
- @test sanity_tst(deps_data, pkgs=["A"])
+ @test sanity_tst(deps_data, pkgs = ["A", "B"])
+ @test sanity_tst(deps_data, pkgs = ["B"])
+ @test sanity_tst(deps_data, pkgs = ["A"])
# require just B
reqs_data = Any[
- ["B", "*"]
+ ["B", "*"],
]
- want_data = Dict("B"=>v"2")
+ want_data = Dict("B" => v"2")
resolve_tst(deps_data, reqs_data, want_data)
@test resolve_tst(deps_data, reqs_data, want_data)
# require just A: must bring in B
reqs_data = Any[
- ["A", "*"]
+ ["A", "*"],
]
- want_data = Dict("A"=>v"2", "B"=>v"2")
+ want_data = Dict("A" => v"2", "B" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
@@ -76,30 +75,30 @@ end
["A", v"1", "B", "2-*"],
["A", v"2", "B", "1-*"],
["B", v"1", "A", "2-*"],
- ["B", v"2", "A", "1-*"]
+ ["B", v"2", "A", "1-*"],
]
@test sanity_tst(deps_data)
# require just A
reqs_data = Any[
- ["A", "*"]
+ ["A", "*"],
]
- want_data = Dict("A"=>v"2", "B"=>v"2")
+ want_data = Dict("A" => v"2", "B" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
# require just B, force lower version
reqs_data = Any[
- ["B", "1"]
+ ["B", "1"],
]
- want_data = Dict("A"=>v"2", "B"=>v"1")
+ want_data = Dict("A" => v"2", "B" => v"1")
@test resolve_tst(deps_data, reqs_data, want_data)
# require just A, force lower version
reqs_data = Any[
- ["A", "1"]
+ ["A", "1"],
]
- want_data = Dict("A"=>v"1", "B"=>v"2")
+ want_data = Dict("A" => v"1", "B" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
@@ -111,36 +110,36 @@ end
["B", v"1", "C", "2-*"],
["B", v"2", "C", "1"],
["C", v"1", "A", "1"],
- ["C", v"2", "A", "2-*"]
+ ["C", v"2", "A", "2-*"],
]
@test sanity_tst(deps_data)
# require just A (must choose solution which has the highest version for A)
reqs_data = Any[
- ["A", "*"]
+ ["A", "*"],
]
- want_data = Dict("A"=>v"2", "B"=>v"1", "C"=>v"2")
+ want_data = Dict("A" => v"2", "B" => v"1", "C" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
# require just B (must choose solution which has the highest version for B)
reqs_data = Any[
- ["B", "*"]
+ ["B", "*"],
]
- want_data = Dict("A"=>v"1", "B"=>v"2", "C"=>v"1")
+ want_data = Dict("A" => v"1", "B" => v"2", "C" => v"1")
@test resolve_tst(deps_data, reqs_data, want_data)
# require just A, force lower version
reqs_data = Any[
- ["A", "1"]
+ ["A", "1"],
]
- want_data = Dict("A"=>v"1", "B"=>v"2", "C"=>v"1")
+ want_data = Dict("A" => v"1", "B" => v"2", "C" => v"1")
@test resolve_tst(deps_data, reqs_data, want_data)
# require A and C, incompatible versions
reqs_data = Any[
["A", "1"],
- ["C", "2-*"]
+ ["C", "2-*"],
]
@test_throws ResolverError resolve_tst(deps_data, reqs_data)
@@ -149,22 +148,22 @@ end
## DEPENDENCY SCHEME 4: TWO PACKAGES, DAG, WITH TRIVIAL INCONSISTENCY
deps_data = Any[
["A", v"1", "B", "2-*"],
- ["B", v"1"]
+ ["B", v"1"],
]
@test sanity_tst(deps_data, [("A", v"1")])
- @test sanity_tst(deps_data, pkgs=["B"])
+ @test sanity_tst(deps_data, pkgs = ["B"])
# require B (must not give errors)
reqs_data = Any[
- ["B", "*"]
+ ["B", "*"],
]
- want_data = Dict("B"=>v"1")
+ want_data = Dict("B" => v"1")
@test resolve_tst(deps_data, reqs_data, want_data)
# require A (must give an error)
reqs_data = Any[
- ["A", "*"]
+ ["A", "*"],
]
@test_throws ResolverError resolve_tst(deps_data, reqs_data)
@@ -179,23 +178,23 @@ end
["B", v"1", "C", "2-*"],
["B", v"2", "C", "2-*"],
["C", v"1"],
- ["C", v"2"]
+ ["C", v"2"],
]
@test sanity_tst(deps_data, [("A", v"2")])
- @test sanity_tst(deps_data, pkgs=["B"])
- @test sanity_tst(deps_data, pkgs=["C"])
+ @test sanity_tst(deps_data, pkgs = ["B"])
+ @test sanity_tst(deps_data, pkgs = ["C"])
# require A, any version (must use the highest non-inconsistent)
reqs_data = Any[
- ["A", "*"]
+ ["A", "*"],
]
- want_data = Dict("A"=>v"1", "B"=>v"2", "C"=>v"2")
+ want_data = Dict("A" => v"1", "B" => v"2", "C" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
# require A, force highest version (impossible)
reqs_data = Any[
- ["A", "2-*"]
+ ["A", "2-*"],
]
@test_throws ResolverError resolve_tst(deps_data, reqs_data)
@@ -206,21 +205,25 @@ end
["A", v"1", "B", "2-*"],
["A", v"2", "B", "1"],
["B", v"1", "A", "1"],
- ["B", v"2", "A", "2-*"]
+ ["B", v"2", "A", "2-*"],
]
- @test sanity_tst(deps_data, [("A", v"1"), ("A", v"2"),
- ("B", v"1"), ("B", v"2")])
+ @test sanity_tst(
+ deps_data, [
+ ("A", v"1"), ("A", v"2"),
+ ("B", v"1"), ("B", v"2"),
+ ]
+ )
# require A (impossible)
reqs_data = Any[
- ["A", "*"]
+ ["A", "*"],
]
@test_throws ResolverError resolve_tst(deps_data, reqs_data)
# require B (impossible)
reqs_data = Any[
- ["B", "*"]
+ ["B", "*"],
]
@test_throws ResolverError resolve_tst(deps_data, reqs_data)
@@ -236,26 +239,30 @@ end
["C", v"2", "A", "2-*"],
]
- @test sanity_tst(deps_data, [("A", v"1"), ("B", v"1"),
- ("C", v"1")])
+ @test sanity_tst(
+ deps_data, [
+ ("A", v"1"), ("B", v"1"),
+ ("C", v"1"),
+ ]
+ )
# require A
reqs_data = Any[
- ["A", "*"]
+ ["A", "*"],
]
- want_data = Dict("A"=>v"2", "B"=>v"2", "C"=>v"2")
+ want_data = Dict("A" => v"2", "B" => v"2", "C" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
# require C
reqs_data = Any[
- ["C", "*"]
+ ["C", "*"],
]
- want_data = Dict("A"=>v"2", "B"=>v"2", "C"=>v"2")
+ want_data = Dict("A" => v"2", "B" => v"2", "C" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
# require C, lowest version (impossible)
reqs_data = Any[
- ["C", "1"]
+ ["C", "1"],
]
@test_throws ResolverError resolve_tst(deps_data, reqs_data)
@@ -271,25 +278,29 @@ end
["C", v"2", "A", "1"],
]
- @test sanity_tst(deps_data, [("A", v"1"), ("A", v"2"),
- ("B", v"1"), ("B", v"2"),
- ("C", v"1"), ("C", v"2")])
+ @test sanity_tst(
+ deps_data, [
+ ("A", v"1"), ("A", v"2"),
+ ("B", v"1"), ("B", v"2"),
+ ("C", v"1"), ("C", v"2"),
+ ]
+ )
# require A (impossible)
reqs_data = Any[
- ["A", "*"]
+ ["A", "*"],
]
@test_throws ResolverError resolve_tst(deps_data, reqs_data)
# require B (impossible)
reqs_data = Any[
- ["B", "*"]
+ ["B", "*"],
]
@test_throws ResolverError resolve_tst(deps_data, reqs_data)
# require C (impossible)
reqs_data = Any[
- ["C", "*"]
+ ["C", "*"],
]
@test_throws ResolverError resolve_tst(deps_data, reqs_data)
@@ -316,45 +327,55 @@ end
# require just F
reqs_data = Any[
- ["F", "*"]
+ ["F", "*"],
]
- want_data = Dict("A"=>v"3", "B"=>v"2", "C"=>v"2",
- "D"=>v"2", "E"=>v"1", "F"=>v"2")
+ want_data = Dict(
+ "A" => v"3", "B" => v"2", "C" => v"2",
+ "D" => v"2", "E" => v"1", "F" => v"2"
+ )
@test resolve_tst(deps_data, reqs_data, want_data)
# require just F, lower version
reqs_data = Any[
- ["F", "1"]
+ ["F", "1"],
]
- want_data = Dict("A"=>v"2", "B"=>v"2", "D"=>v"2",
- "E"=>v"1", "F"=>v"1")
+ want_data = Dict(
+ "A" => v"2", "B" => v"2", "D" => v"2",
+ "E" => v"1", "F" => v"1"
+ )
@test resolve_tst(deps_data, reqs_data, want_data)
# require F and B; force lower B version -> must bring down F, A, and D versions too
reqs_data = Any[
["F", "*"],
- ["B", "1"]
+ ["B", "1"],
]
- want_data = Dict("A"=>v"1", "B"=>v"1", "D"=>v"1",
- "E"=>v"1", "F"=>v"1")
+ want_data = Dict(
+ "A" => v"1", "B" => v"1", "D" => v"1",
+ "E" => v"1", "F" => v"1"
+ )
@test resolve_tst(deps_data, reqs_data, want_data)
# require F and D; force lower D version -> must not bring down F version
reqs_data = Any[
["F", "*"],
- ["D", "1"]
+ ["D", "1"],
]
- want_data = Dict("A"=>v"3", "B"=>v"2", "C"=>v"2",
- "D"=>v"1", "E"=>v"1", "F"=>v"2")
+ want_data = Dict(
+ "A" => v"3", "B" => v"2", "C" => v"2",
+ "D" => v"1", "E" => v"1", "F" => v"2"
+ )
@test resolve_tst(deps_data, reqs_data, want_data)
# require F and C; force lower C version -> must bring down F and A versions
reqs_data = Any[
["F", "*"],
- ["C", "1"]
+ ["C", "1"],
]
- want_data = Dict("A"=>v"2", "B"=>v"2", "C"=>v"1",
- "D"=>v"2", "E"=>v"1", "F"=>v"1")
+ want_data = Dict(
+ "A" => v"2", "B" => v"2", "C" => v"1",
+ "D" => v"2", "E" => v"1", "F" => v"1"
+ )
@test resolve_tst(deps_data, reqs_data, want_data)
VERBOSE && @info("SCHEME 10")
@@ -371,40 +392,39 @@ end
["D", v"1", "E", "1-*"],
["D", v"2", "E", "2-*"],
["E", v"1"],
- ["E", v"2"]
+ ["E", v"2"],
]
@test sanity_tst(deps_data, [("A", v"2")])
- @test sanity_tst(deps_data, pkgs=["B"])
- @test sanity_tst(deps_data, pkgs=["D"])
- @test sanity_tst(deps_data, pkgs=["E"])
- @test sanity_tst(deps_data, pkgs=["B", "D"])
+ @test sanity_tst(deps_data, pkgs = ["B"])
+ @test sanity_tst(deps_data, pkgs = ["D"])
+ @test sanity_tst(deps_data, pkgs = ["E"])
+ @test sanity_tst(deps_data, pkgs = ["B", "D"])
# require A, any version (must use the highest non-inconsistent)
reqs_data = Any[
- ["A", "*"]
+ ["A", "*"],
]
- want_data = Dict("A"=>v"1", "B"=>v"2", "C"=>v"2")
+ want_data = Dict("A" => v"1", "B" => v"2", "C" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
# require just D: must bring in E
reqs_data = Any[
- ["D", "*"]
+ ["D", "*"],
]
- want_data = Dict("D"=>v"2", "E"=>v"2")
+ want_data = Dict("D" => v"2", "E" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
# require A and D, must be the merge of the previous two cases
reqs_data = Any[
["A", "*"],
- ["D", "*"]
+ ["D", "*"],
]
- want_data = Dict("A"=>v"1", "B"=>v"2", "C"=>v"2", "D"=>v"2", "E"=>v"2")
+ want_data = Dict("A" => v"1", "B" => v"2", "C" => v"2", "D" => v"2", "E" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
-
VERBOSE && @info("SCHEME 11")
## DEPENDENCY SCHEME 11: FOUR PACKAGES, WITH AN INCONSISTENCY
## ref Pkg.jl issue #2740
@@ -429,7 +449,7 @@ end
["A", "*"],
["B", "*"],
]
- want_data = Dict("A"=>v"1", "B"=>v"1", "C"=>v"1", "D"=>v"1")
+ want_data = Dict("A" => v"1", "B" => v"1", "C" => v"1", "D" => v"1")
@test resolve_tst(deps_data, reqs_data, want_data)
@@ -439,40 +459,40 @@ end
["A", v"1", "B", "1-*", :weak],
["A", v"2", "B", "2-*", :weak],
["B", v"1"],
- ["B", v"2"]
+ ["B", v"2"],
]
@test sanity_tst(deps_data)
- @test sanity_tst(deps_data, pkgs=["A", "B"])
- @test sanity_tst(deps_data, pkgs=["B"])
- @test sanity_tst(deps_data, pkgs=["A"])
+ @test sanity_tst(deps_data, pkgs = ["A", "B"])
+ @test sanity_tst(deps_data, pkgs = ["B"])
+ @test sanity_tst(deps_data, pkgs = ["A"])
# require just B
reqs_data = Any[
- ["B", "*"]
+ ["B", "*"],
]
- want_data = Dict("B"=>v"2")
+ want_data = Dict("B" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
# require just A
reqs_data = Any[
- ["A", "*"]
+ ["A", "*"],
]
- want_data = Dict("A"=>v"2")
+ want_data = Dict("A" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
# require A and B
reqs_data = Any[
["A", "*"],
- ["B", "*"]
+ ["B", "*"],
]
- want_data = Dict("A"=>v"2", "B"=>v"2")
+ want_data = Dict("A" => v"2", "B" => v"2")
@test resolve_tst(deps_data, reqs_data, want_data)
# require A and B, invompatible versions
reqs_data = Any[
["A", "2-*"],
- ["B", "1"]
+ ["B", "1"],
]
@test_throws ResolverError resolve_tst(deps_data, reqs_data, want_data)
@@ -500,45 +520,55 @@ end
# require just F
reqs_data = Any[
- ["F", "*"]
+ ["F", "*"],
]
- want_data = Dict("A"=>v"3", "C"=>v"2",
- "D"=>v"2", "E"=>v"1", "F"=>v"2")
+ want_data = Dict(
+ "A" => v"3", "C" => v"2",
+ "D" => v"2", "E" => v"1", "F" => v"2"
+ )
@test resolve_tst(deps_data, reqs_data, want_data)
# require just F, lower version
reqs_data = Any[
- ["F", "1"]
+ ["F", "1"],
]
- want_data = Dict("A"=>v"2", "D"=>v"2",
- "E"=>v"1", "F"=>v"1")
+ want_data = Dict(
+ "A" => v"2", "D" => v"2",
+ "E" => v"1", "F" => v"1"
+ )
@test resolve_tst(deps_data, reqs_data, want_data)
# require F and B; force lower B version -> must bring down F, A, and D versions too
reqs_data = Any[
["F", "*"],
- ["B", "1"]
+ ["B", "1"],
]
- want_data = Dict("A"=>v"1", "B"=>v"1", "D"=>v"1",
- "E"=>v"1", "F"=>v"1")
+ want_data = Dict(
+ "A" => v"1", "B" => v"1", "D" => v"1",
+ "E" => v"1", "F" => v"1"
+ )
@test resolve_tst(deps_data, reqs_data, want_data)
# require F and D; force lower D version -> must not bring down F version, and bring in B
reqs_data = Any[
["F", "*"],
- ["D", "1"]
+ ["D", "1"],
]
- want_data = Dict("A"=>v"3", "B"=>v"2", "C"=>v"2",
- "D"=>v"1", "E"=>v"1", "F"=>v"2")
+ want_data = Dict(
+ "A" => v"3", "B" => v"2", "C" => v"2",
+ "D" => v"1", "E" => v"1", "F" => v"2"
+ )
@test resolve_tst(deps_data, reqs_data, want_data)
# require F and C; force lower C version -> must bring down F and A versions
reqs_data = Any[
["F", "*"],
- ["C", "1"]
+ ["C", "1"],
]
- want_data = Dict("A"=>v"2", "C"=>v"1",
- "D"=>v"2", "E"=>v"1", "F"=>v"1")
+ want_data = Dict(
+ "A" => v"2", "C" => v"1",
+ "D" => v"2", "E" => v"1", "F" => v"1"
+ )
@test resolve_tst(deps_data, reqs_data, want_data)
@@ -578,16 +608,16 @@ end
["D", "*"],
]
want_data = Dict(
- "A"=>v"1",
- "B"=>v"2",
- "C"=>v"2",
- "D"=>v"2",
- "Y"=>v"0.2.2",
- "X"=>v"0.2",
- "F"=>v"1",
- "G"=>v"2",
- "H"=>v"1",
- "I"=>v"1",
+ "A" => v"1",
+ "B" => v"2",
+ "C" => v"2",
+ "D" => v"2",
+ "Y" => v"0.2.2",
+ "X" => v"0.2",
+ "F" => v"1",
+ "G" => v"2",
+ "H" => v"1",
+ "I" => v"1",
)
@test resolve_tst(deps_data, reqs_data, want_data)
@@ -596,13 +626,13 @@ end
["D", "*"],
]
want_data = Dict(
- "B"=>v"2",
- "C"=>v"2",
- "D"=>v"2",
- "F"=>v"1",
- "G"=>v"2",
- "H"=>v"1",
- "I"=>v"1",
+ "B" => v"2",
+ "C" => v"2",
+ "D" => v"2",
+ "F" => v"1",
+ "G" => v"2",
+ "H" => v"1",
+ "I" => v"1",
)
@test resolve_tst(deps_data, reqs_data, want_data)
@@ -611,9 +641,9 @@ end
["A", "*"],
]
want_data = Dict(
- "A"=>v"1",
- "Y"=>v"0.2.2",
- "X"=>v"0.2",
+ "A" => v"1",
+ "Y" => v"0.2.2",
+ "X" => v"0.2",
)
@test resolve_tst(deps_data, reqs_data, want_data)
@@ -621,19 +651,19 @@ end
reqs_data = Any[
["A", "*"],
["D", "*"],
- ["Y", "0.2.1"]
+ ["Y", "0.2.1"],
]
want_data = Dict(
- "A"=>v"1",
- "B"=>v"1",
- "C"=>v"2",
- "D"=>v"2",
- "Y"=>v"0.2.1",
- "X"=>v"0.2",
- "F"=>v"1",
- "G"=>v"2",
- "H"=>v"1",
- "I"=>v"1",
+ "A" => v"1",
+ "B" => v"1",
+ "C" => v"2",
+ "D" => v"2",
+ "Y" => v"0.2.1",
+ "X" => v"0.2",
+ "F" => v"1",
+ "G" => v"2",
+ "H" => v"1",
+ "I" => v"1",
)
@test resolve_tst(deps_data, reqs_data, want_data)
@@ -674,7 +704,7 @@ end
@testset "realistic" begin
tmp = mktempdir()
- Pkg.PlatformEngines.unpack(joinpath(@__DIR__, "resolvedata.tar.gz"), tmp; verbose=false)
+ Pkg.PlatformEngines.unpack(joinpath(@__DIR__, "resolvedata.tar.gz"), tmp; verbose = false)
VERBOSE && @info("SCHEME REALISTIC 1")
## DEPENDENCY SCHEME 15: A REALISTIC EXAMPLE
@@ -711,12 +741,12 @@ end
include(joinpath(tmp, "resolvedata4.jl"))
@test sanity_tst(ResolveData4.deps_data, ResolveData4.problematic_data)
- withenv("JULIA_PKG_RESOLVE_MAX_TIME"=>10) do
+ withenv("JULIA_PKG_RESOLVE_MAX_TIME" => 10) do
@test_throws ResolverError resolve_tst(ResolveData4.deps_data, ResolveData4.reqs_data, ResolveData4.want_data)
end
- withenv("JULIA_PKG_RESOLVE_MAX_TIME"=>1e-5) do
+ withenv("JULIA_PKG_RESOLVE_MAX_TIME" => 1.0e-5) do
# this test may fail if graph preprocessing or the greedy solver get better
- @test_throws ResolverTimeoutError resolve_tst(ResolveData4.deps_data, ResolveData4.reqs_data, ResolveData4.want_data; validate_versions=false)
+ @test_throws ResolverTimeoutError resolve_tst(ResolveData4.deps_data, ResolveData4.reqs_data, ResolveData4.want_data; validate_versions = false)
end
end
@@ -726,68 +756,21 @@ end
## DEPENDENCY SCHEME 19: A NASTY CASE
include("NastyGenerator.jl")
- deps_data, reqs_data, want_data, problematic_data = NastyGenerator.generate_nasty(5, 20, q=20, d=4, sat=true)
+ deps_data, reqs_data, want_data, problematic_data = NastyGenerator.generate_nasty(5, 20, q = 20, d = 4, sat = true)
@test sanity_tst(deps_data, problematic_data)
@test resolve_tst(deps_data, reqs_data, want_data)
- deps_data, reqs_data, want_data, problematic_data = NastyGenerator.generate_nasty(5, 20, q=20, d=4, sat=false)
+ deps_data, reqs_data, want_data, problematic_data = NastyGenerator.generate_nasty(5, 20, q = 20, d = 4, sat = false)
@test sanity_tst(deps_data, problematic_data)
@test_throws ResolverError resolve_tst(deps_data, reqs_data)
end
-@testset "Resolving for another version of Julia" begin
- HistoricalStdlibVersions.register!()
- temp_pkg_dir() do dir
- function find_by_name(versions, name)
- idx = findfirst(p -> p.name == name, versions)
- if idx === nothing
- return nothing
- end
- return versions[idx]
- end
-
- # First, we're going to resolve for specific versions of Julia, ensuring we get the right dep versions:
- Pkg.Registry.download_default_registries(Pkg.stdout_f())
- ctx = Pkg.Types.Context(;julia_version=v"1.5")
- versions, deps = Pkg.Operations._resolve(ctx.io, ctx.env, ctx.registries, [
- Pkg.Types.PackageSpec(name="MPFR_jll", uuid=Base.UUID("3a97d323-0669-5f0c-9066-3539efd106a3")),
- ], Pkg.Types.PRESERVE_TIERED, ctx.julia_version)
- gmp = find_by_name(versions, "GMP_jll")
- @test gmp !== nothing
- @test gmp.version.major == 6 && gmp.version.minor == 1
- ctx = Pkg.Types.Context(;julia_version=v"1.6")
- versions, deps = Pkg.Operations._resolve(ctx.io, ctx.env, ctx.registries, [
- Pkg.Types.PackageSpec(name="MPFR_jll", uuid=Base.UUID("3a97d323-0669-5f0c-9066-3539efd106a3")),
- ], Pkg.Types.PRESERVE_TIERED, ctx.julia_version)
- gmp = find_by_name(versions, "GMP_jll")
- @test gmp !== nothing
- @test gmp.version.major == 6 && gmp.version.minor == 2
-
- # We'll also test resolving an "impossible" manifest; one that requires two package versions that
- # are not both loadable by the same Julia:
- ctx = Pkg.Types.Context(;julia_version=nothing)
- versions, deps = Pkg.Operations._resolve(ctx.io, ctx.env, ctx.registries, [
- # This version of GMP only works on Julia v1.6
- Pkg.Types.PackageSpec(name="GMP_jll", uuid=Base.UUID("781609d7-10c4-51f6-84f2-b8444358ff6d"), version=v"6.2.0"),
- # This version of MPFR only works on Julia v1.5
- Pkg.Types.PackageSpec(name="MPFR_jll", uuid=Base.UUID("3a97d323-0669-5f0c-9066-3539efd106a3"), version=v"4.0.2"),
- ], Pkg.Types.PRESERVE_TIERED, ctx.julia_version)
- gmp = find_by_name(versions, "GMP_jll")
- @test gmp !== nothing
- @test gmp.version.major == 6 && gmp.version.minor == 2
- mpfr = find_by_name(versions, "MPFR_jll")
- @test mpfr !== nothing
- @test mpfr.version.major == 4 && mpfr.version.minor == 0
- end
- HistoricalStdlibVersions.unregister!()
-end
-
@testset "Stdlib resolve smoketest" begin
# All stdlibs should be installable and resolvable
temp_pkg_dir() do dir
- Pkg.activate(temp=true)
+ Pkg.activate(temp = true)
Pkg.add(map(x -> x.name, values(Pkg.Types.load_stdlib()))) # add all stdlibs
iob = IOBuffer()
Pkg.resolve(io = iob)
diff --git a/test/resolve_utils.jl b/test/resolve_utils.jl
index df10e71ec8..f0a152b56f 100644
--- a/test/resolve_utils.jl
+++ b/test/resolve_utils.jl
@@ -17,7 +17,7 @@ const VERBOSE = false
# auxiliary functions
const uuid_package = UUID("cfb74b52-ec16-5bb7-a574-95d9e393895e")
pkguuid(p::String) = uuid5(uuid_package, p)
-function storeuuid(p::String, uuid_to_name::Dict{UUID,String})
+function storeuuid(p::String, uuid_to_name::Dict{UUID, String})
uuid = p == "julia" ? Resolve.uuid_julia : pkguuid(p)
if haskey(uuid_to_name, uuid)
@assert uuid_to_name[uuid] == p
@@ -26,7 +26,7 @@ function storeuuid(p::String, uuid_to_name::Dict{UUID,String})
end
return uuid
end
-wantuuids(want_data) = Dict{UUID,VersionNumber}(pkguuid(p) => v for (p,v) in want_data)
+wantuuids(want_data) = Dict{UUID, VersionNumber}(pkguuid(p) => v for (p, v) in want_data)
"""
graph = graph_from_data(deps_data)
@@ -37,47 +37,47 @@ This states that the package "PkgName" with version `v"x.y.z"` depends on "Depen
specified compatibility information. The last entry of the array can optionally be `:weak`.
"""
function graph_from_data(deps_data)
- uuid_to_name = Dict{UUID,String}()
+ uuid_to_name = Dict{UUID, String}()
uuid(p) = storeuuid(p, uuid_to_name)
- fixed = Dict{UUID,Fixed}()
- all_compat = Dict{UUID,Dict{VersionNumber,Dict{UUID,VersionSpec}}}()
- all_compat_w = Dict{UUID,Dict{VersionNumber,Set{UUID}}}()
+ fixed = Dict{UUID, Fixed}()
+ all_compat = Dict{UUID, Dict{VersionNumber, Dict{UUID, VersionSpec}}}()
+ all_compat_w = Dict{UUID, Dict{VersionNumber, Set{UUID}}}()
- deps = Dict{String,Dict{VersionNumber,Dict{String,VersionSpec}}}()
- deps_w = Dict{String,Dict{VersionNumber,Set{String}}}()
+ deps = Dict{String, Dict{VersionNumber, Dict{String, VersionSpec}}}()
+ deps_w = Dict{String, Dict{VersionNumber, Set{String}}}()
for d in deps_data
p, vn, r = d[1], d[2], d[3:end]
if !haskey(deps, p)
- deps[p] = Dict{VersionNumber,Dict{String,VersionSpec}}()
+ deps[p] = Dict{VersionNumber, Dict{String, VersionSpec}}()
end
if !haskey(deps[p], vn)
- deps[p][vn] = Dict{String,VersionSpec}()
+ deps[p][vn] = Dict{String, VersionSpec}()
end
isempty(r) && continue
rp = r[1]
weak = length(r) > 1 && r[end] == :weak
- rvs = VersionSpec(r[2:(end-weak)]...)
+ rvs = VersionSpec(r[2:(end - weak)]...)
deps[p][vn][rp] = rvs
if weak
# same as push!(deps_w[p][vn], rp) but create keys as needed
- push!(get!(Set{String}, get!(Dict{VersionNumber,Set{String}}, deps_w, p), vn), rp)
+ push!(get!(Set{String}, get!(Dict{VersionNumber, Set{String}}, deps_w, p), vn), rp)
end
end
- for (p,preq) in deps
+ for (p, preq) in deps
u = uuid(p)
- deps_pkgs = Dict{String,Set{VersionNumber}}()
- for (vn,vreq) in deps[p], rp in keys(vreq)
+ deps_pkgs = Dict{String, Set{VersionNumber}}()
+ for (vn, vreq) in deps[p], rp in keys(vreq)
push!(get!(Set{VersionNumber}, deps_pkgs, rp), vn)
end
- all_compat[u] = Dict{VersionNumber,Dict{UUID,VersionSpec}}()
- for (vn,vreq) in preq
- all_compat[u][vn] = Dict{UUID,VersionSpec}()
- for (rp,rvs) in vreq
+ all_compat[u] = Dict{VersionNumber, Dict{UUID, VersionSpec}}()
+ for (vn, vreq) in preq
+ all_compat[u][vn] = Dict{UUID, VersionSpec}()
+ for (rp, rvs) in vreq
all_compat[u][vn][uuid(rp)] = rvs
# weak dependency?
if haskey(deps_w, p) && haskey(deps_w[p], vn) && (rp ∈ deps_w[p][vn])
# same as push!(all_compat_w[u][vn], uuid(rp)) but create keys as needed
- push!(get!(Set{UUID}, get!(Dict{VersionNumber,Set{UUID}}, all_compat_w, u), vn), uuid(rp))
+ push!(get!(Set{UUID}, get!(Dict{VersionNumber, Set{UUID}}, all_compat_w, u), vn), uuid(rp))
end
end
end
@@ -85,7 +85,7 @@ function graph_from_data(deps_data)
return Graph(all_compat, all_compat_w, uuid_to_name, Requires(), fixed, VERBOSE)
end
function reqs_from_data(reqs_data, graph::Graph)
- reqs = Dict{UUID,VersionSpec}()
+ reqs = Dict{UUID, VersionSpec}()
function uuid_check(p)
uuid = pkguuid(p)
@assert graph.data.uuid_to_name[uuid] == p
@@ -95,9 +95,9 @@ function reqs_from_data(reqs_data, graph::Graph)
p = uuid_check(r[1])
reqs[p] = VersionSpec(r[2:end])
end
- reqs
+ return reqs
end
-function sanity_tst(deps_data, expected_result; pkgs=[])
+function sanity_tst(deps_data, expected_result; pkgs = [])
if VERBOSE
println()
@info("sanity check")
@@ -109,9 +109,9 @@ function sanity_tst(deps_data, expected_result; pkgs=[])
result = sanity_check(graph, Set(pkguuid(p) for p in pkgs), VERBOSE)
length(result) == length(expected_result) || return false
- expected_result_uuid = [(id(p), vn) for (p,vn) in expected_result]
+ expected_result_uuid = [(id(p), vn) for (p, vn) in expected_result]
for r in result
- r ∈ expected_result_uuid || return false
+ r ∈ expected_result_uuid || return false
end
return true
end
@@ -133,14 +133,14 @@ function resolve_tst(deps_data, reqs_data, want_data = nothing; validate_version
id(u) = pkgID(u, graph)
wd = wantuuids(want_data)
if want ≠ wd
- for (u,vn) in want
+ for (u, vn) in want
if u ∉ keys(wd)
@info "resolver decided to install $(id(u)) (v$vn), package wasn't expected"
elseif vn ≠ wd[u]
@info "version mismatch for $(id(u)), resolver wants v$vn, expected v$(wd[u])"
end
end
- for (u,vn) in wd
+ for (u, vn) in wd
if u ∉ keys(want)
@info "was expecting the resolver to install $(id(u)) (v$vn)"
end
diff --git a/test/runtests.jl b/test/runtests.jl
index bb4a0b86e8..60b4c8180a 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -9,103 +9,95 @@ original_project = Base.active_project()
module PkgTestsInner
-original_wd = pwd()
+ original_wd = pwd()
-import Pkg
-using Test, Logging
+ import Pkg
+ using Test, Logging
-if realpath(dirname(dirname(Base.pathof(Pkg)))) != realpath(dirname(@__DIR__))
- @show dirname(dirname(Base.pathof(Pkg))) realpath(dirname(@__DIR__))
- error("The wrong Pkg is being tested")
-end
-
-ENV["JULIA_PKG_PRECOMPILE_AUTO"]=0
-ENV["HISTORICAL_STDLIB_VERSIONS_AUTO_REGISTER"]="false"
+ if realpath(dirname(dirname(Base.pathof(Pkg)))) != realpath(dirname(@__DIR__))
+ @show dirname(dirname(Base.pathof(Pkg))) realpath(dirname(@__DIR__))
+ error("The wrong Pkg is being tested")
+ end
-logdir = get(ENV, "JULIA_TEST_VERBOSE_LOGS_DIR", nothing)
-### Send all Pkg output to a file called Pkg.log
-islogging = logdir !== nothing
+ ENV["JULIA_PKG_PRECOMPILE_AUTO"] = 0
+ ENV["JULIA_PKG_DISALLOW_PKG_PRECOMPILATION"] = 1
-if islogging
- logfile = joinpath(logdir, "Pkg.log")
- Pkg.DEFAULT_IO[] = open(logfile, "a")
- @info "Pkg test output is being logged to file" logfile
-else
- Pkg.DEFAULT_IO[] = devnull # or stdout
-end
+ logdir = get(ENV, "JULIA_TEST_VERBOSE_LOGS_DIR", nothing)
+ ### Send all Pkg output to a file called Pkg.log
+ islogging = logdir !== nothing
-include("utils.jl")
-Logging.with_logger((islogging || Pkg.DEFAULT_IO[] == devnull) ? Logging.ConsoleLogger(Pkg.DEFAULT_IO[]) : Logging.current_logger()) do
-
- # Because julia CI doesn't run stdlib tests via `Pkg.test` test deps must be manually installed if missing
- if Base.find_package("HistoricalStdlibVersions") === nothing
- @debug "Installing HistoricalStdlibVersions for Pkg tests"
- iob = IOBuffer()
- Pkg.activate(; temp = true)
- try
- # Needed for custom julia version resolve tests
- # Don't use the toplevel PKg.add() command to avoid accidentally installing another copy of the registry
- spec = Pkg.PackageSpec(
- name="HistoricalStdlibVersions",
- url="https://github.com/JuliaPackaging/HistoricalStdlibVersions.jl",
- rev="5879c5f690795208481c60b904f4af4e8c1eeef8", #= version="2.0.0", =#
- uuid="6df8b67a-e8a0-4029-b4b7-ac196fe72102")
- Pkg.API.handle_package_input!(spec)
- Pkg.add(Pkg.API.Context(), [spec], io=iob)
- catch
- println(String(take!(iob)))
- rethrow()
- end
+ if islogging
+ logfile = joinpath(logdir, "Pkg.log")
+ Pkg.DEFAULT_IO[] = open(logfile, "a")
+ @info "Pkg test output is being logged to file" logfile
+ else
+ Pkg.DEFAULT_IO[] = devnull # or stdout
end
- @eval import HistoricalStdlibVersions
+ include("utils.jl")
+ Logging.with_logger((islogging || Pkg.DEFAULT_IO[] == devnull) ? Logging.ConsoleLogger(Pkg.DEFAULT_IO[]) : Logging.current_logger()) do
- if (server = Pkg.pkg_server()) !== nothing && Sys.which("curl") !== nothing
- s = read(`curl -sLI $(server)`, String);
- @info "Pkg Server metadata:\n$s"
- end
+ if (server = Pkg.pkg_server()) !== nothing && Sys.which("curl") !== nothing
+ s = read(`curl -sLI $(server)`, String)
+ @info "Pkg Server metadata:\n$s"
+ end
- Utils.check_init_reg()
-
- @testset "Pkg" begin
- try
- @testset "$f" for f in [
- "new.jl",
- "pkg.jl",
- "repl.jl",
- "api.jl",
- "registry.jl",
- "subdir.jl",
- "extensions.jl",
- "artifacts.jl",
- "binaryplatforms.jl",
- "platformengines.jl",
- "sandbox.jl",
- "resolve.jl",
- "misc.jl",
- "force_latest_compatible_version.jl",
- "manifests.jl",
- "project_manifest.jl",
- "sources.jl",
- "workspaces.jl"
- ]
- @info "==== Testing `test/$f`"
- flush(Pkg.DEFAULT_IO[])
- include(f)
+ Utils.check_init_reg()
+
+ test_files = [
+ "new.jl",
+ "pkg.jl",
+ "repl.jl",
+ "api.jl",
+ "registry.jl",
+ "subdir.jl",
+ "extensions.jl",
+ "artifacts.jl",
+ "binaryplatforms.jl",
+ "platformengines.jl",
+ "sandbox.jl",
+ "resolve.jl",
+ "misc.jl",
+ "force_latest_compatible_version.jl",
+ "manifests.jl",
+ "project_manifest.jl",
+ "sources.jl",
+ "workspaces.jl",
+ "apps.jl",
+ ]
+
+ # Only test these if the test deps are available (they aren't typically via `Base.runtests`)
+ HSV_pkgid = Base.PkgId(Base.UUID("6df8b67a-e8a0-4029-b4b7-ac196fe72102"), "HistoricalStdlibVersions")
+ if Base.locate_package(HSV_pkgid) !== nothing
+ push!(test_files, "historical_stdlib_version.jl")
+ end
+ Aqua_pkgid = Base.PkgId(Base.UUID("4c88cf16-eb10-579e-8560-4a9242c79595"), "Aqua")
+ if Base.locate_package(Aqua_pkgid) !== nothing
+ push!(test_files, "aqua.jl")
+ end
+
+ verbose = true
+ @testset "Pkg" verbose = verbose begin
+ Pkg.activate(; temp = true) # make sure we're in an active project and that it's clean
+ try
+ @testset "$f" verbose = verbose for f in test_files
+ @info "==== Testing `test/$f`"
+ flush(Pkg.DEFAULT_IO[])
+ include(f)
+ end
+ finally
+ islogging && close(Pkg.DEFAULT_IO[])
+ cd(original_wd)
end
- finally
- islogging && close(Pkg.DEFAULT_IO[])
- cd(original_wd)
end
end
-end
-if haskey(ENV, "CI")
- # if CI don't clean up as it will be slower than the runner filesystem reset
- empty!(Base.Filesystem.TEMP_CLEANUP)
-else
- @showtime Base.Filesystem.temp_cleanup_purge(force=true)
-end
+ if haskey(ENV, "CI")
+ # if CI don't clean up as it will be slower than the runner filesystem reset
+ empty!(Base.Filesystem.TEMP_CLEANUP)
+ else
+ @showtime Base.Filesystem.temp_cleanup_purge(force = true)
+ end
end # module
diff --git a/test/sandbox.jl b/test/sandbox.jl
index d06ceaea96..dc0e6dd959 100644
--- a/test/sandbox.jl
+++ b/test/sandbox.jl
@@ -13,145 +13,165 @@ using Pkg
using Preferences
using ..Utils
-test_test(fn, name; kwargs...) = Pkg.test(name; test_fn=fn, kwargs...)
-test_test(fn; kwargs...) = Pkg.test(;test_fn=fn, kwargs...)
+test_test(fn, name; kwargs...) = Pkg.test(name; test_fn = fn, kwargs...)
+test_test(fn; kwargs...) = Pkg.test(; test_fn = fn, kwargs...)
@testset "Basic `test` sandboxing" begin
# also indirectly checks that test `compat` is obeyed
- temp_pkg_dir() do project_path; mktempdir() do tmp
- copy_test_package(tmp, "SandboxFallback2")
- proj = joinpath(tmp, "SandboxFallback2")
- Pkg.activate(proj)
- withenv("JULIA_PROJECT" => proj) do; test_test("Unregistered") do
- json = get(Pkg.Types.Context().env.manifest, UUID("682c06a0-de6a-54ab-a142-c8b1cf79cde6"), nothing)
- @test json !== nothing
- @test json.version == v"0.20.0"
- # test that the active project is the tmp one even though
- # JULIA_PROJECT might be set
- @test !haskey(ENV, "JULIA_PROJECT")
- @test Base.active_project() != proj
- @test Base.LOAD_PATH[1] == "@"
- @test startswith(Base.active_project(), Base.LOAD_PATH[2])
- end end
- end end
+ temp_pkg_dir() do project_path
+ mktempdir() do tmp
+ copy_test_package(tmp, "SandboxFallback2")
+ proj = joinpath(tmp, "SandboxFallback2")
+ Pkg.activate(proj)
+ withenv("JULIA_PROJECT" => proj) do;
+ test_test("Unregistered") do
+ json = get(Pkg.Types.Context().env.manifest, UUID("682c06a0-de6a-54ab-a142-c8b1cf79cde6"), nothing)
+ @test json !== nothing
+ @test json.version == v"0.20.0"
+ # test that the active project is the tmp one even though
+ # JULIA_PROJECT might be set
+ @test !haskey(ENV, "JULIA_PROJECT")
+ @test Base.active_project() != proj
+ @test Base.LOAD_PATH[1] == "@"
+ @test startswith(Base.active_project(), Base.LOAD_PATH[2])
+ end
+ end
+ end
+ end
# test dependencies should be preserved, when possible
- temp_pkg_dir() do project_path; mktempdir() do tmp
- copy_test_package(tmp, "Sandbox_PreserveTestDeps")
- Pkg.activate(joinpath(tmp, "Sandbox_PreserveTestDeps"))
- test_test("Foo") do
- x = get(Pkg.Types.Context().env.manifest, UUID("7876af07-990d-54b4-ab0e-23690620f79a"), nothing)
- @test x !== nothing
- @test x.version == v"0.4.0"
+ temp_pkg_dir() do project_path
+ mktempdir() do tmp
+ copy_test_package(tmp, "Sandbox_PreserveTestDeps")
+ Pkg.activate(joinpath(tmp, "Sandbox_PreserveTestDeps"))
+ test_test("Foo") do
+ x = get(Pkg.Types.Context().env.manifest, UUID("7876af07-990d-54b4-ab0e-23690620f79a"), nothing)
+ @test x !== nothing
+ @test x.version == v"0.4.0"
+ end
end
- end end
+ end
end
@testset "Preferences sandboxing without test/Project.toml" begin
# Preferences should be copied over into sandbox
- temp_pkg_dir() do project_path; mktempdir() do tmp
- copy_test_package(tmp, "Sandbox_PreservePreferences")
- Pkg.activate(joinpath(tmp, "Sandbox_PreservePreferences"))
- test_test() do
- uuid = UUID("3872bf94-3adb-11e9-01dc-bf80c7641364")
- @test !Preferences.has_preference(uuid, "does_not_exist")
- @test Preferences.load_preference(uuid, "tree") == "birch"
- @test Preferences.load_preference(uuid, "default") === nothing
+ temp_pkg_dir() do project_path
+ mktempdir() do tmp
+ copy_test_package(tmp, "Sandbox_PreservePreferences")
+ Pkg.activate(joinpath(tmp, "Sandbox_PreservePreferences"))
+ test_test() do
+ uuid = UUID("3872bf94-3adb-11e9-01dc-bf80c7641364")
+ @test !Preferences.has_preference(uuid, "does_not_exist")
+ @test Preferences.load_preference(uuid, "tree") == "birch"
+ @test Preferences.load_preference(uuid, "default") === nothing
+ end
end
- end end
+ end
end
@testset "Preferences sandboxing with test/Project.toml" begin
# Preferences should be copied over into sandbox
- temp_pkg_dir() do project_path; mktempdir() do tmp
- copy_test_package(tmp, "Sandbox_PreservePreferences")
- spp_uuid = UUID("3872bf94-3adb-11e9-01dc-bf80c7641364")
- Pkg.activate(joinpath(tmp, "Sandbox_PreservePreferences"))
+ temp_pkg_dir() do project_path
+ mktempdir() do tmp
+ copy_test_package(tmp, "Sandbox_PreservePreferences")
+ spp_uuid = UUID("3872bf94-3adb-11e9-01dc-bf80c7641364")
+ Pkg.activate(joinpath(tmp, "Sandbox_PreservePreferences"))
- # Create fake test/Project.toml and test/LocalPreferences.toml
- open(joinpath(tmp, "Sandbox_PreservePreferences", "test", "Project.toml"), write=true) do io
- print(io, """
- [deps]
- Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
- """)
- end
- Preferences.set_preferences!(
- joinpath(tmp, "Sandbox_PreservePreferences", "test", "LocalPreferences.toml"),
- "Sandbox_PreservePreferences",
- "scent" => "juniper",
- )
-
- # This test should have a set of preferences that have nothing to do with those
- # exported within `Sandbox_PreservePreferences/Project.toml`
- test_test() do
- @test !Preferences.has_preference(spp_uuid, "does_not_exist")
- # Because we are testing with the project set as the active project, we inherit
- # preferences set in the SPP project
- @test Preferences.load_preference(spp_uuid, "tree") === "birch"
- @test Preferences.load_preference(spp_uuid, "scent") == "juniper"
- @test Preferences.load_preference(spp_uuid, "default") === nothing
- end
-
- # Test that `Pkg.test()` layers the test project onto the `LOAD_PATH`,
- # so that preferences set in the calling environment can leak through.
- mktempdir() do outer_layer
- # Create a fake project that references SPP
- open(joinpath(outer_layer, "Project.toml"), write=true) do io
- println(io, """
+ # Create fake test/Project.toml and test/LocalPreferences.toml
+ open(joinpath(tmp, "Sandbox_PreservePreferences", "test", "Project.toml"), write = true) do io
+ print(
+ io, """
[deps]
- Sandbox_PreservePreferences = "$(spp_uuid)"
-
- [preferences.Sandbox_PreservePreferences]
- tree = "pine"
- scent = "shadowed"
- """)
+ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+ """
+ )
end
+ Preferences.set_preferences!(
+ joinpath(tmp, "Sandbox_PreservePreferences", "test", "LocalPreferences.toml"),
+ "Sandbox_PreservePreferences",
+ "scent" => "juniper",
+ )
- # Use `/` on windows as well
- spp_path = joinpath(tmp, "Sandbox_PreservePreferences")
- if Sys.iswindows()
- spp_path = replace(spp_path, "\\" => "/")
- end
- open(joinpath(outer_layer, "Manifest.toml"), write=true) do io
- println(io, """
- [[Sandbox_PreservePreferences]]
- path = "$(spp_path)"
- uuid = "$(spp_uuid)"
- """)
+ # This test should have a set of preferences that have nothing to do with those
+ # exported within `Sandbox_PreservePreferences/Project.toml`
+ test_test() do
+ @test !Preferences.has_preference(spp_uuid, "does_not_exist")
+ # Because we are testing with the project set as the active project, we inherit
+ # preferences set in the SPP project
+ @test Preferences.load_preference(spp_uuid, "tree") === "birch"
+ @test Preferences.load_preference(spp_uuid, "scent") == "juniper"
+ @test Preferences.load_preference(spp_uuid, "default") === nothing
end
- Pkg.activate(outer_layer)
- test_test("Sandbox_PreservePreferences") do
- # The tree that leaks through is from the outer layer,
- # rather than the overall project
- @test Preferences.load_preference(spp_uuid, "tree") === "pine"
- # The scent is still the inner test preference, since that takes priority.
- @test Preferences.load_preference(spp_uuid, "scent") == "juniper"
+ # Test that `Pkg.test()` layers the test project onto the `LOAD_PATH`,
+ # so that preferences set in the calling environment can leak through.
+ mktempdir() do outer_layer
+ # Create a fake project that references SPP
+ open(joinpath(outer_layer, "Project.toml"), write = true) do io
+ println(
+ io, """
+ [deps]
+ Sandbox_PreservePreferences = "$(spp_uuid)"
+
+ [preferences.Sandbox_PreservePreferences]
+ tree = "pine"
+ scent = "shadowed"
+ """
+ )
+ end
+
+ # Use `/` on windows as well
+ spp_path = joinpath(tmp, "Sandbox_PreservePreferences")
+ if Sys.iswindows()
+ spp_path = replace(spp_path, "\\" => "/")
+ end
+ open(joinpath(outer_layer, "Manifest.toml"), write = true) do io
+ println(
+ io, """
+ [[Sandbox_PreservePreferences]]
+ path = "$(spp_path)"
+ uuid = "$(spp_uuid)"
+ """
+ )
+ end
+
+ Pkg.activate(outer_layer)
+ test_test("Sandbox_PreservePreferences") do
+ # The tree that leaks through is from the outer layer,
+ # rather than the overall project
+ @test Preferences.load_preference(spp_uuid, "tree") === "pine"
+ # The scent is still the inner test preference, since that takes priority.
+ @test Preferences.load_preference(spp_uuid, "scent") == "juniper"
+ end
end
end
- end end
+ end
end
@testset "Nested Preferences sandboxing" begin
# Preferences should be copied over into sandbox
- temp_pkg_dir() do project_path; mktempdir() do tmp
- copy_test_package(tmp, "Sandbox_PreservePreferences")
- Pkg.activate(joinpath(tmp, "Sandbox_PreservePreferences"))
- test_test("Foo") do
- uuid = UUID("48898bec-3adb-11e9-02a6-a164ba74aeae")
- @test !Preferences.has_preference(uuid, "does_not_exist")
- @test Preferences.load_preference(uuid, "toy") == "car"
- @test Preferences.load_preference(uuid, "tree") == "birch"
- @test Preferences.load_preference(uuid, "default") === nothing
+ temp_pkg_dir() do project_path
+ mktempdir() do tmp
+ copy_test_package(tmp, "Sandbox_PreservePreferences")
+ Pkg.activate(joinpath(tmp, "Sandbox_PreservePreferences"))
+ test_test("Foo") do
+ uuid = UUID("48898bec-3adb-11e9-02a6-a164ba74aeae")
+ @test !Preferences.has_preference(uuid, "does_not_exist")
+ @test Preferences.load_preference(uuid, "toy") == "car"
+ @test Preferences.load_preference(uuid, "tree") == "birch"
+ @test Preferences.load_preference(uuid, "default") === nothing
+ end
end
- end end
+ end
end
@testset "Basic `build` sandbox" begin
- temp_pkg_dir() do project_path; mktempdir() do tmp
- copy_test_package(tmp, "BasicSandbox")
- Pkg.activate(joinpath(tmp, "BasicSandbox"))
- Pkg.build()
- end end
+ temp_pkg_dir() do project_path
+ mktempdir() do tmp
+ copy_test_package(tmp, "BasicSandbox")
+ Pkg.activate(joinpath(tmp, "BasicSandbox"))
+ Pkg.build()
+ end
+ end
end
end # module
diff --git a/test/sources.jl b/test/sources.jl
index 311b203f00..4d6010ffa6 100644
--- a/test/sources.jl
+++ b/test/sources.jl
@@ -7,22 +7,21 @@ using ..Utils
temp_pkg_dir() do project_path
@testset "test Project.toml [sources]" begin
mktempdir() do dir
- path = abspath(joinpath(dirname(pathof(Pkg)), "../test", "test_packages", "WithSources"))
- cp(path, joinpath(dir, "WithSources"))
- cd(joinpath(dir, "WithSources")) do
+ path = copy_test_package(dir, "WithSources")
+ cd(path) do
with_current_env() do
Pkg.resolve()
@test !isempty(Pkg.project().sources["Example"])
- project_backup = cp("Project.toml", "Project.toml.bak"; force=true)
+ project_backup = cp("Project.toml", "Project.toml.bak"; force = true)
Pkg.free("Example")
@test !haskey(Pkg.project().sources, "Example")
- cp("Project.toml.bak", "Project.toml"; force=true)
- Pkg.add(; url="https://github.com/JuliaLang/Example.jl/", rev="78406c204b8")
+ cp("Project.toml.bak", "Project.toml"; force = true)
+ Pkg.add(; url = "https://github.com/JuliaLang/Example.jl/", rev = "78406c204b8")
@test Pkg.project().sources["Example"] == Dict("url" => "https://github.com/JuliaLang/Example.jl/", "rev" => "78406c204b8")
- cp("Project.toml.bak", "Project.toml"; force=true)
- cp("BadManifest.toml", "Manifest.toml"; force=true)
+ cp("Project.toml.bak", "Project.toml"; force = true)
+ cp("BadManifest.toml", "Manifest.toml"; force = true)
Pkg.resolve()
- @test Pkg.project().sources["Example"] == Dict("url" => "https://github.com/JuliaLang/Example.jl")
+ @test Pkg.project().sources["Example"] == Dict("rev" => "master", "url" => "https://github.com/JuliaLang/Example.jl")
@test Pkg.project().sources["LocalPkg"] == Dict("path" => "LocalPkg")
end
end
@@ -33,11 +32,23 @@ temp_pkg_dir() do project_path
end
end
+ cd(joinpath(dir, "WithSources", "TestMonorepo")) do
+ with_current_env() do
+ Pkg.test()
+ end
+ end
+
cd(joinpath(dir, "WithSources", "TestProject")) do
with_current_env() do
Pkg.test()
end
end
+
+ cd(joinpath(dir, "WithSources", "URLSourceInDevvedPackage")) do
+ with_current_env() do
+ Pkg.test()
+ end
+ end
end
end
end
diff --git a/test/subdir.jl b/test/subdir.jl
index cddf27992f..7fa9c57c69 100644
--- a/test/subdir.jl
+++ b/test/subdir.jl
@@ -10,8 +10,12 @@ using ..Utils
# Derived from RegistryTools' gitcmd.
function gitcmd(path::AbstractString)
- Cmd(["git", "-C", path, "-c", "user.name=RegistratorTests",
- "-c", "user.email=ci@juliacomputing.com"])
+ return Cmd(
+ [
+ "git", "-C", path, "-c", "user.name=RegistratorTests",
+ "-c", "user.email=ci@juliacomputing.com",
+ ]
+ )
end
# Create a repository containing two packages in different
@@ -20,28 +24,36 @@ end
function setup_packages_repository(dir)
package_dir = joinpath(dir, "julia")
mkpath(joinpath(package_dir, "src"))
- write(joinpath(package_dir, "Project.toml"), """
+ write(
+ joinpath(package_dir, "Project.toml"), """
name = "Package"
uuid = "408b23ff-74ea-48c4-abc7-a671b41e2073"
version = "1.0.0"
[deps]
Dep = "d43cb7ef-9818-40d3-bb27-28fb4aa46cc5"
- """)
- write(joinpath(package_dir, "src", "Package.jl"), """
+ """
+ )
+ write(
+ joinpath(package_dir, "src", "Package.jl"), """
module Package end
- """)
+ """
+ )
dep_dir = joinpath(dir, "dependencies", "Dep")
mkpath(joinpath(dep_dir, "src"))
- write(joinpath(dep_dir, "Project.toml"), """
+ write(
+ joinpath(dep_dir, "Project.toml"), """
name = "Dep"
uuid = "d43cb7ef-9818-40d3-bb27-28fb4aa46cc5"
version = "1.0.0"
- """)
- write(joinpath(dep_dir, "src", "Dep.jl"), """
+ """
+ )
+ write(
+ joinpath(dep_dir, "src", "Dep.jl"), """
module Dep end
- """)
+ """
+ )
git = gitcmd(dir)
run(pipeline(`$git init -q`, stdout = stdout_f(), stderr = stderr_f()))
@@ -70,45 +82,57 @@ function setup_registry(dir, packages_dir_url, package_tree_hash, dep_tree_hash)
dep_path = joinpath(dir, "D", "Dep")
mkpath(package_path)
mkpath(dep_path)
- write(joinpath(dir, "Registry.toml"), """
+ write(
+ joinpath(dir, "Registry.toml"), """
name = "Registry"
uuid = "cade28e2-3b52-4f58-aeba-0b1386f9894b"
repo = "https://github.com"
[packages]
408b23ff-74ea-48c4-abc7-a671b41e2073 = { name = "Package", path = "P/Package" }
d43cb7ef-9818-40d3-bb27-28fb4aa46cc5 = { name = "Dep", path = "D/Dep" }
- """)
- write(joinpath(package_path, "Package.toml"), """
+ """
+ )
+ write(
+ joinpath(package_path, "Package.toml"), """
name = "Package"
uuid = "408b23ff-74ea-48c4-abc7-a671b41e2073"
repo = "$(packages_dir_url)"
subdir = "julia"
- """)
- write(joinpath(package_path, "Versions.toml"), """
+ """
+ )
+ write(
+ joinpath(package_path, "Versions.toml"), """
["1.0.0"]
git-tree-sha1 = "$(package_tree_hash)"
- """)
- write(joinpath(package_path, "Deps.toml"), """
+ """
+ )
+ write(
+ joinpath(package_path, "Deps.toml"), """
[1]
Dep = "d43cb7ef-9818-40d3-bb27-28fb4aa46cc5"
- """)
+ """
+ )
- write(joinpath(dep_path, "Package.toml"), """
+ write(
+ joinpath(dep_path, "Package.toml"), """
name = "Dep"
uuid = "d43cb7ef-9818-40d3-bb27-28fb4aa46cc5"
repo = "$(packages_dir_url)"
subdir = "dependencies/Dep"
- """)
- write(joinpath(dep_path, "Versions.toml"), """
+ """
+ )
+ write(
+ joinpath(dep_path, "Versions.toml"), """
["1.0.0"]
git-tree-sha1 = "$(dep_tree_hash)"
- """)
+ """
+ )
git = gitcmd(dir)
run(pipeline(`$git init -q`, stdout = stdout_f(), stderr = stderr_f()))
run(pipeline(`$git add .`, stdout = stdout_f(), stderr = stderr_f()))
run(pipeline(`$git commit -qm 'Create repository.'`, stdout = stdout_f(), stderr = stderr_f()))
- fix_default_branch(; dir)
+ return fix_default_branch(; dir)
end
# Some of our tests assume that the default branch name is `master`.
@@ -153,216 +177,222 @@ end
# with the `pkg"add ..."` calls. Just set it to something that
# exists.
cd(@__DIR__) do
- # Setup a repository with two packages and a registry where
- # these packages are registered.
- packages_dir = mktempdir()
- registry_dir = mktempdir()
- packages_dir_url = make_file_url(packages_dir)
- tree_hashes = setup_packages_repository(packages_dir)
- setup_registry(registry_dir, packages_dir_url, tree_hashes...)
- pkgstr("registry add $(registry_dir)")
- dep = (name="Dep", uuid=UUID("d43cb7ef-9818-40d3-bb27-28fb4aa46cc5"))
-
- # Ordinary add from registry.
- pkg"add Package"
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- pkg"add Dep"
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Add version from registry.
- pkg"add Package@1.0.0"
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- pkg"add Dep@1.0.0"
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Add branch from registry.
- pkg"add Package#master"
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
-
- # Test that adding a second time doesn't error (#3391)
- pkg"add Package#master"
- @test isinstalled("Package")
- pkg"rm Package"
-
- pkg"add Dep#master"
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Develop from registry.
- pkg"develop Package"
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
-
- # Test developing twice (#3391)
- pkg"develop Package"
- @test isinstalled("Package")
- pkg"rm Package"
-
- pkg"develop Dep"
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Add from path.
- Pkg.add(Pkg.PackageSpec(path=packages_dir, subdir="julia"))
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- Pkg.add(Pkg.PackageSpec(path=packages_dir, subdir="dependencies/Dep"))
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Add from path, REPL subdir syntax.
- pkgstr("add $(packages_dir):julia")
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- pkgstr("add $(packages_dir):dependencies/Dep")
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Add from path at branch.
- Pkg.add(Pkg.PackageSpec(path=packages_dir, subdir="julia", rev="master"))
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- Pkg.add(Pkg.PackageSpec(path=packages_dir, subdir="dependencies/Dep", rev="master"))
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Add from path at branch, REPL subdir syntax
- pkgstr("add $(packages_dir):julia#master")
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- pkgstr("add $(packages_dir):dependencies/Dep#master")
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Develop from path.
- Pkg.develop(Pkg.PackageSpec(path=packages_dir, subdir="julia"))
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- Pkg.develop(Pkg.PackageSpec(path=packages_dir, subdir="dependencies/Dep"))
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Develop from path, REPL subdir syntax.
- pkgstr("develop $(packages_dir):julia")
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- pkgstr("develop $(packages_dir):dependencies/Dep")
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Add from url.
- Pkg.add(Pkg.PackageSpec(url=packages_dir_url, subdir="julia"))
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- Pkg.add(Pkg.PackageSpec(url=packages_dir_url, subdir="dependencies/Dep"))
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Add from url, REPL subdir syntax.
- pkgstr("add $(packages_dir_url):julia")
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- pkgstr("add $(packages_dir_url):dependencies/Dep")
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Add from url at branch.
- Pkg.add(Pkg.PackageSpec(url=packages_dir_url, subdir="julia",
- rev="master"))
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- Pkg.add(Pkg.PackageSpec(url=packages_dir_url, subdir="dependencies/Dep", rev="master"))
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Add from url at branch, REPL subdir syntax.
- pkgstr("add $(packages_dir_url):julia#master")
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- pkgstr("add $(packages_dir_url):dependencies/Dep#master")
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Develop from url.
- Pkg.develop(Pkg.PackageSpec(url=packages_dir_url, subdir="julia"))
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- Pkg.develop(Pkg.PackageSpec(url=packages_dir_url, subdir="dependencies/Dep"))
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
-
- # Develop from url, REPL subdir syntax.
- pkgstr("develop $(packages_dir_url):julia")
- @test isinstalled("Package")
- @test !isinstalled("Dep")
- @test isinstalled(dep)
- pkg"rm Package"
-
- pkgstr("develop $(packages_dir_url):dependencies/Dep")
- @test !isinstalled("Package")
- @test isinstalled("Dep")
- pkg"rm Dep"
+ # Setup a repository with two packages and a registry where
+ # these packages are registered.
+ packages_dir = mktempdir()
+ registry_dir = mktempdir()
+ packages_dir_url = make_file_url(packages_dir)
+ tree_hashes = setup_packages_repository(packages_dir)
+ setup_registry(registry_dir, packages_dir_url, tree_hashes...)
+ pkgstr("registry add $(registry_dir)")
+ dep = (name = "Dep", uuid = UUID("d43cb7ef-9818-40d3-bb27-28fb4aa46cc5"))
+
+ # Ordinary add from registry.
+ pkg"add Package"
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ pkg"add Dep"
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Add version from registry.
+ pkg"add Package@1.0.0"
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ pkg"add Dep@1.0.0"
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Add branch from registry.
+ pkg"add Package#master"
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+
+ # Test that adding a second time doesn't error (#3391)
+ pkg"add Package#master"
+ @test isinstalled("Package")
+ pkg"rm Package"
+
+ pkg"add Dep#master"
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Develop from registry.
+ pkg"develop Package"
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+
+ # Test developing twice (#3391)
+ pkg"develop Package"
+ @test isinstalled("Package")
+ pkg"rm Package"
+
+ pkg"develop Dep"
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Add from path.
+ Pkg.add(Pkg.PackageSpec(path = packages_dir, subdir = "julia"))
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ Pkg.add(Pkg.PackageSpec(path = packages_dir, subdir = "dependencies/Dep"))
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Add from path, REPL subdir syntax.
+ pkgstr("add $(packages_dir):julia")
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ pkgstr("add $(packages_dir):dependencies/Dep")
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"dev Dep" # 4269
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Add from path at branch.
+ Pkg.add(Pkg.PackageSpec(path = packages_dir, subdir = "julia", rev = "master"))
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ Pkg.add(Pkg.PackageSpec(path = packages_dir, subdir = "dependencies/Dep", rev = "master"))
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Add from path at branch, REPL subdir syntax
+ pkgstr("add $(packages_dir)#master:julia")
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ pkgstr("add $(packages_dir)#master:dependencies/Dep")
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Develop from path.
+ Pkg.develop(Pkg.PackageSpec(path = packages_dir, subdir = "julia"))
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ Pkg.develop(Pkg.PackageSpec(path = packages_dir, subdir = "dependencies/Dep"))
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Develop from path, REPL subdir syntax.
+ pkgstr("develop $(packages_dir):julia")
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ pkgstr("develop $(packages_dir):dependencies/Dep")
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Add from url.
+ Pkg.add(Pkg.PackageSpec(url = packages_dir_url, subdir = "julia"))
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ Pkg.add(Pkg.PackageSpec(url = packages_dir_url, subdir = "dependencies/Dep"))
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Add from url, REPL subdir syntax.
+ pkgstr("add $(packages_dir_url):julia")
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ pkgstr("add $(packages_dir_url):dependencies/Dep")
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Add from url at branch.
+ Pkg.add(
+ Pkg.PackageSpec(
+ url = packages_dir_url, subdir = "julia",
+ rev = "master"
+ )
+ )
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ Pkg.add(Pkg.PackageSpec(url = packages_dir_url, subdir = "dependencies/Dep", rev = "master"))
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Add from url at branch, REPL subdir syntax.
+ pkgstr("add $(packages_dir_url)#master:julia")
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ pkgstr("add $(packages_dir_url)#master:dependencies/Dep")
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Develop from url.
+ Pkg.develop(Pkg.PackageSpec(url = packages_dir_url, subdir = "julia"))
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ Pkg.develop(Pkg.PackageSpec(url = packages_dir_url, subdir = "dependencies/Dep"))
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
+
+ # Develop from url, REPL subdir syntax.
+ pkgstr("develop $(packages_dir_url):julia")
+ @test isinstalled("Package")
+ @test !isinstalled("Dep")
+ @test isinstalled(dep)
+ pkg"rm Package"
+
+ pkgstr("develop $(packages_dir_url):dependencies/Dep")
+ @test !isinstalled("Package")
+ @test isinstalled("Dep")
+ pkg"rm Dep"
end #cd
end
end
diff --git a/test/test_packages/.gitignore b/test/test_packages/.gitignore
new file mode 100644
index 0000000000..3d68ab37d6
--- /dev/null
+++ b/test/test_packages/.gitignore
@@ -0,0 +1,2 @@
+Manifest.toml
+!AllowReresolveTest/Manifest.toml
diff --git a/test/test_packages/AllowReresolveTest/Manifest.toml b/test/test_packages/AllowReresolveTest/Manifest.toml
new file mode 100644
index 0000000000..518f8d04bc
--- /dev/null
+++ b/test/test_packages/AllowReresolveTest/Manifest.toml
@@ -0,0 +1,62 @@
+# This file is machine-generated - editing it directly is not advised
+
+julia_version = "1.13.0-DEV"
+manifest_format = "2.0"
+project_hash = "a100b4eee2a8dd47230a6724ae4de850bddbb7a5"
+
+[[deps.AllowReresolveTest]]
+deps = ["Example"]
+path = "."
+uuid = "12345678-1234-1234-1234-123456789abc"
+version = "0.1.0"
+
+[[deps.Base64]]
+uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f"
+version = "1.11.0"
+
+[[deps.Example]]
+deps = ["Test"]
+git-tree-sha1 = "8eb7b4d4ca487caade9ba3e85932e28ce6d6e1f8"
+uuid = "7876af07-990d-54b4-ab0e-23690620f79a"
+version = "0.5.1"
+
+[[deps.InteractiveUtils]]
+deps = ["Markdown"]
+uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
+version = "1.11.0"
+
+[[deps.JuliaSyntaxHighlighting]]
+deps = ["StyledStrings"]
+uuid = "ac6e5ff7-fb65-4e79-a425-ec3bc9c03011"
+version = "1.12.0"
+
+[[deps.Logging]]
+uuid = "56ddb016-857b-54e1-b83d-db4d58db5568"
+version = "1.11.0"
+
+[[deps.Markdown]]
+deps = ["Base64", "JuliaSyntaxHighlighting", "StyledStrings"]
+uuid = "d6f4376e-aef5-505a-96c1-9c027394607a"
+version = "1.11.0"
+
+[[deps.Random]]
+deps = ["SHA"]
+uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
+version = "1.11.0"
+
+[[deps.SHA]]
+uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce"
+version = "0.7.0"
+
+[[deps.Serialization]]
+uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b"
+version = "1.11.0"
+
+[[deps.StyledStrings]]
+uuid = "f489334b-da3d-4c2e-b8f0-e476e12c162b"
+version = "1.11.0"
+
+[[deps.Test]]
+deps = ["InteractiveUtils", "Logging", "Random", "Serialization"]
+uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+version = "1.11.0"
diff --git a/test/test_packages/AllowReresolveTest/Project.toml b/test/test_packages/AllowReresolveTest/Project.toml
new file mode 100644
index 0000000000..643237b7b5
--- /dev/null
+++ b/test/test_packages/AllowReresolveTest/Project.toml
@@ -0,0 +1,16 @@
+name = "AllowReresolveTest"
+uuid = "12345678-1234-1234-1234-123456789abc"
+version = "0.1.0"
+
+[deps]
+Example = "7876af07-990d-54b4-ab0e-23690620f79a"
+
+[compat]
+Example = "0.5"
+
+[extras]
+Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+
+[targets]
+test = ["Test"]
+build = ["Test"]
diff --git a/test/test_packages/AllowReresolveTest/deps/build.jl b/test/test_packages/AllowReresolveTest/deps/build.jl
new file mode 100644
index 0000000000..28e53db871
--- /dev/null
+++ b/test/test_packages/AllowReresolveTest/deps/build.jl
@@ -0,0 +1,3 @@
+# Build script for AllowReresolveTest
+using Test
+println("Build completed successfully!")
diff --git a/test/test_packages/AllowReresolveTest/src/AllowReresolveTest.jl b/test/test_packages/AllowReresolveTest/src/AllowReresolveTest.jl
new file mode 100644
index 0000000000..e549c3a22e
--- /dev/null
+++ b/test/test_packages/AllowReresolveTest/src/AllowReresolveTest.jl
@@ -0,0 +1,7 @@
+module AllowReresolveTest
+
+import Example
+
+greet() = "Hello from AllowReresolveTest using Example!"
+
+end
diff --git a/test/test_packages/AllowReresolveTest/test/runtests.jl b/test/test_packages/AllowReresolveTest/test/runtests.jl
new file mode 100644
index 0000000000..a1c953c162
--- /dev/null
+++ b/test/test_packages/AllowReresolveTest/test/runtests.jl
@@ -0,0 +1,6 @@
+using Test
+using AllowReresolveTest
+
+@testset "AllowReresolveTest.jl" begin
+ @test AllowReresolveTest.greet() == "Hello from AllowReresolveTest using Example!"
+end
diff --git a/test/test_packages/ArtifactInstallation/Artifacts.toml b/test/test_packages/ArtifactInstallation/Artifacts.toml
index 798e65c7bd..e32e4c7d56 100644
--- a/test/test_packages/ArtifactInstallation/Artifacts.toml
+++ b/test/test_packages/ArtifactInstallation/Artifacts.toml
@@ -147,16 +147,16 @@ git-tree-sha1 = "43563e7631a7eafae1f9f8d9d332e3de44ad7239"
lazy = true
[[socrates.download]]
-url = "https://github.com/staticfloat/small_bin/raw/master/socrates.tar.gz"
+url = "https://github.com/staticfloat/small_bin/raw/91f3ecf327d1de943fe076657833252791ba9f60/socrates.tar.gz"
sha256 = "e65d2f13f2085f2c279830e863292312a72930fee5ba3c792b14c33ce5c5cc58"
[[socrates.download]]
-url = "https://github.com/staticfloat/small_bin/raw/master/socrates.tar.bz2"
+url = "https://github.com/staticfloat/small_bin/raw/91f3ecf327d1de943fe076657833252791ba9f60/socrates.tar.bz2"
sha256 = "13fc17b97be41763b02cbb80e9d048302cec3bd3d446c2ed6e8210bddcd3ac76"
[collapse_the_symlink]
git-tree-sha1 = "69a468bd51751f4ed7eda31c240e775df06d6ee6"
[[collapse_the_symlink.download]]
-url = "https://github.com/staticfloat/small_bin/raw/master/collapse_the_symlink/collapse_the_symlink.tar.gz"
+url = "https://github.com/staticfloat/small_bin/raw/91f3ecf327d1de943fe076657833252791ba9f60/collapse_the_symlink/collapse_the_symlink.tar.gz"
sha256 = "956c1201405f64d3465cc28cb0dec9d63c11a08cad28c381e13bb22e1fc469d3"
diff --git a/test/test_packages/ArtifactInstallation/src/ArtifactInstallation.jl b/test/test_packages/ArtifactInstallation/src/ArtifactInstallation.jl
index e5ee4a6fc3..062ec1c065 100644
--- a/test/test_packages/ArtifactInstallation/src/ArtifactInstallation.jl
+++ b/test/test_packages/ArtifactInstallation/src/ArtifactInstallation.jl
@@ -17,10 +17,10 @@ function do_test()
# Test that we can use a variable, not just a literal:
hello_world = "HelloWorldC"
hello_world_exe = joinpath(@artifact_str(hello_world), "bin", "hello_world")
- if Sys.iswindows()
+ if Sys.iswindows()
hello_world_exe = "$(hello_world_exe).exe"
end
- @test isfile(hello_world_exe)
+ return @test isfile(hello_world_exe)
end
end
diff --git a/test/test_packages/ArtifactOverrideLoading/Artifacts.toml b/test/test_packages/ArtifactOverrideLoading/Artifacts.toml
index faa5a53769..c06a4e2b32 100644
--- a/test/test_packages/ArtifactOverrideLoading/Artifacts.toml
+++ b/test/test_packages/ArtifactOverrideLoading/Artifacts.toml
@@ -3,4 +3,3 @@ git-tree-sha1 = "0000000000000000000000000000000000000000"
[barty]
git-tree-sha1 = "1111111111111111111111111111111111111111"
-
diff --git a/test/test_packages/ArtifactOverrideLoading/src/ArtifactOverrideLoading.jl b/test/test_packages/ArtifactOverrideLoading/src/ArtifactOverrideLoading.jl
index f541bd0068..63eb5b6e01 100644
--- a/test/test_packages/ArtifactOverrideLoading/src/ArtifactOverrideLoading.jl
+++ b/test/test_packages/ArtifactOverrideLoading/src/ArtifactOverrideLoading.jl
@@ -1,10 +1,10 @@
__precompile__(false)
module ArtifactOverrideLoading
-using Artifacts
-export arty_path, barty_path
+ using Artifacts
+ export arty_path, barty_path
-# These will fail (get set to `nothing`) unless they get redirected
-const arty_path = artifact"arty"
-const barty_path = artifact"barty"
+ # These will fail (get set to `nothing`) unless they get redirected
+ const arty_path = artifact"arty"
+ const barty_path = artifact"barty"
end # module
diff --git a/test/test_packages/ArtifactTOMLSearch/julia_artifacts_test/pkg.jl b/test/test_packages/ArtifactTOMLSearch/julia_artifacts_test/pkg.jl
index 051e436fe6..bb7279448a 100644
--- a/test/test_packages/ArtifactTOMLSearch/julia_artifacts_test/pkg.jl
+++ b/test/test_packages/ArtifactTOMLSearch/julia_artifacts_test/pkg.jl
@@ -9,4 +9,4 @@ function do_test()
return isfile(joinpath(arty, "bin", "socrates"))
end
-end # module
\ No newline at end of file
+end # module
diff --git a/test/test_packages/ArtifactTOMLSearch/pkg.jl b/test/test_packages/ArtifactTOMLSearch/pkg.jl
index 051e436fe6..bb7279448a 100644
--- a/test/test_packages/ArtifactTOMLSearch/pkg.jl
+++ b/test/test_packages/ArtifactTOMLSearch/pkg.jl
@@ -9,4 +9,4 @@ function do_test()
return isfile(joinpath(arty, "bin", "socrates"))
end
-end # module
\ No newline at end of file
+end # module
diff --git a/test/test_packages/ArtifactTOMLSearch/sub_module/pkg.jl b/test/test_packages/ArtifactTOMLSearch/sub_module/pkg.jl
index e5c4db6359..c2cabcd328 100644
--- a/test/test_packages/ArtifactTOMLSearch/sub_module/pkg.jl
+++ b/test/test_packages/ArtifactTOMLSearch/sub_module/pkg.jl
@@ -3,4 +3,4 @@ using Pkg.Artifacts
# All this module will do is reference its `arty` Artifact.
arty = artifact"arty"
-end
\ No newline at end of file
+end
diff --git a/test/test_packages/ArtifactTOMLSearch/sub_package/pkg.jl b/test/test_packages/ArtifactTOMLSearch/sub_package/pkg.jl
index 642d03d92d..898a5ac95a 100644
--- a/test/test_packages/ArtifactTOMLSearch/sub_package/pkg.jl
+++ b/test/test_packages/ArtifactTOMLSearch/sub_package/pkg.jl
@@ -3,4 +3,4 @@ using Pkg.Artifacts
# All this module will do is reference its `arty` Artifact.
arty = artifact"arty"
-end
\ No newline at end of file
+end
diff --git a/test/test_packages/BigProject/RecursiveDep/Project.toml b/test/test_packages/BigProject/RecursiveDep/Project.toml
index c6e7f49c49..3eb5284036 100644
--- a/test/test_packages/BigProject/RecursiveDep/Project.toml
+++ b/test/test_packages/BigProject/RecursiveDep/Project.toml
@@ -3,4 +3,4 @@ uuid = "f5db5478-804a-11e8-3275-3180cf89cd91"
version = "0.1.0"
[deps]
-RecursiveDep2 = "63fe803a-804b-11e8-2b51-3d06555b755a"
\ No newline at end of file
+RecursiveDep2 = "63fe803a-804b-11e8-2b51-3d06555b755a"
diff --git a/test/test_packages/BigProject/RecursiveDep/src/RecursiveDep.jl b/test/test_packages/BigProject/RecursiveDep/src/RecursiveDep.jl
index 209d0f6939..04e06c625d 100644
--- a/test/test_packages/BigProject/RecursiveDep/src/RecursiveDep.jl
+++ b/test/test_packages/BigProject/RecursiveDep/src/RecursiveDep.jl
@@ -4,4 +4,4 @@ module RecursiveDep
using RecursiveDep2
-end
\ No newline at end of file
+end
diff --git a/test/test_packages/BigProject/RecursiveDep2/Project.toml b/test/test_packages/BigProject/RecursiveDep2/Project.toml
index eca4098bf4..6a228c8984 100644
--- a/test/test_packages/BigProject/RecursiveDep2/Project.toml
+++ b/test/test_packages/BigProject/RecursiveDep2/Project.toml
@@ -1,3 +1,3 @@
name = "RecursiveDep2"
uuid = "63fe803a-804b-11e8-2b51-3d06555b755a"
-version = "0.1.0"
\ No newline at end of file
+version = "0.1.0"
diff --git a/test/test_packages/BigProject/RecursiveDep2/src/RecursiveDep2.jl b/test/test_packages/BigProject/RecursiveDep2/src/RecursiveDep2.jl
index ab97c773ed..fa1399704e 100644
--- a/test/test_packages/BigProject/RecursiveDep2/src/RecursiveDep2.jl
+++ b/test/test_packages/BigProject/RecursiveDep2/src/RecursiveDep2.jl
@@ -2,4 +2,4 @@
module RecursiveDep2
-end
\ No newline at end of file
+end
diff --git a/test/test_packages/BuildProjectFixedDeps/.gitignore b/test/test_packages/BuildProjectFixedDeps/.gitignore
index 387750f19f..ca97c0f7d8 100644
--- a/test/test_packages/BuildProjectFixedDeps/.gitignore
+++ b/test/test_packages/BuildProjectFixedDeps/.gitignore
@@ -1,2 +1,2 @@
deps/artifact
-deps/build.log
\ No newline at end of file
+deps/build.log
diff --git a/test/test_packages/ExtensionExamples/HasDepWithExtensions.jl/src/HasDepWithExtensions.jl b/test/test_packages/ExtensionExamples/HasDepWithExtensions.jl/src/HasDepWithExtensions.jl
index 1f4d5790e1..0ce3f4ab1f 100644
--- a/test/test_packages/ExtensionExamples/HasDepWithExtensions.jl/src/HasDepWithExtensions.jl
+++ b/test/test_packages/ExtensionExamples/HasDepWithExtensions.jl/src/HasDepWithExtensions.jl
@@ -15,7 +15,7 @@ function do_something()
HasExtensions.foo(OffsetArray(rand(Float64, 2), 0:1)) == 2 || error("Unexpected value")
# @info "Now do something with extended IndirectArray support"
- HasExtensions.foo(IndirectArray(rand(1:6, 32, 32), 1:6)) == 3 || error("Unexpected value")
+ return HasExtensions.foo(IndirectArray(rand(1:6, 32, 32), 1:6)) == 3 || error("Unexpected value")
end
end # module
diff --git a/test/test_packages/ExtensionExamples/HasExtensions.jl/ext/IndirectArraysExt.jl b/test/test_packages/ExtensionExamples/HasExtensions.jl/ext/IndirectArraysExt.jl
index 9f9611e533..b4c359b43c 100644
--- a/test/test_packages/ExtensionExamples/HasExtensions.jl/ext/IndirectArraysExt.jl
+++ b/test/test_packages/ExtensionExamples/HasExtensions.jl/ext/IndirectArraysExt.jl
@@ -8,7 +8,7 @@ function foo(::IndirectArray)
end
function __init__()
- HasExtensions.indirectarrays_loaded = true
+ return HasExtensions.indirectarrays_loaded = true
end
end
diff --git a/test/test_packages/ExtensionExamples/HasExtensions.jl/ext/OffsetArraysExt.jl b/test/test_packages/ExtensionExamples/HasExtensions.jl/ext/OffsetArraysExt.jl
index 7bdad0b352..03a784753c 100644
--- a/test/test_packages/ExtensionExamples/HasExtensions.jl/ext/OffsetArraysExt.jl
+++ b/test/test_packages/ExtensionExamples/HasExtensions.jl/ext/OffsetArraysExt.jl
@@ -8,7 +8,7 @@ function foo(::OffsetArray)
end
function __init__()
- HasExtensions.offsetarrays_loaded = true
+ return HasExtensions.offsetarrays_loaded = true
end
end
diff --git a/test/test_packages/Rot13.jl/Manifest.toml b/test/test_packages/Rot13.jl/Manifest.toml
new file mode 100644
index 0000000000..bbef702fb7
--- /dev/null
+++ b/test/test_packages/Rot13.jl/Manifest.toml
@@ -0,0 +1,10 @@
+# This file is machine-generated - editing it directly is not advised
+
+julia_version = "1.12.0-DEV"
+manifest_format = "2.0"
+project_hash = "2610b29b73f9f9432fb181a7f9f7c5c9e3de5557"
+
+[[deps.Rot13]]
+path = "."
+uuid = "43ef800a-eac4-47f4-949b-25107b932e8f"
+version = "0.1.0"
diff --git a/test/test_packages/Rot13.jl/Project.toml b/test/test_packages/Rot13.jl/Project.toml
new file mode 100644
index 0000000000..a1933ed8a2
--- /dev/null
+++ b/test/test_packages/Rot13.jl/Project.toml
@@ -0,0 +1,7 @@
+name = "Rot13"
+uuid = "43ef800a-eac4-47f4-949b-25107b932e8f"
+version = "0.1.0"
+
+[apps]
+juliarot13 = {}
+juliarot13cli = { submodule = "CLI" }
diff --git a/test/test_packages/Rot13.jl/src/CLI.jl b/test/test_packages/Rot13.jl/src/CLI.jl
new file mode 100644
index 0000000000..4d6a859264
--- /dev/null
+++ b/test/test_packages/Rot13.jl/src/CLI.jl
@@ -0,0 +1,18 @@
+module CLI
+
+using ..Rot13: rot13
+
+function (@main)(ARGS)
+ if length(ARGS) == 0
+ println("Usage: rot13cli ")
+ return 1
+ end
+
+ for arg in ARGS
+ # Add a prefix to distinguish from main module output
+ println("CLI: $(rot13(arg))")
+ end
+ return 0
+end
+
+end # module CLI
diff --git a/test/test_packages/Rot13.jl/src/Rot13.jl b/test/test_packages/Rot13.jl/src/Rot13.jl
new file mode 100644
index 0000000000..414201aa18
--- /dev/null
+++ b/test/test_packages/Rot13.jl/src/Rot13.jl
@@ -0,0 +1,19 @@
+module Rot13
+
+function rot13(c::Char)
+ shft = islowercase(c) ? 'a' : 'A'
+ return isletter(c) ? shft + (c - shft + 13) % 26 : c
+end
+
+rot13(str::AbstractString) = map(rot13, str)
+
+function (@main)(ARGS)
+ for arg in ARGS
+ println(rot13(arg))
+ end
+ return 0
+end
+
+include("CLI.jl")
+
+end # module Rot13
diff --git a/test/test_packages/Rot13.jl/src/Rot13_edited.jl b/test/test_packages/Rot13.jl/src/Rot13_edited.jl
new file mode 100644
index 0000000000..26de592056
--- /dev/null
+++ b/test/test_packages/Rot13.jl/src/Rot13_edited.jl
@@ -0,0 +1,7 @@
+module Rot13
+
+function (@main)(ARGS)
+ return println("Updated!")
+end
+
+end # module Rot13
diff --git a/test/test_packages/Sandbox_PreservePreferences/Project.toml b/test/test_packages/Sandbox_PreservePreferences/Project.toml
index fdef8266a3..2d979c2144 100644
--- a/test/test_packages/Sandbox_PreservePreferences/Project.toml
+++ b/test/test_packages/Sandbox_PreservePreferences/Project.toml
@@ -8,4 +8,4 @@ Example = "7876af07-990d-54b4-ab0e-23690620f79a"
Foo = "48898bec-3adb-11e9-02a6-a164ba74aeae"
[preferences]
-Sandbox_PreservePreferences.tree = "birch"
\ No newline at end of file
+Sandbox_PreservePreferences.tree = "birch"
diff --git a/test/test_packages/Sandbox_PreservePreferences/dev/Foo/Project.toml b/test/test_packages/Sandbox_PreservePreferences/dev/Foo/Project.toml
index d7ab2543f7..16b57b3a8e 100644
--- a/test/test_packages/Sandbox_PreservePreferences/dev/Foo/Project.toml
+++ b/test/test_packages/Sandbox_PreservePreferences/dev/Foo/Project.toml
@@ -8,4 +8,4 @@ Preferences = "21216c6a-2e73-6563-6e65-726566657250"
[preferences]
Foo.toy = "train"
Foo.tree = "oak"
-Foo.default = "default"
\ No newline at end of file
+Foo.default = "default"
diff --git a/test/test_packages/Sandbox_PreservePreferences/dev/Foo/src/Foo.jl b/test/test_packages/Sandbox_PreservePreferences/dev/Foo/src/Foo.jl
index 0cd72d4280..f1fd161198 100644
--- a/test/test_packages/Sandbox_PreservePreferences/dev/Foo/src/Foo.jl
+++ b/test/test_packages/Sandbox_PreservePreferences/dev/Foo/src/Foo.jl
@@ -2,7 +2,7 @@ module Foo
using Preferences
-set!(key, value) = @set_preferences!(key=>value)
+set!(key, value) = @set_preferences!(key => value)
get(key) = @load_preference(key)
end # module
diff --git a/test/test_packages/Sandbox_PreservePreferences/dev/Foo/test/Project.toml b/test/test_packages/Sandbox_PreservePreferences/dev/Foo/test/Project.toml
index ed0fb056c6..cc43ab0f6d 100644
--- a/test/test_packages/Sandbox_PreservePreferences/dev/Foo/test/Project.toml
+++ b/test/test_packages/Sandbox_PreservePreferences/dev/Foo/test/Project.toml
@@ -3,4 +3,4 @@ Preferences = "21216c6a-2e73-6563-6e65-726566657250"
Foo = "48898bec-3adb-11e9-02a6-a164ba74aeae"
[preferences]
-Foo.tree = "birch"
\ No newline at end of file
+Foo.tree = "birch"
diff --git a/test/test_packages/Sandbox_PreservePreferences/test/runtests.jl b/test/test_packages/Sandbox_PreservePreferences/test/runtests.jl
index e69de29bb2..1bb8bf6d7f 100644
--- a/test/test_packages/Sandbox_PreservePreferences/test/runtests.jl
+++ b/test/test_packages/Sandbox_PreservePreferences/test/runtests.jl
@@ -0,0 +1 @@
+# empty
diff --git a/test/test_packages/ShouldPreserveSemver/Manifest.toml b/test/test_packages/ShouldPreserveSemver/Manifest_MbedTLS.toml
similarity index 100%
rename from test/test_packages/ShouldPreserveSemver/Manifest.toml
rename to test/test_packages/ShouldPreserveSemver/Manifest_MbedTLS.toml
diff --git a/test/test_packages/ShouldPreserveSemver/Manifest_OpenSSL.toml b/test/test_packages/ShouldPreserveSemver/Manifest_OpenSSL.toml
new file mode 100644
index 0000000000..f1fe5150bd
--- /dev/null
+++ b/test/test_packages/ShouldPreserveSemver/Manifest_OpenSSL.toml
@@ -0,0 +1,258 @@
+# This file is machine-generated - editing it directly is not advised
+
+julia_version = "1.9.0-DEV"
+manifest_format = "2.0"
+project_hash = "9af0d7a4d60a77b1a42f518d7da50edc4261ffcb"
+
+[[deps.ArgTools]]
+uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f"
+version = "1.1.1"
+
+[[deps.Arpack]]
+deps = ["BinaryProvider", "Libdl", "LinearAlgebra"]
+git-tree-sha1 = "07a2c077bdd4b6d23a40342a8a108e2ee5e58ab6"
+uuid = "7d9fca2a-8960-54d3-9f78-7d1dccf2cb97"
+version = "0.3.1"
+
+[[deps.Artifacts]]
+uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33"
+
+[[deps.Base64]]
+uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f"
+
+[[deps.BinaryProvider]]
+deps = ["Libdl", "Logging", "SHA"]
+git-tree-sha1 = "c7361ce8a2129f20b0e05a89f7070820cfed6648"
+uuid = "b99e7846-7c00-51b0-8f62-c81ae34c0232"
+version = "0.5.6"
+
+[[deps.CSTParser]]
+deps = ["Tokenize"]
+git-tree-sha1 = "c69698c3d4a7255bc1b4bc2afc09f59db910243b"
+uuid = "00ebfdb7-1f24-5e51-bd34-a7502290713f"
+version = "0.6.2"
+
+[[deps.CodecZlib]]
+deps = ["BinaryProvider", "Libdl", "TranscodingStreams"]
+git-tree-sha1 = "05916673a2627dd91b4969ff8ba6941bc85a960e"
+uuid = "944b1d66-785c-5afd-91f1-9de20f533193"
+version = "0.6.0"
+
+[[deps.Compat]]
+deps = ["Base64", "Dates", "DelimitedFiles", "Distributed", "InteractiveUtils", "LibGit2", "Libdl", "LinearAlgebra", "Markdown", "Mmap", "Pkg", "Printf", "REPL", "Random", "Serialization", "SharedArrays", "Sockets", "SparseArrays", "Statistics", "Test", "UUIDs", "Unicode"]
+git-tree-sha1 = "84aa74986c5b9b898b0d1acaf3258741ee64754f"
+uuid = "34da2185-b29b-5c13-b0c7-acf172513d20"
+version = "2.1.0"
+
+[[deps.CompilerSupportLibraries_jll]]
+deps = ["Artifacts", "Libdl"]
+uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae"
+version = "0.5.2+0"
+
+[[deps.DataStructures]]
+deps = ["InteractiveUtils", "OrderedCollections"]
+git-tree-sha1 = "0809951a1774dc724da22d26e4289bbaab77809a"
+uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
+version = "0.17.0"
+
+[[deps.Dates]]
+deps = ["Printf"]
+uuid = "ade2ca70-3891-5945-98fb-dc099432e06a"
+
+[[deps.DelimitedFiles]]
+deps = ["Mmap"]
+git-tree-sha1 = "19b1417ff479c07e523fcbf2fd735a3fde3d1ab3"
+uuid = "8bb1440f-4735-579b-a4ab-409b98df4dab"
+version = "1.9.0"
+
+[[deps.Distributed]]
+deps = ["Random", "Serialization", "Sockets"]
+uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b"
+
+[[deps.Downloads]]
+deps = ["ArgTools", "FileWatching", "LibCURL", "NetworkOptions"]
+uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6"
+version = "1.6.0"
+
+[[deps.FileWatching]]
+uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee"
+
+[[deps.InteractiveUtils]]
+deps = ["Markdown"]
+uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
+
+[[deps.LibCURL]]
+deps = ["LibCURL_jll", "MozillaCACerts_jll"]
+uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21"
+version = "0.6.3"
+
+[[deps.LibCURL_jll]]
+deps = ["Artifacts", "LibSSH2_jll", "Libdl", "OpenSSL_jll", "Zlib_jll", "nghttp2_jll"]
+uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0"
+version = "8.9.1+0"
+
+[[deps.LibGit2]]
+deps = ["Base64", "NetworkOptions", "Printf", "SHA"]
+uuid = "76f85450-5226-5b5a-8eaa-529ad045b433"
+
+[[deps.LibSSH2_jll]]
+deps = ["Artifacts", "Libdl", "OpenSSL_jll"]
+uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8"
+version = "1.11.3+0"
+
+[[deps.Libdl]]
+uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb"
+
+[[deps.LightGraphs]]
+deps = ["Arpack", "Base64", "CodecZlib", "DataStructures", "DelimitedFiles", "Distributed", "LinearAlgebra", "Markdown", "Random", "SharedArrays", "SimpleTraits", "SparseArrays", "Statistics", "Test"]
+git-tree-sha1 = "e7e380a7c009019df1203bf400894aa04ee37ba0"
+uuid = "093fc24a-ae57-5d10-9952-331d41423f4d"
+version = "1.0.1"
+
+[[deps.LinearAlgebra]]
+deps = ["Libdl", "OpenBLAS_jll", "libblastrampoline_jll"]
+uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
+
+[[deps.Logging]]
+uuid = "56ddb016-857b-54e1-b83d-db4d58db5568"
+
+[[deps.MacroTools]]
+deps = ["CSTParser", "Compat", "DataStructures", "Test", "Tokenize"]
+git-tree-sha1 = "d6e9dedb8c92c3465575442da456aec15a89ff76"
+uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09"
+version = "0.5.1"
+
+[[deps.Markdown]]
+deps = ["Base64"]
+uuid = "d6f4376e-aef5-505a-96c1-9c027394607a"
+
+[[deps.OpenSSL_jll]]
+deps = ["Artifacts", "Libdl"]
+uuid = "458c3c95-2e84-50aa-8efc-19380b2a3a95"
+version = "3.0.15+1"
+
+[[deps.Mmap]]
+uuid = "a63ad114-7e13-5084-954f-fe012c677804"
+
+[[deps.MozillaCACerts_jll]]
+uuid = "14a3606d-f60d-562e-9121-12d972cd8159"
+version = "2024.11.26"
+
+[[deps.NetworkOptions]]
+uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908"
+version = "1.2.0"
+
+[[deps.OpenBLAS_jll]]
+deps = ["Artifacts", "CompilerSupportLibraries_jll", "Libdl"]
+uuid = "4536629a-c528-5b80-bd46-f80d51c5b363"
+version = "0.3.20+0"
+
+[[deps.OrderedCollections]]
+deps = ["Random", "Serialization", "Test"]
+git-tree-sha1 = "c4c13474d23c60d20a67b217f1d7f22a40edf8f1"
+uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d"
+version = "1.1.0"
+
+[[deps.Pkg]]
+deps = ["Artifacts", "Dates", "Downloads", "FileWatching", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "TOML", "Tar", "UUIDs", "p7zip_jll"]
+uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
+version = "1.8.0"
+
+[[deps.Printf]]
+deps = ["Unicode"]
+uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7"
+
+[[deps.REPL]]
+deps = ["InteractiveUtils", "Markdown", "Sockets", "Unicode"]
+uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb"
+
+[[deps.Random]]
+deps = ["SHA", "Serialization"]
+uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
+
+[[deps.SHA]]
+uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce"
+version = "0.7.0"
+
+[[deps.Serialization]]
+uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b"
+
+[[deps.SharedArrays]]
+deps = ["Distributed", "Mmap", "Random", "Serialization"]
+uuid = "1a1011a3-84de-559e-8e89-a11a2f7dc383"
+
+[[deps.SimpleTraits]]
+deps = ["InteractiveUtils", "MacroTools"]
+git-tree-sha1 = "05bbf4484b975782e5e54bb0750f21f7f2f66171"
+uuid = "699a6c99-e7fa-54fc-8d76-47d257e15c1d"
+version = "0.9.0"
+
+[[deps.Sockets]]
+uuid = "6462fe0b-24de-5631-8697-dd941f90decc"
+
+[[deps.SparseArrays]]
+deps = ["Libdl", "LinearAlgebra", "Random", "Serialization", "SuiteSparse_jll"]
+uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
+
+[[deps.Statistics]]
+deps = ["LinearAlgebra", "SparseArrays"]
+git-tree-sha1 = "83850190e0f902ae1673d63ae349fc2a36dc6afb"
+uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
+version = "1.11"
+
+[[deps.SuiteSparse_jll]]
+deps = ["Artifacts", "Libdl", "Pkg", "libblastrampoline_jll"]
+uuid = "bea87d4a-7f5b-5778-9afe-8cc45184846c"
+version = "5.10.1+0"
+
+[[deps.TOML]]
+deps = ["Dates"]
+uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76"
+version = "1.0.0"
+
+[[deps.Tar]]
+deps = ["ArgTools", "SHA"]
+uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e"
+version = "1.10.0"
+
+[[deps.Test]]
+deps = ["InteractiveUtils", "Logging", "Random", "Serialization"]
+uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+
+[[deps.Tokenize]]
+git-tree-sha1 = "dfcdbbfb2d0370716c815cbd6f8a364efb6f42cf"
+uuid = "0796e94c-ce3b-5d07-9a54-7f471281c624"
+version = "0.5.6"
+
+[[deps.TranscodingStreams]]
+deps = ["Random", "Test"]
+git-tree-sha1 = "7c53c35547de1c5b9d46a4797cf6d8253807108c"
+uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa"
+version = "0.9.5"
+
+[[deps.UUIDs]]
+deps = ["Random", "SHA"]
+uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
+
+[[deps.Unicode]]
+uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5"
+
+[[deps.Zlib_jll]]
+deps = ["Libdl"]
+uuid = "83775a58-1f1d-513f-b197-d71354ab007a"
+version = "1.2.12+3"
+
+[[deps.libblastrampoline_jll]]
+deps = ["Artifacts", "Libdl"]
+uuid = "8e850b90-86db-534c-a0d3-1478176c7d93"
+version = "5.1.1+0"
+
+[[deps.nghttp2_jll]]
+deps = ["Artifacts", "Libdl"]
+uuid = "8e850ede-7688-5339-a07c-302acd2aaf8d"
+version = "1.48.0+0"
+
+[[deps.p7zip_jll]]
+deps = ["Artifacts", "Libdl"]
+uuid = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0"
+version = "17.4.0+0"
diff --git a/test/test_packages/TestDepTrackingPath/test/runtests.jl b/test/test_packages/TestDepTrackingPath/test/runtests.jl
index e69de29bb2..1bb8bf6d7f 100644
--- a/test/test_packages/TestDepTrackingPath/test/runtests.jl
+++ b/test/test_packages/TestDepTrackingPath/test/runtests.jl
@@ -0,0 +1 @@
+# empty
diff --git a/test/test_packages/TestFailure/Project.toml b/test/test_packages/TestFailure/Project.toml
index 1033510237..7f4c5c01b8 100644
--- a/test/test_packages/TestFailure/Project.toml
+++ b/test/test_packages/TestFailure/Project.toml
@@ -7,4 +7,4 @@ version = "0.1.0"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
[targets]
-test = ["Test"]
\ No newline at end of file
+test = ["Test"]
diff --git a/test/test_packages/TestThreads/Project.toml b/test/test_packages/TestThreads/Project.toml
new file mode 100644
index 0000000000..35e36aed33
--- /dev/null
+++ b/test/test_packages/TestThreads/Project.toml
@@ -0,0 +1,2 @@
+name = "TestThreads"
+uuid = "79df5fe7-ed23-44ca-b7b9-b3881e57664d"
diff --git a/test/test_packages/TestThreads/src/TestThreads.jl b/test/test_packages/TestThreads/src/TestThreads.jl
new file mode 100644
index 0000000000..11d357747f
--- /dev/null
+++ b/test/test_packages/TestThreads/src/TestThreads.jl
@@ -0,0 +1,2 @@
+module TestThreads
+end
diff --git a/test/test_packages/TestThreads/test/runtests.jl b/test/test_packages/TestThreads/test/runtests.jl
new file mode 100644
index 0000000000..cd19c368da
--- /dev/null
+++ b/test/test_packages/TestThreads/test/runtests.jl
@@ -0,0 +1,11 @@
+@assert haskey(ENV, "EXPECTED_NUM_THREADS_DEFAULT")
+@assert haskey(ENV, "EXPECTED_NUM_THREADS_INTERACTIVE")
+EXPECTED_NUM_THREADS_DEFAULT = parse(Int, ENV["EXPECTED_NUM_THREADS_DEFAULT"])
+EXPECTED_NUM_THREADS_INTERACTIVE = parse(Int, ENV["EXPECTED_NUM_THREADS_INTERACTIVE"])
+@assert Threads.nthreads() == EXPECTED_NUM_THREADS_DEFAULT
+@assert Threads.nthreads(:default) == EXPECTED_NUM_THREADS_DEFAULT
+if Threads.nthreads() == 1
+ @info "Convert me back to an assert once https://github.com/JuliaLang/julia/pull/57454 has landed" Threads.nthreads(:interactive) EXPECTED_NUM_THREADS_INTERACTIVE
+else
+ @assert Threads.nthreads(:interactive) == EXPECTED_NUM_THREADS_INTERACTIVE
+end
diff --git a/test/test_packages/WithSources/TestMonorepo/Project.toml b/test/test_packages/WithSources/TestMonorepo/Project.toml
new file mode 100644
index 0000000000..7c726b9389
--- /dev/null
+++ b/test/test_packages/WithSources/TestMonorepo/Project.toml
@@ -0,0 +1,17 @@
+name = "TestMonorepo"
+uuid = "864d8eef-2526-4817-933e-34008eadd182"
+authors = ["KristofferC "]
+version = "0.1.0"
+
+[deps]
+Unregistered = "dcb67f36-efa0-11e8-0cef-2fc465ed98ae"
+
+[extras]
+Example = "d359f271-ef68-451f-b4fc-6b43e571086c"
+
+[sources]
+Example = {url = "https://github.com/JuliaLang/Pkg.jl", subdir = "test/test_packages/Example"}
+Unregistered = {url = "https://github.com/00vareladavid/Unregistered.jl", rev = "1b7a462"}
+
+[targets]
+test = ["Example"]
diff --git a/test/test_packages/WithSources/TestMonorepo/src/TestMonorepo.jl b/test/test_packages/WithSources/TestMonorepo/src/TestMonorepo.jl
new file mode 100644
index 0000000000..9a4aa4f8f7
--- /dev/null
+++ b/test/test_packages/WithSources/TestMonorepo/src/TestMonorepo.jl
@@ -0,0 +1,6 @@
+module TestMonorepo
+using Unregistered
+
+greet() = print("Hello World!")
+
+end
diff --git a/test/test_packages/WithSources/TestMonorepo/test/runtests.jl b/test/test_packages/WithSources/TestMonorepo/test/runtests.jl
new file mode 100644
index 0000000000..81a7bcd223
--- /dev/null
+++ b/test/test_packages/WithSources/TestMonorepo/test/runtests.jl
@@ -0,0 +1,2 @@
+using Example
+using Unregistered
diff --git a/test/test_packages/WithSources/URLSourceInDevvedPackage/Project.toml b/test/test_packages/WithSources/URLSourceInDevvedPackage/Project.toml
new file mode 100644
index 0000000000..a73c636c7d
--- /dev/null
+++ b/test/test_packages/WithSources/URLSourceInDevvedPackage/Project.toml
@@ -0,0 +1,10 @@
+name = "URLSourceInDevvedPackage"
+uuid = "78d3b172-12ec-4a7f-9187-8bf78594552a"
+version = "0.1.0"
+authors = ["Kristoffer "]
+
+[deps]
+TestMonorepo = "864d8eef-2526-4817-933e-34008eadd182"
+
+[sources]
+TestMonorepo = {path = "../TestMonorepo"}
diff --git a/test/test_packages/WithSources/URLSourceInDevvedPackage/src/URLSourceInDevvedPackage.jl b/test/test_packages/WithSources/URLSourceInDevvedPackage/src/URLSourceInDevvedPackage.jl
new file mode 100644
index 0000000000..e1de92b8a8
--- /dev/null
+++ b/test/test_packages/WithSources/URLSourceInDevvedPackage/src/URLSourceInDevvedPackage.jl
@@ -0,0 +1,5 @@
+module URLSourceInDevvedPackage
+
+greet() = print("Hello World!")
+
+end # module URLSourceInDevvedPackage
diff --git a/test/test_packages/WithSources/URLSourceInDevvedPackage/test/runtests.jl b/test/test_packages/WithSources/URLSourceInDevvedPackage/test/runtests.jl
new file mode 100644
index 0000000000..7279d9d735
--- /dev/null
+++ b/test/test_packages/WithSources/URLSourceInDevvedPackage/test/runtests.jl
@@ -0,0 +1,2 @@
+using URLSourceInDevvedPackage
+using TestMonorepo
diff --git a/test/test_packages/WorkspacePathResolution/Project.toml b/test/test_packages/WorkspacePathResolution/Project.toml
new file mode 100644
index 0000000000..3550d64c98
--- /dev/null
+++ b/test/test_packages/WorkspacePathResolution/Project.toml
@@ -0,0 +1,5 @@
+[workspace]
+projects = [
+ "SubProjectA",
+ "SubProjectB",
+]
diff --git a/test/test_packages/WorkspacePathResolution/SubProjectA/Project.toml b/test/test_packages/WorkspacePathResolution/SubProjectA/Project.toml
new file mode 100644
index 0000000000..e5aa2bbe50
--- /dev/null
+++ b/test/test_packages/WorkspacePathResolution/SubProjectA/Project.toml
@@ -0,0 +1,9 @@
+name = "SubProjectA"
+uuid = "87654321-4321-4321-4321-210987654321"
+version = "0.1.0"
+
+[deps]
+SubProjectB = "12345678-1234-1234-1234-123456789012"
+
+[sources]
+SubProjectB = {path = "SubProjectB"}
diff --git a/test/test_packages/WorkspacePathResolution/SubProjectA/src/SubProjectA.jl b/test/test_packages/WorkspacePathResolution/SubProjectA/src/SubProjectA.jl
new file mode 100644
index 0000000000..89fb8e379d
--- /dev/null
+++ b/test/test_packages/WorkspacePathResolution/SubProjectA/src/SubProjectA.jl
@@ -0,0 +1,7 @@
+module SubProjectA
+
+using SubProjectB
+
+greet() = "Hello from SubProjectA! " * SubProjectB.greet()
+
+end
diff --git a/test/test_packages/WorkspacePathResolution/SubProjectB/Project.toml b/test/test_packages/WorkspacePathResolution/SubProjectB/Project.toml
new file mode 100644
index 0000000000..8f8bf6fd07
--- /dev/null
+++ b/test/test_packages/WorkspacePathResolution/SubProjectB/Project.toml
@@ -0,0 +1,3 @@
+name = "SubProjectB"
+uuid = "12345678-1234-1234-1234-123456789012"
+version = "0.1.0"
diff --git a/test/test_packages/WorkspacePathResolution/SubProjectB/src/SubProjectB.jl b/test/test_packages/WorkspacePathResolution/SubProjectB/src/SubProjectB.jl
new file mode 100644
index 0000000000..4486af34ba
--- /dev/null
+++ b/test/test_packages/WorkspacePathResolution/SubProjectB/src/SubProjectB.jl
@@ -0,0 +1,5 @@
+module SubProjectB
+
+greet() = "Hello from SubProjectB!"
+
+end
diff --git a/test/test_packages/monorepo/packages/D/Project.toml b/test/test_packages/monorepo/packages/D/Project.toml
index a816c0ea74..b4469b5f58 100644
--- a/test/test_packages/monorepo/packages/D/Project.toml
+++ b/test/test_packages/monorepo/packages/D/Project.toml
@@ -1,4 +1,4 @@
name = "D"
uuid = "bf733257-898a-45a0-b2f2-c1c188bdd870"
version = "0.0.0"
-manifest = "../../Manifest.toml"
\ No newline at end of file
+manifest = "../../Manifest.toml"
diff --git a/test/test_packages/monorepo/test/runtests.jl b/test/test_packages/monorepo/test/runtests.jl
index 8cacea0deb..ef5dbaf141 100644
--- a/test/test_packages/monorepo/test/runtests.jl
+++ b/test/test_packages/monorepo/test/runtests.jl
@@ -1,4 +1,4 @@
using Test, A
@test A.test()
-@test A.testC()
\ No newline at end of file
+@test A.testC()
diff --git a/test/test_packages/sources_only_rev/Project.toml b/test/test_packages/sources_only_rev/Project.toml
new file mode 100644
index 0000000000..73a01c5d00
--- /dev/null
+++ b/test/test_packages/sources_only_rev/Project.toml
@@ -0,0 +1,5 @@
+[deps]
+Example = "7876af07-990d-54b4-ab0e-23690620f79a"
+
+[sources]
+Example = {rev = "ba3d6704f09330ae973773496a4212f85e0ffe45"}
diff --git a/test/utils.jl b/test/utils.jl
index ee19cb9bcb..b752f5bbb7 100644
--- a/test/utils.jl
+++ b/test/utils.jl
@@ -9,9 +9,9 @@ using TOML
using UUIDs
export temp_pkg_dir, cd_tempdir, isinstalled, write_build, with_current_env,
- with_temp_env, with_pkg_env, git_init_and_commit, copy_test_package,
- git_init_package, add_this_pkg, TEST_SIG, TEST_PKG, isolate, LOADED_DEPOT,
- list_tarball_files, recursive_rm_cov_files
+ with_temp_env, with_pkg_env, git_init_and_commit, copy_test_package,
+ git_init_package, add_this_pkg, TEST_SIG, TEST_PKG, isolate, LOADED_DEPOT,
+ list_tarball_files, recursive_rm_cov_files, copy_this_pkg_cache
const CACHE_DIRECTORY = realpath(mktempdir(; cleanup = true))
@@ -22,6 +22,18 @@ const REGISTRY_DIR = joinpath(REGISTRY_DEPOT, "registries", "General")
const GENERAL_UUID = UUID("23338594-aafe-5451-b93e-139f81909106")
+function copy_this_pkg_cache(new_depot)
+ for p in ("Pkg", "REPLExt")
+ subdir = joinpath("compiled", "v$(VERSION.major).$(VERSION.minor)")
+ source = joinpath(Base.DEPOT_PATH[1], subdir, p)
+ isdir(source) || continue # doesn't exist if using shipped Pkg (e.g. Julia CI)
+ dest = joinpath(new_depot, subdir, p)
+ mkpath(dirname(dest))
+ cp(source, dest)
+ end
+ return
+end
+
function check_init_reg()
isfile(joinpath(REGISTRY_DIR, "Registry.toml")) && return
mkpath(REGISTRY_DIR)
@@ -34,21 +46,24 @@ function check_init_reg()
write(tree_info_file, "git-tree-sha1 = " * repr(string(hash)))
else
Base.shred!(LibGit2.CachedCredentials()) do creds
- f = retry(delays = fill(5.0, 3), check=(s,e)->isa(e, Pkg.Types.PkgError)) do
- LibGit2.with(Pkg.GitTools.clone(
- stderr_f(),
- "https://github.com/JuliaRegistries/General.git",
- REGISTRY_DIR,
- credentials = creds)) do repo
+ f = retry(delays = fill(5.0, 3), check = (s, e) -> isa(e, Pkg.Types.PkgError)) do
+ LibGit2.with(
+ Pkg.GitTools.clone(
+ stderr_f(),
+ "https://github.com/JuliaRegistries/General.git",
+ REGISTRY_DIR,
+ credentials = creds
+ )
+ ) do repo
end
end
f() # retry returns a function that should be called
end
end
- isfile(joinpath(REGISTRY_DIR, "Registry.toml")) || error("Registry did not install properly")
+ return isfile(joinpath(REGISTRY_DIR, "Registry.toml")) || error("Registry did not install properly")
end
-function isolate(fn::Function; loaded_depot=false, linked_reg=true)
+function isolate(fn::Function; loaded_depot = false, linked_reg = true)
old_load_path = copy(LOAD_PATH)
old_depot_path = copy(DEPOT_PATH)
old_home_project = Base.HOME_PROJECT[]
@@ -57,7 +72,7 @@ function isolate(fn::Function; loaded_depot=false, linked_reg=true)
old_general_registry_url = Pkg.Registry.DEFAULT_REGISTRIES[1].url
old_general_registry_path = Pkg.Registry.DEFAULT_REGISTRIES[1].path
old_general_registry_linked = Pkg.Registry.DEFAULT_REGISTRIES[1].linked
- try
+ return try
# Clone/download the registry only once
check_init_reg()
@@ -70,9 +85,11 @@ function isolate(fn::Function; loaded_depot=false, linked_reg=true)
Pkg.Registry.DEFAULT_REGISTRIES[1].path = REGISTRY_DIR
Pkg.Registry.DEFAULT_REGISTRIES[1].linked = linked_reg
Pkg.REPLMode.TEST_MODE[] = false
- withenv("JULIA_PROJECT" => nothing,
- "JULIA_LOAD_PATH" => nothing,
- "JULIA_PKG_DEVDIR" => nothing) do
+ withenv(
+ "JULIA_PROJECT" => nothing,
+ "JULIA_LOAD_PATH" => nothing,
+ "JULIA_PKG_DEVDIR" => nothing
+ ) do
target_depot = realpath(mktempdir())
push!(LOAD_PATH, "@", "@v#.#", "@stdlib")
push!(DEPOT_PATH, target_depot)
@@ -91,7 +108,7 @@ function isolate(fn::Function; loaded_depot=false, linked_reg=true)
end
if !haskey(ENV, "CI") && target_depot !== nothing && isdir(target_depot)
try
- Base.rm(target_depot; force=true, recursive=true)
+ Base.rm(target_depot; force = true, recursive = true)
catch err
println("warning: isolate failed to clean up depot.\n $err")
end
@@ -127,7 +144,7 @@ function isolate_and_pin_registry(fn::Function; registry_url::String, registry_c
return nothing
end
-function temp_pkg_dir(fn::Function;rm=true, linked_reg=true)
+function temp_pkg_dir(fn::Function; rm = true, linked_reg = true)
old_load_path = copy(LOAD_PATH)
old_depot_path = copy(DEPOT_PATH)
old_home_project = Base.HOME_PROJECT[]
@@ -135,7 +152,7 @@ function temp_pkg_dir(fn::Function;rm=true, linked_reg=true)
old_general_registry_url = Pkg.Registry.DEFAULT_REGISTRIES[1].url
old_general_registry_path = Pkg.Registry.DEFAULT_REGISTRIES[1].path
old_general_registry_linked = Pkg.Registry.DEFAULT_REGISTRIES[1].linked
- try
+ return try
# Clone/download the registry only once
check_init_reg()
@@ -146,9 +163,11 @@ function temp_pkg_dir(fn::Function;rm=true, linked_reg=true)
Pkg.Registry.DEFAULT_REGISTRIES[1].url = nothing
Pkg.Registry.DEFAULT_REGISTRIES[1].path = REGISTRY_DIR
Pkg.Registry.DEFAULT_REGISTRIES[1].linked = linked_reg
- withenv("JULIA_PROJECT" => nothing,
- "JULIA_LOAD_PATH" => nothing,
- "JULIA_PKG_DEVDIR" => nothing) do
+ withenv(
+ "JULIA_PROJECT" => nothing,
+ "JULIA_LOAD_PATH" => nothing,
+ "JULIA_PKG_DEVDIR" => nothing
+ ) do
env_dir = realpath(mktempdir())
depot_dir = realpath(mktempdir())
try
@@ -159,8 +178,8 @@ function temp_pkg_dir(fn::Function;rm=true, linked_reg=true)
finally
if rm && !haskey(ENV, "CI")
try
- Base.rm(env_dir; force=true, recursive=true)
- Base.rm(depot_dir; force=true, recursive=true)
+ Base.rm(env_dir; force = true, recursive = true)
+ Base.rm(depot_dir; force = true, recursive = true)
catch err
# Avoid raising an exception here as it will mask the original exception
println(stderr_f(), "Exception in finally: $(sprint(showerror, err))")
@@ -181,12 +200,12 @@ function temp_pkg_dir(fn::Function;rm=true, linked_reg=true)
end
end
-function cd_tempdir(f; rm=true)
+function cd_tempdir(f; rm = true)
tmp = realpath(mktempdir())
cd(tmp) do
f(tmp)
end
- if rm && !haskey(ENV, "CI")
+ return if rm && !haskey(ENV, "CI")
try
Base.rm(tmp; force = true, recursive = true)
catch err
@@ -203,25 +222,25 @@ isinstalled(pkg::String) = Base.find_package(pkg) !== nothing
function write_build(path, content)
build_filename = joinpath(path, "deps", "build.jl")
mkpath(dirname(build_filename))
- write(build_filename, content)
+ return write(build_filename, content)
end
function with_current_env(f)
prev_active = Base.ACTIVE_PROJECT[]
Pkg.activate(".")
- try
+ return try
f()
finally
Base.ACTIVE_PROJECT[] = prev_active
end
end
-function with_temp_env(f, env_name::AbstractString="Dummy"; rm=true)
+function with_temp_env(f, env_name::AbstractString = "Dummy"; rm = true)
prev_active = Base.ACTIVE_PROJECT[]
env_path = joinpath(realpath(mktempdir()), env_name)
Pkg.generate(env_path)
Pkg.activate(env_path)
- try
+ return try
applicable(f, env_path) ? f(env_path) : f()
finally
Base.ACTIVE_PROJECT[] = prev_active
@@ -236,10 +255,10 @@ function with_temp_env(f, env_name::AbstractString="Dummy"; rm=true)
end
end
-function with_pkg_env(fn::Function, path::AbstractString="."; change_dir=false)
+function with_pkg_env(fn::Function, path::AbstractString = "."; change_dir = false)
prev_active = Base.ACTIVE_PROJECT[]
Pkg.activate(path)
- try
+ return try
if change_dir
cd(fn, path)
else
@@ -256,9 +275,9 @@ const TEST_SIG = LibGit2.Signature("TEST", "TEST@TEST.COM", round(time()), 0)
const TEST_PKG = (name = "Example", uuid = UUID("7876af07-990d-54b4-ab0e-23690620f79a"))
function git_init_and_commit(path; msg = "initial commit")
- LibGit2.with(LibGit2.init(path)) do repo
+ return LibGit2.with(LibGit2.init(path)) do repo
LibGit2.add!(repo, "*")
- LibGit2.commit(repo, msg; author=TEST_SIG, committer=TEST_SIG)
+ LibGit2.commit(repo, msg; author = TEST_SIG, committer = TEST_SIG)
end
end
@@ -270,9 +289,22 @@ function git_init_package(tmp, path)
return pkgpath
end
-function copy_test_package(tmpdir::String, name::String; use_pkg=true)
+function ensure_test_package_user_writable(dir)
+ for (root, _, files) in walkdir(dir)
+ chmod(root, filemode(root) | 0o200 | 0o100)
+
+ for file in files
+ filepath = joinpath(root, file)
+ chmod(filepath, filemode(filepath) | 0o200)
+ end
+ end
+ return
+end
+
+function copy_test_package(tmpdir::String, name::String; use_pkg = true)
target = joinpath(tmpdir, name)
cp(joinpath(@__DIR__, "test_packages", name), target)
+ ensure_test_package_user_writable(target)
use_pkg || return target
# The known Pkg UUID, and whatever UUID we're currently using for testing
@@ -290,15 +322,15 @@ function copy_test_package(tmpdir::String, name::String; use_pkg=true)
return target
end
-function add_this_pkg(; platform=Base.BinaryPlatforms.HostPlatform())
- try
+function add_this_pkg(; platform = Base.BinaryPlatforms.HostPlatform())
+ return try
Pkg.respect_sysimage_versions(false)
pkg_dir = dirname(@__DIR__)
pkg_uuid = TOML.parsefile(joinpath(pkg_dir, "Project.toml"))["uuid"]
spec = Pkg.PackageSpec(
- name="Pkg",
- uuid=UUID(pkg_uuid),
- path=pkg_dir,
+ name = "Pkg",
+ uuid = UUID(pkg_uuid),
+ path = pkg_dir,
)
Pkg.develop(spec; platform)
finally
@@ -316,14 +348,14 @@ end
function show_output_if_command_errors(cmd::Cmd)
out = IOBuffer()
- proc = run(pipeline(cmd; stdout=out); wait = false)
+ proc = run(pipeline(cmd; stdout = out); wait = false)
wait(proc)
if !success(proc)
seekstart(out)
println(read(out, String))
Base.pipeline_error(proc)
end
- return nothing
+ return true
end
function recursive_rm_cov_files(rootdir::String)
@@ -332,6 +364,7 @@ function recursive_rm_cov_files(rootdir::String)
endswith(file, ".cov") && rm(joinpath(root, file))
end
end
+ return
end
end
diff --git a/test/workspaces.jl b/test/workspaces.jl
index acb41be7bd..c123410978 100644
--- a/test/workspaces.jl
+++ b/test/workspaces.jl
@@ -11,146 +11,155 @@ end
temp_pkg_dir() do project_path
- cd(project_path) do; with_temp_env() do
- name = "MonorepoSub"
- rm(name, force=true, recursive=true)
- Pkg.generate(name)
- cd("MonorepoSub") do
- Pkg.activate(".")
- # Add Example, Crayons, PrivatePackage to the "MonorepoSub" package
- Pkg.add("Example")
- Pkg.add(;name="Crayons", version="v4.0.3")
- Pkg.compat("Crayons", "=4.0.0, =4.0.1, =4.0.2, =4.0.3")
- Pkg.generate("PrivatePackage")
- Pkg.develop(path="PrivatePackage")
- d = TOML.parsefile("Project.toml")
- d["workspace"] = Dict("projects" => ["test", "docs", "benchmarks", "PrivatePackage"])
- abs_path = abspath("PrivatePackage") # TODO: Make relative after #3842 is fixed
- d["sources"] = Dict("PrivatePackage" => Dict("path" => abs_path))
- Pkg.Types.write_project(d, "Project.toml")
- write("src/MonorepoSub.jl", """
- module MonorepoSub
- using Example, Crayons, PrivatePackage
- end
- """)
+ cd(project_path) do;
+ with_temp_env() do
+ name = "MonorepoSub"
+ rm(name, force = true, recursive = true)
+ Pkg.generate(name)
+ cd("MonorepoSub") do
+ Pkg.activate(".")
+ # Add Example, Crayons, PrivatePackage to the "MonorepoSub" package
+ Pkg.add("Example")
+ Pkg.add(; name = "Crayons", version = "v4.0.3")
+ Pkg.compat("Crayons", "=4.0.0, =4.0.1, =4.0.2, =4.0.3")
+ Pkg.generate("PrivatePackage")
+ Pkg.develop(path = "PrivatePackage")
+ d = TOML.parsefile("Project.toml")
+ d["workspace"] = Dict("projects" => ["test", "docs", "benchmarks", "PrivatePackage"])
+ abs_path = abspath("PrivatePackage") # TODO: Make relative after #3842 is fixed
+ d["sources"] = Dict("PrivatePackage" => Dict("path" => abs_path))
+ Pkg.Types.write_project(d, "Project.toml")
+ write(
+ "src/MonorepoSub.jl", """
+ module MonorepoSub
+ using Example, Crayons, PrivatePackage
+ end
+ """
+ )
- # Add some deps to PrivatePackage
- Pkg.activate("PrivatePackage")
- Pkg.add(; name="Chairmarks", version=v"1.1.2")
- @test !isfile("PrivatePackage/Manifest.toml")
- d = TOML.parsefile("PrivatePackage/Project.toml")
- d["workspace"] = Dict("projects" => ["test"])
- Pkg.Types.write_project(d, "PrivatePackage/Project.toml")
- write("PrivatePackage/src/PrivatePackage.jl", """
- module PrivatePackage
- using Chairmarks
- end
- """)
- io = IOBuffer()
- Pkg.status(; io)
- status = String(take!(io))
- for pkg in ["Crayons v", "Example v", "TestSpecificPackage v"]
- @test !occursin(pkg, status)
- end
- @test occursin("Chairmarks v", status)
+ # Add some deps to PrivatePackage
+ Pkg.activate("PrivatePackage")
+ Pkg.add(; name = "Chairmarks", version = v"1.1.2")
+ @test !isfile("PrivatePackage/Manifest.toml")
+ d = TOML.parsefile("PrivatePackage/Project.toml")
+ d["workspace"] = Dict("projects" => ["test"])
+ Pkg.Types.write_project(d, "PrivatePackage/Project.toml")
+ write(
+ "PrivatePackage/src/PrivatePackage.jl", """
+ module PrivatePackage
+ using Chairmarks
+ end
+ """
+ )
+ io = IOBuffer()
+ Pkg.status(; io)
+ status = String(take!(io))
+ for pkg in ["Crayons v", "Example v", "TestSpecificPackage v"]
+ @test !occursin(pkg, status)
+ end
+ @test occursin("Chairmarks v", status)
- # Make a test subproject in PrivatePackage
- # Note that this is a "nested subproject" since in this environment
- # PrivatePackage is a subproject of MonorepoSub
- mkdir("PrivatePackage/test")
- Pkg.activate("PrivatePackage/test")
- # This adds too many packages to the Project file...
- Pkg.add("Test")
- Pkg.develop(path="PrivatePackage")
- @test length(Pkg.project().dependencies) == 2
- write("PrivatePackage/test/runtests.jl", """
- using Test
- using PrivatePackage
- """)
- # A nested subproject should still use the root base manifest
- @test !isfile("PrivatePackage/test/Manifest.toml")
- # Test status shows deps in test-subproject + base (MonoRepoSub)
- io = IOBuffer()
- Pkg.status(; io)
- status = String(take!(io))
- for pkg in ["Crayons", "Example", "TestSpecificPackage"]
- @test !occursin(pkg, status)
- end
- @test occursin("Test v", status)
+ # Make a test subproject in PrivatePackage
+ # Note that this is a "nested subproject" since in this environment
+ # PrivatePackage is a subproject of MonorepoSub
+ mkdir("PrivatePackage/test")
+ Pkg.activate("PrivatePackage/test")
+ # This adds too many packages to the Project file...
+ Pkg.add("Test")
+ Pkg.develop(path = "PrivatePackage")
+ @test length(Pkg.project().dependencies) == 2
+ write(
+ "PrivatePackage/test/runtests.jl", """
+ using Test
+ using PrivatePackage
+ """
+ )
+ # A nested subproject should still use the root base manifest
+ @test !isfile("PrivatePackage/test/Manifest.toml")
+ # Test status shows deps in test-subproject + base (MonorepoSub)
+ io = IOBuffer()
+ Pkg.status(; io)
+ status = String(take!(io))
+ for pkg in ["Crayons", "Example", "TestSpecificPackage"]
+ @test !occursin(pkg, status)
+ end
+ @test occursin("Test v", status)
- Pkg.status(; io, workspace=true)
- status = String(take!(io))
- for pkg in ["Crayons", "Example", "Test"]
- @test occursin(pkg, status)
- end
+ Pkg.status(; io, workspace = true)
+ status = String(take!(io))
+ for pkg in ["Crayons", "Example", "Test"]
+ @test occursin(pkg, status)
+ end
- # Add tests to MonorepoSub
- mkdir("test")
- Pkg.activate("test")
- # Test specific deps
- Pkg.add("Test")
- Pkg.add("Crayons")
- Pkg.compat("Crayons", "=4.0.1, =4.0.2, =4.0.3, =4.0.4")
- Pkg.develop(; path=".")
- # Compat in base package should prevent updating to 4.0.4
- Pkg.update()
- @test Pkg.dependencies()[UUID("a8cc5b0e-0ffa-5ad4-8c14-923d3ee1735f")].version == v"4.0.3"
- Pkg.generate("TestSpecificPackage")
- Pkg.develop(path="TestSpecificPackage")
- d = TOML.parsefile("test/Project.toml")
- abs_pkg = abspath("TestSpecificPackage") # TODO: Make relative after #3842 is fixed
- d["sources"] = Dict("TestSpecificPackage" => Dict("path" => abs_pkg))
- Pkg.Types.write_project(d, "test/Project.toml")
+ # Add tests to MonorepoSub
+ mkdir("test")
+ Pkg.activate("test")
+ # Test specific deps
+ Pkg.add("Test")
+ Pkg.add("Crayons")
+ Pkg.compat("Crayons", "=4.0.1, =4.0.2, =4.0.3, =4.0.4")
+ Pkg.develop(; path = ".")
+ # Compat in base package should prevent updating to 4.0.4
+ Pkg.update()
+ @test Pkg.dependencies()[UUID("a8cc5b0e-0ffa-5ad4-8c14-923d3ee1735f")].version == v"4.0.3"
+ Pkg.generate("TestSpecificPackage")
+ Pkg.develop(path = "TestSpecificPackage")
+ d = TOML.parsefile("test/Project.toml")
+ abs_pkg = abspath("TestSpecificPackage") # TODO: Make relative after #3842 is fixed
+ d["sources"] = Dict("TestSpecificPackage" => Dict("path" => abs_pkg))
+ Pkg.Types.write_project(d, "test/Project.toml")
- @test !isfile("test/Manifest.toml")
- write("test/runtests.jl", """
- using Test
- using Crayons
- using TestSpecificPackage
- using MonorepoSub
- """)
+ @test !isfile("test/Manifest.toml")
+ write(
+ "test/runtests.jl", """
+ using Test
+ using Crayons
+ using TestSpecificPackage
+ using MonorepoSub
+ """
+ )
- Pkg.activate(".")
- env = Pkg.Types.EnvCache()
- hash_1 = Pkg.Types.workspace_resolve_hash(env)
- Pkg.activate("PrivatePackage")
- env = Pkg.Types.EnvCache()
- hash_2 = Pkg.Types.workspace_resolve_hash(env)
- Pkg.activate("test")
- env = Pkg.Types.EnvCache()
- hash_3 = Pkg.Types.workspace_resolve_hash(env)
- Pkg.activate("PrivatePackage/test")
- env = Pkg.Types.EnvCache()
- hash_4 = Pkg.Types.workspace_resolve_hash(env)
+ Pkg.activate(".")
+ env = Pkg.Types.EnvCache()
+ hash_1 = Pkg.Types.workspace_resolve_hash(env)
+ Pkg.activate("PrivatePackage")
+ env = Pkg.Types.EnvCache()
+ hash_2 = Pkg.Types.workspace_resolve_hash(env)
+ Pkg.activate("test")
+ env = Pkg.Types.EnvCache()
+ hash_3 = Pkg.Types.workspace_resolve_hash(env)
+ Pkg.activate("PrivatePackage/test")
+ env = Pkg.Types.EnvCache()
+ hash_4 = Pkg.Types.workspace_resolve_hash(env)
- @test hash_1 == hash_2 == hash_3 == hash_4
+ @test hash_1 == hash_2 == hash_3 == hash_4
- # Test that the subprojects are working
- depot_path_string = join(Base.DEPOT_PATH, Sys.iswindows() ? ";" : ":")
- withenv("JULIA_DEPOT_PATH" => depot_path_string) do
- @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="test" test/runtests.jl`))
- @test success(run(`$(Base.julia_cmd()) --startup-file=no --project -e 'using MonorepoSub'`))
- @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="PrivatePackage" -e 'using PrivatePackage'`))
- @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="PrivatePackage/test" PrivatePackage/test/runtests.jl`))
+ # Test that the subprojects are working
+ depot_path_string = join(Base.DEPOT_PATH, Sys.iswindows() ? ";" : ":")
+ withenv("JULIA_DEPOT_PATH" => depot_path_string) do
+ @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="test" test/runtests.jl`))
+ @test success(run(`$(Base.julia_cmd()) --startup-file=no --project -e 'using MonorepoSub'`))
+ @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="PrivatePackage" -e 'using PrivatePackage'`))
+ @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="PrivatePackage/test" PrivatePackage/test/runtests.jl`))
- rm("Manifest.toml")
- Pkg.activate(".")
- Pkg.resolve()
- # Resolve should have fixed the manifest so that everything above works from the existing project files
- @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="test" test/runtests.jl`))
- @test success(run(`$(Base.julia_cmd()) --startup-file=no --project -e 'using MonorepoSub'`))
- @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="PrivatePackage" -e 'using PrivatePackage'`))
- @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="PrivatePackage/test" PrivatePackage/test/runtests.jl`))
+ rm("Manifest.toml")
+ Pkg.activate(".")
+ Pkg.resolve()
+ # Resolve should have fixed the manifest so that everything above works from the existing project files
+ @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="test" test/runtests.jl`))
+ @test success(run(`$(Base.julia_cmd()) --startup-file=no --project -e 'using MonorepoSub'`))
+ @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="PrivatePackage" -e 'using PrivatePackage'`))
+ @test success(run(`$(Base.julia_cmd()) --startup-file=no --project="PrivatePackage/test" PrivatePackage/test/runtests.jl`))
+ end
end
end
- end end
+ end
end
@testset "test resolve with tree hash" begin
mktempdir() do dir
- path = abspath(joinpath(@__DIR__, "../test", "test_packages", "WorkspaceTestInstantiate"))
- cp(path, joinpath(dir, "WorkspaceTestInstantiate"))
- cd(joinpath(dir, "WorkspaceTestInstantiate")) do
+ path = copy_test_package(dir, "WorkspaceTestInstantiate")
+ cd(path) do
with_current_env() do
@test !isfile("Manifest.toml")
@test !isfile("test/Manifest.toml")
@@ -164,4 +173,19 @@ end
end
end
+@testset "workspace path resolution issue #4222" begin
+ mktempdir() do dir
+ path = copy_test_package(dir, "WorkspacePathResolution")
+ cd(path) do
+ with_current_env() do
+ # First resolve SubProjectB (non-root project) without existing Manifest
+ Pkg.activate("SubProjectB")
+ @test !isfile("Manifest.toml")
+ # Should be able to find SubProjectA and succeed
+ Pkg.update()
+ end
+ end
+ end
+end
+
end # module