diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 0000000000..37459703e9 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,2 @@ +# ran runic on the code base +a84228360d6cff568a55911733e830cdf1c492da diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000..d7a3ed5357 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,11 @@ +# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" # Location of package manifests + schedule: + interval: "weekly" + groups: + all-actions: + patterns: + - "*" diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml new file mode 100644 index 0000000000..8376d060fb --- /dev/null +++ b/.github/workflows/check.yml @@ -0,0 +1,30 @@ +name: Code checks + +on: + pull_request: + push: + branches: ["master"] + +jobs: + + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # v3.0.1 + env: + # Skip runic-pre-commit since we use runic-action below instead + SKIP: runic + + runic: + name: "Runic" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: julia-actions/setup-julia@v2 + with: + version: '1.11' + - uses: julia-actions/cache@v2 + - uses: fredrikekre/runic-action@v1 + with: + version: "1.4" # Keep version in sync with .pre-commit-config.yaml diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 11f1643502..332c33f7a5 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -4,10 +4,12 @@ on: branches: - 'master' - 'release-*' + - 'backports-release-*' push: branches: - 'master' - 'release-*' + - 'backports-release-*' tags: '*' defaults: run: @@ -71,6 +73,7 @@ jobs: - uses: julia-actions/julia-runtest@v1 with: coverage: true + depwarn: error env: JULIA_PKG_SERVER: ${{ 
matrix.pkg-server }} JULIA_TEST_VERBOSE_LOGS_DIR: ${{ github.workspace }} @@ -82,9 +85,10 @@ jobs: - uses: julia-actions/julia-processcoverage@v1 env: JULIA_PKG_SERVER: ${{ matrix.pkg-server }} - - uses: codecov/codecov-action@v3 + - uses: codecov/codecov-action@v5 with: - file: lcov.info + files: lcov.info + token: ${{ secrets.CODECOV_TOKEN }} docs: runs-on: ubuntu-latest timeout-minutes: 60 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000000..68066c2cc2 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,17 @@ +repos: + - repo: 'https://github.com/pre-commit/pre-commit-hooks' + rev: v5.0.0 + hooks: + - id: check-added-large-files + - id: check-case-conflict + # - id: check-toml # we have tomls with invalid syntax for tests + - id: check-yaml + - id: end-of-file-fixer + - id: mixed-line-ending + - id: trailing-whitespace + - repo: 'https://github.com/fredrikekre/runic-pre-commit' + rev: v2.0.1 + hooks: + - id: runic + additional_dependencies: + - 'Runic@1.4' # Keep version in sync with .github/workflows/check.yml diff --git a/CHANGELOG.md b/CHANGELOG.md index 056a6f1f36..6b9c934159 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,9 +1,69 @@ +Pkg v1.13 Release Notes +======================= + +- Project.toml environments now support a `readonly` field to mark environments as read-only, preventing modifications. + ([#4284]) +- `Pkg.build` now supports an `allow_reresolve` keyword argument to control whether the build process can re-resolve + package versions, similar to the existing option for `Pkg.test`. ([#3329]) +- Packages are now automatically added to `[sources]` when they are added by url or devved. 
([#4225]) +- `update` now shows a helpful tip when trying to upgrade a specific package that can be upgraded but is held back + because it's part of a less optimal resolver solution ([#4266]) +- `Pkg.status` now displays yanked packages with a `[yanked]` indicator and shows a warning when yanked packages are + present. `Pkg.resolve` errors also display warnings about yanked packages that are not resolvable. ([#4310]) +- Added `pkg> compat --current` command to automatically populate missing compat entries with the currently resolved + package versions. Use `pkg> compat --current` for all packages or `pkg> compat Foo --current` for specific packages. + ([#3266]) +- Added `Pkg.precompile() do` block syntax to delay autoprecompilation until after multiple operations complete, + improving efficiency when performing several environment changes. ([#4262]) +- Added `Pkg.autoprecompilation_enabled(state::Bool)` to globally enable or disable automatic precompilation for Pkg + operations. ([#4262]) +- Implemented atomic TOML writes to prevent data corruption when Pkg operations are interrupted or multiple processes + write simultaneously. All TOML files are now written atomically using temporary files and atomic moves. ([#4293]) +- Implemented lazy loading for RegistryInstance to significantly improve startup performance for operations that don't + require full registry data. This reduces `Pkg.instantiate()` time by approximately 60% in many cases. ([#4304]) +- Added support for directly adding git submodules via `Pkg.add(path="/path/to/git-submodule.jl")`. ([#3344]) +- Enhanced REPL user experience by automatically detecting and stripping accidental leading `]` characters in commands. + ([#3122]) +- Improved tip messages to show REPL mode syntax when operating in REPL mode. ([#3854]) +- Enhanced error handling with more descriptive error messages when operations fail on empty URLs during git repository + installation or registry discovery. 
([#4282]) +- Improved error messages for invalid compat entries to provide better guidance for fixing them. ([#4302]) +- Added warnings when attempting to add local paths that contain dirty git repositories. ([#4309]) +- Enhanced package parsing to better handle complex URLs and paths with branch/tag/subdir specifiers. ([#4299]) +- Improved artifact download behavior to only attempt downloads from the Pkg server when the package is registered on + that server's registries. ([#4297]) +- Added comprehensive documentation page about depots, including depot layouts and configuration. ([#2245]) +- Enhanced error handling for packages missing from registries or manifests with more informative messages. ([#4303]) +- Added more robust error handling when packages have revisions but no source information. ([#4311]) +- Enhanced registry status reporting with more detailed information. ([#4300]) +- Fixed various edge cases in package resolution and manifest handling. ([#4307], [#4308], [#4312]) +- Improved handling of path separators across different operating systems. ([#4305]) +- Added better error messages when accessing private PackageSpec.repo field. ([#4170]) + Pkg v1.12 Release Notes ======================= - Pkg now has support for "workspaces" which is a way to resolve multiple project files into a single manifest. - The functions `Pkg.status`, `Pkg.why`, `Pkg.instantiate`, `Pkg.precompile` (and their REPL variants) have been updated - to take a `workspace` option. Read more about this feature in the manual about the TOML-files. + The functions `Pkg.status`, `Pkg.why`, `Pkg.instantiate`, `Pkg.precompile` (and their REPL variants) have been + updated to take a `workspace` option, with fixes for workspace path collection and package resolution in workspace + environments. Read more about this feature in the manual about the TOML-files. ([#3841], [#4229]) +- Pkg now supports "apps" which are Julia packages that can be run directly from the terminal after installation. 
+ Apps can be defined in a package's Project.toml and installed via Pkg. Apps now support multiple apps per package + via submodules, allowing packages to define multiple command-line applications, with enhanced functionality including + update capabilities and better handling of already installed apps. ([#3772], [#4277], [#4263]) +- `status` now shows when different versions/sources of dependencies are loaded than that which is expected by the + manifest ([#4109]) +- When adding or developing a package that exists in the `[weakdeps]` section, it is now automatically removed from + weak dependencies and added as a regular dependency. ([#3865]) +- Enhanced fuzzy matching algorithm for package name suggestions with improved multi-factor scoring for better package + name suggestions. ([#4287]) +- The Pkg REPL now supports GitHub pull request URLs, allowing direct package installation from PRs via + `pkg> add https://github.com/Org/Package.jl/pull/123` ([#4295]) +- Improved git repository cloning performance by changing from `refs/*` to `refs/heads/*` to speed up operations on + repositories with many branches. ([#2330]) +- Improved REPL command parsing to handle leading whitespace with comma-separated packages. ([#4274]) +- Improved error messages when providing incorrect package UUIDs. ([#4270]) +- Added confirmation prompts before removing compat entries to prevent accidental deletions. ([#4254]) Pkg v1.11 Release Notes ======================= @@ -21,7 +81,7 @@ Pkg v1.10 Release Notes ======================= Pkg v1.9 Release Notes -======================= +====================== - New functionality: `Pkg.why` and `pkg> why` to show why a package is inside the environment (shows all "paths" to a package starting at the direct dependencies). 
- When code coverage tracking is enabled for `Pkg.test` the new path-specific code-coverage option is used to limit coverage @@ -83,6 +143,16 @@ Pkg v1.7 Release Notes - The `mode` keyword for `PackageSpec` has been removed ([#2454]). +[#4225]: https://github.com/JuliaLang/Pkg.jl/issues/4225 +[#4284]: https://github.com/JuliaLang/Pkg.jl/issues/4284 +[#3526]: https://github.com/JuliaLang/Pkg.jl/issues/3526 +[#3708]: https://github.com/JuliaLang/Pkg.jl/issues/3708 +[#3732]: https://github.com/JuliaLang/Pkg.jl/issues/3732 +[#3772]: https://github.com/JuliaLang/Pkg.jl/issues/3772 +[#3783]: https://github.com/JuliaLang/Pkg.jl/issues/3783 +[#3841]: https://github.com/JuliaLang/Pkg.jl/issues/3841 +[#3865]: https://github.com/JuliaLang/Pkg.jl/issues/3865 +[#4109]: https://github.com/JuliaLang/Pkg.jl/issues/4109 [#2284]: https://github.com/JuliaLang/Pkg.jl/issues/2284 [#2431]: https://github.com/JuliaLang/Pkg.jl/issues/2431 [#2432]: https://github.com/JuliaLang/Pkg.jl/issues/2432 @@ -101,3 +171,36 @@ Pkg v1.7 Release Notes [#2995]: https://github.com/JuliaLang/Pkg.jl/issues/2995 [#3002]: https://github.com/JuliaLang/Pkg.jl/issues/3002 [#3021]: https://github.com/JuliaLang/Pkg.jl/issues/3021 +[#3266]: https://github.com/JuliaLang/Pkg.jl/pull/3266 +[#4266]: https://github.com/JuliaLang/Pkg.jl/pull/4266 +[#4310]: https://github.com/JuliaLang/Pkg.jl/pull/4310 +[#3329]: https://github.com/JuliaLang/Pkg.jl/pull/3329 +[#4262]: https://github.com/JuliaLang/Pkg.jl/pull/4262 +[#4293]: https://github.com/JuliaLang/Pkg.jl/pull/4293 +[#4304]: https://github.com/JuliaLang/Pkg.jl/pull/4304 +[#3344]: https://github.com/JuliaLang/Pkg.jl/pull/3344 +[#2330]: https://github.com/JuliaLang/Pkg.jl/pull/2330 +[#3122]: https://github.com/JuliaLang/Pkg.jl/pull/3122 +[#3854]: https://github.com/JuliaLang/Pkg.jl/pull/3854 +[#4282]: https://github.com/JuliaLang/Pkg.jl/pull/4282 +[#4302]: https://github.com/JuliaLang/Pkg.jl/pull/4302 +[#4309]: https://github.com/JuliaLang/Pkg.jl/pull/4309 +[#4299]: 
https://github.com/JuliaLang/Pkg.jl/pull/4299 +[#4295]: https://github.com/JuliaLang/Pkg.jl/pull/4295 +[#4277]: https://github.com/JuliaLang/Pkg.jl/pull/4277 +[#4297]: https://github.com/JuliaLang/Pkg.jl/pull/4297 +[#2245]: https://github.com/JuliaLang/Pkg.jl/pull/2245 +[#4303]: https://github.com/JuliaLang/Pkg.jl/pull/4303 +[#4254]: https://github.com/JuliaLang/Pkg.jl/pull/4254 +[#4270]: https://github.com/JuliaLang/Pkg.jl/pull/4270 +[#4263]: https://github.com/JuliaLang/Pkg.jl/pull/4263 +[#4229]: https://github.com/JuliaLang/Pkg.jl/pull/4229 +[#4274]: https://github.com/JuliaLang/Pkg.jl/pull/4274 +[#4311]: https://github.com/JuliaLang/Pkg.jl/pull/4311 +[#4300]: https://github.com/JuliaLang/Pkg.jl/pull/4300 +[#4307]: https://github.com/JuliaLang/Pkg.jl/pull/4307 +[#4308]: https://github.com/JuliaLang/Pkg.jl/pull/4308 +[#4312]: https://github.com/JuliaLang/Pkg.jl/pull/4312 +[#4305]: https://github.com/JuliaLang/Pkg.jl/pull/4305 +[#4170]: https://github.com/JuliaLang/Pkg.jl/pull/4170 +[#4287]: https://github.com/JuliaLang/Pkg.jl/pull/4287 diff --git a/Project.toml b/Project.toml index 4ddbbefd00..cdc10d1205 100644 --- a/Project.toml +++ b/Project.toml @@ -3,7 +3,7 @@ uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" keywords = ["package management"] license = "MIT" desc = "The next-generation Julia package manager." 
-version = "1.12.0" +version = "1.13.0" [workspace] projects = ["test", "docs"] @@ -32,4 +32,20 @@ REPL = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" REPLExt = "REPL" [compat] +Artifacts = "1.11" +Dates = "1.11" +Downloads = "1.6" +FileWatching = "1.11" +LibGit2 = "1.11" +Libdl = "1.11" +Logging = "1.11" +Markdown = "1.11" +Printf = "1.11" +Random = "1.11" +REPL = "1.11" +SHA = "0.7" +TOML = "1" +Tar = "1.10" +UUIDs = "1.11" julia = "1.12" +p7zip_jll = "17.5" diff --git a/README.md b/README.md index 5cc370c4c9..ad4ec9f25c 100644 --- a/README.md +++ b/README.md @@ -13,12 +13,29 @@ If you want to develop this package do the following steps: - Make a fork and then clone the repo locally on your computer - Change the current directory to the Pkg repo you just cloned and start julia with `julia --project`. - `import Pkg` will now load the files in the cloned repo instead of the Pkg stdlib. -- To test your changes, simply do `include("test/runtests.jl")`. +- To test your changes, simply do `Pkg.test()`. If you need to build Julia from source with a Git checkout of Pkg, then instead use `make DEPS_GIT=Pkg` when building Julia. The `Pkg` repo is in `stdlib/Pkg`, and created initially with a detached `HEAD`. If you're doing this from a pre-existing Julia repository, you may need to `make clean` beforehand. If you need to build Julia from source with Git checkouts of two or more stdlibs, please see the instructions in the [`Building Julia from source with a Git checkout of a stdlib`](https://github.com/JuliaLang/julia/blob/master/doc/src/devdocs/build/build.md#building-julia-from-source-with-a-git-checkout-of-a-stdlib) section of the [`doc/src/devdocs/build/build.md`](https://github.com/JuliaLang/julia/blob/master/doc/src/devdocs/build/build.md) file within the Julia devdocs. +## Pre-commit hooks + +This repository uses pre-commit hooks to automatically check and format code before commits. 
The hooks perform various checks including: + +- File size and case conflict validation +- YAML syntax checking +- Trailing whitespace removal and line ending fixes +- Julia code formatting with Runic + +To install and use the pre-commit hooks: + +1. Install pre-commit: `pip install pre-commit` (or use your system's package manager) +2. Install the hooks: `pre-commit install` from the root of the repository +3. Run on all files: `pre-commit run --all-files` from the root of the repository + +Once installed, the hooks will run automatically on each commit. You can also run them manually anytime with `pre-commit run`. + ## Synchronization with the Julia repo To check which commit julia master uses see [JuliaLang/julia/stdlib/Pkg.version](https://github.com/JuliaLang/julia/blob/master/stdlib/Pkg.version). diff --git a/contrib/list_missing_pkg_tags.jl b/contrib/list_missing_pkg_tags.jl new file mode 100644 index 0000000000..93309889aa --- /dev/null +++ b/contrib/list_missing_pkg_tags.jl @@ -0,0 +1,89 @@ +using LibGit2 + +const JULIA_REPO_URL = "https://github.com/JuliaLang/julia.git" +const JULIA_REPO_DIR = "julia" +const PKG_VERSION_PATH = "stdlib/Pkg.version" +const PKG_REPO_URL = "https://github.com/JuliaLang/Pkg.jl.git" +const PKG_REPO_DIR = "Pkg.jl" + +function checkout_or_update_repo(url, dir) + return if isdir(dir) + println("Updating existing repository: $dir") + repo = LibGit2.GitRepo(dir) + LibGit2.fetch(repo) + else + println("Cloning repository: $url") + LibGit2.clone(url, dir) + end +end + +function get_tags(repo) + refs = LibGit2.ref_list(repo) + tags = filter(ref -> startswith(ref, "refs/tags/"), refs) + return sort!(replace.(tags, "refs/tags/" => "")) +end + +function is_stable_v1_release(tag) + return occursin(r"^v\d+\.\d+\.\d+$", tag) && VersionNumber(tag) >= v"1.0.0" +end + +function extract_pkg_sha1(text::AbstractString) + m = match(r"PKG_SHA1\s*=\s*([a-f0-9]{40})", text) + return m !== nothing ? 
m[1] : nothing +end + +function get_commit_hash_for_pkg_version(repo, tag) + return try + tag_ref = LibGit2.GitReference(repo, "refs/tags/" * tag) + LibGit2.checkout!(repo, string(LibGit2.GitHash(LibGit2.peel(tag_ref)))) + version_file = joinpath(JULIA_REPO_DIR, PKG_VERSION_PATH) + if isfile(version_file) + return extract_pkg_sha1(readchomp(version_file)) + else + println("Warning: Pkg.version file missing for tag $tag") + return nothing + end + catch + println("Error processing tag $tag") + rethrow() + end +end + +tempdir = mktempdir() +cd(tempdir) do + # Update Julia repo + checkout_or_update_repo(JULIA_REPO_URL, JULIA_REPO_DIR) + julia_repo = LibGit2.GitRepo(JULIA_REPO_DIR) + + # Get Julia tags, filtering only stable releases + julia_tags = filter(is_stable_v1_release, get_tags(julia_repo)) + version_commit_map = Dict{String, String}() + + for tag in julia_tags + println("Processing Julia tag: $tag") + commit_hash = get_commit_hash_for_pkg_version(julia_repo, tag) + if commit_hash !== nothing + version_commit_map[tag] = commit_hash + end + end + + # Update Pkg.jl repo + checkout_or_update_repo(PKG_REPO_URL, PKG_REPO_DIR) + pkg_repo = LibGit2.GitRepo(PKG_REPO_DIR) + + # Get existing tags in Pkg.jl + pkg_tags = Set(get_tags(pkg_repo)) + + # Filter out versions that already exist + missing_versions = filter(v -> v ∉ pkg_tags, collect(keys(version_commit_map))) + + # Sort versions numerically + sort!(missing_versions, by = VersionNumber) + + # Generate `git tag` commands + println("\nGit tag commands for missing Pkg.jl versions:") + for version in missing_versions + commit = version_commit_map[version] + println("git tag $version $commit") + end +end diff --git a/docs/NEWS-update.jl b/docs/NEWS-update.jl index 3812e9e437..d0ca10d391 100644 --- a/docs/NEWS-update.jl +++ b/docs/NEWS-update.jl @@ -7,11 +7,11 @@ s = read(NEWS, String) m = match(r"\[#[0-9]+\]:", s) if m !== nothing - s = s[1:m.offset-1] + s = s[1:(m.offset - 1)] end footnote(n) = "[#$n]: 
https://github.com/JuliaLang/Pkg.jl/issues/$n" -N = map(m -> parse(Int,m.captures[1]), eachmatch(r"\[#([0-9]+)\]", s)) +N = map(m -> parse(Int, m.captures[1]), eachmatch(r"\[#([0-9]+)\]", s)) foots = join(map(footnote, sort!(unique(N))), "\n") open(NEWS, "w") do f diff --git a/docs/generate.jl b/docs/generate.jl index fa4af617ef..3d227f1374 100644 --- a/docs/generate.jl +++ b/docs/generate.jl @@ -4,38 +4,42 @@ function generate(io, command) cmd_nospace = replace(command, " " => "-") - println(io, """ - ```@raw html -
-
- - $(command) - - — - REPL command -
-
- ``` - ```@eval - using Pkg - Dict(Pkg.REPLMode.canonical_names())["$(command)"].help - ``` - ```@raw html -
-
- ``` - """) + return println( + io, """ + ```@raw html +
+
+ + $(command) + + — + REPL command +
+
+ ``` + ```@eval + using Pkg + Dict(Pkg.REPLMode.canonical_names())["$(command)"].help + ``` + ```@raw html +
+
+ ``` + """ + ) end function generate() io = IOBuffer() - println(io, """ + println( + io, """ # [**11.** REPL Mode Reference](@id REPL-Mode-Reference) This section describes available commands in the Pkg REPL. The Pkg REPL mode is mostly meant for interactive use, and for non-interactive use it is recommended to use the functional API, see [API Reference](@ref API-Reference). - """) + """ + ) # list commands println(io, "## `package` commands") foreach(command -> generate(io, command), ["add", "build", "compat", "develop", "free", "generate", "pin", "remove", "test", "update"]) diff --git a/docs/make.jl b/docs/make.jl index be6905de5a..6b38dad0d7 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -9,7 +9,7 @@ const formats = Any[ Documenter.HTML( prettyurls = get(ENV, "CI", nothing) == "true", canonical = "https://julialang.github.io/Pkg.jl/v1/", - assets = ["assets/custom.css"], + assets = ["assets/custom.css", "assets/favicon.ico"], ), ] if "pdf" in ARGS @@ -17,7 +17,7 @@ if "pdf" in ARGS end # setup for doctesting -DocMeta.setdocmeta!(Pkg.BinaryPlatforms, :DocTestSetup, :(using Base.BinaryPlatforms); recursive=true) +DocMeta.setdocmeta!(Pkg.BinaryPlatforms, :DocTestSetup, :(using Base.BinaryPlatforms); recursive = true) # Run doctests first and disable them in makedocs Documenter.doctest(joinpath(@__DIR__, "src"), [Pkg]) @@ -35,6 +35,7 @@ makedocs( "managing-packages.md", "environments.md", "creating-packages.md", + "apps.md", "compatibility.md", "registries.md", "artifacts.md", @@ -42,6 +43,8 @@ makedocs( "toml-files.md", "repl.md", "api.md", + "protocol.md", + "depots.md", ], ) diff --git a/docs/src/api.md b/docs/src/api.md index 61979453b9..d87169077f 100644 --- a/docs/src/api.md +++ b/docs/src/api.md @@ -1,4 +1,4 @@ -# [**12.** API Reference](@id API-Reference) +# [**13.** API Reference](@id API-Reference) This section describes the functional API for interacting with Pkg.jl. 
It is recommended to use the functional API, rather than the Pkg REPL mode, @@ -39,6 +39,7 @@ Pkg.gc Pkg.status Pkg.compat Pkg.precompile +Pkg.autoprecompilation_enabled Pkg.offline Pkg.why Pkg.dependencies @@ -79,3 +80,10 @@ Pkg.Artifacts.ensure_artifact_installed Pkg.Artifacts.ensure_all_artifacts_installed Pkg.Artifacts.archive_artifact ``` + +## [Package Server Authentication Hooks](@id Package-Server-Authentication-Hooks) + +```@docs +Pkg.PlatformEngines.register_auth_error_handler +Pkg.PlatformEngines.deregister_auth_error_handler +``` diff --git a/docs/src/apps.md b/docs/src/apps.md new file mode 100644 index 0000000000..00b12cada9 --- /dev/null +++ b/docs/src/apps.md @@ -0,0 +1,100 @@ +# [**6.** Apps](@id Apps) + +!!! note + The app support in Pkg is currently considered experimental and some functionality and API may change. + + Some inconveniences that can be encountered are: + - You need to manually make `~/.julia/bin` available on the PATH environment. + - The path to the julia executable used is the same as the one used to install the app. If this + julia installation gets removed, you might need to reinstall the app. + +Apps are Julia packages that are intended to be run as "standalone programs" (by e.g. typing the name of the app in the terminal possibly together with some arguments or flags/options). +This is in contrast to most Julia packages that are used as "libraries" and are loaded by other files or in the Julia REPL. + +## Creating a Julia app + +A Julia app is structured similar to a standard Julia library with the following additions: + +- A `@main` entry point in the package module (see the [Julia help on `@main`](https://docs.julialang.org/en/v1/manual/command-line-interface/#The-Main.main-entry-point) for details) +- An `[apps]` section in the `Project.toml` file listing the executable names that the package provides. 
+ +A very simple example of an app that prints the reversed input arguments would be: + +```julia +# src/MyReverseApp.jl +module MyReverseApp + +function (@main)(ARGS) + for arg in ARGS + print(stdout, reverse(arg), " ") + end + return +end + +end # module +``` + +```toml +# Project.toml + +# standard fields here + +[apps] +reverse = {} +``` +The empty table `{}` is to allow for giving metadata about the app but it is currently unused. + +After installing this app one could run: + +``` +$ reverse some input string + emos tupni gnirts +``` + +directly in the terminal. + +## Multiple Apps per Package + +A single package can define multiple apps by using submodules. Each app can have its own entry point in a different submodule of the package. + +```julia +# src/MyMultiApp.jl +module MyMultiApp + +function (@main)(ARGS) + println("Main app: ", join(ARGS, " ")) +end + +include("CLI.jl") + +end # module +``` + +```julia +# src/CLI.jl +module CLI + +function (@main)(ARGS) + println("CLI submodule: ", join(ARGS, " ")) +end + +end # module CLI +``` + +```toml +# Project.toml + +# standard fields here + +[apps] +main-app = {} +cli-app = { submodule = "CLI" } +``` + +This will create two executables: +- `main-app` that runs `julia -m MyMultiApp` +- `cli-app` that runs `julia -m MyMultiApp.CLI` + +## Installing Julia apps + +The installation of Julia apps is similar to [installing Julia libraries](@ref Managing-Packages) but instead of using e.g. `Pkg.add` or `pkg> add` one uses `Pkg.Apps.add` or `pkg> app add` (`develop` is also available). diff --git a/docs/src/artifacts.md b/docs/src/artifacts.md index 66a55f99f5..d5fe5f38b7 100644 --- a/docs/src/artifacts.md +++ b/docs/src/artifacts.md @@ -1,4 +1,4 @@ -# [**8.** Artifacts](@id Artifacts) +# [**9.** Artifacts](@id Artifacts) `Pkg` can install and manage containers of data that are not Julia packages. 
These containers can contain platform-specific binaries, datasets, text, or any other kind of data that would be convenient to place within an immutable, life-cycled datastore. These containers, (called "Artifacts") can be created locally, hosted anywhere, and automatically downloaded and unpacked upon installation of your Julia package. @@ -230,7 +230,7 @@ This is deduced automatically by the `artifacts""` string macro, however, if you !!! compat "Julia 1.7" Pkg's extended platform selection requires at least Julia 1.7, and is considered experimental. -New in Julia 1.6, `Platform` objects can have extended attributes applied to them, allowing artifacts to be tagged with things such as CUDA driver version compatibility, microarchitectural compatibility, julia version compatibility and more! +New in Julia 1.7, `Platform` objects can have extended attributes applied to them, allowing artifacts to be tagged with things such as CUDA driver version compatibility, microarchitectural compatibility, julia version compatibility and more! Note that this feature is considered experimental and may change in the future. If you as a package developer find yourself needing this feature, please get in contact with us so it can evolve for the benefit of the whole ecosystem. In order to support artifact selection at `Pkg.add()` time, `Pkg` will run the specially-named file `/.pkg/select_artifacts.jl`, passing the current platform triplet as the first argument. 
diff --git a/docs/src/assets/favicon.ico b/docs/src/assets/favicon.ico new file mode 100644 index 0000000000..eeb1edd944 Binary files /dev/null and b/docs/src/assets/favicon.ico differ diff --git a/docs/src/basedocs.md b/docs/src/basedocs.md index 7d51728ffe..9e07aa4ca9 100644 --- a/docs/src/basedocs.md +++ b/docs/src/basedocs.md @@ -4,7 +4,7 @@ EditURL = "https://github.com/JuliaLang/Pkg.jl/blob/master/docs/src/basedocs.md" # Pkg -Pkg is Julia's builtin package manager, and handles operations +Pkg is Julia's built-in package manager, and handles operations such as installing, updating and removing packages. !!! note diff --git a/docs/src/compatibility.md b/docs/src/compatibility.md index bc1c58e3e9..dee8b05841 100644 --- a/docs/src/compatibility.md +++ b/docs/src/compatibility.md @@ -1,4 +1,4 @@ -# [**6.** Compatibility](@id Compatibility) +# [**7.** Compatibility](@id Compatibility) Compatibility refers to the ability to restrict the versions of the dependencies that your project is compatible with. If the compatibility for a dependency is not given, the project is assumed to be compatible with all versions of that dependency. @@ -22,7 +22,7 @@ The format of the version specifier is described in detail below. The rules below apply to the `Project.toml` file; for registries, see [Registry Compat.toml](@ref). !!! info - Note that registration into Julia's General Registry requires each dependency to have a `[compat`] entry with an upper bound. + Note that registration into Julia's General Registry requires each dependency to have a `[compat]` entry with an upper bound. 
## Version specifier format @@ -97,7 +97,7 @@ PkgA = "~1.2.3" # [1.2.3, 1.3.0) PkgB = "~1.2" # [1.2.0, 1.3.0) PkgC = "~1" # [1.0.0, 2.0.0) PkgD = "~0.2.3" # [0.2.3, 0.3.0) -PkgE = "~0.0.3" # [0.0.3, 0.0.4) +PkgE = "~0.0.3" # [0.0.3, 0.1.0) PkgF = "~0.0" # [0.0.0, 0.1.0) PkgG = "~0" # [0.0.0, 1.0.0) ``` @@ -164,7 +164,7 @@ PkgA = "0.2 - 0" # 0.2.0 - 0.*.* = [0.2.0, 1.0.0) ``` -## Fixing conflicts +## [Fixing conflicts](@id Fixing-conflicts) Version conflicts were introduced previously with an [example](@ref conflicts) of a conflict arising in a package `D` used by two other packages, `B` and `C`. diff --git a/docs/src/creating-packages.md b/docs/src/creating-packages.md index 7bb72c2e91..4e01f0c27a 100644 --- a/docs/src/creating-packages.md +++ b/docs/src/creating-packages.md @@ -11,7 +11,7 @@ To generate the bare minimum files for a new package, use `pkg> generate`. ```julia-repl -(@v1.8) pkg> generate HelloWorld +(@v1.10) pkg> generate HelloWorld ``` This creates a new project `HelloWorld` in a subdirectory by the same name, with the following files (visualized with the external [`tree` command](https://linux.die.net/man/1/tree)): @@ -118,7 +118,7 @@ describe about public symbols. A public symbol is a symbol that is exported from package with the `export` keyword or marked as public with the `public` keyword. When you change the behavior of something that was previously public so that the new version no longer conforms to the specifications provided in the old version, you should -adjust your package version number according to [Julia's variant on SemVer](#Version-specifier-format). +adjust your package version number according to [Julia's variant on SemVer](@ref Version-specifier-format). If you would like to include a symbol in your public API without exporting it into the global namespace of folks who call `using YourPackage`, you should mark that symbol as public with `public that_symbol`. 
Symbols marked as public with the `public` keyword are @@ -127,7 +127,7 @@ just as public as those marked as public with the `export` keyword, but when fol `YourPackage.that_symbol`. Let's say we would like our `greet` function to be part of the public API, but not the -`greet_alien` function. We could the write the following and release it as version `1.0.0`. +`greet_alien` function. We could then write the following and release it as version `1.0.0`. ```julia module HelloWorld @@ -562,10 +562,10 @@ duplicated into `[extras]`. This is an unfortunate duplication, but without doing this the project verifier under older Julia versions will throw an error if it finds packages under `[compat]` that is not listed in `[extras]`. -## Package naming rules +## Package naming guidelines Package names should be sensible to most Julia users, *even to those who are not domain experts*. -The following rules apply to the `General` registry but may be useful for other package +The following guidelines apply to the `General` registry but may be useful for other package registries as well. Since the `General` registry belongs to the entire community, people may have opinions about @@ -575,8 +575,10 @@ may fit your package better. 1. Avoid jargon. In particular, avoid acronyms unless there is minimal possibility of confusion. - * It's ok to say `USA` if you're talking about the USA. - * It's not ok to say `PMA`, even if you're talking about positive mental attitude. + * It's ok for package names to contain `DNA` if you're talking about the DNA, which has a universally agreed upon definition. + * It's more difficult to justify package names containing the acronym `CI` for instance, which may mean continuous integration, confidence interval, etc. + * If there is risk of confusion it may be best to disambiguate an acronym with additional words such as a lab group or field. 
+ * If your acronym is unambiguous, easily searchable, and/or unlikely to be confused across domains a good justification is often enough for approval. 2. Avoid using `Julia` in your package name or prefixing it with `Ju`. * It is usually clear from context and to your users that the package is a Julia package. @@ -593,6 +595,7 @@ may fit your package better. 4. Err on the side of clarity, even if clarity seems long-winded to you. * `RandomMatrices` is a less ambiguous name than `RndMat` or `RMT`, even though the latter are shorter. + * Generally package names should be at least 5 characters long not including the `.jl` extension 5. A less systematic name may suit a package that implements one of several possible approaches to its domain. @@ -621,9 +624,12 @@ may fit your package better. there's no copyright or trademark infringement etc.) 9. Packages should follow the [Stylistic Conventions](https://docs.julialang.org/en/v1/manual/variables/#Stylistic-Conventions). - * The package name begin with a capital letter and word separation is shown with upper camel case + * The package name should begin with a capital letter and word separation is shown with upper camel case * Packages that provide the functionality of a project from another language should use the Julia convention - * Packages that [provide pre-built libraries and executables](https://docs.binarybuilder.org/stable/jll/) can keep orignal name, but should get `_jll`as a suffix. For example `pandoc_jll` wraps pandoc. However, note that the generation and release of most JLL packages is handled by the [Yggdrasil](https://github.com/JuliaPackaging/Yggdrasil) system. + * Packages that [provide pre-built libraries and executables](https://docs.binarybuilder.org/stable/jll/) can keep their original name, but should get `_jll`as a suffix. For example `pandoc_jll` wraps pandoc. 
However, note that the generation and release of most JLL packages is handled by the [Yggdrasil](https://github.com/JuliaPackaging/Yggdrasil) system. + +10. For the complete list of rules for automatic merging into the General registry, see [these guidelines](https://juliaregistries.github.io/RegistryCI.jl/stable/guidelines/). + ## Registering packages @@ -649,3 +655,10 @@ To support the various use cases in the Julia package ecosystem, the Pkg develop * [`Preferences.jl`](https://github.com/JuliaPackaging/Preferences.jl) allows packages to read and write preferences to the top-level `Project.toml`. These preferences can be read at runtime or compile-time, to enable or disable different aspects of package behavior. Packages previously would write out files to their own package directories to record options set by the user or environment, but this is highly discouraged now that `Preferences` is available. + +## See Also + +- [Managing Packages](@ref Managing-Packages) - Learn how to add, update, and manage package dependencies +- [Working with Environments](@ref Working-with-Environments) - Understand environments and reproducible development +- [Compatibility](@ref Compatibility) - Specify version constraints for dependencies +- [API Reference](@ref) - Functional API for non-interactive package management diff --git a/docs/src/depots.md b/docs/src/depots.md new file mode 100644 index 0000000000..1d854b9367 --- /dev/null +++ b/docs/src/depots.md @@ -0,0 +1,55 @@ +# **15.** Depots + +The packages installed for a particular environment, defined in the +files `Project.toml` and `Manifest.toml` within the directory +structure, are not actually installed within that directory but into a +"depot". The location of the depots are set by the variable +[`DEPOT_PATH`](https://docs.julialang.org/en/v1/base/constants/#Base.DEPOT_PATH). 
+ +For details on the default depot locations and how they vary by installation method, +see the [`DEPOT_PATH`](https://docs.julialang.org/en/v1/base/constants/#Base.DEPOT_PATH) documentation. + +Packages which are installed by a user go into the first depot and the Julia +standard library is in the last depot. + +You should not need to manage the user depot directly. Pkg will automatically clean up +the depots when packages are removed after a delay. However you may want to manually +remove old `.julia/compiled/` subdirectories if you have any that reside for older Julia +versions that you no longer use (hence have not been run to tidy themselves up). + +## Configuring the depot path with `JULIA_DEPOT_PATH` + +The depot path can be configured using the `JULIA_DEPOT_PATH` environment variable, +which is used to populate the global Julia [`DEPOT_PATH`](https://docs.julialang.org/en/v1/base/constants/#Base.DEPOT_PATH) variable +at startup. For complete details on the behavior of this environment variable, +see the [environment variables documentation](https://docs.julialang.org/en/v1/manual/environment-variables/#JULIA_DEPOT_PATH). + +Unlike the shell `PATH` variable, empty entries in `JULIA_DEPOT_PATH` +have special behavior for easy overriding of the user depot while retaining access to system resources. +For example, to switch the user depot to `/custom/depot` while still accessing bundled +resources, use a trailing path separator: + +```bash +export JULIA_DEPOT_PATH="/custom/depot:" +``` + +!!! note + The trailing path separator (`:` on Unix, `;` on Windows) is crucial for including + the default system depots, which contain the standard library and other bundled + resources. Without it, Julia will only use the specified depot and will have to precompile + standard library packages, which can be time-consuming and inefficient. 
+ +## Shared depots for distributed computing + +When using Julia in distributed computing environments, such as high-performance computing +(HPC) clusters, it's recommended to use a shared depot via `JULIA_DEPOT_PATH`. This allows +multiple Julia processes to share precompiled packages and reduces redundant compilation. + +Since Julia v1.10, multiple processes using the same depot coordinate via pidfile locks +to ensure only one process precompiles a package while others wait. However, due to +the caching of native code in pkgimages since v1.9, you may need to set the `JULIA_CPU_TARGET` +environment variable appropriately to ensure cache compatibility across different +worker nodes with varying CPU capabilities. + +For more details, see the [FAQ section on distributed computing](https://docs.julialang.org/en/v1/manual/faq/#Computing-cluster) +and the [environment variables documentation](https://docs.julialang.org/en/v1/manual/environment-variables/#JULIA_CPU_TARGET). diff --git a/docs/src/environments.md b/docs/src/environments.md index 54fa4e9fe9..1464f5db60 100644 --- a/docs/src/environments.md +++ b/docs/src/environments.md @@ -1,16 +1,16 @@ -# [**4.** Working with Environment](@id Working-with-Environments) +# [**4.** Working with Environments](@id Working-with-Environments) The following discusses Pkg's interaction with environments. For more on the role, environments play in code loading, including the "stack" of environments from which code can be loaded, see [this section in the Julia manual](https://docs.julialang.org/en/v1/manual/code-loading/#Environments-1). ## Creating your own environments -So far we have added packages to the default environment at `~/.julia/environments/v1.9`. It is however easy to create other, independent, projects. +So far we have added packages to the default environment at `~/.julia/environments/v1.10`. It is however easy to create other, independent, projects. 
This approach has the benefit of allowing you to check in a `Project.toml`, and even a `Manifest.toml` if you wish, into version control (e.g. git) alongside your code. It should be pointed out that when two projects use the same package at the same version, the content of this package is not duplicated. In order to create a new project, create a directory for it and then activate that directory to make it the "active project", which package operations manipulate: ```julia-repl -(@v1.9) pkg> activate MyProject +(@v1.10) pkg> activate MyProject Activating new environment at `~/MyProject/Project.toml` (MyProject) pkg> st @@ -28,7 +28,7 @@ false Installed Example ─ v0.5.3 Updating `~/MyProject/Project.toml` [7876af07] + Example v0.5.3 - Updating `~~/MyProject/Manifest.toml` + Updating `~/MyProject/Manifest.toml` [7876af07] + Example v0.5.3 Precompiling environment... 1 dependency successfully precompiled in 2 seconds @@ -45,7 +45,7 @@ Example = "7876af07-990d-54b4-ab0e-23690620f79a" julia> print(read(joinpath("MyProject", "Manifest.toml"), String)) # This file is machine-generated - editing it directly is not advised -julia_version = "1.9.4" +julia_version = "1.10.0" manifest_format = "2.0" project_hash = "2ca1c6c58cb30e79e021fb54e5626c96d05d5fdc" @@ -66,7 +66,7 @@ shell> git clone https://github.com/JuliaLang/Example.jl.git Cloning into 'Example.jl'... ... -(@v1.12) pkg> activate Example.jl +(@v1.10) pkg> activate Example.jl Activating project at `~/Example.jl` (Example) pkg> instantiate @@ -82,7 +82,7 @@ If you only have a `Project.toml`, a `Manifest.toml` must be generated by "resol If you already have a resolved `Manifest.toml`, then you will still need to ensure that the packages are installed and with the correct versions. Again `instantiate` does this for you. -In short, `instantiate` is your friend to make sure an environment is ready to use. If there's nothing to do, `instantiate` does nothing. 
+In short, [`instantiate`](@ref Pkg.instantiate) is your friend to make sure an environment is ready to use. If there's nothing to do, `instantiate` does nothing. !!! note "Specifying project on startup" Instead of using `activate` from within Julia, you can specify the project on startup using @@ -103,7 +103,7 @@ also want a scratch space to try out a new package, or a sandbox to resolve vers between several incompatible packages. ```julia-repl -(@v1.9) pkg> activate --temp # requires Julia 1.5 or later +(@v1.10) pkg> activate --temp # requires Julia 1.5 or later Activating new environment at `/var/folders/34/km3mmt5930gc4pzq1d08jvjw0000gn/T/jl_a31egx/Project.toml` (jl_a31egx) pkg> add Example @@ -117,18 +117,18 @@ between several incompatible packages. ## Shared environments -A "shared" environment is simply an environment that exists in `~/.julia/environments`. The default `v1.9` environment is +A "shared" environment is simply an environment that exists in `~/.julia/environments`. The default `v1.10` environment is therefore a shared environment: ```julia-repl -(@v1.9) pkg> st -Status `~/.julia/environments/v1.9/Project.toml` +(@v1.10) pkg> st +Status `~/.julia/environments/v1.10/Project.toml` ``` Shared environments can be activated with the `--shared` flag to `activate`: ```julia-repl -(@v1.9) pkg> activate --shared mysharedenv +(@v1.10) pkg> activate --shared mysharedenv Activating project at `~/.julia/environments/mysharedenv` (@mysharedenv) pkg> @@ -151,7 +151,7 @@ or using Pkg's precompile option, which can precompile the entire environment, o which can be significantly faster than the code-load route above. ```julia-repl -(@v1.9) pkg> precompile +(@v1.10) pkg> precompile Precompiling environment... 23 dependencies successfully precompiled in 36 seconds ``` @@ -165,11 +165,11 @@ By default, any package that is added to a project or updated in a Pkg action wi with its dependencies. 
```julia-repl -(@v1.9) pkg> add Images +(@v1.10) pkg> add Images Resolving package versions... - Updating `~/.julia/environments/v1.9/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` [916415d5] + Images v0.25.2 - Updating `~/.julia/environments/v1.9/Manifest.toml` + Updating `~/.julia/environments/v1.10/Manifest.toml` ... Precompiling environment... Progress [===================> ] 45/97 @@ -190,16 +190,53 @@ If a given package version errors during auto-precompilation, Pkg will remember automatically tries and will skip that package with a brief warning. Manual precompilation can be used to force these packages to be retried, as `pkg> precompile` will always retry all packages. -To disable the auto-precompilation, set `ENV["JULIA_PKG_PRECOMPILE_AUTO"]=0`. - The indicators next to the package names displayed during precompilation -indicate the status of that package's precompilation. +indicate the status of that package's precompilation. - `[◐, ◓, ◑, ◒]` Animated "clock" characters indicate that the package is currently being precompiled. - `✓` A green checkmark indicates that the package has been successfully precompiled (after which that package will disappear from the list). If the checkmark is yellow it means that the package is currently loaded so the session will need to be restarted to access the version that was just precompiled. - `?` A question mark character indicates that a `PrecompilableError` was thrown, indicating that precompilation was disallowed, i.e. `__precompile__(false)` in that package. - `✗` A cross indicates that the package failed to precompile. +#### Controlling Auto-precompilation + +Auto-precompilation can be controlled in several ways: + +- **Environment variable**: Set `ENV["JULIA_PKG_PRECOMPILE_AUTO"]=0` to disable auto-precompilation globally. 
+- **Programmatically**: Use `Pkg.autoprecompilation_enabled(false)` to disable auto-precompilation for the current session, or `Pkg.autoprecompilation_enabled(true)` to re-enable it. +- **Scoped control**: Use `Pkg.precompile(f, args...; kwargs...)` to execute a function `f` with auto-precompilation temporarily disabled, then automatically trigger precompilation afterward if any packages were modified during the execution. + +!!! compat "Julia 1.13" + The `Pkg.autoprecompilation_enabled()` function and `Pkg.precompile()` do-block syntax require at least Julia 1.13. + +For example, to add multiple packages without triggering precompilation after each one: + +```julia-repl +julia> Pkg.precompile() do + Pkg.add("Example") + Pkg.dev("JSON") + Pkg.update("HTTP") + end + Resolving package versions... + ... +Precompiling environment... + 14 dependencies successfully precompiled in 25 seconds +``` + +Or to temporarily disable auto-precompilation: + +```julia-repl +julia> Pkg.autoprecompilation_enabled(false) +false + +julia> Pkg.add("Example") # No precompilation happens + Resolving package versions... + ... + +julia> Pkg.autoprecompilation_enabled(true) +true +``` + ### Precompiling new versions of loaded packages If a package that has been updated is already loaded in the session, the precompilation process will go ahead and precompile diff --git a/docs/src/getting-started.md b/docs/src/getting-started.md index 58693bc583..d822ebd3fa 100644 --- a/docs/src/getting-started.md +++ b/docs/src/getting-started.md @@ -22,18 +22,18 @@ To get back to the Julia REPL, press `Ctrl+C` or backspace (when the REPL cursor Upon entering the Pkg REPL, you should see the following prompt: ```julia-repl -(@v1.9) pkg> +(@v1.10) pkg> ``` To add a package, use `add`: ```julia-repl -(@v1.9) pkg> add Example +(@v1.10) pkg> add Example Resolving package versions... 
Installed Example ─ v0.5.3 - Updating `~/.julia/environments/v1.9/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` [7876af07] + Example v0.5.3 - Updating `~/.julia/environments/v1.9/Manifest.toml` + Updating `~/.julia/environments/v1.10/Manifest.toml` [7876af07] + Example v0.5.3 ``` @@ -49,14 +49,14 @@ julia> Example.hello("friend") We can also specify multiple packages at once to install: ```julia-repl -(@v1.9) pkg> add JSON StaticArrays +(@v1.10) pkg> add JSON StaticArrays ``` The `status` command (or the shorter `st` command) can be used to see installed packages. ```julia-repl -(@v1.9) pkg> st -Status `~/.julia/environments/v1.6/Project.toml` +(@v1.10) pkg> st +Status `~/.julia/environments/v1.10/Project.toml` [7876af07] Example v0.5.3 [682c06a0] JSON v0.21.3 [90137ffa] StaticArrays v1.5.9 @@ -68,13 +68,13 @@ Status `~/.julia/environments/v1.6/Project.toml` To remove packages, use `rm` (or `remove`): ```julia-repl -(@v1.9) pkg> rm JSON StaticArrays +(@v1.10) pkg> rm JSON StaticArrays ``` Use `up` (or `update`) to update the installed packages ```julia-repl -(@v1.9) pkg> up +(@v1.10) pkg> up ``` If you have been following this guide it is likely that the packages installed are at the latest version @@ -82,13 +82,13 @@ so `up` will not do anything. Below we show the status output in the case where an old version of the Example package and then upgrade it: ```julia-repl -(@v1.9) pkg> st -Status `~/.julia/environments/v1.9/Project.toml` +(@v1.10) pkg> st +Status `~/.julia/environments/v1.10/Project.toml` ⌃ [7876af07] Example v0.5.1 Info Packages marked with ⌃ have new versions available and may be upgradable. 
-(@v1.9) pkg> up - Updating `~/.julia/environments/v1.9/Project.toml` +(@v1.10) pkg> up + Updating `~/.julia/environments/v1.10/Project.toml` [7876af07] ↑ Example v0.5.1 ⇒ v0.5.3 ``` @@ -101,8 +101,8 @@ For more information about managing packages, see the [Managing Packages](@ref M Up to this point, we have covered basic package management: adding, updating, and removing packages. -You may have noticed the `(@v1.9)` in the REPL prompt. -This lets us know that `v1.9` is the **active environment**. +You may have noticed the `(@v1.10)` in the REPL prompt. +This lets us know that `v1.10` is the **active environment**. Different environments can have totally different packages and versions installed from another environment. The active environment is the environment that will be modified by Pkg commands such as `add`, `rm` and `update`. @@ -110,7 +110,7 @@ Let's set up a new environment so we may experiment. To set the active environment, use `activate`: ```julia-repl -(@v1.9) pkg> activate tutorial +(@v1.10) pkg> activate tutorial [ Info: activating new environment at `~/tutorial/Project.toml`. ``` @@ -166,16 +166,16 @@ For more information about environments, see the [Working with Environments](@re If you are ever stuck, you can ask `Pkg` for help: ```julia-repl -(@v1.9) pkg> ? +(@v1.10) pkg> ? ``` You should see a list of available commands along with short descriptions. You can ask for more detailed help by specifying a command: ```julia-repl -(@v1.9) pkg> ?develop +(@v1.10) pkg> ?develop ``` This guide should help you get started with `Pkg`. -`Pkg` has much more to offer in terms of powerful package management, -read the full manual to learn more! +`Pkg` has much more to offer in terms of powerful package management. +For more advanced topics, see [Managing Packages](@ref Managing-Packages), [Working with Environments](@ref Working-with-Environments), and [Creating Packages](@ref creating-packages-tutorial). 
diff --git a/docs/src/glossary.md b/docs/src/glossary.md index 60e0546039..4914150ff1 100644 --- a/docs/src/glossary.md +++ b/docs/src/glossary.md @@ -1,4 +1,4 @@ -# [**9.** Glossary](@id Glossary) +# [**10.** Glossary](@id Glossary) **Project:** a source tree with a standard layout, including a `src` directory for the main body of Julia code, a `test` directory for testing the project, @@ -14,8 +14,8 @@ may optionally have a manifest file: - **Manifest file:** a file in the root directory of a project, named `Manifest.toml` (or `JuliaManifest.toml`), describing a complete dependency graph and exact versions of each package and library used by a project. The file name may - also be suffixed by `-v{major}.{minor}.toml` which julia will prefer if the version - matches `VERSION`, allowing multiple environments to be maintained for different julia + also be suffixed by `-v{major}.{minor}.toml` which Julia will prefer if the version + matches `VERSION`, allowing multiple environments to be maintained for different Julia versions. **Package:** a project which provides reusable functionality that can be used by @@ -46,7 +46,7 @@ since that could conflict with the configuration of the main application. **Environment:** the combination of the top-level name map provided by a project file combined with the dependency graph and map from packages to their entry points -provided by a manifest file. For more detail see the manual section on code loading. +provided by a manifest file. For more detail see the [manual section on code loading](https://docs.julialang.org/en/v1/manual/code-loading/). - **Explicit environment:** an environment in the form of an explicit project file and an optional corresponding manifest file together in a directory. 
If the diff --git a/docs/src/managing-packages.md b/docs/src/managing-packages.md index b5889221cf..3295080ef7 100644 --- a/docs/src/managing-packages.md +++ b/docs/src/managing-packages.md @@ -10,14 +10,14 @@ The most frequently used is `add` and its usage is described first. In the Pkg REPL, packages can be added with the `add` command followed by the name of the package, for example: ```julia-repl -(@v1.8) pkg> add JSON +(@v1.10) pkg> add JSON Installing known registries into `~/` Resolving package versions... Installed Parsers ─ v2.4.0 Installed JSON ──── v0.21.3 - Updating `~/.julia/environments/v1.8/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` [682c06a0] + JSON v0.21.3 - Updating `~/environments/v1.9/Manifest.toml` + Updating `~/.julia/environments/v1.10/Manifest.toml` [682c06a0] + JSON v0.21.3 [69de0a69] + Parsers v2.4.0 [ade2ca70] + Dates @@ -28,7 +28,7 @@ Precompiling environment... 2 dependencies successfully precompiled in 2 seconds ``` -Here we added the package `JSON` to the current environment (which is the default `@v1.8` environment). +Here we added the package `JSON` to the current environment (which is the default `@v1.10` environment). In this example, we are using a fresh Julia installation, and this is our first time adding a package using Pkg. By default, Pkg installs the General registry and uses this registry to look up packages requested for inclusion in the current environment. @@ -40,16 +40,16 @@ It is possible to add multiple packages in one command as `pkg> add A B C`. 
The status output contains the packages you have added yourself, in this case, `JSON`: ```julia-repl -(@v1.11) pkg> st - Status `~/.julia/environments/v1.8/Project.toml` +(@v1.10) pkg> st + Status `~/.julia/environments/v1.10/Project.toml` [682c06a0] JSON v0.21.3 ``` The manifest status shows all the packages in the environment, including recursive dependencies: ```julia-repl -(@v1.11) pkg> st -m -Status `~/environments/v1.9/Manifest.toml` +(@v1.10) pkg> st -m +Status `~/.julia/environments/v1.10/Manifest.toml` [682c06a0] JSON v0.21.3 [69de0a69] Parsers v2.4.0 [ade2ca70] Dates @@ -64,18 +64,18 @@ To specify that you want a particular version (or set of versions) of a package, to require any patch release of the v0.21 series of JSON after v0.21.4, call `compat JSON 0.21.4`: ```julia-repl -(@1.11) pkg> compat JSON 0.21.4 +(@v1.10) pkg> compat JSON 0.21.4 Compat entry set: JSON = "0.21.4" Resolve checking for compliance with the new compat rules... Error empty intersection between JSON@0.21.3 and project compatibility 0.21.4 - 0.21 Suggestion Call `update` to attempt to meet the compatibility requirements. -(@1.11) pkg> update +(@v1.10) pkg> update Updating registry at `~/.julia/registries/General.toml` - Updating `~/.julia/environments/1.11/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` [682c06a0] ↑ JSON v0.21.3 ⇒ v0.21.4 - Updating `~/.julia/environments/1.11/Manifest.toml` + Updating `~/.julia/environments/v1.10/Manifest.toml` [682c06a0] ↑ JSON v0.21.3 ⇒ v0.21.4 ``` @@ -96,11 +96,11 @@ julia> JSON.json(Dict("foo" => [1, "bar"])) |> print A specific version of a package can be installed by appending a version after a `@` symbol to the package name: ```julia-repl -(@v1.8) pkg> add JSON@0.21.1 +(@v1.10) pkg> add JSON@0.21.1 Resolving package versions... 
- Updating `~/.julia/environments/v1.8/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` ⌃ [682c06a0] + JSON v0.21.1 - Updating `~/environments/v1.9/Manifest.toml` + Updating `~/.julia/environments/v1.10/Manifest.toml` ⌃ [682c06a0] + JSON v0.21.1 ⌅ [69de0a69] + Parsers v1.1.2 [ade2ca70] + Dates @@ -118,12 +118,12 @@ If a branch (or a certain commit) of `Example` has a hotfix that is not yet incl we can explicitly track that branch (or commit) by appending `#branchname` (or `#commitSHA1`) to the package name: ```julia-repl -(@v1.8) pkg> add Example#master +(@v1.10) pkg> add Example#master Cloning git-repo `https://github.com/JuliaLang/Example.jl.git` Resolving package versions... - Updating `~/.julia/environments/v1.8/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` [7876af07] + Example v0.5.4 `https://github.com/JuliaLang/Example.jl.git#master` - Updating `~/environments/v1.9/Manifest.toml` + Updating `~/.julia/environments/v1.10/Manifest.toml` [7876af07] + Example v0.5.4 `https://github.com/JuliaLang/Example.jl.git#master` ``` @@ -139,12 +139,12 @@ When updating packages, updates are pulled from that branch. To go back to tracking the registry version of `Example`, the command `free` is used: ```julia-repl -(@v1.8) pkg> free Example +(@v1.10) pkg> free Example Resolving package versions... 
Installed Example ─ v0.5.3 - Updating `~/.julia/environments/v1.8/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` [7876af07] ~ Example v0.5.4 `https://github.com/JuliaLang/Example.jl.git#master` ⇒ v0.5.3 - Updating `~/environments/v1.9/Manifest.toml` + Updating `~/.julia/environments/v1.10/Manifest.toml` [7876af07] ~ Example v0.5.4 `https://github.com/JuliaLang/Example.jl.git#master` ⇒ v0.5.3 ``` @@ -153,12 +153,12 @@ To go back to tracking the registry version of `Example`, the command `free` is If a package is not in a registry, it can be added by specifying a URL to the Git repository: ```julia-repl -(@v1.8) pkg> add https://github.com/fredrikekre/ImportMacros.jl +(@v1.10) pkg> add https://github.com/fredrikekre/ImportMacros.jl Cloning git-repo `https://github.com/fredrikekre/ImportMacros.jl` Resolving package versions... - Updating `~/.julia/environments/v1.8/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` [92a963f6] + ImportMacros v1.0.0 `https://github.com/fredrikekre/ImportMacros.jl#master` - Updating `~/environments/v1.9/Manifest.toml` + Updating `~/.julia/environments/v1.10/Manifest.toml` [92a963f6] + ImportMacros v1.0.0 `https://github.com/fredrikekre/ImportMacros.jl#master` ``` @@ -167,7 +167,7 @@ For unregistered packages, we could have given a branch name (or commit SHA1) to If you want to add a package using the SSH-based `git` protocol, you have to use quotes because the URL contains a `@`. For example, ```julia-repl -(@v1.8) pkg> add "git@github.com:fredrikekre/ImportMacros.jl.git" +(@v1.10) pkg> add "git@github.com:fredrikekre/ImportMacros.jl.git" Cloning git-repo `git@github.com:fredrikekre/ImportMacros.jl.git` Updating registry at `~/.julia/registries/General` Resolving package versions... @@ -188,7 +188,7 @@ repository: pkg> add https://github.com/timholy/SnoopCompile.jl.git:SnoopCompileCore Cloning git-repo `https://github.com/timholy/SnoopCompile.jl.git` Resolving package versions... 
- Updating `~/.julia/environments/v1.8/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` [e2b509da] + SnoopCompileCore v2.9.0 `https://github.com/timholy/SnoopCompile.jl.git:SnoopCompileCore#master` Updating `~/.julia/environments/v1.8/Manifest.toml` [e2b509da] + SnoopCompileCore v2.9.0 `https://github.com/timholy/SnoopCompile.jl.git:SnoopCompileCore#master` @@ -214,15 +214,15 @@ from that local repo are pulled when packages are updated. By only using `add` your environment always has a "reproducible state", in other words, as long as the repositories and registries used are still accessible it is possible to retrieve the exact state of all the dependencies in the environment. This has the advantage that you can send your environment (`Project.toml` and `Manifest.toml`) to someone else and they can [`Pkg.instantiate`](@ref) that environment in the same state as you had it locally. -However, when you are developing a package, it is more convenient to load packages at their current state at some path. For this reason, the `dev` command exists. +However, when you are [developing a package](@ref developing), it is more convenient to load packages at their current state at some path. For this reason, the `dev` command exists. Let's try to `dev` a registered package: ```julia-repl -(@v1.8) pkg> dev Example +(@v1.10) pkg> dev Example Updating git-repo `https://github.com/JuliaLang/Example.jl.git` Resolving package versions... - Updating `~/.julia/environments/v1.8/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` [7876af07] + Example v0.5.4 `~/.julia/dev/Example` Updating `~/.julia/environments/v1.8/Manifest.toml` [7876af07] + Example v0.5.4 `~/.julia/dev/Example` @@ -263,9 +263,9 @@ julia> Example.plusone(1) To stop tracking a path and use the registered version again, use `free`: ```julia-repl -(@v1.8) pkg> free Example +(@v1.10) pkg> free Example Resolving package versions... 
- Updating `~/.julia/environments/v1.8/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` [7876af07] ~ Example v0.5.4 `~/.julia/dev/Example` ⇒ v0.5.3 Updating `~/.julia/environments/v1.8/Manifest.toml` [7876af07] ~ Example v0.5.4 `~/.julia/dev/Example` ⇒ v0.5.3 @@ -300,29 +300,29 @@ When new versions of packages are released, it is a good idea to update. Simply to the latest compatible version. Sometimes this is not what you want. You can specify a subset of the dependencies to upgrade by giving them as arguments to `up`, e.g: ```julia-repl -(@v1.8) pkg> up Example +(@v1.10) pkg> up Example ``` This will only allow Example do upgrade. If you also want to allow dependencies of Example to upgrade (with the exception of packages that are in the project) you can pass the `--preserve=direct` flag. ```julia-repl -(@v1.8) pkg> up --preserve=direct Example +(@v1.10) pkg> up --preserve=direct Example ``` And if you also want to allow dependencies of Example that are also in the project to upgrade, you can use `--preserve=none`: ```julia-repl -(@v1.8) pkg> up --preserve=none Example +(@v1.10) pkg> up --preserve=none Example ``` ## Pinning a package A pinned package will never be updated. A package can be pinned using `pin`, for example: ```julia-repl -(@v1.8) pkg> pin Example +(@v1.10) pkg> pin Example Resolving package versions... - Updating `~/.julia/environments/v1.8/Project.toml` + Updating `~/.julia/environments/v1.10/Project.toml` [7876af07] ~ Example v0.5.3 ⇒ v0.5.3 ⚲ Updating `~/.julia/environments/v1.8/Manifest.toml` [7876af07] ~ Example v0.5.3 ⇒ v0.5.3 ⚲ @@ -331,8 +331,8 @@ A pinned package will never be updated. A package can be pinned using `pin`, for Note the pin symbol `⚲` showing that the package is pinned. 
Removing the pin is done using `free` ```julia-repl -(@v1.8) pkg> free Example - Updating `~/.julia/environments/v1.8/Project.toml` +(@v1.10) pkg> free Example + Updating `~/.julia/environments/v1.10/Project.toml` [7876af07] ~ Example v0.5.3 ⚲ ⇒ v0.5.3 Updating `~/.julia/environments/v1.8/Manifest.toml` [7876af07] ~ Example v0.5.3 ⚲ ⇒ v0.5.3 @@ -343,7 +343,7 @@ Note the pin symbol `⚲` showing that the package is pinned. Removing the pin i The tests for a package can be run using `test` command: ```julia-repl -(@v1.8) pkg> test Example +(@v1.10) pkg> test Example ... Testing Example Testing Example tests passed @@ -356,7 +356,7 @@ The output of the build process is directed to a file. To explicitly run the build step for a package, the `build` command is used: ```julia-repl -(@v1.8) pkg> build IJulia +(@v1.10) pkg> build IJulia Building Conda ─→ `~/.julia/scratchspaces/44cfe95a-1eb2-52ea-b672-e2afdf69b78f/6e47d11ea2776bc5627421d59cdcc1296c058071/build.log` Building IJulia → `~/.julia/scratchspaces/44cfe95a-1eb2-52ea-b672-e2afdf69b78f/98ab633acb0fe071b671f6c1785c46cd70bb86bd/build.log` @@ -486,7 +486,31 @@ To fix such errors, you have a number of options: - remove either `A` or `B` from your environment. Perhaps `B` is left over from something you were previously working on, and you don't need it anymore. If you don't need `A` and `B` at the same time, this is the easiest way to fix the problem. - try reporting your conflict. In this case, we were able to deduce that `B` requires an outdated version of `D`. You could thus report an issue in the development repository of `B.jl` asking for an updated version. - try fixing the problem yourself. - This becomes easier once you understand `Project.toml` files and how they declare their compatibility requirements. We'll return to this example in [Fixing conflicts](@ref). + This becomes easier once you understand `Project.toml` files and how they declare their compatibility requirements. 
We'll return to this example in [Fixing conflicts](@ref Fixing-conflicts). + +## Yanked packages + +Package registries can mark specific versions of packages as "yanked". A yanked package version +is one that should no longer be used, typically because it contains serious bugs, security +vulnerabilities, or other critical issues. When a package version is yanked, it becomes +unavailable for new installations but remains accessible (i.e. via `instantiate`) to maintain reproducibility +of existing environments. + +When you run `pkg> status`, yanked packages are clearly marked with a warning symbol: + +```julia-repl +(@v1.13) pkg> status + Status `~/.julia/environments/v1.13/Project.toml` + [682c06a0] JSON v0.21.3 + [f4259836] Example v1.2.0 [yanked] +``` + +The `[yanked]` annotation indicate that version `v1.2.0` of the `Example` package +has been yanked and should be updated or replaced. + +When resolving dependencies, Pkg will warn you if yanked packages are present and may provide +guidance on how to resolve the situation. It's important to address yanked packages promptly +to ensure the security and stability of your Julia environment. ## Garbage collecting old, unused packages @@ -502,7 +526,7 @@ If you are short on disk space and want to clean out as many unused packages and To run a typical garbage collection with default arguments, simply use the `gc` command at the `pkg>` REPL: ```julia-repl -(@v1.8) pkg> gc +(@v1.10) pkg> gc Active manifests at: `~/BinaryProvider/Manifest.toml` ... diff --git a/docs/src/protocol.md b/docs/src/protocol.md new file mode 100644 index 0000000000..211b1caf46 --- /dev/null +++ b/docs/src/protocol.md @@ -0,0 +1,190 @@ +# [**14.** Package and Storage Server Protocol Reference](@id Pkg-Server-Protocols) + +The Julia Package Server Protocol (Pkg Protocol) and the Package Storage Server Protocol (Storage Protocol) define how Julia's package manager, Pkg, obtains and manages packages and their associated resources. 
They aim to enhance the Julia package ecosystem, making it more efficient, reliable, and user-friendly, avoiding potential points of failure, and ensuring the permanent availability of package versions and artifacts, which is paramount for the stability and reproducibility of Julia projects. + +The Pkg client, by default, gets all resources over HTTPS from a single open source service run by the Julia community. This service for serving packages is additionally backed by multiple independent storage services which interface with proprietary origin services (GitHub, etc.) and guarantee persistent availability of resources into the future. + +The protocols also aim to address some of the limitations that existed prior to its introduction. + +- **Vanishing Resources.** It is possible for authors to delete code repositories of registered Julia packages. Without some kind of package server, no one can install a package which has been deleted. If someone happens to have a current fork of a deleted package, that can be made the new official repository for the package, but the chances of them having no or outdated forks are high. An even worse situation could happen for artifacts since they tend not to be kept in version control and are much more likely to be served from "random" web servers at a fixed URL with content changing over time. Artifact publishers are unlikely to retain all past versions of artifacts, so old versions of packages that depend on specific artifact content will not be reproducible in the future unless we do something to ensure that they are kept around after the publisher has stopped hosting them. By storing all package versions and artifacts in a single place, we can ensure that they are available forever. +- **Usage Insights.** It is valuable for the Julia community to know how many people are using Julia or what the relative popularity of different packages and operating systems is. Julia uses GitHub to host its ecosystem. 
GitHub - a commercial, proprietary service - has this information but does not make it available to the Julia community. We are of course using GitHub for free, so we can't complain, but it seems unfortunate that a commercial entity has this valuable information while the open source community remains in the dark. The Julia community really could use insight into who is using Julia and how, so that we can prioritize packages and platforms, and give real numbers when people ask "how many people are using Julia?" +- **Decoupling from Git and GitHub.** Prior to this, Julia package ecosystem was very deeply coupled to git and was even specialized on GitHub specifically in many ways. The Pkg and Storage Protocols allowed us to decouple ourselves from git as the primary mechanism for getting packages. Now Julia continues to support using git, but does not require it just to install packages from the default public registry anymore. This decoupling also paves the way for supporting other version control systems in the future, making git no longer so special. Special treatment of GitHub will also go away since we get the benefits of specializing for GitHub (fast tarball downloads) directly from the Pkg protocols. +- **Firewall problems.** Prior to this, Pkg's need to connect to arbitrary servers using a miscellany of protocols caused several problems with firewalls. A large set of protocols and an unbounded list of servers needed to be whitelisted just to support default Pkg operation. If Pkg only needed to talk to a single service over a single, secure protocol (i.e. HTTPS), then whitelisting Pkg for standard use would be dead simple. + +## Protocols & Services + +1. **Pkg Protocol:** what Julia Pkg Clients speak to Pkg Servers. The Pkg Server serves all resources that Pkg Clients need to install and use registered packages, including registry data, packages and artifacts. 
It is designed to be easily horizontally scalable and not to have any hard operational requirements: if service is slow, just start more servers; if a Pkg Server crashes, forget it and boot up a new one. +2. **Storage Protocol:** what Pkg Servers speak to get resources from Storage Services. Julia clients do not interact with Storage services directly and multiple independent Storage Services can symmetrically (all are treated equally) provide their service to a given Pkg Server. Since Pkg Servers cache what they serve to Clients and handle convenient content presentation, Storage Services can expose a much simpler protocol: all they do is serve up complete versions of registries, packages and artifacts, while guaranteeing persistence and completeness. Persistence means: once a version of a resource has been served, that version can be served forever. Completeness means: if the service serves a registry, it can serve all package versions referenced by that registry; if it serves a package version, it can serve all artifacts used by that package. + +Both protocols work over HTTPS, using only GET and HEAD requests. As is normal for HTTP, HEAD requests are used to get information about a resource, including whether it would be served, without actually downloading it. As described in what follows, the Pkg Protocol is client-to-server and may be unauthenticated, use basic auth, or OpenID; the Storage Protocol is server-to-server only and uses mutual authentication with TLS certificates. 
+ +The following diagram shows how these services interact with each other and with external services such as GitHub, GitLab and BitBucket for source control, and S3 and HDFS for long-term persistence: + + ┌───────────┐ + + │ Amazon S3 │ + + │ Storage │ + + └───────────┘ + + ▲ + + ║ + + ▼ + + Storage ╔═══════════╗ ┌───────────┐ + + Pkg Protocol ║ Storage ║ ┌──▶│ GitHub │ + + Protocol ┌──▶║ Service A ║───┤ └───────────┘ + + ┏━━━━━━━━━━━━┓ ┏━━━━━━━━━━━━┓ │ ╚═══════════╝ │ ┌───────────┐ + + ┃ Pkg Client ┃────▶┃ Pkg Server ┃───┤ ╔═══════════╗ ├──▶│ GitLab │ + + ┗━━━━━━━━━━━━┛ ┗━━━━━━━━━━━━┛ │ ║ Storage ║ │ └───────────┘ + + └──▶║ Service B ║───┤ ┌───────────┐ + + ╚═══════════╝ └──▶│ BitBucket │ + + ▲ └───────────┘ + + ║ + + ▼ + + ┌───────────┐ + + │ HDFS │ + + │ Cluster │ + + └───────────┘ + +Each Julia Pkg Client is configured to talk to a Pkg Server. By default, they talk to `pkg.julialang.org`, a public, unauthenticated Pkg Server. If the environment variable `JULIA_PKG_SERVER` is set, the Pkg Client connects to that host instead. For example, if `JULIA_PKG_SERVER` is set to `pkg.company.com` then the Pkg Client will connect to `https://pkg.company.com`. So in typical operation, a Pkg Client will no longer rely on `libgit2` or a git command-line client, both of which have been an ongoing headache, especially behind firewalls and on Windows. If fact, git will only be necessary when working with git-hosted registries and unregistered packages - those will continue to work as they have previously, fetched using git. + +While the default Pkg Server at `pkg.julialang.org` is unauthenticated, other parties may host Pkg Server instances elsewhere, authenticated or unauthenticated, public or private, as they wish. People can connect to those servers by setting the `JULIA_PKG_SERVER` variable. There will be a configuration file for providing authentication information to Pkg Servers using either basic auth or OpenID. 
The Pkg Server implementation will be open source and have minimal operational requirements. Specifically, it needs: + +1. The ability to accept incoming connections on port 443; +2. The ability to connect to a configurable set of Storage Services; +3. Temporary disk storage for caching resources (registries, packages, artifacts). + +A Pkg Service may be backed by more than one actual server, as is typical for web services. The Pkg Service is stateless, so this kind of horizontal scaling is straightforward. Each Pkg Server serves registry, package and artifact resources to Pkg Clients and caches whatever it serves. Each Pkg Server, in turn, gets those resources from one or more Storage Services. Storage services are responsible for fetching resources from code hosting sites like GitHub, GitLab and BitBucket, and for persisting everything that they have ever served to long-term storage systems like Amazon S3, hosted HDFS clusters - or whatever an implementor wants to use. If the original copies of resources vanish, Pkg Servers must always serve up all previously served versions of resources. + +The Storage Protocol is designed to be extremely simple so that multiple independent implementations can coexist, and each Pkg Server may be symmetrically backed by multiple different Storage Services, providing both redundant backup and ensuring that no single implementation has a "choke hold" on the ecosystem - anyone can implement a new Storage Service and add it to the set of services backing the default Pkg Server at `pkg.julialang.org`. The simplest possible version of a Storage Service is a static HTTPS site serving files generated from a snapshot of a registry. Although this does not provide adequate long-term backup capabilities, and would need to be regenerated whenever a registry changes, it may be sufficient for some private uses. 
Having multiple independently operated Storage Services helps ensure that even if one Storage Service becomes unavailable or unreliable - for technical, financial, or political reasons - others will keep operating and so will the Pkg ecosystem. + +## The Pkg Protocol + +This section describes the protocol used by Pkg Clients to get resources from Pkg Servers, including the latest versions of registries, package source trees, and artifacts. There is also a standard system for asking for diffs of all of these from previous versions, to minimize how much data the client needs to download in order to update itself. There is additionally a bundle mechanism for requesting and receiving a set of resources in a single request. + +### Authentication + +The authentication scheme between a Pkg client and server will be HTTP authorization with bearer tokens, as standardized in RFC6750. This means that authenticated access is accomplished by the client by making an HTTPS request including a `Authorization: Bearer $access_token` header. + +The format of the token, its contents and validation mechanism are not specified by the Pkg Protocol. They are left to the server to define. The server is expected to validate the token and determine whether the client is authorized to access the requested resource. Similarly at the client side, the implementation of the token acquisition is not specified by the Pkg Protocol. However Pkg provides [hooks](#Authentication-Hooks) that can be implemented at the client side to trigger the token acquisition process. Tokens thus acquired are expected to be stored in a local file, the format of which is specified by the Pkg Protocol. Pkg will be able to read the token from this file and include it in the request to the server. Pkg can also, optionally, detect when the token is about to expire and trigger a refresh. The Pkg client also supports automatic token refresh, since bearer tokens are recommended to be short-lived (no more than a day). 
+ +The authorization information is saved locally in `$(DEPOT_PATH[1])/servers/$server/auth.toml` which is a TOML file with the following fields: + +- `access_token` (REQUIRED): the bearer token used to authorize normal requests +- `expires_at` (OPTIONAL): an absolute expiration time +- `expires_in` (OPTIONAL): a relative expiration time +- `refresh_token` (OPTIONAL): bearer token used to authorize refresh requests +- `refresh_url` (OPTIONAL): URL to fetch a new token from + +The `auth.toml` file may contain other fields (e.g. user name, user email), but they are ignored by Pkg. The two other fields mentioned in RFC6750 are `token_type` and `scope`: these are omitted since only tokens of type `Bearer` are supported currently and the scope is always implicitly to provide access to Pkg protocol URLs. Pkg servers should, however, not send `auth.toml` files with `token_type` or `scope` fields, as these names may be used in the future, e.g. to support other kinds of tokens or to limit the scope of an authorization to a subset of Pkg protocol URLs. + +Initially, the user or user agent (IDE) must acquire a `auth.toml` file and save it to the correct location. After that, Pkg will determine whether the access token needs to be refreshed by examining the `expires_at` and/or `expires_in` fields of the auth file. The expiration time is the minimum of `expires_at` and `mtime(auth_file) + expires_in`. When the Pkg client downloads a new `auth.toml` file, if there is a relative `expires_in` field, an absolute `expires_at` value is computed based on the client's current clock time. This combination of policies allows expiration to work gracefully even in the presence of clock skew between the server and the client. + +If the access token is expired and there are `refresh_token` and `refresh_url` fields in `auth.toml`, a new auth file is requested by making a request to `refresh_url` with an `Authorization: Bearer $refresh_token` header. 
Pkg will refuse to make a refresh request unless `refresh_url` is an HTTPS URL. Note that `refresh_url` need not be a URL on the Pkg server: token refresh can be handled by a separate server. If the request is successful and the returned `auth.toml` file is a well-formed TOML file with at least an `access_token` field, it is saved to `$(DEPOT_PATH[1])/servers/$server/auth.toml`. + +Checking for access token expiry and refreshing `auth.toml` is done before each Pkg client request to a Pkg server, and if the auth file is updated the new access token is used, so the token should in theory always be up to date. Practice is different from theory, of course, and if the Pkg server considers the access token expired, it may return an HTTP 401 Unauthorized response, and the Pkg client should attempt to refresh the auth token. If, after attempting to refresh the access token, the server still returns HTTP 401 Unauthorized, the Pkg client will present the body of the error response to the user or user agent (IDE). + +## Authentication Hooks +A mechanism to register a hook at the client is provided to allow the user agent to handle an auth failure. It can, for example, present a login page and take the user through the necessary authentication flow to get a new auth token and store it in `auth.toml`. + +- A handler can also be registered using [`register_auth_error_handler`](@ref Pkg.PlatformEngines.register_auth_error_handler). It returns a function that can be called to deregister the handler. +- A handler can also be deregistered using [`deregister_auth_error_handler`](@ref Pkg.PlatformEngines.deregister_auth_error_handler). + +Example: + +```julia +# register a handler +dispose = Pkg.PlatformEngines.register_auth_error_handler((url, svr, err) -> begin + PkgAuth.authenticate(svr*"/auth") + return true, true +end) + +# ... client code ... 
+ +# deregister the handler +dispose() +# or +Pkg.PlatformEngines.deregister_auth_error_handler(url, svr) +``` + +### Resources + +The client can make GET or HEAD requests to the following resources: + +- `/registries`: map of registry uuids at this server to their current tree hashes, each line of the response data is of the form `/registry/$uuid/$hash` representing a resource pointing to particular version of a registry +- `/registry/$uuid/$hash`: tarball of registry uuid at the given tree hash +- `/package/$uuid/$hash`: tarball of package uuid at the given tree hash +- `/artifact/$hash`: tarball of an artifact with the given tree hash + +Only the `/registries` changes - all other resources can be cached forever and the server will indicate this with the appropriate HTTP headers. + +### Reference Implementation + +A reference implementation of the Pkg Server protocol is available at [PkgServer.jl](https://github.com/JuliaPackaging/PkgServer.jl). + +## The Storage Protocol + +This section describes the protocol used by Pkg Servers to get resources from Storage Servers, including the latest versions of registries, package source trees, and artifacts. The Pkg Server requests each type of resource when it needs it and caches it for as long as it can, so Storage Services should not have to serve the same resources to the same Pkg Server instance many times. + +### Authentication + +Since the Storage protocol is a server-to-server protocol, it uses certificate-based mutual authentication: each side of the connection presents certificates of identity to the other. The operator of a Storage Service must issue a client certificate to the operator of a Pkg Service certifying that it is authorized to use the Storage Service. 
+ +### Resources + +The Storage Protocol is similar to the Pkg Protocol: + +- `/registries`: map of registry uuids at this server to their current tree hashes +- `/registry/$uuid/$hash`: tarball of registry uuid at the given tree hash +- `/package/$uuid/$hash`: tarball of package uuid at the given tree hash +- `/artifact/$hash`: tarball of an artifact with the given tree hash + +As is the case with the Pkg Server protocol, only the `/registries` resource changes over time—all other resources are permanently cacheable and Pkg Servers are expected to cache resources indefinitely, only deleting them if they need to reclaim storage space. + +### Interaction + +Fetching resources from a single Storage Server is straightforward: the Pkg Server asks for a version of a registry by UUID and hash and the Storage Server returns a tarball of that registry tree if it knows about that registry and version, or an HTTP 404 error if it doesn't. + +Each Pkg Server may use multiple Storage Services for availability and depth of backup. For a given resource, the Pkg Server makes a HEAD request to each Storage Service requesting the resource, and then makes a GET request for the resource to the first Storage Server that replies to the HEAD request with a 200 OK. If no Storage Service responds with a 200 OK in enough time, the Pkg Server should respond to the request for the corresponding resource with a 404 error. Each Storage Service which responds with a 200 OK must behave as if it had served the resource, regardless of whether it does so or not - i.e. persist the resource to long-term storage. + +One subtlety is how the Pkg Server determines what the latest version of each registry is. It can get a map from registry UUIDs to version hashes from each Storage Server, but hashes are unordered - if multiple Storage Servers reply with different hashes, which one should the Pkg Server use? 
When Storage Servers disagree on the latest hash of a registry, the Pkg Server should ask each Storage Server about the hashes that the other servers returned: if Service A knows about Service B's hash but B doesn't know about A's hash, then A's hash is more recent and should be used. If each server doesn't know about the other's hash, then neither hash is strictly newer than the other one and either could be used. The Pkg Server can break the tie any way it wants, e.g. randomly or by using the lexicographically earlier hash. + +### Guarantees + +The primary guarantee that a Storage Server makes is that if it has ever successfully served a resource—registry tree, package source tree, artifact tree — it must be able to serve that same resource version forever. + +It's tempting to also require it to guarantee that if a Storage Server serves a registry tree, it can also serve every package source tree referred to within that registry tree. Similarly, it is tempting to require that if a Storage Server can serve a package source tree that it should be able to serve any artifacts referenced by that version of the package. However, this could fail for reasons entirely beyond the control of the server: what if the registry is published with wrong package hashes? What if someone registers a package version, doesn't git tag it, then force pushes the branch that the version was on? In both of these cases, the Storage Server may not be able to fetch a version of a package through no fault of its own. Similarly, artifact hashes in packages might be incorrect or vanish before the Storage Server can retrieve them. + +Therefore, we don't strictly require that Storage Servers guarantee this kind of closure under resource references. We do, however, recommend that Storage Servers proactively fetch resources referred to by other resources as soon as possible. 
When a new version of a registry is available, the Storage Server should fetch all the new package versions in the registry immediately. When a package version is fetched—for any reason, whether because it was included in a new registry snapshot or because an upstream Pkg Server requested it by hash—all artifacts that it references should be fetched immediately. + +## Verification + +Since all resources are content addressed, the Pkg Clients and Pkg Server can and should verify that resources that they receive from upstream have the correct content hash. If a resource does not have the right hash, it should not be used and not be served further downstream. Pkg Servers should try to fetch the resource from other Storage Services and serve one that has the correct content. Pkg Clients should error if they get a resource with an incorrect content hash. + +Git uses SHA1 for content hashing. There is a pure Julia implementation of git's content hashing algorithm, which is being used to verify artifacts in Julia 1.3 (among other things). The SHA1 hashing algorithm is considered to be cryptographically compromised at this point, and while it's not completely broken, git is already starting to plan how to move away from using SHA1 hashes. To that end, we should consider getting ahead of this problem by using a stronger hash like SHA3-256 in these protocols. Having control over these protocols actually makes this considerably easier than if we were continuing to rely on git for resource acquisition. + +The first step to using SHA3-256 instead of SHA1 is to populate registries with additional hashes for package versions. Currently each package version is identified by a git-tree-sha1 entry. We would add git-tree-sha3-256 entries that give the SHA3-256 hashes computed using the same git tree hashing logic. From this origin, the Pkg Client, Pkg Server and Storage Servers all just need to use SHA3-256 hashes rather than SHA1 hashes. + +## References + +1. 
Pkg & Storage Protocols [https://github.com/JuliaLang/Pkg.jl/issues/1377](https://github.com/JuliaLang/Pkg.jl/issues/1377) +2. Authenticated Pkg Client Support: [https://github.com/JuliaLang/Pkg.jl/pull/1538](https://github.com/JuliaLang/Pkg.jl/pull/1538) +3. Authentication Hooks: [https://github.com/JuliaLang/Pkg.jl/pull/1630](https://github.com/JuliaLang/Pkg.jl/pull/1630) diff --git a/docs/src/registries.md b/docs/src/registries.md index 7c50727204..cada0bdadf 100644 --- a/docs/src/registries.md +++ b/docs/src/registries.md @@ -1,4 +1,4 @@ -# **7.** Registries +# **8.** Registries Registries contain information about packages, such as available releases and dependencies, and where they can be downloaded. diff --git a/docs/src/toml-files.md b/docs/src/toml-files.md index 79496e0321..a6363d2dc3 100644 --- a/docs/src/toml-files.md +++ b/docs/src/toml-files.md @@ -1,4 +1,4 @@ -# [**10.** `Project.toml` and `Manifest.toml`](@id Project-and-Manifest) +# [**11.** `Project.toml` and `Manifest.toml`](@id Project-and-Manifest) Two files that are central to Pkg are `Project.toml` and `Manifest.toml`. `Project.toml` and `Manifest.toml` are written in [TOML](https://github.com/toml-lang/toml) (hence the @@ -22,13 +22,38 @@ are described below. ### The `authors` field -For a package, the optional `authors` field is a list of strings describing the -package authors, in the form `NAME `. For example: +For a package, the optional `authors` field is a TOML array describing the package authors. +Entries in the array can either be a string in the form `"NAME"` or `"NAME "`, or a table keys following the [Citation File Format schema](https://github.com/citation-file-format/citation-file-format/blob/main/schema-guide.md) for either a +[`person`](https://github.com/citation-file-format/citation-file-format/blob/main/schema-guide.md#definitionsperson) or an [`entity`](https://github.com/citation-file-format/citation-file-format/blob/main/schema-guide.md#definitionsentity). 
+ +For example: ```toml -authors = ["Some One ", - "Foo Bar "] +authors = [ + "Some One ", + "Foo Bar ", + {given-names = "Baz", family-names = "Qux", email = "bazqux@example.com", orcid = "https://orcid.org/0000-0000-0000-0000", website = "https://github.com/bazqux"}, +] ``` +If all authors are specified by tables, it is possible to use [the TOML Array of Tables syntax](https://toml.io/en/v1.0.0#array-of-tables) +```toml +[[authors]] +given-names = "Some" +family-names = "One" +email = "someone@email.com" + +[[authors]] +given-names = "Foo" +family-names = "Bar" +email = "foo@bar.com" + +[[authors]] +given-names = "Baz" +family-names = "Qux" +email = "bazqux@example.com" +orcid = "https://orcid.org/0000-0000-0000-0000" +website = "https://github.com/bazqux" +``` ### The `name` field @@ -39,7 +64,7 @@ name = "Example" The name must be a valid [identifier](https://docs.julialang.org/en/v1/base/base/#Base.isidentifier) (a sequence of Unicode characters that does not start with a number and is neither `true` nor `false`). For packages, it is recommended to follow the -[package naming rules](@ref Package-naming-rules). The `name` field is mandatory +[package naming rules](@ref Package-naming-guidelines). The `name` field is mandatory for packages. @@ -77,6 +102,18 @@ Note that Pkg.jl deviates from the SemVer specification when it comes to version the section on [pre-1.0 behavior](@ref compat-pre-1.0) for more details. +### The `readonly` field + +The `readonly` field is a boolean that, when set to `true`, marks the environment as read-only. This prevents any modifications to the environment, including adding, removing, or updating packages. For example: + +```toml +readonly = true +``` + +When an environment is marked as readonly, Pkg will throw an error if any operation that would modify the environment is attempted. +If the `readonly` field is not present or set to `false` (the default), the environment can be modified normally. 
+ + ### The `[deps]` section All dependencies of the package/project are listed in the `[deps]` section. Each dependency @@ -93,13 +130,23 @@ handled by Pkg operations such as `add`. ### The `[sources]` section -Specifiying a path or repo (+ branch) for a dependency is done in the `[sources]` section. +Specifying a path or repo (+ branch) for a dependency is done in the `[sources]` section. These are especially useful for controlling unregistered dependencies without having to bundle a corresponding manifest file. +Each entry in the `[sources]` section supports the following keys: + +- **`url`**: The URL of the Git repository. Cannot be used with `path`. +- **`rev`**: The Git revision (branch name, tag, or commit hash) to use. Only valid with `url`. +- **`subdir`**: A subdirectory within the repository containing the package. +- **`path`**: A local filesystem path to the package. Cannot be used with `url` or `rev`. + +This might in practice look something like: + ```toml [sources] Example = {url = "https://github.com/JuliaLang/Example.jl", rev = "custom_branch"} +WithinMonorepo = {url = "https://github.org/author/BigProject", subdir = "SubPackage"} SomeDependency = {path = "deps/SomeDependency.jl"} ``` @@ -161,7 +208,7 @@ For the details, see [`Pkg.instantiate`](@ref). ### Different Manifests for Different Julia versions -Starting from Julia v1.11, there is an option to name manifest files in the format `Manifest-v{major}.{minor}.toml`. +Starting from Julia v1.10.8, there is an option to name manifest files in the format `Manifest-v{major}.{minor}.toml`. Julia will then preferentially use the version-specific manifest file if available. For example, if both `Manifest-v1.11.toml` and `Manifest.toml` exist, Julia 1.11 will prioritize using `Manifest-v1.11.toml`. However, Julia versions 1.10, 1.12, and all others will default to using `Manifest.toml`. 
diff --git a/ext/REPLExt/REPLExt.jl b/ext/REPLExt/REPLExt.jl index 92e636cd2f..3b0ade22e3 100644 --- a/ext/REPLExt/REPLExt.jl +++ b/ext/REPLExt/REPLExt.jl @@ -1,18 +1,25 @@ module REPLExt +if Base.get_bool_env("JULIA_PKG_DISALLOW_PKG_PRECOMPILATION", false) == true + error("Precompililing Pkg extension REPLExt is disallowed. JULIA_PKG_DISALLOW_PKG_PRECOMPILATION=$(ENV["JULIA_PKG_DISALLOW_PKG_PRECOMPILATION"])") +end + using Markdown, UUIDs, Dates import REPL import .REPL: LineEdit, REPLCompletions, TerminalMenus import Pkg -import .Pkg: linewrap, pathrepr, compat, can_fancyprint, printpkgstyle, PKGMODE_PROJECT +import .Pkg: linewrap, pathrepr, can_fancyprint, printpkgstyle, PKGMODE_PROJECT using .Pkg: Types, Operations, API, Registry, Resolve, REPLMode, safe_realpath using .REPLMode: Statement, CommandSpec, Command, prepare_cmd, tokenize, core_parse, SPECS, api_options, parse_option, api_options, is_opt, wrap_option using .Types: Context, PkgError, pkgerror, EnvCache +using .API: set_current_compat +import .API: _compat + include("completions.jl") include("compat.jl") @@ -23,7 +30,7 @@ include("compat.jl") struct PkgCompletionProvider <: LineEdit.CompletionProvider end -function LineEdit.complete_line(c::PkgCompletionProvider, s; hint::Bool=false) +function LineEdit.complete_line(c::PkgCompletionProvider, s; hint::Bool = false) partial = REPL.beforecursor(s.input_buffer) full = LineEdit.input_string(s) ret, range, should_complete = completions(full, lastindex(partial); hint) @@ -113,16 +120,18 @@ function on_done(s, buf, ok, repl) do_cmds(repl, input) REPL.prepare_next(repl) REPL.reset_state(s) - s.current_mode.sticky || REPL.transition(s, main) + return s.current_mode.sticky || REPL.transition(s, main) end # Set up the repl Pkg REPLMode function create_mode(repl::REPL.AbstractREPL, main::LineEdit.Prompt) - pkg_mode = LineEdit.Prompt(promptf; + pkg_mode = LineEdit.Prompt( + promptf; prompt_prefix = repl.options.hascolor ? 
Base.text_colors[:blue] : "", prompt_suffix = "", complete = PkgCompletionProvider(), - sticky = true) + sticky = true + ) pkg_mode.repl = repl hp = main.hist @@ -145,24 +154,24 @@ function create_mode(repl::REPL.AbstractREPL, main::LineEdit.Prompt) repl_keymap = Dict() if shell_mode !== nothing - let shell_mode=shell_mode - repl_keymap[';'] = function (s,o...) - if isempty(s) || position(LineEdit.buffer(s)) == 0 + let shell_mode = shell_mode + repl_keymap[';'] = function (s, o...) + return if isempty(s) || position(LineEdit.buffer(s)) == 0 buf = copy(LineEdit.buffer(s)) LineEdit.transition(s, shell_mode) do LineEdit.state(s, shell_mode).input_buffer = buf end else LineEdit.edit_insert(s, ';') - LineEdit.check_for_hint(s) && LineEdit.refresh_line(s) + LineEdit.check_show_hint(s) end end end end - b = Dict{Any,Any}[ + b = Dict{Any, Any}[ skeymap, repl_keymap, mk, prefix_keymap, LineEdit.history_keymap, - LineEdit.default_keymap, LineEdit.escape_defaults + LineEdit.default_keymap, LineEdit.escape_defaults, ] pkg_mode.keymap_dict = LineEdit.keymap(b) return pkg_mode @@ -172,16 +181,16 @@ function repl_init(repl::REPL.LineEditREPL) main_mode = repl.interface.modes[1] pkg_mode = create_mode(repl, main_mode) push!(repl.interface.modes, pkg_mode) - keymap = Dict{Any,Any}( - ']' => function (s,args...) - if isempty(s) || position(LineEdit.buffer(s)) == 0 + keymap = Dict{Any, Any}( + ']' => function (s, args...) 
+ return if isempty(s) || position(LineEdit.buffer(s)) == 0 buf = copy(LineEdit.buffer(s)) LineEdit.transition(s, pkg_mode) do LineEdit.state(s, pkg_mode).input_buffer = buf end else LineEdit.edit_insert(s, ']') - LineEdit.check_for_hint(s) && LineEdit.refresh_line(s) + LineEdit.check_show_hint(s) end end ) @@ -201,9 +210,9 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol}) end if isempty(ctx.registries) if !REG_WARNED[] - printstyled(ctx.io, " │ "; color=:green) + printstyled(ctx.io, " │ "; color = :green) printstyled(ctx.io, "Attempted to find missing packages in package registries but no registries are installed.\n") - printstyled(ctx.io, " └ "; color=:green) + printstyled(ctx.io, " └ "; color = :green) printstyled(ctx.io, "Use package mode to install a registry. `pkg> registry add` will install the default registries.\n\n") REG_WARNED[] = true end @@ -223,22 +232,22 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol}) available_pkg_list = length(available_pkgs) == 1 ? String(available_pkgs[1]) : "[$(join(available_pkgs, ", "))]" msg1 = "Package$(plural1) $(missing_pkg_list) not found, but $(plural2) named $(available_pkg_list) $(plural3) available from a registry." 
for line in linewrap(msg1, io = ctx.io, padding = length(" │ ")) - printstyled(ctx.io, " │ "; color=:green) + printstyled(ctx.io, " │ "; color = :green) println(ctx.io, line) end - printstyled(ctx.io, " │ "; color=:green) + printstyled(ctx.io, " │ "; color = :green) println(ctx.io, "Install package$(plural4)?") msg2 = string("add ", join(available_pkgs, ' ')) for (i, line) in pairs(linewrap(msg2; io = ctx.io, padding = length(string(" | ", promptf())))) - printstyled(ctx.io, " │ "; color=:green) + printstyled(ctx.io, " │ "; color = :green) if i == 1 - printstyled(ctx.io, promptf(); color=:blue) + printstyled(ctx.io, promptf(); color = :blue) else print(ctx.io, " "^length(promptf())) end println(ctx.io, line) end - printstyled(ctx.io, " └ "; color=:green) + printstyled(ctx.io, " └ "; color = :green) Base.prompt(stdin, ctx.io, "(y/n/o)", default = "y") catch err if err isa InterruptException # if ^C is entered @@ -254,7 +263,7 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol}) resp = strip(resp) lower_resp = lowercase(resp) if lower_resp in ["y", "yes"] - API.add(string.(available_pkgs); allow_autoprecomp=false) + API.add(string.(available_pkgs); allow_autoprecomp = false) elseif lower_resp in ["o"] editable_envs = filter(v -> v != "@stdlib", LOAD_PATH) option_list = String[] @@ -273,16 +282,16 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol}) push!(keybindings, only("$n")) push!(shown_envs, expanded_env) end - menu = TerminalMenus.RadioMenu(option_list, keybindings=keybindings, pagesize=length(option_list)) + menu = TerminalMenus.RadioMenu(option_list; keybindings = keybindings, pagesize = length(option_list), charset = :ascii) default = something( # select the first non-default env by default, if possible findfirst(!=(Base.active_project()), shown_envs), 1 ) print(ctx.io, "\e[1A\e[1G\e[0J") # go up one line, to the start, and clear it - printstyled(ctx.io, " └ "; color=:green) + printstyled(ctx.io, " └ "; color = :green) choice = try - 
TerminalMenus.request("Select environment:", menu, cursor=default) + TerminalMenus.request("Select environment:", menu, cursor = default) catch err if err isa InterruptException # if ^C is entered println(ctx.io) @@ -292,7 +301,7 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol}) end choice == -1 && return false API.activate(shown_envs[choice]) do - API.add(string.(available_pkgs); allow_autoprecomp=false) + API.add(string.(available_pkgs); allow_autoprecomp = false) end elseif (lower_resp in ["n"]) return false @@ -308,7 +317,6 @@ function try_prompt_pkg_add(pkgs::Vector{Symbol}) end - function __init__() if isdefined(Base, :active_repl) if Base.active_repl isa REPL.LineEditREPL @@ -326,7 +334,7 @@ function __init__() end end end - if !in(try_prompt_pkg_add, REPL.install_packages_hooks) + return if !in(try_prompt_pkg_add, REPL.install_packages_hooks) push!(REPL.install_packages_hooks, try_prompt_pkg_add) end end diff --git a/ext/REPLExt/compat.jl b/ext/REPLExt/compat.jl index a9a537cf0f..b46ada6c72 100644 --- a/ext/REPLExt/compat.jl +++ b/ext/REPLExt/compat.jl @@ -1,7 +1,9 @@ # TODO: Overload -function compat(ctx::Context; io = nothing) +function _compat(ctx::Context; io = nothing, input_io = stdin) io = something(io, ctx.io) - can_fancyprint(io) || pkgerror("Pkg.compat cannot be run interactively in this terminal") + if input_io isa Base.TTY # testing uses IOBuffer + can_fancyprint(io) || pkgerror("Pkg.compat cannot be run interactively in this terminal") + end printpkgstyle(io, :Compat, pathrepr(ctx.env.project_file)) longest_dep_len = max(5, length.(collect(keys(ctx.env.project.deps)))...) 
opt_strs = String[] @@ -9,14 +11,14 @@ function compat(ctx::Context; io = nothing) compat_str = Operations.get_compat_str(ctx.env.project, "julia") push!(opt_strs, Operations.compat_line(io, "julia", nothing, compat_str, longest_dep_len, indent = "")) push!(opt_pkgs, "julia") - for (dep, uuid) in sort(collect(ctx.env.project.deps); by = x->x.first) + for (dep, uuid) in sort(collect(ctx.env.project.deps); by = x -> x.first) compat_str = Operations.get_compat_str(ctx.env.project, dep) push!(opt_strs, Operations.compat_line(io, dep, uuid, compat_str, longest_dep_len, indent = "")) push!(opt_pkgs, dep) end - menu = TerminalMenus.RadioMenu(opt_strs, pagesize=length(opt_strs)) + menu = TerminalMenus.RadioMenu(opt_strs; pagesize = length(opt_strs), charset = :ascii) choice = try - TerminalMenus.request(" Select an entry to edit:", menu) + TerminalMenus.request(TerminalMenus.default_terminal(in = input_io, out = io), " Select an entry to edit:", menu) catch err if err isa InterruptException # if ^C is entered println(io) @@ -35,10 +37,12 @@ function compat(ctx::Context; io = nothing) start_pos = length(prompt) + 2 move_start = "\e[$(start_pos)G" clear_to_end = "\e[0J" - ccall(:jl_tty_set_mode, Int32, (Ptr{Cvoid},Int32), stdin.handle, true) + if input_io isa Base.TTY + ccall(:jl_tty_set_mode, Int32, (Ptr{Cvoid}, Int32), input_io.handle, true) + end while true print(io, move_start, clear_to_end, buffer, "\e[$(start_pos + cursor)G") - inp = TerminalMenus._readkey(stdin) + inp = TerminalMenus._readkey(input_io) if inp == '\r' # Carriage return println(io) break @@ -65,9 +69,9 @@ function compat(ctx::Context; io = nothing) if cursor == 1 buffer = buffer[2:end] elseif cursor == length(buffer) - buffer = buffer[1:end - 1] + buffer = buffer[1:(end - 1)] elseif cursor > 0 - buffer = buffer[1:(cursor-1)] * buffer[(cursor + 1):end] + buffer = buffer[1:(cursor - 1)] * buffer[(cursor + 1):end] else continue end @@ -85,9 +89,11 @@ function compat(ctx::Context; io = nothing) end buffer 
finally - ccall(:jl_tty_set_mode, Int32, (Ptr{Cvoid},Int32), stdin.handle, false) + if input_io isa Base.TTY + ccall(:jl_tty_set_mode, Int32, (Ptr{Cvoid}, Int32), input_io.handle, false) + end end new_entry = strip(resp) - compat(ctx, dep, string(new_entry)) + API._compat(ctx, dep, string(new_entry)) return end diff --git a/ext/REPLExt/completions.jl b/ext/REPLExt/completions.jl index eca5e11218..8d41302d85 100644 --- a/ext/REPLExt/completions.jl +++ b/ext/REPLExt/completions.jl @@ -34,11 +34,11 @@ function complete_local_dir(s, i1, i2) end function complete_expanded_local_dir(s, i1, i2, expanded_user, oldi2) - cmp = REPL.REPLCompletions.complete_path(s, i2, shell_escape=true) + cmp = REPL.REPLCompletions.complete_path(s, i2, shell_escape = true) cmp2 = cmp[2] completions = [REPL.REPLCompletions.completion_text(p) for p in cmp[1]] completions = filter!(completions) do x - Base.isaccessibledir(s[1:prevind(s, first(cmp2)-i1+1)]*x) + Base.isaccessibledir(s[1:prevind(s, first(cmp2) - i1 + 1)] * x) end if expanded_user if length(completions) == 1 && endswith(joinpath(homedir(), ""), first(completions)) @@ -54,9 +54,9 @@ end const JULIA_UUID = UUID("1222c4b2-2114-5bfd-aeef-88e4692bbb3e") -function complete_remote_package(partial; hint::Bool) - found_match = false - isempty(partial) && return String[] +function complete_remote_package!(comps, partial; hint::Bool) + isempty(partial) && return true # true means returned early + found_match = !isempty(comps) cmp = Set{String}() for reg in Registry.reachable_registries() for (uuid, regpkg) in reg @@ -80,9 +80,9 @@ function complete_remote_package(partial; hint::Bool) if is_julia_compat === nothing || is_julia_compat push!(cmp, name) # In hint mode the result is only used if there is a single matching entry - # so we abort the search + # so we can return no matches in case of more than one match if hint && found_match - return sort!(collect(cmp)) + return true # true means returned early end found_match = true break @@ -91,19 
+91,21 @@ function complete_remote_package(partial; hint::Bool) end end end - return sort!(collect(cmp)) + append!(comps, sort!(collect(cmp))) + return false # false means performed full search end function complete_help(options, partial; hint::Bool) names = String[] for cmds in values(SPECS) - append!(names, [spec.canonical_name for spec in values(cmds)]) + append!(names, [spec.canonical_name for spec in values(cmds)]) end return sort!(unique!(append!(names, collect(keys(SPECS))))) end function complete_installed_packages(options, partial; hint::Bool) - env = try EnvCache() + env = try + EnvCache() catch err err isa PkgError || rethrow() return String[] @@ -115,7 +117,8 @@ function complete_installed_packages(options, partial; hint::Bool) end function complete_all_installed_packages(options, partial; hint::Bool) - env = try EnvCache() + env = try + EnvCache() catch err err isa PkgError || rethrow() return String[] @@ -124,7 +127,8 @@ function complete_all_installed_packages(options, partial; hint::Bool) end function complete_installed_packages_and_compat(options, partial; hint::Bool) - env = try EnvCache() + env = try + EnvCache() catch err err isa PkgError || rethrow() return String[] @@ -136,7 +140,8 @@ function complete_installed_packages_and_compat(options, partial; hint::Bool) end function complete_fixed_packages(options, partial; hint::Bool) - env = try EnvCache() + env = try + EnvCache() catch err err isa PkgError || rethrow() return String[] @@ -149,13 +154,31 @@ function complete_add_dev(options, partial, i1, i2; hint::Bool) if occursin(Base.Filesystem.path_separator_re, partial) return comps, idx, !isempty(comps) end - comps = vcat(comps, sort(complete_remote_package(partial; hint))) - if !isempty(partial) + returned_early = complete_remote_package!(comps, partial; hint) + # returning early means that no further search should be done here + if !returned_early append!(comps, filter!(startswith(partial), [info.name for info in 
values(Types.stdlib_infos())])) end return comps, idx, !isempty(comps) end +# TODO: Move +import Pkg: Operations, Types, Apps +function complete_installed_apps(options, partial; hint) + manifest = try + Types.read_manifest(joinpath(Apps.app_env_folder(), "AppManifest.toml")) + catch err + err isa PkgError || rethrow() + return String[] + end + apps = String[] + for (uuid, entry) in manifest.deps + append!(apps, keys(entry.apps)) + push!(apps, entry.name) + end + return unique!(apps) +end + ######################## # COMPLETION INTERFACE # ######################## @@ -179,13 +202,23 @@ function complete_command(statement::Statement, final::Bool, on_sub::Bool) end complete_opt(opt_specs) = - unique(sort(map(wrap_option, - map(x -> getproperty(x, :name), - collect(values(opt_specs)))))) + unique( + sort( + map( + wrap_option, + map( + x -> getproperty(x, :name), + collect(values(opt_specs)) + ) + ) + ) +) -function complete_argument(spec::CommandSpec, options::Vector{String}, - partial::AbstractString, offset::Int, - index::Int; hint::Bool) +function complete_argument( + spec::CommandSpec, options::Vector{String}, + partial::AbstractString, offset::Int, + index::Int; hint::Bool + ) if spec.completions isa Symbol # if completions is a symbol, it is a function in REPLExt that needs to be forwarded # to REPLMode (couldn't be linked there because REPLExt is not a dependency of REPLMode) @@ -195,11 +228,11 @@ function complete_argument(spec::CommandSpec, options::Vector{String}, @error "REPLMode indicates a completion function called :$(spec.completions) that cannot be found in REPLExt" rethrow() end - spec.completions = function(opts, partial, offset, index; hint::Bool) - applicable(completions, opts, partial, offset, index) ? - completions(opts, partial, offset, index; hint) : - completions(opts, partial; hint) - end + spec.completions = function (opts, partial, offset, index; hint::Bool) + return applicable(completions, opts, partial, offset, index) ? 
+ completions(opts, partial, offset, index; hint) : + completions(opts, partial; hint) + end end spec.completions === nothing && return String[] # finish parsing opts @@ -216,7 +249,7 @@ end function _completions(input, final, offset, index; hint::Bool) statement, word_count, partial = nothing, nothing, nothing try - words = tokenize(input)[end] + words = tokenize(input; rm_leading_bracket = false)[end] word_count = length(words) statement, partial = core_parse(words) if final @@ -255,7 +288,7 @@ function _completions(input, final, offset, index; hint::Bool) end end -function completions(full, index; hint::Bool=false)::Tuple{Vector{String},UnitRange{Int},Bool} +function completions(full, index; hint::Bool = false)::Tuple{Vector{String}, UnitRange{Int}, Bool} pre = full[1:index] isempty(pre) && return default_commands(), 0:-1, false # empty input -> complete commands offset_adjust = 0 @@ -264,8 +297,8 @@ function completions(full, index; hint::Bool=false)::Tuple{Vector{String},UnitRa pre = string(pre[1], " ", pre[2:end]) offset_adjust = -1 end - last = split(pre, ' ', keepempty=true)[end] - offset = isempty(last) ? index+1+offset_adjust : last.offset+1+offset_adjust - final = isempty(last) # is the cursor still attached to the final token? + last = split(pre, ' ', keepempty = true)[end] + offset = isempty(last) ? index + 1 + offset_adjust : last.offset + 1 + offset_adjust + final = isempty(last) # is the cursor still attached to the final token? 
return _completions(pre, final, offset, index; hint) end diff --git a/ext/REPLExt/precompile.jl b/ext/REPLExt/precompile.jl index 2deb9b84f0..fbfdf14baf 100644 --- a/ext/REPLExt/precompile.jl +++ b/ext/REPLExt/precompile.jl @@ -32,7 +32,7 @@ let Base.precompile(Tuple{typeof(REPL.LineEdit.complete_line), REPLExt.PkgCompletionProvider, REPL.LineEdit.PromptState}) Base.precompile(Tuple{typeof(REPL.REPLCompletions.completion_text), REPL.REPLCompletions.PackageCompletion}) Base.precompile(Tuple{typeof(REPLExt.on_done), REPL.LineEdit.MIState, Base.GenericIOBuffer{Memory{UInt8}}, Bool, REPL.LineEditREPL}) - Base.precompile(Tuple{typeof(Core.kwcall), NamedTuple{(:hint,), Tuple{Bool}}, typeof(REPL.LineEdit.complete_line), REPLExt.PkgCompletionProvider, REPL.LineEdit.PromptState}) + return Base.precompile(Tuple{typeof(Core.kwcall), NamedTuple{(:hint,), Tuple{Bool}}, typeof(REPL.LineEdit.complete_line), REPLExt.PkgCompletionProvider, REPL.LineEdit.PromptState}) end if Base.generating_output() diff --git a/src/API.jl b/src/API.jl index 5e5723a8b7..f1abc6cb6b 100644 --- a/src/API.jl +++ b/src/API.jl @@ -12,9 +12,9 @@ import FileWatching import Base: StaleCacheKey -import ..depots, ..depots1, ..logdir, ..devdir, ..printpkgstyle +import ..depots, ..depots1, ..logdir, ..devdir, ..printpkgstyle, .._autoprecompilation_enabled_scoped import ..Operations, ..GitTools, ..Pkg, ..Registry -import ..can_fancyprint, ..pathrepr, ..isurl, ..PREV_ENV_PATH +import ..can_fancyprint, ..pathrepr, ..isurl, ..PREV_ENV_PATH, ..atomic_toml_write using ..Types, ..TOML using ..Types: VersionTypes using Base.BinaryPlatforms @@ -27,17 +27,17 @@ include("generate.jl") Base.@kwdef struct PackageInfo name::String - version::Union{Nothing,VersionNumber} - tree_hash::Union{Nothing,String} + version::Union{Nothing, VersionNumber} + tree_hash::Union{Nothing, String} is_direct_dep::Bool is_pinned::Bool is_tracking_path::Bool is_tracking_repo::Bool is_tracking_registry::Bool - git_revision::Union{Nothing,String} - 
git_source::Union{Nothing,String} + git_revision::Union{Nothing, String} + git_source::Union{Nothing, String} source::String - dependencies::Dict{String,UUID} + dependencies::Dict{String, UUID} end function Base.:(==)(a::PackageInfo, b::PackageInfo) @@ -53,29 +53,36 @@ end function package_info(env::EnvCache, pkg::PackageSpec)::PackageInfo entry = manifest_info(env.manifest, pkg.uuid) if entry === nothing - pkgerror("expected package $(err_rep(pkg)) to exist in the manifest", - " (use `resolve` to populate the manifest)") + pkgerror( + "expected package $(err_rep(pkg)) to exist in the manifest", + " (use `resolve` to populate the manifest)" + ) end - package_info(env, pkg, entry) + return package_info(env, pkg, entry) end function package_info(env::EnvCache, pkg::PackageSpec, entry::PackageEntry)::PackageInfo git_source = pkg.repo.source === nothing ? nothing : isurl(pkg.repo.source::String) ? pkg.repo.source::String : Operations.project_rel_path(env, pkg.repo.source::String) + _source_path = Operations.source_path(env.manifest_file, pkg) + if _source_path === nothing + @debug "Manifest file $(env.manifest_file) contents:\n$(read(env.manifest_file, String))" + pkgerror("could not find source path for package $(err_rep(pkg)) based on $(env.manifest_file)") + end info = PackageInfo( - name = pkg.name, - version = pkg.version != VersionSpec() ? pkg.version : nothing, - tree_hash = pkg.tree_hash === nothing ? nothing : string(pkg.tree_hash), # TODO or should it just be a SHA? - is_direct_dep = pkg.uuid in values(env.project.deps), - is_pinned = pkg.pinned, - is_tracking_path = pkg.path !== nothing, - is_tracking_repo = pkg.repo.rev !== nothing || pkg.repo.source !== nothing, + name = pkg.name, + version = pkg.version != VersionSpec() ? pkg.version : nothing, + tree_hash = pkg.tree_hash === nothing ? nothing : string(pkg.tree_hash), # TODO or should it just be a SHA? 
+ is_direct_dep = pkg.uuid in values(env.project.deps), + is_pinned = pkg.pinned, + is_tracking_path = pkg.path !== nothing, + is_tracking_repo = pkg.repo.rev !== nothing || pkg.repo.source !== nothing, is_tracking_registry = Operations.is_tracking_registry(pkg), - git_revision = pkg.repo.rev, - git_source = git_source, - source = Operations.project_rel_path(env, Operations.source_path(env.manifest_file, pkg)), - dependencies = copy(entry.deps), #TODO is copy needed? + git_revision = pkg.repo.rev, + git_source = git_source, + source = Operations.project_rel_path(env, _source_path), + dependencies = copy(entry.deps), #TODO is copy needed? ) return info end @@ -90,17 +97,17 @@ function dependencies(fn::Function, uuid::UUID) if dep === nothing pkgerror("dependency with UUID `$uuid` does not exist") end - fn(dep) + return fn(dep) end Base.@kwdef struct ProjectInfo - name::Union{Nothing,String} - uuid::Union{Nothing,UUID} - version::Union{Nothing,VersionNumber} + name::Union{Nothing, String} + uuid::Union{Nothing, UUID} + version::Union{Nothing, VersionNumber} ispackage::Bool - dependencies::Dict{String,UUID} - sources::Dict{String,Dict{String,String}} + dependencies::Dict{String, UUID} + sources::Dict{String, Dict{String, String}} path::String end @@ -108,26 +115,28 @@ project() = project(EnvCache()) function project(env::EnvCache)::ProjectInfo pkg = env.pkg return ProjectInfo( - name = pkg === nothing ? nothing : pkg.name, - uuid = pkg === nothing ? nothing : pkg.uuid, - version = pkg === nothing ? nothing : pkg.version::VersionNumber, - ispackage = pkg !== nothing, + name = pkg === nothing ? nothing : pkg.name, + uuid = pkg === nothing ? nothing : pkg.uuid, + version = pkg === nothing ? 
nothing : pkg.version::VersionNumber, + ispackage = pkg !== nothing, dependencies = env.project.deps, - sources = env.project.sources, - path = env.project_file + sources = env.project.sources, + path = env.project_file ) end -function check_package_name(x::AbstractString, mode::Union{Nothing,String,Symbol}=nothing) +function check_package_name(x::AbstractString, mode::Union{Nothing, String, Symbol} = nothing) if !Base.isidentifier(x) message = sprint() do iostr print(iostr, "`$x` is not a valid package name") if endswith(lowercase(x), ".jl") - print(iostr, ". Perhaps you meant `$(chop(x; tail=3))`") + print(iostr, ". Perhaps you meant `$(chop(x; tail = 3))`") end - if mode !== nothing && any(occursin.(['\\','/'], x)) # maybe a url or a path - print(iostr, "\nThe argument appears to be a URL or path, perhaps you meant ", - "`Pkg.$mode(url=\"...\")` or `Pkg.$mode(path=\"...\")`.") + if mode !== nothing && any(occursin.(['\\', '/'], x)) # maybe a url or a path + print( + iostr, "\nThe argument appears to be a URL or path, perhaps you meant ", + "`Pkg.$mode(url=\"...\")` or `Pkg.$mode(path=\"...\")`." + ) end end pkgerror(message) @@ -137,15 +146,15 @@ end check_package_name(::Nothing, ::Any) = nothing function require_not_empty(pkgs, f::Symbol) - isempty(pkgs) && pkgerror("$f requires at least one package") + return isempty(pkgs) && pkgerror("$f requires at least one package") end # Provide some convenience calls for f in (:develop, :add, :rm, :up, :pin, :free, :test, :build, :status, :why, :precompile) @eval begin $f(pkg::Union{AbstractString, PackageSpec}; kwargs...) = $f([pkg]; kwargs...) - $f(pkgs::Vector{<:AbstractString}; kwargs...) = $f([PackageSpec(pkg) for pkg in pkgs]; kwargs...) - function $f(pkgs::Vector{PackageSpec}; io::IO=$(f === :status ? :stdout_f : :stderr_f)(), kwargs...) + $f(pkgs::Vector{<:AbstractString}; kwargs...) = $f([PackageSpec(pkg) for pkg in pkgs]; kwargs...) + function $f(pkgs::Vector{PackageSpec}; io::IO = $(f === :status ? 
:stdout_f : :stderr_f)(), kwargs...) $(f != :precompile) && Registry.download_default_registries(io) ctx = Context() # Save initial environment for undo/redo functionality @@ -153,7 +162,7 @@ for f in (:develop, :add, :rm, :up, :pin, :free, :test, :build, :status, :why, : add_snapshot_to_undo(ctx.env) saved_initial_snapshot[] = true end - kwargs = merge((;kwargs...), (:io => io,)) + kwargs = merge((; kwargs...), (:io => io,)) pkgs = deepcopy(pkgs) # don't mutate input foreach(handle_package_input!, pkgs) ret = $f(ctx, pkgs; kwargs...) @@ -162,53 +171,85 @@ for f in (:develop, :add, :rm, :up, :pin, :free, :test, :build, :status, :why, : return ret end $f(ctx::Context; kwargs...) = $f(ctx, PackageSpec[]; kwargs...) - function $f(; name::Union{Nothing,AbstractString}=nothing, uuid::Union{Nothing,String,UUID}=nothing, - version::Union{VersionNumber, String, VersionSpec, Nothing}=nothing, - url=nothing, rev=nothing, path=nothing, mode=PKGMODE_PROJECT, subdir=nothing, kwargs...) + function $f(; + name::Union{Nothing, AbstractString} = nothing, uuid::Union{Nothing, String, UUID} = nothing, + version::Union{VersionNumber, String, VersionSpec, Nothing} = nothing, + url = nothing, rev = nothing, path = nothing, mode = PKGMODE_PROJECT, subdir = nothing, kwargs... + ) pkg = PackageSpec(; name, uuid, version, url, rev, path, subdir) if $f === status || $f === rm || $f === up - kwargs = merge((;kwargs...), (:mode => mode,)) + kwargs = merge((; kwargs...), (:mode => mode,)) end # Handle $f() case - if all(isnothing, [name,uuid,version,url,rev,path,subdir]) + return if all(isnothing, [name, uuid, version, url, rev, path, subdir]) $f(PackageSpec[]; kwargs...) else $f(pkg; kwargs...) end end function $f(pkgs::Vector{<:NamedTuple}; kwargs...) - $f([PackageSpec(;pkg...) for pkg in pkgs]; kwargs...) + return $f([PackageSpec(; pkg...) for pkg in pkgs]; kwargs...) 
end end end -function update_source_if_set(project, pkg) +function update_source_if_set(env, pkg) + project = env.project source = get(project.sources, pkg.name, nothing) - source === nothing && return - # This should probably not modify the dicts directly... - if pkg.repo.source !== nothing - source["url"] = pkg.repo.source - end - if pkg.repo.rev !== nothing - source["rev"] = pkg.repo.rev - end - if pkg.path !== nothing - source["path"] = pkg.path - end - path, repo = get_path_repo(project, pkg.name) - if path !== nothing - pkg.path = path - end - if repo.source !== nothing - pkg.repo.source = repo.source + if source !== nothing + if pkg.repo == GitRepo() + delete!(project.sources, pkg.name) + else + # This should probably not modify the dicts directly... + if pkg.repo.source !== nothing + source["url"] = pkg.repo.source + delete!(source, "path") + end + if pkg.repo.rev !== nothing + source["rev"] = pkg.repo.rev + delete!(source, "path") + end + if pkg.repo.subdir !== nothing + source["subdir"] = pkg.repo.subdir + end + if pkg.path !== nothing + source["path"] = pkg.path + delete!(source, "url") + delete!(source, "rev") + end + end + if pkg.subdir !== nothing + source["subdir"] = pkg.subdir + end + path, repo = get_path_repo(project, pkg.name) + if path !== nothing + pkg.path = path + end + if repo.source !== nothing + pkg.repo.source = repo.source + end + if repo.rev !== nothing + pkg.repo.rev = repo.rev + end + if repo.subdir !== nothing + pkg.repo.subdir = repo.subdir + end end - if repo.rev !== nothing - pkg.repo.rev = repo.rev + + # Packages in manifest should have their paths set to the path in the manifest + for (path, wproj) in env.workspace + if wproj.uuid == pkg.uuid + pkg.path = Types.relative_project_path(env.manifest_file, dirname(path)) + break + end end + return end -function develop(ctx::Context, pkgs::Vector{PackageSpec}; shared::Bool=true, - preserve::PreserveLevel=Operations.default_preserve(), platform::AbstractPlatform=HostPlatform(), 
kwargs...) +function develop( + ctx::Context, pkgs::Vector{PackageSpec}; shared::Bool = true, + preserve::PreserveLevel = Operations.default_preserve(), platform::AbstractPlatform = HostPlatform(), kwargs... + ) require_not_empty(pkgs, :develop) Context!(ctx; kwargs...) @@ -224,8 +265,10 @@ function develop(ctx::Context, pkgs::Vector{PackageSpec}; shared::Bool=true, pkgerror("rev argument not supported by `develop`; consider using `add` instead") end if pkg.version != VersionSpec() - pkgerror("version specification invalid when calling `develop`:", - " `$(pkg.version)` specified for package $(err_rep(pkg))") + pkgerror( + "version specification invalid when calling `develop`:", + " `$(pkg.version)` specified for package $(err_rep(pkg))" + ) end # not strictly necessary to check these fields early, but it is more efficient if pkg.name !== nothing && (length(findall(x -> x.name == pkg.name, pkgs)) > 1) @@ -238,6 +281,7 @@ function develop(ctx::Context, pkgs::Vector{PackageSpec}; shared::Bool=true, new_git = handle_repos_develop!(ctx, pkgs, shared) + Operations.update_registries(ctx; force = false, update_cooldown = Day(1)) for pkg in pkgs if Types.collides_with_project(ctx.env, pkg) @@ -246,15 +290,17 @@ function develop(ctx::Context, pkgs::Vector{PackageSpec}; shared::Bool=true, if length(findall(x -> x.uuid == pkg.uuid, pkgs)) > 1 pkgerror("it is invalid to specify multiple packages with the same UUID: $(err_rep(pkg))") end - update_source_if_set(ctx.env.project, pkg) + update_source_if_set(ctx.env, pkg) end - Operations.develop(ctx, pkgs, new_git; preserve=preserve, platform=platform) + Operations.develop(ctx, pkgs, new_git; preserve = preserve, platform = platform) return end -function add(ctx::Context, pkgs::Vector{PackageSpec}; preserve::PreserveLevel=Operations.default_preserve(), - platform::AbstractPlatform=HostPlatform(), target::Symbol=:deps, allow_autoprecomp::Bool=true, kwargs...) 
+function add( + ctx::Context, pkgs::Vector{PackageSpec}; preserve::PreserveLevel = Operations.default_preserve(), + platform::AbstractPlatform = HostPlatform(), target::Symbol = :deps, allow_autoprecomp::Bool = true, kwargs... + ) require_not_empty(pkgs, :add) Context!(ctx; kwargs...) @@ -268,8 +314,10 @@ function add(ctx::Context, pkgs::Vector{PackageSpec}; preserve::PreserveLevel=Op end if pkg.repo.source !== nothing || pkg.repo.rev !== nothing if pkg.version != VersionSpec() - pkgerror("version specification invalid when tracking a repository:", - " `$(pkg.version)` specified for package $(err_rep(pkg))") + pkgerror( + "version specification invalid when tracking a repository:", + " `$(pkg.version)` specified for package $(err_rep(pkg))" + ) end end # not strictly necessary to check these fields early, but it is more efficient @@ -286,12 +334,12 @@ function add(ctx::Context, pkgs::Vector{PackageSpec}; preserve::PreserveLevel=Op # repo + unpinned -> name, uuid, repo.rev, repo.source, tree_hash # repo + pinned -> name, uuid, tree_hash - Operations.update_registries(ctx; force=false, update_cooldown=Day(1)) + Operations.update_registries(ctx; force = false, update_cooldown = Day(1)) project_deps_resolve!(ctx.env, pkgs) registry_resolve!(ctx.registries, pkgs) stdlib_resolve!(pkgs) - ensure_resolved(ctx, ctx.env.manifest, pkgs, registry=true) + ensure_resolved(ctx, ctx.env.manifest, pkgs, registry = true) for pkg in pkgs if Types.collides_with_project(ctx.env, pkg) @@ -300,14 +348,14 @@ function add(ctx::Context, pkgs::Vector{PackageSpec}; preserve::PreserveLevel=Op if length(findall(x -> x.uuid == pkg.uuid, pkgs)) > 1 pkgerror("it is invalid to specify multiple packages with the same UUID: $(err_rep(pkg))") end - update_source_if_set(ctx.env.project, pkg) + update_source_if_set(ctx.env, pkg) end Operations.add(ctx, pkgs, new_git; allow_autoprecomp, preserve, platform, target) return end -function rm(ctx::Context, pkgs::Vector{PackageSpec}; mode=PKGMODE_PROJECT, 
all_pkgs::Bool=false, kwargs...) +function rm(ctx::Context, pkgs::Vector{PackageSpec}; mode = PKGMODE_PROJECT, all_pkgs::Bool = false, kwargs...) Context!(ctx; kwargs...) if all_pkgs !isempty(pkgs) && pkgerror("cannot specify packages when operating on all packages") @@ -320,9 +368,11 @@ function rm(ctx::Context, pkgs::Vector{PackageSpec}; mode=PKGMODE_PROJECT, all_p if pkg.name === nothing && pkg.uuid === nothing pkgerror("name or UUID specification required when calling `rm`") end - if !(pkg.version == VersionSpec() && pkg.pinned == false && - pkg.tree_hash === nothing && pkg.repo.source === nothing && - pkg.repo.rev === nothing && pkg.path === nothing) + if !( + pkg.version == VersionSpec() && pkg.pinned == false && + pkg.tree_hash === nothing && pkg.repo.source === nothing && + pkg.repo.rev === nothing && pkg.path === nothing + ) pkgerror("packages may only be specified by name or UUID when calling `rm`") end end @@ -341,24 +391,26 @@ function append_all_pkgs!(pkgs, ctx, mode) if mode == PKGMODE_PROJECT || mode == PKGMODE_COMBINED for (name::String, uuid::UUID) in ctx.env.project.deps path, repo = get_path_repo(ctx.env.project, name) - push!(pkgs, PackageSpec(name=name, uuid=uuid, path=path, repo=repo)) + push!(pkgs, PackageSpec(name = name, uuid = uuid, path = path, repo = repo)) end end if mode == PKGMODE_MANIFEST || mode == PKGMODE_COMBINED for (uuid, entry) in ctx.env.manifest path, repo = get_path_repo(ctx.env.project, entry.name) - push!(pkgs, PackageSpec(name=entry.name, uuid=uuid, path=path, repo=repo)) + push!(pkgs, PackageSpec(name = entry.name, uuid = uuid, path = path, repo = repo)) end end return end -function up(ctx::Context, pkgs::Vector{PackageSpec}; - level::UpgradeLevel=UPLEVEL_MAJOR, mode::PackageMode=PKGMODE_PROJECT, - preserve::Union{Nothing,PreserveLevel}= isempty(pkgs) ? nothing : PRESERVE_ALL, - update_registry::Bool=true, - skip_writing_project::Bool=false, - kwargs...) 
+function up( + ctx::Context, pkgs::Vector{PackageSpec}; + level::UpgradeLevel = UPLEVEL_MAJOR, mode::PackageMode = PKGMODE_PROJECT, + preserve::Union{Nothing, PreserveLevel} = isempty(pkgs) ? nothing : PRESERVE_ALL, + update_registry::Bool = true, + skip_writing_project::Bool = false, + kwargs... + ) Context!(ctx; kwargs...) if Operations.is_fully_pinned(ctx) printpkgstyle(ctx.io, :Update, "All dependencies are pinned - nothing to update.", color = Base.info_color()) @@ -366,7 +418,7 @@ function up(ctx::Context, pkgs::Vector{PackageSpec}; end if update_registry Registry.download_default_registries(ctx.io) - Operations.update_registries(ctx; force=true) + Operations.update_registries(ctx; force = true) end Operations.prune_manifest(ctx.env) if isempty(pkgs) @@ -378,18 +430,20 @@ function up(ctx::Context, pkgs::Vector{PackageSpec}; manifest_resolve!(ctx.env.manifest, pkgs) ensure_resolved(ctx, ctx.env.manifest, pkgs) end - + for pkg in pkgs + update_source_if_set(ctx.env, pkg) + end Operations.up(ctx, pkgs, level; skip_writing_project, preserve) return end -resolve(; io::IO=stderr_f(), kwargs...) = resolve(Context(;io); kwargs...) -function resolve(ctx::Context; skip_writing_project::Bool=false, kwargs...) - up(ctx; level=UPLEVEL_FIXED, mode=PKGMODE_MANIFEST, update_registry=false, skip_writing_project, kwargs...) +resolve(; io::IO = stderr_f(), kwargs...) = resolve(Context(; io); kwargs...) +function resolve(ctx::Context; skip_writing_project::Bool = false, kwargs...) + up(ctx; level = UPLEVEL_FIXED, mode = PKGMODE_MANIFEST, update_registry = false, skip_writing_project, kwargs...) return nothing end -function pin(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwargs...) +function pin(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool = false, kwargs...) Context!(ctx; kwargs...) 
if all_pkgs !isempty(pkgs) && pkgerror("cannot specify packages when operating on all packages") @@ -403,12 +457,16 @@ function pin(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwar pkgerror("name or UUID specification required when calling `pin`") end if pkg.repo.source !== nothing - pkgerror("repository specification invalid when calling `pin`:", - " `$(pkg.repo.source)` specified for package $(err_rep(pkg))") + pkgerror( + "repository specification invalid when calling `pin`:", + " `$(pkg.repo.source)` specified for package $(err_rep(pkg))" + ) end if pkg.repo.rev !== nothing - pkgerror("git revision specification invalid when calling `pin`:", - " `$(pkg.repo.rev)` specified for package $(err_rep(pkg))") + pkgerror( + "git revision specification invalid when calling `pin`:", + " `$(pkg.repo.rev)` specified for package $(err_rep(pkg))" + ) end version = pkg.version if version isa VersionSpec @@ -416,6 +474,7 @@ function pin(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwar pkgerror("pinning a package requires a single version, not a versionrange") end end + update_source_if_set(ctx.env, pkg) end project_deps_resolve!(ctx.env, pkgs) @@ -424,7 +483,7 @@ function pin(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwar return end -function free(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwargs...) +function free(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool = false, kwargs...) Context!(ctx; kwargs...) 
if all_pkgs !isempty(pkgs) && pkgerror("cannot specify packages when operating on all packages") @@ -437,9 +496,11 @@ function free(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwa if pkg.name === nothing && pkg.uuid === nothing pkgerror("name or UUID specification required when calling `free`") end - if !(pkg.version == VersionSpec() && pkg.pinned == false && - pkg.tree_hash === nothing && pkg.repo.source === nothing && - pkg.repo.rev === nothing && pkg.path === nothing) + if !( + pkg.version == VersionSpec() && pkg.pinned == false && + pkg.tree_hash === nothing && pkg.repo.source === nothing && + pkg.repo.rev === nothing && pkg.path === nothing + ) pkgerror("packages may only be specified by name or UUID when calling `free`") end end @@ -451,14 +512,16 @@ function free(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwa return end -function test(ctx::Context, pkgs::Vector{PackageSpec}; - coverage=false, test_fn=nothing, - julia_args::Union{Cmd, AbstractVector{<:AbstractString}}=``, - test_args::Union{Cmd, AbstractVector{<:AbstractString}}=``, - force_latest_compatible_version::Bool=false, - allow_earlier_backwards_compatible_versions::Bool=true, - allow_reresolve::Bool=true, - kwargs...) +function test( + ctx::Context, pkgs::Vector{PackageSpec}; + coverage = false, test_fn = nothing, + julia_args::Union{Cmd, AbstractVector{<:AbstractString}} = ``, + test_args::Union{Cmd, AbstractVector{<:AbstractString}} = ``, + force_latest_compatible_version::Bool = false, + allow_earlier_backwards_compatible_versions::Bool = true, + allow_reresolve::Bool = true, + kwargs... + ) julia_args = Cmd(julia_args) test_args = Cmd(test_args) Context!(ctx; kwargs...) 
@@ -496,8 +559,8 @@ function is_manifest_current(path::AbstractString) return Operations.is_manifest_current(env) end -const UsageDict = Dict{String,DateTime} -const UsageByDepotDict = Dict{String,UsageDict} +const UsageDict = Dict{String, DateTime} +const UsageByDepotDict = Dict{String, UsageDict} """ gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, kwargs...) @@ -515,7 +578,7 @@ admin privileges depending on the setup). Use verbose mode (`verbose=true`) for detailed output. """ -function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, force=false, kwargs...) +function gc(ctx::Context = Context(); collect_delay::Period = Day(7), verbose = false, force = false, kwargs...) Context!(ctx; kwargs...) env = ctx.env @@ -549,6 +612,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, for (filename, infos) in parse_toml(usage_filepath) f.(Ref(filename), infos) end + return end # Extract usage data from this depot, (taking only the latest state for each @@ -556,7 +620,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, # into the overall list across depots to create a single, coherent view across # all depots. 
usage = UsageDict() - let usage=usage + let usage = usage reduce_usage!(joinpath(logdir(depot), "manifest_usage.toml")) do filename, info # For Manifest usage, store only the last DateTime for each filename found usage[filename] = max(get(usage, filename, DateTime(0)), DateTime(info["time"])::DateTime) @@ -565,7 +629,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, manifest_usage_by_depot[depot] = usage usage = UsageDict() - let usage=usage + let usage = usage reduce_usage!(joinpath(logdir(depot), "artifact_usage.toml")) do filename, info # For Artifact usage, store only the last DateTime for each filename found usage[filename] = max(get(usage, filename, DateTime(0)), DateTime(info["time"])::DateTime) @@ -576,7 +640,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, # track last-used usage = UsageDict() parents = Dict{String, Set{String}}() - let usage=usage + let usage = usage reduce_usage!(joinpath(logdir(depot), "scratch_usage.toml")) do filename, info # For Artifact usage, store only the last DateTime for each filename found usage[filename] = max(get(usage, filename, DateTime(0)), DateTime(info["time"])::DateTime) @@ -617,21 +681,20 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, # Write out the TOML file for this depot usage_path = joinpath(logdir(depot), fname) if !(isempty(usage)::Bool) || isfile(usage_path) - let usage=usage - open(usage_path, "w") do io - TOML.print(io, usage, sorted=true) - end + let usage = usage + atomic_toml_write(usage_path, usage, sorted = true) end end end + return end # Write condensed Manifest usage - let all_manifest_tomls=all_manifest_tomls + let all_manifest_tomls = all_manifest_tomls write_condensed_toml(manifest_usage_by_depot, "manifest_usage.toml") do depot, usage # Keep only manifest usage markers that are still existent - let usage=usage - filter!(((k,v),) -> k in all_manifest_tomls, usage) + let usage = usage + 
filter!(((k, v),) -> k in all_manifest_tomls, usage) # Expand it back into a dict-of-dicts return Dict(k => [Dict("time" => v)] for (k, v) in usage) @@ -640,23 +703,23 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, end # Write condensed Artifact usage - let all_artifact_tomls=all_artifact_tomls + let all_artifact_tomls = all_artifact_tomls write_condensed_toml(artifact_usage_by_depot, "artifact_usage.toml") do depot, usage let usage = usage - filter!(((k,v),) -> k in all_artifact_tomls, usage) + filter!(((k, v),) -> k in all_artifact_tomls, usage) return Dict(k => [Dict("time" => v)] for (k, v) in usage) end end end # Write condensed scratch space usage - let all_scratch_parents=all_scratch_parents, all_scratch_dirs=all_scratch_dirs + let all_scratch_parents = all_scratch_parents, all_scratch_dirs = all_scratch_dirs write_condensed_toml(scratch_usage_by_depot, "scratch_usage.toml") do depot, usage # Keep only scratch directories that still exist - filter!(((k,v),) -> k in all_scratch_dirs, usage) + filter!(((k, v),) -> k in all_scratch_dirs, usage) # Expand it back into a dict-of-dicts - expanded_usage = Dict{String,Vector{Dict}}() + expanded_usage = Dict{String, Vector{Dict}}() for (k, v) in usage # Drop scratch spaces whose parents are all non-existent parents = scratch_parents_by_depot[depot][k] @@ -665,10 +728,12 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, continue end - expanded_usage[k] = [Dict( - "time" => v, - "parent_projects" => collect(parents), - )] + expanded_usage[k] = [ + Dict( + "time" => v, + "parent_projects" => collect(parents), + ), + ] end return expanded_usage end @@ -756,7 +821,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, end # Mark packages/artifacts as active or not by calling the appropriate user function - function mark(process_func::Function, index_files, ctx::Context; do_print=true, verbose=false, file_str=nothing) + 
function mark(process_func::Function, index_files, ctx::Context; do_print = true, verbose = false, file_str = nothing) marked_paths = String[] active_index_files = Set{String}() for index_file in index_files @@ -807,13 +872,16 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, push!(deletion_list, path) end end + return end # Scan manifests, parse them, read in all UUIDs listed and mark those as active # printpkgstyle(ctx.io, :Active, "manifests:") - packages_to_keep = mark(process_manifest_pkgs, all_manifest_tomls, ctx, - verbose=verbose, file_str="manifest files") + packages_to_keep = mark( + process_manifest_pkgs, all_manifest_tomls, ctx, + verbose = verbose, file_str = "manifest files" + ) # Do an initial scan of our depots to get a preliminary `packages_to_delete`. packages_to_delete = String[] @@ -842,15 +910,19 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, # `packages_to_delete`, as `process_artifacts_toml()` uses it internally to discount # `Artifacts.toml` files that will be deleted by the future culling operation. 
# printpkgstyle(ctx.io, :Active, "artifacts:") - artifacts_to_keep = let packages_to_delete=packages_to_delete - mark(x -> process_artifacts_toml(x, packages_to_delete), - all_artifact_tomls, ctx; verbose=verbose, file_str="artifact files") + artifacts_to_keep = let packages_to_delete = packages_to_delete + mark( + x -> process_artifacts_toml(x, packages_to_delete), + all_artifact_tomls, ctx; verbose = verbose, file_str = "artifact files" + ) end - repos_to_keep = mark(process_manifest_repos, all_manifest_tomls, ctx; do_print=false) + repos_to_keep = mark(process_manifest_repos, all_manifest_tomls, ctx; do_print = false) # printpkgstyle(ctx.io, :Active, "scratchspaces:") - spaces_to_keep = let packages_to_delete=packages_to_delete - mark(x -> process_scratchspace(x, packages_to_delete), - all_scratch_dirs, ctx; verbose=verbose, file_str="scratchspaces") + spaces_to_keep = let packages_to_delete = packages_to_delete + mark( + x -> process_scratchspace(x, packages_to_delete), + all_scratch_dirs, ctx; verbose = verbose, file_str = "scratchspaces" + ) end # Collect all orphaned paths (packages, artifacts and repos that are not reachable). 
These @@ -922,8 +994,8 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, end elseif uuid == Operations.PkgUUID && isfile(space_dir_or_file) # special cleanup for the precompile cache files that Pkg saves - if any(prefix->startswith(basename(space_dir_or_file), prefix), ("suspend_cache_", "pending_cache_")) - if mtime(space_dir_or_file) < (time() - (24*60*60)) + if any(prefix -> startswith(basename(space_dir_or_file), prefix), ("suspend_cache_", "pending_cache_")) + if mtime(space_dir_or_file) < (time() - (24 * 60 * 60)) push!(depot_orphaned_scratchspaces, space_dir_or_file) end end @@ -950,9 +1022,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, # Write out the `new_orphanage` for this depot mkpath(dirname(orphanage_file)) - open(orphanage_file, "w") do io - TOML.print(io, new_orphanage, sorted=true) - end + atomic_toml_write(orphanage_file, new_orphanage, sorted = true) end function recursive_dir_size(path) @@ -964,12 +1034,12 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, try size += lstat(path).size catch ex - @error("Failed to calculate size of $path", exception=ex) + @error("Failed to calculate size of $path", exception = ex) end end end catch ex - @error("Failed to calculate size of $path", exception=ex) + @error("Failed to calculate size of $path", exception = ex) end return size end @@ -980,7 +1050,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, try lstat(path).size catch ex - @error("Failed to calculate size of $path", exception=ex) + @error("Failed to calculate size of $path", exception = ex) 0 end else @@ -988,14 +1058,16 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, end try Base.Filesystem.prepare_for_deletion(path) - Base.rm(path; recursive=true, force=true) + Base.rm(path; recursive = true, force = true) catch e - @warn("Failed to delete $path", exception=e) + 
@warn("Failed to delete $path", exception = e) return 0 end if verbose - printpkgstyle(ctx.io, :Deleted, pathrepr(path) * " (" * - Base.format_bytes(path_size) * ")") + printpkgstyle( + ctx.io, :Deleted, pathrepr(path) * " (" * + Base.format_bytes(path_size) * ")" + ) end return path_size end @@ -1049,12 +1121,12 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, # Do this silently because it's out of scope for Pkg.gc() but it's helpful to use this opportunity to do it if isdefined(Base.Filesystem, :delayed_delete_dir) if isdir(Base.Filesystem.delayed_delete_dir()) - for p in readdir(Base.Filesystem.delayed_delete_dir(), join=true) + for p in readdir(Base.Filesystem.delayed_delete_dir(), join = true) try Base.Filesystem.prepare_for_deletion(p) - Base.rm(p; recursive=true, force=true, allow_delayed_delete=false) + Base.rm(p; recursive = true, force = true, allow_delayed_delete = false) catch e - @debug "Failed to delete $p" exception=e + @debug "Failed to delete $p" exception = e end end end @@ -1072,7 +1144,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, s = ndel == 1 ? "" : "s" bytes_saved_string = Base.format_bytes(freed) - printpkgstyle(ctx.io, :Deleted, "$(ndel) $(name)$(s) ($bytes_saved_string)") + return printpkgstyle(ctx.io, :Deleted, "$(ndel) $(name)$(s) ($bytes_saved_string)") end print_deleted(ndel_pkg, package_space_freed, "package installation") print_deleted(ndel_repo, repo_space_freed, "repo") @@ -1086,7 +1158,7 @@ function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, return end -function build(ctx::Context, pkgs::Vector{PackageSpec}; verbose=false, kwargs...) +function build(ctx::Context, pkgs::Vector{PackageSpec}; verbose = false, allow_reresolve::Bool = true, kwargs...) Context!(ctx; kwargs...) if isempty(pkgs) @@ -1101,7 +1173,7 @@ function build(ctx::Context, pkgs::Vector{PackageSpec}; verbose=false, kwargs... 
project_resolve!(ctx.env, pkgs) manifest_resolve!(ctx.env.manifest, pkgs) ensure_resolved(ctx, ctx.env.manifest, pkgs) - Operations.build(ctx, Set{UUID}(pkg.uuid for pkg in pkgs), verbose) + return Operations.build(ctx, Set{UUID}(pkg.uuid for pkg in pkgs), verbose; allow_reresolve) end function get_or_make_pkgspec(pkgspecs::Vector{PackageSpec}, ctx::Context, uuid) @@ -1123,13 +1195,15 @@ function get_or_make_pkgspec(pkgspecs::Vector{PackageSpec}, ctx::Context, uuid) end end -function precompile(ctx::Context, pkgs::Vector{PackageSpec}; internal_call::Bool=false, - strict::Bool=false, warn_loaded = true, already_instantiated = false, timing::Bool = false, - _from_loading::Bool=false, configs::Union{Base.Precompilation.Config,Vector{Base.Precompilation.Config}}=(``=>Base.CacheFlags()), - workspace::Bool=false, kwargs...) +function precompile( + ctx::Context, pkgs::Vector{PackageSpec}; internal_call::Bool = false, + strict::Bool = false, warn_loaded = true, already_instantiated = false, timing::Bool = false, + _from_loading::Bool = false, configs::Union{Base.Precompilation.Config, Vector{Base.Precompilation.Config}} = (`` => Base.CacheFlags()), + workspace::Bool = false, kwargs... + ) Context!(ctx; kwargs...) if !already_instantiated - instantiate(ctx; allow_autoprecomp=false, kwargs...) + instantiate(ctx; allow_autoprecomp = false, kwargs...) @debug "precompile: instantiated" end @@ -1140,16 +1214,25 @@ function precompile(ctx::Context, pkgs::Vector{PackageSpec}; internal_call::Bool end io = ctx.io - if io isa IOContext{IO} + if io isa IOContext{IO} && !isa(io.io, Base.PipeEndpoint) # precompile does quite a bit of output and using the IOContext{IO} can cause # some slowdowns, the important part here is to not specialize the whole - # precompile function on the io + # precompile function on the io. + # But don't unwrap the IOContext if it is a PipeEndpoint, as that would + # cause the output to lose color. 
io = io.io end - activate(dirname(ctx.env.project_file)) do + return activate(dirname(ctx.env.project_file)) do pkgs_name = String[pkg.name for pkg in pkgs] - return Base.Precompilation.precompilepkgs(pkgs_name; internal_call, strict, warn_loaded, timing, _from_loading, configs, manifest=workspace, io) + return Base.Precompilation.precompilepkgs(pkgs_name; internal_call, strict, warn_loaded, timing, _from_loading, configs, manifest = workspace, io) + end +end + +function precompile(f, args...; kwargs...) + return Base.ScopedValues.@with _autoprecompilation_enabled_scoped => false begin + f() + Pkg.precompile(args...; kwargs...) end end @@ -1163,10 +1246,12 @@ function tree_hash(repo::LibGit2.GitRepo, tree_hash::String) end instantiate(; kwargs...) = instantiate(Context(); kwargs...) -function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing, - update_registry::Bool=true, verbose::Bool=false, - platform::AbstractPlatform=HostPlatform(), allow_build::Bool=true, allow_autoprecomp::Bool=true, - workspace::Bool=false, julia_version_strict::Bool=false, kwargs...) +function instantiate( + ctx::Context; manifest::Union{Bool, Nothing} = nothing, + update_registry::Bool = true, verbose::Bool = false, + platform::AbstractPlatform = HostPlatform(), allow_build::Bool = true, allow_autoprecomp::Bool = true, + workspace::Bool = false, julia_version_strict::Bool = false, kwargs... + ) Context!(ctx; kwargs...) 
if Registry.download_default_registries(ctx.io) copy!(ctx.registries, Registry.reachable_registries()) @@ -1174,7 +1259,7 @@ function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing, if !isfile(ctx.env.project_file) && isfile(ctx.env.manifest_file) _manifest = Pkg.Types.read_manifest(ctx.env.manifest_file) Types.check_manifest_julia_version_compat(_manifest, ctx.env.manifest_file; julia_version_strict) - deps = Dict{String,String}() + deps = Dict{String, String}() for (uuid, pkg) in _manifest if pkg.name in keys(deps) # TODO, query what package to put in Project when in interactive mode? @@ -1183,7 +1268,7 @@ function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing, deps[pkg.name] = string(uuid) end Types.write_project(Dict("deps" => deps), ctx.env.project_file) - return instantiate(Context(); manifest=manifest, update_registry=update_registry, allow_autoprecomp=allow_autoprecomp, verbose=verbose, platform=platform, kwargs...) + return instantiate(Context(); manifest = manifest, update_registry = update_registry, allow_autoprecomp = allow_autoprecomp, verbose = verbose, platform = platform, kwargs...) end if (!isfile(ctx.env.manifest_file) && manifest === nothing) || manifest == false # given no manifest exists, only allow invoking a registry update if there are project deps @@ -1198,17 +1283,24 @@ function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing, Types.check_manifest_julia_version_compat(ctx.env.manifest, ctx.env.manifest_file; julia_version_strict) if Operations.is_manifest_current(ctx.env) === false + resolve_cmd = Pkg.in_repl_mode() ? "pkg> resolve" : "Pkg.resolve()" + update_cmd = Pkg.in_repl_mode() ? "pkg> update" : "Pkg.update()" @warn """The project dependencies or compat requirements have changed since the manifest was last resolved. 
- It is recommended to `Pkg.resolve()` or consider `Pkg.update()` if necessary.""" + It is recommended to `$resolve_cmd` or consider `$update_cmd` if necessary.""" end Operations.prune_manifest(ctx.env) for (name, uuid) in ctx.env.project.deps get(ctx.env.manifest, uuid, nothing) === nothing || continue - pkgerror("`$name` is a direct dependency, but does not appear in the manifest.", - " If you intend `$name` to be a direct dependency, run `Pkg.resolve()` to populate the manifest.", - " Otherwise, remove `$name` with `Pkg.rm(\"$name\")`.", - " Finally, run `Pkg.instantiate()` again.") + resolve_cmd = Pkg.in_repl_mode() ? "pkg> resolve" : "Pkg.resolve()" + rm_cmd = Pkg.in_repl_mode() ? "pkg> rm $name" : "Pkg.rm(\"$name\")" + instantiate_cmd = Pkg.in_repl_mode() ? "pkg> instantiate" : "Pkg.instantiate()" + pkgerror( + "`$name` is a direct dependency, but does not appear in the manifest.", + " If you intend `$name` to be a direct dependency, run `$resolve_cmd` to populate the manifest.", + " Otherwise, remove `$name` with `$rm_cmd`.", + " Finally, run `$instantiate_cmd` again." 
+ ) end # check if all source code and artifacts are downloaded to exit early if Operations.is_instantiated(ctx.env, workspace; platform) @@ -1228,7 +1320,7 @@ function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing, if !(e isa PkgError) || update_registry == false rethrow(e) end - Operations.update_registries(ctx; force=false) + Operations.update_registries(ctx; force = false) Operations.check_registered(ctx.registries, pkgs) end new_git = UUID[] @@ -1247,12 +1339,12 @@ function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing, pkgerror("Did not find path `$(repo_source)` for $(err_rep(pkg))") end repo_path = Types.add_repo_cache_path(repo_source) - let repo_source=repo_source - LibGit2.with(GitTools.ensure_clone(ctx.io, repo_path, repo_source; isbare=true)) do repo + let repo_source = repo_source + LibGit2.with(GitTools.ensure_clone(ctx.io, repo_path, repo_source; isbare = true)) do repo # We only update the clone if the tree hash can't be found tree_hash_object = tree_hash(repo, string(pkg.tree_hash)) if tree_hash_object === nothing - GitTools.fetch(ctx.io, repo, repo_source; refspecs=Types.refspecs) + GitTools.fetch(ctx.io, repo, repo_source; refspecs = Types.refspecs) tree_hash_object = tree_hash(repo, string(pkg.tree_hash)) end if tree_hash_object === nothing @@ -1270,35 +1362,35 @@ function instantiate(ctx::Context; manifest::Union{Bool, Nothing}=nothing, # Install all artifacts Operations.download_artifacts(ctx; platform, verbose) # Run build scripts - allow_build && Operations.build_versions(ctx, union(new_apply, new_git); verbose=verbose) + allow_build && Operations.build_versions(ctx, union(new_apply, new_git); verbose = verbose) - allow_autoprecomp && Pkg._auto_precompile(ctx, already_instantiated = true) + return allow_autoprecomp && Pkg._auto_precompile(ctx, already_instantiated = true) end -@deprecate status(mode::PackageMode) status(mode=mode) +@deprecate status(mode::PackageMode) status(mode = mode) -function 
status(ctx::Context, pkgs::Vector{PackageSpec}; diff::Bool=false, mode=PKGMODE_PROJECT, workspace::Bool=false, outdated::Bool=false, compat::Bool=false, extensions::Bool=false, io::IO=stdout_f()) +function status(ctx::Context, pkgs::Vector{PackageSpec}; diff::Bool = false, mode = PKGMODE_PROJECT, workspace::Bool = false, outdated::Bool = false, compat::Bool = false, extensions::Bool = false, io::IO = stdout_f()) if compat diff && pkgerror("Compat status has no `diff` mode") outdated && pkgerror("Compat status has no `outdated` mode") extensions && pkgerror("Compat status has no `extensions` mode") Operations.print_compat(ctx, pkgs; io) else - Operations.status(ctx.env, ctx.registries, pkgs; mode, git_diff=diff, io, outdated, extensions, workspace) + Operations.status(ctx.env, ctx.registries, pkgs; mode, git_diff = diff, io, outdated, extensions, workspace) end return nothing end -function activate(;temp=false, shared=false, prev=false, io::IO=stderr_f()) +function activate(; temp = false, shared = false, prev = false, io::IO = stderr_f()) shared && pkgerror("Must give a name for a shared environment") - temp && return activate(mktempdir(); io=io) + temp && return activate(mktempdir(); io = io) if prev if isempty(PREV_ENV_PATH[]) pkgerror("No previously active environment found") else - return activate(PREV_ENV_PATH[]; io=io) + return activate(PREV_ENV_PATH[]; io = io) end end if !isnothing(Base.active_project()) @@ -1320,14 +1412,14 @@ function _activate_dep(dep_name::AbstractString) return end uuid = get(ctx.env.project.deps, dep_name, nothing) - if uuid !== nothing + return if uuid !== nothing entry = manifest_info(ctx.env.manifest, uuid) if entry.path !== nothing return joinpath(dirname(ctx.env.manifest_file), entry.path::String) end end end -function activate(path::AbstractString; shared::Bool=false, temp::Bool=false, io::IO=stderr_f()) +function activate(path::AbstractString; shared::Bool = false, temp::Bool = false, io::IO = stderr_f()) temp && pkgerror("Can 
not give `path` argument when creating a temporary environment") if !shared # `pkg> activate path`/`Pkg.activate(path)` does the following @@ -1374,23 +1466,39 @@ end function activate(f::Function, new_project::AbstractString) old = Base.ACTIVE_PROJECT[] Base.ACTIVE_PROJECT[] = new_project - try + return try f() finally Base.ACTIVE_PROJECT[] = old end end -function compat(ctx::Context, pkg::String, compat_str::Union{Nothing,String}; io = nothing, kwargs...) +function _compat(ctx::Context, pkg::String, compat_str::Union{Nothing, String}; current::Bool = false, io = nothing, kwargs...) + if current + if compat_str !== nothing + pkgerror("`current` is true, but `compat_str` is not nothing. This is not allowed.") + end + return set_current_compat(ctx, pkg; io = io) + end io = something(io, ctx.io) pkg = pkg == "Julia" ? "julia" : pkg isnothing(compat_str) || (compat_str = string(strip(compat_str, '"'))) + existing_compat = Operations.get_compat_str(ctx.env.project, pkg) + # Double check before deleting a compat entry issue/3567 + if isinteractive() && (isnothing(compat_str) || isempty(compat_str)) + if !isnothing(existing_compat) + ans = Base.prompt(stdin, ctx.io, "No compat string was given. Delete existing compat entry `$pkg = $(repr(existing_compat))`? [y]/n", default = "y") + if lowercase(ans) !== "y" + return + end + end + end if haskey(ctx.env.project.deps, pkg) || pkg == "julia" success = Operations.set_compat(ctx.env.project, pkg, isnothing(compat_str) ? nothing : isempty(compat_str) ? 
nothing : compat_str) success === false && pkgerror("invalid compat version specifier \"$(compat_str)\"") write_env(ctx.env) if isnothing(compat_str) || isempty(compat_str) - printpkgstyle(io, :Compat, "entry removed for $(pkg)") + printpkgstyle(io, :Compat, "entry removed:\n $pkg = $(repr(existing_compat))") else printpkgstyle(io, :Compat, "entry set:\n $(pkg) = $(repr(compat_str))") end @@ -1410,15 +1518,81 @@ function compat(ctx::Context, pkg::String, compat_str::Union{Nothing,String}; io pkgerror("No package named $pkg in current Project") end end -compat(pkg::String; kwargs...) = compat(pkg, nothing; kwargs...) -compat(pkg::String, compat_str::Union{Nothing,String}; kwargs...) = compat(Context(), pkg, compat_str; kwargs...) -compat(;kwargs...) = compat(Context(); kwargs...) +function compat(ctx::Context = Context(); current::Bool = false, kwargs...) + if current + return set_current_compat(ctx; kwargs...) + end + return _compat(ctx; kwargs...) +end +compat(pkg::String, compat_str::Union{Nothing, String} = nothing; kwargs...) = _compat(Context(), pkg, compat_str; kwargs...) 
+ + +function set_current_compat(ctx::Context, target_pkg::Union{Nothing, String} = nothing; io = nothing) + io = something(io, ctx.io) + updated_deps = String[] + + deps_to_process = if target_pkg !== nothing + # Process only the specified package + if haskey(ctx.env.project.deps, target_pkg) + [(target_pkg, ctx.env.project.deps[target_pkg])] + else + pkgerror("Package $(target_pkg) not found in project dependencies") + end + else + # Process all packages (existing behavior) + collect(ctx.env.project.deps) + end + + # Process regular package dependencies + for (dep, uuid) in deps_to_process + compat_str = Operations.get_compat_str(ctx.env.project, dep) + if target_pkg !== nothing || isnothing(compat_str) + entry = get(ctx.env.manifest, uuid, nothing) + entry === nothing && continue + v = entry.version + v === nothing && continue + pkgversion = string(Base.thispatch(v)) + Operations.set_compat(ctx.env.project, dep, pkgversion) || + pkgerror("invalid compat version specifier \"$(pkgversion)\"") + push!(updated_deps, dep) + end + end + + # Also handle Julia compat entry when processing all packages (not when targeting a specific package) + if target_pkg === nothing + julia_compat_str = Operations.get_compat_str(ctx.env.project, "julia") + if isnothing(julia_compat_str) + # Set julia compat to current running version + julia_version = string(Base.thispatch(VERSION)) + Operations.set_compat(ctx.env.project, "julia", julia_version) || + pkgerror("invalid compat version specifier \"$(julia_version)\"") + push!(updated_deps, "julia") + end + end + + # Update messaging + if isempty(updated_deps) + if target_pkg !== nothing + printpkgstyle(io, :Info, "$(target_pkg) already has a compat entry or is not in manifest. No changes made.", color = Base.info_color()) + else + printpkgstyle(io, :Info, "no missing compat entries found. 
No changes made.", color = Base.info_color()) + end + elseif length(updated_deps) == 1 + printpkgstyle(io, :Info, "new entry set for $(only(updated_deps)) based on its current version", color = Base.info_color()) + else + printpkgstyle(io, :Info, "new entries set for $(join(updated_deps, ", ", " and ")) based on their current versions", color = Base.info_color()) + end + + write_env(ctx.env) + return Operations.print_compat(ctx; io) +end +set_current_compat(; kwargs...) = set_current_compat(Context(); kwargs...) ####### # why # ####### -function why(ctx::Context, pkgs::Vector{PackageSpec}; io::IO, workspace::Bool=false, kwargs...) +function why(ctx::Context, pkgs::Vector{PackageSpec}; io::IO, workspace::Bool = false, kwargs...) require_not_empty(pkgs, :why) manifest_resolve!(ctx.env.manifest, pkgs) @@ -1456,6 +1630,7 @@ function why(ctx::Context, pkgs::Vector{PackageSpec}; io::IO, workspace::Bool=fa end find_paths!(final_paths, p, copy(path)) end + return end first = true @@ -1467,11 +1642,12 @@ function why(ctx::Context, pkgs::Vector{PackageSpec}; io::IO, workspace::Bool=fa foreach(reverse!, final_paths) final_paths_names = map(x -> [ctx.env.manifest[uuid].name for uuid in x], collect(final_paths)) sort!(final_paths_names, by = x -> (x, length(x))) - delimiter = sprint((io, args) -> printstyled(io, args...; color=:light_green), "→", context=io) + delimiter = sprint((io, args) -> printstyled(io, args...; color = :light_green), "→", context = io) for path in final_paths_names println(io, " ", join(path, " $delimiter ")) end end + return end @@ -1493,7 +1669,7 @@ const undo_entries = Dict{String, UndoState}() const max_undo_limit = 50 const saved_initial_snapshot = Ref(false) -function add_snapshot_to_undo(env=nothing) +function add_snapshot_to_undo(env = nothing) # only attempt to take a snapshot if there is # an active project to be found if env === nothing @@ -1511,14 +1687,14 @@ function add_snapshot_to_undo(env=nothing) return end snapshot = UndoSnapshot(now(), 
env.project, env.manifest) - deleteat!(state.entries, 1:(state.idx-1)) + deleteat!(state.entries, 1:(state.idx - 1)) pushfirst!(state.entries, snapshot) state.idx = 1 - resize!(state.entries, min(length(state.entries), max_undo_limit)) + return resize!(state.entries, min(length(state.entries), max_undo_limit)) end -undo(ctx = Context()) = redo_undo(ctx, :undo, 1) +undo(ctx = Context()) = redo_undo(ctx, :undo, 1) redo(ctx = Context()) = redo_undo(ctx, :redo, -1) function redo_undo(ctx, mode::Symbol, direction::Int) @assert direction == 1 || direction == -1 @@ -1529,16 +1705,16 @@ function redo_undo(ctx, mode::Symbol, direction::Int) state.idx += direction snapshot = state.entries[state.idx] ctx.env.manifest, ctx.env.project = snapshot.manifest, snapshot.project - write_env(ctx.env; update_undo=false) - Operations.show_update(ctx.env, ctx.registries; io=ctx.io) + write_env(ctx.env; update_undo = false) + return Operations.show_update(ctx.env, ctx.registries; io = ctx.io) end function setprotocol!(; - domain::AbstractString="github.com", - protocol::Union{Nothing, AbstractString}=nothing -) - GitTools.setprotocol!(domain=domain, protocol=protocol) + domain::AbstractString = "github.com", + protocol::Union{Nothing, AbstractString} = nothing + ) + GitTools.setprotocol!(domain = domain, protocol = protocol) return nothing end @@ -1546,10 +1722,15 @@ end function handle_package_input!(pkg::PackageSpec) if pkg.path !== nothing && pkg.url !== nothing - pkgerror("`path` and `url` are conflicting specifications") + pkgerror("Conflicting `path` and `url` in PackageSpec") end - pkg.repo = Types.GitRepo(rev = pkg.rev, source = pkg.url !== nothing ? pkg.url : pkg.path, - subdir = pkg.subdir) + if pkg.repo.source !== nothing || pkg.repo.rev !== nothing || pkg.repo.subdir !== nothing + pkgerror("`repo` is a private field of PackageSpec and should not be set directly") + end + pkg.repo = Types.GitRepo( + rev = pkg.rev, source = pkg.url !== nothing ? 
pkg.url : pkg.path, + subdir = pkg.subdir + ) pkg.path = nothing pkg.tree_hash = nothing if pkg.version === nothing @@ -1558,7 +1739,7 @@ function handle_package_input!(pkg::PackageSpec) if !(pkg.version isa VersionNumber) pkg.version = VersionSpec(pkg.version) end - pkg.uuid = pkg.uuid isa String ? UUID(pkg.uuid) : pkg.uuid + return pkg.uuid = pkg.uuid isa String ? UUID(pkg.uuid) : pkg.uuid end function upgrade_manifest(man_path::String) @@ -1567,7 +1748,7 @@ function upgrade_manifest(man_path::String) Pkg.activate(dir) do Pkg.upgrade_manifest() end - mv(joinpath(dir, "Manifest.toml"), man_path, force = true) + return mv(joinpath(dir, "Manifest.toml"), man_path, force = true) end function upgrade_manifest(ctx::Context = Context()) before_format = ctx.env.manifest.manifest_format diff --git a/src/Apps/Apps.jl b/src/Apps/Apps.jl new file mode 100644 index 0000000000..e5cfd8b10f --- /dev/null +++ b/src/Apps/Apps.jl @@ -0,0 +1,523 @@ +module Apps + +using Pkg +using Pkg: atomic_toml_write +using Pkg.Versions +using Pkg.Types: AppInfo, PackageSpec, Context, EnvCache, PackageEntry, Manifest, handle_repo_add!, handle_repo_develop!, write_manifest, write_project, + pkgerror, projectfile_path, manifestfile_path +using Pkg.Operations: print_single, source_path, update_package_add +using Pkg.API: handle_package_input! 
+using TOML, UUIDs +import Pkg.Registry + +app_env_folder() = joinpath(first(DEPOT_PATH), "environments", "apps") +app_manifest_file() = joinpath(app_env_folder(), "AppManifest.toml") +julia_bin_path() = joinpath(first(DEPOT_PATH), "bin") + +app_context() = Context(env = EnvCache(joinpath(app_env_folder(), "Project.toml"))) + +function validate_app_name(name::AbstractString) + if isempty(name) + error("App name cannot be empty") + end + if !occursin(r"^[a-zA-Z][a-zA-Z0-9_-]*$", name) + error("App name must start with a letter and contain only letters, numbers, underscores, and hyphens") + end + return if occursin(r"\.\.", name) || occursin(r"[/\\]", name) + error("App name cannot contain path traversal sequences or path separators") + end +end + +function validate_package_name(name::AbstractString) + if isempty(name) + error("Package name cannot be empty") + end + return if !occursin(r"^[a-zA-Z][a-zA-Z0-9_]*$", name) + error("Package name must start with a letter and contain only letters, numbers, and underscores") + end +end + +function validate_submodule_name(name::Union{AbstractString, Nothing}) + return if name !== nothing + if isempty(name) + error("Submodule name cannot be empty") + end + if !occursin(r"^[a-zA-Z][a-zA-Z0-9_]*$", name) + error("Submodule name must start with a letter and contain only letters, numbers, and underscores") + end + end +end + + +function rm_shim(name; kwargs...) + validate_app_name(name) + return Base.rm(joinpath(julia_bin_path(), name * (Sys.iswindows() ? ".bat" : "")); kwargs...) 
+end + +function get_project(sourcepath) + project_file = projectfile_path(sourcepath) + + isfile(project_file) || error("Project file not found: $project_file") + + project = Pkg.Types.read_project(project_file) + isempty(project.apps) && error("No apps found in Project.toml for package $(project.name) at version $(project.version)") + return project +end + + +function overwrite_file_if_different(file, content) + return if !isfile(file) || read(file, String) != content + mkpath(dirname(file)) + write(file, content) + end +end + +function check_apps_in_path(apps) + for app_name in keys(apps) + which_result = Sys.which(app_name) + if which_result === nothing + @warn """ + App '$app_name' was installed but is not available in PATH. + Consider adding '$(julia_bin_path())' to your PATH environment variable. + """ maxlog = 1 + break # Only show warning once per installation + else + # Check for collisions + expected_path = joinpath(julia_bin_path(), app_name * (Sys.iswindows() ? ".bat" : "")) + if which_result != expected_path + @warn """ + App '$app_name' collision detected: + Expected: $expected_path + Found: $which_result + Another application with the same name exists in PATH. 
+ """ + end + end + end + return +end + +function get_max_version_register(pkg::PackageSpec, regs) + max_v = nothing + tree_hash = nothing + for reg in regs + if get(reg, pkg.uuid, nothing) !== nothing + reg_pkg = get(reg, pkg.uuid, nothing) + reg_pkg === nothing && continue + pkg_info = Registry.registry_info(reg_pkg) + for (version, info) in pkg_info.version_info + info.yanked && continue + if pkg.version isa VersionNumber + pkg.version == version || continue + else + version in pkg.version || continue + end + if max_v === nothing || version > max_v + max_v = version + tree_hash = info.git_tree_sha1 + end + end + end + end + if max_v === nothing + error("Suitable package version for $(pkg.name) not found in any registries.") + end + return (max_v, tree_hash) +end + + +################## +# Main Functions # +################## + +function _resolve(manifest::Manifest, pkgname = nothing) + for (uuid, pkg) in manifest.deps + if pkgname !== nothing && pkg.name !== pkgname + continue + end + + # TODO: Add support for existing manifest + + projectfile = joinpath(app_env_folder(), pkg.name, "Project.toml") + + sourcepath = source_path(app_manifest_file(), pkg) + original_project_file = projectfile_path(sourcepath) + + mkpath(dirname(projectfile)) + + if isfile(original_project_file) + cp(original_project_file, projectfile; force = true) + chmod(projectfile, 0o644) # Make the copied project file writable + + # Add entryfile stanza pointing to the package entry file + # TODO: What if project file has its own entryfile? 
+ project_data = TOML.parsefile(projectfile) + project_data["entryfile"] = joinpath(sourcepath, "src", "$(pkg.name).jl") + atomic_toml_write(projectfile, project_data) + else + error("could not find project file for package $pkg") + end + + # Create a manifest with the manifest entry + Pkg.activate(joinpath(app_env_folder(), pkg.name)) do + ctx = Context() + ctx.env.manifest.deps[uuid] = pkg + Pkg.resolve(ctx) + end + + # TODO: Julia path + generate_shims_for_apps(pkg.name, pkg.apps, dirname(projectfile), joinpath(Sys.BINDIR, "julia")) + end + return write_manifest(manifest, app_manifest_file()) +end + + +function add(pkg::Vector{PackageSpec}) + for p in pkg + add(p) + end + return +end + + +function add(pkg::PackageSpec) + handle_package_input!(pkg) + + ctx = app_context() + manifest = ctx.env.manifest + new = false + + # Download package + if pkg.repo.source !== nothing || pkg.repo.rev !== nothing + entry = Pkg.API.manifest_info(ctx.env.manifest, pkg.uuid) + pkg = update_package_add(ctx, pkg, entry, false) + new = handle_repo_add!(ctx, pkg) + else + pkgs = [pkg] + Pkg.Operations.registry_resolve!(ctx.registries, pkgs) + Pkg.Operations.ensure_resolved(ctx, manifest, pkgs, registry = true) + + pkg.version, pkg.tree_hash = get_max_version_register(pkg, ctx.registries) + + new = Pkg.Operations.download_source(ctx, pkgs) + end + + # Run Pkg.build()? + + Base.rm(joinpath(app_env_folder(), pkg.name); force = true, recursive = true) + sourcepath = source_path(ctx.env.manifest_file, pkg) + project = get_project(sourcepath) + # TODO: Wrong if package itself has a sourcepath? 
+ entry = PackageEntry(; apps = project.apps, name = pkg.name, version = project.version, tree_hash = pkg.tree_hash, path = pkg.path, repo = pkg.repo, uuid = pkg.uuid) + manifest.deps[pkg.uuid] = entry + + _resolve(manifest, pkg.name) + precompile(pkg.name) + + @info "For package: $(pkg.name) installed apps $(join(keys(project.apps), ","))" + return check_apps_in_path(project.apps) +end + +function develop(pkg::Vector{PackageSpec}) + for p in pkg + develop(p) + end + return +end + +function develop(pkg::PackageSpec) + if pkg.path !== nothing + pkg.path == abspath(pkg.path) + end + handle_package_input!(pkg) + ctx = app_context() + handle_repo_develop!(ctx, pkg, #=shared =# true) + Base.rm(joinpath(app_env_folder(), pkg.name); force = true, recursive = true) + sourcepath = abspath(source_path(ctx.env.manifest_file, pkg)) + project = get_project(sourcepath) + + # Seems like the `.repo.source` field is not cleared. + # At least repo-url is still in the manifest after doing a dev with a path + # Figure out why for normal dev this is not needed. + # XXX: Why needed? + if pkg.path !== nothing + pkg.repo.source = nothing + end + + + entry = PackageEntry(; apps = project.apps, name = pkg.name, version = project.version, tree_hash = pkg.tree_hash, path = sourcepath, repo = pkg.repo, uuid = pkg.uuid) + manifest = ctx.env.manifest + manifest.deps[pkg.uuid] = entry + + _resolve(manifest, pkg.name) + precompile(pkg.name) + @info "For package: $(pkg.name) installed apps: $(join(keys(project.apps), ","))" + return check_apps_in_path(project.apps) +end + + +update(pkgs_or_apps::String) = update([pkgs_or_apps]) +function update(pkgs_or_apps::Vector) + for pkg_or_app in pkgs_or_apps + if pkg_or_app isa String + pkg_or_app = PackageSpec(pkg_or_app) + end + update(pkg_or_app) + end + return +end + +# XXX: Is updating an app ever different from rm-ing and adding it from scratch? 
+function update(pkg::Union{PackageSpec, Nothing} = nothing) + ctx = app_context() + manifest = ctx.env.manifest + deps = Pkg.Operations.load_manifest_deps(manifest) + for dep in deps + info = manifest.deps[dep.uuid] + if pkg === nothing || info.name !== pkg.name + continue + end + Pkg.activate(joinpath(app_env_folder(), info.name)) do + # precompile only after updating all apps? + if pkg !== nothing + Pkg.update(pkg) + else + Pkg.update() + end + end + sourcepath = abspath(source_path(ctx.env.manifest_file, info)) + project = get_project(sourcepath) + # Get the tree hash from the project file + manifest_file = manifestfile_path(joinpath(app_env_folder(), info.name)) + manifest_app = Pkg.Types.read_manifest(manifest_file) + manifest_entry = manifest_app.deps[info.uuid] + + entry = PackageEntry(; + apps = project.apps, name = manifest_entry.name, version = manifest_entry.version, tree_hash = manifest_entry.tree_hash, + path = manifest_entry.path, repo = manifest_entry.repo, uuid = manifest_entry.uuid + ) + + manifest.deps[dep.uuid] = entry + Pkg.Types.write_manifest(manifest, app_manifest_file()) + end + return +end + +function status(pkgs_or_apps::Vector) + return if isempty(pkgs_or_apps) + status() + else + for pkg_or_app in pkgs_or_apps + if pkg_or_app isa String + pkg_or_app = PackageSpec(pkg_or_app) + end + status(pkg_or_app) + end + end +end + +function status(pkg_or_app::Union{PackageSpec, Nothing} = nothing) + # TODO: Sort. + pkg_or_app = pkg_or_app === nothing ? 
nothing : pkg_or_app.name + manifest = Pkg.Types.read_manifest(joinpath(app_env_folder(), "AppManifest.toml")) + deps = Pkg.Operations.load_manifest_deps(manifest) + + is_pkg = pkg_or_app !== nothing && any(dep -> dep.name == pkg_or_app, values(manifest.deps)) + + for dep in deps + info = manifest.deps[dep.uuid] + if is_pkg && dep.name !== pkg_or_app + continue + end + if !is_pkg && pkg_or_app !== nothing + if !(pkg_or_app in keys(info.apps)) + continue + end + end + + printstyled("[", string(dep.uuid)[1:8], "] "; color = :light_black) + print_single(stdout, dep) + println() + for (appname, appinfo) in info.apps + if !is_pkg && pkg_or_app !== nothing && appname !== pkg_or_app + continue + end + julia_cmd = contractuser(appinfo.julia_command) + printstyled(" $(appname)", color = :green) + printstyled(" $(julia_cmd) \n", color = :gray) + end + end + return +end + +function precompile(pkg::Union{Nothing, String} = nothing) + manifest = Pkg.Types.read_manifest(joinpath(app_env_folder(), "AppManifest.toml")) + deps = Pkg.Operations.load_manifest_deps(manifest) + for dep in deps + # TODO: Parallel app compilation..? + info = manifest.deps[dep.uuid] + if pkg !== nothing && info.name !== pkg + continue + end + Pkg.activate(joinpath(app_env_folder(), info.name)) do + Pkg.instantiate() + Pkg.precompile() + end + end + return +end + + +function require_not_empty(pkgs, f::Symbol) + return if pkgs === nothing || isempty(pkgs) + pkgerror("app $f requires at least one package") + end +end + +rm(pkgs_or_apps::String) = rm([pkgs_or_apps]) +function rm(pkgs_or_apps::Vector) + for pkg_or_app in pkgs_or_apps + if pkg_or_app isa String + pkg_or_app = PackageSpec(pkg_or_app) + end + rm(pkg_or_app) + end + return +end + +function rm(pkg_or_app::Union{PackageSpec, Nothing} = nothing) + pkg_or_app = pkg_or_app === nothing ? 
nothing : pkg_or_app.name + + require_not_empty(pkg_or_app, :rm) + + manifest = Pkg.Types.read_manifest(joinpath(app_env_folder(), "AppManifest.toml")) + dep_idx = findfirst(dep -> dep.name == pkg_or_app, manifest.deps) + if dep_idx !== nothing + dep = manifest.deps[dep_idx] + @info "Deleting all apps for package $(dep.name)" + delete!(manifest.deps, dep.uuid) + for (appname, appinfo) in dep.apps + @info "Deleted $(appname)" + rm_shim(appname; force = true) + end + if dep.path === nothing + Base.rm(joinpath(app_env_folder(), dep.name); recursive = true) + end + else + for (uuid, pkg) in manifest.deps + app_idx = findfirst(app -> app.name == pkg_or_app, pkg.apps) + if app_idx !== nothing + app = pkg.apps[app_idx] + @info "Deleted app $(app.name)" + delete!(pkg.apps, app.name) + rm_shim(app.name; force = true) + end + if isempty(pkg.apps) + delete!(manifest.deps, uuid) + Base.rm(joinpath(app_env_folder(), pkg.name); recursive = true) + end + end + end + # XXX: What happens if something fails above and we do not write out the updated manifest? + Pkg.Types.write_manifest(manifest, app_manifest_file()) + return +end + +for f in (:develop, :add) + @eval begin + $f(pkg::Union{AbstractString, PackageSpec}; kwargs...) = $f([pkg]; kwargs...) + $f(pkgs::Vector{<:AbstractString}; kwargs...) = $f([PackageSpec(pkg) for pkg in pkgs]; kwargs...) + function $f(; + name::Union{Nothing, AbstractString} = nothing, uuid::Union{Nothing, String, UUID} = nothing, + version::Union{VersionNumber, String, VersionSpec, Nothing} = nothing, + url = nothing, rev = nothing, path = nothing, subdir = nothing, kwargs... + ) + pkg = PackageSpec(; name, uuid, version, url, rev, path, subdir) + return if all(isnothing, [name, uuid, version, url, rev, path, subdir]) + $f(PackageSpec[]; kwargs...) + else + $f(pkg; kwargs...) + end + end + function $f(pkgs::Vector{<:NamedTuple}; kwargs...) + return $f([PackageSpec(; pkg...) for pkg in pkgs]; kwargs...) 
+ end + end +end + + +######### +# Shims # +######### + +const SHIM_COMMENT = Sys.iswindows() ? "REM " : "#" +const SHIM_VERSION = 1.0 +const SHIM_HEADER = """$SHIM_COMMENT This file is generated by the Julia package manager. +$SHIM_COMMENT Shim version: $SHIM_VERSION""" + +function generate_shims_for_apps(pkgname, apps, env, julia) + for (_, app) in apps + generate_shim(pkgname, app, env, julia) + end + return +end + +function generate_shim(pkgname, app::AppInfo, env, julia) + validate_package_name(pkgname) + validate_app_name(app.name) + validate_submodule_name(app.submodule) + + module_spec = app.submodule === nothing ? pkgname : "$(pkgname).$(app.submodule)" + + filename = app.name * (Sys.iswindows() ? ".bat" : "") + julia_bin_filename = joinpath(julia_bin_path(), filename) + mkpath(dirname(julia_bin_filename)) + content = if Sys.iswindows() + julia_escaped = "\"$(Base.shell_escape_wincmd(julia))\"" + module_spec_escaped = "\"$(Base.shell_escape_wincmd(module_spec))\"" + windows_shim(julia_escaped, module_spec_escaped, env) + else + julia_escaped = Base.shell_escape(julia) + module_spec_escaped = Base.shell_escape(module_spec) + shell_shim(julia_escaped, module_spec_escaped, env) + end + overwrite_file_if_different(julia_bin_filename, content) + return if Sys.isunix() + chmod(julia_bin_filename, 0o755) + end +end + + +function shell_shim(julia_escaped::String, module_spec_escaped::String, env) + return """ + #!/bin/sh + + $SHIM_HEADER + + export JULIA_LOAD_PATH=$(repr(env)) + export JULIA_DEPOT_PATH=$(repr(join(DEPOT_PATH, ':'))) + exec $julia_escaped \\ + --startup-file=no \\ + -m $module_spec_escaped \\ + "\$@" + """ +end + +function windows_shim(julia_escaped::String, module_spec_escaped::String, env) + return """ + @echo off + + $SHIM_HEADER + + setlocal + set JULIA_LOAD_PATH=$env + set JULIA_DEPOT_PATH=$(join(DEPOT_PATH, ';')) + + $julia_escaped ^ + --startup-file=no ^ + -m $module_spec_escaped ^ + %* + """ +end + +end diff --git a/src/Artifacts.jl 
b/src/Artifacts.jl index 957d14aab9..d6f2c948f4 100644 --- a/src/Artifacts.jl +++ b/src/Artifacts.jl @@ -1,647 +1,685 @@ -module Artifacts - -using Artifacts, Base.BinaryPlatforms, SHA -using ..MiniProgressBars, ..PlatformEngines -using Tar: can_symlink - -import ..set_readonly, ..GitTools, ..TOML, ..pkg_server, ..can_fancyprint, - ..stderr_f, ..printpkgstyle - -import Base: get, SHA1 -import Artifacts: artifact_names, ARTIFACTS_DIR_OVERRIDE, ARTIFACT_OVERRIDES, artifact_paths, - artifacts_dirs, pack_platform!, unpack_platform, load_artifacts_toml, - query_override, with_artifacts_directory, load_overrides -import ..Types: write_env_usage, parse_toml - - -export create_artifact, artifact_exists, artifact_path, remove_artifact, verify_artifact, - artifact_meta, artifact_hash, bind_artifact!, unbind_artifact!, download_artifact, - find_artifacts_toml, ensure_artifact_installed, @artifact_str, archive_artifact, - select_downloadable_artifacts - -""" - create_artifact(f::Function) - -Creates a new artifact by running `f(artifact_path)`, hashing the result, and moving it -to the artifact store (`~/.julia/artifacts` on a typical installation). Returns the -identifying tree hash of this artifact. -""" -function create_artifact(f::Function) - # Ensure the `artifacts` directory exists in our default depot - artifacts_dir = first(artifacts_dirs()) - mkpath(artifacts_dir) - - # Temporary directory where we'll do our creation business - temp_dir = mktempdir(artifacts_dir) - - try - # allow the user to do their work inside the temporary directory - f(temp_dir) - - # Calculate the tree hash for this temporary directory - artifact_hash = SHA1(GitTools.tree_hash(temp_dir)) - - # If we created a dupe, just let the temp directory get destroyed. It's got the - # same contents as whatever already exists after all, so it doesn't matter. Only - # move its contents if it actually contains new contents. 
Note that we explicitly - # set `honor_overrides=false` here, as we wouldn't want to drop things into the - # system directory by accidentally creating something with the same content-hash - # as something that was foolishly overridden. This should be virtually impossible - # unless the user has been very unwise, but let's be cautious. - new_path = artifact_path(artifact_hash; honor_overrides=false) - _mv_temp_artifact_dir(temp_dir, new_path) - - # Give the people what they want - return artifact_hash - finally - # Always attempt to cleanup - rm(temp_dir; recursive=true, force=true) +module PkgArtifacts + + using Artifacts, Base.BinaryPlatforms, SHA + using ..MiniProgressBars, ..PlatformEngines + using Tar: can_symlink + using FileWatching: FileWatching + + import ..set_readonly, ..GitTools, ..TOML, ..pkg_server, ..can_fancyprint, + ..stderr_f, ..printpkgstyle, ..mv_temp_dir_retries, ..atomic_toml_write + + import Base: get, SHA1 + import Artifacts: artifact_names, ARTIFACTS_DIR_OVERRIDE, ARTIFACT_OVERRIDES, artifact_paths, + artifacts_dirs, pack_platform!, unpack_platform, load_artifacts_toml, + query_override, with_artifacts_directory, load_overrides + import ..Types: write_env_usage, parse_toml + + const Artifacts = PkgArtifacts # This is to preserve compatability for folks who depend on the internals of this module + export Artifacts, create_artifact, artifact_exists, artifact_path, remove_artifact, verify_artifact, + artifact_meta, artifact_hash, bind_artifact!, unbind_artifact!, download_artifact, + find_artifacts_toml, ensure_artifact_installed, @artifact_str, archive_artifact, + select_downloadable_artifacts, ArtifactDownloadInfo + + """ + create_artifact(f::Function) + + Creates a new artifact by running `f(artifact_path)`, hashing the result, and moving it + to the artifact store (`~/.julia/artifacts` on a typical installation). Returns the + identifying tree hash of this artifact. 
+ """ + function create_artifact(f::Function) + # Ensure the `artifacts` directory exists in our default depot + artifacts_dir = first(artifacts_dirs()) + mkpath(artifacts_dir) + + # Temporary directory where we'll do our creation business + temp_dir = mktempdir(artifacts_dir) + + try + # allow the user to do their work inside the temporary directory + f(temp_dir) + + # Calculate the tree hash for this temporary directory + artifact_hash = SHA1(GitTools.tree_hash(temp_dir)) + + # If we created a dupe, just let the temp directory get destroyed. It's got the + # same contents as whatever already exists after all, so it doesn't matter. Only + # move its contents if it actually contains new contents. Note that we explicitly + # set `honor_overrides=false` here, as we wouldn't want to drop things into the + # system directory by accidentally creating something with the same content-hash + # as something that was foolishly overridden. This should be virtually impossible + # unless the user has been very unwise, but let's be cautious. + new_path = artifact_path(artifact_hash; honor_overrides = false) + mv_temp_dir_retries(temp_dir, new_path) + + # Give the people what they want + return artifact_hash + finally + # Always attempt to cleanup + rm(temp_dir; recursive = true, force = true) + end end -end - -""" - _mv_temp_artifact_dir(temp_dir::String, new_path::String)::Nothing -Either rename the directory at `temp_dir` to `new_path` and set it to read-only -or if `new_path` artifact already exists try to do nothing. -""" -function _mv_temp_artifact_dir(temp_dir::String, new_path::String)::Nothing - # Sometimes a rename can fail because the temp_dir is locked by - # anti-virus software scanning the new files. - # In this case we want to sleep and try again. 
- # I am using the list of error codes to retry from: - # https://github.com/isaacs/node-graceful-fs/blob/234379906b7d2f4c9cfeb412d2516f42b0fb4953/polyfills.js#L87 - # Retry for up to about 60 seconds by retrying 20 times with exponential backoff. - retry = 0 - max_num_retries = 20 # maybe this should be configurable? - sleep_amount = 0.01 # seconds - max_sleep_amount = 5.0 # seconds - while true - isdir(new_path) && return - # This next step is like - # `mv(temp_dir, new_path)`. - # However, `mv` defaults to `cp` if `rename` returns an error. - # `cp` is not atomic, so avoid the potential of calling it. - err = ccall(:jl_fs_rename, Int32, (Cstring, Cstring), temp_dir, new_path) - if err ≥ 0 - # rename worked - chmod(new_path, filemode(dirname(new_path))) - set_readonly(new_path) + + """ + remove_artifact(hash::SHA1; honor_overrides::Bool=false) + + Removes the given artifact (identified by its SHA1 git tree hash) from disk. Note that + if an artifact is installed in multiple depots, it will be removed from all of them. If + an overridden artifact is requested for removal, it will be silently ignored; this method + will never attempt to remove an overridden artifact. + + In general, we recommend that you use `Pkg.gc()` to manage artifact installations and do + not use `remove_artifact()` directly, as it can be difficult to know if an artifact is + being used by another package. + """ + function remove_artifact(hash::SHA1) + if query_override(hash) !== nothing + # We never remove overridden artifacts. return - else - # Ignore rename error if `new_path` exists. 
- isdir(new_path) && return - if retry < max_num_retries && err ∈ (Base.UV_EACCES, Base.UV_EPERM, Base.UV_EBUSY) - sleep(sleep_amount) - sleep_amount = min(sleep_amount*2.0, max_sleep_amount) - retry += 1 - else - Base.uv_error("rename of $(repr(temp_dir)) to $(repr(new_path))", err) + end + + # Get all possible paths (rooted in all depots) + possible_paths = artifacts_dirs(bytes2hex(hash.bytes)) + for path in possible_paths + if isdir(path) + rm(path; recursive = true, force = true) end end - end -end - -""" - remove_artifact(hash::SHA1; honor_overrides::Bool=false) - -Removes the given artifact (identified by its SHA1 git tree hash) from disk. Note that -if an artifact is installed in multiple depots, it will be removed from all of them. If -an overridden artifact is requested for removal, it will be silently ignored; this method -will never attempt to remove an overridden artifact. - -In general, we recommend that you use `Pkg.gc()` to manage artifact installations and do -not use `remove_artifact()` directly, as it can be difficult to know if an artifact is -being used by another package. -""" -function remove_artifact(hash::SHA1) - if query_override(hash) !== nothing - # We never remove overridden artifacts. return end - # Get all possible paths (rooted in all depots) - possible_paths = artifacts_dirs(bytes2hex(hash.bytes)) - for path in possible_paths - if isdir(path) - rm(path; recursive=true, force=true) + """ + verify_artifact(hash::SHA1; honor_overrides::Bool=false) + + Verifies that the given artifact (identified by its SHA1 git tree hash) is installed on- + disk, and retains its integrity. If the given artifact is overridden, skips the + verification unless `honor_overrides` is set to `true`. 
+ """ + function verify_artifact(hash::SHA1; honor_overrides::Bool = false) + # Silently skip overridden artifacts unless we really ask for it + if !honor_overrides + if query_override(hash) !== nothing + return true + end end - end -end - -""" - verify_artifact(hash::SHA1; honor_overrides::Bool=false) - -Verifies that the given artifact (identified by its SHA1 git tree hash) is installed on- -disk, and retains its integrity. If the given artifact is overridden, skips the -verification unless `honor_overrides` is set to `true`. -""" -function verify_artifact(hash::SHA1; honor_overrides::Bool=false) - # Silently skip overridden artifacts unless we really ask for it - if !honor_overrides - if query_override(hash) !== nothing - return true + + # If it doesn't even exist, then skip out + if !artifact_exists(hash) + return false end - end - # If it doesn't even exist, then skip out - if !artifact_exists(hash) - return false + # Otherwise actually run the verification + return all(hash.bytes .== GitTools.tree_hash(artifact_path(hash))) end - # Otherwise actually run the verification - return all(hash.bytes .== GitTools.tree_hash(artifact_path(hash))) -end + """ + archive_artifact(hash::SHA1, tarball_path::String; honor_overrides::Bool=false) -""" - archive_artifact(hash::SHA1, tarball_path::String; honor_overrides::Bool=false) + Archive an artifact into a tarball stored at `tarball_path`, returns the SHA256 of the + resultant tarball as a hexadecimal string. Throws an error if the artifact does not + exist. If the artifact is overridden, throws an error unless `honor_overrides` is set. + """ + function archive_artifact(hash::SHA1, tarball_path::String; honor_overrides::Bool = false) + if !honor_overrides + if query_override(hash) !== nothing + error("Will not archive an overridden artifact unless `honor_overrides` is set!") + end + end -Archive an artifact into a tarball stored at `tarball_path`, returns the SHA256 of the -resultant tarball as a hexadecimal string. 
Throws an error if the artifact does not -exist. If the artifact is overridden, throws an error unless `honor_overrides` is set. -""" -function archive_artifact(hash::SHA1, tarball_path::String; honor_overrides::Bool=false) - if !honor_overrides - if query_override(hash) !== nothing - error("Will not archive an overridden artifact unless `honor_overrides` is set!") + if !artifact_exists(hash) + error("Unable to archive artifact $(bytes2hex(hash.bytes)): does not exist!") end - end - if !artifact_exists(hash) - error("Unable to archive artifact $(bytes2hex(hash.bytes)): does not exist!") + # Package it up + package(artifact_path(hash), tarball_path) + + # Calculate its sha256 and return that + return open(tarball_path, "r") do io + return bytes2hex(sha256(io)) + end end - # Package it up - package(artifact_path(hash), tarball_path) + """ + ArtifactDownloadInfo - # Calculate its sha256 and return that - return open(tarball_path, "r") do io - return bytes2hex(sha256(io)) - end -end - -""" - bind_artifact!(artifacts_toml::String, name::String, hash::SHA1; - platform::Union{AbstractPlatform,Nothing} = nothing, - download_info::Union{Vector{Tuple},Nothing} = nothing, - lazy::Bool = false, - force::Bool = false) - -Writes a mapping of `name` -> `hash` within the given `(Julia)Artifacts.toml` file. If -`platform` is not `nothing`, this artifact is marked as platform-specific, and will be -a multi-mapping. It is valid to bind multiple artifacts with the same name, but -different `platform`s and `hash`'es within the same `artifacts_toml`. If `force` is set -to `true`, this will overwrite a pre-existant mapping, otherwise an error is raised. - -`download_info` is an optional vector that contains tuples of URLs and a hash. These -URLs will be listed as possible locations where this artifact can be obtained. 
If `lazy` -is set to `true`, even if download information is available, this artifact will not be -downloaded until it is accessed via the `artifact"name"` syntax, or -`ensure_artifact_installed()` is called upon it. -""" -function bind_artifact!(artifacts_toml::String, name::String, hash::SHA1; - platform::Union{AbstractPlatform,Nothing} = nothing, - download_info::Union{Vector{<:Tuple},Nothing} = nothing, - lazy::Bool = false, - force::Bool = false) - # First, check to see if this artifact is already bound: - if isfile(artifacts_toml) - artifact_dict = parse_toml(artifacts_toml) + Auxilliary information about an artifact to be used with `bind_artifact!()` to give + a download location for that artifact, as well as the hash and size of that artifact. + """ + struct ArtifactDownloadInfo + # URL the artifact is available at as a gzip-compressed tarball + url::String - if !force && haskey(artifact_dict, name) - meta = artifact_dict[name] - if !isa(meta, Vector) - error("Mapping for '$name' within $(artifacts_toml) already exists!") - elseif any(isequal(platform), unpack_platform(x, name, artifacts_toml) for x in meta) - error("Mapping for '$name'/$(triplet(platform)) within $(artifacts_toml) already exists!") + # SHA256 hash of the tarball + hash::Vector{UInt8} + + # Size in bytes of the tarball. `size <= 0` means unknown. 
+ size::Int64 + + function ArtifactDownloadInfo(url, hash::AbstractVector, size = 0) + valid_hash_len = SHA.digestlen(SHA256_CTX) + hash_len = length(hash) + if hash_len != valid_hash_len + throw(ArgumentError("Invalid hash length '$(hash_len)', must be $(valid_hash_len)")) end + return new( + String(url), + Vector{UInt8}(hash), + Int64(size), + ) end - else - artifact_dict = Dict{String, Any}() end - # Otherwise, the new piece of data we're going to write out is this dict: - meta = Dict{String,Any}( - "git-tree-sha1" => bytes2hex(hash.bytes), - ) + # Convenience constructor for string hashes + ArtifactDownloadInfo(url, hash::AbstractString, args...) = ArtifactDownloadInfo(url, hex2bytes(hash), args...) + + # Convenience constructor for legacy Tuple representation + ArtifactDownloadInfo(args::Tuple) = ArtifactDownloadInfo(args...) + + ArtifactDownloadInfo(adi::ArtifactDownloadInfo) = adi - # If we're set to be lazy, then lazy we shall be - if lazy - meta["lazy"] = true + # Make the dict that will be embedded in the TOML + function make_dict(adi::ArtifactDownloadInfo) + ret = Dict{String, Any}( + "url" => adi.url, + "sha256" => bytes2hex(adi.hash), + ) + if adi.size > 0 + ret["size"] = adi.size + end + return ret end - # Integrate download info, if it is given. We represent the download info as a - # vector of dicts, each with its own `url` and `sha256`, since different tarballs can - # expand to the same tree hash. - if download_info !== nothing - meta["download"] = [ - Dict("url" => dl[1], - "sha256" => dl[2], - ) for dl in download_info - ] + """ + bind_artifact!(artifacts_toml::String, name::String, hash::SHA1; + platform::Union{AbstractPlatform,Nothing} = nothing, + download_info::Union{Vector{Tuple},Nothing} = nothing, + lazy::Bool = false, + force::Bool = false) + + Writes a mapping of `name` -> `hash` within the given `(Julia)Artifacts.toml` file. If + `platform` is not `nothing`, this artifact is marked as platform-specific, and will be + a multi-mapping. 
It is valid to bind multiple artifacts with the same name, but + different `platform`s and `hash`'es within the same `artifacts_toml`. If `force` is set + to `true`, this will overwrite a pre-existant mapping, otherwise an error is raised. + + `download_info` is an optional vector that contains tuples of URLs and a hash. These + URLs will be listed as possible locations where this artifact can be obtained. If `lazy` + is set to `true`, even if download information is available, this artifact will not be + downloaded until it is accessed via the `artifact"name"` syntax, or + `ensure_artifact_installed()` is called upon it. + """ + function bind_artifact!( + artifacts_toml::String, name::String, hash::SHA1; + platform::Union{AbstractPlatform, Nothing} = nothing, + download_info::Union{Vector{<:Tuple}, Vector{<:ArtifactDownloadInfo}, Nothing} = nothing, + lazy::Bool = false, + force::Bool = false + ) + # First, check to see if this artifact is already bound: + if isfile(artifacts_toml) + artifact_dict = parse_toml(artifacts_toml) + + if !force && haskey(artifact_dict, name) + meta = artifact_dict[name] + if !isa(meta, Vector) + error("Mapping for '$name' within $(artifacts_toml) already exists!") + elseif any(p -> platforms_match(platform, p), unpack_platform(x, name, artifacts_toml) for x in meta) + error("Mapping for '$name'/$(triplet(platform)) within $(artifacts_toml) already exists!") + end + end + else + artifact_dict = Dict{String, Any}() + end + + # Otherwise, the new piece of data we're going to write out is this dict: + meta = Dict{String, Any}( + "git-tree-sha1" => bytes2hex(hash.bytes), + ) + + # If we're set to be lazy, then lazy we shall be + if lazy + meta["lazy"] = true + end + + # Integrate download info, if it is given. Note that there can be multiple + # download locations, each with its own tarball with its own hash, but which + # expands to the same content/treehash. 
+ if download_info !== nothing + meta["download"] = make_dict.(ArtifactDownloadInfo.(download_info)) + end + + if platform === nothing + artifact_dict[name] = meta + else + # Add platform-specific keys to our `meta` dict + pack_platform!(meta, platform) + + # Insert this entry into the list of artifacts + if !haskey(artifact_dict, name) + artifact_dict[name] = [meta] + else + # Delete any entries that contain identical platforms + artifact_dict[name] = filter( + x -> unpack_platform(x, name, artifacts_toml) != platform, + artifact_dict[name] + ) + push!(artifact_dict[name], meta) + end + end + + # Spit it out onto disk + let artifact_dict = artifact_dict + parent_dir = dirname(artifacts_toml) + atomic_toml_write(artifacts_toml, artifact_dict, sorted = true) + end + + # Mark that we have used this Artifact.toml + write_env_usage(artifacts_toml, "artifact_usage.toml") + return end - if platform === nothing - artifact_dict[name] = meta - else - # Add platform-specific keys to our `meta` dict - pack_platform!(meta, platform) - # Insert this entry into the list of artifacts + """ + unbind_artifact!(artifacts_toml::String, name::String; platform = nothing) + + Unbind the given `name` from an `(Julia)Artifacts.toml` file. + Silently fails if no such binding exists within the file. + """ + function unbind_artifact!( + artifacts_toml::String, name::String; + platform::Union{AbstractPlatform, Nothing} = nothing + ) + artifact_dict = parse_toml(artifacts_toml) if !haskey(artifact_dict, name) - artifact_dict[name] = [meta] + return + end + + if platform === nothing + delete!(artifact_dict, name) else - # Delete any entries that contain identical platforms artifact_dict[name] = filter( x -> unpack_platform(x, name, artifacts_toml) != platform, artifact_dict[name] ) - push!(artifact_dict[name], meta) end - end - # Spit it out onto disk - let artifact_dict = artifact_dict - parent_dir = dirname(artifacts_toml) - temp_artifacts_toml = isempty(parent_dir) ? 
tempname(pwd()) : tempname(parent_dir) - open(temp_artifacts_toml, "w") do io - TOML.print(io, artifact_dict, sorted=true) - end - mv(temp_artifacts_toml, artifacts_toml; force=true) + atomic_toml_write(artifacts_toml, artifact_dict, sorted = true) + return end - # Mark that we have used this Artifact.toml - write_env_usage(artifacts_toml, "artifact_usage.toml") - return -end + """ + download_artifact(tree_hash::SHA1, tarball_url::String, tarball_hash::String; + verbose::Bool = false, io::IO=stderr) + Download/install an artifact into the artifact store. Returns `true` on success, + returns an error object on failure. + + !!! compat "Julia 1.8" + As of Julia 1.8 this function returns the error object rather than `false` when + failure occurs + """ + function download_artifact( + tree_hash::SHA1, + tarball_url::String, + tarball_hash::Union{String, Nothing} = nothing; + verbose::Bool = false, + quiet_download::Bool = false, + io::IO = stderr_f(), + progress::Union{Function, Nothing} = nothing, + ) + _artifact_paths = artifact_paths(tree_hash) + pidfile = _artifact_paths[1] * ".pid" + mkpath(dirname(pidfile)) + t_wait_msg = Timer(2) do t + if progress === nothing + @info "downloading $tarball_url ($hex) in another process" + else + progress(0, 0; status = "downloading in another process") + end + end + ret = FileWatching.mkpidlock(pidfile, stale_age = 20) do + close(t_wait_msg) + if artifact_exists(tree_hash) + return true + end -""" - unbind_artifact!(artifacts_toml::String, name::String; platform = nothing) + # Ensure the `artifacts` directory exists in our default depot + artifacts_dir = first(artifacts_dirs()) + mkpath(artifacts_dir) + # expected artifact path + dst = joinpath(artifacts_dir, bytes2hex(tree_hash.bytes)) + + # We download by using a temporary directory. 
We do this because the download may + # be corrupted or even malicious; we don't want to clobber someone else's artifact + # by trusting the tree hash that has been given to us; we will instead download it + # to a temporary directory, calculate the true tree hash, then move it to the proper + # location only after knowing what it is, and if something goes wrong in the process, + # everything should be cleaned up. + + # Temporary directory where we'll do our creation business + temp_dir = mktempdir(artifacts_dir) + + try + download_verify_unpack( + tarball_url, tarball_hash, temp_dir; + ignore_existence = true, verbose, quiet_download, io, progress + ) + isnothing(progress) || progress(10000, 10000; status = "verifying") + calc_hash = SHA1(GitTools.tree_hash(temp_dir)) + + # Did we get what we expected? If not, freak out. + if calc_hash.bytes != tree_hash.bytes + msg = """ + Tree Hash Mismatch! + Expected git-tree-sha1: $(bytes2hex(tree_hash.bytes)) + Calculated git-tree-sha1: $(bytes2hex(calc_hash.bytes)) + """ + # Since tree hash calculation is rather fragile and file system dependent, + # we allow setting JULIA_PKG_IGNORE_HASHES=1 to ignore the error and move + # the artifact to the expected location and return true + ignore_hash_env_set = get(ENV, "JULIA_PKG_IGNORE_HASHES", "") != "" + if ignore_hash_env_set + ignore_hash = Base.get_bool_env("JULIA_PKG_IGNORE_HASHES", false) + ignore_hash === nothing && @error( + "Invalid ENV[\"JULIA_PKG_IGNORE_HASHES\"] value", + ENV["JULIA_PKG_IGNORE_HASHES"], + ) + ignore_hash = something(ignore_hash, false) + else + # default: false except Windows users who can't symlink + ignore_hash = Sys.iswindows() && + !mktempdir(can_symlink, artifacts_dir) + end + if ignore_hash + desc = ignore_hash_env_set ? 
+ "Environment variable \$JULIA_PKG_IGNORE_HASHES is true" : + "System is Windows and user cannot create symlinks" + msg *= "\n$desc: \ + ignoring hash mismatch and moving \ + artifact to the expected location" + @error(msg) + else + error(msg) + end + end + # Move it to the location we expected + isnothing(progress) || progress(10000, 10000; status = "moving to artifact store") + mv_temp_dir_retries(temp_dir, dst) + catch err + @debug "download_artifact error" tree_hash tarball_url tarball_hash err + if isa(err, InterruptException) + rethrow(err) + end + # If something went wrong during download, return the error + return err + finally + # Always attempt to cleanup + try + rm(temp_dir; recursive = true, force = true) + catch e + e isa InterruptException && rethrow() + @warn("Failed to clean up temporary directory $(repr(temp_dir))", exception = e) + end + end + return true + end -Unbind the given `name` from an `(Julia)Artifacts.toml` file. -Silently fails if no such binding exists within the file. -""" -function unbind_artifact!(artifacts_toml::String, name::String; - platform::Union{AbstractPlatform,Nothing} = nothing) - artifact_dict = parse_toml(artifacts_toml) - if !haskey(artifact_dict, name) - return + return ret end - if platform === nothing - delete!(artifact_dict, name) - else - artifact_dict[name] = filter( - x -> unpack_platform(x, name, artifacts_toml) != platform, - artifact_dict[name] + """ + ensure_artifact_installed(name::String, artifacts_toml::String; + platform::AbstractPlatform = HostPlatform(), + pkg_uuid::Union{Base.UUID,Nothing}=nothing, + verbose::Bool = false, + quiet_download::Bool = false, + io::IO=stderr) + + Ensures an artifact is installed, downloading it via the download information stored in + `artifacts_toml` if necessary. Throws an error if unable to install. 
+ """ + function ensure_artifact_installed( + name::String, artifacts_toml::String; + platform::AbstractPlatform = HostPlatform(), + pkg_uuid::Union{Base.UUID, Nothing} = nothing, + pkg_server_eligible::Bool = true, + verbose::Bool = false, + quiet_download::Bool = false, + progress::Union{Function, Nothing} = nothing, + io::IO = stderr_f() ) - end + meta = artifact_meta(name, artifacts_toml; pkg_uuid = pkg_uuid, platform = platform) + if meta === nothing + error("Cannot locate artifact '$(name)' in '$(artifacts_toml)'") + end - open(artifacts_toml, "w") do io - TOML.print(io, artifact_dict, sorted=true) - end - return -end - -""" - download_artifact(tree_hash::SHA1, tarball_url::String, tarball_hash::String; - verbose::Bool = false, io::IO=stderr) - -Download/install an artifact into the artifact store. Returns `true` on success, -returns an error object on failure. - -!!! compat "Julia 1.8" - As of Julia 1.8 this function returns the error object rather than `false` when - failure occurs -""" -function download_artifact( - tree_hash::SHA1, - tarball_url::String, - tarball_hash::Union{String, Nothing} = nothing; - verbose::Bool = false, - quiet_download::Bool = false, - io::IO=stderr_f(), - progress::Union{Function, Nothing} = nothing, -) - if artifact_exists(tree_hash) - return true + return ensure_artifact_installed( + name, meta, artifacts_toml; + pkg_server_eligible, platform, verbose, quiet_download, progress, io + ) end - # Ensure the `artifacts` directory exists in our default depot - artifacts_dir = first(artifacts_dirs()) - mkpath(artifacts_dir) - # expected artifact path - dst = joinpath(artifacts_dir, bytes2hex(tree_hash.bytes)) - - # We download by using a temporary directory. 
We do this because the download may - # be corrupted or even malicious; we don't want to clobber someone else's artifact - # by trusting the tree hash that has been given to us; we will instead download it - # to a temporary directory, calculate the true tree hash, then move it to the proper - # location only after knowing what it is, and if something goes wrong in the process, - # everything should be cleaned up. - - # Temporary directory where we'll do our creation business - temp_dir = mktempdir(artifacts_dir) - - try - download_verify_unpack(tarball_url, tarball_hash, temp_dir; - ignore_existence=true, verbose, quiet_download, io, progress) - isnothing(progress) || progress(10000, 10000; status="verifying") - calc_hash = SHA1(GitTools.tree_hash(temp_dir)) - - # Did we get what we expected? If not, freak out. - if calc_hash.bytes != tree_hash.bytes - msg = """ - Tree Hash Mismatch! - Expected git-tree-sha1: $(bytes2hex(tree_hash.bytes)) - Calculated git-tree-sha1: $(bytes2hex(calc_hash.bytes)) - """ - # Since tree hash calculation is rather fragile and file system dependent, - # we allow setting JULIA_PKG_IGNORE_HASHES=1 to ignore the error and move - # the artifact to the expected location and return true - ignore_hash_env_set = get(ENV, "JULIA_PKG_IGNORE_HASHES", "") != "" - if ignore_hash_env_set - ignore_hash = Base.get_bool_env("JULIA_PKG_IGNORE_HASHES", false) - ignore_hash === nothing && @error( - "Invalid ENV[\"JULIA_PKG_IGNORE_HASHES\"] value", - ENV["JULIA_PKG_IGNORE_HASHES"], - ) - ignore_hash = something(ignore_hash, false) - else - # default: false except Windows users who can't symlink - ignore_hash = Sys.iswindows() && - !mktempdir(can_symlink, artifacts_dir) - end - if ignore_hash - desc = ignore_hash_env_set ? 
- "Environment variable \$JULIA_PKG_IGNORE_HASHES is true" : - "System is Windows and user cannot create symlinks" - msg *= "\n$desc: \ - ignoring hash mismatch and moving \ - artifact to the expected location" - @error(msg) + function ensure_artifact_installed( + name::String, meta::Dict, artifacts_toml::String; + pkg_server_eligible::Bool = true, + platform::AbstractPlatform = HostPlatform(), + verbose::Bool = false, + quiet_download::Bool = false, + progress::Union{Function, Nothing} = nothing, + io::IO = stderr_f() + ) + hash = SHA1(meta["git-tree-sha1"]) + if !artifact_exists(hash) + if isnothing(progress) || verbose == true + return try_artifact_download_sources(name, hash, meta, artifacts_toml; pkg_server_eligible, platform, verbose, quiet_download, io) else - error(msg) + # if a custom progress handler is given it is taken to mean the caller wants to handle the download scheduling + return () -> try_artifact_download_sources(name, hash, meta, artifacts_toml; pkg_server_eligible, platform, quiet_download = true, io, progress) end - end - # Move it to the location we expected - isnothing(progress) || progress(10000, 10000; status="moving to artifact store") - _mv_temp_artifact_dir(temp_dir, dst) - catch err - @debug "download_artifact error" tree_hash tarball_url tarball_hash err - if isa(err, InterruptException) - rethrow(err) - end - # If something went wrong during download, return the error - return err - finally - # Always attempt to cleanup - try - rm(temp_dir; recursive=true, force=true) - catch e - e isa InterruptException && rethrow() - @warn("Failed to clean up temporary directory $(repr(temp_dir))", exception=e) - end - end - return true -end - -""" - ensure_artifact_installed(name::String, artifacts_toml::String; - platform::AbstractPlatform = HostPlatform(), - pkg_uuid::Union{Base.UUID,Nothing}=nothing, - verbose::Bool = false, - quiet_download::Bool = false, - io::IO=stderr) - -Ensures an artifact is installed, downloading it via the download 
information stored in -`artifacts_toml` if necessary. Throws an error if unable to install. -""" -function ensure_artifact_installed(name::String, artifacts_toml::String; - platform::AbstractPlatform = HostPlatform(), - pkg_uuid::Union{Base.UUID,Nothing}=nothing, - verbose::Bool = false, - quiet_download::Bool = false, - progress::Union{Function,Nothing} = nothing, - io::IO=stderr_f()) - meta = artifact_meta(name, artifacts_toml; pkg_uuid=pkg_uuid, platform=platform) - if meta === nothing - error("Cannot locate artifact '$(name)' in '$(artifacts_toml)'") - end - - return ensure_artifact_installed(name, meta, artifacts_toml; - platform, verbose, quiet_download, progress, io) -end - -function ensure_artifact_installed(name::String, meta::Dict, artifacts_toml::String; - platform::AbstractPlatform = HostPlatform(), - verbose::Bool = false, - quiet_download::Bool = false, - progress::Union{Function,Nothing} = nothing, - io::IO=stderr_f()) - - hash = SHA1(meta["git-tree-sha1"]) - if !artifact_exists(hash) - if isnothing(progress) || verbose == true - return try_artifact_download_sources(name, hash, meta, artifacts_toml; platform, verbose, quiet_download, io) else - # if a custom progress handler is given it is taken to mean the caller wants to handle the download scheduling - return () -> try_artifact_download_sources(name, hash, meta, artifacts_toml; platform, quiet_download=true, io, progress) + return artifact_path(hash) end - else - return artifact_path(hash) end -end -function try_artifact_download_sources( + function try_artifact_download_sources( name::String, hash::SHA1, meta::Dict, artifacts_toml::String; - platform::AbstractPlatform=HostPlatform(), - verbose::Bool=false, - quiet_download::Bool=false, - io::IO=stderr_f(), - progress::Union{Function,Nothing}=nothing) - - errors = Any[] - # first try downloading from Pkg server - # TODO: only do this if Pkg server knows about this package - if (server = pkg_server()) !== nothing - url = "$server/artifact/$hash" - 
download_success = let url = url - @debug "Downloading artifact from Pkg server" name artifacts_toml platform url - with_show_download_info(io, name, quiet_download) do - download_artifact(hash, url; verbose, quiet_download, io, progress) + pkg_server_eligible::Bool = true, + platform::AbstractPlatform = HostPlatform(), + verbose::Bool = false, + quiet_download::Bool = false, + io::IO = stderr_f(), + progress::Union{Function, Nothing} = nothing + ) + + errors = Any[] + # first try downloading from Pkg server if the Pkg server knows about this package + if pkg_server_eligible && (server = pkg_server()) !== nothing + url = "$server/artifact/$hash" + download_success = let url = url + @debug "Downloading artifact from Pkg server" name artifacts_toml platform url + with_show_download_info(io, name, quiet_download) do + download_artifact(hash, url; verbose, quiet_download, io, progress) + end + end + # download_success is either `true` or an error object + if download_success === true + return artifact_path(hash) + else + @debug "Failed to download artifact from Pkg server" download_success + push!(errors, (url, download_success)) end end - # download_success is either `true` or an error object - if download_success === true - return artifact_path(hash) - else - @debug "Failed to download artifact from Pkg server" download_success - push!(errors, (url, download_success)) - end - end - # If this artifact does not exist on-disk already, ensure it has download - # information, then download it! - if !haskey(meta, "download") - error("Cannot automatically install '$(name)'; no download section in '$(artifacts_toml)'") - end + # If this artifact does not exist on-disk already, ensure it has download + # information, then download it! 
+ if !haskey(meta, "download") + error("Cannot automatically install '$(name)'; no download section in '$(artifacts_toml)'") + end - # Attempt to download from all sources - for entry in meta["download"] - url = entry["url"] - tarball_hash = entry["sha256"] - download_success = let url = url - @debug "Downloading artifact" name artifacts_toml platform url - with_show_download_info(io, name, quiet_download) do - download_artifact(hash, url, tarball_hash; verbose, quiet_download, io, progress) + # Attempt to download from all sources + for entry in meta["download"] + url = entry["url"] + tarball_hash = entry["sha256"] + download_success = let url = url + @debug "Downloading artifact" name artifacts_toml platform url + with_show_download_info(io, name, quiet_download) do + download_artifact(hash, url, tarball_hash; verbose, quiet_download, io, progress) + end + end + # download_success is either `true` or an error object + if download_success === true + return artifact_path(hash) + else + @debug "Failed to download artifact" download_success + push!(errors, (url, download_success)) end end - # download_success is either `true` or an error object - if download_success === true - return artifact_path(hash) - else - @debug "Failed to download artifact" download_success - push!(errors, (url, download_success)) + errmsg = """ + Unable to automatically download/install artifact '$(name)' from sources listed in '$(artifacts_toml)'. + Sources attempted: + """ + for (url, err) in errors + errmsg *= "- $(url)\n" + errmsg *= " Error: $(sprint(showerror, err))\n" end + error(errmsg) end - errmsg = """ - Unable to automatically download/install artifact '$(name)' from sources listed in '$(artifacts_toml)'. 
- Sources attempted: - """ - for (url, err) in errors - errmsg *= "- $(url)\n" - errmsg *= " Error: $(sprint(showerror, err))\n" - end - error(errmsg) -end -function with_show_download_info(f, io, name, quiet_download) - fancyprint = can_fancyprint(io) - if !quiet_download - fancyprint && print_progress_bottom(io) - printpkgstyle(io, :Downloading, "artifact: $name") - end - success = false - try - result = f() - success = result === true - return result - finally + function with_show_download_info(f, io, name, quiet_download) + fancyprint = can_fancyprint(io) if !quiet_download - fancyprint && print(io, "\033[1A") # move cursor up one line - fancyprint && print(io, "\033[2K") # clear line - if success - fancyprint && printpkgstyle(io, :Downloaded, "artifact: $name") - else - printpkgstyle(io, :Failure, "artifact: $name", color = :red) + fancyprint && print_progress_bottom(io) + printpkgstyle(io, :Downloading, "artifact: $name") + end + success = false + try + result = f() + success = result === true + return result + finally + if !quiet_download + fancyprint && print(io, "\033[1A") # move cursor up one line + fancyprint && print(io, "\033[2K") # clear line + if success + fancyprint && printpkgstyle(io, :Downloaded, "artifact: $name") + else + printpkgstyle(io, :Failure, "artifact: $name", color = :red) + end end end end -end -""" - ensure_all_artifacts_installed(artifacts_toml::String; - platform = HostPlatform(), - pkg_uuid = nothing, - include_lazy = false, - verbose = false, - quiet_download = false, - io::IO=stderr) + """ + ensure_all_artifacts_installed(artifacts_toml::String; + platform = HostPlatform(), + pkg_uuid = nothing, + include_lazy = false, + verbose = false, + quiet_download = false, + io::IO=stderr) -Installs all non-lazy artifacts from a given `(Julia)Artifacts.toml` file. `package_uuid` must -be provided to properly support overrides from `Overrides.toml` entries in depots. 
+ Installs all non-lazy artifacts from a given `(Julia)Artifacts.toml` file. `package_uuid` must + be provided to properly support overrides from `Overrides.toml` entries in depots. -If `include_lazy` is set to `true`, then lazy packages will be installed as well. + If `include_lazy` is set to `true`, then lazy packages will be installed as well. -This function is deprecated and should be replaced with the following snippet: + This function is deprecated and should be replaced with the following snippet: - artifacts = select_downloadable_artifacts(artifacts_toml; platform, include_lazy) - for name in keys(artifacts) - ensure_artifact_installed(name, artifacts[name], artifacts_toml; platform=platform) - end + artifacts = select_downloadable_artifacts(artifacts_toml; platform, include_lazy) + for name in keys(artifacts) + ensure_artifact_installed(name, artifacts[name], artifacts_toml; platform=platform) + end -!!! warning - This function is deprecated in Julia 1.6 and will be removed in a future version. - Use `select_downloadable_artifacts()` and `ensure_artifact_installed()` instead. -""" -function ensure_all_artifacts_installed(artifacts_toml::String; - platform::AbstractPlatform = HostPlatform(), - pkg_uuid::Union{Nothing,Base.UUID} = nothing, - include_lazy::Bool = false, - verbose::Bool = false, - quiet_download::Bool = false, - io::IO=stderr_f()) - # This function should not be called anymore; use `select_downloadable_artifacts()` directly. - Base.depwarn("`ensure_all_artifacts_installed()` is deprecated; iterate over `select_downloadable_artifacts()` output with `ensure_artifact_installed()`.", :ensure_all_artifacts_installed) - # Collect all artifacts we're supposed to install - artifacts = select_downloadable_artifacts(artifacts_toml; platform, include_lazy, pkg_uuid) - for name in keys(artifacts) - # Otherwise, let's try and install it! 
- ensure_artifact_installed(name, artifacts[name], artifacts_toml; platform=platform, - verbose=verbose, quiet_download=quiet_download, io=io) - end -end - -""" - extract_all_hashes(artifacts_toml::String; - platform = HostPlatform(), - pkg_uuid = nothing, - include_lazy = false) - -Extract all hashes from a given `(Julia)Artifacts.toml` file. `package_uuid` must -be provided to properly support overrides from `Overrides.toml` entries in depots. - -If `include_lazy` is set to `true`, then lazy packages will be installed as well. -""" -function extract_all_hashes(artifacts_toml::String; - platform::AbstractPlatform = HostPlatform(), - pkg_uuid::Union{Nothing,Base.UUID} = nothing, - include_lazy::Bool = false) - hashes = Base.SHA1[] - if !isfile(artifacts_toml) - return hashes + !!! warning + This function is deprecated in Julia 1.6 and will be removed in a future version. + Use `select_downloadable_artifacts()` and `ensure_artifact_installed()` instead. + """ + function ensure_all_artifacts_installed( + artifacts_toml::String; + platform::AbstractPlatform = HostPlatform(), + pkg_uuid::Union{Nothing, Base.UUID} = nothing, + include_lazy::Bool = false, + verbose::Bool = false, + quiet_download::Bool = false, + io::IO = stderr_f() + ) + # This function should not be called anymore; use `select_downloadable_artifacts()` directly. + Base.depwarn("`ensure_all_artifacts_installed()` is deprecated; iterate over `select_downloadable_artifacts()` output with `ensure_artifact_installed()`.", :ensure_all_artifacts_installed) + # Collect all artifacts we're supposed to install + artifacts = select_downloadable_artifacts(artifacts_toml; platform, include_lazy, pkg_uuid) + for name in keys(artifacts) + # Otherwise, let's try and install it! 
+ ensure_artifact_installed( + name, artifacts[name], artifacts_toml; platform = platform, + verbose = verbose, quiet_download = quiet_download, io = io + ) + end + return end - artifact_dict = load_artifacts_toml(artifacts_toml; pkg_uuid=pkg_uuid) + """ + extract_all_hashes(artifacts_toml::String; + platform = HostPlatform(), + pkg_uuid = nothing, + include_lazy = false) - for name in keys(artifact_dict) - # Get the metadata about this name for the requested platform - meta = artifact_meta(name, artifact_dict, artifacts_toml; platform=platform) + Extract all hashes from a given `(Julia)Artifacts.toml` file. `package_uuid` must + be provided to properly support overrides from `Overrides.toml` entries in depots. - # If there are no instances of this name for the desired platform, skip it - meta === nothing && continue + If `include_lazy` is set to `true`, then lazy packages will be installed as well. + """ + function extract_all_hashes( + artifacts_toml::String; + platform::AbstractPlatform = HostPlatform(), + pkg_uuid::Union{Nothing, Base.UUID} = nothing, + include_lazy::Bool = false + ) + hashes = Base.SHA1[] + if !isfile(artifacts_toml) + return hashes + end + + artifact_dict = load_artifacts_toml(artifacts_toml; pkg_uuid = pkg_uuid) + + for name in keys(artifact_dict) + # Get the metadata about this name for the requested platform + meta = artifact_meta(name, artifact_dict, artifacts_toml; platform = platform) - # If it's a lazy one and we aren't including lazy ones, skip - if get(meta, "lazy", false) && !include_lazy - continue + # If there are no instances of this name for the desired platform, skip it + meta === nothing && continue + + # If it's a lazy one and we aren't including lazy ones, skip + if get(meta, "lazy", false) && !include_lazy + continue + end + + # Otherwise, add it to the list! + push!(hashes, Base.SHA1(meta["git-tree-sha1"])) end - # Otherwise, add it to the list! 
- push!(hashes, Base.SHA1(meta["git-tree-sha1"])) + return hashes end - return hashes -end - -# Support `AbstractString`s, but avoid compilers needing to track backedges for callers -# of these functions in case a user defines a new type that is `<: AbstractString` -archive_artifact(hash::SHA1, tarball_path::AbstractString; kwargs...) = - archive_artifact(hash, string(tarball_path)::String; kwargs...) -bind_artifact!(artifacts_toml::AbstractString, name::AbstractString, hash::SHA1; kwargs...) = - bind_artifact!(string(artifacts_toml)::String, string(name)::String, hash; kwargs...) -unbind_artifact!(artifacts_toml::AbstractString, name::AbstractString) = - unbind_artifact!(string(artifacts_toml)::String, string(name)::String) -download_artifact(tree_hash::SHA1, tarball_url::AbstractString, args...; kwargs...) = - download_artifact(tree_hash, string(tarball_url)::String, args...; kwargs...) -ensure_artifact_installed(name::AbstractString, artifacts_toml::AbstractString; kwargs...) = - ensure_artifact_installed(string(name)::String, string(artifacts_toml)::String; kwargs...) -ensure_artifact_installed(name::AbstractString, meta::Dict, artifacts_toml::AbstractString; kwargs...) = - ensure_artifact_installed(string(name)::String, meta, string(artifacts_toml)::String; kwargs...) -ensure_all_artifacts_installed(artifacts_toml::AbstractString; kwargs...) = - ensure_all_artifacts_installed(string(artifacts_toml)::String; kwargs...) -extract_all_hashes(artifacts_toml::AbstractString; kwargs...) = - extract_all_hashes(string(artifacts_toml)::String; kwargs...) - -end # module Artifacts + # Support `AbstractString`s, but avoid compilers needing to track backedges for callers + # of these functions in case a user defines a new type that is `<: AbstractString` + archive_artifact(hash::SHA1, tarball_path::AbstractString; kwargs...) = + archive_artifact(hash, string(tarball_path)::String; kwargs...) 
+ bind_artifact!(artifacts_toml::AbstractString, name::AbstractString, hash::SHA1; kwargs...) = + bind_artifact!(string(artifacts_toml)::String, string(name)::String, hash; kwargs...) + unbind_artifact!(artifacts_toml::AbstractString, name::AbstractString) = + unbind_artifact!(string(artifacts_toml)::String, string(name)::String) + download_artifact(tree_hash::SHA1, tarball_url::AbstractString, args...; kwargs...) = + download_artifact(tree_hash, string(tarball_url)::String, args...; kwargs...) + ensure_artifact_installed(name::AbstractString, artifacts_toml::AbstractString; kwargs...) = + ensure_artifact_installed(string(name)::String, string(artifacts_toml)::String; kwargs...) + ensure_artifact_installed(name::AbstractString, meta::Dict, artifacts_toml::AbstractString; kwargs...) = + ensure_artifact_installed(string(name)::String, meta, string(artifacts_toml)::String; kwargs...) + ensure_all_artifacts_installed(artifacts_toml::AbstractString; kwargs...) = + ensure_all_artifacts_installed(string(artifacts_toml)::String; kwargs...) + extract_all_hashes(artifacts_toml::AbstractString; kwargs...) = + extract_all_hashes(string(artifacts_toml)::String; kwargs...) 
+ +end # module PkgArtifacts + +const Artifacts = PkgArtifacts diff --git a/src/BinaryPlatformsCompat.jl b/src/BinaryPlatformsCompat.jl new file mode 100644 index 0000000000..93403e05bd --- /dev/null +++ b/src/BinaryPlatformsCompat.jl @@ -0,0 +1,155 @@ +module BinaryPlatformsCompat + + export platform_key_abi, platform_dlext, valid_dl_path, arch, libc, + libgfortran_version, libstdcxx_version, cxxstring_abi, parse_dl_name_version, + detect_libgfortran_version, detect_libstdcxx_version, detect_cxxstring_abi, + call_abi, wordsize, triplet, select_platform, platforms_match, + CompilerABI, Platform, UnknownPlatform, Linux, MacOS, Windows, FreeBSD + + using Base.BinaryPlatforms: parse_dl_name_version, + detect_libgfortran_version, detect_libstdcxx_version, detect_cxxstring_abi, + os, call_abi, select_platform, platforms_match, + AbstractPlatform, Platform, HostPlatform + + import Base.BinaryPlatforms: libgfortran_version, libstdcxx_version, platform_name, + wordsize, platform_dlext, tags, arch, libc, call_abi, + cxxstring_abi + + struct UnknownPlatform <: AbstractPlatform + UnknownPlatform(args...; kwargs...) 
= new() + end + tags(::UnknownPlatform) = Dict{String, String}("os" => "unknown") + + + struct CompilerABI + libgfortran_version::Union{Nothing, VersionNumber} + libstdcxx_version::Union{Nothing, VersionNumber} + cxxstring_abi::Union{Nothing, Symbol} + + function CompilerABI(; + libgfortran_version::Union{Nothing, VersionNumber} = nothing, + libstdcxx_version::Union{Nothing, VersionNumber} = nothing, + cxxstring_abi::Union{Nothing, Symbol} = nothing + ) + return new(libgfortran_version, libstdcxx_version, cxxstring_abi) + end + end + + # Easy replacement constructor + function CompilerABI( + cabi::CompilerABI; libgfortran_version = nothing, + libstdcxx_version = nothing, + cxxstring_abi = nothing + ) + return CompilerABI(; + libgfortran_version = something(libgfortran_version, Some(cabi.libgfortran_version)), + libstdcxx_version = something(libstdcxx_version, Some(cabi.libstdcxx_version)), + cxxstring_abi = something(cxxstring_abi, Some(cabi.cxxstring_abi)), + ) + end + + libgfortran_version(cabi::CompilerABI) = cabi.libgfortran_version + libstdcxx_version(cabi::CompilerABI) = cabi.libstdcxx_version + cxxstring_abi(cabi::CompilerABI) = cabi.cxxstring_abi + + for T in (:Linux, :Windows, :MacOS, :FreeBSD) + @eval begin + struct $(T) <: AbstractPlatform + p::Platform + function $(T)(arch::Symbol; compiler_abi = nothing, kwargs...) 
+ if compiler_abi !== nothing + kwargs = (; + kwargs..., + :libgfortran_version => libgfortran_version(compiler_abi), + :libstdcxx_version => libstdcxx_version(compiler_abi), + :cxxstring_abi => cxxstring_abi(compiler_abi), + ) + end + return new(Platform(string(arch), $(string(T)); kwargs..., validate_strict = true)) + end + end + end + end + + const PlatformUnion = Union{Linux, MacOS, Windows, FreeBSD} + + # First, methods we need to coerce to Symbol for backwards-compatibility + for f in (:arch, :libc, :call_abi, :cxxstring_abi) + @eval begin + function $(f)(p::PlatformUnion) + str = $(f)(p.p) + if str === nothing + return nothing + end + return Symbol(str) + end + end + end + + # Next, things we don't need to coerce + for f in (:libgfortran_version, :libstdcxx_version, :platform_name, :wordsize, :platform_dlext, :tags, :triplet) + @eval begin + $(f)(p::PlatformUnion) = $(f)(p.p) + end + end + + # Finally, add equality testing between these wrapper types and other AbstractPlatforms + @eval begin + Base.:(==)(a::PlatformUnion, b::AbstractPlatform) = b == a.p + end + + # Add one-off functions + MacOS(; kwargs...) = MacOS(:x86_64; kwargs...) + FreeBSD(; kwargs...) = FreeBSD(:x86_64; kwargs...) + + function triplet(p::AbstractPlatform) + # We are going to sub off to `Base.BinaryPlatforms.triplet()` here, + # with the important exception that we override `os_version` to better + # mimic the old behavior of `triplet()` + if Sys.isfreebsd(p) + p = deepcopy(p) + p["os_version"] = "11.1.0" + elseif Sys.isapple(p) + p = deepcopy(p) + p["os_version"] = "14.0.0" + end + return Base.BinaryPlatforms.triplet(p) + end + + """ + platform_key_abi(machine::AbstractString) + + Returns the platform key for the current platform, or any other though the + the use of the `machine` parameter. 
+ + This method is deprecated, import `Base.BinaryPlatforms` and use either `HostPlatform()` + to get the current host platform, or `parse(Base.BinaryPlatforms.Platform, triplet)` + to parse the triplet for some other platform instead. + """ + platform_key_abi() = HostPlatform() + platform_key_abi(triplet::AbstractString) = parse(Platform, triplet) + + """ + valid_dl_path(path::AbstractString, platform::Platform) + + Return `true` if the given `path` ends in a valid dynamic library filename. + E.g. returns `true` for a path like `"usr/lib/libfoo.so.3.5"`, but returns + `false` for a path like `"libbar.so.f.a"`. + + This method is deprecated and will be removed in Julia 2.0. + """ + function valid_dl_path(path::AbstractString, platform::AbstractPlatform) + try + parse_dl_name_version(path, string(os(platform))::String) + return true + catch e + if isa(e, ArgumentError) + return false + end + rethrow(e) + end + end + +end # module BinaryPlatformsCompat + +const BinaryPlatforms = BinaryPlatformsCompat diff --git a/src/BinaryPlatforms_compat.jl b/src/BinaryPlatforms_compat.jl deleted file mode 100644 index 879dcc0c83..0000000000 --- a/src/BinaryPlatforms_compat.jl +++ /dev/null @@ -1,148 +0,0 @@ -module BinaryPlatforms - -export platform_key_abi, platform_dlext, valid_dl_path, arch, libc, - libgfortran_version, libstdcxx_version, cxxstring_abi, parse_dl_name_version, - detect_libgfortran_version, detect_libstdcxx_version, detect_cxxstring_abi, - call_abi, wordsize, triplet, select_platform, platforms_match, - CompilerABI, Platform, UnknownPlatform, Linux, MacOS, Windows, FreeBSD - -using Base.BinaryPlatforms: parse_dl_name_version, - detect_libgfortran_version, detect_libstdcxx_version, detect_cxxstring_abi, - os, call_abi, select_platform, platforms_match, - AbstractPlatform, Platform, HostPlatform - -import Base.BinaryPlatforms: libgfortran_version, libstdcxx_version, platform_name, - wordsize, platform_dlext, tags, arch, libc, call_abi, - cxxstring_abi - -struct 
UnknownPlatform <: AbstractPlatform - UnknownPlatform(args...; kwargs...) = new() -end -tags(::UnknownPlatform) = Dict{String,String}("os"=>"unknown") - - -struct CompilerABI - libgfortran_version::Union{Nothing,VersionNumber} - libstdcxx_version::Union{Nothing,VersionNumber} - cxxstring_abi::Union{Nothing,Symbol} - - function CompilerABI(;libgfortran_version::Union{Nothing, VersionNumber} = nothing, - libstdcxx_version::Union{Nothing, VersionNumber} = nothing, - cxxstring_abi::Union{Nothing, Symbol} = nothing) - return new(libgfortran_version, libstdcxx_version, cxxstring_abi) - end -end - -# Easy replacement constructor -function CompilerABI(cabi::CompilerABI; libgfortran_version=nothing, - libstdcxx_version=nothing, - cxxstring_abi=nothing) - return CompilerABI(; - libgfortran_version=something(libgfortran_version, Some(cabi.libgfortran_version)), - libstdcxx_version=something(libstdcxx_version, Some(cabi.libstdcxx_version)), - cxxstring_abi=something(cxxstring_abi, Some(cabi.cxxstring_abi)), - ) -end - -libgfortran_version(cabi::CompilerABI) = cabi.libgfortran_version -libstdcxx_version(cabi::CompilerABI) = cabi.libstdcxx_version -cxxstring_abi(cabi::CompilerABI) = cabi.cxxstring_abi - -for T in (:Linux, :Windows, :MacOS, :FreeBSD) - @eval begin - struct $(T) <: AbstractPlatform - p::Platform - function $(T)(arch::Symbol; compiler_abi=nothing, kwargs...) 
- if compiler_abi !== nothing - kwargs = (; kwargs..., - :libgfortran_version => libgfortran_version(compiler_abi), - :libstdcxx_version => libstdcxx_version(compiler_abi), - :cxxstring_abi => cxxstring_abi(compiler_abi) - ) - end - return new(Platform(string(arch), $(string(T)); kwargs..., validate_strict=true)) - end - end - end -end - -const PlatformUnion = Union{Linux,MacOS,Windows,FreeBSD} - -# First, methods we need to coerce to Symbol for backwards-compatibility -for f in (:arch, :libc, :call_abi, :cxxstring_abi) - @eval begin - function $(f)(p::PlatformUnion) - str = $(f)(p.p) - if str === nothing - return nothing - end - return Symbol(str) - end - end -end - -# Next, things we don't need to coerce -for f in (:libgfortran_version, :libstdcxx_version, :platform_name, :wordsize, :platform_dlext, :tags, :triplet) - @eval begin - $(f)(p::PlatformUnion) = $(f)(p.p) - end -end - -# Finally, add equality testing between these wrapper types and other AbstractPlatforms -@eval begin - Base.:(==)(a::PlatformUnion, b::AbstractPlatform) = b == a.p -end - -# Add one-off functions -MacOS(; kwargs...) = MacOS(:x86_64; kwargs...) -FreeBSD(; kwargs...) = FreeBSD(:x86_64; kwargs...) - -function triplet(p::AbstractPlatform) - # We are going to sub off to `Base.BinaryPlatforms.triplet()` here, - # with the important exception that we override `os_version` to better - # mimic the old behavior of `triplet()` - if Sys.isfreebsd(p) - p = deepcopy(p) - p["os_version"] = "11.1.0" - elseif Sys.isapple(p) - p = deepcopy(p) - p["os_version"] = "14.0.0" - end - return Base.BinaryPlatforms.triplet(p) -end - -""" - platform_key_abi(machine::AbstractString) - -Returns the platform key for the current platform, or any other though the -the use of the `machine` parameter. 
- -This method is deprecated, import `Base.BinaryPlatforms` and use either `HostPlatform()` -to get the current host platform, or `parse(Base.BinaryPlatforms.Platform, triplet)` -to parse the triplet for some other platform instead. -""" -platform_key_abi() = HostPlatform() -platform_key_abi(triplet::AbstractString) = parse(Platform, triplet) - -""" - valid_dl_path(path::AbstractString, platform::Platform) - -Return `true` if the given `path` ends in a valid dynamic library filename. -E.g. returns `true` for a path like `"usr/lib/libfoo.so.3.5"`, but returns -`false` for a path like `"libbar.so.f.a"`. - -This method is deprecated and will be removed in Julia 2.0. -""" -function valid_dl_path(path::AbstractString, platform::AbstractPlatform) - try - parse_dl_name_version(path, string(os(platform))::String) - return true - catch e - if isa(e, ArgumentError) - return false - end - rethrow(e) - end -end - -end # module BinaryPlatforms diff --git a/src/GitTools.jl b/src/GitTools.jl index 02fae614ea..ddce9aac46 100644 --- a/src/GitTools.jl +++ b/src/GitTools.jl @@ -41,13 +41,13 @@ const GIT_USERS = Dict{String, Union{Nothing, String}}() @deprecate setprotocol!(proto::Union{Nothing, AbstractString}) setprotocol!(protocol = proto) false function setprotocol!(; - domain::AbstractString="github.com", - protocol::Union{Nothing, AbstractString}=nothing, - user::Union{Nothing, AbstractString}=(protocol == "ssh" ? "git" : nothing) -) + domain::AbstractString = "github.com", + protocol::Union{Nothing, AbstractString} = nothing, + user::Union{Nothing, AbstractString} = (protocol == "ssh" ? 
"git" : nothing) + ) domain = lowercase(domain) GIT_PROTOCOLS[domain] = protocol - GIT_USERS[domain] = user + return GIT_USERS[domain] = user end function normalize_url(url::AbstractString) @@ -61,7 +61,7 @@ function normalize_url(url::AbstractString) proto = get(GIT_PROTOCOLS, lowercase(host), nothing) - if proto === nothing + return if proto === nothing url else user = get(GIT_USERS, lowercase(host), nothing) @@ -80,57 +80,59 @@ function ensure_clone(io::IO, target_path, url; kwargs...) end function checkout_tree_to_path(repo::LibGit2.GitRepo, tree::LibGit2.GitObject, path::String) - GC.@preserve path begin + return GC.@preserve path begin opts = LibGit2.CheckoutOptions( checkout_strategy = LibGit2.Consts.CHECKOUT_FORCE, target_directory = Base.unsafe_convert(Cstring, path) ) - LibGit2.checkout_tree(repo, tree, options=opts) + LibGit2.checkout_tree(repo, tree, options = opts) end end -function clone(io::IO, url, source_path; header=nothing, credentials=nothing, kwargs...) +function clone(io::IO, url, source_path; header = nothing, credentials = nothing, isbare = false, kwargs...) url = String(url)::String source_path = String(source_path)::String @assert !isdir(source_path) || isempty(readdir(source_path)) url = normalize_url(url) printpkgstyle(io, :Cloning, header === nothing ? 
"git-repo `$url`" : header) - bar = MiniProgressBar(header = "Fetching:", color = Base.info_color()) + bar = MiniProgressBar(header = "Cloning:", color = Base.info_color()) fancyprint = can_fancyprint(io) - callbacks = if fancyprint - LibGit2.Callbacks( - :transfer_progress => ( - @cfunction(transfer_progress, Cint, (Ptr{LibGit2.TransferProgress}, Any)), - bar, - ) - ) - else - LibGit2.Callbacks() - end fancyprint && start_progress(io, bar) if credentials === nothing credentials = LibGit2.CachedCredentials() end - try + return try if use_cli_git() - cmd = `git clone --quiet $url $source_path` + args = ["--quiet", url, source_path] + isbare && pushfirst!(args, "--bare") + cmd = `git clone $args` try - run(pipeline(cmd; stdout=devnull)) + run(pipeline(cmd; stdout = devnull)) catch err Pkg.Types.pkgerror("The command $(cmd) failed, error: $err") end return LibGit2.GitRepo(source_path) else + callbacks = if fancyprint + LibGit2.Callbacks( + :transfer_progress => ( + @cfunction(transfer_progress, Cint, (Ptr{LibGit2.TransferProgress}, Any)), + bar, + ) + ) + else + LibGit2.Callbacks() + end mkpath(source_path) - return LibGit2.clone(url, source_path; callbacks=callbacks, credentials=credentials, kwargs...) + return LibGit2.clone(url, source_path; callbacks, credentials, isbare, kwargs...) 
end catch err - rm(source_path; force=true, recursive=true) + rm(source_path; force = true, recursive = true) err isa LibGit2.GitError || err isa InterruptException || rethrow() if err isa InterruptException Pkg.Types.pkgerror("git clone of `$url` interrupted") elseif (err.class == LibGit2.Error.Net && err.code == LibGit2.Error.EINVALIDSPEC) || - (err.class == LibGit2.Error.Repository && err.code == LibGit2.Error.ENOTFOUND) + (err.class == LibGit2.Error.Repository && err.code == LibGit2.Error.ENOTFOUND) Pkg.Types.pkgerror("git repository not found at `$(url)`") else Pkg.Types.pkgerror("failed to clone from $(url), error: $err") @@ -141,17 +143,20 @@ function clone(io::IO, url, source_path; header=nothing, credentials=nothing, kw end end -function fetch(io::IO, repo::LibGit2.GitRepo, remoteurl=nothing; header=nothing, credentials=nothing, refspecs=[""], kwargs...) +function geturl(repo) + return LibGit2.with(LibGit2.get(LibGit2.GitRemote, repo, "origin")) do remote + LibGit2.url(remote) + end +end + +function fetch(io::IO, repo::LibGit2.GitRepo, remoteurl = nothing; header = nothing, credentials = nothing, refspecs = [""], kwargs...) if remoteurl === nothing - remoteurl = LibGit2.with(LibGit2.get(LibGit2.GitRemote, repo, "origin")) do remote - LibGit2.url(remote) - end + remoteurl = geturl(repo) end fancyprint = can_fancyprint(io) remoteurl = normalize_url(remoteurl) printpkgstyle(io, :Updating, header === nothing ? 
"git-repo `$remoteurl`" : header) bar = MiniProgressBar(header = "Fetching:", color = Base.info_color()) - fancyprint = can_fancyprint(io) callbacks = if fancyprint LibGit2.Callbacks( :transfer_progress => ( @@ -166,20 +171,18 @@ function fetch(io::IO, repo::LibGit2.GitRepo, remoteurl=nothing; header=nothing, if credentials === nothing credentials = LibGit2.CachedCredentials() end - try + return try if use_cli_git() - let remoteurl=remoteurl - cd(LibGit2.path(repo)) do - cmd = `git fetch -q $remoteurl $(only(refspecs))` - try - run(pipeline(cmd; stdout=devnull)) - catch err - Pkg.Types.pkgerror("The command $(cmd) failed, error: $err") - end + let remoteurl = remoteurl + cmd = `git -C $(LibGit2.path(repo)) fetch -q $remoteurl $(only(refspecs))` + try + run(pipeline(cmd; stdout = devnull)) + catch err + Pkg.Types.pkgerror("The command $(cmd) failed, error: $err") end end else - return LibGit2.fetch(repo; remoteurl=remoteurl, callbacks=callbacks, refspecs=refspecs, kwargs...) + return LibGit2.fetch(repo; remoteurl, callbacks, credentials, refspecs, kwargs...) end catch err err isa LibGit2.GitError || rethrow() @@ -196,8 +199,8 @@ end # This code gratefully adapted from https://github.com/simonbyrne/GitX.jl -@enum GitMode mode_dir=0o040000 mode_normal=0o100644 mode_executable=0o100755 mode_symlink=0o120000 mode_submodule=0o160000 -Base.string(mode::GitMode) = string(UInt32(mode); base=8) +@enum GitMode mode_dir = 0o040000 mode_normal = 0o100644 mode_executable = 0o100755 mode_symlink = 0o120000 mode_submodule = 0o160000 +Base.string(mode::GitMode) = string(UInt32(mode); base = 8) Base.print(io::IO, mode::GitMode) = print(io, string(mode)) function gitmode(path::AbstractString) @@ -227,7 +230,7 @@ end Calculate the git blob hash of a given path. 
""" -function blob_hash(::Type{HashType}, path::AbstractString) where HashType +function blob_hash(::Type{HashType}, path::AbstractString) where {HashType} ctx = HashType() if islink(path) datalen = length(readlink(path)) @@ -239,7 +242,7 @@ function blob_hash(::Type{HashType}, path::AbstractString) where HashType SHA.update!(ctx, Vector{UInt8}("blob $(datalen)\0")) # Next, read data in in chunks of 4KB - buff = Vector{UInt8}(undef, 4*1024) + buff = Vector{UInt8}(undef, 4 * 1024) try if islink(path) @@ -287,9 +290,9 @@ end Calculate the git tree hash of a given path. """ -function tree_hash(::Type{HashType}, root::AbstractString; debug_out::Union{IO,Nothing} = nothing, indent::Int=0) where HashType +function tree_hash(::Type{HashType}, root::AbstractString; debug_out::Union{IO, Nothing} = nothing, indent::Int = 0) where {HashType} entries = Tuple{String, Vector{UInt8}, GitMode}[] - for f in sort(readdir(root; join=true); by = f -> gitmode(f) == mode_dir ? f*"/" : f) + for f in sort(readdir(root; join = true); by = f -> gitmode(f) == mode_dir ? 
f * "/" : f) # Skip `.git` directories if basename(f) == ".git" continue @@ -306,7 +309,7 @@ function tree_hash(::Type{HashType}, root::AbstractString; debug_out::Union{IO,N if debug_out !== nothing child_stream = IOBuffer() end - hash = tree_hash(HashType, filepath; debug_out=child_stream, indent=indent+1) + hash = tree_hash(HashType, filepath; debug_out = child_stream, indent = indent + 1) if debug_out !== nothing indent_str = "| "^indent println(debug_out, "$(indent_str)+ [D] $(basename(filepath)) - $(bytes2hex(hash))") @@ -326,7 +329,7 @@ function tree_hash(::Type{HashType}, root::AbstractString; debug_out::Union{IO,N content_size = 0 for (n, h, m) in entries - content_size += ndigits(UInt32(m); base=8) + 1 + sizeof(n) + 1 + sizeof(h) + content_size += ndigits(UInt32(m); base = 8) + 1 + sizeof(n) + 1 + sizeof(h) end # Return the hash of these entries @@ -338,17 +341,24 @@ function tree_hash(::Type{HashType}, root::AbstractString; debug_out::Union{IO,N end return SHA.digest!(ctx) end -tree_hash(root::AbstractString; debug_out::Union{IO,Nothing} = nothing) = tree_hash(SHA.SHA1_CTX, root; debug_out) +tree_hash(root::AbstractString; debug_out::Union{IO, Nothing} = nothing) = tree_hash(SHA.SHA1_CTX, root; debug_out) function check_valid_HEAD(repo) - try LibGit2.head(repo) + return try + LibGit2.head(repo) catch err - Pkg.Types.pkgerror("invalid git HEAD ($(err.msg))") + url = try + geturl(repo) + catch + "(unknown url)" + end + Pkg.Types.pkgerror("invalid git HEAD in $url ($(err.msg))") end end -function git_file_stream(repo::LibGit2.GitRepo, spec::String; fakeit::Bool=false)::IO - blob = try LibGit2.GitBlob(repo, spec) +function git_file_stream(repo::LibGit2.GitRepo, spec::String; fakeit::Bool = false)::IO + blob = try + LibGit2.GitBlob(repo, spec) catch err err isa LibGit2.GitError && err.code == LibGit2.Error.ENOTFOUND || rethrow() fakeit && return devnull diff --git a/src/HistoricalStdlibs.jl b/src/HistoricalStdlibs.jl index d5b4ad5049..6867d1e832 100644 --- 
a/src/HistoricalStdlibs.jl +++ b/src/HistoricalStdlibs.jl @@ -5,13 +5,13 @@ struct StdlibInfo uuid::UUID # This can be `nothing` if it's an unregistered stdlib - version::Union{Nothing,VersionNumber} + version::Union{Nothing, VersionNumber} deps::Vector{UUID} weakdeps::Vector{UUID} end -const DictStdLibs = Dict{UUID,StdlibInfo} +const DictStdLibs = Dict{UUID, StdlibInfo} # Julia standard libraries with duplicate entries removed so as to store only the # first release in a set of releases that all contain the same set of stdlibs. diff --git a/src/MiniProgressBars.jl b/src/MiniProgressBars.jl index c0a487d6b6..5682fc04a4 100644 --- a/src/MiniProgressBars.jl +++ b/src/MiniProgressBars.jl @@ -5,12 +5,12 @@ export MiniProgressBar, start_progress, end_progress, show_progress, print_progr using Printf # Until Base.format_bytes supports sigdigits -function pkg_format_bytes(bytes; binary=true, sigdigits::Integer=3) +function pkg_format_bytes(bytes; binary = true, sigdigits::Integer = 3) units = binary ? Base._mem_units : Base._cnt_units factor = binary ? 1024 : 1000 bytes, mb = Base.prettyprint_getunits(bytes, length(units), Int64(factor)) if mb == 1 - return string(Int(bytes), " ", Base._mem_units[mb], bytes==1 ? "" : "s") + return string(Int(bytes), " ", Base._mem_units[mb], bytes == 1 ? "" : "s") else return string(Base.Ryu.writefixed(Float64(bytes), sigdigits), binary ? 
" $(units[mb])" : "$(units[mb])B") end @@ -37,10 +37,10 @@ const PROGRESS_BAR_PERCENTAGE_GRANULARITY = Ref(0.1) function start_progress(io::IO, _::MiniProgressBar) ansi_disablecursor = "\e[?25l" - print(io, ansi_disablecursor) + return print(io, ansi_disablecursor) end -function show_progress(io::IO, p::MiniProgressBar; termwidth=nothing, carriagereturn=true) +function show_progress(io::IO, p::MiniProgressBar; termwidth = nothing, carriagereturn = true) if p.max == 0 perc = 0.0 prev_perc = 0.0 @@ -64,22 +64,22 @@ function show_progress(io::IO, p::MiniProgressBar; termwidth=nothing, carriagere progress_text = if p.mode == :percentage @sprintf "%2.1f %%" perc elseif p.mode == :int - string(p.current, "/", p.max) + string(p.current, "/", p.max) elseif p.mode == :data - lpad(string(pkg_format_bytes(p.current; sigdigits=1), "/", pkg_format_bytes(p.max; sigdigits=1)), 20) + lpad(string(pkg_format_bytes(p.current; sigdigits = 1), "/", pkg_format_bytes(p.max; sigdigits = 1)), 20) else error("Unknown mode $(p.mode)") end termwidth = @something termwidth displaysize(io)[2] - max_progress_width = max(0, min(termwidth - textwidth(p.header) - textwidth(progress_text) - 10 , p.width)) + max_progress_width = max(0, min(termwidth - textwidth(p.header) - textwidth(progress_text) - 10, p.width)) n_filled = floor(Int, max_progress_width * perc / 100) partial_filled = (max_progress_width * perc / 100) - n_filled n_left = max_progress_width - n_filled headers = split(p.header) - to_print = sprint(; context=io) do io + to_print = sprint(; context = io) do io print(io, " "^p.indent) if p.main - printstyled(io, headers[1], " "; color=:green, bold=true) + printstyled(io, headers[1], " "; color = :green, bold = true) length(headers) > 1 && printstyled(io, join(headers[2:end], ' '), " ") else print(io, p.header, " ") @@ -88,31 +88,31 @@ function show_progress(io::IO, p::MiniProgressBar; termwidth=nothing, carriagere print(io, p.status) else hascolor = get(io, :color, false)::Bool - 
printstyled(io, "━"^n_filled; color=p.color) + printstyled(io, "━"^n_filled; color = p.color) if n_left > 0 if hascolor if partial_filled > 0.5 - printstyled(io, "╸"; color=p.color) # More filled, use ╸ + printstyled(io, "╸"; color = p.color) # More filled, use ╸ else - printstyled(io, "╺"; color=:light_black) # Less filled, use ╺ + printstyled(io, "╺"; color = :light_black) # Less filled, use ╺ end end c = hascolor ? "━" : " " - printstyled(io, c^(n_left-1+!hascolor); color=:light_black) + printstyled(io, c^(n_left - 1 + !hascolor); color = :light_black) end - printstyled(io, " "; color=:light_black) + printstyled(io, " "; color = :light_black) print(io, progress_text) end carriagereturn && print(io, "\r") end # Print everything in one call - print(io, to_print) + return print(io, to_print) end function end_progress(io, p::MiniProgressBar) ansi_enablecursor = "\e[?25h" ansi_clearline = "\e[2K" - print(io, ansi_enablecursor * ansi_clearline) + return print(io, ansi_enablecursor * ansi_clearline) end # Useful when writing a progress bar in the bottom @@ -130,7 +130,7 @@ function print_progress_bottom(io::IO) ansi_clearline = "\e[2K" ansi_movecol1 = "\e[1G" ansi_moveup(n::Int) = string("\e[", n, "A") - print(io, "\e[S" * ansi_moveup(1) * ansi_clearline * ansi_movecol1) + return print(io, "\e[S" * ansi_moveup(1) * ansi_clearline * ansi_movecol1) end end diff --git a/src/Operations.jl b/src/Operations.jl index 6d6bc94558..a16355d4ef 100644 --- a/src/Operations.jl +++ b/src/Operations.jl @@ -2,6 +2,7 @@ module Operations +using FileWatching: FileWatching using UUIDs using Random: randstring import LibGit2, Dates, TOML @@ -9,19 +10,47 @@ import LibGit2, Dates, TOML using ..Types, ..Resolve, ..PlatformEngines, ..GitTools, ..MiniProgressBars import ..depots, ..depots1, ..devdir, ..set_readonly, ..Types.PackageEntry import ..Artifacts: ensure_artifact_installed, artifact_names, extract_all_hashes, - artifact_exists, select_downloadable_artifacts + artifact_exists, 
select_downloadable_artifacts, mv_temp_dir_retries using Base.BinaryPlatforms import ...Pkg import ...Pkg: pkg_server, Registry, pathrepr, can_fancyprint, printpkgstyle, stderr_f, OFFLINE_MODE import ...Pkg: UPDATED_REGISTRY_THIS_SESSION, RESPECT_SYSIMAGE_VERSIONS, should_autoprecompile -import ...Pkg: usable_io +import ...Pkg: usable_io, discover_repo ######### # Utils # ######### +# Helper functions for yanked package checking +function is_pkgversion_yanked(uuid::UUID, version::VersionNumber, registries::Vector{Registry.RegistryInstance} = Registry.reachable_registries()) + for reg in registries + reg_pkg = get(reg, uuid, nothing) + if reg_pkg !== nothing + info = Registry.registry_info(reg_pkg) + if haskey(info.version_info, version) && Registry.isyanked(info, version) + return true + end + end + end + return false +end + +function is_pkgversion_yanked(pkg::PackageSpec, registries::Vector{Registry.RegistryInstance} = Registry.reachable_registries()) + if pkg.uuid === nothing || pkg.version === nothing || !(pkg.version isa VersionNumber) + return false + end + return is_pkgversion_yanked(pkg.uuid, pkg.version, registries) +end + +function is_pkgversion_yanked(entry::PackageEntry, registries::Vector{Registry.RegistryInstance} = Registry.reachable_registries()) + if entry.version === nothing || !(entry.version isa VersionNumber) + return false + end + return is_pkgversion_yanked(entry.uuid, entry.version, registries) +end + function default_preserve() - if Base.get_bool_env("JULIA_PKG_PRESERVE_TIERED_INSTALLED", false) + return if Base.get_bool_env("JULIA_PKG_PRESERVE_TIERED_INSTALLED", false) PRESERVE_TIERED_INSTALLED else PRESERVE_TIERED @@ -42,14 +71,14 @@ end # more accurate name is `should_be_tracking_registered_version` # the only way to know for sure is to key into the registries -tracking_registered_version(pkg::Union{PackageSpec, PackageEntry}, julia_version=VERSION) = +tracking_registered_version(pkg::Union{PackageSpec, PackageEntry}, julia_version = 
VERSION) = !is_stdlib(pkg.uuid, julia_version) && pkg.path === nothing && pkg.repo.source === nothing function source_path(manifest_file::String, pkg::Union{PackageSpec, PackageEntry}, julia_version = VERSION) - pkg.tree_hash !== nothing ? find_installed(pkg.name, pkg.uuid, pkg.tree_hash) : - pkg.path !== nothing ? joinpath(dirname(manifest_file), pkg.path) : - is_or_was_stdlib(pkg.uuid, julia_version) ? Types.stdlib_path(pkg.name) : - nothing + return pkg.tree_hash !== nothing ? find_installed(pkg.name, pkg.uuid, pkg.tree_hash) : + pkg.path !== nothing ? joinpath(dirname(manifest_file), pkg.path) : + is_or_was_stdlib(pkg.uuid, julia_version) ? Types.stdlib_path(pkg.name) : + nothing end #TODO rename @@ -67,8 +96,10 @@ function load_version(version, fixed, preserve::PreserveLevel) end end -function load_direct_deps(env::EnvCache, pkgs::Vector{PackageSpec}=PackageSpec[]; - preserve::PreserveLevel=PRESERVE_DIRECT) +function load_direct_deps( + env::EnvCache, pkgs::Vector{PackageSpec} = PackageSpec[]; + preserve::PreserveLevel = PRESERVE_DIRECT + ) pkgs_direct = load_project_deps(env.project, env.project_file, env.manifest, env.manifest_file, pkgs; preserve) for (path, project) in env.workspace @@ -103,12 +134,14 @@ function load_direct_deps(env::EnvCache, pkgs::Vector{PackageSpec}=PackageSpec[] return vcat(pkgs, pkgs_direct) end -function load_project_deps(project::Project, project_file::String, manifest::Manifest, manifest_file::String, pkgs::Vector{PackageSpec}=PackageSpec[]; - preserve::PreserveLevel=PRESERVE_DIRECT) +function load_project_deps( + project::Project, project_file::String, manifest::Manifest, manifest_file::String, pkgs::Vector{PackageSpec} = PackageSpec[]; + preserve::PreserveLevel = PRESERVE_DIRECT + ) pkgs_direct = PackageSpec[] if project.name !== nothing && project.uuid !== nothing && findfirst(pkg -> pkg.uuid == project.uuid, pkgs) === nothing path = Types.relative_project_path(manifest_file, dirname(project_file)) - pkg = 
PackageSpec(;name=project.name, uuid=project.uuid, version=project.version, path) + pkg = PackageSpec(; name = project.name, uuid = project.uuid, version = project.version, path) push!(pkgs_direct, pkg) end @@ -116,43 +149,51 @@ function load_project_deps(project::Project, project_file::String, manifest::Man findfirst(pkg -> pkg.uuid == uuid, pkgs) === nothing || continue # do not duplicate packages path, repo = get_path_repo(project, name) entry = manifest_info(manifest, uuid) - push!(pkgs_direct, entry === nothing ? - PackageSpec(;uuid, name, path, repo) : - PackageSpec(; - uuid = uuid, - name = name, - path = path === nothing ? entry.path : path, - repo = repo == GitRepo() ? entry.repo : repo, - pinned = entry.pinned, - tree_hash = entry.tree_hash, # TODO should tree_hash be changed too? - version = load_version(entry.version, isfixed(entry), preserve), - )) + push!( + pkgs_direct, entry === nothing ? + PackageSpec(; uuid, name, path, repo) : + PackageSpec(; + uuid = uuid, + name = name, + path = path === nothing ? entry.path : path, + repo = repo == GitRepo() ? entry.repo : repo, + pinned = entry.pinned, + tree_hash = entry.tree_hash, # TODO should tree_hash be changed too? + version = load_version(entry.version, isfixed(entry), preserve), + ) + ) end return pkgs_direct end -function load_manifest_deps(manifest::Manifest, pkgs::Vector{PackageSpec}=PackageSpec[]; - preserve::PreserveLevel=PRESERVE_ALL) +function load_manifest_deps( + manifest::Manifest, pkgs::Vector{PackageSpec} = PackageSpec[]; + preserve::PreserveLevel = PRESERVE_ALL + ) pkgs = copy(pkgs) for (uuid, entry) in manifest findfirst(pkg -> pkg.uuid == uuid, pkgs) === nothing || continue # do not duplicate packages - push!(pkgs, PackageSpec( - uuid = uuid, - name = entry.name, - path = entry.path, - pinned = entry.pinned, - repo = entry.repo, - tree_hash = entry.tree_hash, # TODO should tree_hash be changed too? 
- version = load_version(entry.version, isfixed(entry), preserve), - )) + push!( + pkgs, PackageSpec( + uuid = uuid, + name = entry.name, + path = entry.path, + pinned = entry.pinned, + repo = entry.repo, + tree_hash = entry.tree_hash, # TODO should tree_hash be changed too? + version = load_version(entry.version, isfixed(entry), preserve), + ) + ) end return pkgs end -function load_all_deps(env::EnvCache, pkgs::Vector{PackageSpec}=PackageSpec[]; - preserve::PreserveLevel=PRESERVE_ALL) - pkgs = load_manifest_deps(env.manifest, pkgs; preserve=preserve) +function load_all_deps( + env::EnvCache, pkgs::Vector{PackageSpec} = PackageSpec[]; + preserve::PreserveLevel = PRESERVE_ALL + ) + pkgs = load_manifest_deps(env.manifest, pkgs; preserve = preserve) # Sources takes presedence over the manifest... for pkg in pkgs path, repo = get_path_repo(env.project, pkg.name) @@ -166,7 +207,7 @@ function load_all_deps(env::EnvCache, pkgs::Vector{PackageSpec}=PackageSpec[]; pkg.repo.rev = repo.rev end end - return load_direct_deps(env, pkgs; preserve=preserve) + return load_direct_deps(env, pkgs; preserve = preserve) end function load_all_deps_loadable(env::EnvCache) @@ -178,7 +219,7 @@ function load_all_deps_loadable(env::EnvCache) end -function is_instantiated(env::EnvCache, workspace::Bool=false; platform = HostPlatform())::Bool +function is_instantiated(env::EnvCache, workspace::Bool = false; platform = HostPlatform())::Bool # Load everything if workspace pkgs = Operations.load_all_deps(env) @@ -191,7 +232,7 @@ function is_instantiated(env::EnvCache, workspace::Bool=false; platform = HostPl # so only add it if it isn't there idx = findfirst(x -> x.uuid == env.pkg.uuid, pkgs) if idx === nothing - push!(pkgs, Types.PackageSpec(name=env.pkg.name, uuid=env.pkg.uuid, version=env.pkg.version, path=dirname(env.project_file))) + push!(pkgs, Types.PackageSpec(name = env.pkg.name, uuid = env.pkg.uuid, version = env.pkg.version, path = dirname(env.project_file))) end else # Make sure 
artifacts for project exist even if it is not a package @@ -206,8 +247,10 @@ function update_manifest!(env::EnvCache, pkgs::Vector{PackageSpec}, deps_map, ju empty!(manifest) for pkg in pkgs - entry = PackageEntry(;name = pkg.name, version = pkg.version, pinned = pkg.pinned, - tree_hash = pkg.tree_hash, path = pkg.path, repo = pkg.repo, uuid=pkg.uuid) + entry = PackageEntry(; + name = pkg.name, version = pkg.version, pinned = pkg.pinned, + tree_hash = pkg.tree_hash, path = pkg.path, repo = pkg.repo, uuid = pkg.uuid + ) if is_stdlib(pkg.uuid, julia_version) # Only set stdlib versions for versioned (external) stdlibs entry.version = stdlib_version(pkg.uuid, julia_version) @@ -216,30 +259,51 @@ function update_manifest!(env::EnvCache, pkgs::Vector{PackageSpec}, deps_map, ju env.manifest[pkg.uuid] = entry end prune_manifest(env) - record_project_hash(env) + return record_project_hash(env) end # This has to be done after the packages have been downloaded # since we need access to the Project file to read the information # about extensions -function fixups_from_projectfile!(env::EnvCache) +function fixups_from_projectfile!(ctx::Context) + env = ctx.env for pkg in values(env.manifest) - # isfile_casesenstive within locate_project_file used to error on Windows if given a - # relative path so abspath it to be extra safe https://github.com/JuliaLang/julia/pull/55220 - project_file = Base.locate_project_file(abspath(source_path(env.manifest_file, pkg))) - if project_file isa String && isfile(project_file) - p = Types.read_project(project_file) - pkg.weakdeps = p.weakdeps - pkg.exts = p.exts - pkg.entryfile = p.entryfile - for (name, _) in p.weakdeps - if !haskey(p.deps, name) + if ctx.julia_version !== VERSION && is_stdlib(pkg.uuid, ctx.julia_version) + # Special handling for non-current julia_version resolving given the source for historical stdlibs + # isn't available at this stage as Pkg thinks it should not be needed, so rely on STDLIBS_BY_VERSION + stdlibs = 
Types.get_last_stdlibs(ctx.julia_version) + p = stdlibs[pkg.uuid] + pkg.weakdeps = Dict{String, Base.UUID}(stdlibs[uuid].name => uuid for uuid in p.weakdeps) + # pkg.exts = p.exts # TODO: STDLIBS_BY_VERSION doesn't record this + # pkg.entryfile = p.entryfile # TODO: STDLIBS_BY_VERSION doesn't record this + for (name, _) in pkg.weakdeps + if !(name in p.deps) delete!(pkg.deps, name) end end + else + # normal mode based on project files. + # isfile_casesenstive within locate_project_file used to error on Windows if given a + # relative path so abspath it to be extra safe https://github.com/JuliaLang/julia/pull/55220 + sourcepath = source_path(env.manifest_file, pkg) + if sourcepath === nothing + pkgerror("could not find source path for package $(pkg.name) based on manifest $(env.manifest_file)") + end + project_file = Base.locate_project_file(abspath(sourcepath)) + if project_file isa String && isfile(project_file) + p = Types.read_project(project_file) + pkg.weakdeps = p.weakdeps + pkg.exts = p.exts + pkg.entryfile = p.entryfile + for (name, _) in p.weakdeps + if !haskey(p.deps, name) + delete!(pkg.deps, name) + end + end + end end end - prune_manifest(env) + return prune_manifest(env) end #################### @@ -301,8 +365,8 @@ end function collect_project(pkg::Union{PackageSpec, Nothing}, path::String) deps = PackageSpec[] weakdeps = Set{UUID}() - project_file = projectfile_path(path; strict=true) - project = project_file === nothing ? Project() : read_project(project_file) + project_file = projectfile_path(path; strict = true) + project = project_file === nothing ? 
Project() : read_project(project_file) julia_compat = get_compat(project, "julia") if !isnothing(julia_compat) && !(VERSION in julia_compat) pkgerror("julia version requirement from Project.toml's compat section not satisfied for package at `$path`") @@ -310,7 +374,7 @@ function collect_project(pkg::Union{PackageSpec, Nothing}, path::String) for (name, uuid) in project.deps path, repo = get_path_repo(project, name) vspec = get_compat(project, name) - push!(deps, PackageSpec(name=name, uuid=uuid, version=vspec, path=path, repo=repo)) + push!(deps, PackageSpec(name = name, uuid = uuid, version = vspec, path = path, repo = repo)) end for (name, uuid) in project.weakdeps vspec = get_compat(project, name) @@ -329,7 +393,7 @@ function collect_project(pkg::Union{PackageSpec, Nothing}, path::String) end is_tracking_path(pkg) = pkg.path !== nothing -is_tracking_repo(pkg) = pkg.repo.source !== nothing +is_tracking_repo(pkg) = (pkg.repo.source !== nothing || pkg.repo.rev !== nothing) is_tracking_registry(pkg) = !is_tracking_path(pkg) && !is_tracking_repo(pkg) isfixed(pkg) = !is_tracking_registry(pkg) || pkg.pinned @@ -337,19 +401,28 @@ function collect_developed!(env::EnvCache, pkg::PackageSpec, developed::Vector{P source = project_rel_path(env, source_path(env.manifest_file, pkg)) source_env = EnvCache(projectfile_path(source)) pkgs = load_project_deps(source_env.project, source_env.project_file, source_env.manifest, source_env.manifest_file) - for pkg in filter(is_tracking_path, pkgs) + for pkg in pkgs if any(x -> x.uuid == pkg.uuid, developed) continue end - # normalize path - # TODO: If path is collected from project, it is relative to the project file - # otherwise relative to manifest file.... 
- pkg.path = Types.relative_project_path(env.manifest_file, - project_rel_path(source_env, - source_path(source_env.manifest_file, pkg))) - push!(developed, pkg) - collect_developed!(env, pkg, developed) + if is_tracking_path(pkg) + # normalize path + # TODO: If path is collected from project, it is relative to the project file + # otherwise relative to manifest file.... + pkg.path = Types.relative_project_path( + env.manifest_file, + project_rel_path( + source_env, + source_path(source_env.manifest_file, pkg) + ) + ) + push!(developed, pkg) + collect_developed!(env, pkg, developed) + elseif is_tracking_repo(pkg) + push!(developed, pkg) + end end + return end function collect_developed(env::EnvCache, pkgs::Vector{PackageSpec}) @@ -361,8 +434,8 @@ function collect_developed(env::EnvCache, pkgs::Vector{PackageSpec}) end function collect_fixed!(env::EnvCache, pkgs::Vector{PackageSpec}, names::Dict{UUID, String}) - deps_map = Dict{UUID,Vector{PackageSpec}}() - weak_map = Dict{UUID,Set{UUID}}() + deps_map = Dict{UUID, Vector{PackageSpec}}() + weak_map = Dict{UUID, Set{UUID}}() uuid = Types.project_uuid(env) deps, weakdeps = collect_project(env.pkg, dirname(env.project_file)) @@ -372,7 +445,7 @@ function collect_fixed!(env::EnvCache, pkgs::Vector{PackageSpec}, names::Dict{UU for (path, project) in env.workspace uuid = Types.project_uuid(project, path) - pkg = project.name === nothing ? nothing : PackageSpec(name=project.name, uuid=uuid) + pkg = project.name === nothing ? 
nothing : PackageSpec(name = project.name, uuid = uuid) deps, weakdeps = collect_project(pkg, path) deps_map[Types.project_uuid(env)] = deps weak_map[Types.project_uuid(env)] = weakdeps @@ -386,19 +459,37 @@ function collect_fixed!(env::EnvCache, pkgs::Vector{PackageSpec}, names::Dict{UU if (path === nothing || !isdir(path)) && (pkg.repo.rev !== nothing || pkg.repo.source !== nothing) # ensure revved package is installed # pkg.tree_hash is set in here - Types.handle_repo_add!(Types.Context(env=env), pkg) + Types.handle_repo_add!(Types.Context(env = env), pkg) # Recompute path path = project_rel_path(env, source_path(env.manifest_file, pkg)) end if !isdir(path) - pkgerror("expected package $(err_rep(pkg)) to exist at path `$path`") + # Find which packages depend on this missing package for better error reporting + dependents = String[] + for (dep_uuid, dep_entry) in env.manifest.deps + if pkg.uuid in values(dep_entry.deps) || pkg.uuid in values(dep_entry.weakdeps) + push!(dependents, dep_entry.name === nothing ? 
"unknown package [$dep_uuid]" : dep_entry.name) + end + end + + error_msg = "expected package $(err_rep(pkg)) to exist at path `$path`" + error_msg *= "\n\nThis package is referenced in the manifest file: $(env.manifest_file)" + + if !isempty(dependents) + if length(dependents) == 1 + error_msg *= "\nIt is required by: $(dependents[1])" + else + error_msg *= "\nIt is required by:\n$(join([" - $dep" for dep in dependents], "\n"))" + end + end + pkgerror(error_msg) end deps, weakdeps = collect_project(pkg, path) deps_map[pkg.uuid] = deps weak_map[pkg.uuid] = weakdeps end - fixed = Dict{UUID,Resolve.Fixed}() + fixed = Dict{UUID, Resolve.Fixed}() # Collect the dependencies for the fixed packages for (uuid, deps) in deps_map q = Dict{UUID, VersionSpec}() @@ -438,8 +529,10 @@ end # sets version to a VersionNumber # adds any other packages which may be in the dependency graph # all versioned packages should have a `tree_hash` -function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}, julia_version, - installed_only::Bool) +function resolve_versions!( + env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}, julia_version, + installed_only::Bool + ) installed_only = installed_only || OFFLINE_MODE[] # compatibility if julia_version !== nothing @@ -447,7 +540,7 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn env.manifest.julia_version = dropbuild(VERSION) v = intersect(julia_version, get_compat_workspace(env, "julia")) if isempty(v) - @warn "julia version requirement for project not satisfied" _module=nothing _file=nothing + @warn "julia version requirement for project not satisfied" _module = nothing _file = nothing end end @@ -480,8 +573,11 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn compat = get_compat_workspace(env, pkg.name) v = intersect(pkg.version, compat) if isempty(v) - throw(Resolve.ResolverError( - 
"empty intersection between $(pkg.name)@$(pkg.version) and project compatibility $(compat)")) + throw( + Resolve.ResolverError( + "empty intersection between $(pkg.name)@$(pkg.version) and project compatibility $(compat)" + ) + ) end # Work around not clobbering 0.x.y+ for checked out old type of packages if !(pkg.version isa VersionNumber) @@ -507,6 +603,13 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn old_v = get(jll_fix, uuid, nothing) # We only fixup a JLL if the old major/minor/patch matches the new major/minor/patch if old_v !== nothing && Base.thispatch(old_v) == Base.thispatch(vers_fix[uuid]) + new_v = vers_fix[uuid] + if old_v != new_v && haskey(compat_map[uuid], old_v) + compat_map[uuid][old_v] = compat_map[uuid][new_v] + # Note that we don't delete!(compat_map[uuid], old_v) because we want to keep the compat info around + # in case there's JLL version confusion between the sysimage pkgorigins version and manifest + # but that issue hasn't been fully specified, so keep it to be cautious + end vers_fix[uuid] = old_v end end @@ -521,7 +624,7 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn pkg.version = vers[pkg.uuid] else name = is_stdlib(uuid) ? stdlib_infos()[uuid].name : registered_name(registries, uuid) - push!(pkgs, PackageSpec(;name=name, uuid=uuid, version=ver)) + push!(pkgs, PackageSpec(; name = name, uuid = uuid, version = ver)) end end final_deps_map = Dict{UUID, Dict{String, UUID}}() @@ -536,8 +639,12 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn deps_fixed else d = Dict{String, UUID}() + if !haskey(compat_map[pkg.uuid], pkg.version) + available_versions = sort!(collect(keys(compat_map[pkg.uuid]))) + pkgerror("version $(pkg.version) of package $(pkg.name) is not available. 
Available versions: $(join(available_versions, ", "))") + end for (uuid, _) in compat_map[pkg.uuid][pkg.version] - d[names[uuid]] = uuid + d[names[uuid]] = uuid end d end @@ -549,17 +656,21 @@ function resolve_versions!(env::EnvCache, registries::Vector{Registry.RegistryIn return final_deps_map end -get_or_make!(d::Dict{K,V}, k::K) where {K,V} = get!(d, k) do; V() end +get_or_make!(d::Dict{K, V}, k::K) where {K, V} = get!(d, k) do; + V() +end const JULIA_UUID = UUID("1222c4b2-2114-5bfd-aeef-88e4692bbb3e") const PKGORIGIN_HAVE_VERSION = :version in fieldnames(Base.PkgOrigin) -function deps_graph(env::EnvCache, registries::Vector{Registry.RegistryInstance}, uuid_to_name::Dict{UUID,String}, - reqs::Resolve.Requires, fixed::Dict{UUID,Resolve.Fixed}, julia_version, - installed_only::Bool) +function deps_graph( + env::EnvCache, registries::Vector{Registry.RegistryInstance}, uuid_to_name::Dict{UUID, String}, + reqs::Resolve.Requires, fixed::Dict{UUID, Resolve.Fixed}, julia_version, + installed_only::Bool + ) uuids = Set{UUID}() union!(uuids, keys(reqs)) union!(uuids, keys(fixed)) - for fixed_uuids in map(fx->keys(fx.requires), values(fixed)) + for fixed_uuids in map(fx -> keys(fx.requires), values(fixed)) union!(uuids, fixed_uuids) end @@ -567,11 +678,11 @@ function deps_graph(env::EnvCache, registries::Vector{Registry.RegistryInstance} seen = Set{UUID}() # pkg -> version -> (dependency => compat): - all_compat = Dict{UUID,Dict{VersionNumber,Dict{UUID,VersionSpec}}}() - weak_compat = Dict{UUID,Dict{VersionNumber,Set{UUID}}}() + all_compat = Dict{UUID, Dict{VersionNumber, Dict{UUID, VersionSpec}}}() + weak_compat = Dict{UUID, Dict{VersionNumber, Set{UUID}}}() for (fp, fx) in fixed - all_compat[fp] = Dict(fx.version => Dict{UUID,VersionSpec}()) + all_compat[fp] = Dict(fx.version => Dict{UUID, VersionSpec}()) end while true @@ -619,7 +730,7 @@ function deps_graph(env::EnvCache, registries::Vector{Registry.RegistryInstance} # TODO, pull this into a function 
Registry.isyanked(info, v) && continue if installed_only - pkg_spec = PackageSpec(name=pkg.name, uuid=pkg.uuid, version=v, tree_hash=Registry.treehash(info, v)) + pkg_spec = PackageSpec(name = pkg.name, uuid = pkg.uuid, version = v, tree_hash = Registry.treehash(info, v)) is_package_downloaded(env.manifest_file, pkg_spec) || continue end @@ -639,13 +750,14 @@ function deps_graph(env::EnvCache, registries::Vector{Registry.RegistryInstance} merge!(dv, compat_info) union!(uuids, keys(compat_info)) end + return end add_compat!(all_compat_u, Registry.compat_info(info)) weak_compat_info = Registry.weak_compat_info(info) if weak_compat_info !== nothing add_compat!(all_compat_u, weak_compat_info) # Version to Set - for (v, compat_info) in weak_compat_info + for (v, compat_info) in weak_compat_info weak_compat_u[v] = keys(compat_info) end end @@ -667,7 +779,7 @@ function deps_graph(env::EnvCache, registries::Vector{Registry.RegistryInstance} end return Resolve.Graph(all_compat, weak_compat, uuid_to_name, reqs, fixed, false, julia_version), - all_compat + all_compat end ######################## @@ -683,11 +795,16 @@ end # Returns if archive successfully installed function install_archive( - urls::Vector{Pair{String,Bool}}, - hash::SHA1, - version_path::String; - io::IO=stderr_f() -)::Bool + urls::Vector{Pair{String, Bool}}, + hash::SHA1, + version_path::String; + io::IO = stderr_f() + )::Bool + # Because we use `mv_temp_dir_retries` which uses `rename` not `mv` it can fail if the temp + # files are on a different fs. So use a temp dir in the same depot dir as some systems might + # be serving different parts of the depot on different filesystems via links i.e. pkgeval does this. 
+ depot_temp = mkpath(joinpath(dirname(dirname(version_path)), "temp")) # .julia/packages/temp + tmp_objects = String[] url_success = false for (url, top) in urls @@ -695,19 +812,21 @@ function install_archive( push!(tmp_objects, path) # for cleanup url_success = true try - PlatformEngines.download(url, path; verbose=false, io=io) + PlatformEngines.download(url, path; verbose = false, io = io) catch e e isa InterruptException && rethrow() url_success = false end url_success || continue - dir = joinpath(tempdir(), randstring(12)) + # the temp dir should be in the same depot because the `rename` operation in `mv_temp_dir_retries` + # is possible only if the source and destination are on the same filesystem + dir = tempname(depot_temp) * randstring(6) push!(tmp_objects, dir) # for cleanup # Might fail to extract an archive (https://github.com/JuliaPackaging/PkgServer.jl/issues/126) try - unpack(path, dir; verbose=false) + unpack(path, dir; verbose = false) catch e - e isa InterruptException && rethrow() + e isa ProcessFailedException || rethrow() @warn "failed to extract archive downloaded from $(url)" url_success = false end @@ -722,34 +841,43 @@ function install_archive( unpacked = joinpath(dir, dirs[1]) end # Assert that the tarball unpacked to the tree sha we wanted - # TODO: Enable on Windows when tree_hash handles - # executable bits correctly, see JuliaLang/julia #33212. 
- if !Sys.iswindows() - if SHA1(GitTools.tree_hash(unpacked)) != hash - @warn "tarball content does not match git-tree-sha1" - url_success = false - end - url_success || continue + if SHA1(GitTools.tree_hash(unpacked)) != hash + @warn "tarball content does not match git-tree-sha1" + url_success = false end + url_success || continue + # Move content to version path - !isdir(version_path) && mkpath(version_path) - mv(unpacked, version_path; force=true) + !isdir(dirname(version_path)) && mkpath(dirname(version_path)) + mv_temp_dir_retries(unpacked, version_path; set_permissions = false) + break # successful install end # Clean up and exit - foreach(x -> Base.rm(x; force=true, recursive=true), tmp_objects) + foreach(x -> Base.rm(x; force = true, recursive = true), tmp_objects) return url_success end const refspecs = ["+refs/*:refs/remotes/cache/*"] function install_git( - io::IO, - uuid::UUID, - name::String, - hash::SHA1, - urls::Set{String}, - version_path::String -)::Nothing + io::IO, + uuid::UUID, + name::String, + hash::SHA1, + urls::Set{String}, + version_path::String + )::Nothing + if isempty(urls) + pkgerror( + "Package $name [$uuid] has no repository URL available. This could happen if:\n" * + " - The package is not registered in any configured registry\n" * + " - The package exists in a registry but lacks repository information\n" * + " - Registry files are corrupted or incomplete\n" * + " - Network issues prevented registry updates\n" * + "Please check that the package name is correct and that your registries are up to date." 
+ ) + end + repo = nothing tree = nothing # TODO: Consolidate this with some of the repo handling in Types.jl @@ -757,17 +885,21 @@ function install_git( clones_dir = joinpath(depots1(), "clones") ispath(clones_dir) || mkpath(clones_dir) repo_path = joinpath(clones_dir, string(uuid)) - repo = GitTools.ensure_clone(io, repo_path, first(urls); isbare=true, - header = "[$uuid] $name from $(first(urls))") + first_url = first(urls) + repo = GitTools.ensure_clone( + io, repo_path, first_url; isbare = true, + header = "[$uuid] $name from $first_url" + ) git_hash = LibGit2.GitHash(hash.bytes) for url in urls - try LibGit2.with(LibGit2.GitObject, repo, git_hash) do g + try + LibGit2.with(LibGit2.GitObject, repo, git_hash) do g end break # object was found, we can stop catch err err isa LibGit2.GitError && err.code == LibGit2.Error.ENOTFOUND || rethrow() end - GitTools.fetch(io, repo, url, refspecs=refspecs) + GitTools.fetch(io, repo, url, refspecs = refspecs) end tree = try LibGit2.GitObject(repo, git_hash) @@ -786,9 +918,9 @@ function install_git( end end -function collect_artifacts(pkg_root::String; platform::AbstractPlatform=HostPlatform()) +function collect_artifacts(pkg_root::String; platform::AbstractPlatform = HostPlatform(), include_lazy::Bool = false) # Check to see if this package has an (Julia)Artifacts.toml - artifacts_tomls = Tuple{String,Base.TOML.TOMLDict}[] + artifacts_tomls = Tuple{String, Base.TOML.TOMLDict}[] for f in artifact_names artifacts_toml = joinpath(pkg_root, f) if isfile(artifacts_toml) @@ -803,14 +935,14 @@ function collect_artifacts(pkg_root::String; platform::AbstractPlatform=HostPlat meta_toml = String(read(select_cmd)) res = TOML.tryparse(meta_toml) if res isa TOML.ParserError - errstr = sprint(showerror, res; context=stderr) + errstr = sprint(showerror, res; context = stderr) pkgerror("failed to parse TOML output from running $(repr(selector_path)), got: \n$errstr") else push!(artifacts_tomls, (artifacts_toml, TOML.parse(meta_toml))) end 
else # Otherwise, use the standard selector from `Artifacts` - artifacts = select_downloadable_artifacts(artifacts_toml; platform) + artifacts = select_downloadable_artifacts(artifacts_toml; platform, include_lazy) push!(artifacts_tomls, (artifacts_toml, artifacts)) end break @@ -827,28 +959,40 @@ mutable struct DownloadState const bar::MiniProgressBar end -function download_artifacts(ctx::Context; - platform::AbstractPlatform=HostPlatform(), - julia_version = VERSION, - verbose::Bool=false) +function download_artifacts( + ctx::Context; + platform::AbstractPlatform = HostPlatform(), + julia_version = VERSION, + verbose::Bool = false, + io::IO = stderr_f(), + include_lazy::Bool = false + ) env = ctx.env io = ctx.io fancyprint = can_fancyprint(io) - pkg_roots = String[] + pkg_info = Tuple{String, Union{Base.UUID, Nothing}}[] for (uuid, pkg) in env.manifest pkg = manifest_info(env.manifest, uuid) pkg_root = source_path(env.manifest_file, pkg, julia_version) - pkg_root === nothing || push!(pkg_roots, pkg_root) + pkg_root === nothing || push!(pkg_info, (pkg_root, uuid)) end - push!(pkg_roots, dirname(env.project_file)) + push!(pkg_info, (dirname(env.project_file), env.pkg !== nothing ? 
env.pkg.uuid : nothing)) download_jobs = Dict{SHA1, Function}() + # Check what registries the current pkg server tracks + # Disable if precompiling to not access internet + server_registry_info = if Base.JLOptions().incremental == 0 + Registry.pkg_server_registry_info() + else + nothing + end + print_lock = Base.ReentrantLock() # for non-fancyprint printing download_states = Dict{SHA1, DownloadState}() errors = Channel{Any}(Inf) - is_done = false + is_done = Ref{Bool}(false) ansi_moveup(n::Int) = string("\e[", n, "A") ansi_movecol1 = "\e[1G" ansi_cleartoend = "\e[0J" @@ -856,49 +1000,59 @@ function download_artifacts(ctx::Context; ansi_enablecursor = "\e[?25h" ansi_disablecursor = "\e[?25l" - all_collected_artifacts = reduce(vcat, map(pkg_root -> collect_artifacts(pkg_root; platform), pkg_roots)) - used_artifact_tomls = Set{String}(map(first, all_collected_artifacts)) - longest_name_length = maximum(all_collected_artifacts; init=0) do (artifacts_toml, artifacts) - maximum(textwidth, keys(artifacts); init=0) + all_collected_artifacts = reduce( + vcat, map( + ((pkg_root, pkg_uuid),) -> + map(ca -> (ca[1], ca[2], pkg_uuid), collect_artifacts(pkg_root; platform, include_lazy)), pkg_info + ) + ) + used_artifact_tomls = Set{String}(map(ca -> ca[1], all_collected_artifacts)) + longest_name_length = maximum(all_collected_artifacts; init = 0) do (artifacts_toml, artifacts, pkg_uuid) + maximum(textwidth, keys(artifacts); init = 0) end - for (artifacts_toml, artifacts) in all_collected_artifacts + for (artifacts_toml, artifacts, pkg_uuid) in all_collected_artifacts # For each Artifacts.toml, install each artifact we've collected from it for name in keys(artifacts) local rname = rpad(name, longest_name_length) - local hash = SHA1(artifacts[name]["git-tree-sha1"]) - local bar = MiniProgressBar(;header=rname, main=false, indent=2, color = Base.info_color(), mode=:data, always_reprint=true) + local hash = SHA1(artifacts[name]["git-tree-sha1"]::String) + local bar = 
MiniProgressBar(; header = rname, main = false, indent = 2, color = Base.info_color()::Symbol, mode = :data, always_reprint = true) local dstate = DownloadState(:ready, "", time_ns(), Base.ReentrantLock(), bar) - function progress(total, current; status="") + function progress(total, current; status = "") local t = time_ns() if isempty(status) dstate.bar.max = total dstate.bar.current = current end - lock(dstate.status_lock) do + return lock(dstate.status_lock) do dstate.status = status dstate.status_update_time = t end end + # Check if the current package is eligible for PkgServer artifact downloads + local pkg_server_eligible = pkg_uuid !== nothing && Registry.is_pkg_in_pkgserver_registry(pkg_uuid, server_registry_info, ctx.registries) + # returns a string if exists, or function that downloads the artifact if not - local ret = ensure_artifact_installed(name, artifacts[name], artifacts_toml; - verbose, quiet_download=!(usable_io(io)), io, progress) + local ret = ensure_artifact_installed( + name, artifacts[name], artifacts_toml; + pkg_server_eligible, verbose, quiet_download = !(usable_io(io)), io, progress + ) if ret isa Function download_states[hash] = dstate download_jobs[hash] = () -> begin - try - dstate.state = :running - ret() - if !fancyprint - @lock print_lock printpkgstyle(io, :Installed, "artifact $rname $(MiniProgressBars.pkg_format_bytes(dstate.bar.max; sigdigits=1))") - end - catch - dstate.state = :failed - rethrow() - else - dstate.state = :done + try + dstate.state = :running + ret() + if !fancyprint && dstate.bar.max > 1 # if another process downloaded, then max is never set greater than 1 + @lock print_lock printpkgstyle(io, :Installed, "artifact $rname $(MiniProgressBars.pkg_format_bytes(dstate.bar.max; sigdigits = 1))") end + catch + dstate.state = :failed + rethrow() + else + dstate.state = :done end + end end end end @@ -909,30 +1063,30 @@ function download_artifacts(ctx::Context; try print(io, ansi_disablecursor) first = true - timer = 
Timer(0, interval=1/10) + timer = Timer(0, interval = 1 / 10) # TODO: Implement as a new MiniMultiProgressBar - main_bar = MiniProgressBar(; indent=2, header = "Installing artifacts", color = :green, mode = :int, always_reprint=true) + main_bar = MiniProgressBar(; indent = 2, header = "Installing artifacts", color = :green, mode = :int, always_reprint = true) main_bar.max = length(download_states) - while !is_done + while !is_done[] main_bar.current = count(x -> x.state == :done, values(download_states)) - str = sprint(context=io) do iostr + local str = sprint(context = io) do iostr first || print(iostr, ansi_cleartoend) n_printed = 1 - show_progress(iostr, main_bar; carriagereturn=false) + show_progress(iostr, main_bar; carriagereturn = false) println(iostr) - for dstate in sort!(collect(values(download_states)), by=v->v.bar.max, rev=true) - local status, status_update_time = lock(()->(dstate.status, dstate.status_update_time), dstate.status_lock) + for dstate in sort!(collect(values(download_states)), by = v -> v.bar.max, rev = true) + local status, status_update_time = lock(() -> (dstate.status, dstate.status_update_time), dstate.status_lock) # only update the bar's status message if it is stalled for at least 0.5 s. # If the new status message is empty, go back to showing the bar without waiting. 
if isempty(status) || time_ns() - status_update_time > UInt64(500_000_000) dstate.bar.status = status end dstate.state == :running && (dstate.bar.max > 1000 || !isempty(dstate.bar.status)) || continue - show_progress(iostr, dstate.bar; carriagereturn=false) + show_progress(iostr, dstate.bar; carriagereturn = false) println(iostr) n_printed += 1 end - is_done || print(iostr, ansi_moveup(n_printed), ansi_movecol1) + is_done[] || print(iostr, ansi_moveup(n_printed), ansi_movecol1) first = false end print(io, str) @@ -940,7 +1094,7 @@ function download_artifacts(ctx::Context; end print(io, ansi_cleartoend) main_bar.current = count(x -> x[2].state == :done, download_states) - show_progress(io, main_bar; carriagereturn=false) + show_progress(io, main_bar; carriagereturn = false) println(io) catch e e isa InterruptException || rethrow() @@ -953,26 +1107,26 @@ function download_artifacts(ctx::Context; printpkgstyle(io, :Installing, "$(length(download_jobs)) artifacts") end sema = Base.Semaphore(ctx.num_concurrent_downloads) - interrupted = false + interrupted = Ref{Bool}(false) @sync for f in values(download_jobs) - interrupted && break + interrupted[] && break Base.acquire(sema) Threads.@spawn try f() catch e - e isa InterruptException && (interrupted = true) + e isa InterruptException && (interrupted[] = true) put!(errors, e) finally Base.release(sema) end end - is_done = true + is_done[] = true fancyprint && wait(t_print) close(errors) if !isempty(errors) all_errors = collect(errors) - str = sprint(context=io) do iostr + local str = sprint(context = io) do iostr for e in all_errors Base.showerror(iostr, e) length(all_errors) > 1 && println(iostr) @@ -982,12 +1136,11 @@ function download_artifacts(ctx::Context; end end - for f in used_artifact_tomls - write_env_usage(f, "artifact_usage.toml") - end + + return write_env_usage(used_artifact_tomls, "artifact_usage.toml") end -function check_artifacts_downloaded(pkg_root::String; platform::AbstractPlatform=HostPlatform()) 
+function check_artifacts_downloaded(pkg_root::String; platform::AbstractPlatform = HostPlatform()) for (artifacts_toml, artifacts) in collect_artifacts(pkg_root; platform) for name in keys(artifacts) if !artifact_exists(Base.SHA1(artifacts[name]["git-tree-sha1"])) @@ -1014,15 +1167,30 @@ function find_urls(registries::Vector{Registry.RegistryInstance}, uuid::UUID) end -function download_source(ctx::Context; readonly=true) - pkgs_to_install = NamedTuple{(:pkg, :urls, :path), Tuple{PackageEntry, Set{String}, String}}[] - for pkg in values(ctx.env.manifest) +download_source(ctx::Context; readonly = true) = download_source(ctx, values(ctx.env.manifest); readonly) + +function download_source(ctx::Context, pkgs; readonly = true) + pidfile_stale_age = 10 # recommended value is about 3-5x an estimated normal download time (i.e. 2-3s) + pkgs_to_install = NamedTuple{(:pkg, :urls, :path), Tuple{eltype(pkgs), Set{String}, String}}[] + for pkg in pkgs tracking_registered_version(pkg, ctx.julia_version) || continue path = source_path(ctx.env.manifest_file, pkg, ctx.julia_version) path === nothing && continue - ispath(path) && continue + if ispath(path) && iswritable(path) + pidfile = path * ".pid" + else + # If the path is not writable, we cannot create a pidfile there so use one in the first depot. 
+ # (pidlocking probably isn't needed as in this case the package source logically is alredy installed + # in the readonly depot, but keep the pidfile logic for consistency) + dir = joinpath(depots1(), "packages", pkg.name) + mkpath(dir) + iswritable(dir) || pkgerror("The primary depot is not writable") + pidfile = joinpath(dir, basename(path) * ".pid") + end + + FileWatching.mkpidlock(() -> ispath(path), pidfile, stale_age = pidfile_stale_age) && continue urls = find_urls(ctx.registries, pkg.uuid) - push!(pkgs_to_install, (;pkg, urls, path)) + push!(pkgs_to_install, (; pkg, urls, path)) end length(pkgs_to_install) == 0 && return Set{UUID}() @@ -1033,7 +1201,7 @@ function download_source(ctx::Context; readonly=true) missed_packages = eltype(pkgs_to_install)[] widths = [textwidth(pkg.name) for (pkg, _) in pkgs_to_install] - max_name = maximum(widths; init=0) + max_name = maximum(widths; init = 0) # Check what registries the current pkg server tracks # Disable if precompiling to not access internet @@ -1043,7 +1211,8 @@ function download_source(ctx::Context; readonly=true) nothing end - @sync begin + # use eager throw version + Base.Experimental.@sync begin jobs = Channel{eltype(pkgs_to_install)}(ctx.num_concurrent_downloads) results = Channel(ctx.num_concurrent_downloads) @@ -1053,61 +1222,68 @@ function download_source(ctx::Context; readonly=true) end end - for i in 1:ctx.num_concurrent_downloads + for i in 1:ctx.num_concurrent_downloads # (default 8) @async begin for (pkg, urls, path) in jobs - if ctx.use_git_for_all_downloads - put!(results, (pkg, false, (urls, path))) - continue - end - try - archive_urls = Pair{String,Bool}[] + mkpath(dirname(path)) # the `packages/Package` dir needs to exist for the pidfile to be created + FileWatching.mkpidlock(path * ".pid", stale_age = pidfile_stale_age) do + if ispath(path) + put!(results, (pkg, nothing, (urls, path))) + return + end + if ctx.use_git_for_all_downloads + put!(results, (pkg, false, (urls, path))) + return + 
end + archive_urls = Pair{String, Bool}[] # Check if the current package is available in one of the registries being tracked by the pkg server # In that case, download from the package server - if server_registry_info !== nothing + if Registry.is_pkg_in_pkgserver_registry(pkg.uuid, server_registry_info, ctx.registries) server, registry_info = server_registry_info - for reg in ctx.registries - if reg.uuid in keys(registry_info) - if haskey(reg, pkg.uuid) - url = "$server/package/$(pkg.uuid)/$(pkg.tree_hash)" - push!(archive_urls, url => true) - break - end - end - end + url = "$server/package/$(pkg.uuid)/$(pkg.tree_hash)" + push!(archive_urls, url => true) end for repo_url in urls url = get_archive_url_for_version(repo_url, pkg.tree_hash) url !== nothing && push!(archive_urls, url => false) end - success = install_archive(archive_urls, pkg.tree_hash, path, io=ctx.io) - if success && readonly - set_readonly(path) # In add mode, files should be read-only - end - if ctx.use_only_tarballs_for_downloads && !success - pkgerror("failed to get tarball from $(urls)") + try + success = install_archive(archive_urls, pkg.tree_hash, path, io = ctx.io) + if success && readonly + set_readonly(path) # In add mode, files should be read-only + end + if ctx.use_only_tarballs_for_downloads && !success + pkgerror("failed to get tarball from $(urls)") + end + put!(results, (pkg, success, (urls, path))) + catch err + put!(results, (pkg, err, catch_backtrace())) end - put!(results, (pkg, success, (urls, path))) - catch err - put!(results, (pkg, err, catch_backtrace())) end end end end - bar = MiniProgressBar(; indent=1, header = "Downloading packages", color = Base.info_color(), - mode=:int, always_reprint=true) + bar = MiniProgressBar(; + indent = 1, header = "Downloading packages", color = Base.info_color(), + mode = :int, always_reprint = true + ) bar.max = length(pkgs_to_install) fancyprint = can_fancyprint(ctx.io) try for i in 1:length(pkgs_to_install) - pkg::PackageEntry, 
exc_or_success, bt_or_pathurls = take!(results) - exc_or_success isa Exception && pkgerror("Error when installing package $(pkg.name):\n", - sprint(Base.showerror, exc_or_success, bt_or_pathurls)) - success, (urls, path) = exc_or_success, bt_or_pathurls + pkg::eltype(pkgs), exc_or_success_or_nothing, bt_or_pathurls = take!(results) + if exc_or_success_or_nothing isa Exception + exc = exc_or_success_or_nothing + pkgerror("Error when installing package $(pkg.name):\n", sprint(Base.showerror, exc, bt_or_pathurls)) + end + if exc_or_success_or_nothing === nothing + continue # represents when another process did the install + end + success, (urls, path) = exc_or_success_or_nothing, bt_or_pathurls success || push!(missed_packages, (; pkg, urls, path)) bar.current = i - str = sprint(; context=ctx.io) do io + str = sprint(; context = ctx.io) do io if success fancyprint && print_progress_bottom(io) vstr = if pkg.version !== nothing @@ -1132,16 +1308,18 @@ function download_source(ctx::Context; readonly=true) # Use LibGit2 to download any remaining packages # ################################################## for (pkg, urls, path) in missed_packages - uuid = pkg.uuid - install_git(ctx.io, pkg.uuid, pkg.name, pkg.tree_hash, urls, path) - readonly && set_readonly(path) - vstr = if pkg.version !== nothing - "v$(pkg.version)" - else - short_treehash = string(pkg.tree_hash)[1:16] - "[$short_treehash]" + FileWatching.mkpidlock(path * ".pid", stale_age = pidfile_stale_age) do + ispath(path) && return + install_git(ctx.io, pkg.uuid, pkg.name, pkg.tree_hash, urls, path) + readonly && set_readonly(path) + vstr = if pkg.version !== nothing + "v$(pkg.version)" + else + short_treehash = string(pkg.tree_hash)[1:16] + "[$short_treehash]" + end + printpkgstyle(ctx.io, :Installed, string(rpad(pkg.name * " ", max_name + 2, "─"), " ", vstr)) end - printpkgstyle(ctx.io, :Installed, string(rpad(pkg.name * " ", max_name + 2, "─"), " ", vstr)) end return Set{UUID}(entry.pkg.uuid for entry in 
pkgs_to_install) @@ -1192,10 +1370,11 @@ function prune_deps(iterator, keep::Set{UUID}) end clean && break end + return end function record_project_hash(env::EnvCache) - env.manifest.other["project_hash"] = Types.workspace_resolve_hash(env) + return env.manifest.other["project_hash"] = Types.workspace_resolve_hash(env) end ######### @@ -1232,16 +1411,16 @@ function any_package_not_installed(manifest::Manifest) return false end -function build(ctx::Context, uuids::Set{UUID}, verbose::Bool) +function build(ctx::Context, uuids::Set{UUID}, verbose::Bool; allow_reresolve::Bool = true) if any_package_not_installed(ctx.env.manifest) || !isfile(ctx.env.manifest_file) Pkg.instantiate(ctx, allow_build = false, allow_autoprecomp = false) end all_uuids = get_deps(ctx.env, uuids) - build_versions(ctx, all_uuids; verbose) + return build_versions(ctx, all_uuids; verbose, allow_reresolve) end -function dependency_order_uuids(env::EnvCache, uuids::Vector{UUID})::Dict{UUID,Int} - order = Dict{UUID,Int}() +function dependency_order_uuids(env::EnvCache, uuids::Vector{UUID})::Dict{UUID, Int} + order = Dict{UUID, Int}() seen = UUID[] k::Int = 0 function visit(uuid::UUID) @@ -1257,7 +1436,7 @@ function dependency_order_uuids(env::EnvCache, uuids::Vector{UUID})::Dict{UUID,I end foreach(visit, deps) pop!(seen) - order[uuid] = k += 1 + return order[uuid] = k += 1 end visit(uuid::String) = visit(UUID(uuid)) foreach(visit, uuids) @@ -1266,26 +1445,26 @@ end function gen_build_code(build_file::String; inherit_project::Bool = false) code = """ - $(Base.load_path_setup_code(false)) - cd($(repr(dirname(build_file)))) - include($(repr(build_file))) - """ + $(Base.load_path_setup_code(false)) + cd($(repr(dirname(build_file)))) + include($(repr(build_file))) + """ # This will make it so that running Pkg.build runs the build in a session with --startup=no # *unless* the parent julia session is started with --startup=yes explicitly. startup_flag = Base.JLOptions().startupfile == 1 ? 
"yes" : "no" return ``` - $(Base.julia_cmd()) -O0 --color=no --history-file=no - --startup-file=$startup_flag - $(inherit_project ? `--project=$(Base.active_project())` : ``) - --eval $code - ``` + $(Base.julia_cmd()) -O0 --color=no --history-file=no + --startup-file=$startup_flag + $(inherit_project ? `--project=$(Base.active_project())` : ``) + --eval $code + ``` end with_load_path(f::Function, new_load_path::String) = with_load_path(f, [new_load_path]) function with_load_path(f::Function, new_load_path::Vector{String}) old_load_path = copy(Base.LOAD_PATH) copy!(Base.LOAD_PATH, new_load_path) - try + return try f() finally copy!(LOAD_PATH, old_load_path) @@ -1297,9 +1476,9 @@ pkg_scratchpath() = joinpath(depots1(), "scratchspaces", PkgUUID) builddir(source_path::String) = joinpath(source_path, "deps") buildfile(source_path::String) = joinpath(builddir(source_path), "build.jl") -function build_versions(ctx::Context, uuids::Set{UUID}; verbose=false) +function build_versions(ctx::Context, uuids::Set{UUID}; verbose = false, allow_reresolve::Bool = true) # collect builds for UUIDs with `deps/build.jl` files - builds = Tuple{UUID,String,String,VersionNumber}[] + builds = Tuple{UUID, String, String, VersionNumber}[] for uuid in uuids is_stdlib(uuid) && continue if Types.is_project_uuid(ctx.env, uuid) @@ -1324,84 +1503,94 @@ function build_versions(ctx::Context, uuids::Set{UUID}; verbose=false) # toposort builds by dependencies order = dependency_order_uuids(ctx.env, UUID[first(build) for build in builds]) sort!(builds, by = build -> order[first(build)]) - max_name = maximum(build->textwidth(build[2]), builds; init=0) + max_name = maximum(build -> textwidth(build[2]), builds; init = 0) - bar = MiniProgressBar(; indent=2, header = "Building packages", color = Base.info_color(), - mode=:int, always_reprint=true) + bar = MiniProgressBar(; + indent = 2, header = "Building packages", color = Base.info_color(), + mode = :int, always_reprint = true + ) bar.max = length(builds) 
fancyprint = can_fancyprint(ctx.io) fancyprint && start_progress(ctx.io, bar) # build each package versions in a child process try - for (n, (uuid, name, source_path, version)) in enumerate(builds) - pkg = PackageSpec(;uuid=uuid, name=name, version=version) - build_file = buildfile(source_path) - # compatibility shim - local build_project_override, build_project_preferences - if isfile(projectfile_path(builddir(source_path))) - build_project_override = nothing - with_load_path([builddir(source_path), Base.LOAD_PATH...]) do - build_project_preferences = Base.get_preferences() - end - else - build_project_override = gen_target_project(ctx, pkg, source_path, "build") - with_load_path([something(projectfile_path(source_path)), Base.LOAD_PATH...]) do - build_project_preferences = Base.get_preferences() + for (n, (uuid, name, source_path, version)) in enumerate(builds) + pkg = PackageSpec(; uuid = uuid, name = name, version = version) + build_file = buildfile(source_path) + # compatibility shim + local build_project_override, build_project_preferences + if isfile(projectfile_path(builddir(source_path))) + build_project_override = nothing + with_load_path([builddir(source_path), Base.LOAD_PATH...]) do + build_project_preferences = Base.get_preferences() + end + else + build_project_override = gen_target_project(ctx, pkg, source_path, "build") + with_load_path([something(projectfile_path(source_path)), Base.LOAD_PATH...]) do + build_project_preferences = Base.get_preferences() + end end - end - # Put log output in Pkg's scratchspace if the package is content addressed - # by tree sha and in the build directory if it is tracked by path etc. 
- entry = manifest_info(ctx.env.manifest, uuid) - if entry !== nothing && entry.tree_hash !== nothing - key = string(entry.tree_hash) - scratch = joinpath(pkg_scratchpath(), key) - mkpath(scratch) - log_file = joinpath(scratch, "build.log") - # Associate the logfile with the package being built - dict = Dict{String,Any}(scratch => [ - Dict{String,Any}("time" => Dates.now(), "parent_projects" => [projectfile_path(source_path)]) - ]) - open(joinpath(depots1(), "logs", "scratch_usage.toml"), "a") do io - TOML.print(io, dict) + # Put log output in Pkg's scratchspace if the package is content addressed + # by tree sha and in the build directory if it is tracked by path etc. + entry = manifest_info(ctx.env.manifest, uuid) + if entry !== nothing && entry.tree_hash !== nothing + key = string(entry.tree_hash) + scratch = joinpath(pkg_scratchpath(), key) + mkpath(scratch) + log_file = joinpath(scratch, "build.log") + # Associate the logfile with the package being built + dict = Dict{String, Any}( + scratch => [ + Dict{String, Any}("time" => Dates.now(), "parent_projects" => [projectfile_path(source_path)]), + ] + ) + open(joinpath(depots1(), "logs", "scratch_usage.toml"), "a") do io + TOML.print(io, dict) + end + else + log_file = splitext(build_file)[1] * ".log" end - else - log_file = splitext(build_file)[1] * ".log" - end - - fancyprint && print_progress_bottom(ctx.io) - printpkgstyle(ctx.io, :Building, - rpad(name * " ", max_name + 1, "─") * "→ " * pathrepr(log_file)) - bar.current = n-1 + fancyprint && print_progress_bottom(ctx.io) - fancyprint && show_progress(ctx.io, bar) - - let log_file=log_file - sandbox(ctx, pkg, builddir(source_path), build_project_override; preferences=build_project_preferences) do - flush(ctx.io) - ok = open(log_file, "w") do log - std = verbose ? ctx.io : log - success(pipeline(gen_build_code(buildfile(source_path)), - stdout=std, stderr=std)) - end - ok && return - n_lines = isinteractive() ? 
100 : 5000 - # TODO: Extract last n lines more efficiently - log_lines = readlines(log_file) - log_show = join(log_lines[max(1, length(log_lines) - n_lines):end], '\n') - full_log_at, last_lines = - if length(log_lines) > n_lines - "\n\nFull log at $log_file", - ", showing the last $n_lines of log" - else - "", "" + printpkgstyle( + ctx.io, :Building, + rpad(name * " ", max_name + 1, "─") * "→ " * pathrepr(log_file) + ) + bar.current = n - 1 + + fancyprint && show_progress(ctx.io, bar) + + let log_file = log_file + sandbox(ctx, pkg, builddir(source_path), build_project_override; preferences = build_project_preferences, allow_reresolve) do + flush(ctx.io) + ok = open(log_file, "w") do log + std = verbose ? ctx.io : log + success( + pipeline( + gen_build_code(buildfile(source_path)), + stdout = std, stderr = std + ) + ) + end + ok && return + n_lines = isinteractive() ? 100 : 5000 + # TODO: Extract last n lines more efficiently + log_lines = readlines(log_file) + log_show = join(log_lines[max(1, length(log_lines) - n_lines):end], '\n') + full_log_at, last_lines = + if length(log_lines) > n_lines + "\n\nFull log at $log_file", + ", showing the last $n_lines of log" + else + "", "" + end + pkgerror("Error building `$(pkg.name)`$last_lines: \n$log_show$full_log_at") end - pkgerror("Error building `$(pkg.name)`$last_lines: \n$log_show$full_log_at") end end - end finally fancyprint && end_progress(ctx.io, bar) end @@ -1482,47 +1671,53 @@ function rm(ctx::Context, pkgs::Vector{PackageSpec}; mode::PackageMode) record_project_hash(ctx.env) # update project & manifest write_env(ctx.env) - show_update(ctx.env, ctx.registries; io=ctx.io) + return show_update(ctx.env, ctx.registries; io = ctx.io) end -update_package_add(ctx::Context, pkg::PackageSpec, ::Nothing, source_path, source_repo, is_dep::Bool) = pkg -function update_package_add(ctx::Context, pkg::PackageSpec, entry::PackageEntry, source_path, source_repo, is_dep::Bool) +update_package_add(ctx::Context, pkg::PackageSpec, 
::Nothing, is_dep::Bool) = pkg +function update_package_add(ctx::Context, pkg::PackageSpec, entry::PackageEntry, is_dep::Bool) if entry.pinned if pkg.version == VersionSpec() println(ctx.io, "`$(pkg.name)` is pinned at `v$(entry.version)`: maintaining pinned version") end - return PackageSpec(; uuid=pkg.uuid, name=pkg.name, pinned=true, - version=entry.version, tree_hash=entry.tree_hash, - path=entry.path, repo=entry.repo) + return PackageSpec(; + uuid = pkg.uuid, name = pkg.name, pinned = true, + version = entry.version, tree_hash = entry.tree_hash, + path = entry.path, repo = entry.repo + ) end if entry.path !== nothing || entry.repo.source !== nothing || pkg.repo.source !== nothing return pkg # overwrite everything, nothing to copy over end if is_stdlib(pkg.uuid) return pkg # stdlibs are not versioned like other packages - elseif is_dep && ((isa(pkg.version, VersionNumber) && entry.version == pkg.version) || - (!isa(pkg.version, VersionNumber) && entry.version ∈ pkg.version)) + elseif is_dep && ( + (isa(pkg.version, VersionNumber) && entry.version == pkg.version) || + (!isa(pkg.version, VersionNumber) && entry.version ∈ pkg.version) + ) # leave the package as is at the installed version - return PackageSpec(; uuid=pkg.uuid, name=pkg.name, version=entry.version, - tree_hash=entry.tree_hash) + return PackageSpec(; + uuid = pkg.uuid, name = pkg.name, version = entry.version, + tree_hash = entry.tree_hash + ) end # adding a new version not compatible with the old version, so we just overwrite return pkg end # Update registries AND read them back in. -function update_registries(ctx::Context; force::Bool=true, kwargs...) +function update_registries(ctx::Context; force::Bool = true, kwargs...) OFFLINE_MODE[] && return !force && UPDATED_REGISTRY_THIS_SESSION[] && return - Registry.update(; io=ctx.io, kwargs...) + Registry.update(; io = ctx.io, kwargs...) 
copy!(ctx.registries, Registry.reachable_registries()) - UPDATED_REGISTRY_THIS_SESSION[] = true + return UPDATED_REGISTRY_THIS_SESSION[] = true end function is_all_registered(registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}) pkgs = filter(tracking_registered_version, pkgs) for pkg in pkgs - if !any(r->haskey(r, pkg.uuid), registries) + if !any(r -> haskey(r, pkg.uuid), registries) return pkg end end @@ -1530,9 +1725,32 @@ function is_all_registered(registries::Vector{Registry.RegistryInstance}, pkgs:: end function check_registered(registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}) + if isempty(registries) && !isempty(pkgs) + registry_pkgs = filter(tracking_registered_version, pkgs) + if !isempty(registry_pkgs) + pkgerror("no registries have been installed. Cannot resolve the following packages:\n$(join(map(pkg -> " " * err_rep(pkg), registry_pkgs), "\n"))") + end + end pkg = is_all_registered(registries, pkgs) if pkg isa PackageSpec - pkgerror("expected package $(err_rep(pkg)) to be registered") + msg = "expected package $(err_rep(pkg)) to be registered" + # check if the name exists in the registry with a different uuid + if pkg.name !== nothing + reg_uuid = Pair{String, Vector{UUID}}[] + for reg in registries + uuids = Registry.uuids_from_name(reg, pkg.name) + if !isempty(uuids) + push!(reg_uuid, reg.name => uuids) + end + end + if !isempty(reg_uuid) + msg *= "\n You may have provided the wrong UUID for package $(pkg.name).\n Found the following UUIDs for that name:" + for (reg, uuids) in reg_uuid + msg *= "\n - $(join(uuids, ", ")) from registry: $reg" + end + end + end + pkgerror(msg) end return nothing end @@ -1544,29 +1762,38 @@ function assert_can_add(ctx::Context, pkgs::Vector{PackageSpec}) # package with the same name exist in the project: assert that they have the same uuid existing_uuid = get(ctx.env.project.deps, pkg.name, pkg.uuid) existing_uuid == pkg.uuid || - pkgerror("""Refusing to add package 
$(err_rep(pkg)). - Package `$(pkg.name)=$(existing_uuid)` with the same name already exists as a direct dependency. - To remove the existing package, use `import Pkg; Pkg.rm("$(pkg.name)")`. - """) + pkgerror( + """Refusing to add package $(err_rep(pkg)). + Package `$(pkg.name)=$(existing_uuid)` with the same name already exists as a direct dependency. + To remove the existing package, use `$(Pkg.in_repl_mode() ? """pkg> rm $(pkg.name)""" : """import Pkg; Pkg.rm("$(pkg.name)")""")`. + """ + ) # package with the same uuid exist in the project: assert they have the same name name = findfirst(==(pkg.uuid), ctx.env.project.deps) name === nothing || name == pkg.name || - pkgerror("""Refusing to add package $(err_rep(pkg)). - Package `$name=$(pkg.uuid)` with the same UUID already exists as a direct dependency. - To remove the existing package, use `import Pkg; Pkg.rm("$name")`. - """) + pkgerror( + """Refusing to add package $(err_rep(pkg)). + Package `$name=$(pkg.uuid)` with the same UUID already exists as a direct dependency. + To remove the existing package, use `$(Pkg.in_repl_mode() ? """pkg> rm $name""" : """import Pkg; Pkg.rm("$name")""")`. + """ + ) # package with the same uuid exist in the manifest: assert they have the same name entry = get(ctx.env.manifest, pkg.uuid, nothing) entry === nothing || entry.name == pkg.name || - pkgerror("""Refusing to add package $(err_rep(pkg)). - Package `$(entry.name)=$(pkg.uuid)` with the same UUID already exists in the manifest. - To remove the existing package, use `import Pkg; Pkg.rm(Pkg.PackageSpec(uuid="$(pkg.uuid)"); mode=Pkg.PKGMODE_MANIFEST)`. - """) + pkgerror( + """Refusing to add package $(err_rep(pkg)). + Package `$(entry.name)=$(pkg.uuid)` with the same UUID already exists in the manifest. + To remove the existing package, use `$(Pkg.in_repl_mode() ? """pkg> rm --manifest $(entry.name)=$(pkg.uuid)""" : """import Pkg; Pkg.rm(Pkg.PackageSpec(uuid="$(pkg.uuid)"); mode=Pkg.PKGMODE_MANIFEST)""")`. 
+ """ + ) end + return end -function tiered_resolve(env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}, julia_version, - try_all_installed::Bool) +function tiered_resolve( + env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}, julia_version, + try_all_installed::Bool + ) if try_all_installed try # do not modify existing subgraph and only add installed versions of the new packages @debug "tiered_resolve: trying PRESERVE_ALL_INSTALLED" @@ -1609,29 +1836,47 @@ function targeted_resolve(env::EnvCache, registries::Vector{Registry.RegistryIns return pkgs, deps_map end -function _resolve(io::IO, env::EnvCache, registries::Vector{Registry.RegistryInstance}, - pkgs::Vector{PackageSpec}, preserve::PreserveLevel, julia_version) - printpkgstyle(io, :Resolving, "package versions...") - if preserve == PRESERVE_TIERED_INSTALLED - tiered_resolve(env, registries, pkgs, julia_version, true) - elseif preserve == PRESERVE_TIERED - tiered_resolve(env, registries, pkgs, julia_version, false) - else - targeted_resolve(env, registries, pkgs, preserve, julia_version) +function _resolve( + io::IO, env::EnvCache, registries::Vector{Registry.RegistryInstance}, + pkgs::Vector{PackageSpec}, preserve::PreserveLevel, julia_version + ) + usingstrategy = preserve != PRESERVE_TIERED ? 
" using $preserve" : "" + printpkgstyle(io, :Resolving, "package versions$(usingstrategy)...") + return try + if preserve == PRESERVE_TIERED_INSTALLED + tiered_resolve(env, registries, pkgs, julia_version, true) + elseif preserve == PRESERVE_TIERED + tiered_resolve(env, registries, pkgs, julia_version, false) + else + targeted_resolve(env, registries, pkgs, preserve, julia_version) + end + catch err + + if err isa Resolve.ResolverError + yanked_pkgs = filter(pkg -> is_pkgversion_yanked(pkg, registries), load_all_deps(env)) + if !isempty(yanked_pkgs) + indent = " "^(Pkg.pkgstyle_indent) + yanked_str = join(map(pkg -> indent * " - " * err_rep(pkg, quotes = false) * " " * string(pkg.version), yanked_pkgs), "\n") + printpkgstyle(io, :Warning, """The following package versions were yanked from their registry and \ + are not resolvable:\n$yanked_str""", color = Base.warn_color()) + end + end + rethrow() end end -function add(ctx::Context, pkgs::Vector{PackageSpec}, new_git=Set{UUID}(); - allow_autoprecomp::Bool=true, preserve::PreserveLevel=default_preserve(), platform::AbstractPlatform=HostPlatform(), - target::Symbol=:deps) +function add( + ctx::Context, pkgs::Vector{PackageSpec}, new_git = Set{UUID}(); + allow_autoprecomp::Bool = true, preserve::PreserveLevel = default_preserve(), platform::AbstractPlatform = HostPlatform(), + target::Symbol = :deps + ) assert_can_add(ctx, pkgs) # load manifest data for (i, pkg) in pairs(pkgs) delete!(ctx.env.project.weakdeps, pkg.name) entry = manifest_info(ctx.env.manifest, pkg.uuid) is_dep = any(uuid -> uuid == pkg.uuid, [uuid for (name, uuid) in ctx.env.project.deps]) - source_path, source_repo = get_path_repo(ctx.env.project, pkg.name) - pkgs[i] = update_package_add(ctx, pkg, entry, source_path, source_repo, is_dep) + pkgs[i] = update_package_add(ctx, pkg, entry, is_dep) end names = (p.name for p in pkgs) @@ -1652,11 +1897,11 @@ function add(ctx::Context, pkgs::Vector{PackageSpec}, new_git=Set{UUID}(); man_pkgs, deps_map = 
_resolve(ctx.io, ctx.env, ctx.registries, pkgs, preserve, ctx.julia_version) update_manifest!(ctx.env, man_pkgs, deps_map, ctx.julia_version) new_apply = download_source(ctx) - fixups_from_projectfile!(ctx.env) + fixups_from_projectfile!(ctx) # After downloading resolutionary packages, search for (Julia)Artifacts.toml files # and ensure they are all downloaded and unpacked as well: - download_artifacts(ctx, platform=platform, julia_version=ctx.julia_version) + download_artifacts(ctx, platform = platform, julia_version = ctx.julia_version) # if env is a package add compat entries if ctx.env.project.name !== nothing && ctx.env.project.uuid !== nothing @@ -1674,7 +1919,7 @@ function add(ctx::Context, pkgs::Vector{PackageSpec}, new_git=Set{UUID}(); record_project_hash(ctx.env) # compat entries changed the hash after it was last recorded in update_manifest! write_env(ctx.env) # write env before building - show_update(ctx.env, ctx.registries; io=ctx.io) + show_update(ctx.env, ctx.registries; io = ctx.io) build_versions(ctx, union(new_apply, new_git)) allow_autoprecomp && Pkg._auto_precompile(ctx) else @@ -1687,8 +1932,10 @@ function add(ctx::Context, pkgs::Vector{PackageSpec}, new_git=Set{UUID}(); end # Input: name, uuid, and path -function develop(ctx::Context, pkgs::Vector{PackageSpec}, new_git::Set{UUID}; - preserve::PreserveLevel=default_preserve(), platform::AbstractPlatform=HostPlatform()) +function develop( + ctx::Context, pkgs::Vector{PackageSpec}, new_git::Set{UUID}; + preserve::PreserveLevel = default_preserve(), platform::AbstractPlatform = HostPlatform() + ) assert_can_add(ctx, pkgs) # no need to look at manifest.. 
dev will just nuke whatever is there before for pkg in pkgs @@ -1699,11 +1946,11 @@ function develop(ctx::Context, pkgs::Vector{PackageSpec}, new_git::Set{UUID}; pkgs, deps_map = _resolve(ctx.io, ctx.env, ctx.registries, pkgs, preserve, ctx.julia_version) update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version) new_apply = download_source(ctx) - fixups_from_projectfile!(ctx.env) - download_artifacts(ctx; platform=platform, julia_version=ctx.julia_version) + fixups_from_projectfile!(ctx) + download_artifacts(ctx; platform = platform, julia_version = ctx.julia_version) write_env(ctx.env) # write env before building - show_update(ctx.env, ctx.registries; io=ctx.io) - build_versions(ctx, union(new_apply, new_git)) + show_update(ctx.env, ctx.registries; io = ctx.io) + return build_versions(ctx, union(new_apply, new_git)) end # load version constraint @@ -1714,7 +1961,9 @@ function up_load_versions!(ctx::Context, pkg::PackageSpec, entry::PackageEntry, entry.version !== nothing || return false # no version to set if entry.pinned || level == UPLEVEL_FIXED pkg.version = entry.version - pkg.tree_hash = entry.tree_hash + if pkg.path === nothing + pkg.tree_hash = entry.tree_hash + end elseif entry.repo.source !== nothing || source_repo.source !== nothing # repo packages have a version but are treated specially if source_repo.source !== nothing pkg.repo = source_repo @@ -1739,7 +1988,7 @@ function up_load_versions!(ctx::Context, pkg::PackageSpec, entry::PackageEntry, r = level == UPLEVEL_PATCH ? VersionRange(ver.major, ver.minor) : level == UPLEVEL_MINOR ? VersionRange(ver.major) : level == UPLEVEL_MAJOR ? 
VersionRange() : - error("unexpected upgrade level: $level") + error("unexpected upgrade level: $level") pkg.version = VersionSpec(r) end return false @@ -1754,13 +2003,15 @@ function up_load_manifest_info!(pkg::PackageSpec, entry::PackageEntry) if pkg.path === nothing pkg.path = entry.path end - pkg.pinned = entry.pinned + return pkg.pinned = entry.pinned # `pkg.version` and `pkg.tree_hash` is set by `up_load_versions!` end -function load_manifest_deps_up(env::EnvCache, pkgs::Vector{PackageSpec}=PackageSpec[]; - preserve::PreserveLevel=PRESERVE_ALL) +function load_manifest_deps_up( + env::EnvCache, pkgs::Vector{PackageSpec} = PackageSpec[]; + preserve::PreserveLevel = PRESERVE_ALL + ) manifest = env.manifest project = env.project explicit_upgraded = Set(pkg.uuid for pkg in pkgs) @@ -1795,28 +2046,35 @@ function load_manifest_deps_up(env::EnvCache, pkgs::Vector{PackageSpec}=PackageS end # The rest of the packages get fixed - push!(pkgs, PackageSpec( - uuid = uuid, - name = entry.name, - path = entry.path, - pinned = entry.pinned, - repo = entry.repo, - tree_hash = entry.tree_hash, # TODO should tree_hash be changed too? - version = something(entry.version, VersionSpec()) - )) + push!( + pkgs, PackageSpec( + uuid = uuid, + name = entry.name, + path = entry.path, + pinned = entry.pinned, + repo = entry.repo, + tree_hash = entry.tree_hash, # TODO should tree_hash be changed too? 
+ version = something(entry.version, VersionSpec()) + ) + ) end return pkgs end function targeted_resolve_up(env::EnvCache, registries::Vector{Registry.RegistryInstance}, pkgs::Vector{PackageSpec}, preserve::PreserveLevel, julia_version) - pkgs = load_manifest_deps_up(env, pkgs; preserve=preserve) + pkgs = load_manifest_deps_up(env, pkgs; preserve = preserve) check_registered(registries, pkgs) deps_map = resolve_versions!(env, registries, pkgs, julia_version, preserve == PRESERVE_ALL_INSTALLED) return pkgs, deps_map end -function up(ctx::Context, pkgs::Vector{PackageSpec}, level::UpgradeLevel; - skip_writing_project::Bool=false, preserve::Union{Nothing,PreserveLevel}=nothing) +function up( + ctx::Context, pkgs::Vector{PackageSpec}, level::UpgradeLevel; + skip_writing_project::Bool = false, preserve::Union{Nothing, PreserveLevel} = nothing + ) + + requested_pkgs = pkgs + new_git = Set{UUID}() # TODO check all pkg.version == VersionSpec() # set version constraints according to `level` @@ -1840,16 +2098,44 @@ function up(ctx::Context, pkgs::Vector{PackageSpec}, level::UpgradeLevel; end update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version) new_apply = download_source(ctx) - fixups_from_projectfile!(ctx.env) - download_artifacts(ctx, julia_version=ctx.julia_version) + fixups_from_projectfile!(ctx) + download_artifacts(ctx, julia_version = ctx.julia_version) write_env(ctx.env; skip_writing_project) # write env before building - show_update(ctx.env, ctx.registries; io=ctx.io, hidden_upgrades_info = true) - build_versions(ctx, union(new_apply, new_git)) + show_update(ctx.env, ctx.registries; io = ctx.io, hidden_upgrades_info = true) + + if length(requested_pkgs) == 1 + pkg = only(requested_pkgs) + entry = manifest_info(ctx.env.manifest, pkg.uuid) + if entry === nothing || (entry.path === nothing && entry.repo.source === nothing) + # Get current version after the update + current_version = entry !== nothing ? 
entry.version : nothing + original_entry = manifest_info(ctx.env.original_manifest, pkg.uuid) + original_version = original_entry !== nothing ? original_entry.version : nothing + + # Check if version didn't change and there's a newer version available + if current_version == original_version && current_version !== nothing + temp_pkg = PackageSpec(name = pkg.name, uuid = pkg.uuid, version = current_version) + cinfo = status_compat_info(temp_pkg, ctx.env, ctx.registries) + if cinfo !== nothing + packages_holding_back, max_version, max_version_compat = cinfo + if current_version < max_version + printpkgstyle( + ctx.io, :Info, "$(pkg.name) can be updated but at the cost of downgrading other packages. " * + "To force upgrade to the latest version, try `add $(pkg.name)@$(max_version)`", color = Base.info_color() + ) + end + end + end + end + end + + return build_versions(ctx, union(new_apply, new_git)) end function update_package_pin!(registries::Vector{Registry.RegistryInstance}, pkg::PackageSpec, entry::Union{Nothing, PackageEntry}) if entry === nothing - pkgerror("package $(err_rep(pkg)) not found in the manifest, run `Pkg.resolve()` and retry.") + cmd = Pkg.in_repl_mode() ? 
"pkg> resolve" : "Pkg.resolve()" + pkgerror("package $(err_rep(pkg)) not found in the manifest, run `$cmd` and retry.") end #if entry.pinned && pkg.version == VersionSpec() @@ -1886,11 +2172,11 @@ function pin(ctx::Context, pkgs::Vector{PackageSpec}) update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version) new = download_source(ctx) - fixups_from_projectfile!(ctx.env) - download_artifacts(ctx; julia_version=ctx.julia_version) + fixups_from_projectfile!(ctx) + download_artifacts(ctx; julia_version = ctx.julia_version) write_env(ctx.env) # write env before building - show_update(ctx.env, ctx.registries; io=ctx.io) - build_versions(ctx, new) + show_update(ctx.env, ctx.registries; io = ctx.io) + return build_versions(ctx, new) end function update_package_free!(registries::Vector{Registry.RegistryInstance}, pkg::PackageSpec, entry::PackageEntry, err_if_free::Bool) @@ -1910,22 +2196,24 @@ function update_package_free!(registries::Vector{Registry.RegistryInstance}, pkg return # -> name, uuid end if err_if_free - pkgerror("expected package $(err_rep(pkg)) to be pinned, tracking a path,", - " or tracking a repository") + pkgerror( + "expected package $(err_rep(pkg)) to be pinned, tracking a path,", + " or tracking a repository" + ) end return end # TODO: this is two technically different operations with the same name # split into two subfunctions ... 
-function free(ctx::Context, pkgs::Vector{PackageSpec}; err_if_free=true) +function free(ctx::Context, pkgs::Vector{PackageSpec}; err_if_free = true) for pkg in pkgs entry = manifest_info(ctx.env.manifest, pkg.uuid) delete!(ctx.env.project.sources, pkg.name) update_package_free!(ctx.registries, pkg, entry, err_if_free) end - if any(pkg -> pkg.version == VersionSpec(), pkgs) + return if any(pkg -> pkg.version == VersionSpec(), pkgs) pkgs = load_direct_deps(ctx.env, pkgs) check_registered(ctx.registries, pkgs) @@ -1934,38 +2222,45 @@ function free(ctx::Context, pkgs::Vector{PackageSpec}; err_if_free=true) update_manifest!(ctx.env, pkgs, deps_map, ctx.julia_version) new = download_source(ctx) - fixups_from_projectfile!(ctx.env) + fixups_from_projectfile!(ctx) download_artifacts(ctx) write_env(ctx.env) # write env before building - show_update(ctx.env, ctx.registries; io=ctx.io) + show_update(ctx.env, ctx.registries; io = ctx.io) build_versions(ctx, new) else foreach(pkg -> manifest_info(ctx.env.manifest, pkg.uuid).pinned = false, pkgs) write_env(ctx.env) - show_update(ctx.env, ctx.registries; io=ctx.io) + show_update(ctx.env, ctx.registries; io = ctx.io) end end function gen_test_code(source_path::String; test_args::Cmd) test_file = testfile(source_path) return """ - $(Base.load_path_setup_code(false)) - cd($(repr(dirname(test_file)))) - append!(empty!(ARGS), $(repr(test_args.exec))) - include($(repr(test_file))) - """ + $(Base.load_path_setup_code(false)) + cd($(repr(dirname(test_file)))) + append!(empty!(ARGS), $(repr(test_args.exec))) + include($(repr(test_file))) + """ end function get_threads_spec() - if Threads.nthreads(:interactive) > 0 + return if haskey(ENV, "JULIA_NUM_THREADS") + if isempty(ENV["JULIA_NUM_THREADS"]) + throw(ArgumentError("JULIA_NUM_THREADS is set to an empty string. 
It is not clear what Pkg.test should set for `-t` on the test worker.")) + end + # if set, prefer JULIA_NUM_THREADS because this is passed to the test worker via --threads + # which takes precedence in the worker + ENV["JULIA_NUM_THREADS"] + elseif Threads.nthreads(:interactive) > 0 "$(Threads.nthreads(:default)),$(Threads.nthreads(:interactive))" else "$(Threads.nthreads(:default))" end end -function gen_subprocess_flags(source_path::String; coverage, julia_args) +function gen_subprocess_flags(source_path::String; coverage, julia_args::Cmd) coverage_arg = if coverage isa Bool # source_path is the package root, not "src" so "ext" etc. is included coverage ? string("@", source_path) : "none" @@ -1990,7 +2285,7 @@ end function with_temp_env(fn::Function, temp_env::String) load_path = copy(LOAD_PATH) active_project = Base.ACTIVE_PROJECT[] - try + return try push!(empty!(LOAD_PATH), "@", temp_env) Base.ACTIVE_PROJECT[] = nothing fn() @@ -2005,8 +2300,10 @@ function sandbox_preserve(env::EnvCache, target::PackageSpec, test_project::Stri env = deepcopy(env) # include root in manifest (in case any dependencies point back to it) if env.pkg !== nothing - env.manifest[env.pkg.uuid] = PackageEntry(;name=env.pkg.name, path=dirname(env.project_file), - deps=env.project.deps) + env.manifest[env.pkg.uuid] = PackageEntry(; + name = env.pkg.name, path = dirname(env.project_file), + deps = env.project.deps + ) end # if the source manifest is an old format, upgrade the manifest_format so # that warnings aren't thrown for the temp sandbox manifest @@ -2041,16 +2338,18 @@ function abspath!(env::EnvCache, project::Project) end # ctx + pkg used to compute parent dep graph -function sandbox(fn::Function, ctx::Context, target::PackageSpec, - sandbox_path::String, sandbox_project_override; - preferences::Union{Nothing,Dict{String,Any}} = nothing, - force_latest_compatible_version::Bool=false, - allow_earlier_backwards_compatible_versions::Bool=true, - allow_reresolve::Bool=true) +function 
sandbox( + fn::Function, ctx::Context, target::PackageSpec, + sandbox_path::String, sandbox_project_override; + preferences::Union{Nothing, Dict{String, Any}} = nothing, + force_latest_compatible_version::Bool = false, + allow_earlier_backwards_compatible_versions::Bool = true, + allow_reresolve::Bool = true + ) sandbox_project = projectfile_path(sandbox_path) - mktempdir() do tmp - tmp_project = projectfile_path(tmp) + return mktempdir() do tmp + tmp_project = projectfile_path(tmp) tmp_manifest = manifestfile_path(tmp) tmp_preferences = joinpath(tmp, first(Base.preferences_names)) @@ -2118,14 +2417,22 @@ function sandbox(fn::Function, ctx::Context, target::PackageSpec, end try - Pkg.resolve(temp_ctx; io=devnull, skip_writing_project=true) + Pkg.resolve(temp_ctx; io = devnull, skip_writing_project = true) @debug "Using _parent_ dep graph" - catch err# TODO + catch err # TODO err isa Resolve.ResolverError || rethrow() allow_reresolve || rethrow() @debug err - printpkgstyle(ctx.io, :Test, "Could not use exact versions of packages in manifest. Re-resolving dependencies", color=Base.warn_color()) - Pkg.update(temp_ctx; skip_writing_project=true, update_registry=false, io=ctx.io) + msg = string( + "Could not use exact versions of packages in manifest, re-resolving. ", + "Note: if you do not check your manifest file into source control, ", + "then you can probably ignore this message. 
", + "However, if you do check your manifest file into source control, ", + "then you probably want to pass the `allow_reresolve = false` kwarg ", + "when calling the `Pkg.test` function.", + ) + printpkgstyle(ctx.io, :Test, msg, color = Base.warn_color()) + Pkg.update(temp_ctx; skip_writing_project = true, update_registry = false, io = ctx.io) printpkgstyle(ctx.io, :Test, "Successfully re-resolved") @debug "Using _clean_ dep graph" end @@ -2164,7 +2471,7 @@ function gen_target_project(ctx::Context, pkg::PackageSpec, source_path::String, env = ctx.env registries = ctx.registries test_project = Types.Project() - if projectfile_path(source_path; strict=true) === nothing + if projectfile_path(source_path; strict = true) === nothing # no project file, assuming this is an old REQUIRE package test_project.deps = copy(env.manifest[pkg.uuid].deps) if target == "test" @@ -2172,10 +2479,10 @@ function gen_target_project(ctx::Context, pkg::PackageSpec, source_path::String, if isfile(test_REQUIRE_path) @warn "using test/REQUIRE files is deprecated and current support is lacking in some areas" test_pkgs = parse_REQUIRE(test_REQUIRE_path) - package_specs = [PackageSpec(name=pkg) for pkg in test_pkgs] + package_specs = [PackageSpec(name = pkg) for pkg in test_pkgs] registry_resolve!(registries, package_specs) stdlib_resolve!(package_specs) - ensure_resolved(ctx, env.manifest, package_specs, registry=true) + ensure_resolved(ctx, env.manifest, package_specs, registry = true) for spec in package_specs test_project.deps[spec.name] = spec.uuid end @@ -2211,12 +2518,14 @@ end testdir(source_path::String) = joinpath(source_path, "test") testfile(source_path::String) = joinpath(testdir(source_path), "runtests.jl") -function test(ctx::Context, pkgs::Vector{PackageSpec}; - coverage=false, julia_args::Cmd=``, test_args::Cmd=``, - test_fn=nothing, - force_latest_compatible_version::Bool=false, - allow_earlier_backwards_compatible_versions::Bool=true, - allow_reresolve::Bool=true) +function 
test( + ctx::Context, pkgs::Vector{PackageSpec}; + coverage = false, julia_args::Cmd = ``, test_args::Cmd = ``, + test_fn = nothing, + force_latest_compatible_version::Bool = false, + allow_earlier_backwards_compatible_versions::Bool = true, + allow_reresolve::Bool = true + ) Pkg.instantiate(ctx; allow_autoprecomp = false) # do precomp later within sandbox # load manifest data @@ -2237,16 +2546,18 @@ function test(ctx::Context, pkgs::Vector{PackageSpec}; # See if we can find the test files for all packages missing_runtests = String[] - source_paths = String[] # source_path is the package root (not /src) + source_paths = String[] # source_path is the package root (not /src) for pkg in pkgs sourcepath = project_rel_path(ctx.env, source_path(ctx.env.manifest_file, pkg, ctx.julia_version)) # TODO !isfile(testfile(sourcepath)) && push!(missing_runtests, pkg.name) push!(source_paths, sourcepath) end if !isempty(missing_runtests) - pkgerror(length(missing_runtests) == 1 ? "Package " : "Packages ", - join(missing_runtests, ", "), - " did not provide a `test/runtests.jl` file") + pkgerror( + length(missing_runtests) == 1 ? 
"Package " : "Packages ", + join(missing_runtests, ", "), + " did not provide a `test/runtests.jl` file" + ) end # sandbox @@ -2258,13 +2569,15 @@ function test(ctx::Context, pkgs::Vector{PackageSpec}; proj = Base.locate_project_file(abspath(testdir(source_path))) env = EnvCache(proj) # Instantiate test env - Pkg.instantiate(Context(env=env); allow_autoprecomp = false) - status(env, ctx.registries; mode=PKGMODE_COMBINED, io=ctx.io, ignore_indent = false, show_usagetips = false) + Pkg.instantiate(Context(env = env); allow_autoprecomp = false) + status(env, ctx.registries; mode = PKGMODE_COMBINED, io = ctx.io, ignore_indent = false, show_usagetips = false) flags = gen_subprocess_flags(source_path; coverage, julia_args) if should_autoprecompile() cacheflags = Base.CacheFlags(parse(UInt8, read(`$(Base.julia_cmd()) $(flags) --eval 'show(ccall(:jl_cache_flags, UInt8, ()))'`, String))) - Pkg.precompile(; io=ctx.io, configs = flags => cacheflags) + # Don't warn about already loaded packages, since we are going to run tests in a new + # subprocess anyway. 
+ Pkg.precompile(; io = ctx.io, warn_loaded = false, configs = flags => cacheflags) end printpkgstyle(ctx.io, :Testing, "Running tests...") @@ -2299,21 +2612,21 @@ function test(ctx::Context, pkgs::Vector{PackageSpec}; end # now we sandbox printpkgstyle(ctx.io, :Testing, pkg.name) - sandbox(ctx, pkg, testdir(source_path), test_project_override; preferences=test_project_preferences, force_latest_compatible_version, allow_earlier_backwards_compatible_versions, allow_reresolve) do + sandbox(ctx, pkg, testdir(source_path), test_project_override; preferences = test_project_preferences, force_latest_compatible_version, allow_earlier_backwards_compatible_versions, allow_reresolve) do test_fn !== nothing && test_fn() - sandbox_ctx = Context(;io=ctx.io) - status(sandbox_ctx.env, sandbox_ctx.registries; mode=PKGMODE_COMBINED, io=sandbox_ctx.io, ignore_indent = false, show_usagetips = false) - flags = gen_subprocess_flags(source_path; coverage,julia_args) + sandbox_ctx = Context(; io = ctx.io) + status(sandbox_ctx.env, sandbox_ctx.registries; mode = PKGMODE_COMBINED, io = sandbox_ctx.io, ignore_indent = false, show_usagetips = false) + flags = gen_subprocess_flags(source_path; coverage, julia_args) if should_autoprecompile() cacheflags = Base.CacheFlags(parse(UInt8, read(`$(Base.julia_cmd()) $(flags) --eval 'show(ccall(:jl_cache_flags, UInt8, ()))'`, String))) - Pkg.precompile(sandbox_ctx; io=sandbox_ctx.io, configs = flags => cacheflags) + Pkg.precompile(sandbox_ctx; io = sandbox_ctx.io, configs = flags => cacheflags) end printpkgstyle(ctx.io, :Testing, "Running tests...") flush(ctx.io) code = gen_test_code(source_path; test_args) - cmd = `$(Base.julia_cmd()) $(flags) --threads=$(get_threads_spec()) --eval $code` + cmd = `$(Base.julia_cmd()) --threads=$(get_threads_spec()) $(flags) --eval $code` p, interrupted = subprocess_handler(cmd, ctx.io, "Tests interrupted. 
Exiting the test process") if success(p) printpkgstyle(ctx.io, :Testing, pkg.name * " tests passed ") @@ -2325,7 +2638,7 @@ function test(ctx::Context, pkgs::Vector{PackageSpec}; # TODO: Should be included in Base function signal_name(signal::Integer) - if signal == Base.SIGHUP + return if signal == Base.SIGHUP "HUP" elseif signal == Base.SIGINT "INT" @@ -2343,9 +2656,9 @@ function test(ctx::Context, pkgs::Vector{PackageSpec}; end # report errors - if !isempty(pkgs_errored) + return if !isempty(pkgs_errored) function reason(p) - if Base.process_signaled(p) + return if Base.process_signaled(p) " (received signal: " * signal_name(p.termsignal) * ")" elseif Base.process_exited(p) && p.exitcode != 1 " (exit code: " * string(p.exitcode) * ")" @@ -2394,7 +2707,7 @@ end # Display -function stat_rep(x::PackageSpec; name=true) +function stat_rep(x::PackageSpec; name = true) name = name ? "$(x.name)" : "" version = x.version == VersionSpec() ? "" : "v$(x.version)" rev = "" @@ -2405,7 +2718,7 @@ function stat_rep(x::PackageSpec; name=true) repo = Operations.is_tracking_repo(x) ? "`$(x.repo.source)$(subdir_str)#$(rev)`" : "" path = Operations.is_tracking_path(x) ? "$(pathrepr(x.path))" : "" pinned = x.pinned ? 
"⚲" : "" - return join(filter(!isempty, [name,version,repo,path,pinned]), " ") + return join(filter(!isempty, [name, version, repo, path, pinned]), " ") end print_single(io::IO, pkg::PackageSpec) = print(io, stat_rep(pkg)) @@ -2413,20 +2726,20 @@ print_single(io::IO, pkg::PackageSpec) = print(io, stat_rep(pkg)) is_instantiated(::Nothing) = false is_instantiated(x::PackageSpec) = x.version != VersionSpec() || is_stdlib(x.uuid) # Compare an old and new node of the dependency graph and print a single line to summarize the change -function print_diff(io::IO, old::Union{Nothing,PackageSpec}, new::Union{Nothing,PackageSpec}) - if !is_instantiated(old) && is_instantiated(new) - printstyled(io, "+ $(stat_rep(new))"; color=:light_green) +function print_diff(io::IO, old::Union{Nothing, PackageSpec}, new::Union{Nothing, PackageSpec}) + return if !is_instantiated(old) && is_instantiated(new) + printstyled(io, "+ $(stat_rep(new))"; color = :light_green) elseif !is_instantiated(new) - printstyled(io, "- $(stat_rep(old))"; color=:light_red) + printstyled(io, "- $(stat_rep(old))"; color = :light_red) elseif is_tracking_registry(old) && is_tracking_registry(new) && - new.version isa VersionNumber && old.version isa VersionNumber && new.version != old.version + new.version isa VersionNumber && old.version isa VersionNumber && new.version != old.version if new.version > old.version - printstyled(io, "↑ $(stat_rep(old)) ⇒ $(stat_rep(new; name=false))"; color=:light_yellow) + printstyled(io, "↑ $(stat_rep(old)) ⇒ $(stat_rep(new; name = false))"; color = :light_yellow) else - printstyled(io, "↓ $(stat_rep(old)) ⇒ $(stat_rep(new; name=false))"; color=:light_magenta) + printstyled(io, "↓ $(stat_rep(old)) ⇒ $(stat_rep(new; name = false))"; color = :light_magenta) end else - printstyled(io, "~ $(stat_rep(old)) ⇒ $(stat_rep(new; name=false))"; color=:light_yellow) + printstyled(io, "~ $(stat_rep(old)) ⇒ $(stat_rep(new; name = false))"; color = :light_yellow) end end @@ -2442,11 +2755,11 @@ 
function status_compat_info(pkg::PackageSpec, env::EnvCache, regs::Vector{Regist reg_compat_info = Registry.compat_info(info) versions = keys(reg_compat_info) versions = filter(v -> !Registry.isyanked(info, v), versions) - max_version_reg = maximum(versions; init=v"0") + max_version_reg = maximum(versions; init = v"0") max_version = max(max_version, max_version_reg) compat_spec = get_compat_workspace(env, pkg.name) versions_in_compat = filter(in(compat_spec), keys(reg_compat_info)) - max_version_in_compat = max(max_version_in_compat, maximum(versions_in_compat; init=v"0")) + max_version_in_compat = max(max_version_in_compat, maximum(versions_in_compat; init = v"0")) end max_version == v"0" && return nothing pkg.version >= max_version && return nothing @@ -2515,7 +2828,7 @@ function status_compat_info(pkg::PackageSpec, env::EnvCache, regs::Vector{Regist return sort!(unique!(packages_holding_back)), max_version, max_version_in_compat end -function diff_array(old_env::Union{EnvCache,Nothing}, new_env::EnvCache; manifest=true, workspace=false) +function diff_array(old_env::Union{EnvCache, Nothing}, new_env::EnvCache; manifest = true, workspace = false) function index_pkgs(pkgs, uuid) idx = findfirst(pkg -> pkg.uuid == uuid, pkgs) return idx === nothing ? nothing : pkgs[idx] @@ -2527,9 +2840,9 @@ function diff_array(old_env::Union{EnvCache,Nothing}, new_env::EnvCache; manifes new = manifest ? load_all_deps_loadable(new_env) : load_project_deps(new_env.project, new_env.project_file, new_env.manifest, new_env.manifest_file) end - T, S = Union{UUID,Nothing}, Union{PackageSpec,Nothing} + T, S = Union{UUID, Nothing}, Union{PackageSpec, Nothing} if old_env === nothing - return Tuple{T,S,S}[(pkg.uuid, nothing, pkg)::Tuple{T,S,S} for pkg in new] + return Tuple{T, S, S}[(pkg.uuid, nothing, pkg)::Tuple{T, S, S} for pkg in new] end if workspace old = manifest ? 
load_all_deps(old_env) : load_direct_deps(old_env) @@ -2538,13 +2851,12 @@ function diff_array(old_env::Union{EnvCache,Nothing}, new_env::EnvCache; manifes end # merge old and new into single array all_uuids = union(T[pkg.uuid for pkg in old], T[pkg.uuid for pkg in new]) - return Tuple{T,S,S}[(uuid, index_pkgs(old, uuid), index_pkgs(new, uuid))::Tuple{T,S,S} for uuid in all_uuids] + return Tuple{T, S, S}[(uuid, index_pkgs(old, uuid), index_pkgs(new, uuid))::Tuple{T, S, S} for uuid in all_uuids] end -function is_package_downloaded(manifest_file::String, pkg::PackageSpec; platform=HostPlatform()) +function is_package_downloaded(manifest_file::String, pkg::PackageSpec; platform = HostPlatform()) sourcepath = source_path(manifest_file, pkg) - identifier = pkg.name !== nothing ? pkg.name : pkg.uuid - (sourcepath === nothing) && pkgerror("Could not locate the source code for the $(identifier) package. Are you trying to use a manifest generated by a different version of Julia?") + sourcepath === nothing && return false isdir(sourcepath) || return false check_artifacts_downloaded(sourcepath; platform) || return false return true @@ -2564,11 +2876,13 @@ function status_ext_info(pkg::PackageSpec, env::EnvCache) # Note: `get_extension` returns nothing for stdlibs that are loaded via `require_stdlib` ext_loaded = (Base.get_extension(Base.PkgId(pkg.uuid, pkg.name), Symbol(ext)) !== nothing) # Check if deps are loaded - extdeps_info= Tuple{String, Bool}[] + extdeps_info = Tuple{String, Bool}[] for extdep in extdeps if !(haskey(weakdepses, extdep) || haskey(depses, extdep)) - pkgerror(isnothing(pkg.name) ? "M" : "$(pkg.name) has a malformed Project.toml, ", - "the extension package $extdep is not listed in [weakdeps] or [deps]") + pkgerror( + isnothing(pkg.name) ? 
"M" : "$(pkg.name) has a malformed Project.toml, ", + "the extension package $extdep is not listed in [weakdeps] or [deps]" + ) end uuid = get(weakdepses, extdep, nothing) if uuid === nothing @@ -2600,29 +2914,39 @@ struct PackageStatusData extinfo::Union{Nothing, Vector{ExtInfo}} end -function print_status(env::EnvCache, old_env::Union{Nothing,EnvCache}, registries::Vector{Registry.RegistryInstance}, header::Symbol, - uuids::Vector, names::Vector; manifest=true, diff=false, ignore_indent::Bool, workspace::Bool, outdated::Bool, extensions::Bool, io::IO, - mode::PackageMode, hidden_upgrades_info::Bool, show_usagetips::Bool=true) - not_installed_indicator = sprint((io, args) -> printstyled(io, args...; color=Base.error_color()), "→", context=io) - upgradable_indicator = sprint((io, args) -> printstyled(io, args...; color=:green), "⌃", context=io) - heldback_indicator = sprint((io, args) -> printstyled(io, args...; color=Base.warn_color()), "⌅", context=io) +function print_status( + env::EnvCache, old_env::Union{Nothing, EnvCache}, registries::Vector{Registry.RegistryInstance}, header::Symbol, + uuids::Vector, names::Vector; manifest = true, diff = false, ignore_indent::Bool, workspace::Bool, outdated::Bool, extensions::Bool, io::IO, + mode::PackageMode, hidden_upgrades_info::Bool, show_usagetips::Bool = true + ) + not_installed_indicator = sprint((io, args) -> printstyled(io, args...; color = Base.error_color()), "→", context = io) + upgradable_indicator = sprint((io, args) -> printstyled(io, args...; color = :green), "⌃", context = io) + heldback_indicator = sprint((io, args) -> printstyled(io, args...; color = Base.warn_color()), "⌅", context = io) filter = !isempty(uuids) || !isempty(names) # setup xs = diff_array(old_env, env; manifest, workspace) # filter and return early if possible if isempty(xs) && !diff - printpkgstyle(io, header, "$(pathrepr(manifest ? env.manifest_file : env.project_file)) (empty " * - (manifest ? 
"manifest" : "project") * ")", ignore_indent) + printpkgstyle( + io, header, "$(pathrepr(manifest ? env.manifest_file : env.project_file)) (empty " * + (manifest ? "manifest" : "project") * ")", ignore_indent + ) return nothing end - no_changes = all(p-> p[2] == p[3], xs) + no_changes = all(p -> p[2] == p[3], xs) if no_changes - printpkgstyle(io, Symbol("No packages added to or removed from"), "$(pathrepr(manifest ? env.manifest_file : env.project_file))", ignore_indent) + if manifest + printpkgstyle(io, :Manifest, "No packages added to or removed from $(pathrepr(env.manifest_file))", ignore_indent; color = Base.info_color()) + else + printpkgstyle(io, :Project, "No packages added to or removed from $(pathrepr(env.project_file))", ignore_indent; color = Base.info_color()) + end else xs = !filter ? xs : eltype(xs)[(id, old, new) for (id, old, new) in xs if (id in uuids || something(new, old).name in names)] if isempty(xs) - printpkgstyle(io, Symbol("No Matches"), - "in $(diff ? "diff for " : "")$(pathrepr(manifest ? env.manifest_file : env.project_file))", ignore_indent) + printpkgstyle( + io, Symbol("No Matches"), + "in $(diff ? "diff for " : "")$(pathrepr(manifest ? 
env.manifest_file : env.project_file))", ignore_indent + ) return nothing end # main print @@ -2681,8 +3005,8 @@ function print_status(env::EnvCache, old_env::Union{Nothing,EnvCache}, registrie pkg_downloaded = !is_instantiated(new) || is_package_downloaded(env.manifest_file, new) new_ver_avail = !latest_version && !Operations.is_tracking_repo(new) && !Operations.is_tracking_path(new) - pkg_upgradable = new_ver_avail && isempty(cinfo[1]) - pkg_heldback = new_ver_avail && !isempty(cinfo[1]) + pkg_upgradable = new_ver_avail && cinfo !== nothing && isempty(cinfo[1]) + pkg_heldback = new_ver_avail && cinfo !== nothing && !isempty(cinfo[1]) if !pkg_downloaded && (pkg_upgradable || pkg_heldback) # allow space in the gutter for two icons on a single line @@ -2720,20 +3044,45 @@ function print_status(env::EnvCache, old_env::Union{Nothing,EnvCache}, registrie diff ? print_diff(io, pkg.old, pkg.new) : print_single(io, pkg.new) + # show if package is yanked + pkg_spec = something(pkg.new, pkg.old) + if is_pkgversion_yanked(pkg_spec, registries) + printstyled(io, " [yanked]"; color = :yellow) + end + if outdated && !diff && pkg.compat_data !== nothing packages_holding_back, max_version, max_version_compat = pkg.compat_data if pkg.new.version !== max_version_compat && max_version_compat != max_version - printstyled(io, " [