diff --git a/.buildkite-external-version b/.buildkite-external-version index ba2906d0666cf..ecf90daf34336 100644 --- a/.buildkite-external-version +++ b/.buildkite-external-version @@ -1 +1 @@ -main +release-julia-1.13 diff --git a/Compiler/src/tfuncs.jl b/Compiler/src/tfuncs.jl index 3b4ee2b4cb676..98859efa8c87d 100644 --- a/Compiler/src/tfuncs.jl +++ b/Compiler/src/tfuncs.jl @@ -708,7 +708,7 @@ function pointer_eltype(@nospecialize(ptr)) end @nospecs function pointerarith_tfunc(𝕃::AbstractLattice, ptr, offset) - return ptr + return widenconst(ptr) end @nospecs function pointerref_tfunc(𝕃::AbstractLattice, a, i, align) return pointer_eltype(a) diff --git a/Compiler/test/effects.jl b/Compiler/test/effects.jl index 3d996268b77ed..cbb77e1d8e2a3 100644 --- a/Compiler/test/effects.jl +++ b/Compiler/test/effects.jl @@ -1486,3 +1486,10 @@ let effects = Base.infer_effects((Core.SimpleVector,Int); optimize=false) do sve end @test Compiler.is_nothrow(Base.infer_effects(length, (Core.SimpleVector,))) + + +# https://github.com/JuliaLang/julia/issues/60009 +function null_offset(offset) + Ptr{UInt8}(C_NULL) + offset +end +@test null_offset(Int(100)) == Ptr{UInt8}(UInt(100)) diff --git a/Makefile b/Makefile index 462f7dddd161b..7ae414637cdda 100644 --- a/Makefile +++ b/Makefile @@ -712,7 +712,7 @@ distcleanall: cleanall # Generate compilation database (leverages existing clang tooling setup) compile-database: - @$(MAKE) $(QUIET_MAKE) -C $(BUILDROOT)/src compile-database-src + @$(MAKE) $(QUIET_MAKE) -C $(BUILDROOT)/src compile-database test: check-whitespace $(JULIA_BUILD_MODE) @$(MAKE) $(QUIET_MAKE) -C $(BUILDROOT)/test default JULIA_BUILD_MODE=$(JULIA_BUILD_MODE) diff --git a/base/docs/Docs.jl b/base/docs/Docs.jl index c2f3548150997..8817d82a6add6 100644 --- a/base/docs/Docs.jl +++ b/base/docs/Docs.jl @@ -406,6 +406,10 @@ function objectdoc(__source__, __module__, str, def, expr, sig = :(Union{})) # Special case: `global x` should return nothing to avoid syntax errors with 
assigning to a value val = nothing else + if isexpr(def, :(=), 2) && isexpr(def.args[1], :curly) + # workaround for lowering bug #60001 + exdef = Expr(:block, exdef) + end val = :val exdef = Expr(:(=), val, exdef) end diff --git a/base/loading.jl b/base/loading.jl index 1bb00c3d3cbbb..7dfa1e8b1492e 100644 --- a/base/loading.jl +++ b/base/loading.jl @@ -4018,7 +4018,7 @@ end record_reason(::Nothing, ::String) = nothing function list_reasons(reasons::Dict{String,Int}) isempty(reasons) && return "" - return " (cache misses: $(join(("$k ($v)" for (k,v) in reasons), ", ")))" + return " (caches not reused: $(join(("$v for $k" for (k,v) in reasons), ", ")))" end list_reasons(::Nothing) = "" @@ -4027,7 +4027,7 @@ function any_includes_stale(includes::Vector{CacheHeaderIncludes}, cachefile::St f, fsize_req, hash_req, ftime_req = chi.filename, chi.fsize, chi.hash, chi.mtime if startswith(f, string("@depot", Filesystem.pathsep())) @debug("Rejecting stale cache file $cachefile because its depot could not be resolved") - record_reason(reasons, "nonresolveable depot") + record_reason(reasons, "file location uses unresolved depot path") return true end if !ispath(f) @@ -4036,7 +4036,7 @@ function any_includes_stale(includes::Vector{CacheHeaderIncludes}, cachefile::St continue end @debug "Rejecting stale cache file $cachefile because file $f does not exist" - record_reason(reasons, "missing sourcefile") + record_reason(reasons, "source file not found") return true end if ftime_req >= 0.0 @@ -4050,7 +4050,7 @@ function any_includes_stale(includes::Vector{CacheHeaderIncludes}, cachefile::St !( 0 < (ftime_req - ftime) < 1e-6 ) # PR #45552: Compensate for Windows tar giving mtimes that may be incorrect by up to one microsecond if is_stale @debug "Rejecting stale cache file $cachefile because mtime of include_dependency $f has changed (mtime $ftime, before $ftime_req)" - record_reason(reasons, "include_dependency mtime change") + record_reason(reasons, "file modification time changed") 
return true end else @@ -4058,13 +4058,13 @@ function any_includes_stale(includes::Vector{CacheHeaderIncludes}, cachefile::St fsize = filesize(fstat) if fsize != fsize_req @debug "Rejecting stale cache file $cachefile because file size of $f has changed (file size $fsize, before $fsize_req)" - record_reason(reasons, "include_dependency fsize change") + record_reason(reasons, "file size changed") return true end hash = isdir(fstat) ? _crc32c(join(readdir(f))) : open(_crc32c, f, "r") if hash != hash_req @debug "Rejecting stale cache file $cachefile because hash of $f has changed (hash $hash, before $hash_req)" - record_reason(reasons, "include_dependency fhash change") + record_reason(reasons, "file content changed") return true end end @@ -4092,7 +4092,7 @@ end checksum = isvalid_cache_header(io) if iszero(checksum) @debug "Rejecting cache file $cachefile due to it containing an incompatible cache header" - record_reason(reasons, "incompatible header") + record_reason(reasons, "different Julia build configuration") return true # incompatible cache file end modules, (includes, _, requires), required_modules, srctextpos, prefs, prefs_hash, clone_targets, actual_flags = parse_cache_header(io, cachefile) @@ -4105,7 +4105,7 @@ end requested flags: $(requested_flags) [$(_cacheflag_to_uint8(requested_flags))] cache file: $(CacheFlags(actual_flags)) [$actual_flags] """ - record_reason(reasons, "mismatched flags") + record_reason(reasons, "different compilation options") return true end pkgimage = !isempty(clone_targets) @@ -4114,7 +4114,7 @@ end if JLOptions().use_pkgimages == 0 # presence of clone_targets means native code cache @debug "Rejecting cache file $cachefile for $modkey since it would require usage of pkgimage" - record_reason(reasons, "requires pkgimages") + record_reason(reasons, "native code caching disabled") return true end rejection_reasons = check_clone_targets(clone_targets) @@ -4123,12 +4123,12 @@ end Reasons=rejection_reasons, var"Image 
Targets"=parse_image_targets(clone_targets), var"Current Targets"=current_image_targets()) - record_reason(reasons, "target mismatch") + record_reason(reasons, "different system or CPU target") return true end if !isfile(ocachefile) @debug "Rejecting cache file $cachefile for $modkey since pkgimage $ocachefile was not found" - record_reason(reasons, "missing ocachefile") + record_reason(reasons, "native code cache file not found") return true end else @@ -4137,7 +4137,7 @@ end id = first(modules) if id.first != modkey && modkey != PkgId("") @debug "Rejecting cache file $cachefile for $modkey since it is for $id instead" - record_reason(reasons, "for different pkgid") + record_reason(reasons, "different package identifier") return true end id_build = id.second @@ -4145,7 +4145,7 @@ end if build_id != UInt128(0) if id_build != build_id @debug "Ignoring cache file $cachefile for $modkey ($(UUID(id_build))) since it does not provide desired build_id ($((UUID(build_id))))" - record_reason(reasons, "for different buildid") + record_reason(reasons, "different build identifier") return true end end @@ -4171,20 +4171,20 @@ end continue elseif M == Core @debug "Rejecting cache file $cachefile because it was made with a different julia version" - record_reason(reasons, "wrong julia version") + record_reason(reasons, "different Julia version") return true # Won't be able to fulfill dependency elseif ignore_loaded || !stalecheck # Used by Pkg.precompile given that there it's ok to precompile different versions of loaded packages else @debug "Rejecting cache file $cachefile because module $req_key is already loaded and incompatible." 
- record_reason(reasons, "wrong dep version loaded") + record_reason(reasons, "different dependency version already loaded") return true # Won't be able to fulfill dependency end end path = locate_package(req_key) # TODO: add env and/or skip this when stalecheck is false if path === nothing @debug "Rejecting cache file $cachefile because dependency $req_key not found." - record_reason(reasons, "dep missing source") + record_reason(reasons, "dependency source file not found") return true # Won't be able to fulfill dependency end depmods[i] = (path, req_key, req_build_id) @@ -4203,7 +4203,7 @@ end break end @debug "Rejecting cache file $cachefile because it provides the wrong build_id (got $((UUID(build_id)))) for $req_key (want $(UUID(req_build_id)))" - record_reason(reasons, "wrong dep buildid") + record_reason(reasons, "different dependency build identifier") return true # cachefile doesn't provide the required version of the dependency end end @@ -4219,7 +4219,7 @@ end if !(isreadable(stdlib_path) && samefile(stdlib_path, modpath)) !samefile(fixup_stdlib_path(includes[1].filename), modpath) @debug "Rejecting cache file $cachefile because it is for file $(includes[1].filename) not file $modpath" - record_reason(reasons, "wrong source") + record_reason(reasons, "different source file path") return true # cache file was compiled from a different path end end @@ -4228,7 +4228,7 @@ end pkg = identify_package(modkey, req_modkey.name) if pkg != req_modkey @debug "Rejecting cache file $cachefile because uuid mapping for $modkey => $req_modkey has changed, expected $modkey => $(repr("text/plain", pkg))" - record_reason(reasons, "dep uuid changed") + record_reason(reasons, "dependency identifier changed") return true end end @@ -4239,14 +4239,14 @@ end if !isvalid_file_crc(io) @debug "Rejecting cache file $cachefile because it has an invalid checksum" - record_reason(reasons, "invalid checksum") + record_reason(reasons, "cache file checksum is invalid") return true end if 
pkgimage if !isvalid_pkgimage_crc(io, ocachefile::String) @debug "Rejecting cache file $cachefile because $ocachefile has an invalid checksum" - record_reason(reasons, "ocachefile invalid checksum") + record_reason(reasons, "native code cache checksum is invalid") return true end end @@ -4254,7 +4254,7 @@ end curr_prefs_hash = get_preferences_hash(id.uuid, prefs) if prefs_hash != curr_prefs_hash @debug "Rejecting cache file $cachefile because preferences hash does not match 0x$(string(prefs_hash, base=16)) != 0x$(string(curr_prefs_hash, base=16))" - record_reason(reasons, "preferences hash mismatch") + record_reason(reasons, "package preferences changed") return true end diff --git a/base/precompilation.jl b/base/precompilation.jl index e4883cea830a8..adecc50f6d1e4 100644 --- a/base/precompilation.jl +++ b/base/precompilation.jl @@ -406,7 +406,7 @@ function excluded_circular_deps_explanation(io::IOContext{IO}, ext_to_parent::Di else line = " β””" * "─" ^j * " " end - hascolor = get(io, :color, false)::Bool # XXX: this output does not go to `io` so this is bad to call here + hascolor = get(io, :color, false)::Bool line = _color_string(line, :light_black, hascolor) * full_name(ext_to_parent, pkg) * "\n" cycle_str *= line end @@ -471,76 +471,6 @@ function collect_all_deps(direct_deps, dep, alldeps=Set{Base.PkgId}()) end -""" - precompilepkgs(pkgs; kwargs...) - -Precompile packages and their dependencies, with support for parallel compilation, -progress tracking, and various compilation configurations. - -`pkgs::Union{Vector{String}, Vector{PkgId}}`: Packages to precompile. When -empty (default), precompiles all project dependencies. When specified, -precompiles only the given packages and their dependencies (unless -`manifest=true`). - -!!! note - Errors will only throw when precompiling the top-level dependencies, given that - not all manifest dependencies may be loaded by the top-level dependencies on the given system. 
- This can be overridden to make errors in all dependencies throw by setting the kwarg `strict` to `true` - -# Keyword Arguments -- `internal_call::Bool`: Indicates this is an automatic precompilation call - from somewhere external (e.g. Pkg). Do not use this parameter. - -- `strict::Bool`: Controls error reporting scope. When `false` (default), only reports - errors for direct project dependencies. Only relevant when `manifest=true`. - -- `warn_loaded::Bool`: When `true` (default), checks for and warns about packages that are - precompiled but already loaded with a different version. Displays a warning that Julia - needs to be restarted to use the newly precompiled versions. - -- `timing::Bool`: When `true` (not default), displays timing information for - each package compilation, but only if compilation might have succeeded. - Disables fancy progress bar output (timing is shown in simple text mode). - -- `_from_loading::Bool`: Internal flag indicating the call originated from the - package loading system. When `true` (not default): returns early instead of - throwing when packages are not found; suppresses progress messages when not - in an interactive session; allows packages outside the current environment to - be added as serial precompilation jobs; skips LOADING_CACHE initialization; - and changes cachefile locking behavior. - -- `configs::Union{Config,Vector{Config}}`: Compilation configurations to use. Each Config - is a `Pair{Cmd, Base.CacheFlags}` specifying command flags and cache flags. When - multiple configs are provided, each package is precompiled for each configuration. - -- `io::IO`: Output stream for progress messages, warnings, and errors. Can be - redirected (e.g., to `devnull` when called from loading in non-interactive mode). - -- `fancyprint::Bool`: Controls output format. When `true`, displays an animated progress - bar with spinners. When `false`, instead enables `timing` mode. 
Automatically - disabled when `timing=true` or when called from loading in non-interactive mode. - -- `manifest::Bool`: Controls the scope of packages to precompile. When `false` (default), - precompiles only packages specified in `pkgs` and their dependencies. When `true`, - precompiles all packages in the manifest (workspace mode), typically used by Pkg for - workspace precompile requests. - -- `ignore_loaded::Bool`: Controls whether already-loaded packages affect cache - freshness checks. When `false` (not default), loaded package versions are considered when - determining if cache files are fresh. - -# Return -- `Vector{String}`: Paths to cache files for the requested packages. -- `Nothing`: precompilation should be skipped - -# Notes -- Packages in circular dependency cycles are skipped with a warning. -- Packages with `__precompile__(false)` are skipped if they are from loading to - avoid repeated work on every session. -- Parallel compilation is controlled by `JULIA_NUM_PRECOMPILE_TASKS` environment variable - (defaults to CPU_THREADS + 1, capped at 16, halved on Windows). -- Extensions are precompiled when all their triggers are available in the environment. -""" function precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}=String[]; internal_call::Bool=false, strict::Bool = false, @@ -567,7 +497,7 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, timing::Bool, _from_loading::Bool, configs::Vector{Config}, - io::IOContext{IO}, + _io::IOContext{IO}, fancyprint::Bool, manifest::Bool, ignore_loaded::Bool) @@ -605,18 +535,16 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, # suppress precompilation progress messages when precompiling for loading packages, except during interactive sessions # or when specified by logging heuristics that explicitly require it # since the complicated IO implemented here can have somewhat disastrous consequences when happening in the background (e.g. 
#59599) - logio = io + io = _io logcalls = nothing - if _from_loading - if !isinteractive() - logio = IOContext{IO}(devnull) - fancyprint = false - end + if _from_loading && !isinteractive() + io = IOContext{IO}(devnull) + fancyprint = false logcalls = isinteractive() ? CoreLogging.Info : CoreLogging.Debug # sync with Base.compilecache end nconfigs = length(configs) - hascolor = get(logio, :color, false)::Bool + hascolor = get(io, :color, false)::Bool color_string(cstr::String, col::Union{Int64, Symbol}) = _color_string(cstr, col, hascolor) stale_cache = Dict{StaleCacheKey, Bool}() @@ -817,7 +745,8 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, pkg_names = [pkg.name for pkg in project_deps] end keep = Set{Base.PkgId}() - for dep_pkgid in keys(direct_deps) + for dep in direct_deps + dep_pkgid = first(dep) if dep_pkgid.name in pkg_names push!(keep, dep_pkgid) collect_all_deps(direct_deps, dep_pkgid, keep) @@ -906,10 +835,9 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, pkg_liveprinted = Ref{Union{Nothing, PkgId}}(nothing) function monitor_std(pkg_config, pipe; single_requested_pkg=false) - local pkg, config = pkg_config + pkg, config = pkg_config try - local liveprinting = false - local thistaskwaiting = false + liveprinting = false while !eof(pipe) local str = readline(pipe, keep=true) if single_requested_pkg && (liveprinting || !isempty(str)) @@ -922,18 +850,15 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, end end write(get!(IOBuffer, std_outputs, pkg_config), str) - if thistaskwaiting - if occursin("Waiting for background task / IO / timer", str) - thistaskwaiting = true - !liveprinting && !fancyprint && @lock print_lock begin - println(io, pkg.name, color_string(str, Base.warn_color())) - end - push!(taskwaiting, pkg_config) + if !in(pkg_config, taskwaiting) && occursin("waiting for IO to finish", str) + !fancyprint && @lock print_lock begin + println(io, pkg.name, color_string(" Waiting 
for background task / IO / timer.", Base.warn_color())) end - else - # XXX: don't just re-enable IO for random packages without printing the context for them first - !liveprinting && !fancyprint && @lock print_lock begin - print(io, ansi_cleartoendofline, str) + push!(taskwaiting, pkg_config) + end + if !fancyprint && in(pkg_config, taskwaiting) + @lock print_lock begin + print(io, str) end end end @@ -949,10 +874,10 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, (isempty(pkg_queue) || interrupted_or_done[]) && return @lock print_lock begin if target[] !== nothing - printpkgstyle(logio, :Precompiling, target[]) + printpkgstyle(io, :Precompiling, target[]) end if fancyprint - print(logio, ansi_disablecursor) + print(io, ansi_disablecursor) end end t = Timer(0; interval=1/10) @@ -966,7 +891,7 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, n_print_rows = 0 while !printloop_should_exit[] @lock print_lock begin - term_size = displaysize(logio)::Tuple{Int, Int} + term_size = displaysize(io)::Tuple{Int, Int} num_deps_show = max(term_size[1] - 3, 2) # show at least 2 deps pkg_queue_show = if !interrupted_or_done[] && length(pkg_queue) > num_deps_show last(pkg_queue, num_deps_show) @@ -983,7 +908,7 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, # window between print cycles termwidth = (displaysize(io)::Tuple{Int,Int})[2] - 4 if !final_loop - s = sprint(io -> show_progress(io, bar; termwidth, carriagereturn=false); context=logio) + s = sprint(io -> show_progress(io, bar; termwidth, carriagereturn=false); context=io) print(iostr, Base._truncate_at_width_or_chars(true, s, termwidth), "\n") end for pkg_config in pkg_queue_show @@ -1024,11 +949,11 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, end last_length = length(pkg_queue_show) n_print_rows = count("\n", str_) - print(logio, str_) + print(io, str_) printloop_should_exit[] = interrupted_or_done[] && final_loop final_loop = 
interrupted_or_done[] # ensures one more loop to tidy last task after finish i += 1 - printloop_should_exit[] || print(logio, ansi_moveup(n_print_rows), ansi_movecol1) + printloop_should_exit[] || print(io, ansi_moveup(n_print_rows), ansi_movecol1) end wait(t) end @@ -1038,7 +963,7 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, # Base.display_error(ErrorException(""), Base.catch_backtrace()) handle_interrupt(err, true) || rethrow() finally - fancyprint && print(logio, ansi_enablecursor) + fancyprint && print(io, ansi_enablecursor) end end @@ -1065,10 +990,8 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, notify(was_processed[pkg_config]) continue end - # Heuristic for when precompilation is disabled, which must not over-estimate however for any dependent - # since it will also block precompilation of all dependents - if _from_loading && single_requested_pkg && occursin(r"\b__precompile__\(\s*false\s*\)", read(sourcepath, String)) - Base.@logmsg logcalls "Disabled precompiling $(repr("text/plain", pkg)) since the text `__precompile__(false)` was found in file." 
+ # Heuristic for when precompilation is disabled + if occursin(r"\b__precompile__\(\s*false\s*\)", read(sourcepath, String)) notify(was_processed[pkg_config]) continue end @@ -1090,8 +1013,8 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, end if !circular && is_stale Base.acquire(parallel_limiter) - is_serial_dep = pkg in serial_deps is_project_dep = pkg in project_deps + is_serial_dep = pkg in serial_deps # std monitoring std_pipe = Base.link_pipe!(Pipe(); reader_supports_async=true, writer_supports_async=true) @@ -1101,7 +1024,7 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, name = describe_pkg(pkg, is_project_dep, is_serial_dep, flags, cacheflags) @lock print_lock begin if !fancyprint && isempty(pkg_queue) - printpkgstyle(logio, :Precompiling, something(target[], "packages...")) + printpkgstyle(io, :Precompiling, something(target[], "packages...")) end end push!(pkg_queue, pkg_config) @@ -1112,13 +1035,7 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, end # for extensions, any extension in our direct dependencies is one we have a right to load # for packages, we may load any extension (all possible triggers are accounted for above) - loadable_exts = haskey(ext_to_parent, pkg) ? filter((dep)->haskey(ext_to_parent, dep), deps) : nothing - if !isempty(deps) - # if deps is empty, either it doesn't have any (so compiled-modules is - # irrelevant) or we couldn't compute them (so we actually should attempt - # serial compile, as the dependencies are not in the parallel list) - flags = `$flags --compiled-modules=strict` - end + loadable_exts = haskey(ext_to_parent, pkg) ? 
filter((dep)->haskey(ext_to_parent, dep), direct_deps[pkg]) : nothing if _from_loading && pkg in requested_pkgids # loading already took the cachefile_lock and printed logmsg for its explicit requests t = @elapsed ret = begin @@ -1150,11 +1067,11 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, if ret isa Exception push!(precomperr_deps, pkg_config) !fancyprint && @lock print_lock begin - println(logio, _timing_string(t), color_string(" ? ", Base.warn_color()), name) + println(io, _timing_string(t), color_string(" ? ", Base.warn_color()), name) end else !fancyprint && @lock print_lock begin - println(logio, _timing_string(t), color_string(" βœ“ ", loaded ? Base.warn_color() : :green), name) + println(io, _timing_string(t), color_string(" βœ“ ", loaded ? Base.warn_color() : :green), name) end if ret !== nothing was_recompiled[pkg_config] = true @@ -1170,9 +1087,11 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, close(std_pipe.in) # close pipe to end the std output monitor wait(t_monitor) if err isa ErrorException || (err isa ArgumentError && startswith(err.msg, "Invalid header in cache file")) - failed_deps[pkg_config] = sprint(showerror, err) + errmsg = String(take!(get(IOBuffer, std_outputs, pkg_config))) + delete!(std_outputs, pkg_config) # so it's not shown as warnings, given error report + failed_deps[pkg_config] = (strict || is_project_dep) ? 
string(sprint(showerror, err), "\n", strip(errmsg)) : "" !fancyprint && @lock print_lock begin - println(logio, " "^12, color_string(" βœ— ", Base.error_color()), name) + println(io, " "^12, color_string(" βœ— ", Base.error_color()), name) end else rethrow() @@ -1220,25 +1139,9 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, quick_exit = any(t -> !istaskdone(t) || istaskfailed(t), tasks) || interrupted[] # all should have finished (to avoid memory corruption) seconds_elapsed = round(Int, (time_ns() - time_start) / 1e9) ndeps = count(values(was_recompiled)) - # Determine if any of failures were a requested package - requested_errs = false - for ((dep, config), err) in failed_deps - if dep in requested_pkgids - requested_errs = true - break - end - end - # if every requested package succeeded, filter away output from failed packages - # since it didn't contribute to the overall success and can be regenerated if that package is later required - if !strict && !requested_errs - for (pkg_config, err) in failed_deps - delete!(std_outputs, pkg_config) - end - empty!(failed_deps) - end - if ndeps > 0 || !isempty(failed_deps) - if !quick_exit - logstr = sprint(context=logio) do iostr + if ndeps > 0 || !isempty(failed_deps) || (quick_exit && !isempty(std_outputs)) + str = sprint(context=io) do iostr + if !quick_exit if fancyprint # replace the progress bar what = isempty(requested_pkgids) ? "packages finished." : "$(join((p.name for p in requested_pkgids), ", ", " and ")) finished." printpkgstyle(iostr, :Precompiling, what) @@ -1271,17 +1174,10 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, ) end end - @lock print_lock begin - println(logio, logstr) - end - end - end - if !isempty(std_outputs) - str = sprint(context=io) do iostr # show any stderr output, even if Pkg.precompile has been interrupted (quick_exit=true), given user may be - # interrupting a hanging precompile job with stderr output. 
+ # interrupting a hanging precompile job with stderr output. julia#48371 let std_outputs = Tuple{PkgConfig,SubString{String}}[(pkg_config, strip(String(take!(io)))) for (pkg_config,io) in std_outputs] - filter!(!isempty∘last, std_outputs) + filter!(kv -> !isempty(last(kv)), std_outputs) if !isempty(std_outputs) local plural1 = length(std_outputs) == 1 ? "y" : "ies" local plural2 = length(std_outputs) == 1 ? "" : "s" @@ -1299,32 +1195,49 @@ function _precompilepkgs(pkgs::Union{Vector{String}, Vector{PkgId}}, end end end - isempty(str) || @lock print_lock begin + @lock print_lock begin println(io, str) end - end - # Done cleanup and sub-process output, now ensure caller aborts too with the right error - if interrupted[] - throw(InterruptException()) - end - # Fail noisily now with failed_deps if any. - # Include all messages from compilecache since any might be relevant in the failure. - if !isempty(failed_deps) + if interrupted[] + # done cleanup, now ensure caller aborts too + throw(InterruptException()) + end + quick_exit && return Vector{String}[] err_str = IOBuffer() - for ((dep, config), err) in failed_deps - write(err_str, "\n") - print(err_str, "\n", dep.name, " ") - join(err_str, config[1], " ") - print(err_str, "\n", err) + n_direct_errs = 0 + for (pkg_config, err) in failed_deps + dep, config = pkg_config + if strict || (dep in project_deps) + print(err_str, "\n", dep.name, " ") + for cfg in config[1] + print(err_str, cfg, " ") + end + print(err_str, "\n\n", err) + n_direct_errs > 0 && write(err_str, "\n") + n_direct_errs += 1 + end end - n_errs = length(failed_deps) - pluraled = n_errs == 1 ? 
"" : "s" - err_msg = "The following $n_errs package$(pluraled) failed to precompile:$(String(take!(err_str)))\n" - if internal_call - # Pkg does not implement correct error handling, so this sometimes handles them instead - print(io, err_msg) - else - throw(PkgPrecompileError(err_msg)) + if position(err_str) > 0 + skip(err_str, -1) + truncate(err_str, position(err_str)) + pluralde = n_direct_errs == 1 ? "y" : "ies" + direct = strict ? "" : "direct " + err_msg = "The following $n_direct_errs $(direct)dependenc$(pluralde) failed to precompile:\n$(String(take!(err_str)))" + if internal_call # aka. auto-precompilation + if isinteractive() + plural1 = length(failed_deps) == 1 ? "y" : "ies" + println(io, " ", color_string("$(length(failed_deps))", Base.error_color()), " dependenc$(plural1) errored.") + println(io, " For a report of the errors see `julia> err`. To retry use `pkg> precompile`") + setglobal!(Base.MainInclude, :err, PkgPrecompileError(err_msg)) + else + # auto-precompilation shouldn't throw but if the user can't easily access the + # error messages, just show them + print(io, "\n", err_msg) + end + else + println(io) + throw(PkgPrecompileError(err_msg)) + end end end return collect(String, Iterators.flatten((v for (pkgid, v) in cachepath_cache if pkgid in requested_pkgids))) diff --git a/base/toml_parser.jl b/base/toml_parser.jl index 458f04bbf9cdf..f07f25eeddf25 100644 --- a/base/toml_parser.jl +++ b/base/toml_parser.jl @@ -1110,7 +1110,7 @@ function _parse_local_time(l::Parser, skip_hour=false)::Err{NTuple{4, Int64}} second in 0:59 || return ParserError(ErrParsingDateTime) # optional fractional second - fractional_second = Int64(0) + millisecond = Int64(0) if accept(l, '.') set_marker!(l) found_fractional_digit = false @@ -1121,12 +1121,15 @@ function _parse_local_time(l::Parser, skip_hour=false)::Err{NTuple{4, Int64}} return ParserError(ErrParsingDateTime) end # DateTime in base only manages 3 significant digits in fractional - # second + # second. 
Interpret parsed digits as fractional seconds and scale to + # milliseconds precision (e.g., ".2" => 200ms, ".20" => 200ms). + ndigits = l.prevpos - l.marker fractional_second = parse_int(l, false)::Int64 + millisecond = fractional_second * 10^(3 - ndigits) # Truncate off the rest eventual digits accept_batch(l, isdigit) end - return hour, minute, second, fractional_second + return hour, minute, second, millisecond end diff --git a/deps/checksums/cacert-2025-09-09.pem/md5 b/deps/checksums/cacert-2025-09-09.pem/md5 deleted file mode 100644 index 5e71d6346f3d4..0000000000000 --- a/deps/checksums/cacert-2025-09-09.pem/md5 +++ /dev/null @@ -1 +0,0 @@ -b7cf55bb817092ae0848509613079931 diff --git a/deps/checksums/cacert-2025-09-09.pem/sha512 b/deps/checksums/cacert-2025-09-09.pem/sha512 deleted file mode 100644 index 5683f52ecc0d7..0000000000000 --- a/deps/checksums/cacert-2025-09-09.pem/sha512 +++ /dev/null @@ -1 +0,0 @@ -bddfc1575a830d31417b3f55adf3b18c2d4bd1434da6e3883f771ab61194e3bf2d5a7d50033f60354bd425bc78f2e07d0663ddab711bfb9d48a8ea2e8cf7de4e diff --git a/deps/checksums/cacert-2025-11-04.pem/md5 b/deps/checksums/cacert-2025-11-04.pem/md5 new file mode 100644 index 0000000000000..641a98aecef02 --- /dev/null +++ b/deps/checksums/cacert-2025-11-04.pem/md5 @@ -0,0 +1 @@ +4ca8e1c3e8fc44c3ecd7a1fb9d3a6d03 diff --git a/deps/checksums/cacert-2025-11-04.pem/sha512 b/deps/checksums/cacert-2025-11-04.pem/sha512 new file mode 100644 index 0000000000000..bbd48b9475d7f --- /dev/null +++ b/deps/checksums/cacert-2025-11-04.pem/sha512 @@ -0,0 +1 @@ +9d9f7ecc829bafc222501d8a66852d96a51f522b04a82963e4166c87b85d6a5e5eedb50ced2ef3026cd7cb06fcb4b7dca59c4157813a067cb7b185e32f2957ec diff --git a/deps/libgit2.version b/deps/libgit2.version index 056ada6ca8e89..6c80bdc41e928 100644 --- a/deps/libgit2.version +++ b/deps/libgit2.version @@ -11,4 +11,4 @@ LIBGIT2_SHA1=0060d9cf5666f015b1067129bd874c6cc4c9c7ac # The versions of cacert.pem are identified by the date (YYYY-MM-DD) of their 
changes. # See https://curl.haxx.se/docs/caextract.html for more details. # Keep in sync with `stdlib/MozillaCACerts_jll/Project.toml`. -MOZILLA_CACERT_VERSION := 2025-09-09 +MOZILLA_CACERT_VERSION := 2025-11-04 diff --git a/doc/src/devdocs/functions.md b/doc/src/devdocs/functions.md index 70893a809b54f..fb1123f9b19ae 100644 --- a/doc/src/devdocs/functions.md +++ b/doc/src/devdocs/functions.md @@ -7,7 +7,7 @@ This document will explain how functions, method definitions, and method tables Every function in Julia is a generic function. A generic function is conceptually a single function, but consists of many definitions, or methods. The methods of a generic function are stored in a -method table. There is one global method table (type `MethodTable`) named `Core.GlobalMethods`. Any +method table. There is one global method table (type `MethodTable`) named `Core.methodtable`. Any default operation on methods (such as calls) uses that table. ## [Function calls](@id Function-calls) diff --git a/doc/src/devdocs/precompile_hang.md b/doc/src/devdocs/precompile_hang.md index 279ffec5360e8..2204651848509 100644 --- a/doc/src/devdocs/precompile_hang.md +++ b/doc/src/devdocs/precompile_hang.md @@ -17,7 +17,7 @@ If you follow the advice and hit `Ctrl-C`, you might see 1 dependency had warnings during precompilation: β”Œ Test1 [ac89d554-e2ba-40bc-bc5c-de68b658c982] -β”‚ [pid 2745] Waiting for background task / IO / timer to finish: +β”‚ [pid 2745] waiting for IO to finish: β”‚ Handle type uv_handle_t->data β”‚ timer 0x55580decd1e0->0x7f94c3a4c340 ``` diff --git a/src/Makefile b/src/Makefile index ecde1682bc169..f8c69b75b4d1b 100644 --- a/src/Makefile +++ b/src/Makefile @@ -259,7 +259,7 @@ endif default: $(JULIA_BUILD_MODE) # contains either "debug" or "release" all: debug release -release debug: %: libjulia-internal-% libjulia-codegen-% $(BUILDDIR)/compile_commands.json +release debug: %: libjulia-internal-% libjulia-codegen-% regenerate-compile_commands $(BUILDDIR): mkdir -p 
$(BUILDDIR) @@ -558,6 +558,7 @@ clean: -rm -f $(BUILDDIR)/*.dbg.obj $(BUILDDIR)/*.o $(BUILDDIR)/*.dwo $(BUILDDIR)/*.$(SHLIB_EXT) $(BUILDDIR)/*.a $(BUILDDIR)/*.h.gen -rm -f $(BUILDDIR)/julia.expmap -rm -f $(BUILDDIR)/julia_version.h + -rm -f $(BUILDDIR)/compile_commands.json clean-flisp: -$(MAKE) -C $(SRCDIR)/flisp clean BUILDDIR='$(abspath $(BUILDDIR)/flisp)' @@ -654,8 +655,10 @@ clean-analyzegc: rm -f $(build_shlibdir)/libImplicitAtomicsPlugin.$(SHLIB_EXT) # Compilation database generation using existing clang infrastructure -$(BUILDDIR)/compile_commands.json: - @{ \ +.PHONY: regenerate-compile_commands +regenerate-compile_commands: + TMPFILE=$$(mktemp -p $(BUILDDIR) compile_commands.json.XXXXXX); \ + { \ CLANG_TOOLING_C_FLAGS="$$($(JULIAHOME)/contrib/escape_json.sh clang $(CLANG_TOOLING_C_FLAGS))"; \ CLANG_TOOLING_CXX_FLAGS="$$($(JULIAHOME)/contrib/escape_json.sh clang $(CLANG_TOOLING_CXX_FLAGS))"; \ echo "["; \ @@ -711,18 +714,17 @@ $(BUILDDIR)/compile_commands.json: printf '{\n "directory": "%s",\n "file": "%s",\n "arguments": [%s]\n}' "$(abspath $(SRCDIR))" "$$included_file" "$$cmd"; \ done; \ echo "]"; \ - } > $@.tmp - @# This ensures we replace the file atomically, and avoid spurious rewrites - @if ! cmp -s $@.tmp $@; then \ - mv $@.tmp $@; \ + } > $$TMPFILE; \ + if ! 
cmp -s $$TMPFILE $(BUILDDIR)/compile_commands.json; then \ + mv $$TMPFILE $(BUILDDIR)/compile_commands.json; \ else \ - rm -f $@.tmp; \ + rm -f $$TMPFILE; \ fi -compile-database: $(BUILDDIR)/compile_commands.json +compile-database: regenerate-compile_commands $(MAKE) -C $(SRCDIR)/flisp compile-database BUILDDIR='$(abspath $(BUILDDIR)/flisp)' $(MAKE) -C $(SRCDIR)/support compile-database BUILDDIR='$(abspath $(BUILDDIR)/support)' - @echo "Compilation database created: $<" + @echo "Compilation database created for src" .FORCE: -.PHONY: default all debug release clean cleanall clean-* libccalltest libllvmcalltest julia_flisp.boot.inc.phony analyzegc analyzesrc compile-database $(BUILDDIR)/compile_commands.json .FORCE +.PHONY: default all debug release clean cleanall clean-* libccalltest libllvmcalltest julia_flisp.boot.inc.phony analyzegc analyzesrc compile-database .FORCE diff --git a/src/cgutils.cpp b/src/cgutils.cpp index e6e101b353149..62f31e237f4b6 100644 --- a/src/cgutils.cpp +++ b/src/cgutils.cpp @@ -2676,9 +2676,11 @@ static jl_cgval_t typed_store(jl_codectx_t &ctx, assert(!isboxed && maybe_null_if_boxed); Value *first_ptr = extract_first_ptr(ctx, realinstr); assert(first_ptr != nullptr); - Done = ctx.builder.CreateIsNotNull(first_ptr); + // Done = Success || first_ptr != NULL + Done = ctx.builder.CreateOr(Success, ctx.builder.CreateIsNotNull(first_ptr)); } else { + // Done = Success || first_ptr == NULL || oldval != cmpop // Done = !(!Success && (first_ptr != NULL && oldval == cmpop)) Done = emit_guarded_test(ctx, ctx.builder.CreateNot(Success), false, [&] { Value *first_ptr = nullptr; diff --git a/src/flisp/Makefile b/src/flisp/Makefile index c146ef5a5a43a..3c24dab77d89f 100644 --- a/src/flisp/Makefile +++ b/src/flisp/Makefile @@ -61,9 +61,9 @@ DEBUGFLAGS_COMMON += $(FLAGS_COMMON) default: release -release: $(BUILDDIR)/$(EXENAME)$(EXE) $(BUILDDIR)/compile_commands.json +release: $(BUILDDIR)/$(EXENAME)$(EXE) regenerate-compile_commands -debug: 
$(BUILDDIR)/$(EXENAME)-debug$(EXE) $(BUILDDIR)/compile_commands.json +debug: $(BUILDDIR)/$(EXENAME)-debug$(EXE) regenerate-compile_commands $(BUILDDIR): mkdir -p $(BUILDDIR) @@ -140,8 +140,10 @@ CLANG_TOOLING_C_FLAGS = $(CLANGSA_FLAGS) $(DEBUGFLAGS_CLANG) $(JCPPFLAGS_CLANG) INCLUDED_FLISP_FILES := flisp.c:cvalues.c flisp.c:types.c flisp.c:print.c flisp.c:read.c flisp.c:equal.c # Compilation database generation -$(BUILDDIR)/compile_commands.json: - @{ \ +.PHONY: regenerate-compile_commands +regenerate-compile_commands: + TMPFILE=$$(mktemp -p $(BUILDDIR) compile_commands.json.XXXXXX); \ + { \ CLANG_TOOLING_C_FLAGS="$$($(JULIAHOME)/contrib/escape_json.sh clang $(CLANG_TOOLING_C_FLAGS))"; \ echo "["; \ first=true; \ @@ -158,16 +160,15 @@ $(BUILDDIR)/compile_commands.json: printf '{\n "directory": "%s",\n "file": "%s",\n "arguments": [%s]\n}' "$(abspath $(SRCDIR))" "$$included_file" "$$cmd"; \ done; \ echo "]"; \ - } > $@.tmp - @# This ensures we replace the file atomically, and avoid spurious rewrites - @if ! cmp -s $@.tmp $@; then \ - mv $@.tmp $@; \ + } > $$TMPFILE; \ + if ! 
cmp -s $$TMPFILE $(BUILDDIR)/compile_commands.json; then \ + mv $$TMPFILE $(BUILDDIR)/compile_commands.json; \ else \ - rm -f $@.tmp; \ + rm -f $$TMPFILE; \ fi -compile-database: $(BUILDDIR)/compile_commands.json - @echo "Compilation database created for flisp: $<" +compile-database: regenerate-compile_commands + @echo "Compilation database created for src/flisp" clean: rm -f $(BUILDDIR)/*.o @@ -178,4 +179,4 @@ clean: rm -f $(BUILDDIR)/compile_commands.json rm -f $(BUILDDIR)/host/* -.PHONY: flisp-deps compile-database $(BUILDDIR)/compile_commands.json +.PHONY: flisp-deps compile-database diff --git a/src/jl_uv.c b/src/jl_uv.c index e41b896320693..766e962288db6 100644 --- a/src/jl_uv.c +++ b/src/jl_uv.c @@ -68,7 +68,7 @@ static void wait_empty_func(uv_timer_t *t) uv_unref((uv_handle_t*)&signal_async); if (!uv_loop_alive(t->loop)) return; - jl_safe_printf("\n[pid %zd] Waiting for background task / IO / timer to finish:\n" + jl_safe_printf("\n[pid %zd] waiting for IO to finish:\n" " Handle type uv_handle_t->data\n", (size_t)uv_os_getpid()); uv_walk(jl_io_loop, walk_print_cb, NULL); diff --git a/src/support/Makefile b/src/support/Makefile index 9f5e3cc201231..69e97a1f65cc7 100644 --- a/src/support/Makefile +++ b/src/support/Makefile @@ -53,8 +53,8 @@ $(BUILDDIR)/host/Makefile: @printf "%s\n" 'BUILDING_HOST_TOOLS=1' >> $@ @printf "%s\n" 'include $(SRCDIR)/Makefile' >> $@ -release: $(BUILDDIR)/libsupport.a $(BUILDDIR)/compile_commands.json -debug: $(BUILDDIR)/libsupport-debug.a $(BUILDDIR)/compile_commands.json +release: $(BUILDDIR)/libsupport.a regenerate-compile_commands +debug: $(BUILDDIR)/libsupport-debug.a regenerate-compile_commands $(BUILDDIR)/libsupport.a: $(OBJS) | $(BUILDIR) rm -rf $@ @@ -78,8 +78,10 @@ CLANG_TOOLING_C_FLAGS = $(CLANGSA_FLAGS) $(DEBUGFLAGS_CLANG) $(JCPPFLAGS_CLANG) INCLUDED_SUPPORT_FILES := hashing.c:MurmurHash3.c # Compilation database generation -$(BUILDDIR)/compile_commands.json: - @{ \ +.PHONY: regenerate-compile_commands 
+regenerate-compile_commands: + TMPFILE=$$(mktemp -p $(BUILDDIR) compile_commands.json.XXXXXX); \ + { \ CLANG_TOOLING_S_FLAGS="$$($(JULIAHOME)/contrib/escape_json.sh clang $(JCPPFLAGS) $(DEBUGFLAGS))"; \ CLANG_TOOLING_C_FLAGS="$$($(JULIAHOME)/contrib/escape_json.sh clang $(JCPPFLAGS) $(JCFLAGS) $(DEBUGFLAGS))"; \ echo "["; \ @@ -103,16 +105,15 @@ $(BUILDDIR)/compile_commands.json: printf '{\n "directory": "%s",\n "file": "%s",\n "arguments": [%s]\n}' "$(abspath $(SRCDIR))" "$$included_file" "$$cmd"; \ done; \ echo "]"; \ - } > $@.tmp - @# This ensures we replace the file atomically, and avoid spurious rewrites - @if ! cmp -s $@.tmp $@; then \ - mv $@.tmp $@; \ + } > $$TMPFILE; \ + if ! cmp -s $$TMPFILE $(BUILDDIR)/compile_commands.json; then \ + mv $$TMPFILE $(BUILDDIR)/compile_commands.json; \ else \ - rm -f $@.tmp; \ + rm -f $$TMPFILE; \ fi -compile-database: $(BUILDDIR)/compile_commands.json - @echo "Compilation database created in support: $<" +compile-database: regenerate-compile_commands + @echo "Compilation database created for src/support" clean: rm -f $(BUILDDIR)/*.o @@ -125,4 +126,4 @@ clean: rm -f $(BUILDDIR)/compile_commands.json rm -f $(BUILDDIR)/host/* -.PHONY: compile-database $(BUILDDIR)/compile_commands.json +.PHONY: compile-database diff --git a/stdlib/LibGit2/src/LibGit2.jl b/stdlib/LibGit2/src/LibGit2.jl index 7d5b86867c7ae..30d141be146d7 100644 --- a/stdlib/LibGit2/src/LibGit2.jl +++ b/stdlib/LibGit2/src/LibGit2.jl @@ -262,15 +262,21 @@ The keyword arguments are: * `remoteurl::AbstractString=""`: the URL of `remote`. If not specified, will be assumed based on the given name of `remote`. * `refspecs=AbstractString[]`: determines properties of the fetch. + * `depth::Integer=0`: limit fetching to the specified number of commits from the tip + of each remote branch. `0` indicates a full fetch (the default). + Use `Consts.FETCH_DEPTH_UNSHALLOW` to fetch all missing data from a shallow clone. 
+ Note: depth is, at the time of writing, only supported for network protocols (http, https, git, ssh), not for local filesystem paths. + (https://github.com/libgit2/libgit2/issues/6634) * `credentials=nothing`: provides credentials and/or settings when authenticating against a private `remote`. * `callbacks=Callbacks()`: user provided callbacks and payloads. -Equivalent to `git fetch [|] []`. +Equivalent to `git fetch [--depth ] [|] []`. """ function fetch(repo::GitRepo; remote::AbstractString="origin", remoteurl::AbstractString="", refspecs::Vector{<:AbstractString}=AbstractString[], + depth::Integer=0, credentials::Creds=nothing, callbacks::Callbacks=Callbacks()) rmt = if isempty(remoteurl) @@ -290,7 +296,12 @@ function fetch(repo::GitRepo; remote::AbstractString="origin", result = try remote_callbacks = RemoteCallbacks(callbacks) - fo = FetchOptions(callbacks=remote_callbacks) + @static if LibGit2.VERSION >= v"1.7.0" + fo = FetchOptions(callbacks=remote_callbacks, depth=Cuint(depth)) + else + depth != 0 && throw(ArgumentError("Depth parameter for fetch requires libgit2 >= 1.7.0")) + fo = FetchOptions(callbacks=remote_callbacks) + end fetch(rmt, refspecs, msg="from $(url(rmt))", options=fo) catch err if isa(err, GitError) && err.code === Error.EAUTH @@ -539,11 +550,16 @@ The keyword arguments are: * `remote_cb::Ptr{Cvoid}=C_NULL`: a callback which will be used to create the remote before it is cloned. If `C_NULL` (the default), no attempt will be made to create the remote - it will be assumed to already exist. + * `depth::Integer=0`: create a shallow clone with a history truncated to the + specified number of commits. `0` indicates a full clone (the default). + Use `Consts.FETCH_DEPTH_UNSHALLOW` to fetch all missing data from a shallow clone. + Note: shallow clones are, at the time of writing, only supported for network protocols (http, https, git, ssh), not for local filesystem paths. 
+ (https://github.com/libgit2/libgit2/issues/6634) * `credentials::Creds=nothing`: provides credentials and/or settings when authenticating against a private repository. * `callbacks::Callbacks=Callbacks()`: user provided callbacks and payloads. -Equivalent to `git clone [-b ] [--bare] `. +Equivalent to `git clone [-b ] [--bare] [--depth ] `. # Examples ```julia @@ -552,12 +568,15 @@ repo1 = LibGit2.clone(repo_url, "test_path") repo2 = LibGit2.clone(repo_url, "test_path", isbare=true) julia_url = "https://github.com/JuliaLang/julia" julia_repo = LibGit2.clone(julia_url, "julia_path", branch="release-0.6") +# Shallow clone with only the most recent commit +shallow_repo = LibGit2.clone(repo_url, "shallow_path", depth=1) ``` """ function clone(repo_url::AbstractString, repo_path::AbstractString; branch::AbstractString="", isbare::Bool = false, remote_cb::Ptr{Cvoid} = C_NULL, + depth::Integer = 0, credentials::Creds=nothing, callbacks::Callbacks=Callbacks()) cred_payload = reset!(CredentialPayload(credentials)) @@ -573,7 +592,12 @@ function clone(repo_url::AbstractString, repo_path::AbstractString; lbranch = Base.cconvert(Cstring, branch) GC.@preserve lbranch begin remote_callbacks = RemoteCallbacks(callbacks) - fetch_opts = FetchOptions(callbacks=remote_callbacks) + @static if LibGit2.VERSION >= v"1.7.0" + fetch_opts = FetchOptions(callbacks=remote_callbacks, depth=Cuint(depth)) + else + depth != 0 && throw(ArgumentError("Shallow clone (depth parameter) requires libgit2 >= 1.7.0")) + fetch_opts = FetchOptions(callbacks=remote_callbacks) + end clone_opts = CloneOptions( bare = Cint(isbare), checkout_branch = isempty(lbranch) ? 
Cstring(C_NULL) : Base.unsafe_convert(Cstring, lbranch), diff --git a/stdlib/LibGit2/src/repository.jl b/stdlib/LibGit2/src/repository.jl index 89d94f4426bf0..97d414fc664e4 100644 --- a/stdlib/LibGit2/src/repository.jl +++ b/stdlib/LibGit2/src/repository.jl @@ -112,6 +112,27 @@ function isattached(repo::GitRepo) ccall((:git_repository_head_detached, libgit2), Cint, (Ptr{Cvoid},), repo) != 1 end +""" + isshallow(repo::GitRepo)::Bool + +Determine if `repo` is a shallow clone. A shallow clone has a truncated history, +created by cloning with a specific depth (e.g., `LibGit2.clone(url, path, depth=1)`). + +# Examples +```julia +shallow_repo = LibGit2.clone(url, "shallow_path", depth=1) +LibGit2.isshallow(shallow_repo) # returns true + +normal_repo = LibGit2.clone(url, "normal_path") +LibGit2.isshallow(normal_repo) # returns false +``` +""" +function isshallow(repo::GitRepo) + ensure_initialized() + @assert repo.ptr != C_NULL + ccall((:git_repository_is_shallow, libgit2), Cint, (Ptr{Cvoid},), repo) == 1 +end + @doc """ GitObject(repo::GitRepo, hash::AbstractGitHash) GitObject(repo::GitRepo, spec::AbstractString) diff --git a/stdlib/LibGit2/test/libgit2-tests.jl b/stdlib/LibGit2/test/libgit2-tests.jl index 4c0099f7296f4..b6112e1ee6f9d 100644 --- a/stdlib/LibGit2/test/libgit2-tests.jl +++ b/stdlib/LibGit2/test/libgit2-tests.jl @@ -744,6 +744,23 @@ mktempdir() do dir cred_payload = LibGit2.CredentialPayload() @test_throws ArgumentError LibGit2.clone(cache_repo, test_repo, callbacks=callbacks, credentials=cred_payload) end + @testset "shallow clone" begin + @static if LibGit2.VERSION >= v"1.7.0" + # Note: Shallow clones are not supported with local file:// transport + # This is a limitation in libgit2 - shallow clones only work with + # network protocols (http, https, git, ssh) + # See online-tests.jl for tests with remote repositories + + # Test normal clone is not shallow + normal_path = joinpath(dir, "Example.NotShallow") + LibGit2.with(LibGit2.clone(cache_repo, 
normal_path)) do repo + @test !LibGit2.isshallow(repo) + end + else + # Test that depth parameter throws error on older libgit2 + @test_throws ArgumentError LibGit2.clone(cache_repo, joinpath(dir, "Example.Shallow"), depth=1) + end + end end @testset "Update cache repository" begin diff --git a/stdlib/LibGit2/test/online-tests.jl b/stdlib/LibGit2/test/online-tests.jl index 4c5f346894b3d..c4d3cf452e78b 100644 --- a/stdlib/LibGit2/test/online-tests.jl +++ b/stdlib/LibGit2/test/online-tests.jl @@ -87,6 +87,22 @@ mktempdir() do dir @test ex.code == LibGit2.Error.EAUTH end end + + @testset "Shallow clone" begin + @static if LibGit2.VERSION >= v"1.7.0" + # Test shallow clone with depth=1 + repo_path = joinpath(dir, "Example.Shallow") + c = LibGit2.CredentialPayload(allow_prompt=false, allow_git_helpers=false) + repo = LibGit2.clone(repo_url, repo_path, depth=1, credentials=c) + try + @test isdir(repo_path) + @test isdir(joinpath(repo_path, ".git")) + @test LibGit2.isshallow(repo) + finally + close(repo) + end + end + end end end diff --git a/stdlib/MozillaCACerts_jll/Project.toml b/stdlib/MozillaCACerts_jll/Project.toml index 72a53cdbfb51d..d63251d59d58f 100644 --- a/stdlib/MozillaCACerts_jll/Project.toml +++ b/stdlib/MozillaCACerts_jll/Project.toml @@ -1,7 +1,7 @@ name = "MozillaCACerts_jll" uuid = "14a3606d-f60d-562e-9121-12d972cd8159" # Keep in sync with `deps/libgit2.version`. 
-version = "2025.09.09" +version = "2025.11.04" [extras] Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" diff --git a/stdlib/Pkg.version b/stdlib/Pkg.version index affaa13276534..44b40954cb272 100644 --- a/stdlib/Pkg.version +++ b/stdlib/Pkg.version @@ -1,4 +1,4 @@ -PKG_BRANCH = master +PKG_BRANCH = release-1.13 PKG_SHA1 = 6aae10788d7dfe89104f7bd040a1923879b77418 PKG_GIT_URL := https://github.com/JuliaLang/Pkg.jl.git PKG_TAR_URL = https://api.github.com/repos/JuliaLang/Pkg.jl/tarball/$1 diff --git a/stdlib/REPL/test/precompilation.jl b/stdlib/REPL/test/precompilation.jl index 7efcf0b5e8282..bc3eda15eae61 100644 --- a/stdlib/REPL/test/precompilation.jl +++ b/stdlib/REPL/test/precompilation.jl @@ -18,7 +18,7 @@ if !Sys.iswindows() # start an interactive session, ensuring `TERM` is unset since it can trigger # different amounts of precompilation stemming from `base/terminfo.jl` depending # on the value, making the test here unreliable - cmd = addenv(`$(Base.julia_cmd()[1]) --trace-compile=$f -q --startup-file=no -i`, + cmd = addenv(`$(Base.julia_cmd()) --trace-compile=$f -q --startup-file=no -i`, Dict("TERM" => "")) pts, ptm = open_fake_pty() p = run(cmd, pts, pts, pts; wait=false) @@ -26,7 +26,10 @@ if !Sys.iswindows() std = readuntil(ptm, "julia>") # check for newlines instead of equality with "julia>" because color may be on occursin("\n", std) && @info "There was output before the julia prompt:\n$std" - sleep(1) # sometimes precompiles output just after prompt appears + @async write(ptm, "\n") # another prompt + readuntil(ptm, "julia>") + @async write(ptm, "\n") # another prompt + readuntil(ptm, "julia>") tracecompile_out = read(f, String) close(ptm) # close after reading so we don't get precompiles from error shutdown diff --git a/stdlib/TOML/src/print.jl b/stdlib/TOML/src/print.jl index 741fd96e548a8..aca013955b28f 100644 --- a/stdlib/TOML/src/print.jl +++ b/stdlib/TOML/src/print.jl @@ -117,7 +117,7 @@ function print_integer(io::IO, value::Integer) end function 
print_inline_table(f::Function, io::IO, value::AbstractDict, sorted::Bool) - vkeys = collect(keys(value)) + vkeys = collect(keys(value))::AbstractArray if sorted sort!(vkeys) end diff --git a/stdlib/TOML/test/values.jl b/stdlib/TOML/test/values.jl index 53be1b04708b3..b7c3730006723 100644 --- a/stdlib/TOML/test/values.jl +++ b/stdlib/TOML/test/values.jl @@ -116,6 +116,9 @@ end @test testval("2016-09-09T09:09:09Z" , DateTime(2016 , 9 , 9 , 9 , 9 , 9)) @test testval("2016-09-09T09:09:09.0Z" , DateTime(2016 , 9 , 9 , 9 , 9 , 9)) @test testval("2016-09-09T09:09:09.012" , DateTime(2016 , 9 , 9 , 9 , 9 , 9 , 12)) + @test testval("2016-09-09T09:09:09.2" , DateTime(2016 , 9 , 9 , 9 , 9 , 9 , 200)) + @test testval("2016-09-09T09:09:09.20" , DateTime(2016 , 9 , 9 , 9 , 9 , 9 , 200)) + @test testval("2016-09-09T09:09:09.02" , DateTime(2016 , 9 , 9 , 9 , 9 , 9 , 20)) @test failval("2016-09-09T09:09:09.0+10:00" , Internals.ErrOffsetDateNotSupported) @test failval("2016-09-09T09:09:09.012-02:00" , Internals.ErrOffsetDateNotSupported) @@ -132,8 +135,12 @@ end end @testset "Time" begin - @test testval("09:09:09.99" , Time(9 , 9 , 9 , 99)) + @test testval("09:09:09.99" , Time(9 , 9 , 9 , 990)) @test testval("09:09:09.99999" , Time(9 , 9 , 9 , 999)) + @test testval("00:00:00.2" , Time(0 , 0 , 0 , 200)) + @test testval("00:00:00.20" , Time(0 , 0 , 0 , 200)) + @test testval("00:00:00.23" , Time(0 , 0 , 0 , 230)) + @test testval("00:00:00.234" , Time(0 , 0 , 0 , 234)) @test failval("09:09x09", Internals.ErrParsingDateTime) end diff --git a/test/atomics.jl b/test/atomics.jl index 3572824741459..369a63f7d5fbf 100644 --- a/test/atomics.jl +++ b/test/atomics.jl @@ -57,9 +57,8 @@ swap(x, y) = y struct UndefComplex{T} re::T im::T - UndefComplex{T}() where {T} = new{T}() end -Base.convert(T::Type{<:UndefComplex}, S) = T() +Base.convert(T::Type{<:UndefComplex}, S) = T(S, 0) let T1 = Refxy{NTuple{3,UInt8}}, T2 = ARefxy{NTuple{3,UInt8}} diff --git a/test/docs.jl b/test/docs.jl index 
ddab5e4e60eaf..148c0cf8ca649 100644 --- a/test/docs.jl +++ b/test/docs.jl @@ -1659,3 +1659,12 @@ module DocReturnValue end @test result11 isa Base.Docs.Binding end + +# https://github.com/JuliaLang/julia/issues/59949 +struct Foo59949{T} end + +""" +Bar59949{T} +""" +Bar59949{T} = Foo59949{T} +@test docstrings_equal(@doc(Bar59949), doc"Bar59949{T}") diff --git a/test/loading.jl b/test/loading.jl index e3487c7050d70..d13bc4ea648f1 100644 --- a/test/loading.jl +++ b/test/loading.jl @@ -1487,8 +1487,10 @@ end """) write(joinpath(foo_path, "Manifest.toml"), """ - julia_version = "1.13.0" + # This file is machine-generated - editing it directly is not advised + julia_version = "1.13.0-DEV" manifest_format = "2.0" + project_hash = "8699765aeeac181c3e5ddbaeb9371968e1f84d6b" [[deps.Foo51989]] path = "." diff --git a/test/precompile.jl b/test/precompile.jl index 12f7a5b1f89ba..2bba1fcac011e 100644 --- a/test/precompile.jl +++ b/test/precompile.jl @@ -687,7 +687,15 @@ precompile_test_harness(false) do dir error("break me") end """) - @test_throws Base.Precompilation.PkgPrecompileError Base.require(Main, :FooBar2) + try + Base.require(Main, :FooBar2) + error("the \"break me\" test failed") + catch exc + isa(exc, LoadError) || rethrow() + exc = exc.error + isa(exc, ErrorException) || rethrow() + "break me" == exc.msg || rethrow() + end # Test that trying to eval into closed modules during precompilation is an error FooBar3_file = joinpath(dir, "FooBar3.jl") @@ -699,7 +707,14 @@ precompile_test_harness(false) do dir $code end """) - @test_throws Base.Precompilation.PkgPrecompileError Base.require(Main, :FooBar3) + try + Base.require(Main, :FooBar3) + catch exc + isa(exc, LoadError) || rethrow() + exc = exc.error + isa(exc, ErrorException) || rethrow() + occursin("Evaluation into the closed module `Base` breaks incremental compilation", exc.msg) || rethrow() + end end # Test transitive dependency for #21266 @@ -2529,161 +2544,4 @@ let io = IOBuffer() @test 
isempty(String(take!(io))) end -# Test --compiled-modules=strict in precompilepkgs -@testset "compiled-modules=strict with dependencies" begin - mkdepottempdir() do depot - # Create three packages: one that fails to precompile, one that loads it, one that doesn't - project_path = joinpath(depot, "testenv") - mkpath(project_path) - - # Create FailPkg - a package that can't be precompiled - fail_pkg_path = joinpath(depot, "dev", "FailPkg") - mkpath(joinpath(fail_pkg_path, "src")) - write(joinpath(fail_pkg_path, "Project.toml"), - """ - name = "FailPkg" - uuid = "10000000-0000-0000-0000-000000000001" - version = "0.1.0" - """) - write(joinpath(fail_pkg_path, "src", "FailPkg.jl"), - """ - module FailPkg - print("Now FailPkg is running.\n") - error("expected fail") - end - """) - - # Create LoadsFailPkg - depends on and loads FailPkg (should fail with strict) - loads_pkg_path = joinpath(depot, "dev", "LoadsFailPkg") - mkpath(joinpath(loads_pkg_path, "src")) - write(joinpath(loads_pkg_path, "Project.toml"), - """ - name = "LoadsFailPkg" - uuid = "20000000-0000-0000-0000-000000000002" - version = "0.1.0" - - [deps] - FailPkg = "10000000-0000-0000-0000-000000000001" - """) - write(joinpath(loads_pkg_path, "src", "LoadsFailPkg.jl"), - """ - module LoadsFailPkg - print("Now LoadsFailPkg is running.\n") - import FailPkg - print("unreachable\n") - end - """) - - # Create DependsOnly - depends on FailPkg but doesn't load it (should succeed) - depends_pkg_path = joinpath(depot, "dev", "DependsOnly") - mkpath(joinpath(depends_pkg_path, "src")) - write(joinpath(depends_pkg_path, "Project.toml"), - """ - name = "DependsOnly" - uuid = "30000000-0000-0000-0000-000000000003" - version = "0.1.0" - - [deps] - FailPkg = "10000000-0000-0000-0000-000000000001" - """) - write(joinpath(depends_pkg_path, "src", "DependsOnly.jl"), - """ - module DependsOnly - # Has FailPkg as a dependency but doesn't load it - print("Now DependsOnly is running.\n") - end - """) - - # Create main project with 
all packages - write(joinpath(project_path, "Project.toml"), - """ - [deps] - LoadsFailPkg = "20000000-0000-0000-0000-000000000002" - DependsOnly = "30000000-0000-0000-0000-000000000003" - """) - write(joinpath(project_path, "Manifest.toml"), - """ - julia_version = "1.13.0" - manifest_format = "2.0" - - [[DependsOnly]] - deps = ["FailPkg"] - uuid = "30000000-0000-0000-0000-000000000003" - version = "0.1.0" - - [[FailPkg]] - uuid = "10000000-0000-0000-0000-000000000001" - version = "0.1.0" - - [[LoadsFailPkg]] - deps = ["FailPkg"] - uuid = "20000000-0000-0000-0000-000000000002" - version = "0.1.0" - - [[deps.DependsOnly]] - deps = ["FailPkg"] - path = "../dev/DependsOnly/" - uuid = "30000000-0000-0000-0000-000000000003" - version = "0.1.0" - - [[deps.FailPkg]] - path = "../dev/FailPkg/" - uuid = "10000000-0000-0000-0000-000000000001" - version = "0.1.0" - - [[deps.LoadsFailPkg]] - deps = ["FailPkg"] - path = "../dev/LoadsFailPkg/" - uuid = "20000000-0000-0000-0000-000000000002" - version = "0.1.0" - """) - - # Call precompilepkgs with output redirected to a file - LoadsFailPkg_output = joinpath(depot, "LoadsFailPkg_output.txt") - DependsOnly_output = joinpath(depot, "DependsOnly_output.txt") - original_depot_path = copy(Base.DEPOT_PATH) - old_proj = Base.active_project() - try - push!(empty!(DEPOT_PATH), depot) - Base.set_active_project(project_path) - precompile_capture(file, pkg) = open(file, "w") do io - try - r = Base.Precompilation.precompilepkgs([pkg]; io, fancyprint=true) - @test r isa Vector{String} - r - catch ex - ex isa Base.Precompilation.PkgPrecompileError || rethrow() - ex - end - end - loadsfailpkg = precompile_capture(LoadsFailPkg_output, "LoadsFailPkg") - @test loadsfailpkg isa Base.Precompilation.PkgPrecompileError - dependsonly = precompile_capture(DependsOnly_output, "DependsOnly") - @test length(dependsonly) == 1 - finally - Base.set_active_project(old_proj) - append!(empty!(DEPOT_PATH), original_depot_path) - end - - output = 
read(LoadsFailPkg_output, String) - # LoadsFailPkg should fail because it tries to load FailPkg with --compiled-modules=strict - @test count("LoadError: expected fail", output) == 1 - @test count("expected fail", output) == 1 - @test count("βœ— FailPkg", output) > 0 - @test count("βœ— LoadsFailPkg", output) > 0 - @test count("Now FailPkg is running.", output) == 1 - @test count("Now LoadsFailPkg is running.", output) == 1 - @test count("DependsOnly precompiling.", output) == 0 - - # DependsOnly should succeed because it doesn't actually load FailPkg - output = read(DependsOnly_output, String) - @test count("LoadError: expected fail", output) == 0 - @test count("expected fail", output) == 0 - @test count("βœ— FailPkg", output) > 0 - @test count("Precompiling DependsOnly finished.", output) == 1 - @test count("Now FailPkg is running.", output) == 0 - @test count("Now DependsOnly is running.", output) == 1 - end -end - finish_precompile_test!()