diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index f9230242014..58344cc7eae 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -16,10 +16,10 @@ jobs: with: # Fix the Julia version, because Enzyme doesn't always like being updated # to `latest`. - version: '1.11' + version: '1.12' - uses: julia-actions/cache@v2 - name: Install Gurobi license - env: + env: SECRET_GUROBI_LICENSE: ${{ secrets.GUROBI_LICENSE }} shell: bash run: | diff --git a/.vale.ini b/.vale.ini index e9d218b1276..c70761a9754 100644 --- a/.vale.ini +++ b/.vale.ini @@ -63,6 +63,9 @@ Google.Quotes = NO Google.FirstPerson = NO Vale.Spelling = NO +[docs/src/packages/MultiObjectiveAlgorithms.md] +Vale.Spelling = NO + [docs/src/packages/Optim.md] Google.EmDash = NO diff --git a/docs/.gitignore b/docs/.gitignore index f389a3b10a2..f6c07c64ea7 100644 --- a/docs/.gitignore +++ b/docs/.gitignore @@ -9,5 +9,4 @@ src/release_notes.md src/packages/*.md !src/packages/solvers.md !src/packages/extensions.md -!src/packages/NLopt.md src/JuMP.pdf diff --git a/docs/Project.toml b/docs/Project.toml index 3adbbe83c41..0a083c9a3e0 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -54,33 +54,33 @@ Clarabel = "=0.11.0" Clustering = "0.15.8" DSP = "0.8.4" DataFrames = "1.8.1" -DifferentiationInterface = "0.7.12" +DifferentiationInterface = "0.7.13" DimensionalData = "=0.29.25" -Distributions = "0.25.122" -Documenter = "=1.15.0" +Distributions = "0.25.123" +Documenter = "=1.16.1" DocumenterCitations = "1.4.1" Dualization = "0.7.1" -Enzyme = "0.13.109" -ForwardDiff = "1.3.0" -Gurobi = "=1.9.0" +Enzyme = "0.13.112" +ForwardDiff = "1.3.1" +Gurobi = "=1.9.1" HTTP = "1.10.19" HiGHS = "=1.20.1" Images = "0.26.2" -Interpolations = "0.15" +Interpolations = "0.16.2" Ipopt = "=1.13.0" JSON = "0.21.4" JSONSchema = "1.5.0" -LinearOperatorCollection = "2.2.1" +LinearOperatorCollection = "2.3.0" Literate = "2.21.0" MarkdownAST = "0.1.2" 
MathOptChordalDecomposition = "=0.2.0" MathOptInterface = "=1.48.0" -MultiObjectiveAlgorithms = "=1.8.0" +MultiObjectiveAlgorithms = "=1.8.1" PATHSolver = "=1.7.9" ParametricOptInterface = "0.14.1" -Plots = "1.41.2" +Plots = "1.41.3" RegularizedLeastSquares = "0.16.11" -SCS = "=2.4.0" +SCS = "=2.5.0" SQLite = "1.6.1" SpecialFunctions = "2.6.1" StatsPlots = "0.15.8" diff --git a/docs/make.jl b/docs/make.jl index 4041f700288..dc68c59892e 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -560,17 +560,23 @@ function _add_moi_pages() end end end - # Fix `# Infeasibility certificates` in moi/background/infeasibility_certificates.md - filename = joinpath(moi_dir, "background", "infeasibility_certificates.md") - contents = read(filename, String) - id = "# [Infeasibility certificates](@id moi_infeasibility_certificates)" - contents = replace(contents, r"^# Infeasibility certificates$"m => id) - write(filename, contents) - # Fix `JSON.Object` in moi/submodules/FileFormats/overview.md - # This can be removed once we support JSON@1 in the documentation - filename = joinpath(moi_dir, "submodules", "FileFormats", "overview.md") - contents = read(filename, String) - write(filename, replace(contents, "JSON.Object" => "Dict")) + for (filename, replacements) in [ + "background/infeasibility_certificates.md" => [ + r"^# Infeasibility certificates$"m => "# [Infeasibility certificates](@id moi_infeasibility_certificates)", + ], + # This can be removed once we support JSON@1 in the documentation + "submodules/FileFormats/overview.md" => ["JSON.Object" => "Dict"], + # These can be removed once we support MOI@1.48.1 or later + "reference/models.md" => ["# ResultStatusCode" => "# Result Status"], + "submodules/Nonlinear/SymbolicAD.md" => [ + "# `simplify`" => "# [`simplify`](@id symbolic_ad_manual_simplify)", + "# `variables`" => "# [`variables`](@id symbolic_ad_manual_variables)", + "# `derivative`" => "# 
[`derivative`](@id symbolic_ad_manual_derivative)", + ], + ] + filename = joinpath(moi_dir, filename) + write(filename, replace(read(filename, String), replacements...)) + end return end diff --git a/docs/packages.toml b/docs/packages.toml index 46b6919a3a1..e83b87f7832 100644 --- a/docs/packages.toml +++ b/docs/packages.toml @@ -47,7 +47,7 @@ [CSDP] rev = "v1.1.2" [cuOpt] - rev = "v0.1.1" + rev = "v0.1.2" [DiffOpt] rev = "v0.5.4" extension = true @@ -61,7 +61,7 @@ [GLPK] rev = "v1.2.1" [Gurobi] - rev = "v1.9.0" + rev = "v1.9.1" [HiGHS] rev = "v1.20.1" [Hypatia] @@ -70,7 +70,7 @@ [Ipopt] rev = "v1.13.0" [KNITRO] - rev = "v1.1.0" + rev = "v1.2.0" [MathOptAnalyzer] rev = "v0.1.1" [MathOptIIS] @@ -80,7 +80,7 @@ [MosekTools] rev = "v0.15.10" [MultiObjectiveAlgorithms] - rev = "3bf253221cfa943532b5030ab7d5bed6157b9786" + rev = "v1.8.1" has_html = true [NEOSServer] rev = "v1.2.0" @@ -103,7 +103,7 @@ rev = "v0.7.6" extension = true [SCS] - rev = "v2.4.0" + rev = "v2.5.0" [SDPA] rev = "v0.6.1" [SDPLR] @@ -144,7 +144,7 @@ has_html = true [COPT] user = "COPT-Public" - rev = "v1.1.30" + rev = "v1.1.31" [COSMO] user = "oxfordcontrol" rev = "v0.8.9" @@ -159,7 +159,7 @@ extension = true [EAGO] user = "PSORLab" - rev = "v0.8.3" + rev = "v0.8.4" filename = "docs/src/jump/README.md" [GAMS] user = "GAMS-dev" diff --git a/docs/src/manual/containers.md b/docs/src/manual/containers.md index ef2cc42a2ed..146846099f2 100644 --- a/docs/src/manual/containers.md +++ b/docs/src/manual/containers.md @@ -138,7 +138,7 @@ julia> DataFrames.DataFrame(table) 6 │ 2 3 (2, 3) ``` -## DenseAxisArray +## [DenseAxisArray](@id manual_dense_axis_array) A [`Containers.DenseAxisArray`](@ref) is created when the index sets are rectangular, but not of the form `1:n`. The index sets can be of any type. 
@@ -275,7 +275,7 @@ And data, a 2-element Vector{Tuple{Int64, Symbol}}: (2, :B) ``` -## SparseAxisArray +## [SparseAxisArray](@id manual_sparse_axis_array) A [`Containers.SparseAxisArray`](@ref) is created when the index sets are non-rectangular. This occurs in two circumstances: diff --git a/docs/src/manual/nlp.md b/docs/src/manual/nlp.md index b7a7c5fb1a6..6ff89b8265c 100644 --- a/docs/src/manual/nlp.md +++ b/docs/src/manual/nlp.md @@ -227,7 +227,7 @@ julia> @variable(model, y); julia> c = [1, 2]; julia> @NLobjective(model, Min, c' * x + 3y) -ERROR: Unexpected array [1 2] in nonlinear expression. Nonlinear expressions may contain only scalar expressions. +ERROR: Unexpected array adjoint([1, 2]) in nonlinear expression. Nonlinear expressions may contain only scalar expressions. [...] ``` diff --git a/docs/src/manual/nonlinear.md b/docs/src/manual/nonlinear.md index 8281b5b2a74..383be6bebf2 100644 --- a/docs/src/manual/nonlinear.md +++ b/docs/src/manual/nonlinear.md @@ -796,7 +796,7 @@ works. The operator takes `f(x::Vector)` as input, instead of the splatted `f(x...)`. -```jldoctest +```jldoctest; filter=r"\(::ForwardDiff.+\)" julia> import ForwardDiff julia> my_operator_bad(x::Vector) = sum(x[i]^2 for i in eachindex(x)) @@ -806,7 +806,7 @@ julia> my_operator_good(x...) = sum(x[i]^2 for i in eachindex(x)) my_operator_good (generic function with 1 method) julia> ForwardDiff.gradient(x -> my_operator_bad(x...), [1.0, 2.0]) -ERROR: MethodError: no method matching my_operator_bad(::ForwardDiff.Dual{ForwardDiff.Tag{var"#5#6", Float64}, Float64, 2}, ::ForwardDiff.Dual{ForwardDiff.Tag{var"#5#6", Float64}, Float64, 2}) +ERROR: MethodError: no method matching my_operator_bad(::ForwardDiff.Dual, ::ForwardDiff.Dual) [...] 
julia> ForwardDiff.gradient(x -> my_operator_good(x...), [1.0, 2.0]) @@ -820,7 +820,7 @@ julia> ForwardDiff.gradient(x -> my_operator_good(x...), [1.0, 2.0]) The operator assumes `Float64` will be passed as input, but it must work for any generic `Real` type. -```jldoctest +```jldoctest; filter=r"\(::ForwardDiff.+\)" julia> import ForwardDiff julia> my_operator_bad(x::Float64...) = sum(x[i]^2 for i in eachindex(x)) @@ -830,7 +830,7 @@ julia> my_operator_good(x::Real...) = sum(x[i]^2 for i in eachindex(x)) my_operator_good (generic function with 1 method) julia> ForwardDiff.gradient(x -> my_operator_bad(x...), [1.0, 2.0]) -ERROR: MethodError: no method matching my_operator_bad(::ForwardDiff.Dual{ForwardDiff.Tag{var"#5#6", Float64}, Float64, 2}, ::ForwardDiff.Dual{ForwardDiff.Tag{var"#5#6", Float64}, Float64, 2}) +ERROR: MethodError: no method matching my_operator_bad(::ForwardDiff.Dual, ::ForwardDiff.Dual) [...] julia> ForwardDiff.gradient(x -> my_operator_good(x...), [1.0, 2.0]) diff --git a/docs/src/tutorials/algorithms/pdhg.jl b/docs/src/tutorials/algorithms/pdhg.jl index 3f93e2affcd..f681af40855 100644 --- a/docs/src/tutorials/algorithms/pdhg.jl +++ b/docs/src/tutorials/algorithms/pdhg.jl @@ -255,7 +255,7 @@ end MOI.get(::Optimizer, ::MOI.SolverName) = "PDHG" -# ### GenericModel +# ### [GenericModel](@id tutorial_pdhg_generic_model) # The simplest way to solve a problem with your optimizer is to implement the # method `MOI.optimize!(dest::Optimizer, src::MOI.ModelLike)`, where `src` is an diff --git a/docs/src/tutorials/applications/web_app.jl b/docs/src/tutorials/applications/web_app.jl index 4d90fe619e1..8d0997a6ea6 100644 --- a/docs/src/tutorials/applications/web_app.jl +++ b/docs/src/tutorials/applications/web_app.jl @@ -71,47 +71,30 @@ endpoint_solve(Dict{String,Any}("lower_bound" => 1.2)) endpoint_solve(Dict{String,Any}()) -# For a second function, we need a function that accepts an `HTTP.Request` -# object and returns an `HTTP.Response` object. 
- -function serve_solve(request::HTTP.Request) - data = JSON.parse(String(request.body)) - solution = endpoint_solve(data) - return HTTP.Response(200, JSON.json(solution)) -end - -# Finally, we need an HTTP server. There are a variety of ways you can do this -# in HTTP.jl. We use an explicit `Sockets.listen` so we have manual control of -# when we shutdown the server. - -function setup_server(host, port) - server = HTTP.Sockets.listen(host, port) - HTTP.serve!(host, port; server = server) do request - try - ## Extend the server by adding other endpoints here. - if request.target == "/api/solve" - return serve_solve(request) - else - return HTTP.Response(404, "target $(request.target) not found") - end +# We now need to turn each endpoint into a function that accepts an +# `HTTP.Request`, parses the JSON input, runs the endpoint, converts the result +# to JSON, and returns an `HTTP.Response`. In addition, the computation is +# handled in a separate thread, and we catch any unhandled exceptions. + +function wrap_endpoint(endpoint::Function) + function serve_request(request::HTTP.Request)::HTTP.Response + task = Threads.@spawn try + ret = request.body |> String |> JSON.parse |> endpoint |> JSON.json + HTTP.Response(200, ret) catch err - ## Log details about the exception server-side - @info "Unhandled exception: $err" - ## Return a response to the client - return HTTP.Response(500, "internal error") + HTTP.Response(500, "internal error: $err") end + return fetch(task) end - return server end -# !!! warning -# HTTP.jl does not serve requests on a separate thread. Therefore, a -# long-running job will block the main thread, preventing concurrent users from -# submitting requests. To work-around this, read [HTTP.jl issue 798](https://github.com/JuliaWeb/HTTP.jl/issues/798) -# or watch [Building Microservices and Applications in Julia](https://www.youtube.com/watch?v=uLhXgt_gKJc&t=9543s) -# from JuliaCon 2020. +# Finally, we need an HTTP server. 
There are a variety of ways you can do this +# in HTTP.jl. Here's one way: -server = setup_server(HTTP.ip"127.0.0.1", 8080) +router = HTTP.Router() +## Register other routes as needed +HTTP.register!(router, "/api/solve", wrap_endpoint(endpoint_solve)) +server = HTTP.serve!(router, HTTP.ip"127.0.0.1", 8080) # ## The client side diff --git a/src/JuMP.jl b/src/JuMP.jl index 54f5edb5171..98a37e60aa8 100644 --- a/src/JuMP.jl +++ b/src/JuMP.jl @@ -1131,7 +1131,7 @@ julia> set_optimize_hook(model, my_hook) my_hook (generic function with 1 method) julia> optimize!(model; test_arg = true) -Base.Pairs{Symbol, Bool, Tuple{Symbol}, @NamedTuple{test_arg::Bool}}(:test_arg => 1) +Base.Pairs{Symbol, Bool, Nothing, @NamedTuple{test_arg::Bool}}(:test_arg => 1) Calling with `ignore_optimize_hook = true` ERROR: NoOptimizer() [...] diff --git a/src/macros/@force_nonlinear.jl b/src/macros/@force_nonlinear.jl index 0a1645d3d9a..e7fbf9d2d18 100644 --- a/src/macros/@force_nonlinear.jl +++ b/src/macros/@force_nonlinear.jl @@ -84,10 +84,10 @@ julia> @expression(model, @force_nonlinear(x * 2.0 * (1 + x) * x)) x * 2 * (1 + x) * x julia> @allocated @expression(model, x * 2.0 * (1 + x) * x) -3680 +3264 julia> @allocated @expression(model, @force_nonlinear(x * 2.0 * (1 + x) * x)) -768 +944 ``` """ macro force_nonlinear(expr)