4 changes: 2 additions & 2 deletions .github/workflows/documentation.yml
@@ -16,10 +16,10 @@ jobs:
with:
# Fix the Julia version, because Enzyme doesn't always like being updated
# to `latest`.
version: '1.11'
version: '1.12'
- uses: julia-actions/cache@v2
- name: Install Gurobi license
env:
env:
SECRET_GUROBI_LICENSE: ${{ secrets.GUROBI_LICENSE }}
shell: bash
run: |
3 changes: 3 additions & 0 deletions .vale.ini
@@ -63,6 +63,9 @@ Google.Quotes = NO
Google.FirstPerson = NO
Vale.Spelling = NO

[docs/src/packages/MultiObjectiveAlgorithms.md]
Vale.Spelling = NO

[docs/src/packages/Optim.md]
Google.EmDash = NO

1 change: 0 additions & 1 deletion docs/.gitignore
@@ -9,5 +9,4 @@ src/release_notes.md
src/packages/*.md
!src/packages/solvers.md
!src/packages/extensions.md
!src/packages/NLopt.md
src/JuMP.pdf
22 changes: 11 additions & 11 deletions docs/Project.toml
@@ -54,33 +54,33 @@ Clarabel = "=0.11.0"
Clustering = "0.15.8"
DSP = "0.8.4"
DataFrames = "1.8.1"
DifferentiationInterface = "0.7.12"
DifferentiationInterface = "0.7.13"
DimensionalData = "=0.29.25"
Distributions = "0.25.122"
Documenter = "=1.15.0"
Distributions = "0.25.123"
Documenter = "=1.16.1"
DocumenterCitations = "1.4.1"
Dualization = "0.7.1"
Enzyme = "0.13.109"
ForwardDiff = "1.3.0"
Gurobi = "=1.9.0"
Enzyme = "0.13.112"
ForwardDiff = "1.3.1"
Gurobi = "=1.9.1"
HTTP = "1.10.19"
HiGHS = "=1.20.1"
Images = "0.26.2"
Interpolations = "0.15"
Interpolations = "0.16.2"
Ipopt = "=1.13.0"
JSON = "0.21.4"
JSONSchema = "1.5.0"
LinearOperatorCollection = "2.2.1"
LinearOperatorCollection = "2.3.0"
Literate = "2.21.0"
MarkdownAST = "0.1.2"
MathOptChordalDecomposition = "=0.2.0"
MathOptInterface = "=1.48.0"
MultiObjectiveAlgorithms = "=1.8.0"
MultiObjectiveAlgorithms = "=1.8.1"
PATHSolver = "=1.7.9"
ParametricOptInterface = "0.14.1"
Plots = "1.41.2"
Plots = "1.41.3"
RegularizedLeastSquares = "0.16.11"
SCS = "=2.4.0"
SCS = "=2.5.0"
SQLite = "1.6.1"
SpecialFunctions = "2.6.1"
StatsPlots = "0.15.8"
29 changes: 18 additions & 11 deletions docs/make.jl
@@ -560,17 +560,24 @@ function _add_moi_pages()
end
end
end
# Fix `# Infeasibility certificates` in moi/background/infeasibility_certificates.md
filename = joinpath(moi_dir, "background", "infeasibility_certificates.md")
contents = read(filename, String)
id = "# [Infeasibility certificates](@id moi_infeasibility_certificates)"
contents = replace(contents, r"^# Infeasibility certificates$"m => id)
write(filename, contents)
# Fix `JSON.Object` in moi/submodules/FileFormats/overview.md
# This can be removed once we support JSON@1 in the documentation
filename = joinpath(moi_dir, "submodules", "FileFormats", "overview.md")
contents = read(filename, String)
write(filename, replace(contents, "JSON.Object" => "Dict"))
for (filename, replacements) in [
"background/infeasibility_certificates.md" => [
r"^# Infeasibility certificates$"m => "# [Infeasibility certificates](@id moi_infeasibility_certificates)",
],
"reference/models.md" => ["# ResultStatusCode" => "# Result Status"],
# This can be removed once we support JSON@1 in the documentation
"submodules/FileFormats/overview.md" => ["JSON.Object" => "Dict"],
# These can be removed once we support [email protected] or later
"reference/models.md" => ["# ResultStatusCode" => "# Result Status"],
"submodules/Nonlinear/SymbolicAD.md" => [
"# `simplify`" => "# [`simplify`](@id symbolic_ad_manual_simplify)",
"# `variables`" => "# [`variables`](@id symbolic_ad_manual_variables)",
"# `derivative`" => "# [`derivative`](@id symbolic_ad_manual_derivative)",
],
]
filename = joinpath(moi_dir, filename)
write(filename, replace(read(filename, String), replacements...))
end
return
end

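As an aside, the refactored loop relies on `Base.replace` accepting several `pattern => replacement` pairs in a single call, which is what the `replacements...` splat exploits. A minimal sketch (illustrative text, not taken from the MOI pages):

```julia
# Apply several pattern => replacement pairs in one pass, as the new
# make.jl loop does with `replace(read(filename, String), replacements...)`.
replacements = [
    r"^# Infeasibility certificates$"m => "# [Infeasibility certificates](@id moi_infeasibility_certificates)",
    "JSON.Object" => "Dict",
]
text = "# Infeasibility certificates\nThe parser returns a JSON.Object."
replace(text, replacements...)
# "# [Infeasibility certificates](@id moi_infeasibility_certificates)\nThe parser returns a Dict."
```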
14 changes: 7 additions & 7 deletions docs/packages.toml
@@ -47,7 +47,7 @@
[CSDP]
rev = "v1.1.2"
[cuOpt]
rev = "v0.1.1"
rev = "v0.1.2"
[DiffOpt]
rev = "v0.5.4"
extension = true
@@ -61,7 +61,7 @@
[GLPK]
rev = "v1.2.1"
[Gurobi]
rev = "v1.9.0"
rev = "v1.9.1"
[HiGHS]
rev = "v1.20.1"
[Hypatia]
@@ -70,7 +70,7 @@
[Ipopt]
rev = "v1.13.0"
[KNITRO]
rev = "v1.1.0"
rev = "v1.2.0"
[MathOptAnalyzer]
rev = "v0.1.1"
[MathOptIIS]
@@ -80,7 +80,7 @@
[MosekTools]
rev = "v0.15.10"
[MultiObjectiveAlgorithms]
rev = "3bf253221cfa943532b5030ab7d5bed6157b9786"
rev = "v1.8.1"
has_html = true
[NEOSServer]
rev = "v1.2.0"
@@ -103,7 +103,7 @@
rev = "v0.7.6"
extension = true
[SCS]
rev = "v2.4.0"
rev = "v2.5.0"
[SDPA]
rev = "v0.6.1"
[SDPLR]
@@ -144,7 +144,7 @@
has_html = true
[COPT]
user = "COPT-Public"
rev = "v1.1.30"
rev = "v1.1.31"
[COSMO]
user = "oxfordcontrol"
rev = "v0.8.9"
@@ -159,7 +159,7 @@
extension = true
[EAGO]
user = "PSORLab"
rev = "v0.8.3"
rev = "v0.8.4"
filename = "docs/src/jump/README.md"
[GAMS]
user = "GAMS-dev"
4 changes: 2 additions & 2 deletions docs/src/manual/containers.md
@@ -138,7 +138,7 @@ julia> DataFrames.DataFrame(table)
6 │ 2 3 (2, 3)
```

## DenseAxisArray
## [DenseAxisArray](@id manual_dense_axis_array)

A [`Containers.DenseAxisArray`](@ref) is created when the index sets are
rectangular, but not of the form `1:n`. The index sets can be of any type.
@@ -275,7 +275,7 @@ And data, a 2-element Vector{Tuple{Int64, Symbol}}:
(2, :B)
```

## SparseAxisArray
## [SparseAxisArray](@id manual_sparse_axis_array)

A [`Containers.SparseAxisArray`](@ref) is created when the index sets are
non-rectangular. This occurs in two circumstances:
2 changes: 1 addition & 1 deletion docs/src/manual/nlp.md
@@ -227,7 +227,7 @@ julia> @variable(model, y);
julia> c = [1, 2];

julia> @NLobjective(model, Min, c' * x + 3y)
ERROR: Unexpected array [1 2] in nonlinear expression. Nonlinear expressions may contain only scalar expressions.
ERROR: Unexpected array adjoint([1, 2]) in nonlinear expression. Nonlinear expressions may contain only scalar expressions.
[...]
```

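One note on why this doctest output changed (my reading, not stated in the PR): the interpolated array is `c'`, and its compact `string` form differs between the old and new documentation builds, as the two error lines above show. Roughly:

```julia
# Illustrative only: the old doctest printed the adjoint as "[1 2]"; the
# updated build prints it as "adjoint([1, 2])".
c = [1, 2]
string(c')  # "adjoint([1, 2])" on the new doc build; "[1 2]" on the old one
```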
8 changes: 4 additions & 4 deletions docs/src/manual/nonlinear.md
@@ -796,7 +796,7 @@ works.

The operator takes `f(x::Vector)` as input, instead of the splatted `f(x...)`.

```jldoctest
```jldoctest; filter=r"\(::ForwardDiff.+\)"
julia> import ForwardDiff

julia> my_operator_bad(x::Vector) = sum(x[i]^2 for i in eachindex(x))
@@ -806,7 +806,7 @@ julia> my_operator_good(x...) = sum(x[i]^2 for i in eachindex(x))
my_operator_good (generic function with 1 method)

julia> ForwardDiff.gradient(x -> my_operator_bad(x...), [1.0, 2.0])
ERROR: MethodError: no method matching my_operator_bad(::ForwardDiff.Dual{ForwardDiff.Tag{var"#5#6", Float64}, Float64, 2}, ::ForwardDiff.Dual{ForwardDiff.Tag{var"#5#6", Float64}, Float64, 2})
ERROR: MethodError: no method matching my_operator_bad(::ForwardDiff.Dual, ::ForwardDiff.Dual)
[...]

julia> ForwardDiff.gradient(x -> my_operator_good(x...), [1.0, 2.0])
@@ -820,7 +820,7 @@ julia> ForwardDiff.gradient(x -> my_operator_good(x...), [1.0, 2.0])
The operator assumes `Float64` will be passed as input, but it must work for any
generic `Real` type.

```jldoctest
```jldoctest; filter=r"\(::ForwardDiff.+\)"
julia> import ForwardDiff

julia> my_operator_bad(x::Float64...) = sum(x[i]^2 for i in eachindex(x))
@@ -830,7 +830,7 @@ julia> my_operator_good(x::Real...) = sum(x[i]^2 for i in eachindex(x))
my_operator_good (generic function with 1 method)

julia> ForwardDiff.gradient(x -> my_operator_bad(x...), [1.0, 2.0])
ERROR: MethodError: no method matching my_operator_bad(::ForwardDiff.Dual{ForwardDiff.Tag{var"#5#6", Float64}, Float64, 2}, ::ForwardDiff.Dual{ForwardDiff.Tag{var"#5#6", Float64}, Float64, 2})
ERROR: MethodError: no method matching my_operator_bad(::ForwardDiff.Dual, ::ForwardDiff.Dual)
[...]

julia> ForwardDiff.gradient(x -> my_operator_good(x...), [1.0, 2.0])
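For context, and assuming standard Documenter behaviour (the PR itself doesn't explain it): the new `filter` on these `jldoctest` blocks removes regex matches from both the expected and the actual output before comparing them, which is why the shortened `::ForwardDiff.Dual` signatures above stay stable across Julia versions. Roughly:

```julia
# Roughly what the doctest filter does to both sides before comparison.
actual = "ERROR: MethodError: no method matching my_operator_bad(::ForwardDiff.Dual{ForwardDiff.Tag{var\"#5#6\", Float64}, Float64, 2}, ::ForwardDiff.Dual{ForwardDiff.Tag{var\"#5#6\", Float64}, Float64, 2})"
expected = "ERROR: MethodError: no method matching my_operator_bad(::ForwardDiff.Dual, ::ForwardDiff.Dual)"
pattern = r"\(::ForwardDiff.+\)"
replace(actual, pattern => "") == replace(expected, pattern => "")  # true
```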
2 changes: 1 addition & 1 deletion docs/src/tutorials/algorithms/pdhg.jl
@@ -255,7 +255,7 @@ end

MOI.get(::Optimizer, ::MOI.SolverName) = "PDHG"

# ### GenericModel
# ### [GenericModel](@id tutorial_pdhg_generic_model)

# The simplest way to solve a problem with your optimizer is to implement the
# method `MOI.optimize!(dest::Optimizer, src::MOI.ModelLike)`, where `src` is an
53 changes: 18 additions & 35 deletions docs/src/tutorials/applications/web_app.jl
@@ -71,47 +71,30 @@ endpoint_solve(Dict{String,Any}("lower_bound" => 1.2))

endpoint_solve(Dict{String,Any}())

# For a second function, we need a function that accepts an `HTTP.Request`
# object and returns an `HTTP.Response` object.

function serve_solve(request::HTTP.Request)
data = JSON.parse(String(request.body))
solution = endpoint_solve(data)
return HTTP.Response(200, JSON.json(solution))
end

# Finally, we need an HTTP server. There are a variety of ways you can do this
# in HTTP.jl. We use an explicit `Sockets.listen` so we have manual control of
# when we shutdown the server.

function setup_server(host, port)
server = HTTP.Sockets.listen(host, port)
HTTP.serve!(host, port; server = server) do request
try
## Extend the server by adding other endpoints here.
if request.target == "/api/solve"
return serve_solve(request)
else
return HTTP.Response(404, "target $(request.target) not found")
end
# We now need to turn each endpoint into a function that accepts an
# `HTTP.Request`, parses the JSON input, runs the endpoint, converts the result
# to JSON, and returns an `HTTP.Response`. In addition, the computation is
# handled in a separate thread, and we catch any unhandled exceptions.

function wrap_endpoint(endpoint::Function)
function serve_request(request::HTTP.Request)::HTTP.Response
task = Threads.@spawn try
ret = request.body |> String |> JSON.parse |> endpoint |> JSON.json
HTTP.Response(200, ret)
catch err
## Log details about the exception server-side
@info "Unhandled exception: $err"
## Return a response to the client
return HTTP.Response(500, "internal error")
HTTP.Response(500, "internal error: $err")
end
return fetch(task)
end
return server
end

# !!! warning
# HTTP.jl does not serve requests on a separate thread. Therefore, a
# long-running job will block the main thread, preventing concurrent users from
# submitting requests. To work-around this, read [HTTP.jl issue 798](https://github.com/JuliaWeb/HTTP.jl/issues/798)
# or watch [Building Microservices and Applications in Julia](https://www.youtube.com/watch?v=uLhXgt_gKJc&t=9543s)
# from JuliaCon 2020.
# Finally, we need an HTTP server. There are a variety of ways you can do this
# in HTTP.jl. Here's one way:

server = setup_server(HTTP.ip"127.0.0.1", 8080)
router = HTTP.Router()
## Register other routes as needed
HTTP.register!(router, "/api/solve", wrap_endpoint(endpoint_solve))
server = HTTP.serve!(router, HTTP.ip"127.0.0.1", 8080)

# ## The client side

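As the new comment above notes, each request's computation now runs on a separate thread: `Threads.@spawn` schedules the work on a worker thread and `fetch` waits for, and returns, its result. A minimal sketch of that pattern on its own (the summation is a stand-in, not tutorial code):

```julia
# Spawn work on a worker thread, then block only the current task for the result.
task = Threads.@spawn begin
    sum(abs2, 1:1_000)  # stand-in for a long-running solve
end
fetch(task)  # 333833500
```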
2 changes: 1 addition & 1 deletion src/JuMP.jl
@@ -1131,7 +1131,7 @@ julia> set_optimize_hook(model, my_hook)
my_hook (generic function with 1 method)

julia> optimize!(model; test_arg = true)
Base.Pairs{Symbol, Bool, Tuple{Symbol}, @NamedTuple{test_arg::Bool}}(:test_arg => 1)
Base.Pairs{Symbol, Bool, Nothing, @NamedTuple{test_arg::Bool}}(:test_arg => 1)
Calling with `ignore_optimize_hook = true`
ERROR: NoOptimizer()
[...]
4 changes: 2 additions & 2 deletions src/macros/@force_nonlinear.jl
@@ -84,10 +84,10 @@ julia> @expression(model, @force_nonlinear(x * 2.0 * (1 + x) * x))
x * 2 * (1 + x) * x

julia> @allocated @expression(model, x * 2.0 * (1 + x) * x)
3680
3264

julia> @allocated @expression(model, @force_nonlinear(x * 2.0 * (1 + x) * x))
768
944
```
"""
macro force_nonlinear(expr)