Skip to content
3 changes: 3 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -43,3 +43,6 @@ To use JaCoP with JuMP, use `JaCoP.Optimizer`:
using JuMP, JaCoP
model = Model(JaCoP.Optimizer)
```

> [!WARNING]
> There is a known issue where JaCoP.jl segfaults if you delete variables or constraints.
9 changes: 9 additions & 0 deletions src/MOI/parse.jl
Original file line number Diff line number Diff line change
Expand Up @@ -19,3 +19,12 @@ function _parse_to_coeffs_vars(
vars = Variable[_info(model, t.variable).variable for t in terms]
return coeffs, vars
end

function _parse_to_coeffs_vars(
    model::Optimizer,
    terms::Vector{MOI.ScalarAffineTerm{T}},
) where {T <: Real}
    # Convert MOI affine terms into the parallel (coefficients, variables)
    # vectors expected by the JaCoP constraint constructors. Coefficients are
    # rounded to the nearest integer and narrowed to Int32 for the Java side.
    n = length(terms)
    coeffs = Vector{Int32}(undef, n)
    vars = Vector{Variable}(undef, n)
    for (i, term) in enumerate(terms)
        coeffs[i] = round(Int32, term.coefficient)
        vars[i] = _info(model, term.variable).variable
    end
    return coeffs, vars
end
140 changes: 98 additions & 42 deletions src/MOI/wrapper.jl
Original file line number Diff line number Diff line change
Expand Up @@ -59,25 +59,16 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
# VariableInfo also stores some additional fields like the type of variable.
constraint_info::Dict{MOI.ConstraintIndex, ConstraintInfo}

# # Memorise the objective sense and the function separately, as the Concert
# # API forces to give both at the same time.
# objective_sense::MOI.OptimizationSense
# objective_function::Union{Nothing, MOI.AbstractScalarFunction}
# objective_function_cp::Union{Nothing, NumExpr}
# objective_cp::Union{Nothing, IloObjective}
# Objective sense (min/max/feasibility). Required for MOI tests.
objective_sense::MOI.OptimizationSense
# Type and value of the objective function if set; nothing otherwise.
objective_function_type::Union{Nothing, DataType}
objective_function::Union{Nothing, MOI.VariableIndex, MOI.ScalarAffineFunction{Float64}}

# Cached solution state.
termination_status::MOI.TerminationStatusCode
primal_status::MOI.ResultStatusCode

# # Mappings from variable and constraint names to their indices. These are
# # lazily built on-demand, so most of the time, they are `nothing`.
# # The solver's functionality is not useful in this case, as it can only
# # handle integer variables. Moreover, bound constraints do not have names
# # for the solver.
# name_to_variable::Union{Nothing, Dict{String, MOI.VariableIndex}}
# name_to_constraint::Union{Nothing, Dict{String, MOI.ConstraintIndex}}

"""
Optimizer()

Expand All @@ -94,12 +85,9 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
model.termination_status = MOI.OPTIMIZE_NOT_CALLED
model.primal_status = MOI.NO_SOLUTION

# model.objective_sense = MOI.FEASIBILITY_SENSE
# model.objective_function = nothing
# model.objective_function_cp = nothing
# model.objective_cp = nothing

# model.callback_state = CB_NONE
model.objective_sense = MOI.FEASIBILITY_SENSE
model.objective_function_type = nothing
model.objective_function = nothing

MOI.empty!(model)
return model
Expand All @@ -111,6 +99,9 @@ function MOI.empty!(model::Optimizer)
model.name = ""
empty!(model.variable_info)
empty!(model.constraint_info)
model.objective_sense = MOI.FEASIBILITY_SENSE
model.objective_function_type = nothing
model.objective_function = nothing
model.termination_status = MOI.OPTIMIZE_NOT_CALLED
model.primal_status = MOI.NO_SOLUTION
return
Expand All @@ -120,6 +111,8 @@ function MOI.is_empty(model::Optimizer)
!isempty(model.name) && return false
!isempty(model.variable_info) && return false
!isempty(model.constraint_info) && return false
model.objective_sense != MOI.FEASIBILITY_SENSE && return false
(model.objective_function_type !== nothing || model.objective_function !== nothing) && return false
model.termination_status != MOI.OPTIMIZE_NOT_CALLED && return false
return true
end
Expand All @@ -135,6 +128,25 @@ function MOI.supports(
return true
end

function MOI.get(model::Optimizer, ::MOI.ObjectiveFunction{F}) where {F}
    # Return the cached objective, asserting it was stored with type `F`;
    # asking for the wrong type is an error, matching MOI's contract.
    stored_type = model.objective_function_type
    stored_type === F ||
        error("Objective function type is $stored_type, not $F.")
    return model.objective_function::F
end

function MOI.set(
    model::Optimizer,
    ::MOI.ObjectiveFunction{F},
    func::F,
) where {F <: Union{MOI.VariableIndex, MOI.ScalarAffineFunction{Float64}}}
    # Cache the objective function together with its concrete type so both
    # `ObjectiveFunction{F}` and `ObjectiveFunctionType` can be queried later.
    model.objective_function = func
    model.objective_function_type = F
    return
end

function MOI.supports_constraint(
::Optimizer,
::Type{MOI.VariableIndex},
Expand All @@ -161,7 +173,7 @@ end

# MOI.supports(::Optimizer, ::MOI.NumberOfThreads) = true
# MOI.supports(::Optimizer, ::MOI.TimeLimitSec) = true
# MOI.supports(::Optimizer, ::MOI.ObjectiveSense) = true
# `MOI.ObjectiveSense` is supported: it is cached on the model (see the
# `objective_sense` field) and required by the MOI test suite.
MOI.supports(::Optimizer, ::MOI.ObjectiveSense) = true
# MOI.supports(::Optimizer, ::MOI.RawOptimizerAttribute) = true

# Variables and constraints can be added one at a time; a bulk `copy_to`
# is not the only way to build a model with this optimizer.
MOI.supports_incremental_interface(::Optimizer) = true
Expand All @@ -170,9 +182,22 @@ function MOI.copy_to(dest::Optimizer, src::MOI.ModelLike)
return MOI.Utilities.default_copy_to(dest, src)
end

# Return the cached optimization sense (MIN/MAX/FEASIBILITY).
MOI.get(model::Optimizer, ::MOI.ObjectiveSense) = model.objective_sense

function MOI.set(
    model::Optimizer,
    ::MOI.ObjectiveSense,
    sense::MOI.OptimizationSense,
)
    # Only record the sense; nothing is pushed to the solver at set time.
    model.objective_sense = sense
    return nothing
end

# Return the concrete type of the stored objective function, or `nothing`
# when no objective has been set.
MOI.get(model::Optimizer, ::MOI.ObjectiveFunctionType) =
    model.objective_function_type

function MOI.get(model::Optimizer, ::MOI.ListOfModelAttributesSet)
attributes = Any[MOI.ObjectiveSense()]
typ = MOI.get(model, MOI.ObjectiveFunctionType())
typ = model.objective_function_type
if typ !== nothing
push!(attributes, MOI.ObjectiveFunction{typ}())
end
Expand All @@ -184,28 +209,54 @@ function MOI.optimize!(model::Optimizer)
info.variable for
info in values(model.variable_info) if info.variable isa IntVar
]
if isempty(int_vars)
float_vars = FloatVar[
info.variable for
info in values(model.variable_info) if info.variable isa FloatVar
]
if isempty(int_vars) && isempty(float_vars)
model.termination_status = MOI.OPTIMAL
model.primal_status = MOI.FEASIBLE_POINT
return
end
search = DepthFirstSearch(())
indomain = IndomainMin(())
select = InputOrderSelect(
(Store, Vector{Var}, Indomain),
model.inner,
int_vars,
indomain,
)
result = jcall(
search,
"labeling",
jboolean,
(Store, SelectChoicePoint),
model.inner,
select,
)
if result != 0
result = true
if !isempty(int_vars)
search = DepthFirstSearch(())
indomain = IndomainMin(())
select = InputOrderSelect(
(Store, Vector{Var}, Indomain),
model.inner,
int_vars,
indomain,
)
result = jcall(
search,
"labeling",
jboolean,
(Store, SelectChoicePoint),
model.inner,
select,
) != 0
end
if result && !isempty(float_vars)
search_float = DepthFirstSearch(())
comparator = SmallestDomainFloat(())
# JNI: use Var[] (FloatVar[] passes as subclass); 3rd arg is ComparatorVariable.
select_float = SplitSelectFloat(
(Store, Vector{Var}, ComparatorVariable),
model.inner,
float_vars,
comparator,
)
result = jcall(
search_float,
"labeling",
jboolean,
(Store, SelectChoicePoint),
model.inner,
select_float,
) != 0
end
if result
model.termination_status = MOI.OPTIMAL
model.primal_status = MOI.FEASIBLE_POINT
else
Expand All @@ -228,6 +279,11 @@ function MOI.get(model::Optimizer, ::MOI.ResultCount)
end

function MOI.get(model::Optimizer, ::MOI.VariablePrimal, vi::MOI.VariableIndex)
    # Query the solved value of `vi` from the underlying JaCoP variable.
    # NOTE: the diff residue that unconditionally returned the int value
    # before the FloatVar branch has been removed — continuous variables
    # must go through the double-valued `value()` JNI call.
    v = _info(model, vi).variable
    if v isa FloatVar
        return Float64(jcall(v, "value", jdouble, ()))
    else
        # IntVar/BooleanVar expose an int-valued `value()`.
        return Int(jcall(v, "value", jint, ()))
    end
end
66 changes: 39 additions & 27 deletions src/MOI/wrapper_constraints_mo.jl
Original file line number Diff line number Diff line change
@@ -1,51 +1,63 @@
## ScalarAffineFunction-in-Set

function _build_linear_constraint(
    model::Optimizer,
    f::MOI.ScalarAffineFunction{T},
    s::MOI.AbstractScalarSet,
    rel::String,
) where {T <: Real}
    # Translate `f rel rhs(s)` into a JaCoP linear constraint. Integer and
    # continuous variables need different JaCoP classes (LinearInt vs
    # LinearFloat); mixing the two kinds in one function is unsupported.
    coeffs, vars, constant = _parse_to_coeffs_vars(model, f)
    rhs = MOI.constant(s) - constant
    if all(v -> v isa IntVar, vars)
        return LinearInt(
            (Store, Vector{IntVar}, Vector{jint}, JString, jint),
            model.inner,
            vars,
            coeffs,
            rel,
            Int32(rhs),
        )
    elseif all(v -> v isa FloatVar, vars)
        # Canonicalize so duplicate/zero terms are merged, then take BOTH the
        # coefficients and the variables from the canonical form. (Previously
        # `vars` came from the raw `f` while `coeffs_float` came from the
        # canonical form, so the two vectors could differ in length/order.)
        f_canon = MOI.Utilities.canonical(f)
        coeffs_float = Float64[t.coefficient for t in f_canon.terms]
        vars_float =
            Variable[_info(model, t.variable).variable for t in f_canon.terms]
        rhs_float = Float64(MOI.constant(s) - f_canon.constant)
        return LinearFloat(
            (Store, Vector{FloatVar}, Vector{jdouble}, JString, jdouble),
            model.inner,
            vars_float,
            coeffs_float,
            rel,
            rhs_float,
        )
    else
        error(
            "ScalarAffineFunction with mixed integer and continuous variables is not supported",
        )
    end
end

function _build_constraint(
    model::Optimizer,
    f::MOI.ScalarAffineFunction{T},
    s::MOI.GreaterThan{T},
) where {T <: Real}
    # f(x) >= s.lower — delegate to the shared linear-constraint builder.
    # (The superseded inline LinearInt construction left behind by the diff
    # has been removed.)
    return _build_linear_constraint(model, f, s, ">=")
end

function _build_constraint(
    model::Optimizer,
    f::MOI.ScalarAffineFunction{T},
    s::MOI.LessThan{T},
) where {T <: Real}
    # f(x) <= s.upper — delegate to the shared linear-constraint builder.
    # (The superseded inline LinearInt construction left behind by the diff
    # has been removed.)
    return _build_linear_constraint(model, f, s, "<=")
end

function _build_constraint(
    model::Optimizer,
    f::MOI.ScalarAffineFunction{T},
    s::MOI.EqualTo{T},
) where {T <: Real}
    # f(x) == s.value — delegate to the shared linear-constraint builder.
    # (The superseded inline LinearInt construction left behind by the diff
    # has been removed.)
    return _build_linear_constraint(model, f, s, "==")
end

# No vector of constraints, there is no more efficient way to do it.
Expand Down
33 changes: 20 additions & 13 deletions src/MOI/wrapper_variables.jl
Original file line number Diff line number Diff line change
Expand Up @@ -41,28 +41,32 @@ function _make_vars(model::Optimizer, variables::Vector{<:Variable})
return indices
end

"""
    _sanitise_bounds(lb, ub, T)

Replace a `nothing` lower/upper bound by the extreme value of type `T`
(`typemin(T)` / `typemax(T)`); non-`nothing` bounds are returned unchanged.
Returns the `(lb, ub)` tuple.
"""
function _sanitise_bounds(lb::Union{Nothing, Real}, ub::Union{Nothing, Real}, T)
    # The stale pre-diff method signature (`lb::Real, ub::Real`) that could
    # never match `nothing` has been removed; only the Union signature stays.
    return something(lb, typemin(T)), something(ub, typemax(T))
end

# JaCoP FloatVar requires explicit bounds; unbounded would leave internal intervals null (NPE).
# Defaults used when a continuous variable is created without explicit bounds.
# NOTE(review): ±1.0e30 is assumed to lie inside JaCoP's representable
# float-interval range — confirm against JaCoP's FloatDomain limits.
const _DEFAULT_FLOAT_LB = -1.0e30
const _DEFAULT_FLOAT_UB = 1.0e30

function _make_floatvar(
model::Optimizer,
set::MOI.AbstractScalarSet;
lb::Union{Nothing, Float64}=nothing,
ub::Union{Nothing, Float64}=nothing,
)
v = if lb === nothing && ub === nothing
FloatVar((Store,), model.inner)
else
lb_, ub_ = _sanitise_bounds(lb, ub, Float64)
FloatVar((Store, jdouble, jdouble), model.inner, lb_, ub_)
end
lb_, ub_ = _sanitise_bounds(
something(lb, _DEFAULT_FLOAT_LB),
something(ub, _DEFAULT_FLOAT_UB),
Float64,
)
v = FloatVar((Store, jdouble, jdouble), model.inner, lb_, ub_)

vindex, cindex = _make_var(model, v, set)
_info(model, vindex).type = CONTINUOUS
Expand Down Expand Up @@ -92,7 +96,7 @@ function _make_intvar(
end

function _make_boolvar(model::Optimizer, set::MOI.AbstractScalarSet)
    # Create a binary variable. JaCoP's Java class is `BooleanVar`; the stale
    # pre-diff `BoolVar` line left behind by the rendered diff is removed.
    vindex, cindex = _make_var(model, BooleanVar((Store,), model.inner), set)
    _info(model, vindex).type = BINARY
    return vindex, cindex
end
Expand All @@ -118,11 +122,14 @@ function MOI.supports_add_constrained_variable(
end

function MOI.add_variable(model::Optimizer)
    # A free MOI variable maps to a continuous JaCoP FloatVar with the default
    # finite bounds (JaCoP needs explicit bounds). Stale diff-residue lines —
    # the old unbounded `FloatVar((Store,), …)` constructor and assignments to
    # the undefined locals `lb`/`ub` — have been removed.
    v = FloatVar(
        (Store, jdouble, jdouble),
        model.inner,
        _DEFAULT_FLOAT_LB,
        _DEFAULT_FLOAT_UB,
    )
    vindex = _make_var(model, v)
    _info(model, vindex).type = CONTINUOUS
    return vindex
end

Expand Down
Loading
Loading