Skip to content

Commit ba60ef1

Browse files
blibliboe and Laar authored
33 HSJ (Hop Skip Jump method) (#39)
* Updated the generate parralel to return state aka all solutions from metaheuristics * Updated generate alternatives to make multiple MGA techniques possible * Update optimization specifically to also allow for multiple MGa techniques * Added the max-distance method to the MGA methods, this was already implemented * Added the max-distance method to the MGA methods, this was already implemented * Changed it so that max distance is not neccesarily the standard when testing * Added the logic to use HSJ as a MGA method * Put the test in the right test suite * fixed end of file * Added HSJ in the correct form * Fixed the formatters and mixed line ending * Small update to the formatter * Fixed some errors * Fixed the error where max-distance was spelled without a capital --------- Co-authored-by: Laar <luuk.vandelaar@tno.nl>
1 parent 25a7467 commit ba60ef1

File tree

7 files changed

+222
-2
lines changed

7 files changed

+222
-2
lines changed

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,3 +10,4 @@ coverage
1010
docs/build/
1111
env
1212
node_modules
13+
env

docs/Project.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
[deps]
22
Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
3+
JuliaFormatter = "98e50ef6-434e-11e9-1051-2b60c6c9e899"
34
LiveServer = "16fef848-5104-11e9-1b77-fb7a48bbb589"
45
NearOptimalAlternatives = "a88418a5-8dc5-4f1c-8a3e-d5a75ec49c70"
56

src/MGA-Methods/HSJ.jl

Lines changed: 97 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,97 @@
1+
export HSJ_update!, HSJ_initial!
2+
3+
4+
"""
5+
HSJ_initial!(
6+
model::JuMP.Model,
7+
variables::AbstractArray{T,N},
8+
fixed_variables::Vector{VariableRef};
9+
weights::Vector{Float64} = zeros(length(variables)),
10+
metric::Distances.SemiMetric = SqEuclidean(),
11+
) where {T<:Union{VariableRef,AffExpr}, N}
12+
13+
Initialize the objective of a JuMP model using the HSJ (Hop-Skip-Jump) method to generate alternative solutions.
14+
15+
This function sets a new objective that minimizes the weighted sum of the decision variables, where weights are based on the sign (non-zero value) of the original optimal solution. Fixed variables are locked at their optimal values.
16+
17+
# Arguments
18+
- `model::JuMP.Model`: a solved JuMP model whose objective is to be redefined for alternative generation.
19+
- `variables::AbstractArray{T,N}`: the variables involved in the objective, typically a vector or matrix of `VariableRef`s or `AffExpr`s.
20+
- `fixed_variables::Vector{VariableRef}`: variables to be fixed at their current values to avoid changes in alternatives.
21+
- `weights::Vector{Float64}`: optional vector of weights for each variable; will be internally overwritten based on variable values.
22+
- `metric::Distances.SemiMetric`: unused in this method (included for consistency with other alternative generation methods).
23+
24+
# Behavior
25+
- Variables that are zero in the original solution receive weight 0; others receive weight 1.
26+
- Fixed variables are frozen at their optimal values using `fix(...)`.
27+
- The objective is set to minimize the weighted sum of the variables, encouraging sparsity or deviation from the original.
28+
"""
29+
function HSJ_initial!(
30+
model::JuMP.Model,
31+
variables::AbstractArray{T,N},
32+
fixed_variables::Vector{VariableRef};
33+
weights::Vector{Float64} = zeros(length(variables)),
34+
metric::Distances.SemiMetric = SqEuclidean(),
35+
) where {T<:Union{VariableRef,AffExpr},N}
36+
# new objective function consist of the n variables in variables
37+
for (i, v) in enumerate(variables)
38+
if value(v) == 0
39+
weights[i] = 0
40+
else
41+
weights[i] = 1
42+
end
43+
end
44+
# Fix the variables that are fixed
45+
fix.(fixed_variables, value.(fixed_variables), force = true)
46+
47+
# update these variables based on their sign
48+
objective_function = [v * weights[i] for (i, v) in enumerate(variables)]
49+
50+
# Update objective by adding the distance between variables and the previous optimal solution.
51+
@objective(model, Min, sum(objective_function))
52+
end
53+
54+
"""
55+
HSJ_update!(
56+
model::JuMP.Model,
57+
variables::AbstractArray{T,N};
58+
weights::Vector{Float64} = zeros(length(variables)),
59+
metric::Distances.SemiMetric = SqEuclidean(),
60+
) where {T<:Union{VariableRef,AffExpr}, N}
61+
62+
Update the objective of a JuMP model using the HSJ method to generate the next alternative solution.
63+
64+
This function redefines the objective based on the current optimal solution of the model, using weights determined by the sign of each variable's value (non-zero implies weight 1).
65+
66+
# Arguments
67+
- `model::JuMP.Model`: the JuMP model to be updated.
68+
- `variables::AbstractArray{T,N}`: the decision variables involved in the updated objective.
69+
- `weights::Vector{Float64}`: optional vector of weights; will be overwritten based on current variable values.
70+
- `metric::Distances.SemiMetric`: unused in this method (included for interface consistency).
71+
72+
# Behavior
73+
- Variables with a zero value receive weight 0; all others receive weight 1.
74+
- A new objective is set: minimize the weighted sum of the variables.
75+
- This function does not re-fix any variables; it is typically called iteratively after `HSJ_initial!`.
76+
"""
77+
function HSJ_update!(
78+
model::JuMP.Model,
79+
variables::AbstractArray{T,N};
80+
weights::Vector{Float64} = zeros(length(variables)),
81+
metric::Distances.SemiMetric = SqEuclidean(),
82+
) where {T<:Union{VariableRef,AffExpr},N}
83+
# new objective function consist of the n variables in variables
84+
for (i, v) in enumerate(variables)
85+
if value(v) == 0
86+
weights[i] = 0
87+
else
88+
weights[i] = 1
89+
end
90+
end
91+
92+
# update these variables based on their sign
93+
objective_function = [v * weights[i] for (i, v) in enumerate(variables)]
94+
95+
# Update objective by adding the distance between variables and the previous optimal solution.
96+
@objective(model, Min, sum(objective_function))
97+
end

src/MGA-Methods/Max-Distance.jl

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@ function Dist_initial!(
3333
weights::Vector{Float64} = zeros(length(variables)),
3434
metric::Distances.SemiMetric = SqEuclidean(),
3535
) where {T<:Union{VariableRef,AffExpr},N}
36+
3637
vars_vec = [v for v in variables]
3738
solution = value.(vars_vec)
3839

@@ -43,6 +44,7 @@ function Dist_initial!(
4344
@objective(model, Max, Distances.evaluate(metric, vars_vec, solution))
4445
end
4546

47+
4648
"""
4749
Dist_update!(
4850
model::JuMP.Model,

src/NearOptimalAlternatives.jl

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,10 +9,13 @@ using Metaheuristics
99
using DataStructures
1010

1111
include("MGA-Methods/Max-Distance.jl")
12+
include("MGA-Methods/HSJ.jl")
1213

1314
include("results.jl")
1415
include("alternative-optimisation.jl")
1516
include("generate-alternatives.jl")
1617
include("alternative-metaheuristics.jl")
1718

19+
20+
1821
end

src/alternative-optimisation.jl

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,16 +5,17 @@ const METHOD_DISPATCH shows the mapping of method symbols to their corresponding
55
It is used to dynamically select the appropriate function based on the method specified in the `create_alternative_generating_problem!` function.
66
"""
77
const METHOD_DISPATCH_UPDATE = Dict{Symbol,Function}(
8-
# :HSJ => HSJ_update!,
8+
:HSJ => HSJ_update!,
99
# :Spores => SPORES_update!,
1010
# :Min_Max_Variables => MM_update!,
1111
# :Random_Vector => RV_update!,
1212
# :Directionally_Weighted_Variables => DW_update!,
1313
:Max_Distance => Dist_update!,
1414
)
1515

16+
1617
const METHOD_DISPATCH_INITIAL = Dict{Symbol,Function}(
17-
# :HSJ => HSJ_update!,
18+
:HSJ => HSJ_initial!,
1819
# :Spores => SPORES_update!,
1920
# :Min_Max_Variables => MM_update!,
2021
# :Random_Vector => RV_update!,

test/test-HSJ.jl

Lines changed: 115 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,115 @@
1+
@testset "Test generate alternatives with HSJ as modeling_method." begin
2+
@testset "Test regular run with one alternative." begin
3+
optimizer = Ipopt.Optimizer
4+
model = JuMP.Model(optimizer)
5+
6+
# Initialise simple `square` JuMP model
7+
@variable(model, 0 x_1 1)
8+
@variable(model, 0 x_2 1)
9+
@objective(model, Max, x_1 + x_2)
10+
JuMP.optimize!(model)
11+
12+
results = NearOptimalAlternatives.generate_alternatives!(
13+
model,
14+
0.1,
15+
all_variables(model),
16+
1;
17+
modeling_method = :HSJ,
18+
)
19+
20+
# Test that `results` contains one solution with 2 variables, and an objective value between 1.8 and 2.0.
21+
@test length(results.solutions) == 1 &&
22+
length(results.solutions[1]) == 2 &&
23+
length(results.objective_values) == 1 &&
24+
(
25+
results.objective_values[1] 1.8 ||
26+
isapprox(results.objective_values[1], 1.8)
27+
) &&
28+
(
29+
results.objective_values[1] 2.0 ||
30+
isapprox(results.objective_values[1], 2.0)
31+
)
32+
end
33+
34+
@testset "Test regular run with one alternative with one fixed variable." begin
35+
optimizer = Ipopt.Optimizer
36+
model = JuMP.Model(optimizer)
37+
38+
# Initialise simple `square` JuMP model
39+
@variable(model, 0 x_1 1)
40+
@variable(model, 0 x_2 1)
41+
@objective(model, Max, x_1 + x_2)
42+
JuMP.optimize!(model)
43+
44+
results = NearOptimalAlternatives.generate_alternatives!(
45+
model,
46+
0.1,
47+
all_variables(model),
48+
1;
49+
fixed_variables = [x_2],
50+
modeling_method = :HSJ,
51+
)
52+
53+
# Test that `results` contains one solution with 2 variables, and an objective value between 1.8 and 2.0. Also, `x_2` should remain around 1.0 and `x_1` should be between 0.8 and 1.0.
54+
@test length(results.solutions) == 1 &&
55+
length(results.solutions[1]) == 2 &&
56+
length(results.objective_values) == 1 &&
57+
(
58+
results.objective_values[1] 1.8 ||
59+
isapprox(results.objective_values[1], 1.8)
60+
) &&
61+
(
62+
results.objective_values[1] 2.0 ||
63+
isapprox(results.objective_values[1], 2.0)
64+
) &&
65+
(
66+
results.solutions[1][x_1] 0.8 ||
67+
isapprox(results.solutions[1][x_1], 0.8)
68+
) &&
69+
(
70+
results.solutions[1][x_1] 1.0 ||
71+
isapprox(results.solutions[1][x_1], 1.0)
72+
) &&
73+
isapprox(results.solutions[1][x_2], 1.0)
74+
end
75+
76+
@testset "Test regular run with two alternatives." begin
77+
optimizer = Ipopt.Optimizer
78+
model = JuMP.Model(optimizer)
79+
80+
# Initialise simple `square` JuMP model
81+
@variable(model, 0 x_1 1)
82+
@variable(model, 0 x_2 1)
83+
@objective(model, Max, x_1 + x_2)
84+
JuMP.optimize!(model)
85+
86+
results = NearOptimalAlternatives.generate_alternatives!(
87+
model,
88+
0.1,
89+
all_variables(model),
90+
2;
91+
modeling_method = :HSJ,
92+
)
93+
94+
# Test that `results` contains 2 solutions with two variables each, where the objective values of both solutions are between 1.8 and 2.0.
95+
@test length(results.solutions) == 2 &&
96+
length(results.solutions[2]) == 2 &&
97+
length(results.objective_values) == 2 &&
98+
(
99+
results.objective_values[1] 1.8 ||
100+
isapprox(results.objective_values[1], 1.8)
101+
) &&
102+
(
103+
results.objective_values[1] 2.0 ||
104+
isapprox(results.objective_values[1], 2.0)
105+
) &&
106+
(
107+
results.objective_values[2] 1.8 ||
108+
isapprox(results.objective_values[2], 1.8)
109+
) &&
110+
(
111+
results.objective_values[2] 2.0 ||
112+
isapprox(results.objective_values[2], 2.0)
113+
)
114+
end
115+
end

0 commit comments

Comments
 (0)