Skip to content

Commit 516d460

Browse files
blibliboeLaargnawin
authored
37 directionally weighted variables (#43)
* Updated the generate parralel to return state aka all solutions from metaheuristics * Updated generate alternatives to make multiple MGA techniques possible * Update optimization specifically to also allow for multiple MGa techniques * Added the max-distance method to the MGA methods, this was already implemented * Added the max-distance method to the MGA methods, this was already implemented * Changed it so that max distance is not neccesarily the standard when testing * Added random vector as a method * Exported core functions * Explicitly used JuMP when calling all variables * Changes based on the formatter * apply formatter * apply formatter --------- Co-authored-by: Laar <luuk.vandelaar@tno.nl> Co-authored-by: Ni Wang <125902905+gnawin@users.noreply.github.com>
1 parent 32f89b2 commit 516d460

File tree

4 files changed

+208
-2
lines changed

4 files changed

+208
-2
lines changed
Lines changed: 89 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,89 @@
1+
export DWV_initial!, DWV_update!

"""
    DWV_initial!(
        model::JuMP.Model,
        variables::AbstractArray{T,N},
        fixed_variables::Vector{VariableRef};
        weights::Vector{Float64} = zeros(length(variables)),
        metric::Distances.SemiMetric = SqEuclidean(),
        old_objective::AffExpr = JuMP.objective_function(model),
    ) where {T<:Union{VariableRef,AffExpr},N}

Initialize the objective of a JuMP model using the Directionally Weighted Variables
(DWV) method to generate alternative solutions.

Each variable is assigned a random integer weight whose admissible values depend on
the sign of the variable's coefficient in `old_objective`:

- coefficient > 0: weight drawn from `{0, 1}`
- coefficient < 0: weight drawn from `{-1, 0}`
- coefficient == 0 (or variable absent from the objective): weight drawn from
  `{-1, 0, 1}`

The model objective is replaced by minimization of the weighted sum of the
variables, and all `fixed_variables` are fixed at their current values.

# Arguments
- `model::JuMP.Model`: a solved JuMP model whose objective is to be redefined for
  alternative generation.
- `variables::AbstractArray{T,N}`: the variables involved in the objective,
  typically a vector or matrix of `VariableRef`s or `AffExpr`s.
- `fixed_variables::Vector{VariableRef}`: variables to be fixed at their current
  values so they do not change across alternatives.
- `weights::Vector{Float64}`: optional weight buffer with one entry per variable;
  overwritten in place based on the objective coefficients.
- `metric::Distances.SemiMetric`: unused by this method (included for signature
  consistency with the other alternative-generation methods).
- `old_objective::AffExpr`: the original objective function of the model, used to
  determine the sign-dependent weight of each variable.

# Throws
- `ArgumentError` if `weights` does not have one entry per variable.
"""
function DWV_initial!(
    model::JuMP.Model,
    variables::AbstractArray{T,N},
    fixed_variables::Vector{VariableRef};
    weights::Vector{Float64} = zeros(length(variables)),
    metric::Distances.SemiMetric = SqEuclidean(),
    old_objective::AffExpr = JuMP.objective_function(model),
) where {T<:Union{VariableRef,AffExpr},N}
    # Guard against a caller-supplied weight buffer of the wrong size, which would
    # otherwise surface as an opaque BoundsError inside the loop below.
    length(weights) == length(variables) || throw(
        ArgumentError(
            "`weights` must have one entry per variable " *
            "(got $(length(weights)) weights for $(length(variables)) variables)",
        ),
    )

    # Draw a random weight for each variable; the admissible values depend on the
    # sign of the variable's coefficient in the original objective.
    for (i, v) in enumerate(variables)
        c = get(old_objective.terms, v, 0.0) # 0.0 if variable not in objective
        if c > 0
            weights[i] = rand([0, 1])
        elseif c < 0
            weights[i] = rand([-1, 0])
        else
            weights[i] = rand([-1, 0, 1])
        end
    end

    # Freeze the requested variables at their current (optimal) values.
    fix.(fixed_variables, value.(fixed_variables), force = true)

    # Replace the objective: minimize the randomly weighted sum of the variables.
    # A generator avoids materializing an intermediate expression array.
    return @objective(
        model,
        Min,
        sum(weights[i] * v for (i, v) in enumerate(variables))
    )
end
56+
57+
"""
    DWV_update!(
        model::JuMP.Model,
        variables::AbstractArray{T,N};
        weights::Vector{Float64} = zeros(length(variables)),
        metric::Distances.SemiMetric = SqEuclidean(),
        old_objective::AffExpr = JuMP.objective_function(model),
    ) where {T<:Union{VariableRef,AffExpr},N}

Update the objective of a JuMP model for the next iteration of the Directionally
Weighted Variables (DWV) method.

Re-draws a random weight for each variable using the same sign-dependent rule as
[`DWV_initial!`](@ref) (positive coefficient: `{0, 1}`; negative: `{-1, 0}`;
zero/absent: `{-1, 0, 1}`), then resets the model objective to minimization of the
weighted sum of the variables.

# Arguments
- `model::JuMP.Model`: the model whose objective is updated in place.
- `variables::AbstractArray{T,N}`: the variables involved in the objective.
- `weights::Vector{Float64}`: optional weight buffer with one entry per variable;
  overwritten in place.
- `metric::Distances.SemiMetric`: unused by this method (included for signature
  consistency with the other alternative-generation methods).
- `old_objective::AffExpr`: the objective used to determine variable weights.
  NOTE(review): the default reads the model's *current* objective, which after the
  first iteration is the DWV objective, not the original one — callers should pass
  the original objective explicitly (TODO from the author: "make sure the old
  objective is passed correctly").

# Throws
- `ArgumentError` if `weights` does not have one entry per variable.
"""
function DWV_update!(
    model::JuMP.Model,
    variables::AbstractArray{T,N};
    weights::Vector{Float64} = zeros(length(variables)),
    metric::Distances.SemiMetric = SqEuclidean(),
    old_objective::AffExpr = JuMP.objective_function(model),
) where {T<:Union{VariableRef,AffExpr},N}
    # Guard against a caller-supplied weight buffer of the wrong size.
    length(weights) == length(variables) || throw(
        ArgumentError(
            "`weights` must have one entry per variable " *
            "(got $(length(weights)) weights for $(length(variables)) variables)",
        ),
    )

    # Re-draw a random weight for each variable; the admissible values depend on
    # the sign of the variable's coefficient in `old_objective`.
    for (i, v) in enumerate(variables)
        c = get(old_objective.terms, v, 0.0) # 0.0 if variable not in objective
        if c > 0
            weights[i] = rand([0, 1])
        elseif c < 0
            weights[i] = rand([-1, 0])
        else
            weights[i] = rand([-1, 0, 1])
        end
    end

    # Clear the current objective before installing the new one.
    set_objective_sense(model, FEASIBILITY_SENSE)
    return @objective(
        model,
        Min,
        sum(weights[i] * v for (i, v) in enumerate(variables))
    )
end

src/NearOptimalAlternatives.jl

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@ include("MGA-Methods/HSJ.jl")
1313
include("MGA-Methods/Spores.jl")
1414
include("MGA-Methods/Min-Max-Variables.jl")
1515
include("MGA-Methods/Random-Vector.jl")
16+
include("MGA-Methods/Directionally-Weighted-Variables.jl")
1617

1718
include("results.jl")
1819
include("alternative-optimisation.jl")

src/alternative-optimisation.jl

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ const METHOD_DISPATCH_UPDATE = Dict{Symbol,Function}(
99
:Spores => Spores_update!,
1010
:Min_Max_Variables => Min_Max_Variables_update!,
1111
:Random_Vector => Random_Vector_update!,
12-
# :Directionally_Weighted_Variables => DW_update!,
12+
:Directionally_Weighted_Variables => DWV_update!,
1313
:Max_Distance => Dist_update!,
1414
)
1515

@@ -19,7 +19,7 @@ const METHOD_DISPATCH_INITIAL = Dict{Symbol,Function}(
1919
:Spores => Spores_initial!,
2020
:Min_Max_Variables => Min_Max_Variables_initial!,
2121
:Random_Vector => Random_Vector_initial!,
22-
# :Directionally_Weighted_Variables => DW_update!,
22+
:Directionally_Weighted_Variables => DWV_initial!,
2323
:Max_Distance => Dist_initial!,
2424
)
2525

Lines changed: 116 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,116 @@
1+
2+
# NOTE(review): the original testset title said "Max Distance", but every nested
# test uses `modeling_method = :Directionally_Weighted_Variables` — the title is a
# copy-paste error and is corrected here. The `≤`/`≥` operators below were mangled
# in the source and are reconstructed from the lower/upper bound-check pattern.
@testset "Test generate alternatives with Directionally Weighted Variables as modeling_method." begin
    @testset "Test regular run with one alternative." begin
        optimizer = Ipopt.Optimizer
        model = JuMP.Model(optimizer)

        # Initialise simple `square` JuMP model
        @variable(model, 0 ≤ x_1 ≤ 1)
        @variable(model, 0 ≤ x_2 ≤ 1)
        @objective(model, Max, x_1 + x_2)
        JuMP.optimize!(model)

        results = generate_alternatives!(
            model,
            0.1,
            all_variables(model),
            1;
            modeling_method = :Directionally_Weighted_Variables,
        )

        # Test that `results` contains one solution with 2 variables, and an
        # objective value between 1.8 and 2.0.
        @test length(results.solutions) == 1 &&
              length(results.solutions[1]) == 2 &&
              length(results.objective_values) == 1 &&
              (
                  results.objective_values[1] ≥ 1.8 ||
                  isapprox(results.objective_values[1], 1.8)
              ) &&
              (
                  results.objective_values[1] ≤ 2.0 ||
                  isapprox(results.objective_values[1], 2.0)
              )
    end

    @testset "Test regular run with one alternative with one fixed variable." begin
        optimizer = Ipopt.Optimizer
        model = JuMP.Model(optimizer)

        # Initialise simple `square` JuMP model
        @variable(model, 0 ≤ x_1 ≤ 1)
        @variable(model, 0 ≤ x_2 ≤ 1)
        @objective(model, Max, x_1 + x_2)
        JuMP.optimize!(model)

        results = generate_alternatives!(
            model,
            0.1,
            all_variables(model),
            1;
            fixed_variables = [x_2],
            modeling_method = :Directionally_Weighted_Variables,
        )

        # Test that `results` contains one solution with 2 variables, and an
        # objective value between 1.8 and 2.0. Also, `x_2` should remain around 1.0
        # and `x_1` should be between 0.8 and 1.0.
        @test length(results.solutions) == 1 &&
              length(results.solutions[1]) == 2 &&
              length(results.objective_values) == 1 &&
              (
                  results.objective_values[1] ≥ 1.8 ||
                  isapprox(results.objective_values[1], 1.8)
              ) &&
              (
                  results.objective_values[1] ≤ 2.0 ||
                  isapprox(results.objective_values[1], 2.0)
              ) &&
              (
                  results.solutions[1][x_1] ≥ 0.8 ||
                  isapprox(results.solutions[1][x_1], 0.8)
              ) &&
              (
                  results.solutions[1][x_1] ≤ 1.0 ||
                  isapprox(results.solutions[1][x_1], 1.0)
              ) &&
              isapprox(results.solutions[1][x_2], 1.0)
    end

    @testset "Test regular run with two alternatives." begin
        optimizer = Ipopt.Optimizer
        model = JuMP.Model(optimizer)

        # Initialise simple `square` JuMP model
        @variable(model, 0 ≤ x_1 ≤ 1)
        @variable(model, 0 ≤ x_2 ≤ 1)
        @objective(model, Max, x_1 + x_2)
        JuMP.optimize!(model)

        results = generate_alternatives!(
            model,
            0.1,
            all_variables(model),
            2;
            modeling_method = :Directionally_Weighted_Variables,
        )

        # Test that `results` contains 2 solutions with two variables each, where
        # the objective values of both solutions are between 1.8 and 2.0.
        @test length(results.solutions) == 2 &&
              length(results.solutions[2]) == 2 &&
              length(results.objective_values) == 2 &&
              (
                  results.objective_values[1] ≥ 1.8 ||
                  isapprox(results.objective_values[1], 1.8)
              ) &&
              (
                  results.objective_values[1] ≤ 2.0 ||
                  isapprox(results.objective_values[1], 2.0)
              ) &&
              (
                  results.objective_values[2] ≥ 1.8 ||
                  isapprox(results.objective_values[2], 1.8)
              ) &&
              (
                  results.objective_values[2] ≤ 2.0 ||
                  isapprox(results.objective_values[2], 2.0)
              )
    end
end

0 commit comments

Comments
 (0)