Skip to content

Commit bdfa551

Browse files
authored
Merge pull request #38 from LAMPSPUC/dev
Version 0.1.5
2 parents af07d62 + df31d71 commit bdfa551

File tree

4 files changed

+55
-50
lines changed

4 files changed

+55
-50
lines changed

Project.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
name = "ApplicationDrivenLearning"
22
uuid = "0856f1c8-ef17-4e14-9230-2773e47a789e"
33
authors = ["Giovanni Amorim", "Joaquim Garcia"]
4-
version = "0.1.4"
4+
version = "0.1.5"
55

66
[deps]
77
BilevelJuMP = "485130c0-026e-11ea-0f1a-6992cd14145c"

src/ApplicationDrivenLearning.jl

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -198,8 +198,6 @@ function train!(
198198
elseif options.mode == GradientMPIMode
199199
return train_with_gradient_mpi!(model, X, y, options.params)
200200
elseif options.mode == BilevelMode
201-
asr_msg = "BilevelMode not implemented for multiple forecasting models"
202-
@assert length(model.forecast.networks) == 1 asr_msg
203201
return solve_bilevel(model, X, y, options.params)
204202
else
205203
# should never get here

src/jump.jl

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -136,7 +136,9 @@ function JuMP.set_optimizer(model::Model, builder, evaluate_duals::Bool = true)
136136
JuMP.set_optimizer(model.plan, () -> new_diff_optimizer)
137137

138138
# basic setting for assess model
139-
return JuMP.set_optimizer(model.assess, builder)
139+
JuMP.set_optimizer(model.assess, builder)
140+
141+
return nothing
140142
end
141143

142144
function JuMP.set_silent(model::Model)

src/optimizers/bilevel.jl

Lines changed: 51 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -117,46 +117,49 @@ function solve_bilevel(
117117
end
118118

119119
# implement predictive model expression iterating through
120-
# layers and creating predictive expression
121-
layers_inpt = Dict{Any,Any}(
122-
output_idx => X[1:T, input_idx] for
123-
(input_idx, output_idx) in model.forecast.input_output_map[1]
124-
)
125-
predictive_model_vars = Dict{Int,Any}()
126-
i_layer = 1
127-
for layer in model.forecast.networks[1]
128-
# if it is layer with parameters, process output
129-
if has_params(layer)
130-
# get size and parameters W and b
131-
(layer_size_out, layer_size_in) = size(layer.weight)
132-
W = @variable(
133-
Upper(bilevel_model),
134-
[1:layer_size_out, 1:layer_size_in]
135-
)
136-
if layer.bias == false
137-
b = zeros(layer_size_out)
120+
# models and layers to create predictive expression
121+
npreds = size(model.forecast.networks, 1)
122+
predictive_model_vars = [Dict{Int,Any}() for ipred = 1:npreds]
123+
y_hat = Matrix{Any}(undef, size(Y, 1), size(Y, 2))
124+
for ipred = 1:npreds
125+
layers_inpt = Dict{Any,Any}(
126+
output_idx => X[1:T, input_idx] for (input_idx, output_idx) in
127+
model.forecast.input_output_map[ipred]
128+
)
129+
i_layer = 1
130+
for layer in model.forecast.networks[ipred]
131+
# if it is layer with parameters, process output
132+
if has_params(layer)
133+
# get size and parameters W and b
134+
(layer_size_out, layer_size_in) = size(layer.weight)
135+
W = @variable(
136+
Upper(bilevel_model),
137+
[1:layer_size_out, 1:layer_size_in]
138+
)
139+
if layer.bias == false
140+
b = zeros(layer_size_out)
141+
else
142+
b = @variable(Upper(bilevel_model), [1:layer_size_out])
143+
end
144+
predictive_model_vars[ipred][i_layer] = Dict(:W => W, :b => b)
145+
# build layer output as next layer input
146+
for output_idx in values(model.forecast.input_output_map[ipred])
147+
layers_inpt[output_idx] =
148+
layer.σ(W * layers_inpt[output_idx]' .+ b)'
149+
end
150+
# if activation function layer, just apply
151+
elseif supertype(typeof(layer)) == Function
152+
for output_idx in values(model.forecast.input_output_map[ipred])
153+
layers_inpt[output_idx] = layer(layers_inpt[output_idx])
154+
end
138155
else
139-
b = @variable(Upper(bilevel_model), [1:layer_size_out])
140-
end
141-
predictive_model_vars[i_layer] = Dict(:W => W, :b => b)
142-
# build layer output as next layer input
143-
for output_idx in values(model.forecast.input_output_map[1])
144-
layers_inpt[output_idx] =
145-
layer.σ(W * layers_inpt[output_idx]' .+ b)'
156+
println("Network $ipred layer $ilayer type not supported")
146157
end
147-
# if activation function layer, just apply
148-
elseif supertype(typeof(layer)) == Function
149-
for output_idx in values(model.forecast.input_output_map[1])
150-
layers_inpt[output_idx] = layer(layers_inpt[output_idx])
151-
end
152-
else
153-
println("Network layer $ilayer type not supported")
158+
i_layer += 1
159+
end
160+
for (output_idx, prediction) in layers_inpt
161+
y_hat[:, output_idx] = prediction
154162
end
155-
i_layer += 1
156-
end
157-
y_hat = Matrix{Any}(undef, size(Y, 1), size(Y, 2))
158-
for (output_idx, prediction) in layers_inpt
159-
y_hat[:, output_idx] = prediction
160163
end
161164

162165
# and apply prediction on lower model as constraint
@@ -174,17 +177,19 @@ function solve_bilevel(
174177
optimize!(bilevel_model)
175178

176179
# fix parameters to predictive_model
177-
ilayer = 1
178-
for layer in model.forecast.networks[1]
179-
if has_params(layer)
180-
for p in Flux.trainables(layer.weight)
181-
p .= value.(predictive_model_vars[ilayer][:W])
182-
end
183-
for p in Flux.trainables(layer.bias)
184-
p .= value.(predictive_model_vars[ilayer][:b])
180+
for ipred = 1:npreds
181+
ilayer = 1
182+
for layer in model.forecast.networks[ipred]
183+
if has_params(layer)
184+
for p in Flux.trainables(layer.weight)
185+
p .= value.(predictive_model_vars[ipred][ilayer][:W])
186+
end
187+
for p in Flux.trainables(layer.bias)
188+
p .= value.(predictive_model_vars[ipred][ilayer][:b])
189+
end
185190
end
191+
ilayer += 1
186192
end
187-
ilayer += 1
188193
end
189194

190195
return Solution(

0 commit comments

Comments (0)