
Commit ef6c2fd

Fully Connected NN (#9)
* add ICNN
* update code
* update documentation
* update code
* update
* update code
* format bluestyle
* increase coverage
1 parent 61f3a0c commit ef6c2fd

16 files changed: +643 -321 lines changed

Project.toml

Lines changed: 5 additions & 2 deletions
@@ -7,10 +7,13 @@ version = "1.2.0-DEV"
 Arrow = "69666777-d1a9-59fb-9406-91d4454c9d45"
 CSV = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b"
 Dualization = "191a621a-6537-11e9-281d-650236a99e60"
+Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c"
 JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
+MLJFlux = "094fc8d1-fd35-5302-93ea-dabda2abf845"
 Nonconvex = "01bcebdf-4d21-426d-b5c4-6132c1619978"
 ParametricOptInterface = "0ce4ce61-57bf-432b-a095-efac525d185e"
+Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 UUIDs = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
 Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"

@@ -27,13 +30,13 @@ AbstractGPs = "99985d1d-32ba-4be9-9821-2ec096f28918"
 Clarabel = "61c947e1-3e6d-4ee4-985a-eec8c727bd6e"
 DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
 DelimitedFiles = "8bb1440f-4735-579b-a4ab-409b98df4dab"
-Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c"
 HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b"
 Ipopt = "b6b21f68-93f8-5de0-b562-5493be1d77c9"
 NonconvexNLopt = "b43a31b8-ff9b-442d-8e31-c163daa8ab75"
 PGLib = "07a8691f-3d11-4330-951b-3c50f98338be"
 PowerModels = "c36e90e8-916a-50a6-bd94-075b64ef4655"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+MLJ = "add582a8-e3ab-11e8-2d5e-e98b27df1bc7"

 [targets]
-test = ["Test", "DelimitedFiles", "PGLib", "HiGHS", "PowerModels", "Flux", "DataFrames", "Clarabel", "Ipopt", "NonconvexNLopt"]
+test = ["Test", "DelimitedFiles", "PGLib", "HiGHS", "PowerModels", "DataFrames", "Clarabel", "Ipopt", "NonconvexNLopt", "MLJ"]

examples/flux/flux_forecaster_script.jl

Lines changed: 48 additions & 37 deletions
@@ -2,6 +2,7 @@ using TestEnv
 TestEnv.activate()

 using Arrow
+using CSV
 using Flux
 using DataFrames
 using PowerModels
@@ -10,26 +11,38 @@ using L2O
 # Paths
 case_name = "pglib_opf_case300_ieee" # pglib_opf_case300_ieee # pglib_opf_case5_pjm
 network_formulation = SOCWRConicPowerModel # SOCWRConicPowerModel # DCPPowerModel
-filetype = ArrowFile
+filetype = ArrowFile # ArrowFile # CSVFile
 path_dataset = joinpath(pwd(), "examples", "powermodels", "data")
 case_file_path = joinpath(path_dataset, case_name, string(network_formulation))

 # Load input and output data tables
 iter_files = readdir(joinpath(case_file_path))
-iter_files = filter(x -> occursin(string(ArrowFile), x), iter_files)
-file_ins = [joinpath(case_file_path, file) for file in iter_files if occursin("input", file)]
-file_outs = [joinpath(case_file_path, file) for file in iter_files if occursin("output", file)]
+iter_files = filter(x -> occursin(string(filetype), x), iter_files)
+file_ins = [
+    joinpath(case_file_path, file) for file in iter_files if occursin("input", file)
+]
+file_outs = [
+    joinpath(case_file_path, file) for file in iter_files if occursin("output", file)
+]
 batch_ids = [split(split(file, "_")[end], ".")[1] for file in file_ins]

 # Load input and output data tables
-train_idx = [1]
-test_idx = [2]
+train_idx = collect(1:floor(Int, length(file_ins) * 0.5))
+test_idx = setdiff(1:length(file_ins), train_idx)

-input_table_train = Arrow.Table(file_ins[train_idx])
-output_table_train = Arrow.Table(file_outs[train_idx])
+if filetype === ArrowFile
+    input_table_train = Arrow.Table(file_ins[train_idx])
+    output_table_train = Arrow.Table(file_outs[train_idx])

-input_table_test = Arrow.Table(file_ins[test_idx])
-output_table_test = Arrow.Table(file_outs[test_idx])
+    input_table_test = Arrow.Table(file_ins[test_idx])
+    output_table_test = Arrow.Table(file_outs[test_idx])
+else
+    input_table_train = CSV.read(file_ins[train_idx], DataFrame)
+    output_table_train = CSV.read(file_outs[train_idx], DataFrame)
+
+    input_table_test = CSV.read(file_ins[test_idx], DataFrame)
+    output_table_test = CSV.read(file_outs[test_idx], DataFrame)
+end

 # Convert to dataframes
 input_data_train = DataFrame(input_table_train)
@@ -40,41 +53,39 @@ output_data_test = DataFrame(output_table_test)

 # Separate input and output variables
 output_variables_train = output_data_train[!, Not(:id)]
-input_features_train = innerjoin(input_data_train, output_data_train[!, [:id]], on = :id)[!, Not(:id)] # just use success solves
-
-num_loads = floor(Int,size(input_features_train,2)/2)
-total_volume=[sum(sqrt(input_features_train[i,l]^2 + input_features_train[i,l+num_loads]^2) for l in 1:num_loads) for i in 1:size(input_features_train,1) ]
+input_features_train = innerjoin(input_data_train, output_data_train[!, [:id]]; on=:id)[
+    !, Not(:id)
+] # just use success solves
+
+num_loads = floor(Int, size(input_features_train, 2) / 2)
+total_volume = [
+    sum(
+        sqrt(input_features_train[i, l]^2 + input_features_train[i, l + num_loads]^2) for
+        l in 1:num_loads
+    ) for i in 1:size(input_features_train, 1)
+]

 output_variables_test = output_data_test[!, Not(:id)]
-input_features_test = innerjoin(input_data_test, output_data_test[!, [:id]], on = :id)[!, Not(:id)] # just use success solves
+input_features_test = innerjoin(input_data_test, output_data_test[!, [:id]]; on=:id)[
+    !, Not(:id)
+] # just use success solves

 # Define model
-model = Chain(
-    Dense(size(input_features_train, 2), 64, relu),
-    Dense(64, 32, relu),
-    Dense(32, size(output_variables_train, 2)),
+model = MultitargetNeuralNetworkRegressor(;
+    builder=FullyConnectedBuilder([64, 32]),
+    rng=123,
+    epochs=20,
+    optimiser=ConvexRule(
+        Flux.Optimise.Adam(0.001, (0.9, 0.999), 1.0e-8, IdDict{Any,Any}())
+    ),
 )

-# Define loss function
-loss(x, y) = Flux.mse(model(x), y)
-
-# Convert the data to matrices
-input_features_train = Matrix(input_features_train)'
-output_variables_train = Matrix(output_variables_train)'
-
-input_features_test = Matrix(input_features_test)'
-output_variables_test = Matrix(output_variables_test)'
-
-# Define the optimizer
-optimizer = Flux.ADAM()
-
-# Train the model
-Flux.train!(
-    loss, Flux.params(model), [(input_features_train, output_variables_train)], optimizer
-)
+# Define the machine
+mach = machine(model, input_features_train, output_variables_train)
+fit!(mach; verbosity=2)

 # Make predictions
-predictions = model(input_features_test)
+predictions = predict(mach, input_features)

 # Calculate the error
 error = Flux.mse(predictions, output_variables_test)
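
For context, the example script now trains through MLJ's machine interface instead of a hand-written Flux.train! loop. Below is a minimal, self-contained sketch of the same pattern on synthetic tables; FullyConnectedBuilder and ConvexRule are taken to be the helpers this commit adds to L2O, and the toy data is purely illustrative, not part of the commit.

using MLJ, MLJFlux, Flux
using L2O: FullyConnectedBuilder, ConvexRule  # assumed exports added by this PR

# Synthetic stand-ins for the Arrow/CSV feature and target tables above.
Xtrain = MLJ.table(rand(80, 4))
ytrain = MLJ.table(rand(80, 2))
Xtest = MLJ.table(rand(20, 4))

# Same model definition pattern as the diff: a multitarget regressor with a
# fully connected builder and the (assumed) convex optimiser rule.
model = MultitargetNeuralNetworkRegressor(;
    builder=FullyConnectedBuilder([64, 32]),
    rng=123,
    epochs=20,
    optimiser=ConvexRule(
        Flux.Optimise.Adam(0.001, (0.9, 0.999), 1.0e-8, IdDict{Any,Any}())
    ),
)

# Wrap model and data in a machine, fit, then predict on held-out features.
mach = machine(model, Xtrain, ytrain)
fit!(mach; verbosity=0)
preds = predict(mach, Xtest)

Routing training through machine/fit!/predict keeps the model declarative and leaves epoch and optimiser handling to MLJFlux rather than the hand-rolled loop the old script used.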

examples/flux/test_flux_forecaster.jl

Lines changed: 0 additions & 45 deletions
This file was deleted.

examples/powermodels/generate_full_datasets_script.jl

Lines changed: 49 additions & 25 deletions
@@ -1,7 +1,8 @@
 # run with: julia ./examples/powermodels/generate_full_datasets_script.jl "./examples/powermodels/data/pglib_opf_case300_ieee/case300.config.toml"
 config_path = ARGS[1]

-import Pkg; Pkg.activate(".")
+using Pkg: Pkg;
+Pkg.activate(".");

 using TestEnv
 TestEnv.activate()
@@ -27,13 +28,14 @@ using NonconvexNLopt

 ########## POI SOLVER ##########

-cached = () -> MOI.Bridges.full_bridge_optimizer(
-    MOI.Utilities.CachingOptimizer(
-        MOI.Utilities.UniversalFallback(MOI.Utilities.Model{Float64}()),
-        Clarabel.Optimizer(),
-    ),
-    Float64,
-)
+cached =
+    () -> MOI.Bridges.full_bridge_optimizer(
+        MOI.Utilities.CachingOptimizer(
+            MOI.Utilities.UniversalFallback(MOI.Utilities.Model{Float64}()),
+            Clarabel.Optimizer(),
+        ),
+        Float64,
+    )

 POI_cached_optimizer() = POI.Optimizer(cached())

@@ -45,12 +47,12 @@ path = config["export_dir"]
 path_powermodels = joinpath(dirname(@__FILE__)) # TODO: Make it a submodule
 include(joinpath(path_powermodels, "pglib_datagen.jl"))

-filetype = ArrowFile
+filetype = ArrowFile # ArrowFile # CSVFile

 case_name = config["case_name"]
 case_file_path = joinpath(path, case_name)
 mkpath(case_file_path)
-network_formulation= eval(Symbol(ARGS[2]))
+network_formulation = eval(Symbol(ARGS[2])) # SOCWRConicPowerModel # DCPPowerModel

 ########## SAMPLER DATASET GENERATION ##########

@@ -59,9 +61,16 @@ if haskey(config, "sampler")
     num_p = config["sampler"]["num_samples"]
     global success_solves = 0.0
     for i in 1:num_batches
-        _success_solves, number_variables, number_loads, batch_id = generate_dataset_pglib(case_file_path, case_name;
-            num_p=num_p, filetype=filetype, network_formulation=network_formulation, optimizer=POI_cached_optimizer,
-            internal_load_sampler= (_o, n) -> load_sampler(_o, n, max_multiplier=1.25, min_multiplier=0.8, step_multiplier=0.01)
+        _success_solves, number_variables, number_loads, batch_id = generate_dataset_pglib(
+            case_file_path,
+            case_name;
+            num_p=num_p,
+            filetype=filetype,
+            network_formulation=network_formulation,
+            optimizer=POI_cached_optimizer,
+            internal_load_sampler=(_o, n) -> load_sampler(
+                _o, n; max_multiplier=1.25, min_multiplier=0.8, step_multiplier=0.01
+            ),
         )
         global success_solves += _success_solves
     end
@@ -81,35 +90,45 @@ if haskey(config, "line_search")
     early_stop_fn = (model, status, recorder) -> !status

     global success_solves = 0.0
-    global batch_id = string(uuid1())
     for ibatc in 1:num_batches
-        _success_solves, number_variables, number_loads, b_id = generate_dataset_pglib(case_file_path, case_name;
-            num_p=num_p, filetype=filetype, network_formulation=network_formulation, optimizer=POI_cached_optimizer,
-            internal_load_sampler= (_o, n, idx, num_inputs) -> line_sampler(_o, n, idx, num_inputs, ibatc; step_multiplier=step_multiplier),
+        _success_solves, number_variables, number_loads, b_id = generate_dataset_pglib(
+            case_file_path,
+            case_name;
+            num_p=num_p,
+            filetype=filetype,
+            network_formulation=network_formulation,
+            optimizer=POI_cached_optimizer,
+            internal_load_sampler=(_o, n, idx, num_inputs) -> line_sampler(
+                _o, n, idx, num_inputs, ibatc; step_multiplier=step_multiplier
+            ),
             early_stop_fn=early_stop_fn,
-            batch_id=batch_id,
         )
         global success_solves += _success_solves
     end
     success_solves /= num_batches
-
+
     @info "Success solves: $(success_solves * 100) % of $(num_batches * num_p)"
 end

 ########## WORST CASE DUAL DATASET GENERATION ##########
 if haskey(config, "worst_case_dual")
     num_p = config["worst_case_dual"]["num_samples"]
     function optimizer_factory()
-    IPO_OPT = Gurobi.Optimizer()
+        IPO_OPT = Gurobi.Optimizer()
         # IPO_OPT = MadNLP.Optimizer(print_level=MadNLP.INFO, max_iter=100)
         # IPO = MOI.Bridges.Constraint.SOCtoNonConvexQuad{Float64}(IPO_OPT)
         # MIP = QuadraticToBinary.Optimizer{Float64}(IPO)
         return () -> IPO_OPT
     end

-    success_solves, number_variables, number_loads, batch_id = generate_worst_case_dataset(case_file_path, case_name;
-        num_p=num_p, filetype=filetype, network_formulation=network_formulation, optimizer_factory=optimizer_factory,
-        hook = (model) -> set_optimizer_attribute(model, "NonConvex", 2)
+    success_solves, number_variables, number_loads, batch_id = generate_worst_case_dataset(
+        case_file_path,
+        case_name;
+        num_p=num_p,
+        filetype=filetype,
+        network_formulation=network_formulation,
+        optimizer_factory=optimizer_factory,
+        hook=(model) -> set_optimizer_attribute(model, "NonConvex", 2),
     )

     @info "Success solves Worst Case: $(success_solves) of $(num_p)"
@@ -119,8 +138,13 @@ end
 if haskey(config, "worst_case_nonconvex")
     num_p = config["worst_case_nonconvex"]["num_samples"]

-    success_solves, number_variables, number_loads, batch_id = generate_worst_case_dataset_Nonconvex(case_file_path, case_name;
-        num_p=num_p, filetype=filetype, network_formulation=network_formulation, optimizer=POI_cached_optimizer,
+    success_solves, number_variables, number_loads, batch_id = generate_worst_case_dataset_Nonconvex(
+        case_file_path,
+        case_name;
+        num_p=num_p,
+        filetype=filetype,
+        network_formulation=network_formulation,
+        optimizer=POI_cached_optimizer,
     )

     @info "Success solves Worst Case: $(success_solves * 100) of $(num_p)"

0 commit comments
