# run with: julia ./examples/powermodels/generate_full_datasets_script.jl "./examples/powermodels/data/pglib_opf_case300_ieee/case300.config.toml"
config_path = ARGS[1]

- import Pkg; Pkg.activate(".")
+ using Pkg: Pkg;
+ Pkg.activate(".");

using TestEnv
TestEnv.activate()
@@ -27,13 +28,14 @@ using NonconvexNLopt

########## POI SOLVER ##########

- cached = () -> MOI.Bridges.full_bridge_optimizer(
-     MOI.Utilities.CachingOptimizer(
-         MOI.Utilities.UniversalFallback(MOI.Utilities.Model{Float64}()),
-         Clarabel.Optimizer(),
-     ),
-     Float64,
- )
+ cached =
+     () -> MOI.Bridges.full_bridge_optimizer(
+         MOI.Utilities.CachingOptimizer(
+             MOI.Utilities.UniversalFallback(MOI.Utilities.Model{Float64}()),
+             Clarabel.Optimizer(),
+         ),
+         Float64,
+     )

POI_cached_optimizer() = POI.Optimizer(cached())

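For context (not part of the commit): the factory above layers a UniversalFallback-backed caching optimizer around Clarabel.Optimizer, applies full MOI bridging, and POI_cached_optimizer then wraps the result in ParametricOptInterface. A minimal usage sketch, assuming JuMP and ParametricOptInterface (aliased POI, matching the POI.* calls in this file) are loaded:

    # hypothetical usage of the factory defined above, outside this script
    using JuMP
    import ParametricOptInterface as POI
    model = Model(POI_cached_optimizer)  # Clarabel behind a cache and bridges, wrapped by POI.Optimizer
    # build and solve the OPF problem on `model` as usual; POI handles the parameterised data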
@@ -45,12 +47,12 @@ path = config["export_dir"]
path_powermodels = joinpath(dirname(@__FILE__)) # TODO: Make it a submodule
include(joinpath(path_powermodels, "pglib_datagen.jl"))

- filetype = ArrowFile
+ filetype = ArrowFile # ArrowFile # CSVFile

case_name = config["case_name"]
case_file_path = joinpath(path, case_name)
mkpath(case_file_path)
- network_formulation=eval(Symbol(ARGS[2]))
+ network_formulation = eval(Symbol(ARGS[2])) # SOCWRConicPowerModel # DCPPowerModel

########## SAMPLER DATASET GENERATION ##########

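Note that the script reads two positional arguments: ARGS[1] is the TOML config path shown in the run comment at the top, and ARGS[2] is eval'ed into a PowerModels formulation type, for example one of the values suggested by the new inline comment (SOCWRConicPowerModel or DCPPowerModel). The run comment at the top omits this second argument, so a complete invocation presumably looks like:

    julia ./examples/powermodels/generate_full_datasets_script.jl <config.toml> DCPPowerModel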
@@ -59,9 +61,16 @@ if haskey(config, "sampler")
    num_p = config["sampler"]["num_samples"]
    global success_solves = 0.0
    for i in 1:num_batches
-         _success_solves, number_variables, number_loads, batch_id = generate_dataset_pglib(case_file_path, case_name;
-             num_p=num_p, filetype=filetype, network_formulation=network_formulation, optimizer=POI_cached_optimizer,
-             internal_load_sampler=(_o, n) -> load_sampler(_o, n, max_multiplier=1.25, min_multiplier=0.8, step_multiplier=0.01)
+         _success_solves, number_variables, number_loads, batch_id = generate_dataset_pglib(
+             case_file_path,
+             case_name;
+             num_p=num_p,
+             filetype=filetype,
+             network_formulation=network_formulation,
+             optimizer=POI_cached_optimizer,
+             internal_load_sampler=(_o, n) -> load_sampler(
+                 _o, n; max_multiplier=1.25, min_multiplier=0.8, step_multiplier=0.01
+             ),
        )
        global success_solves += _success_solves
    end
@@ -81,35 +90,45 @@ if haskey(config, "line_search")
    early_stop_fn = (model, status, recorder) -> !status

    global success_solves = 0.0
-     global batch_id = string(uuid1())
    for ibatc in 1:num_batches
-         _success_solves, number_variables, number_loads, b_id = generate_dataset_pglib(case_file_path, case_name;
-             num_p=num_p, filetype=filetype, network_formulation=network_formulation, optimizer=POI_cached_optimizer,
-             internal_load_sampler=(_o, n, idx, num_inputs) -> line_sampler(_o, n, idx, num_inputs, ibatc; step_multiplier=step_multiplier),
+         _success_solves, number_variables, number_loads, b_id = generate_dataset_pglib(
+             case_file_path,
+             case_name;
+             num_p=num_p,
+             filetype=filetype,
+             network_formulation=network_formulation,
+             optimizer=POI_cached_optimizer,
+             internal_load_sampler=(_o, n, idx, num_inputs) -> line_sampler(
+                 _o, n, idx, num_inputs, ibatc; step_multiplier=step_multiplier
+             ),
            early_stop_fn=early_stop_fn,
-             batch_id=batch_id,
        )
        global success_solves += _success_solves
    end
    success_solves /= num_batches
-
+
    @info "Success solves: $(success_solves * 100)% of $(num_batches * num_p)"
end

########## WORST CASE DUAL DATASET GENERATION ##########
if haskey(config, "worst_case_dual")
    num_p = config["worst_case_dual"]["num_samples"]
    function optimizer_factory()
-         IPO_OPT = Gurobi.Optimizer()
+         IPO_OPT = Gurobi.Optimizer()
        # IPO_OPT = MadNLP.Optimizer(print_level=MadNLP.INFO, max_iter=100)
        # IPO = MOI.Bridges.Constraint.SOCtoNonConvexQuad{Float64}(IPO_OPT)
        # MIP = QuadraticToBinary.Optimizer{Float64}(IPO)
        return () -> IPO_OPT
    end

-     success_solves, number_variables, number_loads, batch_id = generate_worst_case_dataset(case_file_path, case_name;
-         num_p=num_p, filetype=filetype, network_formulation=network_formulation, optimizer_factory=optimizer_factory,
-         hook = (model) -> set_optimizer_attribute(model, "NonConvex", 2)
+     success_solves, number_variables, number_loads, batch_id = generate_worst_case_dataset(
+         case_file_path,
+         case_name;
+         num_p=num_p,
+         filetype=filetype,
+         network_formulation=network_formulation,
+         optimizer_factory=optimizer_factory,
+         hook=(model) -> set_optimizer_attribute(model, "NonConvex", 2),
    )

    @info "Success solves Worst Case: $(success_solves) of $(num_p)"
if haskey(config, "worst_case_nonconvex")
    num_p = config["worst_case_nonconvex"]["num_samples"]

-     success_solves, number_variables, number_loads, batch_id = generate_worst_case_dataset_Nonconvex(case_file_path, case_name;
-         num_p=num_p, filetype=filetype, network_formulation=network_formulation, optimizer=POI_cached_optimizer,
+     success_solves, number_variables, number_loads, batch_id = generate_worst_case_dataset_Nonconvex(
+         case_file_path,
+         case_name;
+         num_p=num_p,
+         filetype=filetype,
+         network_formulation=network_formulation,
+         optimizer=POI_cached_optimizer,
    )

    @info "Success solves Worst Case: $(success_solves * 100) of $(num_p)"
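For reference, the config[...] accesses visible in this diff imply a TOML file that, once parsed, has at least the following shape. This is a hedged sketch, not the repository's actual case300.config.toml: the values are placeholders, and the diff does not show where num_batches or step_multiplier are read from, so they are omitted.

    # hypothetical parsed-config sketch; keys taken from the config[...] accesses above,
    # values are illustrative placeholders only
    config = Dict(
        "export_dir" => "./examples/powermodels/data",        # base directory for case_file_path
        "case_name" => "pglib_opf_case300_ieee",               # joined onto export_dir
        "sampler" => Dict("num_samples" => 10),                # presence enables the sampler section
        "worst_case_dual" => Dict("num_samples" => 5),         # presence enables the worst-case dual section
        "worst_case_nonconvex" => Dict("num_samples" => 5),    # presence enables the Nonconvex worst-case section
        # "line_search" => Dict( ... ),                        # optional; fields not visible in this diff
    )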