-
Notifications
You must be signed in to change notification settings - Fork 18
[WIP] Implement VOCS interface #281
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from 37 commits
fa96b4c
31ba116
7cd13c9
b03f0f1
f973cfa
e44319c
0f8049d
a73735c
942a692
56757a8
6a51d00
3293fde
684bf31
0b690ff
89323d0
f313089
cca80cc
4b8d698
c7660e6
fa43bc0
8e21bb5
deaeb6b
68e2e13
d4bb5dc
cebffd1
36f6277
df8066b
9d9eaf7
bf31174
4948cb5
cf1211d
9b60771
ce09d6b
07eee24
9df7f76
bbc23bc
6f0933f
3a6ef44
f883c7b
346dbee
844ff1a
d019af5
21210b9
b760144
ee2cb13
b91a7db
ff92723
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -67,6 +67,7 @@ | |
TrialStatus, | ||
) | ||
from .ax_metric import AxMetric | ||
from generator_standard.vocs import VOCS, DiscreteVariable | ||
|
||
# Define generator states. | ||
NOT_STARTED = "not_started" | ||
|
@@ -152,10 +153,8 @@ class AxMultitaskGenerator(AxGenerator): | |
|
||
Parameters | ||
---------- | ||
varying_parameters : list of VaryingParameter | ||
List of input parameters to vary. One of them should be a fidelity. | ||
objectives : list of Objective | ||
List of optimization objectives. Only one objective is supported. | ||
vocs : VOCS | ||
VOCS object defining variables, objectives, constraints, and observables. | ||
lofi_task, hifi_task : Task | ||
The low- and high-fidelity tasks. | ||
analyzed_parameters : list of Parameter, optional | ||
|
@@ -184,31 +183,25 @@ class AxMultitaskGenerator(AxGenerator): | |
|
||
def __init__( | ||
self, | ||
varying_parameters: List[VaryingParameter], | ||
objectives: List[Objective], | ||
vocs: VOCS, | ||
lofi_task: Task, | ||
hifi_task: Task, | ||
analyzed_parameters: Optional[List[Parameter]] = None, | ||
use_cuda: Optional[bool] = False, | ||
gpu_id: Optional[int] = 0, | ||
dedicated_resources: Optional[bool] = False, | ||
save_model: Optional[bool] = True, | ||
model_save_period: Optional[int] = 5, | ||
model_history_dir: Optional[str] = "model_history", | ||
) -> None: | ||
# As trial parameters these get written to history array | ||
# Ax trial_index and arm together locate a point | ||
# Multiple points (Optimas trials) can share the same Ax trial_index | ||
custom_trial_parameters = [ | ||
TrialParameter("arm_name", "ax_arm_name", dtype="U32"), | ||
TrialParameter("trial_type", "ax_trial_type", dtype="U32"), | ||
TrialParameter("ax_trial_id", "ax_trial_index", dtype=int), | ||
] | ||
self._check_inputs(varying_parameters, objectives, lofi_task, hifi_task) | ||
self._check_inputs(vocs, lofi_task, hifi_task) | ||
|
||
# Convert discrete variables to trial parameters before calling super().__init__ | ||
custom_trial_parameters = ( | ||
self._convert_discrete_variables_to_trial_parameters(vocs) | ||
) | ||
|
||
super().__init__( | ||
varying_parameters=varying_parameters, | ||
objectives=objectives, | ||
analyzed_parameters=analyzed_parameters, | ||
vocs=vocs, | ||
use_cuda=use_cuda, | ||
gpu_id=gpu_id, | ||
dedicated_resources=dedicated_resources, | ||
|
@@ -231,6 +224,25 @@ def __init__( | |
self.gr_lofi = None | ||
self._experiment = self._create_experiment() | ||
|
||
# Internal mapping: _id -> (arm_name, ax_trial_id, trial_type) | ||
self._id_mapping = {} | ||
self._next_id = 0 | ||
|
||
def _convert_discrete_variables_to_trial_parameters(
    self, vocs: VOCS
) -> List[TrialParameter]:
    """Convert discrete variables from VOCS to TrialParameter objects.

    Parameters
    ----------
    vocs : VOCS
        VOCS object whose ``variables`` mapping is scanned for
        ``DiscreteVariable`` entries.

    Returns
    -------
    list of TrialParameter
        One trial parameter per discrete variable. The dtype is a
        fixed-width unicode string sized to the longest value so that
        no discrete value is truncated when written to the history.
    """
    trial_parameters = []
    for var_name, var_spec in vocs.variables.items():
        if isinstance(var_spec, DiscreteVariable):
            # Width of the numpy unicode dtype must cover the longest
            # value; ``default=1`` guards against an empty value list.
            max_len = max(
                (len(str(val)) for val in var_spec.values), default=1
            )
            trial_parameters.append(
                TrialParameter(var_name, var_name, dtype=f"U{max_len}")
            )
    return trial_parameters
|
||
def get_gen_specs( | ||
self, sim_workers: int, run_params: Dict, sim_max: int | ||
) -> Dict: | ||
|
@@ -242,20 +254,30 @@ def get_gen_specs( | |
gen_specs["out"].append(("task", str, max_length)) | ||
return gen_specs | ||
|
||
def _validate_vocs(self, vocs: VOCS) -> None:
    """Validate VOCS for multitask generator."""
    super()._validate_vocs(vocs)
    # Exactly one objective is supported by this generator.
    objective_count = len(vocs.objectives)
    assert objective_count == 1, (
        "Multitask generator supports only a single objective. "
        "Objectives given: {}.".format(objective_count)
    )
    # The task selector must exist and be declared as a discrete variable.
    has_trial_type = "trial_type" in vocs.variables
    assert (
        has_trial_type
    ), "Multitask generator requires a discrete variable named 'trial_type'"
    trial_type_spec = vocs.variables["trial_type"]
    assert isinstance(
        trial_type_spec, DiscreteVariable
    ), "Variable 'trial_type' must be a discrete variable"
|
||
def _check_inputs( | ||
self, | ||
varying_parameters: List[VaryingParameter], | ||
objectives: List[Objective], | ||
vocs: VOCS, | ||
lofi_task: Task, | ||
hifi_task: Task, | ||
) -> None: | ||
"""Check that the generator inputs are valid.""" | ||
# Check that only one objective has been given. | ||
n_objectives = len(objectives) | ||
assert n_objectives == 1, ( | ||
"Multitask generator supports only a single objective. " | ||
"Objectives given: {}.".format(n_objectives) | ||
) | ||
# Check that the number of low-fidelity trials per iteration is larger | ||
# than that of high-fidelity trials. | ||
assert lofi_task.n_opt >= hifi_task.n_opt, ( | ||
|
@@ -274,11 +296,21 @@ def suggest(self, num_points: Optional[int]) -> List[dict]: | |
var.name: arm.parameters.get(var.name) | ||
for var in self._varying_parameters | ||
} | ||
# SH for VOCS standard these will need to be 'variables' | ||
# For now must match the trial parameter names. | ||
point["ax_trial_id"] = trial_index | ||
point["arm_name"] = arm.name | ||
point["trial_type"] = trial_type | ||
# SH We can use a discrete var here in vocs (converted for now to trial parameters) | ||
# But unlike varying parameters the name refers to a fixed generator concept. | ||
for trial_param in self._custom_trial_parameters: | ||
if trial_param.name == "trial_type": | ||
point[trial_param.name] = trial_type | ||
|
||
# Generate unique _id and store mapping | ||
current_id = self._next_id | ||
self._id_mapping[current_id] = { | ||
"arm_name": arm.name, | ||
"ax_trial_id": trial_index, | ||
"trial_type": trial_type, | ||
|
||
} | ||
point["_id"] = current_id | ||
self._next_id += 1 | ||
points.append(point) | ||
return points | ||
|
||
|
@@ -295,6 +327,15 @@ def ingest(self, results: List[dict]) -> None: | |
custom_parameters=self._custom_trial_parameters, | ||
) | ||
trials.append(trial) | ||
|
||
# Apply _id mapping to all trials before processing | ||
for trial in trials: | ||
if trial.gen_id is not None and trial.gen_id in self._id_mapping: | ||
mapping = self._id_mapping[trial.gen_id] | ||
trial.arm_name = mapping["arm_name"] | ||
trial.ax_trial_id = mapping["ax_trial_id"] | ||
# trial_type should already be in trial from custom_parameters | ||
|
||
if self.gen_state == NOT_STARTED: | ||
self._incorporate_external_data(trials) | ||
else: | ||
|
Uh oh!
There was an error while loading. Please reload this page.