Skip to content

Commit 38f580f

Browse files
authored
[Autotuner] CI Smoke Test - Algorithm & Evaluation mode (#2052)
* Lock in scaffold for tests
* Fix bugs for random search
* Remove nevergrad code and dependencies
* Remove pins distance

Signed-off-by: Jack Luar <[email protected]>
1 parent 7b4e7aa commit 38f580f

File tree

8 files changed

+155
-41
lines changed

8 files changed

+155
-41
lines changed

.gitignore

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -95,3 +95,6 @@ mainREADME.md
9595
build
9696
.scala-build/
9797
.bsp/
98+
99+
# autotuner artifacts
100+
metadata-base-at.json

flow/designs/asap7/gcd/autotuner.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@
4444
"type": "float",
4545
"minmax": [
4646
0.0,
47-
0.99
47+
0.1
4848
],
4949
"step": 0
5050
},

flow/designs/sky130hd/gcd/autotuner.json

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -65,14 +65,6 @@
6565
],
6666
"step": 0
6767
},
68-
"_PINS_DISTANCE": {
69-
"type": "int",
70-
"minmax": [
71-
1,
72-
1
73-
],
74-
"step": 1
75-
},
7668
"CTS_CLUSTER_SIZE": {
7769
"type": "int",
7870
"minmax": [
@@ -99,4 +91,3 @@
9991
"step": 0
10092
}
10193
}
102-

flow/test/test_helper.sh

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -110,6 +110,9 @@ if [ $RUN_AUTOTUNER -eq 1 ]; then
110110

111111
echo "Running Autotuner smoke sweep test"
112112
python3 -m unittest tools.AutoTuner.test.smoke_test_sweep.${PLATFORM}SweepSmokeTest.test_sweep
113+
114+
echo "Running Autotuner smoke algorithm & evaluation test"
115+
python3 -m unittest tools.AutoTuner.test.smoke_test_algo_eval.${PLATFORM}AlgoEvalSmokeTest.test_algo_eval
113116
fi
114117

115118
exit $ret

flow/util/utils.mk

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,13 @@ update_rules:
3434
update_rules_force:
3535
$(UTILS_DIR)/genRuleFile.py $(DESIGN_DIR) --variant $(FLOW_VARIANT) --update
3636

37+
.PHONY: update_metadata_autotuner
38+
update_metadata_autotuner:
39+
@$(UTILS_DIR)/genMetrics.py -d $(DESIGN_NICKNAME) \
40+
-p $(PLATFORM) \
41+
-v $(FLOW_VARIANT) \
42+
-o $(DESIGN_DIR)/metadata-$(FLOW_VARIANT)-at.json -x
43+
3744
#-------------------------------------------------------------------------------
3845

3946
.PHONY: write_net_rc

tools/AutoTuner/requirements.txt

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
ray[default,tune]==2.9.3
22
ax-platform>=0.3.3,<=0.3.7
33
hyperopt==0.2.7
4-
nevergrad==1.0.2
54
optuna==3.6.0
65
pandas>=2.0,<=2.2.1
76
bayesian-optimization==1.4.0

tools/AutoTuner/src/autotuner/distributed.py

Lines changed: 49 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -31,10 +31,12 @@
3131
import sys
3232
import glob
3333
import subprocess
34+
import random
3435
from datetime import datetime
3536
from multiprocessing import cpu_count
3637
from subprocess import run
3738
from itertools import product
39+
from collections import namedtuple
3840
from uuid import uuid4 as uuid
3941

4042
import numpy as np
@@ -47,12 +49,9 @@
4749
from ray.tune.search.ax import AxSearch
4850
from ray.tune.search.basic_variant import BasicVariantGenerator
4951
from ray.tune.search.hyperopt import HyperOptSearch
50-
51-
# from ray.tune.search.nevergrad import NevergradSearch
5252
from ray.tune.search.optuna import OptunaSearch
5353
from ray.util.queue import Queue
5454

55-
# import nevergrad as ng
5655
from ax.service.ax_client import AxClient
5756

5857
DATE = datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
@@ -188,6 +187,9 @@ def percent(x_1, x_2):
188187
def evaluate(self, metrics):
189188
error = "ERR" in metrics.values() or "ERR" in reference.values()
190189
not_found = "N/A" in metrics.values() or "N/A" in reference.values()
190+
print("Metrics", metrics.values())
191+
print("Reference", reference.values())
192+
print(error, not_found)
191193
if error or not_found:
192194
return ERROR_METRIC
193195
ppa = self.get_ppa(metrics)
@@ -221,24 +223,19 @@ def apply_condition(config, data):
221223
if args.algorithm != "random":
222224
return config
223225
dp_pad_min = data["CELL_PAD_IN_SITES_DETAIL_PLACEMENT"]["minmax"][0]
224-
# dp_pad_max = data['CELL_PAD_IN_SITES_DETAIL_PLACEMENT']['minmax'][1]
225226
dp_pad_step = data["CELL_PAD_IN_SITES_DETAIL_PLACEMENT"]["step"]
226227
if dp_pad_step == 1:
227228
config["CELL_PAD_IN_SITES_DETAIL_PLACEMENT"] = tune.sample_from(
228-
lambda spec: tune.randint(
229+
lambda spec: np.random.randint(
229230
dp_pad_min, spec.config.CELL_PAD_IN_SITES_GLOBAL_PLACEMENT + 1
230231
)
231232
)
232233
if dp_pad_step > 1:
233234
config["CELL_PAD_IN_SITES_DETAIL_PLACEMENT"] = tune.sample_from(
234-
lambda spec: tune.choice(
235-
np.ndarray.tolist(
236-
np.arange(
237-
dp_pad_min,
238-
spec.config.CELL_PAD_IN_SITES_GLOBAL_PLACEMENT + 1,
239-
dp_pad_step,
240-
)
241-
)
235+
lambda spec: random.randrange(
236+
dp_pad_min,
237+
spec.config.CELL_PAD_IN_SITES_GLOBAL_PLACEMENT + 1,
238+
dp_pad_step,
242239
)
243240
)
244241
return config
@@ -248,13 +245,8 @@ def read_tune(this):
248245
if min_ == max_:
249246
# Returning a choice of a single element allow pbt algorithm to
250247
# work. pbt does not accept single values as tunable.
251-
return tune.choice([min_])
248+
return tune.choice([min_, max_])
252249
if this["type"] == "int":
253-
if min_ == 0 and args.algorithm == "nevergrad":
254-
print(
255-
"[WARNING TUN-0011] NevergradSearch may not work "
256-
"with lower bound value 0."
257-
)
258250
if this["step"] == 1:
259251
return tune.randint(min_, max_)
260252
return tune.choice(np.ndarray.tolist(np.arange(min_, max_, this["step"])))
@@ -265,7 +257,12 @@ def read_tune(this):
265257
return None
266258

267259
def read_tune_ax(name, this):
260+
"""
261+
Ax format: https://ax.dev/versions/0.3.7/api/service.html
262+
"""
268263
dict_ = dict(name=name)
264+
if "minmax" not in this:
265+
return None
269266
min_, max_ = this["minmax"]
270267
if min_ == max_:
271268
dict_["type"] = "fixed"
@@ -292,6 +289,21 @@ def read_tune_ax(name, this):
292289
dict_["value_type"] = "float"
293290
return dict_
294291

292+
def read_tune_pbt(name, this):
293+
"""
294+
PBT format: https://docs.ray.io/en/releases-2.9.3/tune/examples/pbt_guide.html
295+
Note that PBT does not support step values.
296+
"""
297+
if "minmax" not in this:
298+
return None
299+
min_, max_ = this["minmax"]
300+
if min_ == max_:
301+
return ray.tune.choice([min_, max_])
302+
if this["type"] == "int":
303+
return ray.tune.randint(min_, max_)
304+
if this["type"] == "float":
305+
return ray.tune.uniform(min_, max_)
306+
295307
# Check file exists and whether it is a valid JSON file.
296308
assert os.path.isfile(file_name), f"File {file_name} not found."
297309
try:
@@ -319,13 +331,25 @@ def read_tune_ax(name, this):
319331
fr_file = read(f"{os.path.dirname(file_name)}/{value}")
320332
continue
321333
if not isinstance(value, dict):
322-
config[key] = value
334+
# To take care of empty values like _FR_FILE_PATH
335+
if args.mode == "tune" and args.algorithm == "ax":
336+
param_dict = read_tune_ax(key, value)
337+
if param_dict:
338+
config.append(param_dict)
339+
elif args.mode == "tune" and args.algorithm == "pbt":
340+
param_dict = read_tune_pbt(key, value)
341+
if param_dict:
342+
config[key] = param_dict
343+
else:
344+
config[key] = value
323345
elif args.mode == "sweep":
324346
config[key] = read_sweep(value)
325-
elif args.mode == "tune" and args.algorithm != "ax":
326-
config[key] = read_tune(value)
327347
elif args.mode == "tune" and args.algorithm == "ax":
328348
config.append(read_tune_ax(key, value))
349+
elif args.mode == "tune" and args.algorithm == "pbt":
350+
config[key] = read_tune_pbt(key, value)
351+
elif args.mode == "tune":
352+
config[key] = read_tune(value)
329353
if args.mode == "tune":
330354
config = apply_condition(config, data)
331355
return config, sdc_file, fr_file
@@ -724,7 +748,7 @@ def parse_arguments():
724748
tune_parser.add_argument(
725749
"--algorithm",
726750
type=str,
727-
choices=["hyperopt", "ax", "nevergrad", "optuna", "pbt", "random"],
751+
choices=["hyperopt", "ax", "optuna", "pbt", "random"],
728752
default="hyperopt",
729753
help="Search algorithm to use for Autotuning.",
730754
)
@@ -840,18 +864,13 @@ def set_algorithm(experiment_name, config):
840864
algorithm = HyperOptSearch(points_to_evaluate=best_params)
841865
elif args.algorithm == "ax":
842866
ax_client = AxClient(enforce_sequential_optimization=False)
867+
AxClientMetric = namedtuple("AxClientMetric", "minimize")
843868
ax_client.create_experiment(
844869
name=experiment_name,
845870
parameters=config,
846-
objective_name=METRIC,
847-
minimize=True,
871+
objectives={METRIC: AxClientMetric(minimize=True)},
848872
)
849873
algorithm = AxSearch(ax_client=ax_client, points_to_evaluate=best_params)
850-
elif args.algorithm == "nevergrad":
851-
algorithm = NevergradSearch(
852-
points_to_evaluate=best_params,
853-
optimizer=ng.optimizers.registry["PortfolioDiscreteOnePlusOne"],
854-
)
855874
elif args.algorithm == "optuna":
856875
algorithm = OptunaSearch(points_to_evaluate=best_params, seed=args.seed)
857876
elif args.algorithm == "pbt":
Lines changed: 92 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,92 @@
1+
import unittest
2+
import subprocess
3+
import os
4+
5+
cur_dir = os.path.dirname(os.path.abspath(__file__))
6+
src_dir = os.path.join(cur_dir, "../src/autotuner")
7+
orfs_dir = os.path.join(cur_dir, "../../../flow")
8+
os.chdir(src_dir)
9+
10+
11+
class BaseAlgoEvalSmokeTest(unittest.TestCase):
12+
platform = ""
13+
design = ""
14+
15+
def setUp(self):
16+
design_path = f"../../../flow/designs/{self.platform}/{self.design}"
17+
self.config = os.path.join(cur_dir, f"{design_path}/autotuner.json")
18+
self.experiment = f"smoke-test-algo-eval-{self.platform}"
19+
self.reference = os.path.join(cur_dir, f"{design_path}/metadata-base-at.json")
20+
# note for ppa-improv, you need to also add in reference file (--reference)
21+
_algo = ["hyperopt", "ax", "optuna", "pbt", "random"]
22+
_eval = ["default", "ppa-improv"]
23+
self.matrix = [(a, e) for a in _algo for e in _eval]
24+
self.commands = [
25+
f"python3 distributed.py"
26+
f" --design {self.design}"
27+
f" --platform {self.platform}"
28+
f" --experiment {self.experiment}"
29+
f" --config {self.config}"
30+
f" tune --samples 1"
31+
f" --algorithm {a} --eval {e}"
32+
f" --reference {self.reference}"
33+
for a, e in self.matrix
34+
]
35+
36+
def make_base(self):
37+
os.chdir(orfs_dir)
38+
commands = [
39+
f"make DESIGN_CONFIG=./designs/{self.platform}/{self.design}/config.mk clean_all",
40+
f"make DESIGN_CONFIG=./designs/{self.platform}/{self.design}/config.mk EQUIVALENCE_CHECK=0",
41+
f"make DESIGN_CONFIG=./designs/{self.platform}/{self.design}/config.mk update_metadata_autotuner",
42+
]
43+
for command in commands:
44+
out = subprocess.run(command, shell=True, check=True)
45+
self.assertTrue(out.returncode == 0)
46+
os.chdir(src_dir)
47+
48+
49+
class ASAP7AlgoEvalSmokeTest(BaseAlgoEvalSmokeTest):
50+
platform = "asap7"
51+
design = "gcd"
52+
53+
def test_algo_eval(self):
54+
# Run `make` to get baseline metrics (metadata-base-at.json)
55+
self.make_base()
56+
for command in self.commands:
57+
print(command)
58+
out = subprocess.run(command, shell=True, check=True)
59+
successful = out.returncode == 0
60+
self.assertTrue(successful)
61+
62+
63+
class IHPSG13G2AlgoEvalSmokeTest(BaseAlgoEvalSmokeTest):
64+
platform = "ihp-sg13g2"
65+
design = "gcd"
66+
67+
def test_algo_eval(self):
68+
# Run `make` to get baseline metrics (metadata-base-at.json)
69+
self.make_base()
70+
for command in self.commands:
71+
print(command)
72+
out = subprocess.run(command, shell=True, check=True)
73+
successful = out.returncode == 0
74+
self.assertTrue(successful)
75+
76+
77+
class SKY130HDAlgoEvalSmokeTest(BaseAlgoEvalSmokeTest):
78+
platform = "sky130hd"
79+
design = "gcd"
80+
81+
def test_algo_eval(self):
82+
# Run `make` to get baseline metrics (metadata-base-at.json)
83+
self.make_base()
84+
for command in self.commands:
85+
print(command)
86+
out = subprocess.run(command, shell=True, check=True)
87+
successful = out.returncode == 0
88+
self.assertTrue(successful)
89+
90+
91+
if __name__ == "__main__":
92+
unittest.main()

0 commit comments

Comments
 (0)