Skip to content

Commit 7a16c56

Browse files
committed
Renamed folder
1 parent 5295510 commit 7a16c56

File tree

5 files changed

+161
-0
lines changed

5 files changed

+161
-0
lines changed
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Lines changed: 161 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,161 @@
1+
"""Generate per-seed parameter files for CIFAR10 runs, launch them as
subprocesses, and gather the resulting validation accuracies.

Usage:
    python <this file> --algorithm {BP,FA,DFA,PAL} [--run] [--gather]
"""

import argparse
import json
import logging
import os
import pickle
import subprocess
from pathlib import Path

import numpy as np

# Seeds swept over; one params.json / subprocess / result file per seed.
SEEDS = [1, 3, 5, 7, 9, 11, 13, 15, 17, 19]

# Hyper-parameters shared by every algorithm.
COMMON_PARAMS = {
    "epochs": 50,
    "batch_size": 128,
    "batch_learning_multiplier": 64,
    "wn_sigma": [0, 0, 0, 0, 0, 0],
    "n_updates": 100,
    "target_type": "mse",
    "activation": "Sigmoid",
    "model_variant": "vanilla",
    # NOTE(review): "store_true" is an argparse action name, not a boolean —
    # presumably the consumer only checks truthiness; confirm downstream.
    "with_optimizer": "store_true",
}

# Algorithm-specific overrides applied on top of COMMON_PARAMS.
ALGO_PARAMS = {
    "BP": {
        "lr_factors": [5.0, 0.0, 1.0, 0.0, 2.0, 0.2],
    },
    "FA": {
        "lr_factors": [10.0, 0.0, 25.0, 0.0, 2.0, 0.2],
        "rec_degs": True,
    },
    "DFA": {
        "lr_factors": [1.0, 0.0, 1.0, 0.0, 5.0, 0.2],
        "rec_degs": True,
    },
    "PAL": {
        "lr_factors": [5.0, 0.0, 1.0, 0.0, 2.0, 0.2],
        "rec_degs": True,
        "bw_lr_factors": [0, 0, 0, 0, 10, 10],
        "regularizer": [0, 0, 0, 0, 5e-5, 5e-5],
        "tau_xi": [10, 10, 10, 10, 10, 10],
        "tau_HP": [10, 10, 10, 10, 10, 10],
        "tau_LO": [0, 0, 0, 0, 0, 0],
        "sigma": [0, 5e-2, 0, 5e-2, 5e-2, 5e-2],
    },
}


def build_params(algo, seed):
    """Return the full parameter dict for one run of *algo* with *seed*.

    Raises:
        ValueError: if *algo* is not one of BP, FA, DFA, PAL.  (The original
        code silently fell through and later crashed with a NameError.)
    """
    if algo not in ALGO_PARAMS:
        raise ValueError(f"Unknown algorithm '{algo}'; choose BP, FA, DFA or PAL")
    params = dict(COMMON_PARAMS)
    params["algorithm"] = algo
    params["seed"] = seed
    params.update(ALGO_PARAMS[algo])
    return params


def run_sweep(params_arr, output_dir, parent_dir):
    """Write a params.json per (lr, seed) cell and launch each run.

    Each run is started as an independent subprocess with the repository
    root as its working directory; this function does not wait for them.
    """
    for i, params_per_lr in enumerate(params_arr):
        for j, params in enumerate(params_per_lr):
            sim_dir = output_dir / f"lr{i}" / f"seed{j}"
            # Keep the trailing separator: downstream code concatenates
            # file names directly onto this string.
            params["output"] = str(sim_dir) + "/"

            # create output directory if it doesn't exist
            os.makedirs(sim_dir, exist_ok=True)

            param_file = sim_dir / "params.json"
            with open(param_file, "w") as f:
                logging.info(f"Saving to {f.name}")
                json.dump(params, f)

            # start runs as separate processes
            proc_name = ["python", "experiments/CIFAR10/le_layers_cifar10_training.py",
                         "--params", str(param_file)]
            logging.info(f"Starting run as subprocess {proc_name}.")
            subprocess.Popen(proc_name, cwd=parent_dir)


def gather_results(n_lr, n_seeds, output_dir, runner_dir):
    """Collect per-run validation accuracies into one (lr, seed, ...) array
    and save it as acc_lr_seeds_epochs.npy in *runner_dir*."""
    lin_acc_arr = []
    for i in range(n_lr):
        lin_acc_per_lr = []
        for j in range(n_seeds):
            sim_dir = output_dir / f"lr{i}" / f"seed{j}"
            with open(sim_dir / "val_acc.pkl", "rb") as in_file:
                lin_acc_per_lr.append(pickle.load(in_file))
        lin_acc_arr.append(lin_acc_per_lr)

    lin_acc_output_file = runner_dir / "acc_lr_seeds_epochs.npy"
    np.save(lin_acc_output_file, lin_acc_arr)
    logging.info(f"Gathered data and saved to {lin_acc_output_file}.")


if __name__ == '__main__':

    logging.basicConfig(format='Generating param files -- %(levelname)s: %(message)s', level=logging.INFO, force=True)

    parser = argparse.ArgumentParser(description='Train a Latent Equilibrium Neural Network on CIFAR10 task.')
    parser.add_argument('--run', action='store_true',
                        default=False,
                        help='Run parameter sweep.')
    parser.add_argument('--gather', action='store_true',
                        default=False,
                        help='Gather results.')
    parser.add_argument('--algorithm', required=True,
                        help='Choose algorithm: BP, FA, DFA or PAL')

    args = parser.parse_args()
    algo = args.algorithm
    logging.info(f'Algorithm: {algo}')

    # path to parent generalized_latent_equilbrium folder
    PATH_parent = Path(__file__).parent.resolve().parents[2]
    # path to folder of this file (per-algorithm subfolder)
    PATH_runner = Path(__file__).parent.resolve() / str(algo)
    OUTPUT_DIR = PATH_runner / "runs"

    # A single placeholder learning-rate slot is kept so the on-disk layout
    # stays runs/lr0/seedJ/ (ready for a future multi-lr sweep).
    lrs = [None]
    params_arr = [[build_params(algo, seed) for seed in SEEDS] for _ in lrs]

    if args.run and not args.gather:
        run_sweep(params_arr, OUTPUT_DIR, PATH_parent)

    elif args.gather:
        gather_results(len(params_arr), len(SEEDS), OUTPUT_DIR, PATH_runner)
161+

0 commit comments

Comments
 (0)