
Commit ab7d6a7

feat(ppsci): support data_effient_nopt for training and test
1 parent e29fd66 commit ab7d6a7

20 files changed: +5545 −0 lines changed
Lines changed: 375 additions & 0 deletions
@@ -0,0 +1,375 @@
default: &DEFAULT
  num_data_workers: 1
  # model
  model: 'fno'
  depth: 5
  in_dim: 2
  out_dim: 1
  dropout: 0
  # data/domain
  Lx: !!float 1.0
  Ly: !!float 1.0
  nx: 256
  ny: 256
  # optimization
  optimizer: 'adam'
  scheduler: 'none'
  learning_rate: !!float 1.0
  max_epochs: 500
  scheduler_epochs: 500
  weight_decay: 0
  batch_size: 25
  # misc
  log_to_screen: !!bool False
  save_checkpoint: !!bool False
  seed: 0
  plot_figs: !!bool False
  pack_data: !!bool False
  # Weights & Biases
  entity: 'entity_name'
  project: 'proj_name'
  group: 'helmholtz'
  log_to_wandb: !!bool False
  distill: !!bool False
  subsample: 1
  exp_dir: './exp/'
  tie_fields: !!bool False
  use_all_fields: !!bool True
  tie_batches: !!bool False
  model_type: fno
  pretrained: False
  warmup_steps: 0
  epoch_size: 1
  accum_grad: 1
  enable_amp: !!bool False
  log_interval: 1
  checkpoint_save_interval: 10
  debug_grad: False

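Every experiment below inherits from this default block through YAML anchors (&DEFAULT) and merge keys (<<: *DEFAULT), so a named entry only lists the values it overrides. A minimal sketch of inspecting the resolved settings with PyYAML; the file path is a placeholder, not necessarily where this config lives in the repo:

```python
# Minimal sketch: load the config and look at one resolved experiment.
# "operators_helmholtz.yaml" is a placeholder path for this file.
import yaml

with open("operators_helmholtz.yaml") as f:
    configs = yaml.safe_load(f)   # PyYAML flattens '<<' merge keys on load

cfg = configs["helmholtz"]
print(cfg["model"], cfg["nx"], cfg["seed"])
# -> fno 128 0   (nx overrides the DEFAULT value of 256; seed is inherited)
```
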
helmholtz: &helmholtz
  <<: *DEFAULT
  n_demos: 0
  batch_size: 128
  nx: 128
  ny: 128
  log_to_wandb: !!bool True
  save_checkpoint: !!bool True
  max_epochs: 500
  scheduler: 'cosine'

  model: 'fno'
  layers: [64, 64, 64, 64, 64]
  modes1: [65, 65, 65, 65]
  modes2: [65, 65, 65, 65]
  fc_dim: 128

  in_dim: 2
  out_dim: 1
  mode_cut: 32
  embed_cut: 64
  fc_cut: 2

  optimizer: 'adam'

  learning_rate: 1E-3
  pack_data: !!bool False

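The FNO settings are consistent with the grid sizes used here: a real FFT on an n-point axis yields n/2 + 1 non-redundant coefficients, so modes1/modes2 of 65 exactly saturate the 128-point grid, and mode_cut: 32 fits the 64-point grids of the experiments below. A quick self-contained check (the helper is ours, not a repo function):

```python
# Sanity check: requested Fourier modes must fit the grid's rfft spectrum.
def max_rfft_modes(n: int) -> int:
    """Number of non-redundant rfft coefficients on an n-point axis."""
    return n // 2 + 1

for n, modes in [(128, 65), (64, 32)]:
    assert modes <= max_rfft_modes(n)
    print(f"grid {n:>3}: {max_rfft_modes(n)} modes available, config keeps {modes}")
# grid 128: 65 modes available, config keeps 65
# grid  64: 33 modes available, config keeps 32
```
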
helm-64-scale-o5_15: &helm_64_o5_15
  <<: *helmholtz
  train_path: '/path/to/helmholtz_64_o5_15_train.h5'
  val_path: '/path/to/helmholtz_64_o5_15_val.h5'
  test_path: '/path/to/helmholtz_64_o5_15_test.h5'
  scales_path: '/path/to/helmholtz_64_o5_15_train_scale.npy'
  # train_rand_idx_path: '/path/to/old_gen/train_rand_idx.npy'
  batch_size: 128
  in_dim: 3
  out_dim: 1
  mode_cut: 32
  embed_cut: 64
  fc_cut: 2
  learning_rate: 1E-3
  subsample: 1
  nx: 64
  ny: 64

  pt: "train"
  pt_split: [46080, 8192]
  pretrained: False

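pt_split: [46080, 8192] reads as a carve-out by sample count: 46080 training and 8192 validation examples, 54272 in total. A sketch of how such a split could be materialized with NumPy; the variable names are illustrative, not taken from the repo:

```python
import numpy as np

pt_split = [46080, 8192]            # train / val counts from the config
n_total = sum(pt_split)             # 54272 samples assumed available

rng = np.random.default_rng(seed=0)           # seed: 0 from the DEFAULT block
perm = rng.permutation(n_total)
train_idx, val_idx = perm[:pt_split[0]], perm[pt_split[0]:]
print(len(train_idx), len(val_idx))           # 46080 8192
```
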
helm-64-pretrain-o1_20: &helm_64_o1_20_pt
  <<: *helmholtz
  train_path: '/path/to/helmholtz_64_o1_20_train.h5'
  val_path: '/path/to/helmholtz_64_o1_20_val.h5'
  test_path: '/path/to/helmholtz_64_o1_20_test.h5'
  scales_path: '/path/to/helmholtz_64_o1_20_train_scale.npy'
  train_rand_idx_path: '/path/to/train_rand_idx.npy'
  batch_size: 128
  in_dim: 3
  out_dim: 1
  mode_cut: 32
  embed_cut: 64
  fc_cut: 2
  learning_rate: 1E-3
  subsample: 1
  nx: 64
  ny: 64
  pt: "pretrain"
  pt_split: [46080, 8192] #[0.9, 0.1]
  blur: [0, 1]

helm-64-pretrain-o1_20_ft: &helm_64_o1_20_ft
  <<: *helm_64_o1_20_pt
  pt: "train"
  fix_backbone: False

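The pretraining entry switches pt to "pretrain", adds blur: [0, 1] (which looks like an augmentation-strength range), and points at a fixed train_rand_idx_path, suggesting a pre-generated permutation is shared across runs so the pretrain and fine-tune stages see consistent subsets. A hedged sketch of consuming such an index file; this interpretation is ours, not confirmed by the diff:

```python
import numpy as np

# A permutation generated once and saved, then reused for reproducible splits.
rand_idx = np.load("/path/to/train_rand_idx.npy")     # train_rand_idx_path from the config
train_count, val_count = 46080, 8192                  # pt_split from the config

train_idx = rand_idx[:train_count]
val_idx = rand_idx[train_count:train_count + val_count]
```
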
helm-64-finetune-o5_15: &helm_64_o5_15_ft
  <<: *helmholtz
  train_path: '/path/to/helmholtz_64_o5_15_train.h5'
  val_path: '/path/to/helmholtz_64_o5_15_val.h5'
  test_path: '/path/to/helmholtz_64_o5_15_test.h5'
  scales_path: '/path/to/helmholtz_64_o5_15_train_scale.npy'
  train_rand_idx_path: '/path/to/train_rand_idx.npy'
  batch_size: 128
  in_dim: 3 #normal helmholtz has 3 dim, joint has 4
  out_dim: 1
  mode_cut: 32
  embed_cut: 64
  fc_cut: 2
  learning_rate: 1E-3
  subsample: 1
  nx: 64
  ny: 64
  pt: "train"
  pt_split: [46080, 8192]
  fix_backbone: False
  pretrained: True
  pretrained_ckpt_path: /pretrained_ckpt_path/training_checkpoints/ckpt.tar

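The fine-tuning entry flips pretrained to True, points at pretrained_ckpt_path, and exposes fix_backbone to control whether the pretrained weights stay frozen. A minimal PaddlePaddle sketch of that logic, assuming the checkpoint was written with paddle.save and with an illustrative layer naming rather than the repo's actual layout:

```python
import paddle

def load_pretrained(model: paddle.nn.Layer, ckpt_path: str, fix_backbone: bool) -> paddle.nn.Layer:
    """Load a paddle.save checkpoint and optionally freeze all but the head (illustrative helper)."""
    state = paddle.load(ckpt_path)                 # e.g. the config's pretrained_ckpt_path
    model.set_state_dict(state["model_state"] if "model_state" in state else state)
    if fix_backbone:
        for name, param in model.named_parameters():
            if not name.startswith("fc"):          # assumed: only the projection head stays trainable
                param.stop_gradient = True
    return model
```
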
helm-64-o5_15_ft0: &helm_64_o5_15_ft0
  <<: *helm_64_o5_15_ft
  subsample: 1

helm-64-o5_15_ft0_r0: &helm_64_o5_15_ft0_r0
  <<: *helm_64_o5_15_ft
  subsample: 1

helm-64-o5_15_ft0_r1: &helm_64_o5_15_ft0_r1
  <<: *helm_64_o5_15_ft
  subsample: 1
  seed: 1

helm-64-o5_15_ft0_r2: &helm_64_o5_15_ft0_r2
  <<: *helm_64_o5_15_ft
  subsample: 1
  seed: 2

helm-64-o5_15_ft1: &helm_64_o5_15_ft1
  <<: *helm_64_o5_15_ft
  subsample: 2

helm-64-o5_15_ft1_r0: &helm_64_o5_15_ft1_r0
  <<: *helm_64_o5_15_ft
  subsample: 2

helm-64-o5_15_ft2: &helm_64_o5_15_ft2
  <<: *helm_64_o5_15_ft
  subsample: 4

helm-64-o5_15_ft2_r0: &helm_64_o5_15_ft2_r0
  <<: *helm_64_o5_15_ft
  subsample: 4

helm-64-o5_15_ft3_r1: &helm_64_o5_15_ft3_r1
  <<: *helm_64_o5_15_ft
  subsample: 8
  seed: 1

helm-64-o5_15_ft3_r2: &helm_64_o5_15_ft3_r2
  <<: *helm_64_o5_15_ft
  subsample: 8
  seed: 2

helm-64-o5_15_ft3_r0: &helm_64_o5_15_ft3_r0
  <<: *helm_64_o5_15_ft
  subsample: 8
  seed: 0

helm-64-o5_15_ft3_r3: &helm_64_o5_15_ft3_r3
  <<: *helm_64_o5_15_ft
  subsample: 8
  seed: 3

helm-64-o5_15_ft4_r0: &helm_64_o5_15_ft4_r0
  <<: *helm_64_o5_15_ft
  subsample: 16
  seed: 0

helm-64-o5_15_ft4_r3: &helm_64_o5_15_ft4_r3
  <<: *helm_64_o5_15_ft
  subsample: 16
  seed: 3

helm-64-o5_15_ft4_r1: &helm_64_o5_15_ft4_r1
  <<: *helm_64_o5_15_ft
  subsample: 16
  seed: 1

helm-64-o5_15_ft4_r2: &helm_64_o5_15_ft4_r2
  <<: *helm_64_o5_15_ft
  subsample: 16
  seed: 2

helm-64-o5_15_ft5_r1: &helm_64_o5_15_ft5_r1
  <<: *helm_64_o5_15_ft
  subsample: 32
  seed: 1

helm-64-o5_15_ft5_r0: &helm_64_o5_15_ft5_r0
  <<: *helm_64_o5_15_ft
  subsample: 32
  seed: 0

helm-64-o5_15_ft5_r2: &helm_64_o5_15_ft5_r2
  <<: *helm_64_o5_15_ft
  subsample: 32
  seed: 2

helm-64-o5_15_ft6_r0: &helm_64_o5_15_ft6_r0
  <<: *helm_64_o5_15_ft
  subsample: 64
  seed: 0

helm-64-o5_15_ft6_r1: &helm_64_o5_15_ft6_r1
  <<: *helm_64_o5_15_ft
  subsample: 64
  seed: 1

helm-64-o5_15_ft6_r2: &helm_64_o5_15_ft6_r2
  <<: *helm_64_o5_15_ft
  subsample: 64
  seed: 2

helm-64-o5_15_ft7_r0: &helm_64_o5_15_ft7_r0
  <<: *helm_64_o5_15_ft
  subsample: 128
  # learning_rate: 1E-5
  batch_size: 64
  seed: 0

helm-64-o5_15_ft7_r1: &helm_64_o5_15_ft7_r1
  <<: *helm_64_o5_15_ft
  subsample: 128
  # learning_rate: 1E-5
  batch_size: 64
  seed: 1

helm-64-o5_15_ft7_r2: &helm_64_o5_15_ft7_r2
  <<: *helm_64_o5_15_ft
  subsample: 128
  # learning_rate: 1E-5
  batch_size: 64
  seed: 2

helm-64-o5_15_ft8_r0: &helm_64_o5_15_ft8_r0
  <<: *helm_64_o5_15_ft
  subsample: 256
  # learning_rate: 1E-5
  batch_size: 32
  seed: 0

helm-64-o5_15_ft9_r0: &helm_64_o5_15_ft9_r0
  <<: *helm_64_o5_15_ft
  subsample: 512
  # learning_rate: 1E-5
  batch_size: 16
  seed: 0

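The ftN_rM entries sweep subsample over powers of two (1 through 512), with _r0 to _r3 giving repeated runs under different seeds, and shrink batch_size once data gets very scarce. Assuming subsample: k keeps roughly every k-th training sample, the effective training-set sizes are easy to tabulate:

```python
train_count = 46080                                   # pt_split train portion from the config
for k in [1, 2, 4, 8, 16, 32, 64, 128, 256, 512]:
    print(f"subsample {k:>4}: ~{train_count // k:>5} training samples")
# subsample    1: ~46080 training samples
# subsample    8: ~ 5760 training samples
# subsample  512: ~   90 training samples
```
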
helm-64-pretrain-o1_20_m0: &helm-64-o1_20_pt_m0
  <<: *helm_64_o1_20_pt
  mask_ratio: 0.

helm-64-pretrain-o1_20_m1: &helm-64-o1_20_pt_m1
  <<: *helm_64_o1_20_pt
  mask_ratio: 0.1

helm-64-pretrain-o1_20_m2: &helm-64-o1_20_pt_m2
  <<: *helm_64_o1_20_pt
  mask_ratio: 0.2

helm-64-pretrain-o1_20_m3: &helm-64-o1_20_pt_m3
  <<: *helm_64_o1_20_pt
  mask_ratio: 0.3

helm-64-pretrain-o1_20_m4: &helm-64-o1_20_pt_m4
  <<: *helm_64_o1_20_pt
  mask_ratio: 0.4

helm-64-pretrain-o1_20_m5: &helm-64-o1_20_pt_m5
  <<: *helm_64_o1_20_pt
  mask_ratio: 0.5

helm-64-pretrain-o1_20_m6: &helm-64-o1_20_pt_m6
  <<: *helm_64_o1_20_pt
  mask_ratio: 0.6

helm-64-pretrain-o1_20_m7: &helm-64-o1_20_pt_m7
  <<: *helm_64_o1_20_pt
  mask_ratio: 0.7

helm-64-pretrain-o1_20_m8: &helm-64-o1_20_pt_m8
  <<: *helm_64_o1_20_pt
  mask_ratio: 0.8

helm-64-pretrain-o1_20_m9: &helm-64-o1_20_pt_m9
  <<: *helm_64_o1_20_pt
  mask_ratio: 0.9

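The m0 through m9 variants sweep mask_ratio from 0.0 to 0.9 on top of the pretraining entry, which points to masked-reconstruction pretraining where a fraction of the input field is hidden. A rough NumPy sketch of that idea; the exact masking scheme in the code may differ:

```python
import numpy as np

def mask_field(field: np.ndarray, mask_ratio: float, rng: np.random.Generator) -> np.ndarray:
    """Zero out a random fraction of grid points (illustrative, not the repo's exact scheme)."""
    keep = rng.random(field.shape) >= mask_ratio   # keep ~ (1 - mask_ratio) of the points
    return field * keep

rng = np.random.default_rng(0)
field = rng.standard_normal((64, 64))              # nx = ny = 64 in these configs
masked = mask_field(field, mask_ratio=0.5, rng=rng)
```
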
helm-64-o5_15_bsln: &helm_64_o5_15_baseline
  <<: *helm_64_o5_15
  pt: "train"
  pt_split: [0, 1]

helm-64-o5_15_b0: &helm-64-o1_10_ss4
  <<: *helm_64_o5_15_baseline
  subsample: 4

helm-64-o5_15_b1: &helm-64-o1_10_ss8
  <<: *helm_64_o5_15_baseline
  subsample: 8

helm-64-o5_15_b2: &helm-64-o1_10_ss16
  <<: *helm_64_o5_15_baseline
  subsample: 16

helm-64-o5_15_b3: &helm-64-o1_10_ss32
  <<: *helm_64_o5_15_baseline
  subsample: 32

helm-64-o5_15_b4: &helm-64-o1_10_ss64
  <<: *helm_64_o5_15_baseline
  subsample: 64

helm-64-o5_15_b5: &helm-64-o1_10_ss128
  <<: *helm_64_o5_15_baseline
  subsample: 128

helm-64-o5_15_b6: &helm-64-o1_10_ss256
  <<: *helm_64_o5_15_baseline
  subsample: 256

helm-64-o5_15_b7: &helm-64-o1_10_ss512
  <<: *helm_64_o5_15_baseline
  subsample: 512
  batch_size: 64

helm-64-o5_15_b8: &helm-64-o1_10_ss1024
  <<: *helm_64_o5_15_baseline
  subsample: 1024
  batch_size: 32

helm-64-o5_15_b9: &helm-64-o1_10_ss2048
  <<: *helm_64_o5_15_baseline
  subsample: 2048
  batch_size: 16

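The bN baselines reuse helm-64-scale-o5_15 (so pretrained stays False) with pt: "train" and pt_split: [0, 1], and sweep the same subsample range, which presumably gives the train-from-scratch comparison for the fine-tuned runs above. The resolved sweep can be inspected the same way as before (placeholder path again):

```python
import yaml

with open("operators_helmholtz.yaml") as f:        # placeholder path for this config file
    configs = yaml.safe_load(f)

baselines = {name: cfg for name, cfg in configs.items()
             if name.startswith("helm-64-o5_15_b") and name != "helm-64-o5_15_bsln"}
for name, cfg in sorted(baselines.items()):
    print(f"{name}: subsample={cfg['subsample']}, batch_size={cfg['batch_size']}")
```
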
