Skip to content

Commit 61fa2aa

Browse files
add-earthformer (#870)
1 parent 154c01c commit 61fa2aa

19 files changed

+7408
-0
lines changed

docs/zh/api/arch.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,5 +27,6 @@
2727
- DGMR
2828
- ChipDeepONets
2929
- AutoEncoder
30+
- CuboidTransformer
3031
show_root_heading: true
3132
heading_level: 3

docs/zh/api/data/arch.md

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
# Arch(网络模型) 模块
2+
3+
::: ppsci.arch
4+
handler: python
5+
options:
6+
members:
7+
- Arch
8+
- AMGNet
9+
- MLP
10+
- ModifiedMLP
11+
- DeepONet
12+
- DeepPhyLSTM
13+
- LorenzEmbedding
14+
- RosslerEmbedding
15+
- CylinderEmbedding
16+
- Generator
17+
- Discriminator
18+
- PhysformerGPT2
19+
- ModelList
20+
- AFNONet
21+
- PrecipNet
22+
- PhyCRNet
23+
- UNetEx
24+
- USCNN
25+
- NowcastNet
26+
- HEDeepONets
27+
- DGMR
28+
- ChipDeepONets
29+
- AutoEncoder
30+
- CuboidTransformer
31+
show_root_heading: true
32+
heading_level: 3
Lines changed: 153 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,153 @@
1+
hydra:
2+
run:
3+
# dynamic output directory according to running time and override name
4+
dir: outputs_earthformer_pretrain
5+
job:
6+
name: ${mode} # name of logfile
7+
chdir: false # keep current working directory unchanged
8+
config:
9+
override_dirname:
10+
exclude_keys:
11+
- TRAIN.checkpoint_path
12+
- TRAIN.pretrained_model_path
13+
- EVAL.pretrained_model_path
14+
- mode
15+
- output_dir
16+
- log_freq
17+
callbacks:
18+
init_callback:
19+
_target_: ppsci.utils.callbacks.InitCallback
20+
sweep:
21+
# output directory for multirun
22+
dir: ${hydra.run.dir}
23+
subdir: ./
24+
25+
# general settings
26+
mode: train # running mode: train/eval/export/infer
27+
seed: 0
28+
output_dir: ${hydra:run.dir}
29+
log_freq: 20
30+
31+
# set train and evaluate data path
32+
FILE_PATH: ./datasets/enso/enso_round1_train_20210201
33+
34+
# dataset setting
35+
DATASET:
36+
label_keys: ["sst_target","nino_target"]
37+
in_len: 12
38+
out_len: 14
39+
nino_window_t: 3
40+
in_stride: 1
41+
out_stride: 1
42+
train_samples_gap: 2
43+
eval_samples_gap: 1
44+
normalize_sst: true
45+
46+
# model settings
47+
MODEL:
48+
input_keys: ["sst_data"]
49+
output_keys: ["sst_target","nino_target"]
50+
input_shape: [12, 24, 48, 1]
51+
target_shape: [14, 24, 48, 1]
52+
base_units: 64
53+
scale_alpha: 1.0
54+
55+
enc_depth: [1, 1]
56+
dec_depth: [1, 1]
57+
enc_use_inter_ffn: true
58+
dec_use_inter_ffn: true
59+
dec_hierarchical_pos_embed: false
60+
61+
downsample: 2
62+
downsample_type: "patch_merge"
63+
upsample_type: "upsample"
64+
65+
num_global_vectors: 0
66+
use_dec_self_global: false
67+
dec_self_update_global: true
68+
use_dec_cross_global: false
69+
use_global_vector_ffn: false
70+
use_global_self_attn: false
71+
separate_global_qkv: false
72+
global_dim_ratio: 1
73+
74+
self_pattern: "axial"
75+
cross_self_pattern: "axial"
76+
cross_pattern: "cross_1x1"
77+
dec_cross_last_n_frames: null
78+
79+
attn_drop: 0.1
80+
proj_drop: 0.1
81+
ffn_drop: 0.1
82+
num_heads: 4
83+
84+
ffn_activation: "gelu"
85+
gated_ffn: false
86+
norm_layer: "layer_norm"
87+
padding_type: "zeros"
88+
pos_embed_type: "t+h+w"
89+
use_relative_pos: true
90+
self_attn_use_final_proj: true
91+
dec_use_first_self_attn: false
92+
93+
z_init_method: "zeros"
94+
initial_downsample_type: "conv"
95+
initial_downsample_activation: "leaky_relu"
96+
initial_downsample_scale: [1, 1, 2]
97+
initial_downsample_conv_layers: 2
98+
final_upsample_conv_layers: 1
99+
checkpoint_level: 2
100+
101+
attn_linear_init_mode: "0"
102+
ffn_linear_init_mode: "0"
103+
conv_init_mode: "0"
104+
down_up_linear_init_mode: "0"
105+
norm_init_mode: "0"
106+
107+
108+
# training settings
109+
TRAIN:
110+
epochs: 100
111+
save_freq: 20
112+
eval_during_train: true
113+
eval_freq: 10
114+
lr_scheduler:
115+
epochs: ${TRAIN.epochs}
116+
learning_rate: 0.0002
117+
by_epoch: true
118+
min_lr_ratio: 1.0e-3
119+
wd: 1.0e-5
120+
batch_size: 16
121+
pretrained_model_path: null
122+
checkpoint_path: null
123+
124+
125+
# evaluation settings
126+
EVAL:
127+
pretrained_model_path: ./checkpoint/enso/earthformer_enso.pdparams
128+
compute_metric_by_batch: false
129+
eval_with_no_grad: true
130+
batch_size: 1
131+
132+
INFER:
133+
pretrained_model_path: ./checkpoint/enso/earthformer_enso.pdparams
134+
export_path: ./inference/earthformer/enso
135+
pdmodel_path: ${INFER.export_path}.pdmodel
136+
pdpiparams_path: ${INFER.export_path}.pdiparams
137+
device: gpu
138+
engine: native
139+
precision: fp32
140+
onnx_path: ${INFER.export_path}.onnx
141+
ir_optim: true
142+
min_subgraph_size: 10
143+
gpu_mem: 4000
144+
gpu_id: 0
145+
max_batch_size: 16
146+
num_cpu_threads: 4
147+
batch_size: 1
148+
data_path: ./datasets/enso/infer/SODA_train.nc
149+
in_len: 12
150+
in_stride: 1
151+
out_len: 14
152+
out_stride: 1
153+
samples_gap: 1
Lines changed: 185 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,185 @@
1+
hydra:
2+
run:
3+
# dynamic output directory according to running time and override name
4+
dir: outputs_earthformer_pretrain
5+
job:
6+
name: ${mode} # name of logfile
7+
chdir: false # keep current working directory unchanged
8+
config:
9+
override_dirname:
10+
exclude_keys:
11+
- TRAIN.checkpoint_path
12+
- TRAIN.pretrained_model_path
13+
- EVAL.pretrained_model_path
14+
- mode
15+
- output_dir
16+
- log_freq
17+
callbacks:
18+
init_callback:
19+
_target_: ppsci.utils.callbacks.InitCallback
20+
sweep:
21+
# output directory for multirun
22+
dir: ${hydra.run.dir}
23+
subdir: ./
24+
25+
# general settings
26+
mode: train # running mode: train/eval/export/infer
27+
seed: 0
28+
output_dir: ${hydra:run.dir}
29+
log_freq: 20
30+
31+
# set train and evaluate data path
32+
FILE_PATH: ./datasets/sevir/sevir_data
33+
34+
# SEVIR dataset: raw_seq_len: 49, interval_real_time: 5, img_height = 384, img_width = 384
35+
# SEVIR_lr dataset: raw_seq_len: 25, interval_real_time: 10, img_height = 128, img_width = 128
36+
37+
# dataset setting
38+
DATASET:
39+
label_keys: ["vil"]
40+
data_types: ["vil"]
41+
seq_len: 25
42+
raw_seq_len: 49
43+
sample_mode: "sequent"
44+
stride: 12
45+
batch_size: 2
46+
layout: "NTHWC"
47+
in_len: 13
48+
out_len: 12
49+
split_mode: "uneven"
50+
51+
shuffle_seed: 1
52+
rescale_method: "01"
53+
downsample_dict: null
54+
verbose: false
55+
preprocess: true
56+
57+
# model settings
58+
MODEL:
59+
input_keys: ["input"]
60+
output_keys: ["vil"]
61+
input_shape: [13, 384, 384, 1]
62+
target_shape: [12, 384, 384, 1]
63+
base_units: 128
64+
scale_alpha: 1.0
65+
66+
enc_depth: [1, 1]
67+
dec_depth: [1, 1]
68+
enc_use_inter_ffn: true
69+
dec_use_inter_ffn: true
70+
dec_hierarchical_pos_embed: false
71+
72+
downsample: 2
73+
downsample_type: "patch_merge"
74+
upsample_type: "upsample"
75+
76+
num_global_vectors: 8
77+
use_dec_self_global: false
78+
dec_self_update_global: true
79+
use_dec_cross_global: false
80+
use_global_vector_ffn: false
81+
use_global_self_attn: true
82+
separate_global_qkv: true
83+
global_dim_ratio: 1
84+
85+
self_pattern: "axial"
86+
cross_self_pattern: "axial"
87+
cross_pattern: "cross_1x1"
88+
dec_cross_last_n_frames: null
89+
90+
attn_drop: 0.1
91+
proj_drop: 0.1
92+
ffn_drop: 0.1
93+
num_heads: 4
94+
95+
ffn_activation: "gelu"
96+
gated_ffn: false
97+
norm_layer: "layer_norm"
98+
padding_type: "zeros"
99+
pos_embed_type: "t+h+w"
100+
use_relative_pos: true
101+
self_attn_use_final_proj: true
102+
dec_use_first_self_attn: false
103+
104+
z_init_method: "zeros"
105+
initial_downsample_type: "stack_conv"
106+
initial_downsample_activation: "leaky_relu"
107+
initial_downsample_stack_conv_num_layers: 3
108+
initial_downsample_stack_conv_dim_list: [16, 64, 128]
109+
initial_downsample_stack_conv_downscale_list: [3, 2, 2]
110+
initial_downsample_stack_conv_num_conv_list: [2, 2, 2]
111+
checkpoint_level: 2
112+
113+
attn_linear_init_mode: "0"
114+
ffn_linear_init_mode: "0"
115+
conv_init_mode: "0"
116+
down_up_linear_init_mode: "0"
117+
norm_init_mode: "0"
118+
119+
120+
# training settings
121+
TRAIN:
122+
epochs: 100
123+
save_freq: 20
124+
eval_during_train: true
125+
eval_freq: 10
126+
lr_scheduler:
127+
epochs: ${TRAIN.epochs}
128+
learning_rate: 0.001
129+
by_epoch: true
130+
min_lr_ratio: 1.0e-3
131+
wd: 0.0
132+
batch_size: 1
133+
pretrained_model_path: null
134+
checkpoint_path: null
135+
start_date: null
136+
end_date: [2019, 1, 1]
137+
138+
139+
# evaluation settings
140+
EVAL:
141+
pretrained_model_path: ./checkpoint/sevir/earthformer_sevir.pdparams
142+
compute_metric_by_batch: false
143+
eval_with_no_grad: true
144+
batch_size: 1
145+
end_date: [2019, 6, 1]
146+
147+
metrics_mode: "0"
148+
metrics_list: ["csi", "pod", "sucr", "bias"]
149+
threshold_list: [16, 74, 133, 160, 181, 219]
150+
151+
152+
TEST:
153+
pretrained_model_path: ./checkpoint/sevir/earthformer_sevir.pdparams
154+
compute_metric_by_batch: true
155+
eval_with_no_grad: true
156+
batch_size: 1
157+
start_date: [2019, 6, 1]
158+
end_date: null
159+
160+
INFER:
161+
pretrained_model_path: ./checkpoint/sevir/earthformer_sevir.pdparams
162+
export_path: ./inference/earthformer/sevir
163+
pdmodel_path: ${INFER.export_path}.pdmodel
164+
pdpiparams_path: ${INFER.export_path}.pdiparams
165+
device: gpu
166+
engine: native
167+
precision: fp32
168+
onnx_path: ${INFER.export_path}.onnx
169+
ir_optim: true
170+
min_subgraph_size: 10
171+
gpu_mem: 4000
172+
gpu_id: 0
173+
max_batch_size: 16
174+
num_cpu_threads: 4
175+
batch_size: 1
176+
data_path: ./datasets/sevir/vil/2019/SEVIR_VIL_STORMEVENTS_2019_0701_1231.h5
177+
in_len: 13
178+
out_len: 12
179+
sevir_vis_save: ./inference/earthformer/sevir/vis
180+
layout: "NTHWC"
181+
plot_stride: 2
182+
logging_prefix: "Cuboid_SEVIR"
183+
interval_real_time: 5
184+
data_type: "vil"
185+
rescale_method: "01"

0 commit comments

Comments
 (0)