Skip to content

Commit 96394a5

Browse files
committed
feat(scripts): APN
1 parent 324e0be commit 96394a5

File tree

11 files changed

+553
-0
lines changed

11 files changed

+553
-0
lines changed

scripts/APN/ECL.sh

Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,52 @@
#!/usr/bin/env bash
# Train/evaluate the APN model on the Electricity (ECL) dataset for four
# long-term forecasting horizons. The dataset id is derived from this file's
# name and the model id from the parent directory name, so the script can be
# copied per dataset/model without editing identifiers.

use_multi_gpu=0
if [ "$use_multi_gpu" -eq 0 ]; then
  launch_command="python"
else
  launch_command="accelerate launch"
fi

# Import shared information (dataset_root_path, get_dataset_info, ...) from scripts/globals.sh
. "$(dirname "$(readlink -f "$0")")/../globals.sh"

dataset_name=$(basename "$0" .sh) # file name
dataset_subset_name=""
dataset_id=$dataset_name
get_dataset_info "$dataset_name" "$dataset_subset_name" # Get dataset information (e.g. n_variables) from scripts/globals.sh

model_name="$(basename "$(dirname "$(readlink -f "$0")")")" # folder name
model_id=$model_name

seq_len=96
label_len=48
for pred_len in 96 192 336 720; do
  # NOTE: $launch_command is intentionally unquoted — "accelerate launch"
  # must word-split into a command plus its subcommand.
  $launch_command main.py \
    --is_training 1 \
    --loss "MSE" \
    --dropout 0.1 \
    --d_model 24 \
    --n_layers 2 \
    --n_heads 8 \
    --n_patches_list "24" \
    --tpatchgnn_te_dim 8 \
    --task_name "long_term_forecast" \
    --use_multi_gpu "$use_multi_gpu" \
    --dataset_root_path "$dataset_root_path" \
    --dataset_file_name "electricity.csv" \
    --model_id "$model_id" \
    --model_name "$model_name" \
    --dataset_name "$dataset_name" \
    --dataset_id "$dataset_id" \
    --features M \
    --seq_len "$seq_len" \
    --label_len "$label_len" \
    --pred_len "$pred_len" \
    --enc_in "$n_variables" \
    --dec_in "$n_variables" \
    --c_out "$n_variables" \
    --train_epochs 300 \
    --patience 10 \
    --val_interval 1 \
    --itr 5 \
    --batch_size 1 \
    --learning_rate 1e-2
done

scripts/APN/ETTh1.sh

Lines changed: 51 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,51 @@
#!/usr/bin/env bash
# Train/evaluate the APN model on the ETTh1 dataset for four long-term
# forecasting horizons. The dataset id is derived from this file's name and
# the model id from the parent directory name.

use_multi_gpu=0
if [ "$use_multi_gpu" -eq 0 ]; then
  launch_command="python"
else
  launch_command="accelerate launch"
fi

# Import shared information (dataset_root_path, get_dataset_info, ...) from scripts/globals.sh
. "$(dirname "$(readlink -f "$0")")/../globals.sh"

dataset_name=$(basename "$0" .sh) # file name
dataset_subset_name=""
dataset_id=$dataset_name
get_dataset_info "$dataset_name" "$dataset_subset_name" # Get dataset information (e.g. n_variables) from scripts/globals.sh

model_name="$(basename "$(dirname "$(readlink -f "$0")")")" # folder name
model_id=$model_name

seq_len=96
label_len=48
for pred_len in 96 192 336 720; do
  # NOTE: $launch_command is intentionally unquoted — "accelerate launch"
  # must word-split into a command plus its subcommand.
  $launch_command main.py \
    --is_training 1 \
    --loss "MSE" \
    --dropout 0.1 \
    --d_model 24 \
    --n_layers 2 \
    --n_heads 8 \
    --n_patches_list "24" \
    --tpatchgnn_te_dim 8 \
    --task_name "long_term_forecast" \
    --use_multi_gpu "$use_multi_gpu" \
    --dataset_root_path "$dataset_root_path" \
    --model_id "$model_id" \
    --model_name "$model_name" \
    --dataset_name "$dataset_name" \
    --dataset_id "$dataset_id" \
    --features M \
    --seq_len "$seq_len" \
    --label_len "$label_len" \
    --pred_len "$pred_len" \
    --enc_in "$n_variables" \
    --dec_in "$n_variables" \
    --c_out "$n_variables" \
    --train_epochs 300 \
    --patience 10 \
    --val_interval 1 \
    --itr 5 \
    --batch_size 32 \
    --learning_rate 1e-2
done

scripts/APN/ETTm1.sh

Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,52 @@
#!/usr/bin/env bash
# Train/evaluate the APN model on the ETTm1 dataset for four long-term
# forecasting horizons. ETTm1 is minute-sampled, hence --freq 't'. The
# dataset id is derived from this file's name and the model id from the
# parent directory name.

use_multi_gpu=0
if [ "$use_multi_gpu" -eq 0 ]; then
  launch_command="python"
else
  launch_command="accelerate launch"
fi

# Import shared information (dataset_root_path, get_dataset_info, ...) from scripts/globals.sh
. "$(dirname "$(readlink -f "$0")")/../globals.sh"

dataset_name=$(basename "$0" .sh) # file name
dataset_subset_name=""
dataset_id=$dataset_name
get_dataset_info "$dataset_name" "$dataset_subset_name" # Get dataset information (e.g. n_variables) from scripts/globals.sh

model_name="$(basename "$(dirname "$(readlink -f "$0")")")" # folder name
model_id=$model_name

seq_len=96
label_len=48
for pred_len in 96 192 336 720; do
  # NOTE: $launch_command is intentionally unquoted — "accelerate launch"
  # must word-split into a command plus its subcommand.
  $launch_command main.py \
    --is_training 1 \
    --loss "MSE" \
    --dropout 0.1 \
    --d_model 24 \
    --n_layers 2 \
    --n_heads 8 \
    --n_patches_list "24" \
    --tpatchgnn_te_dim 8 \
    --task_name "long_term_forecast" \
    --use_multi_gpu "$use_multi_gpu" \
    --dataset_root_path "$dataset_root_path" \
    --model_id "$model_id" \
    --model_name "$model_name" \
    --dataset_name "$dataset_name" \
    --dataset_id "$dataset_id" \
    --features M \
    --freq 't' \
    --seq_len "$seq_len" \
    --label_len "$label_len" \
    --pred_len "$pred_len" \
    --enc_in "$n_variables" \
    --dec_in "$n_variables" \
    --c_out "$n_variables" \
    --train_epochs 300 \
    --patience 10 \
    --val_interval 1 \
    --itr 5 \
    --batch_size 32 \
    --learning_rate 1e-2
done

scripts/APN/HumanActivity.sh

Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,47 @@
#!/usr/bin/env bash
# Train/evaluate the APN model on the HumanActivity dataset (irregular time
# series — uses a custom collate_fn). The dataset id is derived from this
# file's name and the model id from the parent directory name.

use_multi_gpu=0
if [ "$use_multi_gpu" -eq 0 ]; then
  launch_command="python"
else
  launch_command="accelerate launch"
fi

# Import shared information (dataset_root_path, get_dataset_info, ...) from scripts/globals.sh
. "$(dirname "$(readlink -f "$0")")/../globals.sh"

dataset_name=$(basename "$0" .sh) # file name
dataset_subset_name=""
dataset_id=$dataset_name
get_dataset_info "$dataset_name" "$dataset_subset_name" # Get dataset information (e.g. n_variables) from scripts/globals.sh

model_name="$(basename "$(dirname "$(readlink -f "$0")")")" # folder name
model_id=$model_name

seq_len=3000
for pred_len in 300; do
  # NOTE: $launch_command is intentionally unquoted — "accelerate launch"
  # must word-split into a command plus its subcommand.
  $launch_command main.py \
    --is_training 1 \
    --collate_fn "collate_fn" \
    --loss "MSE" \
    --d_model 56 \
    --n_layers 2 \
    --n_heads 8 \
    --n_patches_list "300" \
    --tpatchgnn_te_dim 8 \
    --use_multi_gpu "$use_multi_gpu" \
    --dataset_root_path "$dataset_root_path" \
    --model_id "$model_id" \
    --model_name "$model_name" \
    --dataset_name "$dataset_name" \
    --dataset_id "$dataset_id" \
    --features M \
    --seq_len "$seq_len" \
    --pred_len "$pred_len" \
    --enc_in "$n_variables" \
    --dec_in "$n_variables" \
    --c_out "$n_variables" \
    --train_epochs 300 \
    --patience 10 \
    --val_interval 1 \
    --itr 5 \
    --batch_size 32 \
    --learning_rate 1e-2
done

scripts/APN/ILI.sh

Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,52 @@
#!/usr/bin/env bash
# Train/evaluate the APN model on the ILI (national illness) dataset for four
# long-term forecasting horizons. ILI is weekly and short, hence the smaller
# seq/label/pred lengths than the other benchmarks. The dataset id is derived
# from this file's name and the model id from the parent directory name.

use_multi_gpu=0
if [ "$use_multi_gpu" -eq 0 ]; then
  launch_command="python"
else
  launch_command="accelerate launch"
fi

# Import shared information (dataset_root_path, get_dataset_info, ...) from scripts/globals.sh
. "$(dirname "$(readlink -f "$0")")/../globals.sh"

dataset_name=$(basename "$0" .sh) # file name
dataset_subset_name=""
dataset_id=$dataset_name
get_dataset_info "$dataset_name" "$dataset_subset_name" # Get dataset information (e.g. n_variables) from scripts/globals.sh

model_name="$(basename "$(dirname "$(readlink -f "$0")")")" # folder name
model_id=$model_name

seq_len=36
label_len=18
for pred_len in 12 24 36 48; do
  # NOTE: $launch_command is intentionally unquoted — "accelerate launch"
  # must word-split into a command plus its subcommand.
  $launch_command main.py \
    --is_training 1 \
    --loss "MSE" \
    --dropout 0.1 \
    --d_model 24 \
    --n_layers 2 \
    --n_heads 8 \
    --n_patches_list "12" \
    --tpatchgnn_te_dim 8 \
    --task_name "long_term_forecast" \
    --use_multi_gpu "$use_multi_gpu" \
    --dataset_root_path "$dataset_root_path" \
    --dataset_file_name "national_illness.csv" \
    --model_id "$model_id" \
    --model_name "$model_name" \
    --dataset_name "$dataset_name" \
    --dataset_id "$dataset_id" \
    --features M \
    --seq_len "$seq_len" \
    --label_len "$label_len" \
    --pred_len "$pred_len" \
    --enc_in "$n_variables" \
    --dec_in "$n_variables" \
    --c_out "$n_variables" \
    --train_epochs 300 \
    --patience 10 \
    --val_interval 1 \
    --itr 5 \
    --batch_size 32 \
    --learning_rate 1e-2
done

scripts/APN/MIMIC_III.sh

Lines changed: 48 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
#!/usr/bin/env bash
# Train/evaluate the APN model on the MIMIC-III dataset (irregular clinical
# time series — uses a custom collate_fn). The dataset id is derived from
# this file's name and the model id from the parent directory name.

use_multi_gpu=0
if [ "$use_multi_gpu" -eq 0 ]; then
  launch_command="python"
else
  launch_command="accelerate launch"
fi

# Import shared information (dataset_root_path, get_dataset_info, ...) from scripts/globals.sh
. "$(dirname "$(readlink -f "$0")")/../globals.sh"

dataset_name=$(basename "$0" .sh) # file name
dataset_subset_name=""
dataset_id=$dataset_name
get_dataset_info "$dataset_name" "$dataset_subset_name" # Get dataset information (e.g. n_variables) from scripts/globals.sh

model_name="$(basename "$(dirname "$(readlink -f "$0")")")" # folder name
model_id=$model_name

seq_len=72
for pred_len in 3; do
  # NOTE: $launch_command is intentionally unquoted — "accelerate launch"
  # must word-split into a command plus its subcommand.
  $launch_command main.py \
    --is_training 1 \
    --collate_fn "collate_fn" \
    --loss "MSE" \
    --d_model 16 \
    --n_layers 2 \
    --n_heads 8 \
    --n_patches_list "50" \
    --tpatchgnn_te_dim 16 \
    --use_multi_gpu "$use_multi_gpu" \
    --dataset_root_path "$dataset_root_path" \
    --model_id "$model_id" \
    --model_name "$model_name" \
    --dataset_name "$dataset_name" \
    --dataset_id "$dataset_id" \
    --features M \
    --seq_len "$seq_len" \
    --pred_len "$pred_len" \
    --enc_in "$n_variables" \
    --dec_in "$n_variables" \
    --c_out "$n_variables" \
    --train_epochs 300 \
    --patience 10 \
    --val_interval 1 \
    --itr 5 \
    --batch_size 32 \
    --learning_rate 2e-2
done

scripts/APN/MIMIC_IV.sh

Lines changed: 49 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,49 @@
#!/usr/bin/env bash
# Train/evaluate the APN model on the MIMIC-IV dataset (irregular clinical
# time series — uses a custom collate_fn). The dataset id is derived from
# this file's name and the model id from the parent directory name.

use_multi_gpu=0
if [ "$use_multi_gpu" -eq 0 ]; then
  launch_command="python"
else
  launch_command="accelerate launch"
fi

# Import shared information (dataset_root_path, get_dataset_info, ...) from scripts/globals.sh
. "$(dirname "$(readlink -f "$0")")/../globals.sh"

dataset_name=$(basename "$0" .sh) # file name
dataset_subset_name=""
dataset_id=$dataset_name
get_dataset_info "$dataset_name" "$dataset_subset_name" # Get dataset information (e.g. n_variables) from scripts/globals.sh

model_name="$(basename "$(dirname "$(readlink -f "$0")")")" # folder name
model_id=$model_name

seq_len=2160
for pred_len in 3; do
  # NOTE: $launch_command is intentionally unquoted — "accelerate launch"
  # must word-split into a command plus its subcommand.
  $launch_command main.py \
    --is_training 1 \
    --collate_fn "collate_fn" \
    --loss "MSE" \
    --dropout 0.1 \
    --d_model 24 \
    --n_layers 2 \
    --n_heads 8 \
    --n_patches_list "360" \
    --tpatchgnn_te_dim 8 \
    --use_multi_gpu "$use_multi_gpu" \
    --dataset_root_path "$dataset_root_path" \
    --model_id "$model_id" \
    --model_name "$model_name" \
    --dataset_name "$dataset_name" \
    --dataset_id "$dataset_id" \
    --features M \
    --seq_len "$seq_len" \
    --pred_len "$pred_len" \
    --enc_in "$n_variables" \
    --dec_in "$n_variables" \
    --c_out "$n_variables" \
    --train_epochs 300 \
    --patience 10 \
    --val_interval 1 \
    --itr 5 \
    --batch_size 32 \
    --learning_rate 1e-2
done

0 commit comments

Comments
 (0)