main.py
from classes.dataset import Dataset
from classes.loader import Loader
from classes.model import Transformer
from classes.trainer import Trainer
import torch
from torch.cuda import is_available
from torch.optim import AdamW
from torch.optim.lr_scheduler import StepLR
import torch.nn as nn
import yaml


def count_parameters(model):
    # Count only the trainable parameters (those with requires_grad=True)
    return sum(p.numel() for p in model.parameters() if p.requires_grad)
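# Example: nn.Linear(10, 10) has 10*10 weights + 10 biases,
# so count_parameters(nn.Linear(10, 10)) == 110.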


def main():
    # Read all hyperparameters from the YAML settings file
    path = 'settings_files/leia-s.yaml'
    with open(path, 'r') as yaml_file:
        data = yaml.safe_load(yaml_file)
    # Shared constants
    CONTEXT_LEN = data['CONTEXT_LEN']
    BATCH_SIZE = data['BATCH_SIZE']
    NUM_EMBEDDINGS = data['NUM_EMBEDDINGS']
    DEVICE = 'cuda' if is_available() else 'cpu'
    # Dataset settings
    DATASET_PATH = data['DATASET_PATH']
    # Transformer settings
    BIAS = data['BIAS']
    D_FF = data['D_FF']
    EMBEDDING_DIM = data['EMBEDDING_DIM']
    N_HEADS = data['N_HEADS']
    N_LAYERS = data['N_LAYERS']
    PARAMS_PATH = data['PARAMS_PATH']
    P_DROP = data['P_DROP']
    # Optimizer settings
    LEARNING_RATE = data['LEARNING_RATE']
    # Scheduler settings
    STEP_SIZE = data['STEP_SIZE']
    GAMMA = data['GAMMA']
    # Trainer settings
    EPOCHS = data['EPOCHS']
    LOG_INTERVAL = data['LOG_INTERVAL']
    SAVE_INTERVAL = data['SAVE_INTERVAL']
    # Data objects
    dataset = Dataset(DATASET_PATH, CONTEXT_LEN)
    loader = Loader(dataset.get_dataset('train'), BATCH_SIZE, CONTEXT_LEN, NUM_EMBEDDINGS)
    # Model
    Leia = Transformer(NUM_EMBEDDINGS, CONTEXT_LEN, N_LAYERS, EMBEDDING_DIM, D_FF, N_HEADS, BIAS, P_DROP, DEVICE)
    # Load pretrained parameters if a checkpoint path was given
    if PARAMS_PATH is not None:
        # map_location lets a checkpoint saved on GPU load on a CPU-only machine
        Leia.load_state_dict(torch.load(PARAMS_PATH, map_location=DEVICE))
    print(f'Trainable parameters: {count_parameters(Leia)}')
    # Training objects
    loss_function = nn.CrossEntropyLoss()
    optimizer = AdamW(Leia.parameters(), lr=LEARNING_RATE)
    # StepLR decays the learning rate by a factor of GAMMA every STEP_SIZE steps
    scheduler = StepLR(optimizer, step_size=STEP_SIZE, gamma=GAMMA)
    trainer = Trainer(loader, Leia, loss_function, optimizer, DEVICE)
    trainer.train(EPOCHS, LOG_INTERVAL, SAVE_INTERVAL)


if __name__ == "__main__":
    main()
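

# For reference, a minimal sketch of what 'settings_files/leia-s.yaml' might
# contain. The keys are exactly those read in main() above; the values are
# illustrative assumptions only, not the project's actual configuration.
#
#   CONTEXT_LEN: 256
#   BATCH_SIZE: 32
#   NUM_EMBEDDINGS: 50257
#   DATASET_PATH: data/train.txt
#   BIAS: false
#   D_FF: 2048
#   EMBEDDING_DIM: 512
#   N_HEADS: 8
#   N_LAYERS: 6
#   PARAMS_PATH: null          # null -> skips the checkpoint loading above
#   P_DROP: 0.1
#   LEARNING_RATE: 0.0003
#   STEP_SIZE: 10
#   GAMMA: 0.9
#   EPOCHS: 5
#   LOG_INTERVAL: 100
#   SAVE_INTERVAL: 1000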