File tree Expand file tree Collapse file tree 2 files changed +5
-1
lines changed Expand file tree Collapse file tree 2 files changed +5
-1
lines changed Original file line number Diff line number Diff line change 4747$ python train.py
4848```
4949
50+ To record your experiments, invoke `wandb login` first, then run the training script
51+
5052## Citations
5153
5254``` bibtex
Original file line number Diff line number Diff line change 2929
3030PROJECT_NAME = 'native-sparse-attention'
3131RUN_NAME = 'baseline' if not USE_SPARSE_ATTN else 'sparse-attn'
32- WANDB_ONLINE = False # turn this on to pipe experiment to cloud
32+ WANDB_ONLINE = True # turn this on to pipe experiment to cloud
3333
3434# helpers
3535
@@ -153,6 +153,7 @@ def __getitem__(self, index):
153153
154154 (loss / GRAD_ACCUM_EVERY ).backward ()
155155
156+ wandb .log (dict (loss = loss .item ()), step = i )
156157 print (f"training loss: { loss .item ():.3f} " )
157158
158159 torch .nn .utils .clip_grad_norm_ (model .parameters (), 0.5 )
@@ -166,6 +167,7 @@ def __getitem__(self, index):
166167 valid_data = next (val_loader )
167168
168169 loss = model (valid_data , return_loss = True )
170+ wandb .log (dict (valid_loss = loss .item ()), step = i )
169171 print (f"validation loss: { loss .item ():.3f} " )
170172
171173 if i % GENERATE_EVERY == 0 :
You can’t perform that action at this time.
0 commit comments