Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions LearningMachine.py
Original file line number Diff line number Diff line change
Expand Up @@ -260,6 +260,10 @@ def train(self, optimizer, loss_fn):
optimizer.zero_grad()
loss.backward()
torch.nn.utils.clip_grad_norm_(self.model.parameters(), self.conf.clip_grad_norm_max_norm)
if isinstance(self.model, nn.DataParallel):
torch.nn.utils.clip_grad_norm_(self.model.module.layers['embedding'].get_parameters(), self.conf.clip_grad_norm_max_norm)
else:
torch.nn.utils.clip_grad_norm_(self.model.layers['embedding'].get_parameters(), self.conf.clip_grad_norm_max_norm)
optimizer.step()

del loss, logits, logits_softmax, logits_flat
Expand Down
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
test
# ***NeuronBlocks*** - Building Your NLP DNN Models Like Playing Lego

[![language](https://img.shields.io/badge/language-en%20%7C%20中文-brightgreen.svg)](#language-supported)
Expand Down
File renamed without changes.
6 changes: 6 additions & 0 deletions block_zoo/Embedding.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
from block_zoo.embedding import *
import copy
import logging
import itertools

class EmbeddingConf(BaseConf):
""" Configuration for Embedding
Expand Down Expand Up @@ -171,6 +172,11 @@ def forward(self, inputs, use_gpu=False):
else:
return features[0]

def get_parameters(self):
    """Yield the parameters of every sub-embedding held by this layer.

    Walks the internal ``self.embeddings`` mapping (sub-embedding name ->
    embedding module) and yields each module's parameters in turn, so the
    combined stream can be handed to an optimizer or a gradient-clipping
    utility.

    Yields:
        Each parameter object exposed by a sub-embedding's ``parameters()``.
    """
    # NOTE(review): assumes every value in self.embeddings exposes a
    # .parameters() iterable (nn.Module convention) — confirm in __init__.
    for sub_embedding in self.embeddings.values():
        yield from sub_embedding.parameters()




File renamed without changes.
7 changes: 6 additions & 1 deletion train.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,12 +12,14 @@
import copy

import torch
import torch.nn as nn
from ModelConf import ModelConf
from problem import Problem
from utils.common_utils import dump_to_pkl, load_from_pkl, prepare_dir
from utils.philly_utils import HDFSDirectTransferer
from losses import *
from optimizers import *
import itertools

from LearningMachine import LearningMachine

Expand Down Expand Up @@ -231,7 +233,10 @@ def main(params):
loss_fn.cuda()

### optimizer
optimizer = eval(conf.optimizer_name)(lm.model.parameters(), **conf.optimizer_params)
if isinstance(lm.model, nn.DataParallel):
optimizer = eval(conf.optimizer_name)(list(lm.model.parameters()) + list(lm.model.module.layers['embedding'].get_parameters()), **conf.optimizer_params)
else:
optimizer = eval(conf.optimizer_name)(list(lm.model.parameters()) + list(lm.model.layers['embedding'].get_parameters()), **conf.optimizer_params)

## train
lm.train(optimizer, loss_fn)
Expand Down