
Commit cfd2462

Fix keyward to keyword
1 parent 0404891 commit cfd2462

2 files changed (+5, -5 lines)


classification/models/intern_image.py

Lines changed: 2 additions & 2 deletions
@@ -691,7 +691,7 @@ def _init_deform_weights(self, m):
             m._reset_parameters()

     @torch.jit.ignore
-    def lr_decay_keywards(self, decay_ratio=0.87):
+    def lr_decay_keywords(self, decay_ratio=0.87):
         lr_ratios = {}

         # blocks
@@ -701,7 +701,7 @@ def lr_decay_keywards(self, decay_ratio=0.87):
             for j in range(self.depths[layer_num]):
                 block_num = self.depths[layer_num] - j - 1
                 tag = 'levels.{}.blocks.{}.'.format(layer_num, block_num)
-                decay = 1.0 * (decay_ratio**idx)
+                decay = 1.0 * (decay_ratio ** idx)
                 lr_ratios[tag] = decay
                 idx += 1
         # patch_embed (before stage-1)
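
For context, lr_decay_keywords (as renamed here) returns a dict that maps parameter-name prefixes such as 'levels.{stage}.blocks.{block}.' to layer-wise learning-rate ratios of the form decay_ratio ** idx. Below is a minimal standalone sketch of that mapping; the depths tuple and the reversed stage traversal are assumptions for illustration, not taken from this diff.

# Sketch only: approximates the loop shown above, outside the model class.
def lr_decay_keywords_sketch(depths=(4, 4, 18, 4), decay_ratio=0.87):
    lr_ratios = {}
    idx = 0
    # assumed order: last stage first, last block within each stage first
    for layer_num in reversed(range(len(depths))):
        for j in range(depths[layer_num]):
            block_num = depths[layer_num] - j - 1
            tag = 'levels.{}.blocks.{}.'.format(layer_num, block_num)
            lr_ratios[tag] = 1.0 * (decay_ratio ** idx)
            idx += 1
    return lr_ratios

ratios = lr_decay_keywords_sketch()
print(ratios['levels.3.blocks.3.'])  # 1.0: the deepest block keeps the base lr
print(ratios['levels.0.blocks.0.'])  # smallest ratio: the earliest block is decayed the most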

classification/optimizer.py

Lines changed: 3 additions & 3 deletions
@@ -129,11 +129,11 @@ def set_weight_decay_and_lr(

         if lr_layer_decay:
             print('layer-wise lr decay is used !')
-            assert hasattr(model, 'lr_decay_keywards')
-            lr_ratio_keywards = model.lr_decay_keywards(lr_layer_decay_ratio)
+            assert hasattr(model, 'lr_decay_keywords')
+            lr_ratio_keywords = model.lr_decay_keywords(lr_layer_decay_ratio)

             # 2. check lr
-            ratio = check_keywords_in_dict(name, lr_ratio_keywards)
+            ratio = check_keywords_in_dict(name, lr_ratio_keywords)
             if ratio is not None:
                 lr = ratio * base_lr
             else:
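
On the consumer side, the renamed dict is used to scale the base learning rate per parameter. The body of check_keywords_in_dict is not part of this diff; the substring match below is an assumed stand-in, and the names and values are illustrative only.

def check_keywords_in_dict(name, keyword_ratios):
    # assumed behavior: return the ratio of the first keyword contained in the name
    for keyword, ratio in keyword_ratios.items():
        if keyword in name:
            return ratio
    return None

base_lr = 5e-4  # illustrative value, not from the repository's config
lr_ratio_keywords = {'levels.0.blocks.0.': 0.3, 'levels.3.blocks.3.': 1.0}

for name in ['levels.0.blocks.0.attn.qkv.weight', 'head.weight']:
    ratio = check_keywords_in_dict(name, lr_ratio_keywords)
    lr = ratio * base_lr if ratio is not None else base_lr  # mirrors the if/else above
    print(name, lr)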
