Commit cfd24625 authored by zhe chen's avatar zhe chen
Browse files

Fix typo: rename 'keyward' to 'keyword'

parent 0404891e
...@@ -691,7 +691,7 @@ class InternImage(nn.Module): ...@@ -691,7 +691,7 @@ class InternImage(nn.Module):
m._reset_parameters() m._reset_parameters()
@torch.jit.ignore @torch.jit.ignore
def lr_decay_keywards(self, decay_ratio=0.87): def lr_decay_keywords(self, decay_ratio=0.87):
lr_ratios = {} lr_ratios = {}
# blocks # blocks
...@@ -701,7 +701,7 @@ class InternImage(nn.Module): ...@@ -701,7 +701,7 @@ class InternImage(nn.Module):
for j in range(self.depths[layer_num]): for j in range(self.depths[layer_num]):
block_num = self.depths[layer_num] - j - 1 block_num = self.depths[layer_num] - j - 1
tag = 'levels.{}.blocks.{}.'.format(layer_num, block_num) tag = 'levels.{}.blocks.{}.'.format(layer_num, block_num)
decay = 1.0 * (decay_ratio**idx) decay = 1.0 * (decay_ratio ** idx)
lr_ratios[tag] = decay lr_ratios[tag] = decay
idx += 1 idx += 1
# patch_embed (before stage-1) # patch_embed (before stage-1)
......
...@@ -129,11 +129,11 @@ def set_weight_decay_and_lr( ...@@ -129,11 +129,11 @@ def set_weight_decay_and_lr(
if lr_layer_decay: if lr_layer_decay:
print('layer-wise lr decay is used !') print('layer-wise lr decay is used !')
assert hasattr(model, 'lr_decay_keywards') assert hasattr(model, 'lr_decay_keywords')
lr_ratio_keywards = model.lr_decay_keywards(lr_layer_decay_ratio) lr_ratio_keywords = model.lr_decay_keywords(lr_layer_decay_ratio)
# 2. check lr # 2. check lr
ratio = check_keywords_in_dict(name, lr_ratio_keywards) ratio = check_keywords_in_dict(name, lr_ratio_keywords)
if ratio is not None: if ratio is not None:
lr = ratio * base_lr lr = ratio * base_lr
else: else:
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment