"...git@developer.sourcefind.cn:OpenDAS/torchaudio.git" did not exist on "cfa5a383d5edcd0a5d7ee4fbd0fec2a077c9e6f5"
Commit 01ff4f82 authored by Matej Svejda's avatar Matej Svejda
Browse files

learning rate problems in run_classifier.py

parent 0a9d7c7e
...@@ -296,11 +296,6 @@ def accuracy(out, labels): ...@@ -296,11 +296,6 @@ def accuracy(out, labels):
outputs = np.argmax(out, axis=1) outputs = np.argmax(out, axis=1)
return np.sum(outputs == labels) return np.sum(outputs == labels)
def warmup_linear(x, warmup=0.002):
    """Linear warmup / linear decay schedule multiplier for the learning rate.

    Args:
        x: training progress, expected in [0, 1] (e.g. global_step / t_total).
        warmup: fraction of training spent ramping the LR up from 0 to its
            peak value.

    Returns:
        A scale in [0, 1]: ramps linearly 0 -> 1 over the warmup fraction,
        then decays linearly 1 -> 0 over the remainder of training.
    """
    if x < warmup:
        # Linear ramp-up: 0 at x == 0, 1 just as x reaches `warmup`.
        return x / warmup
    # Linear decay, clamped at 0 so the scale (and therefore the learning
    # rate) never goes negative when x overshoots 1.0 — this can happen when
    # global_step exceeds the estimated t_total (e.g. with gradient
    # accumulation rounding).
    return max(0.0, 1.0 - x)
def main(): def main():
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
...@@ -447,7 +442,7 @@ def main(): ...@@ -447,7 +442,7 @@ def main():
if args.do_train: if args.do_train:
train_examples = processor.get_train_examples(args.data_dir) train_examples = processor.get_train_examples(args.data_dir)
num_train_steps = int( num_train_steps = int(
len(train_examples) / args.train_batch_size / args.gradient_accumulation_steps * args.num_train_epochs) len(train_examples) / args.train_batch_size * args.num_train_epochs)
# Prepare model # Prepare model
model = BertForSequenceClassification.from_pretrained(args.bert_model, model = BertForSequenceClassification.from_pretrained(args.bert_model,
...@@ -541,10 +536,6 @@ def main(): ...@@ -541,10 +536,6 @@ def main():
nb_tr_examples += input_ids.size(0) nb_tr_examples += input_ids.size(0)
nb_tr_steps += 1 nb_tr_steps += 1
if (step + 1) % args.gradient_accumulation_steps == 0: if (step + 1) % args.gradient_accumulation_steps == 0:
# modify learning rate with special warm up BERT uses
lr_this_step = args.learning_rate * warmup_linear(global_step/t_total, args.warmup_proportion)
for param_group in optimizer.param_groups:
param_group['lr'] = lr_this_step
optimizer.step() optimizer.step()
optimizer.zero_grad() optimizer.zero_grad()
global_step += 1 global_step += 1
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment