Commit 4604b4a5 authored by Myle Ott's avatar Myle Ott Committed by Facebook Github Bot
Browse files

Don't load training set twice

Summary: Pull Request resolved: https://github.com/fairinternal/fairseq-py/pull/595

Differential Revision: D15428242

Pulled By: myleott

fbshipit-source-id: 3cec83a2353498a4802398eba8bcb1aefaf6d5c4
parent ef62ec0a
......@@ -28,9 +28,9 @@ class FairseqLamb(FairseqOptimizer):
"""Add optimizer-specific arguments to the parser."""
# fmt: off
parser.add_argument('--lamb-betas', default='(0.9, 0.999)', metavar='B',
help='betas for Adam optimizer')
help='betas for LAMB optimizer')
parser.add_argument('--lamb-eps', type=float, default=1e-8, metavar='D',
help='epsilon for Adam optimizer')
help='epsilon for LAMB optimizer')
parser.add_argument('--weight-decay', '--wd', default=0.0, type=float, metavar='WD',
help='weight decay')
# fmt: on
......
......@@ -41,8 +41,7 @@ def main(args, init_distributed=False):
# Setup task, e.g., translation, language modeling, etc.
task = tasks.setup_task(args)
# Load dataset splits
task.load_dataset(args.train_subset, combine=True, epoch=0)
# Load valid dataset (we load training data below, based on the latest checkpoint)
for valid_sub_split in args.valid_subset.split(','):
task.load_dataset(valid_sub_split, combine=True, epoch=0)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment