Commit 661553f6 authored by Mostofa Patwary

made more generic, aligned with other tasks

parent 8b04e0e4
@@ -12,7 +12,9 @@ CHECKPOINT_PATH=<Specify path of pretrained ICT model>
 QA_FILE=<Path of the natural question test dataset>
-python tasks/orqa/evaluate_orqa.py \
+python tasks/main.py \
+    --task ICT-ZEROSHOT-NQ \
     --tokenizer-type BertWordPieceLowerCase \
     --num-layers 12 \
     --hidden-size 768 \
     --num-attention-heads 12 \
......
@@ -62,6 +62,8 @@ if __name__ == '__main__':
         from glue.finetune import main
     elif args.task in ['LAMBADA', 'WIKITEXT103']:
         from zeroshot_gpt.evaluate import main
+    elif args.task in ['ICT-ZEROSHOT-NQ']:
+        from orqa.evaluate_orqa import main
     else:
         raise NotImplementedError('Task {} is not implemented.'.format(
             args.task))
......
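For context, the new ICT-ZEROSHOT-NQ branch follows the same shape as the existing GLUE and zero-shot GPT branches: tasks/main.py inspects --task, imports the matching task module's main(), and calls it. The sketch below reproduces only that selection pattern in a self-contained, runnable form; the run_* handlers, the 'GLUE-EXAMPLE' task name, and the argparse plumbing are illustrative placeholders, not Megatron's actual code.

# Minimal, self-contained sketch of the dispatch pattern used in tasks/main.py.
# The run_* handlers and the 'GLUE-EXAMPLE' task name are placeholders.
import argparse


def run_glue():
    print("would run GLUE finetuning")


def run_zeroshot_gpt():
    print("would run LAMBADA / WIKITEXT103 zero-shot evaluation")


def run_orqa():
    print("would run ICT-ZEROSHOT-NQ retrieval evaluation")


def dispatch():
    parser = argparse.ArgumentParser()
    parser.add_argument('--task', required=True)
    # parse_known_args stands in for Megatron's own argument handling here,
    # so task-specific flags are ignored by the sketch rather than rejected.
    args, _ = parser.parse_known_args()

    if args.task in ['GLUE-EXAMPLE']:
        main = run_glue
    elif args.task in ['LAMBADA', 'WIKITEXT103']:
        main = run_zeroshot_gpt
    elif args.task in ['ICT-ZEROSHOT-NQ']:
        main = run_orqa
    else:
        raise NotImplementedError('Task {} is not implemented.'.format(
            args.task))
    main()


if __name__ == '__main__':
    dispatch()

Invoked with --task ICT-ZEROSHOT-NQ, the sketch selects run_orqa; any other string falls through to the NotImplementedError branch, matching the behavior shown in the hunk above.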
@@ -18,9 +18,6 @@
 import os
 import sys
-sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
-                os.path.join(os.path.pardir, os.path.pardir))))
 from megatron import get_args
 from megatron.initialize import initialize_megatron
@@ -30,8 +27,7 @@ def main():
     """
     Main program
     """
-    initialize_megatron(extra_args_provider=None,
-                        args_defaults={'tokenizer_type': 'BertWordPieceLowerCase'})
     args = get_args()
     # Set up the model and evaluator
......
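With this change a task module no longer needs the sys.path patch or its own initialize_megatron() call: tasks/main.py owns initialization, and settings such as --tokenizer-type now arrive on the command line, as the README hunk above shows. The skeleton below is a hypothetical illustration of that division of responsibility; it assumes Megatron is installed and already initialized by the caller, and DummyEvaluator is a placeholder, not the real ORQA evaluator.

# Hypothetical skeleton of a task module under this layout. It assumes
# tasks/main.py has already called initialize_megatron() before importing
# and invoking main(); DummyEvaluator stands in for the actual ORQA
# evaluation code.
from megatron import get_args


class DummyEvaluator:
    """Placeholder for the task-specific evaluator."""

    def __init__(self, args):
        self.args = args

    def evaluate(self):
        print('evaluating with tokenizer type:', self.args.tokenizer_type)


def main():
    """Main program: no initialize_megatron() call here -- initialization
    is owned by the generic entry point."""
    args = get_args()

    # Set up the model and evaluator
    evaluator = DummyEvaluator(args)
    evaluator.evaluate()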