Commit bc919276 authored by Myle Ott, committed by Facebook Github Bot

Add test for mixture of experts

Summary: Pull Request resolved: https://github.com/pytorch/fairseq/pull/543

Differential Revision: D14259481

Pulled By: myleott

fbshipit-source-id: fcb0a150b8e851cf86ea5ed1f083f56e1600588e
parent 139e3a3c
@@ -43,7 +43,7 @@ $ fairseq-generate data-bin/wmt17_en_de \
     --task translation_moe \
     --method hMoElp --mean-pool-gating-network \
     --num-experts 3 \
-    --gen-expert 0 \
+    --gen-expert 0
 ```
 ## Evaluate
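For reference, `--gen-expert` selects which of the `--num-experts` latent experts is used for decoding; the README command above decodes with expert 0. A minimal sketch for comparing all three experts, assuming the same data-bin directory and a `--path checkpoints/checkpoint_best.pt` flag that is elided from the hunk above:

```bash
# Sketch: decode the test set once per expert (checkpoint path is assumed).
for expert in 0 1 2; do
  fairseq-generate data-bin/wmt17_en_de \
    --path checkpoints/checkpoint_best.pt \
    --task translation_moe \
    --method hMoElp --mean-pool-gating-network \
    --num-experts 3 \
    --gen-expert $expert > gen.expert$expert.out
done
```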
@@ -160,6 +160,25 @@ class TestTranslation(unittest.TestCase):
                 ])
                 generate_main(data_dir)
 
+    def test_mixture_of_experts(self):
+        with contextlib.redirect_stdout(StringIO()):
+            with tempfile.TemporaryDirectory('test_moe') as data_dir:
+                create_dummy_data(data_dir)
+                preprocess_translation_data(data_dir)
+                train_translation_model(data_dir, 'transformer_iwslt_de_en', [
+                    '--task', 'translation_moe',
+                    '--method', 'hMoElp',
+                    '--mean-pool-gating-network',
+                    '--num-experts', '3',
+                ])
+                generate_main(data_dir, [
+                    '--task', 'translation_moe',
+                    '--method', 'hMoElp',
+                    '--mean-pool-gating-network',
+                    '--num-experts', '3',
+                    '--gen-expert', '0'
+                ])
+
 class TestStories(unittest.TestCase):
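As a usage note, the new test can be run in isolation. Assuming it lands in fairseq's tests/test_binaries.py (the file that defines `TestTranslation`), the standard unittest runner works:

```bash
# Run only the new mixture-of-experts test (module path is assumed).
python -m unittest tests.test_binaries.TestTranslation.test_mixture_of_experts -v
```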