#!/usr/bin/env python
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Usage:
# ./gen-card-allenai-wmt19.py

import os
from pathlib import Path

def write_model_card(model_card_dir, src_lang, tgt_lang, model_name):

    texts = {
        "en": "Machine learning is great, isn't it?",
        "ru": "袦邪褕懈薪薪芯械 芯斜褍褔械薪懈械 - 褝褌芯 蟹写芯褉芯胁芯, 薪械 褌邪泻 谢懈?",
        "de": "Maschinelles Lernen ist gro脽artig, nicht wahr?",
    }

    # BLEU scores as follows:
    # "pair": [fairseq, transformers]
    # (fairseq scores were not measured here, hence the 0 placeholders;
    # only the transformers score is used in the generated card)
    scores = {
        "wmt19-de-en-6-6-base": [0, 38.37],
        "wmt19-de-en-6-6-big": [0, 39.90],
    }
    pair = f"{src_lang}-{tgt_lang}"

    readme = f"""
---
language:
- {src_lang}
- {tgt_lang}
thumbnail:
tags:
- translation
- wmt19
- allenai
license: apache-2.0
datasets:
- wmt19
metrics:
- bleu
---

# FSMT

## Model description

This is a ported version of fairseq-based [wmt19 transformer](https://github.com/jungokasai/deep-shallow/) for {src_lang}-{tgt_lang}.

For more details, please see [Deep Encoder, Shallow Decoder: Reevaluating the Speed-Quality Tradeoff in Machine Translation](https://arxiv.org/abs/2006.10369).

Two models are available:

* [wmt19-de-en-6-6-big](https://huggingface.co/allenai/wmt19-de-en-6-6-big)
* [wmt19-de-en-6-6-base](https://huggingface.co/allenai/wmt19-de-en-6-6-base)

## Intended uses & limitations

#### How to use

```python
from transformers import FSMTForConditionalGeneration, FSMTTokenizer
mname = "allenai/{model_name}"
tokenizer = FSMTTokenizer.from_pretrained(mname)
model = FSMTForConditionalGeneration.from_pretrained(mname)

input_text = "{texts[src_lang]}"
input_ids = tokenizer.encode(input_text, return_tensors="pt")
outputs = model.generate(input_ids)
decoded = tokenizer.decode(outputs[0], skip_special_tokens=True)
print(decoded) # {texts[tgt_lang]}

```
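
To tweak decoding, you can pass the usual generation arguments to `generate`; for example, beam search with 5 beams (the beam size used in the evaluation below, shown purely as an illustration):

```python
outputs = model.generate(input_ids, num_beams=5)
```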

#### Limitations and bias


## Training data

Pretrained weights were left identical to the original model released by allenai. For more details, please see the [paper](https://arxiv.org/abs/2006.10369).

## Eval results

Here are the BLEU scores:

model   |  transformers
-------|---------
{model_name}  |  {scores[model_name][1]}

The score was calculated using this code:

```bash
git clone https://github.com/huggingface/transformers
cd transformers
export PAIR={pair}
export DATA_DIR=data/$PAIR
export SAVE_DIR=data/$PAIR
export BS=8
export NUM_BEAMS=5
mkdir -p $DATA_DIR
sacrebleu -t wmt19 -l $PAIR --echo src > $DATA_DIR/val.source
sacrebleu -t wmt19 -l $PAIR --echo ref > $DATA_DIR/val.target
echo $PAIR
PYTHONPATH="src:examples/seq2seq" python examples/seq2seq/run_eval.py allenai/{model_name} $DATA_DIR/val.source $SAVE_DIR/test_translations.txt --reference_path $DATA_DIR/val.target --score_path $SAVE_DIR/test_bleu.json --bs $BS --task translation --num_beams $NUM_BEAMS
```
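
The BLEU computation itself can also be reproduced with sacrebleu's Python API (a minimal sketch, assuming the files produced by the commands above):

```python
import sacrebleu

# Read the system output and the reference, one sentence per line.
translations = [line.rstrip("\n") for line in open("data/{pair}/test_translations.txt")]
references = [line.rstrip("\n") for line in open("data/{pair}/val.target")]
print(sacrebleu.corpus_bleu(translations, [references]).score)
```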

## Data Sources

- [training, etc.](http://www.statmt.org/wmt19/)
- [test set](http://matrix.statmt.org/test_sets/newstest2019.tgz?1556572561)


### BibTeX entry and citation info

```
@misc{{kasai2020deep,
    title={{Deep Encoder, Shallow Decoder: Reevaluating the Speed-Quality Tradeoff in Machine Translation}},
    author={{Jungo Kasai and Nikolaos Pappas and Hao Peng and James Cross and Noah A. Smith}},
    year={{2020}},
    eprint={{2006.10369}},
    archivePrefix={{arXiv}},
    primaryClass={{cs.CL}}
}}
```

"""
    model_card_dir.mkdir(parents=True, exist_ok=True)
    path = os.path.join(model_card_dir, "README.md")
    print(f"Generating {path}")
    with open(path, "w", encoding="utf-8") as f:
        f.write(readme)
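

# Optional sanity check, not part of the original generator: a minimal sketch
# (assuming PyYAML is installed) that verifies the card's YAML front matter
# parses. Call it by hand on a generated card, e.g. check_front_matter(path).
def check_front_matter(readme_path):
    import yaml

    text = Path(readme_path).read_text(encoding="utf-8")
    # The front matter sits between the first two "---" markers at the top.
    yaml.safe_load(text.split("---")[1])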

# resolve the project root relative to this script's location
repo_dir = Path(__file__).resolve().parent.parent.parent
model_cards_dir = repo_dir / "model_cards"

for model_name in ["wmt19-de-en-6-6-base", "wmt19-de-en-6-6-big"]:
    model_card_dir = model_cards_dir / "allenai" / model_name
    write_model_card(model_card_dir, src_lang="de", tgt_lang="en", model_name=model_name)