from ..bart.modeling_bart import BartForConditionalGeneration
from .configuration_mbart import MBartConfig


# Names used by the shared documentation templates/decorators to render
# example snippets for this model family.
_CONFIG_FOR_DOC = "MBartConfig"
_TOKENIZER_FOR_DOC = "MBartTokenizer"

# Canonical pretrained checkpoints published on the HuggingFace Hub.
MBART_PRETRAINED_MODEL_ARCHIVE_LIST = [
    "facebook/mbart-large-cc25",
    "facebook/mbart-large-en-ro",
    # See all multilingual BART models at https://huggingface.co/models?filter=mbart
]


class MBartForConditionalGeneration(BartForConditionalGeneration):
    r"""
    This class overrides :class:`~transformers.BartForConditionalGeneration`. Please check the superclass for the
    appropriate documentation alongside usage examples.

    Examples::

        >>> from transformers import MBartForConditionalGeneration, MBartTokenizer
        >>> model = MBartForConditionalGeneration.from_pretrained("facebook/mbart-large-en-ro")
        >>> tokenizer = MBartTokenizer.from_pretrained("facebook/mbart-large-en-ro")
        >>> article = "UN Chief Says There Is No Military Solution in Syria"
        >>> batch = tokenizer.prepare_seq2seq_batch(src_texts=[article], return_tensors="pt")
        >>> translated_tokens = model.generate(**batch)
        >>> translation = tokenizer.batch_decode(translated_tokens, skip_special_tokens=True)[0]
        >>> assert translation == "Şeful ONU declară că nu există o soluţie militară în Siria"
    """

    # Identifier used for auto-model / auto-config resolution.
    model_type = "mbart"
    config_class = MBartConfig

    # Positional-embedding weights are excluded both from the "missing keys"
    # warning at load time and from serialization at save time — presumably
    # because they are recomputed rather than learned (TODO confirm against
    # the BART/MBart position-embedding implementation).
    _keys_to_ignore_on_load_missing = [
        "model.encoder.embed_positions.weight",
        "model.decoder.embed_positions.weight",
    ]
    _keys_to_ignore_on_save = [
        "model.encoder.embed_positions.weight",
        "model.decoder.embed_positions.weight",
    ]