# coding=utf-8
# Copyright 2020, The RAG Authors and The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" RAG model configuration"""

import copy

from ...configuration_utils import PretrainedConfig
from ...utils import add_start_docstrings


# Docstring template injected onto `RagConfig` via `@add_start_docstrings` below.
RAG_CONFIG_DOC = r"""
    [`RagConfig`] stores the configuration of a *RagModel*. Configuration objects inherit from [`PretrainedConfig`] and
    can be used to control the model outputs. Read the documentation from [`PretrainedConfig`] for more information.

    Args:
        title_sep (`str`, *optional*, defaults to `" / "`):
            Separator inserted between the title and the text of the retrieved document when calling [`RagRetriever`].
        doc_sep (`str`, *optional*, defaults to `" // "`):
            Separator inserted between the text of the retrieved document and the original input when calling
            [`RagRetriever`].
        n_docs (`int`, *optional*, defaults to 5):
            Number of documents to retrieve.
        max_combined_length (`int`, *optional*, defaults to 300):
            Max length of contextualized input returned by [`~RagRetriever.__call__`].
        retrieval_vector_size (`int`, *optional*, defaults to 768):
            Dimensionality of the document embeddings indexed by [`RagRetriever`].
        retrieval_batch_size (`int`, *optional*, defaults to 8):
            Retrieval batch size, defined as the number of queries issued concurrently to the faiss index encapsulated
            by [`RagRetriever`].
        dataset (`str`, *optional*, defaults to `"wiki_dpr"`):
            A dataset identifier of the indexed dataset in HuggingFace Datasets (list all available datasets and ids
            using `datasets.list_datasets()`).
        dataset_split (`str`, *optional*, defaults to `"train"`):
            Which split of the `dataset` to load.
        index_name (`str`, *optional*, defaults to `"compressed"`):
            The index name of the index associated with the `dataset`. One can choose between `"legacy"`, `"exact"` and
            `"compressed"`.
        index_path (`str`, *optional*):
            The path to the serialized faiss index on disk.
        passages_path (`str`, *optional*):
            A path to text passages compatible with the faiss index. Required if using
            [`~models.rag.retrieval_rag.LegacyIndex`]
        use_dummy_dataset (`bool`, *optional*, defaults to `False`):
            Whether to load a "dummy" variant of the dataset specified by `dataset`.
        label_smoothing (`float`, *optional*, defaults to 0.0):
            Only relevant if `return_loss` is set to `True`. Controls the `epsilon` parameter value for label smoothing
            in the loss calculation. If set to 0, no label smoothing is performed.
        do_marginalize (`bool`, *optional*, defaults to `False`):
            If `True`, the logits are marginalized over all documents by making use of
            `torch.nn.functional.log_softmax`.
        reduce_loss (`bool`, *optional*, defaults to `False`):
            Whether or not to reduce the NLL loss using the `torch.Tensor.sum` operation.
        do_deduplication (`bool`, *optional*, defaults to `True`):
            Whether or not to deduplicate the generations from different context documents for a given input. Has to be
            set to `False` if used while training with distributed backend.
        exclude_bos_score (`bool`, *optional*, defaults to `False`):
            Whether or not to disregard the BOS token when computing the loss.
        output_retrieved (`bool`, *optional*, defaults to `False`):
            If set to `True`, `retrieved_doc_embeds`, `retrieved_doc_ids`, `context_input_ids` and
            `context_attention_mask` are returned. See returned tensors for more detail.
        use_cache (`bool`, *optional*, defaults to `True`):
            Whether or not the model should return the last key/values attentions (not used by all models).
        forced_eos_token_id (`int`, *optional*):
            The id of the token to force as the last generated token when `max_length` is reached. Usually set to
            `eos_token_id`.
"""


@add_start_docstrings(RAG_CONFIG_DOC)
class RagConfig(PretrainedConfig):
    """Configuration for a composite RAG (retrieval-augmented generation) model.

    Holds retrieval and loss hyper-parameters together with two nested
    sub-configurations, ``question_encoder`` and ``generator``, both of which
    are *required* keyword arguments at construction time.
    """

    model_type = "rag"
    # Marks this config as composed of nested sub-configs rather than a flat one.
    is_composition = True

    def __init__(
        self,
        vocab_size=None,
        is_encoder_decoder=True,
        prefix=None,
        bos_token_id=None,
        pad_token_id=None,
        eos_token_id=None,
        decoder_start_token_id=None,
        title_sep=" / ",
        doc_sep=" // ",
        n_docs=5,
        max_combined_length=300,
        retrieval_vector_size=768,
        retrieval_batch_size=8,
        dataset="wiki_dpr",
        dataset_split="train",
        index_name="compressed",
        index_path=None,
        passages_path=None,
        use_dummy_dataset=False,
        reduce_loss=False,
        label_smoothing=0.0,
        do_deduplication=True,
        exclude_bos_score=False,
        do_marginalize=False,
        output_retrieved=False,
        use_cache=True,
        forced_eos_token_id=None,
        **kwargs
    ):
        super().__init__(
            bos_token_id=bos_token_id,
            pad_token_id=pad_token_id,
            eos_token_id=eos_token_id,
            decoder_start_token_id=decoder_start_token_id,
            forced_eos_token_id=forced_eos_token_id,
            is_encoder_decoder=is_encoder_decoder,
            prefix=prefix,
            vocab_size=vocab_size,
            **kwargs,
        )
        # Both sub-configs are mandatory. Raise explicitly instead of using
        # `assert`, which is silently stripped when Python runs with -O.
        if "question_encoder" not in kwargs or "generator" not in kwargs:
            raise ValueError("Config has to be initialized with question_encoder and generator config")
        question_encoder_config = kwargs.pop("question_encoder")
        question_encoder_model_type = question_encoder_config.pop("model_type")
        decoder_config = kwargs.pop("generator")
        decoder_model_type = decoder_config.pop("model_type")

        # Imported lazily to avoid a circular import with the auto-config module.
        from ..auto.configuration_auto import AutoConfig

        self.question_encoder = AutoConfig.for_model(question_encoder_model_type, **question_encoder_config)
        self.generator = AutoConfig.for_model(decoder_model_type, **decoder_config)

        # Loss-related options.
        self.reduce_loss = reduce_loss
        self.label_smoothing = label_smoothing
        self.exclude_bos_score = exclude_bos_score
        self.do_marginalize = do_marginalize

        # Retrieved-document formatting options.
        self.title_sep = title_sep
        self.doc_sep = doc_sep
        self.n_docs = n_docs
        self.max_combined_length = max_combined_length

        # Dataset / index selection for the retriever.
        self.dataset = dataset
        self.dataset_split = dataset_split
        self.index_name = index_name

        self.retrieval_vector_size = retrieval_vector_size
        self.retrieval_batch_size = retrieval_batch_size
        self.passages_path = passages_path
        self.index_path = index_path
        self.use_dummy_dataset = use_dummy_dataset

        self.output_retrieved = output_retrieved

        self.do_deduplication = do_deduplication

        self.use_cache = use_cache

        # `self.forced_eos_token_id` was set by `super().__init__` above; if the
        # caller gave none, fall back to the generator's value when it has one.
        if self.forced_eos_token_id is None:
            self.forced_eos_token_id = getattr(self.generator, "forced_eos_token_id", None)

    @classmethod
    def from_question_encoder_generator_configs(
        cls, question_encoder_config: PretrainedConfig, generator_config: PretrainedConfig, **kwargs
    ) -> PretrainedConfig:
        r"""
        Instantiate a [`RagConfig`] (or a derived class) from a pre-trained question encoder model configuration and
        generator model configuration.

        Returns:
            [`RagConfig`]: An instance of a configuration object
        """
        return cls(question_encoder=question_encoder_config.to_dict(), generator=generator_config.to_dict(), **kwargs)

    def to_dict(self):
        """
        Serializes this instance to a Python dictionary. Override the default [`~PretrainedConfig.to_dict`].

        Returns:
            `Dict[str, any]`: Dictionary of all the attributes that make up this configuration instance,
        """
        output = copy.deepcopy(self.__dict__)
        # The nested configs live as objects on the instance; replace the deep
        # copies with their own recursive dict serializations.
        output["question_encoder"] = self.question_encoder.to_dict()
        output["generator"] = self.generator.to_dict()
        output["model_type"] = self.__class__.model_type
        return output