# coding=utf-8
# Copyright 2019 HuggingFace Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import copy
import json
import os
import shutil
import sys
import tempfile
import unittest
import unittest.mock
from pathlib import Path

from huggingface_hub import Repository, delete_repo, login
from requests.exceptions import HTTPError
from transformers import AutoConfig, BertConfig, GPT2Config, is_torch_available
from transformers.configuration_utils import PretrainedConfig
from transformers.testing_utils import PASS, USER, is_staging_test


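# Make the repo's utils directory importable so the test fixtures
# (test_module.custom_configuration) used by the dynamic-config tests below resolve.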
sys.path.append(str(Path(__file__).parent.parent / "utils"))

from test_module.custom_configuration import CustomConfig  # noqa: E402


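# A non-default value for every common `PretrainedConfig` argument. Used by
# `ConfigTester.check_config_arguments_init`; `test_config_common_kwargs_is_complete`
# below makes sure this mapping stays exhaustive and free of default values.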
config_common_kwargs = {
    "return_dict": False,
    "output_hidden_states": True,
    "output_attentions": True,
    "torchscript": True,
    "torch_dtype": "float16",
    "use_bfloat16": True,
    "pruned_heads": {"a": 1},
    "tie_word_embeddings": False,
    "is_decoder": True,
    "cross_attention_hidden_size": 128,
    "add_cross_attention": True,
    "tie_encoder_decoder": True,
    "max_length": 50,
    "min_length": 3,
    "do_sample": True,
    "early_stopping": True,
    "num_beams": 3,
    "num_beam_groups": 3,
    "diversity_penalty": 0.5,
    "temperature": 2.0,
    "top_k": 10,
    "top_p": 0.7,
    "repetition_penalty": 0.8,
    "length_penalty": 0.8,
    "no_repeat_ngram_size": 5,
    "encoder_no_repeat_ngram_size": 5,
    "bad_words_ids": [1, 2, 3],
    "num_return_sequences": 3,
    "chunk_size_feed_forward": 5,
    "output_scores": True,
    "return_dict_in_generate": True,
    "forced_bos_token_id": 2,
    "forced_eos_token_id": 3,
    "remove_invalid_values": True,
    "architectures": ["BertModel"],
    "finetuning_task": "translation",
    "id2label": {0: "label"},
    "label2id": {"label": "0"},
    "tokenizer_class": "BertTokenizerFast",
    "prefix": "prefix",
    "bos_token_id": 6,
    "pad_token_id": 7,
    "eos_token_id": 8,
    "sep_token_id": 9,
    "decoder_start_token_id": 10,
    "task_specific_params": {"translation": "some_params"},
    "problem_type": "regression",
}


class ConfigTester(object):
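    """
    Helper attached to a model test case (`parent`): builds configs from `config_class`
    with the kwargs passed at construction and runs the common sanity checks on them.
    """
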
    def __init__(self, parent, config_class=None, has_text_modality=True, **kwargs):
        self.parent = parent
        self.config_class = config_class
        self.has_text_modality = has_text_modality
        self.inputs_dict = kwargs

    def create_and_test_config_common_properties(self):
        config = self.config_class(**self.inputs_dict)
        common_properties = ["hidden_size", "num_attention_heads", "num_hidden_layers"]

        # Add common fields for text models
        if self.has_text_modality:
            common_properties.extend(["vocab_size"])

        # Test that config has the common properties as getters
        for prop in common_properties:
            self.parent.assertTrue(hasattr(config, prop), msg=f"`{prop}` does not exist")

        # Test that config has the common properties as setters
        for idx, name in enumerate(common_properties):
            try:
                setattr(config, name, idx)
                self.parent.assertEqual(
                    getattr(config, name), idx, msg=f"`{name}` value {idx} expected, but was {getattr(config, name)}"
                )
            except NotImplementedError:
                # Some models might not be able to implement setters for common_properties
                # In that case, a NotImplementedError is raised
                pass

        # Test if config class can be called with Config(prop_name=..)
        for idx, name in enumerate(common_properties):
            try:
                config = self.config_class(**{name: idx})
                self.parent.assertEqual(
                    getattr(config, name), idx, msg=f"`{name}` value {idx} expected, but was {getattr(config, name)}"
                )
            except NotImplementedError:
                # Some models might not be able to implement setters for common_properties
                # In that case, a NotImplementedError is raised
                pass

    def create_and_test_config_to_json_string(self):
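        # The JSON string produced by `to_json_string` should contain every input kwarg unchanged.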
        config = self.config_class(**self.inputs_dict)
        obj = json.loads(config.to_json_string())
        for key, value in self.inputs_dict.items():
            self.parent.assertEqual(obj[key], value)

    def create_and_test_config_to_json_file(self):
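        # Writing the config to disk with `to_json_file` and reading it back with
        # `from_json_file` should yield an identical config.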
        config_first = self.config_class(**self.inputs_dict)

        with tempfile.TemporaryDirectory() as tmpdirname:
            json_file_path = os.path.join(tmpdirname, "config.json")
            config_first.to_json_file(json_file_path)
            config_second = self.config_class.from_json_file(json_file_path)

        self.parent.assertEqual(config_second.to_dict(), config_first.to_dict())

    def create_and_test_config_from_and_save_pretrained(self):
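        # `save_pretrained` followed by `from_pretrained` should round-trip the config.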
        config_first = self.config_class(**self.inputs_dict)

        with tempfile.TemporaryDirectory() as tmpdirname:
            config_first.save_pretrained(tmpdirname)
            config_second = self.config_class.from_pretrained(tmpdirname)

        self.parent.assertEqual(config_second.to_dict(), config_first.to_dict())

    def create_and_test_config_with_num_labels(self):
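        # `num_labels` should drive the size of the `id2label`/`label2id` maps, both at
        # init time and when reassigned afterwards.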
        config = self.config_class(**self.inputs_dict, num_labels=5)
        self.parent.assertEqual(len(config.id2label), 5)
        self.parent.assertEqual(len(config.label2id), 5)

        config.num_labels = 3
        self.parent.assertEqual(len(config.id2label), 3)
        self.parent.assertEqual(len(config.label2id), 3)

    def check_config_can_be_init_without_params(self):
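        # Composite configs (e.g. encoder-decoder ones) are built from sub-configs and
        # cannot be instantiated without arguments, so they are skipped here.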
        if self.config_class.is_composition:
            return
        config = self.config_class()
        self.parent.assertIsNotNone(config)

    def check_config_arguments_init(self):
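        # Every key in `config_common_kwargs` must be stored verbatim on the config.
        # `torch_dtype` is special-cased: the string "float16" is expected to be
        # converted to `torch.float16` on the config.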
        kwargs = copy.deepcopy(config_common_kwargs)
        config = self.config_class(**kwargs)
        wrong_values = []
        for key, value in config_common_kwargs.items():
            if key == "torch_dtype":
                if not is_torch_available():
                    continue
                else:
                    import torch

                    if config.torch_dtype != torch.float16:
                        wrong_values.append(("torch_dtype", config.torch_dtype, torch.float16))
            elif getattr(config, key) != value:
                wrong_values.append((key, getattr(config, key), value))

        if len(wrong_values) > 0:
            errors = "\n".join([f"- {v[0]}: got {v[1]} instead of {v[2]}" for v in wrong_values])
            raise ValueError(f"The following keys were not properly set in the config:\n{errors}")

    def run_common_tests(self):
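        # Entry point for model test suites: run every common configuration check in sequence.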
        self.create_and_test_config_common_properties()
        self.create_and_test_config_to_json_string()
        self.create_and_test_config_to_json_file()
        self.create_and_test_config_from_and_save_pretrained()
        self.create_and_test_config_with_num_labels()
        self.check_config_can_be_init_without_params()
        self.check_config_arguments_init()


@is_staging_test
class ConfigPushToHubTester(unittest.TestCase):
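    """
    Staging-only tests (see `is_staging_test`): log in once for the class, push configs
    to the Hub, and clean up the created repos in `tearDownClass`.
    """
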
    @classmethod
    def setUpClass(cls):
        cls._token = login(username=USER, password=PASS)

    @classmethod
    def tearDownClass(cls):
        try:
            delete_repo(token=cls._token, name="test-config")
        except HTTPError:
            pass

        try:
            delete_repo(token=cls._token, name="test-config-org", organization="valid_org")
        except HTTPError:
            pass

        try:
            delete_repo(token=cls._token, name="test-dynamic-config")
        except HTTPError:
            pass

    def test_push_to_hub(self):
        config = BertConfig(
            vocab_size=99, hidden_size=32, num_hidden_layers=5, num_attention_heads=4, intermediate_size=37
        )
        with tempfile.TemporaryDirectory() as tmp_dir:
            config.save_pretrained(os.path.join(tmp_dir, "test-config"), push_to_hub=True, use_auth_token=self._token)

            new_config = BertConfig.from_pretrained(f"{USER}/test-config")
            for k, v in config.__dict__.items():
                if k != "transformers_version":
                    self.assertEqual(v, getattr(new_config, k))

    def test_push_to_hub_in_organization(self):
        config = BertConfig(
            vocab_size=99, hidden_size=32, num_hidden_layers=5, num_attention_heads=4, intermediate_size=37
        )

        with tempfile.TemporaryDirectory() as tmp_dir:
            config.save_pretrained(
                os.path.join(tmp_dir, "test-config-org"),
                push_to_hub=True,
                use_auth_token=self._token,
                organization="valid_org",
            )

            new_config = BertConfig.from_pretrained("valid_org/test-config-org")
            for k, v in config.__dict__.items():
                if k != "transformers_version":
                    self.assertEqual(v, getattr(new_config, k))

    def test_push_to_hub_dynamic_config(self):
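        # Push a config whose class lives outside `transformers` along with its code,
        # then reload it with `trust_remote_code=True`.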
        CustomConfig.register_for_auto_class()
        config = CustomConfig(attribute=42)

        with tempfile.TemporaryDirectory() as tmp_dir:
            repo = Repository(tmp_dir, clone_from=f"{USER}/test-dynamic-config", use_auth_token=self._token)
            config.save_pretrained(tmp_dir)

            # This has added the proper auto_map field to the config
            self.assertDictEqual(config.auto_map, {"AutoConfig": "custom_configuration.CustomConfig"})
            # The code has been copied from fixtures
            self.assertTrue(os.path.isfile(os.path.join(tmp_dir, "custom_configuration.py")))

            repo.push_to_hub()

        new_config = AutoConfig.from_pretrained(f"{USER}/test-dynamic-config", trust_remote_code=True)
        # Can't make an isinstance check: new_config comes from the CustomConfig class of a
        # dynamic module, not the CustomConfig imported here.
        self.assertEqual(new_config.__class__.__name__, "CustomConfig")
        self.assertEqual(new_config.attribute, 42)


class ConfigTestUtils(unittest.TestCase):
    def test_config_from_string(self):
        c = GPT2Config()

        # attempt to modify each of the int/float/bool/str config attributes and verify they were updated
        n_embd = c.n_embd + 1  # int
        resid_pdrop = c.resid_pdrop + 1.0  # float
        scale_attn_weights = not c.scale_attn_weights  # bool
        summary_type = c.summary_type + "foo"  # str
        c.update_from_string(
            f"n_embd={n_embd},resid_pdrop={resid_pdrop},scale_attn_weights={scale_attn_weights},summary_type={summary_type}"
        )
        self.assertEqual(n_embd, c.n_embd, "mismatch for key: n_embd")
        self.assertEqual(resid_pdrop, c.resid_pdrop, "mismatch for key: resid_pdrop")
        self.assertEqual(scale_attn_weights, c.scale_attn_weights, "mismatch for key: scale_attn_weights")
        self.assertEqual(summary_type, c.summary_type, "mismatch for key: summary_type")

    def test_config_common_kwargs_is_complete(self):
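        # Guard test: every attribute of a bare `PretrainedConfig` (minus the listed
        # exceptions) must appear in `config_common_kwargs` with a non-default value,
        # otherwise `check_config_arguments_init` would silently lose coverage.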
        base_config = PretrainedConfig()
        missing_keys = [key for key in base_config.__dict__ if key not in config_common_kwargs]
        # If this part of the test fails, you have arguments to add in `config_common_kwargs` above.
        self.assertListEqual(missing_keys, ["is_encoder_decoder", "_name_or_path", "transformers_version"])
        keys_with_defaults = [key for key, value in config_common_kwargs.items() if value == getattr(base_config, key)]
        if len(keys_with_defaults) > 0:
            raise ValueError(
                "The following keys are set with the default values in `test_configuration_common.config_common_kwargs` "
                f"pick another value for them: {', '.join(keys_with_defaults)}."
            )


class ConfigurationVersioningTest(unittest.TestCase):
    def test_local_versioning(self):
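        # `configuration_files` lists version-suffixed candidate file names; loading picks
        # the newest one whose version does not exceed the installed Transformers version,
        # falling back to the plain config.json.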
        configuration = AutoConfig.from_pretrained("bert-base-cased")
        configuration.configuration_files = ["config.4.0.0.json"]

        with tempfile.TemporaryDirectory() as tmp_dir:
            configuration.save_pretrained(tmp_dir)
            configuration.hidden_size = 2
            with open(os.path.join(tmp_dir, "config.4.0.0.json"), "w") as f:
                json.dump(configuration.to_dict(), f)

            # This should pick the new configuration file as the version of Transformers is > 4.0.0
            new_configuration = AutoConfig.from_pretrained(tmp_dir)
            self.assertEqual(new_configuration.hidden_size, 2)

            # Will need to be adjusted if we reach v42 and this test is still here.
            # Should pick the old configuration file as the version of Transformers is < v42.0.0
            configuration.configuration_files = ["config.42.0.0.json"]
            configuration.hidden_size = 768
            configuration.save_pretrained(tmp_dir)
            shutil.move(os.path.join(tmp_dir, "config.4.0.0.json"), os.path.join(tmp_dir, "config.42.0.0.json"))
            new_configuration = AutoConfig.from_pretrained(tmp_dir)
            self.assertEqual(new_configuration.hidden_size, 768)

    def test_repo_versioning_before(self):
        # This repo has two configuration files: a default one and, for v4.0.0 and above,
        # one with a different hidden size.
        repo = "hf-internal-testing/test-two-configs"

        import transformers as new_transformers

        new_transformers.configuration_utils.__version__ = "v4.0.0"
        new_configuration = new_transformers.models.auto.AutoConfig.from_pretrained(repo)
        self.assertEqual(new_configuration.hidden_size, 2)

        # Test an older version by monkey-patching the version in the module where it's used.
        import transformers as old_transformers

        old_transformers.configuration_utils.__version__ = "v3.0.0"
        old_configuration = old_transformers.models.auto.AutoConfig.from_pretrained(repo)
        self.assertEqual(old_configuration.hidden_size, 768)