# coding=utf-8
# Copyright 2019 HuggingFace Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import copy
import json
import os
import shutil
import sys
import tempfile
import unittest
import unittest.mock as mock
from pathlib import Path

from huggingface_hub import HfFolder, Repository, delete_repo, set_access_token
from requests.exceptions import HTTPError
from transformers import AutoConfig, BertConfig, GPT2Config, is_torch_available
from transformers.configuration_utils import PretrainedConfig
from transformers.testing_utils import TOKEN, USER, is_staging_test


sys.path.append(str(Path(__file__).parent.parent / "utils"))

from test_module.custom_configuration import CustomConfig  # noqa E402


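# A non-default value for (almost) every argument accepted by `PretrainedConfig.__init__`.
# `check_config_arguments_init` and `test_config_common_kwargs_is_complete` below rely on
# every value here differing from the corresponding default.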
config_common_kwargs = {
    "return_dict": False,
    "output_hidden_states": True,
    "output_attentions": True,
    "torchscript": True,
    "torch_dtype": "float16",
    "use_bfloat16": True,
    "tf_legacy_loss": True,
    "pruned_heads": {"a": 1},
    "tie_word_embeddings": False,
    "is_decoder": True,
    "cross_attention_hidden_size": 128,
    "add_cross_attention": True,
    "tie_encoder_decoder": True,
    "max_length": 50,
    "min_length": 3,
    "do_sample": True,
    "early_stopping": True,
    "num_beams": 3,
    "num_beam_groups": 3,
    "diversity_penalty": 0.5,
    "temperature": 2.0,
    "top_k": 10,
    "top_p": 0.7,
    "typical_p": 0.2,
    "repetition_penalty": 0.8,
    "length_penalty": 0.8,
    "no_repeat_ngram_size": 5,
    "encoder_no_repeat_ngram_size": 5,
    "bad_words_ids": [1, 2, 3],
    "num_return_sequences": 3,
    "chunk_size_feed_forward": 5,
    "output_scores": True,
    "return_dict_in_generate": True,
    "forced_bos_token_id": 2,
    "forced_eos_token_id": 3,
    "remove_invalid_values": True,
    "architectures": ["BertModel"],
    "finetuning_task": "translation",
    "id2label": {0: "label"},
    "label2id": {"label": "0"},
    "tokenizer_class": "BertTokenizerFast",
    "prefix": "prefix",
    "bos_token_id": 6,
    "pad_token_id": 7,
    "eos_token_id": 8,
    "sep_token_id": 9,
    "decoder_start_token_id": 10,
    "exponential_decay_length_penalty": (5, 1.01),
    "task_specific_params": {"translation": "some_params"},
    "problem_type": "regression",
}


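# Helper driven by the per-model test files: they instantiate it with their config class
# (e.g. `ConfigTester(self, config_class=BertConfig, hidden_size=37)`) and call
# `run_common_tests()` to exercise the configuration API shared by all models.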
class ConfigTester(object):
    def __init__(self, parent, config_class=None, has_text_modality=True, **kwargs):
        self.parent = parent
        self.config_class = config_class
        self.has_text_modality = has_text_modality
        self.inputs_dict = kwargs

    def create_and_test_config_common_properties(self):
        config = self.config_class(**self.inputs_dict)
        common_properties = ["hidden_size", "num_attention_heads", "num_hidden_layers"]

        # Add common fields for text models
        if self.has_text_modality:
            common_properties.extend(["vocab_size"])

        # Test that config has the common properties as getters
        for prop in common_properties:
            self.parent.assertTrue(hasattr(config, prop), msg=f"`{prop}` does not exist")

        # Test that config has the common properties as setter
        for idx, name in enumerate(common_properties):
            try:
                setattr(config, name, idx)
                self.parent.assertEqual(
                    getattr(config, name), idx, msg=f"`{name}` value {idx} expected, but was {getattr(config, name)}"
                )
            except NotImplementedError:
                # Some models might not be able to implement setters for common_properties
                # In that case, a NotImplementedError is raised
                pass

        # Test if config class can be called with Config(prop_name=..)
        for idx, name in enumerate(common_properties):
            try:
                config = self.config_class(**{name: idx})
                self.parent.assertEqual(
                    getattr(config, name), idx, msg=f"`{name}` value {idx} expected, but was {getattr(config, name)}"
                )
            except NotImplementedError:
                # Some models might not be able to implement setters for common_properties
                # In that case, a NotImplementedError is raised
                pass

    def create_and_test_config_to_json_string(self):
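        # Round-trip the config through `to_json_string` and check every input kwarg is preserved.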
        config = self.config_class(**self.inputs_dict)
        obj = json.loads(config.to_json_string())
        for key, value in self.inputs_dict.items():
            self.parent.assertEqual(obj[key], value)

    def create_and_test_config_to_json_file(self):
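        # Same round trip as above, but going through an actual JSON file on disk.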
        config_first = self.config_class(**self.inputs_dict)

        with tempfile.TemporaryDirectory() as tmpdirname:
            json_file_path = os.path.join(tmpdirname, "config.json")
            config_first.to_json_file(json_file_path)
            config_second = self.config_class.from_json_file(json_file_path)

        self.parent.assertEqual(config_second.to_dict(), config_first.to_dict())

    def create_and_test_config_from_and_save_pretrained(self):
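        # A `save_pretrained`/`from_pretrained` round trip should leave the config unchanged.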
        config_first = self.config_class(**self.inputs_dict)

        with tempfile.TemporaryDirectory() as tmpdirname:
            config_first.save_pretrained(tmpdirname)
            config_second = self.config_class.from_pretrained(tmpdirname)

        self.parent.assertEqual(config_second.to_dict(), config_first.to_dict())

    def create_and_test_config_with_num_labels(self):
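        # Setting `num_labels` (at init or afterwards) should resize `id2label` and `label2id`.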
        config = self.config_class(**self.inputs_dict, num_labels=5)
        self.parent.assertEqual(len(config.id2label), 5)
        self.parent.assertEqual(len(config.label2id), 5)

        config.num_labels = 3
        self.parent.assertEqual(len(config.id2label), 3)
        self.parent.assertEqual(len(config.label2id), 3)

    def check_config_can_be_init_without_params(self):
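        # Composite configs (config classes built from sub-configs) cannot be instantiated
        # without arguments, so they are skipped here.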
        if self.config_class.is_composition:
            return
        config = self.config_class()
        self.parent.assertIsNotNone(config)

    def check_config_arguments_init(self):
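        # Instantiate the config with every common kwarg and check each one is reflected on the
        # resulting object (`torch_dtype` needs special handling as it is stored as a dtype).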
        kwargs = copy.deepcopy(config_common_kwargs)
        config = self.config_class(**kwargs)
        wrong_values = []
        for key, value in config_common_kwargs.items():
            if key == "torch_dtype":
                if not is_torch_available():
                    continue
                else:
                    import torch

                    if config.torch_dtype != torch.float16:
                        wrong_values.append(("torch_dtype", config.torch_dtype, torch.float16))
            elif getattr(config, key) != value:
                wrong_values.append((key, getattr(config, key), value))

        if len(wrong_values) > 0:
            errors = "\n".join([f"- {v[0]}: got {v[1]} instead of {v[2]}" for v in wrong_values])
            raise ValueError(f"The following keys were not properly set in the config:\n{errors}")

    def run_common_tests(self):
        self.create_and_test_config_common_properties()
        self.create_and_test_config_to_json_string()
        self.create_and_test_config_to_json_file()
        self.create_and_test_config_from_and_save_pretrained()
        self.create_and_test_config_with_num_labels()
        self.check_config_can_be_init_without_params()
        self.check_config_arguments_init()


@is_staging_test
class ConfigPushToHubTester(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
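        # Store the staging token so the Hub calls made by these tests are authenticated.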
        cls._token = TOKEN
        set_access_token(TOKEN)
        HfFolder.save_token(TOKEN)

    @classmethod
    def tearDownClass(cls):
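        # Delete the repos created by the tests below, ignoring those that were never created.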
        try:
            delete_repo(token=cls._token, repo_id="test-config")
        except HTTPError:
            pass

        try:
            delete_repo(token=cls._token, repo_id="valid_org/test-config-org")
        except HTTPError:
            pass

        try:
            delete_repo(token=cls._token, repo_id="test-dynamic-config")
        except HTTPError:
            pass

    def test_push_to_hub(self):
        config = BertConfig(
            vocab_size=99, hidden_size=32, num_hidden_layers=5, num_attention_heads=4, intermediate_size=37
        )
        with tempfile.TemporaryDirectory() as tmp_dir:
            config.save_pretrained(os.path.join(tmp_dir, "test-config"), push_to_hub=True, use_auth_token=self._token)

            new_config = BertConfig.from_pretrained(f"{USER}/test-config")
            for k, v in config.__dict__.items():
                if k != "transformers_version":
                    self.assertEqual(v, getattr(new_config, k))

    def test_push_to_hub_in_organization(self):
        config = BertConfig(
            vocab_size=99, hidden_size=32, num_hidden_layers=5, num_attention_heads=4, intermediate_size=37
        )

        with tempfile.TemporaryDirectory() as tmp_dir:
            config.save_pretrained(
                os.path.join(tmp_dir, "test-config-org"),
                push_to_hub=True,
                use_auth_token=self._token,
                organization="valid_org",
            )

            new_config = BertConfig.from_pretrained("valid_org/test-config-org")
            for k, v in config.__dict__.items():
                if k != "transformers_version":
                    self.assertEqual(v, getattr(new_config, k))

    def test_push_to_hub_dynamic_config(self):
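        # Register the custom config on the auto classes so it is saved with an `auto_map`
        # entry and can be reloaded through `AutoConfig` with `trust_remote_code=True`.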
        CustomConfig.register_for_auto_class()
        config = CustomConfig(attribute=42)

        with tempfile.TemporaryDirectory() as tmp_dir:
            repo = Repository(tmp_dir, clone_from=f"{USER}/test-dynamic-config", use_auth_token=self._token)
            config.save_pretrained(tmp_dir)

            # This has added the proper auto_map field to the config
            self.assertDictEqual(config.auto_map, {"AutoConfig": "custom_configuration.CustomConfig"})
            # The code has been copied from fixtures
            self.assertTrue(os.path.isfile(os.path.join(tmp_dir, "custom_configuration.py")))

            repo.push_to_hub()

        new_config = AutoConfig.from_pretrained(f"{USER}/test-dynamic-config", trust_remote_code=True)
        # Can't make an isinstance check because the new_config is from the CustomConfig class of a dynamic module
        self.assertEqual(new_config.__class__.__name__, "CustomConfig")
        self.assertEqual(new_config.attribute, 42)


class ConfigTestUtils(unittest.TestCase):
    def test_config_from_string(self):
        c = GPT2Config()

        # attempt to modify each of int/float/bool/str config records and verify they were updated
        n_embd = c.n_embd + 1  # int
        resid_pdrop = c.resid_pdrop + 1.0  # float
        scale_attn_weights = not c.scale_attn_weights  # bool
        summary_type = c.summary_type + "foo"  # str
        c.update_from_string(
            f"n_embd={n_embd},resid_pdrop={resid_pdrop},scale_attn_weights={scale_attn_weights},summary_type={summary_type}"
        )
        self.assertEqual(n_embd, c.n_embd, "mismatch for key: n_embd")
        self.assertEqual(resid_pdrop, c.resid_pdrop, "mismatch for key: resid_pdrop")
        self.assertEqual(scale_attn_weights, c.scale_attn_weights, "mismatch for key: scale_attn_weights")
        self.assertEqual(summary_type, c.summary_type, "mismatch for key: summary_type")

    def test_config_common_kwargs_is_complete(self):
        base_config = PretrainedConfig()
        missing_keys = [key for key in base_config.__dict__ if key not in config_common_kwargs]
        # If this part of the test fails, you have arguments to add in config_common_kwargs above.
        self.assertListEqual(missing_keys, ["is_encoder_decoder", "_name_or_path", "transformers_version"])
        keys_with_defaults = [key for key, value in config_common_kwargs.items() if value == getattr(base_config, key)]
        if len(keys_with_defaults) > 0:
            raise ValueError(
                "The following keys are set with the default values in"
                " `test_configuration_common.config_common_kwargs` pick another value for them:"
                f" {', '.join(keys_with_defaults)}."
            )

    def test_cached_files_are_used_when_internet_is_down(self):
        # A mock response for an HTTP head request to emulate server down
        response_mock = mock.Mock()
        response_mock.status_code = 500
        response_mock.headers = []
        response_mock.raise_for_status.side_effect = HTTPError

        # Download this model to make sure it's in the cache.
        _ = BertConfig.from_pretrained("hf-internal-testing/tiny-random-bert")

        # Under the mock environment we get a 500 error when trying to reach the model.
        with mock.patch("transformers.utils.hub.requests.head", return_value=response_mock) as mock_head:
            _ = BertConfig.from_pretrained("hf-internal-testing/tiny-random-bert")
            # This check makes sure we actually called the fake head request
            mock_head.assert_called()


class ConfigurationVersioningTest(unittest.TestCase):
    def test_local_versioning(self):
        configuration = AutoConfig.from_pretrained("bert-base-cased")
        configuration.configuration_files = ["config.4.0.0.json"]

        with tempfile.TemporaryDirectory() as tmp_dir:
            configuration.save_pretrained(tmp_dir)
            configuration.hidden_size = 2
            json.dump(configuration.to_dict(), open(os.path.join(tmp_dir, "config.4.0.0.json"), "w"))

            # This should pick the new configuration file as the version of Transformers is > 4.0.0
            new_configuration = AutoConfig.from_pretrained(tmp_dir)
            self.assertEqual(new_configuration.hidden_size, 2)

            # Will need to be adjusted if we reach v42 and this test is still here.
            # Should pick the old configuration file as the version of Transformers is < 42.0.0
            configuration.configuration_files = ["config.42.0.0.json"]
            configuration.hidden_size = 768
            configuration.save_pretrained(tmp_dir)
            shutil.move(os.path.join(tmp_dir, "config.4.0.0.json"), os.path.join(tmp_dir, "config.42.0.0.json"))
            new_configuration = AutoConfig.from_pretrained(tmp_dir)
            self.assertEqual(new_configuration.hidden_size, 768)

    def test_repo_versioning_before(self):
        # This repo has two configuration files: a default one and one for v4.0.0 and above with a different hidden size.
        repo = "hf-internal-testing/test-two-configs"

        import transformers as new_transformers

        new_transformers.configuration_utils.__version__ = "v4.0.0"
        new_configuration, kwargs = new_transformers.models.auto.AutoConfig.from_pretrained(
            repo, return_unused_kwargs=True
        )
        self.assertEqual(new_configuration.hidden_size, 2)
        # This checks `_configuration_file` is not kept in the kwargs by mistake.
        self.assertDictEqual(kwargs, {})

        # Testing an older version by monkey-patching the version in the module where it's used.
        import transformers as old_transformers

        old_transformers.configuration_utils.__version__ = "v3.0.0"
        old_configuration = old_transformers.models.auto.AutoConfig.from_pretrained(repo)
        self.assertEqual(old_configuration.hidden_size, 768)