# coding=utf-8
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


import unittest

from transformers import ElectraConfig, is_torch_available
from transformers.models.auto import get_values
from transformers.testing_utils import require_torch, slow, torch_device

from ...test_configuration_common import ConfigTester
from ...test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask


if is_torch_available():
    import torch

    from transformers import (
        MODEL_FOR_PRETRAINING_MAPPING,
        ElectraForCausalLM,
        ElectraForMaskedLM,
        ElectraForMultipleChoice,
        ElectraForPreTraining,
        ElectraForQuestionAnswering,
        ElectraForSequenceClassification,
        ElectraForTokenClassification,
        ElectraModel,
    )
    from transformers.models.electra.modeling_electra import ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST


class ElectraModelTester:
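    """Builds a small random ElectraConfig plus matching dummy inputs shared by the checks below."""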
    def __init__(
        self,
        parent,
        batch_size=13,
        seq_length=7,
        is_training=True,
        use_input_mask=True,
        use_token_type_ids=True,
        use_labels=True,
        vocab_size=99,
        hidden_size=32,
        num_hidden_layers=5,
        num_attention_heads=4,
        intermediate_size=37,
        hidden_act="gelu",
        hidden_dropout_prob=0.1,
        attention_probs_dropout_prob=0.1,
        max_position_embeddings=512,
        type_vocab_size=16,
        type_sequence_label_size=2,
        initializer_range=0.02,
        num_labels=3,
        num_choices=4,
        scope=None,
    ):
        self.parent = parent
        self.batch_size = batch_size
        self.seq_length = seq_length
        self.is_training = is_training
        self.use_input_mask = use_input_mask
        self.use_token_type_ids = use_token_type_ids
        self.use_labels = use_labels
        self.vocab_size = vocab_size
        self.hidden_size = hidden_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.intermediate_size = intermediate_size
        self.hidden_act = hidden_act
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.max_position_embeddings = max_position_embeddings
        self.type_vocab_size = type_vocab_size
        self.type_sequence_label_size = type_sequence_label_size
        self.initializer_range = initializer_range
        self.num_labels = num_labels
        self.num_choices = num_choices
        self.scope = scope

    def prepare_config_and_inputs(self):
        input_ids = ids_tensor([self.batch_size, self.seq_length], self.vocab_size)

        input_mask = None
        if self.use_input_mask:
            input_mask = random_attention_mask([self.batch_size, self.seq_length])

        token_type_ids = None
        if self.use_token_type_ids:
            token_type_ids = ids_tensor([self.batch_size, self.seq_length], self.type_vocab_size)

        sequence_labels = None
        token_labels = None
        choice_labels = None
        if self.use_labels:
            sequence_labels = ids_tensor([self.batch_size], self.type_sequence_label_size)
            token_labels = ids_tensor([self.batch_size, self.seq_length], self.num_labels)
            choice_labels = ids_tensor([self.batch_size], self.num_choices)
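            # Binary targets for the replaced-token-detection head; drawing from a
            # vocab of size 1 makes these all zeros.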
            fake_token_labels = ids_tensor([self.batch_size, self.seq_length], 1)

        config = self.get_config()

        return (
            config,
            input_ids,
            token_type_ids,
            input_mask,
            sequence_labels,
            token_labels,
            choice_labels,
            fake_token_labels,
        )

    def get_config(self):
        return ElectraConfig(
            vocab_size=self.vocab_size,
            hidden_size=self.hidden_size,
            num_hidden_layers=self.num_hidden_layers,
            num_attention_heads=self.num_attention_heads,
            intermediate_size=self.intermediate_size,
            hidden_act=self.hidden_act,
            hidden_dropout_prob=self.hidden_dropout_prob,
            attention_probs_dropout_prob=self.attention_probs_dropout_prob,
            max_position_embeddings=self.max_position_embeddings,
            type_vocab_size=self.type_vocab_size,
            is_decoder=False,
            initializer_range=self.initializer_range,
        )

    def prepare_config_and_inputs_for_decoder(self):
        (
            config,
            input_ids,
            token_type_ids,
            input_mask,
            sequence_labels,
            token_labels,
            choice_labels,
            _,
        ) = self.prepare_config_and_inputs()

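        # Flip the config into decoder mode and fabricate the cross-attention inputs
        # an encoder would normally provide.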
        config.is_decoder = True
        encoder_hidden_states = floats_tensor([self.batch_size, self.seq_length, self.hidden_size])
        encoder_attention_mask = ids_tensor([self.batch_size, self.seq_length], vocab_size=2)

        return (
            config,
            input_ids,
            token_type_ids,
            input_mask,
            sequence_labels,
            token_labels,
            choice_labels,
            encoder_hidden_states,
            encoder_attention_mask,
        )

    def create_and_check_electra_model(
        self,
        config,
        input_ids,
        token_type_ids,
        input_mask,
        sequence_labels,
        token_labels,
        choice_labels,
        fake_token_labels,
    ):
        model = ElectraModel(config=config)
        model.to(torch_device)
        model.eval()
        result = model(input_ids, attention_mask=input_mask, token_type_ids=token_type_ids)
        result = model(input_ids, token_type_ids=token_type_ids)
        result = model(input_ids)
        self.parent.assertEqual(result.last_hidden_state.shape, (self.batch_size, self.seq_length, self.hidden_size))

    def create_and_check_electra_model_as_decoder(
        self,
        config,
        input_ids,
        token_type_ids,
        input_mask,
        sequence_labels,
        token_labels,
        choice_labels,
        encoder_hidden_states,
        encoder_attention_mask,
    ):
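        # Cross-attention layers are only built when add_cross_attention is enabled.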
        config.add_cross_attention = True
        model = ElectraModel(config)
        model.to(torch_device)
        model.eval()
        result = model(
            input_ids,
            attention_mask=input_mask,
            token_type_ids=token_type_ids,
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=encoder_attention_mask,
        )
        result = model(
            input_ids,
            attention_mask=input_mask,
            token_type_ids=token_type_ids,
            encoder_hidden_states=encoder_hidden_states,
        )
        result = model(input_ids, attention_mask=input_mask, token_type_ids=token_type_ids)
        self.parent.assertEqual(result.last_hidden_state.shape, (self.batch_size, self.seq_length, self.hidden_size))

    def create_and_check_electra_for_masked_lm(
        self,
        config,
        input_ids,
        token_type_ids,
        input_mask,
        sequence_labels,
        token_labels,
        choice_labels,
        fake_token_labels,
    ):
        model = ElectraForMaskedLM(config=config)
        model.to(torch_device)
        model.eval()
        result = model(input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, labels=token_labels)
        self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, self.vocab_size))

    def create_and_check_electra_for_causal_lm(
        self,
        config,
        input_ids,
        token_type_ids,
        input_mask,
        sequence_labels,
        token_labels,
        choice_labels,
        encoder_hidden_states,
        encoder_attention_mask,
    ):
        model = ElectraForCausalLM(config=config)
        model.to(torch_device)
        model.eval()
        result = model(input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, labels=token_labels)
        self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, self.vocab_size))

    def create_and_check_electra_for_token_classification(
        self,
        config,
        input_ids,
        token_type_ids,
        input_mask,
        sequence_labels,
        token_labels,
        choice_labels,
        fake_token_labels,
    ):
        config.num_labels = self.num_labels
        model = ElectraForTokenClassification(config=config)
        model.to(torch_device)
        model.eval()
        result = model(input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, labels=token_labels)
        self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, self.num_labels))

    def create_and_check_electra_for_pretraining(
        self,
        config,
        input_ids,
        token_type_ids,
        input_mask,
        sequence_labels,
        token_labels,
        choice_labels,
        fake_token_labels,
    ):
        config.num_labels = self.num_labels
        model = ElectraForPreTraining(config=config)
        model.to(torch_device)
        model.eval()
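        # The discriminator head emits one original-vs-replaced logit per token.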
        result = model(input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, labels=fake_token_labels)
        self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length))

    def create_and_check_electra_for_sequence_classification(
        self,
        config,
        input_ids,
        token_type_ids,
        input_mask,
        sequence_labels,
        token_labels,
        choice_labels,
        fake_token_labels,
    ):
        config.num_labels = self.num_labels
        model = ElectraForSequenceClassification(config)
        model.to(torch_device)
        model.eval()
        result = model(input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, labels=sequence_labels)
        self.parent.assertEqual(result.logits.shape, (self.batch_size, self.num_labels))

    def create_and_check_electra_for_question_answering(
        self,
        config,
        input_ids,
        token_type_ids,
        input_mask,
        sequence_labels,
        token_labels,
        choice_labels,
        fake_token_labels,
    ):
        model = ElectraForQuestionAnswering(config=config)
        model.to(torch_device)
        model.eval()
        result = model(
            input_ids,
            attention_mask=input_mask,
            token_type_ids=token_type_ids,
            start_positions=sequence_labels,
            end_positions=sequence_labels,
        )
        self.parent.assertEqual(result.start_logits.shape, (self.batch_size, self.seq_length))
        self.parent.assertEqual(result.end_logits.shape, (self.batch_size, self.seq_length))

    def create_and_check_electra_for_multiple_choice(
        self,
        config,
        input_ids,
        token_type_ids,
        input_mask,
        sequence_labels,
        token_labels,
        choice_labels,
        fake_token_labels,
    ):
        config.num_choices = self.num_choices
        model = ElectraForMultipleChoice(config=config)
        model.to(torch_device)
        model.eval()
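        # Tile every input tensor across the choice dimension: (batch_size, num_choices, seq_length).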
        multiple_choice_input_ids = input_ids.unsqueeze(1).expand(-1, self.num_choices, -1).contiguous()
        multiple_choice_token_type_ids = token_type_ids.unsqueeze(1).expand(-1, self.num_choices, -1).contiguous()
        multiple_choice_input_mask = input_mask.unsqueeze(1).expand(-1, self.num_choices, -1).contiguous()
        result = model(
            multiple_choice_input_ids,
            attention_mask=multiple_choice_input_mask,
            token_type_ids=multiple_choice_token_type_ids,
            labels=choice_labels,
        )
        self.parent.assertEqual(result.logits.shape, (self.batch_size, self.num_choices))

    def prepare_config_and_inputs_for_common(self):
        config_and_inputs = self.prepare_config_and_inputs()
        (
            config,
            input_ids,
            token_type_ids,
            input_mask,
            sequence_labels,
            token_labels,
            choice_labels,
            fake_token_labels,
        ) = config_and_inputs
        inputs_dict = {"input_ids": input_ids, "token_type_ids": token_type_ids, "attention_mask": input_mask}
        return config, inputs_dict


@require_torch
class ElectraModelTest(ModelTesterMixin, unittest.TestCase):

    all_model_classes = (
        (
            ElectraModel,
            ElectraForPreTraining,
            ElectraForMaskedLM,
            ElectraForCausalLM,
            ElectraForMultipleChoice,
            ElectraForTokenClassification,
            ElectraForSequenceClassification,
            ElectraForQuestionAnswering,
        )
        if is_torch_available()
        else ()
    )
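    # Enables the shared torch.fx symbolic-tracing tests for these model classes.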
    fx_compatible = True

    # special case for ForPreTraining model
    def _prepare_for_class(self, inputs_dict, model_class, return_labels=False):
        inputs_dict = super()._prepare_for_class(inputs_dict, model_class, return_labels=return_labels)

        if return_labels:
            if model_class in get_values(MODEL_FOR_PRETRAINING_MAPPING):
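                # ElectraForPreTraining scores every position, so it needs (batch_size, seq_length) labels.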
                inputs_dict["labels"] = torch.zeros(
                    (self.model_tester.batch_size, self.model_tester.seq_length), dtype=torch.long, device=torch_device
                )
        return inputs_dict

    def setUp(self):
        self.model_tester = ElectraModelTester(self)
        self.config_tester = ConfigTester(self, config_class=ElectraConfig, hidden_size=37)

    def test_config(self):
        self.config_tester.run_common_tests()

    def test_electra_model(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_electra_model(*config_and_inputs)

    def test_electra_model_as_decoder(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs_for_decoder()
        self.model_tester.create_and_check_electra_model_as_decoder(*config_and_inputs)

    def test_electra_model_various_embeddings(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        for embedding_type in ["absolute", "relative_key", "relative_key_query"]:
            config_and_inputs[0].position_embedding_type = embedding_type
            self.model_tester.create_and_check_electra_model(*config_and_inputs)

    def test_for_masked_lm(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_electra_for_masked_lm(*config_and_inputs)

    def test_for_token_classification(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_electra_for_token_classification(*config_and_inputs)

    def test_for_pre_training(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_electra_for_pretraining(*config_and_inputs)

    def test_for_sequence_classification(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_electra_for_sequence_classification(*config_and_inputs)

    def test_for_question_answering(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_electra_for_question_answering(*config_and_inputs)

    def test_for_multiple_choice(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_electra_for_multiple_choice(*config_and_inputs)

    @slow
    def test_model_from_pretrained(self):
        for model_name in ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST[:1]:
            model = ElectraModel.from_pretrained(model_name)
            self.assertIsNotNone(model)

    def test_for_causal_lm(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs_for_decoder()
        self.model_tester.create_and_check_electra_for_causal_lm(*config_and_inputs)


@require_torch
class ElectraModelIntegrationTest(unittest.TestCase):
    @slow
    def test_inference_no_head_absolute_embedding(self):
        model = ElectraModel.from_pretrained("google/electra-small-discriminator")
        input_ids = torch.tensor([[0, 345, 232, 328, 740, 140, 1695, 69, 6078, 1588, 2]])
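        # The leading zero masks out the first position.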
        attention_mask = torch.tensor([[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])
        output = model(input_ids, attention_mask=attention_mask)[0]
        expected_shape = torch.Size((1, 11, 256))
        self.assertEqual(output.shape, expected_shape)
        expected_slice = torch.tensor(
            [[[0.4471, 0.6821, -0.3265], [0.4627, 0.5255, -0.3668], [0.4532, 0.3313, -0.4344]]]
        )

        self.assertTrue(torch.allclose(output[:, 1:4, 1:4], expected_slice, atol=1e-4))