import copy

import torch
import transformers

from ..registry import ModelAttribute, model_zoo

# ===============================
# Register single-sentence GPT
# ===============================


def data_gen():
    # Generated from the following code snippet:
    #
    # from transformers import GPT2Tokenizer
    # tokenizer = GPT2Tokenizer.from_pretrained('gpt2')
    # input = 'Hello, my dog is cute'
    # tokenized_input = tokenizer(input, return_tensors='pt')
    # input_ids = tokenized_input['input_ids']
    # attention_mask = tokenized_input['attention_mask']
    #
    # (the last two tokens, ' is cute', are repeated once below to lengthen the sequence)
    input_ids = torch.tensor([[15496, 11, 616, 3290, 318, 13779, 318, 13779]], dtype=torch.int64)
    attention_mask = torch.tensor([[1, 1, 1, 1, 1, 1, 1, 1]], dtype=torch.int64)
    return dict(input_ids=input_ids, attention_mask=attention_mask)


def data_gen_for_lm():
    # LM data gen
    # for causal LM, `labels` are the target token ids; since there is no padding,
    # `input_ids` can be reused as `labels` (the model shifts them internally)
    data = data_gen()
    data['labels'] = data['input_ids'].clone()
    return data


def data_gen_for_question_answering():
    # question answering data gen
    # `start_positions`/`end_positions` are the token indices where the answer span starts and ends
    data = data_gen()
    start_positions = torch.tensor([0], dtype=torch.int64)
    data['start_positions'] = start_positions
    end_positions = torch.tensor([1], dtype=torch.int64)
    data['end_positions'] = end_positions
    return data


def data_gen_for_token_classification():
    # token classification data gen
    # `labels` holds a per-token class id (0 or 1), not a token id
    data = data_gen()
    data['labels'] = torch.tensor([[0, 0, 0, 0, 0, 0, 0, 1]], dtype=torch.int64)
    return data


def data_gen_for_sequence_classification():
    # sequence classification data gen
    data = data_gen()
    data['labels'] = torch.tensor([1], dtype=torch.int64)
    return data


def data_gen_for_double_heads():
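    # double heads data gen
    # `mc_labels` is the index of the correct multiple-choice candidate per example
    # (all zeros here, i.e. the first and only candidate)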
    data = data_gen_for_lm()
    data['mc_labels'] = torch.zeros(data['input_ids'].shape[0], dtype=torch.int64)
    return data


# define output transform function
output_transform_fn = lambda x: x

# define loss function
# `GPT2Model` has no task head and returns no loss, so use a dummy MSE loss on the hidden states
loss_fn_for_gpt2_model = lambda x: torch.nn.functional.mse_loss(x.last_hidden_state,
                                                                torch.ones_like(x.last_hidden_state))
loss_fn = lambda x: x.loss
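
# the head models compute their own loss when labels are provided, so `loss_fn`
# just reads it back; illustrative example (not executed here):
#   transformers.GPT2LMHeadModel(config)(**data_gen_for_lm()).loss  # scalar tensor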

config = transformers.GPT2Config(n_layer=2,
                                 n_head=4,
                                 vocab_size=50258,
                                 attn_pdrop=0,
                                 embd_pdrop=0,
                                 resid_pdrop=0,
                                 summary_first_dropout=0,
                                 hidden_dropout=0,
                                 problem_type="single_label_classification",
                                 pad_token_id=50256)

# the classification heads registered below share a two-label config
config_for_token_classification = copy.deepcopy(config)
config_for_token_classification.num_labels = 2

# register the following models
model_zoo.register(name='transformers_gpt',
                   model_fn=lambda: transformers.GPT2Model(config),
                   data_gen_fn=data_gen,
                   output_transform_fn=output_transform_fn,
                   loss_fn=loss_fn_for_gpt2_model,
                   model_attribute=ModelAttribute(has_control_flow=True))
model_zoo.register(name='transformers_gpt_lm',
                   model_fn=lambda: transformers.GPT2LMHeadModel(config),
                   data_gen_fn=data_gen_for_lm,
                   output_transform_fn=output_transform_fn,
                   loss_fn=loss_fn,
                   model_attribute=ModelAttribute(has_control_flow=True))

# TODO: training currently fails due to a bug in transformers' GPT2DoubleHeadsModel.
# model_zoo.register(name='transformers_gpt_double_heads',
#                    model_fn=lambda: transformers.GPT2DoubleHeadsModel(config),
#                    data_gen_fn=data_gen_for_double_heads,
#                    output_transform_fn=lambda x: dict(loss=x.loss + x.mc_loss),
#                    loss_fn=loss_fn,
#                    model_attribute=ModelAttribute(has_control_flow=True))
model_zoo.register(name='transformers_gpt_for_question_answering',
                   model_fn=lambda: transformers.GPT2ForQuestionAnswering(config),
                   data_gen_fn=data_gen_for_question_answering,
                   output_transform_fn=output_transform_fn,
                   loss_fn=loss_fn,
                   model_attribute=ModelAttribute(has_control_flow=True))
model_zoo.register(name='transformers_gpt_for_token_classification',
                   model_fn=lambda: transformers.GPT2ForTokenClassification(config_for_token_classification),
                   data_gen_fn=data_gen_for_token_classification,
                   output_transform_fn=output_transform_fn,
                   loss_fn=loss_fn,
                   model_attribute=ModelAttribute(has_control_flow=True))
model_zoo.register(name='transformers_gpt_for_sequence_classification',
                   # reuse the two-label config; the sequence head also needs num_labels=2
                   model_fn=lambda: transformers.GPT2ForSequenceClassification(config_for_token_classification),
                   data_gen_fn=data_gen_for_sequence_classification,
                   output_transform_fn=output_transform_fn,
                   loss_fn=loss_fn,
                   model_attribute=ModelAttribute(has_control_flow=True))
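

# Minimal sanity-check sketch (an illustrative addition, not part of the registry
# contract): it exercises one registered triplet directly with plain transformers
# calls, assuming a CPU run with randomly initialized weights.
if __name__ == '__main__':
    model = transformers.GPT2LMHeadModel(config)
    data = data_gen_for_lm()
    outputs = model(**data)
    print('LM loss:', loss_fn(outputs).item())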