import torch
import transformers

from ..registry import ModelAttribute, model_zoo

try:
    from transformers import LlamaConfig

    HAS_LLAMA = True
except ImportError:
    HAS_LLAMA = False

if HAS_LLAMA:
    # ===============================
    # Register LLaMA
    # ===============================

    def data_gen():
        # the input ids correspond to the sentence
        # 'Hello, my dog is cute'
        #
        # the code used to produce them is given below:
        # -----------------------------------
        # from transformers import LlamaTokenizerFast
        # tokenizer = LlamaTokenizerFast.from_pretrained("hf-internal-testing/llama-tokenizer")
        # input = 'Hello, my dog is cute'
        # tokenized_input = tokenizer(input, return_tensors='pt').to('cuda')
        # -----------------------------------

        input_ids = torch.Tensor(
            [[1, 15043, 29892, 590, 11203, 338, 274, 1082], [1, 15043, 29892, 590, 11203, 338, 274, 1082]]
        ).long()
        attention_mask = torch.Tensor([[1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1]]).long()
        return dict(input_ids=input_ids, attention_mask=attention_mask)
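
    # a quick sanity check of the generated batch (illustrative only; kept as
    # a comment so nothing runs at import time):
    # -----------------------------------
    # batch = data_gen()
    # assert batch["input_ids"].shape == (2, 8)
    # assert batch["attention_mask"].shape == (2, 8)
    # -----------------------------------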

    # labels are needed for causal LM
    def data_gen_for_casual_lm():
        data = data_gen()
        labels = data["input_ids"].clone()
        data["labels"] = labels
        return data
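
    # with "labels" present, Hugging Face's LlamaForCausalLM computes the
    # shifted cross-entropy loss internally, e.g. (illustrative only; config
    # is defined further down in this file):
    # -----------------------------------
    # model = transformers.LlamaForCausalLM(config)
    # output = model(**data_gen_for_casual_lm())
    # output.loss  # scalar loss tensor
    # -----------------------------------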

    # the model outputs are already dict-like, so the transform is the identity
    output_transform_fn = lambda x: x

    # functions to compute a scalar loss from each model head's output
    loss_fn = lambda output: output["last_hidden_state"].mean()
    loss_fn_for_casual_lm = lambda output: output["loss"]
    loss_fn_for_seq_classification = lambda output: output["logits"].mean()

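    # a tiny LlamaConfig keeps the registered models cheap to build and run in tests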
    config = LlamaConfig(
        num_hidden_layers=8,
        hidden_size=32,
        intermediate_size=64,
        num_attention_heads=4,
        max_position_embeddings=128,
        num_labels=16,
    )
    if hasattr(config, "pad_token_id"):
        config.pad_token_id = config.eos_token_id
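    # the pad token id is required by LlamaForSequenceClassification to locate
    # the last non-padding token when the batch size is greater than 1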

    # register the following models
    # transformers.LlamaModel,
    # transformers.LlamaForCausalLM,
    # transformers.LlamaForSequenceClassification,
    model_zoo.register(
        name="transformers_llama",
        model_fn=lambda: transformers.LlamaModel(config),
        data_gen_fn=data_gen,
        output_transform_fn=output_transform_fn,
        loss_fn=loss_fn,
        model_attribute=ModelAttribute(has_control_flow=True),
    )
    model_zoo.register(
        name="transformers_llama_for_casual_lm",
        model_fn=lambda: transformers.LlamaForCausalLM(config),
        data_gen_fn=data_gen_for_casual_lm,
        output_transform_fn=output_transform_fn,
        loss_fn=loss_fn_for_casual_lm,
        model_attribute=ModelAttribute(has_control_flow=True),
    )
    model_zoo.register(
        name="transformers_llama_for_sequence_classification",
        model_fn=lambda: transformers.LlamaForSequenceClassification(config),
        data_gen_fn=data_gen,
        output_transform_fn=output_transform_fn,
        loss_fn=loss_fn_for_seq_classification,
        model_attribute=ModelAttribute(has_control_flow=True),
    )
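
    # a hypothetical consumer of the registrations above; the lookup call and
    # the entry attributes are assumptions about the model_zoo registry, not
    # something defined in this file, so the sketch stays commented out:
    # -----------------------------------
    # entry = model_zoo["transformers_llama"]  # hypothetical lookup
    # model = entry.model_fn()
    # data = entry.data_gen_fn()
    # output = entry.output_transform_fn(model(**data))
    # loss = entry.loss_fn(output)
    # loss.backward()
    # -----------------------------------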