gptj.py
from .base import BaseAWQForCausalLM
from transformers.models.gptj.modeling_gptj import GPTJForCausalLM, GPTJBlock

class GPTJAWQForCausalLM(BaseAWQForCausalLM):
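    # layer_type matches the class name of GPT-J's decoder block in transformers,
    # and max_seq_len_key names the GPTJConfig field holding the maximum context length.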
    layer_type = "GPTJBlock"
    max_seq_len_key = "n_positions"

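    # GPT-J keeps its stack of decoder blocks in transformer.h.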
    @staticmethod
    def get_model_layers(model: GPTJForCausalLM):
        return model.transformer.h

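    # The MLP's GELU activation is marked scalable; the scale spans
    # fc_in.out_features channels, i.e. the activation's input width.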
    @staticmethod
    def get_act_for_scaling(module: GPTJBlock):
        return dict(
            is_scalable=True,
            scale_name="mlp.act",
            scale_layer=module.mlp.act,
            scale_shape=module.mlp.fc_in.out_features,
        )

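    # GPT-J uses rotary position embeddings, so the token embedding wte is the
    # only embedding that needs to follow the model onto the target device.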
    @staticmethod
    def move_embed(model: GPTJForCausalLM, device: str):
        model.transformer.wte = model.transformer.wte.to(device)

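    # Each entry groups linear layers that read the same input together with the
    # op producing that input (prev_op), so a per-channel scale can be applied
    # between them during AWQ's scale search.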
    @staticmethod
    def get_layers_for_scaling(module: GPTJBlock, input_feat, module_kwargs):
        layers = []

        # attention input + linear 1
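        # GPT-J runs attention and MLP in parallel off the same ln_1 output,
        # so q/k/v and mlp.fc_in share one input and are scaled as one group.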
        layers.append(
            dict(
                prev_op=module.ln_1,
                layers=[
                    module.attn.q_proj,
                    module.attn.k_proj,
                    module.attn.v_proj,
                    module.mlp.fc_in,
                ],
                inp=input_feat["attn.q_proj"],
                module2inspect=module,
                kwargs=module_kwargs,
            )
        )

        # attention out
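        # out_proj consumes the attention output; v_proj serves as prev_op so the
        # inverse scale can be folded into its weights.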
        layers.append(
            dict(
                prev_op=module.attn.v_proj,
                layers=[module.attn.out_proj],
                inp=input_feat["attn.out_proj"],
            )
        )

        # linear 2
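        # fc_out consumes the GELU output of fc_in, so the activation itself is prev_op.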
        layers.append(
            dict(
                prev_op=module.mlp.act,
                layers=[module.mlp.fc_out],
                inp=input_feat["mlp.fc_out"],
            )
        )

        return layers