from .base import BaseAWQForCausalLM
from transformers.models.gpt_bigcode.modeling_gpt_bigcode import (
    GPTBigCodeForCausalLM,
    GPTBigCodeBlock as OldGptBigCodeBlock,
)


class GptBigCodeAWQForCausalLM(BaseAWQForCausalLM):
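    """AWQ wrapper for GPT BigCode (StarCoder-style) models: maps the HuggingFace
    GPTBigCode module layout onto the hooks BaseAWQForCausalLM uses for
    activation-aware weight quantization."""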
    layer_type = "GPTBigCodeBlock"
    max_seq_len_key = "n_positions"

    @staticmethod
    def get_model_layers(model: GPTBigCodeForCausalLM):
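        """Return the stack of transformer blocks (model.transformer.h) to quantize."""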
        return model.transformer.h

    @staticmethod
    def get_act_for_scaling(module: OldGptBigCodeBlock):
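        """Expose the MLP activation (mlp.act) as a scalable op; its output width
        equals mlp.c_fc.out_features."""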
        return dict(
            is_scalable=True,
            scale_name="mlp.act",
            scale_layer=module.mlp.act,
            scale_shape=module.mlp.c_fc.out_features,
        )

    @staticmethod
    def move_embed(model: GPTBigCodeForCausalLM, device):
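        """Move token embeddings (wte), position embeddings (wpe), and the embedding
        dropout to the given device."""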
        model.transformer.wte = model.transformer.wte.to(device)
        model.transformer.wpe = model.transformer.wpe.to(device)
        model.transformer.drop = model.transformer.drop.to(device)

    @staticmethod
    def get_layers_for_scaling(module: OldGptBigCodeBlock, input_feat, module_kwargs):
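        """Collect the scaling groups for one transformer block: each dict pairs a
        preceding op (prev_op) with the linear layer(s) it feeds and the inputs
        captured for those layers during calibration."""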
        layers = []

        # attention input: scale the fused QKV projection (attn.c_attn) with the output of ln_1
        layers.append(
            dict(
                prev_op=module.ln_1,
                layers=[module.attn.c_attn],
                inp=input_feat["attn.c_attn"],
                module2inspect=module.attn,
                kwargs=module_kwargs,
            )
        )

        # linear 1: scale the MLP up-projection (mlp.c_fc) with the output of ln_2
        layers.append(
            dict(
                prev_op=module.ln_2,
                layers=[module.mlp.c_fc],
                inp=input_feat["mlp.c_fc"],
                module2inspect=module.mlp,
            )
        )

        # linear 2: scale the MLP down-projection (mlp.c_proj) with the activation output (mlp.act)
        layers.append(
            dict(
                prev_op=module.mlp.act,
                layers=[module.mlp.c_proj],
                inp=input_feat["mlp.c_proj"],
            )
        )

        return layers
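

# Minimal usage sketch (an assumption about the surrounding package, not part of this
# module): AutoAWQ's AutoAWQForCausalLM entry point dispatches to this class for the
# "gpt_bigcode" model type. The model name and quant_config values are illustrative.
#
#   from awq import AutoAWQForCausalLM
#   from transformers import AutoTokenizer
#
#   model = AutoAWQForCausalLM.from_pretrained("bigcode/starcoder")
#   tokenizer = AutoTokenizer.from_pretrained("bigcode/starcoder")
#   model.quantize(tokenizer, quant_config={"zero_point": True, "q_group_size": 128, "w_bit": 4})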