import os
import logging
from transformers import AutoConfig
from awq.models import *
from awq.models.base import BaseAWQForCausalLM

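# Maps a checkpoint's config.model_type to the AWQ wrapper class for that architecture.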
AWQ_CAUSAL_LM_MODEL_MAP = {
    "mpt": MptAWQForCausalLM,
    "llama": LlamaAWQForCausalLM,
    "opt": OptAWQForCausalLM,
    "RefinedWeb": FalconAWQForCausalLM,
    "RefinedWebModel": FalconAWQForCausalLM,
    "falcon": FalconAWQForCausalLM,
    "bloom": BloomAWQForCausalLM,
    "gptj": GPTJAWQForCausalLM,
    "gpt_bigcode": GptBigCodeAWQForCausalLM,
    "mistral": MistralAWQForCausalLM,
    "mixtral": MixtralAWQForCausalLM,
    "gpt_neox": GPTNeoXAWQForCausalLM,
    "aquila": AquilaAWQForCausalLM,
    "Yi": YiAWQForCausalLM,
    "qwen": QwenAWQForCausalLM,
    "baichuan": BaichuanAWQForCausalLM,
    "llava": LlavaAWQForCausalLM,
    "qwen2": Qwen2AWQForCausalLM,
    "gemma": GemmaAWQForCausalLM,
}


def check_and_get_model_type(model_dir, trust_remote_code=True, **model_init_kwargs):
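    """Read the checkpoint's config via AutoConfig and return its model_type.

    Raises TypeError if the architecture has no AWQ wrapper registered in
    AWQ_CAUSAL_LM_MODEL_MAP.
    """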
    config = AutoConfig.from_pretrained(
        model_dir, trust_remote_code=trust_remote_code, **model_init_kwargs
    )
    if config.model_type not in AWQ_CAUSAL_LM_MODEL_MAP:
        raise TypeError(f"{config.model_type} isn't supported yet.")
    return config.model_type


class AutoAWQForCausalLM:
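    """Dispatch wrapper that picks the model-specific AWQ class for a checkpoint.

    Instantiate via from_pretrained (unquantized weights, e.g. prior to
    quantization) or from_quantized (an AWQ-quantized checkpoint); calling the
    constructor directly raises.
    """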
    def __init__(self):
        raise EnvironmentError(
            "You must instantiate AutoAWQForCausalLM with\n"
            "AutoAWQForCausalLM.from_quantized or AutoAWQForCausalLM.from_pretrained"
        )

    @classmethod
    def from_pretrained(
        cls,
        model_path,
        trust_remote_code=True,
        safetensors=True,
        device_map=None,
        download_kwargs=None,
        **model_init_kwargs,
    ) -> BaseAWQForCausalLM:
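        """Load an unquantized Hugging Face checkpoint with the matching AWQ wrapper."""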
        model_type = check_and_get_model_type(
            model_path, trust_remote_code, **model_init_kwargs
        )

        return AWQ_CAUSAL_LM_MODEL_MAP[model_type].from_pretrained(
            model_path,
            model_type,
            trust_remote_code=trust_remote_code,
            safetensors=safetensors,
            device_map=device_map,
            download_kwargs=download_kwargs,
            **model_init_kwargs,
        )

    @classmethod
    def from_quantized(
        cls,
        quant_path,
        quant_filename="",
        max_seq_len=2048,
        trust_remote_code=True,
        fuse_layers=True,
        use_exllama=False,
        use_exllama_v2=False,
        batch_size=1,
        safetensors=True,
        device_map="balanced",
        offload_folder=None,
        download_kwargs=None,
        **config_kwargs,
    ) -> BaseAWQForCausalLM:
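        """Load an AWQ-quantized checkpoint for inference with the matching AWQ wrapper."""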
        os.environ["AWQ_BATCH_SIZE"] = str(batch_size)
        model_type = check_and_get_model_type(quant_path, trust_remote_code)

        if config_kwargs.get("max_new_tokens") is not None:
            max_seq_len = config_kwargs["max_new_tokens"]
            logging.warning(
                "max_new_tokens argument is deprecated... gracefully "
                "setting max_seq_len=max_new_tokens."
            )

        return AWQ_CAUSAL_LM_MODEL_MAP[model_type].from_quantized(
            quant_path,
            model_type,
            quant_filename,
            max_seq_len,
            trust_remote_code=trust_remote_code,
            fuse_layers=fuse_layers,
            use_exllama=use_exllama,
            use_exllama_v2=use_exllama_v2,
            safetensors=safetensors,
            device_map=device_map,
            offload_folder=offload_folder,
            download_kwargs=download_kwargs,
            **config_kwargs,
        )
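

# Minimal usage sketch (illustrative only, guarded so it never runs on import):
# the checkpoint path below is a placeholder and must point to an AWQ-quantized
# model, locally or on the Hugging Face Hub, for this to run.
if __name__ == "__main__":
    quantized_model = AutoAWQForCausalLM.from_quantized(
        "path/to/quantized-awq-model",  # placeholder checkpoint directory or Hub id
        fuse_layers=True,
    )
    print(f"Loaded {type(quantized_model).__name__}")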