# Copyright 2024 the LlamaFactory team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import TYPE_CHECKING, Dict, Generator, List, Union

from ...extras.constants import PEFT_METHODS
from ...extras.misc import torch_gc
from ...extras.packages import is_gradio_available
from ...train.tuner import export_model
from ..common import GPTQ_BITS, get_save_dir
from ..locales import ALERTS


if is_gradio_available():
    import gradio as gr


if TYPE_CHECKING:
    from gradio.components import Component

    from ..engine import Engine


def can_quantize(checkpoint_path: Union[str, List[str]]) -> "gr.Dropdown":
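    """Lock the quantization dropdown to "none" when adapter checkpoints are selected.

    GPTQ export operates on a standalone model, so selecting one or more adapter
    checkpoints (a non-empty list) disables post-training quantization.
    """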
    if isinstance(checkpoint_path, list) and len(checkpoint_path) != 0:
        return gr.Dropdown(value="none", interactive=False)
    else:
        return gr.Dropdown(interactive=True)


def save_model(
    lang: str,
    model_name: str,
    model_path: str,
    finetuning_type: str,
    checkpoint_path: Union[str, List[str]],
    template: str,
    export_size: int,
    export_quantization_bit: str,
    export_quantization_dataset: str,
    export_device: str,
    export_legacy_format: bool,
    export_dir: str,
    export_hub_model_id: str,
) -> Generator[str, None, None]:
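    """Validate the export settings, build the arguments for ``export_model``, and run the export.

    Yields localized status messages from ``ALERTS`` so the info box in the
    export tab can display validation errors and export progress.
    """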
    error = ""
    if not model_name:
        error = ALERTS["err_no_model"][lang]
    elif not model_path:
        error = ALERTS["err_no_path"][lang]
    elif not export_dir:
        error = ALERTS["err_no_export_dir"][lang]
    elif export_quantization_bit in GPTQ_BITS and not export_quantization_dataset:
        error = ALERTS["err_no_dataset"][lang]
    elif export_quantization_bit not in GPTQ_BITS and not checkpoint_path:
        error = ALERTS["err_no_adapter"][lang]
    elif export_quantization_bit in GPTQ_BITS and checkpoint_path and isinstance(checkpoint_path, list):
        error = ALERTS["err_gptq_lora"][lang]

    if error:
        gr.Warning(error)
        yield error
        return

    # assemble the argument dict consumed by export_model
    args = dict(
        model_name_or_path=model_path,
        finetuning_type=finetuning_type,
        template=template,
        export_dir=export_dir,
        export_hub_model_id=export_hub_model_id or None,
        export_size=export_size,
        export_quantization_bit=int(export_quantization_bit) if export_quantization_bit in GPTQ_BITS else None,
        export_quantization_dataset=export_quantization_dataset,
        export_device=export_device,
        export_legacy_format=export_legacy_format,
    )

    if checkpoint_path:
        if finetuning_type in PEFT_METHODS:  # adapter checkpoints arrive as a list of directory names
            args["adapter_name_or_path"] = ",".join(
                [get_save_dir(model_name, finetuning_type, adapter) for adapter in checkpoint_path]
            )
        else:  # non-adapter checkpoints arrive as a single directory name
            args["model_name_or_path"] = get_save_dir(model_name, finetuning_type, checkpoint_path)

    yield ALERTS["info_exporting"][lang]
    export_model(args)
    torch_gc()
    yield ALERTS["info_exported"][lang]


def create_export_tab(engine: "Engine") -> Dict[str, "Component"]:
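    """Build the export tab of the web UI and bind its components to ``save_model``."""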
    with gr.Row():
        export_size = gr.Slider(minimum=1, maximum=100, value=5, step=1)
        export_quantization_bit = gr.Dropdown(choices=["none"] + GPTQ_BITS, value="none")
        export_quantization_dataset = gr.Textbox(value="data/c4_demo.json")
        export_device = gr.Radio(choices=["cpu", "auto"], value="cpu")
        export_legacy_format = gr.Checkbox()

    with gr.Row():
        export_dir = gr.Textbox()
        export_hub_model_id = gr.Textbox()

    checkpoint_path: gr.Dropdown = engine.manager.get_elem_by_id("top.checkpoint_path")
    checkpoint_path.change(can_quantize, [checkpoint_path], [export_quantization_bit], queue=False)

    export_btn = gr.Button()
    info_box = gr.Textbox(show_label=False, interactive=False)

    export_btn.click(
        save_model,
        [
            engine.manager.get_elem_by_id("top.lang"),
            engine.manager.get_elem_by_id("top.model_name"),
            engine.manager.get_elem_by_id("top.model_path"),
            engine.manager.get_elem_by_id("top.finetuning_type"),
            engine.manager.get_elem_by_id("top.checkpoint_path"),
            engine.manager.get_elem_by_id("top.template"),
            export_size,
            export_quantization_bit,
            export_quantization_dataset,
            export_device,
            export_legacy_format,
            export_dir,
            export_hub_model_id,
        ],
        [info_box],
    )

    return dict(
        export_size=export_size,
        export_quantization_bit=export_quantization_bit,
        export_quantization_dataset=export_quantization_dataset,
        export_device=export_device,
        export_legacy_format=export_legacy_format,
        export_dir=export_dir,
        export_hub_model_id=export_hub_model_id,
        export_btn=export_btn,
        info_box=info_box,
    )