# Copyright 2023-2024 SGLang Team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
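"""Test mixing LoRA adaptor requests with base-model (no adaptor) requests
in a single batch.

Run directly with ``python3 test_lora.py``.
"""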

import multiprocessing as mp
import unittest

from utils import TORCH_DTYPES, LoRAAdaptor, LoRAModelCase, run_lora_test_by_batch

from sglang.test.test_utils import CustomTestCase

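# Prompts sent as a single batch in each test case below.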
PROMPTS = [
    """
### Instruction:
Write a poem about the transformers Python library.
Mention the word "large language models" in that poem.
### Response:
The Transformers are large language models,
They're used to make predictions on text.
""",
    "AI is a field of computer science focused on",
]

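# Model cases in which one adaptor slot is None, meaning that request should be
# served by the base model with no LoRA weights applied. The two cases cover
# both orderings of (adaptor, None) within a batch.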
LORA_MODELS_WITH_NONE = [
    LoRAModelCase(
        base="meta-llama/Llama-3.1-8B-Instruct",
        adaptors=[
            LoRAAdaptor(
                name="algoprog/fact-generation-llama-3.1-8b-instruct-lora",
            ),
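            # name=None: serve this slot with the base model, no adaptor.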
            LoRAAdaptor(
                name=None,
            ),
        ],
        max_loras_per_batch=2,
    ),
    LoRAModelCase(
        base="meta-llama/Llama-3.1-8B-Instruct",
        adaptors=[
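            # Same adaptor pair as above, with the base-model (None) slot first.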
            LoRAAdaptor(
                name=None,
            ),
            LoRAAdaptor(
                name="algoprog/fact-generation-llama-3.1-8b-instruct-lora",
            ),
        ],
        max_loras_per_batch=2,
    ),
]


class TestLoRA(CustomTestCase):
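    """Batched LoRA tests mixing adaptor and base-model requests."""
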
    def test_lora_batch_with_none(self):
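        """Run every mixed (adaptor + None) model case across all torch dtypes."""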
        for model_case in LORA_MODELS_WITH_NONE:
            prompts = PROMPTS
            for torch_dtype in TORCH_DTYPES:
                run_lora_test_by_batch(
                    prompts,
                    model_case,
                    torch_dtype,
                    max_new_tokens=32,
                    backend="triton",
                    test_tag="test_lora_batch_with_none",
                )


if __name__ == "__main__":
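    # CUDA cannot be reinitialized in forked subprocesses, so use the "spawn"
    # start method; ignore the error if a start method was already set.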
    try:
        mp.set_start_method("spawn")
    except RuntimeError:
        pass

    unittest.main(warnings="ignore")