"include/ck/ck.hpp" did not exist on "31ded4ac4bc524acdbf897ffff094d7e7cbed991"
io_struct.py 7.17 KB
Newer Older
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
"""
Copyright 2023-2024 SGLang Team
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

Lianmin Zheng's avatar
Lianmin Zheng committed
16
17
18
19
20
"""
The definition of objects transfered between different
processes (TokenizerManager, DetokenizerManager, Controller).
"""

Lianmin Zheng's avatar
Lianmin Zheng committed
21
22
23
24
import uuid
from dataclasses import dataclass
from typing import Dict, List, Optional, Union

25
from sglang.srt.managers.schedule_batch import BaseFinishReason
26
from sglang.srt.sampling_params import SamplingParams
Lianmin Zheng's avatar
Lianmin Zheng committed
27
28
29
30


@dataclass
class GenerateReqInput:
    """The input of a generation request, either a single prompt or a batch.

    Each field may hold a single value (single request) or a list of values
    (batched request). `post_init` validates the input, decides whether the
    request is single or batched, and expands scalar fields into per-request
    lists for the batched case.
    """

    # The input prompt. It can be a single prompt or a batch of prompts.
    text: Optional[Union[List[str], str]] = None
    # The token ids for text; one can either specify text or input_ids.
    input_ids: Optional[Union[List[List[int]], List[int]]] = None
    # The image input. It can be a file name, a url, or base64 encoded string.
    # See also python/sglang/srt/utils.py:load_image.
    image_data: Optional[Union[List[str], str]] = None
    # The sampling parameters, as a dict (single) or a list of dicts (batch).
    sampling_params: Optional[Union[List[Dict], Dict]] = None
    # The request id.
    rid: Optional[Union[List[str], str]] = None
    # Whether to return logprobs.
    return_logprob: Optional[Union[List[bool], bool]] = None
    # The start location of the prompt for return_logprob.
    logprob_start_len: Optional[Union[List[int], int]] = None
    # The number of top logprobs to return.
    top_logprobs_num: Optional[Union[List[int], int]] = None
    # Whether to detokenize tokens in text in the returned logprobs.
    return_text_in_logprobs: bool = False
    # Whether to stream output.
    stream: bool = False

    def post_init(self):
        """Validate and normalize the request in place.

        Sets `self.is_single`; for batched requests also sets
        `self.parallel_sample_num` and `self.batch_size`, and expands scalar
        fields into per-sub-request lists (of length
        `batch_size * (n + 1)` when parallel sampling with n > 1 is used).

        Raises:
            ValueError: if neither or both of text/input_ids are given, if
                input_ids is empty, if inconsistent "n" values are given for
                parallel sampling, or if rid is not a list for a batch.
        """
        if self.text is None and self.input_ids is None:
            raise ValueError("Either text or input_ids should be provided.")
        if self.text is not None and self.input_ids is not None:
            raise ValueError("Only one of text and input_ids should be provided.")
        if self.input_ids is not None and len(self.input_ids) == 0:
            # Guard the `self.input_ids[0]` probes below against IndexError.
            raise ValueError("input_ids should not be empty.")

        # A request with parallel sampling (n > 1) is always treated as a
        # batch, because each sample becomes its own sub-request downstream.
        if (
            isinstance(self.sampling_params, dict)
            and self.sampling_params.get("n", 1) != 1
        ):
            is_single = False
        else:
            if self.text is not None:
                is_single = isinstance(self.text, str)
            else:
                is_single = isinstance(self.input_ids[0], int)
        self.is_single = is_single

        if is_single:
            # Fill in scalar defaults for the single-request case.
            if self.sampling_params is None:
                self.sampling_params = {}
            if self.rid is None:
                self.rid = uuid.uuid4().hex
            if self.return_logprob is None:
                self.return_logprob = False
            if self.logprob_start_len is None:
                self.logprob_start_len = 0
            if self.top_logprobs_num is None:
                self.top_logprobs_num = 0
        else:
            # Determine the parallel sampling factor "n".
            if isinstance(self.sampling_params, dict):
                parallel_sample_num = self.sampling_params.get("n", 1)
            elif isinstance(self.sampling_params, list):
                parallel_sample_num_list = [
                    sp.get("n", 1) for sp in self.sampling_params
                ]
                parallel_sample_num = max(parallel_sample_num_list)
                all_equal = all(
                    element == parallel_sample_num
                    for element in parallel_sample_num_list
                )
                if parallel_sample_num > 1 and (not all_equal):
                    # TODO cope with the case that the parallel_sample_num is different for different samples
                    raise ValueError(
                        "The parallel_sample_num should be the same for all samples in sample params."
                    )
            else:
                parallel_sample_num = 1
            self.parallel_sample_num = parallel_sample_num

            # `num` is the number of sub-requests every per-request field
            # must be expanded to.
            if parallel_sample_num != 1:
                # Parallel sampling: +1 represents the original prefill stage.
                num = parallel_sample_num + 1
                if isinstance(self.text, list):
                    # Support batch operation.
                    self.batch_size = len(self.text)
                    num = num * len(self.text)
                elif isinstance(self.input_ids, list) and isinstance(
                    self.input_ids[0], list
                ):
                    self.batch_size = len(self.input_ids)
                    num = num * len(self.input_ids)
                else:
                    self.batch_size = 1
            else:
                # Support select operation (plain batch, no parallel sampling).
                num = len(self.text) if self.text is not None else len(self.input_ids)
                self.batch_size = num

            # Broadcast scalar fields to per-sub-request lists.
            if self.image_data is None:
                self.image_data = [None] * num
            elif not isinstance(self.image_data, list):
                self.image_data = [self.image_data] * num

            if self.sampling_params is None:
                # Use a comprehension (not `[{}] * num`) so each sub-request
                # gets its OWN dict; `[{}] * num` aliases one shared dict and
                # a mutation through one entry would leak into all of them.
                self.sampling_params = [{} for _ in range(num)]
            elif not isinstance(self.sampling_params, list):
                self.sampling_params = [self.sampling_params] * num

            if self.rid is None:
                self.rid = [uuid.uuid4().hex for _ in range(num)]
            else:
                if not isinstance(self.rid, list):
                    raise ValueError("The rid should be a list.")

            if self.return_logprob is None:
                self.return_logprob = [False] * num
            elif not isinstance(self.return_logprob, list):
                self.return_logprob = [self.return_logprob] * num

            if self.logprob_start_len is None:
                self.logprob_start_len = [0] * num
            elif not isinstance(self.logprob_start_len, list):
                self.logprob_start_len = [self.logprob_start_len] * num

            if self.top_logprobs_num is None:
                self.top_logprobs_num = [0] * num
            elif not isinstance(self.top_logprobs_num, list):
                self.top_logprobs_num = [self.top_logprobs_num] * num

Lianmin Zheng's avatar
Lianmin Zheng committed
153
154
155
156

@dataclass
class TokenizedGenerateReqInput:
    """The tokenized form of a single generation request.

    Unlike GenerateReqInput, every field here is a per-request scalar:
    one instance corresponds to one request.
    """

    # The request id.
    rid: str
    # The original input text (kept alongside the token ids).
    input_text: str
    # The tokenized prompt.
    input_ids: List[int]
    # Image pixel data — presumably produced by the image preprocessor;
    # TODO confirm layout with the producer.
    pixel_values: List[float]
    # Hash of the image — NOTE(review): looks like a cache/identity key; confirm.
    image_hash: int
    # The image size — NOTE(review): width/height ordering not shown here; confirm.
    image_size: List[int]
    # The parsed sampling parameters for this request.
    sampling_params: SamplingParams
    # Whether to return logprobs.
    return_logprob: bool
    # The start location of the prompt for return_logprob.
    logprob_start_len: int
    # The number of top logprobs to return.
    top_logprobs_num: int
    # Whether to stream output.
    stream: bool


@dataclass
class BatchTokenIDOut:
    """A batch of token-id outputs, one list entry per request.

    Presumably consumed by the DetokenizerManager (see module docstring);
    all lists are parallel, indexed by request.
    """

    # The request ids.
    rids: List[str]
    # Per-request version ids — NOTE(review): semantics not visible here; confirm with consumer.
    vids: List[int]
    # The text decoded so far for each request (for incremental detokenization).
    decoded_texts: List[str]
    # The token ids to be decoded.
    decode_ids: List[int]
    # Per-request read offsets — TODO confirm exact semantics with the detokenizer.
    read_offsets: List[int]
    # Per-request flag: drop special tokens when detokenizing.
    skip_special_tokens: List[bool]
    # Per-request flag: insert spaces between special tokens when detokenizing.
    spaces_between_special_tokens: List[bool]
    # Per-request metadata dicts.
    meta_info: List[Dict]
    # Per-request finish reasons.
    finished_reason: List[BaseFinishReason]
Lianmin Zheng's avatar
Lianmin Zheng committed
180

Liangsheng Yin's avatar
Liangsheng Yin committed
181

Lianmin Zheng's avatar
Lianmin Zheng committed
182
183
184
@dataclass
class BatchStrOut:
    """A batch of detokenized string outputs, one list entry per request.

    All lists are parallel, indexed by request.
    """

    # The request ids.
    rids: List[str]
    # The detokenized output strings.
    output_strs: List[str]
    # Per-request metadata dicts.
    meta_info: List[Dict]
    # Per-request finish reasons.
    finished_reason: List[BaseFinishReason]
Liangsheng Yin's avatar
Liangsheng Yin committed
188
189
190
191
192


@dataclass
class FlushCacheReq:
    """A request to flush the cache; carries no payload."""

    pass
Cody Yu's avatar
Cody Yu committed
193

194

195
196
197
198
199
@dataclass
class AbortReq:
    """A request to abort an in-flight generation request."""

    # The id of the request to abort.
    rid: str


Cody Yu's avatar
Cody Yu committed
200
201
@dataclass
class DetokenizeReqInput:
    """A request to detokenize a list of token ids."""

    # The token ids to detokenize.
    input_ids: List[int]