syntax = "proto3";

package generate.v1;

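/// Typical call sequence, as suggested by the RPC comments below (a rough
/// sketch, not a normative contract): Info / Health to probe the server,
/// Warmup to size the cache before serving, Prefill to run a new Batch and
/// decode the first token of each request, repeated Decode calls to advance
/// the resulting cached batches one token at a time, FilterBatch to drop
/// finished requests from a cached batch, and ClearCache to evict a cached
/// batch (or, when no id is given, all of them).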
service TextGenerationService {
    /// Model Info
    rpc Info (InfoRequest) returns (InfoResponse) {}
    /// Service discovery
    rpc ServiceDiscovery (ServiceDiscoveryRequest) returns (ServiceDiscoveryResponse) {}
    /// Empties the batch cache
    rpc ClearCache (ClearCacheRequest) returns (ClearCacheResponse);
    /// Remove requests from a cached batch
    rpc FilterBatch (FilterBatchRequest) returns (FilterBatchResponse);
    /// Warmup the model and compute max cache size
    rpc Warmup (WarmupRequest) returns (WarmupResponse);
    /// Prefill batch and decode first token
    rpc Prefill (PrefillRequest) returns (PrefillResponse);
    /// Decode token for a list of prefilled batches
    rpc Decode (DecodeRequest) returns (DecodeResponse);
    /// Health check
    rpc Health (HealthRequest) returns (HealthResponse);
}

message HealthRequest {}
message HealthResponse {}

/// Empty request
message InfoRequest {}

message InfoResponse {
    bool requires_padding = 1;
    string dtype = 2;
    string device_type = 3;
}

/// Empty request
message ServiceDiscoveryRequest {}

message ServiceDiscoveryResponse {
    /// Other shards' URLs
    repeated string urls = 1;
}

message ClearCacheRequest {
    /// Optional batch id
    optional uint64 id = 1;
}

/// Empty response
message ClearCacheResponse {}

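/// Illustrative example only (the concrete values below are assumptions, not
/// defaults defined by this schema): a sampling configuration might set
/// do_sample = true, temperature = 0.7, top_k = 50, top_p = 0.95,
/// repetition_penalty = 1.0 and a fixed seed for reproducibility, while
/// greedy decoding simply sets do_sample = false.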
message NextTokenChooserParameters {
    /// exponential scaling of the output probability distribution
    float temperature = 1;
    /// restricting to the k highest probability elements
    uint32 top_k = 2;
    /// restricting to the smallest set of tokens whose cumulative probability exceeds top_p (nucleus sampling)
    float top_p = 3;
    /// restricting to the locally most typical tokens whose cumulative probability mass reaches typical_p (typical decoding)
    float typical_p = 4;
    /// apply sampling on the logits
    bool do_sample = 5;
    /// random seed for sampling
    uint64 seed = 6;
    /// repetition penalty
    float repetition_penalty = 7;
    /// token watermarking using "A Watermark for Large Language Models"
    bool watermark = 8;
}

message StoppingCriteriaParameters {
    /// Maximum number of generated tokens
    uint32 max_new_tokens = 1;
    /// Optional stopping sequences
    repeated string stop_sequences = 2;
    /// Ignore end of sequence token
    /// used for benchmarking
    bool ignore_eos_token = 3;
}

message Request {
    /// Request ID
    uint64 id = 1;
    /// The generation context
    string inputs = 2;
    /// Context truncation
    uint32 truncate = 3;
    /// Next Token Chooser Parameters
    NextTokenChooserParameters parameters = 4;
    /// Stopping Criteria Parameters
    StoppingCriteriaParameters stopping_parameters = 5;
    /// Return prefill logprobs
    bool prefill_logprobs = 6;
}

message Batch {
    /// Batch ID
    uint64 id = 1;
    /// Individual requests
    repeated Request requests = 2;
    /// Batch size (==len(requests))
    uint32 size = 3;
    /// Maximum number of tokens this batch will grow to
    uint32 max_tokens = 4;
}

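/// A CachedBatch refers to a batch the server already holds in its cache:
/// it carries only the batch id and the ids of the requests still in it,
/// not the full Request payloads. As the request/response messages below
/// suggest, Prefill takes a full Batch and returns a CachedBatch, which
/// subsequent Decode and FilterBatch calls then refer to.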
message CachedBatch {
    /// Batch ID
    uint64 id = 1;
    /// Individual requests ids
    repeated uint64 request_ids = 2;
    /// Batch size (==len(request_ids))
    uint32 size = 3;
    /// Maximum number of tokens this batch will grow to
    uint32 max_tokens = 4;
}

enum FinishReason {
    FINISH_REASON_LENGTH = 0;
    FINISH_REASON_EOS_TOKEN = 1;
    FINISH_REASON_STOP_SEQUENCE = 2;
}

message GeneratedText {
    /// Output
    string text = 1;
    /// Number of generated tokens
    uint32 generated_tokens = 2;
    /// Finish reason
    FinishReason finish_reason = 3;
    /// Seed
    optional uint64 seed = 4;
}

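/// The messages below describe the streamed results: each Prefill / Decode
/// response carries a list of Generation messages (presumably one per request
/// still in the batch), and generated_text is likely only populated on the
/// final Generation of a request, once a FinishReason applies.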
message PrefillTokens {
    /// Prefill Token IDs
    repeated uint32 ids = 1;
    /// Prefill Logprobs
    repeated float logprobs = 2;
    /// Prefill tokens
    repeated string texts = 3;
}

message Generation {
    /// Request ID
    uint64 request_id = 1;
    /// Prefill tokens (optional)
    PrefillTokens prefill_tokens = 2;
    /// Token ID
    uint32 token_id = 3;
    /// Logprob
    float token_logprob = 4;
    /// Text
    string token_text = 5;
    /// Is it a special token
    bool token_is_special = 6;
    /// Complete generated text
    optional GeneratedText generated_text = 7;
}

message FilterBatchRequest {
    /// Batch ID
    uint64 batch_id = 1;
    /// Requests to keep
    repeated uint64 request_ids = 2;
}

message FilterBatchResponse {
    /// Filtered Batch (cached)
    CachedBatch batch = 1;
}


message PrefillRequest {
    /// Batch
    Batch batch = 1;
}

message PrefillResponse {
    /// Generation
    repeated Generation generations = 1;
    /// Next batch (cached)
    optional CachedBatch batch = 2;
}

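/// Decode accepts several cached batches at once; since DecodeResponse
/// returns a single optional CachedBatch, the shard is presumably expected
/// to merge them into one batch while decoding the next token.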
message DecodeRequest {
    /// Cached batches
    repeated CachedBatch batches = 1;
}

message DecodeResponse {
    /// Decodes
    repeated Generation generations = 1;
    /// Next batch (cached)
    optional CachedBatch batch = 2;
}

message WarmupRequest {
    /// Batch to warmup on
    Batch batch = 1;
    /// Maximum number of tokens that the client will send
    uint32 max_total_tokens = 2;
}

/// Empty response
message WarmupResponse {}