/// Single shard Client
use crate::pb::generate::v2::text_generation_service_client::TextGenerationServiceClient;
use crate::pb::generate::v2::*;
use crate::Result;
use grpc_metadata::InjectTelemetryContext;
use std::cmp::min;
use std::time::Duration;
use tonic::transport::{Channel, Uri};
use tracing::instrument;

/// Text Generation Inference gRPC client
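///
/// A minimal usage sketch (not from this file): the address below is made up, and
/// the calls are assumed to run inside an async fn that returns `crate::Result`.
///
/// ```ignore
/// let uri: Uri = "http://127.0.0.1:3000".parse().unwrap();
/// let mut client = Client::connect(uri).await?;
/// let info = client.info().await?;
/// let health = client.health().await?;
/// ```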
#[derive(Debug, Clone)]
pub struct Client {
    stub: TextGenerationServiceClient<Channel>,
}

impl Client {
    /// Returns a client connected to the given url
    pub async fn connect(uri: Uri) -> Result<Self> {
        let channel = Channel::builder(uri).connect().await?;

        Ok(Self {
            stub: TextGenerationServiceClient::new(channel),
        })
    }

    /// Returns a client connected to the given unix socket
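    ///
    /// A hedged sketch (the socket path is hypothetical):
    ///
    /// ```ignore
    /// let mut client = Client::connect_uds("/tmp/text-generation-server-0".to_string()).await?;
    /// ```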
    pub async fn connect_uds(path: String) -> Result<Self> {
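        // The placeholder address below is required by `Channel::from_shared` but is
        // never dialed: the custom connector ignores it and opens the Unix socket at
        // `path` instead.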
        let channel = Channel::from_shared("http://[::]:50051".to_string())
            .unwrap()
            .connect_with_connector(tower::service_fn(move |_: Uri| {
                tokio::net::UnixStream::connect(path.clone())
            }))
            .await?;

        Ok(Self {
            stub: TextGenerationServiceClient::new(channel),
        })
    }

    /// Returns a list of URIs or Unix sockets for all shards
    #[instrument(skip(self))]
    pub async fn service_discovery(&mut self) -> Result<Vec<String>> {
        let request = tonic::Request::new(ServiceDiscoveryRequest {}).inject_context();
        let response = self.stub.service_discovery(request).await?;
        let urls = response
            .into_inner()
            .urls
            .into_iter()
            // Remove unix socket prefix
            .map(|url| match url.strip_prefix("unix://") {
                None => url,
                Some(stripped_url) => stripped_url.to_string(),
            })
            .collect();
        Ok(urls)
    }

    /// Get model info
    #[instrument(skip(self))]
    pub async fn info(&mut self) -> Result<InfoResponse> {
        let request = tonic::Request::new(InfoRequest {}).inject_context();
        let response = self.stub.info(request).await?.into_inner();
        Ok(response)
    }

    /// Get model health
    #[instrument(skip(self))]
    pub async fn health(&mut self) -> Result<HealthResponse> {
        let request = tonic::Request::new(HealthRequest {}).inject_context();
        let response = self.stub.health(request).await?.into_inner();
        Ok(response)
    }

    /// Clear the past generations cache
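    ///
    /// Passing `None` asks the shard to drop every cached batch (inferred from the
    /// optional `id` field of `ClearCacheRequest`); a sketch:
    ///
    /// ```ignore
    /// client.clear_cache(None).await?;
    /// ```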
    #[instrument(skip(self))]
    pub async fn clear_cache(&mut self, batch_id: Option<u64>) -> Result<()> {
        let request = tonic::Request::new(ClearCacheRequest { id: batch_id }).inject_context();
        self.stub.clear_cache(request).await?;
        Ok(())
    }

    /// Filter a cached batch
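    ///
    /// Keeps only the given `request_ids` in the cached batch `batch_id` and returns
    /// the filtered batch; a sketch with made-up ids:
    ///
    /// ```ignore
    /// let filtered = client.filter_batch(0, vec![2, 3]).await?;
    /// ```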
    #[instrument(skip(self))]
    pub async fn filter_batch(
        &mut self,
        batch_id: u64,
        request_ids: Vec<u64>,
    ) -> Result<Option<CachedBatch>> {
        let request = tonic::Request::new(FilterBatchRequest {
            batch_id,
            request_ids,
        })
        .inject_context();
        let filtered_batch = self.stub.filter_batch(request).await?.into_inner();
        Ok(filtered_batch.batch)
    }

    /// Warmup on a max size batch
    ///
    /// Returns the maximum number of tokens supported by the hardware
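    ///
    /// A usage sketch (the limits below are made-up values):
    ///
    /// ```ignore
    /// let max_supported_total_tokens = client.warmup(1024, 4096, 2048, None).await?;
    /// ```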
    #[instrument(skip_all)]
    pub async fn warmup(
        &mut self,
        max_input_length: u32,
        max_prefill_tokens: u32,
        max_total_tokens: u32,
        max_batch_size: Option<usize>,
    ) -> Result<Option<u32>> {
        let mut n_tokens = 0;
        let mut requests = Vec::new();
        // Create requests
        while n_tokens < max_prefill_tokens {
            let truncate = min(max_input_length, max_prefill_tokens - n_tokens);

            let mut inputs = String::new();
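            // Prepend a tiny base64-encoded placeholder image so that warmup also
            // exercises the image-input path of multimodal models; text-only models
            // are assumed to treat it as ordinary prompt text.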
            inputs.push_str("![](data:image/jpeg;base64,iVBORw0KGgoAAAANSUhEUgAAABQAAAAUCAIAAAAC64paAAABg2lDQ1BJQ0MgcHJvZmlsZQAAKJF9kT1Iw0AcxV/TSotUROxQxCFDdbKLijjWKhShQqgVWnUwufQLmrQkKS6OgmvBwY/FqoOLs64OroIg+AHi7OCk6CIl/i8ptIjx4Lgf7+497t4BQqvKNDOQADTdMjKppJjLr4rBVwQQwhAERGVm1uckKQ3P8XUPH1/v4jzL+9yfY0AtmAzwicQJVjcs4g3imU2rznmfOMLKskp8Tjxh0AWJH7muuPzGueSwwDMjRjYzTxwhFks9rPQwKxsa8TRxTNV0yhdyLquctzhr1Qbr3JO/MFzQV5a5TnMUKSxiCRJEKGiggiosxGnVSTGRof2kh3/E8UvkUshVASPHAmrQIDt+8D/43a1ZnJp0k8JJoO/Ftj/GgOAu0G7a9vexbbdPAP8zcKV3/bUWMPtJerOrxY6AwW3g4rqrKXvA5Q4QfarLhuxIfppCsQi8n9E35YHhW6B/ze2ts4/TByBLXaVvgINDYLxE2ese7w719vbvmU5/PycecohsjayNAAAACXBIWXMAAC4jAAAuIwF4pT92AAAAB3RJTUUH6AQIEQMnlTSSjwAAABl0RVh0Q29tbWVudABDcmVhdGVkIHdpdGggR0lNUFeBDhcAAAASSURBVDjLY2AYBaNgFIyCoQsABMQAAeRw1DoAAAAASUVORK5CYII=");
            inputs.push_str(&"_test ".to_string().repeat(max_input_length as usize));

            requests.push(Request {
                id: 0,
                // We truncate the input on the server side to be sure that it has the correct size
                inputs,
                truncate,
                // Set sampling parameters so that these operations are also accounted for in the max memory estimate
                parameters: Some(NextTokenChooserParameters {
                    temperature: 0.9,
                    top_k: 10,
                    top_p: 0.9,
                    typical_p: 0.9,
                    do_sample: false,
                    seed: 0,
                    repetition_penalty: 1.2,
                    frequency_penalty: 0.1,
                    watermark: true,
                    grammar: String::new(),
                    grammar_type: GrammarType::None as i32,
                }),
                stopping_parameters: Some(StoppingCriteriaParameters {
                    max_new_tokens: max_total_tokens - truncate,
                    stop_sequences: vec![],
                    ignore_eos_token: true,
                }),
                prefill_logprobs: true,
                top_n_tokens: 20,
            });
            n_tokens += max_input_length;

            // Check max_batch_size
            if Some(requests.len()) == max_batch_size {
                break;
            }
        }

        let batch = Batch {
            id: 0,
            size: requests.len() as u32,
            requests,
            max_tokens: 0,
        };

        let request = tonic::Request::new(WarmupRequest {
            batch: Some(batch),
            max_input_length,
            max_prefill_tokens,
            max_total_tokens,
        })
        .inject_context();
        let response = self.stub.warmup(request).await?.into_inner();
        Ok(response.max_supported_total_tokens)
    }

    /// Generate one token for each request in the given batch
    ///
    /// Returns a Generation for each request in the batch
    /// and the next cached batch
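    ///
    /// A sketch of the prefill/decode loop (`batch` is assumed to be an already
    /// populated `Batch`):
    ///
    /// ```ignore
    /// let (generations, cached_batch, timings) = client.prefill(batch).await?;
    /// if let Some(cached) = cached_batch {
    ///     let (next_generations, next_batch, _timings) = client.decode(vec![cached]).await?;
    /// }
    /// ```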
    #[instrument(skip_all, fields(id = &batch.id, size = &batch.size))]
    pub async fn prefill(
        &mut self,
        batch: Batch,
    ) -> Result<(Vec<Generation>, Option<CachedBatch>, PrefillTimings)> {
        let request = tonic::Request::new(PrefillRequest { batch: Some(batch) }).inject_context();
        let response = self.stub.prefill(request).await?.into_inner();
        Ok((
            response.generations,
            response.batch,
            PrefillTimings::new(response.forward_ns, response.decode_ns, response.total_ns),
        ))
    }

    /// Generate one token for each request in the given cached batches
    ///
    /// Returns a Generation for each request in the batches
    /// and the next cached batch
    #[instrument(skip_all, fields(size = batches.iter().map(|batch|{batch.size}).sum::<u32>()))]
    pub async fn decode(
        &mut self,
        batches: Vec<CachedBatch>,
    ) -> Result<(Vec<Generation>, Option<CachedBatch>, DecodeTimings)> {
        let request = tonic::Request::new(DecodeRequest { batches }).inject_context();
        let response = self.stub.decode(request).await?.into_inner();
        Ok((
            response.generations,
            response.batch,
            DecodeTimings::new(
                response.concat_ns,
                response.forward_ns,
                response.decode_ns,
                response.total_ns,
            ),
        ))
    }
}

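/// Timings reported by the shard for a prefill call (converted from nanoseconds)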
pub struct PrefillTimings {
    pub forward: Duration,
    pub decode: Duration,
    pub total: Duration,
}

impl PrefillTimings {
    fn new(forward_ns: u64, decode_ns: u64, total_ns: u64) -> Self {
        Self {
            forward: Duration::from_nanos(forward_ns),
            decode: Duration::from_nanos(decode_ns),
            total: Duration::from_nanos(total_ns),
        }
    }
}

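/// Timings reported by the shard for a decode call; `concat` is assumed to be set
/// only when several cached batches were concatenated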
pub struct DecodeTimings {
    pub concat: Option<Duration>,
    pub forward: Duration,
    pub decode: Duration,
    pub total: Duration,
}

impl DecodeTimings {
    fn new(concat_ns: Option<u64>, forward_ns: u64, decode_ns: u64, total_ns: u64) -> Self {
        Self {
            concat: concat_ns.map(Duration::from_nanos),
            forward: Duration::from_nanos(forward_ns),
            decode: Duration::from_nanos(decode_ns),
            total: Duration::from_nanos(total_ns),
        }
    }
}