/// Payload validation logic
use crate::config::Config;
use crate::validation::ValidationError::{BestOfSampling, BestOfSeed, EmptyInput};
use crate::{GenerateParameters, GenerateRequest, GrammarType};
use base64::{engine::general_purpose::STANDARD, Engine};
use image::{io::Reader as ImageReader, ImageFormat};
use jsonschema::{Draft, JSONSchema};
use rand::{thread_rng, Rng};
use serde_json::Value;
use std::io::Cursor;
use text_generation_client::{Chunk, Image, InputChunk};
use thiserror::Error;
use tokenizers::tokenizer::Tokenizer;
use tokio::sync::mpsc;
use tokio::sync::oneshot;
use tracing::{instrument, Span};
use {once_cell::sync::Lazy, regex::Regex};

/// Validation
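/// Validates request payloads and, when a tokenizer is configured, fans
/// tokenization out to a pool of background workers through a round-robin
/// channel.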
#[derive(Debug, Clone)]
pub struct Validation {
    /// Validation parameters
    max_best_of: usize,
    max_stop_sequences: usize,
    max_top_n_tokens: u32,
    max_input_length: usize,
    max_total_tokens: usize,
    disable_grammar_support: bool,
    /// Channel to communicate with the background tokenization task
    sender: Option<mpsc::UnboundedSender<TokenizerRequest>>,
}

impl Validation {
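    /// Build a `Validation` instance. When a tokenizer is provided, this
    /// spawns `workers` blocking tokenization tasks and a round-robin
    /// dispatcher that distributes requests across them.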
    #[allow(clippy::too_many_arguments)]
    pub(crate) fn new(
        workers: usize,
        tokenizer: Option<Tokenizer>,
        config: Option<Config>,
        max_best_of: usize,
        max_stop_sequences: usize,
        max_top_n_tokens: u32,
        max_input_length: usize,
        max_total_tokens: usize,
        disable_grammar_support: bool,
    ) -> Self {
        // If we have a fast tokenizer
        let sender = if let Some(tokenizer) = tokenizer {
            // Create round robin channel
            let (validation_sender, validation_round_robin_receiver) = mpsc::unbounded_channel();
            let mut senders = Vec::with_capacity(workers);

            // Create workers
            for _ in 0..workers {
                let tokenizer_clone = tokenizer.clone();
                let config_clone = config.clone();
                let (tokenizer_sender, tokenizer_receiver) = mpsc::unbounded_channel();
                senders.push(tokenizer_sender);

                // Spawn worker
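                // (tokenization is CPU-bound, so it runs on the blocking
                // thread pool rather than on the async executor threads)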
                tokio::task::spawn_blocking(move || {
                    tokenizer_worker(tokenizer_clone, config_clone, tokenizer_receiver)
                });
            }

            // Create tokenization round robin task
            tokio::spawn(round_robin_task(validation_round_robin_receiver, senders));

            Some(validation_sender)
        } else {
            None
        };

        Self {
            max_best_of,
            sender,
            max_stop_sequences,
            max_top_n_tokens,
            max_input_length,
            max_total_tokens,
            disable_grammar_support,
        }
    }

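    /// Tokenize `inputs` on a background worker. Returns `Ok(None)` when no
    /// tokenizer is configured.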
    #[instrument(skip(self, inputs))]
    pub async fn tokenize(
        &self,
        inputs: String,
        truncate: Option<usize>,
    ) -> Result<Option<(tokenizers::Encoding, Vec<InputChunk>)>, ValidationError> {
        // If we have a fast tokenizer
        if let Some(sender) = &self.sender {
            // Create response channel
            let (response_sender, response_receiver) = oneshot::channel();
            // Send request to the background validation task
            // Unwrap is safe here
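            // (the round-robin task holds the receiving end for as long as
            // any sender is alive, so the channel cannot be closed yet)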
            sender
                .send(((inputs, truncate), response_sender, Span::current()))
                .unwrap();

            // Await on response channel
            // Unwrap is safe here
            let encoding = response_receiver.await.unwrap()?;
            Ok(Some(encoding))
        } else {
            Ok(None)
        }
    }

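    /// Validate the prompt and resolve the effective `max_new_tokens`, using
    /// the tokenizer when one is available and falling back to
    /// `truncate`-based bounds otherwise.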
    #[instrument(skip(self, inputs))]
    async fn validate_input(
        &self,
        inputs: String,
        truncate: Option<usize>,
        max_new_tokens: Option<u32>,
    ) -> Result<(Vec<InputChunk>, usize, u32), ValidationError> {
        // If we have a fast tokenizer
        if let Some((encoding, inputs)) = self.tokenize(inputs.clone(), truncate).await? {
            // Compute the effective input length, honoring the truncation limit
            let input_length = if let Some(truncate) = truncate {
                std::cmp::min(encoding.len(), truncate)
            } else {
                encoding.len()
            };

            // Get total tokens
            let max_new_tokens: u32 = if let Some(max_new_tokens) = max_new_tokens {
                max_new_tokens
            } else {
                self.max_total_tokens.saturating_sub(input_length) as u32
            };
            let total_tokens = input_length + max_new_tokens as usize;

            // Validate MaxTotalTokens
            if total_tokens > self.max_total_tokens {
                return Err(ValidationError::MaxTotalTokens(
                    self.max_total_tokens,
                    input_length,
                    max_new_tokens,
                ));
            }

            // Validate InputLength
            if input_length > self.max_input_length {
                return Err(ValidationError::InputLength(
                    self.max_input_length,
                    input_length,
                ));
            }

            metrics::histogram!("tgi_request_input_length", input_length as f64);
            Ok((inputs, input_length, max_new_tokens))
        }
        // Return inputs without validation
        else {
            // In this case, we don't know the real length in tokens of the inputs
            // However, the inputs will be truncated by the python servers
            // We make sure that truncate + max_new_tokens <= self.max_total_tokens
            let max_new_tokens: u32 = if let Some(max_new_tokens) = max_new_tokens {
                max_new_tokens
            } else if let Some(truncate) = truncate {
                self.max_total_tokens.saturating_sub(truncate) as u32
            } else {
                return Err(ValidationError::UnsetMaxNewTokens);
            };
            let mut input_length = truncate.unwrap_or(self.max_input_length);

            // We don't have a tokenizer, so we have no idea how long the
            // query is; let it through and hope for the best.
            // Validate MaxNewTokens
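            // e.g. with max_input_length = 5, max_total_tokens = 6 and
            // max_new_tokens = 10, the assumed input length is clamped to 0
            // (exercised by `test_validation_max_new_tokens` below)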
            if (input_length as u32 + max_new_tokens) > self.max_total_tokens as u32 {
                input_length = input_length.saturating_sub(max_new_tokens as usize);
            }

            Ok((
                vec![Chunk::Text(inputs).into()],
                input_length,
                max_new_tokens,
            ))
        }
    }

    /// Validate a payload and get the number of tokens in the input
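    ///
    /// A minimal call-site sketch (assuming a `validation` instance and a
    /// `GenerateRequest` named `request`):
    ///
    /// ```ignore
    /// let valid = validation.validate(request).await?;
    /// tracing::info!("prompt tokens: {}", valid.input_length);
    /// ```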
    #[instrument(skip_all)]
    pub(crate) async fn validate(
        &self,
        request: GenerateRequest,
    ) -> Result<ValidGenerateRequest, ValidationError> {
        let GenerateParameters {
            best_of,
            temperature,
            repetition_penalty,
            frequency_penalty,
            top_k,
            top_p,
            typical_p,
            do_sample,
            max_new_tokens,
            stop: stop_sequences,
            truncate,
            seed,
            watermark,
            decoder_input_details,
            top_n_tokens,
            grammar,
            ..
        } = request.parameters;

        // sampling must be true when best_of > 1
        let best_of = best_of.unwrap_or(1);
        let sampling = do_sample
            || temperature.is_some()
            || top_k.is_some()
            || top_p.is_some()
            || typical_p.is_some();

        if best_of > 1 && !sampling {
            return Err(BestOfSampling);
        }

        let temperature = temperature.unwrap_or(1.0);
        if temperature <= 0.0 {
            return Err(ValidationError::Temperature);
        }

        let repetition_penalty = repetition_penalty.unwrap_or(1.0);
        if repetition_penalty <= 0.0 {
            return Err(ValidationError::RepetitionPenalty);
        }

        let frequency_penalty = frequency_penalty.unwrap_or(0.0);
        if !(-2.0..=2.0).contains(&frequency_penalty) {
            return Err(ValidationError::FrequencyPenalty);
        }

        // Different because the proto default value is not a valid value
        // for the user
        let top_p = top_p
            .map(|value| {
                if value <= 0.0 || value >= 1.0 {
                    return Err(ValidationError::TopP);
                }
                Ok(value)
            })
            .unwrap_or(Ok(1.0))?;

        let typical_p = typical_p
            .map(|value| {
                if value <= 0.0 || value >= 1.0 {
                    return Err(ValidationError::TypicalP);
                }
                Ok(value)
            })
            .unwrap_or(Ok(1.0))?;

        let top_k: u32 = top_k
            .map(|value| {
                if value <= 0 {
                    return Err(ValidationError::TopK);
                }
                Ok(value as u32)
            })
            .unwrap_or(Ok(0))?;

        if max_new_tokens == Some(0) {
            return Err(ValidationError::NegativeMaxNewTokens);
        }

        if stop_sequences.len() > self.max_stop_sequences {
            return Err(ValidationError::StopSequence(
                self.max_stop_sequences,
                stop_sequences.len(),
            ));
        }

        // If seed is None, assign a random one
        let seed = match seed {
            None => thread_rng().gen(),
            Some(seed) => {
                if best_of > 1 {
                    return Err(BestOfSeed);
                }
                seed
            }
        };

        let top_n_tokens = top_n_tokens
            .map(|value| {
                if value > self.max_top_n_tokens {
                    return Err(ValidationError::TopNTokens(self.max_top_n_tokens, value));
                }
                Ok(value)
            })
            .unwrap_or(Ok(0))?;

        // Check if inputs is empty
        if request.inputs.is_empty() {
            return Err(EmptyInput);
        }

        // Check if truncate is strictly positive and less than max_input_length
        let truncate = truncate
            .map(|value| {
                if value == 0 || value > self.max_input_length {
                    return Err(ValidationError::Truncate(self.max_input_length, value));
                }
                Ok(Some(value))
            })
            .unwrap_or(Ok(None))?;

        // Validate inputs
        let (inputs, input_length, max_new_tokens) = self
            .validate_input(request.inputs, truncate, max_new_tokens)
            .await?;

        // TODO: we should build the FSM here and pass the compiled FSM instead of the grammar
        // NOTE: this is currently difficult because we need the tokenizer in Python to build
        // the FSM and we'd have to load a copy of the tokenizer into our Pyo3 instance which
        // may be slow and memory intensive. Best case is to have a Rust implementation of the FSM
        // compiler and use that to build the FSM here.

        // Validate grammar and unpack the grammar and type for the proto message
        let grammar = match grammar {
            Some(grammar) => {
                // Ensure that grammar is not set if it's not supported
                if self.disable_grammar_support {
                    return Err(ValidationError::Grammar);
                }
                let valid_grammar = match grammar {
                    GrammarType::Json(json) => {
                        let json = match json {
                            // if the value is a string, parse it again to make
                            // sure it is valid JSON
                            Value::String(s) => serde_json::from_str(&s)
                                .map_err(|e| ValidationError::InvalidGrammar(e.to_string())),
                            Value::Object(_) => Ok(json),
                            _ => Err(ValidationError::Grammar),
                        }?;

                        // Check if the json is a valid JSONSchema
                        JSONSchema::options()
                            .with_draft(Draft::Draft202012)
                            .compile(&json)
                            .map_err(|e| ValidationError::InvalidGrammar(e.to_string()))?;

                        // Serialize json to string
                        ValidGrammar::Json(
                            serde_json::to_string(&json)
                                .map_err(|e| ValidationError::InvalidGrammar(e.to_string()))?,
                        )
                    }
                    GrammarType::Regex(regex) => ValidGrammar::Regex(regex),
                };
                Some(valid_grammar)
            }
            None => None,
        };

        let parameters = ValidParameters {
            temperature,
            repetition_penalty,
            frequency_penalty,
            top_k,
            top_p,
            typical_p,
            do_sample,
            seed,
            watermark,
            grammar,
        };
        let stopping_parameters = ValidStoppingParameters {
            max_new_tokens,
            stop_sequences,
            ignore_eos_token: false,
        };

        metrics::histogram!("tgi_request_max_new_tokens", max_new_tokens as f64);

        Ok(ValidGenerateRequest {
            inputs,
            decoder_input_details,
            input_length: input_length as u32,
            truncate: truncate.unwrap_or(self.max_input_length) as u32,
            parameters,
            stopping_parameters,
            top_n_tokens,
        })
    }

    /// Validate the best_of parameter
    #[instrument(skip_all)]
    pub(crate) fn validate_best_of(&self, best_of: usize) -> Result<usize, ValidationError> {
        if self.max_best_of == 1 && best_of != 1 {
            return Err(ValidationError::BestOfDisabled);
        }

        if best_of > self.max_best_of {
            return Err(ValidationError::BestOf(self.max_best_of, best_of));
        }

        Ok(best_of)
    }
}

/// Round robin tokenization task
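/// Forwards each incoming request to the worker channels in turn and exits
/// once the request channel is closed.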
async fn round_robin_task(
    mut receiver: mpsc::UnboundedReceiver<TokenizerRequest>,
    senders: Vec<mpsc::UnboundedSender<TokenizerRequest>>,
) {
    loop {
        for sender in &senders {
            match receiver.recv().await {
                None => return,
                Some(request) => sender.send(request).unwrap(),
            };
        }
    }
}

/// Start tokenization workers
fn tokenizer_worker(
    tokenizer: Tokenizer,
    config: Option<Config>,
    mut receiver: mpsc::UnboundedReceiver<TokenizerRequest>,
) {
    // Loop over requests
    while let Some(((inputs, truncate), response_tx, parent_span)) = receiver.blocking_recv() {
        parent_span.in_scope(|| {
            response_tx
                .send(prepare_input(inputs, truncate, &tokenizer, &config))
                .unwrap_or(())
        })
    }
}

fn format_from_mimetype(mimetype: &str) -> Option<ImageFormat> {
    match mimetype {
        "image/png" => Some(ImageFormat::Png),
        "image/jpeg" => Some(ImageFormat::Jpeg),
        "image/jpg" => Some(ImageFormat::Jpeg),
        "image/gif" => Some(ImageFormat::Gif),
        "image/webp" => Some(ImageFormat::WebP),
        "image/tiff" => Some(ImageFormat::Tiff),
        // "image/pnm"=>Some(ImageFormat::Pnm),
        // "image/tga"=>Some(ImageFormat::Tga),
        // "image/dds"=>Some(ImageFormat::Dds),
        // "image/bmp"=>Some(ImageFormat::Bmp),
        // "image/ico"=>Some(ImageFormat::Ico),
        // "image/x-exr"=>Some(ImageFormat::OpenExr),
        _ => None,
    }
}

fn format_to_mimetype(format: ImageFormat) -> String {
    match format {
        ImageFormat::Png => "image/png",
        ImageFormat::Jpeg => "image/jpeg",
        ImageFormat::Gif => "image/gif",
        ImageFormat::WebP => "image/webp",
        ImageFormat::Tiff => "image/tiff",
        _ => "application/octet-stream",
    }
    .to_string()
}

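/// Fetch an image referenced by a markdown image tag: either a remote
/// `![](http://...)` / `![](https://...)` URL or an inline
/// `![](data:<mimetype>;base64,<data>)` URI. Returns the raw bytes, the
/// mimetype and the image height and width in pixels.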
fn fetch_image(input: &str) -> Result<(Vec<u8>, String, usize, usize), ValidationError> {
    if input.starts_with("![](http://") || input.starts_with("![](https://") {
        let url = &input["![](".len()..input.len() - 1];
        let data = reqwest::blocking::get(url)?.bytes()?;

        let format = image::guess_format(&data)?;
        // TODO Remove this clone
        let img = ImageReader::with_format(Cursor::new(data.clone()), format).decode()?;
        let height: usize = img.height().try_into()?;
        let width: usize = img.width().try_into()?;
        let mimetype = format_to_mimetype(format);
476
        Ok((data.to_vec(), mimetype, height, width))
    } else if input.starts_with("![](data:") {
        // Remove ![](....)
        let content = &input["![](data:".len()..input.len() - 1];
        let tokens: Vec<_> = content.split(';').collect();
        if tokens.len() != 2 {
            return Err(ValidationError::InvalidImageContent(content.to_string()));
        }
        let mimetype = tokens[0];
        let content = tokens[1];

        if !content.starts_with("base64,") {
            return Err(ValidationError::InvalidImageContent(content.to_string()));
        }

        let data = STANDARD.decode(content["base64,".len()..].as_bytes())?;
        let img = if let Some(format) = format_from_mimetype(mimetype) {
            ImageReader::with_format(Cursor::new(&data), format).decode()?
        } else {
            ImageReader::new(Cursor::new(&data))
                .with_guessed_format()
                .map_err(|_io_error| ValidationError::InvalidImageContent(content.to_string()))?
                .decode()?
        };

        let height: usize = img.height().try_into()?;
        let width: usize = img.width().try_into()?;
        Ok((data, mimetype.to_string(), height, width))
    } else {
        Err(ValidationError::InvalidImageContent(input.to_string()))
    }
}

/// Get input length and optionally truncate it
fn prepare_input(
    inputs: String,
    _truncate: Option<usize>,
    tokenizer: &Tokenizer,
    config: &Option<Config>,
) -> Result<(tokenizers::Encoding, Vec<InputChunk>), ValidationError> {
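    // Matches markdown image tags of the form `![](...)`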
    static RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"!\[\]\([^\)]*\)").unwrap());
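    // For multimodal configs, split the input around image tags: plain text
    // becomes `Chunk::Text`, each image is fetched into a `Chunk::Image`, and
    // the tokenizer query receives the model-specific number of `<image>`
    // placeholder tokens in its place.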
    let (tokenizer_query, input_chunks) = match config {
        Some(Config::LlavaNext(config)) => {
            let mut input_chunks = Vec::new();
            let mut tokenizer_query = String::with_capacity(inputs.len());
            let mut start = 0;
            for chunk in RE.find_iter(&inputs) {
                let chunk_start = chunk.start();
                let chunk_end = chunk.end();
                if chunk_start != start {
                    input_chunks.push(Chunk::Text(inputs[start..chunk_start].to_string()).into());
                    tokenizer_query.push_str(&inputs[start..chunk_start]);
                }
                let (data, mimetype, height, width) = fetch_image(&inputs[chunk_start..chunk_end])?;
                let slots = config.get_number_of_features(height, width);
                input_chunks.push(Chunk::Image(Image { data, mimetype }).into());
                tokenizer_query.push_str(&"<image>".repeat(slots));
                start = chunk_end;
            }
            if start != inputs.len() {
                input_chunks.push(Chunk::Text(inputs[start..].to_string()).into());
                tokenizer_query.push_str(&inputs[start..]);
            }
            (tokenizer_query, input_chunks)
        }
        Some(Config::Paligemma(config)) => {
            let mut input_chunks = Vec::new();
            let mut tokenizer_query = String::with_capacity(inputs.len());
            let mut start = 0;
            for chunk in RE.find_iter(&inputs) {
                let chunk_start = chunk.start();
                let chunk_end = chunk.end();
                if chunk_start != start {
                    input_chunks.push(Chunk::Text(inputs[start..chunk_start].to_string()).into());
                    tokenizer_query.push_str(&inputs[start..chunk_start]);
                }
                let (data, mimetype, height, width) = fetch_image(&inputs[chunk_start..chunk_end])?;
                let slots = config.get_number_of_features(height, width);
                input_chunks.push(Chunk::Image(Image { data, mimetype }).into());
                tokenizer_query.push_str(&"<image>".repeat(slots));
                start = chunk_end;
            }
            if start != inputs.len() {
                input_chunks.push(Chunk::Text(inputs[start..].to_string()).into());
                tokenizer_query.push_str(&inputs[start..]);
            }
            (tokenizer_query, input_chunks)
        }
        Some(Config::Idefics2(config)) => {
            let mut input_chunks = Vec::new();
            let mut tokenizer_query = String::with_capacity(inputs.len());
            let mut start = 0;
            for chunk in RE.find_iter(&inputs) {
                let chunk_start = chunk.start();
                let chunk_end = chunk.end();
                if chunk_start != start {
                    input_chunks.push(Chunk::Text(inputs[start..chunk_start].to_string()).into());
                    tokenizer_query.push_str(&inputs[start..chunk_start]);
                }
                let (data, mimetype, height, width) = fetch_image(&inputs[chunk_start..chunk_end])?;
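                // Idefics2 wraps the image placeholders in
                // <fake_token_around_image> markers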
                let slots = config.get_number_of_features(height, width);
                tokenizer_query.push_str("<fake_token_around_image>");
                tokenizer_query.push_str(&"<image>".repeat(slots));
                tokenizer_query.push_str("<fake_token_around_image>");

                input_chunks.push(Chunk::Image(Image { data, mimetype }).into());
                start = chunk_end;
            }
            if start != inputs.len() {
                input_chunks.push(Chunk::Text(inputs[start..].to_string()).into());
                tokenizer_query.push_str(&inputs[start..]);
            }
            (tokenizer_query, input_chunks)
        }
        Some(Config::Idefics) => {
            let mut input_chunks = Vec::new();
            let mut tokenizer_query = String::with_capacity(inputs.len());
            let mut start = 0;
            for chunk in RE.find_iter(&inputs) {
                let chunk_start = chunk.start();
                let chunk_end = chunk.end();
                if chunk_start != start {
                    input_chunks.push(Chunk::Text(inputs[start..chunk_start].to_string()).into());
                    tokenizer_query.push_str(&inputs[start..chunk_start]);
                }
                let (data, mimetype, _height, _width) =
                    fetch_image(&inputs[chunk_start..chunk_end])?;
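                // Idefics uses a single <image> placeholder per image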
                let slots = 1;
                tokenizer_query.push_str(&"<image>".repeat(slots));
                input_chunks.push(Chunk::Image(Image { data, mimetype }).into());
                start = chunk_end;
            }
            if start != inputs.len() {
                input_chunks.push(Chunk::Text(inputs[start..].to_string()).into());
                tokenizer_query.push_str(&inputs[start..]);
            }
            (tokenizer_query, input_chunks)
        }
        _ => (inputs.clone(), vec![Chunk::Text(inputs).into()]),
    };

    // Get the number of tokens in the input
    let encoding = tokenizer
        .encode(tokenizer_query, true)
        .map_err(|err| ValidationError::Tokenizer(err.to_string()))?;

    Ok((encoding, input_chunks))
}

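/// `(inputs, truncate)` paired with a oneshot sender for the tokenization
/// result and the tracing span of the originating request.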
type TokenizerRequest = (
    (String, Option<usize>),
    oneshot::Sender<Result<(tokenizers::Encoding, Vec<InputChunk>), ValidationError>>,
    Span,
);

#[derive(Debug, Clone)]
pub(crate) enum ValidGrammar {
    Json(String),
    Regex(String),
}

#[derive(Debug, Clone)]
pub(crate) struct ValidParameters {
    /// Exponential scaling of the output probability distribution
    pub temperature: f32,
    /// Restrict sampling to the k highest probability tokens
    pub top_k: u32,
    /// Restrict sampling to the smallest set of top tokens whose cumulative
    /// probability does not exceed prob_cut_off
    pub top_p: f32,
    /// Restrict sampling to the set of most typical tokens whose cumulative
    /// probability does not exceed prob_cut_off
    pub typical_p: f32,
    /// Apply sampling to the logits
    pub do_sample: bool,
    /// Random seed for sampling
    pub seed: u64,
    /// Repetition penalty
    pub repetition_penalty: f32,
    /// Frequency penalty
    pub frequency_penalty: f32,
    /// Token watermarking using "A Watermark for Large Language Models"
    pub watermark: bool,
    /// Grammar constraint (applied if not empty)
    pub grammar: Option<ValidGrammar>,
}

#[derive(Debug, Clone)]
pub(crate) struct ValidStoppingParameters {
    /// Maximum number of generated tokens
    pub max_new_tokens: u32,
    /// Optional stopping sequences
    pub stop_sequences: Vec<String>,
    /// Ignore end of sequence token (used for benchmarking)
    pub ignore_eos_token: bool,
}

#[derive(Debug, Clone)]
pub(crate) struct ValidGenerateRequest {
    pub inputs: Vec<InputChunk>,
    pub input_length: u32,
    pub truncate: u32,
    pub decoder_input_details: bool,
    pub parameters: ValidParameters,
    pub stopping_parameters: ValidStoppingParameters,
    pub top_n_tokens: u32,
}

#[derive(Error, Debug)]
pub enum ValidationError {
    #[error("`best_of` must be > 0 and <= {0}. Given: {1}")]
    BestOf(usize, usize),
    #[error("`best_of` != 1 is not allowed for this endpoint")]
    BestOfDisabled,
    #[error("you must use sampling when `best_of` is > 1")]
    BestOfSampling,
    #[error("`seed` must not be set when `best_of` > 1")]
    BestOfSeed,
    #[error("`best_of` != 1 is not supported when streaming tokens")]
    BestOfStream,
    #[error("`top_n_tokens` must be >= 0 and <= {0}. Given: {1}")]
    TopNTokens(u32, u32),
    #[error("`top_n_tokens` != 0 is not allowed for this endpoint")]
    TopNTokensDisabled,
    #[error("`decoder_input_details` == true is not supported when streaming tokens")]
    PrefillDetailsStream,
    #[error("`temperature` must be strictly positive")]
    Temperature,
    #[error("`repetition_penalty` must be strictly positive")]
    RepetitionPenalty,
    #[error("`frequency_penalty` must be >= -2.0 and <= 2.0")]
    FrequencyPenalty,
    #[error("`top_p` must be > 0.0 and < 1.0")]
    TopP,
    #[error("`top_k` must be strictly positive")]
    TopK,
    #[error("`truncate` must be strictly positive and less than {0}. Given: {1}")]
    Truncate(usize, usize),
    #[error("`typical_p` must be > 0.0 and < 1.0")]
    TypicalP,
    #[error("one of `max_new_tokens` or `truncate` must be set if a fast tokenizer is not in use")]
    UnsetMaxNewTokens,
    #[error("`max_new_tokens` must be strictly positive")]
    NegativeMaxNewTokens,
    #[error("`max_new_tokens` must be <= {0}. Given: {1}")]
    MaxNewTokens(usize, u32),
    #[error("`inputs` tokens + `max_new_tokens` must be <= {0}. Given: {1} `inputs` tokens and {2} `max_new_tokens`")]
    MaxTotalTokens(usize, usize, u32),
    #[error("`inputs` must have less than {0} tokens. Given: {1}")]
    InputLength(usize, usize),
    #[error("`inputs` cannot be empty")]
    EmptyInput,
    #[error("`stop` supports up to {0} stop sequences. Given: {1}")]
    StopSequence(usize, usize),
    #[error("tokenizer error {0}")]
    Tokenizer(String),
    #[error("grammar is not supported")]
    Grammar,
    #[error("grammar is not valid: {0}")]
    InvalidGrammar(String),
    #[error("base64 encoding is invalid: {0}")]
    InvalidBase64(#[from] base64::DecodeError),
    #[error("invalid image: {0}")]
    InvalidImage(#[from] image::ImageError),
    #[error("invalid integer: {0}")]
    InvalidInt(#[from] core::num::TryFromIntError),
    #[error("invalid image content: {0}")]
    InvalidImageContent(String),
    #[error("Could not fetch image: {0}")]
    FailedFetchImage(#[from] reqwest::Error),
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::config::{PaliTextConfig, Paligemma};
    use crate::default_parameters;
    use crate::tests::get_tokenizer;

    #[tokio::test]
    async fn test_validation_max_new_tokens() {
        let tokenizer = None;
        let max_best_of = 2;
        let max_stop_sequence = 3;
        let max_top_n_tokens = 4;
        let max_input_length = 5;
        let max_total_tokens = 6;
        let workers = 1;
        let disable_grammar_support = true;
        let config = None;
        let validation = Validation::new(
            workers,
            tokenizer,
            config,
            max_best_of,
            max_stop_sequence,
            max_top_n_tokens,
            max_input_length,
            max_total_tokens,
            disable_grammar_support,
        );

        let max_new_tokens = 10;
        match validation
            .validate_input("Hello".to_string(), None, Some(max_new_tokens))
            .await
        {
            // Err(ValidationError::MaxNewTokens(1, 10)) => (),
            Ok((_s, 0, 10)) => (),
            r => panic!("Unexpected not max new tokens: {r:?}"),
        }
    }

    #[tokio::test]
    async fn test_validation_input_length() {
        let tokenizer = Some(get_tokenizer().await);
        let max_best_of = 2;
        let max_stop_sequence = 3;
        let max_top_n_tokens = 4;
        let max_input_length = 5;
        let max_total_tokens = 6;
        let disable_grammar_support = true;
        let workers = 1;
        let config = None;
        let validation = Validation::new(
            workers,
            tokenizer,
            config,
            max_best_of,
            max_stop_sequence,
            max_top_n_tokens,
            max_input_length,
            max_total_tokens,
            disable_grammar_support,
        );

        let max_new_tokens = 10;
        match validation
            .validate_input("Hello".to_string(), None, Some(max_new_tokens))
            .await
        {
            Err(ValidationError::MaxTotalTokens(6, 1, 10)) => (),
            _ => panic!("Unexpected not max new tokens"),
        }
    }

    #[tokio::test]
    async fn test_validation_best_of_sampling() {
        let tokenizer = Some(get_tokenizer().await);
        let max_best_of = 2;
        let max_stop_sequence = 3;
        let max_top_n_tokens = 4;
        let max_input_length = 5;
        let max_total_tokens = 6;
        let workers = 1;
        let disable_grammar_support = true;
        let config = None;
        let validation = Validation::new(
            workers,
            tokenizer,
            config,
            max_best_of,
            max_stop_sequence,
            max_top_n_tokens,
            max_input_length,
            max_total_tokens,
            disable_grammar_support,
        );
        match validation
            .validate(GenerateRequest {
                inputs: "Hello".to_string(),
                parameters: GenerateParameters {
                    best_of: Some(2),
                    do_sample: false,
                    ..default_parameters()
                },
            })
            .await
        {
            Err(ValidationError::BestOfSampling) => (),
            _ => panic!("Unexpected not best of sampling"),
        }
    }

    #[tokio::test]
    async fn test_validation_top_p() {
        let tokenizer = Some(get_tokenizer().await);
        let max_best_of = 2;
        let max_stop_sequence = 3;
        let max_top_n_tokens = 4;
        let max_input_length = 5;
        let max_total_tokens = 106;
        let workers = 1;
        let disable_grammar_support = true;
        let config = None;
        let validation = Validation::new(
            workers,
            tokenizer,
            config,
            max_best_of,
            max_stop_sequence,
            max_top_n_tokens,
            max_input_length,
            max_total_tokens,
            disable_grammar_support,
        );
        match validation
            .validate(GenerateRequest {
                inputs: "Hello".to_string(),
                parameters: GenerateParameters {
                    top_p: Some(1.0),
                    max_new_tokens: Some(5),
                    ..default_parameters()
                },
            })
            .await
        {
            Err(ValidationError::TopP) => (),
            _ => panic!("Unexpected top_p"),
        }

        match validation
            .validate(GenerateRequest {
                inputs: "Hello".to_string(),
                parameters: GenerateParameters {
                    top_p: Some(0.99),
                    max_new_tokens: Some(5),
                    ..default_parameters()
                },
            })
            .await
        {
            Ok(_) => (),
            _ => panic!("Unexpected top_p error"),
        }

        let valid_request = validation
            .validate(GenerateRequest {
                inputs: "Hello".to_string(),
                parameters: GenerateParameters {
                    top_p: None,
                    max_new_tokens: Some(5),
                    ..default_parameters()
                },
            })
            .await
            .unwrap();
        // Users may not explicitly request top_p == 1.0, but it is the resolved default value.
        assert_eq!(valid_request.parameters.top_p, 1.0);
    }

    #[tokio::test]
    async fn test_validation_top_n_tokens() {
        let tokenizer = Some(get_tokenizer().await);
        let max_best_of = 2;
        let max_stop_sequences = 3;
        let max_top_n_tokens = 4;
        let max_input_length = 5;
        let max_total_tokens = 106;
        let workers = 1;
        let disable_grammar_support = true;
        let config = None;
        let validation = Validation::new(
            workers,
            tokenizer,
            config,
            max_best_of,
            max_stop_sequences,
            max_top_n_tokens,
            max_input_length,
            max_total_tokens,
            disable_grammar_support,
        );
        match validation
            .validate(GenerateRequest {
                inputs: "Hello".to_string(),
                parameters: GenerateParameters {
                    top_n_tokens: Some(5),
                    max_new_tokens: Some(5),
                    ..default_parameters()
                },
            })
            .await
        {
            Err(ValidationError::TopNTokens(4, 5)) => (),
            _ => panic!("Unexpected top_n_tokens"),
        }

        validation
            .validate(GenerateRequest {
                inputs: "Hello".to_string(),
                parameters: GenerateParameters {
                    top_n_tokens: Some(4),
                    max_new_tokens: Some(5),
                    ..default_parameters()
                },
            })
            .await
            .unwrap();

        validation
            .validate(GenerateRequest {
                inputs: "Hello".to_string(),
                parameters: GenerateParameters {
                    top_n_tokens: Some(0),
                    max_new_tokens: Some(5),
                    ..default_parameters()
                },
            })
            .await
            .unwrap();

        let valid_request = validation
            .validate(GenerateRequest {
                inputs: "Hello".to_string(),
                parameters: GenerateParameters {
                    top_n_tokens: None,
                    max_new_tokens: Some(5),
                    ..default_parameters()
                },
            })
            .await
            .unwrap();

        assert_eq!(valid_request.top_n_tokens, 0);
    }

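    // A base64-encoded 1x1 GIF used to exercise image chunk handling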
    static PIXEL_GIF: &str = "R0lGODdhAQABAIEAAP///wAAAAAAAAAAACwAAAAAAQABAAAIBAABBAQAOw==";

    #[tokio::test]
    async fn test_prepare_input_chunks() {
        let pixel_data = STANDARD.decode(PIXEL_GIF).unwrap();

        let tokenizer = Some(get_tokenizer().await);

        let max_best_of = 2;
        let max_stop_sequence = 3;
        let max_top_n_tokens = 4;
        let max_input_length = 5;
        let max_total_tokens = 6;
        let disable_grammar_support = true;
        let workers = 1;
        let config = Config::Paligemma(Paligemma {
            text_config: PaliTextConfig {
                num_image_tokens: 1,
            },
        });
        let validation = Validation::new(
            workers,
            tokenizer,
            Some(config),
            max_best_of,
            max_stop_sequence,
            max_top_n_tokens,
            max_input_length,
            max_total_tokens,
            disable_grammar_support,
        );

        let chunks = match validation
            .tokenize(
                format!("test![](data:image/gif;base64,{})", PIXEL_GIF),
                None,
            )
            .await
        {
            Ok(Some((_encoding, chunks))) => chunks,
            _ => panic!("Unexpected tokenization failure"),
        };

        assert!(
            chunks
                == vec![
                    Chunk::Text("test".to_string()).into(),
                    Chunk::Image(Image {
                        data: pixel_data.clone(),
                        mimetype: "image/gif".to_string()
                    })
                    .into()
                ],
            "Failed to process images",
        );
    }
}