/// Text Generation Inference Webserver
pub mod config;
mod infer;
pub mod server;
mod validation;

#[cfg(feature = "kserve")]
mod kserve;

use serde::{Deserialize, Serialize};
use tracing::warn;
use utoipa::ToSchema;
use validation::Validation;

#[derive(Clone, Deserialize, ToSchema)]
pub(crate) struct VertexInstance {
    #[schema(example = "What is Deep Learning?")]
    pub inputs: String,
    #[schema(nullable = true, default = "null", example = "null")]
    pub parameters: Option<GenerateParameters>,
}

#[derive(Deserialize, ToSchema)]
pub(crate) struct VertexRequest {
    #[serde(rename = "instances")]
    pub instances: Vec<VertexInstance>,
}

#[derive(Clone, Deserialize, ToSchema, Serialize)]
pub(crate) struct VertexResponse {
    pub predictions: Vec<String>,
}

/// Hub type
#[derive(Clone, Debug, Deserialize)]
pub struct HubModelInfo {
    #[serde(rename(deserialize = "id"))]
    pub model_id: String,
    pub sha: Option<String>,
    pub pipeline_tag: Option<String>,
}

#[derive(Debug, Clone, Deserialize, PartialEq)]
pub struct ChatTemplate {
    name: String,
    template: String,
}

#[derive(Debug, Clone, Deserialize, PartialEq)]
#[serde(untagged)]
pub enum ChatTemplateVersions {
    Single(String),
    Multiple(Vec<ChatTemplate>),
}
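
// `untagged` lets `chat_template` deserialize from either a single template
// string or a list of named templates, e.g. (illustrative)
// `"chat_template": "..."` or `"chat_template": [{"name": "default", "template": "..."}]`.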

#[derive(Debug, Clone, Deserialize, Default)]
pub struct HubTokenizerConfig {
    pub chat_template: Option<ChatTemplateVersions>,
    pub completion_template: Option<String>,
    #[serde(deserialize_with = "token_serde::deserialize")]
    pub bos_token: Option<String>,
    #[serde(deserialize_with = "token_serde::deserialize")]
    pub eos_token: Option<String>,
}

impl HubTokenizerConfig {
    pub fn from_file<P: AsRef<std::path::Path>>(filename: P) -> Option<Self> {
        let content = std::fs::read_to_string(filename).ok()?;
        serde_json::from_str(&content).ok()
    }
}

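/// Preprocessor configuration, dispatched on the `processor_class` field of
/// preprocessor_config.json: e.g. (illustrative) a file containing
/// `{"processor_class": "Idefics2Processor", "do_image_splitting": true}`
/// deserializes to `HubPreprocessorConfig::Idefics2Processor(..)`.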
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "processor_class")]
pub enum HubPreprocessorConfig {
    Idefics2Processor(Idefics2Preprocessor),
}

impl HubPreprocessorConfig {
    pub fn from_file<P: AsRef<std::path::Path>>(filename: P) -> Option<Self> {
        let content = std::fs::read_to_string(filename).ok()?;
        serde_json::from_str(&content).ok()
    }
}

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Idefics2Preprocessor {
    #[serde(default)]
    do_image_splitting: bool,
}

#[derive(Debug, Clone, Deserialize, Default)]
pub struct HubProcessorConfig {
    pub chat_template: Option<ChatTemplateVersions>,
    pub image_seq_len: usize,
    pub processor_class: Option<String>,
}

impl HubProcessorConfig {
    pub fn from_file<P: AsRef<std::path::Path>>(filename: P) -> Option<Self> {
        let content = std::fs::read_to_string(filename).ok()?;
        serde_json::from_str(&content).ok()
    }
}

#[derive(Clone, Debug, Deserialize, ToSchema, Serialize)]
#[serde(tag = "type", content = "value")]
pub(crate) enum GrammarType {
    /// A string that represents a [JSON Schema](https://json-schema.org/).
    ///
    /// JSON Schema is a declarative language that allows you to annotate JSON documents
    /// with types and descriptions.
    #[serde(rename = "json")]
    #[serde(alias = "json_object")]
    #[schema(example = json ! ({"properties": {"location":{"type": "string"}}}))]
    Json(serde_json::Value),
    #[serde(rename = "regex")]
    Regex(String),
}
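
// With `tag = "type"` and `content = "value"`, a grammar is sent on the wire as,
// e.g. (illustrative), `{"type": "regex", "value": "[0-9]+"}` or
// `{"type": "json", "value": { /* JSON Schema */ }}`.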

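/// Deserializes `bos_token`/`eos_token` entries that may be either a plain
/// string or a structured `AddedToken` object. Sketch of the accepted shapes:
/// `"<s>"` -> `Some("<s>")`, `{"content": "<s>", ...}` -> `Some("<s>")`,
/// `null` -> `None`.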
mod token_serde {
    use super::*;
    use serde::de;
    use serde::Deserializer;
    use serde_json::Value;

    pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<String>, D::Error>
    where
        D: Deserializer<'de>,
    {
        let value = Value::deserialize(deserializer)?;

        match value {
            Value::String(s) => Ok(Some(s)),
            Value::Object(map) => {
                if let Some(content) = map.get("content").and_then(|v| v.as_str()) {
                    Ok(Some(content.to_string()))
                } else {
                    Err(de::Error::custom(
                        "content key not found in structured token",
                    ))
                }
            }
            Value::Null => Ok(None),
            _ => Err(de::Error::custom("invalid token format")),
        }
    }
}

#[derive(Clone, Debug, Serialize, ToSchema)]
pub struct Info {
    /// Model info
    #[schema(example = "bigscience/bloom-560m")]
    pub model_id: String,
    #[schema(nullable = true, example = "e985a63cdc139290c5f700ff1929f0b5942cced2")]
    pub model_sha: Option<String>,
    #[schema(example = "torch.float16")]
    pub model_dtype: String,
    #[schema(example = "cuda")]
    pub model_device_type: String,
    #[schema(nullable = true, example = "text-generation")]
    pub model_pipeline_tag: Option<String>,
    /// Router Parameters
    #[schema(example = "128")]
    pub max_concurrent_requests: usize,
    #[schema(example = "2")]
    pub max_best_of: usize,
    #[schema(example = "4")]
    pub max_stop_sequences: usize,
    #[schema(example = "1024")]
    pub max_input_tokens: usize,
    #[schema(example = "2048")]
    pub max_total_tokens: usize,
    #[schema(example = "1.2")]
    pub waiting_served_ratio: f32,
    #[schema(example = "32000")]
    pub max_batch_total_tokens: u32,
    #[schema(example = "20")]
    pub max_waiting_tokens: usize,
    #[schema(nullable = true, example = "null")]
    pub max_batch_size: Option<usize>,
    #[schema(example = "2")]
    pub validation_workers: usize,
    #[schema(example = "32")]
    pub max_client_batch_size: usize,
    /// Router Info
    #[schema(example = "text-generation-router")]
    pub router: &'static str,
    #[schema(example = "0.5.0")]
    pub version: &'static str,
    #[schema(nullable = true, example = "null")]
    pub sha: Option<&'static str>,
    #[schema(nullable = true, example = "null")]
    pub docker_label: Option<&'static str>,
}

#[derive(Clone, Debug, Deserialize, ToSchema, Default)]
pub(crate) struct GenerateParameters {
    /// Generate best_of sequences and return the one with the highest token logprobs.
    #[serde(default)]
    #[schema(exclusive_minimum = 0, nullable = true, default = "null", example = 1)]
    pub best_of: Option<usize>,

    /// The value used to modulate the logits distribution.
    #[serde(default)]
    #[schema(
        exclusive_minimum = 0.0,
        nullable = true,
        default = "null",
        example = 0.5
    )]
    pub temperature: Option<f32>,

    /// The parameter for repetition penalty. 1.0 means no penalty.
    /// See [this paper](https://arxiv.org/pdf/1909.05858.pdf) for more details.
    #[serde(default)]
    #[schema(
        exclusive_minimum = 0.0,
        nullable = true,
        default = "null",
        example = 1.03
    )]
    pub repetition_penalty: Option<f32>,

    /// The parameter for frequency penalty. 0.0 means no penalty.
    /// Penalize new tokens based on their existing frequency in the text so far,
    /// decreasing the model's likelihood to repeat the same line verbatim.
    #[serde(default)]
    #[schema(
        exclusive_minimum = -2.0,
        nullable = true,
        default = "null",
        example = 0.1
    )]
    pub frequency_penalty: Option<f32>,

    /// The number of highest probability vocabulary tokens to keep for top-k-filtering.
    #[serde(default)]
    #[schema(exclusive_minimum = 0, nullable = true, default = "null", example = 10)]
    pub top_k: Option<i32>,

    /// Top-p value for nucleus sampling.
    #[serde(default)]
    #[schema(
        exclusive_minimum = 0.0,
        maximum = 1.0,
        nullable = true,
        default = "null",
        example = 0.95
    )]
    pub top_p: Option<f32>,

    /// Typical Decoding mass
    /// See [Typical Decoding for Natural Language Generation](https://arxiv.org/abs/2202.00666) for more information.
    #[serde(default)]
    #[schema(
        exclusive_minimum = 0.0,
        maximum = 1.0,
        nullable = true,
        default = "null",
        example = 0.95
    )]
    pub typical_p: Option<f32>,

    /// Activate logits sampling.
    #[serde(default)]
    #[schema(default = "false", example = true)]
    pub do_sample: bool,

    /// Maximum number of tokens to generate.
    #[serde(default = "default_max_new_tokens")]
    #[schema(nullable = true, default = "100", example = "20")]
    pub max_new_tokens: Option<u32>,

    /// Whether to prepend the prompt to the generated text
    #[serde(default)]
    #[schema(nullable = true, default = "null", example = false)]
    pub return_full_text: Option<bool>,

    /// Stop generating tokens if a member of `stop` is generated.
    #[serde(default)]
    #[schema(inline, max_items = 4, example = json ! (["photographer"]))]
    pub stop: Vec<String>,

    /// Truncate inputs tokens to the given size.
    #[serde(default)]
    #[schema(nullable = true, default = "null", example = "null")]
    pub truncate: Option<usize>,

    /// Watermarking with [A Watermark for Large Language Models](https://arxiv.org/abs/2301.10226).
    #[serde(default)]
    #[schema(default = "false", example = true)]
    pub watermark: bool,

    /// Whether to return generation details.
    #[serde(default)]
    #[schema(default = "true")]
    pub details: bool,

    /// Whether to return decoder input token logprobs and ids.
    #[serde(default)]
    #[schema(default = "false")]
    pub decoder_input_details: bool,

    /// Random sampling seed.
    #[serde(default)]
    #[schema(
        exclusive_minimum = 0,
        nullable = true,
        default = "null",
        example = "null"
    )]
    pub seed: Option<u64>,

    /// The number of highest probability vocabulary tokens to keep for top-n-filtering.
    #[serde(default)]
    #[schema(exclusive_minimum = 0, nullable = true, default = "null", example = 5)]
    pub top_n_tokens: Option<u32>,

    /// Grammar constraints for the generation.
    #[serde(default)]
    #[schema(nullable = true, default = "null", example = "null")]
    pub grammar: Option<GrammarType>,

    /// Lora adapter id
    #[serde(default)]
    #[schema(nullable = true, default = "null", example = "null")]
    pub adapter_id: Option<String>,
}

fn default_max_new_tokens() -> Option<u32> {
    Some(100)
}

fn default_parameters() -> GenerateParameters {
    GenerateParameters {
        best_of: None,
        temperature: None,
        repetition_penalty: None,
        frequency_penalty: None,
        top_k: None,
        top_p: None,
        typical_p: None,
        do_sample: true,
        max_new_tokens: default_max_new_tokens(),
        return_full_text: None,
        stop: Vec::new(),
        truncate: None,
        watermark: false,
        details: false,
        decoder_input_details: false,
        seed: None,
        top_n_tokens: None,
        grammar: None,
        adapter_id: None,
    }
}

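/// Deserializes the completion `prompt`, which may be a single string or a
/// non-empty array of strings, into a `Vec<String>`. Sketch:
/// `"Hello"` -> `vec!["Hello"]`, `["a", "b"]` -> `vec!["a", "b"]`, `[]` -> error.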
mod prompt_serde {
    use serde::{self, Deserialize, Deserializer};
    use serde_json::Value;

    pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
    where
        D: Deserializer<'de>,
    {
        let value = Value::deserialize(deserializer)?;
        match value {
            Value::String(s) => Ok(vec![s]),
            Value::Array(arr) if arr.is_empty() => Err(serde::de::Error::custom(
                "Empty array detected. Do not use an empty array for the prompt.",
            )),
            Value::Array(arr) => arr
                .iter()
                .map(|v| match v {
                    Value::String(s) => Ok(s.to_owned()),
                    _ => Err(serde::de::Error::custom("Expected a string")),
                })
                .collect(),
            _ => Err(serde::de::Error::custom(
                "Expected a string or an array of strings",
            )),
        }
    }
}

#[derive(Clone, Deserialize, Serialize, ToSchema, Debug)]
pub struct CompletionRequest {
    /// UNUSED
    /// ID of the model to use. See the model endpoint compatibility table for details on which models work with the Chat API.
    #[schema(example = "mistralai/Mistral-7B-Instruct-v0.2")]
    pub model: String,

    /// The prompt to generate completions for.
    #[schema(example = "What is Deep Learning?")]
    #[serde(deserialize_with = "prompt_serde::deserialize")]
    pub prompt: Vec<String>,

    /// The maximum number of tokens that can be generated in the chat completion.
    #[serde(default)]
    #[schema(default = "32")]
    pub max_tokens: Option<u32>,

    /// What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while
    /// lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or `top_p` but not both.
    #[serde(default)]
    #[schema(nullable = true, example = 1.0)]
    pub temperature: Option<f32>,

    /// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the
    /// tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.
    #[serde(default)]
    #[schema(nullable = true, example = 0.95)]
    pub top_p: Option<f32>,

    #[serde(default = "bool::default")]
    pub stream: bool,

    #[schema(nullable = true, example = 42)]
    pub seed: Option<u64>,

    /// The text to append to the prompt. This is useful for completing sentences or generating a paragraph of text.
    /// Please see the completion_template field in the model's tokenizer_config.json file for the completion template.
    #[serde(default)]
    pub suffix: Option<String>,

    #[serde(default)]
    pub repetition_penalty: Option<f32>,

    /// Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far,
    /// decreasing the model's likelihood to repeat the same line verbatim.
    #[serde(default)]
    #[schema(example = "1.0")]
    pub frequency_penalty: Option<f32>,

    /// Up to 4 sequences where the API will stop generating further tokens.
    #[serde(default)]
    #[schema(nullable = true, example = "null")]
    pub stop: Option<Vec<String>>,
}

#[derive(Clone, Deserialize, Serialize, ToSchema, Default)]
pub(crate) struct Completion {
    pub id: String,
    pub object: String,
    #[schema(example = "1706270835")]
    pub created: u64,
    #[schema(example = "mistralai/Mistral-7B-Instruct-v0.2")]
    pub model: String,
    pub system_fingerprint: String,
    pub choices: Vec<CompletionComplete>,
    pub usage: Usage,
}

#[derive(Clone, Deserialize, Serialize, ToSchema)]
pub(crate) struct CompletionComplete {
    pub index: u32,
    pub text: String,
    pub logprobs: Option<Vec<f32>>,
    pub finish_reason: String,
}

#[derive(Clone, Deserialize, Serialize, ToSchema)]
pub(crate) struct ChatCompletion {
    pub id: String,
    pub object: String,
    #[schema(example = "1706270835")]
    pub created: u64,
    #[schema(example = "mistralai/Mistral-7B-Instruct-v0.2")]
    pub model: String,
    pub system_fingerprint: String,
    pub choices: Vec<ChatCompletionComplete>,
    pub usage: Usage,
}

#[derive(Clone, Deserialize, Serialize, ToSchema)]
pub(crate) struct ChatCompletionComplete {
    pub index: u32,
    pub message: OutputMessage,
    pub logprobs: Option<ChatCompletionLogprobs>,
    pub finish_reason: String,
}

#[derive(Clone, Deserialize, Serialize, ToSchema)]
pub(crate) struct ChatCompletionLogprobs {
    content: Vec<ChatCompletionLogprob>,
}

impl From<(Token, Vec<Token>)> for ChatCompletionLogprobs {
    fn from(value: (Token, Vec<Token>)) -> Self {
        let (token, top_tokens) = value;

        Self {
            content: vec![ChatCompletionLogprob {
                token: token.text,
                logprob: token.logprob,
                top_logprobs: top_tokens
                    .into_iter()
                    .map(|t| ChatCompletionTopLogprob {
                        token: t.text,
                        logprob: t.logprob,
                    })
                    .collect(),
            }],
        }
    }
}

impl From<(Vec<Token>, Vec<Vec<Token>>)> for ChatCompletionLogprobs {
    fn from(value: (Vec<Token>, Vec<Vec<Token>>)) -> Self {
        let (tokens, top_tokens) = value;

        // Create an iterator that produces None for top_tokens once it's exhausted
        let top_tokens_iter = top_tokens
            .into_iter()
            .map(Some)
            .chain(std::iter::repeat(None));

        let content = tokens
            .into_iter()
            .zip(top_tokens_iter)
            .map(|(t, top_t_option)| ChatCompletionLogprob {
                token: t.text,
                logprob: t.logprob,
                top_logprobs: match top_t_option {
                    Some(top_t) => top_t
                        .into_iter()
                        .map(|t| ChatCompletionTopLogprob {
                            token: t.text,
                            logprob: t.logprob,
                        })
                        .collect(),
                    None => vec![], // Handle the case where there are no top tokens
                },
            })
            .collect();

        Self { content }
    }
}

#[derive(Clone, Deserialize, Serialize, ToSchema)]
pub(crate) struct ChatCompletionLogprob {
    token: String,
    logprob: f32,
    top_logprobs: Vec<ChatCompletionTopLogprob>,
}

#[derive(Clone, Deserialize, Serialize, ToSchema)]
pub(crate) struct ChatCompletionTopLogprob {
    token: String,
    logprob: f32,
}

#[derive(Clone, Deserialize, Serialize, ToSchema, Default)]
pub(crate) struct Usage {
    pub prompt_tokens: u32,
    pub completion_tokens: u32,
    pub total_tokens: u32,
}

impl ChatCompletion {
    pub(crate) fn new(
        model: String,
        system_fingerprint: String,
        output: Option<String>,
        created: u64,
        details: Details,
        return_logprobs: bool,
        tool_calls: Option<Vec<ToolCall>>,
    ) -> Self {
        let message = match (output, tool_calls) {
            (Some(content), None) => OutputMessage::ChatMessage(TextMessage {
                role: "assistant".into(),
                content,
            }),
            (None, Some(tool_calls)) => OutputMessage::ToolCall(ToolCallMessage {
                role: "assistant".to_string(),
                tool_calls,
            }),
            (Some(output), Some(_)) => {
                warn!("Received both chat and tool call");
                OutputMessage::ChatMessage(TextMessage {
                    role: "assistant".into(),
                    content: output,
                })
            }
            (None, None) => {
                warn!("Didn't receive an answer");
                OutputMessage::ChatMessage(TextMessage {
                    role: "assistant".into(),
                    content: "".to_string(),
                })
            }
        };
        Self {
            id: String::new(),
            object: "chat.completion".into(),
            created,
            model,
            system_fingerprint,
            choices: vec![ChatCompletionComplete {
                index: 0,
                message,
                logprobs: return_logprobs
                    .then(|| ChatCompletionLogprobs::from((details.tokens, details.top_tokens))),
                finish_reason: details.finish_reason.to_string(),
            }],
            usage: Usage {
                prompt_tokens: details.prefill.len() as u32,
                completion_tokens: details.generated_tokens,
                total_tokens: details.prefill.len() as u32 + details.generated_tokens,
            },
        }
    }
}
#[derive(Clone, Deserialize, Serialize, ToSchema)]
pub(crate) struct CompletionCompleteChunk {
    pub id: String,
    pub object: String,
    pub created: u64,
    pub choices: Vec<CompletionComplete>,
    pub model: String,
    pub system_fingerprint: String,
}

#[derive(Clone, Serialize, ToSchema)]
pub(crate) struct ChatCompletionChunk {
    pub id: String,
    pub object: String,
    #[schema(example = "1706270978")]
    pub created: u64,
    #[schema(example = "mistralai/Mistral-7B-Instruct-v0.2")]
    pub model: String,
    pub system_fingerprint: String,
    pub choices: Vec<ChatCompletionChoice>,
}

#[derive(Clone, Serialize, ToSchema)]
pub(crate) struct ChatCompletionChoice {
    pub index: u32,
    pub delta: ChatCompletionDelta,
    pub logprobs: Option<ChatCompletionLogprobs>,
    pub finish_reason: Option<String>,
}

#[derive(Clone, Deserialize, ToSchema, Serialize, Debug, PartialEq)]
pub struct ToolCallDelta {
    #[schema(example = "assistant")]
    role: String,
    tool_calls: DeltaToolCall,
}

#[derive(Clone, Debug, Serialize, ToSchema)]
#[serde(untagged)]
enum ChatCompletionDelta {
    Chat(TextMessage),
    Tool(ToolCallDelta),
}

#[derive(Clone, Deserialize, Serialize, ToSchema, Debug, PartialEq)]
pub(crate) struct DeltaToolCall {
    pub index: u32,
    pub id: String,
    pub r#type: String,
    pub function: Function,
}

#[derive(Clone, Deserialize, Serialize, ToSchema, Debug, PartialEq)]
pub(crate) struct Function {
    pub name: Option<String>,
    pub arguments: String,
}

#[allow(clippy::too_many_arguments)]
impl ChatCompletionChunk {
    pub(crate) fn new(
        model: String,
        system_fingerprint: String,
        delta: Option<String>,
        tool_calls: Option<Vec<String>>,
        created: u64,
        logprobs: Option<ChatCompletionLogprobs>,
        finish_reason: Option<String>,
    ) -> Self {
        let delta = match (delta, tool_calls) {
            (Some(delta), _) => ChatCompletionDelta::Chat(TextMessage {
                role: "assistant".to_string(),
                content: delta,
            }),
            (None, Some(tool_calls)) => ChatCompletionDelta::Tool(ToolCallDelta {
                role: "assistant".to_string(),
                tool_calls: DeltaToolCall {
                    index: 0,
                    id: String::new(),
                    r#type: "function".to_string(),
                    function: Function {
                        name: None,
                        arguments: tool_calls[0].to_string(),
                    },
                },
            }),
            (None, None) => ChatCompletionDelta::Chat(TextMessage {
                role: "assistant".to_string(),
                content: "".to_string(),
            }),
        };
        Self {
            id: String::new(),
            object: "chat.completion.chunk".to_string(),
            created,
            model,
            system_fingerprint,
            choices: vec![ChatCompletionChoice {
                index: 0,
                delta,
                logprobs,
                finish_reason,
            }],
        }
    }
}

#[derive(Clone, Deserialize, ToSchema, Serialize)]
pub(crate) struct ChatRequest {
    /// [UNUSED] ID of the model to use. See the model endpoint compatibility table for details on which models work with the Chat API.
    #[schema(example = "mistralai/Mistral-7B-Instruct-v0.2")]
    pub model: String,

    /// A list of messages comprising the conversation so far.
    #[schema(example = "[{\"role\": \"user\", \"content\": \"What is Deep Learning?\"}]")]
    pub messages: Vec<Message>,

    /// Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far,
    /// decreasing the model's likelihood to repeat the same line verbatim.
    #[serde(default)]
    #[schema(example = "1.0")]
    pub frequency_penalty: Option<f32>,

    /// UNUSED
    /// Modify the likelihood of specified tokens appearing in the completion. Accepts a JSON object that maps tokens
    /// (specified by their token ID in the tokenizer) to an associated bias value from -100 to 100. Mathematically,
    /// the bias is added to the logits generated by the model prior to sampling. The exact effect will vary per model,
    /// but values between -1 and 1 should decrease or increase likelihood of selection; values like -100 or 100 should
    /// result in a ban or exclusive selection of the relevant token.
    #[serde(default)]
    pub logit_bias: Option<Vec<f32>>,

    /// Whether to return log probabilities of the output tokens or not. If true, returns the log probabilities of each
    /// output token returned in the content of message.
    #[serde(default)]
    #[schema(example = "false")]
    pub logprobs: Option<bool>,

    /// An integer between 0 and 5 specifying the number of most likely tokens to return at each token position, each with
    /// an associated log probability. logprobs must be set to true if this parameter is used.
    #[serde(default)]
    #[schema(example = "5")]
    pub top_logprobs: Option<u32>,

    /// The maximum number of tokens that can be generated in the chat completion.
    #[serde(default)]
    #[schema(example = "32")]
    pub max_tokens: Option<u32>,

    /// UNUSED
    /// How many chat completion choices to generate for each input message. Note that you will be charged based on the
    /// number of generated tokens across all of the choices. Keep n as 1 to minimize costs.
    #[serde(default)]
    #[schema(nullable = true, example = "2")]
    pub n: Option<u32>,

    /// Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far,
    /// increasing the model's likelihood to talk about new topics
    #[serde(default)]
    #[schema(nullable = true, example = 0.1)]
    pub presence_penalty: Option<f32>,

    /// Up to 4 sequences where the API will stop generating further tokens.
    #[serde(default)]
    #[schema(nullable = true, example = "null")]
    pub stop: Option<Vec<String>>,

    #[serde(default = "bool::default")]
    pub stream: bool,

    #[schema(nullable = true, example = 42)]
    pub seed: Option<u64>,

    /// What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while
    /// lower values like 0.2 will make it more focused and deterministic.
    ///
    /// We generally recommend altering this or `top_p` but not both.
    #[serde(default)]
    #[schema(nullable = true, example = 1.0)]
    pub temperature: Option<f32>,

    /// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the
    /// tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.
    #[serde(default)]
    #[schema(nullable = true, example = 0.95)]
    pub top_p: Option<f32>,

    /// A list of tools the model may call. Currently, only functions are supported as a tool. Use this to provide a list of
    /// functions the model may generate JSON inputs for.
    #[serde(default)]
    #[schema(nullable = true, example = "null")]
    pub tools: Option<Vec<Tool>>,

    /// A prompt to be appended before the tools
    #[serde(default = "default_tool_prompt")]
    #[schema(
        nullable = true,
        example = "\"You will be presented with a JSON schema representing a set of tools.\nIf the user request lacks sufficient information to make a precise tool selection: Do not invent any tool's properties, instead notify with an error message.\n\nJSON Schema:\n\""
    )]
    pub tool_prompt: Option<String>,

    /// A specific tool to use. If not provided, the model will default to using any of the tools provided in the tools parameter.
    #[serde(default)]
    #[schema(nullable = true, example = "null")]
    #[serde(deserialize_with = "deserialize_tool_choice::deserialize")]
    pub tool_choice: Option<ToolType>,

    /// Response format constraints for the generation.
    ///
    /// NOTE: A request can use `response_format` OR `tools` but not both.
    #[serde(default)]
    #[schema(nullable = true, default = "null", example = "null")]
    pub response_format: Option<GrammarType>,
}

fn default_tool_prompt() -> Option<String> {
    Some(
        "\nYou will be presented with a JSON schema representing a set of tools.\nIf the user request lacks sufficient information to make a precise tool selection: Do not invent any tool's properties, instead notify with an error message.\n\nJSON Schema:\n".to_string(),
    )
}

#[derive(Clone, Deserialize, ToSchema, Serialize)]
enum ToolType {
    FunctionName(String),
    OneOf,
}

/// Deserialize the tool choice from the JSON input or from the function name ("none" is allowed but mapped to None)
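/// Sketch of the mapping: `"none"` -> `None`, `"auto"` or `null` -> `Some(ToolType::OneOf)`,
/// any other string -> `Some(ToolType::FunctionName(s))`, and
/// `{"function": {"name": "..."}}` -> `Some(ToolType::FunctionName(..))`.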
mod deserialize_tool_choice {
    use super::*;
    use serde::de;
    use serde::Deserializer;
    use serde_json::Value;

    pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<ToolType>, D::Error>
    where
        D: Deserializer<'de>,
    {
        let value = Value::deserialize(deserializer)?;

        match value {
            Value::String(s) => match s.as_str() {
                "none" => Ok(None),
                "auto" => Ok(Some(ToolType::OneOf)),
                _ => Ok(Some(ToolType::FunctionName(s))),
            },
            Value::Object(map) => {
                if let Some(content) = map
                    .get("function")
                    .and_then(|v| v.get("name"))
                    .and_then(|v| v.as_str())
                {
                    Ok(Some(ToolType::FunctionName(content.to_string())))
                } else {
                    Err(de::Error::custom("function key not found in tool choice"))
                }
            }
            Value::Null => Ok(Some(ToolType::OneOf)),
            _ => Err(de::Error::custom("invalid token format")),
        }
    }
}

#[derive(Debug, Deserialize, Serialize, ToSchema, PartialEq)]
pub struct Tools {
    #[serde(flatten)]
    functions_map: FunctionsMap,
    properties: Properties,
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct FunctionsMap {
    #[serde(rename = "$functions")]
    functions: std::collections::HashMap<String, serde_json::Value>,
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct FunctionRef {
    #[serde(rename = "$ref")]
    ref_path: String,
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct Properties {
    #[serde(serialize_with = "serialize_function")]
    function: Vec<FunctionRef>,
}

fn serialize_function<S>(functions: &Vec<FunctionRef>, serializer: S) -> Result<S::Ok, S::Error>
where
    S: serde::Serializer,
{
    use serde::ser::SerializeStruct;
    let mut state = serializer.serialize_struct("Function", 1)?;
    state.serialize_field("anyOf", functions)?;
    state.end()
}
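
// Together, `Tools`, `FunctionsMap`, and `Properties` serialize to a
// JSON-schema-like document of roughly this shape (illustrative):
// {
//   "$functions": { "myfn": { ... } },
//   "properties": { "function": { "anyOf": [{ "$ref": "#/$functions/myfn" }] } }
// }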

#[derive(Clone, Debug, Deserialize, Serialize, ToSchema, Default, PartialEq)]
pub(crate) struct FunctionDefinition {
    #[serde(default)]
    pub description: Option<String>,
    pub name: String,
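    // OpenAI-style payloads call this field "parameters"; the alias lets both
    // "arguments" and "parameters" deserialize into `arguments`.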
    #[serde(alias = "parameters")]
    pub arguments: serde_json::Value,
}

#[derive(Clone, Debug, Deserialize, Serialize, ToSchema)]
pub(crate) struct Tool {
    // The type of the tool. Currently, only 'function' is supported.
    #[schema(example = "function")]
    pub r#type: String,
    // The function definition for the tool; its arguments are kept as generic JSON.
    pub function: FunctionDefinition,
}

#[derive(Clone, Serialize, Deserialize, Default)]
pub(crate) struct ChatTemplateInputs<'a> {
    messages: Vec<TextMessage>,
    bos_token: Option<&'a str>,
    eos_token: Option<&'a str>,
    add_generation_prompt: bool,
    tools: Option<&'a str>,
    tools_prompt: Option<&'a str>,
}

#[derive(Clone, Deserialize, Serialize, ToSchema, Default, Debug, PartialEq)]
pub(crate) struct ToolCall {
    pub id: String,
    pub r#type: String,
    pub function: FunctionDefinition,
}

#[derive(Clone, Deserialize, ToSchema, Serialize, Debug, PartialEq)]
struct Url {
    url: String,
}

#[derive(Clone, Deserialize, ToSchema, Serialize, Debug, PartialEq)]
struct ImageUrl {
    image_url: Url,
}

#[derive(Clone, Deserialize, ToSchema, Serialize, Debug, PartialEq)]
struct Text {
    text: String,
}

#[derive(Clone, Deserialize, ToSchema, Serialize, Debug, PartialEq)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
enum MessageChunk {
    Text(Text),
    ImageUrl(ImageUrl),
}

#[derive(Clone, Deserialize, ToSchema, Serialize, Debug, PartialEq)]
pub struct Message {
    #[schema(example = "user")]
    role: String,
    #[schema(example = "My name is David and I")]
    #[serde(deserialize_with = "message_content_serde::deserialize")]
    content: Vec<MessageChunk>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[schema(example = "\"David\"")]
    name: Option<String>,
}

mod message_content_serde {
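    //! Deserializes message `content` given either as a plain string or as an
    //! array of typed chunks, normalizing both to `Vec<MessageChunk>`. Sketch:
    //! `"Hello"` -> `[Text("Hello")]`;
    //! `[{"type": "text", ...}, {"type": "image_url", ...}]` -> chunks as given.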
    use super::*;
    use serde::{Deserialize, Deserializer};

    pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<MessageChunk>, D::Error>
    where
        D: Deserializer<'de>,
    {
        #[derive(Deserialize)]
        #[serde(untagged)]
        enum Message {
            Text(String),
            Chunks(Vec<MessageChunk>),
        }
        let message: Message = Deserialize::deserialize(deserializer)?;
        let chunks = match message {
            Message::Text(text) => {
                vec![MessageChunk::Text(Text { text })]
            }
            Message::Chunks(s) => s,
        };
        Ok(chunks)
    }
}

#[derive(Clone, Deserialize, ToSchema, Serialize, Debug, PartialEq)]
pub struct TextMessage {
    #[schema(example = "user")]
    pub role: String,
    #[schema(example = "My name is David and I")]
    pub content: String,
}

impl From<Message> for TextMessage {
    fn from(value: Message) -> Self {
        TextMessage {
            role: value.role,
            content: value
                .content
                .into_iter()
                .map(|c| match c {
                    MessageChunk::Text(Text { text }) => text,
                    MessageChunk::ImageUrl(image) => {
                        let url = image.image_url.url;
                        format!("![]({url})")
                    }
                })
                .collect::<Vec<_>>()
                .join(""),
        }
    }
}

#[derive(Clone, Deserialize, ToSchema, Serialize, Debug, PartialEq)]
pub struct ToolCallMessage {
    #[schema(example = "assistant")]
    role: String,
    tool_calls: Vec<ToolCall>,
}

#[derive(Clone, Deserialize, ToSchema, Serialize, Debug, PartialEq)]
#[serde(untagged)]
pub(crate) enum OutputMessage {
    ChatMessage(TextMessage),
    ToolCall(ToolCallMessage),
}

#[derive(Clone, Debug, Deserialize, ToSchema)]
pub(crate) struct GenerateRequest {
    #[schema(example = "My name is Olivier and I")]
    pub inputs: String,
    #[serde(default = "default_parameters")]
    pub parameters: GenerateParameters,
}

#[derive(Clone, Debug, Deserialize, ToSchema)]
pub(crate) struct CompatGenerateRequest {
    #[schema(example = "My name is Olivier and I")]
    pub inputs: String,
    #[serde(default = "default_parameters")]
    pub parameters: GenerateParameters,
    #[serde(default)]
    #[schema(default = "false")]
    pub stream: bool,
}

impl From<CompatGenerateRequest> for GenerateRequest {
    fn from(req: CompatGenerateRequest) -> Self {
        Self {
            inputs: req.inputs,
            parameters: req.parameters,
        }
    }
}

#[derive(Debug, Serialize, ToSchema)]
pub struct PrefillToken {
    #[schema(example = 0)]
    id: u32,
    #[schema(example = "test")]
    text: String,
    #[schema(nullable = true, example = - 0.34)]
    logprob: f32,
}

#[derive(Debug, Serialize, ToSchema, Clone)]
pub struct Token {
    #[schema(example = 0)]
    id: u32,
    #[schema(example = "test")]
    text: String,
    #[schema(nullable = true, example = - 0.34)]
    logprob: f32,
    #[schema(example = "false")]
    special: bool,
}

#[derive(Debug, Serialize, ToSchema)]
pub struct SimpleToken {
    #[schema(example = 0)]
    id: u32,
    #[schema(example = "test")]
    text: String,
    #[schema(example = 0)]
    start: usize,
    #[schema(example = 2)]
    stop: usize,
}

#[derive(Debug, Serialize, ToSchema)]
#[serde(rename_all(serialize = "snake_case"))]
#[schema(example = "Length")]
pub(crate) enum FinishReason {
    #[schema(rename = "length")]
    Length,
    #[serde(rename = "eos_token")]
    #[schema(rename = "eos_token")]
    EndOfSequenceToken,
    #[schema(rename = "stop_sequence")]
    StopSequence,
}

impl std::fmt::Display for FinishReason {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            FinishReason::Length => write!(f, "length"),
            FinishReason::EndOfSequenceToken => write!(f, "eos_token"),
            FinishReason::StopSequence => write!(f, "stop_sequence"),
        }
    }
}

#[derive(Serialize, ToSchema)]
pub(crate) struct BestOfSequence {
    #[schema(example = "test")]
    pub generated_text: String,
    #[schema(example = "length")]
    pub finish_reason: FinishReason,
    #[schema(example = 1)]
    pub generated_tokens: u32,
    #[schema(nullable = true, example = 42)]
    pub seed: Option<u64>,
    pub prefill: Vec<PrefillToken>,
    pub tokens: Vec<Token>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub top_tokens: Vec<Vec<Token>>,
}

#[derive(Serialize, ToSchema)]
pub(crate) struct Details {
    #[schema(example = "length")]
    pub finish_reason: FinishReason,
    #[schema(example = 1)]
    pub generated_tokens: u32,
    #[schema(nullable = true, example = 42)]
    pub seed: Option<u64>,
    pub prefill: Vec<PrefillToken>,
    pub tokens: Vec<Token>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub best_of_sequences: Option<Vec<BestOfSequence>>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub top_tokens: Vec<Vec<Token>>,
}

#[derive(Serialize, ToSchema)]
pub(crate) struct GenerateResponse {
    #[schema(example = "test")]
    pub generated_text: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub details: Option<Details>,
}

#[derive(Serialize, ToSchema)]
#[serde(transparent)]
pub(crate) struct TokenizeResponse(Vec<SimpleToken>);

#[derive(Serialize, ToSchema)]
pub(crate) struct StreamDetails {
    #[schema(example = "length")]
    pub finish_reason: FinishReason,
    #[schema(example = 1)]
    pub generated_tokens: u32,
    #[schema(nullable = true, example = 42)]
    pub seed: Option<u64>,
}

#[derive(Serialize, ToSchema)]
pub(crate) struct StreamResponse {
    pub index: u32,
    pub token: Token,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub top_tokens: Vec<Token>,
    #[schema(nullable = true, default = "null", example = "test")]
    pub generated_text: Option<String>,
    #[schema(nullable = true, default = "null")]
    pub details: Option<StreamDetails>,
}

#[derive(Serialize, ToSchema)]
pub(crate) struct ErrorResponse {
    pub error: String,
    pub error_type: String,
}

#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;
    use tokenizers::Tokenizer;

    pub(crate) async fn get_tokenizer() -> Tokenizer {
        let api = hf_hub::api::sync::Api::new().unwrap();
        let repo = api.model("gpt2".to_string());
        let filename = repo.get("tokenizer.json").unwrap();
        Tokenizer::from_file(filename).unwrap()
    }

    #[test]
    fn test_hub_nested_tokens_tokenizer_config() {
        // this is a subset of the tokenizer_config.json file
        // in this case we expect the tokens to be encoded as simple strings
        let json_content = r#"{
            "chat_template": "test",
            "bos_token": "<|begin▁of▁sentence|>",
            "eos_token": "<|end▁of▁sentence|>"
        }"#;

        let config: HubTokenizerConfig = serde_json::from_str(json_content).unwrap();

        // check that we successfully parsed the tokens
        assert_eq!(
            config.chat_template,
            Some(ChatTemplateVersions::Single("test".to_string()))
        );
        assert_eq!(
            config.bos_token,
            Some("<|begin▁of▁sentence|>".to_string())
        );
        assert_eq!(config.eos_token, Some("<|end▁of▁sentence|>".to_string()));

        // in this case we expect the tokens to be encoded as structured tokens
        // we want the content of the structured token
        let json_content = r#"{
            "chat_template": "test",
            "bos_token": {
              "__type": "AddedToken",
              "content": "<|begin▁of▁sentence|>",
              "lstrip": false,
              "normalized": true,
              "rstrip": false,
              "single_word": false
            },
            "eos_token": {
              "__type": "AddedToken",
              "content": "<|end▁of▁sentence|>",
              "lstrip": false,
              "normalized": true,
              "rstrip": false,
              "single_word": false
            }
        }"#;

        let config: HubTokenizerConfig = serde_json::from_str(json_content).unwrap();

        // check that we successfully parsed the tokens
        assert_eq!(
            config.chat_template,
            Some(ChatTemplateVersions::Single("test".to_string()))
        );
        assert_eq!(
            config.bos_token,
            Some("<|begin▁of▁sentence|>".to_string())
        );
        assert_eq!(config.eos_token, Some("<|end▁of▁sentence|>".to_string()));
    }
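
    // An illustrative test (a sketch, not part of the original suite): the
    // `prompt` field of `CompletionRequest` accepts both a bare string and an
    // array of strings via `prompt_serde::deserialize`.
    #[test]
    fn test_completion_request_prompt_forms() {
        let request: CompletionRequest =
            serde_json::from_str(r#"{"model": "", "prompt": "Hello"}"#).unwrap();
        assert_eq!(request.prompt, vec!["Hello".to_string()]);

        let request: CompletionRequest =
            serde_json::from_str(r#"{"model": "", "prompt": ["a", "b"]}"#).unwrap();
        assert_eq!(request.prompt, vec!["a".to_string(), "b".to_string()]);
    }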

    #[test]
    fn test_chat_simple_string() {
        let json = json!({
            "model": "",
            "messages": [{
                "role": "user",
                "content": "What is Deep Learning?"
            }]
        });
        let request: ChatRequest = serde_json::from_str(json.to_string().as_str()).unwrap();

        assert_eq!(
            request.messages[0],
            Message {
                role: "user".to_string(),
                content: vec![MessageChunk::Text(Text {
                    text: "What is Deep Learning?".to_string()
                }),],
                name: None
            }
        );
    }

    #[test]
    fn test_chat_request() {
        let json = json!({
            "model": "",
            "messages": [{
                "role": "user",
                "content": [
                    {"type": "text", "text": "Whats in this image?"},
                    {"type": "image_url", "image_url": {"url": "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/transformers/rabbit.png"}},
                ]
            }]
        });
        let request: ChatRequest = serde_json::from_str(json.to_string().as_str()).unwrap();

        assert_eq!(
            request.messages[0],
            Message{
                role: "user".to_string(),
                content: vec![
                    MessageChunk::Text(Text { text: "Whats in this image?".to_string() }),
                    MessageChunk::ImageUrl(ImageUrl { image_url: Url { url: "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/transformers/rabbit.png".to_string() } })
                ],
                name: None
            }
        );
    }

    #[test]
    fn text_message_convert() {
        let message = Message{
                role: "user".to_string(),
                content: vec![
                    MessageChunk::Text(Text { text: "Whats in this image?".to_string() }),
                    MessageChunk::ImageUrl(ImageUrl { image_url: Url { url: "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/transformers/rabbit.png".to_string() } })
                ],
                name: None
            };
        let textmsg: TextMessage = message.into();
        assert_eq!(textmsg.content, "Whats in this image?![](https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/transformers/rabbit.png)");
    }

    #[test]
    fn openai_output() {
        let message = OutputMessage::ChatMessage(TextMessage {
            role: "assistant".to_string(),
            content: "This is the answer".to_string(),
        });
        let serialized = serde_json::to_string(&message).unwrap();
        assert_eq!(
            serialized,
            r#"{"role":"assistant","content":"This is the answer"}"#
        );

        let message = OutputMessage::ToolCall(ToolCallMessage {
            role: "assistant".to_string(),
            tool_calls: vec![ToolCall {
                id: "0".to_string(),
                r#type: "function".to_string(),
                function: FunctionDefinition {
                    description: None,
                    name: "myfn".to_string(),
                    arguments: json!({
                        "format": "csv"
                    }),
                },
            }],
        });
        let serialized = serde_json::to_string(&message).unwrap();
        assert_eq!(
            serialized,
            r#"{"role":"assistant","tool_calls":[{"id":"0","type":"function","function":{"description":null,"name":"myfn","arguments":{"format":"csv"}}}]}"#
        );
    }
}