// SPDX-FileCopyrightText: Copyright (c) 2024-2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::{fmt, io::IsTerminal as _, path::PathBuf};

use dynamo_runtime::protocols::ENDPOINT_SCHEME;

/// Prefix for `in=batch:<file>` batch mode.
const BATCH_PREFIX: &str = "batch:";

/// Where requests come from (the `in=` option).
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Input {
    /// Run an OpenAI compatible HTTP server
    Http,

    /// Single prompt on stdin
    Stdin,

    /// Interactive chat
    Text,

    /// Pull requests from a namespace/component/endpoint path.
    Endpoint(String),

    /// Batch mode. Run all the prompts, write the outputs, exit.
    Batch(PathBuf),
}

impl TryFrom<&str> for Input {
    type Error = anyhow::Error;

    fn try_from(s: &str) -> anyhow::Result<Self> {
        match s {
            "http" => Ok(Input::Http),
            "text" => Ok(Input::Text),
47
            "stdin" => Ok(Input::Stdin),
48
            endpoint_path if endpoint_path.starts_with(ENDPOINT_SCHEME) => {
49
                Ok(Input::Endpoint(endpoint_path.to_string()))
50
            }
51
52
53
54
            batch_patch if batch_patch.starts_with(BATCH_PREFIX) => {
                let path = batch_patch.strip_prefix(BATCH_PREFIX).unwrap();
                Ok(Input::Batch(PathBuf::from(path)))
            }
55
56
57
58
59
60
61
62
63
64
            e => Err(anyhow::anyhow!("Invalid in= option '{e}'")),
        }
    }
}

impl fmt::Display for Input {
    /// Render the same string `TryFrom<&str>` accepts (except `Batch`,
    /// which prints the bare path without the `batch:` prefix).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Write each arm directly: avoids building a temporary String for
        // Batch and borrowing it out of the match arm.
        match self {
            Input::Http => write!(f, "http"),
            Input::Text => write!(f, "text"),
            Input::Stdin => write!(f, "stdin"),
            Input::Endpoint(path) => write!(f, "{path}"),
            Input::Batch(path) => write!(f, "{}", path.display()),
        }
    }
}

impl Default for Input {
    /// Interactive chat when stdin is a terminal, otherwise read a single
    /// prompt from the (piped) stdin.
    fn default() -> Self {
        match std::io::stdin().is_terminal() {
            true => Input::Text,
            false => Input::Stdin,
        }
    }
}

/// The engine that handles requests (the `out=` option).
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Output {
    /// Accept un-preprocessed requests, echo the prompt back as the response
    EchoFull,

    /// Accept preprocessed requests, echo the tokens back as the response
    EchoCore,

    /// Publish requests to a namespace/component/endpoint path.
    Endpoint(String),

    #[cfg(feature = "mistralrs")]
    /// Run inference on a model in a GGUF file using mistralrs w/ candle
    MistralRs,

    #[cfg(feature = "llamacpp")]
    /// Run inference using llama.cpp
    LlamaCpp,

    /// Run inference using sglang
    SgLang,

    /// Start vllm in a sub-process connecting via nats.
    /// Sugar for `python vllm_inc.py --endpoint <thing> --model <thing>`
    Vllm,

    /// Run inference using a user supplied python file that accepts and returns
    /// strings. It does its own pre-processing.
    #[cfg(feature = "python")]
    PythonStr(String),

    // DEVELOPER NOTE
    // If you add an engine add it to `available_engines` below, and to Default if it makes sense
}

impl TryFrom<&str> for Output {
    type Error = anyhow::Error;

    fn try_from(s: &str) -> anyhow::Result<Self> {
        match s {
121
122
123
            #[cfg(feature = "mistralrs")]
            "mistralrs" => Ok(Output::MistralRs),

124
125
126
            #[cfg(feature = "llamacpp")]
            "llamacpp" | "llama_cpp" => Ok(Output::LlamaCpp),

127
            "sglang" => Ok(Output::SgLang),
Graham King's avatar
Graham King committed
128
            "vllm" => Ok(Output::Vllm),
129

130
            "echo_full" => Ok(Output::EchoFull),
131
            "echo_core" => Ok(Output::EchoCore),
132
133
134
135
136
137

            endpoint_path if endpoint_path.starts_with(ENDPOINT_SCHEME) => {
                let path = endpoint_path.strip_prefix(ENDPOINT_SCHEME).unwrap();
                Ok(Output::Endpoint(path.to_string()))
            }

138
139
140
141
142
143
144
145
            #[cfg(feature = "python")]
            python_str_gen if python_str_gen.starts_with(crate::PYTHON_STR_SCHEME) => {
                let path = python_str_gen
                    .strip_prefix(crate::PYTHON_STR_SCHEME)
                    .unwrap();
                Ok(Output::PythonStr(path.to_string()))
            }

146
147
148
149
150
151
152
153
            e => Err(anyhow::anyhow!("Invalid out= option '{e}'")),
        }
    }
}

impl fmt::Display for Output {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let s = match self {
154
155
156
            #[cfg(feature = "mistralrs")]
            Output::MistralRs => "mistralrs",

157
158
159
            #[cfg(feature = "llamacpp")]
            Output::LlamaCpp => "llamacpp",

160
            Output::SgLang => "sglang",
Graham King's avatar
Graham King committed
161
            Output::Vllm => "vllm",
162

163
            Output::EchoFull => "echo_full",
164
            Output::EchoCore => "echo_core",
165
166

            Output::Endpoint(path) => path,
167
168

            #[cfg(feature = "python")]
169
            Output::PythonStr(_) => "pystr",
170
171
172
173
        };
        write!(f, "{s}")
    }
}
/// Returns the engine to use if user did not say on cmd line.
/// Nearly always defaults to mistralrs which has no dependencies and we include by default.
177
/// If built with --no-default-features default to subprocess vllm.
178
179
180
#[allow(unused_assignments, unused_mut)]
impl Default for Output {
    fn default() -> Self {
181
        let mut out = Output::Vllm;
182

183
184
185
186
187
        #[cfg(feature = "mistralrs")]
        {
            out = Output::MistralRs;
        }

188
189
190
        out
    }
}

impl Output {
    /// Names of every engine this build can run, in the form accepted by
    /// the `out=` option (see `TryFrom<&str>` above).
    pub fn available_engines() -> Vec<String> {
        // `mut` is always exercised: sglang and vllm are pushed
        // unconditionally, so the old #[allow(unused_mut)] was unneeded.
        let mut out = vec!["echo_core".to_string(), "echo_full".to_string()];

        #[cfg(feature = "mistralrs")]
        out.push(Output::MistralRs.to_string());

        #[cfg(feature = "llamacpp")]
        out.push(Output::LlamaCpp.to_string());

        out.push(Output::SgLang.to_string());
        out.push(Output::Vllm.to_string());

        // Displays as the fixed tag "pystr"; the file name is a placeholder.
        #[cfg(feature = "python")]
        out.push(Output::PythonStr("file.py".to_string()).to_string());

        out
    }
}