opt.rs 8.12 KB
Newer Older
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
// SPDX-FileCopyrightText: Copyright (c) 2024-2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::{fmt, io::IsTerminal as _, path::PathBuf};

use dynamo_runtime::protocols::ENDPOINT_SCHEME;

const BATCH_PREFIX: &str = "batch:";

/// Where the frontend takes requests from (the `in=` command line option).
#[derive(PartialEq)]
pub enum Input {
    /// Run an OpenAI compatible HTTP server
    Http,

    /// Single prompt on stdin
    Stdin,

    /// Interactive chat
    Text,

    /// Pull requests from a namespace/component/endpoint path.
    Endpoint(String),

    /// Batch mode. Run all the prompts, write the outputs, exit.
    Batch(PathBuf),

    /// Start the engine but don't provide any way to talk to it.
    /// For multi-node sglang, where the engine connects directly
    /// to the co-ordinator via torch distributed / nccl.
    None,
}

impl TryFrom<&str> for Input {
    type Error = anyhow::Error;

    fn try_from(s: &str) -> anyhow::Result<Self> {
        match s {
            "http" => Ok(Input::Http),
            "text" => Ok(Input::Text),
52
            "stdin" => Ok(Input::Stdin),
53
            "none" => Ok(Input::None),
54
            endpoint_path if endpoint_path.starts_with(ENDPOINT_SCHEME) => {
55
                Ok(Input::Endpoint(endpoint_path.to_string()))
56
            }
57
58
59
60
            batch_patch if batch_patch.starts_with(BATCH_PREFIX) => {
                let path = batch_patch.strip_prefix(BATCH_PREFIX).unwrap();
                Ok(Input::Batch(PathBuf::from(path)))
            }
61
62
63
64
65
66
67
68
69
70
            e => Err(anyhow::anyhow!("Invalid in= option '{e}'")),
        }
    }
}

impl fmt::Display for Input {
    /// Render the canonical spelling of this input mode.
    ///
    /// NOTE(review): `Batch` prints only the file path, without the
    /// `batch:` prefix, so the output does not round-trip through
    /// `TryFrom` — confirm that is intended.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Input::Http => write!(f, "http"),
            Input::Text => write!(f, "text"),
            Input::Stdin => write!(f, "stdin"),
            Input::Endpoint(path) => write!(f, "{path}"),
            // Write the path directly instead of borrowing a temporary
            // String built from `display().to_string()`.
            Input::Batch(path) => write!(f, "{}", path.display()),
            Input::None => write!(f, "none"),
        }
    }
}

80
81
82
83
84
85
86
87
88
89
impl Default for Input {
    fn default() -> Self {
        if std::io::stdin().is_terminal() {
            Input::Text
        } else {
            Input::Stdin
        }
    }
}

/// Which engine answers the requests (the `out=` command line option).
pub enum Output {
    /// Accept un-preprocessed requests, echo the prompt back as the response
    EchoFull,

    /// Accept preprocessed requests, echo the tokens back as the response
    EchoCore,

    /// Publish requests to a namespace/component/endpoint path.
    Endpoint(String),

    #[cfg(feature = "mistralrs")]
    /// Run inference on a model in a GGUF file using mistralrs w/ candle
    MistralRs,

    #[cfg(feature = "sglang")]
    /// Deprecated
    SgLangLegacy,

    /// Run inference using sglang
    SgLang,

    #[cfg(feature = "llamacpp")]
    /// Run inference using llama.cpp
    LlamaCpp,

    // Start vllm in a sub-process connecting via nats
    // Sugar for `python vllm_inc.py --endpoint <thing> --model <thing>`
    Vllm,

    #[cfg(feature = "vllm")]
    /// Run inference using vllm 0.8.X+
    Vllm0_8,

    #[cfg(feature = "vllm")]
    /// Run inference using vllm 0.7.X
    Vllm0_7,

    /// Run inference using a user supplied python file that accepts and returns
    /// strings. It does its own pre-processing.
    #[cfg(feature = "python")]
    PythonStr(String),

    /// Run inference using a user supplied python file that accepts and returns
    /// tokens. We do the pre-processing.
    #[cfg(feature = "python")]
    PythonTok(String),
    //
    // DEVELOPER NOTE
    // If you add an engine add it to `available_engines` below, and to Default if it makes sense
}

impl TryFrom<&str> for Output {
    type Error = anyhow::Error;

    fn try_from(s: &str) -> anyhow::Result<Self> {
        match s {
146
147
148
            #[cfg(feature = "mistralrs")]
            "mistralrs" => Ok(Output::MistralRs),

149
            #[cfg(feature = "sglang")]
150
151
            "sglang_legacy" => Ok(Output::SgLangLegacy),

152
153
            "sglang" => Ok(Output::SgLang),

154
155
156
            #[cfg(feature = "llamacpp")]
            "llamacpp" | "llama_cpp" => Ok(Output::LlamaCpp),

Graham King's avatar
Graham King committed
157
            "vllm" => Ok(Output::Vllm),
158

159
160
161
162
            #[cfg(feature = "vllm")]
            "vllm0_8" => Ok(Output::Vllm0_8),
            #[cfg(feature = "vllm")]
            "vllm0_7" => Ok(Output::Vllm0_7),
Graham King's avatar
Graham King committed
163

164
            "echo_full" => Ok(Output::EchoFull),
165
            "echo_core" => Ok(Output::EchoCore),
166
167
168
169
170
171

            endpoint_path if endpoint_path.starts_with(ENDPOINT_SCHEME) => {
                let path = endpoint_path.strip_prefix(ENDPOINT_SCHEME).unwrap();
                Ok(Output::Endpoint(path.to_string()))
            }

172
173
174
175
176
177
178
179
            #[cfg(feature = "python")]
            python_str_gen if python_str_gen.starts_with(crate::PYTHON_STR_SCHEME) => {
                let path = python_str_gen
                    .strip_prefix(crate::PYTHON_STR_SCHEME)
                    .unwrap();
                Ok(Output::PythonStr(path.to_string()))
            }

180
181
182
183
184
185
186
187
            #[cfg(feature = "python")]
            python_tok_gen if python_tok_gen.starts_with(crate::PYTHON_TOK_SCHEME) => {
                let path = python_tok_gen
                    .strip_prefix(crate::PYTHON_TOK_SCHEME)
                    .unwrap();
                Ok(Output::PythonTok(path.to_string()))
            }

188
189
190
191
192
193
194
195
            e => Err(anyhow::anyhow!("Invalid out= option '{e}'")),
        }
    }
}

impl fmt::Display for Output {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let s = match self {
196
197
198
            #[cfg(feature = "mistralrs")]
            Output::MistralRs => "mistralrs",

199
            #[cfg(feature = "sglang")]
200
201
            Output::SgLangLegacy => "sglang_legacy",

202
203
            Output::SgLang => "sglang",

204
205
206
            #[cfg(feature = "llamacpp")]
            Output::LlamaCpp => "llamacpp",

Graham King's avatar
Graham King committed
207
            Output::Vllm => "vllm",
208

209
210
211
212
            #[cfg(feature = "vllm")]
            Output::Vllm0_8 => "vllm0_8",
            #[cfg(feature = "vllm")]
            Output::Vllm0_7 => "vllm0_7",
Graham King's avatar
Graham King committed
213

214
            Output::EchoFull => "echo_full",
215
            Output::EchoCore => "echo_core",
216
217

            Output::Endpoint(path) => path,
218
219

            #[cfg(feature = "python")]
220
            Output::PythonStr(_) => "pystr",
221
222

            #[cfg(feature = "python")]
223
            Output::PythonTok(_) => "pytok",
224
225
226
227
        };
        write!(f, "{s}")
    }
}
228

229
230
/// Returns the engine to use if user did not say on cmd line.
/// Nearly always defaults to mistralrs which has no dependencies and we include by default.
231
/// If built with --no-default-features default to subprocess vllm.
232
233
234
#[allow(unused_assignments, unused_mut)]
impl Default for Output {
    fn default() -> Self {
235
        let mut out = Output::Vllm;
236

237
238
239
240
241
        #[cfg(feature = "mistralrs")]
        {
            out = Output::MistralRs;
        }

242
243
244
        out
    }
}
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259

impl Output {
    /// All engine names valid for the `out=` option in this build.
    /// Feature-gated engines appear only when compiled in; the echo
    /// engines, sglang and subprocess vllm are always listed.
    #[allow(unused_mut)]
    pub fn available_engines() -> Vec<String> {
        let mut out = vec!["echo_core".to_string(), "echo_full".to_string()];
        #[cfg(feature = "mistralrs")]
        {
            out.push(Output::MistralRs.to_string());
        }

        #[cfg(feature = "llamacpp")]
        {
            out.push(Output::LlamaCpp.to_string());
        }

        out.push(Output::SgLang.to_string());
        #[cfg(feature = "sglang")]
        {
            out.push(Output::SgLangLegacy.to_string());
        }

        out.push(Output::Vllm.to_string());
        #[cfg(feature = "vllm")]
        {
            out.push(Output::Vllm0_7.to_string());
            out.push(Output::Vllm0_8.to_string());
        }

        #[cfg(feature = "python")]
        {
            // The placeholder path is discarded: Display renders these
            // variants as "pystr" / "pytok" regardless of contents.
            out.push(Output::PythonStr("file.py".to_string()).to_string());
            out.push(Output::PythonTok("file.py".to_string()).to_string());
        }

        out
    }
}