use crate::{Batcher, GenerateParameters, GenerateRequest, GeneratedText, Validation};
use axum::extract::Extension;
use axum::http::{HeaderMap, StatusCode};
use axum::response::IntoResponse;
use axum::routing::{get, post};
use axum::{Json, Router};
use bloom_inference_client::ShardedClient;
use std::net::SocketAddr;
use std::sync::Arc;
use tokenizers::Tokenizer;
use tokio::signal;
use tokio::sync::Semaphore;
use tokio::time::Instant;
use tracing::instrument;

// Server shared state
#[derive(Clone)]
struct ServerState {
    validation: Validation,
    batcher: Batcher,
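    // Caps the number of requests handled concurrently; each handler must
    // acquire a permit from this semaphore before doing any work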
    limit_concurrent_requests: Arc<Semaphore>,
}

/// Health check method
#[instrument(skip(state), fields(time, time_per_token))]
async fn health(state: Extension<ServerState>) -> Result<(), (StatusCode, String)> {
    // TODO: while this is the best health check we can do, it is a bit on the heavy side and might
    //       be a bit too slow for a health check.
    //       What we should do instead is check if the gRPC channels are still healthy.

    // Limit concurrent requests by acquiring a permit from the semaphore
    let _permit = state.limit_concurrent_requests.try_acquire().map_err(|_| {
        (
            StatusCode::TOO_MANY_REQUESTS,
            "Model is overloaded".to_string(),
        )
    })?;

    // Send a small inference request
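    // (the first argument to `infer` is the input length in tokens)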
    state
        .batcher
        .infer(
            1,
            GenerateRequest {
                inputs: "liveness".to_string(),
                parameters: GenerateParameters {
                    temperature: 1.0,
                    top_k: 0,
                    top_p: 1.0,
                    do_sample: false,
                    max_new_tokens: 1,
                },
            },
        )
        .await?;
    Ok(())
}

/// Generate method
#[instrument(
    skip(state),
    fields(
        total_time,
        validation_time,
        queue_time,
        inference_time,
        time_per_token
    )
)]
async fn generate(
    state: Extension<ServerState>,
    req: Json<GenerateRequest>,
) -> Result<impl IntoResponse, (StatusCode, String)> {
    let start_time = Instant::now();
    // Limit concurrent requests by acquiring a permit from the semaphore
    let _permit = state.limit_concurrent_requests.try_acquire().map_err(|_| {
        (
            StatusCode::TOO_MANY_REQUESTS,
            "Model is overloaded".to_string(),
        )
    })?;

    // Validate request
    let (input_length, validated_request) = state
        .validation
        // FIXME: can't we get rid of the cloning here??
        .validate(GenerateRequest {
            inputs: req.inputs.clone(),
            parameters: req.parameters.clone(),
        })
        .await?;

    // Inference
    let response = state.batcher.infer(input_length, validated_request).await?;

    // Timings
    let total_time = start_time.elapsed();
    let validation_time = response.queued - start_time;
    let queue_time = response.start - response.queued;
    let inference_time = response.end - response.start;
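    // Note: this divides by the requested `max_new_tokens`; if generation stopped
    // early, the per-token figure underestimates the real cost per generated token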
    let time_per_token = inference_time / req.parameters.max_new_tokens;

    // Headers
    let mut headers = HeaderMap::new();
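    // The millisecond values below are plain ASCII digit strings, so parsing them
    // into header values cannot fail and the `unwrap` calls are safe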
    headers.insert(
        "x-total-time",
        total_time.as_millis().to_string().parse().unwrap(),
    );
    headers.insert(
        "x-validation-time",
        validation_time.as_millis().to_string().parse().unwrap(),
    );
    headers.insert(
        "x-queue-time",
        queue_time.as_millis().to_string().parse().unwrap(),
    );
    headers.insert(
        "x-inference-time",
        inference_time.as_millis().to_string().parse().unwrap(),
    );
    headers.insert(
        "x-time-per-token",
        time_per_token.as_millis().to_string().parse().unwrap(),
    );

    // Tracing metadata
    tracing::Span::current().record("total_time", format!("{:?}", total_time));
    tracing::Span::current().record("validation_time", format!("{:?}", validation_time));
    tracing::Span::current().record("queue_time", format!("{:?}", queue_time));
    tracing::Span::current().record("inference_time", format!("{:?}", inference_time));
    tracing::Span::current().record("time_per_token", format!("{:?}", time_per_token));
    tracing::info!("Output: {}", response.output);

    // Send response
    let response = vec![GeneratedText {
        generated_text: response.output,
    }];
    Ok((headers, Json(response)))
}

/// Serving method
#[allow(clippy::too_many_arguments)]
pub async fn run(
    max_concurrent_requests: usize,
    max_input_length: usize,
    max_batch_size: usize,
    max_waiting_tokens: usize,
    client: ShardedClient,
    tokenizer: Tokenizer,
    validation_workers: usize,
    addr: SocketAddr,
) {
    // Create state
    let batcher = Batcher::new(client, max_batch_size, max_waiting_tokens);
    let validation = Validation::new(validation_workers, tokenizer, max_input_length);
    let shared_state = ServerState {
        validation,
        batcher,
        limit_concurrent_requests: Arc::new(Semaphore::new(max_concurrent_requests)),
    };

    // Create router
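    // (each `layer` call attaches the shared state Extension to the routes registered above it)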
    let app = Router::new()
        .route("/generate", post(generate))
        .layer(Extension(shared_state.clone()))
        .route("/health", get(health))
        .layer(Extension(shared_state.clone()));

    // Run server
    axum::Server::bind(&addr)
        .serve(app.into_make_service())
        // Wait until all requests are finished to shut down
        .with_graceful_shutdown(shutdown_signal())
        .await
        .unwrap();
}

/// Shutdown signal handler
async fn shutdown_signal() {
    let ctrl_c = async {
        signal::ctrl_c()
            .await
            .expect("failed to install Ctrl+C handler");
    };

    #[cfg(unix)]
    let terminate = async {
        signal::unix::signal(signal::unix::SignalKind::terminate())
            .expect("failed to install signal handler")
            .recv()
            .await;
    };

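    // On non-Unix targets only Ctrl+C is handled; this placeholder future never resolves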
    #[cfg(not(unix))]
    let terminate = std::future::pending::<()>();

    tokio::select! {
        _ = ctrl_c => {},
        _ = terminate => {},
    }

    tracing::info!("signal received, starting graceful shutdown");
}