"src/diffusers/pipelines/audioldm2/modeling_audioldm2.py" did not exist on "88d269461ca9b5acfae3dedd732438266f526109"
server.rs 6.44 KB
Newer Older
1
2
3
use crate::{
    Batcher, ErrorResponse, GenerateParameters, GenerateRequest, GeneratedText, Validation,
};
Olivier Dehaene's avatar
Olivier Dehaene committed
4
use axum::extract::Extension;
5
6
use axum::http::{HeaderMap, StatusCode};
use axum::response::IntoResponse;
Olivier Dehaene's avatar
Olivier Dehaene committed
7
use axum::routing::{get, post};
Olivier Dehaene's avatar
Olivier Dehaene committed
8
9
use axum::{Json, Router};
use std::net::SocketAddr;
Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
10
use std::sync::Arc;
11
use text_generation_client::ShardedClient;
Olivier Dehaene's avatar
Olivier Dehaene committed
12
use tokenizers::Tokenizer;
Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
13
14
use tokio::signal;
use tokio::sync::Semaphore;
Olivier Dehaene's avatar
Olivier Dehaene committed
15
16
17
use tokio::time::Instant;
use tracing::instrument;

Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
18
19
20
21
22
23
/// State shared by every request handler (cheaply cloneable per request).
#[derive(Clone)]
struct ServerState {
    /// Bounds how many requests may be processed concurrently; a request that
    /// cannot acquire a permit is rejected as overloaded.
    limit_concurrent_requests: Arc<Semaphore>,
    /// Handle to the batching inference engine.
    batcher: Batcher,
    /// Request validation (input checks via the tokenizer workers).
    validation: Validation,
}

Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
26
/// Health check method
Olivier Dehaene's avatar
Olivier Dehaene committed
27
#[instrument(skip(state), fields(time, time_per_token))]
28
async fn health(state: Extension<ServerState>) -> Result<(), (StatusCode, Json<ErrorResponse>)> {
Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
29
30
31
32
33
34
35
36
    // TODO: while this is the best health check we can do, it is a bit on the heavy side and might
    //       be a bit too slow for a health check.
    //       What we should do instead if check if the gRPC channels are still healthy.

    // Limit concurrent requests by acquiring a permit from the semaphore
    let _permit = state.limit_concurrent_requests.try_acquire().map_err(|_| {
        (
            StatusCode::TOO_MANY_REQUESTS,
37
38
39
            Json(ErrorResponse {
                error: "Model is overloaded".to_string(),
            }),
Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
40
41
42
43
        )
    })?;

    // Send a small inference request
Olivier Dehaene's avatar
Olivier Dehaene committed
44
    state
Olivier Dehaene's avatar
Olivier Dehaene committed
45
        .batcher
Olivier Dehaene's avatar
Olivier Dehaene committed
46
47
48
49
50
51
52
53
54
55
56
57
58
        .infer(
            1,
            GenerateRequest {
                inputs: "liveness".to_string(),
                parameters: GenerateParameters {
                    temperature: 1.0,
                    top_k: 0,
                    top_p: 1.0,
                    do_sample: false,
                    max_new_tokens: 1,
                },
            },
        )
Olivier Dehaene's avatar
Olivier Dehaene committed
59
60
        .await?;
    Ok(())
Olivier Dehaene's avatar
Olivier Dehaene committed
61
62
}

Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
63
/// Generate method
64
65
66
67
68
69
70
71
72
73
#[instrument(
    skip(state),
    fields(
        total_time,
        validation_time,
        queue_time,
        inference_time,
        time_per_token
    )
)]
Olivier Dehaene's avatar
Olivier Dehaene committed
74
async fn generate(
Olivier Dehaene's avatar
Olivier Dehaene committed
75
    state: Extension<ServerState>,
Olivier Dehaene's avatar
Olivier Dehaene committed
76
    req: Json<GenerateRequest>,
77
) -> Result<impl IntoResponse, (StatusCode, Json<ErrorResponse>)> {
78
    let start_time = Instant::now();
Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
79
80
    // Limit concurrent requests by acquiring a permit from the semaphore
    let _permit = state.limit_concurrent_requests.try_acquire().map_err(|_| {
81
        tracing::error!("Model is overloaded");
Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
82
83
        (
            StatusCode::TOO_MANY_REQUESTS,
84
85
86
            Json(ErrorResponse {
                error: "Model is overloaded".to_string(),
            }),
Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
87
88
        )
    })?;
Olivier Dehaene's avatar
Olivier Dehaene committed
89

Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
90
    // Validate request
Olivier Dehaene's avatar
Olivier Dehaene committed
91
    let (input_length, validated_request) = state
Olivier Dehaene's avatar
Olivier Dehaene committed
92
        .validation
Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
93
        // FIXME: can't we get rid of the cloning here??
Olivier Dehaene's avatar
Olivier Dehaene committed
94
        .validate(GenerateRequest {
Olivier Dehaene's avatar
Olivier Dehaene committed
95
96
97
            inputs: req.inputs.clone(),
            parameters: req.parameters.clone(),
        })
98
99
100
101
102
        .await
        .map_err(|err| {
            tracing::error!("{}", err.to_string());
            err
        })?;
Olivier Dehaene's avatar
Olivier Dehaene committed
103

Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
104
    // Inference
105
106
107
108
109
110
111
112
    let response = state
        .batcher
        .infer(input_length, validated_request)
        .await
        .map_err(|err| {
            tracing::error!("{}", err.to_string());
            err
        })?;
Olivier Dehaene's avatar
Olivier Dehaene committed
113

114
115
116
117
118
    // Timings
    let total_time = start_time.elapsed();
    let validation_time = response.queued - start_time;
    let queue_time = response.start - response.queued;
    let inference_time = response.end - response.start;
119
    let time_per_token = inference_time / response.tokens;
120
121
122
123
124
125
126
127
128
129
130
131
132
133

    // Headers
    let mut headers = HeaderMap::new();
    headers.insert(
        "x-total-time",
        total_time.as_millis().to_string().parse().unwrap(),
    );
    headers.insert(
        "x-validation-time",
        validation_time.as_millis().to_string().parse().unwrap(),
    );
    headers.insert(
        "x-queue-time",
        queue_time.as_millis().to_string().parse().unwrap(),
Olivier Dehaene's avatar
Olivier Dehaene committed
134
    );
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
    headers.insert(
        "x-inference-time",
        inference_time.as_millis().to_string().parse().unwrap(),
    );
    headers.insert(
        "x-time-per-token",
        time_per_token.as_millis().to_string().parse().unwrap(),
    );

    // Tracing metadata
    tracing::Span::current().record("total_time", format!("{:?}", total_time));
    tracing::Span::current().record("validation_time", format!("{:?}", validation_time));
    tracing::Span::current().record("queue_time", format!("{:?}", queue_time));
    tracing::Span::current().record("inference_time", format!("{:?}", inference_time));
    tracing::Span::current().record("time_per_token", format!("{:?}", time_per_token));
    tracing::info!("Output: {}", response.output);
Olivier Dehaene's avatar
Olivier Dehaene committed
151

Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
152
    // Send response
153
154
155
156
    let response = vec![GeneratedText {
        generated_text: response.output,
    }];
    Ok((headers, Json(response)))
Olivier Dehaene's avatar
Olivier Dehaene committed
157
158
}

Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
159
160
161
162
163
164
/// Serving method
#[allow(clippy::too_many_arguments)]
pub async fn run(
    max_concurrent_requests: usize,
    max_input_length: usize,
    max_batch_size: usize,
165
    max_waiting_tokens: usize,
Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
166
167
168
169
170
171
    client: ShardedClient,
    tokenizer: Tokenizer,
    validation_workers: usize,
    addr: SocketAddr,
) {
    // Create state
172
    let batcher = Batcher::new(client, max_batch_size, max_waiting_tokens);
Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
173
174
175
176
177
178
179
180
    let validation = Validation::new(validation_workers, tokenizer, max_input_length);
    let shared_state = ServerState {
        validation,
        batcher,
        limit_concurrent_requests: Arc::new(Semaphore::new(max_concurrent_requests)),
    };

    // Create router
Olivier Dehaene's avatar
Olivier Dehaene committed
181
182
183
    let app = Router::new()
        .route("/generate", post(generate))
        .layer(Extension(shared_state.clone()))
Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
184
        .route("/health", get(health))
Olivier Dehaene's avatar
Olivier Dehaene committed
185
        .layer(Extension(shared_state.clone()));
Olivier Dehaene's avatar
Olivier Dehaene committed
186

Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
187
    // Run server
Olivier Dehaene's avatar
Olivier Dehaene committed
188
    axum::Server::bind(&addr)
Olivier Dehaene's avatar
Olivier Dehaene committed
189
        .serve(app.into_make_service())
Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
190
191
        // Wait until all requests are finished to shut down
        .with_graceful_shutdown(shutdown_signal())
Olivier Dehaene's avatar
Olivier Dehaene committed
192
193
        .await
        .unwrap();
Olivier Dehaene's avatar
Olivier Dehaene committed
194
}
Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221

/// Shutdown signal handler
async fn shutdown_signal() {
    let ctrl_c = async {
        signal::ctrl_c()
            .await
            .expect("failed to install Ctrl+C handler");
    };

    #[cfg(unix)]
    let terminate = async {
        signal::unix::signal(signal::unix::SignalKind::terminate())
            .expect("failed to install signal handler")
            .recv()
            .await;
    };

    #[cfg(not(unix))]
    let terminate = std::future::pending::<()>();

    tokio::select! {
        _ = ctrl_c => {},
        _ = terminate => {},
    }

    tracing::info!("signal received, starting graceful shutdown");
}