Unverified Commit a0557642 authored by Chang Su, committed by GitHub

[router][lint] Add unused_qualifications to cargo lint warnings (#11366)

parent 84768d10
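For context, `unused_qualifications` is a built-in, allow-by-default rustc lint that warns when an item is referred to through a longer path than necessary, for example a fully qualified path for a name that is already imported or available from the prelude. A minimal standalone sketch (not code from this repo) of what trips the lint once it is set to `warn`:

```rust
// Enable the lint at the crate root; the `[lints.rust]` entry added in the
// Cargo.toml hunk below applies the same level package-wide instead.
#![warn(unused_qualifications)]

use std::collections::HashMap;

fn build_map() -> HashMap<&'static str, u32> {
    // Warns: `HashMap` is already imported, so the `std::collections::`
    // prefix is an unused qualification; plain `HashMap::new()` is enough.
    let mut map = std::collections::HashMap::new();
    map.insert("answer", 42);
    map
}

fn main() {
    println!("{:?}", build_map());
}
```

The hunks below do exactly this kind of cleanup: the lint is enabled in Cargo.toml, the generated gRPC modules are allowed to keep their qualified paths, and the hand-written qualifications flagged by the lint are shortened.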
@@ -8,6 +8,9 @@ default = ["grpc-client"]
 grpc-client = []
 grpc-server = []
+[lints.rust]
+unused_qualifications = "warn"
 [lib]
 name = "sglang_router_rs"
 # Pure Rust library: Just omit crate-type (defaults to rlib)
...
@@ -18,11 +18,11 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
         // Add a module-level attribute for documentation and clippy warnings
         .server_mod_attribute(
             "sglang.grpc.scheduler",
-            "#[allow(unused, clippy::mixed_attributes_style)]",
+            "#[allow(unused, unused_qualifications, clippy::mixed_attributes_style)]",
         )
         .client_mod_attribute(
             "sglang.grpc.scheduler",
-            "#[allow(unused, clippy::mixed_attributes_style)]",
+            "#[allow(unused, unused_qualifications, clippy::mixed_attributes_style)]",
         )
         // Compile the proto file with the custom config
         .compile_protos_with_config(
...
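The `server_mod_attribute` and `client_mod_attribute` calls tell `tonic_build` to attach the given attribute to the generated server and client modules. Because prost/tonic-generated code routinely uses fully qualified paths, `unused_qualifications` is added to the existing `allow` list so the new crate-wide lint does not fire inside code the project does not author. Roughly, and only as an illustrative sketch (the real module name and contents come from the .proto file), the generated output ends up shaped like this:

```rust
// Hypothetical stand-in for a tonic-generated module: the configured attribute
// sits on the module, silencing the lint for everything inside it.
#[allow(unused, unused_qualifications, clippy::mixed_attributes_style)]
pub mod scheduler_client {
    // Generated code often spells out paths like `std::result::Result` or
    // `core::option::Option`; the allow above keeps those from warning.
    pub fn placeholder() {}
}

fn main() {
    scheduler_client::placeholder();
}
```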
@@ -286,8 +286,7 @@ impl Manager for ConversationItemOracleConnectionManager {
     fn create(
         &self,
-    ) -> impl std::future::Future<Output = std::result::Result<Connection, oracle::Error>> + Send
-    {
+    ) -> impl std::future::Future<Output = Result<Connection, oracle::Error>> + Send {
         let params = self.params.clone();
         async move {
             let mut conn = Connection::connect(
...
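The only semantic change in this hunk is dropping the `std::result::` prefix: `Result` is in the prelude, so the shorter spelling names the same type and satisfies the new lint, and the now-shorter signature fits on one line. A standalone sketch (hypothetical function, not repo code) of that equivalence:

```rust
use std::num::ParseIntError;

// The prelude `Result` and `std::result::Result` are the same enum, so the two
// spellings are interchangeable; the lint simply prefers the shorter one.
fn parse_port(s: &str) -> Result<u16, ParseIntError> {
    s.parse::<u16>()
}

fn main() {
    let qualified: std::result::Result<u16, ParseIntError> = parse_port("8080");
    assert_eq!(qualified, Ok(8080));
    println!("{qualified:?}");
}
```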
 // Integration test for Responses API
+use axum::http::StatusCode;
 use sglang_router_rs::protocols::spec::{
     GenerationRequest, ReasoningEffort, ResponseInput, ResponseReasoningParam, ResponseStatus,
     ResponseTool, ResponseToolType, ResponsesRequest, ResponsesResponse, ServiceTier, ToolChoice,
@@ -99,11 +100,11 @@ async fn test_non_streaming_mcp_minimal_e2e_with_persistence() {
         parallel_tool_calls: true,
         previous_response_id: None,
         reasoning: None,
-        service_tier: sglang_router_rs::protocols::spec::ServiceTier::Auto,
+        service_tier: ServiceTier::Auto,
         store: true,
         stream: false,
         temperature: Some(0.2),
-        tool_choice: sglang_router_rs::protocols::spec::ToolChoice::default(),
+        tool_choice: ToolChoice::default(),
         tools: vec![ResponseTool {
             r#type: ResponseToolType::Mcp,
             server_url: Some(mcp.url()),
@@ -115,7 +116,7 @@ async fn test_non_streaming_mcp_minimal_e2e_with_persistence() {
         }],
         top_logprobs: 0,
         top_p: None,
-        truncation: sglang_router_rs::protocols::spec::Truncation::Disabled,
+        truncation: Truncation::Disabled,
         user: None,
         request_id: "resp_test_mcp_e2e".to_string(),
         priority: 0,
@@ -132,7 +133,7 @@ async fn test_non_streaming_mcp_minimal_e2e_with_persistence() {
         .route_responses(None, &req, req.model.as_deref())
         .await;
-    assert_eq!(resp.status(), axum::http::StatusCode::OK);
+    assert_eq!(resp.status(), StatusCode::OK);
     let body_bytes = axum::body::to_bytes(resp.into_body(), usize::MAX)
         .await
@@ -289,7 +290,7 @@ async fn test_conversations_crud_basic() {
     // Create
     let create_body = serde_json::json!({ "metadata": { "project": "alpha" } });
     let create_resp = router.create_conversation(None, &create_body).await;
-    assert_eq!(create_resp.status(), axum::http::StatusCode::OK);
+    assert_eq!(create_resp.status(), StatusCode::OK);
     let create_bytes = axum::body::to_bytes(create_resp.into_body(), usize::MAX)
         .await
         .unwrap();
@@ -300,7 +301,7 @@ async fn test_conversations_crud_basic() {
     // Get
     let get_resp = router.get_conversation(None, conv_id).await;
-    assert_eq!(get_resp.status(), axum::http::StatusCode::OK);
+    assert_eq!(get_resp.status(), StatusCode::OK);
     let get_bytes = axum::body::to_bytes(get_resp.into_body(), usize::MAX)
         .await
         .unwrap();
@@ -312,7 +313,7 @@ async fn test_conversations_crud_basic() {
     let upd_resp = router
         .update_conversation(None, conv_id, &update_body)
         .await;
-    assert_eq!(upd_resp.status(), axum::http::StatusCode::OK);
+    assert_eq!(upd_resp.status(), StatusCode::OK);
     let upd_bytes = axum::body::to_bytes(upd_resp.into_body(), usize::MAX)
         .await
         .unwrap();
@@ -322,7 +323,7 @@ async fn test_conversations_crud_basic() {
     // Delete
     let del_resp = router.delete_conversation(None, conv_id).await;
-    assert_eq!(del_resp.status(), axum::http::StatusCode::OK);
+    assert_eq!(del_resp.status(), StatusCode::OK);
     let del_bytes = axum::body::to_bytes(del_resp.into_body(), usize::MAX)
         .await
         .unwrap();
@@ -331,7 +332,7 @@ async fn test_conversations_crud_basic() {
     // Get again -> 404
     let not_found = router.get_conversation(None, conv_id).await;
-    assert_eq!(not_found.status(), axum::http::StatusCode::NOT_FOUND);
+    assert_eq!(not_found.status(), StatusCode::NOT_FOUND);
 }
 #[test]
@@ -662,11 +663,7 @@ async fn test_multi_turn_loop_with_mcp() {
     let response = router.route_responses(None, &req, None).await;
     // Check status
-    assert_eq!(
-        response.status(),
-        axum::http::StatusCode::OK,
-        "Request should succeed"
-    );
+    assert_eq!(response.status(), StatusCode::OK, "Request should succeed");
     // Read the response body
     use axum::body::to_bytes;
@@ -837,7 +834,7 @@ async fn test_max_tool_calls_limit() {
     };
     let response = router.route_responses(None, &req, None).await;
-    assert_eq!(response.status(), axum::http::StatusCode::OK);
+    assert_eq!(response.status(), StatusCode::OK);
     use axum::body::to_bytes;
     let response_body = response.into_body();
@@ -1037,7 +1034,7 @@ async fn test_streaming_with_mcp_tool_calls() {
     // Verify streaming response
     assert_eq!(
         response.status(),
-        axum::http::StatusCode::OK,
+        StatusCode::OK,
         "Streaming request should succeed"
     );
@@ -1312,7 +1309,7 @@ async fn test_streaming_multi_turn_with_mcp() {
     };
     let response = router.route_responses(None, &req, None).await;
-    assert_eq!(response.status(), axum::http::StatusCode::OK);
+    assert_eq!(response.status(), StatusCode::OK);
     use axum::body::to_bytes;
     let body_bytes = to_bytes(response.into_body(), usize::MAX).await.unwrap();
...
@@ -96,7 +96,7 @@ async fn test_openai_router_creation() {
         None,
         Arc::new(MemoryResponseStorage::new()),
         Arc::new(MemoryConversationStorage::new()),
-        Arc::new(sglang_router_rs::data_connector::MemoryConversationItemStorage::new()),
+        Arc::new(MemoryConversationItemStorage::new()),
     )
     .await;
@@ -146,7 +146,7 @@ async fn test_openai_router_models() {
         None,
         Arc::new(MemoryResponseStorage::new()),
         Arc::new(MemoryConversationStorage::new()),
-        Arc::new(sglang_router_rs::data_connector::MemoryConversationItemStorage::new()),
+        Arc::new(MemoryConversationItemStorage::new()),
     )
     .await
     .unwrap();
@@ -226,7 +226,7 @@ async fn test_openai_router_responses_with_mock() {
         None,
         storage.clone(),
         Arc::new(MemoryConversationStorage::new()),
-        Arc::new(sglang_router_rs::data_connector::MemoryConversationItemStorage::new()),
+        Arc::new(MemoryConversationItemStorage::new()),
     )
     .await
     .unwrap();
@@ -487,7 +487,7 @@ async fn test_openai_router_responses_streaming_with_mock() {
         None,
         storage.clone(),
         Arc::new(MemoryConversationStorage::new()),
-        Arc::new(sglang_router_rs::data_connector::MemoryConversationItemStorage::new()),
+        Arc::new(MemoryConversationItemStorage::new()),
     )
     .await
     .unwrap();
@@ -592,7 +592,7 @@ async fn test_unsupported_endpoints() {
         None,
         Arc::new(MemoryResponseStorage::new()),
         Arc::new(MemoryConversationStorage::new()),
-        Arc::new(sglang_router_rs::data_connector::MemoryConversationItemStorage::new()),
+        Arc::new(MemoryConversationItemStorage::new()),
     )
     .await
     .unwrap();
@@ -634,7 +634,7 @@ async fn test_openai_router_chat_completion_with_mock() {
         None,
         Arc::new(MemoryResponseStorage::new()),
         Arc::new(MemoryConversationStorage::new()),
-        Arc::new(sglang_router_rs::data_connector::MemoryConversationItemStorage::new()),
+        Arc::new(MemoryConversationItemStorage::new()),
     )
     .await
     .unwrap();
@@ -677,7 +677,7 @@ async fn test_openai_e2e_with_server() {
         None,
         Arc::new(MemoryResponseStorage::new()),
         Arc::new(MemoryConversationStorage::new()),
-        Arc::new(sglang_router_rs::data_connector::MemoryConversationItemStorage::new()),
+        Arc::new(MemoryConversationItemStorage::new()),
     )
     .await
     .unwrap();
@@ -748,7 +748,7 @@ async fn test_openai_router_chat_streaming_with_mock() {
         None,
         Arc::new(MemoryResponseStorage::new()),
         Arc::new(MemoryConversationStorage::new()),
-        Arc::new(sglang_router_rs::data_connector::MemoryConversationItemStorage::new()),
+        Arc::new(MemoryConversationItemStorage::new()),
     )
     .await
     .unwrap();
@@ -802,7 +802,7 @@ async fn test_openai_router_circuit_breaker() {
         Some(cb_config),
         Arc::new(MemoryResponseStorage::new()),
         Arc::new(MemoryConversationStorage::new()),
-        Arc::new(sglang_router_rs::data_connector::MemoryConversationItemStorage::new()),
+        Arc::new(MemoryConversationItemStorage::new()),
     )
     .await
     .unwrap();
@@ -831,7 +831,7 @@ async fn test_openai_router_models_auth_forwarding() {
         None,
         Arc::new(MemoryResponseStorage::new()),
         Arc::new(MemoryConversationStorage::new()),
-        Arc::new(sglang_router_rs::data_connector::MemoryConversationItemStorage::new()),
+        Arc::new(MemoryConversationItemStorage::new()),
     )
     .await
     .unwrap();
...