Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
change
sglang
Commits
03c039c4
Unverified
Commit
03c039c4
authored
Jun 25, 2025
by
ybyang
Committed by
GitHub
Jun 24, 2025
Browse files
[OAI] patch origin request_id logic (#7508)
parent
57ab7769
Changes
5
Hide whitespace changes
Inline
Side-by-side
Showing
5 changed files
with
19 additions
and
5 deletions
+19
-5
python/sglang/srt/entrypoints/openai/protocol.py
python/sglang/srt/entrypoints/openai/protocol.py
+6
-3
python/sglang/srt/entrypoints/openai/serving_chat.py
python/sglang/srt/entrypoints/openai/serving_chat.py
+1
-0
python/sglang/srt/entrypoints/openai/serving_completions.py
python/sglang/srt/entrypoints/openai/serving_completions.py
+1
-0
python/sglang/srt/entrypoints/openai/serving_embedding.py
python/sglang/srt/entrypoints/openai/serving_embedding.py
+1
-0
python/sglang/srt/managers/io_struct.py
python/sglang/srt/managers/io_struct.py
+10
-2
No files found.
python/sglang/srt/entrypoints/openai/protocol.py
View file @
03c039c4
...
...
@@ -196,6 +196,9 @@ class CompletionRequest(BaseModel):
     bootstrap_port: Optional[int] = None
     bootstrap_room: Optional[int] = None

+    # For request id
+    rid: Optional[Union[List[str], str]] = None
+
     @field_validator("max_tokens")
     @classmethod
     def validate_max_tokens_positive(cls, v):
...
...
@@ -430,8 +433,8 @@ class ChatCompletionRequest(BaseModel):
     stream_reasoning: bool = True
     chat_template_kwargs: Optional[Dict] = None

-    # The request id.
-    rid: Optional[str] = None
+    # For request id
+    rid: Optional[Union[List[str], str]] = None

     # For PD disaggregation
     bootstrap_host: Optional[str] = None
...
...
@@ -529,7 +532,7 @@ class EmbeddingRequest(BaseModel):
     user: Optional[str] = None
     # The request id.
-    rid: Optional[str] = None
+    rid: Optional[Union[List[str], str]] = None


 class EmbeddingObject(BaseModel):
...
...
python/sglang/srt/entrypoints/openai/serving_chat.py
View file @
03c039c4
...
...
@@ -95,6 +95,7 @@ class OpenAIServingChat(OpenAIServingBase):
             bootstrap_port=request.bootstrap_port,
             bootstrap_room=request.bootstrap_room,
             return_hidden_states=request.return_hidden_states,
+            rid=request.rid,
         )

         return adapted_request, request
...
...
python/sglang/srt/entrypoints/openai/serving_completions.py
View file @
03c039c4
...
...
@@ -87,6 +87,7 @@ class OpenAIServingCompletion(OpenAIServingBase):
             bootstrap_port=request.bootstrap_port,
             bootstrap_room=request.bootstrap_room,
             return_hidden_states=request.return_hidden_states,
+            rid=request.rid,
         )

         return adapted_request, request
...
...
python/sglang/srt/entrypoints/openai/serving_embedding.py
View file @
03c039c4
...
...
@@ -119,6 +119,7 @@ class OpenAIServingEmbedding(OpenAIServingBase):
         adapted_request = EmbeddingReqInput(
             **prompt_kwargs,
+            rid=request.rid,
         )

         return adapted_request, request
...
...
python/sglang/srt/managers/io_struct.py
View file @
03c039c4
...
...
@@ -319,8 +319,16 @@ class GenerateReqInput:
         """Normalize request IDs for batch processing."""
         if self.rid is None:
             self.rid = [uuid.uuid4().hex for _ in range(num)]
-        elif not isinstance(self.rid, list):
-            raise ValueError("The rid should be a list for batch processing.")
+        elif isinstance(self.rid, str):
+            new_rids = [f"{self.rid}_{i}" for i in range(num)]
+            self.rid = new_rids
+        elif isinstance(self.rid, list):
+            if len(self.rid) != num:
+                raise ValueError(
+                    "The specified rids length mismatch with the batch_size for batch processing."
+                )
+        else:
+            raise ValueError("The rid should be a string or a list of strings.")

     def _normalize_logprob_params(self, num):
         """Normalize logprob-related parameters for batch processing."""
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment