Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
ModelZoo
Qwen_lmdeploy
Commits
22cd7d15
Unverified
Commit
22cd7d15
authored
Sep 26, 2023
by
aisensiy
Committed by
GitHub
Sep 26, 2023
Browse files
Fix compatibility issues with Pydantic 2 (#465)
parent
a54e3e09
Changes
2
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
6 additions
and
6 deletions
+6
-6
lmdeploy/serve/openai/api_server.py
lmdeploy/serve/openai/api_server.py
+3
-3
lmdeploy/serve/openai/protocol.py
lmdeploy/serve/openai/protocol.py
+3
-3
No files found.
lmdeploy/serve/openai/api_server.py
View file @
22cd7d15
...
...
@@ -60,7 +60,7 @@ def create_error_response(status: HTTPStatus, message: str):
-    return JSONResponse(ErrorResponse(message=message,
-                                      type='invalid_request_error',
-                                      code=status.value).dict())
+    return JSONResponse(ErrorResponse(message=message,
+                                      type='invalid_request_error',
+                                      code=status.value).model_dump())

 async def check_request(request) -> Optional[JSONResponse]:
...
...
@@ -152,7 +152,7 @@ async def chat_completions_v1(request: ChatCompletionRequest,
         model=model_name,
         choices=[choice_data],
     )
-    response_json = response.json(ensure_ascii=False)
+    response_json = response.model_dump_json()

     return response_json
...
...
@@ -167,7 +167,7 @@ async def chat_completions_v1(request: ChatCompletionRequest,
             chunk = ChatCompletionStreamResponse(id=request_id,
                                                  choices=[choice_data],
                                                  model=model_name)
-            data = chunk.json(exclude_unset=True, ensure_ascii=False)
+            data = chunk.model_dump_json(exclude_unset=True)
             yield f'data: {data}\n\n'

         async for res in result_generator:
...
...
lmdeploy/serve/openai/protocol.py
View file @
22cd7d15
...
...
@@ -84,7 +84,7 @@ class ChatCompletionResponseChoice(BaseModel):
     """Chat completion response choices."""
     index: int
     message: ChatMessage
-    finish_reason: Optional[Literal['stop', 'length']]
+    finish_reason: Optional[Literal['stop', 'length']] = None

 class ChatCompletionResponse(BaseModel):
...
...
@@ -107,7 +107,7 @@ class ChatCompletionResponseStreamChoice(BaseModel):
     """Chat completion response stream choice."""
     index: int
     delta: DeltaMessage
-    finish_reason: Optional[Literal['stop', 'length']]
+    finish_reason: Optional[Literal['stop', 'length']] = None

 class ChatCompletionStreamResponse(BaseModel):
...
...
@@ -142,7 +142,7 @@ class CompletionResponseChoice(BaseModel):
     index: int
     text: str
     logprobs: Optional[int] = None
-    finish_reason: Optional[Literal['stop', 'length']]
+    finish_reason: Optional[Literal['stop', 'length']] = None

 class CompletionResponse(BaseModel):
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment