Unverified Commit 22cd7d15 authored by aisensiy, committed by GitHub

Fix compatibility issues with Pydantic 2 (#465)

parent a54e3e09
@@ -60,7 +60,7 @@ def create_error_response(status: HTTPStatus, message: str):
     return JSONResponse(
         ErrorResponse(message=message,
                       type='invalid_request_error',
-                      code=status.value).dict())
+                      code=status.value).model_dump())

 async def check_request(request) -> Optional[JSONResponse]:
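Pydantic 2 deprecates BaseModel.dict() in favour of model_dump(), which is the rename this hunk applies. A minimal sketch of the pattern; the ErrorResponse field layout below is assumed for illustration, not taken from the project:

from http import HTTPStatus
from pydantic import BaseModel

class ErrorResponse(BaseModel):  # field layout assumed for this sketch
    message: str
    type: str
    code: int

err = ErrorResponse(message='model not found',
                    type='invalid_request_error',
                    code=HTTPStatus.NOT_FOUND.value)

# Pydantic 1: err.dict()
# Pydantic 2: .dict() is deprecated; model_dump() is the direct replacement.
print(err.model_dump())
# {'message': 'model not found', 'type': 'invalid_request_error', 'code': 404}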
@@ -152,7 +152,7 @@ async def chat_completions_v1(request: ChatCompletionRequest,
         model=model_name,
         choices=[choice_data],
     )
-    response_json = response.json(ensure_ascii=False)
+    response_json = response.model_dump_json()

     return response_json
@@ -167,7 +167,7 @@ async def chat_completions_v1(request: ChatCompletionRequest,
     chunk = ChatCompletionStreamResponse(id=request_id,
                                          choices=[choice_data],
                                          model=model_name)
-    data = chunk.json(exclude_unset=True, ensure_ascii=False)
+    data = chunk.model_dump_json(exclude_unset=True)
     yield f'data: {data}\n\n'

     async for res in result_generator:
...
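The two .json() hunks above follow the same migration rule: Pydantic 2 replaces BaseModel.json() with model_dump_json(). The new method has no ensure_ascii argument and writes non-ASCII characters as-is, so ensure_ascii=False is simply dropped. A sketch with stand-in models (the real DeltaMessage and stream-choice fields may differ):

from typing import Literal, Optional
from pydantic import BaseModel

class DeltaMessage(BaseModel):  # shape assumed for this sketch
    role: Optional[str] = None
    content: Optional[str] = None

class StreamChoice(BaseModel):  # simplified stand-in for the stream choice model
    index: int
    delta: DeltaMessage
    finish_reason: Optional[Literal['stop', 'length']] = None

chunk = StreamChoice(index=0, delta=DeltaMessage(content='你好'))

# Pydantic 1: chunk.json(exclude_unset=True, ensure_ascii=False)
# Pydantic 2: model_dump_json() keeps exclude_unset and emits non-ASCII
# characters directly, so ensure_ascii=False is no longer needed.
print(chunk.model_dump_json(exclude_unset=True))
# {"index":0,"delta":{"content":"你好"}}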
@@ -84,7 +84,7 @@ class ChatCompletionResponseChoice(BaseModel):
     """Chat completion response choices."""
     index: int
     message: ChatMessage
-    finish_reason: Optional[Literal['stop', 'length']]
+    finish_reason: Optional[Literal['stop', 'length']] = None

 class ChatCompletionResponse(BaseModel):
@@ -107,7 +107,7 @@ class ChatCompletionResponseStreamChoice(BaseModel):
     """Chat completion response stream choice."""
     index: int
     delta: DeltaMessage
-    finish_reason: Optional[Literal['stop', 'length']]
+    finish_reason: Optional[Literal['stop', 'length']] = None

 class ChatCompletionStreamResponse(BaseModel):
@@ -142,7 +142,7 @@ class CompletionResponseChoice(BaseModel):
     index: int
     text: str
     logprobs: Optional[int] = None
-    finish_reason: Optional[Literal['stop', 'length']]
+    finish_reason: Optional[Literal['stop', 'length']] = None

 class CompletionResponse(BaseModel):
...
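The protocol-model hunks address a separate Pydantic 2 behaviour change: Pydantic 1 gave Optional[...] fields an implicit default of None, while Pydantic 2 treats an Optional field without a default as required, hence the explicit "= None" added to each finish_reason. A small sketch with a simplified stand-in for the choice models above:

from typing import Literal, Optional
from pydantic import BaseModel

# Simplified stand-in for the response-choice models patched above.
class Choice(BaseModel):
    index: int
    finish_reason: Optional[Literal['stop', 'length']] = None  # explicit default

# With the explicit "= None", a choice can still be built before the finish
# reason is known, as it could under Pydantic 1's implicit-default behaviour.
print(Choice(index=0))                        # finish_reason=None
print(Choice(index=0, finish_reason='stop'))  # finish_reason='stop'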