Unverified Commit 0ffded81 authored by Zhuohan Li's avatar Zhuohan Li Committed by GitHub
Browse files

[Fix] Better error message for batched prompts (#342)

parent 0bd2a573
......@@ -358,7 +358,13 @@ async def create_completion(raw_request: Request):
model_name = request.model
request_id = f"cmpl-{random_uuid()}"
if isinstance(request.prompt, list):
assert len(request.prompt) == 1
if len(request.prompt) == 0:
return create_error_response(HTTPStatus.BAD_REQUEST,
"please provide at least one prompt")
if len(request.prompt) > 1:
return create_error_response(HTTPStatus.BAD_REQUEST,
"multiple prompts in a batch is not "
"currently supported")
prompt = request.prompt[0]
else:
prompt = request.prompt
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment