Unverified Commit 2e0f94ab authored by lambert0312, committed by GitHub

[Fix] fix missing output_top_logprobs (#4597)

parent 18317ddc
@@ -1119,7 +1119,9 @@ def v1_chat_generate_response(
         if logprobs:
             logprobs = to_openai_style_logprobs(
                 output_token_logprobs=ret_item["meta_info"]["output_token_logprobs"],
-                output_top_logprobs=ret_item["meta_info"]["output_top_logprobs"],
+                output_top_logprobs=ret_item["meta_info"].get(
+                    "output_top_logprobs", None
+                ),
             )
             token_logprobs = []
             for token_idx, (token, logprob) in enumerate(
@@ -1329,9 +1331,9 @@ async def v1_chat_completions(
                             output_token_logprobs=content["meta_info"][
                                 "output_token_logprobs"
                             ][n_prev_token:],
-                            output_top_logprobs=content["meta_info"][
-                                "output_top_logprobs"
-                            ][n_prev_token:],
+                            output_top_logprobs=content["meta_info"].get(
+                                "output_top_logprobs", []
+                            )[n_prev_token:],
                         )
                         n_prev_token = len(
...
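For context, here is a minimal standalone sketch (not part of the patch; the `meta_info` contents below are made up for illustration) of why the change matters: when top logprobs were not requested, the `"output_top_logprobs"` key is absent from `meta_info`, so direct indexing raises `KeyError`, while `.get()` falls back to a harmless default.

```python
# Illustrative only: a meta_info dict where "output_top_logprobs" was never populated.
meta_info = {
    "output_token_logprobs": [(-0.12, 311, "Hello"), (-0.54, 1917, " world")],
    # "output_top_logprobs" is intentionally missing
}

# Old code path: direct indexing crashes the request handler.
try:
    top = meta_info["output_top_logprobs"]
except KeyError as e:
    print(f"KeyError: {e}")

# Patched code path: .get() returns a default and downstream code can handle it.
top = meta_info.get("output_top_logprobs", None)
print(top)  # -> None
```

Note the two defaults in the patch: the non-streaming path passes `None` through, while the streaming path in `v1_chat_completions` uses `[]` so the subsequent `[n_prev_token:]` slice still works when the key is missing.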