vllm · Commit e8ddc08e (unverified)

Authored Aug 02, 2023 by YHPeter, committed by GitHub on Aug 02, 2023

[BUG FIX] upgrade fschat version to 0.2.23 (#650)

Co-authored-by: hao.yu <hao.yu@cn-c017.server.mila.quebec>

Parent: 1b0bd0fe
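As the diff below shows, the fix adds a runtime check on the installed fschat version: with fschat >= 0.2.23 the Conversation object is built from system_template and system_message instead of a single system field, so older installs now fail with an ImportError that points at the upgrade command. A minimal standalone sketch of that version gate, assuming the packaging and fschat packages are installed (MIN_FSCHAT is an illustrative constant, not a name from the commit):

# Sketch of the runtime version gate this commit adds; mirrors the check
# in api_server.py below. Assumes `packaging` and `fschat` are installed.
from packaging import version

import fastchat

MIN_FSCHAT = "0.2.23"  # illustrative constant: minimum version the patched server expects

if version.parse(fastchat.__version__) < version.parse(MIN_FSCHAT):
    raise ImportError(
        f"fastchat version is low. Current version: {fastchat.__version__} "
        "Please upgrade fastchat to use: `$ pip install -U fschat`")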
Showing 1 changed file with 10 additions and 2 deletions:

vllm/entrypoints/openai/api_server.py (+10 -2) @ e8ddc08e
@@ -7,6 +7,7 @@ from http import HTTPStatus
 import json
 import time
 from typing import AsyncGenerator, Dict, List, Optional
+from packaging import version
 
 import fastapi
 from fastapi import BackgroundTasks, Request
@@ -31,6 +32,7 @@ from vllm.transformers_utils.tokenizer import get_tokenizer
 from vllm.utils import random_uuid
 
 try:
+    import fastchat
     from fastchat.conversation import Conversation, SeparatorStyle
     from fastchat.model.model_adapter import get_conversation_template
     _fastchat_available = True
@@ -72,10 +74,16 @@ async def get_gen_prompt(request) -> str:
             "fastchat is not installed. Please install fastchat to use "
             "the chat completion and conversation APIs: `$ pip install fschat`"
         )
+    if version.parse(fastchat.__version__) < version.parse("0.2.23"):
+        raise ImportError(
+            f"fastchat version is low. Current version: {fastchat.__version__} "
+            "Please upgrade fastchat to use: `$ pip install -U fschat`"
+        )
     conv = get_conversation_template(request.model)
     conv = Conversation(
         name=conv.name,
-        system=conv.system,
+        system_template=conv.system_template,
+        system_message=conv.system_message,
         roles=conv.roles,
         messages=list(conv.messages),  # prevent in-place modification
         offset=conv.offset,
@@ -92,7 +100,7 @@ async def get_gen_prompt(request) -> str:
     for message in request.messages:
         msg_role = message["role"]
         if msg_role == "system":
-            conv.system = message["content"]
+            conv.system_message = message["content"]
         elif msg_role == "user":
             conv.append_message(conv.roles[0], message["content"])
         elif msg_role == "assistant":
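Beyond the hunks shown above, get_gen_prompt() goes on to append the remaining chat turns and render the final prompt string. Below is a minimal, standalone sketch of that flow against fschat >= 0.2.23; the model name, the message contents, and the trailing get_prompt() call are illustrative assumptions, not code copied from this commit:

# Minimal sketch (not from the repository) of mapping OpenAI-style chat
# messages onto an fschat >= 0.2.23 Conversation. Model name and messages
# are illustrative only.
from fastchat.model.model_adapter import get_conversation_template

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Summarize what vLLM does."},
]

conv = get_conversation_template("vicuna")   # assumed model name
for message in messages:
    role, content = message["role"], message["content"]
    if role == "system":
        conv.system_message = content        # was conv.system before 0.2.23
    elif role == "user":
        conv.append_message(conv.roles[0], content)
    elif role == "assistant":
        conv.append_message(conv.roles[1], content)

conv.append_message(conv.roles[1], None)     # leave the reply slot open
print(conv.get_prompt())

On older fschat releases the Conversation constructor does not accept the system_template and system_message keywords used in this patch, which is the incompatibility the new version check guards against.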