chenpangpang / open-webui · Commits · 84defafc

Commit 84defafc
Authored Jun 09, 2024 by Timothy J. Baek

feat: unified chat completions endpoint

Parent: 7b1404f4

Showing 3 changed files with 39 additions and 6 deletions (+39 -6)
backend/apps/ollama/main.py          +6  -1
backend/main.py                      +32 -2
src/lib/components/chat/Chat.svelte  +1  -3
backend/apps/ollama/main.py (view file @ 84defafc)

@@ -849,9 +849,14 @@ async def generate_chat_completion(
 # TODO: we should update this part once Ollama supports other types


+class OpenAIChatMessageContent(BaseModel):
+    type: str
+    model_config = ConfigDict(extra="allow")
+
+
 class OpenAIChatMessage(BaseModel):
     role: str
-    content: str
+    content: Union[str, OpenAIChatMessageContent]

     model_config = ConfigDict(extra="allow")

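The effect of widening content is that an OpenAI-style message now validates whether its content is a plain string or a structured content part. A minimal sketch (not part of the commit), assuming pydantic v2 and the class definitions from the hunk above; the image_url part is only an illustration:

from typing import Union

from pydantic import BaseModel, ConfigDict


class OpenAIChatMessageContent(BaseModel):
    type: str
    model_config = ConfigDict(extra="allow")


class OpenAIChatMessage(BaseModel):
    role: str
    content: Union[str, OpenAIChatMessageContent]

    model_config = ConfigDict(extra="allow")


# Plain-text content still validates as before ...
OpenAIChatMessage(role="user", content="Why is the sky blue?")

# ... and a structured content part is now accepted as well; extra keys such as
# "image_url" are preserved because of extra="allow".
OpenAIChatMessage(
    role="user",
    content={"type": "image_url", "image_url": {"url": "https://example.com/cat.png"}},
)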
backend/main.py (view file @ 84defafc)

@@ -25,8 +25,17 @@ from starlette.responses import StreamingResponse, Response

 from apps.socket.main import app as socket_app
-from apps.ollama.main import app as ollama_app, get_all_models as get_ollama_models
-from apps.openai.main import app as openai_app, get_all_models as get_openai_models
+from apps.ollama.main import (
+    app as ollama_app,
+    OpenAIChatCompletionForm,
+    get_all_models as get_ollama_models,
+    generate_openai_chat_completion as generate_ollama_chat_completion,
+)
+from apps.openai.main import (
+    app as openai_app,
+    get_all_models as get_openai_models,
+    generate_chat_completion as generate_openai_chat_completion,
+)

 from apps.audio.main import app as audio_app
 from apps.images.main import app as images_app

@@ -485,6 +494,27 @@ async def get_models(user=Depends(get_verified_user)):

     return {"data": models}


+@app.post("/api/chat/completions")
+async def generate_chat_completions(form_data: dict, user=Depends(get_verified_user)):
+    model_id = form_data["model"]
+    if model_id not in app.state.MODELS:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail="Model not found",
+        )
+
+    model = app.state.MODELS[model_id]
+    print(model)
+
+    if model["owned_by"] == "ollama":
+        return await generate_ollama_chat_completion(
+            OpenAIChatCompletionForm(**form_data), user=user
+        )
+    else:
+        return await generate_openai_chat_completion(form_data, user=user)
+
+
 @app.post("/api/chat/completed")
 async def chat_completed(form_data: dict, user=Depends(get_verified_user)):
     data = form_data

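With this route in place, a single backend endpoint accepts OpenAI-style chat-completion requests and dispatches to Ollama or OpenAI based on the model's owned_by field. A minimal client sketch (not part of the commit); the base URL, model id, and bearer token are placeholders for a local Open WebUI deployment:

import requests

WEBUI_BASE_URL = "http://localhost:8080"   # assumed local instance
TOKEN = "YOUR_API_TOKEN"                   # placeholder; the route requires a verified user

payload = {
    # Routed to the Ollama backend if this model's owned_by is "ollama",
    # otherwise forwarded to the OpenAI-compatible backend.
    "model": "llama3",
    "messages": [{"role": "user", "content": "Why is the sky blue?"}],
    "stream": False,  # keep the response non-streaming so it can be read as JSON below
}

response = requests.post(
    f"{WEBUI_BASE_URL}/api/chat/completions",
    json=payload,
    headers={"Authorization": f"Bearer {TOKEN}"},
)
print(response.json())

The same payload shape works for both providers; the frontend change in Chat.svelte below relies on exactly this by pointing title generation at the Web UI's own API base instead of choosing a provider-specific URL on the client.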
src/lib/components/chat/Chat.svelte (view file @ 84defafc)

@@ -1134,9 +1134,7 @@
 			titleModelId,
 			userPrompt,
 			$chatId,
-			titleModel?.owned_by === 'openai' ?? false
-				? `${OPENAI_API_BASE_URL}`
-				: `${OLLAMA_API_BASE_URL}/v1`
+			`${WEBUI_BASE_URL}/api`
 		);

 		return title;