Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
chenpangpang
open-webui
Commits
4dd77b78
Commit
4dd77b78
authored
Jul 11, 2024
by
Timothy J. Baek
Browse files
fix
parent
7d7a29cf
Changes
2
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
11 additions
and
9 deletions
+11
-9
backend/apps/ollama/main.py
backend/apps/ollama/main.py
+1
-1
backend/main.py
backend/main.py
+10
-8
No files found.
backend/apps/ollama/main.py
View file @
4dd77b78
...
...
@@ -895,8 +895,8 @@ async def generate_openai_chat_completion(
    user=Depends(get_verified_user),
):
    form_data = OpenAIChatCompletionForm(**form_data)
-   payload = {**form_data.model_dump(exclude_none=True, exclude=["metadata"])}
+   payload = {**form_data}
    if "metadata" in payload:
        del payload["metadata"]
...
...
backend/main.py
View file @
4dd77b78
...
...
@@ -317,7 +317,7 @@ async def get_function_call_response(
            {"role": "user", "content": f"Query: {prompt}"},
        ],
        "stream": False,
-       "task": TASKS.FUNCTION_CALLING,
+       "task": str(TASKS.FUNCTION_CALLING),
    }

    try:
...
...
@@ -632,8 +632,6 @@ class ChatCompletionMiddleware(BaseHTTPMiddleware):
            message_id = body["id"]
            del body["id"]

            __event_emitter__ = await get_event_emitter(
                {"chat_id": chat_id, "message_id": message_id, "session_id": session_id}
            )
...
...
@@ -1037,12 +1035,16 @@ async def generate_chat_completions(form_data: dict, user=Depends(get_verified_u
        task = form_data["task"]
        del form_data["task"]

-       if "metadata" in form_data:
-           form_data["metadata"]['task'] = task
+       if task:
+           if "metadata" in form_data:
+               form_data["metadata"]["task"] = task
+           else:
+               form_data["metadata"] = {"task": task}

    if model.get("pipe"):
        return await generate_function_chat_completion(form_data, user=user)
    if model["owned_by"] == "ollama":
        print("generate_ollama_chat_completion")
        return await generate_ollama_chat_completion(form_data, user=user)
    else:
        return await generate_openai_chat_completion(form_data, user=user)
...
...
@@ -1311,7 +1313,7 @@ async def generate_title(form_data: dict, user=Depends(get_verified_user)):
        "stream": False,
        "max_tokens": 50,
        "chat_id": form_data.get("chat_id", None),
-       "task": TASKS.TITLE_GENERATION,
+       "task": str(TASKS.TITLE_GENERATION),
    }

    log.debug(payload)
...
...
@@ -1364,7 +1366,7 @@ async def generate_search_query(form_data: dict, user=Depends(get_verified_user)
        "messages": [{"role": "user", "content": content}],
        "stream": False,
        "max_tokens": 30,
-       "task": TASKS.QUERY_GENERATION,
+       "task": str(TASKS.QUERY_GENERATION),
    }

    print(payload)
...
...
@@ -1421,7 +1423,7 @@ Message: """{{prompt}}"""
        "stream": False,
        "max_tokens": 4,
        "chat_id": form_data.get("chat_id", None),
-       "task": TASKS.EMOJI_GENERATION,
+       "task": str(TASKS.EMOJI_GENERATION),
    }

    log.debug(payload)
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment