Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
chenpangpang
open-webui
Commits
9c2429ff
Unverified
Commit
9c2429ff
authored
Aug 12, 2024
by
Timothy Jaeryang Baek
Committed by
GitHub
Aug 12, 2024
Browse files
Merge pull request #4402 from michaelpoluektov/remove-ollama
refactor: re-use utils in Ollama
parents
d0645d3c
547611b7
Changes
5
Expand all
Hide whitespace changes
Inline
Side-by-side
Showing
5 changed files
with
139 additions
and
320 deletions
+139
-320
backend/apps/images/utils/comfyui.py
backend/apps/images/utils/comfyui.py
+3
-2
backend/apps/ollama/main.py
backend/apps/ollama/main.py
+84
-309
backend/apps/openai/main.py
backend/apps/openai/main.py
+5
-2
backend/apps/webui/main.py
backend/apps/webui/main.py
+2
-2
backend/utils/misc.py
backend/utils/misc.py
+45
-5
No files found.
backend/apps/images/utils/comfyui.py
View file @
9c2429ff
import
asyncio
import
websocket
# NOTE: websocket-client (https://github.com/websocket-client/websocket-client)
import
uuid
import
json
import
urllib.request
import
urllib.parse
...
...
@@ -398,7 +397,9 @@ async def comfyui_generate_image(
return
None
try
:
images
=
await
asyncio
.
to_thread
(
get_images
,
ws
,
comfyui_prompt
,
client_id
,
base_url
)
images
=
await
asyncio
.
to_thread
(
get_images
,
ws
,
comfyui_prompt
,
client_id
,
base_url
)
except
Exception
as
e
:
log
.
exception
(
f
"Error while receiving images:
{
e
}
"
)
images
=
None
...
...
backend/apps/ollama/main.py
View file @
9c2429ff
This diff is collapsed.
Click to expand it.
backend/apps/openai/main.py
View file @
9c2429ff
...
...
@@ -17,7 +17,10 @@ from utils.utils import (
get_verified_user
,
get_admin_user
,
)
from
utils.misc
import
apply_model_params_to_body
,
apply_model_system_prompt_to_body
from
utils.misc
import
(
apply_model_params_to_body_openai
,
apply_model_system_prompt_to_body
,
)
from
config
import
(
SRC_LOG_LEVELS
,
...
...
@@ -368,7 +371,7 @@ async def generate_chat_completion(
payload
[
"model"
]
=
model_info
.
base_model_id
params
=
model_info
.
params
.
model_dump
()
payload
=
apply_model_params_to_body
(
params
,
payload
)
payload
=
apply_model_params_to_body_openai
(
params
,
payload
)
payload
=
apply_model_system_prompt_to_body
(
params
,
payload
,
user
)
model
=
app
.
state
.
MODELS
[
payload
.
get
(
"model"
)]
...
...
backend/apps/webui/main.py
View file @
9c2429ff
...
...
@@ -22,7 +22,7 @@ from apps.webui.utils import load_function_module_by_id
from
utils.misc
import
(
openai_chat_chunk_message_template
,
openai_chat_completion_message_template
,
apply_model_params_to_body
,
apply_model_params_to_body_openai
,
apply_model_system_prompt_to_body
,
)
...
...
@@ -291,7 +291,7 @@ async def generate_function_chat_completion(form_data, user):
form_data
[
"model"
]
=
model_info
.
base_model_id
params
=
model_info
.
params
.
model_dump
()
form_data
=
apply_model_params_to_body
(
params
,
form_data
)
form_data
=
apply_model_params_to_body_openai
(
params
,
form_data
)
form_data
=
apply_model_system_prompt_to_body
(
params
,
form_data
,
user
)
pipe_id
=
get_pipe_id
(
form_data
)
...
...
backend/utils/misc.py
View file @
9c2429ff
...
...
@@ -2,7 +2,7 @@ from pathlib import Path
import
hashlib
import
re
from
datetime
import
timedelta
from
typing
import
Optional
,
List
,
Tuple
from
typing
import
Optional
,
List
,
Tuple
,
Callable
import
uuid
import
time
...
...
@@ -135,10 +135,21 @@ def apply_model_system_prompt_to_body(params: dict, form_data: dict, user) -> di
# inplace function: form_data is modified
def
apply_model_params_to_body
(
params
:
dict
,
form_data
:
dict
)
->
dict
:
def
apply_model_params_to_body
(
params
:
dict
,
form_data
:
dict
,
mappings
:
dict
[
str
,
Callable
]
)
->
dict
:
if
not
params
:
return
form_data
for
key
,
cast_func
in
mappings
.
items
():
if
(
value
:
=
params
.
get
(
key
))
is
not
None
:
form_data
[
key
]
=
cast_func
(
value
)
return
form_data
# inplace function: form_data is modified
def
apply_model_params_to_body_openai
(
params
:
dict
,
form_data
:
dict
)
->
dict
:
mappings
=
{
"temperature"
:
float
,
"top_p"
:
int
,
...
...
@@ -147,10 +158,39 @@ def apply_model_params_to_body(params: dict, form_data: dict) -> dict:
"seed"
:
lambda
x
:
x
,
"stop"
:
lambda
x
:
[
bytes
(
s
,
"utf-8"
).
decode
(
"unicode_escape"
)
for
s
in
x
],
}
return
apply_model_params_to_body
(
params
,
form_data
,
mappings
)
def
apply_model_params_to_body_ollama
(
params
:
dict
,
form_data
:
dict
)
->
dict
:
opts
=
[
"temperature"
,
"top_p"
,
"seed"
,
"mirostat"
,
"mirostat_eta"
,
"mirostat_tau"
,
"num_ctx"
,
"num_batch"
,
"num_keep"
,
"repeat_last_n"
,
"tfs_z"
,
"top_k"
,
"min_p"
,
"use_mmap"
,
"use_mlock"
,
"num_thread"
,
]
mappings
=
{
i
:
lambda
x
:
x
for
i
in
opts
}
form_data
=
apply_model_params_to_body
(
params
,
form_data
,
mappings
)
name_differences
=
{
"max_tokens"
:
"num_predict"
,
"frequency_penalty"
:
"repeat_penalty"
,
}
for
key
,
cast_func
in
mappings
.
items
():
if
(
value
:
=
params
.
get
(
key
))
is
not
None
:
form_data
[
key
]
=
cast_func
(
value
)
for
key
,
value
in
name_differences
.
items
():
if
(
param
:
=
params
.
get
(
key
,
None
))
is
not
None
:
form_data
[
value
]
=
param
return
form_data
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment