open-webui · Commit f8726719
Authored Jul 31, 2024 by Michael Poluektov

refac: rename whole_message_template, silence lsp

Parent: ae0bb8f1
Showing 2 changed files with 26 additions and 25 deletions:

backend/apps/webui/main.py   +2   -2
backend/utils/misc.py        +24  -23
backend/apps/webui/main.py

@@ -21,7 +21,7 @@ from apps.webui.utils import load_function_module_by_id
 from utils.misc import (
     stream_message_template,
-    whole_message_template,
+    openai_chat_completion_message_template,
     add_or_update_system_message,
 )
 from utils.task import prompt_template

@@ -403,6 +403,6 @@ async def generate_function_chat_completion(form_data, user):
             return res.model_dump()

         message = await get_message_content(res)
-        return whole_message_template(form_data["model"], message)
+        return openai_chat_completion_message_template(form_data["model"], message)

     return await job()
backend/utils/misc.py

 from pathlib import Path
 import hashlib
 import json
 import re
 from datetime import timedelta
 from typing import Optional, List, Tuple
@@ -8,37 +7,39 @@ import uuid
 import time


-def get_last_user_message_item(messages: List[dict]) -> str:
+def get_last_user_message_item(messages: List[dict]) -> Optional[dict]:
     for message in reversed(messages):
         if message["role"] == "user":
             return message
     return None


-def get_last_user_message(messages: List[dict]) -> str:
-    message = get_last_user_message_item(messages)
-
-    if message is not None:
-        if isinstance(message["content"], list):
-            for item in message["content"]:
-                if item["type"] == "text":
-                    return item["text"]
-        return message["content"]
+def get_content_from_message(message: dict) -> Optional[str]:
+    if isinstance(message["content"], list):
+        for item in message["content"]:
+            if item["type"] == "text":
+                return item["text"]
+    else:
+        return message["content"]
     return None


-def get_last_assistant_message(messages: List[dict]) -> str:
+def get_last_user_message(messages: List[dict]) -> Optional[str]:
+    message = get_last_user_message_item(messages)
+    if message is None:
+        return None
+
+    return get_content_from_message(message)
+
+
+def get_last_assistant_message(messages: List[dict]) -> Optional[str]:
     for message in reversed(messages):
         if message["role"] == "assistant":
-            if isinstance(message["content"], list):
-                for item in message["content"]:
-                    if item["type"] == "text":
-                        return item["text"]
-            return message["content"]
+            return get_content_from_message(message)
     return None


-def get_system_message(messages: List[dict]) -> dict:
+def get_system_message(messages: List[dict]) -> Optional[dict]:
     for message in messages:
         if message["role"] == "system":
             return message
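For illustration only (not part of the commit): a minimal sketch of how the reworked helpers behave, assuming an OpenAI-style messages list as used elsewhere in the codebase; the sample data below is hypothetical.

    from utils.misc import (
        get_last_user_message,
        get_last_assistant_message,
        get_system_message,
    )

    # Hypothetical chat history; "content" may be a plain string or a list of typed parts.
    messages = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": [{"type": "text", "text": "Hi there"}]},
        {"role": "assistant", "content": "Hello!"},
    ]

    print(get_last_user_message(messages))       # "Hi there" - text part pulled out by get_content_from_message
    print(get_last_assistant_message(messages))  # "Hello!"
    print(get_system_message([]))                # None - hence the Optional[dict] return annotation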
@@ -49,7 +50,7 @@ def remove_system_message(messages: List[dict]) -> List[dict]:
     return [message for message in messages if message["role"] != "system"]


-def pop_system_message(messages: List[dict]) -> Tuple[dict, List[dict]]:
+def pop_system_message(messages: List[dict]) -> Tuple[Optional[dict], List[dict]]:
     return get_system_message(messages), remove_system_message(messages)
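A similar sketch for pop_system_message, whose signature now advertises that the popped system message may be absent (sample input is hypothetical):

    from utils.misc import pop_system_message

    system, rest = pop_system_message(
        [
            {"role": "system", "content": "Be brief."},
            {"role": "user", "content": "Hi"},
        ]
    )
    # system == {"role": "system", "content": "Be brief."}
    # rest   == [{"role": "user", "content": "Hi"}]
    # With no system message in the input, `system` would be None,
    # matching Tuple[Optional[dict], List[dict]].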
@@ -103,7 +104,7 @@ def stream_message_template(model: str, message: str):
     return template


-def whole_message_template(model: str, message: str):
+def openai_chat_completion_message_template(model: str, message: str):
     template = message_template(model)
     template["object"] = "chat.completion"
     template["choices"][0]["message"] = {"content": message, "role": "assistant"}
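A hedged sketch of calling the renamed helper: the model name below is made up, and any fields other than "object" and the first choice's "message" come from message_template, which is outside this hunk, so their exact shape is not shown here.

    from utils.misc import openai_chat_completion_message_template

    completion = openai_chat_completion_message_template("example-model", "Hello!")

    # Only the fields set in the lines above are known from this diff:
    assert completion["object"] == "chat.completion"
    assert completion["choices"][0]["message"] == {"content": "Hello!", "role": "assistant"}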
@@ -180,7 +181,7 @@ def extract_folders_after_data_docs(path):
     tags = []

     folders = parts[index_docs:-1]
-    for idx, part in enumerate(folders):
+    for idx, _ in enumerate(folders):
         tags.append("/".join(folders[: idx + 1]))

     return tags
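The loop-variable rename (part to _) does not change behavior; for reference, a worked example of the tag expansion this loop performs (the parts/index_docs setup lives outside the hunk, so folders is given directly here):

    folders = ["projects", "2024", "reports"]

    tags = []
    for idx, _ in enumerate(folders):
        tags.append("/".join(folders[: idx + 1]))

    print(tags)  # ['projects', 'projects/2024', 'projects/2024/reports']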
@@ -276,11 +277,11 @@ def parse_ollama_modelfile(model_text):
                 value = param_match.group(1)

                 try:
-                    if param_type == int:
+                    if param_type is int:
                         value = int(value)
-                    elif param_type == float:
+                    elif param_type is float:
                         value = float(value)
-                    elif param_type == bool:
+                    elif param_type is bool:
                         value = value.lower() == "true"
                 except Exception as e:
                     print(e)
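Swapping == for is in the type comparisons is a small idiom fix: built-in types are singletons, so an identity check states the intent directly and avoids the type-comparison warnings some linters emit for ==. A minimal sketch:

    param_type = int

    print(param_type == int)  # True, but an equality test on type objects
    print(param_type is int)  # True; identity check against the single built-in `int` object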