chenpangpang / open-webui · Commits

Commit c89b34fd: flatten job()
Authored Jul 31, 2024 by Michael Poluektov
Parent: b9b1fdd1

1 changed file with 52 additions and 57 deletions:
backend/apps/webui/main.py (+52, -57)
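The commit title refers to removing a layer of indirection in generate_function_chat_completion: the function previously defined an inner coroutine job() only to await it immediately, so the wrapper added nothing but an extra scope and an extra indentation level. A minimal sketch of the pattern being flattened (handler_before, handler_after, and do_work are illustrative names, not from this commit):

    import asyncio

    async def do_work(form_data):
        # Hypothetical stand-in for the real pipe execution.
        return {"echo": form_data}

    # Before: an inner coroutine defined only to be awaited once.
    async def handler_before(form_data):
        async def job():
            return await do_work(form_data)

        return await job()

    # After ("flatten job()"): the wrapper is gone; behavior is identical,
    # with one less scope and one less indentation level.
    async def handler_after(form_data):
        return await do_work(form_data)

    assert asyncio.run(handler_before({"x": 1})) == asyncio.run(handler_after({"x": 1}))

The diff below applies exactly this transformation to the real function, plus a small simplification to the manifold check in get_pipe_models.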
@@ -147,7 +147,7 @@ async def get_pipe_models():
         function_module = get_function_module(pipe.id)
 
         # Check if function is a manifold
-        if hasattr(function_module, "type") and function_module.type == "manifold":
+        if hasattr(function_module, "pipes"):
             manifold_pipes = []
 
             # Check if pipes is a function or a list
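The new condition detects manifolds by duck typing: any function module exposing a pipes attribute (either a list of sub-pipes or a callable producing one) is treated as a manifold, instead of checking an explicit type == "manifold" tag. A hedged sketch of the idea, with made-up module objects standing in for loaded function modules:

    # Hypothetical stand-ins for loaded function modules; only their shape matters.
    class SinglePipe:
        def pipe(self, body):
            return body

    class ManifoldWithList:
        pipes = [{"id": "a", "name": "Sub-pipe A"}, {"id": "b", "name": "Sub-pipe B"}]

    class ManifoldWithFunction:
        def pipes(self):
            return [{"id": "c", "name": "Sub-pipe C"}]

    def expand(module):
        if hasattr(module, "pipes"):
            # "Check if pipes is a function or a list", as the diff's comment says.
            return module.pipes() if callable(module.pipes) else module.pipes
        return [module]  # a plain pipe is exposed as-is

    print(expand(ManifoldWithList()))      # two sub-pipe entries
    print(expand(ManifoldWithFunction()))  # one sub-pipe entry
    print(expand(SinglePipe()))            # the module itself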
@@ -343,70 +343,65 @@ async def generate_function_chat_completion(form_data, user):
     form_data = add_model_params(params, form_data)
     form_data = populate_system_message(params, form_data, user)
 
-    async def job():
-        pipe_id = get_pipe_id(form_data)
-        function_module = get_function_module(pipe_id)
-
-        pipe = function_module.pipe
-        params = get_params_dict(pipe, form_data, user, extra_params, function_module)
-
-        if form_data["stream"]:
-
-            async def stream_content():
-                try:
-                    res = await execute_pipe(pipe, params)
-
-                    # Directly return if the response is a StreamingResponse
-                    if isinstance(res, StreamingResponse):
-                        async for data in res.body_iterator:
-                            yield data
-                        return
-                    if isinstance(res, dict):
-                        yield f"data: {json.dumps(res)}\n\n"
-                        return
-
-                except Exception as e:
-                    print(f"Error: {e}")
-                    yield f"data: {json.dumps({'error': {'detail': str(e)}})}\n\n"
-                    return
-
-                if isinstance(res, str):
-                    message = openai_chat_chunk_message_template(form_data["model"], res)
-                    yield f"data: {json.dumps(message)}\n\n"
-
-                if isinstance(res, Iterator):
-                    for line in res:
-                        yield process_line(form_data, line)
-
-                if isinstance(res, AsyncGenerator):
-                    async for line in res:
-                        yield process_line(form_data, line)
-
-                if isinstance(res, str) or isinstance(res, Generator):
-                    finish_message = openai_chat_chunk_message_template(form_data["model"], "")
-                    finish_message["choices"][0]["finish_reason"] = "stop"
-                    yield f"data: {json.dumps(finish_message)}\n\n"
-                    yield "data: [DONE]"
-
-            return StreamingResponse(stream_content(), media_type="text/event-stream")
-        else:
-            try:
-                res = await execute_pipe(pipe, params)
-            except Exception as e:
-                print(f"Error: {e}")
-                return {"error": {"detail": str(e)}}
-
-            if isinstance(res, StreamingResponse) or isinstance(res, dict):
-                return res
-            if isinstance(res, BaseModel):
-                return res.model_dump()
-
-            message = await get_message_content(res)
-            return openai_chat_completion_message_template(form_data["model"], message)
-
-    return await job()
+    pipe_id = get_pipe_id(form_data)
+    function_module = get_function_module(pipe_id)
+
+    pipe = function_module.pipe
+    params = get_params_dict(pipe, form_data, user, extra_params, function_module)
+
+    if form_data["stream"]:
+
+        async def stream_content():
+            try:
+                res = await execute_pipe(pipe, params)
+
+                # Directly return if the response is a StreamingResponse
+                if isinstance(res, StreamingResponse):
+                    async for data in res.body_iterator:
+                        yield data
+                    return
+                if isinstance(res, dict):
+                    yield f"data: {json.dumps(res)}\n\n"
+                    return
+
+            except Exception as e:
+                print(f"Error: {e}")
+                yield f"data: {json.dumps({'error': {'detail': str(e)}})}\n\n"
+                return
+
+            if isinstance(res, str):
+                message = openai_chat_chunk_message_template(form_data["model"], res)
+                yield f"data: {json.dumps(message)}\n\n"
+
+            if isinstance(res, Iterator):
+                for line in res:
+                    yield process_line(form_data, line)
+
+            if isinstance(res, AsyncGenerator):
+                async for line in res:
+                    yield process_line(form_data, line)
+
+            if isinstance(res, str) or isinstance(res, Generator):
+                finish_message = openai_chat_chunk_message_template(form_data["model"], "")
+                finish_message["choices"][0]["finish_reason"] = "stop"
+                yield f"data: {json.dumps(finish_message)}\n\n"
+                yield "data: [DONE]"
+
+        return StreamingResponse(stream_content(), media_type="text/event-stream")
+    else:
+        try:
+            res = await execute_pipe(pipe, params)
+        except Exception as e:
+            print(f"Error: {e}")
+            return {"error": {"detail": str(e)}}
+
+        if isinstance(res, StreamingResponse) or isinstance(res, dict):
+            return res
+        if isinstance(res, BaseModel):
+            return res.model_dump()
+
+        message = await get_message_content(res)
+        return openai_chat_completion_message_template(form_data["model"], message)
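Both the old and the flattened stream path frame each chunk as a server-sent event: the payload is serialized to JSON and wrapped as "data: ...\n\n", a final chunk carries finish_reason "stop", and the stream ends with the "data: [DONE]" sentinel. A minimal, self-contained sketch of that framing; the chunk shape below is an assumption loosely mirroring openai_chat_chunk_message_template, not open-webui's actual helper:

    import json

    def sse_frame(payload: dict) -> str:
        # One server-sent event: "data: <json>" terminated by a blank line.
        return f"data: {json.dumps(payload)}\n\n"

    def chunk(model: str, content: str, finish_reason=None) -> dict:
        # Assumed shape; the real template lives in open-webui's utils.
        return {
            "object": "chat.completion.chunk",
            "model": model,
            "choices": [
                {"index": 0, "delta": {"content": content}, "finish_reason": finish_reason}
            ],
        }

    def stream(model: str, pieces):
        for piece in pieces:
            yield sse_frame(chunk(model, piece))
        yield sse_frame(chunk(model, "", finish_reason="stop"))
        yield "data: [DONE]"

    for event in stream("demo-model", ["Hel", "lo"]):
        print(event, end="")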