Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
chenpangpang
open-webui
Commits
7f74426a
Commit
7f74426a
authored
Jun 02, 2024
by
Jun Siang Cheah
Browse files
fix: openai streaming cancellation using aiohttp
parent
4dd51bad
Changes
2
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
31 additions
and
15 deletions
+31
-15
backend/apps/ollama/main.py
backend/apps/ollama/main.py
+1
-1
backend/apps/openai/main.py
backend/apps/openai/main.py
+30
-14
No files found.
backend/apps/ollama/main.py
View file @
7f74426a
...
...
@@ -153,7 +153,7 @@ async def cleanup_response(
await
session
.
close
()
async
def
post_streaming_url
(
url
,
payload
):
async
def
post_streaming_url
(
url
:
str
,
payload
:
str
):
r
=
None
try
:
session
=
aiohttp
.
ClientSession
()
...
...
backend/apps/openai/main.py
View file @
7f74426a
...
...
@@ -9,6 +9,7 @@ import json
import
logging
from
pydantic
import
BaseModel
from
starlette.background
import
BackgroundTask
from
apps.webui.models.models
import
Models
from
apps.webui.models.users
import
Users
...
...
@@ -194,6 +195,16 @@ async def fetch_url(url, key):
return
None
async def cleanup_response(
    response: Optional[aiohttp.ClientResponse],
    session: Optional[aiohttp.ClientSession],
):
    """Release aiohttp resources after a (possibly cancelled) streamed proxy call.

    Closes *response* first (synchronous), then asynchronously closes
    *session*. Either argument may be ``None`` (or otherwise falsy), in
    which case it is skipped — this makes the helper safe to register as
    a Starlette ``BackgroundTask`` even when the request failed before
    the response/session were created.
    """
    if response:
        # ClientResponse.close() is synchronous: it releases the
        # connection without reading the remaining body.
        response.close()
    if not session:
        return
    # Closing the session tears down the underlying connector/sockets.
    await session.close()
def
merge_models_lists
(
model_lists
):
log
.
debug
(
f
"merge_models_lists
{
model_lists
}
"
)
merged_list
=
[]
...
...
@@ -426,40 +437,45 @@ async def proxy(path: str, request: Request, user=Depends(get_verified_user)):
headers
[
"Content-Type"
]
=
"application/json"
r
=
None
session
=
None
streaming
=
False
try
:
r
=
requests
.
request
(
method
=
request
.
method
,
url
=
target_url
,
data
=
payload
if
payload
else
body
,
headers
=
headers
,
stream
=
True
,
session
=
aiohttp
.
ClientSession
()
r
=
await
session
.
request
(
method
=
request
.
method
,
url
=
target_url
,
data
=
payload
,
headers
=
headers
)
r
.
raise_for_status
()
# Check if response is SSE
if
"text/event-stream"
in
r
.
headers
.
get
(
"Content-Type"
,
""
):
streaming
=
True
return
StreamingResponse
(
r
.
iter_
content
(
chunk_size
=
8192
)
,
status_code
=
r
.
status
_code
,
r
.
content
,
status_code
=
r
.
status
,
headers
=
dict
(
r
.
headers
),
background
=
BackgroundTask
(
cleanup_response
,
response
=
r
,
session
=
session
),
)
else
:
response_data
=
r
.
json
()
response_data
=
await
r
.
json
()
return
response_data
except
Exception
as
e
:
log
.
exception
(
e
)
error_detail
=
"Open WebUI: Server Connection Error"
if
r
is
not
None
:
try
:
res
=
r
.
json
()
res
=
await
r
.
json
()
print
(
res
)
if
"error"
in
res
:
error_detail
=
f
"External:
{
res
[
'error'
][
'message'
]
if
'message'
in
res
[
'error'
]
else
res
[
'error'
]
}
"
except
:
error_detail
=
f
"External:
{
e
}
"
raise
HTTPException
(
status_code
=
r
.
status_code
if
r
else
500
,
detail
=
error_detail
)
raise
HTTPException
(
status_code
=
r
.
status
if
r
else
500
,
detail
=
error_detail
)
finally
:
if
not
streaming
and
session
:
if
r
:
r
.
close
()
await
session
.
close
()
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment