chenpangpang / open-webui · Commits
"git@developer.sourcefind.cn:chenpangpang/open-webui.git" did not exist on "cf9dbea370cf1647fd8d7026650331bbd3baa296"
Commit 8651bec9
authored Apr 21, 2024 by Timothy J. Baek

pwned :)

parent a41b195f
Showing 2 changed files with 17 additions and 2 deletions (+17 -2)
backend/apps/litellm/main.py   +10 -1
backend/main.py                +7 -1
backend/apps/litellm/main.py
@@ -43,20 +43,29 @@ app.add_middleware(


 async def run_background_process(command):
+    # Start the process
     process = await asyncio.create_subprocess_exec(
         *command.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE
     )
-    return process
+
+    # Read output asynchronously
+    async for line in process.stdout:
+        print(line.decode().strip())  # Print stdout line by line
+
+    await process.wait()  # Wait for the subprocess to finish


 async def start_litellm_background():
+    print("start_litellm_background")
     # Command to run in the background
     command = "litellm --telemetry False --config ./data/litellm/config.yaml"
+
     await run_background_process(command)


 @app.on_event("startup")
 async def startup_event():
+    print("startup_event")
     # TODO: Check config.yaml file and create one
     asyncio.create_task(start_litellm_background())
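For context, the pattern the new code adopts is asyncio's subprocess API: launch the litellm proxy with asyncio.create_subprocess_exec, stream its stdout back to the server log line by line, and wait for it to exit. Below is a minimal, self-contained sketch of that pattern; the "ls -la" command and the return-code print are illustrative stand-ins and are not part of the commit.

import asyncio
import subprocess


async def run_background_process(command: str):
    # Launch the command as a child process; stdout/stderr are captured as pipes.
    process = await asyncio.create_subprocess_exec(
        *command.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )

    # Stream stdout line by line as the child produces output.
    async for line in process.stdout:
        print(line.decode().strip())

    # Block until the child exits, then report its return code.
    await process.wait()
    print(f"process exited with {process.returncode}")


if __name__ == "__main__":
    # "ls -la" is a placeholder command used only for illustration.
    asyncio.run(run_background_process("ls -la"))

Note that this coroutine only finishes when the child process does, which is why the commit schedules it with asyncio.create_task rather than awaiting it during startup.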
backend/main.py
@@ -20,12 +20,13 @@ from starlette.middleware.base import BaseHTTPMiddleware

 from apps.ollama.main import app as ollama_app
 from apps.openai.main import app as openai_app
-from apps.litellm.main import app as litellm_app
+from apps.litellm.main import app as litellm_app, start_litellm_background
 from apps.audio.main import app as audio_app
 from apps.images.main import app as images_app
 from apps.rag.main import app as rag_app
 from apps.web.main import app as webui_app

+import asyncio

 from pydantic import BaseModel
 from typing import List

@@ -168,6 +169,11 @@ async def check_url(request: Request, call_next):
     return response


+@app.on_event("startup")
+async def on_startup():
+    asyncio.create_task(start_litellm_background())
+
+
 app.mount("/api/v1", webui_app)

 app.mount("/litellm/api", litellm_app)
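In backend/main.py the net effect is to import start_litellm_background alongside the litellm sub-app and schedule it as a fire-and-forget task when the FastAPI server starts, so the proxy comes up without blocking startup. A minimal sketch of that startup-hook pattern, with a hypothetical worker coroutine standing in for start_litellm_background, might look like this:

import asyncio

from fastapi import FastAPI

app = FastAPI()


async def start_worker_background():
    # Hypothetical long-running coroutine, standing in for start_litellm_background().
    while True:
        print("worker heartbeat")
        await asyncio.sleep(5)


@app.on_event("startup")
async def on_startup():
    # Schedule the coroutine without awaiting it, so server startup is not blocked.
    asyncio.create_task(start_worker_background())

# Run with, for example: uvicorn example_app:app  (assuming this file is example_app.py)

Because asyncio.create_task only schedules the coroutine on the running event loop, the startup handler returns immediately and the server begins accepting requests while the background worker keeps running.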