Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
chenpangpang
open-webui
Commits
5e458d49
Commit
5e458d49
authored
Apr 21, 2024
by
Timothy J. Baek
Browse files
fix: run litellm as subprocess
parent
948f2e91
Changes
2
Show whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
58 additions
and
20 deletions
+58
-20
backend/apps/litellm/main.py
backend/apps/litellm/main.py
+57
-14
backend/main.py
backend/main.py
+1
-6
No files found.
backend/apps/litellm/main.py
View file @
5e458d49
import
logging
from
litellm.proxy.proxy_server
import
ProxyConfig
,
initialize
from
litellm.proxy.proxy_server
import
app
from
fastapi
import
FastAPI
,
Depends
from
fastapi.routing
import
APIRoute
from
fastapi.middleware.cors
import
CORSMiddleware
import
logging
from
fastapi
import
FastAPI
,
Request
,
Depends
,
status
,
Response
from
fastapi.responses
import
JSONResponse
...
...
@@ -23,24 +23,39 @@ from config import (
)
proxy_config = ProxyConfig()

import asyncio
import subprocess


async def config():
    """Load the litellm proxy YAML config and initialize the proxy server."""
    # load_config returns (router, model_list, general_settings); only the
    # side effects on proxy_config matter here.
    router, model_list, general_settings = await proxy_config.load_config(
        router=None, config_file_path="./data/litellm/config.yaml"
    )
    await initialize(config="./data/litellm/config.yaml", telemetry=False)


app = FastAPI()

origins = ["*"]

# Permit cross-origin requests from any origin, credentials included.
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


async def startup():
    """One-time startup work: load and apply the proxy configuration."""
    await config()
async def run_background_process(command):
    """Spawn *command* as a non-blocking child process and return its handle.

    The command string is split on whitespace, so arguments containing
    spaces are not supported.

    Args:
        command: Whitespace-separated program and arguments, e.g.
            ``"litellm --config ./data/litellm/config.yaml"``.

    Returns:
        The ``asyncio`` subprocess handle of the running child.
    """
    argv = command.split()
    # NOTE(review): stdout/stderr are piped but nothing here reads them.
    # A chatty child can fill the OS pipe buffer and block; callers should
    # drain process.stdout/.stderr (or use communicate()) — confirm intent.
    process = await asyncio.create_subprocess_exec(
        *argv,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    return process
async def start_litellm_background(config_path="./data/litellm/config.yaml"):
    """Launch the litellm proxy CLI as a detached background subprocess.

    Args:
        config_path: Path to the litellm YAML config file. Defaults to the
            path this app writes its config to, so existing callers are
            unaffected.
    """
    # Command to run in the background; run_background_process splits on
    # whitespace, so the path must not contain spaces.
    command = f"litellm --config {config_path}"
    await run_background_process(command)
@app.on_event("startup")
async def startup_event():
    """FastAPI startup hook: schedule the litellm proxy in the background."""
    # Fire-and-forget: create_task lets app startup complete without
    # waiting for the proxy subprocess to be launched.
    asyncio.create_task(start_litellm_background())
# Expose the model-filter flag on application state so request handlers and
# middleware can read it without importing the config module directly.
app.state.MODEL_FILTER_ENABLED = MODEL_FILTER_ENABLED
...
...
@@ -63,6 +78,11 @@ async def auth_middleware(request: Request, call_next):
return
response
@app.get("/")
async def get_status():
    """Liveness endpoint.

    Returns:
        dict: ``{"status": True}`` whenever the service is up.
    """
    return dict(status=True)
class
ModifyModelsResponseMiddleware
(
BaseHTTPMiddleware
):
async
def
dispatch
(
self
,
request
:
Request
,
call_next
:
RequestResponseEndpoint
...
...
@@ -98,3 +118,26 @@ class ModifyModelsResponseMiddleware(BaseHTTPMiddleware):
# Register the response-rewriting middleware; added last, so it wraps the
# previously registered middleware (outermost layer).
app.add_middleware(ModifyModelsResponseMiddleware)
# from litellm.proxy.proxy_server import ProxyConfig, initialize
# from litellm.proxy.proxy_server import app
# proxy_config = ProxyConfig()
# async def config():
# router, model_list, general_settings = await proxy_config.load_config(
# router=None, config_file_path="./data/litellm/config.yaml"
# )
# await initialize(config="./data/litellm/config.yaml", telemetry=False)
# async def startup():
# await config()
# @app.on_event("startup")
# async def on_startup():
# await startup()
backend/main.py
View file @
5e458d49
...
...
@@ -20,7 +20,7 @@ from starlette.middleware.base import BaseHTTPMiddleware
from
apps.ollama.main
import
app
as
ollama_app
from
apps.openai.main
import
app
as
openai_app
from
apps.litellm.main
import
app
as
litellm_app
,
startup
as
litellm_app_startup
from
apps.litellm.main
import
app
as
litellm_app
from
apps.audio.main
import
app
as
audio_app
from
apps.images.main
import
app
as
images_app
from
apps.rag.main
import
app
as
rag_app
...
...
@@ -168,11 +168,6 @@ async def check_url(request: Request, call_next):
return
response
@
app
.
on_event
(
"startup"
)
async
def
on_startup
():
await
litellm_app_startup
()
# Mount the sub-applications under their URL prefixes on the root app.
for _prefix, _sub_app in (("/api/v1", webui_app), ("/litellm/api", litellm_app)):
    app.mount(_prefix, _sub_app)
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment