Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
chenpangpang
open-webui
Commits
2b84af87
"llama.cpp/scripts/sync-ggml.last" did not exist on "7aa90c0ea35f88a5ef227b773e5a9fe3a0fd7eb2"
Commit
2b84af87
authored
Mar 08, 2024
by
Timothy J. Baek
Browse files
refac: litellm
parent
171084ea
Changes
2
Show whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
43 additions
and
37 deletions
+43
-37
backend/apps/litellm/main.py
backend/apps/litellm/main.py
+41
-0
backend/main.py
backend/main.py
+2
-37
No files found.
backend/apps/litellm/main.py
0 → 100644
View file @
2b84af87
from
litellm.proxy.proxy_server
import
ProxyConfig
,
initialize
from
litellm.proxy.proxy_server
import
app
from
fastapi
import
FastAPI
,
Request
,
Depends
,
status
from
fastapi.responses
import
JSONResponse
from
utils.utils
import
get_http_authorization_cred
,
get_current_user
from
config
import
ENV
# Shared litellm proxy configuration object, populated by config() at startup.
proxy_config = ProxyConfig()


async def config():
    """Load the litellm proxy configuration and initialize the proxy server.

    Reads ./data/litellm/config.yaml into the shared ``proxy_config`` and
    then hands the same file to litellm's ``initialize`` with telemetry
    explicitly disabled.
    """
    # load_config() returns (router, model_list, general_settings); none of
    # those values are used here — the call is made for its side effects on
    # proxy_config, so the returned tuple is deliberately discarded.
    await proxy_config.load_config(
        router=None, config_file_path="./data/litellm/config.yaml"
    )

    await initialize(config="./data/litellm/config.yaml", telemetry=False)
async def startup():
    """Run startup tasks for the embedded litellm proxy (currently just config())."""
    await config()
@app.on_event("startup")
async def on_startup():
    # FastAPI startup hook on litellm's proxy app: load the proxy
    # configuration before the app begins serving requests.
    # NOTE(review): on_event is deprecated in newer FastAPI in favour of
    # lifespan handlers — confirm the pinned FastAPI version before migrating.
    await startup()
@app.middleware("http")
async def auth_middleware(request: Request, call_next):
    """Require a valid Authorization header on every request (outside dev).

    In any environment other than "dev", the incoming Authorization header is
    resolved to a user via get_current_user(); any failure short-circuits the
    request with a 400 JSON error. In dev the check is skipped entirely.
    """
    auth_header = request.headers.get("Authorization", "")

    if ENV != "dev":
        try:
            # Raises if the header is missing/malformed or the token does not
            # resolve to a user. The user object itself is not needed here —
            # the call is purely a validation gate. (A leftover debug
            # print(user) was removed: it logged user details on every
            # request.)
            get_current_user(get_http_authorization_cred(auth_header))
        except Exception as e:
            # NOTE(review): 400 is kept for backward compatibility; 401 would
            # be the conventional status for a failed authentication.
            return JSONResponse(status_code=400, content={"detail": str(e)})

    response = await call_next(request)
    return response
backend/main.py
View file @
2b84af87
...
...
@@ -9,17 +9,14 @@ import requests
from
fastapi
import
FastAPI
,
Request
,
Depends
,
status
from
fastapi.staticfiles
import
StaticFiles
from
fastapi
import
HTTPException
from
fastapi.responses
import
JSONResponse
from
fastapi.middleware.wsgi
import
WSGIMiddleware
from
fastapi.middleware.cors
import
CORSMiddleware
from
starlette.exceptions
import
HTTPException
as
StarletteHTTPException
from
litellm.proxy.proxy_server
import
ProxyConfig
,
initialize
from
litellm.proxy.proxy_server
import
app
as
litellm_app
from
apps.ollama.main
import
app
as
ollama_app
from
apps.openai.main
import
app
as
openai_app
from
apps.litellm.main
import
app
as
litellm_app
,
startup
as
litellm_app_startup
from
apps.audio.main
import
app
as
audio_app
from
apps.images.main
import
app
as
images_app
from
apps.rag.main
import
app
as
rag_app
...
...
@@ -29,8 +26,6 @@ from apps.web.main import app as webui_app
from
config
import
WEBUI_NAME
,
ENV
,
VERSION
,
CHANGELOG
,
FRONTEND_BUILD_DIR
from
constants
import
ERROR_MESSAGES
from
utils.utils
import
get_http_authorization_cred
,
get_current_user
class
SPAStaticFiles
(
StaticFiles
):
async
def
get_response
(
self
,
path
:
str
,
scope
):
...
...
@@ -43,21 +38,6 @@ class SPAStaticFiles(StaticFiles):
raise
ex
# Shared litellm proxy configuration object, populated by config() at startup.
proxy_config = ProxyConfig()


async def config():
    """Load the litellm proxy configuration and initialize the proxy server.

    Reads ./data/litellm/config.yaml into the shared ``proxy_config`` and
    then hands the same file to litellm's ``initialize`` with telemetry
    explicitly disabled.
    """
    # load_config() returns (router, model_list, general_settings); none of
    # those values are used here — the call is made for its side effects on
    # proxy_config, so the returned tuple is deliberately discarded.
    await proxy_config.load_config(
        router=None, config_file_path="./data/litellm/config.yaml"
    )

    await initialize(config="./data/litellm/config.yaml", telemetry=False)
async def startup():
    """Run startup tasks for the embedded litellm proxy (currently just config())."""
    await config()
# Main application. Interactive API docs are only exposed in the dev
# environment; ReDoc is disabled entirely.
app = FastAPI(docs_url="/docs" if ENV == "dev" else None, redoc_url=None)

# CORS origins allowed by the middleware configured below.
# NOTE(review): "*" is fully permissive — confirm this is intended for
# production deployments.
origins = ["*"]
...
...
@@ -73,7 +53,7 @@ app.add_middleware(
@app.on_event("startup")
async def on_startup():
    # Run this app's own startup tasks first, then the embedded litellm
    # sub-app's startup (imported as litellm_app_startup) so its proxy
    # configuration is loaded before requests are served.
    await startup()
    await litellm_app_startup()
@
app
.
middleware
(
"http"
)
...
...
@@ -86,21 +66,6 @@ async def check_url(request: Request, call_next):
return
response
@litellm_app.middleware("http")
async def auth_middleware(request: Request, call_next):
    """Require a valid Authorization header on litellm requests (outside dev).

    In any environment other than "dev", the incoming Authorization header is
    resolved to a user via get_current_user(); any failure short-circuits the
    request with a 400 JSON error. In dev the check is skipped entirely.
    """
    auth_header = request.headers.get("Authorization", "")

    if ENV != "dev":
        try:
            # Raises if the header is missing/malformed or the token does not
            # resolve to a user. The user object itself is not needed here —
            # the call is purely a validation gate. (A leftover debug
            # print(user) was removed: it logged user details on every
            # request.)
            get_current_user(get_http_authorization_cred(auth_header))
        except Exception as e:
            # NOTE(review): 400 is kept for backward compatibility; 401 would
            # be the conventional status for a failed authentication.
            return JSONResponse(status_code=400, content={"detail": str(e)})

    response = await call_next(request)
    return response
# Mount the web UI backend under /api/v1 and the embedded litellm proxy
# under /litellm/api on the main application.
app.mount("/api/v1", webui_app)

app.mount("/litellm/api", litellm_app)
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment