open-webui · Commit 14dd0d11

feat: litellm yaml

Authored Feb 24, 2024 by Timothy J. Baek
Parent: b5bd07a0
Showing 3 changed files with 31 additions and 2 deletions (+31 −2):

backend/.gitignore   +2  −0
backend/main.py      +23 −2
test.json            +6  −0
backend/.gitignore

@@ -8,4 +8,6 @@ _test
 Pipfile
 data/*
 !data/config.json
+!data/litellm
+!data/litellm/config.yaml
 .webui_secret_key
\ No newline at end of file
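
With data/* ignored, these negation patterns let data/litellm/config.yaml be tracked; Git cannot re-include a file whose parent directory is excluded, which is why the litellm directory is negated before the file. The config file itself is not part of this commit, but backend/main.py below expects to load it. A minimal sketch of what it could contain, assuming LiteLLM's standard model_list layout and reusing the entry shape from this commit's test.json:

    # Hypothetical data/litellm/config.yaml -- not included in this commit.
    # model_list is LiteLLM's proxy config key; the entry mirrors test.json below.
    model_list:
      - model_name: mistral          # alias the proxy exposes (example name)
        litellm_params:
          model: ollama/mistral      # route requests to a local Ollama model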
backend/main.py

@@ -2,7 +2,8 @@ from bs4 import BeautifulSoup
 import json
 import markdown
 import time
+import os
+import sys
 
-
 from fastapi import FastAPI, Request, Depends
 from fastapi.staticfiles import StaticFiles
@@ -13,6 +14,7 @@ from fastapi.middleware.cors import CORSMiddleware
 from starlette.exceptions import HTTPException as StarletteHTTPException
 
+from litellm.proxy.proxy_server import ProxyConfig, initialize
 from litellm.proxy.proxy_server import app as litellm_app
 
 from apps.ollama.main import app as ollama_app
@@ -38,6 +40,21 @@ class SPAStaticFiles(StaticFiles):
                 raise ex
 
 
+
+proxy_config = ProxyConfig()
+
+
+async def config():
+    router, model_list, general_settings = await proxy_config.load_config(
+        router=None, config_file_path="./data/litellm/config.yaml"
+    )
+
+    await initialize(config="./data/litellm/config.yaml", telemetry=False)
+
+
+async def startup():
+    await config()
+
 app = FastAPI(docs_url="/docs" if ENV == "dev" else None, redoc_url=None)
 
 origins = ["*"]
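
In the new block, proxy_config.load_config() parses ./data/litellm/config.yaml into a router, model list, and general settings, and initialize() then boots the LiteLLM proxy from the same file with telemetry disabled. startup() simply wraps the async config() step so the FastAPI startup hook added in the next hunk can await it.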
@@ -51,6 +68,11 @@ app.add_middleware(
 )
 
 
+@app.on_event("startup")
+async def on_startup():
+    await startup()
+
+
 @app.middleware("http")
 async def check_url(request: Request, call_next):
     start_time = int(time.time())
@@ -79,7 +101,6 @@ async def auth_middleware(request: Request, call_next):
 app.mount("/api/v1", webui_app)
 
 app.mount("/litellm/api", litellm_app)
-
 app.mount("/ollama/api", ollama_app)
 
 app.mount("/openai/api", openai_app)
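
With these mounts, the LiteLLM proxy sits under /litellm/api next to the webui, Ollama, and OpenAI apps. A minimal sketch of calling it, assuming the backend is served on localhost:8080 and that the proxy exposes its usual OpenAI-compatible chat route (host, port, and model name are illustrative, not part of this commit):

    # Hypothetical client call; host, port, and model name are assumptions.
    import requests

    response = requests.post(
        # litellm_app mount point + assumed OpenAI-style chat route
        "http://localhost:8080/litellm/api/v1/chat/completions",
        json={
            "model": "ollama/mistral",  # same model test.json's litellm_params point at
            "messages": [{"role": "user", "content": "Hello!"}],
        },
    )
    print(response.json())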
test.json (new file, mode 100644)

@@ -0,0 +1,6 @@
+{
+    "model_name": "string",
+    "litellm_params": {
+        "model": "ollama/mistral"
+    }
+}
\ No newline at end of file
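
test.json mirrors the shape of a single LiteLLM model entry: a model_name alias (here the placeholder "string") plus the litellm_params that route it to ollama/mistral, the same structure a model_list item in the YAML config would carry. It reads like a scratch payload for testing rather than runtime configuration.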