chenpangpang / open-webui · Commits
".github/vscode:/vscode.git/clone" did not exist on "3b3d0cce1e4aa1a57e13e8d3f412dd1216d44a6f"
Commit 760c6273, authored Apr 21, 2024 by Timothy J. Baek
refac: improved error handling
Parent: e627b8bf
Showing 2 changed files with 23 additions and 7 deletions (+23 -7)
backend/apps/litellm/main.py    +19 -7
backend/constants.py            +4 -0
backend/apps/litellm/main.py (view file @ 760c6273)
@@ -9,6 +9,7 @@ from fastapi.responses import JSONResponse
 from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
 from starlette.responses import StreamingResponse
 import json
 import time
 import requests
 from pydantic import BaseModel

@@ -16,7 +17,7 @@ from typing import Optional, List
 from utils.utils import get_verified_user, get_current_user, get_admin_user
 from config import SRC_LOG_LEVELS, ENV
-from constants import ERROR_MESSAGES
+from constants import MESSAGES

 log = logging.getLogger(__name__)
 log.setLevel(SRC_LOG_LEVELS["LITELLM"])
@@ -201,6 +202,7 @@ async def get_models(user=Depends(get_current_user)):
         return data
     except Exception as e:
         log.exception(e)
         error_detail = "Open WebUI: Server Connection Error"
         if r is not None:

@@ -211,10 +213,18 @@ async def get_models(user=Depends(get_current_user)):
             except:
                 error_detail = f"External: {e}"

         raise HTTPException(
             status_code=r.status_code if r else 500,
             detail=error_detail,
         )

     return {
         "data": [
             {
                 "id": model["model_name"],
                 "object": "model",
                 "created": int(time.time()),
                 "owned_by": "openai",
             }
             for model in app.state.CONFIG["model_list"]
         ],
         "object": "list",
     }


 @app.get("/model/info")
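For readers skimming the hunks above: the error handling in get_models boils down to trying the upstream request, keeping a generic fallback detail, refining that detail from the upstream response when one exists, and re-raising as an HTTPException. The following is a minimal sketch of that shape only, not the repository's code; the URL constant, the route path, and the function name are placeholders invented for illustration.

    from fastapi import FastAPI, HTTPException
    import requests

    app = FastAPI()

    UPSTREAM_URL = "http://localhost:4000/v1/models"  # placeholder, not from the diff


    @app.get("/sketch/models")
    async def sketch_get_models():
        r = None
        try:
            # Forward the request upstream and return its JSON body on success.
            r = requests.get(UPSTREAM_URL)
            r.raise_for_status()
            return r.json()
        except Exception as e:
            # Generic fallback, refined with the upstream body when a response exists.
            error_detail = "Open WebUI: Server Connection Error"
            if r is not None:
                try:
                    error_detail = f"External: {r.json()}"
                except Exception:
                    error_detail = f"External: {e}"
            # Reuse the upstream status code if a response object exists, else 500
            # (mirroring the diff's `r.status_code if r else 500`).
            raise HTTPException(
                status_code=r.status_code if r else 500,
                detail=error_detail,
            )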
@@ -231,6 +241,8 @@ class AddLiteLLMModelForm(BaseModel):
 async def add_model_to_config(
     form_data: AddLiteLLMModelForm, user=Depends(get_admin_user)
 ):
     # TODO: Validate model form

     app.state.CONFIG["model_list"].append(form_data.model_dump())

     with open(LITELLM_CONFIG_DIR, "w") as file:

@@ -238,7 +250,7 @@ async def add_model_to_config(
     await restart_litellm()

-    return {"message": "model added"}
+    return {"message": MESSAGES.MODEL_ADDED(form_data.model_name)}


 class DeleteLiteLLMModelForm(BaseModel):

@@ -260,7 +272,7 @@ async def delete_model_from_config(
     await restart_litellm()

-    return {"message": "model deleted"}
+    return {"message": MESSAGES.MODEL_DELETED(form_data.id)}


 @app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
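The add/delete hunks stop at the `with open(LITELLM_CONFIG_DIR, "w") as file:` line, so the serialization call itself is outside the visible diff. As a rough sketch of that append-and-persist step, assuming a YAML config file; the path constant, the helper name, and the yaml.dump call are assumptions for illustration, not taken from this commit.

    import yaml  # PyYAML; assumed serializer, the actual write call is outside the hunk

    CONFIG_PATH = "/path/to/litellm/config.yaml"  # placeholder path


    def add_model_entry(config: dict, new_model: dict) -> dict:
        # Append the new model entry to the in-memory list...
        config["model_list"].append(new_model)
        # ...then rewrite the whole config file so the proxy can reload it.
        with open(CONFIG_PATH, "w") as file:
            yaml.dump(config, file)
        return config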
backend/constants.py (view file @ 760c6273)
@@ -3,6 +3,10 @@ from enum import Enum
 class MESSAGES(str, Enum):
     DEFAULT = lambda msg="": f"{msg if msg else ''}"
+    MODEL_ADDED = lambda model="": f"The model '{model}' has been added successfully."
+    MODEL_DELETED = (
+        lambda model="": f"The model '{model}' has been deleted successfully."
+    )


 class WEBHOOK_MESSAGES(str, Enum):
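One note on the constants change: because the new values are lambdas (descriptors), Python's Enum does not turn them into members; MESSAGES.MODEL_ADDED and MESSAGES.MODEL_DELETED stay plain callables on the class and are used as small message templates, which is how main.py calls them above. A quick self-contained illustration (the model names here are made up):

    from enum import Enum


    class MESSAGES(str, Enum):
        DEFAULT = lambda msg="": f"{msg if msg else ''}"
        MODEL_ADDED = lambda model="": f"The model '{model}' has been added successfully."
        MODEL_DELETED = (
            lambda model="": f"The model '{model}' has been deleted successfully."
        )


    print(MESSAGES.MODEL_ADDED("azure-gpt-4o"))
    # -> The model 'azure-gpt-4o' has been added successfully.
    print(MESSAGES.MODEL_DELETED("local-mistral"))
    # -> The model 'local-mistral' has been deleted successfully.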