chenpangpang / open-webui / Commits

Commit 5166e92f (unverified), authored May 28, 2024 by arkohut, committed by GitHub on May 28, 2024
Merge branch 'dev' into support-py-for-run-code
Parents: b443d61c b6b71c08

Showing 20 changed files with 649 additions and 99 deletions (+649 -99)
backend/apps/webui/routers/configs.py    +31  -1
backend/apps/webui/routers/documents.py  +1   -1
backend/apps/webui/routers/memories.py   +1   -1
backend/apps/webui/routers/models.py     +108 -0
backend/apps/webui/routers/prompts.py    +1   -1
backend/apps/webui/routers/users.py      +47  -3
backend/apps/webui/routers/utils.py      +1   -1
backend/config.py                        +61  -33
backend/constants.py                     +2   -0
backend/main.py                          +129 -35
backend/open_webui/__init__.py           +60  -0
backend/requirements.txt                 +18  -20
backend/start.sh                         +25  -0
backend/utils/misc.py                    +74  -0
backend/utils/models.py                  +10  -0
backend/utils/utils.py                   +1   -1
cypress/e2e/chat.cy.ts                   +23  -0
docker-compose.a1111-test.yaml           +31  -0
hatch_build.py                           +23  -0
package-lock.json                        +2   -2
backend/apps/web/routers/configs.py → backend/apps/webui/routers/configs.py

@@ -8,7 +8,9 @@ from pydantic import BaseModel
 import time
 import uuid

-from apps.web.models.users import Users
+from config import BannerModel
+
+from apps.webui.models.users import Users
 from utils.utils import (
     get_password_hash,

@@ -57,3 +59,31 @@ async def set_global_default_suggestions(
     data = form_data.model_dump()
     request.app.state.config.DEFAULT_PROMPT_SUGGESTIONS = data["suggestions"]
     return request.app.state.config.DEFAULT_PROMPT_SUGGESTIONS
+
+
+############################
+# SetBanners
+############################
+
+
+class SetBannersForm(BaseModel):
+    banners: List[BannerModel]
+
+
+@router.post("/banners", response_model=List[BannerModel])
+async def set_banners(
+    request: Request,
+    form_data: SetBannersForm,
+    user=Depends(get_admin_user),
+):
+    data = form_data.model_dump()
+    request.app.state.config.BANNERS = data["banners"]
+    return request.app.state.config.BANNERS
+
+
+@router.get("/banners", response_model=List[BannerModel])
+async def get_banners(
+    request: Request,
+    user=Depends(get_current_user),
+):
+    return request.app.state.config.BANNERS
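As a quick reference, here is a minimal sketch of how the new banner endpoints could be exercised over HTTP. It assumes the WebUI app is mounted at /api/v1 (see backend/main.py further down), that this router sits under a /configs prefix, and that an admin bearer token is available; those details are assumptions, not part of this hunk.

# Minimal sketch; URL prefix and token handling are assumptions, not part of this commit.
import requests

token = "YOUR_API_TOKEN"  # placeholder: an admin user's JWT
base_url = "http://localhost:8080/api/v1/configs"  # assumes /api/v1 mount + /configs prefix
headers = {"Authorization": f"Bearer {token}"}

banner = {
    "id": "maintenance-notice",  # illustrative values; fields mirror BannerModel in backend/config.py
    "type": "info",
    "title": "Heads up",
    "content": "Scheduled maintenance tonight.",
    "dismissible": True,
    "timestamp": 1716854400,
}

# set_banners requires an admin user; get_banners only needs a signed-in user.
requests.post(f"{base_url}/banners", json={"banners": [banner]}, headers=headers)
print(requests.get(f"{base_url}/banners", headers=headers).json())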
backend/apps/web/routers/documents.py → backend/apps/webui/routers/documents.py

@@ -6,7 +6,7 @@ from fastapi import APIRouter
 from pydantic import BaseModel
 import json

-from apps.web.models.documents import (
+from apps.webui.models.documents import (
     Documents,
     DocumentForm,
     DocumentUpdateForm,
backend/apps/web/routers/memories.py → backend/apps/webui/routers/memories.py

@@ -7,7 +7,7 @@ from fastapi import APIRouter
 from pydantic import BaseModel
 import logging

-from apps.web.models.memories import Memories, MemoryModel
+from apps.webui.models.memories import Memories, MemoryModel
 from utils.utils import get_verified_user
 from constants import ERROR_MESSAGES
backend/apps/webui/routers/models.py (new file, 0 → 100644)

from fastapi import Depends, FastAPI, HTTPException, status, Request
from datetime import datetime, timedelta
from typing import List, Union, Optional

from fastapi import APIRouter
from pydantic import BaseModel
import json

from apps.webui.models.models import Models, ModelModel, ModelForm, ModelResponse

from utils.utils import get_verified_user, get_admin_user
from constants import ERROR_MESSAGES

router = APIRouter()

###########################
# getModels
###########################


@router.get("/", response_model=List[ModelResponse])
async def get_models(user=Depends(get_verified_user)):
    return Models.get_all_models()


############################
# AddNewModel
############################


@router.post("/add", response_model=Optional[ModelModel])
async def add_new_model(
    request: Request,
    form_data: ModelForm,
    user=Depends(get_admin_user),
):
    if form_data.id in request.app.state.MODELS:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail=ERROR_MESSAGES.MODEL_ID_TAKEN,
        )
    else:
        model = Models.insert_new_model(form_data, user.id)

        if model:
            return model
        else:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail=ERROR_MESSAGES.DEFAULT(),
            )


############################
# GetModelById
############################


@router.get("/", response_model=Optional[ModelModel])
async def get_model_by_id(id: str, user=Depends(get_verified_user)):
    model = Models.get_model_by_id(id)

    if model:
        return model
    else:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail=ERROR_MESSAGES.NOT_FOUND,
        )


############################
# UpdateModelById
############################


@router.post("/update", response_model=Optional[ModelModel])
async def update_model_by_id(
    request: Request,
    id: str,
    form_data: ModelForm,
    user=Depends(get_admin_user),
):
    model = Models.get_model_by_id(id)
    if model:
        model = Models.update_model_by_id(id, form_data)
        return model
    else:
        if form_data.id in request.app.state.MODELS:
            model = Models.insert_new_model(form_data, user.id)
            print(model)
            if model:
                return model
            else:
                raise HTTPException(
                    status_code=status.HTTP_401_UNAUTHORIZED,
                    detail=ERROR_MESSAGES.DEFAULT(),
                )
        else:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail=ERROR_MESSAGES.DEFAULT(),
            )


############################
# DeleteModelById
############################


@router.delete("/delete", response_model=bool)
async def delete_model_by_id(id: str, user=Depends(get_admin_user)):
    result = Models.delete_model_by_id(id)
    return result
backend/apps/web/routers/prompts.py → backend/apps/webui/routers/prompts.py

@@ -6,7 +6,7 @@ from fastapi import APIRouter
 from pydantic import BaseModel
 import json

-from apps.web.models.prompts import Prompts, PromptForm, PromptModel
+from apps.webui.models.prompts import Prompts, PromptForm, PromptModel
 from utils.utils import get_current_user, get_admin_user
 from constants import ERROR_MESSAGES
backend/apps/web/routers/users.py → backend/apps/webui/routers/users.py

@@ -9,9 +9,15 @@ import time
 import uuid
 import logging

-from apps.web.models.users import UserModel, UserUpdateForm, UserRoleUpdateForm, Users
-from apps.web.models.auths import Auths
-from apps.web.models.chats import Chats
+from apps.webui.models.users import (
+    UserModel,
+    UserUpdateForm,
+    UserRoleUpdateForm,
+    UserSettings,
+    Users,
+)
+from apps.webui.models.auths import Auths
+from apps.webui.models.chats import Chats

 from utils.utils import get_verified_user, get_password_hash, get_admin_user
 from constants import ERROR_MESSAGES

@@ -68,6 +74,42 @@ async def update_user_role(form_data: UserRoleUpdateForm, user=Depends(get_admin
     )


+############################
+# GetUserSettingsBySessionUser
+############################
+
+
+@router.get("/user/settings", response_model=Optional[UserSettings])
+async def get_user_settings_by_session_user(user=Depends(get_verified_user)):
+    user = Users.get_user_by_id(user.id)
+    if user:
+        return user.settings
+    else:
+        raise HTTPException(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            detail=ERROR_MESSAGES.USER_NOT_FOUND,
+        )
+
+
+############################
+# UpdateUserSettingsBySessionUser
+############################
+
+
+@router.post("/user/settings/update", response_model=UserSettings)
+async def update_user_settings_by_session_user(
+    form_data: UserSettings, user=Depends(get_verified_user)
+):
+    user = Users.update_user_by_id(user.id, {"settings": form_data.model_dump()})
+    if user:
+        return user.settings
+    else:
+        raise HTTPException(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            detail=ERROR_MESSAGES.USER_NOT_FOUND,
+        )
+
+
 ############################
 # GetUserById
 ############################

@@ -81,6 +123,8 @@ class UserResponse(BaseModel):
 @router.get("/{user_id}", response_model=UserResponse)
 async def get_user_by_id(user_id: str, user=Depends(get_verified_user)):

+    # Check if user_id is a shared chat
+    # If it is, get the user_id from the chat
     if user_id.startswith("shared-"):
         chat_id = user_id.replace("shared-", "")
         chat = Chats.get_chat_by_id(chat_id)
backend/apps/web/routers/utils.py → backend/apps/webui/routers/utils.py

@@ -8,7 +8,7 @@ from pydantic import BaseModel
 from fpdf import FPDF
 import markdown

-from apps.web.internal.db import DB
+from apps.webui.internal.db import DB

 from utils.utils import get_admin_user
 from utils.misc import calculate_sha256, get_gravatar_url
backend/config.py

 import os
 import sys
 import logging
+import importlib.metadata
+import pkgutil
 import chromadb
 from chromadb import Settings
 from base64 import b64encode
 from bs4 import BeautifulSoup
 from typing import TypeVar, Generic, Union
 from pydantic import BaseModel
 from typing import Optional
 from pathlib import Path
 import json

@@ -22,10 +26,15 @@ from constants import ERROR_MESSAGES
 # Load .env file
 ####################################

+BACKEND_DIR = Path(__file__).parent  # the path containing this file
+BASE_DIR = BACKEND_DIR.parent  # the path containing the backend/
+
+print(BASE_DIR)
+
 try:
     from dotenv import load_dotenv, find_dotenv

-    load_dotenv(find_dotenv("../.env"))
+    load_dotenv(find_dotenv(str(BASE_DIR / ".env")))
 except ImportError:
     print("dotenv not installed, skipping...")

@@ -51,7 +60,6 @@ log_sources = [
     "CONFIG",
     "DB",
     "IMAGES",
-    "LITELLM",
     "MAIN",
     "MODELS",
     "OLLAMA",

@@ -87,9 +95,11 @@ WEBUI_FAVICON_URL = "https://openwebui.com/favicon.png"
 ENV = os.environ.get("ENV", "dev")

 try:
-    with open(f"../package.json", "r") as f:
-        PACKAGE_DATA = json.load(f)
+    PACKAGE_DATA = json.loads((BASE_DIR / "package.json").read_text())
 except:
-    PACKAGE_DATA = {"version": "0.0.0"}
+    try:
+        PACKAGE_DATA = {"version": importlib.metadata.version("open-webui")}
+    except importlib.metadata.PackageNotFoundError:
+        PACKAGE_DATA = {"version": "0.0.0"}

 VERSION = PACKAGE_DATA["version"]

@@ -115,10 +125,13 @@ def parse_section(section):
 try:
-    with open("../CHANGELOG.md", "r") as file:
+    changelog_path = BASE_DIR / "CHANGELOG.md"
+    with open(str(changelog_path.absolute()), "r", encoding="utf8") as file:
         changelog_content = file.read()
 except:
-    changelog_content = ""
+    changelog_content = (pkgutil.get_data("open_webui", "CHANGELOG.md") or b"").decode()

 # Convert markdown content to HTML
 html_content = markdown.markdown(changelog_content)

@@ -155,21 +168,20 @@ CHANGELOG = changelog_json
 ####################################
-# WEBUI_VERSION
+# WEBUI_BUILD_HASH
 ####################################

-WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.100")
+WEBUI_BUILD_HASH = os.environ.get("WEBUI_BUILD_HASH", "dev-build")

 ####################################
 # DATA/FRONTEND BUILD DIR
 ####################################

-DATA_DIR = str(Path(os.getenv("DATA_DIR", "./data")).resolve())
-FRONTEND_BUILD_DIR = str(Path(os.getenv("FRONTEND_BUILD_DIR", "../build")))
+DATA_DIR = Path(os.getenv("DATA_DIR", BACKEND_DIR / "data")).resolve()
+FRONTEND_BUILD_DIR = Path(os.getenv("FRONTEND_BUILD_DIR", BASE_DIR / "build")).resolve()

 try:
-    with open(f"{DATA_DIR}/config.json", "r") as f:
-        CONFIG_DATA = json.load(f)
+    CONFIG_DATA = json.loads((DATA_DIR / "config.json").read_text())
 except:
     CONFIG_DATA = {}

@@ -279,11 +291,11 @@ JWT_EXPIRES_IN = PersistentConfig(
 # Static DIR
 ####################################

-STATIC_DIR = str(Path(os.getenv("STATIC_DIR", "./static")).resolve())
+STATIC_DIR = Path(os.getenv("STATIC_DIR", BACKEND_DIR / "static")).resolve()

-frontend_favicon = f"{FRONTEND_BUILD_DIR}/favicon.png"
-if os.path.exists(frontend_favicon):
-    shutil.copyfile(frontend_favicon, f"{STATIC_DIR}/favicon.png")
+frontend_favicon = FRONTEND_BUILD_DIR / "favicon.png"
+if frontend_favicon.exists():
+    shutil.copyfile(frontend_favicon, STATIC_DIR / "favicon.png")
 else:
     logging.warning(f"Frontend favicon not found at {frontend_favicon}")

@@ -368,16 +380,23 @@ def create_config_file(file_path):
 LITELLM_CONFIG_PATH = f"{DATA_DIR}/litellm/config.yaml"

-if not os.path.exists(LITELLM_CONFIG_PATH):
-    log.info("Config file doesn't exist. Creating...")
-    create_config_file(LITELLM_CONFIG_PATH)
-    log.info("Config file created successfully.")
+# if not os.path.exists(LITELLM_CONFIG_PATH):
+#     log.info("Config file doesn't exist. Creating...")
+#     create_config_file(LITELLM_CONFIG_PATH)
+#     log.info("Config file created successfully.")

 ####################################
 # OLLAMA_BASE_URL
 ####################################

+ENABLE_OLLAMA_API = PersistentConfig(
+    "ENABLE_OLLAMA_API",
+    "ollama.enable",
+    os.environ.get("ENABLE_OLLAMA_API", "True").lower() == "true",
+)
+
 OLLAMA_API_BASE_URL = os.environ.get("OLLAMA_API_BASE_URL", "http://localhost:11434/api")

@@ -549,6 +568,27 @@ WEBHOOK_URL = PersistentConfig(
 ENABLE_ADMIN_EXPORT = os.environ.get("ENABLE_ADMIN_EXPORT", "True").lower() == "true"

+ENABLE_COMMUNITY_SHARING = PersistentConfig(
+    "ENABLE_COMMUNITY_SHARING",
+    "ui.enable_community_sharing",
+    os.environ.get("ENABLE_COMMUNITY_SHARING", "True").lower() == "true",
+)
+
+
+class BannerModel(BaseModel):
+    id: str
+    type: str
+    title: Optional[str] = None
+    content: str
+    dismissible: bool
+    timestamp: int
+
+
+WEBUI_BANNERS = PersistentConfig(
+    "WEBUI_BANNERS",
+    "ui.banners",
+    [BannerModel(**banner) for banner in json.loads("[]")],
+)
+
 ####################################
 # WEBUI_SECRET_KEY
 ####################################

@@ -813,18 +853,6 @@ AUDIO_OPENAI_API_VOICE = PersistentConfig(
     os.getenv("AUDIO_OPENAI_API_VOICE", "alloy"),
 )

-####################################
-# LiteLLM
-####################################
-
-ENABLE_LITELLM = os.environ.get("ENABLE_LITELLM", "True").lower() == "true"
-
-LITELLM_PROXY_PORT = int(os.getenv("LITELLM_PROXY_PORT", "14365"))
-if LITELLM_PROXY_PORT < 0 or LITELLM_PROXY_PORT > 65535:
-    raise ValueError("Invalid port number for LITELLM_PROXY_PORT")
-LITELLM_PROXY_HOST = os.getenv("LITELLM_PROXY_HOST", "127.0.0.1")
-
 ####################################
 # Database
backend/constants.py

@@ -32,6 +32,8 @@ class ERROR_MESSAGES(str, Enum):
     COMMAND_TAKEN = "Uh-oh! This command is already registered. Please choose another command string."
     FILE_EXISTS = "Uh-oh! This file is already registered. Please choose another file."

+    MODEL_ID_TAKEN = "Uh-oh! This model id is already registered. Please choose another model id string."
+    NAME_TAG_TAKEN = "Uh-oh! This name tag is already registered. Please choose another name tag string."

     INVALID_TOKEN = (
         "Your session has expired or the token is invalid. Please sign in again."
backend/main.py

@@ -8,6 +8,7 @@ import sys
 import logging
 import aiohttp
 import requests
+import mimetypes

 from fastapi import FastAPI, Request, Depends, status
 from fastapi.staticfiles import StaticFiles

@@ -18,27 +19,20 @@ from starlette.exceptions import HTTPException as StarletteHTTPException
 from starlette.middleware.base import BaseHTTPMiddleware
 from starlette.responses import StreamingResponse, Response

-from apps.ollama.main import app as ollama_app
-from apps.openai.main import app as openai_app
-
-from apps.litellm.main import (
-    app as litellm_app,
-    start_litellm_background,
-    shutdown_litellm_background,
-)
+from apps.ollama.main import app as ollama_app, get_all_models as get_ollama_models
+from apps.openai.main import app as openai_app, get_all_models as get_openai_models
+
 from apps.audio.main import app as audio_app
 from apps.images.main import app as images_app
 from apps.rag.main import app as rag_app
-from apps.web.main import app as webui_app
+from apps.webui.main import app as webui_app

 import asyncio
 from pydantic import BaseModel
-from typing import List
+from typing import List, Optional

-from utils.utils import get_admin_user
+from apps.webui.models.models import Models, ModelModel
+from utils.utils import get_admin_user, get_verified_user
 from apps.rag.utils import rag_messages

 from config import (

@@ -52,7 +46,8 @@ from config import (
     FRONTEND_BUILD_DIR,
     CACHE_DIR,
     STATIC_DIR,
-    ENABLE_LITELLM,
+    ENABLE_OPENAI_API,
+    ENABLE_OLLAMA_API,
     ENABLE_MODEL_FILTER,
     MODEL_FILTER_LIST,
     GLOBAL_LOG_LEVEL,

@@ -60,6 +55,7 @@ from config import (
     WEBHOOK_URL,
     ENABLE_ADMIN_EXPORT,
     AppConfig,
+    WEBUI_BUILD_HASH,
 )
 from constants import ERROR_MESSAGES

@@ -90,6 +86,7 @@ print(
 v{VERSION} - building the best open-source AI user interface.
+{f"Commit: {WEBUI_BUILD_HASH}" if WEBUI_BUILD_HASH != "dev-build" else ""}
 https://github.com/open-webui/open-webui
 """
 )

@@ -97,11 +94,7 @@ https://github.com/open-webui/open-webui
 @asynccontextmanager
 async def lifespan(app: FastAPI):
-    if ENABLE_LITELLM:
-        asyncio.create_task(start_litellm_background())
-
     yield

-    if ENABLE_LITELLM:
-        await shutdown_litellm_background()
-

 app = FastAPI(

@@ -109,11 +102,19 @@ app = FastAPI(
 )

 app.state.config = AppConfig()
+
+app.state.config.ENABLE_OPENAI_API = ENABLE_OPENAI_API
+app.state.config.ENABLE_OLLAMA_API = ENABLE_OLLAMA_API
+
 app.state.config.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
 app.state.config.MODEL_FILTER_LIST = MODEL_FILTER_LIST
+
 app.state.config.WEBHOOK_URL = WEBHOOK_URL

+app.state.MODELS = {}
+
 origins = ["*"]

@@ -230,6 +231,11 @@ app.add_middleware(
 @app.middleware("http")
 async def check_url(request: Request, call_next):
+    if len(app.state.MODELS) == 0:
+        await get_all_models()
+    else:
+        pass
+
     start_time = int(time.time())
     response = await call_next(request)
     process_time = int(time.time()) - start_time

@@ -246,9 +252,8 @@ async def update_embedding_function(request: Request, call_next):
     return response

-app.mount("/litellm/api", litellm_app)
-
 app.mount("/ollama", ollama_app)
-app.mount("/openai/api", openai_app)
+app.mount("/openai", openai_app)

 app.mount("/images/api/v1", images_app)
 app.mount("/audio/api/v1", audio_app)

@@ -259,6 +264,87 @@ app.mount("/api/v1", webui_app)
 webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION

+
+async def get_all_models():
+    openai_models = []
+    ollama_models = []
+
+    if app.state.config.ENABLE_OPENAI_API:
+        openai_models = await get_openai_models()
+        openai_models = openai_models["data"]
+
+    if app.state.config.ENABLE_OLLAMA_API:
+        ollama_models = await get_ollama_models()
+        ollama_models = [
+            {
+                "id": model["model"],
+                "name": model["name"],
+                "object": "model",
+                "created": int(time.time()),
+                "owned_by": "ollama",
+                "ollama": model,
+            }
+            for model in ollama_models["models"]
+        ]
+
+    models = openai_models + ollama_models
+    custom_models = Models.get_all_models()
+
+    for custom_model in custom_models:
+        if custom_model.base_model_id == None:
+            for model in models:
+                if (
+                    custom_model.id == model["id"]
+                    or custom_model.id == model["id"].split(":")[0]
+                ):
+                    model["name"] = custom_model.name
+                    model["info"] = custom_model.model_dump()
+        else:
+            owned_by = "openai"
+
+            for model in models:
+                if (
+                    custom_model.base_model_id == model["id"]
+                    or custom_model.base_model_id == model["id"].split(":")[0]
+                ):
+                    owned_by = model["owned_by"]
+                    break
+
+            models.append(
+                {
+                    "id": custom_model.id,
+                    "name": custom_model.name,
+                    "object": "model",
+                    "created": custom_model.created_at,
+                    "owned_by": owned_by,
+                    "info": custom_model.model_dump(),
+                    "preset": True,
+                }
+            )
+
+    app.state.MODELS = {model["id"]: model for model in models}
+    webui_app.state.MODELS = app.state.MODELS
+
+    return models
+
+
+@app.get("/api/models")
+async def get_models(user=Depends(get_verified_user)):
+    models = await get_all_models()
+
+    if app.state.config.ENABLE_MODEL_FILTER:
+        if user.role == "user":
+            models = list(
+                filter(
+                    lambda model: model["id"] in app.state.config.MODEL_FILTER_LIST,
+                    models,
+                )
+            )
+            return {"data": models}
+
+    return {"data": models}
+
+
 @app.get("/api/config")
 async def get_app_config():
     # Checking and Handling the Absence of 'ui' in CONFIG_DATA

@@ -272,13 +358,17 @@ async def get_app_config():
         "status": True,
         "name": WEBUI_NAME,
         "version": VERSION,
-        "auth": WEBUI_AUTH,
         "default_locale": default_locale,
-        "images": images_app.state.config.ENABLED,
         "default_models": webui_app.state.config.DEFAULT_MODELS,
         "default_prompt_suggestions": webui_app.state.config.DEFAULT_PROMPT_SUGGESTIONS,
-        "trusted_header_auth": bool(webui_app.state.AUTH_TRUSTED_EMAIL_HEADER),
-        "admin_export_enabled": ENABLE_ADMIN_EXPORT,
+        "features": {
+            "auth": WEBUI_AUTH,
+            "auth_trusted_header": bool(webui_app.state.AUTH_TRUSTED_EMAIL_HEADER),
+            "enable_signup": webui_app.state.config.ENABLE_SIGNUP,
+            "enable_image_generation": images_app.state.config.ENABLED,
+            "enable_admin_export": ENABLE_ADMIN_EXPORT,
+            "enable_community_sharing": webui_app.state.config.ENABLE_COMMUNITY_SHARING,
+        },
     }

@@ -302,15 +392,6 @@ async def update_model_filter_config(
     app.state.config.ENABLE_MODEL_FILTER = form_data.enabled
     app.state.config.MODEL_FILTER_LIST = form_data.models

-    ollama_app.state.config.ENABLE_MODEL_FILTER = app.state.config.ENABLE_MODEL_FILTER
-    ollama_app.state.config.MODEL_FILTER_LIST = app.state.config.MODEL_FILTER_LIST
-
-    openai_app.state.config.ENABLE_MODEL_FILTER = app.state.config.ENABLE_MODEL_FILTER
-    openai_app.state.config.MODEL_FILTER_LIST = app.state.config.MODEL_FILTER_LIST
-
-    litellm_app.state.ENABLE_MODEL_FILTER = app.state.config.ENABLE_MODEL_FILTER
-    litellm_app.state.MODEL_FILTER_LIST = app.state.config.MODEL_FILTER_LIST
-
     return {
         "enabled": app.state.config.ENABLE_MODEL_FILTER,
         "models": app.state.config.MODEL_FILTER_LIST,

@@ -331,7 +412,6 @@ class UrlForm(BaseModel):
 @app.post("/api/webhook")
 async def update_webhook_url(form_data: UrlForm, user=Depends(get_admin_user)):
     app.state.config.WEBHOOK_URL = form_data.url
     webui_app.state.WEBHOOK_URL = app.state.config.WEBHOOK_URL

     return {

@@ -339,6 +419,19 @@ async def update_webhook_url(form_data: UrlForm, user=Depends(get_admin_user)):
     }

+
+@app.get("/api/community_sharing", response_model=bool)
+async def get_community_sharing_status(request: Request, user=Depends(get_admin_user)):
+    return webui_app.state.config.ENABLE_COMMUNITY_SHARING
+
+
+@app.get("/api/community_sharing/toggle", response_model=bool)
+async def toggle_community_sharing(request: Request, user=Depends(get_admin_user)):
+    webui_app.state.config.ENABLE_COMMUNITY_SHARING = (
+        not webui_app.state.config.ENABLE_COMMUNITY_SHARING
+    )
+    return webui_app.state.config.ENABLE_COMMUNITY_SHARING
+
+
 @app.get("/api/version")
 async def get_app_config():
     return {

@@ -408,6 +501,7 @@ app.mount("/static", StaticFiles(directory=STATIC_DIR), name="static")
 app.mount("/cache", StaticFiles(directory=CACHE_DIR), name="cache")

 if os.path.exists(FRONTEND_BUILD_DIR):
+    mimetypes.add_type("text/javascript", ".js")
     app.mount(
         "/",
         SPAStaticFiles(directory=FRONTEND_BUILD_DIR, html=True),
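For orientation, here is a minimal sketch of calling the new unified /api/models endpoint from a client. The host, port, and token below are placeholders (any verified user's JWT); none of them come from this commit.

# Minimal sketch; host and token are assumptions, not part of this commit.
import requests

token = "YOUR_API_TOKEN"  # placeholder: a verified user's JWT
resp = requests.get(
    "http://localhost:8080/api/models",
    headers={"Authorization": f"Bearer {token}"},
)
resp.raise_for_status()

for model in resp.json()["data"]:
    # "owned_by" is "ollama" or "openai"; presets created through the new
    # models router additionally carry "preset": True and an "info" payload.
    print(model["id"], model["owned_by"], model.get("preset", False))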
backend/open_webui/__init__.py (new file, 0 → 100644)

import base64
import os
import random
from pathlib import Path

import typer
import uvicorn

app = typer.Typer()

KEY_FILE = Path.cwd() / ".webui_secret_key"
if (frontend_build_dir := Path(__file__).parent / "frontend").exists():
    os.environ["FRONTEND_BUILD_DIR"] = str(frontend_build_dir)


@app.command()
def serve(
    host: str = "0.0.0.0",
    port: int = 8080,
):
    if os.getenv("WEBUI_SECRET_KEY") is None:
        typer.echo(
            "Loading WEBUI_SECRET_KEY from file, not provided as an environment variable."
        )
        if not KEY_FILE.exists():
            typer.echo(f"Generating a new secret key and saving it to {KEY_FILE}")
            KEY_FILE.write_bytes(base64.b64encode(random.randbytes(12)))
        typer.echo(f"Loading WEBUI_SECRET_KEY from {KEY_FILE}")
        os.environ["WEBUI_SECRET_KEY"] = KEY_FILE.read_text()

    if os.getenv("USE_CUDA_DOCKER", "false") == "true":
        typer.echo(
            "CUDA is enabled, appending LD_LIBRARY_PATH to include torch/cudnn & cublas libraries."
        )
        LD_LIBRARY_PATH = os.getenv("LD_LIBRARY_PATH", "").split(":")
        os.environ["LD_LIBRARY_PATH"] = ":".join(
            LD_LIBRARY_PATH
            + [
                "/usr/local/lib/python3.11/site-packages/torch/lib",
                "/usr/local/lib/python3.11/site-packages/nvidia/cudnn/lib",
            ]
        )

    import main  # we need set environment variables before importing main

    uvicorn.run(main.app, host=host, port=port, forwarded_allow_ips="*")


@app.command()
def dev(
    host: str = "0.0.0.0",
    port: int = 8080,
    reload: bool = True,
):
    uvicorn.run(
        "main:app",
        host=host,
        port=port,
        reload=reload,
        forwarded_allow_ips="*",
    )


if __name__ == "__main__":
    app()
backend/requirements.txt

-fastapi==0.109.2
+fastapi==0.111.0
 uvicorn[standard]==0.22.0
 pydantic==2.7.1
 python-multipart==0.0.9
 Flask==3.0.3
-Flask-Cors==4.0.0
+Flask-Cors==4.0.1
 python-socketio==5.11.2
 python-jose==3.3.0
 passlib[bcrypt]==1.7.4
-requests==2.31.0
+requests==2.32.2
 aiohttp==3.9.5
-peewee==3.17.3
+peewee==3.17.5
 peewee-migrate==1.12.2
 psycopg2-binary==2.9.9
-PyMySQL==1.1.0
-bcrypt==4.1.2
+PyMySQL==1.1.1
+bcrypt==4.1.3
-litellm[proxy]==1.35.28
-boto3==1.34.95
+boto3==1.34.110
 argon2-cffi==23.1.0
 APScheduler==3.10.4
-google-generativeai==0.5.2
+google-generativeai==0.5.4
-langchain==0.1.16
-langchain-community==0.0.34
-langchain-chroma==0.1.0
+langchain==0.2.0
+langchain-community==0.2.0
+langchain-chroma==0.1.1
 fake-useragent==1.5.1
-chromadb==0.4.24
+chromadb==0.5.0
 sentence-transformers==2.7.0
 pypdf==4.2.0
 docx2txt==0.8
 python-pptx==0.6.23
-unstructured==0.11.8
+unstructured==0.14.0
 Markdown==3.6
 pypandoc==1.13
 pandas==2.2.2

@@ -46,16 +44,16 @@ xlrd==2.0.1
 validators==0.28.1
 opencv-python-headless==4.9.0.80
-rapidocr-onnxruntime==1.2.3
+rapidocr-onnxruntime==1.3.22
-fpdf2==2.7.8
+fpdf2==2.7.9
 rank-bm25==0.2.2
-faster-whisper==1.0.1
+faster-whisper==1.0.2
 PyJWT[crypto]==2.8.0
 black==24.4.2
-langfuse==2.27.3
+langfuse==2.33.0
 youtube-transcript-api==0.6.2
-pytube
\ No newline at end of file
+pytube==15.0.0
\ No newline at end of file
backend/start.sh

@@ -30,4 +30,29 @@ if [ "$USE_CUDA_DOCKER" = "true" ]; then
     export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib/python3.11/site-packages/torch/lib:/usr/local/lib/python3.11/site-packages/nvidia/cudnn/lib"
 fi

+# Check if SPACE_ID is set, if so, configure for space
+if [ -n "$SPACE_ID" ]; then
+  echo "Configuring for HuggingFace Space deployment"
+  if [ -n "$ADMIN_USER_EMAIL" ] && [ -n "$ADMIN_USER_PASSWORD" ]; then
+    echo "Admin user configured, creating"
+    WEBUI_SECRET_KEY="$WEBUI_SECRET_KEY" uvicorn main:app --host "$HOST" --port "$PORT" --forwarded-allow-ips '*' &
+    webui_pid=$!
+    echo "Waiting for webui to start..."
+    while ! curl -s http://localhost:8080/health > /dev/null; do
+      sleep 1
+    done
+    echo "Creating admin user..."
+    curl \
+      -X POST "http://localhost:8080/api/v1/auths/signup" \
+      -H "accept: application/json" \
+      -H "Content-Type: application/json" \
+      -d "{ \"email\": \"${ADMIN_USER_EMAIL}\", \"password\": \"${ADMIN_USER_PASSWORD}\", \"name\": \"Admin\" }"
+    echo "Shutting down webui..."
+    kill $webui_pid
+  fi
+
+  export WEBUI_URL=${SPACE_HOST}
+fi
+
 WEBUI_SECRET_KEY="$WEBUI_SECRET_KEY" exec uvicorn main:app --host "$HOST" --port "$PORT" --forwarded-allow-ips '*'
backend/utils/misc.py

+from pathlib import Path
 import hashlib
 import json
 import re
 from datetime import timedelta
 from typing import Optional

@@ -110,3 +111,76 @@ def parse_duration(duration: str) -> Optional[timedelta]:
             total_duration += timedelta(weeks=number)

     return total_duration
+
+
+def parse_ollama_modelfile(model_text):
+    parameters_meta = {
+        "mirostat": int,
+        "mirostat_eta": float,
+        "mirostat_tau": float,
+        "num_ctx": int,
+        "repeat_last_n": int,
+        "repeat_penalty": float,
+        "temperature": float,
+        "seed": int,
+        "stop": str,
+        "tfs_z": float,
+        "num_predict": int,
+        "top_k": int,
+        "top_p": float,
+    }
+
+    data = {"base_model_id": None, "params": {}}
+
+    # Parse base model
+    base_model_match = re.search(
+        r"^FROM\s+(\w+)", model_text, re.MULTILINE | re.IGNORECASE
+    )
+    if base_model_match:
+        data["base_model_id"] = base_model_match.group(1)
+
+    # Parse template
+    template_match = re.search(
+        r'TEMPLATE\s+"""(.+?)"""', model_text, re.DOTALL | re.IGNORECASE
+    )
+    if template_match:
+        data["params"] = {"template": template_match.group(1).strip()}
+
+    # Parse stops
+    stops = re.findall(r'PARAMETER stop "(.*?)"', model_text, re.IGNORECASE)
+    if stops:
+        data["params"]["stop"] = stops
+
+    # Parse other parameters from the provided list
+    for param, param_type in parameters_meta.items():
+        param_match = re.search(rf"PARAMETER {param} (.+)", model_text, re.IGNORECASE)
+        if param_match:
+            value = param_match.group(1)
+            if param_type == int:
+                value = int(value)
+            elif param_type == float:
+                value = float(value)
+            data["params"][param] = value
+
+    # Parse adapter
+    adapter_match = re.search(r"ADAPTER (.+)", model_text, re.IGNORECASE)
+    if adapter_match:
+        data["params"]["adapter"] = adapter_match.group(1)
+
+    # Parse system description
+    system_desc_match = re.search(
+        r'SYSTEM\s+"""(.+?)"""', model_text, re.DOTALL | re.IGNORECASE
+    )
+    if system_desc_match:
+        data["params"]["system"] = system_desc_match.group(1).strip()
+
+    # Parse messages
+    messages = []
+    message_matches = re.findall(r"MESSAGE (\w+) (.+)", model_text, re.IGNORECASE)
+    for role, content in message_matches:
+        messages.append({"role": role, "content": content})
+
+    if messages:
+        data["params"]["messages"] = messages
+
+    return data
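To make the expected output of parse_ollama_modelfile concrete, here is a small usage sketch. It assumes the script runs from the backend/ directory so that the utils package is importable; the Modelfile text itself is invented purely for illustration.

# Usage sketch, assuming backend/ is the working directory (so `utils` resolves).
from utils.misc import parse_ollama_modelfile

modelfile = '''
FROM llama3
PARAMETER temperature 0.7
PARAMETER num_ctx 4096
SYSTEM """You are a terse assistant."""
MESSAGE user Hello
'''

data = parse_ollama_modelfile(modelfile)
print(data)
# Expected result (key order may differ):
# {
#     "base_model_id": "llama3",
#     "params": {
#         "num_ctx": 4096,
#         "temperature": 0.7,
#         "system": "You are a terse assistant.",
#         "messages": [{"role": "user", "content": "Hello"}],
#     },
# }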
backend/utils/models.py (new file, 0 → 100644)

from apps.webui.models.models import Models, ModelModel, ModelForm, ModelResponse


def get_model_id_from_custom_model_id(id: str):
    model = Models.get_model_by_id(id)

    if model:
        return model.id
    else:
        return id
backend/utils/utils.py

 from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
 from fastapi import HTTPException, status, Depends

-from apps.web.models.users import Users
+from apps.webui.models.users import Users

 from pydantic import BaseModel
 from typing import Union, Optional
cypress/e2e/chat.cy.ts

@@ -74,5 +74,28 @@ describe('Settings', () => {
 			expect(spy).to.be.callCount(2);
 		});
 	});
+
+	it('user can generate image', () => {
+		// Click on the model selector
+		cy.get('button[aria-label="Select a model"]').click();
+		// Select the first model
+		cy.get('button[aria-label="model-item"]').first().click();
+		// Type a message
+		cy.get('#chat-textarea').type('Hi, what can you do? A single sentence only please.', { force: true });
+		// Send the message
+		cy.get('button[type="submit"]').click();
+		// User's message should be visible
+		cy.get('.chat-user').should('exist');
+		// Wait for the response
+		cy.get('.chat-assistant', { timeout: 120_000 }) // .chat-assistant is created after the first token is received
+			.find('div[aria-label="Generation Info"]', { timeout: 120_000 }) // Generation Info is created after the stop token is received
+			.should('exist');
+		// Click on the generate image button
+		cy.get('[aria-label="Generate Image"]').click();
+		// Wait for image to be visible
+		cy.get('img[data-cy="image"]', { timeout: 60_000 }).should('be.visible');
+	});
 });
docker-compose.a1111-test.yaml (new file, 0 → 100644)

# This is an overlay that spins up stable-diffusion-webui for integration testing
# This is not designed to be used in production
services:
  stable-diffusion-webui:
    # Not built for ARM64
    platform: linux/amd64
    image: ghcr.io/neggles/sd-webui-docker:latest
    restart: unless-stopped
    environment:
      CLI_ARGS: "--api --use-cpu all --precision full --no-half --skip-torch-cuda-test --ckpt /empty.pt --do-not-download-clip --disable-nan-check --disable-opt-split-attention"
      PYTHONUNBUFFERED: "1"
      TERM: "vt100"
      SD_WEBUI_VARIANT: "default"
    # Hack to get container working on Apple Silicon
    # Rosetta creates a conflict ${HOME}/.cache folder
    entrypoint: /bin/bash
    command:
      - -c
      - |
        export HOME=/root-home
        rm -rf $${HOME}/.cache
        /docker/entrypoint.sh python -u webui.py --listen --port $${WEBUI_PORT} --skip-version-check $${CLI_ARGS}
    volumes:
      - ./test/test_files/image_gen/sd-empty.pt:/empty.pt

  open-webui:
    environment:
      ENABLE_IMAGE_GENERATION: "true"
      AUTOMATIC1111_BASE_URL: http://stable-diffusion-webui:7860
      IMAGE_SIZE: "64x64"
      IMAGE_STEPS: "3"
hatch_build.py (new file, 0 → 100644)

# noqa: INP001
import os
import shutil
import subprocess
from sys import stderr

from hatchling.builders.hooks.plugin.interface import BuildHookInterface


class CustomBuildHook(BuildHookInterface):
    def initialize(self, version, build_data):
        super().initialize(version, build_data)
        stderr.write(">>> Building Open Webui frontend\n")
        npm = shutil.which("npm")
        if npm is None:
            raise RuntimeError(
                "NodeJS `npm` is required for building Open Webui but it was not found"
            )
        stderr.write("### npm install\n")
        subprocess.run([npm, "install"], check=True)  # noqa: S603
        stderr.write("\n### npm run build\n")
        os.environ["APP_BUILD_HASH"] = version
        subprocess.run([npm, "run", "build"], check=True)  # noqa: S603
package-lock.json

 {
 	"name": "open-webui",
-	"version": "0.1.125",
+	"version": "0.2.0.dev2",
 	"lockfileVersion": 3,
 	"requires": true,
 	"packages": {
 		"": {
 			"name": "open-webui",
-			"version": "0.1.125",
+			"version": "0.2.0.dev2",
 			"dependencies": {
 				"@pyscript/core": "^0.4.32",
 				"@sveltejs/adapter-node": "^1.3.1",