Unverified Commit c5e9ceeb authored by Que Nguyen's avatar Que Nguyen Committed by GitHub
Browse files

Merge branch 'open-webui:main' into main

parents 47a04a7e 2fa94956
...@@ -5,6 +5,23 @@ All notable changes to this project will be documented in this file. ...@@ -5,6 +5,23 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [0.1.114] - 2024-03-20
### Added
- **🔗 Webhook Integration**: Now you can subscribe to new user sign-up events via webhook. Simply navigate to the admin panel > admin settings > webhook URL.
- **🛡️ Enhanced Model Filtering**: Alongside the existing Ollama and OpenAI proxy model whitelisting, we've added model filtering functionality for the LiteLLM proxy.
- **🌍 Expanded Language Support**: Spanish, Catalan, and Vietnamese languages are now available, with improvements made to others.
### Fixed
- **🔧 Input Field Spelling**: Resolved issue with spelling mistakes in input fields.
- **🖊️ Light Mode Styling**: Fixed styling issue with light mode in document adding.
### Changed
- **🔄 Language Sorting**: Languages are now sorted alphabetically by their code for improved organization.
## [0.1.113] - 2024-03-18 ## [0.1.113] - 2024-03-18
### Added ### Added
......
from litellm.proxy.proxy_server import ProxyConfig, initialize from litellm.proxy.proxy_server import ProxyConfig, initialize
from litellm.proxy.proxy_server import app from litellm.proxy.proxy_server import app
from fastapi import FastAPI, Request, Depends, status from fastapi import FastAPI, Request, Depends, status, Response
from fastapi.responses import JSONResponse from fastapi.responses import JSONResponse
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
from starlette.responses import StreamingResponse
import json
from utils.utils import get_http_authorization_cred, get_current_user from utils.utils import get_http_authorization_cred, get_current_user
from config import ENV from config import ENV
from config import (
MODEL_FILTER_ENABLED,
MODEL_FILTER_LIST,
)
proxy_config = ProxyConfig() proxy_config = ProxyConfig()
...@@ -26,16 +38,58 @@ async def on_startup(): ...@@ -26,16 +38,58 @@ async def on_startup():
await startup() await startup()
app.state.MODEL_FILTER_ENABLED = MODEL_FILTER_ENABLED
app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST
@app.middleware("http")
async def auth_middleware(request: Request, call_next):
    """Authenticate every request forwarded to the LiteLLM proxy.

    Parses the ``Authorization`` header into a credential, resolves it to a
    user, and stashes that user on ``request.state.user`` so downstream
    middleware (e.g. the model-filtering middleware) can apply role-based
    rules.

    Returns a 400 JSON error response when the credential is missing or
    invalid; otherwise forwards the request down the middleware chain.
    """
    auth_header = request.headers.get("Authorization", "")
    # Default to anonymous so request.state.user is always defined downstream.
    request.state.user = None

    try:
        user = get_current_user(get_http_authorization_cred(auth_header))
        request.state.user = user
    except Exception as e:
        # Invalid or absent credentials: reject before reaching the proxy.
        return JSONResponse(status_code=400, content={"detail": str(e)})

    return await call_next(request)
class ModifyModelsResponseMiddleware(BaseHTTPMiddleware):
    """Rewrites ``/models`` responses to enforce the model whitelist.

    When ``app.state.MODEL_FILTER_ENABLED`` is true, users with role
    ``"user"`` only see models whose id appears in
    ``app.state.MODEL_FILTER_LIST``; other callers see the unfiltered list.
    All other paths pass through untouched.
    """

    async def dispatch(
        self, request: Request, call_next: RequestResponseEndpoint
    ) -> Response:
        response = await call_next(request)

        if "/models" in request.url.path:
            # Defensive: the auth middleware normally sets this attribute,
            # but don't crash with AttributeError if it did not run.
            user = getattr(request.state, "user", None)

            if isinstance(response, StreamingResponse):
                # Drain the streaming body so the JSON payload can be
                # parsed and rewritten before being returned to the client.
                body = b""
                async for chunk in response.body_iterator:
                    body += chunk

                data = json.loads(body.decode("utf-8"))

                if app.state.MODEL_FILTER_ENABLED:
                    if user and user.role == "user":
                        data["data"] = [
                            model
                            for model in data["data"]
                            if model["id"] in app.state.MODEL_FILTER_LIST
                        ]

                # Flag so clients/debuggers can tell the payload was rewritten.
                data["modified"] = True
                return JSONResponse(content=data)

        return response


app.add_middleware(ModifyModelsResponseMiddleware)
...@@ -298,7 +298,7 @@ USER_PERMISSIONS_CHAT_DELETION = ( ...@@ -298,7 +298,7 @@ USER_PERMISSIONS_CHAT_DELETION = (
USER_PERMISSIONS = {"chat": {"deletion": USER_PERMISSIONS_CHAT_DELETION}} USER_PERMISSIONS = {"chat": {"deletion": USER_PERMISSIONS_CHAT_DELETION}}
# Model filtering toggle: env-var boolean, case-insensitive "true" enables it;
# any other value (or unset) disables filtering.
MODEL_FILTER_ENABLED = (
    os.environ.get("MODEL_FILTER_ENABLED", "False").lower() == "true"
)

# Semicolon-separated whitelist of model ids, e.g. "gpt-3.5-turbo;llama2".
# Empty entries are dropped so an unset/empty variable yields an empty list
# (the naive split would yield [""], silently whitelisting an empty id).
MODEL_FILTER_LIST = [
    model.strip()
    for model in os.environ.get("MODEL_FILTER_LIST", "").split(";")
    if model.strip()
]
......
{ {
"name": "open-webui", "name": "open-webui",
"version": "0.1.113", "version": "0.1.114",
"private": true, "private": true,
"scripts": { "scripts": {
"dev": "vite dev --host", "dev": "vite dev --host",
......
...@@ -4,8 +4,8 @@ ...@@ -4,8 +4,8 @@
"title": "English (US)" "title": "English (US)"
}, },
{ {
"code": "fa-IR", "code": "ca-ES",
"title": "فارسی (Farsi)" "title": "Catalan"
}, },
{ {
"code": "de-DE", "code": "de-DE",
...@@ -16,13 +16,17 @@ ...@@ -16,13 +16,17 @@
"title": "Spanish" "title": "Spanish"
}, },
{ {
"code": "fr-FR", "code": "fa-IR",
"title": "French (France)" "title": "فارسی (Farsi)"
}, },
{ {
"code": "fr-CA", "code": "fr-CA",
"title": "French (Canada)" "title": "French (Canada)"
}, },
{
"code": "fr-FR",
"title": "French (France)"
},
{ {
"code": "ru-RU", "code": "ru-RU",
"title": "Russian (Russia)" "title": "Russian (Russia)"
...@@ -32,19 +36,15 @@ ...@@ -32,19 +36,15 @@
"title": "Ukrainian" "title": "Ukrainian"
}, },
{ {
"code": "zh-TW", "code": "vi-VN",
"title": "Chinese (Traditional)" "title": "Tiếng Việt"
}, },
{ {
"code": "zh-CN", "code": "zh-CN",
"title": "Chinese (Simplified)" "title": "Chinese (Simplified)"
}, },
{
"code": "vi-VN",
"title": "Tiếng Việt"
},
{ {
"code": "ca-ES", "code": "zh-TW",
"title": "Catalan" "title": "Chinese (Traditional)"
} }
] ]
\ No newline at end of file
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment