Unverified commit 27c59d98 authored by Ido Henri Mamia, committed by GitHub

Merge branch 'dev' into feat/rtl-layout-chat-support

parents e5d57430 400bfa5a
@@ -75,6 +75,10 @@ with open(LITELLM_CONFIG_DIR, "r") as file:
litellm_config = yaml.safe_load(file)
app.state.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER.value
app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST.value
app.state.ENABLE = ENABLE_LITELLM
app.state.CONFIG = litellm_config
@@ -151,10 +155,6 @@ async def shutdown_litellm_background():
background_process = None
app.state.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST
@app.get("/")
async def get_status():
return {"status": True}
......
@@ -64,8 +64,8 @@ app.add_middleware(
app.state.config = AppConfig()
app.state.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST
app.state.config.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
app.state.config.MODEL_FILTER_LIST = MODEL_FILTER_LIST
app.state.config.OLLAMA_BASE_URLS = OLLAMA_BASE_URLS
app.state.MODELS = {}
@@ -124,8 +124,9 @@ async def cancel_ollama_request(request_id: str, user=Depends(get_current_user))
async def fetch_url(url):
timeout = aiohttp.ClientTimeout(total=5)
try:
async with aiohttp.ClientSession() as session:
async with aiohttp.ClientSession(timeout=timeout) as session:
async with session.get(url) as response:
return await response.json()
except Exception as e:
@@ -177,11 +178,12 @@ async def get_ollama_tags(
if url_idx == None:
models = await get_all_models()
if app.state.ENABLE_MODEL_FILTER:
if app.state.config.ENABLE_MODEL_FILTER:
if user.role == "user":
models["models"] = list(
filter(
lambda model: model["name"] in app.state.MODEL_FILTER_LIST,
lambda model: model["name"]
in app.state.config.MODEL_FILTER_LIST,
models["models"],
)
)
@@ -1045,11 +1047,12 @@ async def get_openai_models(
if url_idx == None:
models = await get_all_models()
if app.state.ENABLE_MODEL_FILTER:
if app.state.config.ENABLE_MODEL_FILTER:
if user.role == "user":
models["models"] = list(
filter(
lambda model: model["name"] in app.state.MODEL_FILTER_LIST,
lambda model: model["name"]
in app.state.config.MODEL_FILTER_LIST,
models["models"],
)
)
......
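A note on the fetch_url change in the file above: in aiohttp, a ClientTimeout object only takes effect when it is passed to the ClientSession constructor (or to an individual request). The old code built the timeout and never attached it, so the session silently fell back to aiohttp's much longer default instead of the intended 5 seconds. A minimal sketch of the corrected pattern:

import aiohttp

async def fetch_url(url):
    # The timeout must be handed to the session; on its own it does nothing.
    timeout = aiohttp.ClientTimeout(total=5)
    try:
        async with aiohttp.ClientSession(timeout=timeout) as session:
            async with session.get(url) as response:
                return await response.json()
    except Exception:
        # Treat unreachable or non-JSON endpoints as absent.
        return None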
@@ -21,6 +21,7 @@ from utils.utils import (
)
from config import (
SRC_LOG_LEVELS,
ENABLE_OPENAI_API,
OPENAI_API_BASE_URLS,
OPENAI_API_KEYS,
CACHE_DIR,
@@ -46,11 +47,14 @@ app.add_middleware(
allow_headers=["*"],
)
app.state.config = AppConfig()
app.state.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST
app.state.config.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
app.state.config.MODEL_FILTER_LIST = MODEL_FILTER_LIST
app.state.config.ENABLE_OPENAI_API = ENABLE_OPENAI_API
app.state.config.OPENAI_API_BASE_URLS = OPENAI_API_BASE_URLS
app.state.config.OPENAI_API_KEYS = OPENAI_API_KEYS
@@ -68,6 +72,21 @@ async def check_url(request: Request, call_next):
return response
@app.get("/config")
async def get_config(user=Depends(get_admin_user)):
return {"ENABLE_OPENAI_API": app.state.config.ENABLE_OPENAI_API}
class OpenAIConfigForm(BaseModel):
enable_openai_api: Optional[bool] = None
@app.post("/config/update")
async def update_config(form_data: OpenAIConfigForm, user=Depends(get_admin_user)):
app.state.config.ENABLE_OPENAI_API = form_data.enable_openai_api
return {"ENABLE_OPENAI_API": app.state.config.ENABLE_OPENAI_API}
class UrlsUpdateForm(BaseModel):
urls: List[str]
@@ -164,11 +183,15 @@ async def speech(request: Request, user=Depends(get_verified_user)):
async def fetch_url(url, key):
timeout = aiohttp.ClientTimeout(total=5)
try:
if key != "":
headers = {"Authorization": f"Bearer {key}"}
async with aiohttp.ClientSession() as session:
async with aiohttp.ClientSession(timeout=timeout) as session:
async with session.get(url, headers=headers) as response:
return await response.json()
else:
return None
except Exception as e:
# Handle connection error here
log.error(f"Connection error: {e}")
@@ -200,7 +223,7 @@ async def get_all_models():
if (
len(app.state.config.OPENAI_API_KEYS) == 1
and app.state.config.OPENAI_API_KEYS[0] == ""
):
) or not app.state.config.ENABLE_OPENAI_API:
models = {"data": []}
else:
tasks = [
@@ -237,11 +260,11 @@ async def get_all_models():
async def get_models(url_idx: Optional[int] = None, user=Depends(get_current_user)):
if url_idx == None:
models = await get_all_models()
if app.state.ENABLE_MODEL_FILTER:
if app.state.config.ENABLE_MODEL_FILTER:
if user.role == "user":
models["data"] = list(
filter(
lambda model: model["id"] in app.state.MODEL_FILTER_LIST,
lambda model: model["id"] in app.state.config.MODEL_FILTER_LIST,
models["data"],
)
)
......
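The get_all_models change in the file above wires the new toggle into model discovery: when the only configured key is empty, or the admin has disabled the OpenAI API, no upstream requests are made at all. A hedged sketch of that guard; the app object and the /models endpoint path are assumptions drawn from context, not verified against the full file:

keys = app.state.config.OPENAI_API_KEYS
if (len(keys) == 1 and keys[0] == "") or not app.state.config.ENABLE_OPENAI_API:
    models = {"data": []}  # nothing to query; present an empty model list
else:
    # Fan out one fetch per configured base URL, paired with its key.
    tasks = [
        fetch_url(f"{url}/models", keys[idx])
        for idx, url in enumerate(app.state.config.OPENAI_API_BASE_URLS)
    ]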
@@ -433,12 +433,12 @@ async def update_query_settings(
form_data: QuerySettingsForm, user=Depends(get_admin_user)
):
app.state.config.RAG_TEMPLATE = (
form_data.template if form_data.template else RAG_TEMPLATE,
form_data.template if form_data.template else RAG_TEMPLATE
)
app.state.config.TOP_K = form_data.k if form_data.k else 4
app.state.config.RELEVANCE_THRESHOLD = form_data.r if form_data.r else 0.0
app.state.config.ENABLE_RAG_HYBRID_SEARCH = (
form_data.hybrid if form_data.hybrid else False,
form_data.hybrid if form_data.hybrid else False
)
return {
"status": True,
......
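The update_query_settings fix above removes a classic Python pitfall: a trailing comma inside parentheses turns the right-hand side into a one-element tuple, so RAG_TEMPLATE and ENABLE_RAG_HYBRID_SEARCH were being stored as ("...",) and (False,) instead of a string and a bool. Worse, a non-empty tuple is always truthy, so the hybrid-search flag would read as enabled even when set to False. A quick illustration of the difference:

template = ("fallback template",)   # trailing comma: a 1-tuple, not a str
assert isinstance(template, tuple)

template = ("fallback template")    # plain parentheses: still a str
assert isinstance(template, str)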
@@ -417,6 +417,14 @@ OLLAMA_BASE_URLS = PersistentConfig(
# OPENAI_API
####################################
ENABLE_OPENAI_API = PersistentConfig(
"ENABLE_OPENAI_API",
"openai.enable",
os.environ.get("ENABLE_OPENAI_API", "True").lower() == "true",
)
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "")
OPENAI_API_BASE_URL = os.environ.get("OPENAI_API_BASE_URL", "")
......
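The new ENABLE_OPENAI_API setting above follows the usual environment-variable boolean idiom: compare the lowercased string against "true", defaulting to enabled when the variable is unset. A standalone sketch of just the parsing, without the PersistentConfig wrapper:

import os

# "true"/"True"/"TRUE" enable the API; any other value disables it.
ENABLE_OPENAI_API = os.environ.get("ENABLE_OPENAI_API", "True").lower() == "true"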
@@ -118,15 +118,15 @@ origins = ["*"]
# Custom middleware to add security headers
class SecurityHeadersMiddleware(BaseHTTPMiddleware):
async def dispatch(self, request: Request, call_next):
response: Response = await call_next(request)
response.headers["Cross-Origin-Opener-Policy"] = "same-origin"
response.headers["Cross-Origin-Embedder-Policy"] = "require-corp"
return response
# class SecurityHeadersMiddleware(BaseHTTPMiddleware):
# async def dispatch(self, request: Request, call_next):
# response: Response = await call_next(request)
# response.headers["Cross-Origin-Opener-Policy"] = "same-origin"
# response.headers["Cross-Origin-Embedder-Policy"] = "require-corp"
# return response
app.add_middleware(SecurityHeadersMiddleware)
# app.add_middleware(SecurityHeadersMiddleware)
class RAGMiddleware(BaseHTTPMiddleware):
@@ -289,14 +289,14 @@ class ModelFilterConfigForm(BaseModel):
async def update_model_filter_config(
form_data: ModelFilterConfigForm, user=Depends(get_admin_user)
):
app.state.config.ENABLE_MODEL_FILTER, form_data.enabled
app.state.config.MODEL_FILTER_LIST, form_data.models
app.state.config.ENABLE_MODEL_FILTER = form_data.enabled
app.state.config.MODEL_FILTER_LIST = form_data.models
ollama_app.state.ENABLE_MODEL_FILTER = app.state.config.ENABLE_MODEL_FILTER
ollama_app.state.MODEL_FILTER_LIST = app.state.config.MODEL_FILTER_LIST
ollama_app.state.config.ENABLE_MODEL_FILTER = app.state.config.ENABLE_MODEL_FILTER
ollama_app.state.config.MODEL_FILTER_LIST = app.state.config.MODEL_FILTER_LIST
openai_app.state.ENABLE_MODEL_FILTER = app.state.config.ENABLE_MODEL_FILTER
openai_app.state.MODEL_FILTER_LIST = app.state.config.MODEL_FILTER_LIST
openai_app.state.config.ENABLE_MODEL_FILTER = app.state.config.ENABLE_MODEL_FILTER
openai_app.state.config.MODEL_FILTER_LIST = app.state.config.MODEL_FILTER_LIST
litellm_app.state.ENABLE_MODEL_FILTER = app.state.config.ENABLE_MODEL_FILTER
litellm_app.state.MODEL_FILTER_LIST = app.state.config.MODEL_FILTER_LIST
......
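The update_model_filter_config fix above replaces a subtle no-op: app.state.config.ENABLE_MODEL_FILTER, form_data.enabled is a tuple expression that is evaluated and immediately discarded, so the endpoint never persisted the submitted values. The corrected hunk also pushes the settings into the sub-apps' config objects rather than raw state attributes. The core mistake in isolation:

enabled = False
submitted = True

enabled, submitted    # builds the tuple (False, True) and throws it away
assert enabled is False

enabled = submitted   # an actual assignment
assert enabled is True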
import { OPENAI_API_BASE_URL } from '$lib/constants';
import { promptTemplate } from '$lib/utils';
export const getOpenAIConfig = async (token: string = '') => {
let error = null;
const res = await fetch(`${OPENAI_API_BASE_URL}/config`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
return res;
};
export const updateOpenAIConfig = async (token: string = '', enable_openai_api: boolean) => {
let error = null;
const res = await fetch(`${OPENAI_API_BASE_URL}/config/update`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
},
body: JSON.stringify({
enable_openai_api: enable_openai_api
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
return res;
};
export const getOpenAIUrls = async (token: string = '') => {
let error = null;
......
@@ -585,7 +585,7 @@
/>
<form
dir={$settings?.chatDirection}
class=" flex flex-col relative w-full rounded-3xl px-1.5 border border-gray-100 dark:border-gray-850 bg-white dark:bg-gray-900 dark:text-gray-100"
class=" flex flex-col relative w-full rounded-3xl px-1.5 bg-gray-50 dark:bg-gray-850 dark:text-gray-100"
on:submit|preventDefault={() => {
submitPrompt(prompt, user);
}}
@@ -755,7 +755,7 @@
<textarea
id="chat-textarea"
bind:this={chatTextAreaElement}
class="scrollbar-hidden dark:bg-gray-900 dark:text-gray-100 outline-none w-full py-3 px-3 {fileUploadEnabled
class="scrollbar-hidden bg-gray-50 dark:bg-gray-850 dark:text-gray-100 outline-none w-full py-3 px-3 {fileUploadEnabled
? ''
: ' pl-4'} rounded-xl resize-none h-[48px]"
placeholder={chatInputPlaceholder !== ''
......
@@ -308,7 +308,10 @@ __builtins__.input = input`);
class="language-{lang} rounded-t-none whitespace-pre">{@html highlightedCode || code}</code
></pre>
<div id="plt-canvas-{id}" class="bg-[#202123] text-white" />
<div
id="plt-canvas-{id}"
class="bg-[#202123] text-white max-w-full overflow-x-auto scrollbar-hidden"
/>
{#if executing}
<div class="bg-[#202123] text-white px-4 py-4 rounded-b-lg">
......
<script lang="ts">
import { settings } from '$lib/stores';
import { WEBUI_BASE_URL } from '$lib/constants';
export let src = '/user.png';
</script>
@@ -7,7 +8,11 @@
<div class={$settings?.chatDirection === 'LTR' ? "mr-3" : "ml-3"}>
<img
crossorigin="anonymous"
{src}
src={src.startsWith(WEBUI_BASE_URL) ||
src.startsWith('https://www.gravatar.com/avatar/') ||
src.startsWith('data:')
? src
: `/user.png`}
class=" w-8 object-cover rounded-full"
alt="profile"
draggable="false"
......
@@ -5,28 +5,27 @@
import { getOllamaUrls, getOllamaVersion, updateOllamaUrls } from '$lib/apis/ollama';
import {
getOpenAIConfig,
getOpenAIKeys,
getOpenAIUrls,
updateOpenAIConfig,
updateOpenAIKeys,
updateOpenAIUrls
} from '$lib/apis/openai';
import { toast } from 'svelte-sonner';
import Switch from '$lib/components/common/Switch.svelte';
const i18n = getContext('i18n');
export let getModels: Function;
// External
let OLLAMA_BASE_URL = '';
let OLLAMA_BASE_URLS = [''];
let OPENAI_API_KEY = '';
let OPENAI_API_BASE_URL = '';
let OPENAI_API_KEYS = [''];
let OPENAI_API_BASE_URLS = [''];
let showOpenAI = false;
let ENABLE_OPENAI_API = false;
const updateOpenAIHandler = async () => {
OPENAI_API_BASE_URLS = await updateOpenAIUrls(localStorage.token, OPENAI_API_BASE_URLS);
@@ -52,6 +51,10 @@
onMount(async () => {
if ($user.role === 'admin') {
OLLAMA_BASE_URLS = await getOllamaUrls(localStorage.token);
const config = await getOpenAIConfig(localStorage.token);
ENABLE_OPENAI_API = config.ENABLE_OPENAI_API;
OPENAI_API_BASE_URLS = await getOpenAIUrls(localStorage.token);
OPENAI_API_KEYS = await getOpenAIKeys(localStorage.token);
}
@@ -70,16 +73,18 @@
<div class="mt-2 space-y-2 pr-1.5">
<div class="flex justify-between items-center text-sm">
<div class=" font-medium">{$i18n.t('OpenAI API')}</div>
<button
class=" text-xs font-medium text-gray-500"
type="button"
on:click={() => {
showOpenAI = !showOpenAI;
}}>{showOpenAI ? $i18n.t('Hide') : $i18n.t('Show')}</button
>
<div class="mt-1">
<Switch
bind:state={ENABLE_OPENAI_API}
on:change={async () => {
updateOpenAIConfig(localStorage.token, ENABLE_OPENAI_API);
}}
/>
</div>
</div>
{#if showOpenAI}
{#if ENABLE_OPENAI_API}
<div class="flex flex-col gap-1">
{#each OPENAI_API_BASE_URLS as url, idx}
<div class="flex w-full gap-2">
......
@@ -128,7 +128,7 @@
{$i18n.t('and create a new shared link.')}
{:else}
{$i18n.t(
"Messages you send after creating your link won't be shared. Users with the URL will beable to view the shared chat."
"Messages you send after creating your link won't be shared. Users with the URL will be able to view the shared chat."
)}
{/if}
</div>
......
@@ -14,7 +14,7 @@
}}
class="flex h-5 min-h-5 w-9 shrink-0 cursor-pointer items-center rounded-full px-[3px] transition {state
? ' bg-emerald-600'
: 'bg-gray-200 dark:bg-transparent'} outline outline-gray-100 dark:outline-gray-800"
: 'bg-gray-200 dark:bg-transparent'} outline outline-1 outline-gray-100 dark:outline-gray-800"
>
<Switch.Thumb
class="pointer-events-none block size-4 shrink-0 rounded-full bg-white transition-transform data-[state=checked]:translate-x-3.5 data-[state=unchecked]:translate-x-0 data-[state=unchecked]:shadow-mini "
......
@@ -256,7 +256,7 @@
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "يمكن تنزيل 3 نماذج كحد أقصى في وقت واحد. الرجاء معاودة المحاولة في وقت لاحق.",
"May": "",
"Memory": "",
"Messages you send after creating your link won't be shared. Users with the URL will beable to view the shared chat.": "لن تتم مشاركة الرسائل التي ترسلها بعد إنشاء الرابط الخاص بك. سيتمكن المستخدمون الذين لديهم عنوان URL من عرض الدردشة المشتركة.",
"Messages you send after creating your link won't be shared. Users with the URL will be able to view the shared chat.": "",
"Minimum Score": "الحد الأدنى من النقاط",
"Mirostat": "Mirostat",
"Mirostat Eta": "Mirostat Eta",
......
@@ -256,7 +256,7 @@
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Максимум 3 модели могат да бъдат сваляни едновременно. Моля, опитайте отново по-късно.",
"May": "",
"Memory": "",
"Messages you send after creating your link won't be shared. Users with the URL will beable to view the shared chat.": "",
"Messages you send after creating your link won't be shared. Users with the URL will be able to view the shared chat.": "",
"Minimum Score": "",
"Mirostat": "Mirostat",
"Mirostat Eta": "Mirostat Eta",
......
@@ -256,7 +256,7 @@
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "একসঙ্গে সর্বোচ্চ তিনটি মডেল ডাউনলোড করা যায়। দয়া করে পরে আবার চেষ্টা করুন।",
"May": "",
"Memory": "",
"Messages you send after creating your link won't be shared. Users with the URL will beable to view the shared chat.": "",
"Messages you send after creating your link won't be shared. Users with the URL will be able to view the shared chat.": "",
"Minimum Score": "",
"Mirostat": "Mirostat",
"Mirostat Eta": "Mirostat Eta",
......
@@ -256,7 +256,7 @@
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Es poden descarregar un màxim de 3 models simultàniament. Si us plau, prova-ho més tard.",
"May": "",
"Memory": "",
"Messages you send after creating your link won't be shared. Users with the URL will beable to view the shared chat.": "",
"Messages you send after creating your link won't be shared. Users with the URL will be able to view the shared chat.": "",
"Minimum Score": "",
"Mirostat": "Mirostat",
"Mirostat Eta": "Eta de Mirostat",
......
@@ -256,7 +256,7 @@
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Es können maximal 3 Modelle gleichzeitig heruntergeladen werden. Bitte versuche es später erneut.",
"May": "Mai",
"Memory": "",
"Messages you send after creating your link won't be shared. Users with the URL will beable to view the shared chat.": "Fortlaudende Nachrichten in diesem Chat werden nicht automatisch geteilt. Benutzer mit dem Link können den Chat einsehen.",
"Messages you send after creating your link won't be shared. Users with the URL will be able to view the shared chat.": "",
"Minimum Score": "Mindestscore",
"Mirostat": "Mirostat",
"Mirostat Eta": "Mirostat Eta",
......
@@ -256,7 +256,7 @@
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Maximum of 3 models can be downloaded simultaneously. Please try again later.",
"May": "",
"Memory": "",
"Messages you send after creating your link won't be shared. Users with the URL will beable to view the shared chat.": "",
"Messages you send after creating your link won't be shared. Users with the URL will be able to view the shared chat.": "",
"Minimum Score": "",
"Mirostat": "Mirostat",
"Mirostat Eta": "Mirostat Eta",
......
@@ -256,7 +256,7 @@
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "",
"May": "",
"Memory": "",
"Messages you send after creating your link won't be shared. Users with the URL will beable to view the shared chat.": "",
"Messages you send after creating your link won't be shared. Users with the URL will be able to view the shared chat.": "",
"Minimum Score": "",
"Mirostat": "",
"Mirostat Eta": "",
......