Commit 562e40a7 authored by Jun Siang Cheah

Merge branch 'dev' into feat/trusted-email-header

parents 0e3b7a11 587a8c59
This diff is collapsed.
@@ -360,4 +360,4 @@
 	"You": "Tu",
 	"You're a helpful assistant.": "Sei un assistente utile.",
 	"You're now logged in.": "Ora hai effettuato l'accesso."
 }
\ No newline at end of file
@@ -360,4 +360,4 @@
 	"You": "あなた",
 	"You're a helpful assistant.": "あなたは役に立つアシスタントです。",
 	"You're now logged in.": "ログインしました。"
 }
\ No newline at end of file
This diff is collapsed.
@@ -39,6 +39,10 @@
 		"code": "ja-JP",
 		"title": "Japanese"
 	},
+	{
+		"code": "ko-KR",
+		"title": "Korean"
+	},
 	{
 		"code": "nl-NL",
 		"title": "Dutch (Netherlands)"
......
@@ -48,6 +48,7 @@
 	let messagesContainerElement: HTMLDivElement;
 	let currentRequestId = null;
+	let showModelSelector = false;
 	let selectedModels = [''];
 	let selectedModelfile = null;
@@ -533,6 +534,8 @@
 			console.log(docs);
+			console.log(model);
 			const res = await generateOpenAIChatCompletion(
 				localStorage.token,
 				{
@@ -585,7 +588,9 @@
 					max_tokens: $settings?.options?.num_predict ?? undefined,
 					docs: docs.length > 0 ? docs : undefined
 				},
-				model.source === 'litellm' ? `${LITELLM_API_BASE_URL}/v1` : `${OPENAI_API_BASE_URL}`
+				model?.source?.toLowerCase() === 'litellm'
+					? `${LITELLM_API_BASE_URL}/v1`
+					: `${OPENAI_API_BASE_URL}`
 			);
 			if (res && res.ok) {
@@ -776,7 +781,7 @@
 				titleModelId,
 				userPrompt,
 				titleModel?.external ?? false
-					? titleModel.source === 'litellm'
+					? titleModel?.source?.toLowerCase() === 'litellm'
 						? `${LITELLM_API_BASE_URL}/v1`
 						: `${OPENAI_API_BASE_URL}`
 					: `${OLLAMA_API_BASE_URL}/v1`
@@ -837,7 +842,16 @@
 </svelte:head>

 <div class="h-screen max-h-[100dvh] w-full flex flex-col">
-	<Navbar {title} shareEnabled={messages.length > 0} {initNewChat} {tags} {addTag} {deleteTag} />
+	<Navbar
+		{title}
+		bind:selectedModels
+		bind:showModelSelector
+		shareEnabled={messages.length > 0}
+		{initNewChat}
+		{tags}
+		{addTag}
+		{deleteTag}
+	/>
 	<div class="flex flex-col flex-auto">
 		<div
 			class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0"
@@ -849,15 +863,7 @@
 					messagesContainerElement.clientHeight + 5;
 			}}
 		>
-			<div
-				class="{$settings?.fullScreenMode ?? null
-					? 'max-w-full'
-					: 'max-w-2xl md:px-0'} mx-auto w-full px-4"
-			>
-				<ModelSelector bind:selectedModels />
-			</div>
-			<div class=" h-full w-full flex flex-col py-8">
+			<div class=" h-full w-full flex flex-col pt-2 pb-4">
 				<Messages
 					chatId={$chatId}
 					{selectedModels}
......
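The endpoint-selection change above swaps the strict model.source === 'litellm' comparison for an optional-chained, case-insensitive check before choosing between the LiteLLM and OpenAI-compatible base URLs; the same pattern recurs in the next file. A minimal standalone sketch of that routing logic in TypeScript (the helper name, Model shape, and constant values are illustrative assumptions, not part of this commit):

// Illustrative sketch only: the diff inlines this ternary; names and values
// below are assumptions for demonstration.
const LITELLM_API_BASE_URL = '/litellm/api';
const OPENAI_API_BASE_URL = '/openai/api';

interface Model {
	id: string;
	name: string;
	source?: string; // e.g. 'litellm'; may be absent for plain OpenAI models
}

// Optional chaining tolerates models without a `source`; toLowerCase()
// makes the comparison case-insensitive, unlike the old strict check.
const getCompletionBaseUrl = (model?: Model): string =>
	model?.source?.toLowerCase() === 'litellm'
		? `${LITELLM_API_BASE_URL}/v1`
		: OPENAI_API_BASE_URL;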
@@ -56,8 +56,10 @@
 	let currentRequestId = null;
 	// let chatId = $page.params.id;
+	let showModelSelector = false;
 	let selectedModels = [''];
 	let selectedModelfile = null;

 	$: selectedModelfile =
 		selectedModels.length === 1 &&
 		$modelfiles.filter((modelfile) => modelfile.tagName === selectedModels[0]).length > 0
@@ -600,7 +602,9 @@
 					max_tokens: $settings?.options?.num_predict ?? undefined,
 					docs: docs.length > 0 ? docs : undefined
 				},
-				model.source === 'litellm' ? `${LITELLM_API_BASE_URL}/v1` : `${OPENAI_API_BASE_URL}`
+				model?.source?.toLowerCase() === 'litellm'
+					? `${LITELLM_API_BASE_URL}/v1`
+					: `${OPENAI_API_BASE_URL}`
 			);
 			if (res && res.ok) {
@@ -791,7 +795,7 @@
 				titleModelId,
 				userPrompt,
 				titleModel?.external ?? false
-					? titleModel.source === 'litellm'
+					? titleModel?.source?.toLowerCase() === 'litellm'
 						? `${LITELLM_API_BASE_URL}/v1`
 						: `${OPENAI_API_BASE_URL}`
 					: `${OLLAMA_API_BASE_URL}/v1`
@@ -861,6 +865,8 @@
 <div class="min-h-screen max-h-screen w-full flex flex-col">
 	<Navbar
 		{title}
+		bind:selectedModels
+		bind:showModelSelector
 		shareEnabled={messages.length > 0}
 		initNewChat={async () => {
 			if (currentRequestId !== null) {
@@ -885,15 +891,7 @@
 					messagesContainerElement.clientHeight + 5;
 			}}
 		>
-			<div
-				class="{$settings?.fullScreenMode ?? null
-					? 'max-w-full'
-					: 'max-w-2xl md:px-0'} mx-auto w-full px-4"
-			>
-				<ModelSelector bind:selectedModels />
-			</div>
-			<div class=" h-full w-full flex flex-col py-8">
+			<div class=" h-full w-full flex flex-col py-4">
 				<Messages
 					chatId={$chatId}
 					{selectedModels}
......
@@ -18,6 +18,7 @@
 	import { splitStream } from '$lib/utils';
 	import ChatCompletion from '$lib/components/playground/ChatCompletion.svelte';
+	import Selector from '$lib/components/chat/ModelSelector/Selector.svelte';

 	const i18n = getContext('i18n');
@@ -315,27 +316,24 @@
 			</div>
 		</div>

-		<div class=" flex gap-1 px-1">
-			<select
-				id="models"
-				class="outline-none bg-transparent text-sm font-medium rounded-lg w-full placeholder-gray-400"
-				bind:value={selectedModelId}
-			>
-				<option class=" text-gray-800" value="" selected disabled
-					>{$i18n.t('Select a model')}</option
-				>
-				{#each $models as model}
-					{#if model.name === 'hr'}
-						<hr />
-					{:else}
-						<option value={model.id} class="text-gray-800 text-lg"
-							>{model.name +
-								`${model.size ? ` (${(model.size / 1024 ** 3).toFixed(1)}GB)` : ''}`}</option
-						>
-					{/if}
-				{/each}
-			</select>
+		<div class="flex flex-col gap-1 px-1 w-full">
+			<div class="flex w-full">
+				<div class="overflow-hidden w-full">
+					<div class="max-w-full">
+						<Selector
+							placeholder={$i18n.t('Select a model')}
+							items={$models
+								.filter((model) => model.name !== 'hr')
+								.map((model) => ({
+									value: model.id,
+									label: model.name,
+									info: model
+								}))}
+							bind:value={selectedModelId}
+						/>
+					</div>
+				</div>
+			</div>

 			<!-- <button
 				class=" self-center dark:hover:text-gray-300"
......
@@ -11,7 +11,7 @@
 	import '../tailwind.css';
 	import 'tippy.js/dist/tippy.css';
 	import { WEBUI_BASE_URL } from '$lib/constants';
-	import i18n from '$lib/i18n';
+	import i18n, { initI18n } from '$lib/i18n';

 	setContext('i18n', i18n);
@@ -25,6 +25,11 @@
 		if (backendConfig) {
 			// Save Backend Status to Store
 			await config.set(backendConfig);
+			if ($config.default_locale) {
+				initI18n($config.default_locale);
+			} else {
+				initI18n();
+			}
 			await WEBUI_NAME.set(backendConfig.name);

 			console.log(backendConfig);
......
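With this change the layout waits for the backend config before initializing i18n, so a backend-provided default_locale takes precedence over client-side detection. A minimal sketch of an initI18n that accepts an optional locale, assuming $lib/i18n wraps i18next (the option values and fallback here are illustrative, not taken from the repository):

// Hypothetical sketch of $lib/i18n's initI18n; the real module may configure
// detector plugins and resource loading differently.
import i18next from 'i18next';

export const initI18n = (defaultLocale?: string) => {
	i18next.init({
		// When the backend provides no default_locale, leave lng unset and
		// let the fallback (or a detector plugin) decide.
		lng: defaultLocale,
		fallbackLng: 'en-US',
		interpolation: { escapeValue: false }
	});
};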
@@ -3,7 +3,8 @@
 @tailwind utilities;

 @layer base {
-	html, pre {
+	html,
+	pre {
 		font-family: -apple-system, 'Arimo', ui-sans-serif, system-ui, 'Segoe UI', Roboto, Ubuntu,
 			Cantarell, 'Noto Sans', sans-serif, 'Helvetica Neue', Arial, 'Apple Color Emoji',
 			'Segoe UI Emoji', 'Segoe UI Symbol', 'Noto Color Emoji';
......
@@ -16,6 +16,12 @@ const config = {
 			assets: 'build',
 			fallback: 'index.html'
 		})
+	},
+	onwarn: (warning, handler) => {
+		const { code, _ } = warning;
+		if (code === 'css-unused-selector') return;
+		handler(warning);
 	}
 };
......
@@ -16,9 +16,8 @@ export default {
 				700: '#4e4e4e',
 				800: '#333',
 				850: '#262626',
-				900: '#171717',
-				950: '#0d0d0d'
+				900: 'var(--color-gray-900, #171717)',
+				950: 'var(--color-gray-950, #0d0d0d)'
 			}
 		},
 		typography: {
......
{
	"model_name": "string",
	"litellm_params": {
		"model": "ollama/mistral"
	}
}
\ No newline at end of file
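The JSON above matches the payload shape LiteLLM's proxy generally expects when registering a model: a public model_name plus litellm_params naming the backend model to route to. A hedged TypeScript sketch of submitting such a payload, assuming a proxy at a placeholder URL exposing a /model/new route with bearer-key auth (the URL, route, and auth scheme are assumptions, not taken from this commit):

// Placeholder values throughout: proxy URL, route, and key handling are
// assumptions for illustration only.
const LITELLM_PROXY_URL = 'http://localhost:4000';

const addLiteLLMModel = async (apiKey: string) => {
	const res = await fetch(`${LITELLM_PROXY_URL}/model/new`, {
		method: 'POST',
		headers: {
			Authorization: `Bearer ${apiKey}`,
			'Content-Type': 'application/json'
		},
		body: JSON.stringify({
			model_name: 'string', // name shown to clients of the proxy
			litellm_params: { model: 'ollama/mistral' } // backend model LiteLLM routes to
		})
	});
	if (!res.ok) throw new Error(`Failed to add model: ${res.status}`);
	return res.json();
};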