chenpangpang / open-webui / Commits / 02fb517b

Unverified commit 02fb517b, authored Feb 25, 2024 by Timothy Jaeryang Baek, committed by GitHub on Feb 25, 2024

Merge pull request #907 from open-webui/dev

0.1.103

Parents: 6676f0b4, 0bd3ec9e
Changes: 27

Showing 7 changed files with 770 additions and 437 deletions (+770 -437)
src/lib/components/chat/Settings/Images.svelte   +29  -6
src/lib/components/chat/Settings/Models.svelte   +596 -265
src/lib/components/chat/SettingsModal.svelte     +21  -14
src/lib/constants.ts                             +2   -0
src/routes/(app)/+layout.svelte                  +23  -74
src/routes/(app)/+page.svelte                    +93  -78
test.json                                        +6   -0
src/lib/components/chat/Settings/Images.svelte
...
...
@@ -12,7 +12,9 @@
toggleImageGenerationEnabledStatus,
updateAUTOMATIC1111Url,
updateDefaultDiffusionModel,
- updateImageSize
+ updateImageSize,
+ getImageSteps,
+ updateImageSteps
} from '$lib/apis/images';
import { getBackendConfig } from '$lib/apis';
const dispatch = createEventDispatcher();
...
...
@@ -21,20 +23,23 @@
let loading = false;
- let enableImageGeneration = true;
+ let enableImageGeneration = false;
let AUTOMATIC1111_BASE_URL = '';
let selectedModel = '';
- let models = [];
+ let models = null;
let imageSize = '';
let steps = 50;
const getModels = async () => {
models = await getDiffusionModels(localStorage.token).catch((error) => {
toast.error(error);
return null;
});
- selectedModel = await getDefaultDiffusionModel(localStorage.token);
+ selectedModel = await getDefaultDiffusionModel(localStorage.token).catch((error) => {
+ return '';
+ });
};
const updateAUTOMATIC1111UrlHandler = async () => {
...
...
@@ -83,6 +88,7 @@
if (enableImageGeneration && AUTOMATIC1111_BASE_URL) {
imageSize = await getImageSize(localStorage.token);
steps = await getImageSteps(localStorage.token);
getModels();
}
}
...
...
@@ -98,12 +104,16 @@
toast.error(error);
return null;
});
await updateImageSteps(localStorage.token, steps).catch((error) => {
toast.error(error);
return null;
});
dispatch('save');
loading = false;
}}
>
<div class=" space-y-3 pr-1.5 overflow-y-scroll max-h-[2
1
rem]">
<div class=" space-y-3 pr-1.5 overflow-y-scroll max-h-[2
0.5
rem]">
<div>
<div class=" mb-1 text-sm font-medium">Image Settings</div>
...
...
@@ -188,7 +198,7 @@
{#if !selectedModel}
<option value="" disabled selected>Select a model</option>
{/if}
- {#each models as model}
+ {#each models ?? [] as model}
<option value={model.title} class="bg-gray-100 dark:bg-gray-700"
>{model.model_name}</option
>
...
...
@@ -210,6 +220,19 @@
</div>
</div>
</div>
<div>
<div class=" mb-2.5 text-sm font-medium">Set Steps</div>
<div class="flex w-full">
<div class="flex-1 mr-2">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
placeholder="Enter Number of Steps (e.g. 50)"
bind:value={steps}
/>
</div>
</div>
</div>
{/if}
</div>
...
...
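The new getImageSteps / updateImageSteps helpers are imported from $lib/apis/images, but their implementations are outside this diff. A minimal sketch of how such fetch wrappers could look; the /images/api/v1/steps paths and the IMAGE_STEPS response field are assumptions, not confirmed by this commit:

import { WEBUI_BASE_URL } from '$lib/constants';

// Assumed base path; the path actually used by $lib/apis/images is not shown in this diff.
const IMAGES_API_BASE_URL = `${WEBUI_BASE_URL}/images/api/v1`;

export const getImageSteps = async (token: string): Promise<number> => {
	const res = await fetch(`${IMAGES_API_BASE_URL}/steps`, {
		method: 'GET',
		headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${token}` }
	});
	if (!res.ok) throw await res.json();
	return (await res.json()).IMAGE_STEPS; // response field name is an assumption
};

export const updateImageSteps = async (token: string, steps: number): Promise<number> => {
	const res = await fetch(`${IMAGES_API_BASE_URL}/steps/update`, {
		method: 'POST',
		headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${token}` },
		body: JSON.stringify({ steps })
	});
	if (!res.ok) throw await res.json();
	return (await res.json()).IMAGE_STEPS;
};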
src/lib/components/chat/Settings/Models.svelte
...
...
@@ -2,14 +2,33 @@
import queue from 'async/queue';
import toast from 'svelte-french-toast';
- import { createModel, deleteModel, pullModel } from '$lib/apis/ollama';
+ import { createModel, deleteModel, getOllamaVersion, pullModel } from '$lib/apis/ollama';
import { WEBUI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
import { WEBUI_NAME, models, user } from '$lib/stores';
import { splitStream } from '$lib/utils';
import { onMount } from 'svelte';
import { addLiteLLMModel, deleteLiteLLMModel, getLiteLLMModelInfo } from '$lib/apis/litellm';
export let getModels: Function;
let showLiteLLM = false;
let showLiteLLMParams = false;
let liteLLMModelInfo = [];
let liteLLMModel = '';
let liteLLMModelName = '';
let liteLLMAPIBase = '';
let liteLLMAPIKey = '';
let liteLLMRPM = '';
let deleteLiteLLMModelId = '';
$: liteLLMModelName = liteLLMModel;
// Models
let showExperimentalOllama = false;
let ollamaVersion = '';
const MAX_PARALLEL_DOWNLOADS = 3;
const modelDownloadQueue = queue(
(task: { modelName: string }, cb) =>
...
...
@@ -286,22 +305,82 @@
opts.callback({ success: true, modelName: opts.modelName });
}
};
const addLiteLLMModelHandler = async () => {
if (!liteLLMModelInfo.find((info) => info.model_name === liteLLMModelName)) {
const res = await addLiteLLMModel(localStorage.token, {
name: liteLLMModelName,
model: liteLLMModel,
api_base: liteLLMAPIBase,
api_key: liteLLMAPIKey,
rpm: liteLLMRPM
}).catch((error) => {
toast.error(error);
return null;
});
if (res) {
if (res.message) {
toast.success(res.message);
}
}
} else {
toast.error(`Model ${liteLLMModelName} already exists.`);
}
liteLLMModelName = '';
liteLLMModel = '';
liteLLMAPIBase = '';
liteLLMAPIKey = '';
liteLLMRPM = '';
liteLLMModelInfo = await getLiteLLMModelInfo(localStorage.token);
models.set(await getModels());
};
const deleteLiteLLMModelHandler = async () => {
const res = await deleteLiteLLMModel(localStorage.token, deleteLiteLLMModelId).catch(
(error) => {
toast.error(error);
return null;
}
);
if (res) {
if (res.message) {
toast.success(res.message);
}
}
deleteLiteLLMModelId = '';
liteLLMModelInfo = await getLiteLLMModelInfo(localStorage.token);
models.set(await getModels());
};
onMount(async () => {
ollamaVersion = await getOllamaVersion(localStorage.token).catch((error) => false);
liteLLMModelInfo = await getLiteLLMModelInfo(localStorage.token);
});
</script>
<div class="flex flex-col h-full justify-between text-sm">
<div class=" space-y-3 pr-1.5 overflow-y-scroll h-80">
<div class=" space-y-3 pr-1.5 overflow-y-scroll h-[23rem]">
{#if ollamaVersion}
<div class="space-y-2 pr-1.5">
<div>
<div class=" mb-2.5 text-sm font-medium">Pull a model from Ollama.com</div>
<div class=" mb-2 text-sm font-medium">Manage Ollama Models</div>
<div class=" mb-2 text-sm font-medium">Pull a model from Ollama.com</div>
<div class="flex w-full">
<div class="flex-1 mr-2">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-8
0
0 outline-none"
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-8
5
0 outline-none"
placeholder="Enter model tag (e.g. mistral:7b)"
bind:value={modelTag}
/>
</div>
<button
class="px-3
text
-gray-100
bg-emerald-600 hover:bg-emerald-700 disabled
:bg-gray-
7
00 d
isabled:cursor-not-allowed
rounded transition"
class="px-3
bg
-gray-100
hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover
:bg-gray-
8
00 d
ark:text-gray-100
rounded transition"
on:click={() => {
pullModelHandler();
}}
...
...
@@ -379,14 +458,12 @@
{/if}
</div>
<hr class=" dark:border-gray-700" />
<div>
<div class=" mb-2
.5
text-sm font-medium">Delete a model</div>
<div class=" mb-2 text-sm font-medium">Delete a model</div>
<div class="flex w-full">
<div class="flex-1 mr-2">
<select
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-8
0
0 outline-none"
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-8
5
0 outline-none"
bind:value={deleteModelTag}
placeholder="Select a model"
>
...
...
@@ -401,7 +478,7 @@
</select>
</div>
<button
class="px-3 bg-
red-7
00 hover:bg-
red-800
text-gray-100 rounded transition"
class="px-3 bg-
gray-1
00 hover:bg-
gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:
text-gray-100 rounded transition"
on:click={() => {
deleteModelHandler();
}}
...
...
@@ -422,21 +499,27 @@
</div>
</div>
<hr class=" dark:border-gray-700" />
<div>
<div class="flex justify-between items-center text-xs">
<div class=" text-sm font-medium">Experimental</div>
<button
class=" text-xs font-medium text-gray-500"
type="button"
on:click={() => {
showExperimentalOllama = !showExperimentalOllama;
}}>{showExperimentalOllama ? 'Show' : 'Hide'}</button
>
</div>
</div>
{#if showExperimentalOllama}
<form
on:submit|preventDefault={() => {
uploadModelHandler();
}}
>
<div class=" mb-2 flex w-full justify-between">
<div class=" text-sm font-medium">
Upload a GGUF model <a
class=" text-xs font-medium text-gray-500 underline"
href="https://github.com/jmorganca/ollama/blob/main/README.md#import-from-gguf"
target="_blank">(Experimental)</a
>
</div>
<div class=" text-sm font-medium">Upload a GGUF model</div>
<button
class="p-1 px-3 text-xs flex rounded transition"
...
...
@@ -475,7 +558,7 @@
<button
type="button"
class="w-full rounded text-left py-2 px-4 dark:text-gray-300 dark:bg-gray-8
0
0"
class="w-full rounded text-left py-2 px-4 dark:text-gray-300 dark:bg-gray-8
5
0"
on:click={() => {
document.getElementById('model-upload-input').click();
}}
...
...
@@ -490,7 +573,7 @@
{:else}
<div class="flex-1 {modelFileUrl !== '' ? 'mr-2' : ''}">
<input
class="w-full rounded text-left py-2 px-4 dark:text-gray-300 dark:bg-gray-8
0
0 outline-none {modelFileUrl !==
class="w-full rounded text-left py-2 px-4 dark:text-gray-300 dark:bg-gray-8
5
0 outline-none {modelFileUrl !==
''
? 'mr-2'
: ''}"
...
...
@@ -592,5 +675,253 @@
</div>
{/if}
</form>
{/if}
</div>
<hr class=" dark:border-gray-700 my-2" />
{/if}
<div class=" space-y-3">
<div class="mt-2 space-y-3 pr-1.5">
<div>
<div class=" mb-2 text-sm font-medium">Manage LiteLLM Models</div>
<div>
<div class="flex justify-between items-center text-xs">
<div class=" text-sm font-medium">Add a model</div>
<button
class=" text-xs font-medium text-gray-500"
type="button"
on:click={() => {
showLiteLLMParams = !showLiteLLMParams;
}}>{showLiteLLMParams ? 'Advanced' : 'Default'}</button
>
</div>
</div>
<div class="my-2 space-y-2">
<div class="flex w-full mb-1.5">
<div class="flex-1 mr-2">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter LiteLLM Model (litellm_params.model)"
bind:value={liteLLMModel}
autocomplete="off"
/>
</div>
<button
class="px-3 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded transition"
on:click={() => {
addLiteLLMModelHandler();
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M8.75 3.75a.75.75 0 0 0-1.5 0v3.5h-3.5a.75.75 0 0 0 0 1.5h3.5v3.5a.75.75 0 0 0 1.5 0v-3.5h3.5a.75.75 0 0 0 0-1.5h-3.5v-3.5Z"
/>
</svg>
</button>
</div>
{#if showLiteLLMParams}
<div>
<div class=" mb-1.5 text-sm font-medium">Model Name</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter Model Name (model_name)"
bind:value={liteLLMModelName}
autocomplete="off"
/>
</div>
</div>
</div>
<div>
<div class=" mb-1.5 text-sm font-medium">API Base URL</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter LiteLLM API Base URL (litellm_params.api_base)"
bind:value={liteLLMAPIBase}
autocomplete="off"
/>
</div>
</div>
</div>
<div>
<div class=" mb-1.5 text-sm font-medium">API Key</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter LiteLLM API Key (litellm_params.api_key)"
bind:value={liteLLMAPIKey}
autocomplete="off"
/>
</div>
</div>
</div>
<div>
<div class="mb-1.5 text-sm font-medium">API RPM</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter LiteLLM API RPM (litellm_params.rpm)"
bind:value={liteLLMRPM}
autocomplete="off"
/>
</div>
</div>
</div>
{/if}
</div>
<div class="mb-2 text-xs text-gray-400 dark:text-gray-500">
Not sure what to add?
<a
class=" text-gray-300 font-medium"
href="https://litellm.vercel.app/docs/proxy/configs#quick-start"
target="_blank"
>
Click here for help.
</a>
</div>
<div>
<div class=" mb-2.5 text-sm font-medium">Delete a model</div>
<div class="flex w-full">
<div class="flex-1 mr-2">
<select
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
bind:value={deleteLiteLLMModelId}
placeholder="Select a model"
>
{#if !deleteLiteLLMModelId}
<option value="" disabled selected>Select a model</option>
{/if}
{#each liteLLMModelInfo as model}
<option value={model.model_info.id} class="bg-gray-100 dark:bg-gray-700"
>{model.model_name}</option
>
{/each}
</select>
</div>
<button
class="px-3 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded transition"
on:click={() => {
deleteLiteLLMModelHandler();
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M5 3.25V4H2.75a.75.75 0 0 0 0 1.5h.3l.815 8.15A1.5 1.5 0 0 0 5.357 15h5.285a1.5 1.5 0 0 0 1.493-1.35l.815-8.15h.3a.75.75 0 0 0 0-1.5H11v-.75A2.25 2.25 0 0 0 8.75 1h-1.5A2.25 2.25 0 0 0 5 3.25Zm2.25-.75a.75.75 0 0 0-.75.75V4h3v-.75a.75.75 0 0 0-.75-.75h-1.5ZM6.05 6a.75.75 0 0 1 .787.713l.275 5.5a.75.75 0 0 1-1.498.075l-.275-5.5A.75.75 0 0 1 6.05 6Zm3.9 0a.75.75 0 0 1 .712.787l-.275 5.5a.75.75 0 0 1-1.498-.075l.275-5.5a.75.75 0 0 1 .786-.711Z"
clip-rule="evenodd"
/>
</svg>
</button>
</div>
</div>
</div>
</div>
<!-- <div class="mt-2 space-y-3 pr-1.5">
<div>
<div class=" mb-2.5 text-sm font-medium">Add LiteLLM Model</div>
<div class="flex w-full mb-2">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
placeholder="Enter LiteLLM Model (e.g. ollama/mistral)"
bind:value={liteLLMModel}
autocomplete="off"
/>
</div>
</div>
<div class="flex justify-between items-center text-sm">
<div class=" font-medium">Advanced Model Params</div>
<button
class=" text-xs font-medium text-gray-500"
type="button"
on:click={() => {
showLiteLLMParams = !showLiteLLMParams;
}}>{showLiteLLMParams ? 'Hide' : 'Show'}</button
>
</div>
{#if showLiteLLMParams}
<div>
<div class=" mb-2.5 text-sm font-medium">LiteLLM API Key</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
placeholder="Enter LiteLLM API Key (e.g. os.environ/AZURE_API_KEY_CA)"
bind:value={liteLLMAPIKey}
autocomplete="off"
/>
</div>
</div>
</div>
<div>
<div class=" mb-2.5 text-sm font-medium">LiteLLM API Base URL</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
placeholder="Enter LiteLLM API Base URL"
bind:value={liteLLMAPIBase}
autocomplete="off"
/>
</div>
</div>
</div>
<div>
<div class=" mb-2.5 text-sm font-medium">LiteLLM API RPM</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
placeholder="Enter LiteLLM API RPM"
bind:value={liteLLMRPM}
autocomplete="off"
/>
</div>
</div>
</div>
{/if}
<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
Not sure what to add?
<a
class=" text-gray-300 font-medium"
href="https://litellm.vercel.app/docs/proxy/configs#quick-start"
target="_blank"
>
Click here for help.
</a>
</div>
</div>
</div> -->
</div>
</div>
</div>
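The component above calls getLiteLLMModelInfo, addLiteLLMModel and deleteLiteLLMModel from $lib/apis/litellm, which are not part of this diff. A rough sketch of what such wrappers could look like, assuming the LiteLLM proxy's /model/info and /model/new endpoints are mounted under LITELLM_API_BASE_URL (paths and response shapes are assumptions):

import { LITELLM_API_BASE_URL } from '$lib/constants';

export const getLiteLLMModelInfo = async (token: string) => {
	const res = await fetch(`${LITELLM_API_BASE_URL}/model/info`, {
		headers: { Authorization: `Bearer ${token}` }
	});
	if (!res.ok) throw await res.json();
	return (await res.json()).data ?? []; // each entry carries model_name and model_info.id
};

export const addLiteLLMModel = async (
	token: string,
	payload: { name: string; model: string; api_base: string; api_key: string; rpm: string }
) => {
	const res = await fetch(`${LITELLM_API_BASE_URL}/model/new`, {
		method: 'POST',
		headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${token}` },
		// Map the form fields onto a LiteLLM model definition (model_name + litellm_params).
		body: JSON.stringify({
			model_name: payload.name,
			litellm_params: {
				model: payload.model,
				...(payload.api_base ? { api_base: payload.api_base } : {}),
				...(payload.api_key ? { api_key: payload.api_key } : {}),
				...(payload.rpm ? { rpm: parseInt(payload.rpm) } : {})
			}
		})
	});
	if (!res.ok) throw await res.json();
	return res.json(); // expected to contain a human-readable message on success
};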
src/lib/components/chat/SettingsModal.svelte
...
...
@@ -4,6 +4,7 @@
import { getOllamaModels } from '$lib/apis/ollama';
import { getOpenAIModels } from '$lib/apis/openai';
import { getLiteLLMModels } from '$lib/apis/litellm';
import Modal from '../common/Modal.svelte';
import Account from './Settings/Account.svelte';
...
...
@@ -27,23 +28,29 @@
let selectedTab = 'general';
- const getModels = async (type = 'all') => {
- const models = [];
- models.push(
- ...(await getOllamaModels(localStorage.token).catch((error) => {
- toast.error(error);
- return [];
- }))
- );
- if (type === 'all') {
- const openAIModels = await getOpenAIModels(localStorage.token).catch((error) => {
+ const getModels = async () => {
+ let models = await Promise.all([
+ await getOllamaModels(localStorage.token).catch((error) => {
console.log(error);
return null;
- });
- models.push(...(openAIModels ? [{ name: 'hr' }, ...openAIModels] : []));
- }
+ }),
+ await getOpenAIModels(localStorage.token).catch((error) => {
+ console.log(error);
+ return null;
+ }),
+ await getLiteLLMModels(localStorage.token).catch((error) => {
+ console.log(error);
+ return null;
+ })
+ ]);
+ models = models
+ .filter((models) => models)
+ .reduce((a, e, i, arr) => a.concat(e, ...(i < arr.length - 1 ? [{ name: 'hr' }] : [])), []);
+ // models.push(...(ollamaModels ? [{ name: 'hr' }, ...ollamaModels] : []));
+ // models.push(...(openAIModels ? [{ name: 'hr' }, ...openAIModels] : []));
+ // models.push(...(liteLLMModels ? [{ name: 'hr' }, ...liteLLMModels] : []));
return models;
};
</script>
...
...
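The rewritten getModels fetches the three provider lists in parallel, drops any provider that failed (resolved to null), and joins the remaining groups with an { name: 'hr' } separator entry that the model picker renders as a divider. A standalone sketch of the same merge, with the provider calls stubbed out (the stubs are illustrative, not the real $lib/apis functions):

type Model = { name: string; [key: string]: unknown };

// Stand-ins for getOllamaModels / getOpenAIModels / getLiteLLMModels.
const providers: Array<() => Promise<Model[] | null>> = [
	async () => [{ name: 'mistral:7b' }],
	async () => null, // a failed provider resolves to null and is filtered out
	async () => [{ name: 'gpt-4' }]
];

const getModels = async (): Promise<Model[]> => {
	const groups = await Promise.all(providers.map((p) => p().catch(() => null)));
	return groups
		.filter((g): g is Model[] => Boolean(g))
		.reduce<Model[]>(
			(a, e, i, arr) => a.concat(e, ...(i < arr.length - 1 ? [{ name: 'hr' }] : [])),
			[]
		);
};

getModels().then((models) => console.log(models.map((m) => m.name)));
// → [ 'mistral:7b', 'hr', 'gpt-4' ]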
src/lib/constants.ts
...
...
@@ -5,6 +5,8 @@ export const APP_NAME = 'Open WebUI';
export const WEBUI_BASE_URL = dev ? `http://${location.hostname}:8080` : ``;
export const WEBUI_API_BASE_URL = `${WEBUI_BASE_URL}/api/v1`;
+ export const LITELLM_API_BASE_URL = `${WEBUI_BASE_URL}/litellm/api`;
export const OLLAMA_API_BASE_URL = `${WEBUI_BASE_URL}/ollama/api`;
export const OPENAI_API_BASE_URL = `${WEBUI_BASE_URL}/openai/api`;
export const AUDIO_API_BASE_URL = `${WEBUI_BASE_URL}/audio/api/v1`;
...
...
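Every API base is derived from WEBUI_BASE_URL, so in dev mode requests target the backend on port 8080 of the current host, while production builds use origin-relative paths. For example (hostname value is illustrative):

// dev === true, location.hostname === 'localhost'  (illustrative)
//   WEBUI_BASE_URL       → 'http://localhost:8080'
//   LITELLM_API_BASE_URL → 'http://localhost:8080/litellm/api'
// dev === false
//   WEBUI_BASE_URL       → ''
//   LITELLM_API_BASE_URL → '/litellm/api'  (relative to the serving origin)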
src/routes/(app)/+layout.svelte
...
...
@@ -11,6 +11,7 @@
import { getModelfiles } from '$lib/apis/modelfiles';
import { getPrompts } from '$lib/apis/prompts';
import { getOpenAIModels } from '$lib/apis/openai';
import { getLiteLLMModels } from '$lib/apis/litellm';
import { getDocs } from '$lib/apis/documents';
import { getAllChatTags } from '$lib/apis/chats';
...
...
@@ -43,24 +44,28 @@
let showShortcuts = false;
const getModels = async () => {
- let models = [];
- models.push(
- ...(await getOllamaModels(localStorage.token).catch((error) => {
- toast.error(error);
- return [];
- }))
- );
- // $settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1',
- // $settings.OPENAI_API_KEY
- const openAIModels = await getOpenAIModels(localStorage.token).catch((error) => {
+ let models = await Promise.all([
+ await getOllamaModels(localStorage.token).catch((error) => {
console.log(error);
return null;
- });
+ }),
+ await getOpenAIModels(localStorage.token).catch((error) => {
+ console.log(error);
+ return null;
+ }),
+ await getLiteLLMModels(localStorage.token).catch((error) => {
+ console.log(error);
+ return null;
+ })
+ ]);
- models.push(...(openAIModels ? [{ name: 'hr' }, ...openAIModels] : []));
+ models = models
+ .filter((models) => models)
+ .reduce((a, e, i, arr) => a.concat(e, ...(i < arr.length - 1 ? [{ name: 'hr' }] : [])), []);
+ // models.push(...(ollamaModels ? [{ name: 'hr' }, ...ollamaModels] : []));
+ // models.push(...(openAIModels ? [{ name: 'hr' }, ...openAIModels] : []));
+ // models.push(...(liteLLMModels ? [{ name: 'hr' }, ...liteLLMModels] : []));
return models;
};
...
...
@@ -117,8 +122,6 @@
await models.set(await getModels());
});
await setOllamaVersion();
document.addEventListener('keydown', function (event) {
const isCtrlPressed = event.ctrlKey || event.metaKey; // metaKey is for Cmd key on Mac
// Check if the Shift key is pressed
...
...
@@ -250,60 +253,6 @@
</div>
</div>
</div>
{:else if checkVersion(REQUIRED_OLLAMA_VERSION, ollamaVersion ?? '0')}
<div class="fixed w-full h-full flex z-50">
<div
class="absolute w-full h-full backdrop-blur-md bg-white/20 dark:bg-gray-900/50 flex justify-center"
>
<div class="m-auto pb-44 flex flex-col justify-center">
<div class="max-w-md">
<div class="text-center dark:text-white text-2xl font-medium z-50">
Connection Issue or Update Needed
</div>
<div class=" mt-4 text-center text-sm dark:text-gray-200 w-full">
Oops! It seems like your Ollama needs a little attention. <br
class=" hidden sm:flex"
/>We've detected either a connection hiccup or observed that you're using an older
version. Ensure you're on the latest Ollama version
<br class=" hidden sm:flex" />(version
<span class=" dark:text-white font-medium">{REQUIRED_OLLAMA_VERSION} or higher</span
>) or check your connection.
<div class="mt-1 text-sm">
Trouble accessing Ollama?
<a
class=" text-black dark:text-white font-semibold underline"
href="https://github.com/open-webui/open-webui#troubleshooting"
target="_blank"
>
Click here for help.
</a>
</div>
</div>
<div class=" mt-6 mx-auto relative group w-fit">
<button
class="relative z-20 flex px-5 py-2 rounded-full bg-white border border-gray-100 dark:border-none hover:bg-gray-100 transition font-medium text-sm"
on:click={async () => {
location.href = '/';
// await setOllamaVersion();
}}
>
Check Again
</button>
<button
class="text-xs text-center w-full mt-2 text-gray-400 underline"
on:click={async () => {
await setOllamaVersion(REQUIRED_OLLAMA_VERSION);
}}>Close</button
>
</div>
</div>
</div>
</div>
</div>
{:else if localDBChats.length > 0}
<div class="fixed w-full h-full flex z-50">
<div
...
...
src/routes/(app)/+page.svelte
...
...
@@ -36,6 +36,7 @@
import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
import Navbar from '$lib/components/layout/Navbar.svelte';
import { RAGTemplate } from '$lib/utils/rag';
+ import { LITELLM_API_BASE_URL, OPENAI_API_BASE_URL } from '$lib/constants';
import { WEBUI_BASE_URL } from '$lib/constants';

let stopResponseFlag = false;
...
...
@@ -132,6 +133,10 @@
selectedModels = [''];
}

+ selectedModels = selectedModels.map((modelId) => $models.map((m) => m.id).includes(modelId) ? modelId : '');

let _settings = JSON.parse(localStorage.getItem('settings') ?? '{}');
settings.set({
..._settings
...
...
@@ -150,6 +155,10 @@
const submitPrompt = async (userPrompt, _user = null) => {
console.log('submitPrompt', $chatId);

+ selectedModels = selectedModels.map((modelId) => $models.map((m) => m.id).includes(modelId) ? modelId : '');

if (selectedModels.includes('')) {
toast.error('Model not selected');
} else if (messages.length != 0 && messages.at(-1).done != true) {
...
...
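Both on chat load and again in submitPrompt, the selected model IDs are re-validated against the current $models store; any ID that is no longer available collapses to '' so the submit path can refuse to send. A small standalone illustration of that check (values are illustrative):

const $models = [{ id: 'mistral:7b' }, { id: 'gpt-4' }];
let selectedModels = ['mistral:7b', 'removed-model'];

// Same expression as in the diff: keep IDs that still exist, blank out the rest.
selectedModels = selectedModels.map((modelId) =>
	$models.map((m) => m.id).includes(modelId) ? modelId : ''
);
console.log(selectedModels); // [ 'mistral:7b', '' ]

if (selectedModels.includes('')) {
	console.error('Model not selected'); // toast.error(...) in the component
}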
@@ -278,10 +287,10 @@
}

await Promise.all(
- selectedModels.map(async (model) => {
- console.log(model);
- const modelTag = $models.filter((m) => m.name === model).at(0);
+ selectedModels.map(async (modelId) => {
+ const model = $models.filter((m) => m.id === modelId).at(0);
+ if (model) {
// Create response message
let responseMessageId = uuidv4();
let responseMessage = {
...
...
@@ -290,7 +299,7 @@
childrenIds: [],
role: 'assistant',
content: '',
- model: model,
+ model: model.id,
timestamp: Math.floor(Date.now() / 1000) // Unix epoch
};
...
...
@@ -306,12 +315,13 @@
];
}

- if (modelTag?.external) {
+ if (model?.external) {
await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
- } else if (modelTag) {
+ } else if (model) {
await sendPromptOllama(model, prompt, responseMessageId, _chatId);
+ }
} else {
- toast.error(`Model ${model} not found`);
+ toast.error(`Model ${modelId} not found`);
}
})
);
...
...
@@ -320,6 +330,7 @@
};

const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
+ model = model.id;
const responseMessage = history.messages[responseMessageId];
// Wait until history/message have been updated
...
...
@@ -531,8 +542,10 @@
const responseMessage = history.messages[responseMessageId];
scrollToBottom();

const res = await generateOpenAIChatCompletion(localStorage.token, {
- model: model,
+ model: model.id,
stream: true,
messages: [
$settings.system
...
...
@@ -578,7 +591,9 @@
num_ctx: $settings?.options?.num_ctx ?? undefined,
frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
max_tokens: $settings?.options?.num_predict ?? undefined
- });
+ },
+ model.source === 'litellm' ? `${LITELLM_API_BASE_URL}/v1` : `${OPENAI_API_BASE_URL}`
+ );

if (res && res.ok) {
const reader = res.body
...
...
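The net effect of the +page.svelte changes: the full model object (not just its name) is passed around, and OpenAI-compatible requests are routed either to the LiteLLM proxy or to the OpenAI proxy based on model.source. A condensed sketch of that routing; the generateOpenAIChatCompletion signature and the /chat/completions path are assumptions based on the OpenAI-compatible API, not taken verbatim from this diff:

import { LITELLM_API_BASE_URL, OPENAI_API_BASE_URL } from '$lib/constants';

type UIModel = { id: string; source?: string; external?: boolean };

// Stub standing in for $lib/apis/openai's generateOpenAIChatCompletion (signature assumed).
const generateOpenAIChatCompletion = async (
	token: string,
	body: { model: string; stream: boolean; messages: unknown[] },
	url: string
): Promise<Response> =>
	fetch(`${url}/chat/completions`, {
		method: 'POST',
		headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${token}` },
		body: JSON.stringify(body)
	});

// Same ternary as in the diff: LiteLLM-sourced models go through the LiteLLM proxy,
// everything else through the OpenAI proxy.
const completionUrlFor = (model: UIModel): string =>
	model.source === 'litellm' ? `${LITELLM_API_BASE_URL}/v1` : `${OPENAI_API_BASE_URL}`;

const sendPromptOpenAI = async (token: string, model: UIModel, messages: unknown[]) =>
	generateOpenAIChatCompletion(token, { model: model.id, stream: true, messages }, completionUrlFor(model));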
test.json
new file mode 100644
{
  "model_name": "string",
  "litellm_params": {
    "model": "ollama/mistral"
  }
}
\ No newline at end of file
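test.json is a sample of the LiteLLM model definition shape (model_name plus litellm_params) that the new "Manage LiteLLM Models" form ultimately submits. A sketch of the mapping from the form fields, with the field names taken from the handler above and the payload shape assumed to match this file:

// What addLiteLLMModelHandler collects from the form ...
const form = { name: 'string', model: 'ollama/mistral', api_base: '', api_key: '', rpm: '' };

// ... and the LiteLLM-style definition it corresponds to (same shape as test.json).
const definition = {
	model_name: form.name,
	litellm_params: {
		model: form.model
	}
};
console.log(JSON.stringify(definition, null, 2));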