Commit 468c6398 authored by Timothy J. Baek

feat: unified models integration

parent e80e4c30
@@ -207,7 +207,7 @@ def merge_models_lists(model_lists):
             [
                 {
                     **model,
-                    "name": model["id"],
+                    "name": model.get("name", model["id"]),
                     "owned_by": "openai",
                     "openai": model,
                     "urlIdx": idx,
@@ -319,6 +319,8 @@ async def proxy(path: str, request: Request, user=Depends(get_verified_user)):
         body = body.decode("utf-8")
         body = json.loads(body)
 
+        print(app.state.MODELS)
+
         model = app.state.MODELS[body.get("model")]
         idx = model["urlIdx"]
......
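The backend half of this change folds every OpenAI-listed model into one unified shape. A minimal TypeScript sketch of that shape as the frontend now consumes it (field names come from the diff above; the interface and helper names are illustrative, not part of the commit):

```ts
// Unified model entry implied by merge_models_lists() above.
// Only the field names come from the diff; everything else is a sketch.
interface UnifiedModel {
	id: string;
	name: string; // falls back to the id when the upstream list has no name
	owned_by: 'openai' | 'ollama';
	openai?: unknown; // raw entry from the OpenAI-compatible /models endpoint
	ollama?: unknown; // raw entry from Ollama /api/tags
	urlIdx?: number; // which configured OpenAI base URL the model came from
}

// Mirror of the list comprehension in merge_models_lists().
const mergeOpenAIModelLists = (modelLists: { id: string; name?: string }[][]): UnifiedModel[] =>
	modelLists.flatMap((models, idx) =>
		models.map((model) => ({
			...model,
			name: model.name ?? model.id,
			owned_by: 'openai' as const,
			openai: model,
			urlIdx: idx
		}))
	);
```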
@@ -276,13 +276,11 @@ async def get_models(user=Depends(get_verified_user)):
     if app.state.config.ENABLE_OPENAI_API:
         openai_models = await get_openai_models()
-        openai_app.state.MODELS = openai_models
         openai_models = openai_models["data"]
 
     if app.state.config.ENABLE_OLLAMA_API:
         ollama_models = await get_ollama_models()
-        ollama_app.state.MODELS = ollama_models
 
         print(ollama_models)
......
@@ -27,7 +27,7 @@ export const getModels = async (token: string = '') => {
 	let models = res?.data ?? [];
-	models = models.filter((models) => models).reduce((a, e, i, arr) => a.concat(e), []);
+	models = models.filter((models) => models).sort((a, b) => (a.name > b.name ? 1 : -1));
 	console.log(models);
 	return models;
......
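Since the backend now returns one flat, merged list, the client no longer flattens per-endpoint arrays; it only drops empty entries and orders by name. A hedged sketch of that ordering (type and helper names assumed):

```ts
// Client-side ordering used by getModels() after the change above.
type NamedModel = { id: string; name: string };

const sortModelsByName = (models: NamedModel[]): NamedModel[] =>
	models.filter((m) => m).sort((a, b) => (a.name > b.name ? 1 : -1));

// e.g. [{ id: 'b', name: 'mistral' }, { id: 'a', name: 'gpt-4' }] -> gpt-4, mistral
```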
@@ -11,7 +11,6 @@
 		chats,
 		config,
 		type Model,
-		modelfiles,
 		models,
 		settings,
 		showSidebar,
@@ -63,24 +62,6 @@
 	let selectedModels = [''];
 	let atSelectedModel: Model | undefined;
 
-	let selectedModelfile = null;
-	$: selectedModelfile =
-		selectedModels.length === 1 &&
-		$modelfiles.filter((modelfile) => modelfile.tagName === selectedModels[0]).length > 0
-			? $modelfiles.filter((modelfile) => modelfile.tagName === selectedModels[0])[0]
-			: null;
-
-	let selectedModelfiles = {};
-	$: selectedModelfiles = selectedModels.reduce((a, tagName, i, arr) => {
-		const modelfile =
-			$modelfiles.filter((modelfile) => modelfile.tagName === tagName)?.at(0) ?? undefined;
-
-		return {
-			...a,
-			...(modelfile && { [tagName]: modelfile })
-		};
-	}, {});
-
 	let chat = null;
 	let tags = [];
@@ -345,6 +326,7 @@
 			const hasImages = messages.some((message) =>
 				message.files?.some((file) => file.type === 'image')
 			);
+
 			if (hasImages && !(model.custom_info?.meta.vision_capable ?? true)) {
 				toast.error(
 					$i18n.t('Model {{modelName}} is not vision capable', {
@@ -362,7 +344,7 @@
 				role: 'assistant',
 				content: '',
 				model: model.id,
-				modelName: model.custom_info?.name ?? model.name ?? model.id,
+				modelName: model.name ?? model.id,
 				userContext: null,
 				timestamp: Math.floor(Date.now() / 1000) // Unix epoch
 			};
@@ -407,7 +389,7 @@
 			}
 			responseMessage.userContext = userContext;
 
-			if (model?.external) {
+			if (model?.owned_by === 'openai') {
 				await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
 			} else if (model) {
 				await sendPromptOllama(model, prompt, responseMessageId, _chatId);
@@ -956,10 +938,8 @@
 			) + ' {{prompt}}',
 			titleModelId,
 			userPrompt,
-			titleModel?.external ?? false
-				? titleModel?.source?.toLowerCase() === 'litellm'
-					? `${LITELLM_API_BASE_URL}/v1`
-					: `${OPENAI_API_BASE_URL}`
+			titleModel?.owned_by === 'openai' ?? false
+				? `${OPENAI_API_BASE_URL}`
 				: `${OLLAMA_API_BASE_URL}/v1`
 		);
@@ -1046,16 +1026,12 @@
 			<Messages
 				chatId={$chatId}
 				{selectedModels}
-				{selectedModelfiles}
 				{processing}
 				bind:history
 				bind:messages
 				bind:autoScroll
 				bind:prompt
 				bottomPadding={files.length > 0}
-				suggestionPrompts={chatIdProp
-					? []
-					: selectedModelfile?.suggestionPrompts ?? $config.default_prompt_suggestions}
 				{sendPrompt}
 				{continueGeneration}
 				{regenerateResponse}
......
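The chat component now dispatches on the unified `owned_by` field rather than the old `external`/`source` flags, and the LiteLLM-specific title endpoint is gone. A minimal sketch of that routing, assuming the names used in the diff (`sendPromptOpenAI`, `sendPromptOllama`, the `*_API_BASE_URL` constants); the wrapper function itself is illustrative:

```ts
// Dispatch purely on owned_by: 'openai' goes to the OpenAI-compatible path,
// everything else falls back to Ollama.
declare const OPENAI_API_BASE_URL: string;
declare const OLLAMA_API_BASE_URL: string;

type ChatModel = { id: string; owned_by?: 'openai' | 'ollama' };

const routePrompt = async (
	model: ChatModel | undefined,
	prompt: string,
	sendPromptOpenAI: (prompt: string) => Promise<void>,
	sendPromptOllama: (prompt: string) => Promise<void>
) => {
	if (model?.owned_by === 'openai') {
		await sendPromptOpenAI(prompt); // previously gated on model?.external
	} else if (model) {
		await sendPromptOllama(prompt);
	}
};

// Title generation now picks between just two endpoint families.
const titleEndpoint = (titleModel?: ChatModel): string =>
	titleModel?.owned_by === 'openai' ? `${OPENAI_API_BASE_URL}` : `${OLLAMA_API_BASE_URL}/v1`;
```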
 <script lang="ts">
 	import { v4 as uuidv4 } from 'uuid';
-	import { chats, config, modelfiles, settings, user as _user, mobile } from '$lib/stores';
+	import { chats, config, settings, user as _user, mobile } from '$lib/stores';
 	import { tick, getContext } from 'svelte';
 	import { toast } from 'svelte-sonner';
@@ -26,7 +26,6 @@
 	export let user = $_user;
 	export let prompt;
-	export let suggestionPrompts = [];
 	export let processing = '';
 	export let bottomPadding = false;
 	export let autoScroll;
@@ -34,7 +33,6 @@
 	export let messages = [];
 
 	export let selectedModels;
-	export let selectedModelfiles = [];
 
 	$: if (autoScroll && bottomPadding) {
 		(async () => {
@@ -247,9 +245,7 @@
 	<div class="h-full flex mb-16">
 		{#if messages.length == 0}
 			<Placeholder
-				models={selectedModels}
-				modelfiles={selectedModelfiles}
-				{suggestionPrompts}
+				modelIds={selectedModels}
 				submitPrompt={async (p) => {
 					let text = p;
@@ -316,7 +312,6 @@
 							{#key message.id}
 								<ResponseMessage
 									{message}
-									modelfiles={selectedModelfiles}
 									siblings={history.messages[message.parentId]?.childrenIds ?? []}
 									isLastMessage={messageIdx + 1 === messages.length}
 									{readOnly}
@@ -348,7 +343,6 @@
 									{chatId}
 									parentMessage={history.messages[message.parentId]}
 									{messageIdx}
-									{selectedModelfiles}
 									{updateChatMessages}
 									{confirmEditResponseMessage}
 									{rateMessage}
......
 <script lang="ts">
 	import { WEBUI_BASE_URL } from '$lib/constants';
-	import { user } from '$lib/stores';
+	import { config, user, models as _models } from '$lib/stores';
 	import { onMount, getContext } from 'svelte';
 	import { blur, fade } from 'svelte/transition';
@@ -9,23 +9,21 @@
 	const i18n = getContext('i18n');
 
+	export let modelIds = [];
 	export let models = [];
-	export let modelfiles = [];
 
 	export let submitPrompt;
-	export let suggestionPrompts;
 
 	let mounted = false;
-	let modelfile = null;
 	let selectedModelIdx = 0;
 
-	$: modelfile =
-		models[selectedModelIdx] in modelfiles ? modelfiles[models[selectedModelIdx]] : null;
-
-	$: if (models.length > 0) {
+	$: if (modelIds.length > 0) {
 		selectedModelIdx = models.length - 1;
 	}
 
+	$: models = modelIds.map((id) => $_models.find((m) => m.id === id));
+
 	onMount(() => {
 		mounted = true;
 	});
@@ -41,25 +39,14 @@
 						selectedModelIdx = modelIdx;
 					}}
 				>
-					{#if model in modelfiles}
-						<img
-							crossorigin="anonymous"
-							src={modelfiles[model]?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`}
-							alt="modelfile"
-							class=" size-[2.7rem] rounded-full border-[1px] border-gray-200 dark:border-none"
-							draggable="false"
-						/>
-					{:else}
 						<img
 							crossorigin="anonymous"
-							src={$i18n.language === 'dg-DG'
-								? `/doge.png`
-								: `${WEBUI_BASE_URL}/static/favicon.png`}
+							src={model?.info?.meta?.profile_image_url ??
+								($i18n.language === 'dg-DG' ? `/doge.png` : `${WEBUI_BASE_URL}/static/favicon.png`)}
 							class=" size-[2.7rem] rounded-full border-[1px] border-gray-200 dark:border-none"
 							alt="logo"
 							draggable="false"
 						/>
-					{/if}
 				</button>
 			{/each}
 		</div>
@@ -70,23 +57,32 @@
 		>
 			<div>
 				<div class=" capitalize line-clamp-1" in:fade={{ duration: 200 }}>
-					{#if modelfile}
-						{modelfile.title}
+					{#if models[selectedModelIdx]?.info}
+						{models[selectedModelIdx]?.info?.name}
 					{:else}
 						{$i18n.t('Hello, {{name}}', { name: $user.name })}
 					{/if}
 				</div>
 
 				<div in:fade={{ duration: 200, delay: 200 }}>
-					{#if modelfile}
+					{#if models[selectedModelIdx]?.info}
 						<div class="mt-0.5 text-base font-normal text-gray-500 dark:text-gray-400">
-							{modelfile.desc}
+							{models[selectedModelIdx]?.info?.meta?.description}
 						</div>
-						{#if modelfile.user}
+						{#if models[selectedModelIdx]?.info?.meta?.user}
 							<div class="mt-0.5 text-sm font-normal text-gray-400 dark:text-gray-500">
-								By <a href="https://openwebui.com/m/{modelfile.user.username}"
-									>{modelfile.user.name ? modelfile.user.name : `@${modelfile.user.username}`}</a
+								By
+								{#if models[selectedModelIdx]?.info?.meta?.user.community}
+									<a
+										href="https://openwebui.com/m/{models[selectedModelIdx]?.info?.meta?.user
+											.username}"
+										>{models[selectedModelIdx]?.info?.meta?.user.name
+											? models[selectedModelIdx]?.info?.meta?.user.name
+											: `@${models[selectedModelIdx]?.info?.meta?.user.username}`}</a
 									>
+								{:else}
+									{models[selectedModelIdx]?.info?.meta?.user.name}
+								{/if}
 							</div>
 						{/if}
 					{:else}
@@ -99,7 +95,11 @@
 			</div>
 
 			<div class=" w-full" in:fade={{ duration: 200, delay: 300 }}>
-				<Suggestions {suggestionPrompts} {submitPrompt} />
+				<Suggestions
+					suggestionPrompts={models[selectedModelIdx]?.info?.meta?.suggestion_prompts ??
+						$config.default_prompt_suggestions}
+					{submitPrompt}
+				/>
 			</div>
 		</div>
 	{/key}
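Placeholder now receives model ids and resolves everything it displays from the shared `models` store, with `model.info.meta` replacing the old modelfile lookups. A hedged sketch of that resolution (the type and helper names are illustrative):

```ts
// Resolve selected ids against the models store and read display metadata
// from model.info / model.info.meta, falling back to the config defaults.
type ModelMeta = {
	description?: string;
	profile_image_url?: string;
	suggestion_prompts?: { content: string }[];
	user?: { username: string; name?: string; community?: boolean };
};
type StoreModel = { id: string; name: string; info?: { name?: string; meta?: ModelMeta } };

const resolveModels = (modelIds: string[], storeModels: StoreModel[]) =>
	modelIds.map((id) => storeModels.find((m) => m.id === id));

const placeholderView = (model: StoreModel | undefined, defaultPrompts: { content: string }[]) => ({
	title: model?.info?.name, // replaces modelfile.title
	description: model?.info?.meta?.description, // replaces modelfile.desc
	profileImage: model?.info?.meta?.profile_image_url, // replaces modelfiles[model]?.imageUrl
	suggestionPrompts: model?.info?.meta?.suggestion_prompts ?? defaultPrompts
});
```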
{
"description": "Developer lead assistant with no code explanation",
"profile_image_url": "data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/4gHYSUNDX1BST0ZJTEUAAQEAAAHIAAAAAAQwAABtbnRyUkdCIFhZWiAH4AABAAEAAAAAAABhY3NwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAA9tYAAQAAAADTLQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlkZXNjAAAA8AAAACRyWFlaAAABFAAAABRnWFlaAAABKAAAABRiWFlaAAABPAAAABR3dHB0AAABUAAAABRyVFJDAAABZAAAAChnVFJDAAABZAAAAChiVFJDAAABZAAAAChjcHJ0AAABjAAAADxtbHVjAAAAAAAAAAEAAAAMZW5VUwAAAAgAAAAcAHMAUgBHAEJYWVogAAAAAAAAb6IAADj1AAADkFhZWiAAAAAAAABimQAAt4UAABjaWFlaIAAAAAAAACSgAAAPhAAAts9YWVogAAAAAAAA9tYAAQAAAADTLXBhcmEAAAAAAAQAAAACZmYAAPKnAAANWQAAE9AAAApbAAAAAAAAAABtbHVjAAAAAAAAAAEAAAAMZW5VUwAAACAAAAAcAEcAbwBvAGcAbABlACAASQBuAGMALgAgADIAMAAxADb/2wBDAAMCAgICAgMCAgIDAwMDBAYEBAQEBAgGBgUGCQgKCgkICQkKDA8MCgsOCwkJDRENDg8QEBEQCgwSExIQEw8QEBD/2wBDAQMDAwQDBAgEBAgQCwkLEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBD/wAARCABkAGQDASIAAhEBAxEB/8QAHQAAAQQDAQEAAAAAAAAAAAAAAAUGBwgCBAkBA//EADkQAAEDAwMCBQEGBAUFAAAAAAECAwQFBhEAEiEHMQgTIkFRYRQjMnGBkRUzQlIXJHKh0RaCkrHB/8QAGwEAAQUBAQAAAAAAAAAAAAAABgABAwQFAgf/xAA0EQABAgUCBAQEBAcAAAAAAAABAhEAAwQFIRIxQVFxgQYTImGRsfDxMkKhwQcUFSRSYtH/2gAMAwEAAhEDEQA/AOcQA9zrIJyeDrHHGdbENnzl7QMn2HydbccEtmFCn0tchtLinmGvMVsb8xe3er4+g7DccJ+vfSvCo77bi25TSmVtEpWlxJSUqHsQeQfodOG3bZqLsih0yW81AclLSuKuW0kt+U6cFe1Q5OQcDueNvJGn7bVuUiNcM1lVbXVGfMC26lFSpPnHOc+o7hnOc5ByMHToUkr0Hdn4/PbtGJX1aZUrzSfTt9cYZFHt9+a4lmOyST8+2nnD6S3JMfRFYpshxxz8KG2FqKvy451eTw2eDGnVqInqB1JRIiQHSJMSC4va4tkDKVuqP4Qe+Pg6tWzaApVrGBY1EplMKSoluMUJ80J7J3gElXyT++kqtkS1CW+XZ3YCBoS7hVjz5YZDEgM6ldB+5+EcfKt0KvCmRzJnUSpREdtz8Nbaf3I7ajyv2vIpii26EnvlSRxrtNNpt5UaiyKxU5FOVBjJUouhw7lp2nB38cFWAM98/vFN9dBenF9uqkVCkUiJUlJw1LjFpIW+RnYtKSEqyM+rHcjnJGbSDLmg6SDFNNVXUagqckkbswB+Z5YjkLNgqb5CTj2OMZ0mVKmyYLnlyWS2vaFhOedpGR/tq8vU3pbbvS+JVoV3WymQphRkQE7TtUtQKQQocgEhI+MgDvjVY6/07rcyS/VKhCYgtPBCmQ2coKVJyNuCckDv3OfbUM2WqWpjBRaLmK9BIBDcf2iOptHYaYklhb/mw2m33vMSAlSFKQkFOD/c4n9NI4GdONyAXG3qbT3VSJCf5risgqbTztQD/TkZP+lPAwdN5xCmXFNrGCDqKCNJcRho0aNKHjJOCDz7cDUo9FLZrdTrqpVEpqJzsJhUl5h5KSy4yAVEOJc9KhgE8/GRzqO6YYwQ4l1kvLcSUpSBgp4Pqz9Dz29u+rjeFjw31G5LLk9S3qk4mG04YyEMqwHCMBQUDwUjcARj99D/AImvCbFbl1jsRtgn9B7fDeNeyWuVd6xNLUfgO+W3x84QlWtP6j30xK/hbjCqktLLBQdiWlpQAAOwSjCTntgasf0L6JUCtX+uoV12RPjW1/mZrklwrEh/OEIJUTlO4Ek9iEnPfTOvV6g0KS1bdLq7LNYiNqkIbaUUEZHG44x9SBzjPtqc/DDWmKP0qui6lPQ33JD/AKVKUMY8kkJyfqc//dWvDVzm3mgTVrlspTM4Z0k4PQtHm/jqxos13FHKmvIRu2cpGQejjEWrplEduijQZlZVIitrQV/Y2XChCknsV45OR7fXTauZHTXpKpm47jveTSGUPF1lh+WpSXAkEqSGwCpSQkq47dtJsfxBwYEOnRahbkl+RIp5luOQnm1MpbSg5UCSMYKQCCeM++Ma5eeIrxEV6/LvlVhxEp4uuSG48QhTyUtJWoBCMcbex5JBP0I1VrF1tGsy1+kKfHBn+t8wU2W30F2HmpZRQzlsu31tiOltu+L7w1XDcTFkUy5EpdqEpuKyXac43GekLUNidxTgEkjlQGpsi0Gjwpbk2JTYzLruNy0NJSSeecgd+e+uElsdJ/EjdtErN3ROn77dLpiBIccSpCFbQ4hX3baDlSglC/btz7Z12G8JnUif1O6CW7dVSkLkyg25GceUoqW8WiUhRzySQNZyZoyELfmxjYraBEgJmaWbmMwzvF10ohXJ03rRpsNs1GNEeqELekrCy2NzrXzhSRkDP4kp1yNotHuaqThDp6JbEZ9fBAOwEJOSn9M9tdtbsmiXVVNKdfeaMYlQWMpQHAfQBjjgjIPPtrlxWLc6fUq37ouaBeNShXfTKo5Fp9BQz9y5FCglR3gcZBcUDkJBQAeVDJ1bkhcuUmpyMP0jzhFVKpaqeiSNOS3Xb66RDlZtagUSrtrbnyP4hFUlzzXAkp3DHO33Go9vB+LImyHH0BuoeafMQ2kBAOeSPgH204a9MfVLVKktOLyrncgg/ue2mbXl/aJiprqlb5Hrxjt+vx/xq1cpUlCgZQA6coKKATglpp3HGEzRo0azIvwo0iFMnlbcFlTi2x5hwCeB9ANXD8PfiQqHT6hvdPY9KcqNHkPeamMhX3zDpQN2Rjtxn8xqp1k1mqUSoKdpjnlqfSWFHcPwkcjHvxqx9PsqHavTo3dKUqVJjoRLfW02ltKWk7XHkn33lreE59wOw1UuVno7zRqpq1GpG5HHHI4LwkXKfa5yZ1ORqdg+R3hW6l31Kk3G/dESlNwU1SKmHIQ+nKwoA7HM+xwRhQ+n5iefCLf0aq2lcdoVNtiVFp7jNXQwpoLEhls4cST7gENgkn8KvjUWUO0rav20qqic5Lrk9qA69AVHc9SVMY3DgY8vy1NOAr4wrjSt0RoMDp46q5FVZyl1+DUZUV1ie+y3AnQG1FtzygpQW4FoKk5BzuHAJB1H4dqrZNQaSgQUkA
DO7J9Iy5JZmgc8Sy7lUJFVXkEpLsNgV+rYAYJx2bhEzdRaI3QJEdulNNi36l5lQpUxj0ulhw48pageNispKTz3+dV3qvT65qJfkS7LD6ZU64JDrzsmUZtWSHHN52koQVYbOC5kkkEjkDIBs3GoNQuRul1W1Cudbcqol1K3gsR2N5QFNuHGQMp7AZIIIByNQd0369IrPWK++nlctSiuMwnnkUdctoNtQkIkJjuNvPHKlbvu1j2UoYOMjC8YVEpFJLkvqWMluI2z77Rf/h1S1Jq50/TplkAZwX/1HEb54YESDJ8SvV0R4dn9MvDpEptEjMtJqzlaqjZcPmp3FTaWVblIKQSlZB3gpIAyNTr4OZQ6eeGWZKepqmmaK/UZyo4WCoDcp3ywc44BCc8DjVdLv6pu2fYteuO7WrUuCqRIDsSm/YWigpU02fKaWkoAW2jJO48ZxtznX28Ht9Vy4ujkrpq9VnG3avMdY2vqShc5v7OjzEtPr4UQUuZ9ikKGcjBDrPKRPmGW2l+OWxz37Yg88Ry5lLR+ekamIJHEu4wMdSHdtotdNvplVhS76q0hhQbivSnW9oQlB2DY2k+/Khj3yrVC7bpVp1e+KS9crcppUqqMInPSWQ2wmIcFxSlk8EZ+exzpx9W/EhelEkf4QQLAnUSU+r7O3/GYzrS9vBD/AJZTgp2jeCQpJwkg++o46q9VZRgU3pLYS5M+rONIgPupkArc5SFMb92BnykhROMJQrn7xWS2nrRQyxTqLgAkqJGBwzj7Z4GPMbj4ZNbUCtSdIJACW34qfMW88VXTzofP6FVKu0GgW4ZbTbblNm09hkFxe8J2hafx5Tu9z8+2uUt6KrlRVEpri5U2HQ4YYjpQjKY7ZUVEZA7ZPc6sj1BHUe0en9JoNWr0wuyEqQ+l8q8tACQfLZ/pUnKuVA8beeeNQMbqrlIYqtHgRm3xW2G2pG5lO9KUqyAkqHHIH6ampqrzJIMs6gcjLj4wWSaQJYzT6Rx+8RkQQSCCCO4OjX0klYfWFoAVuOR9dGrgdsxFG7QyszEtskh08tEDOFf84z+uNXEsu4XplKRCmYUxLgIjSI76DtSvZsGQASAPxfkCPcaqTaMeQ3UGapHfhJMZalbZK8JVtTnB/PsNSlbXV+sSruauebTZU1C1Bx1LbatpAONvGRwAeeM6h8yf5umWzNvxfpxjuZIkTad1uVPtzHXnE29JbL64Wk9Wadbdhu1SmPNhFMfFUhtpcABbxtdfQvaWlkjcB6mm8jA19ha9+xuq9BpvValOMU6DHNUqsVE9hzzkNpAbTllxQAW55aFdjsK8YJzpO6pTZKrybj0ev1WHRqnFadp70WS422lCgHUghKuSU4BOe24540jW5X5lChVm9qRVqk1MqJaprK5MkydvkukOoSXd3Csg/BwdC9Db6mnrEzULSNTHAIPqA98e45xt3SdLVIUZ6HOkd86gQeh368otM/1oRVLer0OHJShyMw7Gjw0uFOxCGwpK0IwNuCeCMcpz9TTvqBdVIg3nDr0Btxut1hmQmoodSoMSmMt7dytpBO5KvlQ2jv7uun9UL0SpTgrmFOApWpMVgFQIwQfRzwSP11sm4pdVWHpwiPuJAAWuCxkAHIx6OOdb86xmdLUAoOebwNUfiKXQT0zCgkDgGhlUut23eNzRrPXTDBpU6PMNQdRJVveDUZ14Np83BCXFtJTnv6u27BEw9H58KkrqFajx0xIzKEMx20pCQlA3cgADk8/ofqdJFOdeckoltiI283yl37I0FJ9iQdvwSNLaZciOyoCYooPJQltsAn57aalsK6eXoKgT3hXTxZKr5oWhCgANnEPO97u/xasiqUWDTWnbppDD7lr1B47nmJaEBXlA8ZS4UhI3HalSkL9lAwH0O6CeJK25zl9sdMpsoS4RaantXBTEO7FkFZBVJylSilPqIztChj1HT6oNzmj1WpTHGQ6ptkMRGQnG5agn0pwAO53KJPpSD7kai2NIuCRVLkji+q9QoLTqks4qDq47SyEkjalQG0E4SEkZJHfWFeJq7WkzUsySHwTttgHh0MbNopRfAKcfmBI4Y3I9nhz+I+T1OqjlPi1hxqAxFKj9gkTGJD4KgPUox9yP6f7s/Q99V0vC170okGLctUp62IU8FEZ9LiD5m3vwklQxj+oDU1TqHV6VWKXS25KbiaeRh59TTiFrcUDkklak4T3xj2+un74iOiVnWv4e6Lf6r+W7W0tNsqiJWlxhxThyWkgDIIAA79kEkZ11ZL8LkEB3d8tvjHTvG3d/D8qz0oDEKB24e/P9Giju4k5zydGg9zjRosgSjYp7AlTY8VbvlJedQhTg7oBIGf076vH4ZL8tayukdSbnU9ksR1vq2vgLUtRVtwrtnIGce2qRQaXImJDqcpQVbQe+T8asp0W6YVG4oX8BuK9IlMhywl9tnJdfeSe5SrIQCP7T6vbA76yL35Uum11EzQk47sflBBYFDzCnytZBBOAcAgkZ4H23iR7OofTC5en9Fr9zTquXac1uaS1KG8OsuLLZTtThCQSQd2RjgnSJIhx5nR6uSbbkF+l0VyKt1by0p2qcfSlAQnaVE4c7qUODnvxqykZ7o94c+nbNs9PqHMu+sVaMWJrbzaVJllaSFBRwQkZUTwM5HCe4FTuoTVlUuzpc2w+odLZZqKmV1S3iuUtZc88DZGcS35CkIWpJ8tZSpCU8KWcZilr82YmbIONQz7PnpA/KlT5fnIr5qlgvpBDaf8U9uO5hpxJwSAErPbn89O+zv4fU7kpVKqlUECFNnR40iYpvcI7S3EpU6U5GQkEqxkdvbUWxKoxxmQgf9w0twqxEBSFSmhjn+YBopSoEbwNTqdXLMWO6wUyxrTuSFB6eXEmsU5UJtTrwRt++BIOQSTkgAn2z24xpss1za2UOsIWT2I1GcSvQxyZjA5zw4Mf+9LEa4acB97PZAxn+YnUydIDPGcqnmPkZ6Q+4NsVNih1S+luOM01x9uC8422hQaJLe0LKgopClKTgpTgkYJyUpKWOmVOv+5pMB6qqH2iKKnuiJCvOU2tpK0+tSEg5UjOTjgYHvpY6XRXOotRes+pXCyu23FokOU9M9TBWpO0l0lA5AIQACR6u2cEibHrZ8PViPCmW1GuhutMpLiJqFsvbUlOxSUh13btPHHf0j40I3O31NSFhaCsKJbTuE9Txg1td9orcqUlK9KkpD6tnbOOUJthdLLQgWLXanWai1TI9IcShbtQfaSre9wnG1SgMkYGCSTqmvW+lVRFUmmLOZVDjKWpSA+FAKxjeUA+kkE4J576tXcUfp5cVmVuyq/HrK1VB5E9morQ0XUyWcloqShzG0ZIwO3++qS3za67ckzanIq7Mwy38rQvcHFEZI3pIAI5PYn8hnQ/YfDFVaKlc4lSUqOArOPY9IMrj4tk3um/lnC2Y49LYAx7OHiLjgEjPbjRrJxZccUsAJBJICRgD6AfGjR1AazRsQ6hJhg+U6QPgak/pr1BqseZHptRc+0Q8pQhxWSWxn3Sn1HGe6efodRNrbp9SfgOhTfKcglO7H7H2O
qtfRy6+SZM0OIu0FZMoZwmyy0XpoPVagQIsyK9b8qtU5/PlyJLCo62CPTvS4tad6ce/BHfB5GmRdnhqo101pLtkR36eiQjz3XX8MxcYzuRkE5xz6AofUd9QXVurVSlwIcBFRLwSwho7g4G2U8A7iSpSiByQPT8acnTvxC3bbduS7aerjkWK+2ChJRu49wgkFSM8/h5PtoQ/o1ba5YVQl/bj9doLZN0ttxm/36W9xgd9/mI1bh6KSLLqq4kx8yUpOAWVBLavy3AnWIslFIDU2o0f+JU93nLeIrwB/tUpKkr/APHPzjTjtW8qHc8iXOqt6tUtceO6827IYU84++kehtPqyncT+I5GB254njw4zeklxvy5N3uw5stsAtGc5lLh/qGcjkccY+f0JqWqrhITLny0Jzv+b/g+BjAu67X6zShSm5DHbiYrbPm9OkM+Xa3Tp5BQoNuza9N8wIcI9mmQkEDvkk/6de23SLGrK3kXzVZ0RkHDSafT0fZ1A4yBsyeD/dgY+upt68w+nzFekvWmqjNwJP8AMZUk7EOpIOG3U5KQPfIV2+uq+V6bbMRIapbP+ZUCRKjvqSsnOVAFJwpPB5W3u79tSqKqlJSSR7j7MfhGXTaKZSZwQDjZQJ+OXHYxMVFpFoW+hc+074tyqsNM+UmBVYSktlGQSCU4IPHdIzyfnSo11u6QyWG/+peky6M/CUcVSgz1OMq+QY76kkpOO3mZxnGDqsa7zqMF8sy0olhslG15oBxGMdzj6/7aRavX5dTXhb61IByMnj9B2GuU0JIJmrUo8/wkd0s/eJZ9TKmKBlSUoHFiog9lEt2iwnVTrN04XTY83p+mS5NlJUHGy48luOQSAobznJHO3kD+49hXitV2q1t9L9TnrlLCeCr2B5xpOKlK7nXmrcmV5CNDk9STFZagouAB0g0aNGpo4g9gdGjRpjtDwayyQeFEYweONGjShHePNyknKSQe3GtqnVip0xS0wJjjIWkhQSe4PB0aNIwhkiM5lZqlQaSxLmuuNo5CN3pB/L9v21poUps5QtSSTtJBwcEHRo0oQMeKUpR3LUVE9yTk680aNPDQaNGjShQaNGjTiFH/2Q==",
"ollama": {
"modelfile": "FROM llama3\nPARAMETER temperature 1\nSYSTEM \"\"\"\nI want you to act as a senior full-stack tech leader and top-tier brilliant software developer, you embody technical excellence and a deep understanding of a wide range of technologies. Your expertise covers not just coding, but also algorithm design, system architecture, and technology strategy. for every question there is no need to explain, only give the solution.\n\nCoding Mastery: Possess exceptional skills in programming languages including Python, JavaScript, SQL, NoSQL, mySQL, C++, C, Rust, Groovy, Go, and Java. Your proficiency goes beyond mere syntax; you explore and master the nuances and complexities of each language, crafting code that is both highly efficient and robust. Your capability to optimize performance and manage complex codebases sets the benchmark in software development.\n\nPython | JavaScript | C++ | C | RUST | Groovy | Go | Java | SQL | MySQL | NoSQL\nEfficient, Optimal, Good Performance, Excellent Complexity, Robust Code\n\nCutting-Edge Technologies: Adept at leveraging the latest technologies, frameworks, and tools to drive innovation and efficiency. Experienced with Docker, Kubernetes, React, Angular, AWS, Supabase, Firebase, Azure, and Google Cloud. Your understanding of these platforms enables you to architect and deploy scalable, resilient applications that meet modern business demands.\n\nDocker | Kubernetes | React | Angular | AWS | Supabase | Firebase | Azure | Google Cloud\nSeamlessly Integrating Modern Tech Stacks\n\nComplex Algorithms & Data Structures\nOptimized Solutions for Enhanced Performance & Scalability\n\nSolution Architect: Your comprehensive grasp of the software development lifecycle empowers you to design solutions that are not only technically sound but also align perfectly with business goals. From concept to deployment, you ensure adherence to industry best practices and agile methodologies, making the development process both agile and effective.\n\nInteractive Solutions: When crafting user-facing features, employ modern ES6 JavaScript, TypeScript, and native browser APIs to manage interactivity seamlessly, enabling a dynamic and engaging user experience. Your focus lies in delivering functional, ready-to-deploy code, ensuring that explanations are succinct and directly aligned with the required solutions.\n\nnever explain the code just write code \n\"\"\""
},
"suggestion_prompts": [
{
"content": "Create a pac-man game in C"
},
{
"content": "Create react page example"
},
{
"content": "write character collisions in godot engine"
}
],
"categories": [
"assistant",
"programming",
"data analysis"
],
"user": {
"username": "vianch",
"name": "",
"community": true
}
}
\ No newline at end of file
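The JSON above is a sample of the model definition this commit standardises on. A rough TypeScript typing inferred from the fields shown (optionality is assumed):

```ts
// Shape of the sample model export above; field names follow the JSON exactly,
// optionality is an assumption.
interface ModelExport {
	description?: string;
	profile_image_url?: string; // data: URL or https URL
	ollama?: { modelfile: string }; // raw Ollama Modelfile text
	suggestion_prompts?: { content: string }[];
	categories?: string[];
	user?: { username: string; name?: string; community?: boolean };
}
```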
@@ -45,12 +45,10 @@
 		<div class="mr-1 max-w-full">
 			<Selector
 				placeholder={$i18n.t('Select a model')}
-				items={$models
-					.filter((model) => model.name !== 'hr')
-					.map((model) => ({
+				items={$models.map((model) => ({
 					value: model.id,
-					label: model.custom_info?.name ?? model.name,
-					info: model
+					label: model.name,
+					model: model
 				}))}
 				bind:value={selectedModel}
 			/>
......
@@ -249,15 +249,17 @@
 							<div class="line-clamp-1">
 								{item.label}
 
+								{#if item.model.owned_by === 'ollama'}
 									<span class=" text-xs font-medium text-gray-600 dark:text-gray-400"
-										>{item.info?.details?.parameter_size ?? ''}</span
+										>{item.model.ollama?.details?.parameter_size ?? ''}</span
 									>
+								{/if}
 							</div>
 
 							<!-- {JSON.stringify(item.info)} -->
-							{#if item.info.external}
-								<Tooltip content={`${item.info?.source ?? 'External'}`}>
+							{#if item.model.owned_by === 'openai'}
+								<Tooltip content={`${'External'}`}>
 									<div class="">
 										<svg
 											xmlns="http://www.w3.org/2000/svg"
@@ -278,13 +280,17 @@
 									</svg>
 								</div>
 							</Tooltip>
-							{:else}
+							{:else if item.model.owned_by === 'ollama'}
 								<Tooltip
 									content={`${
-										item.info?.details?.quantization_level
-											? item.info?.details?.quantization_level + ' '
+										item.model.ollama?.details?.quantization_level
+											? item.model.ollama?.details?.quantization_level + ' '
+											: ''
+									}${
+										item.model.ollama?.size
+											? `(${(item.model.ollama?.size / 1024 ** 3).toFixed(1)}GB)`
 											: ''
-									}${item.info.size ? `(${(item.info.size / 1024 ** 3).toFixed(1)}GB)` : ''}`}
+									}`}
 								>
 									<div class="">
 										<svg
......
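In the selector, dropdown items now carry the whole unified model under `model`, the parameter-size and quantization badges are gated on `owned_by === 'ollama'`, and sizes are rendered in GB. A small sketch of that mapping and formatting (helper names are illustrative):

```ts
// Selector item mapping and the Ollama tooltip text built in the diff above.
type SelectorModel = {
	id: string;
	name: string;
	owned_by?: 'openai' | 'ollama';
	ollama?: { size?: number; details?: { parameter_size?: string; quantization_level?: string } };
};

const toSelectorItem = (model: SelectorModel) => ({
	value: model.id,
	label: model.name, // was: model.custom_info?.name ?? model.name
	model // was: info: model
});

const ollamaTooltip = (model: SelectorModel): string => {
	const quant = model.ollama?.details?.quantization_level;
	const size = model.ollama?.size; // bytes
	return `${quant ? quant + ' ' : ''}${size ? `(${(size / 1024 ** 3).toFixed(1)}GB)` : ''}`;
};

// e.g. a model with quantization_level 'Q4_0' and size 4.1 * 1024 ** 3 renders as "Q4_0 (4.1GB)"
```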