"...composable_kernel_onnxruntime.git" did not exist on "ad09ebdb531285c35f7c45be68db7fd52b5dc082"
Unverified commit 0b62bbb5, authored by Chris, committed by GitHub

Merge branch 'dev' into feat/Teams_Incoming_Webhook

parents 427ca4e3 cb364f0a
import { v4 as uuidv4 } from 'uuid';
import sha256 from 'js-sha256';
import { getOllamaModels } from '$lib/apis/ollama';
import { getOpenAIModels } from '$lib/apis/openai';
import { getLiteLLMModels } from '$lib/apis/litellm';
export const getModels = async (token: string) => {
	let models = await Promise.all([
		await getOllamaModels(token).catch((error) => {
			console.log(error);
			return null;
		}),
		await getOpenAIModels(token).catch((error) => {
			console.log(error);
			return null;
		}),
		await getLiteLLMModels(token).catch((error) => {
			console.log(error);
			return null;
		})
	]);

	models = models
		.filter((models) => models)
		.reduce((a, e, i, arr) => a.concat(e, ...(i < arr.length - 1 ? [{ name: 'hr' }] : [])), []);

	return models;
};
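// Note (added for clarity, not part of the original file): the filter/reduce
// above drops providers whose request failed and flattens the rest into one
// list, inserting an { name: 'hr' } marker between sources — presumably
// rendered as a divider in the model selector. Sketch with hypothetical data:
//
//   const results = [[{ name: 'llama2' }], null, [{ name: 'gpt-4' }]];
//   results
//     .filter((models) => models)
//     .reduce((a, e, i, arr) => a.concat(e, ...(i < arr.length - 1 ? [{ name: 'hr' }] : [])), []);
//   // => [{ name: 'llama2' }, { name: 'hr' }, { name: 'gpt-4' }]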
//////////////////////////
// Helper functions
//////////////////////////
export const capitalizeFirstLetter = (string) => {
	return string.charAt(0).toUpperCase() + string.slice(1);
};

export const splitStream = (splitOn) => {
	let buffer = '';
	return new TransformStream({
......
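The body of `splitStream` is collapsed above. For orientation, a line-splitting `TransformStream` of this shape is typically completed along the following lines (a sketch under assumptions, not the elided code):

```ts
// Sketch only (the actual body is collapsed above): buffer incoming text and
// emit one chunk per occurrence of `splitOn`.
const splitStreamSketch = (splitOn: string) => {
	let buffer = '';
	return new TransformStream<string, string>({
		transform(chunk, controller) {
			buffer += chunk;
			const parts = buffer.split(splitOn);
			// Every segment before the last delimiter is complete — emit it.
			parts.slice(0, -1).forEach((part) => controller.enqueue(part));
			// The tail may be a partial segment; keep it buffered for the next chunk.
			buffer = parts[parts.length - 1];
		},
		flush(controller) {
			// Emit whatever remains when the stream closes.
			if (buffer) controller.enqueue(buffer);
		}
	});
};
```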
import { cubicOut } from 'svelte/easing';
import type { TransitionConfig } from 'svelte/transition';

type FlyAndScaleParams = {
	y?: number;
	start?: number;
	duration?: number;
};

const defaultFlyAndScaleParams = { y: -8, start: 0.95, duration: 200 };

export const flyAndScale = (node: Element, params?: FlyAndScaleParams): TransitionConfig => {
	const style = getComputedStyle(node);
	const transform = style.transform === 'none' ? '' : style.transform;
	const withDefaults = { ...defaultFlyAndScaleParams, ...params };

	// Linearly map a value from one numeric range onto another.
	const scaleConversion = (valueA: number, scaleA: [number, number], scaleB: [number, number]) => {
		const [minA, maxA] = scaleA;
		const [minB, maxB] = scaleB;

		const percentage = (valueA - minA) / (maxA - minA);
		const valueB = percentage * (maxB - minB) + minB;

		return valueB;
	};

	// Serialize a style object to an inline CSS string, skipping undefined values.
	const styleToString = (style: Record<string, number | string | undefined>): string => {
		return Object.keys(style).reduce((str, key) => {
			if (style[key] === undefined) return str;
			return str + `${key}:${style[key]};`;
		}, '');
	};

	return {
		duration: withDefaults.duration ?? 200,
		delay: 0,
		css: (t) => {
			// As t runs 0 → 1, slide from the y offset to 0 and scale from `start` to 1.
			const y = scaleConversion(t, [0, 1], [withDefaults.y, 0]);
			const scale = scaleConversion(t, [0, 1], [withDefaults.start, 1]);

			return styleToString({
				transform: `${transform} translate3d(0, ${y}px, 0) scale(${scale})`,
				opacity: t
			});
		},
		easing: cubicOut
	};
};
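Being a standard Svelte custom transition, `flyAndScale` is applied with the `transition:` directive; an illustrative usage (not part of this commit):

```svelte
{#if open}
	<!-- Fades in while sliding up 8px and scaling from 95% to full size. -->
	<div transition:flyAndScale={{ y: -8, start: 0.95, duration: 200 }}>
		Menu content
	</div>
{/if}
```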
@@ -19,7 +19,7 @@
 } from '$lib/stores';
 import { copyToClipboard, splitStream } from '$lib/utils';

-import { generateChatCompletion, cancelChatCompletion, generateTitle } from '$lib/apis/ollama';
+import { generateChatCompletion, cancelOllamaRequest } from '$lib/apis/ollama';
 import {
 	addTagById,
 	createNewChat,
@@ -30,14 +30,14 @@
 	updateChatById
 } from '$lib/apis/chats';
 import { queryCollection, queryDoc } from '$lib/apis/rag';
-import { generateOpenAIChatCompletion } from '$lib/apis/openai';
+import { generateOpenAIChatCompletion, generateTitle } from '$lib/apis/openai';

 import MessageInput from '$lib/components/chat/MessageInput.svelte';
 import Messages from '$lib/components/chat/Messages.svelte';
 import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
 import Navbar from '$lib/components/layout/Navbar.svelte';
 import { RAGTemplate } from '$lib/utils/rag';
-import { LITELLM_API_BASE_URL, OPENAI_API_BASE_URL } from '$lib/constants';
+import { LITELLM_API_BASE_URL, OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL } from '$lib/constants';
 import { WEBUI_BASE_URL } from '$lib/constants';

 const i18n = getContext('i18n');
@@ -104,7 +104,7 @@
 	const initNewChat = async () => {
 		if (currentRequestId !== null) {
-			await cancelChatCompletion(localStorage.token, currentRequestId);
+			await cancelOllamaRequest(localStorage.token, currentRequestId);
 			currentRequestId = null;
 		}

 		window.history.replaceState(history.state, '', `/`);
@@ -372,7 +372,7 @@
 				if (stopResponseFlag) {
 					controller.abort('User: Stop Response');
-					await cancelChatCompletion(localStorage.token, currentRequestId);
+					await cancelOllamaRequest(localStorage.token, currentRequestId);
 				}

 				currentRequestId = null;
@@ -511,7 +511,8 @@
 		if (messages.length == 2 && messages.at(1).content !== '') {
 			window.history.replaceState(history.state, '', `/c/${_chatId}`);
-			await generateChatTitle(_chatId, userPrompt);
+			const _title = await generateChatTitle(userPrompt);
+			await setChatTitle(_chatId, _title);
 		}
 	};
@@ -696,11 +697,8 @@
 		if (messages.length == 2) {
 			window.history.replaceState(history.state, '', `/c/${_chatId}`);

-			if ($settings?.titleAutoGenerateModel) {
-				await generateChatTitle(_chatId, userPrompt);
-			} else {
-				await setChatTitle(_chatId, userPrompt);
-			}
+			const _title = await generateChatTitle(userPrompt);
+			await setChatTitle(_chatId, _title);
 		}
 	};
@@ -754,23 +752,46 @@
 		}
 	};

-	const generateChatTitle = async (_chatId, userPrompt) => {
-		if ($settings.titleAutoGenerate ?? true) {
+	const generateChatTitle = async (userPrompt) => {
+		if ($settings?.title?.auto ?? true) {
+			const model = $models.find((model) => model.id === selectedModels[0]);
+
+			const titleModelId =
+				model?.external ?? false
+					? $settings?.title?.modelExternal ?? selectedModels[0]
+					: $settings?.title?.model ?? selectedModels[0];
+			const titleModel = $models.find((model) => model.id === titleModelId);
+
+			console.log(titleModel);
 			const title = await generateTitle(
 				localStorage.token,
-				$settings?.titleGenerationPrompt ??
+				$settings?.title?.prompt ??
 					$i18n.t(
 						"Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title':"
 					) + ' {{prompt}}',
-				$settings?.titleAutoGenerateModel ?? selectedModels[0],
-				userPrompt
+				titleModelId,
+				userPrompt,
+				titleModel?.external ?? false
+					? titleModel.source === 'litellm'
+						? `${LITELLM_API_BASE_URL}/v1`
+						: `${OPENAI_API_BASE_URL}`
+					: `${OLLAMA_API_BASE_URL}/v1`
 			);

-			if (title) {
-				await setChatTitle(_chatId, title);
-			}
+			return title;
 		} else {
-			await setChatTitle(_chatId, `${userPrompt}`);
+			return `${userPrompt}`;
 		}
 	};
+
+	const setChatTitle = async (_chatId, _title) => {
+		if (_chatId === $chatId) {
+			title = _title;
+		}
+
+		if ($settings.saveChatHistory ?? true) {
+			chat = await updateChatById(localStorage.token, _chatId, { title: _title });
+			await chats.set(await getChatList(localStorage.token));
+		}
+	};
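Worth noting: after this change the title request always goes to an OpenAI-compatible endpoint, and the nested ternary only picks the base URL by model source. The selection logic, distilled (a sketch restating the diff above, reusing the same `$lib/constants` imports):

```ts
import { LITELLM_API_BASE_URL, OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL } from '$lib/constants';

// Restates the nested ternary above: choose the OpenAI-compatible base URL
// for the title-generation request based on where the title model lives.
const pickTitleApiBase = (titleModel?: { external?: boolean; source?: string }): string => {
	if (!(titleModel?.external ?? false)) {
		// Local models are served through Ollama's OpenAI-compatible /v1 endpoint.
		return `${OLLAMA_API_BASE_URL}/v1`;
	}
	// External models: LiteLLM-proxied models get a /v1 suffix; plain OpenAI does not.
	return titleModel?.source === 'litellm' ? `${LITELLM_API_BASE_URL}/v1` : OPENAI_API_BASE_URL;
};
```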
@@ -801,17 +822,6 @@
 		_tags.set(await getAllChatTags(localStorage.token));
 	};

-	const setChatTitle = async (_chatId, _title) => {
-		if (_chatId === $chatId) {
-			title = _title;
-		}
-
-		if ($settings.saveChatHistory ?? true) {
-			chat = await updateChatById(localStorage.token, _chatId, { title: _title });
-			await chats.set(await getChatList(localStorage.token));
-		}
-	};
-
 </script>

 <svelte:head>
......
@@ -19,7 +19,7 @@
 } from '$lib/stores';
 import { copyToClipboard, splitStream, convertMessagesToHistory } from '$lib/utils';

-import { generateChatCompletion, generateTitle, cancelChatCompletion } from '$lib/apis/ollama';
+import { generateChatCompletion, cancelOllamaRequest } from '$lib/apis/ollama';
 import {
 	addTagById,
 	createNewChat,
@@ -31,14 +31,19 @@
 	updateChatById
 } from '$lib/apis/chats';
 import { queryCollection, queryDoc } from '$lib/apis/rag';
-import { generateOpenAIChatCompletion } from '$lib/apis/openai';
+import { generateOpenAIChatCompletion, generateTitle } from '$lib/apis/openai';

 import MessageInput from '$lib/components/chat/MessageInput.svelte';
 import Messages from '$lib/components/chat/Messages.svelte';
 import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
 import Navbar from '$lib/components/layout/Navbar.svelte';
 import { RAGTemplate } from '$lib/utils/rag';
-import { LITELLM_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
+import {
+	LITELLM_API_BASE_URL,
+	OPENAI_API_BASE_URL,
+	OLLAMA_API_BASE_URL,
+	WEBUI_BASE_URL
+} from '$lib/constants';

 const i18n = getContext('i18n');
@@ -382,7 +387,7 @@
 				if (stopResponseFlag) {
 					controller.abort('User: Stop Response');
-					await cancelChatCompletion(localStorage.token, currentRequestId);
+					await cancelOllamaRequest(localStorage.token, currentRequestId);
 				}

 				currentRequestId = null;
@@ -521,7 +526,8 @@
 		if (messages.length == 2 && messages.at(1).content !== '') {
 			window.history.replaceState(history.state, '', `/c/${_chatId}`);
-			await generateChatTitle(_chatId, userPrompt);
+			const _title = await generateChatTitle(userPrompt);
+			await setChatTitle(_chatId, _title);
 		}
 	};
@@ -706,11 +712,8 @@
 		if (messages.length == 2) {
 			window.history.replaceState(history.state, '', `/c/${_chatId}`);

-			if ($settings?.titleAutoGenerateModel) {
-				await generateChatTitle(_chatId, userPrompt);
-			} else {
-				await setChatTitle(_chatId, userPrompt);
-			}
+			const _title = await generateChatTitle(userPrompt);
+			await setChatTitle(_chatId, _title);
 		}
 	};
@@ -719,6 +722,19 @@
 		console.log('stopResponse');
 	};

+	const regenerateResponse = async () => {
+		console.log('regenerateResponse');
+
+		if (messages.length != 0 && messages.at(-1).done == true) {
+			messages.splice(messages.length - 1, 1);
+			messages = messages;
+
+			let userMessage = messages.at(-1);
+			let userPrompt = userMessage.content;
+			await sendPrompt(userPrompt, userMessage.id);
+		}
+	};
+
 	const continueGeneration = async () => {
 		console.log('continueGeneration');
 		const _chatId = JSON.parse(JSON.stringify($chatId));
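Note on the added `regenerateResponse`: the `messages = messages;` self-assignment after `splice` is the idiomatic Svelte way to flag an in-place mutation so reactive bindings re-render; the last (done) assistant message is dropped and the preceding user prompt is resent.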
@@ -751,36 +767,35 @@
 		}
 	};

-	const regenerateResponse = async () => {
-		console.log('regenerateResponse');
-
-		if (messages.length != 0 && messages.at(-1).done == true) {
-			messages.splice(messages.length - 1, 1);
-			messages = messages;
-
-			let userMessage = messages.at(-1);
-			let userPrompt = userMessage.content;
-			await sendPrompt(userPrompt, userMessage.id);
-		}
-	};
-
-	const generateChatTitle = async (_chatId, userPrompt) => {
-		if ($settings.titleAutoGenerate ?? true) {
+	const generateChatTitle = async (userPrompt) => {
+		if ($settings?.title?.auto ?? true) {
+			const model = $models.find((model) => model.id === selectedModels[0]);
+
+			const titleModelId =
+				model?.external ?? false
+					? $settings?.title?.modelExternal ?? selectedModels[0]
+					: $settings?.title?.model ?? selectedModels[0];
+			const titleModel = $models.find((model) => model.id === titleModelId);
+
+			console.log(titleModel);
 			const title = await generateTitle(
 				localStorage.token,
-				$settings?.titleGenerationPrompt ??
+				$settings?.title?.prompt ??
 					$i18n.t(
 						"Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title':"
 					) + ' {{prompt}}',
-				$settings?.titleAutoGenerateModel ?? selectedModels[0],
-				userPrompt
+				titleModelId,
+				userPrompt,
+				titleModel?.external ?? false
+					? titleModel.source === 'litellm'
+						? `${LITELLM_API_BASE_URL}/v1`
+						: `${OPENAI_API_BASE_URL}`
+					: `${OLLAMA_API_BASE_URL}/v1`
 			);

-			if (title) {
-				await setChatTitle(_chatId, title);
-			}
+			return title;
 		} else {
-			await setChatTitle(_chatId, `${userPrompt}`);
+			return `${userPrompt}`;
 		}
 	};
@@ -789,8 +804,10 @@
 			title = _title;
 		}

-		chat = await updateChatById(localStorage.token, _chatId, { title: _title });
-		await chats.set(await getChatList(localStorage.token));
+		if ($settings.saveChatHistory ?? true) {
+			chat = await updateChatById(localStorage.token, _chatId, { title: _title });
+			await chats.set(await getChatList(localStorage.token));
+		}
 	};
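With the new `saveChatHistory` guard, a rename only persists to the backend (and refreshes the chat list) when chat saving is enabled; otherwise the title is updated in memory only.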
 	const getTags = async () => {
@@ -843,7 +860,7 @@
 	shareEnabled={messages.length > 0}
 	initNewChat={async () => {
 		if (currentRequestId !== null) {
-			await cancelChatCompletion(localStorage.token, currentRequestId);
+			await cancelOllamaRequest(localStorage.token, currentRequestId);
 			currentRequestId = null;
 		}
......
@@ -13,7 +13,7 @@
 } from '$lib/constants';
 import { WEBUI_NAME, config, user, models, settings } from '$lib/stores';

-import { cancelChatCompletion, generateChatCompletion } from '$lib/apis/ollama';
+import { cancelOllamaRequest, generateChatCompletion } from '$lib/apis/ollama';
 import { generateOpenAIChatCompletion } from '$lib/apis/openai';
 import { splitStream } from '$lib/utils';
@@ -52,7 +52,7 @@
 	// const cancelHandler = async () => {
 	// 	if (currentRequestId) {
-	// 		const res = await cancelChatCompletion(localStorage.token, currentRequestId);
+	// 		const res = await cancelOllamaRequest(localStorage.token, currentRequestId);
 	// 		currentRequestId = null;
 	// 		loading = false;
 	// 	}
@@ -95,7 +95,7 @@
 				const { value, done } = await reader.read();
 				if (done || stopResponseFlag) {
 					if (stopResponseFlag) {
-						await cancelChatCompletion(localStorage.token, currentRequestId);
+						await cancelOllamaRequest(localStorage.token, currentRequestId);
 					}

 					currentRequestId = null;
@@ -181,7 +181,7 @@
 				const { value, done } = await reader.read();
 				if (done || stopResponseFlag) {
 					if (stopResponseFlag) {
-						await cancelChatCompletion(localStorage.token, currentRequestId);
+						await cancelOllamaRequest(localStorage.token, currentRequestId);
 					}

 					currentRequestId = null;
......
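All of the `cancelOllamaRequest` call sites follow the same streaming pattern, collapsed in this view: read the response stream until it ends or the user stops it, cancelling the in-flight backend request in the stop case. A simplified sketch (assumed shape, with hypothetical helper parameters):

```ts
// Sketch of the surrounding loop (collapsed in the diff): consume a streamed
// completion until done or stopped; on stop, cancel the backend request too.
const consumeStream = async (
	reader: ReadableStreamDefaultReader<string>,
	isStopped: () => boolean,
	cancelRequest: () => Promise<void> // e.g. wraps cancelOllamaRequest(token, id)
) => {
	for (;;) {
		const { value, done } = await reader.read();
		if (done || isStopped()) {
			if (isStopped()) {
				await cancelRequest();
			}
			break;
		}
		// ...parse `value` and append the delta to the rendered response...
	}
};
```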