Unverified commit 0b62bbb5 authored by Chris, committed by GitHub

Merge branch 'dev' into feat/Teams_Incoming_Webhook

parents 427ca4e3 cb364f0a
import { v4 as uuidv4 } from 'uuid';
import sha256 from 'js-sha256';
import { getOllamaModels } from '$lib/apis/ollama';
import { getOpenAIModels } from '$lib/apis/openai';
import { getLiteLLMModels } from '$lib/apis/litellm';
export const getModels = async (token: string) => {
	// Fetch the model lists from all three backends in parallel; a failed
	// backend resolves to null instead of rejecting the whole Promise.all.
	let models = await Promise.all([
		getOllamaModels(token).catch((error) => {
			console.log(error);
			return null;
		}),
		getOpenAIModels(token).catch((error) => {
			console.log(error);
			return null;
		}),
		getLiteLLMModels(token).catch((error) => {
			console.log(error);
			return null;
		})
	]);

	// Drop backends that failed, then flatten the remaining lists, inserting an
	// { name: 'hr' } divider entry between each source.
	models = models
		.filter((models) => models)
		.reduce((a, e, i, arr) => a.concat(e, ...(i < arr.length - 1 ? [{ name: 'hr' }] : [])), []);

	return models;
};
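For illustration, with two reachable backends the merged list has roughly this shape (a sketch; fields beyond name follow whatever each backend returns, and the UI renders the 'hr' entries as separators):

// [
//   { name: 'llama2:latest', ... },  // Ollama models
//   { name: 'hr' },                  // divider between sources
//   { name: 'gpt-4', ... }           // OpenAI models
// ]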
//////////////////////////
// Helper functions
//////////////////////////
export const capitalizeFirstLetter = (string) => {
	return string.charAt(0).toUpperCase() + string.slice(1);
};
export const splitStream = (splitOn) => {
	let buffer = '';
	return new TransformStream({
......
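The TransformStream body is collapsed in the diff view above. A minimal sketch of what such a splitter typically looks like — buffering partial chunks and emitting complete segments on the delimiter (the committed implementation may differ):

export const splitStream = (splitOn: string) => {
	let buffer = '';
	return new TransformStream<string, string>({
		transform(chunk, controller) {
			buffer += chunk;
			const parts = buffer.split(splitOn);
			// Emit every complete segment; keep the trailing partial one buffered.
			parts.slice(0, -1).forEach((part) => controller.enqueue(part));
			buffer = parts[parts.length - 1];
		},
		flush(controller) {
			// Emit whatever is left once the upstream closes.
			if (buffer) controller.enqueue(buffer);
		}
	});
};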
import { cubicOut } from 'svelte/easing';
import type { TransitionConfig } from 'svelte/transition';
type FlyAndScaleParams = {
	y?: number;
	start?: number;
	duration?: number;
};

const defaultFlyAndScaleParams = { y: -8, start: 0.95, duration: 200 };

export const flyAndScale = (node: Element, params?: FlyAndScaleParams): TransitionConfig => {
	const style = getComputedStyle(node);
	const transform = style.transform === 'none' ? '' : style.transform;
	const withDefaults = { ...defaultFlyAndScaleParams, ...params };

	// Linearly remap a value from one range onto another.
	const scaleConversion = (valueA: number, scaleA: [number, number], scaleB: [number, number]) => {
		const [minA, maxA] = scaleA;
		const [minB, maxB] = scaleB;

		const percentage = (valueA - minA) / (maxA - minA);
		const valueB = percentage * (maxB - minB) + minB;

		return valueB;
	};

	// Serialize a style object into an inline CSS string, skipping undefined values.
	const styleToString = (style: Record<string, number | string | undefined>): string => {
		return Object.keys(style).reduce((str, key) => {
			if (style[key] === undefined) return str;
			return str + `${key}:${style[key]};`;
		}, '');
	};

	return {
		duration: withDefaults.duration ?? 200,
		delay: 0,
		css: (t) => {
			const y = scaleConversion(t, [0, 1], [withDefaults.y, 0]);
			const scale = scaleConversion(t, [0, 1], [withDefaults.start, 1]);

			return styleToString({
				transform: `${transform} translate3d(0, ${y}px, 0) scale(${scale})`,
				opacity: t
			});
		},
		easing: cubicOut
	};
};
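In a component, a transition like this would typically be applied as follows (a usage sketch, not part of this commit):

<script lang="ts">
	import { flyAndScale } from '$lib/utils/transitions';
	let open = true;
</script>

{#if open}
	<div transition:flyAndScale={{ y: -8, start: 0.95, duration: 200 }}>Menu content</div>
{/if}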
@@ -19,7 +19,7 @@
} from '$lib/stores';
import { copyToClipboard, splitStream } from '$lib/utils';
-import { generateChatCompletion, cancelChatCompletion, generateTitle } from '$lib/apis/ollama';
+import { generateChatCompletion, cancelOllamaRequest } from '$lib/apis/ollama';
import {
addTagById,
createNewChat,
@@ -30,14 +30,14 @@
updateChatById
} from '$lib/apis/chats';
import { queryCollection, queryDoc } from '$lib/apis/rag';
-import { generateOpenAIChatCompletion } from '$lib/apis/openai';
+import { generateOpenAIChatCompletion, generateTitle } from '$lib/apis/openai';
import MessageInput from '$lib/components/chat/MessageInput.svelte';
import Messages from '$lib/components/chat/Messages.svelte';
import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
import Navbar from '$lib/components/layout/Navbar.svelte';
import { RAGTemplate } from '$lib/utils/rag';
-import { LITELLM_API_BASE_URL, OPENAI_API_BASE_URL } from '$lib/constants';
+import { LITELLM_API_BASE_URL, OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL } from '$lib/constants';
import { WEBUI_BASE_URL } from '$lib/constants';
const i18n = getContext('i18n');
@@ -104,7 +104,7 @@
const initNewChat = async () => {
if (currentRequestId !== null) {
-await cancelChatCompletion(localStorage.token, currentRequestId);
+await cancelOllamaRequest(localStorage.token, currentRequestId);
currentRequestId = null;
}
window.history.replaceState(history.state, '', `/`);
@@ -372,7 +372,7 @@
if (stopResponseFlag) {
controller.abort('User: Stop Response');
-await cancelChatCompletion(localStorage.token, currentRequestId);
+await cancelOllamaRequest(localStorage.token, currentRequestId);
}
currentRequestId = null;
@@ -511,7 +511,8 @@
if (messages.length == 2 && messages.at(1).content !== '') {
window.history.replaceState(history.state, '', `/c/${_chatId}`);
-await generateChatTitle(_chatId, userPrompt);
+const _title = await generateChatTitle(userPrompt);
+await setChatTitle(_chatId, _title);
}
};
@@ -696,11 +697,8 @@
if (messages.length == 2) {
window.history.replaceState(history.state, '', `/c/${_chatId}`);
-if ($settings?.titleAutoGenerateModel) {
-await generateChatTitle(_chatId, userPrompt);
-} else {
-await setChatTitle(_chatId, userPrompt);
-}
+const _title = await generateChatTitle(userPrompt);
+await setChatTitle(_chatId, _title);
}
};
@@ -754,23 +752,46 @@
}
};
-const generateChatTitle = async (_chatId, userPrompt) => {
-if ($settings.titleAutoGenerate ?? true) {
+const generateChatTitle = async (userPrompt) => {
+if ($settings?.title?.auto ?? true) {
+const model = $models.find((model) => model.id === selectedModels[0]);
+const titleModelId =
+model?.external ?? false
+? $settings?.title?.modelExternal ?? selectedModels[0]
+: $settings?.title?.model ?? selectedModels[0];
+const titleModel = $models.find((model) => model.id === titleModelId);
+console.log(titleModel);
const title = await generateTitle(
localStorage.token,
-$settings?.titleGenerationPrompt ??
+$settings?.title?.prompt ??
$i18n.t(
"Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title':"
) + ' {{prompt}}',
-$settings?.titleAutoGenerateModel ?? selectedModels[0],
-userPrompt
+titleModelId,
+userPrompt,
+titleModel?.external ?? false
+? titleModel.source === 'litellm'
+? `${LITELLM_API_BASE_URL}/v1`
+: `${OPENAI_API_BASE_URL}`
+: `${OLLAMA_API_BASE_URL}/v1`
);
-if (title) {
-await setChatTitle(_chatId, title);
-}
+return title;
} else {
-await setChatTitle(_chatId, `${userPrompt}`);
+return `${userPrompt}`;
}
};
+const setChatTitle = async (_chatId, _title) => {
+if (_chatId === $chatId) {
+title = _title;
+}
+if ($settings.saveChatHistory ?? true) {
+chat = await updateChatById(localStorage.token, _chatId, { title: _title });
+await chats.set(await getChatList(localStorage.token));
+}
+};
@@ -801,17 +822,6 @@
_tags.set(await getAllChatTags(localStorage.token));
};
-const setChatTitle = async (_chatId, _title) => {
-if (_chatId === $chatId) {
-title = _title;
-}
-if ($settings.saveChatHistory ?? true) {
-chat = await updateChatById(localStorage.token, _chatId, { title: _title });
-await chats.set(await getChatList(localStorage.token));
-}
-};
</script>
<svelte:head>
......
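The net effect of the hunks above: generateTitle moves from $lib/apis/ollama to $lib/apis/openai, and the caller now passes an explicit OpenAI-compatible base URL (Ollama's /v1 endpoint, LiteLLM's /v1, or OpenAI itself) along with the chosen title model. Inferred from the call sites, the helper's shape is roughly the following (a sketch only; the actual implementation in $lib/apis/openai may differ):

export const generateTitle = async (
	token: string,
	template: string,
	model: string,
	prompt: string,
	url: string
): Promise<string> => {
	// Substitute the user's prompt into the title-generation template.
	const content = template.replace(/{{prompt}}/g, prompt);
	const res = await fetch(`${url}/chat/completions`, {
		method: 'POST',
		headers: {
			Authorization: `Bearer ${token}`,
			'Content-Type': 'application/json'
		},
		body: JSON.stringify({
			model,
			messages: [{ role: 'user', content }],
			stream: false
		})
	});
	const json = await res.json();
	return json?.choices?.[0]?.message?.content ?? 'New Chat';
};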
@@ -19,7 +19,7 @@
} from '$lib/stores';
import { copyToClipboard, splitStream, convertMessagesToHistory } from '$lib/utils';
-import { generateChatCompletion, generateTitle, cancelChatCompletion } from '$lib/apis/ollama';
+import { generateChatCompletion, cancelOllamaRequest } from '$lib/apis/ollama';
import {
addTagById,
createNewChat,
@@ -31,14 +31,19 @@
updateChatById
} from '$lib/apis/chats';
import { queryCollection, queryDoc } from '$lib/apis/rag';
-import { generateOpenAIChatCompletion } from '$lib/apis/openai';
+import { generateOpenAIChatCompletion, generateTitle } from '$lib/apis/openai';
import MessageInput from '$lib/components/chat/MessageInput.svelte';
import Messages from '$lib/components/chat/Messages.svelte';
import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
import Navbar from '$lib/components/layout/Navbar.svelte';
import { RAGTemplate } from '$lib/utils/rag';
-import { LITELLM_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
+import {
+LITELLM_API_BASE_URL,
+OPENAI_API_BASE_URL,
+OLLAMA_API_BASE_URL,
+WEBUI_BASE_URL
+} from '$lib/constants';
const i18n = getContext('i18n');
@@ -382,7 +387,7 @@
if (stopResponseFlag) {
controller.abort('User: Stop Response');
-await cancelChatCompletion(localStorage.token, currentRequestId);
+await cancelOllamaRequest(localStorage.token, currentRequestId);
}
currentRequestId = null;
@@ -521,7 +526,8 @@
if (messages.length == 2 && messages.at(1).content !== '') {
window.history.replaceState(history.state, '', `/c/${_chatId}`);
-await generateChatTitle(_chatId, userPrompt);
+const _title = await generateChatTitle(userPrompt);
+await setChatTitle(_chatId, _title);
}
};
@@ -706,11 +712,8 @@
if (messages.length == 2) {
window.history.replaceState(history.state, '', `/c/${_chatId}`);
-if ($settings?.titleAutoGenerateModel) {
-await generateChatTitle(_chatId, userPrompt);
-} else {
-await setChatTitle(_chatId, userPrompt);
-}
+const _title = await generateChatTitle(userPrompt);
+await setChatTitle(_chatId, _title);
}
};
@@ -719,6 +722,19 @@
console.log('stopResponse');
};
+const regenerateResponse = async () => {
+console.log('regenerateResponse');
+if (messages.length != 0 && messages.at(-1).done == true) {
+messages.splice(messages.length - 1, 1);
+messages = messages;
+let userMessage = messages.at(-1);
+let userPrompt = userMessage.content;
+await sendPrompt(userPrompt, userMessage.id);
+}
+};
const continueGeneration = async () => {
console.log('continueGeneration');
const _chatId = JSON.parse(JSON.stringify($chatId));
@@ -751,36 +767,35 @@
}
};
-const regenerateResponse = async () => {
-console.log('regenerateResponse');
-if (messages.length != 0 && messages.at(-1).done == true) {
-messages.splice(messages.length - 1, 1);
-messages = messages;
+const generateChatTitle = async (userPrompt) => {
+if ($settings?.title?.auto ?? true) {
+const model = $models.find((model) => model.id === selectedModels[0]);
-let userMessage = messages.at(-1);
-let userPrompt = userMessage.content;
+const titleModelId =
+model?.external ?? false
+? $settings?.title?.modelExternal ?? selectedModels[0]
+: $settings?.title?.model ?? selectedModels[0];
+const titleModel = $models.find((model) => model.id === titleModelId);
-await sendPrompt(userPrompt, userMessage.id);
-}
-};
-const generateChatTitle = async (_chatId, userPrompt) => {
-if ($settings.titleAutoGenerate ?? true) {
+console.log(titleModel);
const title = await generateTitle(
localStorage.token,
-$settings?.titleGenerationPrompt ??
+$settings?.title?.prompt ??
$i18n.t(
"Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title':"
) + ' {{prompt}}',
-$settings?.titleAutoGenerateModel ?? selectedModels[0],
-userPrompt
+titleModelId,
+userPrompt,
+titleModel?.external ?? false
+? titleModel.source === 'litellm'
+? `${LITELLM_API_BASE_URL}/v1`
+: `${OPENAI_API_BASE_URL}`
+: `${OLLAMA_API_BASE_URL}/v1`
);
-if (title) {
-await setChatTitle(_chatId, title);
-}
+return title;
} else {
-await setChatTitle(_chatId, `${userPrompt}`);
+return `${userPrompt}`;
}
};
@@ -789,8 +804,10 @@
title = _title;
}
+if ($settings.saveChatHistory ?? true) {
chat = await updateChatById(localStorage.token, _chatId, { title: _title });
await chats.set(await getChatList(localStorage.token));
+}
};
const getTags = async () => {
@@ -843,7 +860,7 @@
shareEnabled={messages.length > 0}
initNewChat={async () => {
if (currentRequestId !== null) {
-await cancelChatCompletion(localStorage.token, currentRequestId);
+await cancelOllamaRequest(localStorage.token, currentRequestId);
currentRequestId = null;
}
......
@@ -13,7 +13,7 @@
} from '$lib/constants';
import { WEBUI_NAME, config, user, models, settings } from '$lib/stores';
-import { cancelChatCompletion, generateChatCompletion } from '$lib/apis/ollama';
+import { cancelOllamaRequest, generateChatCompletion } from '$lib/apis/ollama';
import { generateOpenAIChatCompletion } from '$lib/apis/openai';
import { splitStream } from '$lib/utils';
@@ -52,7 +52,7 @@
// const cancelHandler = async () => {
// if (currentRequestId) {
-// const res = await cancelChatCompletion(localStorage.token, currentRequestId);
+// const res = await cancelOllamaRequest(localStorage.token, currentRequestId);
// currentRequestId = null;
// loading = false;
// }
@@ -95,7 +95,7 @@
const { value, done } = await reader.read();
if (done || stopResponseFlag) {
if (stopResponseFlag) {
-await cancelChatCompletion(localStorage.token, currentRequestId);
+await cancelOllamaRequest(localStorage.token, currentRequestId);
}
currentRequestId = null;
@@ -181,7 +181,7 @@
const { value, done } = await reader.read();
if (done || stopResponseFlag) {
if (stopResponseFlag) {
-await cancelChatCompletion(localStorage.token, currentRequestId);
+await cancelOllamaRequest(localStorage.token, currentRequestId);
}
currentRequestId = null;
......
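Across all three files, cancelChatCompletion is renamed to cancelOllamaRequest with an unchanged call signature (token, request ID). Inferred from those call sites, the helper is presumably along these lines (a sketch only; the exact route and method inside $lib/apis/ollama are assumptions):

import { OLLAMA_API_BASE_URL } from '$lib/constants';

export const cancelOllamaRequest = async (token: string, requestId: string) => {
	// Hypothetical cancel endpoint; the real path lives in $lib/apis/ollama.
	const res = await fetch(`${OLLAMA_API_BASE_URL}/cancel/${requestId}`, {
		method: 'GET',
		headers: { Authorization: `Bearer ${token}` }
	});
	return res.ok;
};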