Unverified commit 2e4373c6 authored by Marclass, committed by GitHub

Merge branch 'ollama-webui:main' into main

parents 35ace577 b246c62d
@@ -206,25 +206,32 @@ const convertOpenAIMessages = (convo) => {
const mapping = convo['mapping'];
const messages = [];
let currentId = '';
let lastId = null;
for (let message_id in mapping) {
const message = mapping[message_id];
currentId = message_id;
if (message['message'] == null || message['message']['content']['parts'][0] == '') {
// Skip chat messages with no content
continue;
} else {
const new_chat = {
id: message_id,
parentId: messages.length > 0 && message['parent'] in mapping ? message['parent'] : null,
childrenIds: message['children'] || [],
role: message['message']?.['author']?.['role'] !== 'user' ? 'assistant' : 'user',
content: message['message']?.['content']?.['parts']?.[0] || '',
model: 'gpt-3.5-turbo',
done: true,
context: null
};
messages.push(new_chat);
try {
if (messages.length == 0 && (message['message'] == null ||
(message['message']['content']['parts']?.[0] == '' && message['message']['content']['text'] == null))) {
// Skip chat messages with no content
continue;
} else {
const new_chat = {
id: message_id,
parentId: lastId,
childrenIds: message['children'] || [],
role: message['message']?.['author']?.['role'] !== 'user' ? 'assistant' : 'user',
content: message['message']?.['content']?.['parts']?.[0] || message['message']?.['content']?.['text'] || '',
model: 'gpt-3.5-turbo',
done: true,
context: null
};
messages.push(new_chat);
lastId = currentId;
}
} catch (error) {
console.log("Error with", message, "\nError:", error);
}
}
@@ -245,13 +252,45 @@ const convertOpenAIMessages = (convo) => {
return chat;
};
const validateChat = (chat) => {
// Because ChatGPT sometimes has features we can't use, like DALL-E, or might have corrupted messages, we need to validate
const messages = chat.messages;
// Check if messages array is empty
if (messages.length === 0) {
return false;
}
// Last message's children should be an empty array
const lastMessage = messages[messages.length - 1];
if (lastMessage.childrenIds.length !== 0) {
return false;
}
// First message's parent should be null
const firstMessage = messages[0];
if (firstMessage.parentId !== null) {
return false;
}
// Every message's content should be a string
for (let message of messages) {
if (typeof message.content !== 'string') {
return false;
}
}
return true;
};
export const convertOpenAIChats = (_chats) => {
// Create a list of dictionaries with each conversation from import
const chats = [];
let failed = 0;
for (let convo of _chats) {
const chat = convertOpenAIMessages(convo);
if (Object.keys(chat.history.messages).length > 0) {
if (validateChat(chat)) {
chats.push({
id: convo['id'],
user_id: '',
@@ -259,7 +298,8 @@ export const convertOpenAIChats = (_chats) => {
chat: chat,
timestamp: convo['timestamp']
});
}
} else { failed++; }
}
console.log(failed, 'conversations could not be imported');
return chats;
};
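For reference, a minimal usage sketch of the import pipeline above, assuming the converter functions are in scope. The sample conversation object is illustrative, shaped the way convertOpenAIMessages reads ChatGPT's conversations.json (a mapping of node id to message node, plus id/title/timestamp metadata):

// Hypothetical one-conversation export, matching the fields the converter reads.
const sampleExport = [
	{
		id: 'conv-1',
		title: 'Example chat',
		timestamp: 1700000000,
		mapping: {
			'msg-1': {
				message: { author: { role: 'user' }, content: { parts: ['Hello'] } },
				parent: null,
				children: ['msg-2']
			},
			'msg-2': {
				message: { author: { role: 'assistant' }, content: { parts: ['Hi there!'] } },
				parent: 'msg-1',
				children: []
			}
		}
	}
];

// Converts every conversation, drops any that fail validateChat,
// and logs how many could not be imported.
const imported = convertOpenAIChats(sampleExport);
console.log(imported.length, 'conversations imported');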
@@ -20,7 +20,8 @@
models,
modelfiles,
prompts,
documents
documents,
tags
} from '$lib/stores';
import { REQUIRED_OLLAMA_VERSION, WEBUI_API_BASE_URL } from '$lib/constants';
@@ -29,6 +30,7 @@
import { checkVersion } from '$lib/utils';
import ShortcutsModal from '$lib/components/chat/ShortcutsModal.svelte';
import { getDocs } from '$lib/apis/documents';
import { getAllChatTags } from '$lib/apis/chats';
let ollamaVersion = '';
let loaded = false;
@@ -106,6 +108,7 @@
await modelfiles.set(await getModelfiles(localStorage.token));
await prompts.set(await getPrompts(localStorage.token));
await documents.set(await getDocs(localStorage.token));
await tags.set(await getAllChatTags(localStorage.token));
modelfiles.subscribe(async () => {
// should fetch models
......
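The layout now seeds a global tags store from the backend on load. Neither the store nor getAllChatTags appears in this diff; a minimal sketch, assuming a Svelte writable store and a GET wrapper under WEBUI_API_BASE_URL (the '/chats/tags/all' path and response shape are assumptions):

// $lib/stores (sketch): one writable store holding every known chat tag.
import { writable } from 'svelte/store';
export const tags = writable([]);

// $lib/apis/chats (sketch): fetch all tags across the user's chats.
import { WEBUI_API_BASE_URL } from '$lib/constants';

export const getAllChatTags = async (token) => {
	const res = await fetch(`${WEBUI_API_BASE_URL}/chats/tags/all`, {
		method: 'GET',
		headers: {
			'Content-Type': 'application/json',
			Authorization: `Bearer ${token}`
		}
	});
	if (!res.ok) throw await res.json();
	return res.json();
};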
@@ -6,11 +6,28 @@
import { goto } from '$app/navigation';
import { page } from '$app/stores';
import { models, modelfiles, user, settings, chats, chatId, config } from '$lib/stores';
import {
models,
modelfiles,
user,
settings,
chats,
chatId,
config,
tags as _tags
} from '$lib/stores';
import { copyToClipboard, splitStream } from '$lib/utils';
import { generateChatCompletion, generateTitle } from '$lib/apis/ollama';
import { createNewChat, getChatList, updateChatById } from '$lib/apis/chats';
import { generateChatCompletion, cancelChatCompletion, generateTitle } from '$lib/apis/ollama';
import {
addTagById,
createNewChat,
deleteTagById,
getAllChatTags,
getChatList,
getTagsById,
updateChatById
} from '$lib/apis/chats';
import { queryVectorDB } from '$lib/apis/rag';
import { generateOpenAIChatCompletion } from '$lib/apis/openai';
@@ -24,6 +41,8 @@
let autoScroll = true;
let processing = '';
let currentRequestId = null;
let selectedModels = [''];
let selectedModelfile = null;
@@ -45,6 +64,7 @@
}, {});
let chat = null;
let tags = [];
let title = '';
let prompt = '';
@@ -78,6 +98,11 @@
//////////////////////////
const initNewChat = async () => {
if (currentRequestId !== null) {
await cancelChatCompletion(localStorage.token, currentRequestId);
currentRequestId = null;
}
window.history.replaceState(history.state, '', `/`);
console.log('initNewChat');
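initNewChat now aborts any in-flight generation before resetting. The two wrappers it calls live in $lib/apis/ollama and are not shown in this diff; a sketch consistent with their call sites here — generateChatCompletion hands back [res, controller] so the caller can abort the fetch, and cancelChatCompletion asks the backend to stop the request id it streamed back (the endpoint paths and the OLLAMA_API_BASE_URL constant are assumptions):

// $lib/apis/ollama (sketch).
export const generateChatCompletion = async (token, body) => {
	// Return the AbortController alongside the response so the UI
	// can cancel the stream from the client side.
	const controller = new AbortController();
	const res = await fetch(`${OLLAMA_API_BASE_URL}/chat`, {
		signal: controller.signal,
		method: 'POST',
		headers: {
			'Content-Type': 'application/json',
			Authorization: `Bearer ${token}`
		},
		body: JSON.stringify(body)
	}).catch((error) => {
		console.log(error);
		return null;
	});
	return [res, controller];
};

export const cancelChatCompletion = async (token, requestId) => {
	// Assumed endpoint: tell the server to stop the generation it
	// tagged with this request id.
	const res = await fetch(`${OLLAMA_API_BASE_URL}/cancel/${requestId}`, {
		method: 'GET',
		headers: {
			'Content-Type': 'application/json',
			Authorization: `Bearer ${token}`
		}
	});
	return res.ok;
};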
@@ -145,7 +170,8 @@
role: 'user',
user: _user ?? undefined,
content: userPrompt,
files: files.length > 0 ? files : undefined
files: files.length > 0 ? files : undefined,
timestamp: Math.floor(Date.now() / 1000) // Unix epoch
};
// Add message to history and Set currentId to messageId
@@ -173,6 +199,7 @@
},
messages: messages,
history: history,
tags: [],
timestamp: Date.now()
});
await chats.set(await getChatList(localStorage.token));
@@ -256,7 +283,8 @@
childrenIds: [],
role: 'assistant',
content: '',
model: model
model: model,
timestamp: Math.floor(Date.now() / 1000) // Unix epoch
};
// Add message to history and Set currentId to messageId
@@ -277,7 +305,7 @@
// Scroll down
window.scrollTo({ top: document.body.scrollHeight });
const res = await generateChatCompletion(localStorage.token, {
const [res, controller] = await generateChatCompletion(localStorage.token, {
model: model,
messages: [
$settings.system
@@ -305,6 +333,8 @@
});
if (res && res.ok) {
console.log('controller', controller);
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
@@ -315,6 +345,14 @@
if (done || stopResponseFlag || _chatId !== $chatId) {
responseMessage.done = true;
messages = messages;
if (stopResponseFlag) {
controller.abort('User: Stop Response');
await cancelChatCompletion(localStorage.token, currentRequestId);
}
currentRequestId = null;
break;
}
@@ -330,52 +368,57 @@
throw data;
}
if (data.done == false) {
if (responseMessage.content == '' && data.message.content == '\n') {
continue;
if ('id' in data) {
console.log(data);
currentRequestId = data.id;
} else {
if (data.done == false) {
if (responseMessage.content == '' && data.message.content == '\n') {
continue;
} else {
responseMessage.content += data.message.content;
messages = messages;
}
} else {
responseMessage.content += data.message.content;
responseMessage.done = true;
if (responseMessage.content == '') {
responseMessage.error = true;
responseMessage.content =
'Oops! No text generated from Ollama. Please try again.';
}
responseMessage.context = data.context ?? null;
responseMessage.info = {
total_duration: data.total_duration,
load_duration: data.load_duration,
sample_count: data.sample_count,
sample_duration: data.sample_duration,
prompt_eval_count: data.prompt_eval_count,
prompt_eval_duration: data.prompt_eval_duration,
eval_count: data.eval_count,
eval_duration: data.eval_duration
};
messages = messages;
}
} else {
responseMessage.done = true;
if (responseMessage.content == '') {
responseMessage.error = true;
responseMessage.content =
'Oops! No text generated from Ollama. Please try again.';
}
responseMessage.context = data.context ?? null;
responseMessage.info = {
total_duration: data.total_duration,
load_duration: data.load_duration,
sample_count: data.sample_count,
sample_duration: data.sample_duration,
prompt_eval_count: data.prompt_eval_count,
prompt_eval_duration: data.prompt_eval_duration,
eval_count: data.eval_count,
eval_duration: data.eval_duration
};
messages = messages;
if ($settings.notificationEnabled && !document.hasFocus()) {
const notification = new Notification(
selectedModelfile
? `${
selectedModelfile.title.charAt(0).toUpperCase() +
selectedModelfile.title.slice(1)
}`
: `Ollama - ${model}`,
{
body: responseMessage.content,
icon: selectedModelfile?.imageUrl ?? '/favicon.png'
}
);
}
if ($settings.notificationEnabled && !document.hasFocus()) {
const notification = new Notification(
selectedModelfile
? `${
selectedModelfile.title.charAt(0).toUpperCase() +
selectedModelfile.title.slice(1)
}`
: `Ollama - ${model}`,
{
body: responseMessage.content,
icon: selectedModelfile?.imageUrl ?? '/favicon.png'
}
);
}
if ($settings.responseAutoCopy) {
copyToClipboard(responseMessage.content);
if ($settings.responseAutoCopy) {
copyToClipboard(responseMessage.content);
}
}
}
}
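The reader above consumes the response as newline-delimited JSON: TextDecoderStream yields text, and splitStream('\n') (from $lib/utils, body not shown in this diff) re-chunks it on newlines so each JSON.parse sees one complete line. A standard TransformStream sketch that matches this usage:

// $lib/utils (sketch): buffer incoming text and emit one chunk per
// occurrence of splitOn (here '\n'), flushing any remainder at the end.
export const splitStream = (splitOn) => {
	let buffer = '';
	return new TransformStream({
		transform(chunk, controller) {
			buffer += chunk;
			const parts = buffer.split(splitOn);
			parts.slice(0, -1).forEach((part) => controller.enqueue(part));
			buffer = parts[parts.length - 1];
		},
		flush(controller) {
			if (buffer) controller.enqueue(buffer);
		}
	});
};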
@@ -446,7 +489,8 @@
childrenIds: [],
role: 'assistant',
content: '',
model: model
model: model,
timestamp: Math.floor(Date.now() / 1000) // Unix epoch
};
history.messages[responseMessageId] = responseMessage;
@@ -648,6 +692,34 @@
}
};
const getTags = async () => {
return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
return [];
});
};
const addTag = async (tagName) => {
const res = await addTagById(localStorage.token, $chatId, tagName);
tags = await getTags();
chat = await updateChatById(localStorage.token, $chatId, {
tags: tags
});
_tags.set(await getAllChatTags(localStorage.token));
};
const deleteTag = async (tagName) => {
const res = await deleteTagById(localStorage.token, $chatId, tagName);
tags = await getTags();
chat = await updateChatById(localStorage.token, $chatId, {
tags: tags
});
_tags.set(await getAllChatTags(localStorage.token));
};
const setChatTitle = async (_chatId, _title) => {
if (_chatId === $chatId) {
title = _title;
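getTags, addTag and deleteTag above lean on three REST wrappers from $lib/apis/chats whose bodies are outside this diff; a minimal sketch consistent with their call sites (the URL paths and the tag_name payload field are assumptions):

// $lib/apis/chats (sketch). Paths under WEBUI_API_BASE_URL are assumed.
import { WEBUI_API_BASE_URL } from '$lib/constants';

export const getTagsById = async (token, id) => {
	const res = await fetch(`${WEBUI_API_BASE_URL}/chats/${id}/tags`, {
		method: 'GET',
		headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${token}` }
	});
	if (!res.ok) throw await res.json();
	return res.json();
};

export const addTagById = async (token, id, tagName) => {
	const res = await fetch(`${WEBUI_API_BASE_URL}/chats/${id}/tags`, {
		method: 'POST',
		headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${token}` },
		body: JSON.stringify({ tag_name: tagName })
	});
	if (!res.ok) throw await res.json();
	return res.json();
};

export const deleteTagById = async (token, id, tagName) => {
	const res = await fetch(`${WEBUI_API_BASE_URL}/chats/${id}/tags`, {
		method: 'DELETE',
		headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${token}` },
		body: JSON.stringify({ tag_name: tagName })
	});
	if (!res.ok) throw await res.json();
	return res.json();
};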
@@ -666,7 +738,7 @@
}}
/>
<Navbar {title} shareEnabled={messages.length > 0} {initNewChat} />
<Navbar {title} shareEnabled={messages.length > 0} {initNewChat} {tags} {addTag} {deleteTag} />
<div class="min-h-screen w-full flex justify-center">
<div class=" py-2.5 flex flex-col justify-between w-full">
<div class="max-w-2xl mx-auto w-full px-3 md:px-0 mt-10">
......
@@ -6,11 +6,29 @@
import { goto } from '$app/navigation';
import { page } from '$app/stores';
import { models, modelfiles, user, settings, chats, chatId, config } from '$lib/stores';
import {
models,
modelfiles,
user,
settings,
chats,
chatId,
config,
tags as _tags
} from '$lib/stores';
import { copyToClipboard, splitStream, convertMessagesToHistory } from '$lib/utils';
import { generateChatCompletion, generateTitle } from '$lib/apis/ollama';
import { createNewChat, getChatById, getChatList, updateChatById } from '$lib/apis/chats';
import {
addTagById,
createNewChat,
deleteTagById,
getAllChatTags,
getChatById,
getChatList,
getTagsById,
updateChatById
} from '$lib/apis/chats';
import { queryVectorDB } from '$lib/apis/rag';
import { generateOpenAIChatCompletion } from '$lib/apis/openai';
@@ -26,6 +44,8 @@
let autoScroll = true;
let processing = '';
let currentRequestId = null;
// let chatId = $page.params.id;
let selectedModels = [''];
let selectedModelfile = null;
@@ -47,6 +67,7 @@
}, {});
let chat = null;
let tags = [];
let title = '';
let prompt = '';
@@ -95,6 +116,7 @@
});
if (chat) {
tags = await getTags();
const chatContent = chat.chat;
if (chatContent) {
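When an existing chat loads, this page rebuilds the branching history with convertMessagesToHistory from $lib/utils (imported above, body not shown). A sketch matching the { messages, currentId } shape used throughout these pages, assuming a linear input list:

// $lib/utils (sketch): index a flat message list by id, chain
// parent/children links, and point currentId at the last message.
export const convertMessagesToHistory = (messages) => {
	const history = { messages: {}, currentId: null };
	let parentId = null;
	for (const message of messages) {
		const id = message.id ?? crypto.randomUUID();
		history.messages[id] = { ...message, id, parentId, childrenIds: [] };
		if (parentId !== null) {
			history.messages[parentId].childrenIds.push(id);
		}
		parentId = id;
		history.currentId = id;
	}
	return history;
};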
@@ -135,8 +157,7 @@
// Ollama functions
//////////////////////////
const submitPrompt = async (userPrompt, user) => {
console.log(userPrompt, user);
const submitPrompt = async (userPrompt, _user = null) => {
console.log('submitPrompt', $chatId);
if (selectedModels.includes('')) {
@@ -163,8 +184,10 @@
parentId: messages.length !== 0 ? messages.at(-1).id : null,
childrenIds: [],
role: 'user',
user: _user ?? undefined,
content: userPrompt,
files: files.length > 0 ? files : undefined
files: files.length > 0 ? files : undefined,
timestamp: Math.floor(Date.now() / 1000) // Unix epoch
};
// Add message to history and Set currentId to messageId
@@ -200,15 +223,7 @@
await chatId.set('local');
}
await tick();
} else if (chat.chat["models"] != selectedModels) {
// If the models are not saved in the DB, save selectedModels when the message is sent
chat = await updateChatById(localStorage.token, $chatId, {
models: selectedModels
});
await chats.set(await getChatList(localStorage.token));
}
// Reset chat input textarea
prompt = '';
files = [];
@@ -282,7 +297,8 @@
childrenIds: [],
role: 'assistant',
content: '',
model: model
model: model,
timestamp: Math.floor(Date.now() / 1000) // Unix epoch
};
// Add message to history and Set currentId to messageId
@@ -303,7 +319,7 @@
// Scroll down
window.scrollTo({ top: document.body.scrollHeight });
const res = await generateChatCompletion(localStorage.token, {
const [res, controller] = await generateChatCompletion(localStorage.token, {
model: model,
messages: [
$settings.system
@@ -341,6 +357,13 @@
if (done || stopResponseFlag || _chatId !== $chatId) {
responseMessage.done = true;
messages = messages;
if (stopResponseFlag) {
controller.abort('User: Stop Response');
await cancelChatCompletion(localStorage.token, currentRequestId);
}
currentRequestId = null;
break;
}
@@ -356,52 +379,57 @@
throw data;
}
if (data.done == false) {
if (responseMessage.content == '' && data.message.content == '\n') {
continue;
if ('id' in data) {
console.log(data);
currentRequestId = data.id;
} else {
if (data.done == false) {
if (responseMessage.content == '' && data.message.content == '\n') {
continue;
} else {
responseMessage.content += data.message.content;
messages = messages;
}
} else {
responseMessage.content += data.message.content;
responseMessage.done = true;
if (responseMessage.content == '') {
responseMessage.error = true;
responseMessage.content =
'Oops! No text generated from Ollama. Please try again.';
}
responseMessage.context = data.context ?? null;
responseMessage.info = {
total_duration: data.total_duration,
load_duration: data.load_duration,
sample_count: data.sample_count,
sample_duration: data.sample_duration,
prompt_eval_count: data.prompt_eval_count,
prompt_eval_duration: data.prompt_eval_duration,
eval_count: data.eval_count,
eval_duration: data.eval_duration
};
messages = messages;
}
} else {
responseMessage.done = true;
if (responseMessage.content == '') {
responseMessage.error = true;
responseMessage.content =
'Oops! No text generated from Ollama. Please try again.';
}
responseMessage.context = data.context ?? null;
responseMessage.info = {
total_duration: data.total_duration,
load_duration: data.load_duration,
sample_count: data.sample_count,
sample_duration: data.sample_duration,
prompt_eval_count: data.prompt_eval_count,
prompt_eval_duration: data.prompt_eval_duration,
eval_count: data.eval_count,
eval_duration: data.eval_duration
};
messages = messages;
if ($settings.notificationEnabled && !document.hasFocus()) {
const notification = new Notification(
selectedModelfile
? `${
selectedModelfile.title.charAt(0).toUpperCase() +
selectedModelfile.title.slice(1)
}`
: `Ollama - ${model}`,
{
body: responseMessage.content,
icon: selectedModelfile?.imageUrl ?? '/favicon.png'
}
);
}
if ($settings.notificationEnabled && !document.hasFocus()) {
const notification = new Notification(
selectedModelfile
? `${
selectedModelfile.title.charAt(0).toUpperCase() +
selectedModelfile.title.slice(1)
}`
: `Ollama - ${model}`,
{
body: responseMessage.content,
icon: selectedModelfile?.imageUrl ?? '/favicon.png'
}
);
}
if ($settings.responseAutoCopy) {
copyToClipboard(responseMessage.content);
if ($settings.responseAutoCopy) {
copyToClipboard(responseMessage.content);
}
}
}
}
@@ -472,7 +500,8 @@
childrenIds: [],
role: 'assistant',
content: '',
model: model
model: model,
timestamp: Math.floor(Date.now() / 1000) // Unix epoch
};
history.messages[responseMessageId] = responseMessage;
@@ -679,6 +708,34 @@
await chats.set(await getChatList(localStorage.token));
};
const getTags = async () => {
return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
return [];
});
};
const addTag = async (tagName) => {
const res = await addTagById(localStorage.token, $chatId, tagName);
tags = await getTags();
chat = await updateChatById(localStorage.token, $chatId, {
tags: tags
});
_tags.set(await getAllChatTags(localStorage.token));
};
const deleteTag = async (tagName) => {
const res = await deleteTagById(localStorage.token, $chatId, tagName);
tags = await getTags();
chat = await updateChatById(localStorage.token, $chatId, {
tags: tags
});
_tags.set(await getAllChatTags(localStorage.token));
};
onMount(async () => {
if (!($settings.saveChatHistory ?? true)) {
await goto('/');
@@ -696,14 +753,25 @@
<Navbar
{title}
shareEnabled={messages.length > 0}
initNewChat={() => {
initNewChat={async () => {
if (currentRequestId !== null) {
await cancelChatCompletion(localStorage.token, currentRequestId);
currentRequestId = null;
}
goto('/');
}}
{tags}
{addTag}
{deleteTag}
/>
<div class="min-h-screen w-full flex justify-center">
<div class=" py-2.5 flex flex-col justify-between w-full">
<div class="max-w-2xl mx-auto w-full px-3 md:px-0 mt-10">
<ModelSelector bind:selectedModels disabled={messages.length > 0 && !selectedModels.includes('')} />
<ModelSelector
bind:selectedModels
disabled={messages.length > 0 && !selectedModels.includes('')}
/>
</div>
<div class=" h-full mt-10 mb-32 w-full flex flex-col">
......
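Both pages now pass tags and the add/delete handlers down to Navbar; a sketch of the matching prop declarations on the Navbar side (the component path and default values are assumptions; prop names mirror the call sites above):

<!-- $lib/components/layout/Navbar.svelte (sketch) -->
<script>
	export let title = '';
	export let shareEnabled = false;
	export let initNewChat = () => {};

	// New in this change: tag state and handlers supplied by the pages.
	export let tags = [];
	export let addTag = (tagName) => {};
	export let deleteTag = (tagName) => {};
</script>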