Unverified Commit 2e4373c6 authored by Marclass's avatar Marclass Committed by GitHub
Browse files

Merge branch 'ollama-webui:main' into main

parents 35ace577 b246c62d
...@@ -206,25 +206,32 @@ const convertOpenAIMessages = (convo) => { ...@@ -206,25 +206,32 @@ const convertOpenAIMessages = (convo) => {
const mapping = convo['mapping']; const mapping = convo['mapping'];
const messages = []; const messages = [];
let currentId = ''; let currentId = '';
let lastId = null;
for (let message_id in mapping) { for (let message_id in mapping) {
const message = mapping[message_id]; const message = mapping[message_id];
currentId = message_id; currentId = message_id;
if (message['message'] == null || message['message']['content']['parts'][0] == '') { try {
if (messages.length == 0 && (message['message'] == null ||
(message['message']['content']['parts']?.[0] == '' && message['message']['content']['text'] == null))) {
// Skip chat messages with no content // Skip chat messages with no content
continue; continue;
} else { } else {
const new_chat = { const new_chat = {
id: message_id, id: message_id,
parentId: messages.length > 0 && message['parent'] in mapping ? message['parent'] : null, parentId: lastId,
childrenIds: message['children'] || [], childrenIds: message['children'] || [],
role: message['message']?.['author']?.['role'] !== 'user' ? 'assistant' : 'user', role: message['message']?.['author']?.['role'] !== 'user' ? 'assistant' : 'user',
content: message['message']?.['content']?.['parts']?.[0] || '', content: message['message']?.['content']?.['parts']?.[0] || message['message']?.['content']?.['text'] || '',
model: 'gpt-3.5-turbo', model: 'gpt-3.5-turbo',
done: true, done: true,
context: null context: null
}; };
messages.push(new_chat); messages.push(new_chat);
lastId = currentId;
}
} catch (error) {
console.log("Error with", message, "\nError:", error);
} }
} }
...@@ -245,13 +252,45 @@ const convertOpenAIMessages = (convo) => { ...@@ -245,13 +252,45 @@ const convertOpenAIMessages = (convo) => {
return chat; return chat;
}; };
const validateChat = (chat) => {
	// ChatGPT exports sometimes contain features we can't use (like DALL-E)
	// or might have corrupted messages, so validate before importing.
	// Returns true only when the message chain looks importable.
	const messages = chat.messages;

	// Guard: messages must be a non-empty array.
	if (!Array.isArray(messages) || messages.length === 0) {
		return false;
	}

	// The last message should be a leaf: its children list must be empty.
	// Optional chaining guards against a missing childrenIds field
	// (a corrupted message would otherwise throw here).
	const lastMessage = messages[messages.length - 1];
	if (lastMessage.childrenIds?.length !== 0) {
		return false;
	}

	// The first message should be a root: its parent must be null.
	const firstMessage = messages[0];
	if (firstMessage.parentId !== null) {
		return false;
	}

	// Every message's content must be a string.
	for (const message of messages) {
		if (typeof message.content !== 'string') {
			return false;
		}
	}

	return true;
};
export const convertOpenAIChats = (_chats) => { export const convertOpenAIChats = (_chats) => {
// Create a list of dictionaries with each conversation from import // Create a list of dictionaries with each conversation from import
const chats = []; const chats = [];
let failed = 0;
for (let convo of _chats) { for (let convo of _chats) {
const chat = convertOpenAIMessages(convo); const chat = convertOpenAIMessages(convo);
if (Object.keys(chat.history.messages).length > 0) { if (validateChat(chat)) {
chats.push({ chats.push({
id: convo['id'], id: convo['id'],
user_id: '', user_id: '',
...@@ -259,7 +298,8 @@ export const convertOpenAIChats = (_chats) => { ...@@ -259,7 +298,8 @@ export const convertOpenAIChats = (_chats) => {
chat: chat, chat: chat,
timestamp: convo['timestamp'] timestamp: convo['timestamp']
}); });
} else { failed ++}
} }
} console.log(failed, "Conversations could not be imported");
return chats; return chats;
}; };
...@@ -20,7 +20,8 @@ ...@@ -20,7 +20,8 @@
models, models,
modelfiles, modelfiles,
prompts, prompts,
documents documents,
tags
} from '$lib/stores'; } from '$lib/stores';
import { REQUIRED_OLLAMA_VERSION, WEBUI_API_BASE_URL } from '$lib/constants'; import { REQUIRED_OLLAMA_VERSION, WEBUI_API_BASE_URL } from '$lib/constants';
...@@ -29,6 +30,7 @@ ...@@ -29,6 +30,7 @@
import { checkVersion } from '$lib/utils'; import { checkVersion } from '$lib/utils';
import ShortcutsModal from '$lib/components/chat/ShortcutsModal.svelte'; import ShortcutsModal from '$lib/components/chat/ShortcutsModal.svelte';
import { getDocs } from '$lib/apis/documents'; import { getDocs } from '$lib/apis/documents';
import { getAllChatTags } from '$lib/apis/chats';
let ollamaVersion = ''; let ollamaVersion = '';
let loaded = false; let loaded = false;
...@@ -106,6 +108,7 @@ ...@@ -106,6 +108,7 @@
await modelfiles.set(await getModelfiles(localStorage.token)); await modelfiles.set(await getModelfiles(localStorage.token));
await prompts.set(await getPrompts(localStorage.token)); await prompts.set(await getPrompts(localStorage.token));
await documents.set(await getDocs(localStorage.token)); await documents.set(await getDocs(localStorage.token));
await tags.set(await getAllChatTags(localStorage.token));
modelfiles.subscribe(async () => { modelfiles.subscribe(async () => {
// should fetch models // should fetch models
......
...@@ -6,11 +6,28 @@ ...@@ -6,11 +6,28 @@
import { goto } from '$app/navigation'; import { goto } from '$app/navigation';
import { page } from '$app/stores'; import { page } from '$app/stores';
import { models, modelfiles, user, settings, chats, chatId, config } from '$lib/stores'; import {
models,
modelfiles,
user,
settings,
chats,
chatId,
config,
tags as _tags
} from '$lib/stores';
import { copyToClipboard, splitStream } from '$lib/utils'; import { copyToClipboard, splitStream } from '$lib/utils';
import { generateChatCompletion, generateTitle } from '$lib/apis/ollama'; import { generateChatCompletion, cancelChatCompletion, generateTitle } from '$lib/apis/ollama';
import { createNewChat, getChatList, updateChatById } from '$lib/apis/chats'; import {
addTagById,
createNewChat,
deleteTagById,
getAllChatTags,
getChatList,
getTagsById,
updateChatById
} from '$lib/apis/chats';
import { queryVectorDB } from '$lib/apis/rag'; import { queryVectorDB } from '$lib/apis/rag';
import { generateOpenAIChatCompletion } from '$lib/apis/openai'; import { generateOpenAIChatCompletion } from '$lib/apis/openai';
...@@ -24,6 +41,8 @@ ...@@ -24,6 +41,8 @@
let autoScroll = true; let autoScroll = true;
let processing = ''; let processing = '';
let currentRequestId = null;
let selectedModels = ['']; let selectedModels = [''];
let selectedModelfile = null; let selectedModelfile = null;
...@@ -45,6 +64,7 @@ ...@@ -45,6 +64,7 @@
}, {}); }, {});
let chat = null; let chat = null;
let tags = [];
let title = ''; let title = '';
let prompt = ''; let prompt = '';
...@@ -78,6 +98,11 @@ ...@@ -78,6 +98,11 @@
////////////////////////// //////////////////////////
const initNewChat = async () => { const initNewChat = async () => {
if (currentRequestId !== null) {
await cancelChatCompletion(localStorage.token, currentRequestId);
currentRequestId = null;
}
window.history.replaceState(history.state, '', `/`); window.history.replaceState(history.state, '', `/`);
console.log('initNewChat'); console.log('initNewChat');
...@@ -145,7 +170,8 @@ ...@@ -145,7 +170,8 @@
role: 'user', role: 'user',
user: _user ?? undefined, user: _user ?? undefined,
content: userPrompt, content: userPrompt,
files: files.length > 0 ? files : undefined files: files.length > 0 ? files : undefined,
timestamp: Math.floor(Date.now() / 1000) // Unix epoch
}; };
// Add message to history and Set currentId to messageId // Add message to history and Set currentId to messageId
...@@ -173,6 +199,7 @@ ...@@ -173,6 +199,7 @@
}, },
messages: messages, messages: messages,
history: history, history: history,
tags: [],
timestamp: Date.now() timestamp: Date.now()
}); });
await chats.set(await getChatList(localStorage.token)); await chats.set(await getChatList(localStorage.token));
...@@ -256,7 +283,8 @@ ...@@ -256,7 +283,8 @@
childrenIds: [], childrenIds: [],
role: 'assistant', role: 'assistant',
content: '', content: '',
model: model model: model,
timestamp: Math.floor(Date.now() / 1000) // Unix epoch
}; };
// Add message to history and Set currentId to messageId // Add message to history and Set currentId to messageId
...@@ -277,7 +305,7 @@ ...@@ -277,7 +305,7 @@
// Scroll down // Scroll down
window.scrollTo({ top: document.body.scrollHeight }); window.scrollTo({ top: document.body.scrollHeight });
const res = await generateChatCompletion(localStorage.token, { const [res, controller] = await generateChatCompletion(localStorage.token, {
model: model, model: model,
messages: [ messages: [
$settings.system $settings.system
...@@ -305,6 +333,8 @@ ...@@ -305,6 +333,8 @@
}); });
if (res && res.ok) { if (res && res.ok) {
console.log('controller', controller);
const reader = res.body const reader = res.body
.pipeThrough(new TextDecoderStream()) .pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n')) .pipeThrough(splitStream('\n'))
...@@ -315,6 +345,14 @@ ...@@ -315,6 +345,14 @@
if (done || stopResponseFlag || _chatId !== $chatId) { if (done || stopResponseFlag || _chatId !== $chatId) {
responseMessage.done = true; responseMessage.done = true;
messages = messages; messages = messages;
if (stopResponseFlag) {
controller.abort('User: Stop Response');
await cancelChatCompletion(localStorage.token, currentRequestId);
}
currentRequestId = null;
break; break;
} }
...@@ -330,6 +368,10 @@ ...@@ -330,6 +368,10 @@
throw data; throw data;
} }
if ('id' in data) {
console.log(data);
currentRequestId = data.id;
} else {
if (data.done == false) { if (data.done == false) {
if (responseMessage.content == '' && data.message.content == '\n') { if (responseMessage.content == '' && data.message.content == '\n') {
continue; continue;
...@@ -380,6 +422,7 @@ ...@@ -380,6 +422,7 @@
} }
} }
} }
}
} catch (error) { } catch (error) {
console.log(error); console.log(error);
if ('detail' in error) { if ('detail' in error) {
...@@ -446,7 +489,8 @@ ...@@ -446,7 +489,8 @@
childrenIds: [], childrenIds: [],
role: 'assistant', role: 'assistant',
content: '', content: '',
model: model model: model,
timestamp: Math.floor(Date.now() / 1000) // Unix epoch
}; };
history.messages[responseMessageId] = responseMessage; history.messages[responseMessageId] = responseMessage;
...@@ -648,6 +692,34 @@ ...@@ -648,6 +692,34 @@
} }
}; };
// Fetch the tag list for the active chat; fall back to an empty
// list when the request fails.
const getTags = async () => {
	try {
		return await getTagsById(localStorage.token, $chatId);
	} catch (error) {
		return [];
	}
};
// Attach a tag to the current chat, refresh the local tag state,
// persist the updated tag list on the chat record, and update the
// app-wide tag store.
const addTag = async (tagName) => {
	await addTagById(localStorage.token, $chatId, tagName);
	tags = await getTags();

	// Persist the refreshed tag list on the chat itself.
	chat = await updateChatById(localStorage.token, $chatId, { tags });

	// Keep the global tag store in sync for the rest of the UI.
	_tags.set(await getAllChatTags(localStorage.token));
};
// Remove a tag from the current chat, refresh the local tag state,
// persist the updated tag list on the chat record, and update the
// app-wide tag store.
const deleteTag = async (tagName) => {
	await deleteTagById(localStorage.token, $chatId, tagName);
	tags = await getTags();

	// Persist the refreshed tag list on the chat itself.
	chat = await updateChatById(localStorage.token, $chatId, { tags });

	// Keep the global tag store in sync for the rest of the UI.
	_tags.set(await getAllChatTags(localStorage.token));
};
const setChatTitle = async (_chatId, _title) => { const setChatTitle = async (_chatId, _title) => {
if (_chatId === $chatId) { if (_chatId === $chatId) {
title = _title; title = _title;
...@@ -666,7 +738,7 @@ ...@@ -666,7 +738,7 @@
}} }}
/> />
<Navbar {title} shareEnabled={messages.length > 0} {initNewChat} /> <Navbar {title} shareEnabled={messages.length > 0} {initNewChat} {tags} {addTag} {deleteTag} />
<div class="min-h-screen w-full flex justify-center"> <div class="min-h-screen w-full flex justify-center">
<div class=" py-2.5 flex flex-col justify-between w-full"> <div class=" py-2.5 flex flex-col justify-between w-full">
<div class="max-w-2xl mx-auto w-full px-3 md:px-0 mt-10"> <div class="max-w-2xl mx-auto w-full px-3 md:px-0 mt-10">
......
...@@ -6,11 +6,29 @@ ...@@ -6,11 +6,29 @@
import { goto } from '$app/navigation'; import { goto } from '$app/navigation';
import { page } from '$app/stores'; import { page } from '$app/stores';
import { models, modelfiles, user, settings, chats, chatId, config } from '$lib/stores'; import {
models,
modelfiles,
user,
settings,
chats,
chatId,
config,
tags as _tags
} from '$lib/stores';
import { copyToClipboard, splitStream, convertMessagesToHistory } from '$lib/utils'; import { copyToClipboard, splitStream, convertMessagesToHistory } from '$lib/utils';
import { generateChatCompletion, generateTitle } from '$lib/apis/ollama'; import { generateChatCompletion, generateTitle } from '$lib/apis/ollama';
import { createNewChat, getChatById, getChatList, updateChatById } from '$lib/apis/chats'; import {
addTagById,
createNewChat,
deleteTagById,
getAllChatTags,
getChatById,
getChatList,
getTagsById,
updateChatById
} from '$lib/apis/chats';
import { queryVectorDB } from '$lib/apis/rag'; import { queryVectorDB } from '$lib/apis/rag';
import { generateOpenAIChatCompletion } from '$lib/apis/openai'; import { generateOpenAIChatCompletion } from '$lib/apis/openai';
...@@ -26,6 +44,8 @@ ...@@ -26,6 +44,8 @@
let autoScroll = true; let autoScroll = true;
let processing = ''; let processing = '';
let currentRequestId = null;
// let chatId = $page.params.id; // let chatId = $page.params.id;
let selectedModels = ['']; let selectedModels = [''];
let selectedModelfile = null; let selectedModelfile = null;
...@@ -47,6 +67,7 @@ ...@@ -47,6 +67,7 @@
}, {}); }, {});
let chat = null; let chat = null;
let tags = [];
let title = ''; let title = '';
let prompt = ''; let prompt = '';
...@@ -95,6 +116,7 @@ ...@@ -95,6 +116,7 @@
}); });
if (chat) { if (chat) {
tags = await getTags();
const chatContent = chat.chat; const chatContent = chat.chat;
if (chatContent) { if (chatContent) {
...@@ -135,8 +157,7 @@ ...@@ -135,8 +157,7 @@
// Ollama functions // Ollama functions
////////////////////////// //////////////////////////
const submitPrompt = async (userPrompt, user) => { const submitPrompt = async (userPrompt, _user = null) => {
console.log(userPrompt, user);
console.log('submitPrompt', $chatId); console.log('submitPrompt', $chatId);
if (selectedModels.includes('')) { if (selectedModels.includes('')) {
...@@ -163,8 +184,10 @@ ...@@ -163,8 +184,10 @@
parentId: messages.length !== 0 ? messages.at(-1).id : null, parentId: messages.length !== 0 ? messages.at(-1).id : null,
childrenIds: [], childrenIds: [],
role: 'user', role: 'user',
user: _user ?? undefined,
content: userPrompt, content: userPrompt,
files: files.length > 0 ? files : undefined files: files.length > 0 ? files : undefined,
timestamp: Math.floor(Date.now() / 1000) // Unix epoch
}; };
// Add message to history and Set currentId to messageId // Add message to history and Set currentId to messageId
...@@ -200,15 +223,7 @@ ...@@ -200,15 +223,7 @@
await chatId.set('local'); await chatId.set('local');
} }
await tick(); await tick();
} else if (chat.chat["models"] != selectedModels) {
// If model is not saved in DB, then save selectedmodel when message is sent
chat = await updateChatById(localStorage.token, $chatId, {
models: selectedModels
});
await chats.set(await getChatList(localStorage.token));
} }
// Reset chat input textarea // Reset chat input textarea
prompt = ''; prompt = '';
files = []; files = [];
...@@ -282,7 +297,8 @@ ...@@ -282,7 +297,8 @@
childrenIds: [], childrenIds: [],
role: 'assistant', role: 'assistant',
content: '', content: '',
model: model model: model,
timestamp: Math.floor(Date.now() / 1000) // Unix epoch
}; };
// Add message to history and Set currentId to messageId // Add message to history and Set currentId to messageId
...@@ -303,7 +319,7 @@ ...@@ -303,7 +319,7 @@
// Scroll down // Scroll down
window.scrollTo({ top: document.body.scrollHeight }); window.scrollTo({ top: document.body.scrollHeight });
const res = await generateChatCompletion(localStorage.token, { const [res, controller] = await generateChatCompletion(localStorage.token, {
model: model, model: model,
messages: [ messages: [
$settings.system $settings.system
...@@ -341,6 +357,13 @@ ...@@ -341,6 +357,13 @@
if (done || stopResponseFlag || _chatId !== $chatId) { if (done || stopResponseFlag || _chatId !== $chatId) {
responseMessage.done = true; responseMessage.done = true;
messages = messages; messages = messages;
if (stopResponseFlag) {
controller.abort('User: Stop Response');
await cancelChatCompletion(localStorage.token, currentRequestId);
}
currentRequestId = null;
break; break;
} }
...@@ -356,6 +379,10 @@ ...@@ -356,6 +379,10 @@
throw data; throw data;
} }
if ('id' in data) {
console.log(data);
currentRequestId = data.id;
} else {
if (data.done == false) { if (data.done == false) {
if (responseMessage.content == '' && data.message.content == '\n') { if (responseMessage.content == '' && data.message.content == '\n') {
continue; continue;
...@@ -406,6 +433,7 @@ ...@@ -406,6 +433,7 @@
} }
} }
} }
}
} catch (error) { } catch (error) {
console.log(error); console.log(error);
if ('detail' in error) { if ('detail' in error) {
...@@ -472,7 +500,8 @@ ...@@ -472,7 +500,8 @@
childrenIds: [], childrenIds: [],
role: 'assistant', role: 'assistant',
content: '', content: '',
model: model model: model,
timestamp: Math.floor(Date.now() / 1000) // Unix epoch
}; };
history.messages[responseMessageId] = responseMessage; history.messages[responseMessageId] = responseMessage;
...@@ -679,6 +708,34 @@ ...@@ -679,6 +708,34 @@
await chats.set(await getChatList(localStorage.token)); await chats.set(await getChatList(localStorage.token));
}; };
// Fetch the tag list for the active chat; fall back to an empty
// list when the request fails.
const getTags = async () => {
	try {
		return await getTagsById(localStorage.token, $chatId);
	} catch (error) {
		return [];
	}
};
// Attach a tag to the current chat, refresh the local tag state,
// persist the updated tag list on the chat record, and update the
// app-wide tag store.
const addTag = async (tagName) => {
	await addTagById(localStorage.token, $chatId, tagName);
	tags = await getTags();

	// Persist the refreshed tag list on the chat itself.
	chat = await updateChatById(localStorage.token, $chatId, { tags });

	// Keep the global tag store in sync for the rest of the UI.
	_tags.set(await getAllChatTags(localStorage.token));
};
// Remove a tag from the current chat, refresh the local tag state,
// persist the updated tag list on the chat record, and update the
// app-wide tag store.
const deleteTag = async (tagName) => {
	await deleteTagById(localStorage.token, $chatId, tagName);
	tags = await getTags();

	// Persist the refreshed tag list on the chat itself.
	chat = await updateChatById(localStorage.token, $chatId, { tags });

	// Keep the global tag store in sync for the rest of the UI.
	_tags.set(await getAllChatTags(localStorage.token));
};
onMount(async () => { onMount(async () => {
if (!($settings.saveChatHistory ?? true)) { if (!($settings.saveChatHistory ?? true)) {
await goto('/'); await goto('/');
...@@ -696,14 +753,25 @@ ...@@ -696,14 +753,25 @@
<Navbar <Navbar
{title} {title}
shareEnabled={messages.length > 0} shareEnabled={messages.length > 0}
initNewChat={() => { initNewChat={async () => {
if (currentRequestId !== null) {
await cancelChatCompletion(localStorage.token, currentRequestId);
currentRequestId = null;
}
goto('/'); goto('/');
}} }}
{tags}
{addTag}
{deleteTag}
/> />
<div class="min-h-screen w-full flex justify-center"> <div class="min-h-screen w-full flex justify-center">
<div class=" py-2.5 flex flex-col justify-between w-full"> <div class=" py-2.5 flex flex-col justify-between w-full">
<div class="max-w-2xl mx-auto w-full px-3 md:px-0 mt-10"> <div class="max-w-2xl mx-auto w-full px-3 md:px-0 mt-10">
<ModelSelector bind:selectedModels disabled={messages.length > 0 && !selectedModels.includes('')} /> <ModelSelector
bind:selectedModels
disabled={messages.length > 0 && !selectedModels.includes('')}
/>
</div> </div>
<div class=" h-full mt-10 mb-32 w-full flex flex-col"> <div class=" h-full mt-10 mb-32 w-full flex flex-col">
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment