"src/lib/vscode:/vscode.git/clone" did not exist on "adf9ccb5eb419785918882fdcf60f73d5e3f4e33"
Commit b565301a authored by Timothy J. Baek

feat: knowledge integration

parent 78272aed
@@ -575,8 +575,6 @@
	const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
		let _response = null;
-		model = model.id;
		const responseMessage = history.messages[responseMessageId];
		// Wait until history/message have been updated
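The removed `model = model.id;` previously overwrote the model object with its id string; keeping the whole object is what lets the later hunks read both `model.id` and `model.info.meta.knowledge`. A rough sketch of the shape this diff assumes for `model` (only the fields the changed code touches; anything beyond those is hypothetical):

```ts
// Trimmed-down, assumed shape of the `model` object used in this diff.
// Only fields referenced by the changed code are listed; the real type is richer.
interface KnowledgeItem {
	name: string;
	type: string; // e.g. 'doc', 'collection', 'web_search_results'
}

interface ChatModel {
	id: string; // now passed explicitly to generateChatCompletion / chatCompletedHandler
	info: {
		meta: {
			knowledge?: KnowledgeItem[]; // model-level knowledge attached in the model editor
		};
	};
}
```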
@@ -634,17 +632,29 @@
				}
			});
-		const docs = messages
-			.filter((message) => message?.files ?? null)
-			.map((message) =>
-				message.files.filter((item) =>
-					['doc', 'collection', 'web_search_results'].includes(item.type)
-				)
-			)
-			.flat(1);
+		let docs = [];
+		if (model.info.meta.knowledge) {
+			docs = model.info.meta.knowledge;
+		}
+		docs = [
+			...docs,
+			...messages
+				.filter((message) => message?.files ?? null)
+				.map((message) =>
+					message.files.filter((item) =>
+						['doc', 'collection', 'web_search_results'].includes(item.type)
+					)
+				)
+				.flat(1)
+		].filter(
+			(item, index, array) =>
+				array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
+		);
		const [res, controller] = await generateChatCompletion(localStorage.token, {
-			model: model,
+			model: model.id,
			messages: messagesBody,
			options: {
				...($settings.params ?? {}),
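The added block merges model-level knowledge entries with the per-message file attachments and then de-duplicates by comparing serialized entries. A self-contained sketch of that merge-and-dedupe pattern, with illustrative names and data that are not part of the commit:

```ts
// Standalone sketch of the merge-and-dedupe pattern above; names and data are illustrative.
type Doc = { name: string; type: string };

function collectDocs(knowledge: Doc[], messageFiles: Doc[][]): Doc[] {
	// Keep only the attachment types the chat flow cares about, then flatten.
	const fromMessages = messageFiles
		.map((files) =>
			files.filter((item) => ['doc', 'collection', 'web_search_results'].includes(item.type))
		)
		.flat(1);

	// Prepend model-level knowledge, then drop duplicates by serialized value:
	// an entry survives only if it is the first occurrence of its JSON form.
	return [...knowledge, ...fromMessages].filter(
		(item, index, array) =>
			array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
	);
}

// Example: a collection attached both at the model level and in a message appears once.
const docs = collectDocs(
	[{ name: 'handbook', type: 'collection' }],
	[[{ name: 'handbook', type: 'collection' }], [{ name: 'notes.txt', type: 'doc' }]]
);
// docs => [{ name: 'handbook', type: 'collection' }, { name: 'notes.txt', type: 'doc' }]
```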
@@ -682,7 +692,7 @@
					controller.abort('User: Stop Response');
				} else {
					const messages = createMessagesList(responseMessageId);
-					await chatCompletedHandler(model, messages);
+					await chatCompletedHandler(model.id, messages);
				}
				_response = responseMessage.content;
@@ -743,7 +753,7 @@
							selectedModelfile.title.charAt(0).toUpperCase() +
							selectedModelfile.title.slice(1)
						}`
-					: `${model}`,
+					: `${model.id}`,
					{
						body: responseMessage.content,
						icon: selectedModelfile?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`
@@ -830,16 +840,26 @@
		let _response = null;
		const responseMessage = history.messages[responseMessageId];
-		const docs = messages
-			.filter((message) => message?.files ?? null)
-			.map((message) =>
-				message.files.filter((item) =>
-					['doc', 'collection', 'web_search_results'].includes(item.type)
-				)
-			)
-			.flat(1);
+		let docs = [];
+		if (model.info.meta.knowledge) {
+			docs = model.info.meta.knowledge;
+		}
+		console.log(docs);
+		docs = [
+			...docs,
+			...messages
+				.filter((message) => message?.files ?? null)
+				.map((message) =>
+					message.files.filter((item) =>
+						['doc', 'collection', 'web_search_results'].includes(item.type)
+					)
+				)
+				.flat(1)
+		].filter(
+			(item, index, array) =>
+				array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
+		);
		scrollToBottom();
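The OpenAI path now builds `docs` the same way (with an extra `console.log(docs)` debug call). One property of the stringify-based de-duplication worth noting: two entries only count as equal when they serialize to the same string, so identical fields in a different key order are kept as separate items. A tiny illustration with made-up values:

```ts
const a = { name: 'handbook', type: 'collection' };
const b = { type: 'collection', name: 'handbook' };

// Same content, different key order: the JSON.stringify comparison treats them as distinct.
console.log(JSON.stringify(a) === JSON.stringify(b)); // false
```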
@@ -971,7 +991,7 @@
			}
			if ($settings.notificationEnabled && !document.hasFocus()) {
-				const notification = new Notification(`OpenAI ${model}`, {
+				const notification = new Notification(`${model.id}`, {
					body: responseMessage.content,
					icon: `${WEBUI_BASE_URL}/static/favicon.png`
				});
@@ -67,7 +67,13 @@
	class="flex gap-2.5 items-center px-3 py-2 text-sm cursor-pointer hover:bg-gray-50 dark:hover:bg-gray-800 rounded-md"
	on:click={() => {
		if (!knowledge.find((k) => k.name === item.name)) {
-			knowledge = [...knowledge, item];
+			knowledge = [
+				...knowledge,
+				{
+					...item,
+					type: item?.type ?? 'doc'
+				}
+			];
		}
	}}
>
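In the knowledge selector, a newly picked item is now stored with an explicit `type`, falling back to `'doc'` when the item does not carry one, and duplicates are still skipped by name. A minimal sketch of that handler logic in isolation (the `knowledge` and `item` shapes are assumptions):

```ts
type KnowledgeItem = { name: string; type?: string };

// Mirrors the on:click handler above: skip items already selected by name,
// and default a missing type to 'doc' when appending.
function addKnowledge(knowledge: KnowledgeItem[], item: KnowledgeItem): KnowledgeItem[] {
	if (knowledge.find((k) => k.name === item.name)) {
		return knowledge; // already in the list; leave it unchanged
	}
	return [...knowledge, { ...item, type: item?.type ?? 'doc' }];
}
```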