Commit 1d0eaec3 authored by Timothy J. Baek
Browse files

refac: queryVectorDB renamed to queryCollection

parent 50f7b20a
......@@ -66,13 +66,13 @@ export const uploadWebToVectorDB = async (token: string, collection_name: string
export const queryVectorDB = async (
token: string,
collection_names: string[],
collection_name: string,
query: string,
k: number
) => {
let error = null;
const res = await fetch(`${RAG_API_BASE_URL}/query/collections`, {
const res = await fetch(`${RAG_API_BASE_URL}/query/collection`, {
method: 'POST',
headers: {
Accept: 'application/json',
......@@ -80,7 +80,7 @@ export const queryVectorDB = async (
authorization: `Bearer ${token}`
},
body: JSON.stringify({
collection_names: collection_names,
collection_name: collection_name,
query: query,
k: k
})
......
......@@ -28,7 +28,7 @@
getTagsById,
updateChatById
} from '$lib/apis/chats';
import { queryVectorDB } from '$lib/apis/rag';
import { queryCollection } from '$lib/apis/rag';
import { generateOpenAIChatCompletion } from '$lib/apis/openai';
import MessageInput from '$lib/components/chat/MessageInput.svelte';
......@@ -232,28 +232,26 @@
processing = 'Reading';
const query = history.messages[parentId].content;
let relevantContexts = await queryVectorDB(
localStorage.token,
docs.map((d) => d.collection_name),
query,
4
).catch((error) => {
console.log(error);
return null;
});
if (relevantContexts) {
relevantContexts = relevantContexts.filter((context) => context);
let relevantContexts = await Promise.all(
docs.map(async (doc) => {
return await queryCollection(localStorage.token, doc.collection_name, query, 4).catch(
(error) => {
console.log(error);
return null;
}
);
})
);
relevantContexts = relevantContexts.filter((context) => context);
const contextString = relevantContexts.reduce((a, context, i, arr) => {
return `${a}${context.documents.join(' ')}\n`;
}, '');
const contextString = relevantContexts.reduce((a, context, i, arr) => {
return `${a}${context.documents.join(' ')}\n`;
}, '');
console.log(contextString);
console.log(contextString);
history.messages[parentId].raContent = RAGTemplate(contextString, query);
history.messages[parentId].contexts = relevantContexts;
}
history.messages[parentId].raContent = RAGTemplate(contextString, query);
history.messages[parentId].contexts = relevantContexts;
await tick();
processing = '';
}
......
......@@ -29,7 +29,7 @@
getTagsById,
updateChatById
} from '$lib/apis/chats';
import { queryVectorDB } from '$lib/apis/rag';
import { queryCollection } from '$lib/apis/rag';
import { generateOpenAIChatCompletion } from '$lib/apis/openai';
import MessageInput from '$lib/components/chat/MessageInput.svelte';
......@@ -246,28 +246,26 @@
processing = 'Reading';
const query = history.messages[parentId].content;
let relevantContexts = await queryVectorDB(
localStorage.token,
docs.map((d) => d.collection_name),
query,
4
).catch((error) => {
console.log(error);
return null;
});
if (relevantContexts) {
relevantContexts = relevantContexts.filter((context) => context);
let relevantContexts = await Promise.all(
docs.map(async (doc) => {
return await queryCollection(localStorage.token, doc.collection_name, query, 4).catch(
(error) => {
console.log(error);
return null;
}
);
})
);
relevantContexts = relevantContexts.filter((context) => context);
const contextString = relevantContexts.reduce((a, context, i, arr) => {
return `${a}${context.documents.join(' ')}\n`;
}, '');
const contextString = relevantContexts.reduce((a, context, i, arr) => {
return `${a}${context.documents.join(' ')}\n`;
}, '');
console.log(contextString);
console.log(contextString);
history.messages[parentId].raContent = RAGTemplate(contextString, query);
history.messages[parentId].contexts = relevantContexts;
}
history.messages[parentId].raContent = RAGTemplate(contextString, query);
history.messages[parentId].contexts = relevantContexts;
await tick();
processing = '';
}
......
Markdown is supported
Attach a file by drag & drop or click to upload.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment