chenpangpang / open-webui · Commits

Commit 7349e1d8
authored Jun 10, 2024 by Timothy J. Baek

refac

parent bf5a6229
Showing 1 changed file with 71 additions and 58 deletions.

src/lib/components/chat/Chat.svelte (+71, -58)
@@ -358,8 +358,61 @@
 			// Wait until history/message have been updated
 			await tick();
 
+			// Send prompt
+			_responses = await sendPrompt(userPrompt, userMessageId);
+		}
+
+		return _responses;
+	};
+
+	const sendPrompt = async (prompt, parentId, modelId = null) => {
+		let _responses = [];
+
+		// If modelId is provided, use it, else use selected model
+		let selectedModelIds = modelId
+			? [modelId]
+			: atSelectedModel !== undefined
+			? [atSelectedModel.id]
+			: selectedModels;
+
+		// Create response messages for each selected model
+		const responseMessageIds = {};
+		for (const modelId of selectedModelIds) {
+			const model = $models.filter((m) => m.id === modelId).at(0);
+
+			if (model) {
+				let responseMessageId = uuidv4();
+				let responseMessage = {
+					parentId: parentId,
+					id: responseMessageId,
+					childrenIds: [],
+					role: 'assistant',
+					content: '',
+					model: model.id,
+					modelName: model.name ?? model.id,
+					userContext: null,
+					timestamp: Math.floor(Date.now() / 1000) // Unix epoch
+				};
+
+				// Add message to history and Set currentId to messageId
+				history.messages[responseMessageId] = responseMessage;
+				history.currentId = responseMessageId;
+
+				// Append messageId to childrenIds of parent message
+				if (parentId !== null) {
+					history.messages[parentId].childrenIds = [
+						...history.messages[parentId].childrenIds,
+						responseMessageId
+					];
+				}
+
+				responseMessageIds[modelId] = responseMessageId;
+			}
+		}
+		await tick();
+
 		// Create new chat if only one message in messages
-		if (messages.length == 1) {
+		if (messages.length == 2) {
 			if ($settings.saveChatHistory ?? true) {
 				chat = await createNewChat(localStorage.token, {
 					id: $chatId,
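
This hunk moves all placeholder-response creation to the top of sendPrompt: before any request is dispatched, one empty assistant message per selected model is written into history, and its id is recorded in responseMessageIds. Since the user message plus at least one placeholder are then already in messages when the new-chat check runs, the threshold presumably changes from messages.length == 1 to == 2. Below is a minimal standalone sketch of that pattern, not the component's actual code: history and fakeGenerate are hypothetical stand-ins for the component's reactive state and API helpers.

const { randomUUID } = require('node:crypto');

// Hypothetical stand-ins for the component's reactive state and API helpers.
const history = { messages: {}, currentId: null };
const fakeGenerate = async (modelId, prompt) => `(${modelId}) echo: ${prompt}`;

const sendPrompt = async (prompt, parentId, selectedModelIds) => {
	// 1) Create one placeholder assistant message per model, up front.
	const responseMessageIds = {};
	for (const modelId of selectedModelIds) {
		const id = randomUUID();
		history.messages[id] = {
			parentId,
			id,
			childrenIds: [],
			role: 'assistant',
			content: '',
			model: modelId,
			timestamp: Math.floor(Date.now() / 1000) // Unix epoch
		};
		history.currentId = id;
		responseMessageIds[modelId] = id;
	}

	// 2) Fill the placeholders concurrently, looking each one up by model id.
	await Promise.all(
		selectedModelIds.map(async (modelId) => {
			const responseMessage = history.messages[responseMessageIds[modelId]];
			responseMessage.content = await fakeGenerate(modelId, prompt);
		})
	);

	return selectedModelIds.map((id) => history.messages[responseMessageIds[id]]);
};

// Example: two "models" answering the same prompt.
sendPrompt('hello', null, ['model-a', 'model-b']).then((out) => console.log(out));
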
@@ -381,24 +434,11 @@
 			}
 			await tick();
 		}
 
-		// Send prompt
-		_responses = await sendPrompt(userPrompt, userMessageId);
-	}
-
-	return _responses;
-};
-
-const sendPrompt = async (prompt, parentId, modelId = null) => {
-	let _responses = [];
-
 		const _chatId = JSON.parse(JSON.stringify($chatId));
 		await Promise.all(
-			(modelId
-				? [modelId]
-				: atSelectedModel !== undefined
-				? [atSelectedModel.id]
-				: selectedModels
-			).map(async (modelId) => {
+			selectedModelIds.map(async (modelId) => {
 				console.log('modelId', modelId);
 				const model = $models.filter((m) => m.id === modelId).at(0);
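
In this hunk the inline model-selection ternary disappears from the Promise.all call; the list now comes from the selectedModelIds variable computed at the top of sendPrompt in the first hunk. The selection precedence itself is unchanged. A small hedged illustration with made-up model ids (atSelectedModel presumably being the model picked with '@' in the prompt box):

// Same precedence as the ternary in the diff: an explicit modelId overrides
// an '@'-selected model, which overrides the models chosen in the UI.
const pickModelIds = (modelId, atSelectedModel, selectedModels) =>
	modelId
		? [modelId]
		: atSelectedModel !== undefined
		? [atSelectedModel.id]
		: selectedModels;

console.log(pickModelIds(null, undefined, ['llama3', 'gpt-4o'])); // ['llama3', 'gpt-4o']
console.log(pickModelIds(null, { id: 'mistral' }, ['llama3'])); // ['mistral']
console.log(pickModelIds('gemma', { id: 'mistral' }, ['llama3'])); // ['gemma']
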
@@ -416,33 +456,8 @@
 					);
 				}
 
-				// Create response message
-				let responseMessageId = uuidv4();
-				let responseMessage = {
-					parentId: parentId,
-					id: responseMessageId,
-					childrenIds: [],
-					role: 'assistant',
-					content: '',
-					model: model.id,
-					modelName: model.name ?? model.id,
-					userContext: null,
-					timestamp: Math.floor(Date.now() / 1000) // Unix epoch
-				};
-
-				// Add message to history and Set currentId to messageId
-				history.messages[responseMessageId] = responseMessage;
-				history.currentId = responseMessageId;
-
-				// Append messageId to childrenIds of parent message
-				if (parentId !== null) {
-					history.messages[parentId].childrenIds = [
-						...history.messages[parentId].childrenIds,
-						responseMessageId
-					];
-				}
-				await tick();
+				let responseMessageId = responseMessageIds[modelId];
+				let responseMessage = history.messages[responseMessageId];
 
 				let userContext = null;
 				if ($settings?.memory ?? false) {
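
This hunk is the other half of the move: inside the per-model callback the response message is no longer created on the spot, it is looked up from the placeholders prepared earlier. Because history.messages holds the object itself, the lookup returns a live reference, so later writes to responseMessage are visible in history as well. A tiny illustration with hypothetical ids:

// The lookup returns a reference to the placeholder, not a copy.
const history = { messages: { 'msg-1': { id: 'msg-1', role: 'assistant', content: '' } } };
const responseMessageIds = { llama3: 'msg-1' };

const responseMessageId = responseMessageIds['llama3'];
const responseMessage = history.messages[responseMessageId];

responseMessage.content = 'streamed tokens...';
console.log(history.messages['msg-1'].content); // 'streamed tokens...'
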
@@ -451,7 +466,6 @@
 						toast.error(error);
 						return null;
 					});
-
 				if (res) {
 					if (res.documents[0].length > 0) {
 						userContext = res.documents.reduce((acc, doc, index) => {
@@ -477,7 +491,6 @@
 				}
-
 
 				let _response = null;
 				if (model?.owned_by === 'openai') {
 					_response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
 				} else if (model) {