"signatures/git@developer.sourcefind.cn:wangsen/mineru.git" did not exist on "ce67ccf83779f2519d7b3255fd94db25dabca908"
Commit 1ff0c9a9 authored by Timothy J. Baek

fix: continue generation

parent 62451466
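
The diff below switches model lookup from display name to `id` and passes the resolved model object, rather than a bare model-name string, into the prompt handlers. As a minimal sketch of the dispatch pattern the diff converges on — `modelStore`, `notifyError`, and the two handler calls are stand-ins for the component's `$models` store, `toast.error`, and the real `sendPromptOpenAI`/`sendPromptOllama`, not the actual component code:

// Sketch only — names are hypothetical stand-ins, not the component's code.
const dispatchPrompt = async (modelStore, modelId, prompt) => {
	// Resolve the model by id; the pre-fix lookup matched on `name`.
	const model = modelStore.filter((m) => m.id === modelId).at(0);

	if (!model) {
		notifyError(`Model ${modelId} not found`);
		return;
	}

	if (model.external) {
		// External (OpenAI-compatible) models get the whole object, so the
		// handler can pick a base URL from `model.source` (see the last hunk).
		await sendPromptOpenAI(model, prompt);
	} else {
		await sendPromptOllama(model, prompt);
	}
};
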
@@ -732,25 +732,26 @@
 			responseMessage.done = false;
 			await tick();
 
-			const modelTag = $models.filter((m) => m.name === responseMessage.model).at(0);
+			const model = $models.filter((m) => m.id === responseMessage.model).at(0);
 
-			if (modelTag?.external) {
+			if (model) {
+				if (model?.external) {
 					await sendPromptOpenAI(
-						responseMessage.model,
+						model,
 						history.messages[responseMessage.parentId].content,
 						responseMessage.id,
 						_chatId
 					);
-			} else if (modelTag) {
+				} else
 					await sendPromptOllama(
-						responseMessage.model,
+						model,
 						history.messages[responseMessage.parentId].content,
 						responseMessage.id,
 						_chatId
 					);
-			} else {
-				toast.error(`Model ${model} not found`);
-			}
+			} else {
+				toast.error(`Model ${modelId} not found`);
+			}
 		};
@@ -238,7 +238,6 @@
 			await sendPrompt(userPrompt, userMessageId);
 		}
 	};
-
 	const sendPrompt = async (prompt, parentId) => {
 		const _chatId = JSON.parse(JSON.stringify($chatId));
@@ -292,10 +291,10 @@
 		}
 
 		await Promise.all(
-			selectedModels.map(async (model) => {
-				console.log(model);
-				const modelTag = $models.filter((m) => m.name === model).at(0);
+			selectedModels.map(async (modelId) => {
+				const model = $models.filter((m) => m.id === modelId).at(0);
 
+				if (model) {
 					// Create response message
 					let responseMessageId = uuidv4();
 					let responseMessage = {
@@ -304,7 +303,7 @@
 					childrenIds: [],
 					role: 'assistant',
 					content: '',
-					model: model,
+					model: model.id,
 					timestamp: Math.floor(Date.now() / 1000) // Unix epoch
 				};
@@ -320,12 +319,13 @@
 					];
 				}
 
-				if (modelTag?.external) {
+				if (model?.external) {
 					await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
-				} else if (modelTag) {
+				} else if (model) {
 					await sendPromptOllama(model, prompt, responseMessageId, _chatId);
+				}
 				} else {
-					toast.error(`Model ${model} not found`);
+					toast.error(`Model ${modelId} not found`);
 				}
 			})
 		);
@@ -334,6 +334,7 @@
 	};
 
 	const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
+		model = model.id;
 		const responseMessage = history.messages[responseMessageId];
 
 		// Wait until history/message have been updated
@@ -543,11 +544,12 @@
 	const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
 		const responseMessage = history.messages[responseMessageId];
 
 		scrollToBottom();
 
-		const res = await generateOpenAIChatCompletion(localStorage.token, {
-			model: model,
+		const res = await generateOpenAIChatCompletion(
+			localStorage.token,
+			{
+				model: model.id,
 			stream: true,
 			messages: [
 				$settings.system
@@ -593,7 +595,9 @@
 				num_ctx: $settings?.options?.num_ctx ?? undefined,
 				frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
 				max_tokens: $settings?.options?.num_predict ?? undefined
-		});
+			},
+			model.source === 'litellm' ? `${LITELLM_API_BASE_URL}/v1` : `${OPENAI_API_BASE_URL}`
+		);
 
 		if (res && res.ok) {
 			const reader = res.body
@@ -704,7 +708,6 @@
 			await setChatTitle(_chatId, userPrompt);
 		}
 	};
-
 	const stopResponse = () => {
 		stopResponseFlag = true;
 		console.log('stopResponse');
@@ -719,25 +722,26 @@
 			responseMessage.done = false;
 			await tick();
 
-			const modelTag = $models.filter((m) => m.name === responseMessage.model).at(0);
+			const model = $models.filter((m) => m.id === responseMessage.model).at(0);
 
-			if (modelTag?.external) {
+			if (model) {
+				if (model?.external) {
 					await sendPromptOpenAI(
-						responseMessage.model,
+						model,
 						history.messages[responseMessage.parentId].content,
 						responseMessage.id,
 						_chatId
 					);
-			} else if (modelTag) {
+				} else
 					await sendPromptOllama(
-						responseMessage.model,
+						model,
 						history.messages[responseMessage.parentId].content,
 						responseMessage.id,
 						_chatId
 					);
-			} else {
-				toast.error(`Model ${model} not found`);
-			}
+			} else {
+				toast.error(`Model ${modelId} not found`);
+			}
 		};
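
The one behavioral addition beyond the lookup change is the base-URL selection in sendPromptOpenAI. Reduced to a helper, it amounts to the sketch below; the constant names are the ones referenced in the diff, imported elsewhere in the component:

// Sketch: models proxied through LiteLLM use its OpenAI-compatible /v1
// endpoint; everything else falls back to the default OpenAI base URL.
const openAIBaseURL = (model) =>
	model.source === 'litellm' ? `${LITELLM_API_BASE_URL}/v1` : `${OPENAI_API_BASE_URL}`;

// Matching the call shape in the diff (third argument is the base URL):
// generateOpenAIChatCompletion(localStorage.token, body, openAIBaseURL(model));
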