Commit 715a4a6c authored by Jun Siang Cheah

Merge remote-tracking branch 'upstream/dev' into feat/model-config

parents fa4c2249 120f8d77
......@@ -11,6 +11,7 @@
"About": "Про програму",
"Account": "Обліковий запис",
"Accurate information": "Точна інформація",
"Add": "",
"Add a model": "Додати модель",
"Add a model tag name": "Додати ім'я тегу моделі",
"Add a short description about what this modelfile does": "Додати короткий опис того, що робить цей файл моделі",
......@@ -19,6 +20,7 @@
"Add custom prompt": "Додати користувацьку підказку",
"Add Docs": "Додати документи",
"Add Files": "Додати файли",
"Add Memory": "",
"Add message": "Додати повідомлення",
"Add Model": "Додати модель",
"Add Tags": "додати теги",
......@@ -49,6 +51,7 @@
"Archived Chats": "Архівовані чати",
"are allowed - Activate this command by typing": "дозволено - активізуйте цю команду набором",
"Are you sure?": "Ви впевнені?",
"As you chat with LLMs, the details and preferences it remembers will be shown here.": "",
"Attach file": "Прикріпити файл",
"Attention to detail": "Увага до деталей",
"Audio": "Аудіо",
......@@ -62,7 +65,6 @@
"Bad Response": "Неправильна відповідь",
"before": "до того, як",
"Being lazy": "Не поспішати",
"Beta": "",
"Builder Mode": "Режим конструктора",
"Bypass SSL verification for Websites": "Обхід SSL-перевірки для веб-сайтів",
"Cancel": "Скасувати",
......@@ -173,6 +175,7 @@
"Enabled": "Увімкнено",
"Ensure your CSV file includes 4 columns in this order: Name, Email, Password, Role.": "Переконайтеся, що ваш CSV-файл містить 4 колонки в такому порядку: Ім'я, Email, Пароль, Роль.",
"Enter {{role}} message here": "Введіть повідомлення {{role}} тут",
"Enter a detail about yourself for your LLMs to recall": "",
"Enter Chunk Overlap": "Введіть перекриття фрагменту",
"Enter Chunk Size": "Введіть розмір фрагменту",
"Enter Image Size (e.g. 512x512)": "Введіть розмір зображення (напр., 512x512)",
......@@ -488,6 +491,7 @@
"variable": "змінна",
"variable to have them replaced with clipboard content.": "змінна, щоб замінити їх вмістом буфера обміну.",
"Version": "Версія",
"View": "",
"Warning: If you update or change your embedding model, you will need to re-import all documents.": "Попередження: Якщо ви оновлюєте або змінюєте модель вбудовування, вам потрібно буде повторно імпортувати всі документи.",
"Web": "Веб",
"Web Loader Settings": "Налаштування веб-завантажувача",
......
......@@ -11,6 +11,7 @@
"About": "关于",
"Account": "账户",
"Accurate information": "准确信息",
"Add": "",
"Add a model": "添加模型",
"Add a model tag name": "添加模型标签名称",
"Add a short description about what this modelfile does": "为这个模型文件添加一段简短的描述",
......@@ -19,6 +20,7 @@
"Add custom prompt": "添加自定义提示词",
"Add Docs": "添加文档",
"Add Files": "添加文件",
"Add Memory": "",
"Add message": "添加消息",
"Add Model": "添加模型",
"Add Tags": "添加标签",
......@@ -49,6 +51,7 @@
"Archived Chats": "聊天记录存档",
"are allowed - Activate this command by typing": "允许 - 通过输入来激活这个命令",
"Are you sure?": "你确定吗?",
"As you chat with LLMs, the details and preferences it remembers will be shown here.": "",
"Attach file": "附件",
"Attention to detail": "注重细节",
"Audio": "音频",
......@@ -62,7 +65,6 @@
"Bad Response": "不良响应",
"before": "之前",
"Being lazy": "懒惰",
"Beta": "",
"Builder Mode": "构建模式",
"Bypass SSL verification for Websites": "绕过网站的 SSL 验证",
"Cancel": "取消",
......@@ -173,6 +175,7 @@
"Enabled": "启用",
"Ensure your CSV file includes 4 columns in this order: Name, Email, Password, Role.": "确保您的 CSV 文件按以下顺序包含 4 列: 姓名、电子邮件、密码、角色。",
"Enter {{role}} message here": "在此处输入 {{role}} 信息",
"Enter a detail about yourself for your LLMs to recall": "",
"Enter Chunk Overlap": "输入块重叠 (Chunk Overlap)",
"Enter Chunk Size": "输入块大小 (Chunk Size)",
"Enter Image Size (e.g. 512x512)": "输入图片大小 (例如 512x512)",
......@@ -488,6 +491,7 @@
"variable": "变量",
"variable to have them replaced with clipboard content.": "变量将被剪贴板内容替换。",
"Version": "版本",
"View": "",
"Warning: If you update or change your embedding model, you will need to re-import all documents.": "警告: 如果更新或更改 embedding 模型,则需要重新导入所有文档。",
"Web": "网页",
"Web Loader Settings": "Web 加载器设置",
......
......@@ -11,6 +11,7 @@
"About": "關於",
"Account": "帳號",
"Accurate information": "",
"Add": "",
"Add a model": "新增模型",
"Add a model tag name": "新增模型標籤",
"Add a short description about what this modelfile does": "為這個 Modelfile 添加一段簡短的描述",
......@@ -19,6 +20,7 @@
"Add custom prompt": "",
"Add Docs": "新增文件",
"Add Files": "新增檔案",
"Add Memory": "",
"Add message": "新增訊息",
"Add Model": "",
"Add Tags": "新增標籤",
......@@ -49,6 +51,7 @@
"Archived Chats": "聊天記錄存檔",
"are allowed - Activate this command by typing": "是允許的 - 透過輸入",
"Are you sure?": "你確定嗎?",
"As you chat with LLMs, the details and preferences it remembers will be shown here.": "",
"Attach file": "",
"Attention to detail": "",
"Audio": "音訊",
......@@ -62,7 +65,6 @@
"Bad Response": "",
"before": "",
"Being lazy": "",
"Beta": "",
"Builder Mode": "建構模式",
"Bypass SSL verification for Websites": "",
"Cancel": "取消",
......@@ -173,6 +175,7 @@
"Enabled": "已啟用",
"Ensure your CSV file includes 4 columns in this order: Name, Email, Password, Role.": "",
"Enter {{role}} message here": "在這裡輸入 {{role}} 訊息",
"Enter a detail about yourself for your LLMs to recall": "",
"Enter Chunk Overlap": "輸入 Chunk Overlap",
"Enter Chunk Size": "輸入 Chunk 大小",
"Enter Image Size (e.g. 512x512)": "輸入圖片大小(例如 512x512)",
......@@ -488,6 +491,7 @@
"variable": "變數",
"variable to have them replaced with clipboard content.": "變數將替換為剪貼簿內容。",
"Version": "版本",
"View": "",
"Warning: If you update or change your embedding model, you will need to re-import all documents.": "",
"Web": "網頁",
"Web Loader Settings": "",
......
......@@ -59,7 +59,11 @@ self.onmessage = async (event) => {
 	// make sure loading is done
 	await loadPyodideAndPackages(self.packages);
+	try {
 		self.result = await self.pyodide.runPythonAsync(code);
+	} catch (error) {
+		self.stderr = error.toString();
+	}
 	self.postMessage({ id, result: self.result, stdout: self.stdout, stderr: self.stderr });
 };
......
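The hunk above wraps runPythonAsync in a try/catch so that a Python exception is captured into self.stderr and posted back to the page instead of killing the worker. Below is a minimal caller sketch, not code from this commit: the worker path, the request shape, and the id field are assumptions, since the hunk only shows the reply side.

// Minimal caller sketch (assumed worker path and request shape; the reply
// shape { id, result, stdout, stderr } matches the postMessage call above).
const worker = new Worker('/pyodide.worker.js');

worker.onmessage = (event) => {
	const { id, result, stdout, stderr } = event.data;
	if (stderr) {
		// runPythonAsync threw; the worker caught it and forwarded error.toString().
		console.error(`run ${id} failed:`, stderr);
	} else {
		console.log(`run ${id} ok:`, result, stdout);
	}
};

// A division by zero exercises the new catch branch.
worker.postMessage({ id: crypto.randomUUID(), code: '1 / 0', packages: [] });
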
......@@ -203,6 +203,7 @@
 			user: _user ?? undefined,
 			content: userPrompt,
 			files: files.length > 0 ? files : undefined,
+			models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx),
 			timestamp: Math.floor(Date.now() / 1000) // Unix epoch
 		};
......@@ -251,12 +252,16 @@
 		}
 	};
-	const sendPrompt = async (prompt, parentId) => {
+	const sendPrompt = async (prompt, parentId, modelId = null) => {
 		const _chatId = JSON.parse(JSON.stringify($chatId));
 		await Promise.all(
-			(atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels).map(
-				async (modelId) => {
+			(modelId
+				? [modelId]
+				: atSelectedModel !== undefined
+					? [atSelectedModel.id]
+					: selectedModels
+			).map(async (modelId) => {
 				console.log('modelId', modelId);
 				const model = $models.filter((m) => m.id === modelId).at(0);
......@@ -306,8 +311,7 @@
 				} else {
 					toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
 				}
-				}
-			)
+			})
 		);
 		await chats.set(await getChatList(localStorage.token));
......@@ -773,16 +777,18 @@
 		console.log('stopResponse');
 	};
-	const regenerateResponse = async () => {
+	const regenerateResponse = async (message) => {
 		console.log('regenerateResponse');
-		if (messages.length != 0 && messages.at(-1).done == true) {
-			messages.splice(messages.length - 1, 1);
-			messages = messages;
-			let userMessage = messages.at(-1);
+		if (messages.length != 0) {
+			let userMessage = history.messages[message.parentId];
 			let userPrompt = userMessage.content;
+			if ((userMessage?.models ?? [...selectedModels]).length == 1) {
 				await sendPrompt(userPrompt, userMessage.id);
+			} else {
+				await sendPrompt(userPrompt, userMessage.id, message.model);
+			}
 		}
 	};
......
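The chat-page hunks above thread an optional modelId through sendPrompt so that regenerating a single response re-queries only the model that produced it, while a normal submit still fans out to every selected model; the models field stored on the user message is what lets regenerateResponse tell the two cases apart. A standalone sketch of the routing rule follows; resolveTargetModels is an illustrative name, not a function in the diff.

// Mirrors the ternary introduced in sendPrompt: an explicit modelId wins,
// then an @-selected model, then the full selection.
const resolveTargetModels = (modelId, atSelectedModel, selectedModels) =>
	modelId ? [modelId] : atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels;

console.log(resolveTargetModels(null, undefined, ['llama3', 'mistral'])); // ['llama3', 'mistral']
console.log(resolveTargetModels('mistral', undefined, ['llama3', 'mistral'])); // ['mistral']
console.log(resolveTargetModels(null, { id: 'gemma' }, ['llama3'])); // ['gemma']
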
......@@ -211,7 +211,8 @@
 			user: _user ?? undefined,
 			content: userPrompt,
 			files: files.length > 0 ? files : undefined,
-			timestamp: Math.floor(Date.now() / 1000) // Unix epoch
+			timestamp: Math.floor(Date.now() / 1000), // Unix epoch
+			models: selectedModels
 		};
 		// Add message to history and Set currentId to messageId
......@@ -256,12 +257,18 @@
 			await sendPrompt(userPrompt, userMessageId);
 		}
 	};
-	const sendPrompt = async (prompt, parentId) => {
+	const sendPrompt = async (prompt, parentId, modelId = null) => {
 		const _chatId = JSON.parse(JSON.stringify($chatId));
 		await Promise.all(
-			(atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels).map(
-				async (modelId) => {
+			(modelId
+				? [modelId]
+				: atSelectedModel !== undefined
+					? [atSelectedModel.id]
+					: selectedModels
+			).map(async (modelId) => {
 				console.log('modelId', modelId);
 				const model = $models.filter((m) => m.id === modelId).at(0);
 				if (model) {
......@@ -310,8 +317,7 @@
 				} else {
 					toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
 				}
-				}
-			)
+			})
 		);
 		await chats.set(await getChatList(localStorage.token));
......@@ -775,16 +781,18 @@
 		console.log('stopResponse');
 	};
-	const regenerateResponse = async () => {
+	const regenerateResponse = async (message) => {
 		console.log('regenerateResponse');
-		if (messages.length != 0 && messages.at(-1).done == true) {
-			messages.splice(messages.length - 1, 1);
-			messages = messages;
-			let userMessage = messages.at(-1);
+		if (messages.length != 0) {
+			let userMessage = history.messages[message.parentId];
 			let userPrompt = userMessage.content;
+			if ((userMessage?.models ?? [...selectedModels]).length == 1) {
 				await sendPrompt(userPrompt, userMessage.id);
+			} else {
+				await sendPrompt(userPrompt, userMessage.id, message.model);
+			}
 		}
 	};
......
......@@ -339,7 +339,7 @@ SYSTEM """${system}"""`.replace(/^\s*\n/gm, '');
 			if (
 				inputFiles &&
 				inputFiles.length > 0 &&
-				['image/gif', 'image/jpeg', 'image/png'].includes(inputFiles[0]['type'])
+				['image/gif', 'image/webp', 'image/jpeg', 'image/png'].includes(inputFiles[0]['type'])
 			) {
 				reader.readAsDataURL(inputFiles[0]);
 			} else {
......
......@@ -238,7 +238,7 @@
 			if (
 				inputFiles &&
 				inputFiles.length > 0 &&
-				['image/gif', 'image/jpeg', 'image/png'].includes(inputFiles[0]['type'])
+				['image/gif', 'image/webp', 'image/jpeg', 'image/png'].includes(inputFiles[0]['type'])
 			) {
 				reader.readAsDataURL(inputFiles[0]);
 			} else {
......
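Both file-input hunks above add 'image/webp' to the MIME types accepted before the file is read as a data URL. As an illustration only (the diff keeps the array inline at each call site), the check could be factored into a small helper:

// Hypothetical helper, not part of the commit.
const ACCEPTED_IMAGE_TYPES = ['image/gif', 'image/webp', 'image/jpeg', 'image/png'];
const isSupportedImage = (file) => ACCEPTED_IMAGE_TYPES.includes(file?.type);

console.log(isSupportedImage({ type: 'image/webp' })); // true (newly accepted)
console.log(isSupportedImage({ type: 'image/svg+xml' })); // false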