Commit 1a16f8fb authored by Jun Siang Cheah's avatar Jun Siang Cheah

Merge remote-tracking branch 'origin/dev' into feat/model-config

parents 197af126 009e85d5
......@@ -5,6 +5,25 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [0.1.125] - 2024-05-19
### Added
- **🔄 Updated UI**: Chat interface revamped with chat bubbles. Easily switch back to the old style via settings > interface > chat bubble UI.
- **📂 Enhanced Sidebar UI**: Model files, documents, prompts, and playground merged into Workspace for streamlined access.
- **🚀 Improved Many Model Interaction**: All responses now displayed simultaneously for a smoother experience.
- **🐍 Python Code Execution**: Execute Python code locally in the browser with libraries like 'requests', 'beautifulsoup4', 'numpy', 'pandas', 'seaborn', 'matplotlib', 'scikit-learn', 'scipy', 'regex'.
- **🧠 Experimental Memory Feature**: Manually input personal information you want LLMs to remember via settings > personalization > memory (a request-level sketch follows this changelog block).
- **💾 Persistent Settings**: Settings now saved as config.json for convenience.
- **🩺 Health Check Endpoint**: Added for Docker deployment.
- **↕️ RTL Support**: Toggle chat direction via settings > interface > chat direction.
- **🖥️ PowerPoint Support**: RAG pipeline now supports PowerPoint documents.
- **🌐 Language Updates**: Ukrainian, Turkish, Arabic, Chinese, Serbian, Vietnamese updated; Punjabi added.
### Changed
- **👤 Shared Chat Update**: Shared chat now includes creator user information.
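The memory feature is backed by the new `/memories` endpoints added in this commit (router mounted under `/api/v1`). A minimal sketch of exercising them over HTTP, assuming a locally running instance and a valid API token; `BASE_URL` and `TOKEN` below are placeholder values, not part of this commit:

```python
import requests

# Placeholder values -- point these at your own instance and account token.
BASE_URL = "http://localhost:8080/api/v1"
TOKEN = "YOUR_API_TOKEN"
HEADERS = {"Authorization": f"Bearer {TOKEN}", "Content-Type": "application/json"}

# Store a memory; the backend also embeds it into a per-user Chroma collection.
added = requests.post(
    f"{BASE_URL}/memories/add",
    json={"content": "I prefer concise answers."},
    headers=HEADERS,
).json()

# Ask the vector store for the stored memory closest to a prompt (the server returns one result).
hits = requests.post(
    f"{BASE_URL}/memories/query",
    json={"content": "How should answers be phrased?"},
    headers=HEADERS,
).json()

# List everything stored for the current user.
all_memories = requests.get(f"{BASE_URL}/memories/", headers=HEADERS).json()
print(added, hits, all_memories)
```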
## [0.1.124] - 2024-05-08
### Added
......
......@@ -69,6 +69,7 @@ from utils.misc import (
from utils.utils import get_current_user, get_admin_user
from config import (
ENV,
SRC_LOG_LEVELS,
UPLOAD_DIR,
DOCS_DIR,
......@@ -260,7 +261,7 @@ async def update_embedding_config(
app.state.config.OPENAI_API_BASE_URL = form_data.openai_config.url
app.state.config.OPENAI_API_KEY = form_data.openai_config.key
update_embedding_model(app.state.config.RAG_EMBEDDING_MODEL), True
update_embedding_model(app.state.config.RAG_EMBEDDING_MODEL)
app.state.EMBEDDING_FUNCTION = get_embedding_function(
app.state.config.RAG_EMBEDDING_ENGINE,
......@@ -951,3 +952,14 @@ def reset(user=Depends(get_admin_user)) -> bool:
log.exception(e)
return True
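# Dev-only endpoints for inspecting the active embedding function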
if ENV == "dev":
@app.get("/ef")
async def get_embeddings():
return {"result": app.state.EMBEDDING_FUNCTION("hello world")}
@app.get("/ef/{text}")
async def get_embeddings_text(text: str):
return {"result": app.state.EMBEDDING_FUNCTION(text)}
"""Peewee migrations -- 002_add_local_sharing.py.
Some examples (model - class or model name)::
> Model = migrator.orm['table_name'] # Return model in current state by name
> Model = migrator.ModelClass # Return model in current state by name
> migrator.sql(sql) # Run custom SQL
> migrator.run(func, *args, **kwargs) # Run python function with the given args
> migrator.create_model(Model) # Create a model (could be used as decorator)
> migrator.remove_model(model, cascade=True) # Remove a model
> migrator.add_fields(model, **fields) # Add fields to a model
> migrator.change_fields(model, **fields) # Change fields
> migrator.remove_fields(model, *field_names, cascade=True)
> migrator.rename_field(model, old_field_name, new_field_name)
> migrator.rename_table(model, new_table_name)
> migrator.add_index(model, *col_names, unique=False)
> migrator.add_not_null(model, *field_names)
> migrator.add_default(model, field_name, default)
> migrator.add_constraint(model, name, sql)
> migrator.drop_index(model, *col_names)
> migrator.drop_not_null(model, *field_names)
> migrator.drop_constraints(model, *constraints)
"""
from contextlib import suppress
import peewee as pw
from peewee_migrate import Migrator
with suppress(ImportError):
import playhouse.postgres_ext as pw_pext
def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
@migrator.create_model
class Memory(pw.Model):
id = pw.CharField(max_length=255, unique=True)
user_id = pw.CharField(max_length=255)
content = pw.TextField(null=False)
updated_at = pw.BigIntegerField(null=False)
created_at = pw.BigIntegerField(null=False)
class Meta:
table_name = "memory"
def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
"""Write your rollback migrations here."""
migrator.remove_model("memory")
......@@ -9,6 +9,7 @@ from apps.web.routers import (
modelfiles,
prompts,
configs,
memories,
utils,
)
from config import (
......@@ -41,6 +42,7 @@ app.state.config.USER_PERMISSIONS = USER_PERMISSIONS
app.state.config.WEBHOOK_URL = WEBHOOK_URL
app.state.AUTH_TRUSTED_EMAIL_HEADER = WEBUI_AUTH_TRUSTED_EMAIL_HEADER
app.add_middleware(
CORSMiddleware,
allow_origins=origins,
......@@ -52,9 +54,12 @@ app.add_middleware(
app.include_router(auths.router, prefix="/auths", tags=["auths"])
app.include_router(users.router, prefix="/users", tags=["users"])
app.include_router(chats.router, prefix="/chats", tags=["chats"])
app.include_router(documents.router, prefix="/documents", tags=["documents"])
app.include_router(modelfiles.router, prefix="/modelfiles", tags=["modelfiles"])
app.include_router(prompts.router, prefix="/prompts", tags=["prompts"])
app.include_router(memories.router, prefix="/memories", tags=["memories"])
app.include_router(configs.router, prefix="/configs", tags=["configs"])
app.include_router(utils.router, prefix="/utils", tags=["utils"])
......
from pydantic import BaseModel
from peewee import *
from playhouse.shortcuts import model_to_dict
from typing import List, Union, Optional
from apps.web.internal.db import DB
from apps.web.models.chats import Chats
import time
import uuid
####################
# Memory DB Schema
####################
class Memory(Model):
id = CharField(unique=True)
user_id = CharField()
content = TextField()
updated_at = BigIntegerField()
created_at = BigIntegerField()
class Meta:
database = DB
class MemoryModel(BaseModel):
id: str
user_id: str
content: str
updated_at: int # timestamp in epoch
created_at: int # timestamp in epoch
####################
# Forms
####################
class MemoriesTable:
def __init__(self, db):
self.db = db
self.db.create_tables([Memory])
def insert_new_memory(
self,
user_id: str,
content: str,
) -> Optional[MemoryModel]:
id = str(uuid.uuid4())
memory = MemoryModel(
**{
"id": id,
"user_id": user_id,
"content": content,
"created_at": int(time.time()),
"updated_at": int(time.time()),
}
)
result = Memory.create(**memory.model_dump())
if result:
return memory
else:
return None
def get_memories(self) -> List[MemoryModel]:
try:
memories = Memory.select()
return [MemoryModel(**model_to_dict(memory)) for memory in memories]
except Exception:
return None
def get_memories_by_user_id(self, user_id: str) -> List[MemoryModel]:
try:
memories = Memory.select().where(Memory.user_id == user_id)
return [MemoryModel(**model_to_dict(memory)) for memory in memories]
except Exception:
return None
def get_memory_by_id(self, id) -> Optional[MemoryModel]:
try:
memory = Memory.get(Memory.id == id)
return MemoryModel(**model_to_dict(memory))
except Exception:
return None
def delete_memory_by_id(self, id: str) -> bool:
try:
query = Memory.delete().where(Memory.id == id)
query.execute() # Remove the rows, return number of rows removed.
return True
except Exception:
return False
def delete_memories_by_user_id(self, user_id: str) -> bool:
try:
query = Memory.delete().where(Memory.user_id == user_id)
query.execute()
return True
except Exception:
return False
def delete_memory_by_id_and_user_id(self, id: str, user_id: str) -> bool:
try:
query = Memory.delete().where(Memory.id == id, Memory.user_id == user_id)
query.execute()
return True
except Exception:
return False
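# Module-level singleton used by the memories router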
Memories = MemoriesTable(DB)
from fastapi import Response, Request
from fastapi import Depends, FastAPI, HTTPException, status
from datetime import datetime, timedelta
from typing import List, Union, Optional
from fastapi import APIRouter
from pydantic import BaseModel
import logging
from apps.web.models.memories import Memories, MemoryModel
from utils.utils import get_verified_user
from constants import ERROR_MESSAGES
from config import SRC_LOG_LEVELS, CHROMA_CLIENT
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])
router = APIRouter()
@router.get("/ef")
async def get_embeddings(request: Request):
return {"result": request.app.state.EMBEDDING_FUNCTION("hello world")}
############################
# GetMemories
############################
@router.get("/", response_model=List[MemoryModel])
async def get_memories(user=Depends(get_verified_user)):
return Memories.get_memories_by_user_id(user.id)
############################
# AddMemory
############################
class AddMemoryForm(BaseModel):
content: str
@router.post("/add", response_model=Optional[MemoryModel])
async def add_memory(
request: Request, form_data: AddMemoryForm, user=Depends(get_verified_user)
):
memory = Memories.insert_new_memory(user.id, form_data.content)
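# Embed the new memory and upsert it into the user's dedicated Chroma collection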
memory_embedding = request.app.state.EMBEDDING_FUNCTION(memory.content)
collection = CHROMA_CLIENT.get_or_create_collection(name=f"user-memory-{user.id}")
collection.upsert(
documents=[memory.content],
ids=[memory.id],
embeddings=[memory_embedding],
metadatas=[{"created_at": memory.created_at}],
)
return memory
############################
# QueryMemory
############################
class QueryMemoryForm(BaseModel):
content: str
@router.post("/query")
async def query_memory(
request: Request, form_data: QueryMemoryForm, user=Depends(get_verified_user)
):
query_embedding = request.app.state.EMBEDDING_FUNCTION(form_data.content)
collection = CHROMA_CLIENT.get_or_create_collection(name=f"user-memory-{user.id}")
results = collection.query(
query_embeddings=[query_embedding],
n_results=1, # how many results to return
)
return results
############################
# ResetMemoryFromVectorDB
############################
@router.get("/reset", response_model=bool)
async def reset_memory_from_vector_db(
request: Request, user=Depends(get_verified_user)
):
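# Drop the user's vector collection and rebuild it by re-embedding every memory stored in the database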
CHROMA_CLIENT.delete_collection(f"user-memory-{user.id}")
collection = CHROMA_CLIENT.get_or_create_collection(name=f"user-memory-{user.id}")
memories = Memories.get_memories_by_user_id(user.id)
for memory in memories:
memory_embedding = request.app.state.EMBEDDING_FUNCTION(memory.content)
collection.upsert(
documents=[memory.content],
ids=[memory.id],
embeddings=[memory_embedding],
)
return True
############################
# DeleteMemoriesByUserId
############################
@router.delete("/user", response_model=bool)
async def delete_memory_by_user_id(user=Depends(get_verified_user)):
result = Memories.delete_memories_by_user_id(user.id)
if result:
try:
CHROMA_CLIENT.delete_collection(f"user-memory-{user.id}")
except Exception as e:
log.error(e)
return True
return False
############################
# DeleteMemoryById
############################
@router.delete("/{memory_id}", response_model=bool)
async def delete_memory_by_id(memory_id: str, user=Depends(get_verified_user)):
result = Memories.delete_memory_by_id_and_user_id(memory_id, user.id)
if result:
collection = CHROMA_CLIENT.get_or_create_collection(
name=f"user-memory-{user.id}"
)
collection.delete(ids=[memory_id])
return True
return False
......@@ -240,9 +240,15 @@ async def check_url(request: Request, call_next):
return response
app.mount("/api/v1", webui_app)
app.mount("/litellm/api", litellm_app)
@app.middleware("http")
async def update_embedding_function(request: Request, call_next):
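# After a request to the RAG /embedding/update endpoint, mirror the refreshed embedding function onto the web UI sub-app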
response = await call_next(request)
if "/embedding/update" in request.url.path:
webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION
return response
app.mount("/litellm/api", litellm_app)
app.mount("/ollama", ollama_app)
app.mount("/openai/api", openai_app)
......@@ -250,6 +256,10 @@ app.mount("/images/api/v1", images_app)
app.mount("/audio/api/v1", audio_app)
app.mount("/rag/api/v1", rag_app)
app.mount("/api/v1", webui_app)
webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION
@app.get("/api/config")
async def get_app_config():
......
{
"name": "open-webui",
"version": "0.1.124",
"version": "0.1.125",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "open-webui",
"version": "0.1.124",
"version": "0.1.125",
"dependencies": {
"@pyscript/core": "^0.4.32",
"@sveltejs/adapter-node": "^1.3.1",
......
{
"name": "open-webui",
"version": "0.1.124",
"version": "0.1.125",
"private": true,
"scripts": {
"dev": "npm run pyodide:fetch && vite dev --host",
......
......@@ -6,7 +6,8 @@ const packages = [
'matplotlib',
'scikit-learn',
'scipy',
'regex'
'regex',
'seaborn'
];
import { loadPyodide } from 'pyodide';
......
import { WEBUI_API_BASE_URL } from '$lib/constants';
export const getMemories = async (token: string) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/memories`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
authorization: `Bearer ${token}`
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
error = err.detail;
console.log(err);
return null;
});
if (error) {
throw error;
}
return res;
};
export const addNewMemory = async (token: string, content: string) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/memories/add`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
authorization: `Bearer ${token}`
},
body: JSON.stringify({
content: content
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
error = err.detail;
console.log(err);
return null;
});
if (error) {
throw error;
}
return res;
};
export const queryMemory = async (token: string, content: string) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/memories/query`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
authorization: `Bearer ${token}`
},
body: JSON.stringify({
content: content
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
error = err.detail;
console.log(err);
return null;
});
if (error) {
throw error;
}
return res;
};
export const deleteMemoryById = async (token: string, id: string) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/memories/${id}`, {
method: 'DELETE',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
authorization: `Bearer ${token}`
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.then((json) => {
return json;
})
.catch((err) => {
error = err.detail;
console.log(err);
return null;
});
if (error) {
throw error;
}
return res;
};
export const deleteMemoriesByUserId = async (token: string) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/memories/user`, {
method: 'DELETE',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
authorization: `Bearer ${token}`
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.then((json) => {
return json;
})
.catch((err) => {
error = err.detail;
console.log(err);
return null;
});
if (error) {
throw error;
}
return res;
};
......@@ -644,7 +644,7 @@
}}
/>
<form
dir={$settings?.chatDirection}
dir={$settings?.chatDirection ?? 'LTR'}
class=" flex flex-col relative w-full rounded-3xl px-1.5 bg-gray-50 dark:bg-gray-850 dark:text-gray-100"
on:submit|preventDefault={() => {
submitPrompt(prompt, user);
......
......@@ -361,13 +361,14 @@
history: history
});
if (autoScroll) {
const element = document.getElementById('messages-container');
autoScroll =
element.scrollHeight - element.scrollTop <= element.clientHeight + 50;
setTimeout(() => {
scrollToBottom();
}, 100);
}
}}
/>
{/key}
......
......@@ -30,70 +30,17 @@
};
const checkPythonCode = (str) => {
// Check if the string contains typical Python keywords, syntax, or functions
const pythonKeywords = [
'def',
'class',
'import',
'from',
'if',
'else',
'elif',
'for',
'while',
'try',
'except',
'finally',
'return',
'yield',
'lambda',
'assert',
'pass',
'break',
'continue',
'global',
'nonlocal',
'del',
'True',
'False',
'None',
'and',
'or',
'not',
'in',
'is',
'as',
'with'
];
for (let keyword of pythonKeywords) {
if (str.includes(keyword)) {
return true;
}
}
// Check if the string contains typical Python syntax characters
const pythonSyntax = [
'def ',
'class ',
'import ',
'from ',
'if ',
'else:',
'elif ',
'for ',
'while ',
'try:',
'except:',
'finally:',
'return ',
'yield ',
'lambda ',
'assert ',
'pass',
'break',
'continue',
'global ',
'nonlocal ',
'del ',
'True',
......@@ -104,29 +51,7 @@
' not ',
' in ',
' is ',
' as ',
' with ',
':',
'=',
'==',
'!=',
'>',
'<',
'>=',
'<=',
'+',
'-',
'*',
'/',
'%',
'**',
'//',
'(',
')',
'[',
']',
'{',
'}'
' with '
];
for (let syntax of pythonSyntax) {
......@@ -186,7 +111,8 @@
code.includes('matplotlib') ? 'matplotlib' : null,
code.includes('sklearn') ? 'scikit-learn' : null,
code.includes('scipy') ? 'scipy' : null,
code.includes('re') ? 'regex' : null
code.includes('re') ? 'regex' : null,
code.includes('seaborn') ? 'seaborn' : null
].filter(Boolean);
console.log(packages);
......@@ -235,7 +161,8 @@ __builtins__.input = input`);
code.includes('pandas') ? 'pandas' : null,
code.includes('sklearn') ? 'scikit-learn' : null,
code.includes('scipy') ? 'scipy' : null,
code.includes('re') ? 'regex' : null
code.includes('re') ? 'regex' : null,
code.includes('seaborn') ? 'seaborn' : null
].filter(Boolean);
console.log(packages);
......
......@@ -68,11 +68,11 @@
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div
class=" snap-center min-w-80 w-full max-w-full m-1 outline outline-1 {history.messages[
class=" snap-center min-w-80 w-full max-w-full m-1 border {history.messages[
currentMessageId
].model === model
? 'outline-gray-200 dark:outline-gray-700 outline-2'
: 'outline-gray-100 dark:outline-gray-850 '} transition p-6 rounded-3xl"
? 'border-gray-100 dark:border-gray-700 border-[1.5px]'
: 'border-gray-50 dark:border-gray-850 '} transition p-5 rounded-3xl"
on:click={() => {
currentMessageId = groupedMessages[model].messages[groupedMessagesIdx[model]].id;
......
......@@ -5,7 +5,7 @@
export let src = '/user.png';
</script>
<div class={$settings?.chatDirection === 'LTR' ? 'mr-3' : 'ml-3'}>
<div class={($settings?.chatDirection ?? 'LTR') === 'LTR' ? 'mr-3' : 'ml-3'}>
<img
crossorigin="anonymous"
src={src.startsWith(WEBUI_BASE_URL) ||
......
......@@ -35,7 +35,7 @@
[key: string]: any;
} = [];
export let className = ' w-[30rem]';
export let className = 'w-[30rem]';
let show = false;
......@@ -213,7 +213,9 @@
</DropdownMenu.Trigger>
<DropdownMenu.Content
class=" z-40 {className} max-w-[calc(100vw-1rem)] justify-start rounded-xl bg-white dark:bg-gray-850 dark:text-white shadow-lg border border-gray-300/30 dark:border-gray-700/50 outline-none "
class=" z-40 {$mobile
? `w-full`
: `${className}`} max-w-[calc(100vw-1rem)] justify-start rounded-xl bg-white dark:bg-gray-850 dark:text-white shadow-lg border border-gray-300/30 dark:border-gray-700/50 outline-none "
transition={flyAndScale}
side={$mobile ? 'bottom' : 'bottom-start'}
sideOffset={4}
......
......@@ -16,11 +16,11 @@
let showManageModal = false;
// Addons
let enableMemory = true;
let enableMemory = false;
onMount(async () => {
let settings = JSON.parse(localStorage.getItem('settings') ?? '{}');
enableMemory = settings?.memory ?? true;
enableMemory = settings?.memory ?? false;
});
</script>
......@@ -58,8 +58,8 @@
<div class="text-xs text-gray-600 dark:text-gray-400">
<div>
LLMs will become more helpful as you chat, picking up on details and preferences to tailor
its responses to you.
You can personalize your interactions with LLMs by adding memories through the 'Manage'
button below, making them more helpful and tailored to you.
</div>
<!-- <div class="mt-3">
......
<script>
import { getContext } from 'svelte';
import { createEventDispatcher, getContext } from 'svelte';
import Modal from '$lib/components/common/Modal.svelte';
import { addNewMemory } from '$lib/apis/memories';
import { toast } from 'svelte-sonner';
const dispatch = createEventDispatcher();
export let show;
const i18n = getContext('i18n');
let loading = false;
let memory = '';
let content = '';
const submitHandler = async () => {
loading = true;
const res = await addNewMemory(localStorage.token, content).catch((error) => {
toast.error(error);
return null;
});
if (res) {
console.log(res);
toast.success('Memory added successfully');
content = '';
show = false;
dispatch('save');
}
const submitHandler = () => {
console.log('submitHandler');
loading = false;
};
</script>
......@@ -48,7 +68,7 @@
>
<div class="">
<textarea
bind:value={memory}
bind:value={content}
class=" bg-transparent w-full text-sm resize-none rounded-xl p-3 outline outline-1 outline-gray-100 dark:outline-gray-800"
rows="3"
placeholder={$i18n.t('Enter a detail about yourself for your LLMs to recall')}
......
......@@ -7,6 +7,9 @@
import Modal from '$lib/components/common/Modal.svelte';
import AddMemoryModal from './AddMemoryModal.svelte';
import { deleteMemoriesByUserId, deleteMemoryById, getMemories } from '$lib/apis/memories';
import Tooltip from '$lib/components/common/Tooltip.svelte';
import { error } from '@sveltejs/kit';
const i18n = getContext('i18n');
......@@ -18,7 +21,7 @@
$: if (show) {
(async () => {
// chats = await getArchivedChatList(localStorage.token);
memories = await getMemories(localStorage.token);
})();
}
</script>
......@@ -65,20 +68,54 @@
</thead>
<tbody>
{#each memories as memory}
<tr class="border-b dark:border-gray-800">
<td class="px-3 py-2"> {memory.name} </td>
<td class="px-3 py-2 hidden md:flex">
{dayjs(memory.created_at).format($i18n.t('MMMM DD, YYYY'))}
<tr class="border-b dark:border-gray-800 items-center">
<td class="px-3 py-1">
<div class="line-clamp-1">
{memory.content}
</div>
</td>
<td class=" px-3 py-1 hidden md:flex h-[2.5rem]">
<div class="my-auto whitespace-nowrap">
{dayjs(memory.created_at * 1000).format($i18n.t('MMMM DD, YYYY'))}
</div>
</td>
<td class="px-3 py-2 text-right">
<td class="px-3 py-1">
<div class="flex justify-end w-full">
<Tooltip content="Delete">
<button
class="text-xs text-gray-500 dark:text-gray-400"
on:click={() => {
// showMemory(memory);
class="self-center w-fit text-sm px-2 py-2 hover:bg-black/5 dark:hover:bg-white/5 rounded-xl"
on:click={async () => {
const res = await deleteMemoryById(
localStorage.token,
memory.id
).catch((error) => {
toast.error(error);
return null;
});
if (res) {
toast.success('Memory deleted successfully');
memories = await getMemories(localStorage.token);
}
}}
>
{$i18n.t('View')}
<svg
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 24 24"
stroke-width="1.5"
stroke="currentColor"
class="w-4 h-4"
>
<path
stroke-linecap="round"
stroke-linejoin="round"
d="m14.74 9-.346 9m-4.788 0L9.26 9m9.968-3.21c.342.052.682.107 1.022.166m-1.022-.165L18.16 19.673a2.25 2.25 0 0 1-2.244 2.077H8.084a2.25 2.25 0 0 1-2.244-2.077L4.772 5.79m14.456 0a48.108 48.108 0 0 0-3.478-.397m-12 .562c.34-.059.68-.114 1.022-.165m0 0a48.11 48.11 0 0 1 3.478-.397m7.5 0v-.916c0-1.18-.91-2.164-2.09-2.201a51.964 51.964 0 0 0-3.32 0c-1.18.037-2.09 1.022-2.09 2.201v.916m7.5 0a48.667 48.667 0 0 0-7.5 0"
/>
</svg>
</button>
</Tooltip>
</div>
</td>
</tr>
{/each}
......@@ -89,9 +126,7 @@
{:else}
<div class="text-center flex h-full text-sm w-full">
<div class=" my-auto pb-10 px-4 w-full text-gray-500">
{$i18n.t(
'As you chat with LLMs, the details and preferences it remembers will be shown here.'
)}
{$i18n.t('Memories accessible by LLMs will be shown here.')}
</div>
</div>
{/if}
......@@ -103,13 +138,28 @@
showAddMemoryModal = true;
}}>Add memory</button
>
<!-- <button
<button
class=" px-3.5 py-1.5 font-medium text-red-500 hover:bg-black/5 dark:hover:bg-white/5 outline outline-1 outline-red-300 dark:outline-red-800 rounded-3xl"
>Clear memory</button
> -->
on:click={async () => {
const res = await deleteMemoriesByUserId(localStorage.token).catch((error) => {
toast.error(error);
return null;
});
if (res) {
toast.success('Memory cleared successfully');
memories = [];
}
}}>Clear memory</button
>
</div>
</div>
</div>
</Modal>
<AddMemoryModal bind:show={showAddMemoryModal} />
<AddMemoryModal
bind:show={showAddMemoryModal}
on:save={async () => {
memories = await getMemories(localStorage.token);
}}
/>