"src/include/ConstantTensorDescriptor.hpp" did not exist on "471830a052b2ed6135ad4c41244f0ec9057c0f09"
Unverified commit 9bcd4ce5, authored by Timothy Jaeryang Baek, committed by GitHub

Merge pull request #3559 from open-webui/dev

0.3.8
parents 824966ad b38abf23
@@ -10,7 +10,8 @@ node_modules
 vite.config.js.timestamp-*
 vite.config.ts.timestamp-*
 __pycache__
-.env
+.idea
+venv
 _old
 uploads
 .ipynb_checkpoints
......
@@ -35,6 +35,10 @@ jobs:
           done
           echo "Service is up!"

+      - name: Delete Docker build cache
+        run: |
+          docker builder prune --all --force
+
       - name: Preload Ollama model
         run: |
           docker exec ollama ollama pull qwen:0.5b-chat-v1.5-q2_K
@@ -43,7 +47,7 @@ jobs:
         uses: cypress-io/github-action@v6
         with:
           browser: chrome
-          wait-on: "http://localhost:3000"
+          wait-on: 'http://localhost:3000'
           config: baseUrl=http://localhost:3000
       - uses: actions/upload-artifact@v4
@@ -67,6 +71,28 @@ jobs:
           path: compose-logs.txt
           if-no-files-found: ignore

+  # pytest:
+  #   name: Run Backend Tests
+  #   runs-on: ubuntu-latest
+  #   steps:
+  #     - uses: actions/checkout@v4
+  #     - name: Set up Python
+  #       uses: actions/setup-python@v4
+  #       with:
+  #         python-version: ${{ matrix.python-version }}
+  #     - name: Install dependencies
+  #       run: |
+  #         python -m pip install --upgrade pip
+  #         pip install -r backend/requirements.txt
+  #     - name: pytest run
+  #       run: |
+  #         ls -al
+  #         cd backend
+  #         PYTHONPATH=. pytest . -o log_cli=true -o log_cli_level=INFO
+
   migration_test:
     name: Run Migration Tests
     runs-on: ubuntu-latest
@@ -126,11 +152,11 @@ jobs:
           cd backend
           uvicorn main:app --port "8080" --forwarded-allow-ips '*' &
           UVICORN_PID=$!
-          # Wait up to 20 seconds for the server to start
-          for i in {1..20}; do
+          # Wait up to 40 seconds for the server to start
+          for i in {1..40}; do
             curl -s http://localhost:8080/api/config > /dev/null && break
             sleep 1
-            if [ $i -eq 20 ]; then
+            if [ $i -eq 40 ]; then
               echo "Server failed to start"
               kill -9 $UVICORN_PID
               exit 1
@@ -171,7 +197,7 @@ jobs:
           fi

           # Check that service will reconnect to postgres when connection will be closed
-          status_code=$(curl --write-out %{http_code} -s --output /dev/null http://localhost:8081/health)
+          status_code=$(curl --write-out %{http_code} -s --output /dev/null http://localhost:8081/health/db)
           if [[ "$status_code" -ne 200 ]] ; then
             echo "Server has failed before postgres reconnect check"
             exit 1
@@ -183,7 +209,7 @@ jobs:
             cur = conn.cursor(); \
             cur.execute('SELECT pg_terminate_backend(psa.pid) FROM pg_stat_activity psa WHERE datname = current_database() AND pid <> pg_backend_pid();')"

-          status_code=$(curl --write-out %{http_code} -s --output /dev/null http://localhost:8081/health)
+          status_code=$(curl --write-out %{http_code} -s --output /dev/null http://localhost:8081/health/db)
           if [[ "$status_code" -ne 200 ]] ; then
             echo "Server has not reconnected to postgres after connection was closed: returned status $status_code"
             exit 1
......
@@ -306,3 +306,4 @@ dist
 # cypress artifacts
 cypress/videos
 cypress/screenshots
+.vscode/settings.json
@@ -5,6 +5,33 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

+## [0.3.8] - 2024-07-09
+
+### Added
+
+- **💬 Chat Controls**: Easily adjust parameters for each chat session, offering more precise control over your interactions.
+- **📌 Pinned Chats**: Support for pinned chats, allowing you to keep important conversations easily accessible.
+- **📄 Apache Tika Integration**: Added support for using Apache Tika as a document loader, enhancing document processing capabilities.
+- **🛠️ Custom Environment for OpenID Claims**: Allows setting custom claims for OpenID, providing more flexibility in user authentication.
+- **🔧 Enhanced Tools & Functions API**: Introduced 'event_emitter' and 'event_call'; you can now also add citations for better documentation and tracking. Detailed documentation will be provided on our documentation website.
+- **↔️ Sideways Scrolling in Settings**: The settings tabs container now supports horizontal scrolling for easier navigation.
+- **🌑 Darker OLED Theme**: Includes a new, darker OLED theme and improved styling for the light theme, enhancing visual appeal.
+- **🌐 Language Updates**: Updated translations for Indonesian, German, French, and Catalan, expanding accessibility.
+
+### Fixed
+
+- **⏰ OpenAI Streaming Timeout**: Resolved issues with OpenAI streaming responses by using the 'AIOHTTP_CLIENT_TIMEOUT' setting, ensuring reliable performance.
+- **💡 User Valves**: Fixed malfunctioning user valves, ensuring proper functionality.
+- **🔄 Collapsible Components**: Addressed issues with collapsible components not working, restoring expected behavior.
+
+### Changed
+
+- **🗃️ Database Backend**: Switched from Peewee to SQLAlchemy for improved concurrency support, enhancing database performance.
+- **🔤 Primary Font Styling**: Updated the primary font to Archivo for better visual consistency.
+- **🔄 Font Change for Windows**: Replaced Arimo with Inter for Windows users, improving readability.
+- **🚀 Lazy Loading**: Implemented lazy loading for 'faster_whisper' and 'sentence_transformers' to reduce startup memory usage.
+- **📋 Task Generation Payload**: Task generations now include only the "task" field in the body instead of "title".
+
 ## [0.3.7] - 2024-06-29

 ### Added
......
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = migrations
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires python>=3.9 or the backports.zoneinfo library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
# sqlalchemy.url = REPLACE_WITH_DATABASE_URL
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
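
For context, a minimal sketch (not from the repo) of driving the migrations this `alembic.ini` configures from Python rather than the `alembic` CLI; the config-file path is an assumption about the working directory:

```python
# Hedged sketch: apply Alembic migrations programmatically.
# Assumes you run from the directory holding alembic.ini, whose
# script_location = migrations matches the config above.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")     # path is an assumption; adjust to your layout
command.upgrade(cfg, "head")    # apply all pending revisions
```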
@@ -14,7 +14,6 @@ from fastapi import (
 from fastapi.responses import StreamingResponse, JSONResponse, FileResponse
 from fastapi.middleware.cors import CORSMiddleware

-from faster_whisper import WhisperModel
 from pydantic import BaseModel

 import uuid
@@ -277,6 +276,8 @@ def transcribe(
     f.close()

     if app.state.config.STT_ENGINE == "":
+        from faster_whisper import WhisperModel
+
         whisper_kwargs = {
             "model_size_or_path": WHISPER_MODEL,
             "device": whisper_device_type,
......
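The hunk above moves the `faster_whisper` import from module scope into the code path that needs it, which is what the changelog's "Lazy Loading" entry refers to. A standalone sketch of the pattern (the function name and parameters here are illustrative, not from the codebase):

```python
# Sketch of the lazy-import pattern: the heavy dependency is only imported,
# and its model weights only loaded, when local transcription actually runs,
# so it is not paid for at application startup.
def transcribe_locally(audio_path: str, model_name: str = "base") -> str:
    from faster_whisper import WhisperModel  # deferred import

    model = WhisperModel(model_name, device="cpu", compute_type="int8")
    segments, _info = model.transcribe(audio_path)
    return " ".join(segment.text for segment in segments)
```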
@@ -12,7 +12,6 @@ from fastapi import (
     Form,
 )
 from fastapi.middleware.cors import CORSMiddleware
-from faster_whisper import WhisperModel

 from constants import ERROR_MESSAGES
 from utils.utils import (
......
@@ -25,6 +25,7 @@ from utils.task import prompt_template
 from config import (
     SRC_LOG_LEVELS,
     ENABLE_OPENAI_API,
+    AIOHTTP_CLIENT_TIMEOUT,
     OPENAI_API_BASE_URLS,
     OPENAI_API_KEYS,
     CACHE_DIR,
@@ -463,7 +464,9 @@ async def generate_chat_completion(
     streaming = False

     try:
-        session = aiohttp.ClientSession(trust_env=True)
+        session = aiohttp.ClientSession(
+            trust_env=True, timeout=aiohttp.ClientTimeout(total=AIOHTTP_CLIENT_TIMEOUT)
+        )
         r = await session.request(
             method="POST",
             url=f"{url}/chat/completions",
......
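This is the streaming-timeout fix from the changelog: `aiohttp.ClientTimeout(total=N)` bounds the whole request, including streamed reads, so a stalled upstream can no longer hang the session indefinitely. A self-contained sketch with an illustrative timeout value (the real value comes from the `AIOHTTP_CLIENT_TIMEOUT` environment setting):

```python
# Hedged sketch of the timeout behavior introduced above.
import asyncio
import aiohttp

AIOHTTP_CLIENT_TIMEOUT = 300  # assumed value; configured via env in Open WebUI

async def post_chat(url: str, payload: dict) -> dict:
    timeout = aiohttp.ClientTimeout(total=AIOHTTP_CLIENT_TIMEOUT)
    async with aiohttp.ClientSession(trust_env=True, timeout=timeout) as session:
        async with session.post(f"{url}/chat/completions", json=payload) as r:
            return await r.json()

# asyncio.run(post_chat("https://api.openai.com/v1", {"model": "...", "messages": []}))
```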
@@ -48,8 +48,6 @@ import mimetypes
 import uuid
 import json

-import sentence_transformers
-
 from apps.webui.models.documents import (
     Documents,
     DocumentForm,
@@ -93,6 +91,8 @@ from config import (
     SRC_LOG_LEVELS,
     UPLOAD_DIR,
     DOCS_DIR,
+    CONTENT_EXTRACTION_ENGINE,
+    TIKA_SERVER_URL,
     RAG_TOP_K,
     RAG_RELEVANCE_THRESHOLD,
     RAG_EMBEDDING_ENGINE,
@@ -148,6 +148,9 @@ app.state.config.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION = (
     ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION
 )

+app.state.config.CONTENT_EXTRACTION_ENGINE = CONTENT_EXTRACTION_ENGINE
+app.state.config.TIKA_SERVER_URL = TIKA_SERVER_URL
+
 app.state.config.CHUNK_SIZE = CHUNK_SIZE
 app.state.config.CHUNK_OVERLAP = CHUNK_OVERLAP
@@ -190,6 +193,8 @@ def update_embedding_model(
     update_model: bool = False,
 ):
     if embedding_model and app.state.config.RAG_EMBEDDING_ENGINE == "":
+        import sentence_transformers
+
         app.state.sentence_transformer_ef = sentence_transformers.SentenceTransformer(
             get_model_path(embedding_model, update_model),
             device=DEVICE_TYPE,
@@ -204,6 +209,8 @@ def update_reranking_model(
     update_model: bool = False,
 ):
     if reranking_model:
+        import sentence_transformers
+
         app.state.sentence_transformer_rf = sentence_transformers.CrossEncoder(
             get_model_path(reranking_model, update_model),
             device=DEVICE_TYPE,
@@ -388,6 +395,10 @@ async def get_rag_config(user=Depends(get_admin_user)):
     return {
         "status": True,
         "pdf_extract_images": app.state.config.PDF_EXTRACT_IMAGES,
+        "content_extraction": {
+            "engine": app.state.config.CONTENT_EXTRACTION_ENGINE,
+            "tika_server_url": app.state.config.TIKA_SERVER_URL,
+        },
         "chunk": {
             "chunk_size": app.state.config.CHUNK_SIZE,
             "chunk_overlap": app.state.config.CHUNK_OVERLAP,
@@ -417,6 +428,11 @@ async def get_rag_config(user=Depends(get_admin_user)):
     }

+class ContentExtractionConfig(BaseModel):
+    engine: str = ""
+    tika_server_url: Optional[str] = None
+
 class ChunkParamUpdateForm(BaseModel):
     chunk_size: int
     chunk_overlap: int
@@ -450,6 +466,7 @@ class WebConfig(BaseModel):
 class ConfigUpdateForm(BaseModel):
     pdf_extract_images: Optional[bool] = None
+    content_extraction: Optional[ContentExtractionConfig] = None
     chunk: Optional[ChunkParamUpdateForm] = None
     youtube: Optional[YoutubeLoaderConfig] = None
     web: Optional[WebConfig] = None
@@ -463,6 +480,11 @@ async def update_rag_config(form_data: ConfigUpdateForm, user=Depends(get_admin_
         else app.state.config.PDF_EXTRACT_IMAGES
     )

+    if form_data.content_extraction is not None:
+        log.info(f"Updating text settings: {form_data.content_extraction}")
+        app.state.config.CONTENT_EXTRACTION_ENGINE = form_data.content_extraction.engine
+        app.state.config.TIKA_SERVER_URL = form_data.content_extraction.tika_server_url
+
     if form_data.chunk is not None:
         app.state.config.CHUNK_SIZE = form_data.chunk.chunk_size
         app.state.config.CHUNK_OVERLAP = form_data.chunk.chunk_overlap
@@ -499,6 +521,10 @@ async def update_rag_config(form_data: ConfigUpdateForm, user=Depends(get_admin_
     return {
         "status": True,
         "pdf_extract_images": app.state.config.PDF_EXTRACT_IMAGES,
+        "content_extraction": {
+            "engine": app.state.config.CONTENT_EXTRACTION_ENGINE,
+            "tika_server_url": app.state.config.TIKA_SERVER_URL,
+        },
         "chunk": {
             "chunk_size": app.state.config.CHUNK_SIZE,
             "chunk_overlap": app.state.config.CHUNK_OVERLAP,
@@ -978,13 +1004,49 @@ def store_docs_in_vector_db(docs, collection_name, overwrite: bool = False) -> b
         return True
     except Exception as e:
-        log.exception(e)
         if e.__class__.__name__ == "UniqueConstraintError":
             return True
+
+        log.exception(e)
         return False

+class TikaLoader:
+    def __init__(self, file_path, mime_type=None):
+        self.file_path = file_path
+        self.mime_type = mime_type
+
+    def load(self) -> List[Document]:
+        with open(self.file_path, "rb") as f:
+            data = f.read()
+
+        if self.mime_type is not None:
+            headers = {"Content-Type": self.mime_type}
+        else:
+            headers = {}
+
+        endpoint = app.state.config.TIKA_SERVER_URL
+        if not endpoint.endswith("/"):
+            endpoint += "/"
+        endpoint += "tika/text"
+
+        r = requests.put(endpoint, data=data, headers=headers)
+
+        if r.ok:
+            raw_metadata = r.json()
+            text = raw_metadata.get("X-TIKA:content", "<No text content found>")
+
+            if "Content-Type" in raw_metadata:
+                headers["Content-Type"] = raw_metadata["Content-Type"]
+
+            log.info("Tika extracted text: %s", text)
+
+            return [Document(page_content=text, metadata=headers)]
+        else:
+            raise Exception(f"Error calling Tika: {r.reason}")
+
 def get_loader(filename: str, file_content_type: str, file_path: str):
     file_ext = filename.split(".")[-1].lower()
     known_type = True
@@ -1035,47 +1097,58 @@ def get_loader(filename: str, file_content_type: str, file_path: str):
         "msg",
     ]

-    if file_ext == "pdf":
-        loader = PyPDFLoader(
-            file_path, extract_images=app.state.config.PDF_EXTRACT_IMAGES
-        )
-    elif file_ext == "csv":
-        loader = CSVLoader(file_path)
-    elif file_ext == "rst":
-        loader = UnstructuredRSTLoader(file_path, mode="elements")
-    elif file_ext == "xml":
-        loader = UnstructuredXMLLoader(file_path)
-    elif file_ext in ["htm", "html"]:
-        loader = BSHTMLLoader(file_path, open_encoding="unicode_escape")
-    elif file_ext == "md":
-        loader = UnstructuredMarkdownLoader(file_path)
-    elif file_content_type == "application/epub+zip":
-        loader = UnstructuredEPubLoader(file_path)
-    elif (
-        file_content_type
-        == "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
-        or file_ext in ["doc", "docx"]
-    ):
-        loader = Docx2txtLoader(file_path)
-    elif file_content_type in [
-        "application/vnd.ms-excel",
-        "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
-    ] or file_ext in ["xls", "xlsx"]:
-        loader = UnstructuredExcelLoader(file_path)
-    elif file_content_type in [
-        "application/vnd.ms-powerpoint",
-        "application/vnd.openxmlformats-officedocument.presentationml.presentation",
-    ] or file_ext in ["ppt", "pptx"]:
-        loader = UnstructuredPowerPointLoader(file_path)
-    elif file_ext == "msg":
-        loader = OutlookMessageLoader(file_path)
-    elif file_ext in known_source_ext or (
-        file_content_type and file_content_type.find("text/") >= 0
-    ):
-        loader = TextLoader(file_path, autodetect_encoding=True)
-    else:
-        loader = TextLoader(file_path, autodetect_encoding=True)
-        known_type = False
+    if (
+        app.state.config.CONTENT_EXTRACTION_ENGINE == "tika"
+        and app.state.config.TIKA_SERVER_URL
+    ):
+        if file_ext in known_source_ext or (
+            file_content_type and file_content_type.find("text/") >= 0
+        ):
+            loader = TextLoader(file_path, autodetect_encoding=True)
+        else:
+            loader = TikaLoader(file_path, file_content_type)
+    else:
+        if file_ext == "pdf":
+            loader = PyPDFLoader(
+                file_path, extract_images=app.state.config.PDF_EXTRACT_IMAGES
+            )
+        elif file_ext == "csv":
+            loader = CSVLoader(file_path)
+        elif file_ext == "rst":
+            loader = UnstructuredRSTLoader(file_path, mode="elements")
+        elif file_ext == "xml":
+            loader = UnstructuredXMLLoader(file_path)
+        elif file_ext in ["htm", "html"]:
+            loader = BSHTMLLoader(file_path, open_encoding="unicode_escape")
+        elif file_ext == "md":
+            loader = UnstructuredMarkdownLoader(file_path)
+        elif file_content_type == "application/epub+zip":
+            loader = UnstructuredEPubLoader(file_path)
+        elif (
+            file_content_type
+            == "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
+            or file_ext in ["doc", "docx"]
+        ):
+            loader = Docx2txtLoader(file_path)
+        elif file_content_type in [
+            "application/vnd.ms-excel",
+            "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
+        ] or file_ext in ["xls", "xlsx"]:
+            loader = UnstructuredExcelLoader(file_path)
+        elif file_content_type in [
+            "application/vnd.ms-powerpoint",
+            "application/vnd.openxmlformats-officedocument.presentationml.presentation",
+        ] or file_ext in ["ppt", "pptx"]:
+            loader = UnstructuredPowerPointLoader(file_path)
+        elif file_ext == "msg":
+            loader = OutlookMessageLoader(file_path)
+        elif file_ext in known_source_ext or (
+            file_content_type and file_content_type.find("text/") >= 0
+        ):
+            loader = TextLoader(file_path, autodetect_encoding=True)
+        else:
+            loader = TextLoader(file_path, autodetect_encoding=True)
+            known_type = False

     return loader, known_type
......
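The `TikaLoader` added above simply PUTs the raw file to Tika Server's `/tika/text` endpoint and reads the extracted body from the `X-TIKA:content` key of the JSON response. A standalone sketch of the same call, runnable against a local Tika server (for example the apache/tika Docker image listening on port 9998; the helper name is illustrative):

```python
# Hedged sketch of the Tika round-trip used by TikaLoader.
import requests

def tika_extract_text(path: str, server: str = "http://localhost:9998") -> str:
    with open(path, "rb") as f:
        data = f.read()
    # PUT /tika/text returns JSON metadata; the body sits under "X-TIKA:content"
    r = requests.put(f"{server}/tika/text", data=data)
    r.raise_for_status()
    return r.json().get("X-TIKA:content", "")

# print(tika_extract_text("report.docx"))
```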
@@ -294,14 +294,16 @@ def get_rag_context(
     extracted_collections.extend(collection_names)

-    context_string = ""
+    contexts = []
     citations = []
     for context in relevant_contexts:
         try:
             if "documents" in context:
-                context_string += "\n\n".join(
-                    [text for text in context["documents"][0] if text is not None]
-                )
+                contexts.append(
+                    "\n\n".join(
+                        [text for text in context["documents"][0] if text is not None]
+                    )
+                )
             if "metadatas" in context:
@@ -315,9 +317,7 @@ def get_rag_context(
         except Exception as e:
             log.exception(e)

-    context_string = context_string.strip()
-
-    return context_string, citations
+    return contexts, citations

 def get_model_path(model: str, update_model: bool = False):
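Note the return-type change here: `get_rag_context` now yields a list of per-collection context strings rather than one pre-joined string, so callers decide how to join or template them. A hedged sketch of consuming the new shape (the separator and prompt are illustrative, not the app's actual template):

```python
# Illustrative downstream use of the new (contexts, citations) return value.
contexts = ["first retrieved passage", "second retrieved passage"]
citations = [{"source": "doc-1"}, {"source": "doc-2"}]

context_string = "\n\n".join(contexts)
prompt = f"Use the following context:\n{context_string}\n\nQuestion: ..."
print(prompt)
```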
@@ -442,8 +442,6 @@ from langchain_core.documents import BaseDocumentCompressor, Document
 from langchain_core.callbacks import Callbacks
 from langchain_core.pydantic_v1 import Extra

-from sentence_transformers import util

 class RerankCompressor(BaseDocumentCompressor):
     embedding_function: Any
@@ -468,6 +466,8 @@ class RerankCompressor(BaseDocumentCompressor):
                 [(query, doc.page_content) for doc in documents]
             )
         else:
+            from sentence_transformers import util
+
             query_embedding = self.embedding_function(query)
             document_embedding = self.embedding_function(
                 [doc.page_content for doc in documents]
......
 import os
 import logging
 import json
+from contextlib import contextmanager

-from peewee import *
 from peewee_migrate import Router

 from apps.webui.internal.wrappers import register_connection

+from typing import Optional, Any
+from typing_extensions import Self
+
+from sqlalchemy import create_engine, types, Dialect
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import sessionmaker, scoped_session
+from sqlalchemy.sql.type_api import _T
+
 from config import SRC_LOG_LEVELS, DATA_DIR, DATABASE_URL, BACKEND_DIR

 log = logging.getLogger(__name__)
 log.setLevel(SRC_LOG_LEVELS["DB"])

-class JSONField(TextField):
+class JSONField(types.TypeDecorator):
+    impl = types.Text
+    cache_ok = True
+
+    def process_bind_param(self, value: Optional[_T], dialect: Dialect) -> Any:
+        return json.dumps(value)
+
+    def process_result_value(self, value: Optional[_T], dialect: Dialect) -> Any:
+        if value is not None:
+            return json.loads(value)
+
+    def copy(self, **kw: Any) -> Self:
+        return JSONField(self.impl.length)
+
     def db_value(self, value):
         return json.dumps(value)
@@ -30,25 +51,60 @@ else:
     pass

-# The `register_connection` function encapsulates the logic for setting up
-# the database connection based on the connection string, while `connect`
-# is a Peewee-specific method to manage the connection state and avoid errors
-# when a connection is already open.
-try:
-    DB = register_connection(DATABASE_URL)
-    log.info(f"Connected to a {DB.__class__.__name__} database.")
-except Exception as e:
-    log.error(f"Failed to initialize the database connection: {e}")
-    raise
-
-router = Router(
-    DB,
-    migrate_dir=BACKEND_DIR / "apps" / "webui" / "internal" / "migrations",
-    logger=log,
-)
-router.run()
-try:
-    DB.connect(reuse_if_open=True)
-except OperationalError as e:
-    log.info(f"Failed to connect to database again due to: {e}")
-    pass
+# Workaround to handle the peewee migration
+# This is required to ensure the peewee migration is handled before the alembic migration
+def handle_peewee_migration(DATABASE_URL):
+    try:
+        # Replace the postgresql:// with postgres:// and %40 with @ in the DATABASE_URL
+        db = register_connection(
+            DATABASE_URL.replace("postgresql://", "postgres://").replace("%40", "@")
+        )
+        migrate_dir = BACKEND_DIR / "apps" / "webui" / "internal" / "migrations"
+        router = Router(db, logger=log, migrate_dir=migrate_dir)
+        router.run()
+        db.close()
+
+        # check if db connection has been closed
+
+    except Exception as e:
+        log.error(f"Failed to initialize the database connection: {e}")
+        raise
+    finally:
+        # Properly closing the database connection
+        if db and not db.is_closed():
+            db.close()
+
+        # Assert if db connection has been closed
+        assert db.is_closed(), "Database connection is still open."
+
+handle_peewee_migration(DATABASE_URL)
+
+SQLALCHEMY_DATABASE_URL = DATABASE_URL
+
+if "sqlite" in SQLALCHEMY_DATABASE_URL:
+    engine = create_engine(
+        SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False}
+    )
+else:
+    engine = create_engine(SQLALCHEMY_DATABASE_URL, pool_pre_ping=True)
+
+SessionLocal = sessionmaker(
+    autocommit=False, autoflush=False, bind=engine, expire_on_commit=False
+)
+Base = declarative_base()
+Session = scoped_session(SessionLocal)
+
+# Dependency
+def get_session():
+    db = SessionLocal()
+    try:
+        yield db
+    finally:
+        db.close()
+
+get_db = contextmanager(get_session)
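This file is the core of the Peewee-to-SQLAlchemy switch: `JSONField` becomes a `TypeDecorator` that (de)serializes Python objects to a TEXT column, and `get_db()` yields a session that is always closed. A hedged sketch of how the two pieces combine; the `Example` table is illustrative, not part of the app:

```python
# Sketch under stated assumptions: the names below are importable from the
# new db module shown above; "example" is a hypothetical table.
from sqlalchemy import Column, String

from apps.webui.internal.db import Base, JSONField, engine, get_db

class Example(Base):
    __tablename__ = "example"  # hypothetical table for illustration

    id = Column(String, primary_key=True)
    meta = Column(JSONField)  # stored as JSON text, read back as dict/list

Base.metadata.create_all(engine)

with get_db() as db:
    db.add(Example(id="1", meta={"tags": ["a", "b"]}))
    db.commit()

with get_db() as db:
    row = db.get(Example, "1")
    print(row.meta["tags"])  # JSONField decoded the TEXT column to a dict
```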
"""Peewee migrations -- 017_add_user_oauth_sub.py. """Peewee migrations -- 017_add_user_oauth_sub.py.
Some examples (model - class or model name):: Some examples (model - class or model name)::
> Model = migrator.orm['table_name'] # Return model in current state by name > Model = migrator.orm['table_name'] # Return model in current state by name
> Model = migrator.ModelClass # Return model in current state by name > Model = migrator.ModelClass # Return model in current state by name
> migrator.sql(sql) # Run custom SQL > migrator.sql(sql) # Run custom SQL
> migrator.run(func, *args, **kwargs) # Run python function with the given args > migrator.run(func, *args, **kwargs) # Run python function with the given args
> migrator.create_model(Model) # Create a model (could be used as decorator) > migrator.create_model(Model) # Create a model (could be used as decorator)
...@@ -21,7 +18,6 @@ Some examples (model - class or model name):: ...@@ -21,7 +18,6 @@ Some examples (model - class or model name)::
> migrator.drop_index(model, *col_names) > migrator.drop_index(model, *col_names)
> migrator.drop_not_null(model, *field_names) > migrator.drop_not_null(model, *field_names)
> migrator.drop_constraints(model, *constraints) > migrator.drop_constraints(model, *constraints)
""" """
from contextlib import suppress from contextlib import suppress
......
# Database Migrations
This directory contains all the database migrations for the web app.
Migrations are done using the [`peewee-migrate`](https://github.com/klen/peewee_migrate) library.
Migrations are automatically run at app startup.
## Creating a migration
Have you made a change to the schema of an existing model?
You will need to create a migration file to ensure that existing databases are updated for backwards compatibility.
1. Have a database file (`webui.db`) that has the old schema prior to any of your changes.
2. Make your changes to the models.
3. From the `backend` directory, run the following command:
```bash
pw_migrate create --auto --auto-source apps.webui.models --database sqlite:///${SQLITE_DB} --directory apps/web/internal/migrations ${MIGRATION_NAME}
```
- `$SQLITE_DB` should be the path to the database file.
- `$MIGRATION_NAME` should be a descriptive name for the migration.
4. The migration file will be created in the `apps/web/internal/migrations` directory.
@@ -3,7 +3,7 @@ from fastapi.routing import APIRoute
 from fastapi.responses import StreamingResponse
 from fastapi.middleware.cors import CORSMiddleware
 from starlette.middleware.sessions import SessionMiddleware
+from sqlalchemy.orm import Session

 from apps.webui.routers import (
     auths,
     users,
@@ -19,8 +19,13 @@ from apps.webui.routers import (
     functions,
 )
 from apps.webui.models.functions import Functions
+from apps.webui.models.models import Models
 from apps.webui.utils import load_function_module_by_id

 from utils.misc import stream_message_template
+from utils.task import prompt_template

 from config import (
     WEBUI_BUILD_HASH,
@@ -39,6 +44,8 @@ from config import (
     WEBUI_BANNERS,
     ENABLE_COMMUNITY_SHARING,
     AppConfig,
+    OAUTH_USERNAME_CLAIM,
+    OAUTH_PICTURE_CLAIM,
 )

 import inspect
@@ -74,6 +81,9 @@ app.state.config.BANNERS = WEBUI_BANNERS
 app.state.config.ENABLE_COMMUNITY_SHARING = ENABLE_COMMUNITY_SHARING

+app.state.config.OAUTH_USERNAME_CLAIM = OAUTH_USERNAME_CLAIM
+app.state.config.OAUTH_PICTURE_CLAIM = OAUTH_PICTURE_CLAIM
+
 app.state.MODELS = {}
 app.state.TOOLS = {}
 app.state.FUNCTIONS = {}
@@ -129,7 +139,6 @@ async def get_pipe_models():
         function_module = app.state.FUNCTIONS[pipe.id]

         if hasattr(function_module, "valves") and hasattr(function_module, "Valves"):
-            print(f"Getting valves for {pipe.id}")
             valves = Functions.get_function_valves_by_id(pipe.id)
             function_module.valves = function_module.Valves(
                 **(valves if valves else {})
@@ -181,6 +190,77 @@ async def get_pipe_models():

 async def generate_function_chat_completion(form_data, user):
+    model_id = form_data.get("model")
+    model_info = Models.get_model_by_id(model_id)
+
+    if model_info:
+        if model_info.base_model_id:
+            form_data["model"] = model_info.base_model_id
+
+        model_info.params = model_info.params.model_dump()
+
+        if model_info.params:
+            if model_info.params.get("temperature", None) is not None:
+                form_data["temperature"] = float(model_info.params.get("temperature"))
+
+            if model_info.params.get("top_p", None):
+                form_data["top_p"] = int(model_info.params.get("top_p", None))
+
+            if model_info.params.get("max_tokens", None):
+                form_data["max_tokens"] = int(model_info.params.get("max_tokens", None))
+
+            if model_info.params.get("frequency_penalty", None):
+                form_data["frequency_penalty"] = int(
+                    model_info.params.get("frequency_penalty", None)
+                )
+
+            if model_info.params.get("seed", None):
+                form_data["seed"] = model_info.params.get("seed", None)
+
+            if model_info.params.get("stop", None):
+                form_data["stop"] = (
+                    [
+                        bytes(stop, "utf-8").decode("unicode_escape")
+                        for stop in model_info.params["stop"]
+                    ]
+                    if model_info.params.get("stop", None)
+                    else None
+                )
+
+        system = model_info.params.get("system", None)
+        if system:
+            system = prompt_template(
+                system,
+                **(
+                    {
+                        "user_name": user.name,
+                        "user_location": (
+                            user.info.get("location") if user.info else None
+                        ),
+                    }
+                    if user
+                    else {}
+                ),
+            )
+            # Check if the payload already has a system message
+            # If not, add a system message to the payload
+            if form_data.get("messages"):
+                for message in form_data["messages"]:
+                    if message.get("role") == "system":
+                        message["content"] = system + message["content"]
+                        break
+                else:
+                    form_data["messages"].insert(
+                        0,
+                        {
+                            "role": "system",
+                            "content": system,
+                        },
+                    )
+    else:
+        pass
+
     async def job():
         pipe_id = form_data["model"]
         if "." in pipe_id:
@@ -259,6 +339,9 @@ async def generate_function_chat_completion(form_data, user):
             if isinstance(line, BaseModel):
                 line = line.model_dump_json()
                 line = f"data: {line}"
+            if isinstance(line, dict):
+                line = f"data: {json.dumps(line)}"

             try:
                 line = line.decode("utf-8")
             except:
......
 from pydantic import BaseModel
-from typing import List, Union, Optional
-import time
+from typing import Optional
 import uuid
 import logging

-from peewee import *
+from sqlalchemy import String, Column, Boolean, Text

 from apps.webui.models.users import UserModel, Users
 from utils.utils import verify_password

-from apps.webui.internal.db import DB
+from apps.webui.internal.db import Base, get_db

 from config import SRC_LOG_LEVELS
@@ -20,14 +19,13 @@ log.setLevel(SRC_LOG_LEVELS["MODELS"])
 ####################

-class Auth(Model):
-    id = CharField(unique=True)
-    email = CharField()
-    password = TextField()
-    active = BooleanField()
-
-    class Meta:
-        database = DB
+class Auth(Base):
+    __tablename__ = "auth"
+
+    id = Column(String, primary_key=True)
+    email = Column(String)
+    password = Column(Text)
+    active = Column(Boolean)

 class AuthModel(BaseModel):
@@ -94,9 +92,6 @@ class AddUserForm(SignupForm):
 class AuthsTable:
-    def __init__(self, db):
-        self.db = db
-        self.db.create_tables([Auth])
-
     def insert_new_auth(
         self,
@@ -107,36 +102,44 @@ class AuthsTable:
role: str = "pending", role: str = "pending",
oauth_sub: Optional[str] = None, oauth_sub: Optional[str] = None,
) -> Optional[UserModel]: ) -> Optional[UserModel]:
log.info("insert_new_auth") with get_db() as db:
id = str(uuid.uuid4()) log.info("insert_new_auth")
auth = AuthModel( id = str(uuid.uuid4())
**{"id": id, "email": email, "password": password, "active": True}
)
result = Auth.create(**auth.model_dump())
user = Users.insert_new_user( auth = AuthModel(
id, name, email, profile_image_url, role, oauth_sub **{"id": id, "email": email, "password": password, "active": True}
) )
result = Auth(**auth.model_dump())
db.add(result)
if result and user: user = Users.insert_new_user(
return user id, name, email, profile_image_url, role, oauth_sub
else: )
return None
db.commit()
db.refresh(result)
if result and user:
return user
else:
return None
def authenticate_user(self, email: str, password: str) -> Optional[UserModel]: def authenticate_user(self, email: str, password: str) -> Optional[UserModel]:
log.info(f"authenticate_user: {email}") log.info(f"authenticate_user: {email}")
try: try:
auth = Auth.get(Auth.email == email, Auth.active == True) with get_db() as db:
if auth:
if verify_password(password, auth.password): auth = db.query(Auth).filter_by(email=email, active=True).first()
user = Users.get_user_by_id(auth.id) if auth:
return user if verify_password(password, auth.password):
user = Users.get_user_by_id(auth.id)
return user
else:
return None
else: else:
return None return None
else:
return None
except: except:
return None return None
@@ -155,46 +158,50 @@ class AuthsTable:
     def authenticate_user_by_trusted_header(self, email: str) -> Optional[UserModel]:
         log.info(f"authenticate_user_by_trusted_header: {email}")
         try:
-            auth = Auth.get(Auth.email == email, Auth.active == True)
-            if auth:
-                user = Users.get_user_by_id(auth.id)
-                return user
+            with get_db() as db:
+                auth = db.query(Auth).filter(email=email, active=True).first()
+                if auth:
+                    user = Users.get_user_by_id(auth.id)
+                    return user
         except:
             return None

     def update_user_password_by_id(self, id: str, new_password: str) -> bool:
         try:
-            query = Auth.update(password=new_password).where(Auth.id == id)
-            result = query.execute()
-
-            return True if result == 1 else False
+            with get_db() as db:
+                result = (
+                    db.query(Auth).filter_by(id=id).update({"password": new_password})
+                )
+                db.commit()
+                return True if result == 1 else False
         except:
             return False

     def update_email_by_id(self, id: str, email: str) -> bool:
         try:
-            query = Auth.update(email=email).where(Auth.id == id)
-            result = query.execute()
-
-            return True if result == 1 else False
+            with get_db() as db:
+                result = db.query(Auth).filter_by(id=id).update({"email": email})
+                db.commit()
+
+                return True if result == 1 else False
         except:
             return False

     def delete_auth_by_id(self, id: str) -> bool:
         try:
-            # Delete User
-            result = Users.delete_user_by_id(id)
-
-            if result:
-                # Delete Auth
-                query = Auth.delete().where(Auth.id == id)
-                query.execute()  # Remove the rows, return number of rows removed.
-
-                return True
-            else:
-                return False
+            with get_db() as db:
+                # Delete User
+                result = Users.delete_user_by_id(id)
+
+                if result:
+                    db.query(Auth).filter_by(id=id).delete()
+                    db.commit()
+
+                    return True
+                else:
+                    return False
         except:
             return False

-Auths = AuthsTable(DB)
+Auths = AuthsTable()
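One subtlety worth flagging in the migrated code: `authenticate_user_by_trusted_header` above uses `filter(email=email, active=True)`, but SQLAlchemy's `filter()` expects column expressions while `filter_by()` expects keyword arguments, so the two spellings are not interchangeable. A minimal illustration of the difference:

```python
# filter_by() takes keyword arguments; filter() takes column expressions.
# Passing keywords to filter(), as the trusted-header path above does,
# raises a TypeError at query time.
from apps.webui.internal.db import get_db
from apps.webui.models.auths import Auth

with get_db() as db:
    by_kwargs = db.query(Auth).filter_by(email="a@b.c", active=True).first()
    by_exprs = (
        db.query(Auth).filter(Auth.email == "a@b.c", Auth.active == True).first()
    )
    # db.query(Auth).filter(email="a@b.c")  # TypeError: unexpected keyword
```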
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
 from typing import List, Union, Optional
-from peewee import *
-from playhouse.shortcuts import model_to_dict

 import json
 import uuid
 import time

-from apps.webui.internal.db import DB
+from sqlalchemy import Column, String, BigInteger, Boolean, Text
+from apps.webui.internal.db import Base, get_db

 ####################
 # Chat DB Schema
 ####################

-class Chat(Model):
-    id = CharField(unique=True)
-    user_id = CharField()
-    title = TextField()
-    chat = TextField()  # Save Chat JSON as Text
-
-    created_at = BigIntegerField()
-    updated_at = BigIntegerField()
-
-    share_id = CharField(null=True, unique=True)
-    archived = BooleanField(default=False)
-
-    class Meta:
-        database = DB
+class Chat(Base):
+    __tablename__ = "chat"
+
+    id = Column(String, primary_key=True)
+    user_id = Column(String)
+    title = Column(Text)
+    chat = Column(Text)  # Save Chat JSON as Text
+
+    created_at = Column(BigInteger)
+    updated_at = Column(BigInteger)
+
+    share_id = Column(Text, unique=True, nullable=True)
+    archived = Column(Boolean, default=False)

 class ChatModel(BaseModel):
+    model_config = ConfigDict(from_attributes=True)
+
     id: str
     user_id: str
     title: str
@@ -75,91 +77,104 @@ class ChatTitleIdResponse(BaseModel):
 class ChatTable:
-    def __init__(self, db):
-        self.db = db
-        db.create_tables([Chat])
-
     def insert_new_chat(self, user_id: str, form_data: ChatForm) -> Optional[ChatModel]:
-        id = str(uuid.uuid4())
-        chat = ChatModel(
-            **{
-                "id": id,
-                "user_id": user_id,
-                "title": (
-                    form_data.chat["title"] if "title" in form_data.chat else "New Chat"
-                ),
-                "chat": json.dumps(form_data.chat),
-                "created_at": int(time.time()),
-                "updated_at": int(time.time()),
-            }
-        )
-
-        result = Chat.create(**chat.model_dump())
-        return chat if result else None
+        with get_db() as db:
+            id = str(uuid.uuid4())
+            chat = ChatModel(
+                **{
+                    "id": id,
+                    "user_id": user_id,
+                    "title": (
+                        form_data.chat["title"]
+                        if "title" in form_data.chat
+                        else "New Chat"
+                    ),
+                    "chat": json.dumps(form_data.chat),
+                    "created_at": int(time.time()),
+                    "updated_at": int(time.time()),
+                }
+            )
+
+            result = Chat(**chat.model_dump())
+            db.add(result)
+            db.commit()
+            db.refresh(result)
+            return ChatModel.model_validate(result) if result else None

     def update_chat_by_id(self, id: str, chat: dict) -> Optional[ChatModel]:
         try:
-            query = Chat.update(
-                chat=json.dumps(chat),
-                title=chat["title"] if "title" in chat else "New Chat",
-                updated_at=int(time.time()),
-            ).where(Chat.id == id)
-            query.execute()
-
-            chat = Chat.get(Chat.id == id)
-            return ChatModel(**model_to_dict(chat))
-        except:
+            with get_db() as db:
+                chat_obj = db.get(Chat, id)
+                chat_obj.chat = json.dumps(chat)
+                chat_obj.title = chat["title"] if "title" in chat else "New Chat"
+                chat_obj.updated_at = int(time.time())
+                db.commit()
+                db.refresh(chat_obj)
+
+                return ChatModel.model_validate(chat_obj)
+        except Exception as e:
             return None

     def insert_shared_chat_by_chat_id(self, chat_id: str) -> Optional[ChatModel]:
-        # Get the existing chat to share
-        chat = Chat.get(Chat.id == chat_id)
-        # Check if the chat is already shared
-        if chat.share_id:
-            return self.get_chat_by_id_and_user_id(chat.share_id, "shared")
-        # Create a new chat with the same data, but with a new ID
-        shared_chat = ChatModel(
-            **{
-                "id": str(uuid.uuid4()),
-                "user_id": f"shared-{chat_id}",
-                "title": chat.title,
-                "chat": chat.chat,
-                "created_at": chat.created_at,
-                "updated_at": int(time.time()),
-            }
-        )
-        shared_result = Chat.create(**shared_chat.model_dump())
-        # Update the original chat with the share_id
-        result = (
-            Chat.update(share_id=shared_chat.id).where(Chat.id == chat_id).execute()
-        )
-
-        return shared_chat if (shared_result and result) else None
+        with get_db() as db:
+            # Get the existing chat to share
+            chat = db.get(Chat, chat_id)
+            # Check if the chat is already shared
+            if chat.share_id:
+                return self.get_chat_by_id_and_user_id(chat.share_id, "shared")
+            # Create a new chat with the same data, but with a new ID
+            shared_chat = ChatModel(
+                **{
+                    "id": str(uuid.uuid4()),
+                    "user_id": f"shared-{chat_id}",
+                    "title": chat.title,
+                    "chat": chat.chat,
+                    "created_at": chat.created_at,
+                    "updated_at": int(time.time()),
+                }
+            )
+            shared_result = Chat(**shared_chat.model_dump())
+            db.add(shared_result)
+            db.commit()
+            db.refresh(shared_result)
+
+            # Update the original chat with the share_id
+            result = (
+                db.query(Chat)
+                .filter_by(id=chat_id)
+                .update({"share_id": shared_chat.id})
+            )
+            db.commit()
+
+            return shared_chat if (shared_result and result) else None

     def update_shared_chat_by_chat_id(self, chat_id: str) -> Optional[ChatModel]:
         try:
-            print("update_shared_chat_by_id")
-            chat = Chat.get(Chat.id == chat_id)
-            print(chat)
-
-            query = Chat.update(
-                title=chat.title,
-                chat=chat.chat,
-            ).where(Chat.id == chat.share_id)
-
-            query.execute()
-
-            chat = Chat.get(Chat.id == chat.share_id)
-            return ChatModel(**model_to_dict(chat))
+            with get_db() as db:
+                print("update_shared_chat_by_id")
+                chat = db.get(Chat, chat_id)
+                print(chat)
+                chat.title = chat.title
+                chat.chat = chat.chat
+                db.commit()
+                db.refresh(chat)
+
+                return self.get_chat_by_id(chat.share_id)
         except:
             return None

     def delete_shared_chat_by_chat_id(self, chat_id: str) -> bool:
         try:
-            query = Chat.delete().where(Chat.user_id == f"shared-{chat_id}")
-            query.execute()  # Remove the rows, return number of rows removed.
-
-            return True
+            with get_db() as db:
+                db.query(Chat).filter_by(user_id=f"shared-{chat_id}").delete()
+                db.commit()
+
+                return True
         except:
             return False
@@ -167,56 +182,50 @@ class ChatTable:
         self, id: str, share_id: Optional[str]
     ) -> Optional[ChatModel]:
         try:
-            query = Chat.update(
-                share_id=share_id,
-            ).where(Chat.id == id)
-            query.execute()
-
-            chat = Chat.get(Chat.id == id)
-            return ChatModel(**model_to_dict(chat))
+            with get_db() as db:
+                chat = db.get(Chat, id)
+                chat.share_id = share_id
+                db.commit()
+                db.refresh(chat)
+                return ChatModel.model_validate(chat)
         except:
             return None

     def toggle_chat_archive_by_id(self, id: str) -> Optional[ChatModel]:
         try:
-            chat = self.get_chat_by_id(id)
-            query = Chat.update(
-                archived=(not chat.archived),
-            ).where(Chat.id == id)
-
-            query.execute()
-
-            chat = Chat.get(Chat.id == id)
-            return ChatModel(**model_to_dict(chat))
+            with get_db() as db:
+                chat = db.get(Chat, id)
+                chat.archived = not chat.archived
+                db.commit()
+                db.refresh(chat)
+                return ChatModel.model_validate(chat)
         except:
             return None

     def archive_all_chats_by_user_id(self, user_id: str) -> bool:
         try:
-            chats = self.get_chats_by_user_id(user_id)
-            for chat in chats:
-                query = Chat.update(
-                    archived=True,
-                ).where(Chat.id == chat.id)
-
-                query.execute()
-
-            return True
+            with get_db() as db:
+                db.query(Chat).filter_by(user_id=user_id).update({"archived": True})
+                db.commit()
+                return True
         except:
             return False

     def get_archived_chat_list_by_user_id(
         self, user_id: str, skip: int = 0, limit: int = 50
     ) -> List[ChatModel]:
-        return [
-            ChatModel(**model_to_dict(chat))
-            for chat in Chat.select()
-            .where(Chat.archived == True)
-            .where(Chat.user_id == user_id)
-            .order_by(Chat.updated_at.desc())
-            # .limit(limit)
-            # .offset(skip)
-        ]
+        with get_db() as db:
+            all_chats = (
+                db.query(Chat)
+                .filter_by(user_id=user_id, archived=True)
+                .order_by(Chat.updated_at.desc())
+                # .limit(limit).offset(skip)
+                .all()
+            )
+            return [ChatModel.model_validate(chat) for chat in all_chats]
     def get_chat_list_by_user_id(
         self,
@@ -225,131 +234,141 @@ class ChatTable:
         skip: int = 0,
         limit: int = 50,
     ) -> List[ChatModel]:
-        if include_archived:
-            return [
-                ChatModel(**model_to_dict(chat))
-                for chat in Chat.select()
-                .where(Chat.user_id == user_id)
-                .order_by(Chat.updated_at.desc())
-                # .limit(limit)
-                # .offset(skip)
-            ]
-        else:
-            return [
-                ChatModel(**model_to_dict(chat))
-                for chat in Chat.select()
-                .where(Chat.archived == False)
-                .where(Chat.user_id == user_id)
-                .order_by(Chat.updated_at.desc())
-                # .limit(limit)
-                # .offset(skip)
-            ]
+        with get_db() as db:
+            query = db.query(Chat).filter_by(user_id=user_id)
+            if not include_archived:
+                query = query.filter_by(archived=False)
+            all_chats = (
+                query.order_by(Chat.updated_at.desc())
+                # .limit(limit).offset(skip)
+                .all()
+            )
+            return [ChatModel.model_validate(chat) for chat in all_chats]

     def get_chat_list_by_chat_ids(
         self, chat_ids: List[str], skip: int = 0, limit: int = 50
     ) -> List[ChatModel]:
-        return [
-            ChatModel(**model_to_dict(chat))
-            for chat in Chat.select()
-            .where(Chat.archived == False)
-            .where(Chat.id.in_(chat_ids))
-            .order_by(Chat.updated_at.desc())
-        ]
+        with get_db() as db:
+            all_chats = (
+                db.query(Chat)
+                .filter(Chat.id.in_(chat_ids))
+                .filter_by(archived=False)
+                .order_by(Chat.updated_at.desc())
+                .all()
+            )
+            return [ChatModel.model_validate(chat) for chat in all_chats]

     def get_chat_by_id(self, id: str) -> Optional[ChatModel]:
         try:
-            chat = Chat.get(Chat.id == id)
-            return ChatModel(**model_to_dict(chat))
+            with get_db() as db:
+                chat = db.get(Chat, id)
+                return ChatModel.model_validate(chat)
         except:
             return None

     def get_chat_by_share_id(self, id: str) -> Optional[ChatModel]:
         try:
-            chat = Chat.get(Chat.share_id == id)
-            if chat:
-                chat = Chat.get(Chat.id == id)
-                return ChatModel(**model_to_dict(chat))
-            else:
-                return None
-        except:
+            with get_db() as db:
+                chat = db.query(Chat).filter_by(share_id=id).first()
+
+                if chat:
+                    return self.get_chat_by_id(id)
+                else:
+                    return None
+        except Exception as e:
             return None

     def get_chat_by_id_and_user_id(self, id: str, user_id: str) -> Optional[ChatModel]:
         try:
-            chat = Chat.get(Chat.id == id, Chat.user_id == user_id)
-            return ChatModel(**model_to_dict(chat))
+            with get_db() as db:
+                chat = db.query(Chat).filter_by(id=id, user_id=user_id).first()
+                return ChatModel.model_validate(chat)
         except:
             return None

     def get_chats(self, skip: int = 0, limit: int = 50) -> List[ChatModel]:
-        return [
-            ChatModel(**model_to_dict(chat))
-            for chat in Chat.select().order_by(Chat.updated_at.desc())
-            # .limit(limit).offset(skip)
-        ]
+        with get_db() as db:
+            all_chats = (
+                db.query(Chat)
+                # .limit(limit).offset(skip)
+                .order_by(Chat.updated_at.desc())
+            )
+            return [ChatModel.model_validate(chat) for chat in all_chats]

     def get_chats_by_user_id(self, user_id: str) -> List[ChatModel]:
-        return [
-            ChatModel(**model_to_dict(chat))
-            for chat in Chat.select()
-            .where(Chat.user_id == user_id)
-            .order_by(Chat.updated_at.desc())
-            # .limit(limit).offset(skip)
-        ]
+        with get_db() as db:
+            all_chats = (
+                db.query(Chat)
+                .filter_by(user_id=user_id)
+                .order_by(Chat.updated_at.desc())
+            )
+            return [ChatModel.model_validate(chat) for chat in all_chats]

     def get_archived_chats_by_user_id(self, user_id: str) -> List[ChatModel]:
-        return [
-            ChatModel(**model_to_dict(chat))
-            for chat in Chat.select()
-            .where(Chat.archived == True)
-            .where(Chat.user_id == user_id)
-            .order_by(Chat.updated_at.desc())
-        ]
+        with get_db() as db:
+            all_chats = (
+                db.query(Chat)
+                .filter_by(user_id=user_id, archived=True)
+                .order_by(Chat.updated_at.desc())
+            )
+            return [ChatModel.model_validate(chat) for chat in all_chats]

     def delete_chat_by_id(self, id: str) -> bool:
         try:
-            query = Chat.delete().where((Chat.id == id))
-            query.execute()  # Remove the rows, return number of rows removed.
-
-            return True and self.delete_shared_chat_by_chat_id(id)
+            with get_db() as db:
+                db.query(Chat).filter_by(id=id).delete()
+                db.commit()
+
+                return True and self.delete_shared_chat_by_chat_id(id)
         except:
             return False

     def delete_chat_by_id_and_user_id(self, id: str, user_id: str) -> bool:
         try:
-            query = Chat.delete().where((Chat.id == id) & (Chat.user_id == user_id))
-            query.execute()  # Remove the rows, return number of rows removed.
-
-            return True and self.delete_shared_chat_by_chat_id(id)
+            with get_db() as db:
+                db.query(Chat).filter_by(id=id, user_id=user_id).delete()
+                db.commit()
+
+                return True and self.delete_shared_chat_by_chat_id(id)
         except:
             return False

     def delete_chats_by_user_id(self, user_id: str) -> bool:
         try:
-            self.delete_shared_chats_by_user_id(user_id)
-
-            query = Chat.delete().where(Chat.user_id == user_id)
-            query.execute()  # Remove the rows, return number of rows removed.
-
-            return True
+            with get_db() as db:
+                self.delete_shared_chats_by_user_id(user_id)
+
+                db.query(Chat).filter_by(user_id=user_id).delete()
+                db.commit()
+
+                return True
         except:
             return False

     def delete_shared_chats_by_user_id(self, user_id: str) -> bool:
         try:
-            shared_chat_ids = [
-                f"shared-{chat.id}"
-                for chat in Chat.select().where(Chat.user_id == user_id)
-            ]
-
-            query = Chat.delete().where(Chat.user_id << shared_chat_ids)
-            query.execute()  # Remove the rows, return number of rows removed.
-
-            return True
+            with get_db() as db:
+                chats_by_user = db.query(Chat).filter_by(user_id=user_id).all()
+                shared_chat_ids = [f"shared-{chat.id}" for chat in chats_by_user]
+
+                db.query(Chat).filter(Chat.user_id.in_(shared_chat_ids)).delete()
+                db.commit()
+
+                return True
         except:
             return False

-Chats = ChatTable(DB)
+Chats = ChatTable()
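Throughout this file the migration replaces Peewee's `ChatModel(**model_to_dict(chat))` with Pydantic v2's `ChatModel.model_validate(chat)`, which only works because the models now declare `model_config = ConfigDict(from_attributes=True)`. A self-contained sketch of that pattern; the model and stand-in row below are illustrative, not from the codebase:

```python
# ConfigDict(from_attributes=True) lets model_validate() read a SQLAlchemy
# row (or any object) attribute-by-attribute instead of requiring a dict.
from pydantic import BaseModel, ConfigDict

class ChatSummary(BaseModel):  # hypothetical model for illustration
    model_config = ConfigDict(from_attributes=True)

    id: str
    title: str

class Row:  # stands in for a SQLAlchemy ORM instance
    id = "chat-1"
    title = "New Chat"

print(ChatSummary.model_validate(Row()))  # id='chat-1' title='New Chat'
```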
from pydantic import BaseModel, ConfigDict
from typing import List, Optional

import time
import logging

from sqlalchemy import String, Column, BigInteger, Text

from apps.webui.internal.db import Base, get_db

import json

@@ -22,20 +19,21 @@ log.setLevel(SRC_LOG_LEVELS["MODELS"])

####################


class Document(Base):
    __tablename__ = "document"

    collection_name = Column(String, primary_key=True)
    name = Column(String, unique=True)
    title = Column(Text)
    filename = Column(Text)
    content = Column(Text, nullable=True)
    user_id = Column(String)
    timestamp = Column(BigInteger)


class DocumentModel(BaseModel):
    model_config = ConfigDict(from_attributes=True)

    collection_name: str
    name: str
    title: str
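The new Pydantic models declare model_config = ConfigDict(from_attributes=True), which is what lets DocumentModel.model_validate(...) read a SQLAlchemy row object directly, where the old code went through playhouse's model_to_dict. A small round-trip illustration reusing the classes above; the field values are made up:

    import time

    with get_db() as db:
        row = Document(
            collection_name="docs-demo",  # sample values, not from the commit
            name="readme",
            title="README",
            filename="README.md",
            user_id="user-1",
            timestamp=int(time.time()),
        )
        db.add(row)
        db.commit()
        db.refresh(row)

        # from_attributes=True: fields are read straight off the ORM row.
        model = DocumentModel.model_validate(row)
        print(model.name)  # -> "readme"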
@@ -72,57 +70,63 @@ class DocumentForm(DocumentUpdateForm):


class DocumentsTable:
    def insert_new_doc(
        self, user_id: str, form_data: DocumentForm
    ) -> Optional[DocumentModel]:
        with get_db() as db:
            document = DocumentModel(
                **{
                    **form_data.model_dump(),
                    "user_id": user_id,
                    "timestamp": int(time.time()),
                }
            )

            try:
                result = Document(**document.model_dump())
                db.add(result)
                db.commit()
                db.refresh(result)
                if result:
                    return DocumentModel.model_validate(result)
                else:
                    return None
            except:
                return None

    def get_doc_by_name(self, name: str) -> Optional[DocumentModel]:
        try:
            with get_db() as db:
                document = db.query(Document).filter_by(name=name).first()
                return DocumentModel.model_validate(document) if document else None
        except:
            return None

    def get_docs(self) -> List[DocumentModel]:
        with get_db() as db:
            return [
                DocumentModel.model_validate(doc) for doc in db.query(Document).all()
            ]

    def update_doc_by_name(
        self, name: str, form_data: DocumentUpdateForm
    ) -> Optional[DocumentModel]:
        try:
            with get_db() as db:
                db.query(Document).filter_by(name=name).update(
                    {
                        "title": form_data.title,
                        "name": form_data.name,
                        "timestamp": int(time.time()),
                    }
                )
                db.commit()
                return self.get_doc_by_name(form_data.name)
        except Exception as e:
            log.exception(e)
            return None

@@ -135,26 +139,29 @@ class DocumentsTable:

            doc_content = json.loads(doc.content if doc.content else "{}")
            doc_content = {**doc_content, **updated}

            with get_db() as db:
                db.query(Document).filter_by(name=name).update(
                    {
                        "content": json.dumps(doc_content),
                        "timestamp": int(time.time()),
                    }
                )
                db.commit()
                return self.get_doc_by_name(name)
        except Exception as e:
            log.exception(e)
            return None

    def delete_doc_by_name(self, name: str) -> bool:
        try:
            with get_db() as db:
                db.query(Document).filter_by(name=name).delete()
                db.commit()
                return True
        except:
            return False


Documents = DocumentsTable()
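A hedged usage sketch for the Documents singleton defined above. DocumentForm is declared earlier in this file, outside the visible hunks, so its exact fields are an assumption based on the Document model; the values are illustrative:

    form = DocumentForm(
        collection_name="docs-demo",
        name="notes",
        title="Notes",
        filename="notes.md",
    )

    created = Documents.insert_new_doc("user-1", form)
    fetched = Documents.get_doc_by_name("notes")
    assert created and fetched and created.name == fetched.name

    # Cleanup: delete returns a plain bool rather than raising on failure.
    Documents.delete_doc_by_name("notes")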
from pydantic import BaseModel, ConfigDict
from typing import List, Union, Optional
import time
import logging

from sqlalchemy import Column, String, BigInteger, Text

from apps.webui.internal.db import JSONField, Base, get_db

import json

@@ -18,15 +19,14 @@ log.setLevel(SRC_LOG_LEVELS["MODELS"])

####################


class File(Base):
    __tablename__ = "file"

    id = Column(String, primary_key=True)
    user_id = Column(String)
    filename = Column(Text)
    meta = Column(JSONField)
    created_at = Column(BigInteger)
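meta = Column(JSONField) implies JSONField is now a SQLAlchemy column type exported by apps.webui.internal.db, where under Peewee it was a custom field. Its implementation is not shown in this diff, so the following is only one plausible shape for it, a TypeDecorator over Text that JSON-encodes on write and decodes on read; treat every detail as an assumption:

    import json

    from sqlalchemy import Text
    from sqlalchemy.types import TypeDecorator


    class JSONField(TypeDecorator):
        impl = Text
        cache_ok = True

        def process_bind_param(self, value, dialect):
            # Python dict/list -> JSON string on the way into the database.
            return json.dumps(value) if value is not None else None

        def process_result_value(self, value, dialect):
            # JSON string -> Python object on the way out.
            return json.loads(value) if value is not None else None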
class FileModel(BaseModel):

@@ -36,6 +36,8 @@ class FileModel(BaseModel):

    meta: dict
    created_at: int  # timestamp in epoch

    model_config = ConfigDict(from_attributes=True)


####################
# Forms

@@ -57,56 +59,68 @@ class FileForm(BaseModel):


class FilesTable:
    def insert_new_file(self, user_id: str, form_data: FileForm) -> Optional[FileModel]:
        with get_db() as db:
            file = FileModel(
                **{
                    **form_data.model_dump(),
                    "user_id": user_id,
                    "created_at": int(time.time()),
                }
            )

            try:
                result = File(**file.model_dump())
                db.add(result)
                db.commit()
                db.refresh(result)
                if result:
                    return FileModel.model_validate(result)
                else:
                    return None
            except Exception as e:
                print(f"Error creating file: {e}")
                return None

    def get_file_by_id(self, id: str) -> Optional[FileModel]:
        with get_db() as db:
            try:
                file = db.get(File, id)
                return FileModel.model_validate(file)
            except:
                return None

    def get_files(self) -> List[FileModel]:
        with get_db() as db:
            return [FileModel.model_validate(file) for file in db.query(File).all()]

    def delete_file_by_id(self, id: str) -> bool:
        with get_db() as db:
            try:
                db.query(File).filter_by(id=id).delete()
                db.commit()
                return True
            except:
                return False

    def delete_all_files(self) -> bool:
        with get_db() as db:
            try:
                db.query(File).delete()
                db.commit()
                return True
            except:
                return False


Files = FilesTable()
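A usage sketch for the Files singleton; FileForm is defined in the elided hunk above, so its fields (id, filename, meta) are assumed from the File model, and the values are illustrative:

    form = FileForm(
        id="file-123",
        filename="report.pdf",
        meta={"size": 2048, "content_type": "application/pdf"},
    )

    stored = Files.insert_new_file("user-1", form)
    # get_file_by_id uses db.get(), i.e. a primary-key lookup that can hit
    # the session's identity map, rather than a filtered query.
    same = Files.get_file_by_id("file-123")
    assert stored and same and stored.id == same.id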
from pydantic import BaseModel, ConfigDict
from typing import List, Union, Optional
import time
import logging

from sqlalchemy import Column, String, Text, BigInteger, Boolean

from apps.webui.internal.db import JSONField, Base, get_db
from apps.webui.models.users import Users

import json

@@ -21,21 +22,20 @@ log.setLevel(SRC_LOG_LEVELS["MODELS"])

####################


class Function(Base):
    __tablename__ = "function"

    id = Column(String, primary_key=True)
    user_id = Column(String)
    name = Column(Text)
    type = Column(Text)
    content = Column(Text)
    meta = Column(JSONField)
    valves = Column(JSONField)
    is_active = Column(Boolean)
    is_global = Column(Boolean)
    updated_at = Column(BigInteger)
    created_at = Column(BigInteger)
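One behavioral difference worth noting: the old Peewee fields declared is_active = BooleanField(default=False) and is_global = BooleanField(default=False), while the migrated columns above carry no default, so a row created without an explicit value would store NULL rather than False. If the old defaults were wanted at the mapping level, a variant outside this commit could restore them:

    from sqlalchemy import Boolean, Column, String

    class FunctionWithDefaults(Base):  # hypothetical table, illustration only
        __tablename__ = "function_with_defaults"

        id = Column(String, primary_key=True)
        # Python-side defaults matching the old Peewee schema: rows inserted
        # without the flags get False instead of NULL.
        is_active = Column(Boolean, default=False)
        is_global = Column(Boolean, default=False)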
class FunctionMeta(BaseModel):

@@ -55,6 +55,8 @@ class FunctionModel(BaseModel):

    updated_at: int  # timestamp in epoch
    created_at: int  # timestamp in epoch

    model_config = ConfigDict(from_attributes=True)


####################
# Forms

@@ -85,13 +87,11 @@ class FunctionValves(BaseModel):


class FunctionsTable:
    def insert_new_function(
        self, user_id: str, type: str, form_data: FunctionForm
    ) -> Optional[FunctionModel]:
        function = FunctionModel(
            **{
                **form_data.model_dump(),

@@ -103,89 +103,102 @@ class FunctionsTable:

        )

        try:
            with get_db() as db:
                result = Function(**function.model_dump())
                db.add(result)
                db.commit()
                db.refresh(result)
                if result:
                    return FunctionModel.model_validate(result)
                else:
                    return None
        except Exception as e:
            print(f"Error creating function: {e}")
            return None

    def get_function_by_id(self, id: str) -> Optional[FunctionModel]:
        try:
            with get_db() as db:
                function = db.get(Function, id)
                return FunctionModel.model_validate(function)
        except:
            return None

    def get_functions(self, active_only=False) -> List[FunctionModel]:
        with get_db() as db:
            if active_only:
                return [
                    FunctionModel.model_validate(function)
                    for function in db.query(Function).filter_by(is_active=True).all()
                ]
            else:
                return [
                    FunctionModel.model_validate(function)
                    for function in db.query(Function).all()
                ]

    def get_functions_by_type(
        self, type: str, active_only=False
    ) -> List[FunctionModel]:
        with get_db() as db:
            if active_only:
                return [
                    FunctionModel.model_validate(function)
                    for function in db.query(Function)
                    .filter_by(type=type, is_active=True)
                    .all()
                ]
            else:
                return [
                    FunctionModel.model_validate(function)
                    for function in db.query(Function).filter_by(type=type).all()
                ]

    def get_global_filter_functions(self) -> List[FunctionModel]:
        with get_db() as db:
            return [
                FunctionModel.model_validate(function)
                for function in db.query(Function)
                .filter_by(type="filter", is_active=True, is_global=True)
                .all()
            ]

    def get_function_valves_by_id(self, id: str) -> Optional[dict]:
        with get_db() as db:
            try:
                function = db.get(Function, id)
                return function.valves if function.valves else {}
            except Exception as e:
                print(f"An error occurred: {e}")
                return None

    def update_function_valves_by_id(
        self, id: str, valves: dict
    ) -> Optional[FunctionValves]:
        with get_db() as db:
            try:
                function = db.get(Function, id)
                function.valves = valves
                function.updated_at = int(time.time())
                db.commit()
                db.refresh(function)
                return self.get_function_by_id(id)
            except:
                return None

    def get_user_valves_by_id_and_user_id(
        self, id: str, user_id: str
    ) -> Optional[dict]:
        try:
            user = Users.get_user_by_id(user_id)
            user_settings = user.settings.model_dump() if user.settings else {}

            # Check if user has "functions" and "valves" settings
            if "functions" not in user_settings:

@@ -201,9 +214,10 @@ class FunctionsTable:

    def update_user_valves_by_id_and_user_id(
        self, id: str, user_id: str, valves: dict
    ) -> Optional[dict]:
        try:
            user = Users.get_user_by_id(user_id)
            user_settings = user.settings.model_dump() if user.settings else {}

            # Check if user has "functions" and "valves" settings
            if "functions" not in user_settings:

@@ -214,8 +228,7 @@ class FunctionsTable:

            user_settings["functions"]["valves"][id] = valves
            # Update the user settings in the database
            Users.update_user_by_id(user_id, {"settings": user_settings})

            return user_settings["functions"]["valves"][id]
        except Exception as e:

@@ -223,39 +236,44 @@ class FunctionsTable:

            return None

    def update_function_by_id(self, id: str, updated: dict) -> Optional[FunctionModel]:
        with get_db() as db:
            try:
                db.query(Function).filter_by(id=id).update(
                    {
                        **updated,
                        "updated_at": int(time.time()),
                    }
                )
                db.commit()
                return self.get_function_by_id(id)
            except:
                return None

    def deactivate_all_functions(self) -> Optional[bool]:
        with get_db() as db:
            try:
                db.query(Function).update(
                    {
                        "is_active": False,
                        "updated_at": int(time.time()),
                    }
                )
                db.commit()
                return True
            except:
                return None

    def delete_function_by_id(self, id: str) -> bool:
        with get_db() as db:
            try:
                db.query(Function).filter_by(id=id).delete()
                db.commit()
                return True
            except:
                return False


Functions = FunctionsTable()
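A short usage sketch against the Functions singleton, using only methods defined above; the function id is hypothetical:

    # Bulk UPDATE across all rows, then confirm the active-only query is empty.
    Functions.deactivate_all_functions()
    assert Functions.get_functions(active_only=True) == []

    # Valves are stored as JSON; for an existing function with no valves yet,
    # get_function_valves_by_id returns {} rather than None.
    valves = Functions.get_function_valves_by_id("my-filter-id")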
from pydantic import BaseModel, ConfigDict
from typing import List, Union, Optional

from sqlalchemy import Column, String, BigInteger, Text

from apps.webui.internal.db import Base, get_db

import time
import uuid

@@ -14,15 +13,14 @@ import uuid
####################


class Memory(Base):
    __tablename__ = "memory"

    id = Column(String, primary_key=True)
    user_id = Column(String)
    content = Column(Text)
    updated_at = Column(BigInteger)
    created_at = Column(BigInteger)

@@ -32,6 +30,8 @@ class MemoryModel(BaseModel):

    updated_at: int  # timestamp in epoch
    created_at: int  # timestamp in epoch

    model_config = ConfigDict(from_attributes=True)


####################
# Forms

@@ -39,94 +39,110 @@ class MemoryModel(BaseModel):


class MemoriesTable:
    def insert_new_memory(
        self,
        user_id: str,
        content: str,
    ) -> Optional[MemoryModel]:
        with get_db() as db:
            id = str(uuid.uuid4())

            memory = MemoryModel(
                **{
                    "id": id,
                    "user_id": user_id,
                    "content": content,
                    "created_at": int(time.time()),
                    "updated_at": int(time.time()),
                }
            )
            result = Memory(**memory.model_dump())
            db.add(result)
            db.commit()
            db.refresh(result)
            if result:
                return MemoryModel.model_validate(result)
            else:
                return None

    def update_memory_by_id(
        self,
        id: str,
        content: str,
    ) -> Optional[MemoryModel]:
        with get_db() as db:
            try:
                db.query(Memory).filter_by(id=id).update(
                    {"content": content, "updated_at": int(time.time())}
                )
                db.commit()
                return self.get_memory_by_id(id)
            except:
                return None

    def get_memories(self) -> List[MemoryModel]:
        with get_db() as db:
            try:
                memories = db.query(Memory).all()
                return [MemoryModel.model_validate(memory) for memory in memories]
            except:
                return None

    def get_memories_by_user_id(self, user_id: str) -> List[MemoryModel]:
        with get_db() as db:
            try:
                memories = db.query(Memory).filter_by(user_id=user_id).all()
                return [MemoryModel.model_validate(memory) for memory in memories]
            except:
                return None

    def get_memory_by_id(self, id: str) -> Optional[MemoryModel]:
        with get_db() as db:
            try:
                memory = db.get(Memory, id)
                return MemoryModel.model_validate(memory)
            except:
                return None

    def delete_memory_by_id(self, id: str) -> bool:
        with get_db() as db:
            try:
                db.query(Memory).filter_by(id=id).delete()
                db.commit()
                return True
            except:
                return False

    def delete_memories_by_user_id(self, user_id: str) -> bool:
        with get_db() as db:
            try:
                db.query(Memory).filter_by(user_id=user_id).delete()
                db.commit()
                return True
            except:
                return False

    def delete_memory_by_id_and_user_id(self, id: str, user_id: str) -> bool:
        with get_db() as db:
            try:
                db.query(Memory).filter_by(id=id, user_id=user_id).delete()
                db.commit()
                return True
            except:
                return False


Memories = MemoriesTable()
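Finally, a usage sketch for the Memories singleton; all methods and signatures appear above, and only the sample user id and content are made up:

    memory = Memories.insert_new_memory("user-1", "Prefers short answers")
    if memory:
        # update_memory_by_id commits the UPDATE, then re-reads the row so the
        # returned MemoryModel reflects the new updated_at timestamp.
        updated = Memories.update_memory_by_id(memory.id, "Prefers bullet lists")
        assert updated and updated.content == "Prefers bullet lists"

    Memories.delete_memories_by_user_id("user-1")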