Unverified Commit 22c50f62 authored by Timothy Jaeryang Baek's avatar Timothy Jaeryang Baek Committed by GitHub
Browse files

Merge pull request #1631 from open-webui/dev

0.1.120
parents e0ebd7ae eefe0145
...@@ -24,6 +24,9 @@ assignees: '' ...@@ -24,6 +24,9 @@ assignees: ''
## Environment ## Environment
- **Open WebUI Version:** [e.g., 0.1.120]
- **Ollama (if applicable):** [e.g., 0.1.30, 0.1.32-rc1]
- **Operating System:** [e.g., Windows 10, macOS Big Sur, Ubuntu 20.04] - **Operating System:** [e.g., Windows 10, macOS Big Sur, Ubuntu 20.04]
- **Browser (if applicable):** [e.g., Chrome 100.0, Firefox 98.0] - **Browser (if applicable):** [e.g., Chrome 100.0, Firefox 98.0]
......
...@@ -5,6 +5,22 @@ All notable changes to this project will be documented in this file. ...@@ -5,6 +5,22 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [0.1.120] - 2024-04-20
### Added
- **📦 Archive Chat Feature**: Easily archive chats with a new sidebar button, and access archived chats via the profile button > archived chats.
- **🔊 Configurable Text-to-Speech Endpoint**: Customize your Text-to-Speech experience with configurable OpenAI endpoints.
- **🛠️ Improved Error Handling**: Enhanced error message handling for connection failures.
- **⌨️ Enhanced Shortcut**: When editing messages, use ctrl/cmd+enter to save and submit, and esc to close.
- **🌐 Language Support**: Added support for Georgian and enhanced translations for Portuguese and Vietnamese.
### Fixed
- **🔧 Model Selector**: Resolved issue where default model selection was not saving.
- **🔗 Share Link Copy Button**: Fixed bug where the copy button wasn't copying links in Safari.
- **🎨 Light Theme Styling**: Addressed styling issue with the light theme.
## [0.1.119] - 2024-04-16 ## [0.1.119] - 2024-04-16
### Added ### Added
......
...@@ -185,4 +185,4 @@ If you have any questions, suggestions, or need assistance, please open an issue ...@@ -185,4 +185,4 @@ If you have any questions, suggestions, or need assistance, please open an issue
--- ---
Created by [Timothy J. Baek](https://github.com/tjbck) - Let's make Open Web UI even more amazing together! 💪 Created by [Timothy J. Baek](https://github.com/tjbck) - Let's make Open WebUI even more amazing together! 💪
...@@ -10,8 +10,19 @@ from fastapi import ( ...@@ -10,8 +10,19 @@ from fastapi import (
File, File,
Form, Form,
) )
from fastapi.responses import StreamingResponse, JSONResponse, FileResponse
from fastapi.middleware.cors import CORSMiddleware from fastapi.middleware.cors import CORSMiddleware
from faster_whisper import WhisperModel from faster_whisper import WhisperModel
from pydantic import BaseModel
import requests
import hashlib
from pathlib import Path
import json
from constants import ERROR_MESSAGES from constants import ERROR_MESSAGES
from utils.utils import ( from utils.utils import (
...@@ -30,6 +41,8 @@ from config import ( ...@@ -30,6 +41,8 @@ from config import (
WHISPER_MODEL_DIR, WHISPER_MODEL_DIR,
WHISPER_MODEL_AUTO_UPDATE, WHISPER_MODEL_AUTO_UPDATE,
DEVICE_TYPE, DEVICE_TYPE,
AUDIO_OPENAI_API_BASE_URL,
AUDIO_OPENAI_API_KEY,
) )
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
...@@ -44,12 +57,104 @@ app.add_middleware( ...@@ -44,12 +57,104 @@ app.add_middleware(
allow_headers=["*"], allow_headers=["*"],
) )
app.state.OPENAI_API_BASE_URL = AUDIO_OPENAI_API_BASE_URL
app.state.OPENAI_API_KEY = AUDIO_OPENAI_API_KEY
# setting device type for whisper model # setting device type for whisper model
whisper_device_type = DEVICE_TYPE if DEVICE_TYPE and DEVICE_TYPE == "cuda" else "cpu" whisper_device_type = DEVICE_TYPE if DEVICE_TYPE and DEVICE_TYPE == "cuda" else "cpu"
log.info(f"whisper_device_type: {whisper_device_type}") log.info(f"whisper_device_type: {whisper_device_type}")
SPEECH_CACHE_DIR = Path(CACHE_DIR).joinpath("./audio/speech/")
SPEECH_CACHE_DIR.mkdir(parents=True, exist_ok=True)
class OpenAIConfigUpdateForm(BaseModel):
    """Request body for POST /config/update: OpenAI-compatible TTS endpoint settings."""

    url: str  # base URL of the OpenAI-compatible API
    key: str  # API key; an empty string is rejected by update_openai_config
@app.get("/config")
async def get_openai_config(user=Depends(get_admin_user)):
    """Return the current OpenAI audio endpoint configuration (admin only)."""
    # NOTE(review): the raw API key is returned to the admin client — confirm
    # this is intended rather than a masked value.
    return {
        "OPENAI_API_BASE_URL": app.state.OPENAI_API_BASE_URL,
        "OPENAI_API_KEY": app.state.OPENAI_API_KEY,
    }
@app.post("/config/update")
async def update_openai_config(
    form_data: OpenAIConfigUpdateForm, user=Depends(get_admin_user)
):
    """Update the OpenAI audio endpoint settings (admin only).

    Rejects an empty API key with HTTP 400; otherwise stores the new
    URL/key on app state and echoes the stored values back.
    """
    if not form_data.key:
        raise HTTPException(status_code=400, detail=ERROR_MESSAGES.API_KEY_NOT_FOUND)

    app.state.OPENAI_API_BASE_URL = form_data.url
    app.state.OPENAI_API_KEY = form_data.key

    response = {"status": True}
    response["OPENAI_API_BASE_URL"] = app.state.OPENAI_API_BASE_URL
    response["OPENAI_API_KEY"] = app.state.OPENAI_API_KEY
    return response
@app.post("/speech")
async def speech(request: Request, user=Depends(get_verified_user)):
    """Proxy an OpenAI-compatible /audio/speech request, caching the result.

    The raw request body is forwarded verbatim to the configured endpoint.
    Responses are cached on disk keyed by the SHA-256 of the request body,
    so identical requests are served from the cache without contacting the
    upstream API.
    """
    body = await request.body()
    name = hashlib.sha256(body).hexdigest()

    file_path = SPEECH_CACHE_DIR.joinpath(f"{name}.mp3")
    file_body_path = SPEECH_CACHE_DIR.joinpath(f"{name}.json")

    # Cache hit: serve the previously synthesized audio.
    if file_path.is_file():
        return FileResponse(file_path)

    headers = {
        "Authorization": f"Bearer {app.state.OPENAI_API_KEY}",
        "Content-Type": "application/json",
    }

    r = None
    try:
        r = requests.post(
            url=f"{app.state.OPENAI_API_BASE_URL}/audio/speech",
            data=body,
            headers=headers,
            stream=True,
        )

        r.raise_for_status()

        # Stream to a temporary file first, then rename into place, so an
        # interrupted download can never leave a truncated .mp3 that would
        # be served as a cache hit forever afterwards.
        tmp_path = file_path.with_suffix(".tmp")
        with open(tmp_path, "wb") as f:
            for chunk in r.iter_content(chunk_size=8192):
                f.write(chunk)
        tmp_path.rename(file_path)

        # Record the originating request body alongside the cached audio.
        with open(file_body_path, "w") as f:
            json.dump(json.loads(body.decode("utf-8")), f)

        # Return the saved file
        return FileResponse(file_path)

    except Exception as e:
        log.exception(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"External: {res['error']['message']}"
            except Exception:
                # Upstream body was not JSON; fall back to the raised error.
                error_detail = f"External: {e}"

        raise HTTPException(
            status_code=r.status_code if r is not None else 500,
            detail=error_detail,
        )
@app.post("/transcribe") @app.post("/transcriptions")
def transcribe( def transcribe(
file: UploadFile = File(...), file: UploadFile = File(...),
user=Depends(get_current_user), user=Depends(get_current_user),
......
...@@ -35,6 +35,8 @@ from config import ( ...@@ -35,6 +35,8 @@ from config import (
ENABLE_IMAGE_GENERATION, ENABLE_IMAGE_GENERATION,
AUTOMATIC1111_BASE_URL, AUTOMATIC1111_BASE_URL,
COMFYUI_BASE_URL, COMFYUI_BASE_URL,
OPENAI_API_BASE_URL,
OPENAI_API_KEY,
) )
...@@ -56,7 +58,9 @@ app.add_middleware( ...@@ -56,7 +58,9 @@ app.add_middleware(
app.state.ENGINE = "" app.state.ENGINE = ""
app.state.ENABLED = ENABLE_IMAGE_GENERATION app.state.ENABLED = ENABLE_IMAGE_GENERATION
app.state.OPENAI_API_KEY = "" app.state.OPENAI_API_BASE_URL = OPENAI_API_BASE_URL
app.state.OPENAI_API_KEY = OPENAI_API_KEY
app.state.MODEL = "" app.state.MODEL = ""
...@@ -360,7 +364,7 @@ def generate_image( ...@@ -360,7 +364,7 @@ def generate_image(
} }
r = requests.post( r = requests.post(
url=f"https://api.openai.com/v1/images/generations", url=f"{app.state.OPENAI_API_BASE_URL}/images/generations",
json=data, json=data,
headers=headers, headers=headers,
) )
......
...@@ -341,7 +341,7 @@ async def proxy(path: str, request: Request, user=Depends(get_verified_user)): ...@@ -341,7 +341,7 @@ async def proxy(path: str, request: Request, user=Depends(get_verified_user)):
try: try:
res = r.json() res = r.json()
if "error" in res: if "error" in res:
error_detail = f"External: {res['error']}" error_detail = f"External: {res['error']['message'] if 'message' in res['error'] else res['error']}"
except: except:
error_detail = f"External: {e}" error_detail = f"External: {e}"
......
...@@ -70,6 +70,8 @@ from config import ( ...@@ -70,6 +70,8 @@ from config import (
RAG_EMBEDDING_ENGINE, RAG_EMBEDDING_ENGINE,
RAG_EMBEDDING_MODEL, RAG_EMBEDDING_MODEL,
RAG_EMBEDDING_MODEL_AUTO_UPDATE, RAG_EMBEDDING_MODEL_AUTO_UPDATE,
RAG_OPENAI_API_BASE_URL,
RAG_OPENAI_API_KEY,
DEVICE_TYPE, DEVICE_TYPE,
CHROMA_CLIENT, CHROMA_CLIENT,
CHUNK_SIZE, CHUNK_SIZE,
...@@ -94,8 +96,8 @@ app.state.RAG_EMBEDDING_ENGINE = RAG_EMBEDDING_ENGINE ...@@ -94,8 +96,8 @@ app.state.RAG_EMBEDDING_ENGINE = RAG_EMBEDDING_ENGINE
app.state.RAG_EMBEDDING_MODEL = RAG_EMBEDDING_MODEL app.state.RAG_EMBEDDING_MODEL = RAG_EMBEDDING_MODEL
app.state.RAG_TEMPLATE = RAG_TEMPLATE app.state.RAG_TEMPLATE = RAG_TEMPLATE
app.state.RAG_OPENAI_API_BASE_URL = "https://api.openai.com" app.state.OPENAI_API_BASE_URL = RAG_OPENAI_API_BASE_URL
app.state.RAG_OPENAI_API_KEY = "" app.state.OPENAI_API_KEY = RAG_OPENAI_API_KEY
app.state.PDF_EXTRACT_IMAGES = False app.state.PDF_EXTRACT_IMAGES = False
...@@ -148,8 +150,8 @@ async def get_embedding_config(user=Depends(get_admin_user)): ...@@ -148,8 +150,8 @@ async def get_embedding_config(user=Depends(get_admin_user)):
"embedding_engine": app.state.RAG_EMBEDDING_ENGINE, "embedding_engine": app.state.RAG_EMBEDDING_ENGINE,
"embedding_model": app.state.RAG_EMBEDDING_MODEL, "embedding_model": app.state.RAG_EMBEDDING_MODEL,
"openai_config": { "openai_config": {
"url": app.state.RAG_OPENAI_API_BASE_URL, "url": app.state.OPENAI_API_BASE_URL,
"key": app.state.RAG_OPENAI_API_KEY, "key": app.state.OPENAI_API_KEY,
}, },
} }
...@@ -180,8 +182,8 @@ async def update_embedding_config( ...@@ -180,8 +182,8 @@ async def update_embedding_config(
app.state.sentence_transformer_ef = None app.state.sentence_transformer_ef = None
if form_data.openai_config != None: if form_data.openai_config != None:
app.state.RAG_OPENAI_API_BASE_URL = form_data.openai_config.url app.state.OPENAI_API_BASE_URL = form_data.openai_config.url
app.state.RAG_OPENAI_API_KEY = form_data.openai_config.key app.state.OPENAI_API_KEY = form_data.openai_config.key
else: else:
sentence_transformer_ef = ( sentence_transformer_ef = (
embedding_functions.SentenceTransformerEmbeddingFunction( embedding_functions.SentenceTransformerEmbeddingFunction(
...@@ -199,8 +201,8 @@ async def update_embedding_config( ...@@ -199,8 +201,8 @@ async def update_embedding_config(
"embedding_engine": app.state.RAG_EMBEDDING_ENGINE, "embedding_engine": app.state.RAG_EMBEDDING_ENGINE,
"embedding_model": app.state.RAG_EMBEDDING_MODEL, "embedding_model": app.state.RAG_EMBEDDING_MODEL,
"openai_config": { "openai_config": {
"url": app.state.RAG_OPENAI_API_BASE_URL, "url": app.state.OPENAI_API_BASE_URL,
"key": app.state.RAG_OPENAI_API_KEY, "key": app.state.OPENAI_API_KEY,
}, },
} }
...@@ -315,8 +317,8 @@ def query_doc_handler( ...@@ -315,8 +317,8 @@ def query_doc_handler(
query_embeddings = generate_openai_embeddings( query_embeddings = generate_openai_embeddings(
model=app.state.RAG_EMBEDDING_MODEL, model=app.state.RAG_EMBEDDING_MODEL,
text=form_data.query, text=form_data.query,
key=app.state.RAG_OPENAI_API_KEY, key=app.state.OPENAI_API_KEY,
url=app.state.RAG_OPENAI_API_BASE_URL, url=app.state.OPENAI_API_BASE_URL,
) )
return query_embeddings_doc( return query_embeddings_doc(
...@@ -367,8 +369,8 @@ def query_collection_handler( ...@@ -367,8 +369,8 @@ def query_collection_handler(
query_embeddings = generate_openai_embeddings( query_embeddings = generate_openai_embeddings(
model=app.state.RAG_EMBEDDING_MODEL, model=app.state.RAG_EMBEDDING_MODEL,
text=form_data.query, text=form_data.query,
key=app.state.RAG_OPENAI_API_KEY, key=app.state.OPENAI_API_KEY,
url=app.state.RAG_OPENAI_API_BASE_URL, url=app.state.OPENAI_API_BASE_URL,
) )
return query_embeddings_collection( return query_embeddings_collection(
...@@ -484,8 +486,8 @@ def store_docs_in_vector_db(docs, collection_name, overwrite: bool = False) -> b ...@@ -484,8 +486,8 @@ def store_docs_in_vector_db(docs, collection_name, overwrite: bool = False) -> b
generate_openai_embeddings( generate_openai_embeddings(
model=app.state.RAG_EMBEDDING_MODEL, model=app.state.RAG_EMBEDDING_MODEL,
text=text, text=text,
key=app.state.RAG_OPENAI_API_KEY, key=app.state.OPENAI_API_KEY,
url=app.state.RAG_OPENAI_API_BASE_URL, url=app.state.OPENAI_API_BASE_URL,
) )
for text in texts for text in texts
] ]
......
...@@ -324,11 +324,11 @@ def get_embedding_model_path( ...@@ -324,11 +324,11 @@ def get_embedding_model_path(
def generate_openai_embeddings( def generate_openai_embeddings(
model: str, text: str, key: str, url: str = "https://api.openai.com" model: str, text: str, key: str, url: str = "https://api.openai.com/v1"
): ):
try: try:
r = requests.post( r = requests.post(
f"{url}/v1/embeddings", f"{url}/embeddings",
headers={ headers={
"Content-Type": "application/json", "Content-Type": "application/json",
"Authorization": f"Bearer {key}", "Authorization": f"Bearer {key}",
......
"""Peewee migrations -- 002_add_local_sharing.py.
Some examples (model - class or model name)::
> Model = migrator.orm['table_name'] # Return model in current state by name
> Model = migrator.ModelClass # Return model in current state by name
> migrator.sql(sql) # Run custom SQL
> migrator.run(func, *args, **kwargs) # Run python function with the given args
> migrator.create_model(Model) # Create a model (could be used as decorator)
> migrator.remove_model(model, cascade=True) # Remove a model
> migrator.add_fields(model, **fields) # Add fields to a model
> migrator.change_fields(model, **fields) # Change fields
> migrator.remove_fields(model, *field_names, cascade=True)
> migrator.rename_field(model, old_field_name, new_field_name)
> migrator.rename_table(model, new_table_name)
> migrator.add_index(model, *col_names, unique=False)
> migrator.add_not_null(model, *field_names)
> migrator.add_default(model, field_name, default)
> migrator.add_constraint(model, name, sql)
> migrator.drop_index(model, *col_names)
> migrator.drop_not_null(model, *field_names)
> migrator.drop_constraints(model, *constraints)
"""
from contextlib import suppress
import peewee as pw
from peewee_migrate import Migrator
with suppress(ImportError):
import playhouse.postgres_ext as pw_pext
def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
    """Add the boolean 'archived' flag (default False) to the chat table."""
    migrator.add_fields("chat", archived=pw.BooleanField(default=False))
def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
    """Undo migrate(): drop the 'archived' flag from the chat table."""
    migrator.remove_fields("chat", "archived")
"""Peewee migrations -- 002_add_local_sharing.py.
Some examples (model - class or model name)::
> Model = migrator.orm['table_name'] # Return model in current state by name
> Model = migrator.ModelClass # Return model in current state by name
> migrator.sql(sql) # Run custom SQL
> migrator.run(func, *args, **kwargs) # Run python function with the given args
> migrator.create_model(Model) # Create a model (could be used as decorator)
> migrator.remove_model(model, cascade=True) # Remove a model
> migrator.add_fields(model, **fields) # Add fields to a model
> migrator.change_fields(model, **fields) # Change fields
> migrator.remove_fields(model, *field_names, cascade=True)
> migrator.rename_field(model, old_field_name, new_field_name)
> migrator.rename_table(model, new_table_name)
> migrator.add_index(model, *col_names, unique=False)
> migrator.add_not_null(model, *field_names)
> migrator.add_default(model, field_name, default)
> migrator.add_constraint(model, name, sql)
> migrator.drop_index(model, *col_names)
> migrator.drop_not_null(model, *field_names)
> migrator.drop_constraints(model, *constraints)
"""
from contextlib import suppress
import peewee as pw
from peewee_migrate import Migrator
with suppress(ImportError):
import playhouse.postgres_ext as pw_pext
def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
    """Replace chat.timestamp with separate created_at / updated_at fields."""
    # Adding fields created_at and updated_at to the 'chat' table
    migrator.add_fields(
        "chat",
        created_at=pw.DateTimeField(null=True),  # Allow null for transition
        updated_at=pw.DateTimeField(null=True),  # Allow null for transition
    )
    # Populate the new fields from an existing 'timestamp' field
    migrator.sql(
        "UPDATE chat SET created_at = timestamp, updated_at = timestamp WHERE timestamp IS NOT NULL"
    )
    # Now that the data has been copied, remove the original 'timestamp' field
    migrator.remove_fields("chat", "timestamp")
    # Update the fields to be not null now that they are populated
    # NOTE(review): rows whose timestamp was NULL are skipped by the UPDATE
    # above, so this NOT NULL change would fail for them — confirm no such
    # rows can exist before this migration runs.
    migrator.change_fields(
        "chat",
        created_at=pw.DateTimeField(null=False),
        updated_at=pw.DateTimeField(null=False),
    )
def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
    """Undo migrate(): restore the single chat.timestamp field from created_at."""
    # Recreate the timestamp field initially allowing null values for safe transition
    migrator.add_fields("chat", timestamp=pw.DateTimeField(null=True))
    # Copy the earliest created_at date back into the new timestamp field
    # This assumes created_at was originally a copy of timestamp
    migrator.sql("UPDATE chat SET timestamp = created_at")
    # Remove the created_at and updated_at fields
    migrator.remove_fields("chat", "created_at", "updated_at")
    # Finally, alter the timestamp field to not allow nulls if that was the original setting
    migrator.change_fields("chat", timestamp=pw.DateTimeField(null=False))
...@@ -19,8 +19,12 @@ class Chat(Model): ...@@ -19,8 +19,12 @@ class Chat(Model):
user_id = CharField() user_id = CharField()
title = CharField() title = CharField()
chat = TextField() # Save Chat JSON as Text chat = TextField() # Save Chat JSON as Text
timestamp = DateField()
created_at = DateTimeField()
updated_at = DateTimeField()
share_id = CharField(null=True, unique=True) share_id = CharField(null=True, unique=True)
archived = BooleanField(default=False)
class Meta: class Meta:
database = DB database = DB
...@@ -31,8 +35,12 @@ class ChatModel(BaseModel): ...@@ -31,8 +35,12 @@ class ChatModel(BaseModel):
user_id: str user_id: str
title: str title: str
chat: str chat: str
timestamp: int # timestamp in epoch
created_at: int # timestamp in epoch
updated_at: int # timestamp in epoch
share_id: Optional[str] = None share_id: Optional[str] = None
archived: bool = False
#################### ####################
...@@ -53,13 +61,17 @@ class ChatResponse(BaseModel): ...@@ -53,13 +61,17 @@ class ChatResponse(BaseModel):
user_id: str user_id: str
title: str title: str
chat: dict chat: dict
timestamp: int # timestamp in epoch updated_at: int # timestamp in epoch
created_at: int # timestamp in epoch
share_id: Optional[str] = None # id of the chat to be shared share_id: Optional[str] = None # id of the chat to be shared
archived: bool
class ChatTitleIdResponse(BaseModel): class ChatTitleIdResponse(BaseModel):
id: str id: str
title: str title: str
updated_at: int
created_at: int
class ChatTable: class ChatTable:
...@@ -77,7 +89,8 @@ class ChatTable: ...@@ -77,7 +89,8 @@ class ChatTable:
form_data.chat["title"] if "title" in form_data.chat else "New Chat" form_data.chat["title"] if "title" in form_data.chat else "New Chat"
), ),
"chat": json.dumps(form_data.chat), "chat": json.dumps(form_data.chat),
"timestamp": int(time.time()), "created_at": int(time.time()),
"updated_at": int(time.time()),
} }
) )
...@@ -89,7 +102,7 @@ class ChatTable: ...@@ -89,7 +102,7 @@ class ChatTable:
query = Chat.update( query = Chat.update(
chat=json.dumps(chat), chat=json.dumps(chat),
title=chat["title"] if "title" in chat else "New Chat", title=chat["title"] if "title" in chat else "New Chat",
timestamp=int(time.time()), updated_at=int(time.time()),
).where(Chat.id == id) ).where(Chat.id == id)
query.execute() query.execute()
...@@ -111,7 +124,8 @@ class ChatTable: ...@@ -111,7 +124,8 @@ class ChatTable:
"user_id": f"shared-{chat_id}", "user_id": f"shared-{chat_id}",
"title": chat.title, "title": chat.title,
"chat": chat.chat, "chat": chat.chat,
"timestamp": int(time.time()), "created_at": chat.created_at,
"updated_at": int(time.time()),
} }
) )
shared_result = Chat.create(**shared_chat.model_dump()) shared_result = Chat.create(**shared_chat.model_dump())
...@@ -163,14 +177,42 @@ class ChatTable: ...@@ -163,14 +177,42 @@ class ChatTable:
except: except:
return None return None
def toggle_chat_archive_by_id(self, id: str) -> Optional[ChatModel]:
try:
chat = self.get_chat_by_id(id)
query = Chat.update(
archived=(not chat.archived),
).where(Chat.id == id)
query.execute()
chat = Chat.get(Chat.id == id)
return ChatModel(**model_to_dict(chat))
except:
return None
def get_archived_chat_lists_by_user_id(
self, user_id: str, skip: int = 0, limit: int = 50
) -> List[ChatModel]:
return [
ChatModel(**model_to_dict(chat))
for chat in Chat.select()
.where(Chat.archived == True)
.where(Chat.user_id == user_id)
.order_by(Chat.updated_at.desc())
# .limit(limit)
# .offset(skip)
]
def get_chat_lists_by_user_id( def get_chat_lists_by_user_id(
self, user_id: str, skip: int = 0, limit: int = 50 self, user_id: str, skip: int = 0, limit: int = 50
) -> List[ChatModel]: ) -> List[ChatModel]:
return [ return [
ChatModel(**model_to_dict(chat)) ChatModel(**model_to_dict(chat))
for chat in Chat.select() for chat in Chat.select()
.where(Chat.archived == False)
.where(Chat.user_id == user_id) .where(Chat.user_id == user_id)
.order_by(Chat.timestamp.desc()) .order_by(Chat.updated_at.desc())
# .limit(limit) # .limit(limit)
# .offset(skip) # .offset(skip)
] ]
...@@ -181,14 +223,15 @@ class ChatTable: ...@@ -181,14 +223,15 @@ class ChatTable:
return [ return [
ChatModel(**model_to_dict(chat)) ChatModel(**model_to_dict(chat))
for chat in Chat.select() for chat in Chat.select()
.where(Chat.archived == False)
.where(Chat.id.in_(chat_ids)) .where(Chat.id.in_(chat_ids))
.order_by(Chat.timestamp.desc()) .order_by(Chat.updated_at.desc())
] ]
def get_all_chats(self) -> List[ChatModel]: def get_all_chats(self) -> List[ChatModel]:
return [ return [
ChatModel(**model_to_dict(chat)) ChatModel(**model_to_dict(chat))
for chat in Chat.select().order_by(Chat.timestamp.desc()) for chat in Chat.select().order_by(Chat.updated_at.desc())
] ]
def get_all_chats_by_user_id(self, user_id: str) -> List[ChatModel]: def get_all_chats_by_user_id(self, user_id: str) -> List[ChatModel]:
...@@ -196,7 +239,7 @@ class ChatTable: ...@@ -196,7 +239,7 @@ class ChatTable:
ChatModel(**model_to_dict(chat)) ChatModel(**model_to_dict(chat))
for chat in Chat.select() for chat in Chat.select()
.where(Chat.user_id == user_id) .where(Chat.user_id == user_id)
.order_by(Chat.timestamp.desc()) .order_by(Chat.updated_at.desc())
] ]
def get_chat_by_id(self, id: str) -> Optional[ChatModel]: def get_chat_by_id(self, id: str) -> Optional[ChatModel]:
......
...@@ -47,6 +47,18 @@ async def get_user_chats( ...@@ -47,6 +47,18 @@ async def get_user_chats(
return Chats.get_chat_lists_by_user_id(user.id, skip, limit) return Chats.get_chat_lists_by_user_id(user.id, skip, limit)
############################
# GetArchivedChats
############################


@router.get("/archived", response_model=List[ChatTitleIdResponse])
async def get_archived_user_chats(
    user=Depends(get_current_user), skip: int = 0, limit: int = 50
):
    """Return the calling user's archived chats as title/id summaries."""
    # NOTE(review): this literal route must be registered before any
    # "/{id}" route or "archived" would be captured as a chat id —
    # confirm registration order in this router.
    return Chats.get_archived_chat_lists_by_user_id(user.id, skip, limit)
############################ ############################
# GetAllChats # GetAllChats
############################ ############################
...@@ -189,6 +201,23 @@ async def delete_chat_by_id(request: Request, id: str, user=Depends(get_current_ ...@@ -189,6 +201,23 @@ async def delete_chat_by_id(request: Request, id: str, user=Depends(get_current_
return result return result
############################
# ArchiveChat
############################


@router.get("/{id}/archive", response_model=Optional[ChatResponse])
async def archive_chat_by_id(id: str, user=Depends(get_current_user)):
    """Toggle the archived flag on one of the caller's chats.

    Despite the name, repeated calls flip the flag back and forth
    (see Chats.toggle_chat_archive_by_id).
    """
    # NOTE(review): a state-changing GET endpoint — consider POST; also a
    # missing/not-owned chat yields 401, where 404 may fit better.
    chat = Chats.get_chat_by_id_and_user_id(id, user.id)
    if chat:
        chat = Chats.toggle_chat_archive_by_id(id)
        return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
    else:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.DEFAULT()
        )
############################ ############################
# ShareChatById # ShareChatById
############################ ############################
......
...@@ -321,6 +321,13 @@ OPENAI_API_BASE_URLS = [ ...@@ -321,6 +321,13 @@ OPENAI_API_BASE_URLS = [
for url in OPENAI_API_BASE_URLS.split(";") for url in OPENAI_API_BASE_URLS.split(";")
] ]
OPENAI_API_KEY = ""

# Pick the key paired with the canonical OpenAI endpoint, if one is
# configured. list.index() raises ValueError when the URL is absent, which
# would otherwise crash module import for deployments that only configure
# non-OpenAI endpoints.
try:
    OPENAI_API_KEY = OPENAI_API_KEYS[
        OPENAI_API_BASE_URLS.index("https://api.openai.com/v1")
    ]
except (ValueError, IndexError):
    OPENAI_API_KEY = ""

OPENAI_API_BASE_URL = "https://api.openai.com/v1"
#################################### ####################################
# WEBUI # WEBUI
#################################### ####################################
...@@ -447,6 +454,9 @@ And answer according to the language of the user's question. ...@@ -447,6 +454,9 @@ And answer according to the language of the user's question.
Given the context information, answer the query. Given the context information, answer the query.
Query: [query]""" Query: [query]"""
RAG_OPENAI_API_BASE_URL = os.getenv("RAG_OPENAI_API_BASE_URL", OPENAI_API_BASE_URL)
RAG_OPENAI_API_KEY = os.getenv("RAG_OPENAI_API_KEY", OPENAI_API_KEY)
#################################### ####################################
# Transcribe # Transcribe
#################################### ####################################
...@@ -467,3 +477,11 @@ ENABLE_IMAGE_GENERATION = ( ...@@ -467,3 +477,11 @@ ENABLE_IMAGE_GENERATION = (
) )
AUTOMATIC1111_BASE_URL = os.getenv("AUTOMATIC1111_BASE_URL", "") AUTOMATIC1111_BASE_URL = os.getenv("AUTOMATIC1111_BASE_URL", "")
COMFYUI_BASE_URL = os.getenv("COMFYUI_BASE_URL", "") COMFYUI_BASE_URL = os.getenv("COMFYUI_BASE_URL", "")
####################################
# Audio
####################################
AUDIO_OPENAI_API_BASE_URL = os.getenv("AUDIO_OPENAI_API_BASE_URL", OPENAI_API_BASE_URL)
AUDIO_OPENAI_API_KEY = os.getenv("AUDIO_OPENAI_API_KEY", OPENAI_API_KEY)
backend/static/favicon.png

6.02 KB | W: | H:

backend/static/favicon.png

10.7 KB | W: | H:

backend/static/favicon.png
backend/static/favicon.png
backend/static/favicon.png
backend/static/favicon.png
  • 2-up
  • Swipe
  • Onion skin
{ {
"name": "open-webui", "name": "open-webui",
"version": "0.1.119", "version": "0.1.120",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "open-webui", "name": "open-webui",
"version": "0.1.119", "version": "0.1.120",
"dependencies": { "dependencies": {
"@sveltejs/adapter-node": "^1.3.1", "@sveltejs/adapter-node": "^1.3.1",
"async": "^3.2.5", "async": "^3.2.5",
......
{ {
"name": "open-webui", "name": "open-webui",
"version": "0.1.119", "version": "0.1.120",
"private": true, "private": true,
"scripts": { "scripts": {
"dev": "vite dev --host", "dev": "vite dev --host",
......
import { AUDIO_API_BASE_URL } from '$lib/constants'; import { AUDIO_API_BASE_URL } from '$lib/constants';
export const getAudioConfig = async (token: string) => {
	// Read the audio (OpenAI TTS) endpoint configuration from the backend.
	let error = null;
	let result = null;

	try {
		const response = await fetch(`${AUDIO_API_BASE_URL}/config`, {
			method: 'GET',
			headers: {
				'Content-Type': 'application/json',
				Authorization: `Bearer ${token}`
			}
		});
		if (!response.ok) throw await response.json();
		result = await response.json();
	} catch (err) {
		console.log(err);
		error = err.detail;
	}

	if (error) {
		throw error;
	}

	return result;
};
type OpenAIConfigForm = {
url: string;
key: string;
};
export const updateAudioConfig = async (token: string, payload: OpenAIConfigForm) => {
	// Persist new audio endpoint settings (url/key) on the backend.
	let error = null;
	let result = null;

	try {
		const response = await fetch(`${AUDIO_API_BASE_URL}/config/update`, {
			method: 'POST',
			headers: {
				'Content-Type': 'application/json',
				Authorization: `Bearer ${token}`
			},
			body: JSON.stringify({
				...payload
			})
		});
		if (!response.ok) throw await response.json();
		result = await response.json();
	} catch (err) {
		console.log(err);
		error = err.detail;
	}

	if (error) {
		throw error;
	}

	return result;
};
export const transcribeAudio = async (token: string, file: File) => { export const transcribeAudio = async (token: string, file: File) => {
const data = new FormData(); const data = new FormData();
data.append('file', file); data.append('file', file);
let error = null; let error = null;
const res = await fetch(`${AUDIO_API_BASE_URL}/transcribe`, { const res = await fetch(`${AUDIO_API_BASE_URL}/transcriptions`, {
method: 'POST', method: 'POST',
headers: { headers: {
Accept: 'application/json', Accept: 'application/json',
...@@ -29,3 +91,40 @@ export const transcribeAudio = async (token: string, file: File) => { ...@@ -29,3 +91,40 @@ export const transcribeAudio = async (token: string, file: File) => {
return res; return res;
}; };
export const synthesizeOpenAISpeech = async (
	token: string = '',
	speaker: string = 'alloy',
	text: string = ''
) => {
	// Request TTS audio from the backend's /speech proxy. Returns the raw
	// Response object so the caller can read the audio body itself.
	let error = null;
	let result = null;

	try {
		const response = await fetch(`${AUDIO_API_BASE_URL}/speech`, {
			method: 'POST',
			headers: {
				Authorization: `Bearer ${token}`,
				'Content-Type': 'application/json'
			},
			body: JSON.stringify({
				model: 'tts-1',
				input: text,
				voice: speaker
			})
		});
		if (!response.ok) throw await response.json();
		result = response;
	} catch (err) {
		error = err.detail;
		console.log(err);
	}

	if (error) {
		throw error;
	}

	return result;
};
...@@ -62,6 +62,37 @@ export const getChatList = async (token: string = '') => { ...@@ -62,6 +62,37 @@ export const getChatList = async (token: string = '') => {
return res; return res;
}; };
export const getArchivedChatList = async (token: string = '') => {
	// Fetch the current user's archived chats (title/id entries).
	let error = null;
	let result = null;

	try {
		const response = await fetch(`${WEBUI_API_BASE_URL}/chats/archived`, {
			method: 'GET',
			headers: {
				Accept: 'application/json',
				'Content-Type': 'application/json',
				...(token && { authorization: `Bearer ${token}` })
			}
		});
		if (!response.ok) throw await response.json();
		result = await response.json();
	} catch (err) {
		error = err;
		console.log(err);
	}

	if (error) {
		throw error;
	}

	return result;
};
export const getAllChats = async (token: string) => { export const getAllChats = async (token: string) => {
let error = null; let error = null;
...@@ -282,6 +313,38 @@ export const shareChatById = async (token: string, id: string) => { ...@@ -282,6 +313,38 @@ export const shareChatById = async (token: string, id: string) => {
return res; return res;
}; };
export const archiveChatById = async (token: string, id: string) => {
	// Toggle the archived flag of a chat; resolves to the updated chat.
	let error = null;
	let result = null;

	try {
		const response = await fetch(`${WEBUI_API_BASE_URL}/chats/${id}/archive`, {
			method: 'GET',
			headers: {
				Accept: 'application/json',
				'Content-Type': 'application/json',
				...(token && { authorization: `Bearer ${token}` })
			}
		});
		if (!response.ok) throw await response.json();
		result = await response.json();
	} catch (err) {
		error = err;
		console.log(err);
	}

	if (error) {
		throw error;
	}

	return result;
};
export const deleteSharedChatById = async (token: string, id: string) => { export const deleteSharedChatById = async (token: string, id: string) => {
let error = null; let error = null;
......
...@@ -328,27 +328,28 @@ ...@@ -328,27 +328,28 @@
]; ];
}; };
const inputFiles = e.dataTransfer?.files; const inputFiles = Array.from(e.dataTransfer?.files);
if (inputFiles && inputFiles.length > 0) { if (inputFiles && inputFiles.length > 0) {
const file = inputFiles[0]; inputFiles.forEach((file) => {
console.log(file, file.name.split('.').at(-1)); console.log(file, file.name.split('.').at(-1));
if (['image/gif', 'image/jpeg', 'image/png'].includes(file['type'])) { if (['image/gif', 'image/jpeg', 'image/png'].includes(file['type'])) {
reader.readAsDataURL(file); reader.readAsDataURL(file);
} else if ( } else if (
SUPPORTED_FILE_TYPE.includes(file['type']) || SUPPORTED_FILE_TYPE.includes(file['type']) ||
SUPPORTED_FILE_EXTENSIONS.includes(file.name.split('.').at(-1)) SUPPORTED_FILE_EXTENSIONS.includes(file.name.split('.').at(-1))
) { ) {
uploadDoc(file); uploadDoc(file);
} else { } else {
toast.error( toast.error(
$i18n.t( $i18n.t(
`Unknown File Type '{{file_type}}', but accepting and treating as plain text`, `Unknown File Type '{{file_type}}', but accepting and treating as plain text`,
{ file_type: file['type'] } { file_type: file['type'] }
) )
); );
uploadDoc(file); uploadDoc(file);
} }
});
} else { } else {
toast.error($i18n.t(`File not found.`)); toast.error($i18n.t(`File not found.`));
} }
...@@ -467,6 +468,7 @@ ...@@ -467,6 +468,7 @@
bind:files={inputFiles} bind:files={inputFiles}
type="file" type="file"
hidden hidden
multiple
on:change={async () => { on:change={async () => {
let reader = new FileReader(); let reader = new FileReader();
reader.onload = (event) => { reader.onload = (event) => {
...@@ -482,25 +484,27 @@ ...@@ -482,25 +484,27 @@
}; };
if (inputFiles && inputFiles.length > 0) { if (inputFiles && inputFiles.length > 0) {
const file = inputFiles[0]; const _inputFiles = Array.from(inputFiles);
if (['image/gif', 'image/jpeg', 'image/png'].includes(file['type'])) { _inputFiles.forEach((file) => {
reader.readAsDataURL(file); if (['image/gif', 'image/jpeg', 'image/png'].includes(file['type'])) {
} else if ( reader.readAsDataURL(file);
SUPPORTED_FILE_TYPE.includes(file['type']) || } else if (
SUPPORTED_FILE_EXTENSIONS.includes(file.name.split('.').at(-1)) SUPPORTED_FILE_TYPE.includes(file['type']) ||
) { SUPPORTED_FILE_EXTENSIONS.includes(file.name.split('.').at(-1))
uploadDoc(file); ) {
filesInputElement.value = ''; uploadDoc(file);
} else { filesInputElement.value = '';
toast.error( } else {
$i18n.t( toast.error(
`Unknown File Type '{{file_type}}', but accepting and treating as plain text`, $i18n.t(
{ file_type: file['type'] } `Unknown File Type '{{file_type}}', but accepting and treating as plain text`,
) { file_type: file['type'] }
); )
uploadDoc(file); );
filesInputElement.value = ''; uploadDoc(file);
} filesInputElement.value = '';
}
});
} else { } else {
toast.error($i18n.t(`File not found.`)); toast.error($i18n.t(`File not found.`));
} }
......
...@@ -12,6 +12,7 @@ ...@@ -12,6 +12,7 @@
import Placeholder from './Messages/Placeholder.svelte'; import Placeholder from './Messages/Placeholder.svelte';
import Spinner from '../common/Spinner.svelte'; import Spinner from '../common/Spinner.svelte';
import { imageGenerations } from '$lib/apis/images'; import { imageGenerations } from '$lib/apis/images';
import { copyToClipboard } from '$lib/utils';
const i18n = getContext('i18n'); const i18n = getContext('i18n');
...@@ -42,40 +43,11 @@ ...@@ -42,40 +43,11 @@
element.scrollTop = element.scrollHeight; element.scrollTop = element.scrollHeight;
}; };
const copyToClipboard = (text) => { const copyToClipboardWithToast = async (text) => {
if (!navigator.clipboard) { const res = await copyToClipboard(text);
var textArea = document.createElement('textarea'); if (res) {
textArea.value = text; toast.success($i18n.t('Copying to clipboard was successful!'));
// Avoid scrolling to bottom
textArea.style.top = '0';
textArea.style.left = '0';
textArea.style.position = 'fixed';
document.body.appendChild(textArea);
textArea.focus();
textArea.select();
try {
var successful = document.execCommand('copy');
var msg = successful ? 'successful' : 'unsuccessful';
console.log('Fallback: Copying text command was ' + msg);
} catch (err) {
console.error('Fallback: Oops, unable to copy', err);
}
document.body.removeChild(textArea);
return;
} }
navigator.clipboard.writeText(text).then(
function () {
console.log('Async: Copying to clipboard was successful!');
toast.success($i18n.t('Copying to clipboard was successful!'));
},
function (err) {
console.error('Async: Could not copy text: ', err);
}
);
}; };
const confirmEditMessage = async (messageId, content) => { const confirmEditMessage = async (messageId, content) => {
...@@ -330,7 +302,7 @@ ...@@ -330,7 +302,7 @@
{confirmEditMessage} {confirmEditMessage}
{showPreviousMessage} {showPreviousMessage}
{showNextMessage} {showNextMessage}
{copyToClipboard} copyToClipboard={copyToClipboardWithToast}
/> />
{:else} {:else}
<ResponseMessage <ResponseMessage
...@@ -344,7 +316,7 @@ ...@@ -344,7 +316,7 @@
{showPreviousMessage} {showPreviousMessage}
{showNextMessage} {showNextMessage}
{rateMessage} {rateMessage}
{copyToClipboard} copyToClipboard={copyToClipboardWithToast}
{continueGeneration} {continueGeneration}
{regenerateResponse} {regenerateResponse}
on:save={async (e) => { on:save={async (e) => {
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment