"...zh_cn/git@developer.sourcefind.cn:wangsen/mineru.git" did not exist on "a1c4f22a8c656cf4f75fd33ccdf6d2e1c293978f"
Unverified commit 5166e92f authored by arkohut, committed by GitHub

Merge branch 'dev' into support-py-for-run-code

parents b443d61c b6b71c08
 {
 	"name": "open-webui",
-	"version": "0.1.125",
+	"version": "0.2.0.dev2",
 	"private": true,
 	"scripts": {
 		"dev": "npm run pyodide:fetch && vite dev --host",
@@ -13,7 +13,7 @@
 		"lint:types": "npm run check",
 		"lint:backend": "pylint backend/",
 		"format": "prettier --plugin-search-dir --write \"**/*.{js,ts,svelte,css,md,html,json}\"",
-		"format:backend": "black . --exclude \"/venv/\"",
+		"format:backend": "black . --exclude \".venv/|/venv/\"",
 		"i18n:parse": "i18next --config i18next-parser.config.ts && prettier --write \"src/lib/i18n/**/*.{js,json}\"",
 		"cy:open": "cypress open",
 		"test:frontend": "vitest",
...
[project]
name = "open-webui"
description = "Open WebUI (Formerly Ollama WebUI)"
authors = [
{ name = "Timothy Jaeryang Baek", email = "tim@openwebui.com" }
]
license = { file = "LICENSE" }
dependencies = [
"fastapi==0.111.0",
"uvicorn[standard]==0.22.0",
"pydantic==2.7.1",
"python-multipart==0.0.9",
"Flask==3.0.3",
"Flask-Cors==4.0.1",
"python-socketio==5.11.2",
"python-jose==3.3.0",
"passlib[bcrypt]==1.7.4",
"requests==2.32.2",
"aiohttp==3.9.5",
"peewee==3.17.5",
"peewee-migrate==1.12.2",
"psycopg2-binary==2.9.9",
"PyMySQL==1.1.0",
"bcrypt==4.1.3",
"litellm[proxy]==1.37.20",
"boto3==1.34.110",
"argon2-cffi==23.1.0",
"APScheduler==3.10.4",
"google-generativeai==0.5.4",
"langchain==0.2.0",
"langchain-community==0.2.0",
"langchain-chroma==0.1.1",
"fake-useragent==1.5.1",
"chromadb==0.5.0",
"sentence-transformers==2.7.0",
"pypdf==4.2.0",
"docx2txt==0.8",
"unstructured==0.14.0",
"Markdown==3.6",
"pypandoc==1.13",
"pandas==2.2.2",
"openpyxl==3.1.2",
"pyxlsb==1.0.10",
"xlrd==2.0.1",
"validators==0.28.1",
"opencv-python-headless==4.9.0.80",
"rapidocr-onnxruntime==1.3.22",
"fpdf2==2.7.9",
"rank-bm25==0.2.2",
"faster-whisper==1.0.2",
"PyJWT[crypto]==2.8.0",
"black==24.4.2",
"langfuse==2.33.0",
"youtube-transcript-api==0.6.2",
"pytube==15.0.0",
]
readme = "README.md"
requires-python = ">= 3.11, < 3.12.0a1"
dynamic = ["version"]
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.11",
"Topic :: Communications :: Chat",
"Topic :: Multimedia",
]
[project.scripts]
open-webui = "open_webui:app"
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[tool.rye]
managed = true
dev-dependencies = []
[tool.hatch.metadata]
allow-direct-references = true
[tool.hatch.version]
path = "package.json"
pattern = '"version":\s*"(?P<version>[^"]+)"'
[tool.hatch.build.hooks.custom] # keep this for reading hooks from `hatch_build.py`
[tool.hatch.build.targets.wheel]
sources = ["backend"]
exclude = [
".dockerignore",
".gitignore",
".webui_secret_key",
"dev.sh",
"requirements.txt",
"start.sh",
"start_windows.bat",
"webui.db",
"chroma.sqlite3",
]
force-include = { "CHANGELOG.md" = "open_webui/CHANGELOG.md", build = "open_webui/frontend" }
# generated by rye
# use `rye lock` or `rye sync` to update this lockfile
#
# last locked with the following flags:
# pre: false
# features: []
# all-features: false
# with-sources: false
# generate-hashes: false
-e file:.
aiohttp==3.9.5
# via langchain
# via langchain-community
# via litellm
# via open-webui
aiosignal==1.3.1
# via aiohttp
annotated-types==0.6.0
# via pydantic
anyio==4.3.0
# via httpx
# via openai
# via starlette
# via watchfiles
apscheduler==3.10.4
# via litellm
# via open-webui
argon2-cffi==23.1.0
# via open-webui
argon2-cffi-bindings==21.2.0
# via argon2-cffi
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
attrs==23.2.0
# via aiohttp
av==11.0.0
# via faster-whisper
backoff==2.2.1
# via langfuse
# via litellm
# via posthog
# via unstructured
bcrypt==4.1.3
# via chromadb
# via open-webui
# via passlib
beautifulsoup4==4.12.3
# via unstructured
bidict==0.23.1
# via python-socketio
black==24.4.2
# via open-webui
blinker==1.8.2
# via flask
boto3==1.34.110
# via open-webui
botocore==1.34.110
# via boto3
# via s3transfer
build==1.2.1
# via chromadb
cachetools==5.3.3
# via google-auth
certifi==2024.2.2
# via httpcore
# via httpx
# via kubernetes
# via requests
# via unstructured-client
cffi==1.16.0
# via argon2-cffi-bindings
# via cryptography
chardet==5.2.0
# via unstructured
charset-normalizer==3.3.2
# via requests
# via unstructured-client
chroma-hnswlib==0.7.3
# via chromadb
chromadb==0.5.0
# via langchain-chroma
# via open-webui
click==8.1.7
# via black
# via flask
# via litellm
# via nltk
# via peewee-migrate
# via rq
# via typer
# via uvicorn
coloredlogs==15.0.1
# via onnxruntime
cryptography==42.0.7
# via litellm
# via pyjwt
ctranslate2==4.2.1
# via faster-whisper
dataclasses-json==0.6.6
# via langchain
# via langchain-community
# via unstructured
# via unstructured-client
deepdiff==7.0.1
# via unstructured-client
defusedxml==0.7.1
# via fpdf2
deprecated==1.2.14
# via opentelemetry-api
# via opentelemetry-exporter-otlp-proto-grpc
distro==1.9.0
# via openai
dnspython==2.6.1
# via email-validator
docx2txt==0.8
# via open-webui
ecdsa==0.19.0
# via python-jose
email-validator==2.1.1
# via fastapi
# via pydantic
emoji==2.11.1
# via unstructured
et-xmlfile==1.1.0
# via openpyxl
fake-useragent==1.5.1
# via open-webui
fastapi==0.111.0
# via chromadb
# via fastapi-sso
# via langchain-chroma
# via litellm
# via open-webui
fastapi-cli==0.0.4
# via fastapi
fastapi-sso==0.10.0
# via litellm
faster-whisper==1.0.2
# via open-webui
filelock==3.14.0
# via huggingface-hub
# via torch
# via transformers
filetype==1.2.0
# via unstructured
flask==3.0.3
# via flask-cors
# via open-webui
flask-cors==4.0.1
# via open-webui
flatbuffers==24.3.25
# via onnxruntime
fonttools==4.51.0
# via fpdf2
fpdf2==2.7.9
# via open-webui
frozenlist==1.4.1
# via aiohttp
# via aiosignal
fsspec==2024.3.1
# via huggingface-hub
# via torch
google-ai-generativelanguage==0.6.4
# via google-generativeai
google-api-core==2.19.0
# via google-ai-generativelanguage
# via google-api-python-client
# via google-generativeai
google-api-python-client==2.129.0
# via google-generativeai
google-auth==2.29.0
# via google-ai-generativelanguage
# via google-api-core
# via google-api-python-client
# via google-auth-httplib2
# via google-generativeai
# via kubernetes
google-auth-httplib2==0.2.0
# via google-api-python-client
google-generativeai==0.5.4
# via open-webui
googleapis-common-protos==1.63.0
# via google-api-core
# via grpcio-status
# via opentelemetry-exporter-otlp-proto-grpc
grpcio==1.63.0
# via chromadb
# via google-api-core
# via grpcio-status
# via opentelemetry-exporter-otlp-proto-grpc
grpcio-status==1.62.2
# via google-api-core
gunicorn==22.0.0
# via litellm
h11==0.14.0
# via httpcore
# via uvicorn
# via wsproto
httpcore==1.0.5
# via httpx
httplib2==0.22.0
# via google-api-python-client
# via google-auth-httplib2
httptools==0.6.1
# via uvicorn
httpx==0.27.0
# via fastapi
# via fastapi-sso
# via langfuse
# via openai
huggingface-hub==0.23.0
# via faster-whisper
# via sentence-transformers
# via tokenizers
# via transformers
humanfriendly==10.0
# via coloredlogs
idna==3.7
# via anyio
# via email-validator
# via httpx
# via langfuse
# via requests
# via unstructured-client
# via yarl
importlib-metadata==7.0.0
# via litellm
# via opentelemetry-api
importlib-resources==6.4.0
# via chromadb
itsdangerous==2.2.0
# via flask
jinja2==3.1.4
# via fastapi
# via flask
# via litellm
# via torch
jmespath==1.0.1
# via boto3
# via botocore
joblib==1.4.2
# via nltk
# via scikit-learn
jsonpatch==1.33
# via langchain-core
jsonpath-python==1.0.6
# via unstructured-client
jsonpointer==2.4
# via jsonpatch
kubernetes==29.0.0
# via chromadb
langchain==0.2.0
# via langchain-community
# via open-webui
langchain-chroma==0.1.1
# via open-webui
langchain-community==0.2.0
# via open-webui
langchain-core==0.2.1
# via langchain
# via langchain-chroma
# via langchain-community
# via langchain-text-splitters
langchain-text-splitters==0.2.0
# via langchain
langdetect==1.0.9
# via unstructured
langfuse==2.33.0
# via open-webui
langsmith==0.1.57
# via langchain
# via langchain-community
# via langchain-core
litellm==1.37.20
# via open-webui
lxml==5.2.2
# via unstructured
markdown==3.6
# via open-webui
markdown-it-py==3.0.0
# via rich
markupsafe==2.1.5
# via jinja2
# via werkzeug
marshmallow==3.21.2
# via dataclasses-json
# via unstructured-client
mdurl==0.1.2
# via markdown-it-py
mmh3==4.1.0
# via chromadb
monotonic==1.6
# via posthog
mpmath==1.3.0
# via sympy
multidict==6.0.5
# via aiohttp
# via yarl
mypy-extensions==1.0.0
# via black
# via typing-inspect
# via unstructured-client
networkx==3.3
# via torch
nltk==3.8.1
# via unstructured
numpy==1.26.4
# via chroma-hnswlib
# via chromadb
# via ctranslate2
# via langchain
# via langchain-chroma
# via langchain-community
# via onnxruntime
# via opencv-python
# via opencv-python-headless
# via pandas
# via rank-bm25
# via rapidocr-onnxruntime
# via scikit-learn
# via scipy
# via sentence-transformers
# via shapely
# via transformers
# via unstructured
oauthlib==3.2.2
# via fastapi-sso
# via kubernetes
# via requests-oauthlib
onnxruntime==1.17.3
# via chromadb
# via faster-whisper
# via rapidocr-onnxruntime
openai==1.28.1
# via litellm
opencv-python==4.9.0.80
# via rapidocr-onnxruntime
opencv-python-headless==4.9.0.80
# via open-webui
openpyxl==3.1.2
# via open-webui
opentelemetry-api==1.24.0
# via chromadb
# via opentelemetry-exporter-otlp-proto-grpc
# via opentelemetry-instrumentation
# via opentelemetry-instrumentation-asgi
# via opentelemetry-instrumentation-fastapi
# via opentelemetry-sdk
opentelemetry-exporter-otlp-proto-common==1.24.0
# via opentelemetry-exporter-otlp-proto-grpc
opentelemetry-exporter-otlp-proto-grpc==1.24.0
# via chromadb
opentelemetry-instrumentation==0.45b0
# via opentelemetry-instrumentation-asgi
# via opentelemetry-instrumentation-fastapi
opentelemetry-instrumentation-asgi==0.45b0
# via opentelemetry-instrumentation-fastapi
opentelemetry-instrumentation-fastapi==0.45b0
# via chromadb
opentelemetry-proto==1.24.0
# via opentelemetry-exporter-otlp-proto-common
# via opentelemetry-exporter-otlp-proto-grpc
opentelemetry-sdk==1.24.0
# via chromadb
# via opentelemetry-exporter-otlp-proto-grpc
opentelemetry-semantic-conventions==0.45b0
# via opentelemetry-instrumentation-asgi
# via opentelemetry-instrumentation-fastapi
# via opentelemetry-sdk
opentelemetry-util-http==0.45b0
# via opentelemetry-instrumentation-asgi
# via opentelemetry-instrumentation-fastapi
ordered-set==4.1.0
# via deepdiff
orjson==3.10.3
# via chromadb
# via fastapi
# via langsmith
# via litellm
overrides==7.7.0
# via chromadb
packaging==23.2
# via black
# via build
# via gunicorn
# via huggingface-hub
# via langchain-core
# via langfuse
# via marshmallow
# via onnxruntime
# via transformers
# via unstructured-client
pandas==2.2.2
# via open-webui
passlib==1.7.4
# via open-webui
pathspec==0.12.1
# via black
peewee==3.17.5
# via open-webui
# via peewee-migrate
peewee-migrate==1.12.2
# via open-webui
pillow==10.3.0
# via fpdf2
# via rapidocr-onnxruntime
# via sentence-transformers
platformdirs==4.2.1
# via black
posthog==3.5.0
# via chromadb
proto-plus==1.23.0
# via google-ai-generativelanguage
# via google-api-core
protobuf==4.25.3
# via google-ai-generativelanguage
# via google-api-core
# via google-generativeai
# via googleapis-common-protos
# via grpcio-status
# via onnxruntime
# via opentelemetry-proto
# via proto-plus
psycopg2-binary==2.9.9
# via open-webui
pyasn1==0.6.0
# via pyasn1-modules
# via python-jose
# via rsa
pyasn1-modules==0.4.0
# via google-auth
pyclipper==1.3.0.post5
# via rapidocr-onnxruntime
pycparser==2.22
# via cffi
pydantic==2.7.1
# via chromadb
# via fastapi
# via fastapi-sso
# via google-generativeai
# via langchain
# via langchain-core
# via langfuse
# via langsmith
# via open-webui
# via openai
pydantic-core==2.18.2
# via pydantic
pygments==2.18.0
# via rich
pyjwt==2.8.0
# via litellm
# via open-webui
pymysql==1.1.0
# via open-webui
pypandoc==1.13
# via open-webui
pyparsing==3.1.2
# via httplib2
pypdf==4.2.0
# via open-webui
# via unstructured-client
pypika==0.48.9
# via chromadb
pyproject-hooks==1.1.0
# via build
python-dateutil==2.9.0.post0
# via botocore
# via kubernetes
# via pandas
# via posthog
# via unstructured-client
python-dotenv==1.0.1
# via litellm
# via uvicorn
python-engineio==4.9.0
# via python-socketio
python-iso639==2024.4.27
# via unstructured
python-jose==3.3.0
# via open-webui
python-magic==0.4.27
# via unstructured
python-multipart==0.0.9
# via fastapi
# via litellm
# via open-webui
python-socketio==5.11.2
# via open-webui
pytube==15.0.0
# via open-webui
pytz==2024.1
# via apscheduler
# via pandas
pyxlsb==1.0.10
# via open-webui
pyyaml==6.0.1
# via chromadb
# via ctranslate2
# via huggingface-hub
# via kubernetes
# via langchain
# via langchain-community
# via langchain-core
# via litellm
# via rapidocr-onnxruntime
# via transformers
# via uvicorn
rank-bm25==0.2.2
# via open-webui
rapidfuzz==3.9.0
# via unstructured
rapidocr-onnxruntime==1.3.22
# via open-webui
redis==5.0.4
# via rq
regex==2024.5.10
# via nltk
# via tiktoken
# via transformers
requests==2.32.2
# via chromadb
# via google-api-core
# via huggingface-hub
# via kubernetes
# via langchain
# via langchain-community
# via langsmith
# via litellm
# via open-webui
# via posthog
# via requests-oauthlib
# via tiktoken
# via transformers
# via unstructured
# via unstructured-client
# via youtube-transcript-api
requests-oauthlib==2.0.0
# via kubernetes
rich==13.7.1
# via typer
rq==1.16.2
# via litellm
rsa==4.9
# via google-auth
# via python-jose
s3transfer==0.10.1
# via boto3
safetensors==0.4.3
# via transformers
scikit-learn==1.4.2
# via sentence-transformers
scipy==1.13.0
# via scikit-learn
# via sentence-transformers
sentence-transformers==2.7.0
# via open-webui
setuptools==69.5.1
# via ctranslate2
# via opentelemetry-instrumentation
shapely==2.0.4
# via rapidocr-onnxruntime
shellingham==1.5.4
# via typer
simple-websocket==1.0.0
# via python-engineio
six==1.16.0
# via apscheduler
# via ecdsa
# via kubernetes
# via langdetect
# via posthog
# via python-dateutil
# via rapidocr-onnxruntime
# via unstructured-client
sniffio==1.3.1
# via anyio
# via httpx
# via openai
soupsieve==2.5
# via beautifulsoup4
sqlalchemy==2.0.30
# via langchain
# via langchain-community
starlette==0.37.2
# via fastapi
sympy==1.12
# via onnxruntime
# via torch
tabulate==0.9.0
# via unstructured
tenacity==8.3.0
# via chromadb
# via langchain
# via langchain-community
# via langchain-core
threadpoolctl==3.5.0
# via scikit-learn
tiktoken==0.6.0
# via litellm
tokenizers==0.15.2
# via chromadb
# via faster-whisper
# via litellm
# via transformers
torch==2.3.0
# via sentence-transformers
tqdm==4.66.4
# via chromadb
# via google-generativeai
# via huggingface-hub
# via nltk
# via openai
# via sentence-transformers
# via transformers
transformers==4.39.3
# via sentence-transformers
typer==0.12.3
# via chromadb
# via fastapi-cli
typing-extensions==4.11.0
# via chromadb
# via fastapi
# via google-generativeai
# via huggingface-hub
# via openai
# via opentelemetry-sdk
# via pydantic
# via pydantic-core
# via sqlalchemy
# via torch
# via typer
# via typing-inspect
# via unstructured
# via unstructured-client
typing-inspect==0.9.0
# via dataclasses-json
# via unstructured-client
tzdata==2024.1
# via pandas
tzlocal==5.2
# via apscheduler
ujson==5.10.0
# via fastapi
unstructured==0.14.0
# via open-webui
unstructured-client==0.22.0
# via unstructured
uritemplate==4.1.1
# via google-api-python-client
urllib3==2.2.1
# via botocore
# via kubernetes
# via requests
# via unstructured-client
uvicorn==0.22.0
# via chromadb
# via fastapi
# via litellm
# via open-webui
uvloop==0.19.0
# via uvicorn
validators==0.28.1
# via open-webui
watchfiles==0.21.0
# via uvicorn
websocket-client==1.8.0
# via kubernetes
websockets==12.0
# via uvicorn
werkzeug==3.0.3
# via flask
wrapt==1.16.0
# via deprecated
# via langfuse
# via opentelemetry-instrumentation
# via unstructured
wsproto==1.2.0
# via simple-websocket
xlrd==2.0.1
# via open-webui
yarl==1.9.4
# via aiohttp
youtube-transcript-api==0.6.2
# via open-webui
zipp==3.18.1
# via importlib-metadata
@@ -654,3 +654,35 @@ export const deleteAllChats = async (token: string) => {
	return res;
};
export const archiveAllChats = async (token: string) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/chats/archive/all`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.then((json) => {
return json;
})
.catch((err) => {
error = err.detail;
console.log(err);
return null;
});
if (error) {
throw error;
}
return res;
};
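A minimal usage sketch for the new helper, assuming a caller that keeps the session token in localStorage like the rest of the frontend; the import path matches the one used elsewhere in this diff ($lib/apis/chats):

import { archiveAllChats } from '$lib/apis/chats';

// Archive every chat for the current user; the helper throws the backend's
// `detail` string on failure, so surface it to the caller.
const onArchiveAll = async () => {
	try {
		await archiveAllChats(localStorage.token);
	} catch (error) {
		console.error(error);
	}
};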
 import { WEBUI_API_BASE_URL } from '$lib/constants';
+import type { Banner } from '$lib/types';

 export const setDefaultModels = async (token: string, models: string) => {
 	let error = null;
@@ -59,3 +60,60 @@ export const setDefaultPromptSuggestions = async (token: string, promptSuggestio
 	return res;
 };
export const getBanners = async (token: string): Promise<Banner[]> => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/configs/banners`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err.detail;
return null;
});
if (error) {
throw error;
}
return res;
};
export const setBanners = async (token: string, banners: Banner[]) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/configs/banners`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
},
body: JSON.stringify({
banners: banners
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err.detail;
return null;
});
if (error) {
throw error;
}
return res;
};
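A hypothetical round trip over the two new banner endpoints; the Banner fields here are inferred from the admin component later in this diff, not from $lib/types itself:

import { getBanners, setBanners } from '$lib/apis/configs';
import type { Banner } from '$lib/types';

// Append one banner to whatever is currently configured and save the result.
const addInfoBanner = async (token: string, content: string) => {
	const banners: Banner[] = await getBanners(token);
	const banner = {
		id: crypto.randomUUID(), // the component uses uuidv4(); randomUUID is equivalent here
		type: 'info',
		title: '',
		content,
		dismissible: true,
		timestamp: Math.floor(Date.now() / 1000)
	} as Banner;
	return setBanners(token, [...banners, banner]);
};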
-import { WEBUI_BASE_URL } from '$lib/constants';
+import { WEBUI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
export const getModels = async (token: string = '') => {
let error = null;
const res = await fetch(`${WEBUI_BASE_URL}/api/models`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err;
return null;
});
if (error) {
throw error;
}
let models = res?.data ?? [];
	models = models
		.filter((model) => model)
		.sort((a, b) => {
			// Compare case-insensitively first
			const lowerA = a.name.toLowerCase();
			const lowerB = b.name.toLowerCase();
			if (lowerA < lowerB) return -1;
			if (lowerA > lowerB) return 1;
			// If equal case-insensitively, break the tie on the original names;
			// uppercase sorts before lowercase because of its ASCII code points
			if (a.name < b.name) return -1;
			if (a.name > b.name) return 1;
			return 0; // They are equal
		});
console.log(models);
return models;
};
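For reference, the tie-break in the comparator above means an uppercase name sorts ahead of its lowercase twin, since uppercase letters have the lower ASCII code points. A standalone sketch with made-up model names:

const byName = (a: string, b: string) => {
	const la = a.toLowerCase();
	const lb = b.toLowerCase();
	if (la < lb) return -1;
	if (la > lb) return 1;
	// Case-insensitive tie: fall back to a case-sensitive comparison.
	if (a < b) return -1;
	if (a > b) return 1;
	return 0;
};

console.log(['mistral', 'llama3', 'Llama3', 'CodeLlama'].sort(byName));
// → ['CodeLlama', 'Llama3', 'llama3', 'mistral']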
export const getBackendConfig = async () => {
	let error = null;
@@ -196,3 +245,131 @@ export const updateWebhookUrl = async (token: string, url: string) => {
	return res.url;
};
export const getCommunitySharingEnabledStatus = async (token: string) => {
let error = null;
const res = await fetch(`${WEBUI_BASE_URL}/api/community_sharing`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err;
return null;
});
if (error) {
throw error;
}
return res;
};
export const toggleCommunitySharingEnabledStatus = async (token: string) => {
let error = null;
const res = await fetch(`${WEBUI_BASE_URL}/api/community_sharing/toggle`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err.detail;
return null;
});
if (error) {
throw error;
}
return res;
};
export const getModelConfig = async (token: string): Promise<GlobalModelConfig> => {
let error = null;
const res = await fetch(`${WEBUI_BASE_URL}/api/config/models`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err;
return null;
});
if (error) {
throw error;
}
return res.models;
};
export interface ModelConfig {
id: string;
name: string;
meta: ModelMeta;
base_model_id?: string;
params: ModelParams;
}
export interface ModelMeta {
description?: string;
capabilities?: object;
}
export interface ModelParams {}
export type GlobalModelConfig = ModelConfig[];
export const updateModelConfig = async (token: string, config: GlobalModelConfig) => {
let error = null;
const res = await fetch(`${WEBUI_BASE_URL}/api/config/models`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
},
body: JSON.stringify({
models: config
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err;
return null;
});
if (error) {
throw error;
}
return res;
};
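A hypothetical caller for the model-config pair above; the import path is an assumption, since the diff only shows the definitions:

// Import path assumed; getModelConfig/updateModelConfig are the helpers defined above.
import { getModelConfig, updateModelConfig, type ModelConfig } from '$lib/apis';

// Register a customised variant on top of an existing base model.
const addVariant = async (token: string) => {
	const current = await getModelConfig(token);
	const variant: ModelConfig = {
		id: 'llama3-concise', // made-up id
		name: 'Llama 3 (concise)',
		base_model_id: 'llama3:latest', // made-up base model
		meta: { description: 'Answers briefly' },
		params: {}
	};
	return updateModelConfig(token, [...current, variant]);
};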
import { LITELLM_API_BASE_URL } from '$lib/constants';
export const getLiteLLMModels = async (token: string = '') => {
let error = null;
const res = await fetch(`${LITELLM_API_BASE_URL}/v1/models`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = `LiteLLM: ${err?.error?.message ?? 'Network Problem'}`;
return [];
});
if (error) {
throw error;
}
const models = Array.isArray(res) ? res : res?.data ?? null;
return models
? models
.map((model) => ({
id: model.id,
name: model.name ?? model.id,
external: true,
source: 'LiteLLM'
}))
.sort((a, b) => {
return a.name.localeCompare(b.name);
})
: models;
};
export const getLiteLLMModelInfo = async (token: string = '') => {
let error = null;
const res = await fetch(`${LITELLM_API_BASE_URL}/model/info`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = `LiteLLM: ${err?.error?.message ?? 'Network Problem'}`;
return [];
});
if (error) {
throw error;
}
const models = Array.isArray(res) ? res : res?.data ?? null;
return models;
};
type AddLiteLLMModelForm = {
name: string;
model: string;
api_base: string;
api_key: string;
rpm: string;
max_tokens: string;
};
export const addLiteLLMModel = async (token: string = '', payload: AddLiteLLMModelForm) => {
let error = null;
const res = await fetch(`${LITELLM_API_BASE_URL}/model/new`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
},
body: JSON.stringify({
model_name: payload.name,
litellm_params: {
model: payload.model,
...(payload.api_base === '' ? {} : { api_base: payload.api_base }),
...(payload.api_key === '' ? {} : { api_key: payload.api_key }),
...(isNaN(parseInt(payload.rpm)) ? {} : { rpm: parseInt(payload.rpm) }),
...(payload.max_tokens === '' ? {} : { max_tokens: payload.max_tokens })
}
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = `LiteLLM: ${err?.error?.message ?? 'Network Problem'}`;
return [];
});
if (error) {
throw error;
}
return res;
};
export const deleteLiteLLMModel = async (token: string = '', id: string) => {
let error = null;
const res = await fetch(`${LITELLM_API_BASE_URL}/model/delete`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
},
body: JSON.stringify({
id: id
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = `LiteLLM: ${err?.error?.message ?? 'Network Problem'}`;
return [];
});
if (error) {
throw error;
}
return res;
};
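A hypothetical registration call; the import path is assumed. Note that rpm and max_tokens arrive as strings because they come from form inputs, and addLiteLLMModel drops empty fields and parses rpm itself:

import { addLiteLLMModel } from '$lib/apis/litellm'; // path assumed

const registerProxyModel = async (token: string) => {
	await addLiteLLMModel(token, {
		name: 'gpt-4o-via-proxy', // made-up display name
		model: 'openai/gpt-4o', // made-up upstream model
		api_base: '', // empty strings are omitted from litellm_params
		api_key: '',
		rpm: '60',
		max_tokens: '4096'
	});
};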
 import { WEBUI_API_BASE_URL } from '$lib/constants';

-export const createNewModelfile = async (token: string, modelfile: object) => {
+export const addNewModel = async (token: string, model: object) => {
 	let error = null;

-	const res = await fetch(`${WEBUI_API_BASE_URL}/modelfiles/create`, {
+	const res = await fetch(`${WEBUI_API_BASE_URL}/models/add`, {
 		method: 'POST',
 		headers: {
 			Accept: 'application/json',
 			'Content-Type': 'application/json',
 			authorization: `Bearer ${token}`
 		},
-		body: JSON.stringify({
-			modelfile: modelfile
-		})
+		body: JSON.stringify(model)
 	})
 		.then(async (res) => {
 			if (!res.ok) throw await res.json();
@@ -31,10 +29,10 @@ export const createNewModelfile = async (token: string, modelfile: object) => {
 	return res;
 };

-export const getModelfiles = async (token: string = '') => {
+export const getModelInfos = async (token: string = '') => {
 	let error = null;

-	const res = await fetch(`${WEBUI_API_BASE_URL}/modelfiles/`, {
+	const res = await fetch(`${WEBUI_API_BASE_URL}/models`, {
 		method: 'GET',
 		headers: {
 			Accept: 'application/json',
@@ -59,22 +57,22 @@ export const getModelfiles = async (token: string = '') => {
 		throw error;
 	}

-	return res.map((modelfile) => modelfile.modelfile);
+	return res;
 };

-export const getModelfileByTagName = async (token: string, tagName: string) => {
+export const getModelById = async (token: string, id: string) => {
 	let error = null;

-	const res = await fetch(`${WEBUI_API_BASE_URL}/modelfiles/`, {
-		method: 'POST',
+	const searchParams = new URLSearchParams();
+	searchParams.append('id', id);
+
+	const res = await fetch(`${WEBUI_API_BASE_URL}/models?${searchParams.toString()}`, {
+		method: 'GET',
 		headers: {
 			Accept: 'application/json',
 			'Content-Type': 'application/json',
 			authorization: `Bearer ${token}`
-		},
-		body: JSON.stringify({
-			tag_name: tagName
-		})
+		}
 	})
 		.then(async (res) => {
 			if (!res.ok) throw await res.json();
@@ -94,27 +92,23 @@ export const getModelfileByTagName = async (token: string, tagName: string) => {
 		throw error;
 	}

-	return res.modelfile;
+	return res;
 };

-export const updateModelfileByTagName = async (
-	token: string,
-	tagName: string,
-	modelfile: object
-) => {
+export const updateModelById = async (token: string, id: string, model: object) => {
 	let error = null;

-	const res = await fetch(`${WEBUI_API_BASE_URL}/modelfiles/update`, {
+	const searchParams = new URLSearchParams();
+	searchParams.append('id', id);
+
+	const res = await fetch(`${WEBUI_API_BASE_URL}/models/update?${searchParams.toString()}`, {
 		method: 'POST',
 		headers: {
 			Accept: 'application/json',
 			'Content-Type': 'application/json',
 			authorization: `Bearer ${token}`
 		},
-		body: JSON.stringify({
-			tag_name: tagName,
-			modelfile: modelfile
-		})
+		body: JSON.stringify(model)
 	})
 		.then(async (res) => {
 			if (!res.ok) throw await res.json();
@@ -137,19 +131,19 @@ export const updateModelfileByTagName = async (
 	return res;
 };

-export const deleteModelfileByTagName = async (token: string, tagName: string) => {
+export const deleteModelById = async (token: string, id: string) => {
 	let error = null;

-	const res = await fetch(`${WEBUI_API_BASE_URL}/modelfiles/delete`, {
+	const searchParams = new URLSearchParams();
+	searchParams.append('id', id);
+
+	const res = await fetch(`${WEBUI_API_BASE_URL}/models/delete?${searchParams.toString()}`, {
 		method: 'DELETE',
 		headers: {
 			Accept: 'application/json',
 			'Content-Type': 'application/json',
 			authorization: `Bearer ${token}`
-		},
-		body: JSON.stringify({
-			tag_name: tagName
-		})
+		}
 	})
 		.then(async (res) => {
 			if (!res.ok) throw await res.json();
...
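The net effect of the modelfiles-to-models hunks above is a rename plus a transport change: model records are now addressed by an id query parameter instead of a tag_name JSON body, and list/get/update/delete move from the modelfiles routes to models. A hypothetical caller against the new surface (import path assumed):

import { getModelById, updateModelById } from '$lib/apis/models'; // path assumed

const renameModel = async (token: string, id: string) => {
	// Previously: getModelfileByTagName(token, tagName) with a POST body.
	const model = await getModelById(token, id); // the id now travels as ?id=... in the URL
	return updateModelById(token, id, { ...model, name: `${model.name} (renamed)` });
};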
import { OLLAMA_API_BASE_URL } from '$lib/constants';
import { promptTemplate } from '$lib/utils';
export const getOllamaConfig = async (token: string = '') => {
let error = null;
const res = await fetch(`${OLLAMA_API_BASE_URL}/config`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
return res;
};
export const updateOllamaConfig = async (token: string = '', enable_ollama_api: boolean) => {
let error = null;
const res = await fetch(`${OLLAMA_API_BASE_URL}/config/update`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
},
body: JSON.stringify({
enable_ollama_api: enable_ollama_api
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
return res;
};
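A minimal sketch of driving the new config endpoint (import path assumed; the only payload key the diff shows is enable_ollama_api):

import { updateOllamaConfig } from '$lib/apis/ollama'; // path assumed

// Turn the Ollama API integration off for this deployment.
const disableOllamaApi = async (token: string) => {
	await updateOllamaConfig(token, false); // posts { enable_ollama_api: false }
};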
export const getOllamaUrls = async (token: string = '') => {
	let error = null;
@@ -97,7 +164,7 @@ export const getOllamaVersion = async (token: string = '') => {
		throw error;
	}

-	return res?.version ?? '';
+	return res?.version ?? false;
};

export const getOllamaModels = async (token: string = '') => {
...
@@ -230,7 +230,12 @@ export const getOpenAIModels = async (token: string = '') => {
	return models
		? models
-				.map((model) => ({ id: model.id, name: model.name ?? model.id, external: true }))
+				.map((model) => ({
+					id: model.id,
+					name: model.name ?? model.id,
+					external: true,
+					custom_info: model.custom_info
+				}))
				.sort((a, b) => {
					return a.name.localeCompare(b.name);
				})
...
@@ -115,6 +115,62 @@ export const getUsers = async (token: string) => {
	return res ? res : [];
};
export const getUserSettings = async (token: string) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/users/user/settings`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err.detail;
return null;
});
if (error) {
throw error;
}
return res;
};
export const updateUserSettings = async (token: string, settings: object) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/users/user/settings/update`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
},
body: JSON.stringify({
...settings
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err.detail;
return null;
});
if (error) {
throw error;
}
return res;
};
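A hypothetical read-modify-write against the new settings endpoints (import path assumed; updateUserSettings spreads whatever object it receives, so the theme key here is illustrative):

import { getUserSettings, updateUserSettings } from '$lib/apis/users'; // path assumed

const saveTheme = async (token: string) => {
	const settings = (await getUserSettings(token)) ?? {};
	return updateUserSettings(token, { ...settings, theme: 'dark' }); // made-up key
};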
export const getUserById = async (token: string, userId: string) => {
	let error = null;
...
<script lang="ts">
import { v4 as uuidv4 } from 'uuid';
import { getContext, onMount } from 'svelte';
import { banners as _banners } from '$lib/stores';
import type { Banner } from '$lib/types';
import { getBanners, setBanners } from '$lib/apis/configs';
import type { Writable } from 'svelte/store';
import type { i18n as i18nType } from 'i18next';
import Tooltip from '$lib/components/common/Tooltip.svelte';
import Switch from '$lib/components/common/Switch.svelte';
const i18n: Writable<i18nType> = getContext('i18n');
export let saveHandler: Function;
let banners: Banner[] = [];
onMount(async () => {
banners = await getBanners(localStorage.token);
});
const updateBanners = async () => {
_banners.set(await setBanners(localStorage.token, banners));
};
</script>
<form
class="flex flex-col h-full justify-between space-y-3 text-sm"
on:submit|preventDefault={async () => {
updateBanners();
saveHandler();
}}
>
<div class=" space-y-3 pr-1.5 overflow-y-scroll max-h-80 h-full">
<div class=" space-y-3 pr-1.5">
<div class="flex w-full justify-between mb-2">
<div class=" self-center text-sm font-semibold">
{$i18n.t('Banners')}
</div>
<button
class="p-1 px-3 text-xs flex rounded transition"
type="button"
on:click={() => {
if (banners.length === 0 || banners.at(-1).content !== '') {
banners = [
...banners,
{
id: uuidv4(),
type: '',
title: '',
content: '',
dismissible: true,
timestamp: Math.floor(Date.now() / 1000)
}
];
}
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M10.75 4.75a.75.75 0 00-1.5 0v4.5h-4.5a.75.75 0 000 1.5h4.5v4.5a.75.75 0 001.5 0v-4.5h4.5a.75.75 0 000-1.5h-4.5v-4.5z"
/>
</svg>
</button>
</div>
<div class="flex flex-col space-y-1">
{#each banners as banner, bannerIdx}
<div class=" flex justify-between">
<div class="flex flex-row flex-1 border rounded-xl dark:border-gray-800">
<select
class="w-fit capitalize rounded-xl py-2 px-4 text-xs bg-transparent outline-none"
bind:value={banner.type}
>
{#if banner.type == ''}
<option value="" selected disabled class="text-gray-900">{$i18n.t('Type')}</option
>
{/if}
<option value="info" class="text-gray-900">{$i18n.t('Info')}</option>
<option value="warning" class="text-gray-900">{$i18n.t('Warning')}</option>
<option value="error" class="text-gray-900">{$i18n.t('Error')}</option>
<option value="success" class="text-gray-900">{$i18n.t('Success')}</option>
</select>
<input
class="pr-5 py-1.5 text-xs w-full bg-transparent outline-none"
placeholder={$i18n.t('Content')}
bind:value={banner.content}
/>
<div class="relative top-1.5 -left-2">
<Tooltip content="Dismissible" className="flex h-fit items-center">
<Switch bind:state={banner.dismissible} />
</Tooltip>
</div>
</div>
<button
class="px-2"
type="button"
on:click={() => {
banners.splice(bannerIdx, 1);
banners = banners;
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M6.28 5.22a.75.75 0 00-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 101.06 1.06L10 11.06l3.72 3.72a.75.75 0 101.06-1.06L11.06 10l3.72-3.72a.75.75 0 00-1.06-1.06L10 8.94 6.28 5.22z"
/>
</svg>
</button>
</div>
{/each}
</div>
</div>
</div>
<div class="flex justify-end pt-3 text-sm font-medium">
<button
class=" px-4 py-2 bg-emerald-700 hover:bg-emerald-800 text-gray-100 transition rounded-lg"
type="submit"
>
Save
</button>
</div>
</form>
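For reference, the banner objects this component constructs have the following shape, inferred from the initializer in the click handler above; the canonical Banner type lives in $lib/types, which this diff does not include:

// Inferred shape only, not the canonical $lib/types definition.
interface InferredBanner {
	id: string; // uuidv4()
	type: '' | 'info' | 'warning' | 'error' | 'success';
	title: string;
	content: string;
	dismissible: boolean;
	timestamp: number; // Unix seconds
}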
<script lang="ts"> <script lang="ts">
import fileSaver from 'file-saver';
const { saveAs } = fileSaver;
import { downloadDatabase } from '$lib/apis/utils'; import { downloadDatabase } from '$lib/apis/utils';
import { onMount, getContext } from 'svelte'; import { onMount, getContext } from 'svelte';
import { config } from '$lib/stores'; import { config, user } from '$lib/stores';
import { toast } from 'svelte-sonner'; import { toast } from 'svelte-sonner';
import { getAllUserChats } from '$lib/apis/chats';
const i18n = getContext('i18n'); const i18n = getContext('i18n');
export let saveHandler: Function; export let saveHandler: Function;
const exportAllUserChats = async () => {
let blob = new Blob([JSON.stringify(await getAllUserChats(localStorage.token))], {
type: 'application/json'
});
saveAs(blob, `all-chats-export-${Date.now()}.json`);
};
onMount(async () => { onMount(async () => {
// permissions = await getUserPermissions(localStorage.token); // permissions = await getUserPermissions(localStorage.token);
}); });
...@@ -23,10 +34,10 @@ ...@@ -23,10 +34,10 @@
<div> <div>
<div class=" mb-2 text-sm font-medium">{$i18n.t('Database')}</div> <div class=" mb-2 text-sm font-medium">{$i18n.t('Database')}</div>
<div class=" flex w-full justify-between"> {#if $config?.features.enable_admin_export ?? true}
<!-- <div class=" self-center text-xs font-medium">{$i18n.t('Allow Chat Deletion')}</div> --> <div class=" flex w-full justify-between">
<!-- <div class=" self-center text-xs font-medium">{$i18n.t('Allow Chat Deletion')}</div> -->
{#if $config?.admin_export_enabled ?? true}
<button <button
class=" flex rounded-md py-1.5 px-3 w-full hover:bg-gray-200 dark:hover:bg-gray-800 transition" class=" flex rounded-md py-1.5 px-3 w-full hover:bg-gray-200 dark:hover:bg-gray-800 transition"
type="button" type="button"
...@@ -55,8 +66,36 @@ ...@@ -55,8 +66,36 @@
</div> </div>
<div class=" self-center text-sm font-medium">{$i18n.t('Download Database')}</div> <div class=" self-center text-sm font-medium">{$i18n.t('Download Database')}</div>
</button> </button>
{/if} </div>
</div>
<hr class=" dark:border-gray-700 my-1" />
<button
class=" flex rounded-md py-2 px-3.5 w-full hover:bg-gray-200 dark:hover:bg-gray-800 transition"
on:click={() => {
exportAllUserChats();
}}
>
<div class=" self-center mr-3">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path d="M2 3a1 1 0 0 1 1-1h10a1 1 0 0 1 1 1v1a1 1 0 0 1-1 1H3a1 1 0 0 1-1-1V3Z" />
<path
fill-rule="evenodd"
d="M13 6H3v6a2 2 0 0 0 2 2h6a2 2 0 0 0 2-2V6ZM8.75 7.75a.75.75 0 0 0-1.5 0v2.69L6.03 9.22a.75.75 0 0 0-1.06 1.06l2.5 2.5a.75.75 0 0 0 1.06 0l2.5-2.5a.75.75 0 1 0-1.06-1.06l-1.22 1.22V7.75Z"
clip-rule="evenodd"
/>
</svg>
</div>
<div class=" self-center text-sm font-medium">
{$i18n.t('Export All Chats (All Users)')}
</div>
</button>
{/if}
	</div>
</div>
...
<script lang="ts"> <script lang="ts">
import { getWebhookUrl, updateWebhookUrl } from '$lib/apis'; import {
getCommunitySharingEnabledStatus,
getWebhookUrl,
toggleCommunitySharingEnabledStatus,
updateWebhookUrl
} from '$lib/apis';
import { import {
getDefaultUserRole, getDefaultUserRole,
getJWTExpiresDuration, getJWTExpiresDuration,
...@@ -18,6 +23,7 @@ ...@@ -18,6 +23,7 @@
 	let JWTExpiresIn = '';
 	let webhookUrl = '';
+	let communitySharingEnabled = true;

 	const toggleSignUpEnabled = async () => {
 		signUpEnabled = await toggleSignUpEnabledStatus(localStorage.token);
@@ -35,11 +41,28 @@
 		webhookUrl = await updateWebhookUrl(localStorage.token, webhookUrl);
 	};
const toggleCommunitySharingEnabled = async () => {
communitySharingEnabled = await toggleCommunitySharingEnabledStatus(localStorage.token);
};
 	onMount(async () => {
-		signUpEnabled = await getSignUpEnabledStatus(localStorage.token);
-		defaultUserRole = await getDefaultUserRole(localStorage.token);
-		JWTExpiresIn = await getJWTExpiresDuration(localStorage.token);
-		webhookUrl = await getWebhookUrl(localStorage.token);
+		await Promise.all([
+			(async () => {
+				signUpEnabled = await getSignUpEnabledStatus(localStorage.token);
+			})(),
+			(async () => {
+				defaultUserRole = await getDefaultUserRole(localStorage.token);
+			})(),
+			(async () => {
+				JWTExpiresIn = await getJWTExpiresDuration(localStorage.token);
+			})(),
+			(async () => {
+				webhookUrl = await getWebhookUrl(localStorage.token);
+			})(),
+			(async () => {
+				communitySharingEnabled = await getCommunitySharingEnabledStatus(localStorage.token);
+			})()
+		]);
 	});
 </script>
@@ -114,6 +137,47 @@
		</div>
	</div>
<div class=" flex w-full justify-between">
<div class=" self-center text-xs font-medium">{$i18n.t('Enable Community Sharing')}</div>
<button
class="p-1 px-3 text-xs flex rounded transition"
on:click={() => {
toggleCommunitySharingEnabled();
}}
type="button"
>
{#if communitySharingEnabled}
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M11.5 1A3.5 3.5 0 0 0 8 4.5V7H2.5A1.5 1.5 0 0 0 1 8.5v5A1.5 1.5 0 0 0 2.5 15h7a1.5 1.5 0 0 0 1.5-1.5v-5A1.5 1.5 0 0 0 9.5 7V4.5a2 2 0 1 1 4 0v1.75a.75.75 0 0 0 1.5 0V4.5A3.5 3.5 0 0 0 11.5 1Z"
/>
</svg>
<span class="ml-2 self-center">{$i18n.t('Enabled')}</span>
{:else}
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M8 1a3.5 3.5 0 0 0-3.5 3.5V7A1.5 1.5 0 0 0 3 8.5v5A1.5 1.5 0 0 0 4.5 15h7a1.5 1.5 0 0 0 1.5-1.5v-5A1.5 1.5 0 0 0 11.5 7V4.5A3.5 3.5 0 0 0 8 1Zm2 6V4.5a2 2 0 1 0-4 0V7h4Z"
clip-rule="evenodd"
/>
</svg>
<span class="ml-2 self-center">{$i18n.t('Disabled')}</span>
{/if}
</button>
</div>
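<!--
	Editor's note: the toggle above never flips the flag optimistically; it assigns the
	boolean that toggleCommunitySharingEnabledStatus returns, so the padlock icon always
	mirrors server state. A hypothetical shape for such a helper (the real one lives in
	$lib/apis; the endpoint path here is an illustrative assumption):

	const toggleFlag = async (token: string): Promise<boolean> => {
		const res = await fetch('/api/v1/flags/toggle', {
			method: 'POST',
			headers: { Authorization: `Bearer ${token}` }
		});
		if (!res.ok) throw await res.json();
		return res.json(); // the backend responds with the new boolean state
	};
-->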
	<hr class=" dark:border-gray-700 my-3" />
	<div class=" w-full justify-between">
......
<script lang="ts">
	import { getBackendConfig, getModelFilterConfig, updateModelFilterConfig } from '$lib/apis';
	import { getSignUpEnabledStatus, toggleSignUpEnabledStatus } from '$lib/apis/auths';
	import { getUserPermissions, updateUserPermissions } from '$lib/apis/users';

	import { onMount, getContext } from 'svelte';
	import { models, config } from '$lib/stores';

	import Switch from '$lib/components/common/Switch.svelte';
	import { setDefaultModels } from '$lib/apis/configs';

	const i18n = getContext('i18n');

	export let saveHandler: Function;

	let defaultModelId = '';

	let whitelistEnabled = false;
	let whitelistModels = [''];
	let permissions = {
@@ -24,9 +28,10 @@
		const res = await getModelFilterConfig(localStorage.token);
		if (res) {
			whitelistEnabled = res.enabled;
			whitelistModels = res.models.length > 0 ? res.models : [''];
		}

		defaultModelId = $config.default_models ? $config?.default_models.split(',')[0] : '';
	});
</script>
@@ -34,10 +39,13 @@
	class="flex flex-col h-full justify-between space-y-3 text-sm"
	on:submit|preventDefault={async () => {
		// console.log('submit');
		await setDefaultModels(localStorage.token, defaultModelId);
		await updateUserPermissions(localStorage.token, permissions);
		await updateModelFilterConfig(localStorage.token, whitelistEnabled, whitelistModels);
		saveHandler();

		await config.set(await getBackendConfig());
	}}
>
	<div class=" space-y-3 pr-1.5 overflow-y-scroll max-h-80">
@@ -88,26 +96,40 @@
		<hr class=" dark:border-gray-700 my-2" />
<div class="mt-2 space-y-3 pr-1.5"> <div class="mt-2 space-y-3">
<div> <div>
<div class="mb-2"> <div class="mb-2">
<div class="flex justify-between items-center text-xs"> <div class="flex justify-between items-center text-xs">
<div class=" text-sm font-medium">{$i18n.t('Manage Models')}</div> <div class=" text-sm font-medium">{$i18n.t('Manage Models')}</div>
</div> </div>
</div> </div>
<div class=" space-y-1 mb-3">
<div class="mb-2">
<div class="flex justify-between items-center text-xs">
<div class=" text-xs font-medium">{$i18n.t('Default Model')}</div>
</div>
</div>
<div class="flex-1 mr-2">
<select
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
bind:value={defaultModelId}
placeholder="Select a model"
>
<option value="" disabled selected>{$i18n.t('Select a model')}</option>
{#each $models.filter((model) => model.id) as model}
<option value={model.id} class="bg-gray-100 dark:bg-gray-700">{model.name}</option>
{/each}
</select>
</div>
</div>
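<!--
	Editor's note: default models are persisted backend-side as a single comma-separated
	string; the select above binds only the first entry
	($config.default_models.split(',')[0]) and setDefaultModels writes it back. A small
	sketch of that round trip; parseDefaultModels/serializeDefaultModels are
	illustrative names, not repo helpers:

	const parseDefaultModels = (raw: string | null): string[] =>
		raw ? raw.split(',').filter((id) => id !== '') : [];

	const serializeDefaultModels = (ids: string[]): string => ids.join(',');

	// parseDefaultModels('llama3,gpt-4o') yields ['llama3', 'gpt-4o']
-->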
<div class=" space-y-3"> <div class=" space-y-1">
<div> <div class="mb-2">
<div class="flex justify-between items-center text-xs"> <div class="flex justify-between items-center text-xs">
<div class=" text-xs font-medium">{$i18n.t('Model Whitelisting')}</div> <div class=" text-xs font-medium">{$i18n.t('Model Whitelisting')}</div>
<button <Switch bind:state={whitelistEnabled} />
class=" text-xs font-medium text-gray-500"
type="button"
on:click={() => {
whitelistEnabled = !whitelistEnabled;
}}>{whitelistEnabled ? $i18n.t('On') : $i18n.t('Off')}</button
>
</div> </div>
</div> </div>
......
@@ -6,6 +6,9 @@
	import General from './Settings/General.svelte';
	import Users from './Settings/Users.svelte';

	import Banners from '$lib/components/admin/Settings/Banners.svelte';

	import { toast } from 'svelte-sonner';

	const i18n = getContext('i18n');

	export let show = false;
@@ -117,24 +120,63 @@
					</div>
					<div class=" self-center">{$i18n.t('Database')}</div>
				</button>
<button
class="px-2.5 py-2.5 min-w-fit rounded-lg flex-1 md:flex-none flex text-right transition {selectedTab ===
'banners'
? 'bg-gray-200 dark:bg-gray-700'
: ' hover:bg-gray-300 dark:hover:bg-gray-800'}"
on:click={() => {
selectedTab = 'banners';
}}
>
<div class=" self-center mr-2">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 24 24"
fill="currentColor"
class="size-4"
>
<path
d="M5.85 3.5a.75.75 0 0 0-1.117-1 9.719 9.719 0 0 0-2.348 4.876.75.75 0 0 0 1.479.248A8.219 8.219 0 0 1 5.85 3.5ZM19.267 2.5a.75.75 0 1 0-1.118 1 8.22 8.22 0 0 1 1.987 4.124.75.75 0 0 0 1.48-.248A9.72 9.72 0 0 0 19.266 2.5Z"
/>
<path
fill-rule="evenodd"
d="M12 2.25A6.75 6.75 0 0 0 5.25 9v.75a8.217 8.217 0 0 1-2.119 5.52.75.75 0 0 0 .298 1.206c1.544.57 3.16.99 4.831 1.243a3.75 3.75 0 1 0 7.48 0 24.583 24.583 0 0 0 4.83-1.244.75.75 0 0 0 .298-1.205 8.217 8.217 0 0 1-2.118-5.52V9A6.75 6.75 0 0 0 12 2.25ZM9.75 18c0-.034 0-.067.002-.1a25.05 25.05 0 0 0 4.496 0l.002.1a2.25 2.25 0 1 1-4.5 0Z"
clip-rule="evenodd"
/>
</svg>
</div>
<div class=" self-center">{$i18n.t('Banners')}</div>
</button>
		</div>

		<div class="flex-1 md:min-h-[380px]">
			{#if selectedTab === 'general'}
				<General
					saveHandler={() => {
						show = false;
						toast.success($i18n.t('Settings saved successfully!'));
					}}
				/>
			{:else if selectedTab === 'users'}
				<Users
					saveHandler={() => {
						show = false;
						toast.success($i18n.t('Settings saved successfully!'));
					}}
				/>
			{:else if selectedTab === 'db'}
				<Database
					saveHandler={() => {
						show = false;
						toast.success($i18n.t('Settings saved successfully!'));
					}}
				/>
			{:else if selectedTab === 'banners'}
				<Banners
					saveHandler={() => {
						show = false;
						toast.success($i18n.t('Settings saved successfully!'));
					}}
				/>
			{/if}
......
<script lang="ts">
import { v4 as uuidv4 } from 'uuid';
import { toast } from 'svelte-sonner';
import { getContext, onMount, tick } from 'svelte';
import { goto } from '$app/navigation';
import { page } from '$app/stores';
import {
chatId,
chats,
config,
type Model,
models,
settings,
showSidebar,
tags as _tags,
WEBUI_NAME,
banners
} from '$lib/stores';
import { convertMessagesToHistory, copyToClipboard, splitStream } from '$lib/utils';
import { cancelOllamaRequest, generateChatCompletion } from '$lib/apis/ollama';
import {
addTagById,
createNewChat,
deleteTagById,
getAllChatTags,
getChatById,
getChatList,
getTagsById,
updateChatById
} from '$lib/apis/chats';
import { generateOpenAIChatCompletion, generateTitle } from '$lib/apis/openai';
import MessageInput from '$lib/components/chat/MessageInput.svelte';
import Messages from '$lib/components/chat/Messages.svelte';
import Navbar from '$lib/components/layout/Navbar.svelte';
import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
import { createOpenAITextStream } from '$lib/apis/streaming';
import { queryMemory } from '$lib/apis/memories';
import type { Writable } from 'svelte/store';
import type { i18n as i18nType } from 'i18next';
import Banner from '../common/Banner.svelte';
import { getUserSettings } from '$lib/apis/users';
const i18n: Writable<i18nType> = getContext('i18n');
export let chatIdProp = '';
let loaded = false;
let stopResponseFlag = false;
let autoScroll = true;
let processing = '';
let messagesContainerElement: HTMLDivElement;
let currentRequestId = null;
let showModelSelector = true;
let selectedModels = [''];
let atSelectedModel: Model | undefined;
let chat = null;
let tags = [];
let title = '';
let prompt = '';
let files = [];
let messages = [];
let history = {
messages: {},
currentId: null
};
$: if (history.currentId !== null) {
let _messages = [];
let currentMessage = history.messages[history.currentId];
while (currentMessage !== null) {
_messages.unshift({ ...currentMessage });
currentMessage =
currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
}
messages = _messages;
} else {
messages = [];
}
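	// Editor's sketch, not part of the original file: the reactive block above walks
	// parentId pointers from history.currentId back to the root, so `messages` always
	// holds the single branch of the conversation tree currently in view. The same
	// traversal as a standalone helper:
	function branchOf(h: { messages: Record<string, any>; currentId: string | null }) {
		const branch: any[] = [];
		let id = h.currentId;
		while (id !== null) {
			branch.unshift(h.messages[id]);
			id = h.messages[id].parentId ?? null;
		}
		return branch;
	}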
$: if (chatIdProp) {
(async () => {
if (await loadChat()) {
await tick();
loaded = true;
window.setTimeout(() => scrollToBottom(), 0);
const chatInput = document.getElementById('chat-textarea');
chatInput?.focus();
} else {
await goto('/');
}
})();
}
onMount(async () => {
if (!$chatId) {
await initNewChat();
} else {
if (!($settings.saveChatHistory ?? true)) {
await goto('/');
}
}
});
//////////////////////////
// Web functions
//////////////////////////
const initNewChat = async () => {
if (currentRequestId !== null) {
await cancelOllamaRequest(localStorage.token, currentRequestId);
currentRequestId = null;
}
window.history.replaceState(history.state, '', `/`);
await chatId.set('');
autoScroll = true;
title = '';
messages = [];
history = {
messages: {},
currentId: null
};
if ($page.url.searchParams.get('models')) {
selectedModels = $page.url.searchParams.get('models')?.split(',');
} else if ($settings?.models) {
selectedModels = $settings?.models;
} else if ($config?.default_models) {
console.log($config?.default_models.split(',') ?? '');
selectedModels = $config?.default_models.split(',');
} else {
selectedModels = [''];
}
if ($page.url.searchParams.get('q')) {
prompt = $page.url.searchParams.get('q') ?? '';
if (prompt) {
await tick();
submitPrompt(prompt);
}
}
selectedModels = selectedModels.map((modelId) =>
$models.map((m) => m.id).includes(modelId) ? modelId : ''
);
const userSettings = await getUserSettings(localStorage.token);
if (userSettings) {
settings.set(userSettings.ui);
} else {
settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
}
const chatInput = document.getElementById('chat-textarea');
setTimeout(() => chatInput?.focus(), 0);
};
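	// Editor's sketch, not part of the original file: initNewChat resolves the model
	// pre-selection through a precedence chain (URL ?models= param, then saved user
	// settings, then the admin default_models config, then one empty slot). Distilled:
	const resolveModels = (
		urlParam: string | null,
		settingsModels: string[] | undefined,
		defaultModels: string | undefined
	): string[] => {
		if (urlParam) return urlParam.split(',');
		if (settingsModels) return settingsModels;
		if (defaultModels) return defaultModels.split(',');
		return [''];
	};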
const loadChat = async () => {
chatId.set(chatIdProp);
chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
await goto('/');
return null;
});
if (chat) {
tags = await getTags();
const chatContent = chat.chat;
if (chatContent) {
console.log(chatContent);
selectedModels =
(chatContent?.models ?? undefined) !== undefined
? chatContent.models
: [chatContent.model ?? ''];
history =
(chatContent?.history ?? undefined) !== undefined
? chatContent.history
: convertMessagesToHistory(chatContent.messages);
title = chatContent.title;
const userSettings = await getUserSettings(localStorage.token);
if (userSettings) {
await settings.set(userSettings.ui);
} else {
await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
}
await settings.set({
...$settings,
system: chatContent.system ?? $settings.system,
params: chatContent.options ?? $settings.params
});
autoScroll = true;
await tick();
if (messages.length > 0) {
history.messages[messages.at(-1).id].done = true;
}
await tick();
return true;
} else {
return null;
}
}
};
const scrollToBottom = async () => {
await tick();
if (messagesContainerElement) {
messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
}
};
//////////////////////////
// Ollama functions
//////////////////////////
const submitPrompt = async (userPrompt, _user = null) => {
console.log('submitPrompt', $chatId);
selectedModels = selectedModels.map((modelId) =>
$models.map((m) => m.id).includes(modelId) ? modelId : ''
);
if (selectedModels.includes('')) {
toast.error($i18n.t('Model not selected'));
} else if (messages.length != 0 && messages.at(-1).done != true) {
// Response not done
console.log('wait');
} else if (
files.length > 0 &&
files.filter((file) => file.upload_status === false).length > 0
) {
// Upload not done
toast.error(
$i18n.t(
`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
)
);
} else {
// Reset chat message textarea height
document.getElementById('chat-textarea').style.height = '';
// Create user message
let userMessageId = uuidv4();
let userMessage = {
id: userMessageId,
parentId: messages.length !== 0 ? messages.at(-1).id : null,
childrenIds: [],
role: 'user',
user: _user ?? undefined,
content: userPrompt,
files: files.length > 0 ? files : undefined,
timestamp: Math.floor(Date.now() / 1000), // Unix epoch
models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
};
// Add message to history and Set currentId to messageId
history.messages[userMessageId] = userMessage;
history.currentId = userMessageId;
// Append messageId to childrenIds of parent message
if (messages.length !== 0) {
history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
}
// Wait until history/message have been updated
await tick();
// Create new chat if only one message in messages
if (messages.length == 1) {
if ($settings.saveChatHistory ?? true) {
chat = await createNewChat(localStorage.token, {
id: $chatId,
title: $i18n.t('New Chat'),
models: selectedModels,
system: $settings.system ?? undefined,
options: {
...($settings.params ?? {})
},
messages: messages,
history: history,
tags: [],
timestamp: Date.now()
});
await chats.set(await getChatList(localStorage.token));
await chatId.set(chat.id);
} else {
await chatId.set('local');
}
await tick();
}
// Reset chat input textarea
prompt = '';
document.getElementById('chat-textarea').style.height = '';
files = [];
// Send prompt
await sendPrompt(userPrompt, userMessageId);
}
};
const sendPrompt = async (prompt, parentId, modelId = null) => {
const _chatId = JSON.parse(JSON.stringify($chatId));
await Promise.all(
(modelId
? [modelId]
: atSelectedModel !== undefined
? [atSelectedModel.id]
: selectedModels
).map(async (modelId) => {
console.log('modelId', modelId);
const model = $models.filter((m) => m.id === modelId).at(0);
if (model) {
// If there are image files, check if model is vision capable
const hasImages = messages.some((message) =>
message.files?.some((file) => file.type === 'image')
);
if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
toast.error(
$i18n.t('Model {{modelName}} is not vision capable', {
modelName: model.name ?? model.id
})
);
}
// Create response message
let responseMessageId = uuidv4();
let responseMessage = {
parentId: parentId,
id: responseMessageId,
childrenIds: [],
role: 'assistant',
content: '',
model: model.id,
modelName: model.name ?? model.id,
userContext: null,
timestamp: Math.floor(Date.now() / 1000) // Unix epoch
};
// Add message to history and Set currentId to messageId
history.messages[responseMessageId] = responseMessage;
history.currentId = responseMessageId;
// Append messageId to childrenIds of parent message
if (parentId !== null) {
history.messages[parentId].childrenIds = [
...history.messages[parentId].childrenIds,
responseMessageId
];
}
await tick();
let userContext = null;
if ($settings?.memory ?? false) {
if (userContext === null) {
const res = await queryMemory(localStorage.token, prompt).catch((error) => {
toast.error(error);
return null;
});
if (res) {
if (res.documents[0].length > 0) {
userContext = res.documents.reduce((acc, doc, index) => {
const createdAtTimestamp = res.metadatas[index][0].created_at;
const createdAtDate = new Date(createdAtTimestamp * 1000)
.toISOString()
.split('T')[0];
acc.push(`${index + 1}. [${createdAtDate}]. ${doc[0]}`);
return acc;
}, []);
}
console.log(userContext);
}
}
}
responseMessage.userContext = userContext;
if (model?.owned_by === 'openai') {
await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
} else if (model) {
await sendPromptOllama(model, prompt, responseMessageId, _chatId);
}
} else {
toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
}
})
);
await chats.set(await getChatList(localStorage.token));
};
const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
model = model.id;
const responseMessage = history.messages[responseMessageId];
// Wait until history/message have been updated
await tick();
// Scroll down
scrollToBottom();
const messagesBody = [
$settings.system || (responseMessage?.userContext ?? null)
? {
role: 'system',
content: `${$settings?.system ?? ''}${
responseMessage?.userContext ?? null
? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
: ''
}`
}
: undefined,
...messages
]
.filter((message) => message?.content?.trim())
.map((message, idx, arr) => {
// Prepare the base message object
const baseMessage = {
role: message.role,
content: message.content
};
// Extract and format image URLs if any exist
const imageUrls = message.files
?.filter((file) => file.type === 'image')
.map((file) => file.url.slice(file.url.indexOf(',') + 1));
// Add images array only if it contains elements
if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
baseMessage.images = imageUrls;
}
return baseMessage;
});
let lastImageIndex = -1;
// Find the index of the last object with images
messagesBody.forEach((item, index) => {
if (item.images) {
lastImageIndex = index;
}
});
// Remove images from all but the last one
messagesBody.forEach((item, index) => {
if (index !== lastImageIndex) {
delete item.images;
}
});
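			// Editor's note: the two forEach passes above keep `images` only on the last
			// image-bearing message, presumably so a single (most recent) image context is
			// sent to the model. The same pruning as one standalone pass (a sketch, not
			// part of the original file):
			const pruneToLastImages = (items: { images?: string[] }[]) => {
				const last = items.map((m) => 'images' in m).lastIndexOf(true);
				items.forEach((m, i) => {
					if (i !== last) delete m.images;
				});
				return items;
			};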
const docs = messages
.filter((message) => message?.files ?? null)
.map((message) =>
message.files.filter((item) => item.type === 'doc' || item.type === 'collection')
)
.flat(1);
const [res, controller] = await generateChatCompletion(localStorage.token, {
model: model,
messages: messagesBody,
options: {
...($settings.params ?? {}),
stop:
$settings?.params?.stop ?? undefined
? $settings.params.stop.map((str) =>
decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
)
: undefined,
num_predict: $settings?.params?.max_tokens ?? undefined,
repeat_penalty: $settings?.params?.frequency_penalty ?? undefined
},
format: $settings.requestFormat ?? undefined,
keep_alive: $settings.keepAlive ?? undefined,
docs: docs.length > 0 ? docs : undefined,
citations: docs.length > 0
});
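		// Editor's note: the stop-sequence mapping above undoes two layers of escaping,
		// presumably applied when the stop strings were stored: JSON.parse('"..."') turns
		// literal \n or \t back into control characters, and decodeURIComponent reverses
		// percent-encoding. Standalone sketch of the same decode:
		const decodeStop = (str: string): string =>
			decodeURIComponent(JSON.parse('"' + str.replace(/"/g, '\\"') + '"'));
		// decodeStop('\\n') yields a real newline; decodeStop('%3C%2Fs%3E') yields '</s>'.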
if (res && res.ok) {
console.log('controller', controller);
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
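		// Editor's note: splitStream (imported from $lib/utils) re-chunks the decoded
		// text on newlines so each read() yields complete NDJSON lines. Presumably it
		// wraps a TransformStream along these lines (a sketch, not the repo's actual
		// implementation):
		const splitOn = (delimiter: string) => {
			let buffer = '';
			return new TransformStream<string, string>({
				transform(chunk, controller) {
					buffer += chunk;
					const parts = buffer.split(delimiter);
					parts.slice(0, -1).forEach((line) => controller.enqueue(line));
					buffer = parts.at(-1) ?? '';
				},
				flush(controller) {
					if (buffer) controller.enqueue(buffer);
				}
			});
		};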
while (true) {
const { value, done } = await reader.read();
if (done || stopResponseFlag || _chatId !== $chatId) {
responseMessage.done = true;
messages = messages;
if (stopResponseFlag) {
controller.abort('User: Stop Response');
await cancelOllamaRequest(localStorage.token, currentRequestId);
}
currentRequestId = null;
break;
}
try {
let lines = value.split('\n');
for (const line of lines) {
if (line !== '') {
console.log(line);
let data = JSON.parse(line);
if ('citations' in data) {
responseMessage.citations = data.citations;
continue;
}
if ('detail' in data) {
throw data;
}
if ('id' in data) {
console.log(data);
currentRequestId = data.id;
} else {
if (data.done == false) {
if (responseMessage.content == '' && data.message.content == '\n') {
continue;
} else {
responseMessage.content += data.message.content;
messages = messages;
}
} else {
responseMessage.done = true;
if (responseMessage.content == '') {
responseMessage.error = true;
responseMessage.content =
'Oops! No text was generated from Ollama. Please try again.';
}
responseMessage.context = data.context ?? null;
responseMessage.info = {
total_duration: data.total_duration,
load_duration: data.load_duration,
sample_count: data.sample_count,
sample_duration: data.sample_duration,
prompt_eval_count: data.prompt_eval_count,
prompt_eval_duration: data.prompt_eval_duration,
eval_count: data.eval_count,
eval_duration: data.eval_duration
};
messages = messages;
if ($settings.notificationEnabled && !document.hasFocus()) {
// `selectedModelfile` no longer exists after this refactor; fall back to the model id
const notification = new Notification(`${model}`, {
body: responseMessage.content,
icon: `${WEBUI_BASE_URL}/static/favicon.png`
});
}
if ($settings.responseAutoCopy) {
copyToClipboard(responseMessage.content);
}
if ($settings.responseAutoPlayback) {
await tick();
document.getElementById(`speak-button-${responseMessage.id}`)?.click();
}
}
}
}
}
} catch (error) {
console.log(error);
if ('detail' in error) {
toast.error(error.detail);
}
break;
}
if (autoScroll) {
scrollToBottom();
}
}
if ($chatId == _chatId) {
if ($settings.saveChatHistory ?? true) {
chat = await updateChatById(localStorage.token, _chatId, {
messages: messages,
history: history,
models: selectedModels
});
await chats.set(await getChatList(localStorage.token));
}
}
} else {
if (res !== null) {
const error = await res.json();
console.log(error);
if ('detail' in error) {
toast.error(error.detail);
responseMessage.content = error.detail;
} else {
toast.error(error.error);
responseMessage.content = error.error;
}
} else {
toast.error(
$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
);
responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
provider: 'Ollama'
});
}
responseMessage.error = true;
responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
provider: 'Ollama'
});
responseMessage.done = true;
messages = messages;
}
stopResponseFlag = false;
await tick();
if (autoScroll) {
scrollToBottom();
}
if (messages.length == 2 && messages.at(1).content !== '') {
window.history.replaceState(history.state, '', `/c/${_chatId}`);
const _title = await generateChatTitle(userPrompt);
await setChatTitle(_chatId, _title);
}
};
const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
const responseMessage = history.messages[responseMessageId];
const docs = messages
.filter((message) => message?.files ?? null)
.map((message) =>
message.files.filter((item) => item.type === 'doc' || item.type === 'collection')
)
.flat(1);
console.log(docs);
scrollToBottom();
try {
const [res, controller] = await generateOpenAIChatCompletion(
localStorage.token,
{
model: model.id,
stream: true,
messages: [
$settings.system || (responseMessage?.userContext ?? null)
? {
role: 'system',
content: `${$settings?.system ?? ''}${
responseMessage?.userContext ?? null
? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
: ''
}`
}
: undefined,
...messages
]
.filter((message) => message?.content?.trim())
.map((message, idx, arr) => ({
role: message.role,
...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
message.role === 'user'
? {
content: [
{
type: 'text',
text:
arr.length - 1 !== idx
? message.content
: message?.raContent ?? message.content
},
...message.files
.filter((file) => file.type === 'image')
.map((file) => ({
type: 'image_url',
image_url: {
url: file.url
}
}))
]
}
: {
content:
arr.length - 1 !== idx
? message.content
: message?.raContent ?? message.content
})
})),
seed: $settings?.params?.seed ?? undefined,
stop:
$settings?.params?.stop ?? undefined
? $settings.params.stop.map((str) =>
decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
)
: undefined,
temperature: $settings?.params?.temperature ?? undefined,
top_p: $settings?.params?.top_p ?? undefined,
frequency_penalty: $settings?.params?.frequency_penalty ?? undefined,
max_tokens: $settings?.params?.max_tokens ?? undefined,
docs: docs.length > 0 ? docs : undefined,
citations: docs.length > 0
},
`${OPENAI_API_BASE_URL}`
);
// Wait until history/message have been updated
await tick();
scrollToBottom();
if (res && res.ok && res.body) {
const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
for await (const update of textStream) {
const { value, done, citations, error } = update;
if (error) {
await handleOpenAIError(error, null, model, responseMessage);
break;
}
if (done || stopResponseFlag || _chatId !== $chatId) {
responseMessage.done = true;
messages = messages;
if (stopResponseFlag) {
controller.abort('User: Stop Response');
}
break;
}
if (citations) {
responseMessage.citations = citations;
continue;
}
if (responseMessage.content == '' && value == '\n') {
continue;
} else {
responseMessage.content += value;
messages = messages;
}
if ($settings.notificationEnabled && !document.hasFocus()) {
const notification = new Notification(`OpenAI ${model}`, {
body: responseMessage.content,
icon: `${WEBUI_BASE_URL}/static/favicon.png`
});
}
if ($settings.responseAutoCopy) {
copyToClipboard(responseMessage.content);
}
if ($settings.responseAutoPlayback) {
await tick();
document.getElementById(`speak-button-${responseMessage.id}`)?.click();
}
if (autoScroll) {
scrollToBottom();
}
}
if ($chatId == _chatId) {
if ($settings.saveChatHistory ?? true) {
chat = await updateChatById(localStorage.token, _chatId, {
models: selectedModels,
messages: messages,
history: history
});
await chats.set(await getChatList(localStorage.token));
}
}
} else {
await handleOpenAIError(null, res, model, responseMessage);
}
} catch (error) {
await handleOpenAIError(error, null, model, responseMessage);
}
messages = messages;
stopResponseFlag = false;
await tick();
if (autoScroll) {
scrollToBottom();
}
if (messages.length == 2) {
window.history.replaceState(history.state, '', `/c/${_chatId}`);
const _title = await generateChatTitle(userPrompt);
await setChatTitle(_chatId, _title);
}
};
const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
let errorMessage = '';
let innerError;
if (error) {
innerError = error;
} else if (res !== null) {
innerError = await res.json();
}
console.error(innerError);
if ('detail' in innerError) {
toast.error(innerError.detail);
errorMessage = innerError.detail;
} else if ('error' in innerError) {
if ('message' in innerError.error) {
toast.error(innerError.error.message);
errorMessage = innerError.error.message;
} else {
toast.error(innerError.error);
errorMessage = innerError.error;
}
} else if ('message' in innerError) {
toast.error(innerError.message);
errorMessage = innerError.message;
}
responseMessage.error = true;
responseMessage.content =
$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
provider: model.name ?? model.id
}) +
'\n' +
errorMessage;
responseMessage.done = true;
messages = messages;
};
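	// Editor's note: handleOpenAIError above normalizes three error shapes: FastAPI's
	// { detail }, OpenAI's { error: { message } }, and plain { error } / { message }
	// objects. The same normalization as a pure helper (sketch only):
	const errorText = (err: any): string => {
		const msg = err?.detail ?? err?.error?.message ?? err?.error ?? err?.message;
		return typeof msg === 'string' ? msg : JSON.stringify(msg ?? err);
	};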
const stopResponse = () => {
stopResponseFlag = true;
console.log('stopResponse');
};
const regenerateResponse = async (message) => {
console.log('regenerateResponse');
if (messages.length != 0) {
let userMessage = history.messages[message.parentId];
let userPrompt = userMessage.content;
if ((userMessage?.models ?? [...selectedModels]).length == 1) {
await sendPrompt(userPrompt, userMessage.id);
} else {
await sendPrompt(userPrompt, userMessage.id, message.model);
}
}
};
const continueGeneration = async () => {
console.log('continueGeneration');
const _chatId = JSON.parse(JSON.stringify($chatId));
if (messages.length != 0 && messages.at(-1).done == true) {
const responseMessage = history.messages[history.currentId];
responseMessage.done = false;
await tick();
const model = $models.filter((m) => m.id === responseMessage.model).at(0);
if (model) {
if (model?.external) {
await sendPromptOpenAI(
model,
history.messages[responseMessage.parentId].content,
responseMessage.id,
_chatId
);
} else
await sendPromptOllama(
model,
history.messages[responseMessage.parentId].content,
responseMessage.id,
_chatId
);
}
} else {
// `modelId` is not in scope here; report the model of the last message instead
toast.error($i18n.t(`Model {{modelId}} not found`, { modelId: messages.at(-1)?.model ?? '' }));
}
};
const generateChatTitle = async (userPrompt) => {
if ($settings?.title?.auto ?? true) {
const model = $models.find((model) => model.id === selectedModels[0]);
const titleModelId =
model?.external ?? false
? $settings?.title?.modelExternal ?? selectedModels[0]
: $settings?.title?.model ?? selectedModels[0];
const titleModel = $models.find((model) => model.id === titleModelId);
console.log(titleModel);
const title = await generateTitle(
localStorage.token,
$settings?.title?.prompt ??
$i18n.t(
"Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title':"
) + ' {{prompt}}',
titleModelId,
userPrompt,
titleModel?.owned_by === 'openai' ?? false
? `${OPENAI_API_BASE_URL}`
: `${OLLAMA_API_BASE_URL}/v1`
);
return title;
} else {
return `${userPrompt}`;
}
};
const setChatTitle = async (_chatId, _title) => {
if (_chatId === $chatId) {
title = _title;
}
if ($settings.saveChatHistory ?? true) {
chat = await updateChatById(localStorage.token, _chatId, { title: _title });
await chats.set(await getChatList(localStorage.token));
}
};
const getTags = async () => {
return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
return [];
});
};
const addTag = async (tagName) => {
const res = await addTagById(localStorage.token, $chatId, tagName);
tags = await getTags();
chat = await updateChatById(localStorage.token, $chatId, {
tags: tags
});
_tags.set(await getAllChatTags(localStorage.token));
};
const deleteTag = async (tagName) => {
const res = await deleteTagById(localStorage.token, $chatId, tagName);
tags = await getTags();
chat = await updateChatById(localStorage.token, $chatId, {
tags: tags
});
_tags.set(await getAllChatTags(localStorage.token));
};
</script>
<svelte:head>
<title>
{title
? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
: `${$WEBUI_NAME}`}
</title>
</svelte:head>
{#if !chatIdProp || (loaded && chatIdProp)}
<div
class="min-h-screen max-h-screen {$showSidebar
? 'md:max-w-[calc(100%-260px)]'
: ''} w-full max-w-full flex flex-col"
>
<Navbar
{title}
bind:selectedModels
bind:showModelSelector
shareEnabled={messages.length > 0}
{chat}
{initNewChat}
/>
{#if $banners.length > 0 && !$chatId && selectedModels.length <= 1}
<div
class="absolute top-[4.25rem] w-full {$showSidebar ? 'md:max-w-[calc(100%-260px)]' : ''}"
>
<div class=" flex flex-col gap-1 w-full">
{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
<Banner
{banner}
on:dismiss={(e) => {
const bannerId = e.detail;
localStorage.setItem(
'dismissedBannerIds',
JSON.stringify(
[
bannerId,
...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
].filter((id) => $banners.find((b) => b.id === id))
)
);
}}
/>
{/each}
</div>
</div>
{/if}
<div class="flex flex-col flex-auto">
<div
class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full"
id="messages-container"
bind:this={messagesContainerElement}
on:scroll={(e) => {
autoScroll =
messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
messagesContainerElement.clientHeight + 5;
}}
>
<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
<Messages
chatId={$chatId}
{selectedModels}
{processing}
bind:history
bind:messages
bind:autoScroll
bind:prompt
bottomPadding={files.length > 0}
{sendPrompt}
{continueGeneration}
{regenerateResponse}
/>
</div>
</div>
</div>
</div>
<MessageInput
bind:files
bind:prompt
bind:autoScroll
bind:atSelectedModel
{selectedModels}
{messages}
{submitPrompt}
{stopResponse}
/>
{/if}
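<!--
	Editor's note: banner dismissal above is persisted client-side only. Dismissed ids
	are stored in localStorage under 'dismissedBannerIds', and every write filters the
	list against the banners that still exist, so stale ids are pruned automatically.
	A standalone sketch of that bookkeeping:

	const dismissBanner = (id: string, liveIds: string[]) => {
		const stored: string[] = JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]');
		localStorage.setItem(
			'dismissedBannerIds',
			JSON.stringify([id, ...stored].filter((i) => liveIds.includes(i)))
		);
	};
-->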
<script lang="ts">
	import { toast } from 'svelte-sonner';
	import { onMount, tick, getContext } from 'svelte';

	import { type Model, mobile, settings, showSidebar, models } from '$lib/stores';
	import { blobToFile, calculateSHA256, findWordIndices } from '$lib/utils';

	import {
@@ -27,7 +27,9 @@
	export let stopResponse: Function;
	export let autoScroll = true;

	export let atSelectedModel: Model | undefined;
	export let selectedModels: [''];

	let chatTextAreaElement: HTMLTextAreaElement;
	let filesInputElement;
@@ -52,6 +54,11 @@
	let speechRecognition;
let visionCapableModels = [];
$: visionCapableModels = [...(atSelectedModel ? [atSelectedModel] : selectedModels)].filter(
(model) => $models.find((m) => m.id === model)?.info?.meta?.capabilities?.vision ?? true
);
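	// Editor's note: the reactive statement above treats vision support as opt-out:
	// a model with no capabilities metadata (?? true) counts as vision capable, so only
	// models explicitly flagged vision: false block image uploads. Distilled (a sketch,
	// not part of the original file):
	const supportsVision = (m?: { info?: { meta?: { capabilities?: { vision?: boolean } } } }) =>
		m?.info?.meta?.capabilities?.vision ?? true;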
	$: if (prompt) {
		if (chatTextAreaElement) {
			chatTextAreaElement.style.height = '';
@@ -358,6 +365,10 @@
			inputFiles.forEach((file) => {
				console.log(file, file.name.split('.').at(-1));
				if (['image/gif', 'image/webp', 'image/jpeg', 'image/png'].includes(file['type'])) {
if (visionCapableModels.length === 0) {
toast.error($i18n.t('Selected model(s) do not support image inputs'));
return;
}
					let reader = new FileReader();
					reader.onload = (event) => {
						files = [
@@ -429,8 +440,8 @@
<div class="fixed bottom-0 {$showSidebar ? 'left-0 md:left-[260px]' : 'left-0'} right-0">
	<div class="w-full">
		<div class=" -mb-0.5 mx-auto inset-x-0 bg-transparent flex justify-center">
			<div class="flex flex-col max-w-6xl px-2.5 md:px-6 w-full">
				<div class="relative">
					{#if autoScroll === false && messages.length > 0}
						<div class=" absolute -top-12 left-0 right-0 flex justify-center z-30">
@@ -494,12 +505,12 @@
						bind:chatInputPlaceholder
						{messages}
						on:select={(e) => {
							atSelectedModel = e.detail;
							chatTextAreaElement?.focus();
						}}
					/>

					{#if atSelectedModel !== undefined}
						<div
							class="px-3 py-2.5 text-left w-full flex justify-between items-center absolute bottom-0 left-0 right-0 bg-gradient-to-t from-50% from-white dark:from-gray-900"
						>
@@ -508,21 +519,21 @@
								crossorigin="anonymous"
								alt="model profile"
								class="size-5 max-w-[28px] object-cover rounded-full"
								src={$models.find((model) => model.id === atSelectedModel.id)?.info?.meta
									?.profile_image_url ??
									($i18n.language === 'dg-DG'
										? `/doge.png`
										: `${WEBUI_BASE_URL}/static/favicon.png`)}
							/>
							<div>
								Talking to <span class=" font-medium">{atSelectedModel.name}</span>
							</div>
						</div>
						<div>
							<button
								class="flex items-center"
								on:click={() => {
									atSelectedModel = undefined;
								}}
							>
								<XMark />
@@ -535,7 +546,7 @@
	</div>

	<div class="bg-white dark:bg-gray-900">
		<div class="max-w-6xl px-2.5 md:px-6 mx-auto inset-x-0">
			<div class=" pb-2">
				<input
					bind:this={filesInputElement}
@@ -550,6 +561,12 @@
							if (
								['image/gif', 'image/webp', 'image/jpeg', 'image/png'].includes(file['type'])
							) {
if (visionCapableModels.length === 0) {
toast.error($i18n.t('Selected model(s) do not support image inputs'));
inputFiles = null;
filesInputElement.value = '';
return;
}
								let reader = new FileReader();
								reader.onload = (event) => {
									files = [
@@ -589,6 +606,7 @@
					dir={$settings?.chatDirection ?? 'LTR'}
					class=" flex flex-col relative w-full rounded-3xl px-1.5 bg-gray-50 dark:bg-gray-850 dark:text-gray-100"
					on:submit|preventDefault={() => {
// check if selectedModels support image input
						submitPrompt(prompt, user);
					}}
				>
@@ -597,7 +615,36 @@
						{#each files as file, fileIdx}
							<div class=" relative group">
								{#if file.type === 'image'}
									<div class="relative">
<img
src={file.url}
alt="input"
class=" h-16 w-16 rounded-xl object-cover"
/>
{#if atSelectedModel ? visionCapableModels.length === 0 : selectedModels.length !== visionCapableModels.length}
<Tooltip
className=" absolute top-1 left-1"
content={$i18n.t('{{ models }}', {
models: [...(atSelectedModel ? [atSelectedModel] : selectedModels)]
.filter((id) => !visionCapableModels.includes(id))
.join(', ')
})}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 24 24"
fill="currentColor"
class="size-4 fill-yellow-300"
>
<path
fill-rule="evenodd"
d="M9.401 3.003c1.155-2 4.043-2 5.197 0l7.355 12.748c1.154 2-.29 4.5-2.599 4.5H4.645c-2.309 0-3.752-2.5-2.598-4.5L9.4 3.003ZM12 8.25a.75.75 0 0 1 .75.75v3.75a.75.75 0 0 1-1.5 0V9a.75.75 0 0 1 .75-.75Zm0 8.25a.75.75 0 1 0 0-1.5.75.75 0 0 0 0 1.5Z"
clip-rule="evenodd"
/>
</svg>
</Tooltip>
{/if}
</div>
								{:else if file.type === 'doc'}
									<div
										class="h-16 w-[15rem] flex items-center space-x-3 px-2.5 dark:bg-gray-600 rounded-xl border border-gray-200 dark:border-none"
@@ -883,7 +930,7 @@
							if (e.key === 'Escape') {
								console.log('Escape');
								atSelectedModel = undefined;
							}
						}}
						rows="1"
......
<script lang="ts">
	import { v4 as uuidv4 } from 'uuid';

	import { chats, config, settings, user as _user, mobile } from '$lib/stores';
	import { tick, getContext } from 'svelte';

	import { toast } from 'svelte-sonner';
@@ -26,7 +26,6 @@
	export let user = $_user;

	export let prompt;
	export let processing = '';
	export let bottomPadding = false;
	export let autoScroll;
@@ -34,7 +33,6 @@
	export let messages = [];

	export let selectedModels;

	$: if (autoScroll && bottomPadding) {
		(async () => {
@@ -247,9 +245,7 @@
	<div class="h-full flex mb-16">
		{#if messages.length == 0}
			<Placeholder
				modelIds={selectedModels}
				submitPrompt={async (p) => {
					let text = p;
@@ -316,7 +312,6 @@
					{#key message.id}
						<ResponseMessage
							{message}
							siblings={history.messages[message.parentId]?.childrenIds ?? []}
							isLastMessage={messageIdx + 1 === messages.length}
							{readOnly}
@@ -348,7 +343,6 @@
							{chatId}
							parentMessage={history.messages[message.parentId]}
							{messageIdx}
							{updateChatMessages}
							{confirmEditResponseMessage}
							{rateMessage}
......