chenpangpang / open-webui / Commits / b8902072

Unverified commit b8902072, authored Mar 10, 2024 by Ased Mammad, committed by GitHub on Mar 10, 2024.

Merge branch 'dev' into feat/add-i18n

Parents: 6e57fda8, 96ada232
Changes: 27 changed files in this merge; this page shows 20 of them, with 682 additions and 347 deletions (+682 -347).
backend/apps/images/main.py (+5 -5)
backend/apps/ollama/main.py (+17 -3)
backend/apps/openai/main.py (+22 -4)
backend/apps/rag/main.py (+14 -75)
backend/apps/rag/utils.py (+97 -0)
backend/config.py (+6 -1)
backend/main.py (+172 -1)
backend/requirements.txt (+2 -1)
src/lib/apis/index.ts (+62 -0)
src/lib/apis/rag/index.ts (+1 -1)
src/lib/components/admin/Settings/Users.svelte (+114 -0)
src/lib/components/chat/MessageInput.svelte (+2 -2)
src/lib/components/chat/Settings/Account.svelte (+4 -2)
src/lib/components/chat/Settings/Audio.svelte (+1 -1)
src/lib/components/chat/Settings/Chats.svelte (+3 -1)
src/lib/components/chat/Settings/Connections.svelte (+1 -1)
src/lib/components/chat/Settings/General.svelte (+1 -1)
src/lib/components/chat/Settings/Images.svelte (+1 -1)
src/lib/components/chat/Settings/Interface.svelte (+6 -27)
src/lib/components/chat/Settings/Models.svelte (+151 -220)
backend/apps/images/main.py (view file @ b8902072)

@@ -250,7 +250,7 @@ class GenerateImageForm(BaseModel):
     model: Optional[str] = None
     prompt: str
     n: int = 1
-    size: str = "512x512"
+    size: Optional[str] = None
     negative_prompt: Optional[str] = None

@@ -278,8 +278,7 @@ def generate_image(
     user=Depends(get_current_user),
 ):
-    print(form_data)
     r = None
     try:
         if app.state.ENGINE == "openai":

@@ -291,10 +290,9 @@ def generate_image(
                 "model": app.state.MODEL if app.state.MODEL != "" else "dall-e-2",
                 "prompt": form_data.prompt,
                 "n": form_data.n,
-                "size": form_data.size,
+                "size": form_data.size if form_data.size else app.state.IMAGE_SIZE,
                 "response_format": "b64_json",
             }

             r = requests.post(
                 url=f"https://api.openai.com/v1/images/generations",
                 json=data,

@@ -359,4 +357,6 @@ def generate_image(
     except Exception as e:
         print(e)
+        if r:
+            print(r.json())
         raise HTTPException(status_code=400, detail=ERROR_MESSAGES.DEFAULT(e))
backend/apps/ollama/main.py (view file @ b8902072)

@@ -15,7 +15,7 @@ import asyncio
 from apps.web.models.users import Users
 from constants import ERROR_MESSAGES
 from utils.utils import decode_token, get_current_user, get_admin_user
-from config import OLLAMA_BASE_URLS
+from config import OLLAMA_BASE_URLS, MODEL_FILTER_ENABLED, MODEL_FILTER_LIST
 from typing import Optional, List, Union

@@ -29,6 +29,10 @@ app.add_middleware(
     allow_headers=["*"],
 )

+app.state.MODEL_FILTER_ENABLED = MODEL_FILTER_ENABLED
+app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST
+
 app.state.OLLAMA_BASE_URLS = OLLAMA_BASE_URLS
 app.state.MODELS = {}

@@ -129,9 +133,19 @@ async def get_all_models():
 async def get_ollama_tags(
     url_idx: Optional[int] = None, user=Depends(get_current_user)
 ):
     if url_idx == None:
-        return await get_all_models()
+        models = await get_all_models()
+        if app.state.MODEL_FILTER_ENABLED:
+            if user.role == "user":
+                models["models"] = list(
+                    filter(
+                        lambda model: model["name"] in app.state.MODEL_FILTER_LIST,
+                        models["models"],
+                    )
+                )
+                return models
+        return models
     else:
         url = app.state.OLLAMA_BASE_URLS[url_idx]
         try:
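For illustration only (not part of the diff): how the whitelist filter added to get_ollama_tags behaves when MODEL_FILTER_ENABLED is on and the requester has role "user". The sample payload is made up; the filter expression mirrors the code above.

# Standalone sketch of the filtering logic added to get_ollama_tags
MODEL_FILTER_LIST = ["llama2:latest"]

models = {
    "models": [
        {"name": "llama2:latest", "size": 3825819519},
        {"name": "mistral:latest", "size": 4109865159},
    ]
}

models["models"] = list(
    filter(lambda model: model["name"] in MODEL_FILTER_LIST, models["models"])
)

print([m["name"] for m in models["models"]])
# -> ['llama2:latest']
# The OpenAI-side change below applies the same idea to models["data"], matching on "id".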
backend/apps/openai/main.py (view file @ b8902072)

@@ -18,7 +18,13 @@ from utils.utils import (
     get_verified_user,
     get_admin_user,
 )
-from config import OPENAI_API_BASE_URLS, OPENAI_API_KEYS, CACHE_DIR
+from config import (
+    OPENAI_API_BASE_URLS,
+    OPENAI_API_KEYS,
+    CACHE_DIR,
+    MODEL_FILTER_ENABLED,
+    MODEL_FILTER_LIST,
+)
 from typing import List, Optional

@@ -34,6 +40,9 @@ app.add_middleware(
     allow_headers=["*"],
 )

+app.state.MODEL_FILTER_ENABLED = MODEL_FILTER_ENABLED
+app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST
+
 app.state.OPENAI_API_BASE_URLS = OPENAI_API_BASE_URLS
 app.state.OPENAI_API_KEYS = OPENAI_API_KEYS

@@ -186,12 +195,21 @@ async def get_all_models():
     return models


 # , user=Depends(get_current_user)
 @app.get("/models")
 @app.get("/models/{url_idx}")
-async def get_models(url_idx: Optional[int] = None):
+async def get_models(url_idx: Optional[int] = None, user=Depends(get_current_user)):
     if url_idx == None:
-        return await get_all_models()
+        models = await get_all_models()
+        if app.state.MODEL_FILTER_ENABLED:
+            if user.role == "user":
+                models["data"] = list(
+                    filter(
+                        lambda model: model["id"] in app.state.MODEL_FILTER_LIST,
+                        models["data"],
+                    )
+                )
+                return models
+        return models
     else:
         url = app.state.OPENAI_API_BASE_URLS[url_idx]
         try:
backend/apps/rag/main.py (view file @ b8902072)

@@ -44,6 +44,8 @@ from apps.web.models.documents import (
     DocumentResponse,
 )

+from apps.rag.utils import query_doc, query_collection
+
 from utils.misc import (
     calculate_sha256,
     calculate_sha256_string,

@@ -248,21 +250,18 @@ class QueryDocForm(BaseModel):

 @app.post("/query/doc")
-def query_doc(
+def query_doc_handler(
     form_data: QueryDocForm,
     user=Depends(get_current_user),
 ):
     try:
-        # if you use docker use the model from the environment variable
-        collection = CHROMA_CLIENT.get_collection(
-            name=form_data.collection_name,
+        return query_doc(
+            collection_name=form_data.collection_name,
+            query=form_data.query,
+            k=form_data.k if form_data.k else app.state.TOP_K,
             embedding_function=app.state.sentence_transformer_ef,
         )
-        result = collection.query(
-            query_texts=[form_data.query],
-            n_results=form_data.k if form_data.k else app.state.TOP_K,
-        )
-
-        return result
     except Exception as e:
         print(e)
         raise HTTPException(

@@ -277,76 +276,16 @@ class QueryCollectionsForm(BaseModel):
     k: Optional[int] = None


-def merge_and_sort_query_results(query_results, k):
-    # Initialize lists to store combined data
-    combined_ids = []
-    combined_distances = []
-    combined_metadatas = []
-    combined_documents = []
-
-    # Combine data from each dictionary
-    for data in query_results:
-        combined_ids.extend(data["ids"][0])
-        combined_distances.extend(data["distances"][0])
-        combined_metadatas.extend(data["metadatas"][0])
-        combined_documents.extend(data["documents"][0])
-
-    # Create a list of tuples (distance, id, metadata, document)
-    combined = list(
-        zip(combined_distances, combined_ids, combined_metadatas, combined_documents)
-    )
-
-    # Sort the list based on distances
-    combined.sort(key=lambda x: x[0])
-
-    # Unzip the sorted list
-    sorted_distances, sorted_ids, sorted_metadatas, sorted_documents = zip(*combined)
-
-    # Slicing the lists to include only k elements
-    sorted_distances = list(sorted_distances)[:k]
-    sorted_ids = list(sorted_ids)[:k]
-    sorted_metadatas = list(sorted_metadatas)[:k]
-    sorted_documents = list(sorted_documents)[:k]
-
-    # Create the output dictionary
-    merged_query_results = {
-        "ids": [sorted_ids],
-        "distances": [sorted_distances],
-        "metadatas": [sorted_metadatas],
-        "documents": [sorted_documents],
-        "embeddings": None,
-        "uris": None,
-        "data": None,
-    }
-
-    return merged_query_results
-
-
 @app.post("/query/collection")
-def query_collection(
+def query_collection_handler(
     form_data: QueryCollectionsForm,
     user=Depends(get_current_user),
 ):
-    results = []
-
-    for collection_name in form_data.collection_names:
-        try:
-            # if you use docker use the model from the environment variable
-            collection = CHROMA_CLIENT.get_collection(
-                name=collection_name,
-                embedding_function=app.state.sentence_transformer_ef,
-            )
-
-            result = collection.query(
-                query_texts=[form_data.query],
-                n_results=form_data.k if form_data.k else app.state.TOP_K,
-            )
-            results.append(result)
-        except:
-            pass
-
-    return merge_and_sort_query_results(
-        results, form_data.k if form_data.k else app.state.TOP_K
-    )
+    return query_collection(
+        collection_names=form_data.collection_names,
+        query=form_data.query,
+        k=form_data.k if form_data.k else app.state.TOP_K,
+        embedding_function=app.state.sentence_transformer_ef,
+    )
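For illustration only (not part of the diff): the request bodies the two renamed handlers accept, taken from QueryDocForm and QueryCollectionsForm above. The mount path of the RAG sub-app and the port are assumptions.

# Hypothetical client calls; field names come from the form models above.
import requests

headers = {"Authorization": "Bearer <token>"}  # placeholder

# Query a single document collection (QueryDocForm: collection_name, query, optional k)
requests.post(
    "http://localhost:8080/rag/api/v1/query/doc",         # assumed mount path
    headers=headers,
    json={"collection_name": "resume-abc123", "query": "work experience", "k": 4},
)

# Query several collections at once (QueryCollectionsForm: collection_names, query, optional k)
requests.post(
    "http://localhost:8080/rag/api/v1/query/collection",  # assumed mount path
    headers=headers,
    json={"collection_names": ["handbook", "faq"], "query": "vacation policy"},  # k omitted -> TOP_K
)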
backend/apps/rag/utils.py (new file, 0 → 100644, view file @ b8902072)

import re
from typing import List

from config import CHROMA_CLIENT


def query_doc(collection_name: str, query: str, k: int, embedding_function):
    try:
        # if you use docker use the model from the environment variable
        collection = CHROMA_CLIENT.get_collection(
            name=collection_name,
            embedding_function=embedding_function,
        )
        result = collection.query(
            query_texts=[query],
            n_results=k,
        )
        return result
    except Exception as e:
        raise e


def merge_and_sort_query_results(query_results, k):
    # Initialize lists to store combined data
    combined_ids = []
    combined_distances = []
    combined_metadatas = []
    combined_documents = []

    # Combine data from each dictionary
    for data in query_results:
        combined_ids.extend(data["ids"][0])
        combined_distances.extend(data["distances"][0])
        combined_metadatas.extend(data["metadatas"][0])
        combined_documents.extend(data["documents"][0])

    # Create a list of tuples (distance, id, metadata, document)
    combined = list(
        zip(combined_distances, combined_ids, combined_metadatas, combined_documents)
    )

    # Sort the list based on distances
    combined.sort(key=lambda x: x[0])

    # Unzip the sorted list
    sorted_distances, sorted_ids, sorted_metadatas, sorted_documents = zip(*combined)

    # Slicing the lists to include only k elements
    sorted_distances = list(sorted_distances)[:k]
    sorted_ids = list(sorted_ids)[:k]
    sorted_metadatas = list(sorted_metadatas)[:k]
    sorted_documents = list(sorted_documents)[:k]

    # Create the output dictionary
    merged_query_results = {
        "ids": [sorted_ids],
        "distances": [sorted_distances],
        "metadatas": [sorted_metadatas],
        "documents": [sorted_documents],
        "embeddings": None,
        "uris": None,
        "data": None,
    }

    return merged_query_results


def query_collection(collection_names: List[str], query: str, k: int, embedding_function):
    results = []

    for collection_name in collection_names:
        try:
            # if you use docker use the model from the environment variable
            collection = CHROMA_CLIENT.get_collection(
                name=collection_name,
                embedding_function=embedding_function,
            )

            result = collection.query(
                query_texts=[query],
                n_results=k,
            )
            results.append(result)
        except:
            pass

    return merge_and_sort_query_results(results, k)


def rag_template(template: str, context: str, query: str):
    template = re.sub(r"\[context\]", context, template)
    template = re.sub(r"\[query\]", query, template)
    return template
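For illustration only (not part of the new file above): a worked example of merge_and_sort_query_results and rag_template, assuming the backend package is on the import path. The sample result dicts follow the shape of Chroma's query() output used above.

from apps.rag.utils import merge_and_sort_query_results, rag_template

res_a = {
    "ids": [["a1", "a2"]],
    "distances": [[0.30, 0.90]],
    "metadatas": [[{"source": "doc-a"}, {"source": "doc-a"}]],
    "documents": [["alpha chunk", "another alpha chunk"]],
}
res_b = {
    "ids": [["b1"]],
    "distances": [[0.10]],
    "metadatas": [[{"source": "doc-b"}]],
    "documents": [["beta chunk"]],
}

merged = merge_and_sort_query_results([res_a, res_b], k=2)
# Chunks are re-ranked by ascending distance across both results, then cut to k:
# merged["ids"]       -> [["b1", "a1"]]
# merged["documents"] -> [["beta chunk", "alpha chunk"]]

prompt = rag_template(
    template="Use the following context:\n[context]\nQuestion: [query]",
    context="beta chunk alpha chunk",
    query="What is alpha?",
)
# -> "Use the following context:\nbeta chunk alpha chunk\nQuestion: What is alpha?"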
backend/config.py (view file @ b8902072)

@@ -251,7 +251,7 @@ OPENAI_API_BASE_URLS = (
     OPENAI_API_BASE_URLS if OPENAI_API_BASE_URLS != "" else OPENAI_API_BASE_URL
 )

-OPENAI_API_BASE_URLS = [url.strip() for url in OPENAI_API_BASE_URL.split(";")]
+OPENAI_API_BASE_URLS = [url.strip() for url in OPENAI_API_BASE_URLS.split(";")]

 ####################################

@@ -292,6 +292,11 @@ DEFAULT_USER_ROLE = os.getenv("DEFAULT_USER_ROLE", "pending")

 USER_PERMISSIONS = {"chat": {"deletion": True}}

+MODEL_FILTER_ENABLED = os.environ.get("MODEL_FILTER_ENABLED", False)
+MODEL_FILTER_LIST = os.environ.get("MODEL_FILTER_LIST", "")
+MODEL_FILTER_LIST = [model.strip() for model in MODEL_FILTER_LIST.split(";")]
+
 ####################################
 # WEBUI_VERSION
 ####################################
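A quick sketch (not part of the diff) of how the two new environment variables are read by the block above. Note that os.environ.get returns a string whenever the variable is set, so any non-empty value for MODEL_FILTER_ENABLED is truthy; the False default only applies when the variable is unset.

import os

os.environ["MODEL_FILTER_ENABLED"] = "True"
os.environ["MODEL_FILTER_LIST"] = "llama2:latest; mistral:latest ;gpt-3.5-turbo"

# Mirrors the config.py lines added above
MODEL_FILTER_ENABLED = os.environ.get("MODEL_FILTER_ENABLED", False)
MODEL_FILTER_LIST = os.environ.get("MODEL_FILTER_LIST", "")
MODEL_FILTER_LIST = [model.strip() for model in MODEL_FILTER_LIST.split(";")]

print(MODEL_FILTER_ENABLED)  # -> "True" (a string, not a bool)
print(MODEL_FILTER_LIST)     # -> ['llama2:latest', 'mistral:latest', 'gpt-3.5-turbo']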
backend/main.py (view file @ b8902072)

@@ -12,6 +12,7 @@ from fastapi import HTTPException
 from fastapi.middleware.wsgi import WSGIMiddleware
 from fastapi.middleware.cors import CORSMiddleware
 from starlette.exceptions import HTTPException as StarletteHTTPException
+from starlette.middleware.base import BaseHTTPMiddleware

 from apps.ollama.main import app as ollama_app

@@ -22,8 +23,22 @@ from apps.images.main import app as images_app
 from apps.rag.main import app as rag_app
 from apps.web.main import app as webui_app

+from pydantic import BaseModel
+from typing import List
+
-from config import WEBUI_NAME, ENV, VERSION, CHANGELOG, FRONTEND_BUILD_DIR
 from utils.utils import get_admin_user
+from apps.rag.utils import query_doc, query_collection, rag_template
+
+from config import (
+    WEBUI_NAME,
+    ENV,
+    VERSION,
+    CHANGELOG,
+    FRONTEND_BUILD_DIR,
+    MODEL_FILTER_ENABLED,
+    MODEL_FILTER_LIST,
+)
 from constants import ERROR_MESSAGES

@@ -40,6 +55,9 @@ class SPAStaticFiles(StaticFiles):
 app = FastAPI(docs_url="/docs" if ENV == "dev" else None, redoc_url=None)

+app.state.MODEL_FILTER_ENABLED = MODEL_FILTER_ENABLED
+app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST
+
 origins = ["*"]

 app.add_middleware(

@@ -56,6 +74,126 @@ async def on_startup():
     await litellm_app_startup()


+class RAGMiddleware(BaseHTTPMiddleware):
+    async def dispatch(self, request: Request, call_next):
+        if request.method == "POST" and (
+            "/api/chat" in request.url.path or "/chat/completions" in request.url.path
+        ):
+            print(request.url.path)
+
+            # Read the original request body
+            body = await request.body()
+            # Decode body to string
+            body_str = body.decode("utf-8")
+            # Parse string to JSON
+            data = json.loads(body_str) if body_str else {}
+
+            # Example: Add a new key-value pair or modify existing ones
+            # data["modified"] = True  # Example modification
+            if "docs" in data:
+                docs = data["docs"]
+                print(docs)
+
+                last_user_message_idx = None
+                for i in range(len(data["messages"]) - 1, -1, -1):
+                    if data["messages"][i]["role"] == "user":
+                        last_user_message_idx = i
+                        break
+
+                user_message = data["messages"][last_user_message_idx]
+
+                if isinstance(user_message["content"], list):
+                    # Handle list content input
+                    content_type = "list"
+                    query = ""
+                    for content_item in user_message["content"]:
+                        if content_item["type"] == "text":
+                            query = content_item["text"]
+                            break
+                elif isinstance(user_message["content"], str):
+                    # Handle text content input
+                    content_type = "text"
+                    query = user_message["content"]
+                else:
+                    # Fallback in case the input does not match expected types
+                    content_type = None
+                    query = ""
+
+                relevant_contexts = []
+
+                for doc in docs:
+                    context = None
+                    try:
+                        if doc["type"] == "collection":
+                            context = query_collection(
+                                collection_names=doc["collection_names"],
+                                query=query,
+                                k=rag_app.state.TOP_K,
+                                embedding_function=rag_app.state.sentence_transformer_ef,
+                            )
+                        else:
+                            context = query_doc(
+                                collection_name=doc["collection_name"],
+                                query=query,
+                                k=rag_app.state.TOP_K,
+                                embedding_function=rag_app.state.sentence_transformer_ef,
+                            )
+                    except Exception as e:
+                        print(e)
+                        context = None
+
+                    relevant_contexts.append(context)
+
+                context_string = ""
+                for context in relevant_contexts:
+                    if context:
+                        context_string += " ".join(context["documents"][0]) + "\n"
+
+                ra_content = rag_template(
+                    template=rag_app.state.RAG_TEMPLATE,
+                    context=context_string,
+                    query=query,
+                )
+
+                if content_type == "list":
+                    new_content = []
+                    for content_item in user_message["content"]:
+                        if content_item["type"] == "text":
+                            # Update the text item's content with ra_content
+                            new_content.append({"type": "text", "text": ra_content})
+                        else:
+                            # Keep other types of content as they are
+                            new_content.append(content_item)
+                    new_user_message = {**user_message, "content": new_content}
+                else:
+                    new_user_message = {
+                        **user_message,
+                        "content": ra_content,
+                    }
+
+                data["messages"][last_user_message_idx] = new_user_message
+                del data["docs"]
+
+                print(data["messages"])
+
+            modified_body_bytes = json.dumps(data).encode("utf-8")
+
+            # Create a new request with the modified body
+            scope = request.scope
+            scope["body"] = modified_body_bytes
+            request = Request(scope, receive=lambda: self._receive(modified_body_bytes))
+
+        response = await call_next(request)
+        return response
+
+    async def _receive(self, body: bytes):
+        return {"type": "http.request", "body": body, "more_body": False}
+
+
+app.add_middleware(RAGMiddleware)
+
+
 @app.middleware("http")
 async def check_url(request: Request, call_next):
     start_time = int(time.time())
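For illustration only (not part of the diff): the shape of a chat request body that the new RAGMiddleware rewrites. The field names follow the middleware code above; the concrete values, and the "type": "doc" marker for a single document, are assumptions (any type other than "collection" falls through to query_doc).

# Hypothetical POST body to /api/chat or /chat/completions before the middleware runs
data = {
    "model": "llama2:latest",
    "docs": [
        {"type": "doc", "collection_name": "resume-abc123"},               # -> query_doc
        {"type": "collection", "collection_names": ["handbook", "faq"]},   # -> query_collection
    ],
    "messages": [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "What does the handbook say about PTO?"},
    ],
}

# dispatch() then: finds the last "user" message, retrieves top-K chunks for every
# entry in "docs", joins them into one context string, replaces the user message with
# rag_template(RAG_TEMPLATE, context, query), deletes "docs", and re-encodes the body
# before handing the request to the downstream route.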
@@ -90,6 +228,39 @@ async def get_app_config():
     }


+@app.get("/api/config/model/filter")
+async def get_model_filter_config(user=Depends(get_admin_user)):
+    return {
+        "enabled": app.state.MODEL_FILTER_ENABLED,
+        "models": app.state.MODEL_FILTER_LIST,
+    }
+
+
+class ModelFilterConfigForm(BaseModel):
+    enabled: bool
+    models: List[str]
+
+
+@app.post("/api/config/model/filter")
+async def get_model_filter_config(
+    form_data: ModelFilterConfigForm, user=Depends(get_admin_user)
+):
+    app.state.MODEL_FILTER_ENABLED = form_data.enabled
+    app.state.MODEL_FILTER_LIST = form_data.models
+
+    ollama_app.state.MODEL_FILTER_ENABLED = app.state.MODEL_FILTER_ENABLED
+    ollama_app.state.MODEL_FILTER_LIST = app.state.MODEL_FILTER_LIST
+
+    openai_app.state.MODEL_FILTER_ENABLED = app.state.MODEL_FILTER_ENABLED
+    openai_app.state.MODEL_FILTER_LIST = app.state.MODEL_FILTER_LIST
+
+    return {
+        "enabled": app.state.MODEL_FILTER_ENABLED,
+        "models": app.state.MODEL_FILTER_LIST,
+    }
+
+
 @app.get("/api/version")
 async def get_app_config():
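For illustration only (not part of the diff): exercising the new admin endpoints. The base URL and token are placeholders. Note the design choice visible above: the POST handler also pushes the setting into ollama_app.state and openai_app.state, so /api/tags and /models are immediately filtered for users with the "user" role without a restart.

import requests

BASE_URL = "http://localhost:8080"                    # assumed local backend
headers = {"Authorization": "Bearer <admin-token>"}   # placeholder admin token

# Read the current whitelist configuration
print(requests.get(f"{BASE_URL}/api/config/model/filter", headers=headers).json())
# e.g. -> {"enabled": False, "models": [""]}

# Enable the whitelist for two models
payload = {"enabled": True, "models": ["llama2:latest", "gpt-3.5-turbo"]}
r = requests.post(f"{BASE_URL}/api/config/model/filter", headers=headers, json=payload)
print(r.json())
# -> {"enabled": True, "models": ["llama2:latest", "gpt-3.5-turbo"]}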
backend/requirements.txt (view file @ b8902072)

@@ -16,7 +16,8 @@ aiohttp
 peewee
 bcrypt

-litellm
+litellm==1.30.7

 argon2-cffi
 apscheduler
 google-generativeai
src/lib/apis/index.ts (view file @ b8902072)

@@ -77,3 +77,65 @@ export const getVersionUpdates = async () => {

 	return res;
 };
+
+export const getModelFilterConfig = async (token: string) => {
+	let error = null;
+
+	const res = await fetch(`${WEBUI_BASE_URL}/api/config/model/filter`, {
+		method: 'GET',
+		headers: {
+			'Content-Type': 'application/json',
+			Authorization: `Bearer ${token}`
+		}
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			console.log(err);
+			error = err;
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};
+
+export const updateModelFilterConfig = async (
+	token: string,
+	enabled: boolean,
+	models: string[]
+) => {
+	let error = null;
+
+	const res = await fetch(`${WEBUI_BASE_URL}/api/config/model/filter`, {
+		method: 'POST',
+		headers: {
+			'Content-Type': 'application/json',
+			Authorization: `Bearer ${token}`
+		},
+		body: JSON.stringify({
+			enabled: enabled,
+			models: models
+		})
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			console.log(err);
+			error = err;
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};
src/lib/apis/rag/index.ts (view file @ b8902072)

@@ -252,7 +252,7 @@ export const queryCollection = async (
 	token: string,
 	collection_names: string,
 	query: string,
-	k: number
+	k: number | null = null
 ) => {
 	let error = null;
src/lib/components/admin/Settings/Users.svelte (view file @ b8902072)
<script lang="ts">
import { getModelFilterConfig, updateModelFilterConfig } from '$lib/apis';
import { getSignUpEnabledStatus, toggleSignUpEnabledStatus } from '$lib/apis/auths';
import { getUserPermissions, updateUserPermissions } from '$lib/apis/users';
import { onMount, getContext } from 'svelte';
import { models } from '$lib/stores';
const i18n = getContext('i18n');
export let saveHandler: Function;
let whitelistEnabled = false;
let whitelistModels = [''];
let permissions = {
chat: {
deletion: true
...
...
@@ -15,6 +20,13 @@
onMount(async () => {
permissions = await getUserPermissions(localStorage.token);
const res = await getModelFilterConfig(localStorage.token);
if (res) {
whitelistEnabled = res.enabled;
whitelistModels = res.models.length > 0 ? res.models : [''];
}
});
</script>
...
...
@@ -23,6 +35,8 @@
on:submit|preventDefault={async () => {
// console.log('submit');
await updateUserPermissions(localStorage.token, permissions);
await updateModelFilterConfig(localStorage.token, whitelistEnabled, whitelistModels);
saveHandler();
}}
>
...
...
@@ -71,6 +85,106 @@
</button>
</div>
</div>
<hr class=" dark:border-gray-700 my-2" />
<div class="mt-2 space-y-3 pr-1.5">
<div>
<div class="mb-2">
<div class="flex justify-between items-center text-xs">
<div class=" text-sm font-medium">Manage Models</div>
</div>
</div>
<div class=" space-y-3">
<div>
<div class="flex justify-between items-center text-xs">
<div class=" text-xs font-medium">Model Whitelisting</div>
<button
class=" text-xs font-medium text-gray-500"
type="button"
on:click={() => {
whitelistEnabled = !whitelistEnabled;
}}>{whitelistEnabled ? 'On' : 'Off'}</button
>
</div>
</div>
{#if whitelistEnabled}
<div>
<div class=" space-y-1.5">
{#each whitelistModels as modelId, modelIdx}
<div class="flex w-full">
<div class="flex-1 mr-2">
<select
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
bind:value={modelId}
placeholder="Select a model"
>
<option value="" disabled selected>Select a model</option>
{#each $models.filter((model) => model.id) as model}
<option value={model.id} class="bg-gray-100 dark:bg-gray-700"
>{model.name}</option
>
{/each}
</select>
</div>
{#if modelIdx === 0}
<button
class="px-2.5 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-900 dark:text-white rounded-lg transition"
type="button"
on:click={() => {
if (whitelistModels.at(-1) !== '') {
whitelistModels = [...whitelistModels, ''];
}
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M8.75 3.75a.75.75 0 0 0-1.5 0v3.5h-3.5a.75.75 0 0 0 0 1.5h3.5v3.5a.75.75 0 0 0 1.5 0v-3.5h3.5a.75.75 0 0 0 0-1.5h-3.5v-3.5Z"
/>
</svg>
</button>
{:else}
<button
class="px-2.5 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-900 dark:text-white rounded-lg transition"
type="button"
on:click={() => {
whitelistModels.splice(modelIdx, 1);
whitelistModels = whitelistModels;
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path d="M3.75 7.25a.75.75 0 0 0 0 1.5h8.5a.75.75 0 0 0 0-1.5h-8.5Z" />
</svg>
</button>
{/if}
</div>
{/each}
</div>
<div class="flex justify-end items-center text-xs mt-1.5 text-right">
<div class=" text-xs font-medium">
{whitelistModels.length} Model(s) Whitelisted
</div>
</div>
</div>
{/if}
</div>
</div>
</div>
</div>
<div class="flex justify-end pt-3 text-sm font-medium">
...
...
src/lib/components/chat/MessageInput.svelte (view file @ b8902072)

@@ -364,12 +364,12 @@
 {#if dragged}
 	<div
-		class="fixed w-full h-full flex z-50 touch-none pointer-events-none"
+		class="fixed lg:w-[calc(100%-260px)] w-full h-full flex z-50 touch-none pointer-events-none"
 		id="dropzone"
 		role="region"
 		aria-label="Drag and Drop Container"
 	>
-		<div class="absolute rounded-xl w-full h-full backdrop-blur bg-gray-800/40 flex justify-center">
+		<div class="absolute w-full h-full backdrop-blur bg-gray-800/40 flex justify-center">
 			<div class="m-auto pt-64 flex flex-col justify-center">
 				<div class="max-w-md">
 					<AddFilesPlaceholder />
src/lib/components/chat/Settings/Account.svelte (view file @ b8902072)

@@ -111,7 +111,9 @@
 <button
 	class="relative rounded-full dark:bg-gray-700"
 	type="button"
-	on:click={profileImageInputElement.click}
+	on:click={() => {
+		profileImageInputElement.click();
+	}}
 >
 	<img
 		src={profileImageUrl !== '' ? profileImageUrl : '/user.png'}

@@ -271,7 +273,7 @@
 <div class="flex justify-end pt-3 text-sm font-medium">
 	<button
-		class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
+		class=" px-4 py-2 bg-emerald-700 hover:bg-emerald-800 text-gray-100 transition rounded-lg"
 		on:click={async () => {
 			const res = await submitHandler();
src/lib/components/chat/Settings/Audio.svelte (view file @ b8902072)

@@ -259,7 +259,7 @@
 <div class="flex justify-end pt-3 text-sm font-medium">
 	<button
-		class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
+		class=" px-4 py-2 bg-emerald-700 hover:bg-emerald-800 text-gray-100 transition rounded-lg"
 		type="submit"
 	>
 		{$i18n.t('Save')}
src/lib/components/chat/Settings/Chats.svelte (view file @ b8902072)

@@ -172,7 +172,9 @@
 />
 <button
 	class=" flex rounded-md py-2 px-3.5 w-full hover:bg-gray-200 dark:hover:bg-gray-800 transition"
-	on:click={chatImportInputElement.click}
+	on:click={() => {
+		chatImportInputElement.click();
+	}}
 >
 	<div class=" self-center mr-3">
 		<svg
src/lib/components/chat/Settings/Connections.svelte (view file @ b8902072)

@@ -249,7 +249,7 @@
 <div class="flex justify-end pt-3 text-sm font-medium">
 	<button
-		class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
+		class=" px-4 py-2 bg-emerald-700 hover:bg-emerald-800 text-gray-100 transition rounded-lg"
 		type="submit"
 	>
 		{$i18n.t('Save')}
src/lib/components/chat/Settings/General.svelte (view file @ b8902072)

@@ -272,7 +272,7 @@
 <div class="flex justify-end pt-3 text-sm font-medium">
 	<button
-		class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
+		class=" px-4 py-2 bg-emerald-700 hover:bg-emerald-800 text-gray-100 transition rounded-lg"
 		on:click={() => {
 			saveSettings({
 				system: system !== '' ? system : undefined,
src/lib/components/chat/Settings/Images.svelte (view file @ b8902072)

@@ -301,7 +301,7 @@
 <div class="flex justify-end pt-3 text-sm font-medium">
 	<button
-		class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded flex flex-row space-x-1 items-center {loading
+		class=" px-4 py-2 bg-emerald-700 hover:bg-emerald-800 text-gray-100 transition rounded-lg flex flex-row space-x-1 items-center {loading
 			? ' cursor-not-allowed'
 			: ''}"
 		type="submit"
src/lib/components/chat/Settings/Interface.svelte (view file @ b8902072)

@@ -65,6 +65,7 @@
 	}
 	saveSettings({
 		titleAutoGenerateModel: titleAutoGenerateModel !== '' ? titleAutoGenerateModel : undefined,
+		titleGenerationPrompt: titleGenerationPrompt ? titleGenerationPrompt : undefined
 	});
 };

@@ -192,7 +193,7 @@
 <div class="flex w-full">
 	<div class="flex-1 mr-2">
 		<select
-			class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
+			class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
 			bind:value={titleAutoGenerateModel}
 			placeholder={$i18n.t('Select a model')}
 		>

@@ -206,35 +207,13 @@
 			{/each}
 		</select>
 	</div>
-	<button
-		class="px-3 bg-gray-200 hover:bg-gray-300 dark:bg-gray-700 dark:hover:bg-gray-800 dark:text-gray-100 rounded transition"
-		on:click={() => {
-			saveSettings({
-				titleAutoGenerateModel:
-					titleAutoGenerateModel !== '' ? titleAutoGenerateModel : undefined
-			});
-		}}
-		type="button"
-	>
-		<svg
-			xmlns="http://www.w3.org/2000/svg"
-			viewBox="0 0 16 16"
-			fill="currentColor"
-			class="w-3.5 h-3.5"
-		>
-			<path
-				fill-rule="evenodd"
-				d="M13.836 2.477a.75.75 0 0 1 .75.75v3.182a.75.75 0 0 1-.75.75h-3.182a.75.75 0 0 1 0-1.5h1.37l-.84-.841a4.5 4.5 0 0 0-7.08.932.75.75 0 0 1-1.3-.75 6 6 0 0 1 9.44-1.242l.842.84V3.227a.75.75 0 0 1 .75-.75Zm-.911 7.5A.75.75 0 0 1 13.199 11a6 6 0 0 1-9.44 1.241l-.84-.84v1.371a.75.75 0 0 1-1.5 0V9.591a.75.75 0 0 1 .75-.75H5.35a.75.75 0 0 1 0 1.5H3.98l.841.841a4.5 4.5 0 0 0 7.08-.932.75.75 0 0 1 1.025-.273Z"
-				clip-rule="evenodd"
-			/>
-		</svg>
-	</button>
 </div>
-<div class="mt-3">
+<div class="mt-3 mr-2">
 	<div class=" mb-2.5 text-sm font-medium">{$i18n.t('Title Generation Prompt')}</div>
 	<textarea
 		bind:value={titleGenerationPrompt}
-		class="w-full rounded p-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none resize-none"
+		class="w-full rounded-lg p-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none resize-none"
 		rows="3"
 	/>
 </div>

@@ -329,7 +308,7 @@
 <div class="flex justify-end pt-3 text-sm font-medium">
 	<button
-		class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
+		class=" px-4 py-2 bg-emerald-700 hover:bg-emerald-800 text-gray-100 transition rounded-lg"
 		type="submit"
 	>
 		{$i18n.t('Save')}
src/lib/components/chat/Settings/Models.svelte (view file @ b8902072)
...
...
@@ -613,7 +613,9 @@
<button
type="button"
class="w-full rounded-lg text-left py-2 px-4 dark:text-gray-300 dark:bg-gray-850"
on:click={modelUploadInputElement.click}
on:click={() => {
modelUploadInputElement.click();
}}
>
{#if modelInputFile && modelInputFile.length > 0}
{modelInputFile[0].name}
...
...
@@ -737,264 +739,193 @@
<div class=" space-y-3">
<div class="mt-2 space-y-3 pr-1.5">
<div>
<div class=" mb-2 text-sm font-medium">{$i18n.t('Manage LiteLLM Models')}</div>
<div>
<div class="mb-2">
<div class="flex justify-between items-center text-xs">
<div class=" text-sm font-medium">{$i18n.t('
Add a m
odel')}</div>
<div class=" text-sm font-medium">{$i18n.t('
Manage LiteLLM M
odel
s
')}</div>
<button
class=" text-xs font-medium text-gray-500"
type="button"
on:click={() => {
showLiteLLMParams = !showLiteLLMParams;
}}
>{showLiteLLMParams
? $i18n.t('Hide Additional Params')
: $i18n.t('Show Additional Params')}</button
showLiteLLM = !showLiteLLM;
}}>{showLiteLLM ? $i18n.t('Hide') : $i18n.t('Show')}</button
>
</div>
</div>
<div class="my-2 space-y-2">
<div class="flex w-full mb-1.5">
<div class="flex-1 mr-2">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter LiteLLM Model (litellm_params.model)"
bind:value={liteLLMModel}
autocomplete="off"
/>
{#if showLiteLLM}
<div>
<div class="flex justify-between items-center text-xs">
<div class=" text-sm font-medium">Add a model</div>
<button
class=" text-xs font-medium text-gray-500"
type="button"
on:click={() => {
showLiteLLMParams = !showLiteLLMParams;
}}
>{showLiteLLMParams ? $i18n.t('Hide Additional Params') : $i18n.t('Show Additional Params')}</button
>
</div>
</div>
<button
class="px-2.5 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded-lg transition"
on:click={() => {
addLiteLLMModelHandler();
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M8.75 3.75a.75.75 0 0 0-1.5 0v3.5h-3.5a.75.75 0 0 0 0 1.5h3.5v3.5a.75.75 0 0 0 1.5 0v-3.5h3.5a.75.75 0 0 0 0-1.5h-3.5v-3.5Z"
<div class="my-2 space-y-2">
<div class="flex w-full mb-1.5">
<div class="flex-1 mr-2">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter LiteLLM Model (litellm_params.model)"
bind:value={liteLLMModel}
autocomplete="off"
/>
</svg>
</button>
</div>
</div>
{#if showLiteLLMParams}
<div>
<div class=" mb-1.5 text-sm font-medium">{$i18n.t('Model Name')}</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter Model Name (model_name)"
bind:value={liteLLMModelName}
autocomplete="off"
<button
class="px-2.5 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded-lg transition"
on:click={() => {
addLiteLLMModelHandler();
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M8.75 3.75a.75.75 0 0 0-1.5 0v3.5h-3.5a.75.75 0 0 0 0 1.5h3.5v3.5a.75.75 0 0 0 1.5 0v-3.5h3.5a.75.75 0 0 0 0-1.5h-3.5v-3.5Z"
/>
</div>
</div>
</svg>
</button>
</div>
<div>
<div class=" mb-1.5 text-sm font-medium">{$i18n.t('API Base URL')}</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter LiteLLM API Base URL (litellm_params.api_base)"
bind:value={liteLLMAPIBase}
autocomplete="off"
/>
{#if showLiteLLMParams}
<div>
<div class=" mb-1.5 text-sm font-medium">{$i18n.t('Model Name')}</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter Model Name (model_name)"
bind:value={liteLLMModelName}
autocomplete="off"
/>
</div>
</div>
</div>
</div>
<div>
<div class=" mb-1.5 text-sm font-medium">{$i18n.t('API Key')}</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter LiteLLM API Key (litellm_params.api_key)"
bind:value={liteLLMAPIKey}
autocomplete="off"
/>
<div>
<div class=" mb-1.5 text-sm font-medium">API Base URL</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter LiteLLM API Base URL (litellm_params.api_base)"
bind:value={liteLLMAPIBase}
autocomplete="off"
/>
</div>
</div>
</div>
</div>
<div>
<div class="mb-1.5 text-sm font-medium">{$i18n.t('API RPM')}</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter LiteLLM API RPM (litellm_params.rpm)"
bind:value={liteLLMRPM}
autocomplete="off"
/>
<div>
<div class=" mb-1.5 text-sm font-medium">API Key</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter LiteLLM API Key (litellm_params.api_key)"
bind:value={liteLLMAPIKey}
autocomplete="off"
/>
</div>
</div>
</div>
</div>
<div>
<div class="mb-1.5 text-sm font-medium">Max Tokens</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter Max Tokens (litellm_params.max_tokens)"
bind:value={liteLLMMaxTokens}
type="number"
min="1"
autocomplete="off"
/>
<div>
<div class="mb-1.5 text-sm font-medium">API RPM</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter LiteLLM API RPM (litellm_params.rpm)"
bind:value={liteLLMRPM}
autocomplete="off"
/>
</div>
</div>
</div>
</div>
{/if}
</div>
<div class="mb-2 text-xs text-gray-400 dark:text-gray-500">
{$i18n.t('Not sure what to add?')}
<a
class=" text-gray-300 font-medium underline"
href="https://litellm.vercel.app/docs/proxy/configs#quick-start"
target="_blank"
>
{$i18n.t('Click here for help')}
</a>
</div>
<div>
<div class=" mb-2.5 text-sm font-medium">{$i18n.t('Delete a model')}</div>
<div class="flex w-full">
<div class="flex-1 mr-2">
<select
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
bind:value={deleteLiteLLMModelId}
placeholder={$i18n.t('Select a model')}
>
{#if !deleteLiteLLMModelId}
<option value="" disabled selected>{$i18n.t('Select a model')}</option>
{/if}
{#each liteLLMModelInfo as model}
<option value={model.model_info.id} class="bg-gray-100 dark:bg-gray-700"
>{model.model_name}</option
>
{/each}
</select>
</div>
<button
class="px-2.5 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded-lg transition"
on:click={() => {
deleteLiteLLMModelHandler();
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M5 3.25V4H2.75a.75.75 0 0 0 0 1.5h.3l.815 8.15A1.5 1.5 0 0 0 5.357 15h5.285a1.5 1.5 0 0 0 1.493-1.35l.815-8.15h.3a.75.75 0 0 0 0-1.5H11v-.75A2.25 2.25 0 0 0 8.75 1h-1.5A2.25 2.25 0 0 0 5 3.25Zm2.25-.75a.75.75 0 0 0-.75.75V4h3v-.75a.75.75 0 0 0-.75-.75h-1.5ZM6.05 6a.75.75 0 0 1 .787.713l.275 5.5a.75.75 0 0 1-1.498.075l-.275-5.5A.75.75 0 0 1 6.05 6Zm3.9 0a.75.75 0 0 1 .712.787l-.275 5.5a.75.75 0 0 1-1.498-.075l.275-5.5a.75.75 0 0 1 .786-.711Z"
clip-rule="evenodd"
/>
</svg>
</button>
</div>
</div>
</div>
</div>
<!-- <div class="mt-2 space-y-3 pr-1.5">
<div>
<div class=" mb-2.5 text-sm font-medium">{$i18n.t('Add LiteLLM Model')}</div>
<div class="flex w-full mb-2">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
placeholder="Enter LiteLLM Model (e.g. ollama/mistral)"
bind:value={liteLLMModel}
autocomplete="off"
/>
</div>
</div>
<div class="flex justify-between items-center text-sm">
<div class=" font-medium">{$i18n.t('Advanced Model Params')}</div>
<button
class=" text-xs font-medium text-gray-500"
type="button"
on:click={() => {
showLiteLLMParams = !showLiteLLMParams;
}}>{showLiteLLMParams ? 'Hide' : 'Show'}</button
>
</div>
{#if showLiteLLMParams}
<div>
<div class=" mb-2.5 text-sm font-medium">{$i18n.t('LiteLLM API Key')}</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
placeholder="Enter LiteLLM API Key (e.g. os.environ/AZURE_API_KEY_CA)"
bind:value={liteLLMAPIKey}
autocomplete="off"
/>
<div>
<div class="mb-1.5 text-sm font-medium">Max Tokens</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter Max Tokens (litellm_params.max_tokens)"
bind:value={liteLLMMaxTokens}
type="number"
min="1"
autocomplete="off"
/>
</div>
</div>
</div>
</div>
{/if}
</div>
<div>
<div class=" mb-2.5 text-sm font-medium">{$i18n.t('LiteLLM API Base URL')}</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
placeholder="Enter LiteLLM API Base URL"
bind:value={liteLLMAPIBase}
autocomplete="off"
/>
</div>
</div>
<div class="mb-2 text-xs text-gray-400 dark:text-gray-500">
Not sure what to add?
<a
class=" text-gray-300 font-medium underline"
href="https://litellm.vercel.app/docs/proxy/configs#quick-start"
target="_blank"
>
Click here for help.
</a>
</div>
<div>
<div class=" mb-2.5 text-sm font-medium">
{$i18n.t('LiteLLM API RPM')}
</div>
<div class=" mb-2.5 text-sm font-medium">
Delete a model
</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
placeholder="Enter LiteLLM API RPM"
bind:value={liteLLMRPM}
autocomplete="off"
/>
<div class="flex-1 mr-2">
<select
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
bind:value={deleteLiteLLMModelId}
placeholder="Select a model"
>
{#if !deleteLiteLLMModelId}
<option value="" disabled selected>Select a model</option>
{/if}
{#each liteLLMModelInfo as model}
<option value={model.model_info.id} class="bg-gray-100 dark:bg-gray-700"
>{model.model_name}</option
>
{/each}
</select>
</div>
<button
class="px-2.5 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded-lg transition"
on:click={() => {
deleteLiteLLMModelHandler();
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M5 3.25V4H2.75a.75.75 0 0 0 0 1.5h.3l.815 8.15A1.5 1.5 0 0 0 5.357 15h5.285a1.5 1.5 0 0 0 1.493-1.35l.815-8.15h.3a.75.75 0 0 0 0-1.5H11v-.75A2.25 2.25 0 0 0 8.75 1h-1.5A2.25 2.25 0 0 0 5 3.25Zm2.25-.75a.75.75 0 0 0-.75.75V4h3v-.75a.75.75 0 0 0-.75-.75h-1.5ZM6.05 6a.75.75 0 0 1 .787.713l.275 5.5a.75.75 0 0 1-1.498.075l-.275-5.5A.75.75 0 0 1 6.05 6Zm3.9 0a.75.75 0 0 1 .712.787l-.275 5.5a.75.75 0 0 1-1.498-.075l.275-5.5a.75.75 0 0 1 .786-.711Z"
clip-rule="evenodd"
/>
</svg>
</button>
</div>
</div>
{/if}
<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
Not sure what to add?
<a
class=" text-gray-300 font-medium underline"
href="https://litellm.vercel.app/docs/proxy/configs#quick-start"
target="_blank"
>
Click here for help.
</a>
</div>
</div>
</div>
-->
</div>
</div>
</div>
</div>
</div>
\ No newline at end of file