chenpangpang / open-webui / Commits / 635951b5

Unverified commit 635951b5, authored May 06, 2024 by Timothy Jaeryang Baek, committed by GitHub on May 06, 2024.

    Merge branch 'dev' into feat/backend-web-search

Parents: 8b3e370a, bf604bc0
Changes: 56
Showing 20 changed files on this page, with 1063 additions and 877 deletions (+1063 -877):

    .github/workflows/integration-test.yml                      +10   -1
    backend/apps/rag/main.py                                     +37   -10
    backend/apps/rag/utils.py                                    +27   -22
    backend/config.py                                            +20   -11
    backend/main.py                                              +36   -3
    src/app.css                                                  +9    -0
    src/lib/apis/ollama/index.ts                                 +5    -1
    src/lib/apis/openai/index.ts                                 +3    -1
    src/lib/apis/rag/index.ts                                    +3    -2
    src/lib/apis/streaming/index.ts                              +11   -0
    src/lib/components/chat/Messages/CitationsModal.svelte       +77   -0
    src/lib/components/chat/Messages/ResponseMessage.svelte      +473  -420
    src/lib/components/chat/ModelSelector.svelte                 +1    -1
    src/lib/components/chat/ModelSelector/Selector.svelte        +2    -2
    src/lib/components/chat/Settings/Account.svelte              +2    -2
    src/lib/components/chat/Settings/Connections.svelte          +1    -1
    src/lib/components/chat/ShareChatModal.svelte                +10   -6
    src/lib/components/common/ImagePreview.svelte                +1    -1
    src/lib/components/documents/Settings/ChunkParams.svelte     +126  -0
    src/lib/components/documents/Settings/General.svelte         +209  -393
.github/workflows/integration-test.yml  (View file @ 635951b5)

@@ -20,7 +20,16 @@ jobs:
       - name: Build and run Compose Stack
         run: |
-          docker compose up --detach --build
+          docker compose --file docker-compose.yaml --file docker-compose.api.yaml up --detach --build
+
+      - name: Wait for Ollama to be up
+        timeout-minutes: 5
+        run: |
+          until curl --output /dev/null --silent --fail http://localhost:11434; do
+            printf '.'
+            sleep 1
+          done
+          echo "Service is up!"

       - name: Preload Ollama model
         run: |
backend/apps/rag/main.py  (View file @ 635951b5)

@@ -80,6 +80,7 @@ from config import (
     RAG_EMBEDDING_MODEL_AUTO_UPDATE,
     RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE,
     ENABLE_RAG_HYBRID_SEARCH,
+    ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION,
     RAG_RERANKING_MODEL,
     PDF_EXTRACT_IMAGES,
     RAG_RERANKING_MODEL_AUTO_UPDATE,
@@ -91,7 +92,7 @@ from config import (
     CHUNK_SIZE,
     CHUNK_OVERLAP,
     RAG_TEMPLATE,
-    ENABLE_LOCAL_WEB_FETCH,
+    ENABLE_RAG_LOCAL_WEB_FETCH,
 )

 from constants import ERROR_MESSAGES
@@ -105,6 +106,9 @@ app.state.TOP_K = RAG_TOP_K
 app.state.RELEVANCE_THRESHOLD = RAG_RELEVANCE_THRESHOLD
 app.state.ENABLE_RAG_HYBRID_SEARCH = ENABLE_RAG_HYBRID_SEARCH
+app.state.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION = (
+    ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION
+)
 app.state.CHUNK_SIZE = CHUNK_SIZE
 app.state.CHUNK_OVERLAP = CHUNK_OVERLAP
@@ -114,6 +118,7 @@ app.state.RAG_EMBEDDING_MODEL = RAG_EMBEDDING_MODEL
 app.state.RAG_RERANKING_MODEL = RAG_RERANKING_MODEL
 app.state.RAG_TEMPLATE = RAG_TEMPLATE
 app.state.OPENAI_API_BASE_URL = RAG_OPENAI_API_BASE_URL
 app.state.OPENAI_API_KEY = RAG_OPENAI_API_KEY
@@ -313,6 +318,7 @@ async def get_rag_config(user=Depends(get_admin_user)):
             "chunk_size": app.state.CHUNK_SIZE,
             "chunk_overlap": app.state.CHUNK_OVERLAP,
         },
+        "web_loader_ssl_verification": app.state.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION,
     }
@@ -322,15 +328,34 @@ class ChunkParamUpdateForm(BaseModel):
 class ConfigUpdateForm(BaseModel):
-    pdf_extract_images: bool
-    chunk: ChunkParamUpdateForm
+    pdf_extract_images: Optional[bool] = None
+    chunk: Optional[ChunkParamUpdateForm] = None
+    web_loader_ssl_verification: Optional[bool] = None


 @app.post("/config/update")
 async def update_rag_config(form_data: ConfigUpdateForm, user=Depends(get_admin_user)):
-    app.state.PDF_EXTRACT_IMAGES = form_data.pdf_extract_images
-    app.state.CHUNK_SIZE = form_data.chunk.chunk_size
-    app.state.CHUNK_OVERLAP = form_data.chunk.chunk_overlap
+    app.state.PDF_EXTRACT_IMAGES = (
+        form_data.pdf_extract_images
+        if form_data.pdf_extract_images != None
+        else app.state.PDF_EXTRACT_IMAGES
+    )
+
+    app.state.CHUNK_SIZE = (
+        form_data.chunk.chunk_size if form_data.chunk != None else app.state.CHUNK_SIZE
+    )
+    app.state.CHUNK_OVERLAP = (
+        form_data.chunk.chunk_overlap
+        if form_data.chunk != None
+        else app.state.CHUNK_OVERLAP
+    )
+
+    app.state.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION = (
+        form_data.web_loader_ssl_verification
+        if form_data.web_loader_ssl_verification != None
+        else app.state.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION
+    )

     return {
         "status": True,
@@ -339,6 +364,7 @@ async def update_rag_config(form_data: ConfigUpdateForm, user=Depends(get_admin_
             "chunk_size": app.state.CHUNK_SIZE,
             "chunk_overlap": app.state.CHUNK_OVERLAP,
         },
+        "web_loader_ssl_verification": app.state.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION,
     }
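Because ConfigUpdateForm now marks every field Optional, a partial payload updates only the settings it names and leaves the rest of app.state untouched. The following is a minimal client-side sketch, not part of the diff: the base URL, the /rag/api/v1 mount prefix, and the ADMIN_TOKEN environment variable are assumptions for illustration.

# Sketch only: toggling one RAG setting via the updated /config/update endpoint.
# Assumptions: server at http://localhost:8080, RAG sub-app mounted under /rag/api/v1,
# and an admin bearer token exported as ADMIN_TOKEN.
import os
import requests

ADMIN_TOKEN = os.environ["ADMIN_TOKEN"]

res = requests.post(
    "http://localhost:8080/rag/api/v1/config/update",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    # Only web_loader_ssl_verification is sent; pdf_extract_images and chunk keep
    # their current values because the omitted Optional fields stay None.
    json={"web_loader_ssl_verification": False},
)
print(res.json())  # echoes the effective config, including "web_loader_ssl_verification"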
@@ -490,7 +516,9 @@ def store_youtube_video(form_data: UrlForm, user=Depends(get_current_user)):
 def store_web(form_data: UrlForm, user=Depends(get_current_user)):
     # "https://www.gutenberg.org/files/1727/1727-h/1727-h.htm"
     try:
-        loader = get_web_loader(form_data.url)
+        loader = get_web_loader(
+            form_data.url, verify_ssl=app.state.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION
+        )
         data = loader.load()

         collection_name = form_data.collection_name
@@ -510,12 +538,11 @@ def store_web(form_data: UrlForm, user=Depends(get_current_user)):
             detail=ERROR_MESSAGES.DEFAULT(e),
         )


-def get_web_loader(url: Union[str, Sequence[str]]):
+def get_web_loader(url: Union[str, Sequence[str]], verify_ssl: bool = True):
     # Check if the URL is valid
     if not validate_url(url):
         raise ValueError(ERROR_MESSAGES.INVALID_URL)
-    return WebBaseLoader(url)
+    return WebBaseLoader(url, verify_ssl=verify_ssl)


 def validate_url(url: Union[str, Sequence[str]]):
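The new verify_ssl parameter is simply passed through to LangChain's WebBaseLoader. A small usage sketch, assuming get_web_loader above is importable from the backend package and using the example URL already present in the diff:

# Sketch only: how the SSL-verification flag flows into the loader.
from apps.rag.main import get_web_loader  # assumes the backend package is on sys.path

loader = get_web_loader(
    "https://www.gutenberg.org/files/1727/1727-h/1727-h.htm",  # example URL from the diff
    verify_ssl=False,  # mirrors app.state.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION being off
)
data = loader.load()  # WebBaseLoader skips certificate verification when verify_ssl=False
print(len(data), "document(s) loaded")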
backend/apps/rag/utils.py  (View file @ 635951b5)

@@ -287,14 +287,14 @@ def rag_messages(
     for doc in docs:
         context = None

-        collection = doc.get("collection_name")
-        if collection:
-            collection = [collection]
-        else:
-            collection = doc.get("collection_names", [])
+        collection_names = (
+            doc["collection_names"]
+            if doc["type"] == "collection"
+            else [doc["collection_name"]]
+        )

-        collection = set(collection).difference(extracted_collections)
-        if not collection:
+        collection_names = set(collection_names).difference(extracted_collections)
+        if not collection_names:
             log.debug(f"skipping {doc} as it has already been extracted")
             continue
@@ -304,11 +304,7 @@ def rag_messages(
             else:
                 if hybrid_search:
                     context = query_collection_with_hybrid_search(
-                        collection_names=(
-                            doc["collection_names"]
-                            if doc["type"] == "collection"
-                            else [doc["collection_name"]]
-                        ),
+                        collection_names=collection_names,
                         query=query,
                         embedding_function=embedding_function,
                         k=k,
@@ -317,11 +313,7 @@ def rag_messages(
                     )
                 else:
                     context = query_collection(
-                        collection_names=(
-                            doc["collection_names"]
-                            if doc["type"] == "collection"
-                            else [doc["collection_name"]]
-                        ),
+                        collection_names=collection_names,
                         query=query,
                         embedding_function=embedding_function,
                         k=k,
@@ -331,18 +323,31 @@ def rag_messages(
             context = None

         if context:
-            relevant_contexts.append(context)
+            relevant_contexts.append({**context, "source": doc})

-        extracted_collections.extend(collection)
+        extracted_collections.extend(collection_names)

     context_string = ""

+    citations = []
     for context in relevant_contexts:
-        context_string += "\n\n".join(
-            [text for text in context["documents"][0] if text is not None]
-        )
+        try:
+            if "documents" in context:
+                items = [item for item in context["documents"][0] if item is not None]
+                context_string += "\n\n".join(items)
+
+            if "metadatas" in context:
+                citations.append(
+                    {
+                        "source": context["source"],
+                        "document": context["documents"][0],
+                        "metadata": context["metadatas"][0],
+                    }
+                )
+        except Exception as e:
+            log.exception(e)
+
     context_string = context_string.strip()

     ra_content = rag_template(
@@ -371,7 +376,7 @@ def rag_messages(
         messages[last_user_message_idx] = new_user_message

-    return messages
+    return messages, citations


 def get_model_path(model: str, update_model: bool = False):
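rag_messages now returns a (messages, citations) pair instead of the messages alone, with each citation pairing the originating doc with the retrieved chunks and their metadata. The sketch below is illustrative only; the keys ("source", "document", "metadata") come from the dict built in the loop above, while the concrete values are placeholders:

# Sketch only: the shape of the citations list now returned alongside the rewritten messages.
example_citations = [
    {
        "source": {"type": "collection", "collection_names": ["docs-manuals"]},
        "document": ["First retrieved chunk ...", "Second retrieved chunk ..."],
        "metadata": [{"source": "manual.pdf", "page": 3}, {"source": "manual.pdf", "page": 7}],
    }
]

for citation in example_citations:
    # Frontend-side, CitationsModal.svelte pairs document[i] with metadata[i] the same way.
    for chunk, meta in zip(citation["document"], citation["metadata"]):
        print(meta.get("source", "unknown"), "->", chunk[:40])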
backend/config.py  (View file @ 635951b5)

@@ -18,6 +18,18 @@ from secrets import token_bytes
 from constants import ERROR_MESSAGES

+####################################
+# Load .env file
+####################################
+
+try:
+    from dotenv import load_dotenv, find_dotenv
+
+    load_dotenv(find_dotenv("../.env"))
+except ImportError:
+    print("dotenv not installed, skipping...")
+
 ####################################
 # LOGGING
 ####################################
@@ -59,16 +71,6 @@ for source in log_sources:
     log.setLevel(SRC_LOG_LEVELS["CONFIG"])

-####################################
-# Load .env file
-####################################
-
-try:
-    from dotenv import load_dotenv, find_dotenv
-
-    load_dotenv(find_dotenv("../.env"))
-except ImportError:
-    log.warning("dotenv not installed, skipping...")

 WEBUI_NAME = os.environ.get("WEBUI_NAME", "Open WebUI")
 if WEBUI_NAME != "Open WebUI":
@@ -454,6 +456,11 @@ ENABLE_RAG_HYBRID_SEARCH = (
     os.environ.get("ENABLE_RAG_HYBRID_SEARCH", "").lower() == "true"
 )

+ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION = (
+    os.environ.get("ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION", "True").lower() == "true"
+)
+
 RAG_EMBEDDING_ENGINE = os.environ.get("RAG_EMBEDDING_ENGINE", "")

 PDF_EXTRACT_IMAGES = os.environ.get("PDF_EXTRACT_IMAGES", "False").lower() == "true"
@@ -531,7 +538,9 @@ RAG_TEMPLATE = os.environ.get("RAG_TEMPLATE", DEFAULT_RAG_TEMPLATE)
 RAG_OPENAI_API_BASE_URL = os.getenv("RAG_OPENAI_API_BASE_URL", OPENAI_API_BASE_URL)
 RAG_OPENAI_API_KEY = os.getenv("RAG_OPENAI_API_KEY", OPENAI_API_KEY)

-ENABLE_LOCAL_WEB_FETCH = os.getenv("ENABLE_LOCAL_WEB_FETCH", "False").lower() == "true"
+ENABLE_RAG_LOCAL_WEB_FETCH = (
+    os.getenv("ENABLE_RAG_LOCAL_WEB_FETCH", "False").lower() == "true"
+)

 SEARXNG_QUERY_URL = os.getenv("SEARXNG_QUERY_URL", "")
 GOOGLE_PSE_API_KEY = os.getenv("GOOGLE_PSE_API_KEY", "")
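Both new flags use the same lowercase-and-compare pattern, so any casing of "true" enables them, but the defaults differ: SSL verification is on unless explicitly disabled, local web fetch is off unless explicitly enabled. A standalone sketch of that parsing, outside the real config module:

# Sketch only: the env-flag parsing pattern used above, not the actual config module.
import os

def env_flag(name: str, default: str) -> bool:
    # "True", "true", "TRUE" all enable the flag; any other value disables it.
    return os.environ.get(name, default).lower() == "true"

enable_ssl_verification = env_flag("ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION", "True")  # default: on
enable_local_web_fetch = env_flag("ENABLE_RAG_LOCAL_WEB_FETCH", "False")              # default: off
print(enable_ssl_verification, enable_local_web_fetch)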
backend/main.py  (View file @ 635951b5)

@@ -15,7 +15,7 @@ from fastapi.middleware.wsgi import WSGIMiddleware
 from fastapi.middleware.cors import CORSMiddleware
 from starlette.exceptions import HTTPException as StarletteHTTPException
 from starlette.middleware.base import BaseHTTPMiddleware
 from starlette.responses import StreamingResponse

 from apps.ollama.main import app as ollama_app
 from apps.openai.main import app as openai_app
@@ -102,6 +102,8 @@ origins = ["*"]
 class RAGMiddleware(BaseHTTPMiddleware):
     async def dispatch(self, request: Request, call_next):
+        return_citations = False
+
         if request.method == "POST" and (
             "/api/chat" in request.url.path or "/chat/completions" in request.url.path
         ):
@@ -114,11 +116,15 @@ class RAGMiddleware(BaseHTTPMiddleware):
             # Parse string to JSON
             data = json.loads(body_str) if body_str else {}

+            return_citations = data.get("citations", False)
+            if "citations" in data:
+                del data["citations"]
+
             # Example: Add a new key-value pair or modify existing ones
             # data["modified"] = True  # Example modification
             if "docs" in data:
                 data = {**data}
-                data["messages"] = rag_messages(
+                data["messages"], citations = rag_messages(
                     docs=data["docs"],
                     messages=data["messages"],
                     template=rag_app.state.RAG_TEMPLATE,
@@ -130,7 +136,9 @@ class RAGMiddleware(BaseHTTPMiddleware):
                 )
                 del data["docs"]

-                log.debug(f"data['messages']: {data['messages']}")
+                log.debug(
+                    f"data['messages']: {data['messages']}, citations: {citations}"
+                )

             modified_body_bytes = json.dumps(data).encode("utf-8")
@@ -148,11 +156,36 @@ class RAGMiddleware(BaseHTTPMiddleware):
         ]

         response = await call_next(request)

+        if return_citations:
+            # Inject the citations into the response
+            if isinstance(response, StreamingResponse):
+                # If it's a streaming response, inject it as SSE event or NDJSON line
+                content_type = response.headers.get("Content-Type")
+                if "text/event-stream" in content_type:
+                    return StreamingResponse(
+                        self.openai_stream_wrapper(response.body_iterator, citations),
+                    )
+                if "application/x-ndjson" in content_type:
+                    return StreamingResponse(
+                        self.ollama_stream_wrapper(response.body_iterator, citations),
+                    )
+
         return response

     async def _receive(self, body: bytes):
         return {"type": "http.request", "body": body, "more_body": False}

+    async def openai_stream_wrapper(self, original_generator, citations):
+        yield f"data: {json.dumps({'citations': citations})}\n\n"
+        async for data in original_generator:
+            yield data
+
+    async def ollama_stream_wrapper(self, original_generator, citations):
+        yield f"{json.dumps({'citations': citations})}\n"
+        async for data in original_generator:
+            yield data
+

 app.add_middleware(RAGMiddleware)
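When the request body sets citations, the middleware now emits the citations as the very first SSE event (OpenAI-style streams) or NDJSON line (Ollama-style streams) before relaying the upstream chunks. Below is a client-side sketch of reading that first event; the base URL, route prefix, token, and model name are placeholder assumptions, not taken from the diff:

# Sketch only: consuming the citations event that RAGMiddleware prepends to an SSE stream.
import json
import requests

with requests.post(
    "http://localhost:8080/openai/chat/completions",   # assumed route prefix
    headers={"Authorization": "Bearer YOUR_TOKEN"},     # placeholder token
    json={
        "model": "llama3",                              # placeholder model
        "messages": [{"role": "user", "content": "Summarize the attached docs."}],
        "stream": True,
        "citations": True,   # stripped by the middleware before forwarding upstream
        "docs": [],          # normally populated by the frontend
    },
    stream=True,
) as resp:
    for raw in resp.iter_lines():
        if not raw:
            continue
        line = raw.decode("utf-8")
        if not line.startswith("data: "):
            continue
        payload = line[len("data: "):]
        if payload == "[DONE]":
            break
        event = json.loads(payload)
        if "citations" in event:
            # The injected first event: same structure as rag_messages' citations list.
            print("citations:", event["citations"])
        else:
            print(event.get("choices", [{}])[0].get("delta", {}).get("content", ""), end="")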
src/app.css  (View file @ 635951b5)

@@ -82,3 +82,12 @@ select {
 .katex-mathml {
 	display: none;
 }
+
+.scrollbar-none:active::-webkit-scrollbar-thumb,
+.scrollbar-none:focus::-webkit-scrollbar-thumb,
+.scrollbar-none:hover::-webkit-scrollbar-thumb {
+	visibility: visible;
+}
+
+.scrollbar-none::-webkit-scrollbar-thumb {
+	visibility: hidden;
+}
src/lib/apis/ollama/index.ts  (View file @ 635951b5)

@@ -159,7 +159,11 @@ export const generateTitle = async (
 		body: JSON.stringify({
 			model: model,
 			prompt: template,
-			stream: false
+			stream: false,
+			options: {
+				// Restrict the number of tokens generated to 50
+				num_predict: 50
+			}
 		})
 	}).then(async (res) => {
src/lib/apis/openai/index.ts  (View file @ 635951b5)

@@ -295,7 +295,9 @@ export const generateTitle = async (
 					content: template
 				}
 			],
-			stream: false
+			stream: false,
+			// Restricting the max tokens to 50 to avoid long titles
+			max_tokens: 50
 		})
 	}).then(async (res) => {
src/lib/apis/rag/index.ts  (View file @ 635951b5)

@@ -33,8 +33,9 @@ type ChunkConfigForm = {
 };

 type RAGConfigForm = {
-	pdf_extract_images: boolean;
-	chunk: ChunkConfigForm;
+	pdf_extract_images?: boolean;
+	chunk?: ChunkConfigForm;
+	web_loader_ssl_verification?: boolean;
 };

 export const updateRAGConfig = async (token: string, payload: RAGConfigForm) => {
src/lib/apis/streaming/index.ts  (View file @ 635951b5)

@@ -4,6 +4,8 @@ import type { ParsedEvent } from 'eventsource-parser';
 type TextStreamUpdate = {
 	done: boolean;
 	value: string;
+	// eslint-disable-next-line @typescript-eslint/no-explicit-any
+	citations?: any;
 };

 // createOpenAITextStream takes a responseBody with a SSE response,
@@ -45,6 +47,11 @@ async function* openAIStreamToIterator(
 			const parsedData = JSON.parse(data);
 			console.log(parsedData);

+			if (parsedData.citations) {
+				yield { done: false, value: '', citations: parsedData.citations };
+				continue;
+			}
+
 			yield { done: false, value: parsedData.choices?.[0]?.delta?.content ?? '' };
 		} catch (e) {
 			console.error('Error extracting delta from SSE event:', e);
@@ -62,6 +69,10 @@ async function* streamLargeDeltasAsRandomChunks(
 			yield textStreamUpdate;
 			return;
 		}
+		if (textStreamUpdate.citations) {
+			yield textStreamUpdate;
+			continue;
+		}
 		let content = textStreamUpdate.value;
 		if (content.length < 5) {
 			yield { done: false, value: content };
src/lib/components/chat/Messages/CitationsModal.svelte  (new file, 0 → 100644, View file @ 635951b5)
<script lang="ts">
import { getContext, onMount, tick } from 'svelte';
import Modal from '$lib/components/common/Modal.svelte';
const i18n = getContext('i18n');
export let show = false;
export let citation;
let mergedDocuments = [];
$: if (citation) {
mergedDocuments = citation.document?.map((c, i) => {
return {
source: citation.source,
document: c,
metadata: citation.metadata?.[i]
};
});
}
</script>
<Modal size="lg" bind:show>
<div>
<div class=" flex justify-between dark:text-gray-300 px-5 pt-4 pb-2">
<div class=" text-lg font-medium self-center capitalize">
{$i18n.t('Citation')}
</div>
<button
class="self-center"
on:click={() => {
show = false;
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-5 h-5"
>
<path
d="M6.28 5.22a.75.75 0 00-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 101.06 1.06L10 11.06l3.72 3.72a.75.75 0 101.06-1.06L11.06 10l3.72-3.72a.75.75 0 00-1.06-1.06L10 8.94 6.28 5.22z"
/>
</svg>
</button>
</div>
<div class="flex flex-col md:flex-row w-full px-6 pb-5 md:space-x-4">
<div
class="flex flex-col w-full dark:text-gray-200 overflow-y-scroll max-h-[22rem] scrollbar-none"
>
{#each mergedDocuments as document, documentIdx}
<div class="flex flex-col w-full">
<div class="text-sm font-medium dark:text-gray-300">
{$i18n.t('Source')}
</div>
<div class="text-sm dark:text-gray-400">
{document.source?.name ?? $i18n.t('No source available')}
</div>
</div>
<div class="flex flex-col w-full">
<div class=" text-sm font-medium dark:text-gray-300">
{$i18n.t('Content')}
</div>
<pre class="text-sm dark:text-gray-400 whitespace-pre-line">
{document.document}
</pre>
</div>
{#if documentIdx !== mergedDocuments.length - 1}
<hr class=" dark:border-gray-850 my-3" />
{/if}
{/each}
</div>
</div>
</div>
</Modal>
src/lib/components/chat/Messages/ResponseMessage.svelte  (View file @ 635951b5)

This diff is collapsed and not shown here (+473 -420).
src/lib/components/chat/ModelSelector.svelte  (View file @ 635951b5)

@@ -82,7 +82,7 @@
 					</div>
 				{:else}
 					<div class=" self-center disabled:text-gray-600 disabled:hover:text-gray-600 mr-2">
-						<Tooltip content="Remove Model">
+						<Tooltip content={$i18n.t('Remove Model')}>
 							<button
 								{disabled}
 								on:click={() => {
src/lib/components/chat/ModelSelector/Selector.svelte  (View file @ 635951b5)

@@ -305,7 +305,7 @@
 			{:else}
 				<div>
 					<div class="block px-3 py-2 text-sm text-gray-700 dark:text-gray-100">
-						No results found
+						{$i18n.t('No results found')}
 					</div>
 				</div>
 			{/each}
@@ -317,7 +317,7 @@
 						pullModelHandler();
 					}}
 				>
-					Pull "{searchValue}" from Ollama.com
+					{$i18n.t(`Pull "{{searchValue}}" from Ollama.com`, { searchValue: searchValue })}
 				</button>
 			{/if}
src/lib/components/chat/Settings/Account.svelte  (View file @ 635951b5)

@@ -447,7 +447,7 @@
 							{/if}
 						</button>

-						<Tooltip content="Create new key">
+						<Tooltip content={$i18n.t('Create new key')}>
 							<button
 								class=" px-1.5 py-1 dark:hover:bg-gray-850transition rounded-lg"
 								on:click={() => {
@@ -479,7 +479,7 @@
 						>
 							<Plus strokeWidth="2" className=" size-3.5" />
-							Create new secret key</button
+							{$i18n.t('Create new secret key')}</button
 						>
 					{/if}
 				</div>
src/lib/components/chat/Settings/Connections.svelte  (View file @ 635951b5)

@@ -164,7 +164,7 @@
 							<div class="flex gap-1.5">
 								<input
 									class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
-									placeholder="Enter URL (e.g. http://localhost:11434)"
+									placeholder={$i18n.t('Enter URL (e.g. http://localhost:11434)')}
 									bind:value={url}
 								/>
src/lib/components/chat/ShareChatModal.svelte  (View file @ 635951b5)

@@ -97,9 +97,10 @@
 			<div class=" text-sm dark:text-gray-300 mb-1">
 				{#if chat.share_id}
 					<a href="/s/{chat.share_id}" target="_blank"
-						>You have shared this chat <span class=" underline">before</span>.</a
+						>{$i18n.t('You have shared this chat')}
+						<span class=" underline">{$i18n.t('before')}</span>.</a
 					>
-					Click here to
+					{$i18n.t('Click here to')}
 					<button
 						class="underline"
 						on:click={async () => {
@@ -108,11 +109,14 @@
 							if (res) {
 								chat = await getChatById(localStorage.token, chatId);
 							}
-						}}>delete this link</button
-					> and create a new shared link.
+						}}
+						>{$i18n.t('delete this link')}
+					</button>
+					{$i18n.t('and create a new shared link.')}
 				{:else}
-					Messages you send after creating your link won't be shared. Users with the URL will be
-					able to view the shared chat.
+					{$i18n.t(
+						"Messages you send after creating your link won't be shared. Users with the URL will be able to view the shared chat."
+					)}
 				{/if}
 			</div>
src/lib/components/common/ImagePreview.svelte  (View file @ 635951b5)

@@ -51,7 +51,7 @@
 			<button
 				class=" p-5"
 				on:click={() => {
-					downloadImage(src, 'Image.png');
+					downloadImage(src, src.substring(src.lastIndexOf('/') + 1));
 				}}
 			>
 				<svg
src/lib/components/documents/Settings/ChunkParams.svelte  (new file, 0 → 100644, View file @ 635951b5)
<script lang="ts">
import { getDocs } from '$lib/apis/documents';
import {
getRAGConfig,
updateRAGConfig,
getQuerySettings,
scanDocs,
updateQuerySettings,
resetVectorDB,
getEmbeddingConfig,
updateEmbeddingConfig,
getRerankingConfig,
updateRerankingConfig
} from '$lib/apis/rag';
import { documents, models } from '$lib/stores';
import { onMount, getContext } from 'svelte';
import { toast } from 'svelte-sonner';
import Tooltip from '$lib/components/common/Tooltip.svelte';
const i18n = getContext('i18n');
export let saveHandler: Function;
let scanDirLoading = false;
let updateEmbeddingModelLoading = false;
let updateRerankingModelLoading = false;
let showResetConfirm = false;
let chunkSize = 0;
let chunkOverlap = 0;
let pdfExtractImages = true;
const submitHandler = async () => {
const res = await updateRAGConfig(localStorage.token, {
pdf_extract_images: pdfExtractImages,
chunk: {
chunk_overlap: chunkOverlap,
chunk_size: chunkSize
}
});
};
onMount(async () => {
const res = await getRAGConfig(localStorage.token);
if (res) {
pdfExtractImages = res.pdf_extract_images;
chunkSize = res.chunk.chunk_size;
chunkOverlap = res.chunk.chunk_overlap;
}
});
</script>
<form
class="flex flex-col h-full justify-between space-y-3 text-sm"
on:submit|preventDefault={() => {
submitHandler();
saveHandler();
}}
>
<div class=" space-y-3 pr-1.5 overflow-y-scroll h-full max-h-[22rem]">
<div class=" ">
<div class=" text-sm font-medium">{$i18n.t('Chunk Params')}</div>
<div class=" flex">
<div class=" flex w-full justify-between">
<div class="self-center text-xs font-medium min-w-fit">{$i18n.t('Chunk Size')}</div>
<div class="self-center p-3">
<input
class=" w-full rounded-lg py-1.5 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
type="number"
placeholder={$i18n.t('Enter Chunk Size')}
bind:value={chunkSize}
autocomplete="off"
min="0"
/>
</div>
</div>
<div class="flex w-full">
<div class=" self-center text-xs font-medium min-w-fit">
{$i18n.t('Chunk Overlap')}
</div>
<div class="self-center p-3">
<input
class="w-full rounded-lg py-1.5 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
type="number"
placeholder={$i18n.t('Enter Chunk Overlap')}
bind:value={chunkOverlap}
autocomplete="off"
min="0"
/>
</div>
</div>
</div>
<div class="pr-2">
<div class="flex justify-between items-center text-xs">
<div class=" text-xs font-medium">{$i18n.t('PDF Extract Images (OCR)')}</div>
<button
class=" text-xs font-medium text-gray-500"
type="button"
on:click={() => {
pdfExtractImages = !pdfExtractImages;
}}>{pdfExtractImages ? $i18n.t('On') : $i18n.t('Off')}</button
>
</div>
</div>
</div>
</div>
<div class="flex justify-end pt-3 text-sm font-medium">
<button
class=" px-4 py-2 bg-emerald-700 hover:bg-emerald-800 text-gray-100 transition rounded-lg"
type="submit"
>
{$i18n.t('Save')}
</button>
</div>
</form>
src/lib/components/documents/Settings/General.svelte  (View file @ 635951b5)

This diff is collapsed and not shown here (+209 -393).