Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
chenpangpang
open-webui
Commits
21ca55dd
Commit
21ca55dd
authored
May 21, 2024
by
Timothy J. Baek
Browse files
feat: toggleable ollama
parent
af022947
Changes
4
Show whitespace changes
Inline
Side-by-side
Showing
4 changed files
with
216 additions
and
92 deletions
+216
-92
backend/apps/ollama/main.py
backend/apps/ollama/main.py
+34
-7
backend/config.py
backend/config.py
+7
-0
src/lib/apis/ollama/index.ts
src/lib/apis/ollama/index.ts
+67
-0
src/lib/components/chat/Settings/Connections.svelte
src/lib/components/chat/Settings/Connections.svelte
+108
-85
No files found.
backend/apps/ollama/main.py
View file @
21ca55dd
...
...
@@ -43,6 +43,7 @@ from utils.utils import (
from
config
import
(
SRC_LOG_LEVELS
,
OLLAMA_BASE_URLS
,
ENABLE_OLLAMA_API
,
ENABLE_MODEL_FILTER
,
MODEL_FILTER_LIST
,
UPLOAD_DIR
,
...
...
@@ -67,6 +68,8 @@ app.state.config = AppConfig()
# Seed the mutable per-app config from the module-level defaults in config.py.
app.state.config.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
app.state.config.MODEL_FILTER_LIST = MODEL_FILTER_LIST
app.state.config.ENABLE_OLLAMA_API = ENABLE_OLLAMA_API
app.state.config.OLLAMA_BASE_URLS = OLLAMA_BASE_URLS

# Model cache keyed by model name; populated by get_all_models().
app.state.MODELS = {}
...
...
@@ -96,6 +99,21 @@ async def get_status():
return
{
"status"
:
True
}
@app.get("/config")
async def get_config(user=Depends(get_admin_user)):
    """Report whether the Ollama API integration is enabled (admin only)."""
    enabled = app.state.config.ENABLE_OLLAMA_API
    return {"ENABLE_OLLAMA_API": enabled}
class OllamaConfigForm(BaseModel):
    # Toggle for the Ollama integration; defaults to None when the
    # request body omits the field.
    enable_ollama_api: Optional[bool] = None
@app.post("/config/update")
async def update_config(form_data: OllamaConfigForm, user=Depends(get_admin_user)):
    """Update the Ollama enablement flag (admin only).

    The form field is Optional[bool] = None, so a request that omits it
    must not clobber the stored boolean with None; in that case the
    current value is left untouched and simply echoed back.
    """
    if form_data.enable_ollama_api is not None:
        app.state.config.ENABLE_OLLAMA_API = form_data.enable_ollama_api
    return {"ENABLE_OLLAMA_API": app.state.config.ENABLE_OLLAMA_API}
@app.get("/urls")
async def get_ollama_api_urls(user=Depends(get_admin_user)):
    """List the configured Ollama base URLs (admin only)."""
    urls = app.state.config.OLLAMA_BASE_URLS
    return {"OLLAMA_BASE_URLS": urls}
...
...
@@ -156,15 +174,24 @@ def merge_models_lists(model_lists):
async def get_all_models():
    """Fetch and merge the /api/tags model lists from every configured
    Ollama backend.

    When ENABLE_OLLAMA_API is off, all network calls are skipped and an
    empty model list is produced. In both cases app.state.MODELS is
    refreshed, keyed by model name.
    """
    log.info("get_all_models()")

    if app.state.config.ENABLE_OLLAMA_API:
        # Query every backend concurrently; the lambda below tolerates a
        # falsy response, so presumably fetch_url returns None on failure
        # rather than raising — TODO confirm against its definition.
        tasks = [
            fetch_url(f"{url}/api/tags")
            for url in app.state.config.OLLAMA_BASE_URLS
        ]
        responses = await asyncio.gather(*tasks)

        models = {
            "models": merge_models_lists(
                map(
                    lambda response: response["models"] if response else None,
                    responses,
                )
            )
        }
    else:
        models = {"models": []}

    app.state.MODELS = {model["model"]: model for model in models["models"]}

    return models
...
...
backend/config.py
View file @
21ca55dd
...
...
@@ -384,6 +384,13 @@ if not os.path.exists(LITELLM_CONFIG_PATH):
# OLLAMA_BASE_URL
####################################
# Persisted toggle for the Ollama integration; the ENABLE_OLLAMA_API env var
# ("true"/"false", case-insensitive) seeds it, defaulting to enabled.
ENABLE_OLLAMA_API = PersistentConfig(
    "ENABLE_OLLAMA_API",
    "ollama.enable",
    os.environ.get("ENABLE_OLLAMA_API", "True").lower() == "true",
)

# Plain (non-persisted) base URL for the Ollama HTTP API.
OLLAMA_API_BASE_URL = os.environ.get(
    "OLLAMA_API_BASE_URL", "http://localhost:11434/api"
)
...
...
src/lib/apis/ollama/index.ts
View file @
21ca55dd
import
{
OLLAMA_API_BASE_URL
}
from
'
$lib/constants
'
;
import
{
promptTemplate
}
from
'
$lib/utils
'
;
export
const
getOllamaConfig
=
async
(
token
:
string
=
''
)
=>
{
let
error
=
null
;
const
res
=
await
fetch
(
`
${
OLLAMA_API_BASE_URL
}
/config`
,
{
method
:
'
GET
'
,
headers
:
{
Accept
:
'
application/json
'
,
'
Content-Type
'
:
'
application/json
'
,
...(
token
&&
{
authorization
:
`Bearer
${
token
}
`
})
}
})
.
then
(
async
(
res
)
=>
{
if
(
!
res
.
ok
)
throw
await
res
.
json
();
return
res
.
json
();
})
.
catch
((
err
)
=>
{
console
.
log
(
err
);
if
(
'
detail
'
in
err
)
{
error
=
err
.
detail
;
}
else
{
error
=
'
Server connection failed
'
;
}
return
null
;
});
if
(
error
)
{
throw
error
;
}
return
res
;
};
export
const
updateOllamaConfig
=
async
(
token
:
string
=
''
,
enable_ollama_api
:
boolean
)
=>
{
let
error
=
null
;
const
res
=
await
fetch
(
`
${
OLLAMA_API_BASE_URL
}
/config/update`
,
{
method
:
'
POST
'
,
headers
:
{
Accept
:
'
application/json
'
,
'
Content-Type
'
:
'
application/json
'
,
...(
token
&&
{
authorization
:
`Bearer
${
token
}
`
})
},
body
:
JSON
.
stringify
({
enable_ollama_api
:
enable_ollama_api
})
})
.
then
(
async
(
res
)
=>
{
if
(
!
res
.
ok
)
throw
await
res
.
json
();
return
res
.
json
();
})
.
catch
((
err
)
=>
{
console
.
log
(
err
);
if
(
'
detail
'
in
err
)
{
error
=
err
.
detail
;
}
else
{
error
=
'
Server connection failed
'
;
}
return
null
;
});
if
(
error
)
{
throw
error
;
}
return
res
;
};
export
const
getOllamaUrls
=
async
(
token
:
string
=
''
)
=>
{
let
error
=
null
;
...
...
src/lib/components/chat/Settings/Connections.svelte
View file @
21ca55dd
...
...
@@ -3,7 +3,13 @@
import { createEventDispatcher, onMount, getContext } from 'svelte';
const dispatch = createEventDispatcher();
import { getOllamaUrls, getOllamaVersion, updateOllamaUrls } from '$lib/apis/ollama';
import {
getOllamaConfig,
getOllamaUrls,
getOllamaVersion,
updateOllamaConfig,
updateOllamaUrls
} from '$lib/apis/ollama';
import {
getOpenAIConfig,
getOpenAIKeys,
...
...
@@ -26,6 +32,7 @@
let OPENAI_API_BASE_URLS = [''];
let ENABLE_OPENAI_API = false;
let ENABLE_OLLAMA_API = false;
const updateOpenAIHandler = async () => {
OPENAI_API_BASE_URLS = await updateOpenAIUrls(localStorage.token, OPENAI_API_BASE_URLS);
...
...
@@ -50,10 +57,13 @@
onMount(async () => {
if ($user.role === 'admin') {
OLLAMA_BASE_URLS = await getOllamaUrls(localStorage.token);
const ollamaConfig = await getOllamaConfig(localStorage.token);
const openaiConfig = await getOpenAIConfig(localStorage.token);
const config = await getOpenAIConfig(localStorage.token);
ENABLE_OPENAI_API = config.ENABLE_OPENAI_API;
ENABLE_OPENAI_API = openaiConfig.ENABLE_OPENAI_API;
ENABLE_OLLAMA_API = ollamaConfig.ENABLE_OLLAMA_API;
OLLAMA_BASE_URLS = await getOllamaUrls(localStorage.token);
OPENAI_API_BASE_URLS = await getOpenAIUrls(localStorage.token);
OPENAI_API_KEYS = await getOpenAIKeys(localStorage.token);
...
...
@@ -161,8 +171,20 @@
<hr class=" dark:border-gray-700" />
<div>
<div class=" mb-2.5 text-sm font-medium">{$i18n.t('Ollama Base URL')}</div>
<div class="pr-1.5 space-y-2">
<div class="flex justify-between items-center text-sm">
<div class=" font-medium">{$i18n.t('Ollama API')}</div>
<div class="mt-1">
<Switch
bind:state={ENABLE_OLLAMA_API}
on:change={async () => {
updateOllamaConfig(localStorage.token, ENABLE_OLLAMA_API);
}}
/>
</div>
</div>
{#if ENABLE_OLLAMA_API}
<div class="flex w-full gap-1.5">
<div class="flex-1 flex flex-col gap-2">
{#each OLLAMA_BASE_URLS as url, idx}
...
...
@@ -216,9 +238,9 @@
{/each}
</div>
<div class="">
<div class="
flex
">
<button
class="p-2
.5
bg-gray-200 hover:bg-gray-300 dark:bg-gray-
85
0 dark:hover:bg-gray-8
0
0 rounded-lg transition"
class="
self-center
p-2 bg-gray-200 hover:bg-gray-300 dark:bg-gray-
90
0 dark:hover:bg-gray-8
5
0 rounded-lg transition"
on:click={() => {
updateOllamaUrlsHandler();
}}
...
...
@@ -250,6 +272,7 @@
{$i18n.t('Click here for help.')}
</a>
</div>
{/if}
</div>
</div>
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment