chenpangpang / open-webui / Commits

Commit d077b3dc
Authored Jun 05, 2024 by Timothy J. Baek

fix: ollama version request when ollama api is disabled

Parent: 1d6bbdf9
Showing 2 changed files with 51 additions and 44 deletions.

backend/apps/ollama/main.py    +47  -44
backend/constants.py            +4   -0
backend/apps/ollama/main.py

@@ -274,54 +274,57 @@ async def get_ollama_tags(

 @app.get("/api/version")
 @app.get("/api/version/{url_idx}")
 async def get_ollama_versions(url_idx: Optional[int] = None):
-    if url_idx == None:
-        # returns lowest version
-        tasks = [
-            fetch_url(f"{url}/api/version")
-            for url in app.state.config.OLLAMA_BASE_URLS
-        ]
-        responses = await asyncio.gather(*tasks)
-        responses = list(filter(lambda x: x is not None, responses))
-
-        if len(responses) > 0:
-            lowest_version = min(
-                responses,
-                key=lambda x: tuple(
-                    map(int, re.sub(r"^v|-.*", "", x["version"]).split("."))
-                ),
-            )
-
-            return {"version": lowest_version["version"]}
-        else:
-            raise HTTPException(
-                status_code=500,
-                detail=ERROR_MESSAGES.OLLAMA_NOT_FOUND,
-            )
-    else:
-        url = app.state.config.OLLAMA_BASE_URLS[url_idx]
-
-        r = None
-        try:
-            r = requests.request(method="GET", url=f"{url}/api/version")
-            r.raise_for_status()
-
-            return r.json()
-        except Exception as e:
-            log.exception(e)
-            error_detail = "Open WebUI: Server Connection Error"
-
-            if r is not None:
-                try:
-                    res = r.json()
-                    if "error" in res:
-                        error_detail = f"Ollama: {res['error']}"
-                except:
-                    error_detail = f"Ollama: {e}"
-
-            raise HTTPException(
-                status_code=r.status_code if r else 500,
-                detail=error_detail,
-            )
+    if app.state.config.ENABLE_OLLAMA_API:
+        if url_idx == None:
+            # returns lowest version
+            tasks = [
+                fetch_url(f"{url}/api/version")
+                for url in app.state.config.OLLAMA_BASE_URLS
+            ]
+            responses = await asyncio.gather(*tasks)
+            responses = list(filter(lambda x: x is not None, responses))
+
+            if len(responses) > 0:
+                lowest_version = min(
+                    responses,
+                    key=lambda x: tuple(
+                        map(int, re.sub(r"^v|-.*", "", x["version"]).split("."))
+                    ),
+                )
+
+                return {"version": lowest_version["version"]}
+            else:
+                raise HTTPException(
+                    status_code=500,
+                    detail=ERROR_MESSAGES.OLLAMA_NOT_FOUND,
+                )
+        else:
+            url = app.state.config.OLLAMA_BASE_URLS[url_idx]
+
+            r = None
+            try:
+                r = requests.request(method="GET", url=f"{url}/api/version")
+                r.raise_for_status()
+
+                return r.json()
+            except Exception as e:
+                log.exception(e)
+                error_detail = "Open WebUI: Server Connection Error"
+
+                if r is not None:
+                    try:
+                        res = r.json()
+                        if "error" in res:
+                            error_detail = f"Ollama: {res['error']}"
+                    except:
+                        error_detail = f"Ollama: {e}"
+
+                raise HTTPException(
+                    status_code=r.status_code if r else 500,
+                    detail=error_detail,
+                )
+    else:
+        return {"version": False}
 
 
 class ModelNameForm(BaseModel):
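For readers tracing the "returns lowest version" logic above: the min(...) key strips a leading "v" and any pre-release suffix before comparing the version parts numerically. A small standalone sketch of that normalization (the sample version strings are made up for illustration):

import re

def version_key(v: str) -> tuple:
    # Mirror of the key=lambda in get_ollama_versions: drop a leading "v"
    # and anything after a "-" (e.g. "-rc1"), then compare numerically.
    return tuple(map(int, re.sub(r"^v|-.*", "", v).split(".")))

versions = ["0.1.42", "v0.1.38", "0.2.0-rc1"]
print(min(versions, key=version_key))  # -> "v0.1.38"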
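A minimal client-side sketch of the behavior change, assuming an Open WebUI instance with the Ollama sub-app mounted under /ollama and bearer-token auth (the URL, token, and helper name are illustrative, not part of this commit): with ENABLE_OLLAMA_API on, the endpoint reports the lowest version across the configured servers; with it off, it now answers {"version": False} instead of failing with a connection error.

from typing import Optional

import requests

WEBUI_URL = "http://localhost:8080"  # assumed local deployment
TOKEN = "your-api-token"             # placeholder

def fetch_ollama_version() -> Optional[str]:
    # Hypothetical helper: ask the backend for the Ollama version.
    r = requests.get(
        f"{WEBUI_URL}/ollama/api/version",
        headers={"Authorization": f"Bearer {TOKEN}"},
        timeout=10,
    )
    r.raise_for_status()
    version = r.json().get("version")
    # After this commit, {"version": False} signals that the Ollama API is disabled.
    return version if version else None

print(fetch_ollama_version() or "Ollama API disabled")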
backend/constants.py

@@ -84,3 +84,7 @@ class ERROR_MESSAGES(str, Enum):
     WEB_SEARCH_ERROR = (
         lambda err="": f"{err if err else 'Oops! Something went wrong while searching the web.'}"
     )
+
+    OLLAMA_API_DISABLED = (
+        "The Ollama API is disabled. Please enable it to use this feature."
+    )
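The hunk above only adds the message text; nothing in this commit shows where OLLAMA_API_DISABLED is raised. A hedged sketch of how an ERROR_MESSAGES entry like this is typically surfaced from a FastAPI route (the guard function below is hypothetical):

from fastapi import HTTPException

from constants import ERROR_MESSAGES  # backend/constants.py, as in the hunk above

def ensure_ollama_api_enabled(enabled: bool) -> None:
    # Hypothetical guard: reject the request when the Ollama API is switched off.
    if not enabled:
        raise HTTPException(
            status_code=400,
            detail=ERROR_MESSAGES.OLLAMA_API_DISABLED,
        )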