chenpangpang / open-webui · Commits

Commit 9b6dca3d authored Feb 22, 2024 by Timothy J. Baek

feat: litellm frontend integration

Parent: de0084c8

Showing 8 changed files with 134 additions and 62 deletions
src/lib/apis/litellm/index.ts                  +42   -0
src/lib/apis/ollama/index.ts                    +5   -3
src/lib/apis/openai/index.ts                    +8   -4
src/lib/components/chat/ModelSelector.svelte    +2   -2
src/lib/components/chat/SettingsModal.svelte    +9   -0
src/lib/constants.ts                            +2   -0
src/routes/(app)/+layout.svelte                 +8   -0
src/routes/(app)/+page.svelte                  +58  -53
src/lib/apis/litellm/index.ts  (new file, 0 → 100644)

+import { LITELLM_API_BASE_URL } from '$lib/constants';
+
+export const getLiteLLMModels = async (token: string = '') => {
+	let error = null;
+
+	const res = await fetch(`${LITELLM_API_BASE_URL}/v1/models`, {
+		method: 'GET',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			...(token && { authorization: `Bearer ${token}` })
+		}
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			console.log(err);
+			error = `OpenAI: ${err?.error?.message ?? 'Network Problem'}`;
+			return [];
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	const models = Array.isArray(res) ? res : res?.data ?? null;
+
+	return models
+		? models
+				.map((model) => ({
+					id: model.id,
+					name: model.name ?? model.id,
+					external: true,
+					source: 'litellm'
+				}))
+				.sort((a, b) => {
+					return a.name.localeCompare(b.name);
+				})
+		: models;
+};
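For orientation, a minimal sketch of how a caller might consume this new helper; the wrapper function below is illustrative, not part of the commit:

// Hypothetical usage sketch — not from this commit.
import { getLiteLLMModels } from '$lib/apis/litellm';

const loadLiteLLMModels = async () => {
	// getLiteLLMModels throws a user-facing error string on failure, so catch and fall back.
	const models = await getLiteLLMModels(localStorage.token).catch((error) => {
		console.log(error);
		return [];
	});
	// Each entry has the shape { id, name, external: true, source: 'litellm' }.
	return models;
};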
src/lib/apis/ollama/index.ts

@@ -128,9 +128,11 @@ export const getOllamaModels = async (token: string = '') => {
 		throw error;
 	}
 
-	return (res?.models ?? []).sort((a, b) => {
-		return a.name.localeCompare(b.name);
-	});
+	return (res?.models ?? [])
+		.map((model) => ({ id: model.model, name: model.name ?? model.model, ...model }))
+		.sort((a, b) => {
+			return a.name.localeCompare(b.name);
+		});
 };
 
 // TODO: migrate to backend
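The new map step gives Ollama entries the same { id, name } keys the external providers expose, so the UI can key on id everywhere. An illustrative example (the sample tag data below is assumed, not taken from the commit):

// Illustrative input/output shapes only — sample data is made up.
const res = { models: [{ name: 'mistral:latest', model: 'mistral:latest', size: 4109865159 }] };

const models = (res?.models ?? [])
	.map((model) => ({ id: model.model, name: model.name ?? model.model, ...model }))
	.sort((a, b) => a.name.localeCompare(b.name));
// => [{ id: 'mistral:latest', name: 'mistral:latest', model: 'mistral:latest', size: 4109865159 }]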
src/lib/apis/openai/index.ts

@@ -163,7 +163,7 @@ export const getOpenAIModels = async (token: string = '') => {
 	return models
 		? models
-				.map((model) => ({ name: model.id, external: true }))
+				.map((model) => ({ id: model.id, name: model.name ?? model.id, external: true }))
 				.sort((a, b) => {
 					return a.name.localeCompare(b.name);
 				})
@@ -200,17 +200,21 @@ export const getOpenAIModelsDirect = async (
 	const models = Array.isArray(res) ? res : res?.data ?? null;
 
 	return models
-		.map((model) => ({ name: model.id, external: true }))
+		.map((model) => ({ id: model.id, name: model.name ?? model.id, external: true }))
 		.filter((model) => (base_url.includes('openai') ? model.name.includes('gpt') : true))
 		.sort((a, b) => {
 			return a.name.localeCompare(b.name);
 		});
 };
 
-export const generateOpenAIChatCompletion = async (token: string = '', body: object) => {
+export const generateOpenAIChatCompletion = async (
+	token: string = '',
+	body: object,
+	url: string = OPENAI_API_BASE_URL
+) => {
 	let error = null;
 
-	const res = await fetch(`${OPENAI_API_BASE_URL}/chat/completions`, {
+	const res = await fetch(`${url}/chat/completions`, {
 		method: 'POST',
 		headers: {
 			Authorization: `Bearer ${token}`,
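Because url defaults to OPENAI_API_BASE_URL, existing call sites keep working unchanged; only LiteLLM-backed calls need to pass the proxy base. A hedged usage sketch (the request body here is a minimal assumption):

// Hypothetical call targeting the LiteLLM proxy mount — not from this commit.
import { generateOpenAIChatCompletion } from '$lib/apis/openai';
import { LITELLM_API_BASE_URL } from '$lib/constants';

const res = await generateOpenAIChatCompletion(
	localStorage.token,
	{ model: 'gpt-3.5-turbo', stream: true, messages: [{ role: 'user', content: 'Hello' }] },
	`${LITELLM_API_BASE_URL}/v1` // omit this argument to hit OPENAI_API_BASE_URL as before
);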
src/lib/components/chat/ModelSelector.svelte

@@ -25,7 +25,7 @@
 	$: if (selectedModels.length > 0 && $models.length > 0) {
 		selectedModels = selectedModels.map((model) =>
-			$models.map((m) => m.name).includes(model) ? model : ''
+			$models.map((m) => m.id).includes(model) ? model : ''
 		);
 	}
 </script>
@@ -45,7 +45,7 @@
 					{#if model.name === 'hr'}
 						<hr />
 					{:else}
-						<option value={model.name} class="text-gray-700 text-lg"
+						<option value={model.id} class="text-gray-700 text-lg"
 							>{model.name +
 								`${model.size ? ` (${(model.size / 1024 ** 3).toFixed(1)}GB)` : ''}`}</option
 						>
src/lib/components/chat/SettingsModal.svelte

@@ -4,6 +4,7 @@
 	import { getOllamaModels } from '$lib/apis/ollama';
 	import { getOpenAIModels } from '$lib/apis/openai';
+	import { getLiteLLMModels } from '$lib/apis/litellm';
 
 	import Modal from '../common/Modal.svelte';
 	import Account from './Settings/Account.svelte';
@@ -41,7 +42,15 @@
 				console.log(error);
 				return null;
 			});
 
 			models.push(...(openAIModels ? [{ name: 'hr' }, ...openAIModels] : []));
+
+			const liteLLMModels = await getLiteLLMModels(localStorage.token).catch((error) => {
+				console.log(error);
+				return null;
+			});
+
+			models.push(...(liteLLMModels ? [{ name: 'hr' }, ...liteLLMModels] : []));
 		}
 
 		return models;
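The { name: 'hr' } sentinel entries are what ModelSelector renders as <hr /> separators between provider groups. A sketch of the merged array this produces (the model names are made up):

// Illustrative shape of the merged models list; entries are assumed examples.
const models = [
	{ id: 'mistral:latest', name: 'mistral:latest' },                        // Ollama
	{ name: 'hr' },                                                          // separator
	{ id: 'gpt-4', name: 'gpt-4', external: true },                          // OpenAI
	{ name: 'hr' },                                                          // separator
	{ id: 'claude-2', name: 'claude-2', external: true, source: 'litellm' }  // LiteLLM
];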
src/lib/constants.ts

@@ -5,6 +5,8 @@ export const WEBUI_NAME = 'Open WebUI';
 export const WEBUI_BASE_URL = dev ? `http://${location.hostname}:8080` : ``;
 
 export const WEBUI_API_BASE_URL = `${WEBUI_BASE_URL}/api/v1`;
+
+export const LITELLM_API_BASE_URL = `${WEBUI_BASE_URL}/litellm/api`;
 export const OLLAMA_API_BASE_URL = `${WEBUI_BASE_URL}/ollama/api`;
 export const OPENAI_API_BASE_URL = `${WEBUI_BASE_URL}/openai/api`;
 export const AUDIO_API_BASE_URL = `${WEBUI_BASE_URL}/audio/api/v1`;
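Like the other API bases, the LiteLLM base resolves against the WebUI origin, so requests go through the backend's /litellm/api mount rather than to a LiteLLM server directly. For example, in dev mode (assuming location.hostname is 'localhost'):

// Illustrative resolution under the stated assumption.
const WEBUI_BASE_URL = 'http://localhost:8080';
const LITELLM_API_BASE_URL = `${WEBUI_BASE_URL}/litellm/api`;
console.log(`${LITELLM_API_BASE_URL}/v1/models`);
// => http://localhost:8080/litellm/api/v1/models  (the URL getLiteLLMModels fetches)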
src/routes/(app)/+layout.svelte

@@ -12,6 +12,7 @@
 	import { getPrompts } from '$lib/apis/prompts';
 	import { getOpenAIModels } from '$lib/apis/openai';
+	import { getLiteLLMModels } from '$lib/apis/litellm';
 
 	import {
 		user,
@@ -59,6 +60,13 @@
 		models.push(...(openAIModels ? [{ name: 'hr' }, ...openAIModels] : []));
 
+		const liteLLMModels = await getLiteLLMModels(localStorage.token).catch((error) => {
+			console.log(error);
+			return null;
+		});
+
+		models.push(...(liteLLMModels ? [{ name: 'hr' }, ...liteLLMModels] : []));
+
 		return models;
 	};
src/routes/(app)/+page.svelte

@@ -36,6 +36,7 @@
 	import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
 	import Navbar from '$lib/components/layout/Navbar.svelte';
 	import { RAGTemplate } from '$lib/utils/rag';
+	import { LITELLM_API_BASE_URL, OPENAI_API_BASE_URL } from '$lib/constants';
 
 	let stopResponseFlag = false;
 	let autoScroll = true;
@@ -277,9 +278,8 @@
 		}
 
 		await Promise.all(
-			selectedModels.map(async (model) => {
-				console.log(model);
-				const modelTag = $models.filter((m) => m.name === model).at(0);
+			selectedModels.map(async (modelId) => {
+				const model = $models.filter((m) => m.id === modelId).at(0);
 
 				// Create response message
 				let responseMessageId = uuidv4();
@@ -289,7 +289,7 @@
 					childrenIds: [],
 					role: 'assistant',
 					content: '',
-					model: model,
+					model: model.id,
 					timestamp: Math.floor(Date.now() / 1000) // Unix epoch
 				};
@@ -305,12 +305,12 @@
 					];
 				}
 
-				if (modelTag?.external) {
+				if (model?.external) {
 					await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
-				} else if (modelTag) {
+				} else if (model) {
 					await sendPromptOllama(model, prompt, responseMessageId, _chatId);
 				} else {
-					toast.error(`Model ${model} not found`);
+					toast.error(`Model ${model.id} not found`);
 				}
 			})
 		);
@@ -319,6 +319,7 @@
 	};
 
 	const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
+		model = model.id;
 		const responseMessage = history.messages[responseMessageId];
 
 		// Wait until history/message have been updated
@@ -530,54 +531,58 @@
 		const responseMessage = history.messages[responseMessageId];
 		scrollToBottom();
 
-		const res = await generateOpenAIChatCompletion(localStorage.token, {
-			model: model,
+		const res = await generateOpenAIChatCompletion(
+			localStorage.token,
+			{
+				model: model.id,
 				stream: true,
 				messages: [
 					$settings.system
 						? {
 								role: 'system',
 								content: $settings.system
 						  }
 						: undefined,
 					...messages
 				]
 					.filter((message) => message)
 					.map((message, idx, arr) => ({
 						role: message.role,
 						...(message.files?.filter((file) => file.type === 'image').length > 0 ?? false
 							? {
 									content: [
 										{
 											type: 'text',
 											text:
 												arr.length - 1 !== idx
 													? message.content
 													: message?.raContent ?? message.content
 										},
 										...message.files
 											.filter((file) => file.type === 'image')
 											.map((file) => ({
 												type: 'image_url',
 												image_url: {
 													url: file.url
 												}
 											}))
 									]
 							  }
 							: {
 									content:
 										arr.length - 1 !== idx ? message.content : message?.raContent ?? message.content
 							  })
 					})),
 				seed: $settings?.options?.seed ?? undefined,
 				stop: $settings?.options?.stop ?? undefined,
 				temperature: $settings?.options?.temperature ?? undefined,
 				top_p: $settings?.options?.top_p ?? undefined,
 				num_ctx: $settings?.options?.num_ctx ?? undefined,
 				frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
 				max_tokens: $settings?.options?.num_predict ?? undefined
-		});
+			},
+			model.source === 'litellm' ? `${LITELLM_API_BASE_URL}/v1` : `${OPENAI_API_BASE_URL}`
+		);
 
 		if (res && res.ok) {
 			const reader = res.body
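Taken together, the +page.svelte changes switch chat dispatch from model-name strings to model objects keyed by id, with the OpenAI-compatible base URL chosen by the model's source. A condensed paraphrase of that flow (not a literal excerpt; the toast below uses modelId since model is undefined in the final branch):

// Condensed paraphrase of the dispatch logic above — illustrative only.
const model = $models.filter((m) => m.id === modelId).at(0);

if (model?.external) {
	// Inside, generateOpenAIChatCompletion is called with
	// model.source === 'litellm' ? `${LITELLM_API_BASE_URL}/v1` : `${OPENAI_API_BASE_URL}`.
	await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
} else if (model) {
	await sendPromptOllama(model, prompt, responseMessageId, _chatId);
} else {
	toast.error(`Model ${modelId} not found`);
}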