Commit c0b099da authored by Timothy J. Baek

feat: openai frontend refac

parent 17c66fde
@@ -82,6 +82,7 @@ async def proxy(path: str, request: Request, user=Depends(get_current_user)):
     headers = {}
     headers["Authorization"] = f"Bearer {app.state.OPENAI_API_KEY}"
+    headers["Content-Type"] = "application/json"

     try:
         r = requests.request(
...
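For orientation, here is a hedged TypeScript sketch of the round trip this header touches, assuming the frontend constant OPENAI_API_BASE_URL (used in the new helper below) points at this proxy and that the proxy is mounted under /openai/api; the path, model id, and message payload are illustrative and do not come from this commit. The browser sends its own session token, and the proxy replaces it with app.state.OPENAI_API_KEY and, after this change, also forwards Content-Type: application/json upstream.

```ts
// Hedged sketch only; the '/openai/api' prefix, model id, and message payload
// are assumptions for illustration and do not appear in this commit.
const res = await fetch('/openai/api/chat/completions', {
  method: 'POST',
  headers: {
    // Web UI session token; the proxy swaps this for app.state.OPENAI_API_KEY
    // and now also forwards 'Content-Type: application/json' upstream.
    Authorization: `Bearer ${localStorage.token}`,
    'Content-Type': 'application/json'
  },
  body: JSON.stringify({
    model: 'gpt-3.5-turbo',
    messages: [{ role: 'user', content: 'Hello' }]
  })
});
```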
@@ -206,3 +206,26 @@ export const getOpenAIModelsDirect = async (
       return a.name.localeCompare(b.name);
     });
 };
+
+export const generateOpenAIChatCompletion = async (token: string = '', body: object) => {
+  let error = null;
+
+  const res = await fetch(`${OPENAI_API_BASE_URL}/chat/completions`, {
+    method: 'POST',
+    headers: {
+      Authorization: `Bearer ${token}`,
+      'Content-Type': 'application/json'
+    },
+    body: JSON.stringify(body)
+  }).catch((err) => {
+    console.log(err);
+    error = err;
+    return null;
+  });
+
+  if (error) {
+    throw error;
+  }
+
+  return res;
+};
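A possible call site for the new helper, as a hedged sketch: the model id and message are placeholders, and OPENAI_API_BASE_URL is defined elsewhere in this module (not shown in the hunk). The helper rethrows the network error it caught and otherwise returns the raw Response, so streaming callers read res.body themselves, as the chat pages below do.

```ts
import { generateOpenAIChatCompletion } from '$lib/apis/openai';

// Sketch of a caller; placeholder model and message, error handling mirrors
// the chat pages below (log and fall back to null).
const res = await generateOpenAIChatCompletion(localStorage.token, {
  model: 'gpt-3.5-turbo', // placeholder model id
  stream: true,           // the chat pages request a streamed body
  messages: [{ role: 'user', content: 'Hello' }]
}).catch((error) => {
  console.log(error);
  return null;
});

if (res && res.ok) {
  // res.body is a ReadableStream of server-sent events when stream: true
}
```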
@@ -16,6 +16,7 @@
   import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
   import Navbar from '$lib/components/layout/Navbar.svelte';
   import { createNewChat, getChatList, updateChatById } from '$lib/apis/chats';
+  import { generateOpenAIChatCompletion } from '$lib/apis/openai';

   let stopResponseFlag = false;
   let autoScroll = true;
@@ -345,15 +346,7 @@
     window.scrollTo({ top: document.body.scrollHeight });

-    const res = await fetch(
-      `${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`,
-      {
-        method: 'POST',
-        headers: {
-          Authorization: `Bearer ${$settings.OPENAI_API_KEY}`,
-          'Content-Type': 'application/json'
-        },
-        body: JSON.stringify({
+    const res = await generateOpenAIChatCompletion(localStorage.token, {
       model: model,
       stream: true,
       messages: [
@@ -394,11 +387,6 @@
       num_ctx: $settings?.options?.num_ctx ?? undefined,
       frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
       max_tokens: $settings?.options?.num_predict ?? undefined
-        })
-      }
-    ).catch((err) => {
-      console.log(err);
-      return null;
     });

     if (res && res.ok) {
...
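Both chat pages truncate right after if (res && res.ok), where the streamed body is consumed. Below is a hedged sketch of that pattern, assuming splitStream(separator) from $lib/utils (imported in the sibling page that follows) returns a TransformStream, and that the upstream relays standard OpenAI SSE lines (data: {...}, terminated by data: [DONE]); the components' actual parsing may differ, and onDelta is a hypothetical callback.

```ts
import { splitStream } from '$lib/utils';

// Hedged sketch of reading the streamed completion; onDelta is a hypothetical
// callback that appends each token to the message being rendered.
const readCompletionStream = async (res: Response, onDelta: (text: string) => void) => {
  if (!res.body) return;

  const reader = res.body
    .pipeThrough(new TextDecoderStream())
    .pipeThrough(splitStream('\n')) // assumed to return a TransformStream split on '\n'
    .getReader();

  while (true) {
    const { value, done } = await reader.read();
    if (done) break;

    for (const line of (value ?? '').split('\n')) {
      if (!line.startsWith('data: ') || line === 'data: [DONE]') continue;
      const payload = JSON.parse(line.replace(/^data: /, ''));
      onDelta(payload?.choices?.[0]?.delta?.content ?? '');
    }
  }
};
```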
@@ -9,6 +9,8 @@
   import { models, modelfiles, user, settings, chats, chatId } from '$lib/stores';
   import { generateChatCompletion, generateTitle } from '$lib/apis/ollama';
+  import { generateOpenAIChatCompletion } from '$lib/apis/openai';
+
   import { copyToClipboard, splitStream } from '$lib/utils';
   import MessageInput from '$lib/components/chat/MessageInput.svelte';
@@ -362,15 +364,7 @@
     window.scrollTo({ top: document.body.scrollHeight });

-    const res = await fetch(
-      `${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`,
-      {
-        method: 'POST',
-        headers: {
-          Authorization: `Bearer ${$settings.OPENAI_API_KEY}`,
-          'Content-Type': 'application/json'
-        },
-        body: JSON.stringify({
+    const res = await generateOpenAIChatCompletion(localStorage.token, {
       model: model,
       stream: true,
       messages: [
@@ -411,11 +405,6 @@
       num_ctx: $settings?.options?.num_ctx ?? undefined,
       frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
       max_tokens: $settings?.options?.num_predict ?? undefined
-        })
-      }
-    ).catch((err) => {
-      console.log(err);
-      return null;
     });

     if (res && res.ok) {
...