Unverified Commit 7071716f authored by Timothy Jaeryang Baek, committed by GitHub

Merge pull request #408 from ollama-webui/main

rag
parents b050013c c55c8728
......@@ -10,7 +10,6 @@ import uuid
from apps.web.models.users import Users
from utils.utils import get_password_hash, get_current_user, create_token
from utils.misc import get_gravatar_url, validate_email_format
from constants import ERROR_MESSAGES
......@@ -28,9 +27,9 @@ class SetDefaultModelsForm(BaseModel):
@router.post("/default/models", response_model=str)
async def set_global_default_models(
request: Request, form_data: SetDefaultModelsForm, user=Depends(get_current_user)
):
async def set_global_default_models(request: Request,
form_data: SetDefaultModelsForm,
user=Depends(get_current_user)):
if user.role == "admin":
request.app.state.DEFAULT_MODELS = form_data.models
return request.app.state.DEFAULT_MODELS
......
......@@ -24,7 +24,9 @@ router = APIRouter()
@router.get("/", response_model=List[ModelfileResponse])
async def get_modelfiles(skip: int = 0, limit: int = 50, user=Depends(get_current_user)):
async def get_modelfiles(skip: int = 0,
limit: int = 50,
user=Depends(get_current_user)):
return Modelfiles.get_modelfiles(skip, limit)
......@@ -34,9 +36,8 @@ async def get_modelfiles(skip: int = 0, limit: int = 50, user=Depends(get_curren
@router.post("/create", response_model=Optional[ModelfileResponse])
async def create_new_modelfile(
form_data: ModelfileForm, user=Depends(get_current_user)
):
async def create_new_modelfile(form_data: ModelfileForm,
user=Depends(get_current_user)):
if user.role != "admin":
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
......@@ -49,9 +50,9 @@ async def create_new_modelfile(
return ModelfileResponse(
**{
**modelfile.model_dump(),
"modelfile": json.loads(modelfile.modelfile),
}
)
"modelfile":
json.loads(modelfile.modelfile),
})
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
......@@ -65,16 +66,17 @@ async def create_new_modelfile(
@router.post("/", response_model=Optional[ModelfileResponse])
async def get_modelfile_by_tag_name(form_data: ModelfileTagNameForm, user=Depends(get_current_user)):
async def get_modelfile_by_tag_name(form_data: ModelfileTagNameForm,
user=Depends(get_current_user)):
modelfile = Modelfiles.get_modelfile_by_tag_name(form_data.tag_name)
if modelfile:
return ModelfileResponse(
**{
**modelfile.model_dump(),
"modelfile": json.loads(modelfile.modelfile),
}
)
"modelfile":
json.loads(modelfile.modelfile),
})
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
......@@ -88,9 +90,8 @@ async def get_modelfile_by_tag_name(form_data: ModelfileTagNameForm, user=Depend
@router.post("/update", response_model=Optional[ModelfileResponse])
async def update_modelfile_by_tag_name(
form_data: ModelfileUpdateForm, user=Depends(get_current_user)
):
async def update_modelfile_by_tag_name(form_data: ModelfileUpdateForm,
user=Depends(get_current_user)):
if user.role != "admin":
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
......@@ -104,15 +105,14 @@ async def update_modelfile_by_tag_name(
}
modelfile = Modelfiles.update_modelfile_by_tag_name(
form_data.tag_name, updated_modelfile
)
form_data.tag_name, updated_modelfile)
return ModelfileResponse(
**{
**modelfile.model_dump(),
"modelfile": json.loads(modelfile.modelfile),
}
)
"modelfile":
json.loads(modelfile.modelfile),
})
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
......@@ -126,9 +126,8 @@ async def update_modelfile_by_tag_name(
@router.delete("/delete", response_model=bool)
async def delete_modelfile_by_tag_name(
form_data: ModelfileTagNameForm, user=Depends(get_current_user)
):
async def delete_modelfile_by_tag_name(form_data: ModelfileTagNameForm,
user=Depends(get_current_user)):
if user.role != "admin":
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
......
......@@ -6,7 +6,6 @@ from fastapi import APIRouter
from pydantic import BaseModel
import json
from apps.web.models.prompts import Prompts, PromptForm, PromptModel
from utils.utils import get_current_user
......@@ -30,7 +29,8 @@ async def get_prompts(user=Depends(get_current_user)):
@router.post("/create", response_model=Optional[PromptModel])
async def create_new_prompt(form_data: PromptForm, user=Depends(get_current_user)):
async def create_new_prompt(form_data: PromptForm,
user=Depends(get_current_user)):
if user.role != "admin":
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
......@@ -79,9 +79,9 @@ async def get_prompt_by_command(command: str, user=Depends(get_current_user)):
@router.post("/{command}/update", response_model=Optional[PromptModel])
async def update_prompt_by_command(
command: str, form_data: PromptForm, user=Depends(get_current_user)
):
async def update_prompt_by_command(command: str,
form_data: PromptForm,
user=Depends(get_current_user)):
if user.role != "admin":
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
......@@ -104,7 +104,8 @@ async def update_prompt_by_command(
@router.delete("/{command}/delete", response_model=bool)
async def delete_prompt_by_command(command: str, user=Depends(get_current_user)):
async def delete_prompt_by_command(command: str,
user=Depends(get_current_user)):
if user.role != "admin":
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
......
......@@ -8,14 +8,12 @@ from pydantic import BaseModel
import time
import uuid
from apps.web.models.users import UserModel, UserRoleUpdateForm, Users
from apps.web.models.users import UserModel, UserUpdateForm, UserRoleUpdateForm, Users
from apps.web.models.auths import Auths
from utils.utils import get_current_user
from utils.utils import get_current_user, get_password_hash
from constants import ERROR_MESSAGES
router = APIRouter()
############################
......@@ -57,6 +55,62 @@ async def update_user_role(
)
############################
# UpdateUserById
############################
@router.post("/{user_id}/update", response_model=Optional[UserModel])
async def update_user_by_id(
user_id: str, form_data: UserUpdateForm, session_user=Depends(get_current_user)
):
if session_user.role != "admin":
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
user = Users.get_user_by_id(user_id)
if user:
if form_data.email.lower() != user.email:
email_user = Users.get_user_by_email(form_data.email.lower())
if email_user:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ERROR_MESSAGES.EMAIL_TAKEN,
)
if form_data.password:
hashed = get_password_hash(form_data.password)
print(hashed)
Auths.update_user_password_by_id(user_id, hashed)
Auths.update_email_by_id(user_id, form_data.email.lower())
updated_user = Users.update_user_by_id(
user_id,
{
"name": form_data.name,
"email": form_data.email.lower(),
"profile_image_url": form_data.profile_image_url,
},
)
if updated_user:
return updated_user
else:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ERROR_MESSAGES.DEFAULT(),
)
else:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ERROR_MESSAGES.USER_NOT_FOUND,
)
############################
# DeleteUserById
############################
......
......@@ -9,12 +9,10 @@ import os
import aiohttp
import json
from utils.misc import calculate_sha256
from config import OLLAMA_API_BASE_URL
router = APIRouter()
......@@ -42,7 +40,10 @@ def parse_huggingface_url(hf_url):
return None
async def download_file_stream(url, file_path, file_name, chunk_size=1024 * 1024):
async def download_file_stream(url,
file_path,
file_name,
chunk_size=1024 * 1024):
done = False
if os.path.exists(file_path):
......@@ -56,7 +57,8 @@ async def download_file_stream(url, file_path, file_name, chunk_size=1024 * 1024
async with aiohttp.ClientSession(timeout=timeout) as session:
async with session.get(url, headers=headers) as response:
total_size = int(response.headers.get("content-length", 0)) + current_size
total_size = int(response.headers.get("content-length",
0)) + current_size
with open(file_path, "ab+") as file:
async for data in response.content.iter_chunked(chunk_size):
......@@ -89,9 +91,7 @@ async def download_file_stream(url, file_path, file_name, chunk_size=1024 * 1024
@router.get("/download")
async def download(
url: str,
):
async def download(url: str, ):
# url = "https://huggingface.co/TheBloke/stablelm-zephyr-3b-GGUF/resolve/main/stablelm-zephyr-3b.Q2_K.gguf"
file_name = parse_huggingface_url(url)
......@@ -161,4 +161,5 @@ async def upload(file: UploadFile = File(...)):
res = {"error": str(e)}
yield f"data: {json.dumps(res)}\n\n"
return StreamingResponse(file_write_stream(), media_type="text/event-stream")
return StreamingResponse(file_write_stream(),
media_type="text/event-stream")
......@@ -19,19 +19,28 @@ ENV = os.environ.get("ENV", "dev")
# OLLAMA_API_BASE_URL
####################################
OLLAMA_API_BASE_URL = os.environ.get(
"OLLAMA_API_BASE_URL", "http://localhost:11434/api"
)
OLLAMA_API_BASE_URL = os.environ.get("OLLAMA_API_BASE_URL",
"http://localhost:11434/api")
if ENV == "prod":
if OLLAMA_API_BASE_URL == "/ollama/api":
OLLAMA_API_BASE_URL = "http://host.docker.internal:11434/api"
####################################
# OPENAI_API
####################################
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "")
OPENAI_API_BASE_URL = os.environ.get("OPENAI_API_BASE_URL", "")
if OPENAI_API_BASE_URL == "":
OPENAI_API_BASE_URL = "https://api.openai.com/v1"
####################################
# WEBUI_VERSION
####################################
WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.42")
WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.50")
####################################
# WEBUI_AUTH (Required for security)
......
......@@ -6,6 +6,7 @@ class MESSAGES(str, Enum):
class ERROR_MESSAGES(str, Enum):
def __str__(self) -> str:
return super().__str__()
......@@ -29,8 +30,8 @@ class ERROR_MESSAGES(str, Enum):
UNAUTHORIZED = "401 Unauthorized"
ACCESS_PROHIBITED = "You do not have permission to access this resource. Please contact your administrator for assistance."
ACTION_PROHIBITED = (
"The requested action has been restricted as a security measure."
)
"The requested action has been restricted as a security measure.")
NOT_FOUND = "We could not find what you're looking for :/"
USER_NOT_FOUND = "We could not find what you're looking for :/"
API_KEY_NOT_FOUND = "Oops! It looks like there's a hiccup. The API key is missing. Please make sure to provide a valid API key to access this feature."
MALICIOUS = "Unusual activities detected, please try again in a few minutes."
......@@ -6,12 +6,15 @@ from fastapi.middleware.cors import CORSMiddleware
from starlette.exceptions import HTTPException as StarletteHTTPException
from apps.ollama.main import app as ollama_app
from apps.openai.main import app as openai_app
from apps.web.main import app as webui_app
import time
class SPAStaticFiles(StaticFiles):
async def get_response(self, path: str, scope):
try:
return await super().get_response(path, scope)
......@@ -46,5 +49,9 @@ async def check_url(request: Request, call_next):
app.mount("/api/v1", webui_app)
app.mount("/ollama/api", WSGIMiddleware(ollama_app))
app.mount("/", SPAStaticFiles(directory="../build", html=True), name="spa-static-files")
app.mount("/ollama/api", ollama_app)
app.mount("/openai/api", openai_app)
app.mount("/",
SPAStaticFiles(directory="../build", html=True),
name="spa-static-files")
......@@ -8,9 +8,12 @@ from passlib.context import CryptContext
from datetime import datetime, timedelta
import requests
import jwt
import logging
import config
logging.getLogger("passlib").setLevel(logging.ERROR)
JWT_SECRET_KEY = config.WEBUI_JWT_SECRET_KEY
ALGORITHM = "HS256"
......
File added (image too large to display in the diff).
......@@ -3,4 +3,4 @@ version: '3.8'
services:
ollama:
volumes:
- ${OLLAMA_DATA_DIR-./ollama-data}:/root/.ollama
\ No newline at end of file
- ${OLLAMA_DATA_DIR-./ollama-data}:/root/.ollama
......@@ -25,7 +25,7 @@ services:
ports:
- ${OLLAMA_WEBUI_PORT-3000}:8080
environment:
- "OLLAMA_API_BASE_URL=http://ollama:11434/api"
- 'OLLAMA_API_BASE_URL=http://ollama:11434/api'
extra_hosts:
- host.docker.internal:host-gateway
restart: unless-stopped
......
# If you're serving both the frontend and backend (Recommended)
# Set the public API base URL for seamless communication
PUBLIC_API_BASE_URL='/ollama/api'
# If you're serving only the frontend (Not recommended and not fully supported)
# Comment above and Uncomment below
# You can use the default value or specify a custom path, e.g., '/api'
# PUBLIC_API_BASE_URL='http://{location.hostname}:11434/api'
# Ollama URL for the backend to connect
# The path '/ollama/api' will be redirected to the specified backend URL
OLLAMA_API_BASE_URL='http://localhost:11434/api'
OPENAI_API_BASE_URL=''
OPENAI_API_KEY=''
\ No newline at end of file
......@@ -8,22 +8,23 @@
"preview": "vite preview",
"check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
"check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch",
"lint": "npm run eslint",
"lint": "npm run lint:frontend ; npm run lint:types ; npm run lint:backend",
"lint:frontend": "eslint . --fix",
"lint:types": "npm run check",
"fmt": "npm run prettier:svelte && npm run prettier",
"eslint": "npx -p eslint@8 -- eslint .",
"prettier:svelte": "npx -p prettier@2 -- prettier --plugin-search-dir . --write .",
"prettier": "npx -p prettier@2 -- prettier --write '**/*.{js,css,md,html,json}'"
"lint:backend": "pylint backend/",
"format": "prettier --plugin-search-dir --write '**/*.{js,ts,svelte,css,md,html,json}'",
"format:backend": "yapf --recursive backend -p -i"
},
"devDependencies": {
"@sveltejs/adapter-auto": "^2.0.0",
"@sveltejs/adapter-static": "^2.0.3",
"@sveltejs/kit": "^1.20.4",
"@sveltejs/kit": "^1.30.0",
"@tailwindcss/typography": "^0.5.10",
"@typescript-eslint/eslint-plugin": "^6.0.0",
"@typescript-eslint/parser": "^6.0.0",
"@types/bun": "latest",
"@typescript-eslint/eslint-plugin": "^6.17.0",
"@typescript-eslint/parser": "^6.17.0",
"autoprefixer": "^10.4.16",
"eslint": "^8.28.0",
"eslint": "^8.56.0",
"eslint-config-prettier": "^8.5.0",
"eslint-plugin-svelte": "^2.30.0",
"postcss": "^8.4.31",
......@@ -39,6 +40,7 @@
"type": "module",
"dependencies": {
"@sveltejs/adapter-node": "^1.3.1",
"dayjs": "^1.11.10",
"file-saver": "^2.0.5",
"highlight.js": "^11.9.0",
"idb": "^7.1.1",
......
docker rm -f ollama || true
docker pull ollama/ollama
# CPU Only
docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
# GPU Support
# docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
docker image prune -f
\ No newline at end of file
import { OLLAMA_API_BASE_URL } from '$lib/constants';
export const getOllamaVersion = async (
base_url: string = OLLAMA_API_BASE_URL,
token: string = ''
) => {
export const getOllamaAPIUrl = async (token: string = '') => {
let error = null;
const res = await fetch(`${base_url}/version`, {
const res = await fetch(`${OLLAMA_API_BASE_URL}/url`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
return res.OLLAMA_API_BASE_URL;
};
export const updateOllamaAPIUrl = async (token: string = '', url: string) => {
let error = null;
const res = await fetch(`${OLLAMA_API_BASE_URL}/url/update`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
},
body: JSON.stringify({
url: url
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
return res.OLLAMA_API_BASE_URL;
};
export const getOllamaVersion = async (token: string = '') => {
let error = null;
const res = await fetch(`${OLLAMA_API_BASE_URL}/version`, {
method: 'GET',
headers: {
Accept: 'application/json',
......@@ -35,13 +99,10 @@ export const getOllamaVersion = async (
return res?.version ?? '';
};
export const getOllamaModels = async (
base_url: string = OLLAMA_API_BASE_URL,
token: string = ''
) => {
export const getOllamaModels = async (token: string = '') => {
let error = null;
const res = await fetch(`${base_url}/tags`, {
const res = await fetch(`${OLLAMA_API_BASE_URL}/tags`, {
method: 'GET',
headers: {
Accept: 'application/json',
......@@ -67,18 +128,15 @@ export const getOllamaModels = async (
throw error;
}
return res?.models ?? [];
return (res?.models ?? []).sort((a, b) => {
return a.name.localeCompare(b.name);
});
};
export const generateTitle = async (
base_url: string = OLLAMA_API_BASE_URL,
token: string = '',
model: string,
prompt: string
) => {
export const generateTitle = async (token: string = '', model: string, prompt: string) => {
let error = null;
const res = await fetch(`${base_url}/generate`, {
const res = await fetch(`${OLLAMA_API_BASE_URL}/generate`, {
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
......@@ -86,7 +144,7 @@ export const generateTitle = async (
},
body: JSON.stringify({
model: model,
prompt: `Generate a brief 3-5 word title for this question, excluding the term 'title.' Then, please reply with only the title: ${prompt}`,
prompt: `Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title': ${prompt}`,
stream: false
})
})
......@@ -109,14 +167,10 @@ export const generateTitle = async (
return res?.response ?? 'New Chat';
};
export const generateChatCompletion = async (
base_url: string = OLLAMA_API_BASE_URL,
token: string = '',
body: object
) => {
export const generateChatCompletion = async (token: string = '', body: object) => {
let error = null;
const res = await fetch(`${base_url}/chat`, {
const res = await fetch(`${OLLAMA_API_BASE_URL}/chat`, {
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
......@@ -135,15 +189,10 @@ export const generateChatCompletion = async (
return res;
};
export const createModel = async (
base_url: string = OLLAMA_API_BASE_URL,
token: string,
tagName: string,
content: string
) => {
export const createModel = async (token: string, tagName: string, content: string) => {
let error = null;
const res = await fetch(`${base_url}/create`, {
const res = await fetch(`${OLLAMA_API_BASE_URL}/create`, {
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
......@@ -165,14 +214,10 @@ export const createModel = async (
return res;
};
export const deleteModel = async (
base_url: string = OLLAMA_API_BASE_URL,
token: string,
tagName: string
) => {
export const deleteModel = async (token: string, tagName: string) => {
let error = null;
const res = await fetch(`${base_url}/delete`, {
const res = await fetch(`${OLLAMA_API_BASE_URL}/delete`, {
method: 'DELETE',
headers: {
'Content-Type': 'text/event-stream',
......@@ -202,3 +247,27 @@ export const deleteModel = async (
return res;
};
export const pullModel = async (token: string, tagName: string) => {
let error = null;
const res = await fetch(`${OLLAMA_API_BASE_URL}/pull`, {
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
Authorization: `Bearer ${token}`
},
body: JSON.stringify({
name: tagName
})
}).catch((err) => {
error = err;
return null;
});
if (error) {
throw error;
}
return res;
};
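A minimal usage sketch for the new pullModel helper (hypothetical caller code, not part of this commit): it assumes a bearer token is already at hand and that the proxied Ollama pull endpoint streams newline-delimited JSON progress objects, so the raw Response body is read incrementally. The model tag is a placeholder.

// Hypothetical usage: pull a model and log streamed progress (assumes NDJSON progress lines).
const res = await pullModel(token, 'mistral:latest');
if (res && res.body) {
	const reader = res.body.pipeThrough(new TextDecoderStream()).getReader();
	while (true) {
		const { value, done } = await reader.read();
		if (done) break;
		// A production reader would buffer partial lines across chunks; this sketch assumes whole lines.
		for (const line of value.split('\n').filter((l) => l.trim() !== '')) {
			const update = JSON.parse(line); // e.g. { status: 'pulling manifest' } or a download-progress object
			console.log(update.status);
		}
	}
}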
export const getOpenAIModels = async (
import { OPENAI_API_BASE_URL } from '$lib/constants';
export const getOpenAIUrl = async (token: string = '') => {
let error = null;
const res = await fetch(`${OPENAI_API_BASE_URL}/url`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
return res.OPENAI_API_BASE_URL;
};
export const updateOpenAIUrl = async (token: string = '', url: string) => {
let error = null;
const res = await fetch(`${OPENAI_API_BASE_URL}/url/update`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
},
body: JSON.stringify({
url: url
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
return res.OPENAI_API_BASE_URL;
};
export const getOpenAIKey = async (token: string = '') => {
let error = null;
const res = await fetch(`${OPENAI_API_BASE_URL}/key`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
return res.OPENAI_API_KEY;
};
export const updateOpenAIKey = async (token: string = '', key: string) => {
let error = null;
const res = await fetch(`${OPENAI_API_BASE_URL}/key/update`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
},
body: JSON.stringify({
key: key
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
return res.OPENAI_API_KEY;
};
export const getOpenAIModels = async (token: string = '') => {
let error = null;
const res = await fetch(`${OPENAI_API_BASE_URL}/models`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = `OpenAI: ${err?.error?.message ?? 'Network Problem'}`;
return [];
});
if (error) {
throw error;
}
const models = Array.isArray(res) ? res : res?.data ?? null;
return models
? models
.map((model) => ({ name: model.id, external: true }))
.sort((a, b) => {
return a.name.localeCompare(b.name);
})
: models;
};
export const getOpenAIModelsDirect = async (
base_url: string = 'https://api.openai.com/v1',
api_key: string = ''
) => {
......@@ -25,9 +197,35 @@ export const getOpenAIModels = async (
throw error;
}
let models = Array.isArray(res) ? res : res?.data ?? null;
const models = Array.isArray(res) ? res : res?.data ?? null;
return models
.map((model) => ({ name: model.id, external: true }))
.filter((model) => (base_url.includes('openai') ? model.name.includes('gpt') : true));
.filter((model) => (base_url.includes('openai') ? model.name.includes('gpt') : true))
.sort((a, b) => {
return a.name.localeCompare(b.name);
});
};
export const generateOpenAIChatCompletion = async (token: string = '', body: object) => {
let error = null;
const res = await fetch(`${OPENAI_API_BASE_URL}/chat/completions`, {
method: 'POST',
headers: {
Authorization: `Bearer ${token}`,
'Content-Type': 'application/json'
},
body: JSON.stringify(body)
}).catch((err) => {
console.log(err);
error = err;
return null;
});
if (error) {
throw error;
}
return res;
};
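A short usage sketch for generateOpenAIChatCompletion (illustrative only; the model name and messages are placeholders). It assumes a non-streaming request, so the returned Response can simply be parsed with res.json().

// Hypothetical usage: non-streaming chat completion through the new /openai/api proxy.
const res = await generateOpenAIChatCompletion(token, {
	model: 'gpt-3.5-turbo',
	messages: [{ role: 'user', content: 'Hello!' }],
	stream: false
});
if (res && res.ok) {
	const completion = await res.json();
	console.log(completion?.choices?.[0]?.message?.content ?? '');
}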
......@@ -84,3 +84,43 @@ export const deleteUserById = async (token: string, userId: string) => {
return res;
};
type UserUpdateForm = {
profile_image_url: string;
email: string;
name: string;
password: string;
};
export const updateUserById = async (token: string, userId: string, user: UserUpdateForm) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/users/${userId}/update`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
},
body: JSON.stringify({
profile_image_url: user.profile_image_url,
email: user.email,
name: user.name,
password: user.password !== '' ? user.password : undefined
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err.detail;
return null;
});
if (error) {
throw error;
}
return res;
};
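A brief usage sketch for updateUserById (hypothetical admin-panel code; userId and the field values are placeholders). Per the form above, an empty password is sent as undefined, so the user's existing password is left unchanged.

// Hypothetical usage: an admin edits another user's profile without touching the password.
const updatedUser = await updateUserById(token, userId, {
	profile_image_url: '/user.png',
	email: 'jane@example.com',
	name: 'Jane Doe',
	password: '' // empty string -> field omitted from the request body, password unchanged
});
console.log(updatedUser);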