"...composable_kernel.git" did not exist on "bc7b53348b81a25b1122480739de63778a4cda08"
Unverified Commit 9c2429ff authored by Timothy Jaeryang Baek's avatar Timothy Jaeryang Baek Committed by GitHub
Browse files

Merge pull request #4402 from michaelpoluektov/remove-ollama

refactor: re-use utils in Ollama
parents d0645d3c 547611b7
import asyncio import asyncio
import websocket # NOTE: websocket-client (https://github.com/websocket-client/websocket-client) import websocket # NOTE: websocket-client (https://github.com/websocket-client/websocket-client)
import uuid
import json import json
import urllib.request import urllib.request
import urllib.parse import urllib.parse
...@@ -398,7 +397,9 @@ async def comfyui_generate_image( ...@@ -398,7 +397,9 @@ async def comfyui_generate_image(
return None return None
try: try:
images = await asyncio.to_thread(get_images, ws, comfyui_prompt, client_id, base_url) images = await asyncio.to_thread(
get_images, ws, comfyui_prompt, client_id, base_url
)
except Exception as e: except Exception as e:
log.exception(f"Error while receiving images: {e}") log.exception(f"Error while receiving images: {e}")
images = None images = None
......
This diff is collapsed.
...@@ -17,7 +17,10 @@ from utils.utils import ( ...@@ -17,7 +17,10 @@ from utils.utils import (
get_verified_user, get_verified_user,
get_admin_user, get_admin_user,
) )
from utils.misc import apply_model_params_to_body, apply_model_system_prompt_to_body from utils.misc import (
apply_model_params_to_body_openai,
apply_model_system_prompt_to_body,
)
from config import ( from config import (
SRC_LOG_LEVELS, SRC_LOG_LEVELS,
...@@ -368,7 +371,7 @@ async def generate_chat_completion( ...@@ -368,7 +371,7 @@ async def generate_chat_completion(
payload["model"] = model_info.base_model_id payload["model"] = model_info.base_model_id
params = model_info.params.model_dump() params = model_info.params.model_dump()
payload = apply_model_params_to_body(params, payload) payload = apply_model_params_to_body_openai(params, payload)
payload = apply_model_system_prompt_to_body(params, payload, user) payload = apply_model_system_prompt_to_body(params, payload, user)
model = app.state.MODELS[payload.get("model")] model = app.state.MODELS[payload.get("model")]
......
...@@ -22,7 +22,7 @@ from apps.webui.utils import load_function_module_by_id ...@@ -22,7 +22,7 @@ from apps.webui.utils import load_function_module_by_id
from utils.misc import ( from utils.misc import (
openai_chat_chunk_message_template, openai_chat_chunk_message_template,
openai_chat_completion_message_template, openai_chat_completion_message_template,
apply_model_params_to_body, apply_model_params_to_body_openai,
apply_model_system_prompt_to_body, apply_model_system_prompt_to_body,
) )
...@@ -291,7 +291,7 @@ async def generate_function_chat_completion(form_data, user): ...@@ -291,7 +291,7 @@ async def generate_function_chat_completion(form_data, user):
form_data["model"] = model_info.base_model_id form_data["model"] = model_info.base_model_id
params = model_info.params.model_dump() params = model_info.params.model_dump()
form_data = apply_model_params_to_body(params, form_data) form_data = apply_model_params_to_body_openai(params, form_data)
form_data = apply_model_system_prompt_to_body(params, form_data, user) form_data = apply_model_system_prompt_to_body(params, form_data, user)
pipe_id = get_pipe_id(form_data) pipe_id = get_pipe_id(form_data)
......
...@@ -2,7 +2,7 @@ from pathlib import Path ...@@ -2,7 +2,7 @@ from pathlib import Path
import hashlib import hashlib
import re import re
from datetime import timedelta from datetime import timedelta
from typing import Optional, List, Tuple from typing import Optional, List, Tuple, Callable
import uuid import uuid
import time import time
...@@ -135,10 +135,21 @@ def apply_model_system_prompt_to_body(params: dict, form_data: dict, user) -> di ...@@ -135,10 +135,21 @@ def apply_model_system_prompt_to_body(params: dict, form_data: dict, user) -> di
# inplace function: form_data is modified
def apply_model_params_to_body(
    params: dict, form_data: dict, mappings: dict[str, Callable]
) -> dict:
    """Copy each mapped model param into ``form_data``, applying its cast.

    For every key in ``mappings``, if ``params`` holds a non-None value for
    that key, the corresponding cast function is applied and the result is
    stored under the same key in ``form_data`` (mutated in place).

    :param params: model parameter values (may be empty/None-ish; returned
        ``form_data`` is then untouched).
    :param form_data: request body to update in place.
    :param mappings: key -> cast callable for each parameter to transfer.
    :return: the (possibly mutated) ``form_data``.
    """
    if not params:
        return form_data

    for name, cast in mappings.items():
        raw = params.get(name)
        # None means "not set" — leave form_data untouched for this key.
        if raw is not None:
            form_data[name] = cast(raw)
    return form_data
# inplace function: form_data is modified
def apply_model_params_to_body_openai(params: dict, form_data: dict) -> dict:
mappings = { mappings = {
"temperature": float, "temperature": float,
"top_p": int, "top_p": int,
...@@ -147,10 +158,39 @@ def apply_model_params_to_body(params: dict, form_data: dict) -> dict: ...@@ -147,10 +158,39 @@ def apply_model_params_to_body(params: dict, form_data: dict) -> dict:
"seed": lambda x: x, "seed": lambda x: x,
"stop": lambda x: [bytes(s, "utf-8").decode("unicode_escape") for s in x], "stop": lambda x: [bytes(s, "utf-8").decode("unicode_escape") for s in x],
} }
return apply_model_params_to_body(params, form_data, mappings)
def apply_model_params_to_body_ollama(params: dict, form_data: dict) -> dict:
    """Apply model params to an Ollama request body (``form_data`` mutated).

    Two passes: first, options whose names match Ollama's are copied over
    verbatim; second, OpenAI-style names that Ollama spells differently are
    translated to their Ollama equivalents.

    :param params: model parameter values.
    :param form_data: Ollama request body to update in place.
    :return: the (possibly mutated) ``form_data``.
    """
    # Options Ollama accepts under the same name — copied through unchanged.
    identity_opts = (
        "temperature",
        "top_p",
        "seed",
        "mirostat",
        "mirostat_eta",
        "mirostat_tau",
        "num_ctx",
        "num_batch",
        "num_keep",
        "repeat_last_n",
        "tfs_z",
        "top_k",
        "min_p",
        "use_mmap",
        "use_mlock",
        "num_thread",
    )
    form_data = apply_model_params_to_body(
        params, form_data, {name: (lambda x: x) for name in identity_opts}
    )

    # OpenAI-style parameter names mapped to Ollama's spelling.
    renamed = {
        "max_tokens": "num_predict",
        "frequency_penalty": "repeat_penalty",
    }
    for src, dst in renamed.items():
        val = params.get(src, None)
        if val is not None:
            form_data[dst] = val

    return form_data
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment