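"""FastAPI app that manages a LiteLLM proxy subprocess for Open WebUI.

It loads the proxy's YAML config, starts/stops/restarts the `litellm`
process in the background, exposes admin endpoints for editing the config
and model list, and transparently proxies all remaining requests to the
running proxy.
"""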
import sys
import os
import json
import time
import logging
import asyncio
import subprocess
import warnings

import requests
import yaml

from fastapi import FastAPI, Request, Depends, HTTPException, status, Response
from fastapi.routing import APIRoute
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse

from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
from starlette.responses import StreamingResponse

from pydantic import BaseModel, ConfigDict
from typing import Optional, List

from utils.utils import get_verified_user, get_current_user, get_admin_user
from config import (
    SRC_LOG_LEVELS,
    ENV,
    MODEL_CONFIG,
    ENABLE_LITELLM,
    ENABLE_MODEL_FILTER,
    MODEL_FILTER_LIST,
    DATA_DIR,
    LITELLM_PROXY_PORT,
    LITELLM_PROXY_HOST,
)
from constants import MESSAGES

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["LITELLM"])

# Suppress the warnings emitted when litellm is imported
warnings.simplefilter("ignore")

from litellm.utils import get_llm_provider


app = FastAPI()

origins = ["*"]

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


# Load the litellm proxy config (YAML) from the data directory
LITELLM_CONFIG_DIR = f"{DATA_DIR}/litellm/config.yaml"

with open(LITELLM_CONFIG_DIR, "r") as file:
    litellm_config = yaml.safe_load(file)
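# For reference, a minimal config.yaml has roughly this shape (illustrative
# sketch; the authoritative schema is defined by litellm):
#
# model_list:
#   - model_name: gpt-3.5-turbo
#     litellm_params:
#       model: openai/gpt-3.5-turbo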


app.state.ENABLE = ENABLE_LITELLM
app.state.CONFIG = litellm_config
app.state.MODEL_CONFIG = MODEL_CONFIG.get("litellm", [])

# Global variable to store the subprocess reference
background_process = None

CONFLICT_ENV_VARS = [
    # Uvicorn uses PORT, so LiteLLM might use it as well
    "PORT",
    # LiteLLM uses DATABASE_URL for Prisma connections
    "DATABASE_URL",
]


async def run_background_process(command):
    global background_process
    log.info("run_background_process")

    try:
        # Log the command to be executed
        log.info(f"Executing command: {command}")

        # Filter environment variables known to conflict with litellm
        env = {k: v for k, v in os.environ.items() if k not in CONFLICT_ENV_VARS}

        # Execute the command and create a subprocess
        process = await asyncio.create_subprocess_exec(
            *command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env
        )
        background_process = process
        log.info("Subprocess started successfully.")

        async def log_stream(stream, prefix):
            # Forward a pipe to the logger line by line
            async for line in stream:
                log.info(f"{prefix}{line.decode().strip()}")

        # Drain STDOUT and STDERR concurrently so that neither pipe can fill
        # up and block the subprocess
        await asyncio.gather(
            log_stream(process.stdout, ""),
            log_stream(process.stderr, "Subprocess STDERR: "),
        )

        # Wait for the process to finish
        returncode = await process.wait()
        log.info(f"Subprocess exited with return code {returncode}")
    except Exception as e:
        log.error(f"Failed to start subprocess: {e}")
        raise  # Re-raise so the failure propagates to the caller


async def start_litellm_background():
    log.info("start_litellm_background")
    # Command to run in the background
    command = [
        "litellm",
        "--port",
        str(LITELLM_PROXY_PORT),
        "--host",
        LITELLM_PROXY_HOST,
        "--telemetry",
        "False",
        "--config",
        LITELLM_CONFIG_DIR,
    ]

    await run_background_process(command)


async def shutdown_litellm_background():
    log.info("shutdown_litellm_background")
    global background_process
    if background_process:
        background_process.terminate()
        await background_process.wait()  # Ensure the process has terminated
        log.info("Subprocess terminated")
        background_process = None


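# Launch the litellm proxy as a background task on app startup so the HTTP
# server is not blocked while the proxy boots.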
@app.on_event("startup")
async def startup_event():
    log.info("startup_event")
    # TODO: Check config.yaml file and create one
    asyncio.create_task(start_litellm_background())


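# Model filter settings: when enabled, users with the "user" role only see
# models listed in MODEL_FILTER_LIST (enforced in get_models below).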
app.state.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST


@app.get("/")
async def get_status():
    return {"status": True}


async def restart_litellm():
    """
    Restart the litellm background service.
    """
    log.info("Requested restart of litellm service.")
    try:
        # Shut down the existing process if it is running
        await shutdown_litellm_background()
        log.info("litellm service shutdown complete.")

        # Restart the background service
        asyncio.create_task(start_litellm_background())
        log.info("litellm service restart complete.")

        return {
            "status": "success",
            "message": "litellm service restarted successfully.",
        }
    except Exception as e:
        log.error(f"Error restarting litellm service: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)
        )


@app.get("/restart")
async def restart_litellm_handler(user=Depends(get_admin_user)):
    return await restart_litellm()


@app.get("/config")
async def get_config(user=Depends(get_admin_user)):
    return app.state.CONFIG


class LiteLLMConfigForm(BaseModel):
    general_settings: Optional[dict] = None
    litellm_settings: Optional[dict] = None
    model_list: Optional[List[dict]] = None
    router_settings: Optional[dict] = None

    model_config = ConfigDict(protected_namespaces=())


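# Replace the entire litellm config from the request body, persist it to
# config.yaml, and restart the proxy so the change takes effect.
# Illustrative payload (field names from LiteLLMConfigForm; values are
# only an example):
#   POST /config/update
#   {"model_list": [{"model_name": "gpt-3.5-turbo",
#                    "litellm_params": {"model": "openai/gpt-3.5-turbo"}}]}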
@app.post("/config/update")
async def update_config(form_data: LiteLLMConfigForm, user=Depends(get_admin_user)):
    app.state.CONFIG = form_data.model_dump(exclude_none=True)

    with open(LITELLM_CONFIG_DIR, "w") as file:
        yaml.dump(app.state.CONFIG, file)

    await restart_litellm()
    return app.state.CONFIG


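# List the models served by the litellm proxy (also exposed at the
# OpenAI-compatible /v1/models path); if the proxy cannot be reached,
# fall back to the model list from the local config.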
@app.get("/models")
@app.get("/v1/models")
async def get_models(user=Depends(get_current_user)):
    if app.state.ENABLE:
        # Wait until the litellm subprocess has been launched
        while not background_process:
            await asyncio.sleep(0.1)

        url = f"http://localhost:{LITELLM_PROXY_PORT}/v1"
        r = None
        try:
            r = requests.request(method="GET", url=f"{url}/models")
            r.raise_for_status()

            data = r.json()

            if app.state.ENABLE_MODEL_FILTER:
                if user and user.role == "user":
                    data["data"] = list(
                        filter(
                            lambda model: model["id"] in app.state.MODEL_FILTER_LIST,
                            data["data"],
                        )
                    )

            for model in data["data"]:
                add_custom_info_to_model(model)
            return data
        except Exception as e:
            log.exception(e)
            error_detail = "Open WebUI: Server Connection Error"
            if r is not None:
                try:
                    res = r.json()
                    if "error" in res:
                        error_detail = f"External: {res['error']}"
                except Exception:
                    error_detail = f"External: {e}"

            return {
                "data": [
                    {
                        "id": model["model_name"],
                        "object": "model",
                        "created": int(time.time()),
                        "owned_by": "openai",
                        "custom_info": next(
                            (
                                item
                                for item in app.state.MODEL_CONFIG
                                if item["name"] == model["model_name"]
                            ),
                            {},
                        ),
                    }
                    for model in app.state.CONFIG["model_list"]
                ],
                "object": "list",
            }
    else:
        return {
            "data": [],
            "object": "list",
        }


def add_custom_info_to_model(model: dict):
    # Attach the matching entry from MODEL_CONFIG ("litellm" section) to the
    # model; defaults to an empty dict when there is no match
    model["custom_info"] = next(
        (item for item in app.state.MODEL_CONFIG if item["id"] == model["id"]), {}
    )


@app.get("/model/info")
async def get_model_list(user=Depends(get_admin_user)):
    return {"data": app.state.CONFIG["model_list"]}


class AddLiteLLMModelForm(BaseModel):
    model_name: str
    litellm_params: dict

    model_config = ConfigDict(protected_namespaces=())


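# Validate the model name against litellm's known providers, append it to
# the config's model_list, and restart the proxy. Illustrative payload
# (values are only an example):
#   POST /model/new
#   {"model_name": "gpt-3.5-turbo",
#    "litellm_params": {"model": "openai/gpt-3.5-turbo"}}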
@app.post("/model/new")
async def add_model_to_config(
    form_data: AddLiteLLMModelForm, user=Depends(get_admin_user)
):
    try:
        get_llm_provider(model=form_data.model_name)
        app.state.CONFIG["model_list"].append(form_data.model_dump())

        with open(LITELLM_CONFIG_DIR, "w") as file:
            yaml.dump(app.state.CONFIG, file)

        await restart_litellm()

        return {"message": MESSAGES.MODEL_ADDED(form_data.model_name)}
    except Exception as e:
        log.exception(e)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)
        )


class DeleteLiteLLMModelForm(BaseModel):
    id: str


@app.post("/model/delete")
async def delete_model_from_config(
    form_data: DeleteLiteLLMModelForm, user=Depends(get_admin_user)
):
    app.state.CONFIG["model_list"] = [
        model
        for model in app.state.CONFIG["model_list"]
        if model["model_name"] != form_data.id
    ]

    with open(LITELLM_CONFIG_DIR, "w") as file:
        yaml.dump(app.state.CONFIG, file)

    await restart_litellm()

    return {"message": MESSAGES.MODEL_DELETED(form_data.id)}


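# Catch-all route: forward any other request to the litellm proxy and relay
# the response, streaming it back when the proxy replies with server-sent
# events.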
@app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
async def proxy(path: str, request: Request, user=Depends(get_verified_user)):
    body = await request.body()

    url = f"http://localhost:{LITELLM_PROXY_PORT}"

    target_url = f"{url}/{path}"

    headers = {}
    # headers["Authorization"] = f"Bearer {key}"
    headers["Content-Type"] = "application/json"

    r = None

    try:
        r = requests.request(
            method=request.method,
            url=target_url,
            data=body,
            headers=headers,
            stream=True,
        )

        r.raise_for_status()

        # Check if response is SSE
        if "text/event-stream" in r.headers.get("Content-Type", ""):
            return StreamingResponse(
                r.iter_content(chunk_size=8192),
                status_code=r.status_code,
                headers=dict(r.headers),
            )
        else:
            response_data = r.json()
            return response_data
    except Exception as e:
        log.exception(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"External: {res['error']['message'] if 'message' in res['error'] else res['error']}"
            except Exception:
                error_detail = f"External: {e}"

        raise HTTPException(
            # a requests.Response is falsy for 4xx/5xx statuses, so compare
            # against None explicitly to preserve the upstream status code
            status_code=r.status_code if r is not None else 500,
            detail=error_detail,
        )