from fastapi import FastAPI, Request, Response, HTTPException, Depends, status
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse
from fastapi.concurrency import run_in_threadpool

from pydantic import BaseModel, ConfigDict

import random
import requests
import json
import uuid
import aiohttp
import asyncio
import logging

from apps.web.models.users import Users
from constants import ERROR_MESSAGES
from utils.utils import decode_token, get_current_user, get_admin_user
from config import (
    SRC_LOG_LEVELS,
    OLLAMA_BASE_URLS,
    MODEL_FILTER_ENABLED,
    MODEL_FILTER_LIST,
)

from typing import Optional, List, Union

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["OLLAMA"])

app = FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

app.state.MODEL_FILTER_ENABLED = MODEL_FILTER_ENABLED
app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST

app.state.OLLAMA_BASE_URLS = OLLAMA_BASE_URLS
app.state.MODELS = {}


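# REQUEST_POOL holds the ids of in-flight streaming requests; the
# /cancel/{request_id} endpoint removes an id to make its stream stop.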
REQUEST_POOL = []


# TODO: Implement a more intelligent load balancing mechanism for distributing requests among multiple backend instances.
# The current implementation picks a backend at random (random.choice). Consider algorithms such as weighted round-robin,
# least connections, or least response time for better resource utilization and performance; a sketch follows below.
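
# A hedged sketch of one such alternative (hypothetical helper, not wired in):
# a least-connections selector that could replace random.choice. The
# ACTIVE_REQUESTS counter is assumed to be incremented when a request is
# dispatched to a backend and decremented when it completes.
from collections import defaultdict

ACTIVE_REQUESTS = defaultdict(int)  # backend url index -> in-flight request count


def pick_url_idx(url_indices: List[int]) -> int:
    # Choose the backend currently serving the fewest in-flight requests.
    return min(url_indices, key=lambda idx: ACTIVE_REQUESTS[idx])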


@app.middleware("http")
async def check_url(request: Request, call_next):
    # Lazily populate the model registry on the first request.
    if len(app.state.MODELS) == 0:
        await get_all_models()

    response = await call_next(request)
    return response


@app.get("/urls")
async def get_ollama_api_urls(user=Depends(get_admin_user)):
    return {"OLLAMA_BASE_URLS": app.state.OLLAMA_BASE_URLS}


class UrlUpdateForm(BaseModel):
    urls: List[str]


@app.post("/urls/update")
async def update_ollama_api_url(form_data: UrlUpdateForm, user=Depends(get_admin_user)):
    app.state.OLLAMA_BASE_URLS = form_data.urls

    log.info(f"app.state.OLLAMA_BASE_URLS: {app.state.OLLAMA_BASE_URLS}")
    return {"OLLAMA_BASE_URLS": app.state.OLLAMA_BASE_URLS}


@app.get("/cancel/{request_id}")
async def cancel_ollama_request(request_id: str, user=Depends(get_current_user)):
    if user:
        if request_id in REQUEST_POOL:
            REQUEST_POOL.remove(request_id)
        return True
    else:
        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)


async def fetch_url(url):
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as response:
                return await response.json()
    except Exception as e:
        # Swallow connection errors so one unreachable backend does not break
        # aggregation; callers filter out the resulting None.
        log.error(f"Connection error: {e}")
        return None


def merge_models_lists(model_lists):
    merged_models = {}

    for idx, model_list in enumerate(model_lists):
        if model_list is not None:
            for model in model_list:
                digest = model["digest"]
                if digest not in merged_models:
                    model["urls"] = [idx]
                    merged_models[digest] = model
                else:
                    merged_models[digest]["urls"].append(idx)

    return list(merged_models.values())
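
# For example (hypothetical data), two backends reporting the same digest are
# merged into a single entry whose "urls" field records both backend indices:
#
#   merge_models_lists(
#       [[{"digest": "abc", "model": "llama2:latest"}],
#        [{"digest": "abc", "model": "llama2:latest"}]]
#   )
#   -> [{"digest": "abc", "model": "llama2:latest", "urls": [0, 1]}]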


async def get_all_models():
    log.info("get_all_models()")
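    # Query every configured backend in parallel and merge the results by digest.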
    tasks = [fetch_url(f"{url}/api/tags") for url in app.state.OLLAMA_BASE_URLS]
    responses = await asyncio.gather(*tasks)

    models = {
        "models": merge_models_lists(
            map(lambda response: response["models"] if response else None, responses)
        )
    }

    app.state.MODELS = {model["model"]: model for model in models["models"]}

    return models


@app.get("/api/tags")
@app.get("/api/tags/{url_idx}")
async def get_ollama_tags(
    url_idx: Optional[int] = None, user=Depends(get_current_user)
):
    if url_idx is None:
        models = await get_all_models()

        if app.state.MODEL_FILTER_ENABLED:
            if user.role == "user":
                models["models"] = list(
                    filter(
                        lambda model: model["name"] in app.state.MODEL_FILTER_LIST,
                        models["models"],
                    )
                )
        return models
    else:
        url = app.state.OLLAMA_BASE_URLS[url_idx]
        r = None
        try:
            r = requests.request(method="GET", url=f"{url}/api/tags")
            r.raise_for_status()

            return r.json()
        except Exception as e:
            log.exception(e)
            error_detail = "Open WebUI: Server Connection Error"
            if r is not None:
                try:
                    res = r.json()
                    if "error" in res:
                        error_detail = f"Ollama: {res['error']}"
                except Exception:
                    error_detail = f"Ollama: {e}"

            raise HTTPException(
                status_code=r.status_code if r else 500,
                detail=error_detail,
            )


@app.get("/api/version")
@app.get("/api/version/{url_idx}")
async def get_ollama_versions(url_idx: Optional[int] = None):

    if url_idx is None:

        # Return the lowest version reported across all reachable backends,
        # comparing numerically so that e.g. "0.1.9" sorts below "0.1.10".
        tasks = [fetch_url(f"{url}/api/version") for url in app.state.OLLAMA_BASE_URLS]
        responses = await asyncio.gather(*tasks)
        responses = list(filter(lambda x: x is not None, responses))

        if len(responses) > 0:
            lowest_version = min(
                responses, key=lambda x: tuple(map(int, x["version"].split(".")))
            )

            return {"version": lowest_version["version"]}
        else:
            raise HTTPException(
                status_code=500,
                detail=ERROR_MESSAGES.OLLAMA_NOT_FOUND,
            )
    else:
        url = app.state.OLLAMA_BASE_URLS[url_idx]
        r = None
        try:
            r = requests.request(method="GET", url=f"{url}/api/version")
            r.raise_for_status()

            return r.json()
        except Exception as e:
            log.exception(e)
            error_detail = "Open WebUI: Server Connection Error"
            if r is not None:
                try:
                    res = r.json()
                    if "error" in res:
                        error_detail = f"Ollama: {res['error']}"
                except Exception:
                    error_detail = f"Ollama: {e}"

            raise HTTPException(
                status_code=r.status_code if r else 500,
                detail=error_detail,
            )


class ModelNameForm(BaseModel):
    name: str


@app.post("/api/pull")
@app.post("/api/pull/{url_idx}")
async def pull_model(
    form_data: ModelNameForm, url_idx: int = 0, user=Depends(get_admin_user)
):
    url = app.state.OLLAMA_BASE_URLS[url_idx]
    log.info(f"url: {url}")

    r = None

    def get_request():
        nonlocal url
        nonlocal r
        try:

            def stream_content():
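                # Relay the upstream response to the client in 8 KB chunks.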
                for chunk in r.iter_content(chunk_size=8192):
                    yield chunk

            r = requests.request(
                method="POST",
                url=f"{url}/api/pull",
                data=form_data.model_dump_json(exclude_none=True).encode(),
                stream=True,
            )

            r.raise_for_status()

            return StreamingResponse(
                stream_content(),
                status_code=r.status_code,
                headers=dict(r.headers),
            )
        except Exception:
            raise

    try:
        return await run_in_threadpool(get_request)
    except Exception as e:
        log.exception(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


class PushModelForm(BaseModel):
    name: str
    insecure: Optional[bool] = None
    stream: Optional[bool] = None


@app.delete("/api/push")
@app.delete("/api/push/{url_idx}")
async def push_model(
    form_data: PushModelForm,
    url_idx: Optional[int] = None,
    user=Depends(get_admin_user),
):
    if url_idx is None:
        if form_data.name in app.state.MODELS:
            url_idx = app.state.MODELS[form_data.name]["urls"][0]
        else:
            raise HTTPException(
                status_code=400,
                detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.name),
            )

    url = app.state.OLLAMA_BASE_URLS[url_idx]
    log.debug(f"url: {url}")

    r = None

    def get_request():
        nonlocal url
        nonlocal r
        try:

            def stream_content():
                for chunk in r.iter_content(chunk_size=8192):
                    yield chunk

            r = requests.request(
                method="POST",
                url=f"{url}/api/push",
                data=form_data.model_dump_json(exclude_none=True).encode(),
                stream=True,
            )

            r.raise_for_status()

            return StreamingResponse(
                stream_content(),
                status_code=r.status_code,
                headers=dict(r.headers),
            )
        except Exception:
            raise

    try:
        return await run_in_threadpool(get_request)
    except Exception as e:
        log.exception(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


class CreateModelForm(BaseModel):
    name: str
    modelfile: Optional[str] = None
    stream: Optional[bool] = None
    path: Optional[str] = None


@app.post("/api/create")
@app.post("/api/create/{url_idx}")
async def create_model(
    form_data: CreateModelForm, url_idx: int = 0, user=Depends(get_admin_user)
):
    log.debug(f"form_data: {form_data}")
    url = app.state.OLLAMA_BASE_URLS[url_idx]
    log.info(f"url: {url}")

    r = None

    def get_request():
        nonlocal url
        nonlocal r
        try:

            def stream_content():
                for chunk in r.iter_content(chunk_size=8192):
                    yield chunk

            r = requests.request(
                method="POST",
                url=f"{url}/api/create",
                data=form_data.model_dump_json(exclude_none=True).encode(),
                stream=True,
            )

            r.raise_for_status()

            log.debug(f"r: {r}")

            return StreamingResponse(
                stream_content(),
                status_code=r.status_code,
                headers=dict(r.headers),
            )
        except Exception:
            raise

    try:
        return await run_in_threadpool(get_request)
    except Exception as e:
        log.exception(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


class CopyModelForm(BaseModel):
    source: str
    destination: str


@app.post("/api/copy")
@app.post("/api/copy/{url_idx}")
async def copy_model(
    form_data: CopyModelForm,
    url_idx: Optional[int] = None,
    user=Depends(get_admin_user),
):
    if url_idx is None:
        if form_data.source in app.state.MODELS:
            url_idx = app.state.MODELS[form_data.source]["urls"][0]
        else:
            raise HTTPException(
                status_code=400,
                detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.source),
            )

    url = app.state.OLLAMA_BASE_URLS[url_idx]
    log.info(f"url: {url}")

    r = None
    try:
        r = requests.request(
            method="POST",
            url=f"{url}/api/copy",
            data=form_data.model_dump_json(exclude_none=True).encode(),
        )
        r.raise_for_status()

        log.debug(f"r.text: {r.text}")

        return True
    except Exception as e:
        log.exception(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


@app.delete("/api/delete")
@app.delete("/api/delete/{url_idx}")
async def delete_model(
    form_data: ModelNameForm,
    url_idx: Optional[int] = None,
    user=Depends(get_admin_user),
):
    if url_idx is None:
        if form_data.name in app.state.MODELS:
            url_idx = app.state.MODELS[form_data.name]["urls"][0]
        else:
            raise HTTPException(
                status_code=400,
                detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.name),
            )

    url = app.state.OLLAMA_BASE_URLS[url_idx]
    log.info(f"url: {url}")

    r = None
    try:
        r = requests.request(
            method="DELETE",
            url=f"{url}/api/delete",
            data=form_data.model_dump_json(exclude_none=True).encode(),
        )
        r.raise_for_status()

        log.debug(f"r.text: {r.text}")

        return True
    except Exception as e:
        log.exception(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


@app.post("/api/show")
async def show_model_info(form_data: ModelNameForm, user=Depends(get_current_user)):
    if form_data.name not in app.state.MODELS:
        raise HTTPException(
            status_code=400,
            detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.name),
        )

    url_idx = random.choice(app.state.MODELS[form_data.name]["urls"])
    url = app.state.OLLAMA_BASE_URLS[url_idx]
    log.info(f"url: {url}")

    r = None
    try:
        r = requests.request(
            method="POST",
            url=f"{url}/api/show",
            data=form_data.model_dump_json(exclude_none=True).encode(),
        )
        r.raise_for_status()

        return r.json()
    except Exception as e:
        log.exception(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


class GenerateEmbeddingsForm(BaseModel):
    model: str
    prompt: str
    options: Optional[dict] = None
    keep_alive: Optional[Union[int, str]] = None
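
# Example request body for /api/embeddings (hypothetical values):
#   {"model": "llama2:latest", "prompt": "Why is the sky blue?"}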


@app.post("/api/embeddings")
@app.post("/api/embeddings/{url_idx}")
async def generate_embeddings(
    form_data: GenerateEmbeddingsForm,
    url_idx: Optional[int] = None,
    user=Depends(get_current_user),
):
    if url_idx is None:
        if form_data.model in app.state.MODELS:
            url_idx = random.choice(app.state.MODELS[form_data.model]["urls"])
        else:
            raise HTTPException(
                status_code=400,
                detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.model),
            )

    url = app.state.OLLAMA_BASE_URLS[url_idx]
    log.info(f"url: {url}")

    r = None
    try:
        r = requests.request(
            method="POST",
            url=f"{url}/api/embeddings",
            data=form_data.model_dump_json(exclude_none=True).encode(),
        )
        r.raise_for_status()

        return r.json()
    except Exception as e:
        log.exception(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


class GenerateCompletionForm(BaseModel):
    model: str
    prompt: str
    images: Optional[List[str]] = None
    format: Optional[str] = None
    options: Optional[dict] = None
    system: Optional[str] = None
    template: Optional[str] = None
    context: Optional[str] = None
    stream: Optional[bool] = True
    raw: Optional[bool] = None
    keep_alive: Optional[Union[int, str]] = None
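
# Example request body for /api/generate (hypothetical values); "stream"
# defaults to true, matching the form above:
#   {"model": "llama2:latest", "prompt": "Hello"}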


@app.post("/api/generate")
@app.post("/api/generate/{url_idx}")
async def generate_completion(
    form_data: GenerateCompletionForm,
    url_idx: Optional[int] = None,
    user=Depends(get_current_user),
):

    if url_idx is None:
        if form_data.model in app.state.MODELS:
            url_idx = random.choice(app.state.MODELS[form_data.model]["urls"])
        else:
            raise HTTPException(
                status_code=400,
                detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.model),
            )

    url = app.state.OLLAMA_BASE_URLS[url_idx]
    log.info(f"url: {url}")

    r = None

    def get_request():
        nonlocal form_data
        nonlocal r

        request_id = str(uuid.uuid4())
        try:
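            # Register this stream so /cancel/{request_id} can interrupt it.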
            REQUEST_POOL.append(request_id)

            def stream_content():
                try:
                    if form_data.stream:
                        yield json.dumps({"id": request_id, "done": False}) + "\n"

                    for chunk in r.iter_content(chunk_size=8192):
                        if request_id in REQUEST_POOL:
                            yield chunk
                        else:
                            log.warning("User: canceled request")
                            break
                finally:
                    if hasattr(r, "close"):
                        r.close()
                        if request_id in REQUEST_POOL:
                            REQUEST_POOL.remove(request_id)

            r = requests.request(
                method="POST",
                url=f"{url}/api/generate",
                data=form_data.model_dump_json(exclude_none=True).encode(),
                stream=True,
            )

            r.raise_for_status()

            return StreamingResponse(
                stream_content(),
                status_code=r.status_code,
                headers=dict(r.headers),
            )
        except Exception:
            raise

    try:
        return await run_in_threadpool(get_request)
    except Exception as e:
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


class ChatMessage(BaseModel):
    role: str
    content: str
    images: Optional[List[str]] = None


class GenerateChatCompletionForm(BaseModel):
    model: str
    messages: List[ChatMessage]
    format: Optional[str] = None
    options: Optional[dict] = None
    template: Optional[str] = None
    stream: Optional[bool] = True
    keep_alive: Optional[Union[int, str]] = None
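
# Example request body for /api/chat (hypothetical values):
#   {"model": "llama2:latest",
#    "messages": [{"role": "user", "content": "Hi there"}]}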


@app.post("/api/chat")
@app.post("/api/chat/{url_idx}")
async def generate_chat_completion(
    form_data: GenerateChatCompletionForm,
    url_idx: Optional[int] = None,
    user=Depends(get_current_user),
):

    if url_idx is None:
        if form_data.model in app.state.MODELS:
            url_idx = random.choice(app.state.MODELS[form_data.model]["urls"])
        else:
            raise HTTPException(
                status_code=400,
                detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.model),
            )

    url = app.state.OLLAMA_BASE_URLS[url_idx]
    log.info(f"url: {url}")

    r = None

    log.debug(
        "form_data.model_dump_json(exclude_none=True).encode(): "
        f"{form_data.model_dump_json(exclude_none=True).encode()}"
    )

    def get_request():
        nonlocal form_data
        nonlocal r

        request_id = str(uuid.uuid4())
        try:
            REQUEST_POOL.append(request_id)

            def stream_content():
                try:
                    if form_data.stream:
                        yield json.dumps({"id": request_id, "done": False}) + "\n"

                    for chunk in r.iter_content(chunk_size=8192):
                        if request_id in REQUEST_POOL:
                            yield chunk
                        else:
                            log.warning("User: canceled request")
                            break
                finally:
                    if hasattr(r, "close"):
                        r.close()
                        if request_id in REQUEST_POOL:
                            REQUEST_POOL.remove(request_id)

            r = requests.request(
                method="POST",
                url=f"{url}/api/chat",
                data=form_data.model_dump_json(exclude_none=True).encode(),
                stream=True,
            )

            r.raise_for_status()

            return StreamingResponse(
                stream_content(),
                status_code=r.status_code,
                headers=dict(r.headers),
            )
        except Exception as e:
            log.exception(e)
            raise

    try:
        return await run_in_threadpool(get_request)
    except Exception as e:
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


# TODO: we should update this part once Ollama supports other types
class OpenAIChatMessage(BaseModel):
    role: str
    content: str

    model_config = ConfigDict(extra="allow")


class OpenAIChatCompletionForm(BaseModel):
    model: str
    messages: List[OpenAIChatMessage]

    model_config = ConfigDict(extra="allow")
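
# Example OpenAI-style request body (hypothetical values); extra fields such
# as "temperature" pass through because the forms allow extras:
#   {"model": "llama2:latest",
#    "messages": [{"role": "user", "content": "Hi"}],
#    "stream": true}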


@app.post("/v1/chat/completions")
@app.post("/v1/chat/completions/{url_idx}")
async def generate_openai_chat_completion(
    form_data: OpenAIChatCompletionForm,
    url_idx: Optional[int] = None,
    user=Depends(get_current_user),
):

    if url_idx is None:
        if form_data.model in app.state.MODELS:
            url_idx = random.choice(app.state.MODELS[form_data.model]["urls"])
        else:
            raise HTTPException(
                status_code=400,
                detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.model),
            )

    url = app.state.OLLAMA_BASE_URLS[url_idx]
    log.info(f"url: {url}")

    r = None

    def get_request():
        nonlocal form_data
        nonlocal r

        request_id = str(uuid.uuid4())
        try:
            REQUEST_POOL.append(request_id)

            def stream_content():
                try:
                    if form_data.stream:
                        yield json.dumps(
                            {"request_id": request_id, "done": False}
                        ) + "\n"

                    for chunk in r.iter_content(chunk_size=8192):
                        if request_id in REQUEST_POOL:
                            yield chunk
                        else:
                            log.warning("User: canceled request")
                            break
                finally:
                    if hasattr(r, "close"):
                        r.close()
                        if request_id in REQUEST_POOL:
                            REQUEST_POOL.remove(request_id)

            r = requests.request(
                method="POST",
                url=f"{url}/v1/chat/completions",
                data=form_data.model_dump_json(exclude_none=True).encode(),
                stream=True,
            )

            r.raise_for_status()

            return StreamingResponse(
                stream_content(),
                status_code=r.status_code,
                headers=dict(r.headers),
            )
        except Exception:
            raise

    try:
        return await run_in_threadpool(get_request)
    except Exception as e:
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


@app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
async def deprecated_proxy(path: str, request: Request, user=Depends(get_current_user)):
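    # Legacy passthrough: forward any unmatched route to the first configured Ollama backend.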
    url = app.state.OLLAMA_BASE_URLS[0]
    target_url = f"{url}/{path}"

    body = await request.body()
    headers = dict(request.headers)

    if user.role in ["user", "admin"]:
        if path in ["pull", "delete", "push", "copy", "create"]:
            if user.role != "admin":
                raise HTTPException(
                    status_code=status.HTTP_401_UNAUTHORIZED,
                    detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
                )
    else:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
        )

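    # Strip client-specific headers before forwarding the request upstream.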
    headers.pop("host", None)
    headers.pop("authorization", None)
    headers.pop("origin", None)
    headers.pop("referer", None)

    r = None

    def get_request():
        nonlocal r

        request_id = str(uuid.uuid4())
        try:
            REQUEST_POOL.append(request_id)

            def stream_content():
                try:
                    if path == "generate":
                        data = json.loads(body.decode("utf-8"))

                        if not ("stream" in data and data["stream"] == False):
                            yield json.dumps({"id": request_id, "done": False}) + "\n"

                    elif path == "chat":
                        yield json.dumps({"id": request_id, "done": False}) + "\n"

                    for chunk in r.iter_content(chunk_size=8192):
                        if request_id in REQUEST_POOL:
                            yield chunk
                        else:
                            log.warning("User: canceled request")
                            break
                finally:
                    if hasattr(r, "close"):
                        r.close()
                        if request_id in REQUEST_POOL:
                            REQUEST_POOL.remove(request_id)

            r = requests.request(
                method=request.method,
                url=target_url,
                data=body,
                headers=headers,
                stream=True,
            )

            r.raise_for_status()


            return StreamingResponse(
                stream_content(),
                status_code=r.status_code,
                headers=dict(r.headers),
            )
        except Exception:
            raise

    try:
        return await run_in_threadpool(get_request)
    except Exception as e:
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )