Unverified Commit dff2860a authored by josephrocca's avatar josephrocca Committed by GitHub
Browse files

Fix CORS compatibility with OpenAI, vLLM, TGI, LMDeploy (#1373)


Co-authored-by: Yineng Zhang <me@zhyncs.com>
parent e72275cf
@@ -37,6 +37,7 @@ import requests
import uvicorn
import uvloop
from fastapi import FastAPI, File, Form, Request, UploadFile
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse, Response, StreamingResponse
from sglang.lang.backend.runtime_endpoint import RuntimeEndpoint
@@ -93,6 +94,14 @@ asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
app = FastAPI()
tokenizer_manager = None
# Enable CORS so browser-based clients can call the API from any origin,
# matching the permissive CORS defaults of other OpenAI-compatible servers
# (vLLM, TGI, LMDeploy) per the commit message.
# NOTE(review): allow_credentials=True together with allow_origins=["*"]
# causes Starlette to echo the request's Origin header back rather than
# sending a literal "*" — confirm this fully-open policy is acceptable for
# production deployments.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # accept requests from every origin
    allow_credentials=True,  # permit cookies / Authorization headers
    allow_methods=["*"],  # all HTTP methods
    allow_headers=["*"],  # all request headers
)
@@app.get("/health")
async def health() -> Response:
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment