"llm/git@developer.sourcefind.cn:orangecat/ollama.git" did not exist on "cbe2adc78ab7a34fafebbf4c83582d6c29a461ed"
Commit e008738f authored by Timothy J. Baek

feat: cancel download from backend

parent 244f34c2
@@ -234,11 +234,26 @@ async def pull_model(
     def get_request():
         nonlocal url
         nonlocal r
 
+        request_id = str(uuid.uuid4())
         try:
+            REQUEST_POOL.append(request_id)
+
             def stream_content():
-                for chunk in r.iter_content(chunk_size=8192):
-                    yield chunk
+                try:
+                    yield json.dumps({"id": request_id, "done": False}) + "\n"
+
+                    for chunk in r.iter_content(chunk_size=8192):
+                        if request_id in REQUEST_POOL:
+                            yield chunk
+                        else:
+                            print("User: canceled request")
+                            break
+                finally:
+                    if hasattr(r, "close"):
+                        r.close()
+                    if request_id in REQUEST_POOL:
+                        REQUEST_POOL.remove(request_id)
 
             r = requests.request(
                 method="POST",
@@ -259,6 +274,7 @@ async def pull_model(
     try:
         return await run_in_threadpool(get_request)
     except Exception as e:
         print(e)
         error_detail = "Open WebUI: Server Connection Error"
...
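On the client side, the first streamed line carries the id needed for cancellation. A hypothetical consumer, assuming the backend is mounted at http://localhost:8080 and the cancel route sketched above exists (the base URL and the pull path are both assumptions):

# Hypothetical client flow; base URL, pull path, and cancel route are assumed.
import json
import requests

resp = requests.post(
    "http://localhost:8080/ollama/api/pull",
    json={"name": "llama2"},
    stream=True,
)
lines = resp.iter_lines()
request_id = json.loads(next(lines))["id"]  # first line: {"id": "<uuid>", "done": false}

# ...later, from another handler or thread, cancel the download mid-stream:
requests.delete(f"http://localhost:8080/cancel/{request_id}")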