chenpangpang / open-webui · Commits · a640652a

Commit a640652a
authored Dec 23, 2023 by Timothy J. Baek

fix: large file upload issue

parent e539cf5c
Showing 1 changed file with 74 additions and 22 deletions

backend/apps/web/routers/utils.py  (+74, -22)
@@ -21,19 +21,74 @@ class UploadBlobForm(BaseModel):
     filename: str
 
 
+from urllib.parse import urlparse
+
+
+def parse_huggingface_url(hf_url):
+    # Parse the URL
+    parsed_url = urlparse(hf_url)
+
+    # Get the path and split it into components
+    path_components = parsed_url.path.split("/")
+
+    # Extract the desired output
+    user_repo = "/".join(path_components[1:3])
+    model_file = path_components[-1]
+
+    return [user_repo, model_file]
+
+
+def download_file_stream(url, file_path, chunk_size=1024 * 1024):
+    done = False
+
+    if os.path.exists(file_path):
+        current_size = os.path.getsize(file_path)
+    else:
+        current_size = 0
+
+    headers = {"Range": f"bytes={current_size}-"} if current_size > 0 else {}
+
+    with requests.get(url, headers=headers, stream=True) as response:
+        total_size = int(response.headers.get("content-length", 0)) + current_size
+
+        with open(file_path, "ab") as file:
+            for data in response.iter_content(chunk_size=chunk_size):
+                current_size += len(data)
+                file.write(data)
+
+                done = current_size == total_size
+                progress = round((current_size / total_size) * 100, 2)
+                yield f'data: {{"progress": {progress}, "current": {current_size}, "total": {total_size}}}\n\n'
+
+
+@router.get("/download")
+async def download(
+    url: str = "https://huggingface.co/TheBloke/stablelm-zephyr-3b-GGUF/resolve/main/stablelm-zephyr-3b.Q2_K.gguf",
+):
+    user_repo, model_file = parse_huggingface_url(url)
+
+    os.makedirs("./uploads", exist_ok=True)
+    file_path = os.path.join("./uploads", f"{model_file}")
+
+    return StreamingResponse(
+        download_file_stream(url, file_path), media_type="text/event-stream"
+    )
+
+
 @router.post("/upload")
 async def upload(file: UploadFile = File(...)):
     os.makedirs("./uploads", exist_ok=True)
     file_path = os.path.join("./uploads", file.filename)
 
-    def file_write_stream():
+    async def file_write_stream():
         total = 0
         total_size = file.size
         chunk_size = 1024 * 1024
 
         done = False
         try:
-            with open(file_path, "wb") as f:
+            with open(file_path, "wb+") as f:
                 while True:
                     chunk = file.file.read(chunk_size)
                     if not chunk:
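For orientation only (not part of the commit), a minimal sketch of what the new parse_huggingface_url helper produces for the default URL used by the /download route; the helper logic is repeated here so the snippet runs standalone:

    from urllib.parse import urlparse

    def parse_huggingface_url(hf_url):
        # Same logic as the helper added in the hunk above.
        parsed_url = urlparse(hf_url)
        path_components = parsed_url.path.split("/")
        user_repo = "/".join(path_components[1:3])
        model_file = path_components[-1]
        return [user_repo, model_file]

    url = (
        "https://huggingface.co/TheBloke/stablelm-zephyr-3b-GGUF"
        "/resolve/main/stablelm-zephyr-3b.Q2_K.gguf"
    )
    user_repo, model_file = parse_huggingface_url(url)
    print(user_repo)   # TheBloke/stablelm-zephyr-3b-GGUF
    print(model_file)  # stablelm-zephyr-3b.Q2_K.gguf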
@@ -50,26 +105,23 @@ async def upload(file: UploadFile = File(...)):
                     yield f"data: {json.dumps(res)}\n\n"
 
                 if done:
-                    with open(file_path, "rb") as f:
-                        hashed = calculate_sha256(f)
-                        f.seek(0)
-
-                        url = f"{OLLAMA_API_BASE_URL}/blobs/sha256:{hashed}"
-                        response = requests.post(url, data=f)
-
-                        if response.ok:
-                            res = {
-                                "done": done,
-                                "blob": f"sha256:{hashed}",
-                            }
-                            os.remove(file_path)
-
-                            yield f"data: {json.dumps(res)}\n\n"
-                        else:
-                            raise "Ollama: Could not create blob, Please try again."
+                    f.seek(0)
+                    hashed = calculate_sha256(f)
+                    f.seek(0)
+
+                    file_data = f.read()
+                    url = f"{OLLAMA_API_BASE_URL}/blobs/sha256:{hashed}"
+                    response = requests.post(url, data=file_data)
+
+                    if response.ok:
+                        res = {
+                            "done": done,
+                            "blob": f"sha256:{hashed}",
+                        }
+                        os.remove(file_path)
+
+                        yield f"data: {json.dumps(res)}\n\n"
+                    else:
+                        raise "Ollama: Could not create blob, Please try again."
 
         except Exception as e:
             res = {"error": str(e)}
...
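Both endpoints return a StreamingResponse with media type text/event-stream, so progress arrives as server-sent events ("data: {...}" lines). Below is a minimal client-side sketch of consuming that stream with requests; the base URL and the uploaded file name are illustrative assumptions, not values taken from the commit:

    import json
    import requests

    # Hypothetical base URL; the router's actual mount point is not shown in this diff.
    BASE_URL = "http://localhost:8080/api/v1/utils"

    with open("stablelm-zephyr-3b.Q2_K.gguf", "rb") as fh:
        with requests.post(f"{BASE_URL}/upload", files={"file": fh}, stream=True) as response:
            for line in response.iter_lines():
                if not line:
                    continue
                text = line.decode("utf-8")
                if text.startswith("data: "):
                    event = json.loads(text[len("data: "):])
                    # Progress events while the file is written, then
                    # {"done": true, "blob": "sha256:..."} once the blob is posted to Ollama.
                    print(event)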