chenpangpang/open-webui - Commit fde0139b

All in one Dockerfile for including Ollama

Authored Mar 22, 2024 by Jannik Streidl
Parent: afa591af
Showing 5 changed files with 53 additions and 20 deletions (+53 -20):

    Dockerfile           +26  -9
    Dockerfile-cuda       +8  -3
    Dockerfile-ollama     +1  -1
    backend/config.py     +5  -1
    backend/start.sh     +13  -6
Dockerfile
@@ -2,6 +2,7 @@
 # Initialize device type args
 ARG USE_CUDA=false
 ARG USE_MPS=false
+ARG INCLUDE_OLLAMA=false

 ######## WebUI frontend ########
 FROM node:21-alpine3.19 as build
@@ -29,10 +30,12 @@ FROM python:3.11-slim-bookworm as base
 # Use args
 ARG USE_CUDA
 ARG USE_MPS
+ARG INCLUDE_OLLAMA

 ## Basis ##
 ENV ENV=prod \
-    PORT=8080
+    PORT=8080 \
+    INCLUDE_OLLAMA_ENV=${INCLUDE_OLLAMA}

 ## Basis URL Config ##
 ENV OLLAMA_BASE_URL="/ollama" \
@@ -88,14 +91,28 @@ RUN if [ "$USE_CUDA" = "true" ]; then \
     python -c "import os; from chromadb.utils import embedding_functions; sentence_transformer_ef = embedding_functions.SentenceTransformerEmbeddingFunction(model_name=os.environ['RAG_EMBEDDING_MODEL'], device=os.environ['DEVICE_TYPE'])"; \
     fi

 # install required packages
-RUN apt-get update \
-    # Install pandoc and netcat
-    && apt-get install -y --no-install-recommends pandoc netcat-openbsd \
-    # for RAG OCR
-    && apt-get install -y --no-install-recommends ffmpeg libsm6 libxext6 \
-    # cleanup
-    && rm -rf /var/lib/apt/lists/*
+RUN if [ "$INCLUDE_OLLAMA" = "true" ]; then \
+    apt-get update && \
+    # Install pandoc and netcat
+    apt-get install -y --no-install-recommends pandoc netcat-openbsd && \
+    # for RAG OCR
+    apt-get install -y --no-install-recommends ffmpeg libsm6 libxext6 && \
+    # install helper tools
+    apt-get install -y --no-install-recommends curl && \
+    # install ollama
+    curl -fsSL https://ollama.com/install.sh | sh && \
+    # cleanup
+    rm -rf /var/lib/apt/lists/*; \
+    else \
+    apt-get update && \
+    # Install pandoc and netcat
+    apt-get install -y --no-install-recommends pandoc netcat-openbsd && \
+    # for RAG OCR
+    apt-get install -y --no-install-recommends ffmpeg libsm6 libxext6 && \
+    # cleanup
+    rm -rf /var/lib/apt/lists/*; \
+    fi
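With this change a single image can be built with or without a bundled Ollama by toggling the new build argument. A minimal sketch of the two invocations, assuming the repository root as build context (the image tags are placeholders, not part of the commit):

    # default build: no Ollama inside the image, the backend expects an external server
    docker build -t open-webui .

    # all-in-one build: the ollama.com install script runs during the image build
    docker build --build-arg INCLUDE_OLLAMA=true -t open-webui:with-ollama .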
Dockerfile-cuda
@@ -81,9 +81,14 @@ WORKDIR /app/backend
 # apk del /var/cache/apk/*.tbz2

 # Install only the dependencies in the container, python will come from the base image used
-RUN apk update && \
-    apk add --no-install-recommends ffmpeg libsm6 libxext6 pandoc netcat-openbsd && \
-    apk del /var/cache/apk/*.tbz2
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ffmpeg libsm6 libxext6 pandoc netcat-openbsd && \
+    rm -rf /var/cache/apk/*.tbz2
+
+# Install python and pip
+RUN apt-get update && \
+    apt-get install -y python3 python3-pip && \
+    rm -rf /var/cache/apk/*.tbz2

 COPY ./backend/requirements.txt ./requirements.txt
 RUN pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu118 --no-cache-dir && \
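The CUDA variant keeps its own file, so that image would presumably be built by pointing docker at it with -f; the tag below is again a placeholder:

    docker build -f Dockerfile-cuda -t open-webui:cuda .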
Dockerfile-ollama
@@ -64,7 +64,7 @@ RUN pip3 install -r requirements.txt --no-cache-dir
 # Install pandoc and netcat
 # RUN python -c "import pypandoc; pypandoc.download_pandoc()"
 RUN apt-get update \
-    && apt-get install -y pandoc netcat-openbsd \
+    && apt-get install -y pandoc netcat-openbsd curl \
     && rm -rf /var/lib/apt/lists/*

 # preload embedding model
backend/config.py
@@ -208,6 +208,7 @@ OLLAMA_API_BASE_URL = os.environ.get(
 )

 OLLAMA_BASE_URL = os.environ.get("OLLAMA_BASE_URL", "")
+INCLUDE_OLLAMA = os.environ.get("OLLAMA_BASE_URL", "false")


 if OLLAMA_BASE_URL == "" and OLLAMA_API_BASE_URL != "":
@@ -219,6 +220,9 @@ if OLLAMA_BASE_URL == "" and OLLAMA_API_BASE_URL != "":

 if ENV == "prod":
     if OLLAMA_BASE_URL == "/ollama":
-        OLLAMA_BASE_URL = "http://host.docker.internal:11434"
+        if INCLUDE_OLLAMA:
+            OLLAMA_BASE_URL = "http://localhost:11434"
+        else:
+            OLLAMA_BASE_URL = "http://host.docker.internal:11434"
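In production the Ollama base URL now depends on whether Ollama is bundled: with INCLUDE_OLLAMA the backend talks to the local server on port 11434, otherwise it falls back to the Docker host. Note that the added assignment reads the OLLAMA_BASE_URL variable rather than a dedicated INCLUDE_OLLAMA one, and any non-empty string is truthy in the "if INCLUDE_OLLAMA:" check, so the intent was presumably to read the new flag and compare it against "true". A hedged sketch of the two run modes (ports, names and the volume are placeholders):

    # all-in-one image: ollama serve runs inside the container, backend uses http://localhost:11434
    docker run -d -p 3000:8080 -v ollama:/root/.ollama open-webui:with-ollama

    # external Ollama on the host: backend uses http://host.docker.internal:11434
    docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway open-webui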
backend/start.sh
@@ -1,4 +1,6 @@
 #!/usr/bin/env bash

+INCLUDE_OLLAMA=${INCLUDE_OLLAMA_ENV:-false}
+
 SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
 cd "$SCRIPT_DIR" || exit
@@ -7,16 +9,21 @@ KEY_FILE=.webui_secret_key
 PORT="${PORT:-8080}"
 if test "$WEBUI_SECRET_KEY $WEBUI_JWT_SECRET_KEY" = " "; then
-  echo No WEBUI_SECRET_KEY provided
+  echo "No WEBUI_SECRET_KEY provided"

   if ! [ -e "$KEY_FILE" ]; then
-    echo Generating WEBUI_SECRET_KEY
+    echo "Generating WEBUI_SECRET_KEY"
     # Generate a random value to use as a WEBUI_SECRET_KEY in case the user didn't provide one.
-    echo $(head -c 12 /dev/random | base64) > $KEY_FILE
+    echo $(head -c 12 /dev/random | base64) > "$KEY_FILE"
   fi

-  echo Loading WEBUI_SECRET_KEY from $KEY_FILE
-  WEBUI_SECRET_KEY=`cat $KEY_FILE`
+  echo "Loading WEBUI_SECRET_KEY from $KEY_FILE"
+  WEBUI_SECRET_KEY=$(cat "$KEY_FILE")
 fi

+if [ "$INCLUDE_OLLAMA" = "true" ]; then
+    echo "INCLUDE_OLLAMA is set to true, starting ollama serve."
+    ollama serve &
+fi
+
 WEBUI_SECRET_KEY="$WEBUI_SECRET_KEY" exec uvicorn main:app --host 0.0.0.0 --port "$PORT" --forwarded-allow-ips '*'
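Putting the pieces together: the INCLUDE_OLLAMA build argument is persisted in the image as INCLUDE_OLLAMA_ENV, start.sh copies it into INCLUDE_OLLAMA and, when true, launches ollama serve in the background before exec'ing uvicorn. A quick, non-authoritative way to check the bundled server in a running container (container name and image tag are placeholders):

    docker run -d --name open-webui-aio -p 3000:8080 open-webui:with-ollama

    # Ollama should answer on its default port inside the container
    docker exec open-webui-aio curl -s http://localhost:11434/api/version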