# syntax=docker/dockerfile:1
# Initialize device type args
# use build args in the docker build command with --build-arg="BUILDARG=true"
ARG USE_CUDA=false
ARG USE_OLLAMA=false
# Tested with cu117 for CUDA 11 and cu121 for CUDA 12 (default)
ARG USE_CUDA_VER=cu121
# any sentence transformer model; models to use can be found at https://huggingface.co/models?library=sentence-transformers
# Leaderboard: https://huggingface.co/spaces/mteb/leaderboard
# for better performance and multilingual support use "intfloat/multilingual-e5-large" (~2.5GB) or "intfloat/multilingual-e5-base" (~1.5GB)
# IMPORTANT: If you switch the embedding model away from the default (sentence-transformers/all-MiniLM-L6-v2), or back again, RAG Chat cannot use documents previously loaded into the WebUI until they are re-embedded.
ARG USE_EMBEDDING_MODEL=sentence-transformers/all-MiniLM-L6-v2
ARG USE_RERANKING_MODEL=""
ARG BUILD_HASH=dev-build
# Override at your own risk - non-root configurations are untested
ARG UID=0
ARG GID=0
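# The ARGs above can be overridden at build time; the values and image tag below are illustrative only:
#   docker build \
#     --build-arg USE_CUDA=true \
#     --build-arg USE_EMBEDDING_MODEL=intfloat/multilingual-e5-base \
#     -t open-webui:custom .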

######## WebUI frontend ########
FROM --platform=$BUILDPLATFORM node:21-alpine3.19 AS build
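# --platform=$BUILDPLATFORM pins this stage to the build host's native architecture,
# so the Node frontend build is not run under emulation during multi-arch builds.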
ARG BUILD_HASH

WORKDIR /app

COPY package.json package-lock.json ./
RUN npm ci

COPY . .
ENV APP_BUILD_HASH=${BUILD_HASH}
RUN npm run build

######## WebUI backend ########
FROM python:3.11-slim-bookworm AS base

# Use args
ARG USE_CUDA
ARG USE_OLLAMA
ARG USE_CUDA_VER
ARG USE_EMBEDDING_MODEL
ARG USE_RERANKING_MODEL
ARG UID
ARG GID

## Base ##
ENV ENV=prod \
    PORT=8080 \
    # pass the build args through to the runtime environment
    USE_OLLAMA_DOCKER=${USE_OLLAMA} \
    USE_CUDA_DOCKER=${USE_CUDA} \
    USE_CUDA_DOCKER_VER=${USE_CUDA_VER} \
    USE_EMBEDDING_MODEL_DOCKER=${USE_EMBEDDING_MODEL} \
    USE_RERANKING_MODEL_DOCKER=${USE_RERANKING_MODEL}

## Base URL Config ##
ENV OLLAMA_BASE_URL="/ollama" \
    OPENAI_API_BASE_URL=""
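# Both URLs can be overridden when the container starts, e.g. to reach an Ollama
# server running on the Docker host (illustrative; host.docker.internal availability
# depends on your Docker setup):
#   docker run -e OLLAMA_BASE_URL=http://host.docker.internal:11434 ...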

## API Key and Security Config ##
ENV OPENAI_API_KEY="" \
    WEBUI_SECRET_KEY="" \
    SCARF_NO_ANALYTICS=true \
    DO_NOT_TRACK=true \
    ANONYMIZED_TELEMETRY=false
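# These are intentionally empty / opt-out defaults; secrets are better supplied at
# run time than baked into the image, e.g. (illustrative):
#   docker run -e OPENAI_API_KEY=... -e WEBUI_SECRET_KEY="$(openssl rand -hex 32)" ...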

#### Other models #########################################################
## whisper STT model settings ##
ENV WHISPER_MODEL="base" \
    WHISPER_MODEL_DIR="/app/backend/data/cache/whisper/models"

## RAG Embedding model settings ##
ENV RAG_EMBEDDING_MODEL="$USE_EMBEDDING_MODEL_DOCKER" \
    RAG_RERANKING_MODEL="$USE_RERANKING_MODEL_DOCKER" \
    SENTENCE_TRANSFORMERS_HOME="/app/backend/data/cache/embedding/models"

## Hugging Face download cache ##
ENV HF_HOME="/app/backend/data/cache/embedding/models"
#### Other models ##########################################################
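# Note: the cache paths above live under /app/backend/data, so mounting a named
# volume there (illustrative): docker run -v open-webui:/app/backend/data ...
# persists downloaded models and user data across container recreations.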

WORKDIR /app/backend

ENV HOME=/root
# Create user and group if not root
RUN if [ $UID -ne 0 ]; then \
    if [ $GID -ne 0 ]; then \
    addgroup --gid $GID app; \
    fi; \
    adduser --uid $UID --gid $GID --home $HOME --disabled-password --no-create-home app; \
    fi
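# Illustrative only: the (untested) non-root path above is taken when building with
# e.g. --build-arg UID=1000 --build-arg GID=1000.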

RUN mkdir -p $HOME/.cache/chroma
RUN echo -n 00000000-0000-0000-0000-000000000000 > $HOME/.cache/chroma/telemetry_user_id
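# The fixed all-zero id written above should keep ChromaDB from generating a unique
# anonymous-telemetry identifier for this install (assumption based on Chroma's
# ~/.cache/chroma/telemetry_user_id convention).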

# Make sure the user has access to the app and root directory
RUN chown -R $UID:$GID /app $HOME

RUN if [ "$USE_OLLAMA" = "true" ]; then \
    apt-get update && \
    # Install pandoc and netcat
    apt-get install -y --no-install-recommends pandoc netcat-openbsd curl && \
    # for RAG OCR
    apt-get install -y --no-install-recommends ffmpeg libsm6 libxext6 && \
    # install helper tools
    apt-get install -y --no-install-recommends curl jq && \
    # install ollama
    curl -fsSL https://ollama.com/install.sh | sh && \
    # cleanup
    rm -rf /var/lib/apt/lists/*; \
    else \
    apt-get update && \
    # Install pandoc and netcat
    apt-get install -y --no-install-recommends pandoc netcat-openbsd curl jq && \
    # for RAG OCR
    apt-get install -y --no-install-recommends ffmpeg libsm6 libxext6 && \
    # cleanup
    rm -rf /var/lib/apt/lists/*; \
    fi
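# With USE_OLLAMA=true the Ollama binary is baked into this image; the bundled
# start.sh (run via CMD below) is expected to start it alongside the WebUI, using the
# USE_OLLAMA_DOCKER env var set earlier (assumption about start.sh behavior).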

# install python dependencies
COPY --chown=$UID:$GID ./backend/requirements.txt ./requirements.txt

RUN pip3 install uv && \
    if [ "$USE_CUDA" = "true" ]; then \
    # install torch built for the requested CUDA version; the whisper and embedding models are still pre-downloaded below, so they end up cached in the image
    pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/$USE_CUDA_DOCKER_VER --no-cache-dir && \
    uv pip install --system -r requirements.txt --no-cache-dir && \
    python -c "import os; from sentence_transformers import SentenceTransformer; SentenceTransformer(os.environ['RAG_EMBEDDING_MODEL'], device='cpu')" && \
    python -c "import os; from faster_whisper import WhisperModel; WhisperModel(os.environ['WHISPER_MODEL'], device='cpu', compute_type='int8', download_root=os.environ['WHISPER_MODEL_DIR'])"; \
    else \
    pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu --no-cache-dir && \
    uv pip install --system -r requirements.txt --no-cache-dir && \
    python -c "import os; from sentence_transformers import SentenceTransformer; SentenceTransformer(os.environ['RAG_EMBEDDING_MODEL'], device='cpu')" && \
    python -c "import os; from faster_whisper import WhisperModel; WhisperModel(os.environ['WHISPER_MODEL'], device='cpu', compute_type='int8', download_root=os.environ['WHISPER_MODEL_DIR'])"; \
    fi; \
    chown -R $UID:$GID /app/backend/data/
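# The sentence-transformers and faster-whisper models fetched above are stored in the
# image under SENTENCE_TRANSFORMERS_HOME and WHISPER_MODEL_DIR, so the first container
# start should not need to download them.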

# copy embedding weight from build
# RUN mkdir -p /root/.cache/chroma/onnx_models/all-MiniLM-L6-v2
# COPY --from=build /app/onnx /root/.cache/chroma/onnx_models/all-MiniLM-L6-v2/onnx

# copy built frontend files
COPY --chown=$UID:$GID --from=build /app/build /app/build
COPY --chown=$UID:$GID --from=build /app/CHANGELOG.md /app/CHANGELOG.md
COPY --chown=$UID:$GID --from=build /app/package.json /app/package.json

# copy backend files
COPY --chown=$UID:$GID ./backend .

EXPOSE 8080

HEALTHCHECK CMD curl --silent --fail http://localhost:8080/health | jq -e '.status == true' || exit 1
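# The same probe can be run by hand against a live container (illustrative):
#   curl -s http://localhost:8080/health | jq
# A healthy backend is expected to return a JSON body containing "status": true.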

USER $UID:$GID

ARG BUILD_HASH
ENV WEBUI_BUILD_VERSION=${BUILD_HASH}

CMD [ "bash", "start.sh"]