# Rust builder
FROM lukemathwalker/cargo-chef:latest-rust-1.71 AS chef
WORKDIR /usr/src

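# Use the sparse protocol for the crates.io index to speed up dependency resolution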
ARG CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse

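# cargo-chef planner: record the workspace dependency graph so the build below can cache compiled dependencies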
FROM chef as planner
COPY Cargo.toml Cargo.toml
COPY rust-toolchain.toml rust-toolchain.toml
COPY proto proto
COPY benchmark benchmark
COPY router router
COPY launcher launcher
RUN cargo chef prepare --recipe-path recipe.json

FROM chef AS builder

ARG GIT_SHA
ARG DOCKER_LABEL

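# Install protoc, needed to compile the gRPC protobuf definitions in `proto`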
RUN PROTOC_ZIP=protoc-21.12-linux-x86_64.zip && \
    curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v21.12/$PROTOC_ZIP && \
    unzip -o $PROTOC_ZIP -d /usr/local bin/protoc && \
    unzip -o $PROTOC_ZIP -d /usr/local 'include/*' && \
    rm -f $PROTOC_ZIP

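# Build only the dependencies from the planner recipe so this layer stays cached until they change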
COPY --from=planner /usr/src/recipe.json recipe.json
RUN cargo chef cook --release --recipe-path recipe.json

COPY Cargo.toml Cargo.toml
COPY rust-toolchain.toml rust-toolchain.toml
COPY proto proto
COPY benchmark benchmark
COPY router router
COPY launcher launcher
RUN cargo build --release

# Python builder
# Adapted from: https://github.com/pytorch/pytorch/blob/master/Dockerfile
FROM nvidia/cuda:12.1.0-devel-ubuntu20.04 as pytorch-install

ARG PYTORCH_VERSION=2.1.1
ARG PYTHON_VERSION=3.10
# Keep in sync with `server/pyproject.toml`
ARG CUDA_VERSION=12.1
ARG MAMBA_VERSION=23.3.1-1
ARG CUDA_CHANNEL=nvidia
ARG INSTALL_CHANNEL=pytorch
# Automatically set by buildx
ARG TARGETPLATFORM

ENV PATH /opt/conda/bin:$PATH

RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
        build-essential \
        ca-certificates \
        ccache \
        curl \
        git && \
        rm -rf /var/lib/apt/lists/*

# Install conda
# translating Docker's TARGETPLATFORM into mamba arches
RUN case ${TARGETPLATFORM} in \
         "linux/arm64")  MAMBA_ARCH=aarch64  ;; \
         *)              MAMBA_ARCH=x86_64   ;; \
    esac && \
    curl -fsSL -v -o ~/mambaforge.sh "https://github.com/conda-forge/miniforge/releases/download/${MAMBA_VERSION}/Mambaforge-${MAMBA_VERSION}-Linux-${MAMBA_ARCH}.sh"
RUN chmod +x ~/mambaforge.sh && \
    bash ~/mambaforge.sh -b -p /opt/conda && \
    rm ~/mambaforge.sh

# Install pytorch
# On arm64 we exit with an error code
RUN case ${TARGETPLATFORM} in \
         "linux/arm64")  exit 1 ;; \
         *)              /opt/conda/bin/conda update -y conda &&  \
                         /opt/conda/bin/conda install -c "${INSTALL_CHANNEL}" -c "${CUDA_CHANNEL}" -y "python=${PYTHON_VERSION}" "pytorch=$PYTORCH_VERSION" "pytorch-cuda=$(echo $CUDA_VERSION | cut -d'.' -f 1-2)"  ;; \
    esac && \
    /opt/conda/bin/conda clean -ya

# CUDA kernels builder image
FROM pytorch-install as kernel-builder

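# Limit the number of parallel compilation jobs when building the CUDA kernels below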
ARG MAX_JOBS=8

RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
        ninja-build \
        && rm -rf /var/lib/apt/lists/*

# Build Flash Attention CUDA kernels
FROM kernel-builder as flash-att-builder

WORKDIR /usr/src

COPY server/Makefile-flash-att Makefile

# Build specific version of flash attention
RUN make build-flash-attention

# Build Flash Attention v2 CUDA kernels
FROM kernel-builder as flash-att-v2-builder

WORKDIR /usr/src

COPY server/Makefile-flash-att-v2 Makefile

# Build specific version of flash attention v2
RUN make build-flash-attention-v2

# Build Transformers exllama kernels
FROM kernel-builder as exllama-kernels-builder
WORKDIR /usr/src
COPY server/exllama_kernels/ .
# Build the vendored exllama kernels
RUN TORCH_CUDA_ARCH_LIST="8.0;8.6+PTX" python setup.py build

# Build Transformers awq kernels
FROM kernel-builder as awq-kernels-builder
WORKDIR /usr/src
COPY server/Makefile-awq Makefile
# Build specific version of the awq kernels
RUN TORCH_CUDA_ARCH_LIST="8.0;8.6+PTX" make build-awq

# Build eetq kernels
FROM kernel-builder as eetq-kernels-builder
WORKDIR /usr/src
COPY server/Makefile-eetq Makefile
# Build specific version of the eetq kernels
RUN TORCH_CUDA_ARCH_LIST="8.0;8.6+PTX" make build-eetq

# Build Transformers CUDA kernels
FROM kernel-builder as custom-kernels-builder
WORKDIR /usr/src
COPY server/custom_kernels/ .
# Build the custom transformers CUDA kernels
RUN python setup.py build

# Build vllm CUDA kernels
FROM kernel-builder as vllm-builder

WORKDIR /usr/src

COPY server/Makefile-vllm Makefile

# Build specific version of vllm
RUN make build-vllm

# Text Generation Inference base image
FROM nvidia/cuda:12.1.0-base-ubuntu20.04 as base

# Conda env
ENV PATH=/opt/conda/bin:$PATH \
    CONDA_PREFIX=/opt/conda

# Text Generation Inference base env
ENV HUGGINGFACE_HUB_CACHE=/data \
    HF_HUB_ENABLE_HF_TRANSFER=1 \
    PORT=80

WORKDIR /usr/src

RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
        libssl-dev \
        ca-certificates \
        make \
        curl \
        && rm -rf /var/lib/apt/lists/*

# Copy conda with PyTorch installed
COPY --from=pytorch-install /opt/conda /opt/conda

# Copy build artifacts from flash attention builder
COPY --from=flash-att-builder /usr/src/flash-attention/build/lib.linux-x86_64-cpython-310 /opt/conda/lib/python3.10/site-packages
COPY --from=flash-att-builder /usr/src/flash-attention/csrc/layer_norm/build/lib.linux-x86_64-cpython-310 /opt/conda/lib/python3.10/site-packages
COPY --from=flash-att-builder /usr/src/flash-attention/csrc/rotary/build/lib.linux-x86_64-cpython-310 /opt/conda/lib/python3.10/site-packages

# Copy build artifacts from flash attention v2 builder
COPY --from=flash-att-v2-builder /usr/src/flash-attention-v2/build/lib.linux-x86_64-cpython-310 /opt/conda/lib/python3.10/site-packages

# Copy build artifacts from custom kernels builder
COPY --from=custom-kernels-builder /usr/src/build/lib.linux-x86_64-cpython-310 /opt/conda/lib/python3.10/site-packages
# Copy build artifacts from exllama kernels builder
COPY --from=exllama-kernels-builder /usr/src/build/lib.linux-x86_64-cpython-310 /opt/conda/lib/python3.10/site-packages
# Copy build artifacts from awq kernels builder
COPY --from=awq-kernels-builder /usr/src/llm-awq/awq/kernels/build/lib.linux-x86_64-cpython-310 /opt/conda/lib/python3.10/site-packages
# Copy build artifacts from eetq kernels builder
COPY --from=eetq-kernels-builder /usr/src/eetq/build/lib.linux-x86_64-cpython-310 /opt/conda/lib/python3.10/site-packages

# Copy build artifacts from vllm builder
COPY --from=vllm-builder /usr/src/vllm/build/lib.linux-x86_64-cpython-310 /opt/conda/lib/python3.10/site-packages

# Install flash-attention dependencies
RUN pip install einops --no-cache-dir

# Install server
COPY proto proto
COPY server server
COPY server/Makefile server/Makefile
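# `make gen-server` generates the Python gRPC stubs from the proto files before installing the server package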
RUN cd server && \
    make gen-server && \
    pip install -r requirements.txt && \
    pip install ".[bnb, accelerate, quantize, peft]" --no-cache-dir

# Install benchmarker
COPY --from=builder /usr/src/target/release/text-generation-benchmark /usr/local/bin/text-generation-benchmark
# Install router
COPY --from=builder /usr/src/target/release/text-generation-router /usr/local/bin/text-generation-router
# Install launcher
COPY --from=builder /usr/src/target/release/text-generation-launcher /usr/local/bin/text-generation-launcher

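# Keep a C/C++ toolchain in the runtime image (assumed to be needed by dependencies that compile extensions at runtime)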
RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
        build-essential \
        g++ \
        && rm -rf /var/lib/apt/lists/*

# AWS SageMaker compatible image
FROM base as sagemaker

COPY sagemaker-entrypoint.sh entrypoint.sh
RUN chmod +x entrypoint.sh

ENTRYPOINT ["./entrypoint.sh"]

# Final image
FROM base

ENTRYPOINT ["text-generation-launcher"]
CMD ["--json-output"]