# syntax=docker/dockerfile:1

# CUDA 12.8 devel base with PyTorch 2.7.1 and cuDNN 9 preinstalled; the
# -devel variant ships nvcc and headers needed to compile the CUDA
# extensions built below (vLLM, sgl-kernel, flash-attention, SageAttention).
FROM pytorch/pytorch:2.7.1-cuda12.8-cudnn9-devel AS base

# Build-time working directory; sources are cloned and compiled under /app.
WORKDIR /app

# Non-interactive apt only during the build: ARG (not ENV) so the setting
# does not leak into the runtime environment of containers built from this
# image. The UTF-8 locale is intentionally runtime-visible, so it stays ENV.
ARG DEBIAN_FRONTEND=noninteractive
ENV LANG=C.UTF-8 \
    LC_ALL=C.UTF-8

# Speed up apt in CN networks: point both the archive and security Ubuntu
# repos at the Tsinghua (TUNA) mirror.
RUN sed -i \
        -e 's|http://archive.ubuntu.com/ubuntu/|https://mirrors.tuna.tsinghua.edu.cn/ubuntu/|g' \
        -e 's|http://security.ubuntu.com/ubuntu/|https://mirrors.tuna.tsinghua.edu.cn/ubuntu/|g' \
        /etc/apt/sources.list

# OS packages: compilers and build helpers (build-essential, ccache), dev
# headers for RDMA/NUMA (libibverbs-dev, libnuma-dev), media/X11 runtime
# libs for opencv/ffmpeg (ffmpeg, libsm6, libxext6), plus general CLI
# tooling. --no-install-recommends keeps the image lean; the apt lists are
# removed in the same layer so they never persist in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
        build-essential \
        ca-certificates \
        ccache \
        curl \
        ffmpeg \
        git \
        iproute2 \
        kmod \
        libibverbs-dev \
        libnuma-dev \
        libsm6 \
        libxext6 \
        tmux \
        unzip \
        vim \
        wget \
        zip \
    && apt-get clean && rm -rf /var/lib/apt/lists/*

# Route all subsequent pip installs through the Tsinghua PyPI mirror.
RUN pip config set global.index-url https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple

# Python build/dev tooling, upgraded to the latest available versions (-U).
RUN pip install -U --no-cache-dir \
        packaging \
        ninja \
        cmake \
        scikit-build-core \
        uv \
        ruff \
        pre-commit

# Build vLLM from source; use_existing_torch.py presumably rewrites vLLM's
# requirements to reuse the torch already in the base image — confirm against
# vLLM's source-build docs. NOTE(review): the clone is unpinned, so this
# layer is not reproducible; consider checking out a fixed tag/commit.
RUN git clone https://github.com/vllm-project/vllm.git \
    && cd vllm \
    && python use_existing_torch.py \
    && pip install -r requirements/build.txt \
    && pip install -v -e . --no-cache-dir --no-build-isolation

# Build SGLang's sgl-kernel CUDA kernels (make build), then remove
# intermediate build artifacts in the same layer (make clean).
RUN git clone https://github.com/sgl-project/sglang.git \
    && cd sglang/sgl-kernel \
    && make build \
    && make clean

# Python stack for diffusion/transformer model work (HF libraries, image and
# video I/O, misc utilities).
RUN pip install --no-cache-dir \
        diffusers \
        transformers \
        tokenizers \
        accelerate \
        safetensors \
        opencv-python \
        numpy \
        imageio \
        imageio-ffmpeg \
        einops \
        loguru \
        qtorch \
        ftfy \
        easydict

# FlashAttention, built from source (submodules pulled via --recursive).
# NOTE(review): unpinned clone — consider a fixed tag for reproducibility.
RUN git clone --recursive https://github.com/Dao-AILab/flash-attention.git

# Main flash-attn package; the build tree is removed in the same layer to
# keep it out of the image.
RUN cd flash-attention \
    && python setup.py install \
    && rm -rf build

# The kernels under flash-attention/hopper are installed as a second step.
RUN cd flash-attention/hopper \
    && python setup.py install \
    && rm -rf build

RUN git clone https://github.com/ModelTC/SageAttention.git

# Compile SageAttention for the listed CUDA architectures; the remaining
# variables (EXT_PARALLEL, NVCC --threads, MAX_JOBS) are parallel-build
# knobs to speed up compilation.
RUN cd SageAttention \
    && CUDA_ARCHITECTURES="8.0,8.6,8.9,9.0,12.0" \
       EXT_PARALLEL=4 \
       NVCC_APPEND_FLAGS="--threads 8" \
       MAX_JOBS=32 \
       pip install --no-cache-dir -v -e .

# Final working directory for containers run from this image; the compiled
# sources remain under /app. NOTE(review): no USER is set, so containers run
# as root — acceptable for a dev image, but confirm that is intended.
WORKDIR /workspace