#!/bin/bash
# Install the dependencies in CI.
set -euxo pipefail

IS_BLACKWELL=${IS_BLACKWELL:-0}

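# Pick the PyTorch wheel index: CUDA 12.9 for Blackwell runners, CUDA 12.6 otherwise.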
if [ "$IS_BLACKWELL" = "1" ]; then
    CU_VERSION="cu129"
else
    CU_VERSION="cu126"
fi

# Clear torch compilation cache
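# (removes $TORCHINDUCTOR_CACHE_DIR if set, otherwise torchinductor_<user> under the system temp dir)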
python3 -c 'import os, shutil, tempfile, getpass; cache_dir = os.environ.get("TORCHINDUCTOR_CACHE_DIR") or os.path.join(tempfile.gettempdir(), "torchinductor_" + getpass.getuser()); shutil.rmtree(cache_dir, ignore_errors=True)'

# Kill existing processes
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
bash "${SCRIPT_DIR}/../killall_sglang.sh"
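# Log which GPUs are visible to this job.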
echo "CUDA_VISIBLE_DEVICES=${CUDA_VISIBLE_DEVICES:-}"

# Install apt packages
apt install -y git libnuma-dev

# Set up the package installer (uv normally, pip on Blackwell)
if [ "$IS_BLACKWELL" = "1" ]; then
    # The Blackwell CI runner has some issues with pip and uv,
    # so we can only use pip with `--break-system-packages`.
    PIP_CMD="pip"
    PIP_INSTALL_SUFFIX="--break-system-packages"

    # Clean up existing installations
    $PIP_CMD uninstall -y flashinfer_python sgl-kernel sglang vllm $PIP_INSTALL_SUFFIX || true
else
    # In normal cases, we use uv, which is much faster than pip.
    pip install --upgrade pip
    pip install uv
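    # UV_SYSTEM_PYTHON=true makes uv install into the system Python instead of requiring a virtualenv.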
    export UV_SYSTEM_PYTHON=true

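    # unsafe-best-match lets uv pick the best candidate across PyPI and the extra PyTorch index.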
    PIP_CMD="uv pip"
    PIP_INSTALL_SUFFIX="--index-strategy unsafe-best-match"

    # Clean up existing installations
    $PIP_CMD uninstall flashinfer_python sgl-kernel sglang vllm || true
fi

# Install the main package
$PIP_CMD install -e "python[dev]" --extra-index-url https://download.pytorch.org/whl/${CU_VERSION} $PIP_INSTALL_SUFFIX

# Install router for pd-disagg test
SGLANG_ROUTER_BUILD_NO_RUST=1 $PIP_CMD install -e "sgl-router" $PIP_INSTALL_SUFFIX

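# Version declared in sgl-kernel/pyproject.toml (used for locally built wheels) and the version
# pinned as sgl-kernel== in python/pyproject.toml (used for the released wheel below).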
SGL_KERNEL_VERSION_FROM_KERNEL=$(grep -Po '(?<=^version = ")[^"]*' sgl-kernel/pyproject.toml)
SGL_KERNEL_VERSION_FROM_SRT=$(grep -Po -m1 '(?<=sgl-kernel==)[0-9A-Za-z\.\-]+' python/pyproject.toml)
echo "SGL_KERNEL_VERSION_FROM_KERNEL=${SGL_KERNEL_VERSION_FROM_KERNEL} SGL_KERNEL_VERSION_FROM_SRT=${SGL_KERNEL_VERSION_FROM_SRT}"

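# CUDA tag used in sgl-kernel wheel filenames; it is independent of the PyTorch CU_VERSION above.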
if [ "$IS_BLACKWELL" = "1" ]; then
    SGL_KERNEL_CUDA_VERSION=cu128
else
    SGL_KERNEL_CUDA_VERSION=cu124
fi

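# With CUSTOM_BUILD_SGL_KERNEL=true, install a locally built wheel from sgl-kernel/dist;
# otherwise install the released wheel from the sgl-project/whl GitHub release.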
if [ "${CUSTOM_BUILD_SGL_KERNEL:-}" = "true" ]; then
    ls -alh sgl-kernel/dist
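    # Prefer the CUDA-tagged wheel; fall back to the untagged wheel name if it is absent.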
    WHEEL_FILE=$(ls sgl-kernel/dist/sgl_kernel-${SGL_KERNEL_VERSION_FROM_KERNEL}+${SGL_KERNEL_CUDA_VERSION}-cp310-abi3-manylinux2014_x86_64.whl 2>/dev/null || true)
    if [ -f "$WHEEL_FILE" ]; then
      $PIP_CMD install "$WHEEL_FILE" --force-reinstall $PIP_INSTALL_SUFFIX
    else
      $PIP_CMD install sgl-kernel/dist/sgl_kernel-${SGL_KERNEL_VERSION_FROM_KERNEL}-cp310-abi3-manylinux2014_x86_64.whl --force-reinstall $PIP_INSTALL_SUFFIX
    fi
else
    $PIP_CMD install https://github.com/sgl-project/whl/releases/download/v${SGL_KERNEL_VERSION_FROM_SRT}/sgl_kernel-${SGL_KERNEL_VERSION_FROM_SRT}+${SGL_KERNEL_CUDA_VERSION}-cp310-abi3-manylinux2014_x86_64.whl --force-reinstall $PIP_INSTALL_SUFFIX
fi

# Show current packages
$PIP_CMD list

# Install additional dependencies
$PIP_CMD install mooncake-transfer-engine==0.3.6.post1 nvidia-cuda-nvrtc-cu12 py-spy huggingface_hub[hf_xet] $PIP_INSTALL_SUFFIX

if [ "$IS_BLACKWELL" != "1" ]; then
    # Install lmms-eval for evaluating MMMU
    git clone --branch v0.3.3 --depth 1 https://github.com/EvolvingLMMs-Lab/lmms-eval.git
    $PIP_CMD install -e lmms-eval/ $PIP_INSTALL_SUFFIX

    # Install xformers
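    # --no-deps avoids reinstalling xformers' dependencies (notably torch) from the PyTorch index.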
    $PIP_CMD install xformers --index-url https://download.pytorch.org/whl/${CU_VERSION} --no-deps $PIP_INSTALL_SUFFIX
fi

# Install FlashMLA for attention backend tests
# $PIP_CMD install git+https://github.com/deepseek-ai/FlashMLA.git $PIP_INSTALL_SUFFIX

# Show current packages
$PIP_CMD list


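# Authenticate with Hugging Face when a token is provided (e.g., for gated model access).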
if [ -n "${HF_TOKEN:-}" ]; then
    $PIP_CMD install -U "huggingface_hub[cli]" $PIP_INSTALL_SUFFIX
    hf auth login --token "$HF_TOKEN"
fi