Commit 4976650f authored by Tri Dao

Set single threaded compilation for CUDA 12.2 so CI doesn't OOM

parent 6a89b2f1
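
For context: nvcc (CUDA 11.2 and later) can compile device code for multiple target architectures in parallel via `--threads N`, which speeds up builds but multiplies peak memory use; that memory blow-up is the CI OOM this commit works around. Below is a minimal sketch of the knob being toggled. The `nvcc` invocation and file name are illustrative placeholders, not the actual command flash-attn's `setup.py` generates:

```bash
# Illustrative only: map the workflow's env var to nvcc's compile parallelism.
# flash-attn's real handling of FLASH_ATTENTION_FORCE_SINGLE_THREAD lives in setup.py.
if [[ "${FLASH_ATTENTION_FORCE_SINGLE_THREAD:-}" == "TRUE" ]]; then
    nvcc_threads=1   # single-threaded: slower, but bounded memory
else
    nvcc_threads=4   # parallel: faster, but roughly N times the peak memory
fi
nvcc --threads "${nvcc_threads}" -c kernel.cu -o kernel.o   # kernel.cu is a placeholder
```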
@@ -151,7 +151,7 @@ jobs:
     export PATH=/usr/local/nvidia/bin:/usr/local/nvidia/lib64:$PATH
     export LD_LIBRARY_PATH=/usr/local/nvidia/lib64:/usr/local/cuda/lib64:$LD_LIBRARY_PATH
     # Currently for this setting the runner goes OOM if we pass --threads 4 to nvcc
-    if [[ ${MATRIX_CUDA_VERSION} =~ "12." && ${MATRIX_TORCH_VERSION} == "2.1" ]]; then
+    if [[ ( ${MATRIX_CUDA_VERSION} == "121" || ${MATRIX_CUDA_VERSION} == "122" ) && ${MATRIX_TORCH_VERSION} == "2.1" ]]; then
       export FLASH_ATTENTION_FORCE_SINGLE_THREAD="TRUE"
     fi
     # Limit MAX_JOBS otherwise the github runner goes OOM
...
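
Why the condition changed: in bash, any quoted part of the right-hand side of `=~` is matched literally, so `[[ ${MATRIX_CUDA_VERSION} =~ "12." ]]` requires the literal substring `12.`, dot included. If the matrix values are dot-free tags like `121` and `122` (as the replacement comparisons suggest), the old test never matched and the single-thread fallback was silently skipped. A quick demonstration of the gotcha:

```bash
v="122"
[[ $v =~ "12." ]] && echo quoted      # no output: quoted '.' is a literal dot
[[ $v =~ 12. ]]   && echo unquoted    # prints: unquoted '.' matches any character
[[ $v == "121" || $v == "122" ]] && echo explicit   # prints: unambiguous equality
```

The explicit equality checks trade a terse pattern for a condition whose behavior doesn't hinge on bash's quoting rules.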
-__version__ = "2.1.2.post2"
+__version__ = "2.1.2.post3"
 from flash_attn.flash_attn_interface import (
     flash_attn_func,
...
@@ -85,11 +85,11 @@ RUN pip install transformers==4.25.1 datasets==2.8.0 pytorch-lightning==1.8.6 tr
 RUN pip install git+https://github.com/mlcommons/logging.git@2.1.0
 # Install FlashAttention
-RUN pip install flash-attn==2.1.2.post2
+RUN pip install flash-attn==2.1.2.post3
 # Install CUDA extensions for cross-entropy, fused dense, layer norm
 RUN git clone https://github.com/HazyResearch/flash-attention \
-    && cd flash-attention && git checkout v2.1.2.post2 \
+    && cd flash-attention && git checkout v2.1.2.post3 \
     && cd csrc/fused_softmax && pip install . && cd ../../ \
     && cd csrc/rotary && pip install . && cd ../../ \
     && cd csrc/xentropy && pip install . && cd ../../ \
...