[CI] use flash_attn==2.7.4.post1

botbw 2025-07-10 12:42:42 +08:00
parent 4b7727996e
commit 12c55538f5

@@ -138,6 +138,10 @@ jobs:
           cp -p -r /github/home/cuda_ext_cache/* /__w/ColossalAI/ColossalAI/
         fi
+      - name: Install flash-attention
+        run: |
+          pip install flash-attn==2.7.4.post1 --no-build-isolation
       - name: Install Colossal-AI
         run: |
           BUILD_EXT=1 pip install -v -e .
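
For reference, the --no-build-isolation flag makes pip build flash-attn against the torch and CUDA toolchain already present in the CI image rather than in an isolated build environment, which flash-attn's build requires since its setup imports torch. Below is a minimal sketch of an optional follow-up step that would confirm the pinned version was actually installed; it is not part of this commit, the step name is hypothetical, and it assumes the package exposes flash_attn.__version__ (recent releases do):

      - name: Check flash-attention version
        run: |
          # Fail fast if a different flash-attn version ended up in the environment
          python -c "import flash_attn; assert flash_attn.__version__ == '2.7.4.post1', flash_attn.__version__"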