#!/usr/bin/env bash

set -e

# install triton and transformers
pip install triton
pip install transformers
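
# (optional) sanity check, a minimal sketch: confirm both packages import
# cleanly before building the heavier source dependencies below
python -c "import triton, transformers; print(triton.__version__, transformers.__version__)"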

# install lightllm and flash-attention
mkdir -p 3rdParty
cd 3rdParty

git clone https://github.com/ModelTC/lightllm
cd lightllm
# pin to a specific lightllm revision for a reproducible build
git checkout 28c1267cfca536b7b4f28e921e03de735b003039
pip install -e .
cd ..
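
# (optional) sanity check, a minimal sketch: confirm the editable install is
# importable (-I keeps the sibling source checkout from shadowing it)
python -I -c "import lightllm"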

git clone --recursive https://github.com/Dao-AILab/flash-attention
cd flash-attention
pip install -e .
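
# note: the step above compiles CUDA extensions, which can exhaust RAM on
# smaller machines; flash-attention documents the MAX_JOBS variable to cap
# parallel compile jobs, e.g.:
#   MAX_JOBS=4 pip install -e .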

# return to the directory the script started from
cd ../../
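
# (optional) end-to-end smoke test, a minimal sketch assuming a CUDA GPU and a
# FlashAttention 2.x checkout (the clone above is unpinned): run one forward pass
python - <<'EOF'
import torch
from flash_attn import flash_attn_func

# flash_attn_func expects (batch, seqlen, nheads, headdim) fp16/bf16 CUDA tensors
q = torch.randn(1, 16, 4, 64, dtype=torch.float16, device="cuda")
k = torch.randn(1, 16, 4, 64, dtype=torch.float16, device="cuda")
v = torch.randn(1, 16, 4, 64, dtype=torch.float16, device="cuda")
out = flash_attn_func(q, k, v)
print("flash-attention OK, output shape:", tuple(out.shape))
EOF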