Mirror of https://github.com/hpcaitech/ColossalAI.git (synced 2025-09-07 03:52:01 +00:00)
[example] add llama2 example (#4527)
* [example] transfer llama-1 example
* [example] fit llama-2
* [example] refactor scripts folder
* [example] fit new gemini plugin
* [cli] fix multinode runner
* [example] fit gemini optim checkpoint
* [example] refactor scripts
* [example] update requirements
* [example] update requirements
* [example] rename llama to llama2
* [example] update readme and pretrain script
* [example] refactor scripts
examples/language/llama2/scripts/benchmark_70B/3d.sh (new file, 17 lines)
@@ -0,0 +1,17 @@
#!/bin/bash

# TODO: fix this
echo "3D parallel for LLaMA-2 is not ready yet"
exit 1

################
#Load your environments and modules here
################

HOSTFILE=$(realpath hosts.txt)

cd ../..

export OMP_NUM_THREADS=8

colossalai run --nproc_per_node 8 --hostfile $HOSTFILE benchmark.py -c 70b -p 3d -g -x -b 8 --tp 4 --pp 2 --mbs 4
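Each of these scripts resolves a hosts.txt next to the launch script (HOSTFILE=$(realpath hosts.txt)) and hands it to colossalai run. The commit does not ship a sample hostfile; as an assumption based on the usual colossalai run hostfile convention, it is a plain list with one reachable hostname or IP per line, one entry per node:

# hypothetical hosts.txt placed in the same scripts/benchmark_* directory; hostnames are placeholders
node-001
node-002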
examples/language/llama2/scripts/benchmark_70B/gemini.sh (new file, 13 lines)
@@ -0,0 +1,13 @@
#!/bin/bash

################
#Load your environments and modules here
################

HOSTFILE=$(realpath hosts.txt)

cd ../..

export OMP_NUM_THREADS=8

colossalai run --nproc_per_node 8 --hostfile $HOSTFILE benchmark.py -c 70b -g -x -b 2
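For a quick sanity check before a multi-node run, the same command can be launched on one node. This is a sketch, assuming colossalai run falls back to the local machine when --hostfile is omitted; it reuses only flags that already appear in these scripts:

cd examples/language/llama2
export OMP_NUM_THREADS=8
# single-node smoke test of the Gemini path (assumption: defaults apply when -c/-p are omitted, as in the 7B scripts)
colossalai run --nproc_per_node 8 benchmark.py -g -x -b 2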
examples/language/llama2/scripts/benchmark_70B/gemini_auto.sh (new file, 13 lines)
@@ -0,0 +1,13 @@
#!/bin/bash

################
#Load your environments and modules here
################

HOSTFILE=$(realpath hosts.txt)

cd ../..

export OMP_NUM_THREADS=8

colossalai run --nproc_per_node 8 --hostfile $HOSTFILE benchmark.py -c 70b -p gemini_auto -g -x -b 2
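The two 70B Gemini scripts differ only in the -p gemini_auto flag passed to benchmark.py (plugin selection); the batch size and the rest of the launch line are identical. A hedged convenience wrapper, not part of this commit, could expose that choice as an argument:

#!/bin/bash
# hypothetical run_70b.sh: pick the plugin at launch time; defaults to gemini_auto, which this commit uses
PLUGIN=${1:-gemini_auto}
HOSTFILE=$(realpath hosts.txt)
cd ../..
export OMP_NUM_THREADS=8
colossalai run --nproc_per_node 8 --hostfile $HOSTFILE benchmark.py -c 70b -p $PLUGIN -g -x -b 2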
examples/language/llama2/scripts/benchmark_7B/gemini.sh (new file, 13 lines)
@@ -0,0 +1,13 @@
#!/bin/bash

################
#Load your environments and modules here
################

HOSTFILE=$(realpath hosts.txt)

cd ../..

export OMP_NUM_THREADS=8

colossalai run --nproc_per_node 8 --hostfile $HOSTFILE benchmark.py -g -x -b 16
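All of these scripts pin export OMP_NUM_THREADS=8. As a sketch outside this commit, the value could instead be derived from the node's core count so that each of the 8 ranks per node gets an even share of CPU threads:

# assumes 8 processes per node, matching --nproc_per_node 8; nproc comes from GNU coreutils
export OMP_NUM_THREADS=$(( $(nproc) / 8 ))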
examples/language/llama2/scripts/benchmark_7B/gemini_auto.sh (new file, 13 lines)
@@ -0,0 +1,13 @@
#!/bin/bash

################
#Load your environments and modules here
################

HOSTFILE=$(realpath hosts.txt)

cd ../..

export OMP_NUM_THREADS=8

colossalai run --nproc_per_node 8 --hostfile $HOSTFILE benchmark.py -p gemini_auto -g -x -b 16
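Because every script runs cd ../.. after resolving hosts.txt, it expects to be invoked from its own scripts/benchmark_* directory (two levels below examples/language/llama2, where benchmark.py lives) with hosts.txt sitting alongside it. A minimal launch using the paths added by this commit:

cd examples/language/llama2/scripts/benchmark_7B
bash gemini.sh    # or gemini_auto.sh; the 70B variants are in ../benchmark_70B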