[misc] update pre-commit and run all files (#4752)

* [misc] update pre-commit

* [misc] run pre-commit

* [misc] remove useless configuration files

* [misc] ignore cuda for clang-format
Author: Hongxin Liu
Date: 2023-09-19 14:20:26 +08:00 (committed by GitHub)
Parent: 3c6b831c26
Commit: 079bf3cb26
1268 changed files with 50037 additions and 38444 deletions
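The "ignore cuda for clang-format" item is usually implemented as an exclude pattern on the clang-format hook in .pre-commit-config.yaml. Below is a minimal sketch of that idea; the mirror repo and hook id are the standard pre-commit ones, but the revision and the exact exclude regex are illustrative assumptions, not the actual contents of this repository's config.

# .pre-commit-config.yaml (illustrative sketch, not the file changed in this commit)
repos:
  - repo: https://github.com/pre-commit/mirrors-clang-format
    rev: v16.0.6                 # assumed hook revision, for illustration only
    hooks:
      - id: clang-format
        # skip CUDA sources so clang-format leaves .cu/.cuh files untouched
        exclude: '\.(cu|cuh)$'   # assumed pattern; the real regex may differ

Re-running the updated hooks over the whole tree with "pre-commit run --all-files" is what produces the mechanical quote and spacing changes visible in the diff below.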

@@ -4,7 +4,7 @@ from colossalai.legacy.amp import AMP_TYPE
 TRAIN_ITERS = 10
 DECAY_ITERS = 4
 WARMUP_FRACTION = 0.01
-GLOBAL_BATCH_SIZE = 32 # dp world size * sentences per GPU
+GLOBAL_BATCH_SIZE = 32  # dp world size * sentences per GPU
 EVAL_ITERS = 10
 EVAL_INTERVAL = 10
 LR = 0.0001
@@ -28,8 +28,8 @@ SEED = 1234
 NUM_MICRO_BATCHES = 4
 # colossalai config
-parallel = dict(pipeline=1, tensor=dict(size=2, mode='sequence'))
+parallel = dict(pipeline=1, tensor=dict(size=2, mode="sequence"))
 fp16 = dict(mode=AMP_TYPE.NAIVE, verbose=True)
-gradient_handler = [dict(type='SequenceParallelGradientHandler')]
+gradient_handler = [dict(type="SequenceParallelGradientHandler")]