Mirror of https://github.com/hpcaitech/ColossalAI.git (synced 2025-09-14 05:33:23 +00:00)
[legacy] move communication and nn to legacy and refactor logger (#4671)
* [legacy] move communication to legacy (#4640)
* [legacy] refactor logger and clean up legacy code (#4654)
* [legacy] make logger independent of gpc
* [legacy] make optim independent of the registry
* [legacy] move test engine to legacy
* [legacy] move nn to legacy (#4656)
* [legacy] move nn to legacy
* [checkpointio] fix save hf config
* [test] remove useless rpc pp test
* [legacy] fix nn init
* [example] skip tutorial hybrid parallel example
* [devops] test doc check
* [devops] test doc check
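The "make logger independent of gpc" step lends itself to a short illustration. A minimal sketch of the idea, assuming the refactor lets the logger discover its rank without the global parallel context (the helper name `_current_rank` is hypothetical, not taken from the diff):

import torch.distributed as dist


def _current_rank() -> int:
    # Hypothetical sketch of decoupling the logger from gpc: query
    # torch.distributed directly instead of colossalai.core.global_context.
    # Safe to call even when no distributed context was ever launched.
    if dist.is_available() and dist.is_initialized():
        return dist.get_rank()
    return 0

Under that assumption, `colossalai.logging` stays importable (and usable for rank-0-only messages) in scripts that never call `colossalai.initialize.launch`.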
tests/test_legacy/test_layers/test_2p5d/test_2p5d.py (new file, 57 lines)
@@ -0,0 +1,57 @@
import pytest
import torch
from checks_2p5d.check_layer_2p5d import *
from checks_2p5d.check_operation_2p5d import check_AB, check_ABT, check_ATB

from colossalai.core import global_context as gpc
from colossalai.initialize import launch
from colossalai.logging import disable_existing_loggers
from colossalai.testing import rerun_if_address_is_in_use, spawn

# 2.5D tensor parallelism on 4 devices: a 2x2 grid with depth 1.
CONFIG = dict(parallel=dict(
    pipeline=dict(size=1),
    tensor=dict(size=4, mode='2.5d', depth=1),
),)


def check_operations():
    # Matrix-multiply variants used by the 2.5D layers: A@B, A@B^T, A^T@B.
    check_AB()
    check_ABT()
    check_ATB()


def check_layer():
    check_linear()
    check_layernorm()
    check_embed()
    check_patch_embed()
    check_vocab_parallel_embed()
    check_classifier_no_given_weight()
    check_vocab_parallel_classifier_no_given_weight()
    check_classifier_given_embed_weight()
    check_vocab_parallel_classifier_given_embed_weight()
    check_loss()
    check_vocab_parallel_loss()


def check_layer_and_operation(rank, world_size, port):
    disable_existing_loggers()
    launch(config=CONFIG, rank=rank, world_size=world_size, host='localhost', port=port, backend='nccl')

    # Disable TF32 so results are reproducible across ranks.
    torch.backends.cuda.matmul.allow_tf32 = False
    torch.backends.cudnn.allow_tf32 = False
    torch.backends.cudnn.deterministic = True
    check_operations()
    check_layer()
    gpc.destroy()
    torch.cuda.empty_cache()


@pytest.mark.dist
@rerun_if_address_is_in_use()
def test_2p5d():
    spawn(check_layer_and_operation, 4)


if __name__ == '__main__':
    test_2p5d()
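For context on the CONFIG above: 2.5D tensor parallelism arranges q * q * depth workers as a q x q grid replicated over the depth dimension, so tensor size 4 with depth 1 implies q = 2, which is why the test spawns exactly 4 processes. A small sanity-check sketch of that arithmetic (the helper `infer_2p5d_grid` is illustrative, not part of the diff):

import math


def infer_2p5d_grid(tensor_size: int, depth: int) -> int:
    # 2.5D parallelism requires tensor_size == q * q * depth.
    q = math.isqrt(tensor_size // depth)
    assert q * q * depth == tensor_size, "tensor size must be q**2 * depth"
    return q


print(infer_2p5d_grid(4, 1))  # 2, i.e. a 2x2 grid, matching spawn(..., 4)

To run the test itself, invoke `pytest tests/test_legacy/test_layers/test_2p5d/test_2p5d.py` on a machine with at least four GPUs; the `nccl` backend passed to `launch` makes CUDA devices mandatory.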