Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-05 11:02:05 +00:00
[test] refactor tests with spawn (#3452)
* [test] added spawn decorator
* polish code
* polish code
* polish code
* polish code
* polish code
* polish code
@@ -1,15 +1,14 @@
-from functools import partial
 import pytest
 import torch
 import torch.nn as nn
-import torch.multiprocessing as mp
 
 import colossalai
-from colossalai.context import ParallelMode
-from colossalai.core import global_context as gpc
-from colossalai.utils import free_port, get_current_device
-from colossalai.nn.layer.moe import Top1Router, Top2Router, MoeLayer, Experts
 from colossalai.context.moe_context import MOE_CONTEXT
-from colossalai.testing import rerun_if_address_is_in_use
+from colossalai.core import global_context as gpc
+from colossalai.nn.layer.moe import Experts, MoeLayer, Top1Router, Top2Router
+from colossalai.testing import rerun_if_address_is_in_use, spawn
+from colossalai.utils import get_current_device
 
 BATCH_SIZE = 16
 NUM_EXPERTS = 4
@@ -90,15 +89,7 @@ def run_routing(rank, world_size, port, rs=2, hidden_size=128, data_type=torch.f
 @pytest.mark.parametrize("router", [Top1Router, Top2Router])
 @rerun_if_address_is_in_use()
 def test_moe_kernel(rs, hidden_size, data_type, router):
-    world_size = 4
-    run_func = partial(run_routing,
-                       world_size=world_size,
-                       port=free_port(),
-                       rs=rs,
-                       hidden_size=hidden_size,
-                       data_type=data_type,
-                       router=router)
-    mp.spawn(run_func, nprocs=world_size)
+    spawn(run_routing, 4, rs=rs, hidden_size=hidden_size, data_type=data_type, router=router)
 
 
 if __name__ == '__main__':
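For context, the refactor replaces the hand-rolled mp.spawn + functools.partial + free_port() pattern with the new colossalai.testing.spawn helper imported in the first hunk. Below is a minimal, hypothetical sketch of what such a helper could do, assuming it simply bundles world_size, a fresh port, and the test-specific keyword arguments before delegating to torch.multiprocessing.spawn; the actual implementation in colossalai.testing may differ in signature and details, and _free_port here is a stand-in for the free_port utility removed from the imports.

# Minimal sketch of a spawn() test helper, for illustration only.
# Assumptions: the helper injects world_size and a free port as keyword
# arguments and lets torch.multiprocessing pass the rank positionally,
# matching run_routing(rank, world_size, port, ...).
import socket
from functools import partial

import torch.multiprocessing as mp


def _free_port() -> int:
    # Ask the OS for an unused TCP port (stand-in for the free_port()
    # utility that each test previously called by hand).
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.bind(("", 0))
        return s.getsockname()[1]


def spawn(func, nprocs=1, **kwargs):
    # Launch one process per rank; mp.spawn calls run_func(rank) for each
    # rank, and partial() supplies the remaining arguments by keyword.
    run_func = partial(func, world_size=nprocs, port=_free_port(), **kwargs)
    mp.spawn(run_func, nprocs=nprocs)

With a helper along these lines, each test body collapses to the single spawn(run_routing, 4, ...) call shown in the second hunk, and free_port, partial, and torch.multiprocessing no longer need to be imported in every test file.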