Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-18 07:31:19 +00:00
[test] merge old components to test to model zoo (#4945)
* [test] add custom models in model zoo
* [test] update legacy test
* [test] update model zoo
* [test] update gemini test
* [test] remove components to test
@@ -6,7 +6,7 @@ import torch
 import colossalai
 from colossalai.legacy.amp import convert_to_apex_amp, convert_to_naive_amp
 from colossalai.testing import assert_close_loose, clear_cache_before_run, rerun_if_address_is_in_use, spawn
-from tests.components_to_test.registry import non_distributed_component_funcs
+from tests.kit.model_zoo import model_zoo
 
 
 def check_equal(a, b):
@@ -25,13 +25,12 @@ def run_naive_amp():
     torch.backends.cudnn.deterministic = True
 
     # create layer
-    test_models = ["repeated_computed_layers", "nested_model", "resnet18"]
+    test_models = ["custom_repeated_computed_layers", "custom_nested_model", "torchvision_resnet18"]
     for test_name in test_models:
-        get_component_func = non_distributed_component_funcs.get_callable(test_name)
-        model_builder, train_dataloader, _, optim_class, _ = get_component_func()
+        model_builder, data_gen_fn, *_ = next(iter(model_zoo.get_sub_registry(test_name).values()))
 
         # create model
-        naive_amp_model = model_builder(checkpoint=True).cuda()
+        naive_amp_model = model_builder().cuda()
         apex_amp_model = copy.deepcopy(naive_amp_model)
 
         # create optimizer
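The new lookup pulls the first registry entry whose key matches the test name out of the model zoo, and the entry's first two fields are a no-argument model builder and a data-generation function. A minimal sketch of that pattern with a stand-in dict-based registry (the names _registry, build_resnet18, gen_resnet_data, and get_sub_registry here are illustrative, not the actual model zoo internals):

import torch
import torchvision.models as tv_models

def build_resnet18():
    # hypothetical builder: takes no arguments, returns a fresh model
    return tv_models.resnet18(num_classes=10)

def gen_resnet_data():
    # hypothetical data generator: returns kwargs for the forward pass
    return dict(x=torch.rand(4, 3, 32, 32))

# stand-in for model_zoo: maps a registered name to (model_builder, data_gen_fn, ...)
_registry = {"torchvision_resnet18": (build_resnet18, gen_resnet_data)}

def get_sub_registry(keyword):
    # return every entry whose registered name contains the keyword
    return {k: v for k, v in _registry.items() if keyword in k}

model_builder, data_gen_fn, *_ = next(iter(get_sub_registry("torchvision_resnet18").values()))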
@@ -48,13 +47,12 @@ def run_naive_amp():
         apex_amp_model, apex_amp_optimizer = convert_to_apex_amp(apex_amp_model, apex_amp_optimizer, apex_amp_config)
 
         # create data
-        data_iter = iter(train_dataloader)
-        data, label = next(data_iter)
-        data = data.cuda()
+        data = data_gen_fn()
+        data = {k: v.cuda() if isinstance(v, torch.Tensor) else v for k, v in data.items()}
 
         # forward pass
-        naive_amp_output = naive_amp_model(data)
-        apex_amp_output = apex_amp_model(data)
+        naive_amp_output = naive_amp_model(**data)
+        apex_amp_output = apex_amp_model(**data)
         assert_close_loose(naive_amp_output, apex_amp_output)
 
         # backward
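Because data_gen_fn returns a dict of keyword arguments rather than a (data, label) tuple, tensors are moved to the GPU with a dict comprehension and the whole dict is unpacked into the forward call. A small self-contained illustration of that idiom (the model here is a stand-in, not one from the zoo):

import torch
import torch.nn as nn

model = nn.Linear(8, 2).cuda()

def data_gen_fn():
    # returns forward-pass kwargs; non-tensor entries are left untouched
    return dict(input=torch.rand(4, 8))

data = data_gen_fn()
data = {k: v.cuda() if isinstance(v, torch.Tensor) else v for k, v in data.items()}
output = model(**data)  # equivalent to model(input=data["input"])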
@@ -6,7 +6,7 @@ import torch
 import colossalai
 from colossalai.legacy.amp import convert_to_apex_amp, convert_to_torch_amp
 from colossalai.testing import assert_close_loose, clear_cache_before_run, rerun_if_address_is_in_use, spawn
-from tests.components_to_test.registry import non_distributed_component_funcs
+from tests.kit.model_zoo import model_zoo
 
 
 def run_torch_amp():
@@ -18,13 +18,12 @@ def run_torch_amp():
     torch.backends.cudnn.deterministic = True
 
     # create layer
-    test_models = ["resnet18", "simple_net"]
+    test_models = ["torchvision_resnet18", "custom_simple_net"]
    for test_name in test_models:
-        get_component_func = non_distributed_component_funcs.get_callable(test_name)
-        model_builder, train_dataloader, _, optim_class, _ = get_component_func()
+        model_builder, data_gen_fn, *_ = next(iter(model_zoo.get_sub_registry(test_name).values()))
 
         # create model
-        torch_amp_model = model_builder(checkpoint=True).cuda()
+        torch_amp_model = model_builder().cuda()
         apex_amp_model = copy.deepcopy(torch_amp_model)
 
         # create optimizer
@@ -41,13 +40,12 @@ def run_torch_amp():
         apex_amp_model, apex_amp_optimizer = convert_to_apex_amp(apex_amp_model, apex_amp_optimizer, apex_amp_config)
 
         # create data
-        data_iter = iter(train_dataloader)
-        data, label = next(data_iter)
-        data = data.cuda()
+        data = data_gen_fn()
+        data = {k: v.cuda() if isinstance(v, torch.Tensor) else v for k, v in data.items()}
 
         # forward pass
-        torch_amp_output = torch_amp_model(data)
-        apex_amp_output = apex_amp_model(data)
+        torch_amp_output = torch_amp_model(**data)
+        apex_amp_output = apex_amp_model(**data)
         assert_close_loose(torch_amp_output, apex_amp_output)
 
         for torch_amp_param, apex_amp_param in zip(torch_amp_model.parameters(), apex_amp_model.parameters()):
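assert_close_loose compares the two AMP outputs with relaxed tolerances, since the two mixed-precision paths round differently. A sketch of an equivalent check built on torch.testing (the rtol/atol values are an assumption about what "loose" means here, not the ones assert_close_loose actually uses):

import torch

def assert_close_loose_sketch(a, b, rtol=1e-3, atol=1e-3):
    # loose element-wise comparison for mixed-precision outputs
    torch.testing.assert_close(a, b, rtol=rtol, atol=atol)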
@@ -1,10 +1,11 @@
 import pytest
 import torch
 
 import colossalai
 from colossalai.legacy.amp import AMP_TYPE
 from colossalai.legacy.core import global_context as gpc
-from colossalai.testing import parameterize, rerun_if_address_is_in_use, spawn
-from tests.components_to_test.registry import non_distributed_component_funcs
+from colossalai.testing import DummyDataloader, parameterize, rerun_if_address_is_in_use, spawn
+from tests.kit.model_zoo import model_zoo
 
 CONFIG = dict(
     parallel=dict(pipeline=dict(size=1), tensor=dict(size=1, mode=None)), fp16=dict(mode=None), clip_grad_norm=1.0
@@ -15,29 +16,29 @@ CONFIG = dict(
 @parameterize("amp_mode", [AMP_TYPE.APEX, AMP_TYPE.TORCH, AMP_TYPE.NAIVE, None])
 def run_train(model_name, amp_mode):
     # FIXME: test bert
-    get_components_func = non_distributed_component_funcs.get_callable(model_name)
-    model_builder, train_dataloader, _, optimizer_class, criterion = get_components_func()
+    model_builder, data_gen_fn, *_ = next(iter(model_zoo.get_sub_registry(model_name).values()))
+    train_dataloader = DummyDataloader(data_gen_fn)
+    criterion = lambda x: x.sum()
     gpc.config.fp16["mode"] = amp_mode
 
-    model = model_builder(checkpoint=False)
+    model = model_builder()
     engine, train_dataloader, *args = colossalai.legacy.initialize(
         model=model,
-        optimizer=optimizer_class(model.parameters(), lr=1e-3),
+        optimizer=torch.optim.Adam(model.parameters(), lr=1e-3),
         criterion=criterion,
         train_dataloader=train_dataloader,
     )
 
     try:
         engine.train()
-        for data, label in train_dataloader:
+        for data in train_dataloader:
             engine.zero_grad()
-            data = data.cuda()
-            label = label.cuda()
+            data = {k: v.cuda() if isinstance(v, torch.Tensor) else v for k, v in data.items()}
             if criterion:
-                output = engine(data)
-                loss = engine.criterion(output, label)
+                output = engine(**data)
+                loss = engine.criterion(output)
             else:
-                loss = engine(data, label)
+                loss = engine(**data)
             engine.backward(loss)
             engine.step()
             break
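DummyDataloader wraps the data-generation function so the engine still has something to iterate over, replacing the real dataloader the old component registry supplied; and since the generated batches carry no labels, the criterion becomes a plain sum over the output. A minimal stand-in showing the behavior the test relies on (the class name and the length default are assumptions, not the colossalai.testing implementation):

class DummyDataloaderSketch:
    """Yields data_gen_fn() a fixed number of times, like a dataloader."""

    def __init__(self, data_gen_fn, length=10):
        self.data_gen_fn = data_gen_fn
        self.length = length

    def __iter__(self):
        for _ in range(self.length):
            yield self.data_gen_fn()

    def __len__(self):
        return self.length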
@@ -5,9 +5,9 @@ import colossalai
 from colossalai.legacy.amp.amp_type import AMP_TYPE
 from colossalai.legacy.trainer import Trainer
 from colossalai.logging import get_dist_logger
-from colossalai.testing import parameterize, rerun_if_address_is_in_use, spawn
+from colossalai.testing import DummyDataloader, parameterize, rerun_if_address_is_in_use, spawn
 from colossalai.utils import MultiTimer
-from tests.components_to_test.registry import non_distributed_component_funcs
+from tests.kit.model_zoo import model_zoo
 
 BATCH_SIZE = 4
 IMG_SIZE = 32
@@ -16,12 +16,14 @@ NUM_EPOCHS = 200
 CONFIG = dict(fp16=dict(mode=AMP_TYPE.TORCH))
 
 
-@parameterize("model_name", ["repeated_computed_layers", "resnet18", "nested_model"])
+@parameterize("model_name", ["custom_repeated_computed_layers", "torchvision_resnet18", "custom_nested_model"])
 def run_trainer(model_name):
-    get_components_func = non_distributed_component_funcs.get_callable(model_name)
-    model_builder, train_dataloader, test_dataloader, optimizer_class, criterion = get_components_func()
+    model_builder, data_gen_fn, *_ = next(iter(model_zoo.get_sub_registry(model_name).values()))
     model = model_builder()
-    optimizer = optimizer_class(model.parameters(), lr=1e-3)
+    optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
+    train_dataloader = DummyDataloader(data_gen_fn)
+    test_dataloader = DummyDataloader(data_gen_fn)
+    criterion = lambda x: x.sum()
     engine, train_dataloader, *_ = colossalai.legacy.initialize(
         model=model, optimizer=optimizer, criterion=criterion, train_dataloader=train_dataloader
     )
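With the component registry gone, tests no longer receive a model-specific optimizer_class from the registry; every migrated test constructs a plain torch.optim.Adam itself. The whole migrated pattern, condensed into a self-contained sketch of one training step (SimpleNet and its data generator are stand-ins for a zoo entry, not real zoo code):

import torch
import torch.nn as nn

class SimpleNet(nn.Module):
    def __init__(self):
        super().__init__()
        self.fc = nn.Linear(8, 2)

    def forward(self, x):
        return self.fc(x)

model_builder = SimpleNet
data_gen_fn = lambda: dict(x=torch.rand(4, 8))

model = model_builder()
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
criterion = lambda x: x.sum()  # no labels in the generated batch, so sum the output

# one training step, mirroring what the legacy engine drives internally
data = data_gen_fn()
output = model(**data)
loss = criterion(output)
loss.backward()
optimizer.step()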