[devops] update torch version of CI (#3725)

* [test] fix flop tensor test

* [test] fix autochunk test

* [test] fix lazyinit test

* [devops] update torch version of CI

* [devops] enable testmon

* [devops] fix ci

* [devops] fix ci

* [test] fix checkpoint io test

* [test] fix cluster test

* [test] fix timm test

* [devops] fix ci

* [devops] fix ci

* [devops] fix ci

* [devops] fix ci

* [devops] force sync to test ci

* [test] skip fsdp test
Author: Hongxin Liu
Date: 2023-05-15 17:20:56 +08:00
Committed by: GitHub
Parent: b37797ed3d
Commit: afb239bbf8

17 changed files with 74 additions and 46 deletions

View File

@@ -15,9 +15,9 @@ try:
     from colossalai.utils.model.experimental import LazyInitContext, LazyTensor, _MyTensor
 except:
     pass
-from tests.kit.model_zoo import model_zoo
+from utils import SUPPORT_LAZY, assert_dist_model_equal, set_seed
 
-# from utils import assert_dist_model_equal, set_seed
+from tests.kit.model_zoo import model_zoo
 
 
 def find_shard_dim(shape: torch.Size) -> Optional[int]:
@@ -70,9 +70,8 @@ def generate_layout_dict(model: nn.Module, device_mesh: DeviceMesh) -> dict:
 def run_dist_lazy_init(subset, seed: int = 42):
     sub_model_zoo = model_zoo.get_sub_registry(subset)
     device_mesh = DeviceMesh(torch.Tensor([0, 1, 2, 3]), (2, 2), init_process_group=True)
-    # FIXME(ver217): uncomment this line
-    # _MyTensor._pre_op_fn = lambda *args: set_seed(seed)
-    # LazyTensor._pre_op_fn = lambda *args: set_seed(seed)
+    _MyTensor._pre_op_fn = lambda *args: set_seed(seed)
+    LazyTensor._pre_op_fn = lambda *args: set_seed(seed)
 
     for name, entry in sub_model_zoo.items():
         # TODO(ver217): lazy init does not support weight norm, skip these models
@@ -88,8 +87,7 @@ def run_dist_lazy_init(subset, seed: int = 42):
         deferred_model = model_fn()
         layout_dict = generate_layout_dict(deferred_model, device_mesh)
         ctx.distribute(deferred_model, layout_dict, verbose=True)
-        # FIXME(ver217): uncomment this line
-        # assert_dist_model_equal(model, deferred_model, layout_dict)
+        assert_dist_model_equal(model, deferred_model, layout_dict)
 
 
 def run_dist(rank, world_size, port) -> None:
@@ -97,8 +95,7 @@ def run_dist(rank, world_size, port) -> None:
     run_dist_lazy_init()
 
 
-# FIXME(ver217): temporarily skip this test since torch 1.11 does not fully support meta tensor
-@pytest.mark.skip
+@pytest.mark.skipif(not SUPPORT_LAZY, reason='torch version should be >= 1.12.0')
 @pytest.mark.dist
 @rerun_if_address_is_in_use()
 def test_dist_lazy_init():
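
Note on the change above: the re-enabled `_pre_op_fn` hooks reseed the RNG before each materialization op, so that the eagerly built model and the lazily initialized one draw the same random numbers and `assert_dist_model_equal` can compare them. A minimal sketch of that reseeding idea, with a `set_seed` helper assumed to look roughly like the one in the test utilities:

import random

import numpy as np
import torch


def set_seed(seed: int) -> None:
    # Seed every RNG that parameter initialization may touch so that
    # repeated initializations are bit-identical.
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)


# Reseeding before each build means two independently constructed
# models draw the same random numbers:
set_seed(42)
a = torch.nn.Linear(4, 4)
set_seed(42)
b = torch.nn.Linear(4, 4)
assert all(torch.equal(p, q) for p, q in zip(a.parameters(), b.parameters()))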

View File

@@ -1,13 +1,10 @@
 import pytest
+from utils import SUPPORT_LAZY, check_lazy_init
 
 from tests.kit.model_zoo import model_zoo
 
-# FIXME(ver217): uncomment this line
-# from utils import check_lazy_init
-
-# FIXME(ver217): temporarily skip this test since torch 1.11 does not fully support meta tensor
-@pytest.mark.skip
+@pytest.mark.skipif(not SUPPORT_LAZY, reason='requires torch >= 1.12.0')
 @pytest.mark.parametrize('subset', ['torchvision', 'diffusers', 'timm', 'transformers', 'torchaudio', 'deepfm', 'dlrm'])
 def test_torchvision_models_lazy_init(subset):
     sub_model_zoo = model_zoo.get_sub_registry(subset)
@@ -15,8 +12,7 @@ def test_torchvision_models_lazy_init(subset):
         # TODO(ver217): lazy init does not support weight norm, skip these models
         if name in ('torchaudio_wav2vec2_base', 'torchaudio_hubert_base'):
             continue
-        # FIXME(ver217): uncomment this line
-        # check_lazy_init(entry, verbose=True)
+        check_lazy_init(entry, verbose=True)
 
 
 if __name__ == '__main__':
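
The change above replaces a blanket `@pytest.mark.skip` with a version gate. A self-contained sketch of the same pytest idiom, assuming `SUPPORT_LAZY` is computed as in the utils diff below:

import pytest
import torch
from packaging import version

SUPPORT_LAZY = version.parse(torch.__version__) >= version.parse('1.12.0')


@pytest.mark.skipif(not SUPPORT_LAZY, reason='requires torch >= 1.12.0')
def test_meta_tensor_support():
    # Runs only on torch >= 1.12.0, where meta tensors are fully supported;
    # on older torch the test is reported as skipped rather than failed.
    t = torch.empty(2, 2, device='meta')
    assert t.is_meta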

View File

@@ -3,11 +3,14 @@ from typing import Any, Callable, Optional, Tuple
 
 import numpy as np
 import torch
+from packaging import version
 
 from colossalai.tensor.d_tensor.layout_converter import to_global
 from colossalai.utils.model.experimental import LazyInitContext, LazyTensor, _MyTensor
 from tests.kit.model_zoo.registry import ModelAttribute
 
+SUPPORT_LAZY = version.parse(torch.__version__) >= version.parse('1.12.0')
+
 # model_fn, data_gen_fn, output_transform_fn, model_attr
 TestingEntry = Tuple[Callable[[], torch.nn.Module], Callable[[], dict], Callable[[], dict], Optional[ModelAttribute]]
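
For orientation, a hedged sketch of how a `TestingEntry` tuple might be consumed; `run_entry` is a hypothetical driver, not the repo's actual harness, and `ModelAttribute` is loosened to `Any` to keep the sketch self-contained:

from typing import Any, Callable, Optional, Tuple

import torch

# Loosely typed mirror of the alias above: model factory, data generator,
# output transform, optional model attributes.
TestingEntry = Tuple[Callable[[], torch.nn.Module], Callable[[], dict], Callable[..., dict], Optional[Any]]


def run_entry(entry: TestingEntry) -> dict:
    # Build the model, generate a batch of keyword inputs, run a forward
    # pass, and normalize the output for cross-model comparison.
    model_fn, data_gen_fn, output_transform_fn, _model_attr = entry
    model = model_fn()
    data = data_gen_fn()
    output = model(**data)
    return output_transform_fn(output)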