[test] refactor tests with spawn (#3452)
* [test] added spawn decorator
* polish code
* polish code
* polish code
* polish code
* polish code
* polish code
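Each file in the hunks below gets the same treatment: import clear_cache_before_run from colossalai.testing and stack the decorator under the test's existing pytest marks. As a minimal sketch of the resulting test shape, using only names that appear in the albert hunk below (the registry key transformers_albert and the decorator order), the pattern looks like this:

import pytest
import torch
from packaging import version

from colossalai.testing import clear_cache_before_run
from tests.kit.model_zoo import model_zoo


@pytest.mark.skipif(version.parse(torch.__version__) < version.parse('1.12.0'), reason='torch version < 12')
@clear_cache_before_run()    # clears cached state before the test body runs, per the decorator's name
def test_albert():
    # pull the albert entries registered in the shared model zoo
    sub_registry = model_zoo.get_sub_registry('transformers_albert')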
@@ -3,6 +3,7 @@ import torch
 from hf_tracer_utils import trace_model_and_compare_output
 from packaging import version

+from colossalai.testing import clear_cache_before_run
 from tests.kit.model_zoo import model_zoo

 BATCH_SIZE = 2
@@ -10,6 +11,7 @@ SEQ_LENGTH = 16


 @pytest.mark.skipif(version.parse(torch.__version__) < version.parse('1.12.0'), reason='torch version < 12')
+@clear_cache_before_run()
 def test_albert():
     sub_registry = model_zoo.get_sub_registry('transformers_albert')
@@ -3,10 +3,12 @@ import torch
 from hf_tracer_utils import trace_model_and_compare_output
 from packaging import version

+from colossalai.testing import clear_cache_before_run
 from tests.kit.model_zoo import model_zoo


 @pytest.mark.skipif(version.parse(torch.__version__) < version.parse('1.12.0'), reason='torch version < 12')
+@clear_cache_before_run()
 def test_bert():
     sub_registry = model_zoo.get_sub_registry('transformers_bert')
@@ -2,6 +2,7 @@ import pytest
 import torch

 from colossalai.fx import symbolic_trace
+from colossalai.testing import clear_cache_before_run
 from colossalai.testing.random import seed_all
 from tests.kit.model_zoo import model_zoo

@@ -40,6 +41,7 @@ def trace_and_compare(model_cls, data, output_fn):


 @pytest.mark.skip(reason='cannot pass this test yet')
+@clear_cache_before_run()
 def test_diffusers():
     seed_all(9091, cuda_deterministic=True)

@@ -52,6 +54,7 @@ def test_diffusers():
         print(f"{name:40s} √")


+@clear_cache_before_run()
 def test_torch_diffusers():
     seed_all(65535, cuda_deterministic=True)
@@ -3,10 +3,12 @@ import torch
 from hf_tracer_utils import trace_model_and_compare_output
 from packaging import version

+from colossalai.testing import clear_cache_before_run
 from tests.kit.model_zoo import model_zoo


 @pytest.mark.skipif(version.parse(torch.__version__) < version.parse('1.12.0'), reason='torch version < 12')
+@clear_cache_before_run()
 def test_gpt():
     sub_registry = model_zoo.get_sub_registry('transformers_gpt')
@@ -3,10 +3,12 @@ import torch
 from hf_tracer_utils import trace_model_and_compare_output
 from packaging import version

+from colossalai.testing import clear_cache_before_run
 from tests.kit.model_zoo import model_zoo


 @pytest.mark.skipif(version.parse(torch.__version__) < version.parse('1.12.0'), reason='torch version < 12')
+@clear_cache_before_run()
 def test_opt():
     sub_registry = model_zoo.get_sub_registry('transformers_opt')
@@ -3,10 +3,12 @@ import torch
 from hf_tracer_utils import trace_model_and_compare_output
 from packaging import version

+from colossalai.testing import clear_cache_before_run
 from tests.kit.model_zoo import model_zoo


 @pytest.mark.skipif(version.parse(torch.__version__) < version.parse('1.12.0'), reason='torch version < 12')
+@clear_cache_before_run()
 def test_t5():
     sub_registry = model_zoo.get_sub_registry('transformers_t5')
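The spawn decorator named in the commit message does not appear in the hunks shown above. For the distributed tests touched elsewhere in this refactor, the intended shape is roughly the sketch below; the spawn and rerun_if_address_is_in_use helpers, the worker signature (rank, world_size, port), and the colossalai.launch arguments are assumptions based on how such tests are commonly written in this repository, not lines taken from this excerpt:

import colossalai
from colossalai.testing import rerun_if_address_is_in_use, spawn


def run_dist(rank, world_size, port):
    # each spawned worker joins the process group on its own rank
    colossalai.launch(config={}, rank=rank, world_size=world_size, host='localhost', port=port, backend='nccl')
    # ... exercise the distributed behavior under test ...


@rerun_if_address_is_in_use()
def test_some_distributed_feature():    # hypothetical test name
    # spawn(...) forks the worker processes, replacing hand-rolled
    # torch.multiprocessing boilerplate in each test file
    spawn(run_dist, 4)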