Mirror of https://github.com/hpcaitech/ColossalAI.git (synced 2025-09-05 02:51:59 +00:00)
[ci] fixed booster test (#5251)
* [ci] fixed booster test
* [ci] fixed booster test
* [ci] fixed booster test
@@ -12,10 +12,11 @@ from colossalai.fx import is_compatible_with_meta
 from colossalai.lazy.lazy_init import LazyInitContext
 from colossalai.nn.optimizer import HybridAdam
 from colossalai.tensor.colo_parameter import ColoParameter
-from colossalai.testing import parameterize, rerun_if_address_is_in_use, spawn
-from tests.kit.model_zoo import model_zoo, COMMON_MODELS, IS_FAST_TEST
+from colossalai.testing import clear_cache_before_run, parameterize, rerun_if_address_is_in_use, spawn
+from tests.kit.model_zoo import COMMON_MODELS, IS_FAST_TEST, model_zoo
 
 
+@clear_cache_before_run()
 def run_fn(init_method, model_fn, data_gen_fn, output_transform_fn, zero_size, tp_size) -> Optional[str]:
     try:
         if init_method == "lazy":
@@ -116,7 +117,7 @@ def check_gemini_plugin(
             "transformers_falcon_for_sequence_classification",
             "transformers_falcon_for_token_classification",
             "transformers_falcon_for_question_answering",
-            "transformers_gptj_lm",  # lead to OOM when running in ci
+            "transformers_gptj_lm",  # lead to OOM when running in ci
             "transformers_gptj_for_question_answering",
             "transformers_gptj_for_sequence_classification",
         ]:
@@ -145,7 +146,6 @@ def check_gemini_plugin(
             tp_size = 1
 
         err = run_fn(init_method, model_fn, data_gen_fn, output_transform_fn, zero_size, tp_size)
-        torch.cuda.empty_cache()
         if err is None:
             passed_models.append(name)
         else:
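
The net effect of the change is that CUDA-cache cleanup moves out of check_gemini_plugin and into the clear_cache_before_run decorator applied to run_fn. The actual decorator lives in colossalai.testing; the snippet below is only a minimal sketch, assuming it simply frees cached GPU memory before invoking the wrapped test function, and is not the library's real implementation.

    import gc
    from functools import wraps

    import torch


    def clear_cache_before_run():
        # Sketch of a cache-clearing test decorator (an assumption, not the
        # actual colossalai.testing.clear_cache_before_run implementation).
        def decorator(func):
            @wraps(func)
            def wrapper(*args, **kwargs):
                gc.collect()  # drop unreachable Python objects first
                if torch.cuda.is_available():
                    torch.cuda.empty_cache()  # release unused cached memory held by the allocator
                    torch.cuda.reset_peak_memory_stats()  # start the test with clean memory stats
                return func(*args, **kwargs)

            return wrapper

        return decorator

Under that assumption, each call to run_fn starts from a freshly emptied cache, which would make the explicit torch.cuda.empty_cache() after run_fn in check_gemini_plugin redundant, consistent with its removal in the last hunk.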