From 1b76564e1607aa8cf24566c794977b260de44f6c Mon Sep 17 00:00:00 2001
From: duanjunwen <935724073@qq.com>
Date: Mon, 3 Jun 2024 15:26:01 +0800
Subject: [PATCH] [test] Fix/fix testcase (#5770)

* [fix] branch for fix testcase;

* [fix] fix test_analyzer & test_auto_parallel;

* [fix] remove local change about moe;

* [fix] rm local change moe;
---
 colossalai/_analyzer/fx/codegen.py                     | 2 +-
 colossalai/fx/codegen/activation_checkpoint_codegen.py | 2 +-
 tests/test_auto_parallel/test_offload/model_utils.py   | 6 +++---
 tests/test_auto_parallel/test_offload/test_perf.py     | 7 +++++--
 4 files changed, 10 insertions(+), 7 deletions(-)

diff --git a/colossalai/_analyzer/fx/codegen.py b/colossalai/_analyzer/fx/codegen.py
index 68a27d919..9fd82ccd4 100644
--- a/colossalai/_analyzer/fx/codegen.py
+++ b/colossalai/_analyzer/fx/codegen.py
@@ -469,4 +469,4 @@ class ActivationCheckpointCodeGen(CodeGen):
 {wrap_stmts}
 {prologue}
 {code}"""
-        return PythonCode(fn_code, globals_)
+        return PythonCode(fn_code, globals_, {})
diff --git a/colossalai/fx/codegen/activation_checkpoint_codegen.py b/colossalai/fx/codegen/activation_checkpoint_codegen.py
index 28451bdd1..ed3b3b1ef 100644
--- a/colossalai/fx/codegen/activation_checkpoint_codegen.py
+++ b/colossalai/fx/codegen/activation_checkpoint_codegen.py
@@ -859,7 +859,7 @@ if CODEGEN_AVAILABLE:
 {wrap_stmts}
 {prologue}
 {code}"""
-            return PythonCode(fn_code, globals_)
+            return PythonCode(fn_code, globals_, {})
 
 else:
 
diff --git a/tests/test_auto_parallel/test_offload/model_utils.py b/tests/test_auto_parallel/test_offload/model_utils.py
index 0efe84655..9a0dbcbd7 100644
--- a/tests/test_auto_parallel/test_offload/model_utils.py
+++ b/tests/test_auto_parallel/test_offload/model_utils.py
@@ -2,7 +2,7 @@ import torch
 import torch.nn as nn
 from transformers import BertConfig, BertLMHeadModel, GPT2Config, GPT2LMHeadModel
 
-from tests.components_to_test.registry import non_distributed_component_funcs
+# from tests.components_to_test.registry import non_distributed_component_funcs
 
 
 class GPTLMModel(nn.Module):
@@ -55,7 +55,7 @@ class BertLMModel(nn.Module):
         return self.model(input_ids=input_ids, attention_mask=attention_mask, use_cache=True)[0]
 
 
-@non_distributed_component_funcs.register(name="bert_")
+# @non_distributed_component_funcs.register(name="bert_")
 def get_bert_components():
     vocab_size = 1024
     seq_len = 64
@@ -74,7 +74,7 @@ def get_bert_components():
     return bert_model_builder, bert_data_gen
 
 
-@non_distributed_component_funcs.register(name="gpt2_")
+# @non_distributed_component_funcs.register(name="gpt2_")
 def get_gpt2_components():
     vocab_size = 1024
     seq_len = 8
diff --git a/tests/test_auto_parallel/test_offload/test_perf.py b/tests/test_auto_parallel/test_offload/test_perf.py
index 3db7a1925..f895721dd 100644
--- a/tests/test_auto_parallel/test_offload/test_perf.py
+++ b/tests/test_auto_parallel/test_offload/test_perf.py
@@ -10,11 +10,14 @@ from colossalai.auto_parallel.offload.amp_optimizer import AMPOptimizer
 from colossalai.auto_parallel.offload.mem_optimize import memory_optimize
 from colossalai.auto_parallel.offload.solver import NOT_NVML
 from colossalai.fx.profiler import parameter_size
+from colossalai.legacy.zero.gemini.colo_init_context import ColoInitContext
 from colossalai.nn.optimizer import HybridAdam
 from colossalai.testing import parameterize, rerun_if_address_is_in_use, spawn
-from colossalai.zero import ColoInitContext, zero_model_wrapper, zero_optim_wrapper
+from colossalai.utils import set_seed
+from colossalai.zero import zero_model_wrapper, zero_optim_wrapper
 from tests.test_auto_parallel.test_offload.model_utils import *
-from tests.test_tensor.common_utils import set_seed
+
+# from tests.test_tensor.common_utils import set_seed
 
 
 @parameterize("model_name", ["gpt2_"])