[plugin] fix 3D checkpoint load when booster boosts without an optimizer (#5135)

* fix 3d checkpoint load when booster boost without optimizer

* test ci

* revert ci

* fix

Author: flybird11111 (committed by GitHub)
Date: 2023-11-30 18:37:47 +08:00
Parent: f6731db67c
Commit: 2a2ec49aa7
2 changed files with 5 additions and 2 deletions
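
The failing scenario, roughly: a model is boosted without passing an optimizer, and a sharded checkpoint is then loaded. A minimal sketch of that flow follows; the plugin sizes, toy model, and checkpoint path are illustrative, not taken from the PR, and a true 3D run (tp/pp > 1) additionally needs a ShardFormer-supported model.

import torch.nn as nn

import colossalai
from colossalai.booster import Booster
from colossalai.booster.plugin import HybridParallelPlugin

# Assumes torchrun has set up the distributed env; the launch signature
# varies across ColossalAI versions (older ones required config={}).
colossalai.launch_from_torch()

# Illustrative sizes: tp_size/pp_size > 1 would make this a real 3D setup,
# but would also require a model with a ShardFormer policy.
plugin = HybridParallelPlugin(tp_size=1, pp_size=1, precision="fp16")
booster = Booster(plugin=plugin)

model = nn.Sequential(nn.Linear(32, 32), nn.ReLU(), nn.Linear(32, 32))

# Boost WITHOUT an optimizer, e.g. for evaluation only. Before this commit,
# the subsequent load could fail because the AMP-related hooks lived only
# on the optimizer wrapper.
model, *_ = booster.boost(model)

# Load a previously saved checkpoint; the path is hypothetical.
booster.load_model(model, "./ckpt/model")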

@@ -21,7 +21,7 @@ from torch.utils.data.distributed import DistributedSampler
 from colossalai.amp.naive_amp.mixed_precision_optimizer import MixedPrecisionOptimizer
 from colossalai.checkpoint_io import CheckpointIO, HybridParallelCheckpointIO
 from colossalai.cluster import ProcessGroupMesh
-from colossalai.interface import ModelWrapper, OptimizerWrapper
+from colossalai.interface import ModelWrapper, OptimizerWrapper, AMPModelMixin
 from colossalai.pipeline.schedule import InterleavedSchedule, OneForwardOneBackwardSchedule
 from colossalai.pipeline.stage_manager import PipelineStageManager
 from colossalai.shardformer import ShardConfig, ShardFormer
@@ -42,7 +42,7 @@ def _convert_floating_point(x, dtype: torch.dtype = torch.float16):
     return x
 
 
-class HybridParallelModule(ModelWrapper):
+class HybridParallelModule(ModelWrapper, AMPModelMixin):
     def __init__(
         self,
         module: Module,
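
The fix itself is a small mixin: the model wrapper gains a default AMP hook so checkpoint IO can call it unconditionally, whether or not a MixedPrecisionOptimizer is present. A minimal sketch of the pattern follows; the class names mirror the diff, but the bodies are illustrative rather than copied from the ColossalAI source.

class AMPModelMixin:
    """Default (no-op) hook for syncing master params after a checkpoint load."""

    def update_master_params(self) -> None:
        # Nothing to sync by default. Wrappers that keep fp32 master copies
        # override this to refresh them from the loaded fp16/bf16 weights.
        pass


class ModelWrapper:
    def __init__(self, module):
        self.module = module


class HybridParallelModule(ModelWrapper, AMPModelMixin):
    # With the mixin, checkpoint IO can always call
    # model.update_master_params() after loading, even when booster.boost()
    # was invoked without an optimizer.
    pass

With the mixin in place, the checkpoint loader no longer needs to special-case the optimizer-less path: calling the hook on a model that keeps no master params is a safe no-op.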