Mirror of https://github.com/hpcaitech/ColossalAI.git
Synced 2025-09-07 20:10:17 +00:00
[legacy] move communication and nn to legacy and refactor logger (#4671)
* [legacy] move communication to legacy (#4640)
* [legacy] refactor logger and clean up legacy codes (#4654)
* [legacy] make logger independent of gpc
* [legacy] make optim independent of registry
* [legacy] move test engine to legacy
* [legacy] move nn to legacy (#4656)
* [legacy] move nn to legacy
* [checkpointio] fix save hf config
* [test] remove useless rpc pp test
* [legacy] fix nn init
* [example] skip tutorial hybrid parallel example
* [devops] test doc check
* [devops] test doc check
@@ -11,8 +11,6 @@ from typing import Iterator, List, Mapping, Optional, OrderedDict, Tuple

 import torch
 import torch.nn as nn
 from torch.optim import Optimizer
-from transformers.modeling_utils import PreTrainedModel, get_parameter_dtype
-from transformers.modeling_utils import unwrap_model as unwrap_huggingface_model

 from colossalai.interface import ModelWrapper, OptimizerWrapper
 from colossalai.nn.optimizer import ColossalaiOptimizer
@@ -383,6 +381,11 @@ def save_config_file(model: nn.Module, checkpoint_path: str, is_master: bool = T
         checkpoint_path (str): Path to the checkpoint directory.
         is_master (bool): Whether current rank is main process.
     """
+    try:
+        from transformers.modeling_utils import PreTrainedModel, get_parameter_dtype
+        from transformers.modeling_utils import unwrap_model as unwrap_huggingface_model
+    except ImportError:
+        return
     if not isinstance(model, PreTrainedModel):
         return

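The change moves the transformers imports from module level into save_config_file behind a try/except guard, so the function becomes a no-op when transformers is not installed or when the model is not a Hugging Face PreTrainedModel. A minimal standalone sketch of the resulting control flow, assuming the body after the guard unwraps the model and writes its config (the dtype bookkeeping below is an illustrative assumption and is not shown in this hunk):

import torch.nn as nn


def save_config_file(model: nn.Module, checkpoint_path: str, is_master: bool = True):
    # Guarded import: skip config saving entirely if transformers is absent,
    # instead of failing at import time as the old module-level imports did.
    try:
        from transformers.modeling_utils import PreTrainedModel, get_parameter_dtype
        from transformers.modeling_utils import unwrap_model as unwrap_huggingface_model
    except ImportError:
        return
    # Only Hugging Face models carry a config worth persisting.
    if not isinstance(model, PreTrainedModel):
        return
    model = unwrap_huggingface_model(model)
    # Assumption: record the parameter dtype as a string, e.g. torch.float32 -> "float32".
    model.config.torch_dtype = str(get_parameter_dtype(model)).split(".")[1]
    # Write config.json from the main process only.
    if is_master:
        model.config.save_pretrained(checkpoint_path)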