Mirror of https://github.com/hpcaitech/ColossalAI.git (synced 2025-10-23 07:39:31 +00:00)
152 lines · 5.5 KiB · Python

#!/usr/bin/env python
# -*- encoding: utf-8 -*-

import colossalai
import logging
from pathlib import Path
from typing import Union

from colossalai.context.parallel_mode import ParallelMode

_FORMAT = 'colossalai - %(name)s - %(asctime)s %(levelname)s: %(message)s'
logging.basicConfig(level=logging.INFO, format=_FORMAT)
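
# With this format, a record emitted by a logger named 'train' renders roughly
# as follows (the timestamp is illustrative):
#   colossalai - train - 2021-11-01 12:00:00,000 INFO: training started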


class DistributedLogger:
    """This is a distributed event logger class essentially based on :class:`logging`.

    :param name: The name of the logger
    :type name: str
    """

    __instances = dict()

    @staticmethod
    def get_instance(name: str):
        """Get the unique logger instance for the given name, creating it on first use.

        :param name: The name of the logger
        :type name: str
        :return: A DistributedLogger object
        :rtype: DistributedLogger
        """
        if name in DistributedLogger.__instances:
            return DistributedLogger.__instances[name]
        else:
            logger = DistributedLogger(name=name)
            return logger
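
    # A quick sketch of the singleton behavior (the name 'train' is illustrative):
    #
    #   logger_a = DistributedLogger.get_instance('train')
    #   logger_b = DistributedLogger.get_instance('train')
    #   assert logger_a is logger_b   # one instance per name is reused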

    def __init__(self, name):
        if name in DistributedLogger.__instances:
            raise Exception(
                'Logger with the same name has been created, you should use colossalai.logging.get_dist_logger')
        else:
            self._name = name
            self._logger = logging.getLogger(name)
            DistributedLogger.__instances[name] = self

    @staticmethod
    def _check_valid_logging_level(level: str):
        assert level in ['INFO', 'DEBUG', 'WARNING', 'ERROR'], 'found invalid logging level'

    def set_level(self, level: str):
        """Set the logging level.

        :param level: Can only be INFO, DEBUG, WARNING or ERROR
        :type level: str
        """
        self._check_valid_logging_level(level)
        self._logger.setLevel(getattr(logging, level))

    def log_to_file(self,
                    path: Union[str, Path],
                    mode: str = 'a',
                    level: str = 'INFO',
                    suffix: str = None):
        """Save the logs to file.

        :param path: The file to save the log
        :type path: str or pathlib.Path
        :param mode: The mode in which to open the log file
        :type mode: str
        :param level: Can only be INFO, DEBUG, WARNING or ERROR
        :type level: str
        :param suffix: An optional suffix appended to the log file name
        :type suffix: str
        """
        assert isinstance(path, (str, Path)), \
            f'expected argument path to be type str or Path, but got {type(path)}'
        self._check_valid_logging_level(level)
        if isinstance(path, str):
            path = Path(path)

        # the log file name is rank-specific, so each process writes to its own file
        if not colossalai.core.global_context.is_initialized(ParallelMode.GLOBAL):
            rank = 0
        else:
            rank = colossalai.core.global_context.get_global_rank()

        if suffix is not None:
            log_file_name = f'rank_{rank}_{suffix}.log'
        else:
            log_file_name = f'rank_{rank}.log'
        path = path.joinpath(log_file_name)

        # add a file handler that writes records at or above `level` in the module format
        file_handler = logging.FileHandler(path, mode)
        file_handler.setLevel(getattr(logging, level))
        formatter = logging.Formatter(_FORMAT)
        file_handler.setFormatter(formatter)
        self._logger.addHandler(file_handler)
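
    # A usage sketch, assuming the global context is initialized and './logs'
    # is a hypothetical existing directory: on global rank 3,
    #
    #   logger.log_to_file('./logs', suffix='train')
    #
    # appends records of level INFO and above to './logs/rank_3_train.log'.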

    def _log(self, level, message: str, parallel_mode: ParallelMode = ParallelMode.GLOBAL, ranks: list = None):
        # log on every process when `ranks` is None; otherwise only on processes
        # whose local rank in `parallel_mode` appears in `ranks`
        if ranks is None:
            getattr(self._logger, level)(message)
        else:
            local_rank = colossalai.core.global_context.get_local_rank(parallel_mode)
            if local_rank in ranks:
                getattr(self._logger, level)(message)
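
    # For example (a hedged sketch of rank-filtered logging):
    #
    #   logger.info('loss=0.5', parallel_mode=ParallelMode.GLOBAL, ranks=[0])
    #
    # emits the record only on the process whose GLOBAL rank is 0.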

    def info(self, message: str, parallel_mode: ParallelMode = ParallelMode.GLOBAL, ranks: list = None):
        """Log an info message.

        :param message: The message to be logged
        :type message: str
        :param parallel_mode: The parallel mode used for logging. Defaults to ParallelMode.GLOBAL
        :type parallel_mode: :class:`colossalai.context.parallel_mode.ParallelMode`
        :param ranks: List of parallel ranks
        :type ranks: list
        """
        self._log('info', message, parallel_mode, ranks)

    def warning(self, message: str, parallel_mode: ParallelMode = ParallelMode.GLOBAL, ranks: list = None):
        """Log a warning message.

        :param message: The message to be logged
        :type message: str
        :param parallel_mode: The parallel mode used for logging. Defaults to ParallelMode.GLOBAL
        :type parallel_mode: :class:`colossalai.context.parallel_mode.ParallelMode`
        :param ranks: List of parallel ranks
        :type ranks: list
        """
        self._log('warning', message, parallel_mode, ranks)

    def debug(self, message: str, parallel_mode: ParallelMode = ParallelMode.GLOBAL, ranks: list = None):
        """Log a debug message.

        :param message: The message to be logged
        :type message: str
        :param parallel_mode: The parallel mode used for logging. Defaults to ParallelMode.GLOBAL
        :type parallel_mode: :class:`colossalai.context.parallel_mode.ParallelMode`
        :param ranks: List of parallel ranks
        :type ranks: list
        """
        self._log('debug', message, parallel_mode, ranks)

    def error(self, message: str, parallel_mode: ParallelMode = ParallelMode.GLOBAL, ranks: list = None):
        """Log an error message.

        :param message: The message to be logged
        :type message: str
        :param parallel_mode: The parallel mode used for logging. Defaults to ParallelMode.GLOBAL
        :type parallel_mode: :class:`colossalai.context.parallel_mode.ParallelMode`
        :param ranks: List of parallel ranks
        :type ranks: list
        """
        self._log('error', message, parallel_mode, ranks)
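
# A minimal end-to-end sketch, assuming the Colossal-AI global context has
# already been initialized (e.g. via colossalai.launch):
#
#   logger = DistributedLogger.get_instance('train')
#   logger.set_level('DEBUG')
#   logger.log_to_file('./logs')   # './logs' is a hypothetical directory
#   logger.debug('initialized process group')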