import torch.nn as nn

from colossalai.context import ParallelMode, seed

from ..parallel_1d import *
from ..utils import get_tensor_parallel_mode
from ._utils import ColossalaiModule


class Dropout(ColossalaiModule):
    """Dropout layer of colossalai.

    Args:
        p (float, optional): probability of an element to be zeroed, defaults to 0.5.
        inplace (bool, optional): whether to do dropout in-place, defaults to False.
    """

    def __init__(self, p: float = 0.5, inplace: bool = False) -> None:
        # Dispatch on the active tensor parallel mode: use the 1D-parallel
        # dropout under 1D tensor parallelism, otherwise fall back to the
        # standard PyTorch dropout.
        tensor_parallel = get_tensor_parallel_mode()
        if tensor_parallel == "1d":
            drop = Dropout1D(p, inplace)
        else:
            drop = nn.Dropout(p, inplace)
        super().__init__(drop, tensor_parallel=tensor_parallel)

    def forward(self, *args):
        if self.tensor_parallel in [None, "1d"]:
            return super().forward(*args)
        else:
            # For other tensor parallel modes, run dropout under the shared
            # TENSOR-mode RNG seed so every rank draws the same dropout mask.
            with seed(ParallelMode.TENSOR):
                return super().forward(*args)
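
For context on the `seed(ParallelMode.TENSOR)` block in `forward`, here is a minimal single-process sketch of the same idea using plain PyTorch only. `SHARED_TP_SEED` and `dropout_with_shared_seed` are hypothetical names for illustration; in real Colossal-AI the shared seed is managed by the global parallel context rather than set by hand. `torch.random.fork_rng` plays the role of the `seed(...)` context manager: inside the block, randomness comes from a seed that would be identical on every tensor-parallel rank, so the dropout masks line up across ranks.

import torch
import torch.nn.functional as F

SHARED_TP_SEED = 1234  # hypothetical seed shared by all tensor-parallel ranks


def dropout_with_shared_seed(x: torch.Tensor, p: float = 0.5) -> torch.Tensor:
    # Fork the RNG state so the fixed seed does not disturb the rest of the
    # program, then seed it identically to what every other rank would use.
    with torch.random.fork_rng():
        torch.manual_seed(SHARED_TP_SEED)
        return F.dropout(x, p=p, training=True)


x = torch.randn(2, 4)
a = dropout_with_shared_seed(x)
b = dropout_with_shared_seed(x)
assert torch.equal(a, b)  # same seed -> same mask, as ranks would agree

This is only a sketch of the synchronization pattern, not the library's implementation: Colossal-AI's `seed` context additionally tracks per-mode RNG states so that TENSOR-mode randomness stays decoupled from data-parallel randomness.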