ColossalAI/colossalai/kernel/jit/bias_dropout_add.py
import torch


def bias_dropout_add(x, bias, residual, prob, training):
    # type: (Tensor, Tensor, Tensor, float, bool) -> Tensor
    # Add the bias, apply dropout, then add the residual (skip) connection.
    out = torch.nn.functional.dropout(x + bias, p=prob, training=training)
    out = residual + out
    return out


# TorchScript-compiled variants with the training flag baked in as a constant,
# which lets the JIT fuse the elementwise add + dropout + add into fewer kernels.
@torch.jit.script
def bias_dropout_add_fused_train(
    x: torch.Tensor, bias: torch.Tensor, residual: torch.Tensor, prob: float
) -> torch.Tensor:
    return bias_dropout_add(x, bias, residual, prob, True)


@torch.jit.script
def bias_dropout_add_fused_inference(
    x: torch.Tensor, bias: torch.Tensor, residual: torch.Tensor, prob: float
) -> torch.Tensor:
    return bias_dropout_add(x, bias, residual, prob, False)
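
A minimal usage sketch, not part of the original file: the tensor shapes and the dropout probability below are illustrative assumptions, chosen to mimic how a transformer block would feed an attention or MLP output, its bias, and a skip connection through the fused train-mode variant.

import torch

# Hypothetical shapes for illustration only (not from the source file).
hidden = torch.randn(4, 16)    # e.g. an attention/MLP output, (batch, hidden)
bias = torch.randn(16)         # broadcast across the batch dimension
residual = torch.randn(4, 16)  # the skip-connection input

out = bias_dropout_add_fused_train(hidden, bias, residual, 0.1)
print(out.shape)  # torch.Size([4, 16])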