[misc] update pre-commit and run all files (#4752)

* [misc] update pre-commit

* [misc] run pre-commit

* [misc] remove useless configuration files

* [misc] ignore cuda for clang-format
Author: Hongxin Liu
Date: 2023-09-19 14:20:26 +08:00
Committed by: GitHub
Parent: 3c6b831c26
Commit: 079bf3cb26
1268 changed files with 50037 additions and 38444 deletions


@@ -2,7 +2,6 @@ import torch
def get_dropout_add_func():
    from transformers.models.bloom.modeling_bloom import dropout_add
    def self_dropout_add(self, x: torch.Tensor, residual: torch.Tensor, prob: float, training: bool) -> torch.Tensor:
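
For reference, the non-fused path that get_dropout_add_func binds onto a module simply applies standard dropout to x and adds the residual. A minimal sketch of that behavior (not the exact transformers source; the unused self parameter only exists so the function can be attached as a method):

import torch
import torch.nn.functional as F

def self_dropout_add(self, x: torch.Tensor, residual: torch.Tensor, prob: float, training: bool) -> torch.Tensor:
    # Plain eager-mode path: dropout on x, then residual add.
    out = F.dropout(x, p=prob, training=training)
    return residual + out
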
@@ -12,7 +11,6 @@ def get_dropout_add_func():
def get_jit_fused_dropout_add_func():
    from colossalai.kernel.jit import bias_dropout_add_fused_inference, bias_dropout_add_fused_train
    def self_dropout_add(self, x: torch.Tensor, residual: torch.Tensor, prob: float, training: bool) -> torch.Tensor:
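
The JIT-fused variant replaces that eager sequence with TorchScript functions that fold the bias add, dropout, and residual add into a single scripted graph, which is what the bias_dropout_add_fused_train / bias_dropout_add_fused_inference imports above provide. A standalone sketch of the pattern, assuming the usual Megatron-style fusion rather than the exact colossalai.kernel.jit source:

import torch

@torch.jit.script
def bias_dropout_add_fused_train(x: torch.Tensor, bias: torch.Tensor, residual: torch.Tensor, prob: float) -> torch.Tensor:
    # Bias add + dropout + residual add compiled into one scripted graph.
    out = torch.nn.functional.dropout(x + bias, p=prob, training=True)
    return residual + out

@torch.jit.script
def bias_dropout_add_fused_inference(x: torch.Tensor, bias: torch.Tensor, residual: torch.Tensor, prob: float) -> torch.Tensor:
    # Same computation with dropout disabled for inference.
    out = torch.nn.functional.dropout(x + bias, p=prob, training=False)
    return residual + out

The self_dropout_add wrapper shown in this hunk would then dispatch to the train or inference version based on the training flag.
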
@@ -25,7 +23,6 @@ def get_jit_fused_dropout_add_func():
def get_jit_fused_gelu_forward_func():
    from colossalai.kernel.jit.bias_gelu import bias_gelu
    def bloom_gelu_forward(x: torch.Tensor, bias: torch.Tensor) -> torch.Tensor:
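
The last hunk touches the fused GeLU replacement: BLOOM's tanh-approximated GeLU is routed through a scripted bias-GeLU kernel instead of the eager implementation. A minimal sketch of that fusion, assuming the standard tanh approximation applied to the biased input (not the exact colossalai.kernel.jit.bias_gelu source):

import torch

@torch.jit.script
def bias_gelu(bias: torch.Tensor, y: torch.Tensor) -> torch.Tensor:
    # Tanh-approximated GeLU applied to the biased input in one scripted op.
    x = bias + y
    return x * 0.5 * (1.0 + torch.tanh(0.79788456 * x * (1.0 + 0.044715 * x * x)))

def bloom_gelu_forward(x: torch.Tensor, bias: torch.Tensor) -> torch.Tensor:
    # Drop-in replacement matching the (x, bias) signature shown in the hunk above.
    return bias_gelu(bias, x)
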