[misc] update pre-commit and run all files (#4752)

* [misc] update pre-commit

* [misc] run pre-commit

* [misc] remove useless configuration files

* [misc] ignore cuda for clang-format
Author: Hongxin Liu
Date: 2023-09-19 14:20:26 +08:00
Committed by: GitHub
Parent: 3c6b831c26
Commit: 079bf3cb26
1268 changed files with 50037 additions and 38444 deletions


@@ -6,7 +6,6 @@ from colossalai.legacy.core import global_context as gpc
 class PreProcessor(nn.Module):
     def __init__(self, sub_seq_length):
         super().__init__()
         self.sub_seq_length = sub_seq_length
@@ -15,10 +14,9 @@ class PreProcessor(nn.Module):
         # Create position ids
         seq_length = token_ids.size(1)
         local_rank = gpc.get_local_rank(ParallelMode.SEQUENCE)
-        position_ids = torch.arange(seq_length * local_rank,
-                                    seq_length * (local_rank + 1),
-                                    dtype=torch.long,
-                                    device=token_ids.device)
+        position_ids = torch.arange(
+            seq_length * local_rank, seq_length * (local_rank + 1), dtype=torch.long, device=token_ids.device
+        )
         position_ids = position_ids.unsqueeze(0).expand_as(token_ids)
         return position_ids
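
For context, this hunk is behavior-preserving: the pre-commit formatter only collapses the multi-line torch.arange call into the one-line style, and the per-rank position ids are unchanged. A minimal standalone sketch (seq_length and local_rank values assumed purely for illustration, not taken from the repo):

    import torch

    seq_length, local_rank = 4, 1  # assumed: a 4-token sub-sequence per rank, on sequence-parallel rank 1
    position_ids = torch.arange(
        seq_length * local_rank, seq_length * (local_rank + 1), dtype=torch.long
    )
    print(position_ids)  # tensor([4, 5, 6, 7]), i.e. this rank's slice of the global positions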
@@ -42,7 +40,7 @@ class PreProcessor(nn.Module):
         extended_attention_mask = attention_mask_bss.unsqueeze(1)
         # Convert attention mask to binary:
-        extended_attention_mask = (extended_attention_mask < 0.5)
+        extended_attention_mask = extended_attention_mask < 0.5
         return extended_attention_mask
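
Likewise, dropping the redundant parentheses around the comparison changes nothing semantically: the elementwise comparison still yields a boolean tensor. A small sketch with assumed toy mask values:

    import torch

    extended_attention_mask = torch.tensor([[1.0, 1.0, 0.0]])  # assumed toy mask, not from the repo
    extended_attention_mask = extended_attention_mask < 0.5    # elementwise comparison returns a bool tensor
    print(extended_attention_mask)  # tensor([[False, False, True]])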