Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-23 10:30:03 +00:00
[misc] update pre-commit and run all files (#4752)
* [misc] update pre-commit
* [misc] run pre-commit
* [misc] remove useless configuration files
* [misc] ignore cuda for clang-format
@@ -16,6 +16,7 @@ def check_selfattention():
     layer = layer.to(get_current_device())
 
     hidden_states = torch.rand(SUB_SEQ_LENGTH, BATCH, HIDDEN_SIZE).to(get_current_device())
-    attention_mask = torch.randint(low=0, high=2,
-                                   size=(BATCH, 1, 1, 1, SUB_SEQ_LENGTH * WORLD_SIZE)).to(get_current_device())
-    out = layer(hidden_states, attention_mask)
+    attention_mask = torch.randint(low=0, high=2, size=(BATCH, 1, 1, 1, SUB_SEQ_LENGTH * WORLD_SIZE)).to(
+        get_current_device()
+    )
+    layer(hidden_states, attention_mask)
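For context, a minimal runnable sketch of what the reformatted lines exercise: build a (sub-sequence length, batch, hidden size) activation tensor and a random 0/1 attention mask spanning the full sequence, then call the attention layer. The constants, the DummySelfAttention placeholder, and the plain torch.device selection below are illustrative assumptions; the actual test uses the repository's sequence-parallel self-attention layer and get_current_device().

import torch

# Illustrative constants; the real test derives WORLD_SIZE from the sequence-parallel group.
SUB_SEQ_LENGTH, BATCH, HIDDEN_SIZE, WORLD_SIZE = 8, 4, 16, 1

# Stand-in for get_current_device(), kept to plain PyTorch so the sketch is self-contained.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")


class DummySelfAttention(torch.nn.Module):
    """Hypothetical placeholder for the self-attention layer exercised by the test."""

    def __init__(self, hidden_size: int):
        super().__init__()
        self.proj = torch.nn.Linear(hidden_size, hidden_size)

    def forward(self, hidden_states, attention_mask):
        # The real layer consumes the mask for ring self-attention; this placeholder
        # only mimics the (hidden_states, attention_mask) call signature.
        return self.proj(hidden_states)


layer = DummySelfAttention(HIDDEN_SIZE).to(device)

# Activations laid out as (sub-sequence length, batch, hidden size).
hidden_states = torch.rand(SUB_SEQ_LENGTH, BATCH, HIDDEN_SIZE).to(device)

# Random 0/1 mask covering the full sequence length across all sequence-parallel ranks.
attention_mask = torch.randint(low=0, high=2, size=(BATCH, 1, 1, 1, SUB_SEQ_LENGTH * WORLD_SIZE)).to(device)

layer(hidden_states, attention_mask)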