From 0e9d628bb7cd8eed95977eb54c052fd7b03e59a4 Mon Sep 17 00:00:00 2001
From: wangbluo <2538539015@qq.com>
Date: Wed, 14 May 2025 12:50:07 +0800
Subject: [PATCH] add the explanation

---
 colossalai/shardformer/modeling/sam.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/colossalai/shardformer/modeling/sam.py b/colossalai/shardformer/modeling/sam.py
index c84395989..e172bfb5a 100644
--- a/colossalai/shardformer/modeling/sam.py
+++ b/colossalai/shardformer/modeling/sam.py
@@ -2,6 +2,7 @@ import torch
 from torch import nn
 
 
+# Same as the SamVisionAttention forward method in transformers v4.51.3
 def forward_fn():
     def forward(self, hidden_states: torch.Tensor, output_attentions=False) -> torch.Tensor:
         batch_size, height, width, _ = hidden_states.shape
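
For readers unfamiliar with the pattern the comment annotates: forward_fn() is a
closure that returns a plain forward function whose signature mirrors the original
method's, so a shardformer policy can bind it onto a module instance in place of the
upstream implementation. Below is a minimal sketch of that closure-and-bind pattern;
MyAttention, its proj layer, and the binding snippet are hypothetical stand-ins for
illustration, not ColossalAI's actual SAM policy code.

    import types

    import torch
    from torch import nn


    class MyAttention(nn.Module):
        # Hypothetical stand-in for a module like SamVisionAttention.
        def __init__(self, dim: int):
            super().__init__()
            self.proj = nn.Linear(dim, dim)

        def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
            return self.proj(hidden_states)


    def forward_fn():
        # Returns a replacement `forward` with the same signature as the
        # original method; `self` is supplied when the function is bound.
        def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
            # A customized implementation would go here; this sketch just
            # delegates to the module's own projection layer.
            return self.proj(hidden_states)

        return forward


    module = MyAttention(dim=8)
    # Bind the closure's inner function as an instance method, roughly the
    # way a shardformer policy swaps in a replacement forward.
    module.forward = types.MethodType(forward_fn(), module)
    print(module(torch.randn(2, 4, 8)).shape)  # torch.Size([2, 4, 8])

Keeping the inner forward's signature identical to the upstream method is what makes
the replacement drop-in compatible, which is why the added comment pins the reference
implementation to transformers v4.51.3.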