add the explanation

wangbluo 2025-05-14 12:50:07 +08:00
parent b032cf9b16
commit 0e9d628bb7


@@ -2,6 +2,7 @@ import torch
 from torch import nn
+# Same as the SamVisionAttention forward method in the v4.51.3 transformers
 def forward_fn():
     def forward(self, hidden_states: torch.Tensor, output_attentions=False) -> torch.Tensor:
         batch_size, height, width, _ = hidden_states.shape
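For context, forward_fn appears to be a factory: it defines a replacement forward that mirrors SamVisionAttention.forward from transformers v4.51.3 and returns it so a caller can rebind it onto the attention module. Below is a minimal sketch of that pattern only; the attention math is elided, and the types.MethodType usage is an illustrative assumption, not necessarily how this repository applies the function.

import types

import torch


def forward_fn():
    # Body mirrors SamVisionAttention.forward (transformers v4.51.3); abbreviated here.
    def forward(self, hidden_states: torch.Tensor, output_attentions=False) -> torch.Tensor:
        batch_size, height, width, _ = hidden_states.shape
        # ... qkv projection, relative position bias, and attention weights
        # would follow here, copied from the upstream implementation ...
        return hidden_states

    # Returning the inner function lets a patcher or policy bind it later.
    return forward


# Hypothetical usage (assumes `attn` is a constructed SamVisionAttention instance):
# attn.forward = types.MethodType(forward_fn(), attn)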