Skip to content

Commit

Permalink
Update siglip_vit.py
Browse files Browse the repository at this point in the history
  • Loading branch information
StevenLiuWen committed Jan 16, 2025
1 parent 9dd1776 commit ff23960
Showing 1 changed file with 1 addition and 1 deletion.
2 changes: 1 addition & 1 deletion deepseek_vl2/models/siglip_vit.py
Original file line number Diff line number Diff line change
Expand Up @@ -139,7 +139,7 @@ def forward(self, x: torch.Tensor) -> torch.Tensor:

if not self.qk_norm:
if self.head_dim % 32 == 0 and is_flash_attn_2_available():
# flashattn的head_dim必须是32的倍数,SigLIP-SO400M无法使用flashattn
# flash-attn requires head_dim to be a multiple of 32, so SigLIP-SO400M cannot use flash-attn
x = flash_attn_qkvpacked_func(qkv, dropout_p=self.attn_drop.p if self.training else 0.,
deterministic=self.deterministic)
else:
Expand Down

0 comments on commit ff23960

Please sign in to comment.