forked from xmu-xiaoma666/External-Attention-pytorch

Commit 9cb2aea (parent: 3ea4c82)
Showing 6 changed files with 65 additions and 17 deletions.
@@ -0,0 +1,30 @@
import torch
from torch import nn


class sMLPBlock(nn.Module):
    def __init__(self, h=224, w=224, c=3):
        super().__init__()
        # Token mixing along the height and width axes, one Linear per axis.
        self.proj_h = nn.Linear(h, h)
        self.proj_w = nn.Linear(w, w)
        # Channel fusion of the three branches (height, width, identity).
        self.fuse = nn.Linear(3 * c, c)

    def forward(self, x):
        # x: (B, C, H, W)
        x_h = self.proj_h(x.permute(0, 1, 3, 2)).permute(0, 1, 3, 2)  # mix along H
        x_w = self.proj_w(x)                                          # mix along W
        x_id = x                                                      # identity branch
        x_fuse = torch.cat([x_h, x_w, x_id], dim=1)                   # (B, 3C, H, W)
        out = self.fuse(x_fuse.permute(0, 2, 3, 1)).permute(0, 3, 1, 2)  # fuse channels, back to (B, C, H, W)
        return out


if __name__ == '__main__':
    input = torch.randn(50, 3, 224, 224)
    smlp = sMLPBlock(h=224, w=224)
    out = smlp(input)
    print(out.shape)
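For reference, a minimal usage sketch (the fightingcv.mlp.sMLP_block import path is taken from the test script below; the non-square sizes and channel count are illustrative assumptions, not from the commit). The block mixes tokens along the height axis and the width axis, keeps an identity branch, and fuses the three branches across channels, so the output keeps the input's (B, C, H, W) shape. The h, w and c passed to the constructor must match the incoming feature map:

import torch
from fightingcv.mlp.sMLP_block import sMLPBlock

# Assumed non-square feature map; h, w and c must match its spatial size and channels.
feat = torch.randn(2, 64, 56, 28)
block = sMLPBlock(h=56, w=28, c=64)
print(block(feat).shape)  # torch.Size([2, 64, 56, 28])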
@@ -1,23 +1,15 @@
# from attention.gfnet import GFNet
# import torch
# from torch import nn
# from torch.nn import functional as F
from fightingcv.mlp.sMLP_block import sMLPBlock
import torch
from torch import nn
from torch.nn import functional as F

# x = torch.randn(1, 3, 224, 224)
# gfnet = GFNet(embed_dim=384, img_size=224, patch_size=16, num_classes=1000)
# out = gfnet(x)
# print(out.shape)
if __name__ == '__main__':
    input = torch.randn(50, 3, 224, 224)
    smlp = sMLPBlock(h=224, w=224)
    out = smlp(input)
    print(out.shape)


# from backbone_cnn.resnext import ResNeXt50, ResNeXt101, ResNeXt152
# import torch

# if __name__ == '__main__':
#     input = torch.randn(50, 3, 224, 224)
#     resnext50 = ResNeXt50(1000)
#     # resnext101 = ResNeXt101(1000)
#     # resnext152 = ResNeXt152(1000)
#     out = resnext50(input)
#     print(out.shape)