First, here is the download address for the 13 attention mechanisms:
https://github.com/z1069614715/objectdetection_script
Copy the following code into a SimAM.py file:
import torch
import torch.nn as nn


class SimAM(torch.nn.Module):
    # Parameter-free attention: the constructor takes no channel count.
    def __init__(self, e_lambda=1e-4):
        super(SimAM, self).__init__()
        self.activation = nn.Sigmoid()
        self.e_lambda = e_lambda

    def __repr__(self):
        s = self.__class__.__name__ + '('
        s += ('lambda=%f)' % self.e_lambda)
        return s

    @staticmethod
    def get_module_name():
        return "simam"

    def forward(self, x):
        b, c, h, w = x.size()

        # number of neurons per channel, excluding the target neuron itself
        n = w * h - 1

        # squared deviation of each position from its channel mean
        x_minus_mu_square = (x - x.mean(dim=[2, 3], keepdim=True)).pow(2)
        # closed-form energy per neuron; the sigmoid turns it into a
        # per-position attention weight that rescales the input
        y = x_minus_mu_square / (4 * (x_minus_mu_square.sum(dim=[2, 3], keepdim=True) / n + self.e_lambda)) + 0.5

        return x * self.activation(y)
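As a quick sanity check, SimAM can be run on a random feature map; since it learns no weights, the output shape matches the input. This smoke test is my own minimal sketch, not part of the original post:

import torch

# Hypothetical smoke test: SimAM rescales a (batch, channel, h, w) feature
# map without changing its shape and without adding learnable parameters.
attn = SimAM(e_lambda=1e-4)
x = torch.randn(2, 64, 20, 20)
out = attn(x)
print(out.shape)                                  # torch.Size([2, 64, 20, 20])
print(sum(p.numel() for p in attn.parameters()))  # 0 -> parameter-free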
Adding the attention mechanism at a given layer

Each layer entry in the model yaml follows the format [from, number, module, args].

Note!!!

After you insert an attention layer, the indices of every layer after it shift by one, so remember to update the layer indices referenced in the detection head (see the yaml sketch below).
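For illustration, here is a hedged sketch of what the edit and the index fix can look like in a yolov5s-style yaml; the insertion point and the Detect indices are illustrative assumptions, not values from the original post:

# Hypothetical excerpt of a modified yolov5s.yaml
backbone:
  # [from, number, module, args]
  [
   ...                          # layers 0-8 unchanged
   [-1, 1, SimAM, [1e-4]],      # 9: inserted attention layer
   [-1, 1, SPPF, [1024, 5]],    # 10: was layer 9 before the insert
  ]

head:
  [
   ...
   # if Detect previously read [17, 20, 23], every index after the
   # inserted layer shifts by one, so it must now read:
   [[18, 21, 24], 1, Detect, [nc, anchors]],
  ]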
import torch
from torch import nn
from torch.nn import init


class SEAttention(nn.Module):

    def __init__(self, channel=512, reduction=16):
        super().__init__()
        # Squeeze: global average pooling down to 1x1 per channel
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        # Excitation: bottleneck MLP producing per-channel weights in (0, 1)
        self.fc = nn.Sequential(
            nn.Linear(channel, channel // reduction, bias=False),
            nn.ReLU(inplace=True),
            nn.Linear(channel // reduction, channel, bias=False),
            nn.Sigmoid()
        )

    def init_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                init.kaiming_normal_(m.weight, mode='fan_out')
                if m.bias is not None:
                    init.constant_(m.bias, 0)
            elif isinstance(m, nn.BatchNorm2d):
                init.constant_(m.weight, 1)
                init.constant_(m.bias, 0)
            elif isinstance(m, nn.Linear):
                init.normal_(m.weight, std=0.001)
                if m.bias is not None:
                    init.constant_(m.bias, 0)

    def forward(self, x):
        b, c, _, _ = x.size()
        # squeeze spatial dimensions, compute per-channel weights,
        # then rescale the input channel-wise
        y = self.avg_pool(x).view(b, c)
        y = self.fc(y).view(b, c, 1, 1)
        return x * y.expand_as(x)
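A minimal smoke test (my own sketch, assuming a 512-channel feature map, which matches the constructor default):

import torch

# Hypothetical usage: channel must match the incoming feature map depth.
se = SEAttention(channel=512, reduction=16)
x = torch.randn(2, 512, 20, 20)
out = se(x)
print(out.shape)  # torch.Size([2, 512, 20, 20]) -- input rescaled per channel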
Add these two lines of code (in YOLOv5 this typically goes into the elif chain of the parse_model function in models/yolo.py) so the yaml parser passes the incoming channel count to SEAttention; a placement sketch follows the snippet:
        elif m is SEAttention:
            args = [ch[f]]
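For context, here is a self-contained sketch of the dispatch idea behind those two lines. This is my own simplification of parse_model, not the actual YOLOv5 source; the helper name build_layer and the channel list are hypothetical:

# Minimal sketch of the parse_model dispatch idea (uses the SEAttention
# class defined above; build_layer and ch are illustrative, not YOLOv5 API).
def build_layer(m, f, args, ch):
    """m: module class, f: 'from' index, ch: output channels of built layers."""
    if m is SEAttention:
        # SEAttention needs the channel count of the layer it attaches to,
        # so the yaml entry does not have to hard-code it. A parameter-free
        # module like SimAM would skip this and keep its yaml args as-is.
        args = [ch[f]]
    return m(*args)

ch = [64, 128, 256]                            # channels built so far (illustrative)
layer = build_layer(SEAttention, -1, [], ch)   # attaches to the 256-channel map
print(layer)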