Definitions of the GnBlock and C3GN modules (added to models/common.py):

```python
import torch
import torch.nn as nn

# GnConv (HorNet's recursive gated convolution) and the ConvNeXt-style
# LayerNorm are defined elsewhere in models/common.py.


class GnBlock(nn.Module):
    # HorNet-style block: gnconv spatial mixing + pointwise MLP, each scaled by a layer-scale parameter
    def __init__(self, dim, shortcut=False, layer_scale_init_value=1e-6):
        super().__init__()
        self.shortcut = shortcut
        self.norm1 = LayerNorm(dim, eps=1e-6, data_format='channels_first')
        self.gnconv = GnConv(dim, dim)  # recursive gated convolution (gnconv)
        self.norm2 = LayerNorm(dim, eps=1e-6)
        self.pwconv1 = nn.Linear(dim, 2 * dim)  # pointwise/1x1 convs, implemented with linear layers
        self.act = nn.GELU()
        self.pwconv2 = nn.Linear(2 * dim, dim)
        self.gamma1 = nn.Parameter(layer_scale_init_value * torch.ones(dim),
                                   requires_grad=True) if layer_scale_init_value > 0 else None
        self.gamma2 = nn.Parameter(layer_scale_init_value * torch.ones(dim),
                                   requires_grad=True) if layer_scale_init_value > 0 else None

    def forward(self, x):
        B, C, H, W = x.shape
        gamma1 = self.gamma1.view(C, 1, 1) if self.gamma1 is not None else 1
        # spatial mixing branch (residual only when shortcut=True)
        x = (x + gamma1 * self.gnconv(self.norm1(x))) if self.shortcut else gamma1 * self.gnconv(self.norm1(x))

        input = x
        x = x.permute(0, 2, 3, 1)  # (N, C, H, W) -> (N, H, W, C)
        x = self.norm2(x)
        x = self.pwconv1(x)
        x = self.act(x)
        x = self.pwconv2(x)
        if self.gamma2 is not None:
            x = self.gamma2 * x
        x = x.permute(0, 3, 1, 2)  # (N, H, W, C) -> (N, C, H, W)
        x = (input + x) if self.shortcut else x
        return x


class C3GN(nn.Module):
    # CSP-style block: three GnConv projections around n stacked GnBlocks
    def __init__(self, c1, c2, n=1, shortcut=True, e=0.5):  # ch_in, ch_out, number, shortcut, expansion
        super().__init__()
        c_ = int(c2 * e)  # hidden channels
        self.cv1 = GnConv(c1, c_, 3)
        self.cv2 = GnConv(c1, c_, 3)
        self.cv3 = GnConv(2 * c_, c2, 3)
        self.m = nn.Sequential(*[GnBlock(c_, shortcut) for _ in range(n)])

    def forward(self, x):
        return self.cv3(torch.cat((self.m(self.cv1(x)), self.cv2(x)), dim=1))
```
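The block above assumes GnConv and the ConvNeXt-style LayerNorm already live in models/common.py. As a minimal sketch for shape-checking C3GN in isolation, the GnConv below is a hypothetical stand-in (a plain conv-BN-SiLU that only mimics the `GnConv(c1, c2, k=1, s=1)` constructor), not HorNet's real gnconv; LayerNorm follows the usual ConvNeXt implementation.

```python
# Minimal shape-check sketch. GnConv here is a HYPOTHETICAL stand-in that only
# matches the GnConv(c1, c2, k=1, s=1) signature used by GnBlock / C3GN.
import torch
import torch.nn as nn
import torch.nn.functional as F


class GnConv(nn.Module):  # stand-in only; the real module is HorNet's recursive gated conv
    def __init__(self, c1, c2, k=1, s=1):
        super().__init__()
        self.conv = nn.Conv2d(c1, c2, k, s, k // 2, bias=False)
        self.bn = nn.BatchNorm2d(c2)
        self.act = nn.SiLU()

    def forward(self, x):
        return self.act(self.bn(self.conv(x)))


class LayerNorm(nn.Module):  # ConvNeXt-style LayerNorm used by GnBlock
    def __init__(self, normalized_shape, eps=1e-6, data_format="channels_last"):
        super().__init__()
        self.weight = nn.Parameter(torch.ones(normalized_shape))
        self.bias = nn.Parameter(torch.zeros(normalized_shape))
        self.eps = eps
        self.data_format = data_format
        self.normalized_shape = (normalized_shape,)

    def forward(self, x):
        if self.data_format == "channels_last":       # input is (N, H, W, C)
            return F.layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps)
        u = x.mean(1, keepdim=True)                   # channels_first: (N, C, H, W)
        s = (x - u).pow(2).mean(1, keepdim=True)
        x = (x - u) / torch.sqrt(s + self.eps)
        return self.weight[:, None, None] * x + self.bias[:, None, None]


if __name__ == "__main__":
    x = torch.randn(1, 256, 40, 40)              # dummy feature map
    m = C3GN(256, 128, n=1, shortcut=False)      # same arguments as layer 17 in the summary below
    print(m(x).shape)                            # expected: torch.Size([1, 128, 40, 40])
```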
Model build printout of the modified network (yolov5s with GnConv/C3GN in the neck):

```
                 from  n    params  module                                  arguments
  0                -1  1      3520  models.common.Conv                      [3, 32, 6, 2, 2]
  1                -1  1     18560  models.common.Conv                      [32, 64, 3, 2]
  2                -1  1     18816  models.common.C3                        [64, 64, 1]
  3                -1  1     73984  models.common.Conv                      [64, 128, 3, 2]
  4                -1  2    115712  models.common.C3                        [128, 128, 2]
  5                -1  1    295424  models.common.Conv                      [128, 256, 3, 2]
  6                -1  3    625152  models.common.C3                        [256, 256, 3]
  7                -1  1   1180672  models.common.Conv                      [256, 512, 3, 2]
  8                -1  1   1182720  models.common.C3                        [512, 512, 1]
  9                -1  1    656896  models.common.SPPF                      [512, 512, 5]
 10                -1  1    246400  models.common.GnConv                    [512, 256, 3, 1]
 11                -1  1         0  torch.nn.modules.upsampling.Upsample    [None, 2, 'nearest']
 12           [-1, 6]  1         0  models.common.Concat                    [1]
 13                -1  1    506176  models.common.C3GN                      [512, 256, 1, False]
 14                -1  1     63296  models.common.GnConv                    [256, 128, 3, 1]
 15                -1  1         0  torch.nn.modules.upsampling.Upsample    [None, 2, 'nearest']
 16           [-1, 4]  1         0  models.common.Concat                    [1]
 17                -1  1    131872  models.common.C3GN                      [256, 128, 1, False]
 18                -1  1     64192  models.common.GnConv                    [128, 128, 3, 2]
 19          [-1, 14]  1         0  models.common.Concat                    [1]
 20                -1  1    505664  models.common.C3GN                      [256, 256, 1, False]
 21                -1  1    248192  models.common.GnConv                    [256, 256, 3, 2]
 22          [-1, 10]  1         0  models.common.Concat                    [1]
 23                -1  1   1981056  models.common.C3GN                      [512, 512, 1, False]
 24      [17, 20, 23]  1    229245  Detect                                  [80, [[10, 13, 16, 30, 33, 23], [30, 61, 62, 45, 59, 119], [116, 90, 156, 198, 373, 326]], [128, 256, 512]]
Model Summary: 366 layers, 8147549 parameters, 8147549 gradients, 18.5 GFLOPs
```
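This table is the per-layer printout that yolov5's parse_model emits while building the network. A minimal sketch of reproducing it, assuming the yolov5 v6.0 repo with GnConv/C3GN added to models/common.py and registered in parse_model, and assuming the modified config is saved under a hypothetical name models/yolov5s-c3gn-neck.yaml:

```python
# Sketch: rebuild the model and reproduce the layer table above.
# Assumes yolov5 v6.0, GnConv / C3GN registered in parse_model, and the
# config stored at the hypothetical path models/yolov5s-c3gn-neck.yaml.
import torch
from models.yolo import Model

model = Model('models/yolov5s-c3gn-neck.yaml', ch=3, nc=80)  # printing happens during construction
out = model(torch.zeros(1, 3, 640, 640))   # raw predictions from the three heads (strides 8/16/32)
print([o.shape for o in out])
```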
On a private in-house fruit-and-melon dataset, the detection improvement is clear. mAP comparison curves for yolov5s vs. the yolov5s-c3gn-neck model:

Visualization results:
This idea can be applied to any conv-based object detector, e.g. yolov3 through yolov7, centernet, yolox, yolor, faster rcnn, and so on. I have packaged the modification against yolov5 v6.0; if you want help adapting it to other networks, feel free to message me. For other object-detection model improvements, see my other posts.
Thanks to a reader who pointed out that my previous post overlapped with someone else's work: I have taken that post down, adjusted the network, reran the experiments overnight, and am posting this update.