#!/usr/bin/python3
# -*- coding: utf-8 -*-
# @Time : 2023/5/29 19:50
# @Author : Denxun
# @FileName: pixel_atten.py
# @Software: PyCharm
import torch.nn as nn
import torch
import random
import skimage
import math
import os
import numpy as np
from torchsummary import summary
-
# Pin every RNG source so experiments are exactly reproducible.
seed = 888
os.environ['PYTHONHASHSEED'] = str(seed)  # disable Python hash randomization
random.seed(seed)
np.random.seed(seed)
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
torch.cuda.manual_seed_all(seed)  # covers every visible CUDA device
-
class pixel_att(nn.Module):
- def __init__(self,in_channels, out_channel,imgesize,heads,ker_size,stride):
- super(pixel_att,self).__init__()
- self.pactch_conv=nn.Sequential(nn.Conv2d(in_channels,out_channel,kernel_size=ker_size,stride=stride),
- nn.BatchNorm2d(out_channel),nn.ReLU())#特定卷积核选取patch
- self.token_size=(imgesize//ker_size)**2
- self.layer_norm=nn.LayerNorm(self.token_size)#对隐藏层layernorm
- self.flatten=nn.Flatten(2)#从第二维开始展品
- self.class_token=nn.Parameter(torch.zeros(1,1,self.token_size),requires_grad=True)#添加分类token
- self.pos_embedding = nn.Parameter(torch.randn(1, out_channel + 1,self.token_size),requires_grad=True)#添加位置编码矩阵
- self.dropout=nn.Dropout(0.5)
- self.att=self_att(self.token_size,heads)
- self.mlp=mlp_block(self.token_size)
- self.layer_norm1=nn.LayerNorm(self.token_size)
- def forward(self,x):
- batch,chanel,w,h=x.size()#取batch
- patch_x=self.pactch_conv(x)#进行像素patch
- token=self.flatten(patch_x)#展平后两个维度例如输入为4,1,64,64变为4,1,4096
- token_layer_norm=self.layer_norm(token)#对4096进行layer_norm
- clas_token=torch.repeat_interleave(self.class_token,dim=0,repeats=batch)#复制变为batch,1,imgesize*imgesize
- token_cat_class=torch.cat([token_layer_norm,clas_token],dim=1)#添加分类token
- #print(token_cat_class.shape,(self.pos_embedding[:, :(chanel + 1)]).shape)
- token_cat_class+= self.pos_embedding#[:, :(chanel + 1)]#添加位置编码
- last_token=self.dropout(token_cat_class)
- atten_token=self.att(last_token)
- atten_token=atten_token+last_token#残差连接
- atten_tok