
Accessing Intermediate Layers in nn.Sequential()


I.

Construct all layers in order with nn.Sequential, then call the container directly in the forward function.

The defining trait of nn.Sequential: the container is treated as a single module, i.e., one module can hold many layers.

There are three common ways to define a model with nn.Sequential:

① Basic approach: define each layer in order. The layers carry no names, so individual layers can only be accessed by index.

    import torch
    import torch.nn as nn

    class Net(nn.Module):
        def __init__(self, n_feature, n_hidden, n_output):
            super(Net, self).__init__()
            self.net_1 = nn.Sequential(
                nn.Linear(n_feature, n_hidden),
                nn.ReLU(),
                nn.Linear(n_hidden, n_output)
            )

        def forward(self, x):
            x = self.net_1(x)
            return x

    model_2 = Net(1, 10, 1)
    print(model_2)
    '''Output:
    Net(
      (net_1): Sequential(
        (0): Linear(in_features=1, out_features=10, bias=True)
        (1): ReLU()
        (2): Linear(in_features=10, out_features=1, bias=True)
      )
    )
    '''
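Since the layers inside net_1 are unnamed, indexing is the only way to pick one out. A minimal sketch, continuing from model_2 above:

    print(model_2.net_1[0])  # Linear(in_features=1, out_features=10, bias=True)
    print(model_2.net_1[1])  # ReLU()
    print(model_2.net_1[2])  # Linear(in_features=10, out_features=1, bias=True)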

② Give each layer a custom name via an OrderedDict.

    import torch.nn as nn
    from collections import OrderedDict

    model = nn.Sequential(OrderedDict([
        ('conv1', nn.Conv2d(1, 20, 5)),
        ('relu1', nn.ReLU()),
        ('conv2', nn.Conv2d(20, 64, 5)),
        ('relu2', nn.ReLU())
    ]))
    print(model)
    print(model[2])     # access a layer by index
    print(model.conv1)  # access a layer by its custom name
    '''Output:
    Sequential(
      (conv1): Conv2d(1, 20, kernel_size=(5, 5), stride=(1, 1))
      (relu1): ReLU()
      (conv2): Conv2d(20, 64, kernel_size=(5, 5), stride=(1, 1))
      (relu2): ReLU()
    )
    Conv2d(20, 64, kernel_size=(5, 5), stride=(1, 1))
    Conv2d(1, 20, kernel_size=(5, 5), stride=(1, 1))
    '''
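Named layers also make it easy to walk the container and pick out a specific intermediate layer by name. A minimal sketch, continuing from the model above (named_children() comes from nn.Module):

    for name, layer in model.named_children():
        print(name, '->', layer)
    # conv1 -> Conv2d(1, 20, kernel_size=(5, 5), stride=(1, 1))
    # relu1 -> ReLU()
    # conv2 -> Conv2d(20, 64, kernel_size=(5, 5), stride=(1, 1))
    # relu2 -> ReLU()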

③ Add layers to the Sequential one by one with the add_module method. This method is inherited from the nn.Module class (nn.Sequential does not define it itself), and layers added this way can be accessed by their custom names.

    import torch.nn as nn

    model = nn.Sequential()
    model.add_module('conv1', nn.Conv2d(1, 20, 5))
    model.add_module('relu1', nn.ReLU())
    model.add_module('conv2', nn.Conv2d(20, 64, 5))
    model.add_module('relu2', nn.ReLU())

    print(model)
    print(model[2])     # access a layer by index
    print(model.conv1)  # access a layer by its custom name
    '''Output:
    Sequential(
      (conv1): Conv2d(1, 20, kernel_size=(5, 5), stride=(1, 1))
      (relu1): ReLU()
      (conv2): Conv2d(20, 64, kernel_size=(5, 5), stride=(1, 1))
      (relu2): ReLU()
    )
    Conv2d(20, 64, kernel_size=(5, 5), stride=(1, 1))
    Conv2d(1, 20, kernel_size=(5, 5), stride=(1, 1))
    '''
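Because add_module takes the layer name as a string, this style also suits building a Sequential in a loop. A minimal sketch; the channel list here is hypothetical, chosen only to mirror the example above:

    import torch.nn as nn

    layers = nn.Sequential()
    channels = [1, 20, 64]  # hypothetical channel counts, for illustration only
    for i in range(len(channels) - 1):
        layers.add_module('conv%d' % (i + 1), nn.Conv2d(channels[i], channels[i + 1], 5))
        layers.add_module('relu%d' % (i + 1), nn.ReLU())
    print(layers.conv2)  # Conv2d(20, 64, kernel_size=(5, 5), stride=(1, 1))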

II.

With two levels of nested nn.Sequential() containers, a specific layer can be accessed by double indexing. The snippet below (the MobileNet v1 backbone, wrapped here in a class so it runs as written) nests conv_bn/conv_dw blocks, each itself an nn.Sequential, inside an outer nn.Sequential.

    import torch.nn as nn

    def conv_bn(inp, oup, stride):
        # standard 3x3 conv + BN + ReLU block
        return nn.Sequential(
            nn.Conv2d(inp, oup, 3, stride, 1, bias=False),
            nn.BatchNorm2d(oup),
            nn.ReLU(inplace=True)
        )

    def conv_dw(inp, oup, stride):
        # depthwise 3x3 conv + pointwise 1x1 conv, each followed by BN + ReLU
        return nn.Sequential(
            nn.Conv2d(inp, inp, 3, stride, 1, groups=inp, bias=False),
            nn.BatchNorm2d(inp),
            nn.ReLU(inplace=True),
            nn.Conv2d(inp, oup, 1, 1, 0, bias=False),
            nn.BatchNorm2d(oup),
            nn.ReLU(inplace=True),
        )

    class MobileNet(nn.Module):  # class wrapper added so the snippet runs; the original showed only the method bodies
        def __init__(self):
            super(MobileNet, self).__init__()
            self.model = nn.Sequential(
                conv_bn(3, 32, 2),
                conv_dw(32, 64, 1),
                conv_dw(64, 128, 2),
                conv_dw(128, 128, 1),
                conv_dw(128, 256, 2),
                conv_dw(256, 256, 1),
                conv_dw(256, 512, 2),
                conv_dw(512, 512, 1),
                conv_dw(512, 512, 1),
                conv_dw(512, 512, 1),
                conv_dw(512, 512, 1),
                conv_dw(512, 512, 1),
                conv_dw(512, 1024, 2),
                conv_dw(1024, 1024, 1),
                nn.AvgPool2d(7),
            )
            self.fc = nn.Linear(1024, 1000)

        def forward(self, x):
            x = self.model(x)
            x = x.view(-1, 1024)
            x = self.fc(x)
            return x

        def get_bn_before_relu(self):
            # self.model[3][-2] accesses the 4th block of the outer nn.Sequential(),
            # then the second-to-last layer of that inner nn.Sequential()
            bn1 = self.model[3][-2]
            bn2 = self.model[5][-2]
            bn3 = self.model[11][-2]
            bn4 = self.model[13][-2]
            return [bn1, bn2, bn3, bn4]  # return added for completeness; the original snippet ended here
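A quick usage sketch (assuming the MobileNet class wrapper shown above): run a dummy batch through the nested model, then fetch the four BN layers by double indexing:

    import torch

    net = MobileNet()
    out = net(torch.randn(1, 3, 224, 224))  # 224x224 input, so the feature map is 7x7 at the AvgPool2d(7)
    print(out.shape)  # torch.Size([1, 1000])
    for bn in net.get_bn_before_relu():
        print(bn)  # each line prints a BatchNorm2d pulled from inside an inner nn.Sequential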
