1. A brief introduction to ResNet
I have already written a separate article on this; if you are interested, go back and have a look.
2. A simple implementation of ResNet34
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 17 09:31:45 2020
@author: elliot
"""
import torch as t
import torch.nn as nn
from torch.nn import functional as F
class ResidualBlock(nn.Module):
    # Explicitly inherits from nn.Module.
    # A residual block is built from ordinary convolutional layers.
    def __init__(self, inchannel, outchannel, stride=1, shortcut=None):
        # `shortcut` is the skip connection, the core idea behind ResNet
        # (and DenseNet).
        # All layers are defined in __init__.
        super(ResidualBlock, self).__init__()
        self.left = nn.Sequential(
            nn.Conv2d(inchannel, outchannel, 3, stride, 1, bias=False),
            nn.BatchNorm2d(outchannel),
            nn.ReLU(inplace=True),
            nn.Conv2d(outchannel, outchannel, 3, 1, 1, bias=False),
            nn.BatchNorm2d(outchannel))
        self.right = shortcut
    def forward(self, x):
out = self.left(x)
residual = x if self.right is None else self.right(x)
out += residual
return F.relu(out)
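# A worked shape example (my own annotation, not part of the original post):
# with inchannel=64, outchannel=128, stride=2 and a 1x1-conv shortcut, `left`
# maps an input of shape (N, 64, 56, 56) to (N, 128, 28, 28); the shortcut
# projects the input to that same shape, so the two tensors can be summed
# element-wise before the final ReLU.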
class ResNet(nn.Module):
    # The ResNet family includes 34-, 50- and 101-layer variants, which can be
    # implemented as needed; this one is ResNet34.
    def __init__(self, num_classes=1000):
        super(ResNet, self).__init__()
        self.pre = nn.Sequential(nn.Conv2d(3, 64, 7, 2, 3, bias=False),
                                 nn.BatchNorm2d(64),  # 64 is the number of feature maps
                                 nn.ReLU(inplace=True),
                                 nn.MaxPool2d(3, 2, 1))
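        # Stem shapes (my annotation): a 3 x 224 x 224 input becomes
        # 64 x 112 x 112 after the stride-2 7x7 conv, then 64 x 56 x 56
        # after the stride-2 max pool.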
self.layer1 = self._make_layer(64, 128, 3)
self.layer2 = self._make_layer(128, 256, 4, stride=2)
self.layer3 = self._make_layer(256, 512, 6, stride=2)
self.layer4 = self._make_layer(512, 512, 3, stride=2)
self.fc = nn.Linear(512, num_classes)
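        # Note (my annotation): the stage widths here (128, 256, 512, 512)
        # follow the original post; they differ slightly from torchvision's
        # resnet34, whose four stages use 64, 128, 256 and 512 channels.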
    def _make_layer(self, inchannel, outchannel, block_num, stride=1):
        short_cut = nn.Sequential(
            nn.Conv2d(inchannel, outchannel, 1, stride, bias=False),
            nn.BatchNorm2d(outchannel)
        )
        layers = []
        layers.append(ResidualBlock(inchannel, outchannel, stride, short_cut))
        for i in range(1, block_num):
            # Within a stage, input and output channels must match.
            layers.append(ResidualBlock(outchannel, outchannel))
        return nn.Sequential(*layers)
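    # For example (my annotation): _make_layer(64, 128, 3) returns one
    # projection block (64 -> 128, using the 1x1-conv shortcut built above)
    # followed by two identity blocks (128 -> 128, shortcut=None), all
    # wrapped in a single nn.Sequential.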
def forward(self, x):
x = self.pre(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
        x = F.avg_pool2d(x, 7)  # note: F.avg_pool2d is the functional form of the nn.AvgPool2d module
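        # flatten (N, 512, 1, 1) into (N, 512) before the fully connected layer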
x = x.view(x.size(0), -1)
return self.fc(x)
if __name__ == '__main__':
net = ResNet()
print(net)
    # Variable has been deprecated since PyTorch 0.4; plain tensors now carry
    # autograd, so a tensor can be fed to the network directly.
    x = t.randn(1, 3, 224, 224)
    o = net(x)
    print(o)
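As a quick sanity check that the network can actually be trained, here is a minimal one-step training sketch. This is my own addition rather than part of the original script: the batch of random images and random labels is a hypothetical stand-in for a real dataset, and the hyperparameters are arbitrary.

# Minimal one-step training sketch (illustrative only; random tensors stand in
# for a real dataset, and the hyperparameters are arbitrary).
import torch as t
import torch.nn as nn

net = ResNet(num_classes=1000)
criterion = nn.CrossEntropyLoss()                 # standard classification loss
optimizer = t.optim.SGD(net.parameters(), lr=0.01, momentum=0.9)

images = t.randn(4, 3, 224, 224)                  # fake batch of 4 RGB images
labels = t.randint(0, 1000, (4,))                 # fake class indices

optimizer.zero_grad()                             # clear old gradients
loss = criterion(net(images), labels)             # forward pass + loss
loss.backward()                                   # backpropagation
optimizer.step()                                  # one SGD update
print(loss.item())

For a freshly initialized 1000-class network, the printed cross-entropy should come out at roughly ln(1000), about 6.9.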