#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu May 9 12:50:29 2019

@author: haoxingliang
"""
import torch.nn as nn

# Configuration: block type followed by the number of blocks in each of the four stages.
cfg = {
    'ResNet18': ['basic', 2, 2, 2, 2],
}
class BasicBlock(nn.Module):
    """Two 3x3 convolutions with an identity (or projected) shortcut."""
    expansion = 1

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(BasicBlock, self).__init__()
        # The first convolution carries the stride, so the block can downsample.
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.downsample = downsample

    def forward(self, x):
        identity = x
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.conv2(out)
        out = self.bn2(out)
        if self.downsample is not None:
            identity = self.downsample(x)
        out += identity
        out = self.relu(out)
        return out
class Bottleneck(nn.Module):
    """1x1 -> 3x3 -> 1x1 convolutions; the final 1x1 expands channels by `expansion`."""
    expansion = 4

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(Bottleneck, self).__init__()
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, stride=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        # The 3x3 convolution carries the stride.
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv3 = nn.Conv2d(planes, planes * self.expansion, kernel_size=1, stride=1, bias=False)
        self.bn3 = nn.BatchNorm2d(planes * self.expansion)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample

    def forward(self, x):
        identity = x
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.conv2(out)
        out = self.bn2(out)
        out = self.relu(out)
        out = self.conv3(out)
        out = self.bn3(out)
        if self.downsample is not None:
            identity = self.downsample(x)
        out += identity
        out = self.relu(out)
        return out
class ResNet(nn.Module):
    """ResNet built from the block type and stage depths listed in `cfg` (ResNet-18 here)."""

    def __init__(self, version, num_classes=10):
        super(ResNet, self).__init__()
        block = BasicBlock if cfg[version][0] == 'basic' else Bottleneck
        num_blocks = cfg[version][1:]
        self.in_planes = 64
        # CIFAR-style stem: a single 3x3 convolution; the ImageNet 7x7 conv + maxpool stem is omitted.
        # Note: padding=1 would keep the 32x32 input size; without it the stem shrinks it to 30x30.
        self.conv1 = nn.Conv2d(3, self.in_planes, kernel_size=3, stride=1, bias=False)
        self.bn1 = nn.BatchNorm2d(self.in_planes)
        self.relu = nn.ReLU(inplace=True)
        # self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
        self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)
        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.fc = nn.Linear(512 * block.expansion, num_classes)

    def forward(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        # x = self.maxpool(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        x = self.avgpool(x)
        x = x.view(x.size(0), -1)  # flatten everything except the batch dimension
        x = self.fc(x)
        return x

    def _make_layer(self, block, planes, num_blocks, stride):
        downsample = None
        # A projection shortcut is needed whenever the spatial size or channel count changes.
        if stride != 1 or self.in_planes != planes * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(self.in_planes, planes * block.expansion, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes * block.expansion),
            )
        layers = []
        # Only the first block in a stage downsamples; the remaining blocks keep stride 1.
        layers.append(block(self.in_planes, planes, stride, downsample))
        self.in_planes = planes * block.expansion
        for _ in range(1, num_blocks):
            layers.append(block(self.in_planes, planes, 1))
        return nn.Sequential(*layers)
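

if __name__ == '__main__':
    # Quick smoke test (a minimal sketch, assuming CIFAR-10-sized 3x32x32 inputs and that
    # `torch` is installed): build the ResNet-18 variant defined in `cfg` above and check
    # that a forward pass produces one logit vector per image.
    import torch

    model = ResNet('ResNet18', num_classes=10)
    dummy = torch.randn(2, 3, 32, 32)  # batch of 2 random CIFAR-sized images
    logits = model(dummy)
    print(logits.shape)  # expected: torch.Size([2, 10])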