"""ResNet building blocks in PyTorch: residual blocks, an encoder/decoder pair, and a resnet18 constructor."""
import torch
import torch.nn as nn
from functools import partial


class Conv2dAuto(nn.Conv2d):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Derive "same" padding from the kernel size, so odd kernels
        # preserve the spatial size at stride 1.
        self.padding = (self.kernel_size[0] // 2, self.kernel_size[1] // 2)


# 3x3 convolution with automatic padding; bias is omitted because a
# BatchNorm layer follows every convolution in this model.
conv3x3 = partial(Conv2dAuto, kernel_size=3, bias=False)
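
# A minimal usage sketch (not part of the model): at stride 1, conv3x3
# preserves the spatial dimensions thanks to the derived padding, e.g.
#   conv = conv3x3(in_channels=32, out_channels=64)
#   conv(torch.rand(1, 32, 56, 56)).shape  # torch.Size([1, 64, 56, 56])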

def activations_func(activation):
    """Look up a fresh activation module by name: "relu", "leaky_relu", "selu", or "none"."""
    return nn.ModuleDict([
        ["relu", nn.ReLU(inplace=True)],
        ["leaky_relu", nn.LeakyReLU(negative_slope=0.01, inplace=True)],
        ["selu", nn.SELU(inplace=True)],
        ["none", nn.Identity()],
    ])[activation]
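
# Usage sketch: each call builds a new module, so blocks never share
# activation instances, e.g.
#   activations_func("leaky_relu")(torch.tensor([-1.0]))  # tensor([-0.0100])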

class ResidualBlock(nn.Module):
    """Generic residual block: out = activation(blocks(x) + shortcut(x))."""

    def __init__(self, in_channels, out_channels, activation="relu"):
        super().__init__()
        self.in_channels, self.out_channels, self.activation = in_channels, out_channels, activation
        self.blocks = nn.Identity()
        self.activate = activations_func(activation)
        self.shortcut = nn.Identity()

    def forward(self, x):
        residual = x
        # Project the input only when the channel counts differ.
        if self.should_apply_shortcut:
            residual = self.shortcut(x)
        x = self.blocks(x)
        x += residual
        x = self.activate(x)
        return x

    @property
    def should_apply_shortcut(self):
        return self.in_channels != self.out_channels
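
# Sketch: with the default identity blocks and matching channels, the block
# reduces to activation(x + x), e.g.
#   ResidualBlock(32, 32)(torch.ones(1, 32, 4, 4))  # every entry is relu(2.0) = 2.0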

class ResNetResidualBlock(ResidualBlock):
    """Residual block with a 1x1 conv + BatchNorm projection shortcut."""

    def __init__(self, in_channels, out_channels, expansion=1, downsampling=1, conv=conv3x3, *args, **kwargs):
        super().__init__(in_channels, out_channels, *args, **kwargs)
        self.expansion, self.downsampling, self.conv = expansion, downsampling, conv
        # The 1x1 projection matches both the channel count and the stride of
        # the main path, so the residual sum is shape-compatible.
        self.shortcut = nn.Sequential(
            nn.Conv2d(self.in_channels, self.expanded_channels, kernel_size=1,
                      stride=self.downsampling, bias=False),
            nn.BatchNorm2d(self.expanded_channels),
        ) if self.should_apply_shortcut else None

    @property
    def expanded_channels(self):
        return self.out_channels * self.expansion

    @property
    def should_apply_shortcut(self):
        return self.in_channels != self.expanded_channels
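
# Channel arithmetic sketch: with expansion=4 (the bottleneck setting), a
# block declared with out_channels=64 actually emits
# expanded_channels = 64 * 4 = 256 feature maps, and the projection
# shortcut fires whenever in_channels != 256.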

def conv_bn(in_channels, out_channels, conv, *args, **kwargs):
    """Stack a convolution and its BatchNorm: the repeating unit of every block."""
    return nn.Sequential(conv(in_channels, out_channels, *args, **kwargs), nn.BatchNorm2d(out_channels))

class ResNetBasicBlock(ResNetResidualBlock):
    """
    Basic ResNet block composed of two 3x3 conv/batchnorm layers with an
    activation in between; the first convolution carries the downsampling stride.
    """
    expansion = 1

    def __init__(self, in_channels, out_channels, *args, **kwargs):
        super().__init__(in_channels, out_channels, *args, **kwargs)
        self.blocks = nn.Sequential(
            conv_bn(self.in_channels, self.out_channels, conv=self.conv, bias=False, stride=self.downsampling),
            activations_func(self.activation),
            conv_bn(self.out_channels, self.expanded_channels, conv=self.conv, bias=False),
        )
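
# Shape sketch: a basic block with downsampling=2 halves the spatial size
# while changing the channel count, e.g.
#   block = ResNetBasicBlock(64, 128, downsampling=2)
#   block(torch.rand(1, 64, 56, 56)).shape  # torch.Size([1, 128, 28, 28])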

class ResNetBottleNeckBlock(ResNetResidualBlock):
    """
    Bottleneck block: a 1x1 conv reduces the channels, a 3x3 conv processes
    them (and downsamples), and a final 1x1 conv expands them by `expansion`.
    """
    expansion = 4

    def __init__(self, in_channels, out_channels, *args, **kwargs):
        super().__init__(in_channels, out_channels, expansion=4, *args, **kwargs)
        self.blocks = nn.Sequential(
            conv_bn(self.in_channels, self.out_channels, self.conv, kernel_size=1),
            activations_func(self.activation),
            conv_bn(self.out_channels, self.out_channels, self.conv, kernel_size=3, stride=self.downsampling),
            activations_func(self.activation),
            conv_bn(self.out_channels, self.expanded_channels, self.conv, kernel_size=1),
        )
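
# Shape sketch: a bottleneck block emits 4x its declared out_channels, e.g.
#   block = ResNetBottleNeckBlock(64, 64)
#   block(torch.rand(1, 64, 56, 56)).shape  # torch.Size([1, 256, 56, 56])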

class ResNetLayer(nn.Module):
    """
    A ResNet layer composed of `n` blocks stacked one after the other;
    the first block downsamples whenever the channel count changes.
    """

    def __init__(self, in_channels, out_channels, block=ResNetBasicBlock, n=1, *args, **kwargs):
        super().__init__()
        # Downsample with stride 2 on the first block when channels change.
        downsampling = 2 if in_channels != out_channels else 1
        self.blocks = nn.Sequential(
            block(in_channels, out_channels, *args, **kwargs, downsampling=downsampling),
            # Subsequent blocks consume the expanded output of the previous one.
            *[block(out_channels * block.expansion,
                    out_channels, downsampling=1, *args, **kwargs) for _ in range(n - 1)]
        )

    def forward(self, x):
        x = self.blocks(x)
        return x
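
# Shape sketch: a layer of two basic blocks going 64 -> 128 downsamples once, e.g.
#   layer = ResNetLayer(64, 128, block=ResNetBasicBlock, n=2)
#   layer(torch.rand(1, 64, 48, 48)).shape  # torch.Size([1, 128, 24, 24])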

class ResNetEncoder(nn.Module):
    """
    ResNet encoder composed of layers with an increasing number of features.
    """

    def __init__(self, in_channels=3, blocks_sizes=[64, 128, 256, 512], depths=[2, 2, 2, 2],
                 activation="relu", block=ResNetBasicBlock, *args, **kwargs):
        super().__init__()
        self.blocks_sizes = blocks_sizes
        # Stem ("gate"): 7x7 stride-2 convolution followed by a stride-2 max
        # pool, for an overall 4x spatial reduction before the residual layers.
        self.gate = nn.Sequential(
            nn.Conv2d(in_channels, self.blocks_sizes[0], kernel_size=7, stride=2, padding=3, bias=False),
            nn.BatchNorm2d(self.blocks_sizes[0]),
            activations_func(activation),
            nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        )
        # Consecutive (in, out) channel pairs, e.g. (64, 128), (128, 256), ...
        self.in_out_block_sizes = list(zip(blocks_sizes, blocks_sizes[1:]))
        self.blocks = nn.ModuleList([
            # The first layer keeps the channel count, so it does not downsample.
            ResNetLayer(blocks_sizes[0], blocks_sizes[0], n=depths[0], activation=activation,
                        block=block, *args, **kwargs),
            # Later layers consume the expanded output of the previous layer.
            *[ResNetLayer(in_channels * block.expansion,
                          out_channels, n=n, activation=activation,
                          block=block, *args, **kwargs)
              for (in_channels, out_channels), n in zip(self.in_out_block_sizes, depths[1:])]
        ])

    def forward(self, x):
        x = self.gate(x)
        for block in self.blocks:
            x = block(x)
        return x
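
# Shape sketch: with the defaults (basic blocks, depths [2, 2, 2, 2]), a
# 224x224 RGB input shrinks by 2x at the gate conv, the max pool, and each
# of the last three layers, e.g.
#   enc = ResNetEncoder()
#   enc(torch.rand(1, 3, 224, 224)).shape  # torch.Size([1, 512, 7, 7])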

class ResNetDecoder(nn.Module):
    """
    The tail of ResNet: global average pooling followed by a fully connected
    layer that maps the features to class logits.
    """

    def __init__(self, in_features, n_classes):
        super().__init__()
        self.avg = nn.AdaptiveAvgPool2d((1, 1))
        self.decoder = nn.Linear(in_features, n_classes)

    def forward(self, x):
        x = self.avg(x)
        # Flatten (batch, channels, 1, 1) to (batch, channels) for the linear layer.
        x = x.view(x.size(0), -1)
        x = self.decoder(x)
        return x

class ResNet(nn.Module):
    """Full model: a ResNetEncoder followed by a ResNetDecoder."""

    def __init__(self, in_channels, n_classes, *args, **kwargs):
        super().__init__()
        self.encoder = ResNetEncoder(in_channels, *args, **kwargs)
        # The decoder's input width is the expanded channel count of the
        # last block of the last layer.
        self.decoder = ResNetDecoder(self.encoder.blocks[-1].blocks[-1].expanded_channels, n_classes)

    def forward(self, x):
        x = self.encoder(x)
        x = self.decoder(x)
        return x

def resnet18(in_channels, n_classes, block=ResNetBasicBlock, *args, **kwargs):
    """ResNet-18: four layers of two basic blocks each."""
    return ResNet(in_channels, n_classes, block=block, depths=[2, 2, 2, 2], *args, **kwargs)
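
# A minimal smoke test (a sketch, not part of the original module): builds
# ResNet-18 for 10 classes and checks the output shape on a dummy batch.
if __name__ == "__main__":
    model = resnet18(in_channels=3, n_classes=10)
    out = model(torch.rand(2, 3, 224, 224))
    print(out.shape)  # expected: torch.Size([2, 10])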