|
- import math
- import torch
- import torch.nn as nn
- import torch.nn.functional as F
-
-
# DenseNet-B: bottleneck dense layer (1x1 conv then 3x3 conv).
class Bottleneck(nn.Module):
    """Bottleneck dense layer: 1x1 conv -> BN -> ReLU, 3x3 conv -> BN -> ReLU.

    The newly computed features (``growthRate`` channels) are concatenated
    onto the input along the channel dimension, so the output has
    ``nChannels + growthRate`` channels.
    """

    def __init__(self, nChannels, growthRate, use_dropout):
        super(Bottleneck, self).__init__()
        # 1x1 conv expands to 4 * growthRate intermediate channels (DenseNet-B).
        inter = 4 * growthRate
        self.bn1 = nn.BatchNorm2d(inter)
        self.conv1 = nn.Conv2d(nChannels, inter, kernel_size=1, bias=False)
        self.bn2 = nn.BatchNorm2d(growthRate)
        self.conv2 = nn.Conv2d(inter, growthRate, kernel_size=3, padding=1, bias=False)
        self.use_dropout = use_dropout
        self.dropout = nn.Dropout(p=0.2)

    def forward(self, x):
        # Stage 1: 1x1 conv -> BN -> ReLU (inplace is safe: fresh tensor).
        features = self.conv1(x)
        features = F.relu(self.bn1(features), inplace=True)
        if self.use_dropout:
            features = self.dropout(features)
        # Stage 2: 3x3 conv -> BN -> ReLU.
        features = self.conv2(features)
        features = F.relu(self.bn2(features), inplace=True)
        if self.use_dropout:
            features = self.dropout(features)
        # Dense connectivity: keep the input and append the new features.
        return torch.cat((x, features), 1)
-
-
# single layer (plain dense layer, no bottleneck): BN -> ReLU -> 3x3 conv
class SingleLayer(nn.Module):
    """Plain dense layer: BN -> ReLU -> 3x3 conv, concatenated with the input.

    Output has ``nChannels + growthRate`` channels.

    Fixes two defects in the original forward pass:
    - ``self.bn1`` was constructed in ``__init__`` but never applied.
    - ``F.relu(x, inplace=True)`` mutated the input tensor ``x`` in place;
      that mutated ``x`` was then concatenated into the output (and in-place
      ops on an input can corrupt autograd). ReLU now runs on the fresh
      output of ``bn1``, leaving ``x`` untouched.
    """

    def __init__(self, nChannels, growthRate, use_dropout):
        super(SingleLayer, self).__init__()
        self.bn1 = nn.BatchNorm2d(nChannels)
        self.conv1 = nn.Conv2d(nChannels, growthRate, kernel_size=3, padding=1, bias=False)
        self.use_dropout = use_dropout
        self.dropout = nn.Dropout(p=0.2)

    def forward(self, x):
        # Pre-activation ordering; inplace ReLU is safe on bn1's fresh output.
        out = self.conv1(F.relu(self.bn1(x), inplace=True))
        if self.use_dropout:
            out = self.dropout(out)
        # Dense connectivity: original x is preserved and prepended.
        out = torch.cat((x, out), 1)
        return out
-
-
# transition layer: 1x1 conv to compress channels, then 2x2 average pooling
class Transition(nn.Module):
    """Transition between dense blocks: 1x1 conv -> BN -> ReLU -> avg-pool /2.

    Halves the spatial resolution (``ceil_mode=True`` keeps partial edge
    cells) and maps ``nChannels`` down to ``nOutChannels``.
    """

    def __init__(self, nChannels, nOutChannels, use_dropout):
        super(Transition, self).__init__()
        self.bn1 = nn.BatchNorm2d(nOutChannels)
        self.conv1 = nn.Conv2d(nChannels, nOutChannels, kernel_size=1, bias=False)
        self.use_dropout = use_dropout
        self.dropout = nn.Dropout(p=0.2)

    def forward(self, x):
        # 1x1 conv -> BN -> ReLU (inplace is safe: fresh tensor).
        compressed = F.relu(self.bn1(self.conv1(x)), inplace=True)
        if self.use_dropout:
            compressed = self.dropout(compressed)
        # Downsample by 2; ceil_mode covers odd spatial sizes.
        return F.avg_pool2d(compressed, 2, ceil_mode=True)
-
-
class DenseNet(nn.Module):
    """Three-stage DenseNet feature encoder.

    Layout: 7x7 stride-2 stem conv -> max-pool /2 ->
    dense1 -> trans1 -> dense2 -> trans2 -> dense3.

    Hyperparameters come from ``params['densenet']`` (growthRate, reduction,
    bottleneck, use_dropout) and ``params['encoder']['input_channel']``.
    """

    def __init__(self, params):
        super(DenseNet, self).__init__()
        cfg = params['densenet']
        growth = cfg['growthRate']
        reduction = cfg['reduction']
        bottleneck = cfg['bottleneck']
        use_dropout = cfg['use_dropout']

        layers_per_block = 16
        channels = 2 * growth
        # Stem: 7x7 stride-2 conv from the raw input channels.
        self.conv1 = nn.Conv2d(params['encoder']['input_channel'], channels, kernel_size=7, padding=3, stride=2, bias=False)

        # Stage 1: dense block, then compress channels by `reduction`.
        self.dense1 = self._make_dense(channels, growth, layers_per_block, bottleneck, use_dropout)
        channels += layers_per_block * growth
        compressed = int(math.floor(channels * reduction))
        self.trans1 = Transition(channels, compressed, use_dropout)

        # Stage 2: same pattern at the reduced width.
        channels = compressed
        self.dense2 = self._make_dense(channels, growth, layers_per_block, bottleneck, use_dropout)
        channels += layers_per_block * growth
        compressed = int(math.floor(channels * reduction))
        self.trans2 = Transition(channels, compressed, use_dropout)

        # Stage 3: final dense block, no transition afterwards.
        channels = compressed
        self.dense3 = self._make_dense(channels, growth, layers_per_block, bottleneck, use_dropout)

    def _make_dense(self, nChannels, growthRate, nDenseBlocks, bottleneck, use_dropout):
        # Stack nDenseBlocks dense layers; each layer adds growthRate channels,
        # so each successive layer sees a wider input.
        layer_cls = Bottleneck if bottleneck else SingleLayer
        layers = []
        width = nChannels
        for _ in range(int(nDenseBlocks)):
            layers.append(layer_cls(width, growthRate, use_dropout))
            width += growthRate
        return nn.Sequential(*layers)

    def forward(self, x):
        # Stem conv -> ReLU -> pool, then the three dense stages.
        out = F.max_pool2d(F.relu(self.conv1(x), inplace=True), 2, ceil_mode=True)
        out = self.trans1(self.dense1(out))
        out = self.trans2(self.dense2(out))
        return self.dense3(out)
|