|
- """
- This script defines the structure of FullNet
-
- Author: Hui Qu
- """
-
- import mindspore
- import mindspore.nn as nn
- from mindspore.ops import operations as P
- from mindspore import ops
- import math
- from mindspore.common import initializer as init
- from models.var_init import default_recurisive_init, KaimingNormal
-
-
class ConvLayer(nn.SequentialCell):
    """Conv2d -> LeakyReLU -> BatchNorm2d, packaged as one sequential cell."""

    def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1, pad_mode="same"):
        super(ConvLayer, self).__init__()
        stages = (
            nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride,
                      padding=int(padding), has_bias=False, dilation=int(dilation), pad_mode=pad_mode),
            nn.LeakyReLU(),
            nn.BatchNorm2d(out_channels),
        )
        for stage in stages:
            self.append(stage)
-
-
class ResizeBilinear(nn.Cell):
    """Bilinear resize cell wrapping the P.ResizeBilinear primitive."""

    def __init__(self, scale_factor=4):
        """Initialize ResizeBilinear.

        Args:
            scale_factor: default spatial upscale factor, used when no explicit
                `size` is passed to construct().
        """
        super(ResizeBilinear, self).__init__()
        self.scale_factor = scale_factor

    def construct(self, x, size=None, align_corners=False):
        # NOTE(review): nn.layer.basic.bilinear is a private MindSpore helper that
        # resolves the output (H, W) from either `size` or `scale_factor`; it is
        # not public API and may move between MindSpore releases — confirm it
        # exists in the pinned version.
        shape = nn.layer.basic.bilinear(x.shape, size, self.scale_factor, align_corners)
        # The primitive is re-created on every forward call (works in PyNative
        # mode, but is wasteful; graph mode may also reject it — TODO confirm).
        resize_bilinear = P.ResizeBilinear(shape, align_corners)
        return resize_bilinear(x)
-
-
- # --- different types of layers --- #
class BasicLayer(nn.SequentialCell):
    """One dense layer: a 3x3 (possibly dilated) conv whose output is
    concatenated channel-wise onto its input."""

    def __init__(self, in_channels, growth_rate, drop_rate, dilation=1):
        super(BasicLayer, self).__init__()
        self.conv = ConvLayer(in_channels, growth_rate, kernel_size=3, stride=1,
                              padding=0, dilation=dilation)
        # Kept for interface parity with BottleneckLayer; dropout is currently
        # disabled in this implementation.
        self.drop_rate = drop_rate
        self.cat = ops.Concat(axis=1)

    def construct(self, x):
        features = self.conv(x)
        return self.cat((x, features))
-
-
class BottleneckLayer(nn.SequentialCell):
    """Bottleneck dense layer: 1x1 conv expanding to 4*growth_rate channels,
    then a 3x3 dilated conv back down to growth_rate; the result is
    concatenated channel-wise onto the input."""

    def __init__(self, in_channels, growth_rate, drop_rate, dilation=1):
        super(BottleneckLayer, self).__init__()
        bottleneck_channels = growth_rate * 4
        self.conv1 = ConvLayer(in_channels, bottleneck_channels, kernel_size=1, padding=0)
        self.conv2 = ConvLayer(bottleneck_channels, growth_rate, kernel_size=3,
                               padding=dilation, dilation=dilation, pad_mode="pad")
        # Kept for interface parity; dropout is currently disabled.
        self.drop_rate = drop_rate
        self.cat = ops.Concat(axis=1)

    def construct(self, x):
        features = self.conv1(x)
        features = self.conv2(features)
        return self.cat((x, features))
-
-
- # --- dense block structure --- #
class DenseBlock(nn.SequentialCell):
    """A stack of dense layers; each layer sees all previous outputs, so the
    channel count grows by growth_rate per layer."""

    def __init__(self, in_channels, growth_rate, drop_rate, layer_type, dilations):
        super(DenseBlock, self).__init__()
        for idx, dilation in enumerate(dilations):
            self.append(layer_type(in_channels + idx * growth_rate,
                                   growth_rate, drop_rate, dilation))
-
-
def choose_hybrid_dilations(n_layers, dilation_schedule, is_hybrid):
    """Build the per-block, per-layer dilation table.

    Args:
        n_layers: number of layers in each dense block (hybrid patterns exist
            only for 4 and 6).
        dilation_schedule: iterable of base dilation rates, one per block.
        is_hybrid: truthy to use the hand-tuned hybrid-dilation patterns;
            otherwise every layer of a block uses the block's base rate.

    Returns:
        np.ndarray of shape (len(dilation_schedule), n_layers), dtype int32.

    Raises:
        KeyError: in hybrid mode, when (dilation, n_layers) has no predefined
            pattern in the lookup table.
    """
    import numpy as np

    # Hand-tuned hybrid dilation patterns, keyed by (base dilation, n_layers).
    hd_dict = {(1, 4): [1, 1, 1, 1],
               (2, 4): [1, 2, 3, 2],
               (4, 4): [1, 2, 5, 9],
               (8, 4): [3, 7, 10, 13],
               (16, 4): [13, 15, 17, 19],
               (1, 6): [1, 1, 1, 1, 1, 1],
               (2, 6): [1, 2, 3, 1, 2, 3],
               (4, 6): [1, 2, 3, 5, 6, 7],
               (8, 6): [2, 5, 7, 9, 11, 14],
               (16, 6): [10, 13, 16, 17, 19, 21]}

    dilation_list = np.zeros((len(dilation_schedule), n_layers), dtype=np.int32)
    for i, dilation in enumerate(dilation_schedule):
        if is_hybrid:
            dilation_list[i] = hd_dict[(dilation, n_layers)]
        else:
            # Scalar assignment broadcasts across the whole row; no need to
            # build a temporary Python list.
            dilation_list[i] = dilation

    return dilation_list
-
-
class MultiTaskFullNet(nn.Cell):
    """Dense dilated backbone with two heads: a per-pixel segmentation map and
    a 3-way image-level classification.

    Args:
        color_channels: channel count of the input image.
        output_channels: channel count of the segmentation output.
        n_layers: number of layers in each dense block.
        growth_rate: channels each dense layer adds onto its input.
        compress_ratio: channel compression factor of each transition conv.
        drop_rate: dropout rate forwarded to the layers (currently unused there).
        dilations: base dilation per dense block; its length sets the block count.
        is_hybrid: if True, use the hand-tuned hybrid dilation patterns.
        layer_type: 'basic' selects BasicLayer; anything else BottleneckLayer.
    """

    def __init__(self, color_channels, output_channels=3, n_layers=6, growth_rate=24, compress_ratio=0.5,
                 drop_rate=0.1, dilations=(1, 2, 4, 8, 16, 4, 1), is_hybrid=True, layer_type='basic'):
        super(MultiTaskFullNet, self).__init__()
        # Resolve the layer class from its string name.
        if layer_type == 'basic':
            layer_type = BasicLayer
        else:
            layer_type = BottleneckLayer

        # 1st conv before any dense block
        in_channels = 24
        self.conv1 = ConvLayer(color_channels, in_channels, kernel_size=3, padding=1, pad_mode="pad")
        # NOTE(review): softmax over axis 0 normalizes across the batch, not
        # across the 3 classes; for (N, classes) logits axis=1 would be the
        # usual choice — confirm this is intended.
        self.softmax = nn.Softmax(axis=0)

        self.blocks = nn.SequentialCell()
        n_blocks = len(dilations)

        dilation_list = choose_hybrid_dilations(n_layers, dilations, is_hybrid)

        # Each dense block is followed by a 1x1 transition conv that compresses
        # the channel count by compress_ratio.
        for i in range(n_blocks):  # no trans in last block
            block = DenseBlock(in_channels, growth_rate, drop_rate, layer_type, dilation_list[i])
            self.blocks.append(block)
            num_trans_in = int(in_channels + n_layers * growth_rate)
            num_trans_out = int(math.floor(num_trans_in * compress_ratio))
            trans = ConvLayer(num_trans_in, num_trans_out, kernel_size=1, padding=0)
            self.blocks.append(trans)
            in_channels = num_trans_out

        # final conv
        self.conv2 = nn.Conv2d(in_channels, output_channels, kernel_size=3, stride=1,
                               padding=1, has_bias=False, pad_mode="pad")

        # Classification branch: two stride-2 convs plus two max-pools.
        # NOTE(review): MindSpore's nn.MaxPool2d defaults to stride=1 (unlike
        # PyTorch), so these pools barely downsample; FCN_pooling below passes
        # stride=2 explicitly — confirm whether stride=2 was intended here too.
        self.classBranch = nn.SequentialCell(
            nn.Conv2d(in_channels, in_channels, kernel_size=3, stride=2, padding=1, has_bias=False, pad_mode="pad"),
            nn.LeakyReLU(),
            nn.BatchNorm2d(in_channels),
            nn.MaxPool2d(kernel_size=2),
            nn.Conv2d(in_channels, in_channels, kernel_size=3, stride=2, padding=1, has_bias=False, pad_mode="pad"),
            nn.LeakyReLU(),
            nn.BatchNorm2d(in_channels),
            nn.MaxPool2d(kernel_size=2),
        )
        self.custom_init_weight()

        # NOTE(review): created AFTER custom_init_weight() ran, so this Dense
        # keeps the framework-default initializer rather than Normal(0.01).
        # The 15*15 factor hard-codes the expected classBranch output spatial
        # size, i.e. a fixed input resolution — confirm against the caller.
        self.fc = nn.Dense(15 * 15 * in_channels, 3)

    def construct(self, x):
        """Run the shared backbone once; return (segmentation map, class probs)."""
        out = self.conv1(x)
        out = self.blocks(out)
        segout = self.conv2(out)
        classout = self.classBranch(out)
        # Flatten the classification features to (batch, -1) for the Dense head.
        classout = classout.view(x.shape[0], -1)
        classout = self.fc(classout)
        classout = self.softmax(classout)
        return segout, classout

    def custom_init_weight(self):
        """
        Init the weight of Conv2d and Dense in the net.
        """
        for _, cell in self.cells_and_names():
            if isinstance(cell, nn.Conv2d):
                # NOTE(review): 'nonDenseity' looks like a corrupted
                # 'nonlinearity' keyword (a global Linear->Dense rename
                # artifact?); confirm it matches KaimingNormal's actual
                # signature in models/var_init.py.
                cell.weight.set_data(init.initializer(
                    KaimingNormal(a=math.sqrt(5), mode='fan_out', nonDenseity='relu'),
                    cell.weight.shape, cell.weight.dtype))
                if cell.bias is not None:
                    cell.bias.set_data(init.initializer(
                        'zeros', cell.bias.shape, cell.bias.dtype))
            elif isinstance(cell, nn.Dense):
                cell.weight.set_data(init.initializer(
                    init.Normal(0.01), cell.weight.shape, cell.weight.dtype))
                if cell.bias is not None:
                    cell.bias.set_data(init.initializer(
                        'zeros', cell.bias.shape, cell.bias.dtype))
-
-
class EndoFullNet(nn.Cell):
    """FullNet variant for 8-class segmentation; weights are initialized in
    __init__ via custom_init_weight().

    Args:
        color_channels: channel count of the input image.
        output_channels: channel count of the segmentation output.
        n_layers: number of layers in each dense block.
        growth_rate: channels each dense layer adds onto its input.
        compress_ratio: channel compression factor of each transition conv.
        drop_rate: dropout rate forwarded to the layers (currently unused there).
        dilations: base dilation per dense block; its length sets the block count.
        is_hybrid: if True, use the hand-tuned hybrid dilation patterns.
        layer_type: 'basic' selects BasicLayer; anything else BottleneckLayer.
    """

    def __init__(self, color_channels, output_channels=8, n_layers=6, growth_rate=24, compress_ratio=0.5,
                 drop_rate=0.1, dilations=(1, 2, 4, 8, 16, 4, 1), is_hybrid=True, layer_type='basic'):
        super(EndoFullNet, self).__init__()
        # Resolve the layer class from its string name.
        if layer_type == 'basic':
            layer_type = BasicLayer
        else:
            layer_type = BottleneckLayer

        # 1st conv before any dense block
        in_channels = 24
        self.conv1 = ConvLayer(color_channels, in_channels, kernel_size=3, padding=1, pad_mode="pad")

        self.blocks = nn.SequentialCell()
        n_blocks = len(dilations)

        dilation_list = choose_hybrid_dilations(n_layers, dilations, is_hybrid)

        # Each dense block is followed by a 1x1 transition conv that compresses
        # the channel count by compress_ratio.
        for i in range(n_blocks):  # no trans in last block
            block = DenseBlock(in_channels, growth_rate, drop_rate, layer_type, dilation_list[i])
            self.blocks.append(block)
            num_trans_in = int(in_channels + n_layers * growth_rate)
            num_trans_out = int(math.floor(num_trans_in * compress_ratio))
            trans = ConvLayer(num_trans_in, num_trans_out, kernel_size=1, padding=0)
            self.blocks.append(trans)
            in_channels = num_trans_out

        # final conv
        self.conv2 = nn.Conv2d(in_channels, output_channels, kernel_size=3, stride=1,
                               padding=1, has_bias=False, pad_mode="pad")

        self.custom_init_weight()

    def construct(self, x):
        """Return the raw (pre-activation) segmentation logits for `x`."""
        # print(x.shape)
        out = self.conv1(x)
        out = self.blocks(out)
        segout = self.conv2(out)
        return segout

    def custom_init_weight(self):
        """
        Init the weight of Conv2d and Dense in the net.
        """
        for _, cell in self.cells_and_names():
            if isinstance(cell, nn.Conv2d):
                # NOTE(review): 'nonDenseity' looks like a corrupted
                # 'nonlinearity' keyword (a global Linear->Dense rename
                # artifact?); confirm it matches KaimingNormal's actual
                # signature in models/var_init.py.
                cell.weight.set_data(init.initializer(
                    KaimingNormal(a=math.sqrt(5), mode='fan_out', nonDenseity='relu'),
                    cell.weight.shape, cell.weight.dtype))
                if cell.bias is not None:
                    cell.bias.set_data(init.initializer(
                        'zeros', cell.bias.shape, cell.bias.dtype))
            elif isinstance(cell, nn.Dense):
                cell.weight.set_data(init.initializer(
                    init.Normal(0.01), cell.weight.shape, cell.weight.dtype))
                if cell.bias is not None:
                    cell.bias.set_data(init.initializer(
                        'zeros', cell.bias.shape, cell.bias.dtype))
-
-
class FullNet(nn.Cell):
    """Plain FullNet: dense dilated blocks with transition convs and a final
    3x3 conv producing the output map.

    Args:
        color_channels: channel count of the input image.
        output_channels: channel count of the output map.
        n_layers: number of layers in each dense block.
        growth_rate: channels each dense layer adds onto its input.
        compress_ratio: channel compression factor of each transition conv.
        drop_rate: dropout rate forwarded to the layers (currently unused there).
        dilations: base dilation per dense block; its length sets the block count.
        is_hybrid: if True, use the hand-tuned hybrid dilation patterns.
        layer_type: 'basic' selects BasicLayer; anything else BottleneckLayer.
    """

    def __init__(self, color_channels, output_channels=3, n_layers=6, growth_rate=24, compress_ratio=0.5,
                 drop_rate=0.1, dilations=(1, 2, 4, 8, 16, 4, 1), is_hybrid=True, layer_type='basic'):
        super(FullNet, self).__init__()
        # Resolve the layer class from its string name.
        if layer_type == 'basic':
            layer_type = BasicLayer
        else:
            layer_type = BottleneckLayer

        # 1st conv before any dense block
        in_channels = 24
        self.conv1 = ConvLayer(color_channels, in_channels, kernel_size=3, padding=1, pad_mode="pad")

        self.blocks = nn.SequentialCell()
        n_blocks = len(dilations)

        dilation_list = choose_hybrid_dilations(n_layers, dilations, is_hybrid)

        # Each dense block is followed by a 1x1 transition conv that compresses
        # the channel count by compress_ratio.
        for i in range(n_blocks):  # no trans in last block
            block = DenseBlock(in_channels, growth_rate, drop_rate, layer_type, dilation_list[i])
            self.blocks.append(block)
            num_trans_in = int(in_channels + n_layers * growth_rate)
            num_trans_out = int(math.floor(num_trans_in * compress_ratio))
            trans = ConvLayer(num_trans_in, num_trans_out, kernel_size=1, padding=0)
            self.blocks.append(trans)
            in_channels = num_trans_out

        # final conv
        self.conv2 = nn.Conv2d(in_channels, output_channels, kernel_size=3, stride=1,
                               padding=1, has_bias=False, pad_mode="pad")
        # NOTE(review): unlike MultiTaskFullNet/EndoFullNet/FCN_pooling, this
        # class never calls custom_init_weight() in __init__ — callers must
        # invoke it explicitly if the Kaiming/Normal(0.01) scheme is wanted.

    def construct(self, x):
        """Return the raw (pre-activation) output map for `x`."""
        # print(x.shape)
        out = self.conv1(x)
        out = self.blocks(out)
        out = self.conv2(out)
        return out

    def custom_init_weight(self):
        """
        Init the weight of Conv2d and Dense in the net.
        """
        for _, cell in self.cells_and_names():
            if isinstance(cell, nn.Conv2d):
                # NOTE(review): 'nonDenseity' looks like a corrupted
                # 'nonlinearity' keyword (a global Linear->Dense rename
                # artifact?); confirm it matches KaimingNormal's actual
                # signature in models/var_init.py.
                cell.weight.set_data(init.initializer(
                    KaimingNormal(a=math.sqrt(5), mode='fan_out', nonDenseity='relu'),
                    cell.weight.shape, cell.weight.dtype))
                if cell.bias is not None:
                    cell.bias.set_data(init.initializer(
                        'zeros', cell.bias.shape, cell.bias.dtype))
            elif isinstance(cell, nn.Dense):
                cell.weight.set_data(init.initializer(
                    init.Normal(0.01), cell.weight.shape, cell.weight.dtype))
                if cell.bias is not None:
                    cell.bias.set_data(init.initializer(
                        'zeros', cell.bias.shape, cell.bias.dtype))
-
-
class FCN_pooling(nn.Cell):
    """Same structure as FullNet, except that max-pooling follows blocks 1-4
    and bilinear upsampling follows blocks 5 and 6.

    Args:
        color_channels: channel count of the input image.
        output_channels: channel count of the output map.
        n_layers: number of layers in each dense block.
        growth_rate: channels each dense layer adds onto its input.
        compress_ratio: channel compression factor of each transition conv.
        drop_rate: dropout rate forwarded to the layers (currently unused there).
        dilations: base dilation per dense block; its length sets the block count.
        hybrid: truthy to use the hand-tuned hybrid dilation patterns.
        layer_type: 'basic' selects BasicLayer; anything else BottleneckLayer.
    """

    def __init__(self, color_channels, output_channels=2, n_layers=6, growth_rate=24, compress_ratio=0.5,
                 drop_rate=0.1, dilations=(1, 2, 4, 8, 16, 4, 1), hybrid=1, layer_type='basic'):
        super(FCN_pooling, self).__init__()
        # Resolve the layer class from its string name.
        if layer_type == 'basic':
            layer_type = BasicLayer
        else:
            layer_type = BottleneckLayer

        # 1st conv before any dense block
        in_channels = 24
        self.conv1 = ConvLayer(color_channels, in_channels, kernel_size=3, padding=1, pad_mode="pad")

        self.blocks = nn.SequentialCell()
        n_blocks = len(dilations)

        dilation_list = choose_hybrid_dilations(n_layers, dilations, hybrid)

        # Fix: iterate over the actual block count instead of a hard-coded
        # range(7); the old form raised IndexError for any `dilations` shorter
        # than 7 entries (identical behavior for the default schedule).
        for i in range(n_blocks):
            block = DenseBlock(in_channels, growth_rate, drop_rate, layer_type, dilation_list[i])
            self.blocks.append(block)
            num_trans_in = int(in_channels + n_layers * growth_rate)
            num_trans_out = int(math.floor(num_trans_in * compress_ratio))
            trans = ConvLayer(num_trans_in, num_trans_out, kernel_size=1, padding=0)
            self.blocks.append(trans)
            if i < 4:
                # 2x spatial downsample after blocks 1-4.
                self.blocks.append(nn.MaxPool2d(kernel_size=2, stride=2))
            elif i < 6:
                # 4x spatial upsample after blocks 5 and 6.
                self.blocks.append(ResizeBilinear(scale_factor=4))
            in_channels = num_trans_out

        # final conv
        self.conv2 = nn.Conv2d(in_channels, output_channels, kernel_size=3, stride=1,
                               padding=1, has_bias=False, pad_mode="pad")
        self.custom_init_weight()

    def construct(self, x):
        """Return the raw (pre-activation) output map for `x`."""
        out = self.conv1(x)
        out = self.blocks(out)
        out = self.conv2(out)
        return out

    def custom_init_weight(self):
        """
        Init the weight of Conv2d and Dense in the net.
        """
        for _, cell in self.cells_and_names():
            if isinstance(cell, nn.Conv2d):
                # NOTE(review): 'nonDenseity' looks like a corrupted
                # 'nonlinearity' keyword (a global Linear->Dense rename
                # artifact?); confirm it matches KaimingNormal's actual
                # signature in models/var_init.py.
                cell.weight.set_data(init.initializer(
                    KaimingNormal(a=math.sqrt(5), mode='fan_out', nonDenseity='relu'),
                    cell.weight.shape, cell.weight.dtype))
                if cell.bias is not None:
                    cell.bias.set_data(init.initializer(
                        'zeros', cell.bias.shape, cell.bias.dtype))
            elif isinstance(cell, nn.Dense):
                cell.weight.set_data(init.initializer(
                    init.Normal(0.01), cell.weight.shape, cell.weight.dtype))
                if cell.bias is not None:
                    cell.bias.set_data(init.initializer(
                        'zeros', cell.bias.shape, cell.bias.dtype))
-
-
if __name__ == '__main__':
    import numpy as np
    from mindspore import Tensor

    # Smoke test: push one small random batch through FullNet and report the
    # output shape (the unused `a = Tensor(3)` dead statement was removed).
    net = FullNet(color_channels=3)
    # net = FCN_pooling(color_channels=3)
    x = np.random.randn(2, 3, 64, 64).astype(np.float32)
    x = Tensor(x)
    out = net(x)
    print(out.shape)
    # net = FCN_pooling(color_channels=3)
    # print(net)
|