#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2023/2/12 2:24 PM
# @File : model
# ----------------------------------------------
# >>> Author : kevin
# >>> QQ     : 565479588
# >>> Mail   : lovecode@gmail.com
# >>> Github : https://github.com/lovecode100
# >>> Blog   : https://www.cnblogs.com/lovecode
# ----------------------------------------------
import torch
import torch.nn as nn
import torch.nn.functional as F
-
class Block(nn.Module):
    """Depthwise-separable convolution unit.

    A 3x3 depthwise convolution (one filter per input channel) followed by
    a 1x1 pointwise convolution that mixes channels, each wrapped in
    BatchNorm + ReLU.  Maps (N, in_planes, H, W) ->
    (N, out_planes, ceil(H/stride), ceil(W/stride)).
    """

    def __init__(self, in_planes, out_planes, stride=1):
        super(Block, self).__init__()
        # Depthwise: groups == in_planes gives each channel its own 3x3 filter.
        self.conv1 = nn.Conv2d(in_planes, in_planes, kernel_size=3,
                               stride=stride, padding=1, groups=in_planes,
                               bias=False)
        self.bn1 = nn.BatchNorm2d(in_planes)
        # Pointwise: 1x1 conv sets the output channel count.
        self.conv2 = nn.Conv2d(in_planes, out_planes, kernel_size=1,
                               stride=1, padding=0, bias=False)
        self.bn2 = nn.BatchNorm2d(out_planes)
        self.relu_1 = torch.nn.ReLU()
        self.relu_2 = torch.nn.ReLU()

    def forward(self, x):
        """Apply depthwise conv-BN-ReLU, then pointwise conv-BN-ReLU."""
        y = self.conv1(x)
        y = self.relu_1(self.bn1(y))
        y = self.conv2(y)
        y = self.relu_2(self.bn2(y))
        return y


class MobileNet(nn.Module):
    """MobileNet v1 for small (CIFAR-style) images.

    A stride-1 stem conv (3 -> 32 channels), a stack of depthwise-separable
    ``Block``s described by ``cfg``, global average pooling, and a linear
    classifier.
    """

    # An int entry means (out_planes, stride=1); a tuple is (out_planes, stride).
    cfg = [64, (128, 2), 128, (256, 2), 256, (512, 2),
           512, 512, 512, 512, 512, (1024, 2), 1024]

    def __init__(self, num_classes=10):
        """
        Args:
            num_classes: width of the final classification layer (default 10).
        """
        super(MobileNet, self).__init__()
        # Stem only changes the channel count; stride 1 keeps spatial size.
        self.conv1 = nn.Conv2d(3, 32, kernel_size=3,
                               stride=1, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(32)
        self.layers = self._make_layers(in_planes=32)
        self.linear = nn.Linear(1024, num_classes)
        self.relu = torch.nn.ReLU()

    def _make_layers(self, in_planes):
        """Build the depthwise-separable stack described by ``cfg``."""
        layers = []
        for x in self.cfg:
            out_planes = x if isinstance(x, int) else x[0]
            stride = 1 if isinstance(x, int) else x[1]
            layers.append(Block(in_planes, out_planes, stride))
            in_planes = out_planes
        return nn.Sequential(*layers)

    def forward(self, x):
        """Compute class logits.

        Args:
            x: float tensor of shape (N, 3, H, W).  H and W must be large
               enough to survive the four stride-2 stages (>= 16).

        Returns:
            Logits of shape (N, num_classes).
        """
        out = self.relu(self.bn1(self.conv1(x)))  # (N, 32, H, W)
        out = self.layers(out)                    # (N, 1024, H/16, W/16)
        # Global average pooling.  Numerically identical to the original
        # fixed F.avg_pool2d(out, 2) for 32x32 inputs (the feature map is
        # exactly 2x2 there), but also works for other input resolutions
        # instead of producing a size mismatch at the linear layer.
        out = F.adaptive_avg_pool2d(out, 1)
        out = out.view(out.size(0), -1)
        out = self.linear(out)
        return out