import torch
import torch.nn as nn
import torch.nn.functional as F

class T_Net_points(nn.Module):
    """Input transform network (T-Net): regresses a 3x3 alignment matrix for raw xyz points."""

    def __init__(self):
        super(T_Net_points, self).__init__()

        # Shared per-point MLP 3 -> 64 -> 128 -> 1024, implemented as 1x1 convolutions.
        self.convs = nn.Sequential(nn.Conv1d(3, 64, 1),
                                   nn.BatchNorm1d(64),
                                   nn.ReLU(),
                                   nn.Conv1d(64, 128, 1),
                                   nn.BatchNorm1d(128),
                                   nn.ReLU(),
                                   nn.Conv1d(128, 1024, 1),
                                   nn.BatchNorm1d(1024),
                                   nn.ReLU())

        # Fully connected head that regresses the 9 entries of the 3x3 matrix.
        self.fcs = nn.Sequential(nn.Linear(1024, 512),
                                 nn.BatchNorm1d(512),
                                 nn.ReLU(),
                                 nn.Linear(512, 256),
                                 nn.BatchNorm1d(256),
                                 nn.ReLU(),
                                 nn.Linear(256, 9))

    def forward(self, x):
        bs = x.size(0)
        x = self.convs(x)
        x = torch.max(x, 2, keepdim=True)[0]  # symmetric max pooling over points
        x = x.view(-1, 1024)
        x = self.fcs(x)
        # Bias the output towards the identity so the predicted transform starts
        # near identity; building it on x's device keeps the module device-agnostic.
        iden = torch.eye(3, dtype=x.dtype, device=x.device).view(1, 9).repeat(bs, 1)
        x = x + iden
        out = x.view(-1, 3, 3)

        return out


class T_Net_feature(nn.Module):
    """Feature transform network: regresses a k x k alignment matrix for per-point features."""

    def __init__(self, k=64):
        super(T_Net_feature, self).__init__()

        # Shared per-point MLP k -> 64 -> 128 -> 1024, implemented as 1x1 convolutions.
        self.convs = nn.Sequential(nn.Conv1d(k, 64, 1),
                                   nn.BatchNorm1d(64),
                                   nn.ReLU(),
                                   nn.Conv1d(64, 128, 1),
                                   nn.BatchNorm1d(128),
                                   nn.ReLU(),
                                   nn.Conv1d(128, 1024, 1),
                                   nn.BatchNorm1d(1024),
                                   nn.ReLU())

        # Fully connected head that regresses the k*k entries of the transform.
        self.fcs = nn.Sequential(nn.Linear(1024, 512),
                                 nn.BatchNorm1d(512),
                                 nn.ReLU(),
                                 nn.Linear(512, 256),
                                 nn.BatchNorm1d(256),
                                 nn.ReLU(),
                                 nn.Linear(256, k * k))

        self.k = k

    def forward(self, x):
        bs = x.size(0)
        x = self.convs(x)
        x = torch.max(x, 2, keepdim=True)[0]  # symmetric max pooling over points
        x = x.view(-1, 1024)

        x = self.fcs(x)

        # Bias the output towards the k x k identity, built on x's device and dtype.
        iden = torch.eye(self.k, dtype=x.dtype, device=x.device).view(1, self.k * self.k).repeat(bs, 1)
        x = x + iden
        out = x.view(-1, self.k, self.k)
        return out

class PointNetfeat(nn.Module):
    """PointNet backbone: returns the 1024-d global feature (global_feat=True) or,
    for segmentation, per-point features concatenated with the global feature."""

    def __init__(self, global_feat=True, feature_transform=True):
        super(PointNetfeat, self).__init__()
        self.stn = T_Net_points()
        self.conv1 = nn.Sequential(nn.Conv1d(3, 64, 1), nn.BatchNorm1d(64), nn.ReLU())
        self.conv2 = nn.Sequential(nn.Conv1d(64, 128, 1), nn.BatchNorm1d(128), nn.ReLU())
        self.conv3 = nn.Sequential(nn.Conv1d(128, 1024, 1), nn.BatchNorm1d(1024))

        self.global_feat = global_feat
        self.feature_transform = feature_transform
        if self.feature_transform:
            self.fstn = T_Net_feature(k=64)

    def forward(self, x):
        n_pts = x.size(2)
        # Align the raw points with the predicted 3x3 transform.
        trans = self.stn(x)
        x = x.transpose(2, 1)
        x = torch.bmm(x, trans)
        x = x.transpose(2, 1)
        x = self.conv1(x)

        if self.feature_transform:
            # Align the 64-d point features with the predicted 64x64 transform.
            trans_feat = self.fstn(x)
            x = x.transpose(2, 1)
            x = torch.bmm(x, trans_feat)
            x = x.transpose(2, 1)
        else:
            trans_feat = None

        pointfeat = x
        x = self.conv2(x)
        x = self.conv3(x)
        x = torch.max(x, 2, keepdim=True)[0]  # global max pooling over points
        x = x.view(-1, 1024)
        if self.global_feat:
            return x, trans, trans_feat
        else:
            # Broadcast the global feature to every point: 64 + 1024 = 1088 channels.
            x = x.view(-1, 1024, 1).repeat(1, 1, n_pts)
            return torch.cat([x, pointfeat], 1), trans, trans_feat


class PointNetSeg(nn.Module):
    """Segmentation head: maps the 1088-d per-point features to k per-point class scores."""

    def __init__(self, k=2, feature_transform=True):
        super(PointNetSeg, self).__init__()
        self.k = k
        self.feature_transform = feature_transform
        self.feat = PointNetfeat(global_feat=False, feature_transform=feature_transform)

        # Per-point MLP 1088 -> 512 -> 256 -> 128 -> k; BatchNorm already lives
        # inside each Sequential, so no standalone BatchNorm layers are needed.
        self.conv1 = nn.Sequential(nn.Conv1d(1088, 512, 1), nn.BatchNorm1d(512), nn.ReLU())
        self.conv2 = nn.Sequential(nn.Conv1d(512, 256, 1), nn.BatchNorm1d(256), nn.ReLU())
        self.conv3 = nn.Sequential(nn.Conv1d(256, 128, 1), nn.BatchNorm1d(128), nn.ReLU())
        self.conv4 = nn.Conv1d(128, self.k, 1)

    def forward(self, x):
        bs = x.size(0)
        n_pts = x.size(2)
        x, trans, trans_feat = self.feat(x)
        x = self.conv1(x)
        x = self.conv2(x)
        x = self.conv3(x)
        x = self.conv4(x)
        x = x.transpose(2, 1).contiguous()
        # Per-point log-probabilities, shaped (batch, n_pts, k).
        x = F.log_softmax(x.view(-1, self.k), dim=-1)
        x = x.view(bs, n_pts, self.k)
        return x, trans, trans_feat

def feature_transform_regularizer(trans):
    """Orthogonality penalty: mean Frobenius norm of T @ T^T - I over the batch.
    Keeps the predicted feature transforms close to orthogonal matrices."""
    d = trans.size(1)
    I = torch.eye(d, device=trans.device)[None, :, :]
    loss = torch.mean(torch.norm(torch.bmm(trans, trans.transpose(2, 1)) - I, dim=(1, 2)))
    return loss
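

# Minimal smoke test (a sketch, not part of the original file): runs PointNetSeg on a
# random batch to verify tensor shapes, and shows in a comment how the orthogonality
# penalty is typically combined with the segmentation loss. The batch size, point
# count, class count, and the 0.001 weight below are illustrative assumptions.
if __name__ == "__main__":
    model = PointNetSeg(k=4, feature_transform=True)
    model.eval()  # use running BatchNorm statistics for this single small batch

    points = torch.randn(2, 3, 1024)  # (batch, channels=xyz, n_pts)
    with torch.no_grad():
        log_probs, trans, trans_feat = model(points)

    print(log_probs.shape)   # torch.Size([2, 1024, 4])
    print(trans.shape)       # torch.Size([2, 3, 3])
    print(trans_feat.shape)  # torch.Size([2, 64, 64])

    # During training one would typically optimize:
    # loss = F.nll_loss(log_probs.view(-1, 4), target.view(-1)) \
    #        + 0.001 * feature_transform_regularizer(trans_feat)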