""" Author: Andreas Rössler """ import torchvision import torch import torch.nn as nn import torch.nn.functional as F import torch.utils.model_zoo as model_zoo from metrics.registry import BACKBONE pretrained_settings = { 'xception': { 'imagenet': { 'url': 'http://data.lip6.fr/cadene/pretrainedmodels/xception-b5690688.pth', 'input_space': 'RGB', 'input_size': [3, 299, 299], 'input_range': [0, 1], 'mean': [0.5, 0.5, 0.5], 'std': [0.5, 0.5, 0.5], 'num_classes': 1000, 'scale': 0.8975 # The resize parameter of the validation transform should be 333, and make sure to center crop at 299x299 } } } class SeparableConv2d(nn.Module): def __init__(self, in_channels, out_channels, kernel_size=1, stride=1, padding=0, dilation=1, bias=False): super(SeparableConv2d, self).__init__() self.conv1 = nn.Conv2d(in_channels, in_channels, kernel_size, stride, padding, dilation, groups=in_channels, bias=bias) self.pointwise = nn.Conv2d( in_channels, out_channels, 1, 1, 0, 1, 1, bias=bias) def forward(self, x): x = self.conv1(x) x = self.pointwise(x) return x class RegressionMap(nn.Module): def __init__(self, c_in): super(RegressionMap, self).__init__() self.c = SeparableConv2d(c_in, 1, 3, stride=1, padding=1, bias=False) self.s = nn.Sigmoid() def forward(self, x): mask = self.c(x) mask = self.s(mask) return mask class Block(nn.Module): def __init__(self, in_filters, out_filters, reps, strides=1, start_with_relu=True, grow_first=True): super(Block, self).__init__() if out_filters != in_filters or strides != 1: self.skip = nn.Conv2d(in_filters, out_filters, 1, stride=strides, bias=False) self.skipbn = nn.BatchNorm2d(out_filters) else: self.skip = None self.relu = nn.ReLU(inplace=False) rep = [] filters = in_filters if grow_first: # whether the number of filters grows first rep.append(self.relu) rep.append(SeparableConv2d(in_filters, out_filters, 3, stride=1, padding=1, bias=False)) rep.append(nn.BatchNorm2d(out_filters)) filters = out_filters for i in range(reps - 1): rep.append(self.relu) rep.append(SeparableConv2d(filters, filters, 3, stride=1, padding=1, bias=False)) rep.append(nn.BatchNorm2d(filters)) if not grow_first: rep.append(self.relu) rep.append(SeparableConv2d(in_filters, out_filters, 3, stride=1, padding=1, bias=False)) rep.append(nn.BatchNorm2d(out_filters)) if not start_with_relu: rep = rep[1:] else: rep[0] = nn.ReLU(inplace=False) if strides != 1: rep.append(nn.MaxPool2d(3, strides, 1)) self.rep = nn.Sequential(*rep) def forward(self, inp): x = self.rep(inp) if self.skip is not None: skip = self.skip(inp) skip = self.skipbn(skip) else: skip = inp x += skip return x @BACKBONE.register_module(module_name="xception_sladd") class Xception_SLADD(nn.Module): """ Xception optimized for the ImageNet dataset, as specified in https://arxiv.org/pdf/1610.02357.pdf """ def __init__(self, config): """ Constructor Args: num_classes: number of classes """ super(Xception_SLADD, self).__init__() num_classes = config["num_classes"] inc = config["inc"] dropout = config["dropout"] # Entry flow self.conv1 = nn.Conv2d(inc, 32, 3, 2, 0, bias=False) self.bn1 = nn.BatchNorm2d(32) self.relu = nn.ReLU(inplace=False) self.conv2 = nn.Conv2d(32, 64, 3, bias=False) self.bn2 = nn.BatchNorm2d(64) # do relu here self.block1 = Block( 64, 128, 2, 2, start_with_relu=False, grow_first=True) self.block2 = Block( 128, 256, 2, 2, start_with_relu=True, grow_first=True) self.block3 = Block( 256, 728, 2, 2, start_with_relu=True, grow_first=True) # middle flow self.block4 = Block( 728, 728, 3, 1, start_with_relu=True, grow_first=True) 
        self.block5 = Block(728, 728, 3, 1, start_with_relu=True, grow_first=True)
        self.block6 = Block(728, 728, 3, 1, start_with_relu=True, grow_first=True)
        self.block7 = Block(728, 728, 3, 1, start_with_relu=True, grow_first=True)
        self.block8 = Block(728, 728, 3, 1, start_with_relu=True, grow_first=True)
        self.block9 = Block(728, 728, 3, 1, start_with_relu=True, grow_first=True)
        self.block10 = Block(728, 728, 3, 1, start_with_relu=True, grow_first=True)
        self.block11 = Block(728, 728, 3, 1, start_with_relu=True, grow_first=True)

        # Exit flow
        self.block12 = Block(728, 1024, 2, 2, start_with_relu=True, grow_first=False)

        self.conv3 = SeparableConv2d(1024, 1536, 3, 1, 1)
        self.bn3 = nn.BatchNorm2d(1536)
        # do relu here

        self.conv4 = SeparableConv2d(1536, 2048, 3, 1, 1)
        self.bn4 = nn.BatchNorm2d(2048)

        final_channel = 2048
        self.last_linear = nn.Linear(final_channel, num_classes)
        if dropout:
            self.last_linear = nn.Sequential(
                nn.Dropout(p=dropout),
                nn.Linear(final_channel, num_classes)
            )

        # Auxiliary heads: 5-way forgery-type classification and 1-d magnitude regression.
        self.type_fc = nn.Linear(2048, 5)
        self.mag_fc = nn.Linear(2048, 1)
        # Map head operating on the 728-channel features after block3.
        self.map = RegressionMap(728)
        self.pecent = 1.0 / 1.5

    def fea_part1_0(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        return x

    def fea_part1_1(self, x):
        x = self.conv2(x)
        x = self.bn2(x)
        x = self.relu(x)
        return x

    def fea_part1(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)

        x = self.conv2(x)
        x = self.bn2(x)
        x = self.relu(x)
        return x

    def fea_part2(self, x):
        x = self.block1(x)
        x = self.block2(x)
        x = self.block3(x)
        return x

    def fea_part3(self, x):
        x = self.block4(x)
        x = self.block5(x)
        x = self.block6(x)
        x = self.block7(x)
        return x

    def fea_part4(self, x):
        x = self.block8(x)
        x = self.block9(x)
        x = self.block10(x)
        x = self.block11(x)
        x = self.block12(x)
        return x

    def fea_part5(self, x):
        x = self.conv3(x)
        x = self.bn3(x)
        x = self.relu(x)

        x = self.conv4(x)
        x = self.bn4(x)
        return x

    def features(self, input):
        # Returns the final 2048-channel feature map and the intermediate
        # 728-channel features (x3) consumed by the map head.
        x = self.fea_part1(input)
        x = self.fea_part2(x)
        x3 = self.fea_part3(x)
        x = self.fea_part4(x3)
        x = self.fea_part5(x)
        return x, x3

    # def classifier(self, features):
    def classifier(self, x):
        x = self.relu(x)
        x = F.adaptive_avg_pool2d(x, (1, 1))
        x = x.view(x.size(0), -1)
        out = self.last_linear(x)
        # Auxiliary head outputs, so that forward() can unpack (out, fea, type, mag).
        type = self.type_fc(x)
        mag = self.mag_fc(x)
        return out, x, type, mag

    def estimateMap(self, x):
        map = self.map(x)
        return map

    # def forward(self, input):
    def forward(self, x):
        x, x3 = self.features(x)
        out, fea, type, mag = self.classifier(x)
        map = self.estimateMap(x3)
        return out, fea, map, type, mag
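

if __name__ == "__main__":
    # Minimal smoke-test sketch, not part of the original module: it builds the backbone
    # from a hand-written config dict and runs a dummy forward pass to show the five
    # outputs. The config keys ("num_classes", "inc", "dropout") are the ones __init__
    # reads above; the concrete values (2 classes, 3-channel input, dropout 0.5) and the
    # 299x299 input size (taken from pretrained_settings) are assumptions for the demo.
    # Running it requires the repo's metrics.registry module to be importable.
    config = {"num_classes": 2, "inc": 3, "dropout": 0.5}
    model = Xception_SLADD(config)
    model.eval()

    dummy = torch.randn(2, 3, 299, 299)  # batch of 2 RGB images
    with torch.no_grad():
        out, fea, map_pred, type_pred, mag_pred = model(dummy)

    # out:       (B, num_classes) logits from last_linear
    # fea:       (B, 2048) pooled features before the classification head
    # map_pred:  (B, 1, h, w) sigmoid map from RegressionMap, at roughly 1/16 input resolution
    # type_pred: (B, 5) forgery-type logits; mag_pred: (B, 1) magnitude estimate
    # (interpretation of the last two follows the type_fc / mag_fc head names)
    print(out.shape, fea.shape, map_pred.shape, type_pred.shape, mag_pred.shape)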