# GenSim: cliport/models/resnet.py
import torch
import torch.nn as nn
import torch.nn.functional as F
import cliport.utils.utils as utils


class IdentityBlock(nn.Module):
    """Residual block (1x1 -> kxk -> 1x1 convolutions) whose skip connection is the
    identity, so in_planes must equal the last entry of filters and stride should be 1."""
def __init__(self, in_planes, filters, kernel_size, stride=1, final_relu=True, batchnorm=True):
super(IdentityBlock, self).__init__()
self.final_relu = final_relu
self.batchnorm = batchnorm
filters1, filters2, filters3 = filters
self.conv1 = nn.Conv2d(in_planes, filters1, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(filters1) if self.batchnorm else nn.Identity()
self.conv2 = nn.Conv2d(filters1, filters2, kernel_size=kernel_size, dilation=1,
stride=stride, padding=1, bias=False)
self.bn2 = nn.BatchNorm2d(filters2) if self.batchnorm else nn.Identity()
self.conv3 = nn.Conv2d(filters2, filters3, kernel_size=1, bias=False)
        self.bn3 = nn.BatchNorm2d(filters3) if self.batchnorm else nn.Identity()

    def forward(self, x):
out = F.relu(self.bn1(self.conv1(x)))
out = F.relu(self.bn2(self.conv2(out)))
out = self.bn3(self.conv3(out))
out += x
if self.final_relu:
out = F.relu(out)
        return out


class ConvBlock(nn.Module):
    """Residual block (1x1 -> kxk -> 1x1 convolutions) with a 1x1 projection shortcut,
    used when the block changes the channel count and/or the spatial stride."""
def __init__(self, in_planes, filters, kernel_size, stride=1, final_relu=True, batchnorm=True):
super(ConvBlock, self).__init__()
self.final_relu = final_relu
self.batchnorm = batchnorm
filters1, filters2, filters3 = filters
self.conv1 = nn.Conv2d(in_planes, filters1, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(filters1) if self.batchnorm else nn.Identity()
self.conv2 = nn.Conv2d(filters1, filters2, kernel_size=kernel_size, dilation=1,
stride=stride, padding=1, bias=False)
self.bn2 = nn.BatchNorm2d(filters2) if self.batchnorm else nn.Identity()
self.conv3 = nn.Conv2d(filters2, filters3, kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm2d(filters3) if self.batchnorm else nn.Identity()
self.shortcut = nn.Sequential(
nn.Conv2d(in_planes, filters3,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(filters3) if self.batchnorm else nn.Identity()
        )

    def forward(self, x):
out = F.relu(self.bn1(self.conv1(x)))
out = F.relu(self.bn2(self.conv2(out)))
out = self.bn3(self.conv3(out))
out += self.shortcut(x)
if self.final_relu:
out = F.relu(out)
        return out


class ResNet43_8s(nn.Module):
    """Fully convolutional ResNet encoder-decoder used by Transporter/CLIPort: the
    encoder downsamples the input by 8x (hence the "_8s" suffix) and three bilinear
    upsampling stages restore the original resolution with output_dim channels."""
def __init__(self, input_shape, output_dim, cfg, device, preprocess):
super(ResNet43_8s, self).__init__()
self.input_shape = input_shape
self.input_dim = input_shape[-1]
self.output_dim = output_dim
self.cfg = cfg
self.device = device
self.batchnorm = self.cfg['train']['batchnorm']
self.preprocess = preprocess
        self.layers = self._make_layers()

    def _make_layers(self):
layers = nn.Sequential(
# conv1
nn.Conv2d(self.input_dim, 64, stride=1, kernel_size=3, padding=1),
nn.BatchNorm2d(64) if self.batchnorm else nn.Identity(),
nn.ReLU(True),
# fcn
ConvBlock(64, [64, 64, 64], kernel_size=3, stride=1, batchnorm=self.batchnorm),
IdentityBlock(64, [64, 64, 64], kernel_size=3, stride=1, batchnorm=self.batchnorm),
ConvBlock(64, [128, 128, 128], kernel_size=3, stride=2, batchnorm=self.batchnorm),
IdentityBlock(128, [128, 128, 128], kernel_size=3, stride=1, batchnorm=self.batchnorm),
ConvBlock(128, [256, 256, 256], kernel_size=3, stride=2, batchnorm=self.batchnorm),
IdentityBlock(256, [256, 256, 256], kernel_size=3, stride=1, batchnorm=self.batchnorm),
ConvBlock(256, [512, 512, 512], kernel_size=3, stride=2, batchnorm=self.batchnorm),
IdentityBlock(512, [512, 512, 512], kernel_size=3, stride=1, batchnorm=self.batchnorm),
# head
ConvBlock(512, [256, 256, 256], kernel_size=3, stride=1, batchnorm=self.batchnorm),
IdentityBlock(256, [256, 256, 256], kernel_size=3, stride=1, batchnorm=self.batchnorm),
nn.UpsamplingBilinear2d(scale_factor=2),
ConvBlock(256, [128, 128, 128], kernel_size=3, stride=1, batchnorm=self.batchnorm),
IdentityBlock(128, [128, 128, 128], kernel_size=3, stride=1, batchnorm=self.batchnorm),
nn.UpsamplingBilinear2d(scale_factor=2),
ConvBlock(128, [64, 64, 64], kernel_size=3, stride=1, batchnorm=self.batchnorm),
IdentityBlock(64, [64, 64, 64], kernel_size=3, stride=1, batchnorm=self.batchnorm),
nn.UpsamplingBilinear2d(scale_factor=2),
# conv2
ConvBlock(64, [16, 16, self.output_dim], kernel_size=3, stride=1,
final_relu=False, batchnorm=self.batchnorm),
IdentityBlock(self.output_dim, [16, 16, self.output_dim], kernel_size=3, stride=1,
final_relu=False, batchnorm=self.batchnorm),
)
        return layers

    def forward(self, x):
        # Apply the dataset-specific preprocessing (dist='transporter') before the trunk.
        x = self.preprocess(x, dist='transporter')
out = self.layers(x)
return out
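

# ---------------------------------------------------------------------------
# Minimal smoke-test sketch (not part of the original module). It assumes a
# channels-last input_shape, a hypothetical minimal cfg containing only the
# 'train' -> 'batchnorm' key, and an identity stand-in for the preprocess
# callable (the real CLIPort code passes utils.preprocess here).
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    dummy_cfg = {'train': {'batchnorm': False}}  # hypothetical minimal config

    def identity_preprocess(x, dist='transporter'):  # stand-in for utils.preprocess
        return x

    net = ResNet43_8s(input_shape=(320, 160, 6), output_dim=1,
                      cfg=dummy_cfg, device='cpu', preprocess=identity_preprocess)

    x = torch.zeros(1, 6, 320, 160)  # NCHW tensor; channels match input_shape[-1]
    with torch.no_grad():
        y = net(x)

    # Three stride-2 ConvBlocks downsample by 8x and three bilinear upsamplings
    # restore the resolution, so the output is (1, output_dim, 320, 160).
    print(y.shape)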