import torch.nn as nn


def build_act_layer(act_layer):
    """Return an activation module by name ("ReLU", "SiLU" or "GELU")."""
    if act_layer == "ReLU":
        return nn.ReLU(inplace=True)
    elif act_layer == "SiLU":
        return nn.SiLU(inplace=True)
    elif act_layer == "GELU":
        return nn.GELU()

    raise NotImplementedError(f"build_act_layer does not support {act_layer}")


def build_norm_layer(
    dim, norm_layer, in_format="channels_last", out_format="channels_last", eps=1e-6
):
    """Build a normalization layer ("BN" or "LN") over `dim` channels.

    BatchNorm2d expects channels_first input while LayerNorm normalizes the
    last dimension (channels_last), so permutation wrappers are inserted as
    needed to match the requested input and output formats.
    """
    layers = []
    if norm_layer == "BN":
        if in_format == "channels_last":
            layers.append(to_channels_first())
        layers.append(nn.BatchNorm2d(dim))
        if out_format == "channels_last":
            layers.append(to_channels_last())
    elif norm_layer == "LN":
        if in_format == "channels_first":
            layers.append(to_channels_last())
        layers.append(nn.LayerNorm(dim, eps=eps))
        if out_format == "channels_first":
            layers.append(to_channels_first())
    else:
        raise NotImplementedError(f"build_norm_layer does not support {norm_layer}")
    return nn.Sequential(*layers)


class to_channels_first(nn.Module):
    """Permute a (N, H, W, C) tensor to (N, C, H, W)."""

    def __init__(self):
        super().__init__()

    def forward(self, x):
        return x.permute(0, 3, 1, 2)


class to_channels_last(nn.Module):
    """Permute a (N, C, H, W) tensor to (N, H, W, C)."""

    def __init__(self):
        super().__init__()

    def forward(self, x):
        return x.permute(0, 2, 3, 1)
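

# Minimal usage sketch (not part of the original file): it assumes a
# channels_last feature map of shape (N, H, W, C), the layout these
# helpers are written around.
if __name__ == "__main__":
    import torch

    x = torch.randn(2, 14, 14, 64)  # (N, H, W, C)

    act = build_act_layer("GELU")
    norm = build_norm_layer(
        64, "BN", in_format="channels_last", out_format="channels_last"
    )

    # BatchNorm2d runs in channels_first internally; the wrapper permutes
    # back to channels_last on the way out, so the shape is unchanged.
    y = act(norm(x))
    print(y.shape)  # torch.Size([2, 14, 14, 64])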