import torch
import torch.nn as nn

from engine.BiRefNet.config import Config
from engine.BiRefNet.models.modules.aspp import ASPP, ASPPDeformable

config = Config()


class BasicDecBlk(nn.Module):
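    """Basic decoder block: 3x3 conv -> BN -> ReLU -> optional ASPP attention -> 3x3 conv -> BN.

    BatchNorm layers are swapped for Identity when config.batch_size == 1, and the
    intermediate width follows config.dec_channels_inter rather than the constructor argument.
    """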
    def __init__(self, in_channels=64, out_channels=64, inter_channels=64):
        super().__init__()
        # The inter_channels argument is kept for interface compatibility but is
        # overridden here: adaptive width when configured, otherwise a fixed 64.
        inter_channels = in_channels // 4 if config.dec_channels_inter == "adap" else 64
        self.conv_in = nn.Conv2d(in_channels, inter_channels, 3, 1, padding=1)
        self.relu_in = nn.ReLU(inplace=True)
        if config.dec_att == "ASPP":
            self.dec_att = ASPP(in_channels=inter_channels)
        elif config.dec_att == "ASPPDeformable":
            self.dec_att = ASPPDeformable(in_channels=inter_channels)
        self.conv_out = nn.Conv2d(inter_channels, out_channels, 3, 1, padding=1)
        self.bn_in = (
            nn.BatchNorm2d(inter_channels) if config.batch_size > 1 else nn.Identity()
        )
        self.bn_out = (
            nn.BatchNorm2d(out_channels) if config.batch_size > 1 else nn.Identity()
        )

    def forward(self, x):
        x = self.conv_in(x)
        x = self.bn_in(x)
        x = self.relu_in(x)
        if hasattr(self, "dec_att"):
            x = self.dec_att(x)
        x = self.conv_out(x)
        x = self.bn_out(x)
        return x


class ResBlk(nn.Module):
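    """Residual decoder block: the same conv/BN/ReLU/attention body as BasicDecBlk,
    plus a 1x1-projected skip connection added to the output.

    out_channels defaults to in_channels when not given.
    """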
    def __init__(self, in_channels=64, out_channels=None, inter_channels=64):
        super().__init__()
        if out_channels is None:
            out_channels = in_channels
        # As in BasicDecBlk, inter_channels is overridden by the config setting.
        inter_channels = in_channels // 4 if config.dec_channels_inter == "adap" else 64

        self.conv_in = nn.Conv2d(in_channels, inter_channels, 3, 1, padding=1)
        self.bn_in = (
            nn.BatchNorm2d(inter_channels) if config.batch_size > 1 else nn.Identity()
        )
        self.relu_in = nn.ReLU(inplace=True)

        if config.dec_att == "ASPP":
            self.dec_att = ASPP(in_channels=inter_channels)
        elif config.dec_att == "ASPPDeformable":
            self.dec_att = ASPPDeformable(in_channels=inter_channels)

        self.conv_out = nn.Conv2d(inter_channels, out_channels, 3, 1, padding=1)
        self.bn_out = (
            nn.BatchNorm2d(out_channels) if config.batch_size > 1 else nn.Identity()
        )

        # 1x1 projection so the skip connection matches out_channels for the residual sum.
        self.conv_resi = nn.Conv2d(in_channels, out_channels, 1, 1, 0)

    def forward(self, x):
        _x = self.conv_resi(x)
        x = self.conv_in(x)
        x = self.bn_in(x)
        x = self.relu_in(x)
        if hasattr(self, "dec_att"):
            x = self.dec_att(x)
        x = self.conv_out(x)
        x = self.bn_out(x)
        return x + _x
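

# Minimal smoke-test sketch (not part of the original module). The input shape and
# channel counts below are illustrative assumptions; which attention branch runs
# (ASPP, ASPPDeformable, or none) depends on config.dec_att.
if __name__ == "__main__":
    x = torch.randn(2, 64, 32, 32)

    basic = BasicDecBlk(in_channels=64, out_channels=64)
    res = ResBlk(in_channels=64, out_channels=32)

    print(basic(x).shape)  # expected: torch.Size([2, 64, 32, 32])
    print(res(x).shape)    # expected: torch.Size([2, 32, 32, 32])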