import mmcv
import pytest
import torch

from mmseg.models.utils.se_layer import SELayer


def test_se_layer():
    # act_cfg must be a single dict or a tuple of exactly two dicts,
    # so a one-element tuple should raise an AssertionError.
    with pytest.raises(AssertionError):
        SELayer(32, act_cfg=(dict(type='ReLU'), ))

    # Default config: both squeeze/excite convs are 1x1, with ReLU after
    # conv1 and HSigmoid after conv2.
    se_layer = SELayer(16)
    assert se_layer.conv1.conv.kernel_size == (1, 1)
    assert se_layer.conv1.conv.stride == (1, 1)
    assert se_layer.conv1.conv.padding == (0, 0)
    assert isinstance(se_layer.conv1.activate, torch.nn.ReLU)
    assert se_layer.conv2.conv.kernel_size == (1, 1)
    assert se_layer.conv2.conv.stride == (1, 1)
    assert se_layer.conv2.conv.padding == (0, 0)
    assert isinstance(se_layer.conv2.activate, mmcv.cnn.HSigmoid)

    # The forward pass should preserve the input shape.
    x = torch.rand(1, 16, 64, 64)
    output = se_layer(x)
    assert output.shape == (1, 16, 64, 64)

    # Passing a single dict as act_cfg should apply the same activation
    # after both conv1 and conv2.
    se_layer = SELayer(16, act_cfg=dict(type='ReLU'))
    assert se_layer.conv1.conv.kernel_size == (1, 1)
    assert se_layer.conv1.conv.stride == (1, 1)
    assert se_layer.conv1.conv.padding == (0, 0)
    assert isinstance(se_layer.conv1.activate, torch.nn.ReLU)
    assert se_layer.conv2.conv.kernel_size == (1, 1)
    assert se_layer.conv2.conv.stride == (1, 1)
    assert se_layer.conv2.conv.padding == (0, 0)
    assert isinstance(se_layer.conv2.activate, torch.nn.ReLU)

    x = torch.rand(1, 16, 64, 64)
    output = se_layer(x)
    assert output.shape == (1, 16, 64, 64)