# Copyright 2020 Emiru Tsunoo
#  Apache 2.0  (http://www.apache.org/licenses/LICENSE-2.0)

"""Subsampling layer definition."""

import math
import torch


class Conv2dSubsamplingWOPosEnc(torch.nn.Module):
    """Convolutional 2D subsampling.

    Args:
        idim (int): Input dimension.
        odim (int): Output dimension.
        dropout_rate (float): Dropout rate (accepted for interface
            compatibility; this module does not apply dropout).
        kernels (list): Kernel size of each convolution layer.
        strides (list): Stride of each convolution layer.

    """

    def __init__(self, idim, odim, dropout_rate, kernels, strides):
        """Construct an Conv2dSubsamplingWOPosEnc object."""
        assert len(kernels) == len(strides), "kernels and strides must have the same length"
        super().__init__()
        conv = []
        olen = idim
        for i, (k, s) in enumerate(zip(kernels, strides)):
            conv += [
                torch.nn.Conv2d(1 if i == 0 else odim, odim, k, s),
                torch.nn.ReLU(),
            ]
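            # Track the feature (frequency) size that survives each valid
            # (unpadded) convolution; it sizes the output Linear below.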
            olen = math.floor((olen - k) / s + 1)
        self.conv = torch.nn.Sequential(*conv)
        self.out = torch.nn.Linear(odim * olen, odim)
        self.strides = strides
        self.kernels = kernels

    def forward(self, x, x_mask):
        """Subsample x.

        Args:
            x (torch.Tensor): Input tensor (#batch, time, idim).
            x_mask (torch.Tensor): Input mask (#batch, 1, time).

        Returns:
            torch.Tensor: Subsampled tensor (#batch, time', odim),
                where time' follows floor((time - k) / s + 1) for each
                (kernel, stride) pair; e.g., time' ~= time // 4 for two
                stride-2 layers.
            torch.Tensor: Subsampled mask (#batch, 1, time').

        """
        x = x.unsqueeze(1)  # (b, c, t, f)
        x = self.conv(x)
        b, c, t, f = x.size()
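        # Merge channels and remaining frequency bins, then project to odim:
        # (b, c, t, f) -> (b, t, c * f) -> (b, t, odim).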
        x = self.out(x.transpose(1, 2).contiguous().view(b, t, c * f))
        if x_mask is None:
            return x, None
        for k, s in zip(self.kernels, self.strides):
            # Trim and stride the mask to mirror each conv's valid output
            # length, floor((t - k) / s + 1). Using an explicit end index
            # avoids the empty slice that `:-k + 1` would produce when k == 1.
            x_mask = x_mask[:, :, : x_mask.size(2) - k + 1 : s]
        return x, x_mask
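

# Minimal usage sketch (illustrative addition, not part of the original
# module): runs a random batch through two stride-2 convolutions, so the
# time axis shrinks by roughly a factor of 4. All sizes below are arbitrary
# example values.
if __name__ == "__main__":
    layer = Conv2dSubsamplingWOPosEnc(
        idim=80, odim=256, dropout_rate=0.1, kernels=[3, 3], strides=[2, 2]
    )
    x = torch.randn(4, 100, 80)  # (#batch, time, idim)
    x_mask = torch.ones(4, 1, 100, dtype=torch.bool)
    y, y_mask = layer(x, x_mask)
    print(y.shape)       # torch.Size([4, 24, 256])
    print(y_mask.shape)  # torch.Size([4, 1, 24])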