File size: 2,912 Bytes
29f689c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
import torch
import torch.nn.functional as F
from torch import nn


class CPPDLoss(nn.Module):
    """Loss for the CPPD text recognizer.

    Combines three terms:
      * a character-counting node loss (cross entropy over per-character counts),
      * a position/length node loss (CE over lengths when ``pos_len`` is true,
        otherwise per-position binary cross entropy with logits),
      * an edge loss (per-step character classification, optionally with
        label smoothing).
    The two node losses are scaled by ``sideloss_weight`` in the total.
    """

    def __init__(self,
                 smoothing=False,
                 ignore_index=100,
                 pos_len=False,
                 sideloss_weight=1.0,
                 max_len=25,
                 **kwargs):
        """
        Args:
            smoothing: use label-smoothed CE for the edge loss when True.
            ignore_index: target value treated as padding and excluded
                from the CE means (this codebase pads with 100).
            pos_len: position head is a length classifier (CE) when True,
                else a per-position binary classifier (BCE with logits).
            sideloss_weight: multiplier applied to the node losses.
            max_len: maximum text length; one extra slot is added internally.
        """
        super(CPPDLoss, self).__init__()
        self.smoothing = smoothing
        self.ignore_index = ignore_index
        self.pos_len = pos_len
        self.sideloss_weight = sideloss_weight
        # One extra position beyond max_len (end-of-sequence slot).
        self.max_len = max_len + 1

        self.edge_ce = nn.CrossEntropyLoss(reduction='mean',
                                           ignore_index=ignore_index)
        self.char_node_ce = nn.CrossEntropyLoss(reduction='mean')
        if pos_len:
            self.pos_node_ce = nn.CrossEntropyLoss(reduction='mean',
                                                   ignore_index=ignore_index)
        else:
            self.pos_node_ce = nn.BCEWithLogitsLoss(reduction='mean')

    def label_smoothing_ce(self, preds, targets):
        """Cross entropy with label smoothing (eps = 0.1).

        Entries equal to ``self.ignore_index`` are masked out of the mean;
        ``preds`` is (N, C) logits, ``targets`` is (N,) class indices.
        """
        pad_value = torch.full_like(targets, self.ignore_index)
        keep_mask = torch.not_equal(targets, pad_value)

        # Replace padded targets with class 0 so one_hot stays in range;
        # they are masked out of the loss below anyway.
        safe_tgt = torch.where(targets == pad_value,
                               torch.zeros_like(targets), targets)
        eps = 0.1
        num_classes = preds.shape[1]
        smoothed = F.one_hot(safe_tgt, num_classes)
        smoothed = smoothed * (1 - eps) + (1 - smoothed) * eps / (num_classes - 1)
        per_sample = -(smoothed * F.log_softmax(preds, dim=1)).sum(dim=1)
        return per_sample.masked_select(keep_mask).mean()

    def forward(self, pred, batch):
        """Compute the combined CPPD loss.

        Args:
            pred: ``(node_feats, edge_feats)`` where ``node_feats`` holds the
                character-count logits and position logits, and ``edge_feats``
                holds per-step character logits.
            batch: sequence with the character targets at index 1 and the
                node targets (counts followed by the last ``max_len + 1``
                position entries) at index 2.

        Returns:
            Dict with ``loss`` (weighted total), ``loss_node`` (weighted
            node term) and ``loss_edge``.
        """
        node_feats, edge_feats = pred
        char_tgt = batch[1]
        node_tgt = batch[2]

        # Character-count node loss; clip labels into the valid class range.
        num_chars = node_feats[0].shape[-1]
        count_labels = torch.clip(node_tgt[:, :-self.max_len].flatten(0, 1),
                                  0, num_chars - 1)
        loss_char_node = self.char_node_ce(node_feats[0].flatten(0, 1),
                                           count_labels)

        # Position node loss; BCE needs float targets, CE keeps integers.
        pos_labels = node_tgt[:, -self.max_len:].flatten(0, 1)
        if not self.pos_len:
            pos_labels = pos_labels.float()
        loss_pos_node = self.pos_node_ce(node_feats[1].flatten(0, 1),
                                         pos_labels)
        loss_node = loss_char_node + loss_pos_node

        # Edge (per-step character) loss, optionally label-smoothed.
        flat_edges = edge_feats.flatten(0, 1)
        flat_chars = char_tgt.flatten(0, 1)
        if self.smoothing:
            loss_edge = self.label_smoothing_ce(flat_edges, flat_chars)
        else:
            loss_edge = self.edge_ce(flat_edges, flat_chars)

        return {
            'loss': self.sideloss_weight * loss_node + loss_edge,
            'loss_node': self.sideloss_weight * loss_node,
            'loss_edge': loss_edge,
        }