# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import paddle
import paddle.nn as nn
import paddle.nn.functional as F

from paddle.nn import L1Loss
from paddle.nn import MSELoss as L2Loss
from paddle.nn import SmoothL1Loss


class CELoss(nn.Layer):
    def __init__(self, epsilon=None):
        super().__init__()
        if epsilon is not None and (epsilon <= 0 or epsilon >= 1):
            epsilon = None
        self.epsilon = epsilon

    def _labelsmoothing(self, target, class_num):
        if target.shape[-1] != class_num:
            one_hot_target = F.one_hot(target, class_num)
        else:
            one_hot_target = target
        soft_target = F.label_smooth(one_hot_target, epsilon=self.epsilon)
        soft_target = paddle.reshape(soft_target, shape=[-1, class_num])
        return soft_target

    def forward(self, x, label):
        if self.epsilon is not None:
            class_num = x.shape[-1]
            label = self._labelsmoothing(label, class_num)
            x = -F.log_softmax(x, axis=-1)
            loss = paddle.sum(x * label, axis=-1)
        else:
            if label.shape[-1] == x.shape[-1]:
                label = F.softmax(label, axis=-1)
                soft_label = True
            else:
                soft_label = False
            loss = F.cross_entropy(x, label=label, soft_label=soft_label)
        return loss


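# Usage sketch (editor's illustration, not part of the original module; the
# shapes and the helper name are hypothetical). CELoss falls back to standard
# cross entropy when epsilon is None, and to a label-smoothed soft-target
# loss otherwise.
def _example_celoss():
    logits = paddle.rand([8, 10])              # [batch, num_classes]
    labels = paddle.randint(0, 10, shape=[8])  # hard class indices
    plain = CELoss()(logits, labels)           # scalar (mean-reduced)
    smoothed = CELoss(epsilon=0.1)(logits, labels)  # per-sample, shape [8]
    return plain, smoothed

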
class KLJSLoss(object):
    def __init__(self, mode='kl'):
        assert mode in ['kl', 'js', 'KL', 'JS'], \
            "mode can only be one of ['kl', 'js', 'KL', 'JS']"
        self.mode = mode

    def __call__(self, p1, p2, reduction="mean"):
        loss = paddle.multiply(p2, paddle.log((p2 + 1e-5) / (p1 + 1e-5) + 1e-5))

        if self.mode.lower() == "js":
            loss += paddle.multiply(
                p1, paddle.log((p1 + 1e-5) / (p2 + 1e-5) + 1e-5))
            loss *= 0.5
        if reduction == "mean":
            loss = paddle.mean(loss, axis=[1, 2])
        elif reduction == "none" or reduction is None:
            return loss
        else:
            loss = paddle.sum(loss, axis=[1, 2])

        return loss


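# Usage sketch (editor's illustration, not part of the original module;
# shapes are hypothetical). KLJSLoss reduces over axes [1, 2], so it expects
# rank-3 probability-like maps such as [batch, H, W].
def _example_kljsloss():
    p1 = paddle.rand([2, 32, 32])
    p2 = paddle.rand([2, 32, 32])
    kl = KLJSLoss(mode="kl")(p1, p2)                   # shape [2]
    js = KLJSLoss(mode="js")(p1, p2, reduction="sum")  # shape [2]
    return kl, js

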
class DMLLoss(nn.Layer):
    """
    DMLLoss (Deep Mutual Learning): applies an optional softmax/sigmoid
    activation, then compares the two model outputs with symmetric KL
    divergence (use_log=True) or the JS variant of KLJSLoss.
    """
    def __init__(self, act=None, use_log=False):
        super().__init__()
        if act is not None:
            assert act in ["softmax", "sigmoid"]
        if act == "softmax":
            self.act = nn.Softmax(axis=-1)
        elif act == "sigmoid":
            self.act = nn.Sigmoid()
        else:
            self.act = None

        self.use_log = use_log
        self.jskl_loss = KLJSLoss(mode="js")

    def forward(self, out1, out2):
        if self.act is not None:
            out1 = self.act(out1)
            out2 = self.act(out2)
        if self.use_log:
            # for recognition distillation, take the log of the feature map
            # before the KL divergence
            log_out1 = paddle.log(out1)
            log_out2 = paddle.log(out2)
            loss = (F.kl_div(
                log_out1, out2, reduction='batchmean') + F.kl_div(
                    log_out2, out1, reduction='batchmean')) / 2.0
        else:
            # for detection distillation, log is not needed
            loss = self.jskl_loss(out1, out2)
        return loss


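# Usage sketch (editor's illustration, not part of the original module;
# shapes are hypothetical). With use_log=True the activated outputs are
# compared with symmetric KL (recognition distillation); with use_log=False
# the JS variant of KLJSLoss is used on rank-3 maps (detection distillation).
def _example_dmlloss():
    logits1 = paddle.rand([4, 10])
    logits2 = paddle.rand([4, 10])
    rec_loss = DMLLoss(act="softmax", use_log=True)(logits1, logits2)  # scalar
    maps1 = paddle.rand([2, 32, 32])
    maps2 = paddle.rand([2, 32, 32])
    det_loss = DMLLoss(use_log=False)(maps1, maps2)  # shape [2]
    return rec_loss, det_loss

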
class DistanceLoss(nn.Layer):
    """
    DistanceLoss: dispatches to L1, L2 (MSE) or smooth L1 loss according
    to `mode`; extra keyword arguments are forwarded to the underlying
    paddle.nn loss.
    """

    def __init__(self, mode="l2", **kwargs):
        super().__init__()
        assert mode in ["l1", "l2", "smooth_l1"]
        if mode == "l1":
            self.loss_func = nn.L1Loss(**kwargs)
        elif mode == "l2":
            self.loss_func = nn.MSELoss(**kwargs)
        elif mode == "smooth_l1":
            self.loss_func = nn.SmoothL1Loss(**kwargs)

    def forward(self, x, y):
        return self.loss_func(x, y)


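# Usage sketch (editor's illustration, not part of the original module;
# shapes are hypothetical). DistanceLoss forwards extra keyword arguments,
# e.g. reduction, to the underlying paddle.nn loss.
def _example_distanceloss():
    x = paddle.rand([4, 3])
    y = paddle.rand([4, 3])
    l2 = DistanceLoss(mode="l2")(x, y)                   # mean squared error
    l1 = DistanceLoss(mode="l1", reduction="sum")(x, y)  # summed absolute error
    return l2, l1

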
class LossFromOutput(nn.Layer):
    def __init__(self, key='loss', reduction='none'):
        super().__init__()
        self.key = key
        self.reduction = reduction

    def forward(self, predicts, batch):
        loss = predicts[self.key]
        if self.reduction == 'mean':
            loss = paddle.mean(loss)
        elif self.reduction == 'sum':
            loss = paddle.sum(loss)
        return {'loss': loss}
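

# Usage sketch (editor's illustration, not part of the original module; the
# predicts dict is hypothetical). LossFromOutput is for heads that already
# return a loss tensor in their output dict; it only extracts the value and
# optionally reduces it.
def _example_lossfromoutput():
    predicts = {'loss': paddle.to_tensor([0.5, 1.5])}
    out = LossFromOutput(key='loss', reduction='mean')(predicts, batch=None)
    return out  # {'loss': Tensor(1.0)}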