"""
# -*- coding: utf-8 -*-
#
# Copyright 2021 Michael Büsch <m@bues.ch>
#
# Licensed under the Apache License version 2.0
# or the MIT license, at your option.
# SPDX-License-Identifier: Apache-2.0 OR MIT
#
"""

__all__ = [
    "Loss",
    "MAE",
    "MSE",
]

from abc import ABC, abstractmethod
import numpy as np

class Loss(ABC):
    @abstractmethod
    def fn(self, yh, y):
        """Forward loss function.
        yh: predicted value
        y: expected value
        """

    @abstractmethod
    def fn_d(self, yh, y):
        """Loss function derivative.
        """

class MAE(Loss):
    """MAE Mean Absolute Error (L1) loss.
    """

    def fn(self, yh, y):
        assert yh.size == y.size
        if y.size:
            return np.absolute(y - yh).mean()
        return 0.0

    def fn_d(self, yh, y):
        assert yh.size == y.size
        # Elementwise subgradient of |y - yh| w.r.t. yh: +1 where yh > y, else -1.
        # (-1 at yh == y is an arbitrary but valid subgradient choice.)
        # Note: the 1/n factor from the mean in fn() is not applied here.
        return ((yh > y).astype(y.dtype) * 2.0) - 1.0

class MSE(Loss):
    """MSE Mean Squared Error (L2) loss.
    """

    def fn(self, yh, y):
        assert yh.size == y.size
        if y.size:
            return np.square(y - yh).mean()
        return 0.0

    def fn_d(self, yh, y):
        assert yh.size == y.size
        # The derivative of (y - yh)^2 w.r.t. yh is 2*(yh - y); the constant
        # factor 2 and the 1/n factor from the mean in fn() are omitted here.
        return yh - y
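
# A minimal usage sketch (an illustrative addition, not part of the original
# module): exercises fn() and fn_d() of both loss classes on small arrays.
if __name__ == "__main__":
    yh = np.array([0.5, 1.5, 2.0])  # predicted values
    y = np.array([1.0, 1.0, 2.0])   # expected values

    mse = MSE()
    print("MSE loss:", mse.fn(yh, y))    # mean((y - yh)^2)
    print("MSE grad:", mse.fn_d(yh, y))  # yh - y

    mae = MAE()
    print("MAE loss:", mae.fn(yh, y))    # mean(|y - yh|)
    print("MAE grad:", mae.fn_d(yh, y))  # elementwise +/-1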

# vim: ts=4 sw=4 expandtab