"""
# -*- coding: utf-8 -*-
#
# Copyright 2021 Michael Büsch <m@bues.ch>
#
# Licensed under the Apache License version 2.0
# or the MIT license, at your option.
# SPDX-License-Identifier: Apache-2.0 OR MIT
#
"""

__all__ = [
    "LayerState",
    "forward_prop",
]

from dataclasses import dataclass
from typing import Union, Tuple, List

import numpy as np

from mlplib.parameters import Parameters

@dataclass
class LayerState(object):
    """Intermediate state of a single layer, recorded during forward propagation."""
    layer: int      # Index of this layer within the network.
    x: np.ndarray   # Input fed into this layer.
    z: np.ndarray   # Pre-activation values: (x @ w) + b.
    yh: np.ndarray  # Output of this layer after applying the activation function.

def forward_prop(x: np.ndarray,
                 params: Parameters,
                 store_netstate: bool = False)\
        -> Union[np.ndarray, Tuple[np.ndarray, List[LayerState]]]:
    """Run one forward pass of the network.

    x: np.array with shape (m, n) or shape (n,)
        m = rows = number of samples.
        n = columns = number of input layer neurons.
    params: Parameters instance.
    store_netstate: If True, additionally return the per-layer LayerState records.
    Returns: np.array with shape (m, n):
        m = rows = number of samples.
        n = columns = number of output layer neurons.
        If store_netstate is True, a (output, state list) tuple is returned instead.
    """
    assert len(params.weights) >= 1
    assert len(params.weights) == len(params.biases)
    assert len(params.weights) == len(params.actvns)

    # A single sample with shape (n,) is promoted to a one-row matrix (1, n).
    one_dim_data = x.ndim == 1
    if one_dim_data:
        x = np.array(x, ndmin=2)
    assert x.ndim == 2

    state = []
    xx = x
    for i, (w, b, actv, *_) in enumerate(params):
        assert w.ndim == 2
        assert w.shape[0] == xx.shape[1]
        assert b.ndim == 2
        assert b.shape == (1, w.shape[1])

        # Linear combination, then the layer's activation function.
        z = (xx @ w) + b
        yh = actv.fn(z)

        if store_netstate:
            state.append(LayerState(i, xx, z, yh))

        # The output of this layer is the input of the next layer.
        xx = yh

    assert yh.shape[0] == x.shape[0]
    if one_dim_data:
        # The input was one dimensional, so return a one dimensional output.
        assert yh.shape[0] == 1
        yh = yh.reshape((yh.shape[1],))

    if store_netstate:
        return yh, state
    return yh
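
if __name__ == "__main__":
    # Minimal usage sketch. The two classes below are hypothetical stand-ins and
    # are NOT part of mlplib; they only mimic the duck-typed interface that
    # forward_prop() actually uses (.weights, .biases, .actvns, iteration
    # yielding (w, b, actv) tuples, and an activation object exposing .fn()).
    # With a real mlplib.parameters.Parameters instance they are not needed.

    class _ReluStandIn(object):
        """Hypothetical activation stand-in: a plain ReLU."""
        @staticmethod
        def fn(z):
            return np.maximum(z, 0.0)

    class _ParametersStandIn(object):
        """Hypothetical Parameters stand-in with only the attributes used above."""
        def __init__(self, weights, biases, actvns):
            self.weights = weights
            self.biases = biases
            self.actvns = actvns

        def __iter__(self):
            return iter(zip(self.weights, self.biases, self.actvns))

    rng = np.random.default_rng(42)
    # A two layer network: 3 inputs -> 4 hidden neurons -> 2 outputs.
    demo_params = _ParametersStandIn(
        weights=[rng.normal(size=(3, 4)), rng.normal(size=(4, 2))],
        biases=[np.zeros((1, 4)), np.zeros((1, 2))],
        actvns=[_ReluStandIn(), _ReluStandIn()],
    )

    demo_x = rng.normal(size=(5, 3))  # 5 samples with 3 features each.
    demo_yh, demo_state = forward_prop(demo_x, demo_params, store_netstate=True)
    print(demo_yh.shape)    # (5, 2): one output row per sample.
    print(len(demo_state))  # 2: one LayerState record per layer.
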
# vim: ts=4 sw=4 expandtab