"""
# -*- coding: utf-8 -*-
#
# Copyright 2021 Michael Büsch <m@bues.ch>
#
# Licensed under the Apache License version 2.0
# or the MIT license, at your option.
# SPDX-License-Identifier: Apache-2.0 OR MIT
#
"""
from mlplib.activation import *
from mlplib.forward import *
from mlplib.init import *
from mlplib.parameters import *
import numpy as np
def test_forward_prop_singleinput():
    """Forward-propagate a single 2-element input vector through a 2-3-1
    sigmoid network with a fixed RNG seed and check the scalar output."""
    seed(42)
    num_inputs = 2
    net_layout = (3, 1)
    params = Parameters(
        weights=init_layers_weights(num_inputs, net_layout),
        biases=init_layers_biases(net_layout),
        actvns=[Sigmoid(), Sigmoid()],
    )
    x = np.array([1, 2])
    # Request the network state as well, even though this test only
    # checks the prediction.
    yh, _state = forward_prop(x, params, store_netstate=True)
    assert np.allclose(yh, np.array([0.77996251]))
def test_forward_prop_multiinput():
    """Forward-propagate a batch of four 2-element input vectors through a
    2-3-1 sigmoid network with a fixed RNG seed and check all outputs."""
    seed(43)
    num_inputs = 2
    net_layout = (3, 1)
    params = Parameters(
        weights=init_layers_weights(num_inputs, net_layout),
        biases=init_layers_biases(net_layout),
        actvns=[Sigmoid(), Sigmoid()],
    )
    # Batch of four samples, one per row.
    x = np.array([[1, 2],
                  [9, 10],
                  [8, 7],
                  [6, 5]])
    yh, _state = forward_prop(x, params, store_netstate=True)
    expected = np.array([[0.81728153],
                         [0.73177059],
                         [0.60556236],
                         [0.62344552]])
    assert np.allclose(yh, expected)
def test_forward_prop_nostore():
    """Same batch forward pass as test_forward_prop_multiinput, but with
    store_netstate left at its default: forward_prop then returns only
    the prediction, not a (prediction, state) tuple."""
    seed(43)
    num_inputs = 2
    net_layout = (3, 1)
    params = Parameters(
        weights=init_layers_weights(num_inputs, net_layout),
        biases=init_layers_biases(net_layout),
        actvns=[Sigmoid(), Sigmoid()],
    )
    x = np.array([[1, 2],
                  [9, 10],
                  [8, 7],
                  [6, 5]])
    # No store_netstate -> single return value.
    yh = forward_prop(x, params)
    expected = np.array([[0.81728153],
                         [0.73177059],
                         [0.60556236],
                         [0.62344552]])
    assert np.allclose(yh, expected)
# vim: ts=4 sw=4 expandtab