""" # -*- coding: utf-8 -*- # # Copyright 2021 Michael Büsch # # Licensed under the Apache License version 2.0 # or the MIT license, at your option. # SPDX-License-Identifier: Apache-2.0 OR MIT # """ from mlplib.activation import * from mlplib.forward import * from mlplib.init import * from mlplib.parameters import * import numpy as np def test_forward_prop_singleinput(): seed(42) x = np.array([1, 2]) inputs = 2 layout = (3, 1) params = Parameters( weights=init_layers_weights(inputs, layout), biases=init_layers_biases(layout), actvns=[ Sigmoid(), Sigmoid(), ], ) yh, state = forward_prop(x, params, store_netstate=True) assert np.allclose(yh, np.array([0.77996251])) def test_forward_prop_multiinput(): seed(43) x = np.array([[1, 2], [9, 10], [8, 7], [6, 5]]) inputs = 2 layout = (3, 1) params = Parameters( weights=init_layers_weights(inputs, layout), biases=init_layers_biases(layout), actvns=[ Sigmoid(), Sigmoid(), ], ) yh, state = forward_prop(x, params, store_netstate=True) assert np.allclose(yh, np.array([[0.81728153], [0.73177059], [0.60556236], [0.62344552],])) def test_forward_prop_nostore(): seed(43) x = np.array([[1, 2], [9, 10], [8, 7], [6, 5]]) inputs = 2 layout = (3, 1) params = Parameters( weights=init_layers_weights(inputs, layout), biases=init_layers_biases(layout), actvns=[ Sigmoid(), Sigmoid(), ], ) yh = forward_prop(x, params) assert np.allclose(yh, np.array([[0.81728153], [0.73177059], [0.60556236], [0.62344552],])) # vim: ts=4 sw=4 expandtab