"""
# -*- coding: utf-8 -*-
#
# Copyright 2021 Michael Büsch
#
# Licensed under the Apache License version 2.0
# or the MIT license, at your option.
# SPDX-License-Identifier: Apache-2.0 OR MIT
#
"""

from mlplib.activation import *
from mlplib.backward import *
from mlplib.forward import *
from mlplib.gradient_check import *
from mlplib.init import *
from mlplib.loss import *
from mlplib.parameters import *

import numpy as np


def test_backward_prop():
    """Verify backward_prop against forward_prop and a numerical gradient check.

    Builds a small 4-input MLP with layout (6, 6, 9, 2) and mixed
    activations, runs backpropagation with an MSE loss on random data,
    and asserts that:

    1. the forward-pass output returned by ``backward_prop`` is identical
       to an independent ``forward_prop`` call with the same parameters;
    2. the analytical gradients pass ``gradient_check`` (presumably a
       finite-difference comparison — defined in mlplib.gradient_check).
    """
    # Deterministic RNG so the random weights/inputs are reproducible.
    seed(42)

    inputs = 4
    # Neurons per layer; the last entry (2) is the output width.
    layout = (6, 6, 9, 2)
    params = Parameters(
        weights=init_layers_weights(inputs, layout),
        biases=init_layers_biases(layout),
        # One activation per layer, deliberately mixed to exercise
        # each activation's derivative in the backward pass.
        actvns=[
            ReLU(),
            LReLU(0.1),
            Tanh(),
            Sigmoid(),
        ],
    )

    # 20 random samples; targets match the output layer width.
    x = standard_normal((20, inputs))
    y = standard_normal((20, layout[-1]))

    # backward_prop returns the gradients plus the forward-pass output
    # it computed internally.
    gradients, yh = backward_prop(x, y, params, MSE())

    # The embedded forward pass must agree exactly (not just approximately)
    # with a standalone forward_prop on the same parameters.
    yh2 = forward_prop(x, params)
    assert np.all(yh == yh2)

    # Numerically validate the analytical gradients.
    ok = gradient_check(x, y, params, MSE(), gradients)
    assert ok

# vim: ts=4 sw=4 expandtab