Improve XOR example with ReLU activation fn

This commit is contained in:
markichnich 2024-03-21 22:02:18 +01:00
parent 0e2308b472
commit 562613ad4b

View File

@@ -1,9 +1,10 @@
 use aicaramba::functions::*;
+use aicaramba::matrix::Mat;
 use aicaramba::neural_net::NeuralNet;

 fn main() {
-    let mut net = NeuralNet::new(vec![2, 3, 1], SIGMOID, MSE, 0.05);
-    let epochs = 10_000;
+    let mut net = NeuralNet::new(vec![2, 3, 1], RELU, MSE, 0.05);
+    let epochs = 500;
     let inputs = vec![
         vec![0.0, 0.0],
@@ -14,5 +15,11 @@ fn main() {
     let expected = vec![vec![0.0], vec![1.0], vec![1.0], vec![0.0]];
-    net.train_basic(inputs, expected, epochs);
+    net.train_basic(inputs.clone(), expected, epochs);
+
+    for input in inputs {
+        let output = net.forward(Mat::from(input.clone()));
+        let o = output.into_iter().collect::<Vec<_>>();
+        println!("{} ^ {} = {:.20}", input[0], input[1], o[0]);
+    }
 }