backprop logic correction

Author: markichnich
Date:   2024-03-21 22:00:41 +01:00
Parent: a1e2584367
Commit: 5787924b35


@@ -90,8 +90,8 @@ where
                 .elementwise_mul(&losses)
                 .map(|x| x * self.learning_rate.clone());
-            self.weights[i] = self.weights[i].add(&gradients.dot(&self.data[i].transpose()));
-            self.biases[i] = self.biases[i].add(&gradients);
+            self.weights[i] = self.weights[i].sub(&gradients.dot(&self.data[i].transpose()));
+            self.biases[i] = self.biases[i].sub(&gradients);
             losses = self.weights[i].transpose().dot(&losses);
             gradients = self.data[i].map(self.activation.f_prime);
@@ -125,10 +125,7 @@ where
                     sum + (self.loss.f)(y_hat, y.clone())
                 });
             }
-            println!(
-                "epoch: {i:0>width$} / {epochs:0>width$} ;\tloss: {:.5}",
-                loss
-            );
+            println!("epoch: {i:0>width$} / {epochs:0>width$} ;\tloss: {}", loss);
         }
     }
 }
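
For context: the add-to-sub flip makes the update a standard gradient-descent step, w <- w - lr * grad. Parameters must move against the gradient for the loss to decrease; adding the scaled gradient (the pre-fix behavior) steps in the ascent direction instead. Below is a minimal sketch of the corrected update rule over plain slices; gd_step and its parameter names are illustrative, not this repository's actual matrix API:

    // Gradient-descent step on flat parameter vectors.
    // Names and types are illustrative, not the repo's own API.
    fn gd_step(weights: &mut [f64], grads: &[f64], learning_rate: f64) {
        for (w, g) in weights.iter_mut().zip(grads) {
            // Subtract: moving against the gradient reduces the loss.
            // Adding here (the pre-fix behavior) is gradient ascent.
            *w -= learning_rate * g;
        }
    }

    fn main() {
        let mut w = vec![0.5, -1.2];
        let g = vec![0.1, -0.3];
        gd_step(&mut w, &g, 0.01);
        println!("{w:?}"); // approximately [0.499, -1.197]
    }

The same sign convention applies to the bias update in the diff: since gradients already carries the learning-rate factor, both weights and biases are corrected with sub rather than add.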