From 26a51ce476c2304df7236628c3392327bd4489b0 Mon Sep 17 00:00:00 2001
From: Michael Mandl
Date: Fri, 16 Oct 2015 23:23:27 +0200
Subject: [PATCH] Hidden neuron gradients complete

---
 Neuron.cpp | 11 ++++++++++-
 Neuron.h   |  2 ++
 2 files changed, 12 insertions(+), 1 deletion(-)

diff --git a/Neuron.cpp b/Neuron.cpp
index 5fc8944..29aebb4 100644
--- a/Neuron.cpp
+++ b/Neuron.cpp
@@ -1,6 +1,7 @@
 #pragma once
 
 #include "Neuron.h"
+#include "Layer.h"
 
 Neuron::Neuron(double value)
 	: outputValue(value)
@@ -64,7 +65,10 @@ double Neuron::sumDOW(const Layer & nextLayer) const
 {
 	double sum = 0;
 
-	// sum it up
+	for (size_t i = 0; i < nextLayer.size() - 1; ++i)
+	{
+		sum += outputWeights[i] * nextLayer[i].getGradient();
+	}
 
 	return sum;
 }
@@ -75,3 +79,8 @@ void Neuron::calcHiddenGradients(const Layer &nextLayer)
 	gradient = dow * transferFunctionDerivative(outputValue);
 }
 
+double Neuron::getGradient() const
+{
+	return gradient;
+}
+
diff --git a/Neuron.h b/Neuron.h
index 51d68ed..862c17d 100644
--- a/Neuron.h
+++ b/Neuron.h
@@ -23,6 +23,8 @@ public:
 	void calcOutputGradients(double targetValue);
 	void calcHiddenGradients(const Layer &nextLayer);
 
+	double getGradient() const;
+
 private:
 	static double transferFunction(double inputValue);
 	static double transferFunctionDerivative(double inputValue);