From 370451c2e6f5e4a73b75123f6e1a4294fb2c7914 Mon Sep 17 00:00:00 2001
From: Michael Mandl
Date: Fri, 16 Oct 2015 22:59:04 +0200
Subject: [PATCH] Calculation of hidden neuron gradients (partial)

---
 Layer.cpp  |  4 ++++
 Layer.h    |  2 ++
 Net.cpp    | 19 ++++++++++++++++---
 Neuron.cpp | 15 +++++++++++++++
 Neuron.h   | 12 +++++++++---
 5 files changed, 46 insertions(+), 6 deletions(-)

diff --git a/Layer.cpp b/Layer.cpp
index 259f68e..466dd84 100644
--- a/Layer.cpp
+++ b/Layer.cpp
@@ -50,3 +50,7 @@ void Layer::connectTo(const Layer & nextLayer)
 		neuron.createRandomOutputWeights(nextLayer.size());
 	}
 }
+
+void Layer::updateInputWeights(const Layer & prevLayer)
+{
+}
diff --git a/Layer.h b/Layer.h
index 3524e04..8251a20 100644
--- a/Layer.h
+++ b/Layer.h
@@ -13,4 +13,6 @@ public:
 	void feedForward(const Layer &inputLayer);
 	double getWeightedSum(int outputNeuron) const;
 	void connectTo(const Layer & nextLayer);
+
+	void updateInputWeights(const Layer &prevLayer);
 };
diff --git a/Net.cpp b/Net.cpp
index 5488ac0..fbbda5b 100644
--- a/Net.cpp
+++ b/Net.cpp
@@ -68,6 +68,8 @@ void Net::backProp(const std::vector<double> &targetValues)
 
 	std::vector<double> resultValues = getOutput();
 	size_t numResultValues = resultValues.size();
+
+	// calculate rms error
 	double rmsError = 0.0;
 
 	for (unsigned int i = 0; i < numResultValues; ++i)
@@ -78,19 +80,30 @@
 	rmsError = sqrt(rmsError / numResultValues);
 
+	// calculate output neuron gradients
 	for (unsigned int i = 0; i < numResultValues; ++i)
 	{
 		outputLayer[i].calcOutputGradients(targetValues[i]);
 	}
 
+	// calculate hidden neuron gradients
 	for (auto it = end() - 1; it != begin(); --it)
 	{
-		Layer &hiddenLayer = *it;
-		Layer &prevLayer = *(it - 1);
+		Layer &hiddenLayer = *(it - 1);
+		Layer &nextLayer = *it;
 
 		for (auto neuron : hiddenLayer)
 		{
-			//neuron.calcHiddenGradients(prevLayer);
+			neuron.calcHiddenGradients(nextLayer);
 		}
 	}
+
+	// update the input weights
+	for (auto it = end() - 1; it != begin(); --it)
+	{
+		Layer &currentLayer = *it;
+		Layer &prevLayer = *(it - 1);
+
+		currentLayer.updateInputWeights(prevLayer);
+	}
 }
 
diff --git a/Neuron.cpp b/Neuron.cpp
index c10133c..5fc8944 100644
--- a/Neuron.cpp
+++ b/Neuron.cpp
@@ -60,3 +60,18 @@ void Neuron::calcOutputGradients(double targetValue)
 	gradient = delta * transferFunctionDerivative(outputValue);
 }
 
+double Neuron::sumDOW(const Layer & nextLayer) const
+{
+	double sum = 0;
+
+	// sum it up
+
+	return sum;
+}
+
+void Neuron::calcHiddenGradients(const Layer &nextLayer)
+{
+	double dow = sumDOW(nextLayer);
+	gradient = dow * transferFunctionDerivative(outputValue);
+}
+
diff --git a/Neuron.h b/Neuron.h
index acd8af8..51d68ed 100644
--- a/Neuron.h
+++ b/Neuron.h
@@ -2,6 +2,8 @@
 
 #include <vector>
 
+class Layer;
+
 class Neuron
 {
 private:
@@ -13,13 +15,17 @@ public:
 	Neuron(double value = 1.0);
 
 	void setOutputValue(double value);
 
-	static double transferFunction(double inputValue);
-	static double transferFunctionDerivative(double inputValue);
 	void feedForward(double inputValue);
 	double getWeightedOutputValue(unsigned int outputNeuron) const;
 	void createRandomOutputWeights(size_t numberOfWeights);
 	double getOutputValue() const;
 	void calcOutputGradients(double targetValue);
-	//void calcHiddenGradients(const Layer &prevLayer);
+	void calcHiddenGradients(const Layer &nextLayer);
+
+private:
+	static double transferFunction(double inputValue);
+	static double transferFunctionDerivative(double inputValue);
+	double sumDOW(const Layer &nextLayer) const;
+
 };
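
A note on the hidden-gradient loop in Net.cpp: the range-for iterates by value, so calcHiddenGradients() runs on a copy of each Neuron and the gradient it stores is thrown away with that copy. The loop variable has to be a reference for the update to stick:

	for (auto &neuron : hiddenLayer)
	{
		neuron.calcHiddenGradients(nextLayer);
	}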
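
Neuron::sumDOW() is still the stub the subject line admits to. A minimal sketch of the missing body, assuming the private members include the outputWeights vector that createRandomOutputWeights() implies, that a Layer can be indexed the way Net.cpp's outputLayer[i] suggests, and that a public getGradient() accessor gets added; none of these are confirmed by this patch:

	double Neuron::sumDOW(const Layer &nextLayer) const
	{
		double sum = 0.0;

		// Sum our contribution to the error of each neuron we feed:
		// the weight of the outgoing connection times the gradient
		// already computed at its far end.
		for (size_t n = 0; n < nextLayer.size(); ++n)
		{
			sum += outputWeights[n] * nextLayer[n].getGradient();
		}

		return sum;
	}

If each layer carries a trailing bias neuron, the loop should stop at nextLayer.size() - 1 so no error is fed back through the bias.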
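
Layer::updateInputWeights() is likewise still an empty body. One possible shape for it, assuming the incoming weights live on the previous layer's neurons as their outputWeights, with a hypothetical learning rate eta and a hypothetical Neuron::adjustOutputWeight() mutator. Note that prevLayer can then no longer be taken as const, because those output weights are exactly what gets adjusted:

	void Layer::updateInputWeights(Layer &prevLayer)
	{
		static const double eta = 0.15;	// hypothetical learning rate

		// Nudge the weight of every connection coming into this layer
		// along the gradient of the neuron it feeds.
		for (size_t n = 0; n < size(); ++n)
		{
			for (auto &prevNeuron : prevLayer)
			{
				// getGradient() is the same assumed accessor as in the
				// sumDOW() sketch; adjustOutputWeight() adds the delta
				// to the stored weight for output neuron n.
				double delta = eta * prevNeuron.getOutputValue() * (*this)[n].getGradient();
				prevNeuron.adjustOutputWeight(n, delta);
			}
		}
	}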