From e3a804242cac2e5f516752de89df69aac52f6700 Mon Sep 17 00:00:00 2001
From: Michael Mandl
Date: Mon, 23 Mar 2015 21:58:30 +0100
Subject: [PATCH] Split up into different source files, entry-point for back
 propagation

---
 Layer.cpp             |  52 ++++++++++++++
 Layer.h               |  16 +++++
 Net.cpp               |  76 ++++++++++++++++++++
 Net.h                 | 154 ++----------------------------------------
 Neuro.vcxproj         |   5 ++
 Neuro.vcxproj.filters |  15 ++++
 Neuron.cpp            |  43 ++++++++++++
 Neuron.h              |  19 ++++++
 8 files changed, 231 insertions(+), 149 deletions(-)
 create mode 100644 Layer.cpp
 create mode 100644 Layer.h
 create mode 100644 Net.cpp
 create mode 100644 Neuron.cpp
 create mode 100644 Neuron.h

diff --git a/Layer.cpp b/Layer.cpp
new file mode 100644
index 0000000..e917c8c
--- /dev/null
+++ b/Layer.cpp
@@ -0,0 +1,52 @@
+#include "Layer.h"
+
+Layer::Layer(unsigned int numNeurons)
+{
+	for (unsigned int i = 0; i < numNeurons; ++i)
+	{
+		push_back(Neuron());
+	}
+}
+
+void Layer::setOutputValues(const std::vector<double> & outputValues)
+{
+	if (size() != outputValues.size())
+	{
+		throw std::exception("The number of output values has to match the layer size");
+	}
+
+	auto valueIt = outputValues.begin();
+	for (Neuron &neuron : *this)
+	{
+		neuron.setOutputValue(*valueIt++);
+	}
+}
+
+void Layer::feedForward(const Layer &inputLayer)
+{
+	int neuronNumber = 0;
+	for (Neuron &neuron : *this)
+	{
+		neuron.feedForward(inputLayer.getWeightedSum(neuronNumber++));
+	}
+}
+
+double Layer::getWeightedSum(int outputNeuron) const
+{
+	double sum = 0.0;
+
+	for (const Neuron &neuron : *this)
+	{
+		sum += neuron.getWeightedOutputValue(outputNeuron);
+	}
+
+	return sum;
+}
+
+void Layer::connectTo(const Layer & nextLayer)
+{
+	for (Neuron &neuron : *this)
+	{
+		neuron.createOutputWeights(nextLayer.size());
+	}
+}
diff --git a/Layer.h b/Layer.h
new file mode 100644
index 0000000..39db9a1
--- /dev/null
+++ b/Layer.h
@@ -0,0 +1,16 @@
+#pragma once
+
+#include <vector>
+
+#include "Neuron.h"
+
+class Layer : public std::vector < Neuron >
+{
+public:
+	Layer(unsigned int numNeurons);
+
+	void setOutputValues(const std::vector<double> & outputValues);
+	void feedForward(const Layer &inputLayer);
+	double getWeightedSum(int outputNeuron) const;
+	void connectTo(const Layer & nextLayer);
+};
diff --git a/Net.cpp b/Net.cpp
new file mode 100644
index 0000000..713828e
--- /dev/null
+++ b/Net.cpp
@@ -0,0 +1,76 @@
+#include "Net.h"
+#include <cmath>
+
+Net::Net(std::initializer_list<unsigned int> layerSizes)
+{
+	if (layerSizes.size() < 3)
+	{
+		throw std::exception("A net needs at least 3 layers");
+	}
+
+	for (unsigned int numNeurons : layerSizes)
+	{
+		push_back(Layer(numNeurons));
+	}
+
+	for (auto layerIt = begin(); layerIt != end() - 1; ++layerIt)
+	{
+		Layer &currentLayer = *layerIt;
+		const Layer &nextLayer = *(layerIt + 1);
+
+		currentLayer.connectTo(nextLayer);
+	}
+}
+
+void Net::feedForward(const std::vector<double> &inputValues)
+{
+	Layer &inputLayer = front();
+
+	if (inputLayer.size() != inputValues.size())
+	{
+		throw std::exception("The number of input values has to match the input layer size");
+	}
+
+	inputLayer.setOutputValues(inputValues);
+
+	for (auto layerIt = begin(); layerIt != end() - 1; ++layerIt)
+	{
+		const Layer &currentLayer = *layerIt;
+		Layer &nextLayer = *(layerIt + 1);
+
+		nextLayer.feedForward(currentLayer);
+	}
+}
+
+std::vector<double> Net::getResult()
+{
+	std::vector<double> result;
+
+	const Layer &outputLayer = back();
+	for (const Neuron &neuron : outputLayer)
+	{
+		result.push_back(neuron.getOutputValue());
+	}
+
+	return result;
+}
+
+void Net::backProp(const std::vector<double> &targetValues)
+{
+	const Layer &outputLayer = back();
+
+	if (targetValues.size() != outputLayer.size())
+	{
+		throw std::exception("The number of target values has to match the output layer size");
+	}
+
+	std::vector<double> resultValues = getResult();
+
+	double rmsError = 0.0;
+	for (unsigned int i = 0; i < resultValues.size(); ++i)
+	{
+		double delta = resultValues[i] - targetValues[i];
+		rmsError += delta * delta;
+	}
+	rmsError = std::sqrt(rmsError / resultValues.size());
+}
diff --git a/Net.h b/Net.h
index ecfb44f..6b7a52b 100644
--- a/Net.h
+++ b/Net.h
@@ -2,158 +2,14 @@
 
 #include <vector>
 
-class Neuron
-{
-private:
-	double outputValue;
-	std::vector<double> outputWeights;
-
-public:
-	void setOutputValue(double value)
-	{
-		outputValue = value;
-	}
-
-	static double transferFunction(double inputValue)
-	{
-		return std::tanh(inputValue);
-	}
-
-	void feedForward(double inputValue)
-	{
-		outputValue = Neuron::transferFunction(inputValue);
-	}
-
-	double getWeightedOutputValue(int outputNeuron) const
-	{
-		return outputValue * outputWeights[outputNeuron];
-	}
-
-	void createOutputWeights(unsigned int number)
-	{
-		outputWeights.clear();
-
-		for (unsigned int i = 0; i < number; ++i)
-		{
-			outputWeights.push_back(std::rand() / (double)RAND_MAX);
-		}
-	}
-
-	double getOutputValue() const
-	{
-		return outputValue;
-	}
-};
-
-class Layer : public std::vector < Neuron >
-{
-public:
-	Layer(unsigned int numNeurons)
-	{
-		for (unsigned int i = 0; i < numNeurons; ++i)
-		{
-			push_back(Neuron());
-		}
-	}
-
-	void setOutputValues(const std::vector<double> & outputValues)
-	{
-		if (size() != outputValues.size())
-		{
-			throw std::exception("The number of output values has to match the layer size");
-		}
-
-		auto valueIt = outputValues.begin();
-		for (Neuron &neuron : *this)
-		{
-			neuron.setOutputValue(*valueIt++);
-		}
-	}
-
-	void feedForward(const Layer &inputLayer)
-	{
-		int neuronNumber = 0;
-		for (Neuron &neuron : *this)
-		{
-			neuron.feedForward(inputLayer.getWeightedSum(neuronNumber));
-		}
-	}
-
-	double getWeightedSum(int outputNeuron) const
-	{
-		double sum = 0.0;
-
-		for (const Neuron &neuron : *this)
-		{
-			sum += neuron.getWeightedOutputValue(outputNeuron);
-		}
-
-		return sum;
-	}
-
-	void connectTo(const Layer & nextLayer)
-	{
-		for (Neuron &neuron : *this)
-		{
-			neuron.createOutputWeights(nextLayer.size());
-		}
-	}
-};
+#include "Layer.h"
 
 class Net : public std::vector < Layer >
 {
 public:
-	Net(std::initializer_list<unsigned int> layerSizes)
-	{
-		if (layerSizes.size() < 3)
-		{
-			throw std::exception("A net needs at least 3 layers");
-		}
+	Net(std::initializer_list<unsigned int> layerSizes);
 
-		for (unsigned int numNeurons : layerSizes)
-		{
-			push_back(Layer(numNeurons));
-		}
-
-		for (auto layerIt = begin(); layerIt != end() - 1; ++layerIt)
-		{
-			Layer &currentLayer = *layerIt;
-			const Layer &nextLayer = *(layerIt + 1);
-
-			currentLayer.connectTo(nextLayer);
-		}
-	}
-
-	void feedForward(const std::vector<double> &inputValues)
-	{
-		Layer &inputLayer = front();
-
-		if (inputLayer.size() != inputValues.size())
-		{
-			throw std::exception("The number of input values has to match the input layer size");
-		}
-
-		inputLayer.setOutputValues(inputValues);
-
-		for (auto layerIt = begin(); layerIt != end() - 1; ++layerIt)
-		{
-			const Layer &currentLayer = *layerIt;
-			Layer &nextLayer = *(layerIt + 1);
-
-			nextLayer.feedForward(currentLayer);
-		}
-	}
-
-	std::vector<double> getResult()
-	{
-		std::vector<double> result;
-
-		const Layer &outputLayer = back();
-		for (const Neuron &neuron : outputLayer)
-		{
-			result.push_back(neuron.getOutputValue());
-		}
-
-		return result;
-	}
+	void feedForward(const std::vector<double> &inputValues);
+	std::vector<double> getResult();
+	void backProp(const std::vector<double> &targetValues);
 };
\ No newline at end of file
diff --git a/Neuro.vcxproj b/Neuro.vcxproj
index 5b97cd8..343f0de 100644
--- a/Neuro.vcxproj
+++ b/Neuro.vcxproj
@@ -78,10 +78,15 @@
   </ItemDefinitionGroup>
   <ItemGroup>
     <ClCompile Include="Neuro.cpp" />
+    <ClCompile Include="Layer.cpp" />
+    <ClCompile Include="Net.cpp" />
+    <ClCompile Include="Neuron.cpp" />
   </ItemGroup>
   <ItemGroup>
    <ClInclude Include="Net.h" />
+    <ClInclude Include="Layer.h" />
+    <ClInclude Include="Neuron.h" />
   </ItemGroup>
   <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
   <ImportGroup Label="ExtensionTargets">
   </ImportGroup>
diff --git a/Neuro.vcxproj.filters b/Neuro.vcxproj.filters
index fe9b3fe..103c35f 100644
--- a/Neuro.vcxproj.filters
+++ b/Neuro.vcxproj.filters
@@ -18,10 +18,25 @@
     <ClCompile Include="Neuro.cpp">
       <Filter>Source Files</Filter>
     </ClCompile>
+    <ClCompile Include="Layer.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="Net.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="Neuron.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
   </ItemGroup>
   <ItemGroup>
     <ClInclude Include="Net.h">
      <Filter>Header Files</Filter>
    </ClInclude>
+    <ClInclude Include="Layer.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
+    <ClInclude Include="Neuron.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
   </ItemGroup>
 </Project>
\ No newline at end of file
diff --git a/Neuron.cpp b/Neuron.cpp
new file mode 100644
index 0000000..405f51c
--- /dev/null
+++ b/Neuron.cpp
@@ -0,0 +1,43 @@
+#include "Neuron.h"
+#include <cmath>
+#include <cstdlib>
+
+void Neuron::setOutputValue(double value)
+{
+	outputValue = value;
+}
+
+double Neuron::transferFunction(double inputValue)
+{
+	return std::tanh(inputValue);
+}
+
+double Neuron::transferFunctionDerivative(double inputValue)
+{
+	return 1.0 - (inputValue * inputValue);
+}
+
+void Neuron::feedForward(double inputValue)
+{
+	outputValue = Neuron::transferFunction(inputValue);
+}
+
+double Neuron::getWeightedOutputValue(int outputNeuron) const
+{
+	return outputValue * outputWeights[outputNeuron];
+}
+
+void Neuron::createOutputWeights(unsigned int number)
+{
+	outputWeights.clear();
+
+	for (unsigned int i = 0; i < number; ++i)
+	{
+		outputWeights.push_back(std::rand() / (double)RAND_MAX);
+	}
+}
+
+double Neuron::getOutputValue() const
+{
+	return outputValue;
+}
diff --git a/Neuron.h b/Neuron.h
new file mode 100644
index 0000000..98ed7e3
--- /dev/null
+++ b/Neuron.h
@@ -0,0 +1,19 @@
+#pragma once
+
+#include <vector>
+
+class Neuron
+{
+private:
+	double outputValue;
+	std::vector<double> outputWeights;
+
+public:
+	void setOutputValue(double value);
+	static double transferFunction(double inputValue);
+	static double transferFunctionDerivative(double inputValue);
+	void feedForward(double inputValue);
+	double getWeightedOutputValue(int outputNeuron) const;
+	void createOutputWeights(unsigned int number);
+	double getOutputValue() const;
+};
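
Note on usage: Net::backProp is only an entry point so far. It computes the RMS
error E = sqrt((1/n) * sum_i (result_i - target_i)^2) over the output layer and
then discards it; the actual weight updates are still missing. As a reference
for how the split-up classes fit together, here is a minimal driver sketch. The
file name main.cpp and the 2-4-1 layer sizes are illustrative assumptions, not
part of this patch:

	// main.cpp (hypothetical driver, not included in this commit)
	#include <iostream>
	#include <vector>

	#include "Net.h"

	int main()
	{
		// The Net constructor demands at least 3 layers: here 2 inputs, 4 hidden, 1 output.
		Net net{ 2, 4, 1 };

		// Push one input pattern through the net and read the output layer.
		net.feedForward(std::vector<double>{ 1.0, 0.0 });
		for (double value : net.getResult())
		{
			std::cout << value << std::endl;
		}

		// Entry point for back propagation: currently only computes the RMS error.
		net.backProp(std::vector<double>{ 1.0 });

		return 0;
	}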