From 6ef1f9657c9c42acc8c63107b3fc752207211e1c Mon Sep 17 00:00:00 2001
From: Michael Mandl
Date: Sun, 18 Oct 2015 21:20:37 +0200
Subject: [PATCH] Backprop seems to be working, yay

---
 Layer.cpp | 24 +++++++++++++++++++++---
 Layer.h   |  7 +++++++
 Net.cpp   |  3 +--
 Neuro.cpp |  6 +++---
 4 files changed, 32 insertions(+), 8 deletions(-)

diff --git a/Layer.cpp b/Layer.cpp
index c68c135..74846d8 100644
--- a/Layer.cpp
+++ b/Layer.cpp
@@ -53,19 +53,37 @@ void Layer::connectTo(const Layer & nextLayer)
 
 void Layer::updateInputWeights(Layer & prevLayer)
 {
-	static const double trainingRate = 0.8;
+	static const double trainingRate = 0.5;
 
-	for (size_t currentLayerIndex = 0; currentLayerIndex < size() - 1; ++currentLayerIndex)
+	for (size_t currentLayerIndex = 0; currentLayerIndex < sizeWithoutBiasNeuron(); ++currentLayerIndex)
 	{
 		Neuron &targetNeuron = at(currentLayerIndex);
 
 		for (size_t prevLayerIndex = 0; prevLayerIndex < prevLayer.size(); ++prevLayerIndex)
 		{
 			Neuron &sourceNeuron = prevLayer.at(prevLayerIndex);
-			
+
 			sourceNeuron.setOutputWeight(currentLayerIndex,
 				sourceNeuron.getOutputWeight(currentLayerIndex)
 				+ sourceNeuron.getOutputValue() * targetNeuron.getGradient() * trainingRate);
 		}
 	}
 }
+
+void Layer::addBiasNeuron()
+{
+	push_back(Neuron(1.0));
+	hasBiasNeuron = true;
+}
+
+size_t Layer::sizeWithoutBiasNeuron() const
+{
+	if (hasBiasNeuron)
+	{
+		return size() - 1;
+	}
+	else
+	{
+		return size();
+	}
+}
diff --git a/Layer.h b/Layer.h
index a3f118f..76c2078 100644
--- a/Layer.h
+++ b/Layer.h
@@ -6,6 +6,9 @@
 
 class Layer : public std::vector < Neuron >
 {
+private:
+	bool hasBiasNeuron = false;
+
 public:
 	Layer(size_t numNeurons);
 
@@ -15,4 +18,8 @@ public:
 
 	void connectTo(const Layer & nextLayer);
 	void updateInputWeights(Layer &prevLayer);
+
+	void addBiasNeuron();
+
+	size_t sizeWithoutBiasNeuron() const;
 };
diff --git a/Net.cpp b/Net.cpp
index 9119e4a..64574bc 100644
--- a/Net.cpp
+++ b/Net.cpp
@@ -17,8 +17,7 @@ Net::Net(std::initializer_list<size_t> layerSizes)
 		Layer &currentLayer = *layerIt;
 		const Layer &nextLayer = *(layerIt + 1);
 
-		Neuron biasNeuron(1.0);
-		currentLayer.push_back(biasNeuron);
+		currentLayer.addBiasNeuron();
 
 		currentLayer.connectTo(nextLayer);
 	}
diff --git a/Neuro.cpp b/Neuro.cpp
index 7803bc8..cbd3315 100644
--- a/Neuro.cpp
+++ b/Neuro.cpp
@@ -9,12 +9,12 @@ int main()
 {
 	std::cout << "Neuro running" << std::endl;
 
-	std::vector<double> inputValues = { 1.0, 4.0, 5.0 };
-	std::vector<double> targetValues = { 3.0 };
+	std::vector<double> inputValues = { 0.1, 0.2, 0.8 };
+	std::vector<double> targetValues = { 0.8 };
 
 	Net myNet({ inputValues.size(), 4, targetValues.size() });
 
-	for (int i = 0; i < 20; ++i)
+	for (int i = 0; i < 200; ++i)
 	{
 		myNet.feedForward(inputValues);