diff --git a/Layer.cpp b/Layer.cpp index 466dd84..c68c135 100644 --- a/Layer.cpp +++ b/Layer.cpp @@ -51,6 +51,21 @@ void Layer::connectTo(const Layer & nextLayer) } } -void Layer::updateInputWeights(const Layer & prevLayer) +void Layer::updateInputWeights(Layer & prevLayer) { + static const double trainingRate = 0.8; + + for (size_t currentLayerIndex = 0; currentLayerIndex < size() - 1; ++currentLayerIndex) + { + Neuron &targetNeuron = at(currentLayerIndex); + + for (size_t prevLayerIndex = 0; prevLayerIndex < prevLayer.size(); ++prevLayerIndex) + { + Neuron &sourceNeuron = prevLayer.at(prevLayerIndex); + + sourceNeuron.setOutputWeight(currentLayerIndex, + sourceNeuron.getOutputWeight(currentLayerIndex) + + sourceNeuron.getOutputValue() * targetNeuron.getGradient() * trainingRate); + } + } } diff --git a/Layer.h b/Layer.h index 8251a20..a3f118f 100644 --- a/Layer.h +++ b/Layer.h @@ -14,5 +14,5 @@ public: double getWeightedSum(int outputNeuron) const; void connectTo(const Layer & nextLayer); - void updateInputWeights(const Layer &prevLayer); + void updateInputWeights(Layer &prevLayer); }; diff --git a/Neuro.cpp b/Neuro.cpp index 7757734..7803bc8 100644 --- a/Neuro.cpp +++ b/Neuro.cpp @@ -14,18 +14,21 @@ int main() Net myNet({ inputValues.size(), 4, targetValues.size() }); - myNet.feedForward(inputValues); - - std::vector<double> outputValues = myNet.getOutput(); - - std::cout << "Result: "; - for (double &value : outputValues) + for (int i = 0; i < 20; ++i) { - std::cout << value << " "; - } - std::cout << std::endl; + myNet.feedForward(inputValues); - myNet.backProp(targetValues); + std::vector<double> outputValues = myNet.getOutput(); + + std::cout << "Result: "; + for (double &value : outputValues) + { + std::cout << value << " "; + } + std::cout << std::endl; + + myNet.backProp(targetValues); + } } catch (std::exception &ex) { diff --git a/Neuron.cpp b/Neuron.cpp index e57782e..7083f99 100644 --- a/Neuron.cpp +++ b/Neuron.cpp @@ -84,3 +84,13 @@ double Neuron::getGradient() 
const return gradient; } +double Neuron::getOutputWeight(size_t index) const +{ + return outputWeights.at(index); +} + +void Neuron::setOutputWeight(size_t index, double value) +{ + outputWeights.at(index) = value; +} + diff --git a/Neuron.h b/Neuron.h index 862c17d..c0356d3 100644 --- a/Neuron.h +++ b/Neuron.h @@ -25,6 +25,9 @@ public: double getGradient() const; + double getOutputWeight(size_t index) const; + void setOutputWeight(size_t index, double value); + private: static double transferFunction(double inputValue); static double transferFunctionDerivative(double inputValue);