First implementation of weight updates. Very slow rate of change in the output value.
parent de06daaad3
commit a79abb5db1

Layer.cpp: 17 changed lines
@@ -51,6 +51,21 @@ void Layer::connectTo(const Layer & nextLayer)
     }
 }
 
-void Layer::updateInputWeights(const Layer & prevLayer)
+void Layer::updateInputWeights(Layer & prevLayer)
 {
+    static const double trainingRate = 0.8;
+
+    for (size_t currentLayerIndex = 0; currentLayerIndex < size() - 1; ++currentLayerIndex)
+    {
+        Neuron &targetNeuron = at(currentLayerIndex);
+
+        for (size_t prevLayerIndex = 0; prevLayerIndex < prevLayer.size(); ++prevLayerIndex)
+        {
+            Neuron &sourceNeuron = prevLayer.at(prevLayerIndex);
+
+            sourceNeuron.setOutputWeight(currentLayerIndex,
+                sourceNeuron.getOutputWeight(currentLayerIndex) +
+                sourceNeuron.getOutputValue() * targetNeuron.getGradient() * trainingRate);
+        }
+    }
 }
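The body added above is the classic delta-rule step: every weight feeding the current layer is nudged by trainingRate * (source neuron's output) * (target neuron's gradient), and the loop bound size() - 1 leaves the layer's last neuron (presumably a bias neuron) out of the update. A minimal standalone sketch of the same rule on plain vectors follows; the function and parameter names are illustrative, not taken from the repository:

    #include <cstddef>
    #include <vector>

    // weights[src][dst] is the weight from source neuron `src` in the previous
    // layer to target neuron `dst` in the current layer. Each weight moves by
    // learningRate * sourceOutput * targetGradient, mirroring the hunk above.
    void deltaRuleUpdate(std::vector<std::vector<double>> &weights,
                         const std::vector<double> &sourceOutputs,
                         const std::vector<double> &targetGradients,
                         double learningRate)
    {
        for (std::size_t src = 0; src < sourceOutputs.size(); ++src)
        {
            for (std::size_t dst = 0; dst < targetGradients.size(); ++dst)
            {
                weights[src][dst] += learningRate * sourceOutputs[src] * targetGradients[dst];
            }
        }
    }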
Layer.h: 2 changed lines
@@ -14,5 +14,5 @@ public:
     double getWeightedSum(int outputNeuron) const;
     void connectTo(const Layer & nextLayer);
 
-    void updateInputWeights(const Layer &prevLayer);
+    void updateInputWeights(Layer &prevLayer);
 };
Neuro.cpp: 23 changed lines
@@ -14,18 +14,21 @@ int main()
 
         Net myNet({ inputValues.size(), 4, targetValues.size() });
 
-        myNet.feedForward(inputValues);
-
-        std::vector<double> outputValues = myNet.getOutput();
-
-        std::cout << "Result: ";
-        for (double &value : outputValues)
+        for (int i = 0; i < 20; ++i)
         {
-            std::cout << value << " ";
-        }
-        std::cout << std::endl;
+            myNet.feedForward(inputValues);
+
+            myNet.backProp(targetValues);
+            std::vector<double> outputValues = myNet.getOutput();
+
+            std::cout << "Result: ";
+            for (double &value : outputValues)
+            {
+                std::cout << value << " ";
+            }
+            std::cout << std::endl;
 
-        myNet.backProp(targetValues);
+        }
     }
     catch (std::exception &ex)
     {
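With the 20-iteration loop above, the raw output vector is printed once per pass, which makes the "very slow rate of change" from the commit message visible but hard to quantify. A small helper that reduces each pass to a single error number would make the trend easier to read; a minimal sketch, assuming outputs and targets have the same length (this helper is not part of the repository):

    #include <cstddef>
    #include <vector>

    // Mean squared error between the network output and the target values.
    // Printing this once per training iteration gives one number per pass
    // instead of a full output vector.
    double meanSquaredError(const std::vector<double> &output,
                            const std::vector<double> &target)
    {
        double sum = 0.0;
        for (std::size_t i = 0; i < output.size(); ++i)
        {
            const double diff = output[i] - target[i];
            sum += diff * diff;
        }
        return output.empty() ? 0.0 : sum / static_cast<double>(output.size());
    }

Inside the loop it could be called as std::cout << meanSquaredError(myNet.getOutput(), targetValues) << std::endl; after the backProp call.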
Neuron.cpp: 10 changed lines
@@ -84,3 +84,13 @@ double Neuron::getGradient() const
     return gradient;
 }
 
+double Neuron::getOutputWeight(size_t index) const
+{
+    return outputWeights.at(index);
+}
+
+void Neuron::setOutputWeight(size_t index, double value)
+{
+    outputWeights.at(index) = value;
+}
+
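The two accessors above index into an existing outputWeights container via .at(), so an out-of-range index throws std::out_of_range instead of corrupting memory. A minimal sketch of the declarations they rely on (the member name outputWeights and the accessor names come from the diff; the surrounding class layout is assumed):

    #include <cstddef>
    #include <vector>

    class Neuron
    {
    public:
        // Read or overwrite the weight towards neuron `index` in the next layer.
        double getOutputWeight(std::size_t index) const;
        void setOutputWeight(std::size_t index, double value);

    private:
        std::vector<double> outputWeights; // one weight per neuron in the next layer (assumed layout)
    };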