Fixed gradient computation
parent 26a51ce476
commit de06daaad3
Net.cpp (4 additions, 4 deletions)
@@ -87,12 +87,12 @@ void Net::backProp(const std::vector<double> &targetValues)
     }

     // calculate hidden neuron gradients
-    for (auto it = end() - 1; it != begin(); --it)
+    for (auto it = end() - 1; (it - 1) != begin(); --it)
     {
         Layer &hiddenLayer = *(it - 1);
         Layer &nextLayer = *it;

-        for (auto neuron : hiddenLayer)
+        for (Neuron &neuron : hiddenLayer)
         {
             neuron.calcHiddenGradients(nextLayer);
         }
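
This hunk fixes two independent bugs in the hidden-gradient pass. First, with the condition `it != begin()`, the final iteration dereferences `*(it - 1)` at the input layer, so input neurons received hidden-layer gradient updates; `(it - 1) != begin()` stops one layer earlier. Second, `for (auto neuron : hiddenLayer)` binds `neuron` by value, so `calcHiddenGradients` wrote each gradient into a temporary copy that was discarded at the end of the iteration; binding by `Neuron &` makes the update reach the neurons actually stored in the layer. A minimal standalone sketch of the copy-versus-reference pitfall (the `Neuron` below is a stand-in, not the repository's class):

    // `auto` in a range-for deduces a value type, so each iteration
    // works on a copy and mutations are silently lost.
    #include <iostream>
    #include <vector>

    struct Neuron {
        double gradient = 0.0;
        void setGradient(double g) { gradient = g; }
    };

    int main() {
        std::vector<Neuron> layer(3);

        for (auto n : layer)      // `n` is a copy; the vector is untouched
            n.setGradient(1.0);
        std::cout << layer[0].gradient << '\n';  // prints 0

        for (Neuron &n : layer)   // reference: writes reach the stored neurons
            n.setGradient(1.0);
        std::cout << layer[0].gradient << '\n';  // prints 1

        return 0;
    }

The same reasoning applies to any range-for over a container whose elements must be mutated: `auto` alone always deduces a value type, never a reference.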
@@ -65,7 +65,7 @@ double Neuron::sumDOW(const Layer & nextLayer) const
 {
     double sum = 0;

-    for (size_t i = 0; i < nextLayer.size() - 1; ++i)
+    for (size_t i = 0; i < outputWeights.size(); ++i)
     {
         sum += outputWeights[i] * nextLayer[i].getGradient();
     }
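
In `sumDOW`, the old bound `nextLayer.size() - 1` tied the loop to the next layer's neuron count (presumably to skip a trailing bias neuron), while the body indexes this neuron's own `outputWeights`; whenever the two sizes disagree, the loop either reads past the end of `outputWeights` or skips a weight. Bounding the loop by `outputWeights.size()` keys the sum to the container actually being indexed. A hedged sketch of the idea, assuming `outputWeights[i]` is this neuron's weight to neuron `i` of the next layer (the `Neuron` below is a simplified stand-in, not the repository's class):

    #include <cstddef>
    #include <iostream>
    #include <vector>

    struct Neuron {
        std::vector<double> outputWeights;  // weight to each next-layer neuron
        double gradient = 0.0;

        double getGradient() const { return gradient; }

        // Sum of downstream weighted gradients: for each neuron this one
        // feeds, accumulate (connecting weight * that neuron's gradient).
        // Looping over outputWeights keeps the index valid for outputWeights,
        // assuming outputWeights.size() <= nextLayer.size() (e.g. the next
        // layer may additionally hold a bias neuron with no incoming weight).
        double sumDOW(const std::vector<Neuron> &nextLayer) const {
            double sum = 0.0;
            for (std::size_t i = 0; i < outputWeights.size(); ++i)
                sum += outputWeights[i] * nextLayer[i].getGradient();
            return sum;
        }
    };

    int main() {
        std::vector<Neuron> next(2);
        next[0].gradient = 0.5;
        next[1].gradient = -0.25;

        Neuron hidden;
        hidden.outputWeights = {0.1, 0.2};

        // 0.1 * 0.5 + 0.2 * (-0.25) = 0
        std::cout << hidden.sumDOW(next) << '\n';
        return 0;
    }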