// Neuro/Layer.cpp

#include "Layer.h"
#include <stdexcept>
Layer::Layer(size_t numNeurons)
{
    // Fill the layer with default-constructed neurons.
    for (size_t i = 0; i < numNeurons; ++i)
    {
        push_back(Neuron());
    }
}
void Layer::setOutputValues(const std::vector<double> &outputValues)
{
    // One value per neuron, excluding the bias neuron at the end of the layer.
    if (size() - 1 != outputValues.size())
    {
        throw std::runtime_error("The number of output values has to match the layer size without the bias neuron");
    }

    auto neuronIt = begin();
    for (const double &value : outputValues)
    {
        (neuronIt++)->setOutputValue(value);
    }
}
void Layer::feedForward(const Layer &inputLayer)
{
    // Feed every non-bias neuron with the weighted sum of the input layer's outputs.
    for (size_t neuronNumber = 0; neuronNumber < sizeWithoutBiasNeuron(); ++neuronNumber)
    {
        at(neuronNumber).feedForward(inputLayer.getWeightedSum(neuronNumber));
    }
}
double Layer::getWeightedSum(size_t outputNeuron) const
{
    double sum = 0.0;
    for (const Neuron &neuron : *this)
    {
        sum += neuron.getWeightedOutputValue(outputNeuron);
    }
    // Normalize by the number of neurons in this layer.
    return sum / size();
}
void Layer::connectTo(const Layer &nextLayer)
{
    for (Neuron &neuron : *this)
    {
        neuron.createRandomOutputWeights(nextLayer.sizeWithoutBiasNeuron());
    }
}
void Layer::updateInputWeights(Layer &prevLayer)
{
    static const double trainingRate = 0.2;
    for (size_t targetNeuronIndex = 0; targetNeuronIndex < sizeWithoutBiasNeuron(); ++targetNeuronIndex)
    {
        const Neuron &targetNeuron = at(targetNeuronIndex);
        for (size_t sourceNeuronIndex = 0; sourceNeuronIndex < prevLayer.size(); ++sourceNeuronIndex)
        {
            Neuron &sourceNeuron = prevLayer.at(sourceNeuronIndex);
            // Gradient-based update: new weight = old weight + source output * target gradient * training rate.
            sourceNeuron.setOutputWeight(targetNeuronIndex,
                sourceNeuron.getOutputWeight(targetNeuronIndex) +
                sourceNeuron.getOutputValue() * targetNeuron.getGradient() * trainingRate);
        }
    }
}
void Layer::addBiasNeuron()
{
    // The bias neuron is appended last and has a constant output value of 1.0.
    push_back(Neuron(1.0));
    m_hasBiasNeuron = true;
}
bool Layer::hasBiasNeuron() const
{
    return m_hasBiasNeuron;
}
size_t Layer::sizeWithoutBiasNeuron() const
{
    if (m_hasBiasNeuron)
    {
        return size() - 1;
    }
    else
    {
        return size();
    }
}
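
// A minimal usage sketch (not part of the original file): assuming only the Layer
// interface defined above and the Neuron interface it relies on, it shows how two
// layers could be wired together for a single forward pass. The guard macro
// LAYER_USAGE_SKETCH and the function name are hypothetical, chosen for illustration.
#ifdef LAYER_USAGE_SKETCH
static void layerUsageSketch()
{
    Layer input(2);   // two value neurons
    Layer output(1);  // one value neuron

    input.addBiasNeuron();              // append the constant 1.0 bias neuron
    input.connectTo(output);            // random outgoing weights towards the output layer
    input.setOutputValues({0.5, 0.8});  // matches size() - 1 because of the bias neuron

    output.feedForward(input);          // each output neuron receives the normalized weighted sum
}
#endif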