Neuro/Layer.cpp

#include "Layer.h"
// Construct a layer holding the requested number of default-constructed neurons.
Layer::Layer(unsigned int numNeurons)
{
    for (unsigned int i = 0; i < numNeurons; ++i)
    {
        push_back(Neuron());
    }
}

// Assign output values directly to this layer's neurons (typically the input
// layer). The check expects one more neuron than there are values, presumably
// because the last neuron acts as a bias and keeps its own output.
void Layer::setOutputValues(const std::vector<double> &outputValues)
{
    if (size() - 1 != outputValues.size())
    {
        throw std::invalid_argument("The number of output values has to match the layer size");
    }

    auto neuronIt = begin();
    for (const double &value : outputValues)
    {
        neuronIt->setOutputValue(value);
        ++neuronIt;
    }
}

// Advance the network one layer: each neuron receives the weighted sum of the
// previous layer's outputs for its own index.
void Layer::feedForward(const Layer &inputLayer)
{
    int neuronNumber = 0;
    for (auto neuronIt = begin(); neuronIt != end(); ++neuronIt)
    {
        neuronIt->feedForward(inputLayer.getWeightedSum(neuronNumber++));
    }
}

// Sum of every neuron's output in this layer, each weighted by its connection
// to the given neuron of the next layer.
double Layer::getWeightedSum(int outputNeuron) const
{
    double sum = 0.0;
    for (const Neuron &neuron : *this)
    {
        sum += neuron.getWeightedOutputValue(outputNeuron);
    }
    return sum;
}

// Give every neuron in this layer one outgoing weight per neuron of the next
// layer.
void Layer::connectTo(const Layer &nextLayer)
{
    for (Neuron &neuron : *this)
    {
        neuron.createOutputWeights(nextLayer.size());
    }
}
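
// Minimal usage sketch (an illustration, not part of this file). It assumes
// Layer.h declares Layer as a container of Neuron objects with the methods
// used above, and that each layer is constructed with one extra neuron that
// serves as a bias (hence the size() - 1 check in setOutputValues):
//
//   Layer input(3);                     // two value neurons plus a bias neuron
//   Layer output(2);
//   input.connectTo(output);            // one weight per neuron of the next layer
//   input.setOutputValues({0.5, 0.8});  // size() - 1 values; the bias neuron is untouched
//   output.feedForward(input);          // each neuron gets its weighted sum from the input layer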