Added a (hacky) bias neuron

main
mandlm 2015-03-24 13:45:38 +01:00
parent e3a804242c
commit 2f556d1b92
5 changed files with 28 additions and 12 deletions
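
The idea behind the bias neuron, in short: a neuron whose output is pinned to 1.0 lets the bias term of y = w1*x1 + w2*x2 + b be treated as just one more weight, so the layers only ever have to compute weighted sums. A minimal sketch of that folding (illustration only, not code from this repository):

#include <cstddef>
#include <vector>

// outputs is expected to end with the bias neuron's constant 1.0,
// weights with the corresponding bias weight b
double weightedSum(const std::vector<double> &outputs, const std::vector<double> &weights)
{
    double sum = 0.0;
    for (std::size_t i = 0; i < outputs.size() && i < weights.size(); ++i)
    {
        sum += outputs[i] * weights[i];
    }
    return sum;
}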

View File

@@ -10,24 +10,25 @@ Layer::Layer(unsigned int numNeurons)
 void Layer::setOutputValues(const std::vector<double> & outputValues)
 {
-    if (size() != outputValues.size())
+    if (size() - 1 != outputValues.size())
     {
         throw std::exception("The number of output values has to match the layer size");
     }
-    auto valueIt = outputValues.begin();
-    for (Neuron &neuron : *this)
+    auto neuronIt = begin();
+    for (const double &value : outputValues)
     {
-        neuron.setOutputValue(*valueIt++);
+        neuronIt->setOutputValue(value);
+        neuronIt++;
     }
 }
 
 void Layer::feedForward(const Layer &inputLayer)
 {
     int neuronNumber = 0;
-    for (Neuron &neuron : *this)
+    for (auto neuronIt = begin(); neuronIt != end(); ++neuronIt)
     {
-        neuron.feedForward(inputLayer.getWeightedSum(neuronNumber));
+        neuronIt->feedForward(inputLayer.getWeightedSum(neuronNumber++));
     }
 }
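
The size() - 1 check and the new loop work together: the layer now holds one extra bias neuron at the end, the caller still supplies only the real output values, and iterating over outputValues instead of the neurons leaves the trailing bias neuron untouched. A standalone sketch of that pattern, with plain vectors standing in for Layer and Neuron:

#include <vector>

int main()
{
    std::vector<double> layer = { 0.0, 0.0, 0.0, 1.0 };    // three real neurons plus the bias
    std::vector<double> outputValues = { 0.2, 0.4, 0.6 };  // caller passes only the real values

    auto neuronIt = layer.begin();
    for (const double &value : outputValues)
    {
        *neuronIt = value;   // stands in for neuronIt->setOutputValue(value)
        neuronIt++;
    }
    // layer is now { 0.2, 0.4, 0.6, 1.0 }; the bias entry was never written
    return 0;
}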

View File

@@ -17,6 +17,8 @@ Net::Net(std::initializer_list<unsigned int> layerSizes)
         Layer &currentLayer = *layerIt;
         const Layer &nextLayer = *(layerIt + 1);
+        currentLayer.push_back(Neuron(1.0));
+
         currentLayer.connectTo(nextLayer);
     }
 }
@@ -25,7 +27,7 @@ void Net::feedForward(const std::vector<double> &inputValues)
 {
     Layer &inputLayer = front();
-    if (inputLayer.size() != inputValues.size())
+    if (inputLayer.size() - 1 != inputValues.size())
     {
         throw std::exception("The number of input values has to match the input layer size");
     }
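
Because the constructor dereferences layerIt + 1, its loop has to stop before the output layer, so the Neuron(1.0) bias is appended to every layer except the last. Under that reading, a net declared as { 3, 4, 2 } ends up with internal layer sizes 4, 5 and 2; a small illustration of the arithmetic (not repository code):

#include <cstdio>
#include <vector>

int main()
{
    std::vector<unsigned int> declared = { 3, 4, 2 };   // as in the new main()
    for (std::size_t i = 0; i < declared.size(); ++i)
    {
        // every layer except the output layer gains one bias neuron
        unsigned int internal = declared[i] + (i + 1 < declared.size() ? 1u : 0u);
        std::printf("layer %zu: declared %u, internal %u\n", i, declared[i], internal);
    }
    return 0;
}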

View File

@@ -9,9 +9,9 @@ int main()
 {
     std::cout << "Neuro running" << std::endl;
-    Net myNet({ 2, 3, 1 });
-    myNet.feedForward({ 1.0, 0.0 });
+    Net myNet({ 3, 4, 2 });
+    myNet.feedForward({ 1.0, 2.0, 3.0 });
     std::vector<double> result = myNet.getResult();
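
The example program changes accordingly: the input layer declared with 3 neurons carries a fourth bias neuron internally, and feedForward compares size() - 1 against the supplied values, so exactly three inputs are expected. A quick check of that arithmetic (illustration only, not repository code):

#include <cassert>
#include <vector>

int main()
{
    std::vector<double> inputValues = { 1.0, 2.0, 3.0 };
    std::size_t declaredInputSize = 3;                    // Net myNet({ 3, 4, 2 })
    std::size_t inputLayerSize = declaredInputSize + 1;   // bias neuron appended in Net::Net
    assert(inputLayerSize - 1 == inputValues.size());     // the guard in Net::feedForward
    return 0;
}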

View File

@@ -2,6 +2,12 @@
 #include "Neuron.h"
 
+Neuron::Neuron(double value)
+    : outputValue(value)
+{
+}
+
 void Neuron::setOutputValue(double value)
 {
     outputValue = value;
@@ -22,9 +28,14 @@ void Neuron::feedForward(double inputValue)
     outputValue = Neuron::transferFunction(inputValue);
 }
 
-double Neuron::getWeightedOutputValue(int outputNeuron) const
+double Neuron::getWeightedOutputValue(unsigned int outputNeuron) const
 {
-    return outputValue * outputWeights[outputNeuron];
+    if (outputNeuron < outputWeights.size())
+    {
+        return outputValue * outputWeights[outputNeuron];
+    }
+    return 0.0;
 }
 
 void Neuron::createOutputWeights(unsigned int number)
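
The guarded lookup is presumably what earns the commit its "hacky": judging from the Net::Net hunk above, each layer is wired to the next before that next layer receives its own bias neuron, so the bias neuron's index has no entry in outputWeights and the lookup would otherwise run past the end of the vector. Returning 0.0 for that case looks like this as a standalone pattern (sketch, not repository code):

#include <vector>

// stand-in for Neuron::getWeightedOutputValue: a missing weight counts as zero
double weightedOutputValue(double outputValue,
                           const std::vector<double> &outputWeights,
                           unsigned int outputNeuron)
{
    if (outputNeuron < outputWeights.size())
    {
        return outputValue * outputWeights[outputNeuron];
    }
    return 0.0;   // e.g. the index belonging to the next layer's bias neuron
}

int main()
{
    std::vector<double> weights = { 0.5, -0.25 };             // weights to two real neurons
    double inRange = weightedOutputValue(2.0, weights, 1);    // -0.5
    double biasSlot = weightedOutputValue(2.0, weights, 2);   //  0.0
    (void)inRange;
    (void)biasSlot;
    return 0;
}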

View File

@@ -9,11 +9,13 @@ private:
     std::vector<double> outputWeights;
 
 public:
+    Neuron(double value = 1.0);
+
     void setOutputValue(double value);
     static double transferFunction(double inputValue);
     static double transferFunctionDerivative(double inputValue);
     void feedForward(double inputValue);
-    double getWeightedOutputValue(int outputNeuron) const;
+    double getWeightedOutputValue(unsigned int outputNeuron) const;
     void createOutputWeights(unsigned int number);
     double getOutputValue() const;
 };