Added a (hacky) bias neuron

This commit is contained in:
Michael Mandl 2015-03-24 13:45:38 +01:00
parent e3a804242c
commit 2f556d1b92
5 changed files with 28 additions and 12 deletions

View file

@ -10,24 +10,25 @@ Layer::Layer(unsigned int numNeurons)
void Layer::setOutputValues(const std::vector<double> & outputValues) void Layer::setOutputValues(const std::vector<double> & outputValues)
{ {
if (size() != outputValues.size()) if (size() - 1 != outputValues.size())
{ {
throw std::exception("The number of output values has to match the layer size"); throw std::exception("The number of output values has to match the layer size");
} }
auto valueIt = outputValues.begin(); auto neuronIt = begin();
for (Neuron &neuron : *this) for (const double &value : outputValues)
{ {
neuron.setOutputValue(*valueIt++); neuronIt->setOutputValue(value);
neuronIt++;
} }
} }
void Layer::feedForward(const Layer &inputLayer) void Layer::feedForward(const Layer &inputLayer)
{ {
int neuronNumber = 0; int neuronNumber = 0;
for (Neuron &neuron : *this) for (auto neuronIt = begin(); neuronIt != end(); ++neuronIt)
{ {
neuron.feedForward(inputLayer.getWeightedSum(neuronNumber)); neuronIt->feedForward(inputLayer.getWeightedSum(neuronNumber++));
} }
} }

View file

@ -17,6 +17,8 @@ Net::Net(std::initializer_list<unsigned int> layerSizes)
Layer &currentLayer = *layerIt; Layer &currentLayer = *layerIt;
const Layer &nextLayer = *(layerIt + 1); const Layer &nextLayer = *(layerIt + 1);
currentLayer.push_back(Neuron(1.0));
currentLayer.connectTo(nextLayer); currentLayer.connectTo(nextLayer);
} }
} }
@ -25,7 +27,7 @@ void Net::feedForward(const std::vector<double> &inputValues)
{ {
Layer &inputLayer = front(); Layer &inputLayer = front();
if (inputLayer.size() != inputValues.size()) if (inputLayer.size() - 1 != inputValues.size())
{ {
throw std::exception("The number of input values has to match the input layer size"); throw std::exception("The number of input values has to match the input layer size");
} }

View file

@ -9,9 +9,9 @@ int main()
{ {
std::cout << "Neuro running" << std::endl; std::cout << "Neuro running" << std::endl;
Net myNet({ 2, 3, 1 }); Net myNet({ 3, 4, 2 });
myNet.feedForward({ 1.0, 0.0 }); myNet.feedForward({ 1.0, 2.0, 3.0 });
std::vector<double> result = myNet.getResult(); std::vector<double> result = myNet.getResult();

View file

@ -2,6 +2,12 @@
#include "Neuron.h" #include "Neuron.h"
/// Constructs a neuron whose output starts at the given value.
/// The header defaults this to 1.0 so a default-constructed neuron can act
/// as a bias neuron whose output is never overwritten.
Neuron::Neuron(double value) : outputValue(value)
{
}
void Neuron::setOutputValue(double value) void Neuron::setOutputValue(double value)
{ {
outputValue = value; outputValue = value;
@ -22,9 +28,14 @@ void Neuron::feedForward(double inputValue)
outputValue = Neuron::transferFunction(inputValue); outputValue = Neuron::transferFunction(inputValue);
} }
double Neuron::getWeightedOutputValue(int outputNeuron) const double Neuron::getWeightedOutputValue(unsigned int outputNeuron) const
{ {
if (outputNeuron < outputWeights.size())
{
return outputValue * outputWeights[outputNeuron]; return outputValue * outputWeights[outputNeuron];
}
return 0.0;
} }
void Neuron::createOutputWeights(unsigned int number) void Neuron::createOutputWeights(unsigned int number)

View file

@ -9,11 +9,13 @@ private:
std::vector<double> outputWeights; std::vector<double> outputWeights;
public: public:
Neuron(double value = 1.0);
void setOutputValue(double value); void setOutputValue(double value);
static double transferFunction(double inputValue); static double transferFunction(double inputValue);
static double transferFunctionDerivative(double inputValue); static double transferFunctionDerivative(double inputValue);
void feedForward(double inputValue); void feedForward(double inputValue);
double getWeightedOutputValue(int outputNeuron) const; double getWeightedOutputValue(unsigned int outputNeuron) const;
void createOutputWeights(unsigned int number); void createOutputWeights(unsigned int number);
double getOutputValue() const; double getOutputValue() const;
}; };