#include "Neuron.h"
#include "Layer.h"

#include <cmath>    // std::tanh
#include <cstdlib>  // std::rand, RAND_MAX
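
// Creates a neuron with the given initial output value; the gradient starts at zero and
// output weights are added later via createRandomOutputWeights().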
Neuron::Neuron(double value)
: outputValue(value)
, gradient(0)
{
}
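
// Overwrites the stored output value directly, bypassing the transfer function.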
void Neuron::setOutputValue(double value)
{
outputValue = value;
}
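
// Activation function: squashes the input into the range (-1, 1) using tanh.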
double Neuron::transferFunction(double inputValue)
{
return std::tanh(inputValue);
}
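
// Derivative of the tanh activation. Because d/dx tanh(x) = 1 - tanh(x)^2, callers pass the
// neuron's stored output (already tanh of the input), so the derivative is 1 - value^2.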
double Neuron::transferFunctionDerivative(double inputValue)
{
return 1.0 - (inputValue * inputValue);
}
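
// Updates the output by applying the activation function to the supplied input
// (typically the weighted sum of the previous layer's outputs).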
void Neuron::feedForward(double inputValue)
{
outputValue = transferFunction(inputValue);
}
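
// Returns this neuron's output scaled by the weight of its connection to the given
// neuron in the next layer, or 0.0 if no such weight exists.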
double Neuron::getWeightedOutputValue(unsigned int outputNeuron) const
{
if (outputNeuron < outputWeights.size())
{
return outputValue * outputWeights[outputNeuron];
}
return 0.0;
}
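
// Replaces any existing output weights with numberOfWeights new ones, each drawn
// from [0, 1] via std::rand().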
void Neuron::createRandomOutputWeights(size_t numberOfWeights)
{
outputWeights.clear();
for (unsigned int i = 0; i < numberOfWeights; ++i)
{
outputWeights.push_back(std::rand() / (double)RAND_MAX);
}
}
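
// Returns the neuron's current output value.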
double Neuron::getOutputValue() const
{
return outputValue;
}
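
// Gradient for an output-layer neuron: the error (target minus actual output)
// scaled by the derivative of the activation at the current output.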
void Neuron::calcOutputGradients(double targetValue)
{
double delta = targetValue - outputValue;
gradient = delta * transferFunctionDerivative(outputValue);
}
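
// Sum of the Derivatives Of the Weights: this neuron's contribution to the error of the
// next layer, weighting each downstream gradient by the corresponding connection weight.
// Assumes nextLayer exposes at least outputWeights.size() neurons via operator[].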
double Neuron::sumDOW(const Layer & nextLayer) const
{
double sum = 0;
for (size_t i = 0; i < outputWeights.size(); ++i)
{
sum += outputWeights[i] * nextLayer[i].getGradient();
}
return sum;
}
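
// Gradient for a hidden-layer neuron: the weighted sum of downstream gradients
// scaled by the derivative of the activation at the current output.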
void Neuron::calcHiddenGradients(const Layer &nextLayer)
{
double dow = sumDOW(nextLayer);
gradient = dow * transferFunctionDerivative(outputValue);
}
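
// Returns the gradient computed by calcOutputGradients() or calcHiddenGradients().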
double Neuron::getGradient() const
{
return gradient;
}
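
// Bounds-checked read of a single output weight; throws std::out_of_range for an invalid index.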
double Neuron::getOutputWeight(size_t index) const
{
return outputWeights.at(index);
}
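
// Bounds-checked update of a single output weight; throws std::out_of_range for an invalid index.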
void Neuron::setOutputWeight(size_t index, double value)
{
outputWeights.at(index) = value;
}