Backprop seems to be working, yay

Michael Mandl 2015-10-18 21:20:37 +02:00
parent a79abb5db1
commit 6ef1f9657c
4 changed files with 32 additions and 8 deletions

View file

@@ -53,9 +53,9 @@ void Layer::connectTo(const Layer & nextLayer)
 void Layer::updateInputWeights(Layer & prevLayer)
 {
-    static const double trainingRate = 0.8;
-    for (size_t currentLayerIndex = 0; currentLayerIndex < size() - 1; ++currentLayerIndex)
+    static const double trainingRate = 0.5;
+    for (size_t currentLayerIndex = 0; currentLayerIndex < sizeWithoutBiasNeuron(); ++currentLayerIndex)
     {
         Neuron &targetNeuron = at(currentLayerIndex);
@@ -69,3 +69,21 @@ void Layer::updateInputWeights(Layer & prevLayer)
         }
     }
 }
+
+void Layer::addBiasNeuron()
+{
+    push_back(Neuron(1.0));
+    hasBiasNeuron = true;
+}
+
+size_t Layer::sizeWithoutBiasNeuron() const
+{
+    if (hasBiasNeuron)
+    {
+        return size() - 1;
+    }
+    else
+    {
+        return size();
+    }
+}
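The two helpers added in this hunk keep the bias neuron as the last element of the layer, so training code can loop over sizeWithoutBiasNeuron() instead of hard-coding size() - 1. A minimal, self-contained sketch of that pattern follows; the Neuron stub and the Layer constructor here are simplified assumptions for illustration, not the repository's actual implementations.

// Sketch of the bias-neuron bookkeeping: only addBiasNeuron() and
// sizeWithoutBiasNeuron() mirror the diff; everything else is a stand-in.
#include <cstddef>
#include <iostream>
#include <vector>

struct Neuron
{
    explicit Neuron(double output) : outputValue(output) {}
    double outputValue;
};

class Layer : public std::vector<Neuron>
{
    bool hasBiasNeuron = false;

public:
    explicit Layer(std::size_t numNeurons) : std::vector<Neuron>(numNeurons, Neuron(0.0)) {}

    // Append a constant-output neuron and remember it, so training loops
    // can skip it when updating input weights.
    void addBiasNeuron()
    {
        push_back(Neuron(1.0));
        hasBiasNeuron = true;
    }

    std::size_t sizeWithoutBiasNeuron() const
    {
        return hasBiasNeuron ? size() - 1 : size();
    }
};

int main()
{
    Layer hidden(4);
    hidden.addBiasNeuron();
    // Prints "5 4": the bias neuron counts toward size() but not toward
    // the neurons whose input weights get updated.
    std::cout << hidden.size() << " " << hidden.sizeWithoutBiasNeuron() << std::endl;
}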

View file

@@ -6,6 +6,9 @@
 class Layer : public std::vector < Neuron >
 {
+private:
+    bool hasBiasNeuron = false;
+
 public:
     Layer(size_t numNeurons);
@@ -15,4 +18,8 @@ public:
     void connectTo(const Layer & nextLayer);
     void updateInputWeights(Layer &prevLayer);
+
+    void addBiasNeuron();
+    size_t sizeWithoutBiasNeuron() const;
+
 };

View file

@@ -17,8 +17,7 @@ Net::Net(std::initializer_list<size_t> layerSizes)
         Layer &currentLayer = *layerIt;
         const Layer &nextLayer = *(layerIt + 1);
-        Neuron biasNeuron(1.0);
-        currentLayer.push_back(biasNeuron);
+        currentLayer.addBiasNeuron();
         currentLayer.connectTo(nextLayer);
     }

View file

@@ -9,12 +9,12 @@ int main()
 {
     std::cout << "Neuro running" << std::endl;
-    std::vector<double> inputValues = { 1.0, 4.0, 5.0 };
-    std::vector<double> targetValues = { 3.0 };
+    std::vector<double> inputValues = { 0.1, 0.2, 0.8 };
+    std::vector<double> targetValues = { 0.8 };
     Net myNet({ inputValues.size(), 4, targetValues.size() });
-    for (int i = 0; i < 20; ++i)
+    for (int i = 0; i < 200; ++i)
     {
         myNet.feedForward(inputValues);