diff --git a/Layer.cpp b/Layer.cpp
index 74846d8..a38ba24 100644
--- a/Layer.cpp
+++ b/Layer.cpp
@@ -53,7 +53,7 @@ void Layer::connectTo(const Layer & nextLayer)
 
 void Layer::updateInputWeights(Layer & prevLayer)
 {
-	static const double trainingRate = 0.5;
+	static const double trainingRate = 0.2;
 
 	for (size_t currentLayerIndex = 0; currentLayerIndex < sizeWithoutBiasNeuron(); ++currentLayerIndex)
 	{
diff --git a/Net.cpp b/Net.cpp
index 64574bc..4e6d68c 100644
--- a/Net.cpp
+++ b/Net.cpp
@@ -2,9 +2,9 @@
 
 Net::Net(std::initializer_list<size_t> layerSizes)
 {
-	if (layerSizes.size() < 3)
+	if (layerSizes.size() < 2)
 	{
-		throw std::exception("A net needs at least 3 layers");
+		throw std::exception("A net needs at least 2 layers");
 	}
 
 	for (size_t numNeurons : layerSizes)
diff --git a/Neuro.cpp b/Neuro.cpp
index cbd3315..513e4f0 100644
--- a/Neuro.cpp
+++ b/Neuro.cpp
@@ -9,22 +9,27 @@ int main()
 {
 	std::cout << "Neuro running" << std::endl;
 
-	std::vector<double> inputValues = { 0.1, 0.2, 0.8 };
-	std::vector<double> targetValues = { 0.8 };
+	Net myNet({ 3, 2, 1 });
 
-	Net myNet({ inputValues.size(), 4, targetValues.size() });
-
-	for (int i = 0; i < 200; ++i)
+	for (int i = 0; i < 100000; ++i)
 	{
+		std::vector<double> inputValues =
+		{
+			std::rand() / (double)RAND_MAX,
+			std::rand() / (double)RAND_MAX,
+			std::rand() / (double)RAND_MAX
+		};
+
+		std::vector<double> targetValues = { inputValues[2] };
+
 		myNet.feedForward(inputValues);
 
 		std::vector<double> outputValues = myNet.getOutput();
 
-		std::cout << "Result: ";
-		for (double &value : outputValues)
-		{
-			std::cout << value << " ";
-		}
+		double error = outputValues[0] - targetValues[0];
+
+		std::cout << "Error: ";
+		std::cout << std::abs(error);
 		std::cout << std::endl;
 
 		myNet.backProp(targetValues);