Implemented dynamic learning

Michael Mandl 2015-10-18 22:05:18 +02:00
parent 6ef1f9657c
commit 3d30346f2d
3 changed files with 18 additions and 13 deletions


@@ -53,7 +53,7 @@ void Layer::connectTo(const Layer & nextLayer)
 void Layer::updateInputWeights(Layer & prevLayer)
 {
-    static const double trainingRate = 0.5;
+    static const double trainingRate = 0.2;
     for (size_t currentLayerIndex = 0; currentLayerIndex < sizeWithoutBiasNeuron(); ++currentLayerIndex)
     {
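
Note on the hunk above: only the constant is declared in the shown context; the weight update that uses it lies outside the hunk. As a rough, hypothetical sketch of the kind of gradient-descent step a training rate like this usually scales (Neuron, gradient, and outputWeights are assumed names, not taken from this commit):

#include <cstddef>
#include <vector>

// Hypothetical, simplified neuron: only what the update rule below needs.
struct Neuron
{
    double outputValue = 0.0;             // activation from the forward pass
    double gradient = 0.0;                // delta computed during backpropagation
    std::vector<double> outputWeights;    // one weight per neuron of the next layer
};

// Conventional delta-rule step: w += rate * gradient(next neuron) * output(previous neuron).
void updateWeightSketch(std::vector<Neuron> & prevLayer, const Neuron & nextNeuron,
                        std::size_t nextIndex, double trainingRate)
{
    for (Neuron & prev : prevLayer)
    {
        prev.outputWeights[nextIndex] += trainingRate * nextNeuron.gradient * prev.outputValue;
    }
}

Lowering the rate from 0.5 to 0.2 makes each step smaller, which tends to stabilize training at the cost of more iterations, consistent with the loop count raised to 100000 in main() below.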


@@ -2,9 +2,9 @@
 Net::Net(std::initializer_list<size_t> layerSizes)
 {
-    if (layerSizes.size() < 3)
+    if (layerSizes.size() < 2)
     {
-        throw std::exception("A net needs at least 3 layers");
+        throw std::exception("A net needs at least 2 layers");
     }
     for (size_t numNeurons : layerSizes)
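
Note on the hunk above: the relaxed check means a net may now consist of just an input and an output layer, with no hidden layer. Also worth flagging: std::exception has no string constructor in standard C++ (that is a Microsoft extension), so a portable version of this guard would look roughly like the sketch below with std::runtime_error swapped in — an assumption about how one might write it, not the code of this commit.

#include <cstddef>
#include <initializer_list>
#include <stdexcept>

// Portable variant of the constructor guard shown above.
void checkLayerCount(std::initializer_list<std::size_t> layerSizes)
{
    if (layerSizes.size() < 2)
    {
        // std::runtime_error carries the message in standard C++
        throw std::runtime_error("A net needs at least 2 layers");
    }
}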


@@ -9,22 +9,27 @@ int main()
 {
     std::cout << "Neuro running" << std::endl;
-    std::vector<double> inputValues = { 0.1, 0.2, 0.8 };
-    std::vector<double> targetValues = { 0.8 };
-    Net myNet({ inputValues.size(), 4, targetValues.size() });
-    for (int i = 0; i < 200; ++i)
+    Net myNet({ 3, 2, 1 });
+    for (int i = 0; i < 100000; ++i)
     {
+        std::vector<double> inputValues =
+        {
+            std::rand() / (double)RAND_MAX,
+            std::rand() / (double)RAND_MAX,
+            std::rand() / (double)RAND_MAX
+        };
+        std::vector<double> targetValues = { inputValues[2] };
         myNet.feedForward(inputValues);
         std::vector<double> outputValues = myNet.getOutput();
-        std::cout << "Result: ";
-        for (double &value : outputValues)
-        {
-            std::cout << value << " ";
-        }
+        double error = outputValues[0] - targetValues[0];
+        std::cout << "Error: ";
+        std::cout << std::abs(error);
         std::cout << std::endl;
         myNet.backProp(targetValues);
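
Note on the hunk above: each iteration now draws three pseudo-random inputs in [0, 1] and trains the net to reproduce the third one, printing the absolute error of the single output. std::rand() is never seeded here, so every run sees the same input sequence. Below is a hedged sketch of the same training pattern with an explicit seed and a smoothed error readout; the Net interface (feedForward, getOutput, backProp) and the "Net.h" header name are assumed from this repository, not shown in the diff.

#include <cmath>
#include <cstdlib>
#include <ctime>
#include <iostream>
#include <vector>

#include "Net.h"   // assumed header name

int main()
{
    std::srand(static_cast<unsigned>(std::time(nullptr)));  // std::rand() is otherwise unseeded
    Net myNet({ 3, 2, 1 });

    double smoothedError = 1.0;
    for (int i = 0; i < 100000; ++i)
    {
        std::vector<double> inputValues =
        {
            std::rand() / (double)RAND_MAX,
            std::rand() / (double)RAND_MAX,
            std::rand() / (double)RAND_MAX
        };
        std::vector<double> targetValues = { inputValues[2] };

        myNet.feedForward(inputValues);
        std::vector<double> outputValues = myNet.getOutput();

        // exponential moving average damps per-sample noise
        double error = std::abs(outputValues[0] - targetValues[0]);
        smoothedError = 0.99 * smoothedError + 0.01 * error;

        if (i % 1000 == 0)
        {
            std::cout << "Iteration " << i << ", smoothed error: " << smoothedError << std::endl;
        }

        myNet.backProp(targetValues);
    }
}

The moving average makes the downward trend of the error easier to read than the raw per-iteration printout in the committed loop.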