diff --git a/Layer.cpp b/Layer.cpp
index a3647d5..d5484c7 100644
--- a/Layer.cpp
+++ b/Layer.cpp
@@ -18,8 +18,7 @@ void Layer::setOutputValues(const std::vector<double> & outputValues)
     auto neuronIt = begin();
 
     for (const double &value : outputValues)
     {
-        neuronIt->setOutputValue(value);
-        neuronIt++;
+        (neuronIt++)->setOutputValue(value);
     }
 }
@@ -48,6 +47,6 @@ void Layer::connectTo(const Layer & nextLayer)
 {
     for (Neuron &neuron : *this)
     {
-        neuron.createOutputWeights(nextLayer.size());
+        neuron.createRandomOutputWeights(nextLayer.size());
     }
 }
diff --git a/Net.cpp b/Net.cpp
index 50dbced..7daa4eb 100644
--- a/Net.cpp
+++ b/Net.cpp
@@ -17,7 +17,8 @@ Net::Net(std::initializer_list<size_t> layerSizes)
         Layer &currentLayer = *layerIt;
         const Layer &nextLayer = *(layerIt + 1);
 
-        currentLayer.push_back(Neuron(1.0));
+        Neuron biasNeuron(1.0);
+        currentLayer.push_back(biasNeuron);
 
         currentLayer.connectTo(nextLayer);
     }
@@ -43,7 +44,7 @@ void Net::feedForward(const std::vector<double> &inputValues)
     }
 }
 
-std::vector<double> Net::getResult()
+std::vector<double> Net::getOutput()
 {
     std::vector<double> result;
@@ -65,7 +66,7 @@ void Net::backProp(const std::vector<double> &targetValues)
         throw std::exception("The number of target values has to match the output layer size");
     }
 
-    std::vector<double> resultValues = getResult();
+    std::vector<double> resultValues = getOutput();
 
     double rmsError = 0.0;
     for (unsigned int i = 0; i < resultValues.size(); ++i)
diff --git a/Net.h b/Net.h
index 6b7a52b..4f7fe2f 100644
--- a/Net.h
+++ b/Net.h
@@ -10,6 +10,6 @@ public:
     Net(std::initializer_list<size_t> layerSizes);
 
     void feedForward(const std::vector<double> &inputValues);
-    std::vector<double> getResult();
+    std::vector<double> getOutput();
     void backProp(const std::vector<double> &targetValues);
 };
\ No newline at end of file
diff --git a/Neuro.cpp b/Neuro.cpp
index b6205e3..7757734 100644
--- a/Neuro.cpp
+++ b/Neuro.cpp
@@ -9,18 +9,23 @@ int main()
     {
         std::cout << "Neuro running" << std::endl;
 
-        Net myNet({ 3, 4, 2 });
+        std::vector<double> inputValues = { 1.0, 4.0, 5.0 };
+        std::vector<double> targetValues = { 3.0 };
 
-        myNet.feedForward({ 1.0, 2.0, 3.0 });
+        Net myNet({ inputValues.size(), 4, targetValues.size() });
 
-        std::vector<double> result = myNet.getResult();
+        myNet.feedForward(inputValues);
+
+        std::vector<double> outputValues = myNet.getOutput();
 
         std::cout << "Result: ";
-        for (double &value : result)
+        for (double &value : outputValues)
         {
             std::cout << value << " ";
         }
         std::cout << std::endl;
+
+        myNet.backProp(targetValues);
     }
     catch (std::exception &ex)
     {
diff --git a/Neuro.vcxproj b/Neuro.vcxproj
index 343f0de..9980c77 100644
--- a/Neuro.vcxproj
+++ b/Neuro.vcxproj
@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="utf-8"?>
-<Project DefaultTargets="Build" ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+<Project DefaultTargets="Build" ToolsVersion="14.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
   <ItemGroup Label="ProjectConfigurations">
     <ProjectConfiguration Include="Debug|Win32">
       <Configuration>Debug</Configuration>
@@ -19,13 +19,13 @@
   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
     <ConfigurationType>Application</ConfigurationType>
     <UseDebugLibraries>true</UseDebugLibraries>
-    <PlatformToolset>v120</PlatformToolset>
+    <PlatformToolset>v140</PlatformToolset>
     <CharacterSet>Unicode</CharacterSet>
   </PropertyGroup>
   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
     <ConfigurationType>Application</ConfigurationType>
     <UseDebugLibraries>false</UseDebugLibraries>
-    <PlatformToolset>v120</PlatformToolset>
+    <PlatformToolset>v140</PlatformToolset>
     <WholeProgramOptimization>true</WholeProgramOptimization>
     <CharacterSet>Unicode</CharacterSet>
   </PropertyGroup>
diff --git a/Neuron.cpp b/Neuron.cpp
index e8be5ca..3983237 100644
--- a/Neuron.cpp
+++ b/Neuron.cpp
@@ -25,7 +25,7 @@ double Neuron::transferFunctionDerivative(double inputValue)
 
 void Neuron::feedForward(double inputValue)
 {
-    outputValue = Neuron::transferFunction(inputValue);
+    outputValue = transferFunction(inputValue);
 }
 
 double Neuron::getWeightedOutputValue(unsigned int outputNeuron) const
@@ -38,11 +38,11 @@ double Neuron::getWeightedOutputValue(unsigned int outputNeuron) const
     return 0.0;
 }
 
-void Neuron::createOutputWeights(unsigned int number)
+void Neuron::createRandomOutputWeights(unsigned int numberOfWeights)
 {
     outputWeights.clear();
 
-    for (unsigned int i = 0; i < number; ++i)
+    for (unsigned int i = 0; i < numberOfWeights; ++i)
     {
         outputWeights.push_back(std::rand() / (double)RAND_MAX);
     }
diff --git a/Neuron.h b/Neuron.h
index ffd01cd..3aecfd3 100644
--- a/Neuron.h
+++ b/Neuron.h
@@ -16,6 +16,6 @@ public:
     static double transferFunctionDerivative(double inputValue);
     void feedForward(double inputValue);
     double getWeightedOutputValue(unsigned int outputNeuron) const;
-    void createOutputWeights(unsigned int number);
+    void createRandomOutputWeights(unsigned int numberOfWeights);
     double getOutputValue() const;
 };
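
For context, a minimal usage sketch of the Net interface after this change. It is not part of the commit: the repeated feedForward/backProp loop and the assumption that backProp() moves the weights toward targetValues are illustrative only; Net, feedForward, getOutput, backProp, and the size-derived layer list all come from the files above.

// Usage sketch (not part of this commit): assumes Net.h as shown above and
// that backProp() adjusts the weights toward the given target values.
#include <iostream>
#include <vector>
#include "Net.h"

int main()
{
    std::vector<double> inputValues = { 1.0, 4.0, 5.0 };
    std::vector<double> targetValues = { 3.0 };

    // Layer sizes derived from the data, as in the updated Neuro.cpp.
    Net myNet({ inputValues.size(), 4, targetValues.size() });

    // Illustrative training loop; the commit itself runs a single pass.
    for (int i = 0; i < 1000; ++i)
    {
        myNet.feedForward(inputValues);
        myNet.backProp(targetValues);
    }

    myNet.feedForward(inputValues);
    for (double value : myNet.getOutput())
    {
        std::cout << value << " ";
    }
    std::cout << std::endl;
}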