#include "Net.h"

#include <cmath>
#include <fstream>
#include <iostream>
#include <list>
#include <stdexcept>
#include <string>
|
|
|
/// Builds a fully connected net with one Layer per entry in layerSizes.
/// Every layer except the output layer gets a bias neuron and is connected
/// forward to its successor.
/// @param layerSizes neuron count per layer: input, hidden..., output
/// @throws std::runtime_error if fewer than 2 layers are requested
Net::Net(std::initializer_list<size_t> layerSizes)
{
	if (layerSizes.size() < 2)
	{
		// std::exception has no string ctor in standard C++; runtime_error
		// derives from it, so existing catch (const std::exception&) still works.
		throw std::runtime_error("A net needs at least 2 layers");
	}

	for (size_t numNeurons : layerSizes)
	{
		push_back(Layer(numNeurons));
	}

	// Wire each layer to the next; the output layer gets no bias neuron.
	for (auto layerIt = begin(); layerIt != end() - 1; ++layerIt)
	{
		Layer &currentLayer = *layerIt;
		const Layer &nextLayer = *(layerIt + 1);

		currentLayer.addBiasNeuron();
		currentLayer.connectTo(nextLayer);
	}
}
|
|
|
|
|
2015-10-25 16:40:22 +00:00
|
|
|
/// Constructs a net by loading a previously saved one from disk.
/// @param filename path of a file written by save()
Net::Net(const std::string &filename)
{
	load(filename);
}
|
|
|
|
|
2015-03-23 20:58:30 +00:00
|
|
|
void Net::feedForward(const std::vector<double> &inputValues)
|
|
|
|
{
|
|
|
|
Layer &inputLayer = front();
|
|
|
|
|
2015-03-24 12:45:38 +00:00
|
|
|
if (inputLayer.size() - 1 != inputValues.size())
|
2015-03-23 20:58:30 +00:00
|
|
|
{
|
|
|
|
throw std::exception("The number of input values has to match the input layer size");
|
|
|
|
}
|
|
|
|
|
|
|
|
inputLayer.setOutputValues(inputValues);
|
|
|
|
|
|
|
|
for (auto layerIt = begin(); layerIt != end() - 1; ++layerIt)
|
|
|
|
{
|
|
|
|
const Layer ¤tLayer = *layerIt;
|
|
|
|
Layer &nextLayer = *(layerIt + 1);
|
|
|
|
|
|
|
|
nextLayer.feedForward(currentLayer);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-10-15 17:18:26 +00:00
|
|
|
std::vector<double> Net::getOutput()
|
2015-03-23 20:58:30 +00:00
|
|
|
{
|
|
|
|
std::vector<double> result;
|
|
|
|
|
|
|
|
const Layer &outputLayer = back();
|
|
|
|
for (const Neuron &neuron : outputLayer)
|
|
|
|
{
|
|
|
|
result.push_back(neuron.getOutputValue());
|
|
|
|
}
|
|
|
|
|
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
|
|
|
void Net::backProp(const std::vector<double> &targetValues)
|
|
|
|
{
|
2015-10-15 20:16:34 +00:00
|
|
|
Layer &outputLayer = back();
|
2015-03-23 20:58:30 +00:00
|
|
|
|
|
|
|
if (targetValues.size() != outputLayer.size())
|
|
|
|
{
|
|
|
|
throw std::exception("The number of target values has to match the output layer size");
|
|
|
|
}
|
|
|
|
|
2015-10-15 17:18:26 +00:00
|
|
|
std::vector<double> resultValues = getOutput();
|
2015-10-15 20:37:13 +00:00
|
|
|
size_t numResultValues = resultValues.size();
|
2015-10-16 20:59:04 +00:00
|
|
|
|
|
|
|
// calculate rms error
|
2015-03-23 20:58:30 +00:00
|
|
|
double rmsError = 0.0;
|
2015-10-15 20:16:34 +00:00
|
|
|
|
|
|
|
for (unsigned int i = 0; i < numResultValues; ++i)
|
2015-03-23 20:58:30 +00:00
|
|
|
{
|
2015-10-22 20:09:35 +00:00
|
|
|
rmsError += std::pow(resultValues[i] - targetValues[i], 2);
|
2015-03-23 20:58:30 +00:00
|
|
|
}
|
2015-10-15 20:16:34 +00:00
|
|
|
|
2015-10-22 20:09:35 +00:00
|
|
|
rmsError = std::sqrt(rmsError / numResultValues);
|
2015-10-15 20:16:34 +00:00
|
|
|
|
2015-10-16 20:59:04 +00:00
|
|
|
// calculate output neuron gradients
|
2015-10-15 20:16:34 +00:00
|
|
|
for (unsigned int i = 0; i < numResultValues; ++i)
|
|
|
|
{
|
|
|
|
outputLayer[i].calcOutputGradients(targetValues[i]);
|
|
|
|
}
|
|
|
|
|
2015-10-16 20:59:04 +00:00
|
|
|
// calculate hidden neuron gradients
|
2015-10-17 19:02:10 +00:00
|
|
|
for (auto it = end() - 1; (it - 1) != begin(); --it)
|
2015-10-15 20:16:34 +00:00
|
|
|
{
|
2015-10-16 20:59:04 +00:00
|
|
|
Layer &hiddenLayer = *(it - 1);
|
|
|
|
Layer &nextLayer = *it;
|
2015-10-15 20:16:34 +00:00
|
|
|
|
2015-10-17 19:02:10 +00:00
|
|
|
for (Neuron &neuron : hiddenLayer)
|
2015-10-15 20:16:34 +00:00
|
|
|
{
|
2015-10-16 20:59:04 +00:00
|
|
|
neuron.calcHiddenGradients(nextLayer);
|
2015-10-15 20:16:34 +00:00
|
|
|
}
|
|
|
|
}
|
2015-10-16 20:59:04 +00:00
|
|
|
|
|
|
|
// update the input weights
|
|
|
|
for (auto it = end() - 1; it != begin(); --it)
|
|
|
|
{
|
|
|
|
Layer ¤tLayer = *it;
|
|
|
|
Layer &prevLayer = *(it - 1);
|
|
|
|
|
|
|
|
currentLayer.updateInputWeights(prevLayer);
|
2015-10-25 16:40:22 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void Net::save(const std::string &filename)
|
|
|
|
{
|
|
|
|
std::ofstream outFile;
|
|
|
|
outFile.open(filename);
|
|
|
|
if (!outFile.is_open())
|
|
|
|
{
|
|
|
|
throw std::exception("unable to open output file");
|
|
|
|
}
|
|
|
|
|
|
|
|
outFile << size() << std::endl;
|
|
|
|
for (const Layer &layer : *this)
|
|
|
|
{
|
|
|
|
outFile << layer.size() << std::endl;
|
|
|
|
outFile << layer.hasBiasNeuron() << std::endl;
|
|
|
|
|
|
|
|
for (const Neuron &neuron : layer)
|
|
|
|
{
|
|
|
|
size_t numOutputWeights = neuron.getNumOutputWeights();
|
|
|
|
outFile << numOutputWeights << std::endl;
|
|
|
|
|
|
|
|
for (size_t outputWeightIndex = 0; outputWeightIndex < numOutputWeights; ++outputWeightIndex)
|
|
|
|
{
|
|
|
|
outFile << neuron.getOutputWeight(outputWeightIndex) << std::endl;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
outFile.close();
|
|
|
|
}
|
|
|
|
|
|
|
|
void Net::load(const std::string &filename)
|
|
|
|
{
|
|
|
|
std::ifstream inFile;
|
|
|
|
inFile.open(filename, std::ios::binary);
|
|
|
|
if (!inFile.is_open())
|
|
|
|
{
|
|
|
|
throw std::exception("unable to open input file");
|
|
|
|
}
|
|
|
|
|
|
|
|
clear();
|
|
|
|
|
|
|
|
std::string buffer;
|
|
|
|
|
|
|
|
getline(inFile, buffer);
|
|
|
|
size_t numLayers = std::stol(buffer);
|
|
|
|
|
|
|
|
for (size_t layerIndex = 0; layerIndex < numLayers; ++layerIndex)
|
|
|
|
{
|
|
|
|
|
|
|
|
getline(inFile, buffer);
|
|
|
|
size_t numNeurons = std::stol(buffer);
|
|
|
|
|
|
|
|
getline(inFile, buffer);
|
|
|
|
bool hasBiasNeuron = std::stol(buffer) != 0;
|
|
|
|
|
|
|
|
size_t numNeuronsWithoutBiasNeuron = hasBiasNeuron ? numNeurons - 1 : numNeurons;
|
|
|
|
|
|
|
|
Layer newLayer(numNeuronsWithoutBiasNeuron);
|
|
|
|
if (hasBiasNeuron)
|
|
|
|
{
|
|
|
|
newLayer.addBiasNeuron();
|
|
|
|
}
|
|
|
|
|
|
|
|
for (size_t neuronIndex = 0; neuronIndex < numNeurons; ++neuronIndex)
|
|
|
|
{
|
|
|
|
getline(inFile, buffer);
|
|
|
|
size_t numWeights = std::stol(buffer);
|
|
|
|
|
|
|
|
std::list<double> outputWeights;
|
|
|
|
for (size_t weightIndex = 0; weightIndex < numWeights; ++weightIndex)
|
|
|
|
{
|
|
|
|
getline(inFile, buffer);
|
|
|
|
outputWeights.push_back(std::stod(buffer));
|
|
|
|
}
|
|
|
|
|
|
|
|
newLayer.at(neuronIndex).createOutputWeights(outputWeights);
|
|
|
|
}
|
|
|
|
|
|
|
|
push_back(newLayer);
|
|
|
|
}
|
|
|
|
|
|
|
|
inFile.close();
|
2015-03-23 20:58:30 +00:00
|
|
|
}
|