From 99ef63e01966dd1de63ccc726a1712c164a8c0e1 Mon Sep 17 00:00:00 2001
From: Michael Mandl
Date: Sun, 25 Oct 2015 17:40:22 +0100
Subject: [PATCH] Added simple (de-)serialization of (trained) nets

---
 Layer.cpp                  | 11 +++--
 Layer.h                    |  3 +-
 Net.cpp                    | 95 +++++++++++++++++++++++++++++++++++++-
 Net.h                      |  6 ++-
 Neuro.cpp                  |  5 +-
 Neuron.cpp                 |  7 ++-
 Neuron.h                   |  1 +
 gui/NeuroUI/netlearner.cpp |  4 +-
 gui/NeuroUI/neuroui.cpp    |  2 +
 mynet.nnet                 | 28 +++++++++++
 10 files changed, 153 insertions(+), 9 deletions(-)
 create mode 100644 mynet.nnet

diff --git a/Layer.cpp b/Layer.cpp
index da1b4ca..28fb630 100644
--- a/Layer.cpp
+++ b/Layer.cpp
@@ -46,7 +46,7 @@ void Layer::connectTo(const Layer & nextLayer)
 {
     for (Neuron &neuron : *this)
     {
-        neuron.createOutputWeights(nextLayer.sizeWithoutBiasNeuron(), 1.0);
+        neuron.createRandomOutputWeights(nextLayer.sizeWithoutBiasNeuron());
     }
 }
 
@@ -72,12 +72,17 @@ void Layer::updateInputWeights(Layer & prevLayer)
 void Layer::addBiasNeuron()
 {
     push_back(Neuron(1.0));
-    hasBiasNeuron = true;
+    m_hasBiasNeuron = true;
+}
+
+bool Layer::hasBiasNeuron() const
+{
+    return m_hasBiasNeuron;
 }
 
 size_t Layer::sizeWithoutBiasNeuron() const
 {
-    if (hasBiasNeuron)
+    if (m_hasBiasNeuron)
     {
         return size() - 1;
     }
diff --git a/Layer.h b/Layer.h
index 76c2078..1290a58 100644
--- a/Layer.h
+++ b/Layer.h
@@ -7,7 +7,7 @@ class Layer : public std::vector < Neuron >
 {
 private:
-    bool hasBiasNeuron = false;
+    bool m_hasBiasNeuron = false;
 
 public:
     Layer(size_t numNeurons);
@@ -21,5 +21,6 @@ public:
 
     void addBiasNeuron();
+    bool hasBiasNeuron() const;
     size_t sizeWithoutBiasNeuron() const;
 };
diff --git a/Net.cpp b/Net.cpp
index 50ea0cc..c1447a6 100644
--- a/Net.cpp
+++ b/Net.cpp
@@ -1,5 +1,9 @@
 #include "Net.h"
 
+#include <fstream>
+#include <list>
+#include <string>
+
 Net::Net(std::initializer_list<size_t> layerSizes)
 {
     if (layerSizes.size() < 2)
@@ -23,6 +27,11 @@ Net::Net(std::initializer_list<size_t> layerSizes)
     }
 }
 
+Net::Net(const std::string &filename)
+{
+    load(filename);
+}
+
 void Net::feedForward(const std::vector<double> &inputValues)
 {
     Layer &inputLayer = front();
@@ -103,5 +112,89 @@ void Net::backProp(const std::vector<double> &targetValues)
         Layer &prevLayer = *(it - 1);
 
         currentLayer.updateInputWeights(prevLayer);
-    }
+    }
+}
+
+void Net::save(const std::string &filename)
+{
+    std::ofstream outFile;
+    outFile.open(filename);
+    if (!outFile.is_open())
+    {
+        throw std::exception("unable to open output file");
+    }
+
+    outFile << size() << std::endl;
+    for (const Layer &layer : *this)
+    {
+        outFile << layer.size() << std::endl;
+        outFile << layer.hasBiasNeuron() << std::endl;
+
+        for (const Neuron &neuron : layer)
+        {
+            size_t numOutputWeights = neuron.getNumOutputWeights();
+            outFile << numOutputWeights << std::endl;
+
+            for (size_t outputWeightIndex = 0; outputWeightIndex < numOutputWeights; ++outputWeightIndex)
+            {
+                outFile << neuron.getOutputWeight(outputWeightIndex) << std::endl;
+            }
+        }
+    }
+
+    outFile.close();
+}
+
+void Net::load(const std::string &filename)
+{
+    std::ifstream inFile;
+    inFile.open(filename, std::ios::binary);
+    if (!inFile.is_open())
+    {
+        throw std::exception("unable to open input file");
+    }
+
+    clear();
+
+    std::string buffer;
+
+    getline(inFile, buffer);
+    size_t numLayers = std::stol(buffer);
+
+    for (size_t layerIndex = 0; layerIndex < numLayers; ++layerIndex)
+    {
+
+        getline(inFile, buffer);
+        size_t numNeurons = std::stol(buffer);
+
+        getline(inFile, buffer);
+        bool hasBiasNeuron = std::stol(buffer) != 0;
+
+        size_t numNeuronsWithoutBiasNeuron = hasBiasNeuron ?
+            numNeurons - 1 : numNeurons;
+
+        Layer newLayer(numNeuronsWithoutBiasNeuron);
+        if (hasBiasNeuron)
+        {
+            newLayer.addBiasNeuron();
+        }
+
+        for (size_t neuronIndex = 0; neuronIndex < numNeurons; ++neuronIndex)
+        {
+            getline(inFile, buffer);
+            size_t numWeights = std::stol(buffer);
+
+            std::list<double> outputWeights;
+            for (size_t weightIndex = 0; weightIndex < numWeights; ++weightIndex)
+            {
+                getline(inFile, buffer);
+                outputWeights.push_back(std::stod(buffer));
+            }
+
+            newLayer.at(neuronIndex).createOutputWeights(outputWeights);
+        }
+
+        push_back(newLayer);
+    }
+
+    inFile.close();
 }
diff --git a/Net.h b/Net.h
index 394beae..d85b5f8 100644
--- a/Net.h
+++ b/Net.h
@@ -8,8 +8,12 @@ class Net : public std::vector < Layer >
 {
 public:
     Net(std::initializer_list<size_t> layerSizes);
+    Net(const std::string &filename);
 
     void feedForward(const std::vector<double> &inputValues);
     std::vector<double> getOutput();
     void backProp(const std::vector<double> &targetValues);
-};
\ No newline at end of file
+
+    void save(const std::string &filename);
+    void load(const std::string &filename);
+};
diff --git a/Neuro.cpp b/Neuro.cpp
index 9a49988..dc175b2 100644
--- a/Neuro.cpp
+++ b/Neuro.cpp
@@ -21,7 +21,7 @@ int main()
         double batchMaxError = 0.0;
         double batchMeanError = 0.0;
 
-        size_t numIterations = 1000000;
+        size_t numIterations = 100000;
         for (size_t iteration = 0; iteration < numIterations; ++iteration)
         {
             std::vector<double> inputValues =
@@ -59,6 +59,9 @@ int main()
 
             myNet.backProp(targetValues);
         }
+
+        myNet.save("mynet.nnet");
+        Net copyNet("mynet.nnet");
     }
     catch (std::exception &ex)
     {
diff --git a/Neuron.cpp b/Neuron.cpp
index 2e2cff8..aa180bf 100644
--- a/Neuron.cpp
+++ b/Neuron.cpp
@@ -111,6 +111,11 @@ double Neuron::getOutputWeight(size_t index) const
 
 void Neuron::setOutputWeight(size_t index, double value)
 {
-    outputWeights.at(index) = value;
+    outputWeights.at(index) = value;
+}
+
+size_t Neuron::getNumOutputWeights() const
+{
+    return outputWeights.size();
 }
diff --git a/Neuron.h b/Neuron.h
index b5e13c2..8dcb5d7 100644
--- a/Neuron.h
+++ b/Neuron.h
@@ -32,6 +32,7 @@ public:
 
     double getOutputWeight(size_t index) const;
     void setOutputWeight(size_t index, double value);
+    size_t getNumOutputWeights() const;
 
 private:
     static double transferFunction(double inputValue);
diff --git a/gui/NeuroUI/netlearner.cpp b/gui/NeuroUI/netlearner.cpp
index c4a760d..884f0af 100644
--- a/gui/NeuroUI/netlearner.cpp
+++ b/gui/NeuroUI/netlearner.cpp
@@ -12,7 +12,7 @@ void NetLearner::run()
         double batchMaxError = 0.0;
         double batchMeanError = 0.0;
 
-        size_t numIterations = 1000000;
+        size_t numIterations = 100000;
         for (size_t iteration = 0; iteration < numIterations; ++iteration)
         {
             std::vector<double> inputValues =
@@ -57,6 +57,8 @@ void NetLearner::run()
 
             emit progress((double)iteration / (double)numIterations);
         }
+
+        myNet.save("mynet.nnet");
     }
     catch (std::exception &ex)
     {
diff --git a/gui/NeuroUI/neuroui.cpp b/gui/NeuroUI/neuroui.cpp
index 84b658a..75452b3 100644
--- a/gui/NeuroUI/neuroui.cpp
+++ b/gui/NeuroUI/neuroui.cpp
@@ -6,6 +6,8 @@ NeuroUI::NeuroUI(QWidget *parent) :
     ui(new Ui::NeuroUI)
 {
     ui->setupUi(this);
+
+    ui->logView->addItem("Ready.");
 }
 
 NeuroUI::~NeuroUI()
diff --git a/mynet.nnet b/mynet.nnet
new file mode 100644
index 0000000..e1458bb
--- /dev/null
+++ b/mynet.nnet
@@ -0,0 +1,28 @@
+3
+3
+1
+3
+1.04423
+0.628599
+0.480053
+3
+1.049
+0.69511
+0.462104
+3
+-2.3429
+0.830251
+0.596034
+4
+1
+1
+1.61567
+1
+0.42416
+1
+1.03857
+1
+0.732838
+1
+0
+0