diff --git a/Layer.cpp b/Layer.cpp
index 89eef7d..f4b7319 100644
--- a/Layer.cpp
+++ b/Layer.cpp
@@ -24,14 +24,6 @@ void Layer::setOutputValues(const std::vector<double> & outputValues)
     }
 }
 
-void Layer::setOutputValues(const double *outputValues)
-{
-    for (size_t neuronIndex = 0; neuronIndex < size(); ++neuronIndex)
-    {
-        at(neuronIndex).setOutputValue(outputValues[neuronIndex]);
-    }
-}
-
 void Layer::feedForward(const Layer &inputLayer)
 {
     for (size_t neuronNumber = 0; neuronNumber < sizeWithoutBiasNeuron(); ++neuronNumber)
diff --git a/Layer.h b/Layer.h
index 595cb26..bed1b66 100644
--- a/Layer.h
+++ b/Layer.h
@@ -13,11 +13,10 @@ public:
     Layer(size_t numNeurons);
 
     void setOutputValues(const std::vector<double> & outputValues);
-    void setOutputValues(const double *outputValues);
     void feedForward(const Layer &inputLayer);
 
     double getWeightedSum(size_t outputNeuron) const;
-    void connectTo(const Layer & nextLayer);
+    void connectTo(const Layer &nextLayer);
 
     void updateInputWeights(Layer &prevLayer);
 
diff --git a/Net.cpp b/Net.cpp
index fbccaa5..4543321 100644
--- a/Net.cpp
+++ b/Net.cpp
@@ -66,21 +66,6 @@ void Net::feedForward(const std::vector<double> &inputValues)
     }
 }
 
-void Net::feedForward(const double *inputValues)
-{
-    Layer &inputLayer = front();
-
-    inputLayer.setOutputValues(inputValues);
-
-    for (auto layerIt = begin(); layerIt != end() - 1; ++layerIt)
-    {
-        const Layer &currentLayer = *layerIt;
-        Layer &nextLayer = *(layerIt + 1);
-
-        nextLayer.feedForward(currentLayer);
-    }
-}
-
 std::vector<double> Net::getOutput()
 {
     std::vector<double> result;
diff --git a/Net.h b/Net.h
index 1ae0c85..41a80c0 100644
--- a/Net.h
+++ b/Net.h
@@ -15,7 +15,6 @@ public:
    void initialize(std::initializer_list<size_t> layerSizes);
 
     void feedForward(const std::vector<double> &inputValues);
-    void feedForward(const double *inputValues);
     std::vector<double> getOutput();
 
     void backProp(const std::vector<double> &targetValues);
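
After this change only the std::vector<double> overloads remain. Below is a minimal usage sketch of the surviving API, assuming Net is default-constructible and "Net.h" is the public header; the {2, 3, 1} topology and the input values are illustrative, not taken from the patch.

#include <vector>
#include "Net.h"

int main()
{
    Net net;

    // Hypothetical topology: 2 input neurons, 3 hidden, 1 output.
    net.initialize({2, 3, 1});

    // Inputs are now passed as a std::vector<double>; the raw double* overload is gone.
    std::vector<double> inputValues = {0.5, -0.25};
    net.feedForward(inputValues);

    // Read the network output back as a std::vector<double>.
    std::vector<double> output = net.getOutput();

    return 0;
}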