@@ -393,58 +393,17 @@ void neuralNetwork<T>::getJSONDescription(Json::Value& jsonModelDescription)
 template <typename T>
 T neuralNetwork<T>::run(const std::vector<T>& inputVector) const
 {
-    std::vector<T> localInputLayer (numInputs);
-    localInputLayer.push_back (1.0); // layer bias
-    std::vector<std::vector<T> > localHiddenLayers (numHiddenLayers, std::vector<T>(numHiddenNodes + 1));
+    std::vector<T> localInputLayer { inputLayer };
+    std::vector<std::vector<T> > localHiddenLayers { hiddenLayers };
     T localOutputNeuron {};
 
-    std::vector<T> pattern;
-    for (size_t h {}; h < numInputs; h++)
-    {
-        pattern.push_back (inputVector[whichInputs[h]]);
-    }
-
-    // set input layer
-    for (size_t i {}; i < numInputs; ++i)
-    {
-        localInputLayer[i] = (pattern[i] - inBases[i]) / inRanges[i];
-    }
+    runInternal (inputVector, localInputLayer, localHiddenLayers, localOutputNeuron);
 
-    // calculate hidden layers
-    for (size_t layerNum {}; auto & layer : localHiddenLayers)
-    {
-        for (size_t j {}; j < numHiddenNodes; ++j)
-        {
-            layer[j] = 0;
-
-            const auto & previousLayer { layerNum == 0 ? localInputLayer : localHiddenLayers[layerNum - 1] };
-
-            for (size_t k {}; auto & input : previousLayer)
-            {
-                layer[j] += input * weights[layerNum][j][k];
-                ++k;
-            }
-
-            layer[j] = activationFunction (layer[j]);
-        }
-
-        layer.back () = 1.0; // for bias weight
-        ++layerNum;
-    }
-
-    // calculate output
-    for (size_t i {}; auto & hiddenNeuron : localHiddenLayers.back ())
-    {
-        localOutputNeuron += hiddenNeuron * wHiddenOutput[i];
-        ++i;
-    }
-
-    // if classifier, outputNeuron = activationFunction(outputNeuron), else...
-    return (localOutputNeuron * outRange) + outBase;
+    return localOutputNeuron;
 }
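// Note on the refactored run() above: it stays const by copying the member buffers
// inputLayer and hiddenLayers (presumably already sized with their trailing bias
// elements) into locals and passing those to runInternal as scratch space, so
// inference never mutates member state. The class declaration would also need a
// matching prototype for runInternal; a sketch, derived from the definition below:
//
//     void runInternal (const std::vector<T>& inputVector,
//                       std::vector<T>& inputLayer,
//                       std::vector<std::vector<T>>& hiddenLayers,
//                       T& outputNeuron) const;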
 
 template <typename T>
-T neuralNetwork<T>::runForTraining (const std::vector<T>& inputVector)
+void neuralNetwork<T>::runInternal (const std::vector<T>& inputVector, std::vector<T>& inputLayer, std::vector<std::vector<T>>& hiddenLayers, T& outputNeuron) const
 {
     std::vector<T> pattern;
     for (size_t h {}; h < numInputs; ++h)
@@ -490,7 +449,6 @@ T neuralNetwork<T>::runForTraining(const std::vector<T>& inputVector)
 
     // if classifier, outputNeuron = activationFunction(outputNeuron), else...
     outputNeuron = (outputNeuron * outRange) + outBase;
-    return outputNeuron;
 }
 
 template <typename T>
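// A minimal, self-contained sketch of the feedforward pass that runInternal now
// centralises for both run() and train(), mirroring the code removed from run()
// above. The free function and its parameters are illustrative only: the member
// function reads the class members (whichInputs, inBases, inRanges, weights,
// wHiddenOutput, outRange, outBase) directly, and std::tanh stands in for
// activationFunction, whose definition is not part of this diff.
#include <cmath>
#include <cstddef>
#include <vector>

template <typename T>
T feedForwardSketch (const std::vector<T>& inputVector,
                     const std::vector<std::size_t>& whichInputs,
                     const std::vector<T>& inBases,
                     const std::vector<T>& inRanges,
                     const std::vector<std::vector<std::vector<T>>>& weights, // [layer][node][input]
                     const std::vector<T>& wHiddenOutput,                     // numHiddenNodes + 1 weights
                     T outRange, T outBase)
{
    const std::size_t numInputs { whichInputs.size() };
    const std::size_t numHiddenLayers { weights.size() };        // assumes at least one hidden layer
    const std::size_t numHiddenNodes { weights.front().size() };

    // Select and normalise the inputs; the extra trailing element is the bias term.
    std::vector<T> inputLayer (numInputs + 1, T (1.0));
    for (std::size_t i {}; i < numInputs; ++i)
    {
        inputLayer[i] = (inputVector[whichInputs[i]] - inBases[i]) / inRanges[i];
    }

    // Propagate through the hidden layers; each layer keeps a bias slot fixed at 1.0.
    std::vector<std::vector<T>> hiddenLayers (numHiddenLayers, std::vector<T> (numHiddenNodes + 1, T (1.0)));
    for (std::size_t layerNum {}; layerNum < numHiddenLayers; ++layerNum)
    {
        const auto & previousLayer { layerNum == 0 ? inputLayer : hiddenLayers[layerNum - 1] };

        for (std::size_t j {}; j < numHiddenNodes; ++j)
        {
            T sum {};
            for (std::size_t k {}; k < previousLayer.size(); ++k)
            {
                sum += previousLayer[k] * weights[layerNum][j][k];
            }
            hiddenLayers[layerNum][j] = std::tanh (sum); // activationFunction in the real class
        }
    }

    // Weighted sum over the last hidden layer (including its bias slot), then rescale.
    T outputNeuron {};
    for (std::size_t k {}; k < hiddenLayers.back().size(); ++k)
    {
        outputNeuron += hiddenLayers.back()[k] * wHiddenOutput[k];
    }
    return (outputNeuron * outRange) + outBase;
}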
@@ -544,7 +502,7 @@ void neuralNetwork<T>::train(const std::vector<trainingExampleTemplate<T > >& tr
         // run through every training instance
         for (auto trainingExample : trainingSet)
         {
-            runForTraining (trainingExample.input);
+            runInternal (trainingExample.input, inputLayer, hiddenLayers, outputNeuron);
             backpropagate (trainingExample.output[whichOutput]);
         }
     }
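// Usage sketch, not part of the commit: exercises the public API visible in this
// diff. The header name and the hypothetical helper trainAndPredict are assumptions;
// construction of the network is left out because no constructor appears here.
#include <vector>
#include "neuralNetwork.h" // assumed header declaring neuralNetwork and trainingExampleTemplate

template <typename T>
T trainAndPredict (neuralNetwork<T>& net,
                   const std::vector<trainingExampleTemplate<T>>& trainingSet,
                   const std::vector<T>& query)
{
    net.train (trainingSet); // each example passes through runInternal, then backpropagate
    return net.run (query);  // inference now shares the same forward pass via runInternal
}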