-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathNeuralNetwork.h
More file actions
73 lines (62 loc) · 1.82 KB
/
NeuralNetwork.h
File metadata and controls
73 lines (62 loc) · 1.82 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
#ifndef NEURALNETWORK_H
#define NEURALNETWORK_H
#include <string>
#include "Layer.h"
#include "CostFunction.h"
// NOTE(review): declaring a project class inside `namespace std` is undefined
// behavior per the C++ standard ([namespace.std] — programs may not add
// declarations to namespace std). This should be a project namespace (e.g.
// `nn`), but renaming would break every caller referring to
// std::NeuralNetwork, so it is flagged here rather than changed.
namespace std
{
// Fully-connected neural network: owns an array of Layer pointers plus a
// CostFunction, holds host-side training/test data, and mirrors that data in
// device memory (the d_-prefixed members) for GPU-based training.
class NeuralNetwork
{
private:
Layer** layers; // array of `size` layer pointers; first/last also cached below
Layer* inputLayer; // presumably layers[0] — TODO confirm against the .cpp
Layer* outputLayer; // presumably layers[size-1] — TODO confirm against the .cpp
CostFunction* costFunction; // cost used during training; set via setCostFunction()
int size; // number of layers currently added
// One-shot state flags: test/train warnings already shown, GPU buffers loaded.
bool didTestWarn, didTrainNotify, gpuLoaded, didLoadGPU;
// CUDA VARIABLES
// Device-side (GPU) mirrors of the host data below; populated by loadGPU()
// and released by unloadGPU(). `d_` prefix = device pointer.
float** d_inputTrainData;
float** d_inputTestData;
float** d_trainLabels;
float** d_testLabels;
float* d_outputError;
float* d_outputCost;
float* d_totalEpochCost; // accumulated cost across an epoch (device side)
// PRIVATE METHODS
// train() dispatches to one of these depending on whether the GPU is loaded.
void trainCPU();
void trainGPU();
public:
// PUBLIC MEMBER VARIABLES
// NOTE(review): raw owning pointers exposed as public members — callers can
// mutate or leak them; consider accessors/ownership documentation in the .cpp.
int trainingSamples, testSamples; // row counts of the data arrays below
float** inputTrainData; // [trainingSamples][inputDim] — dims set by setTrainingData()
float** inputTestData; // [testSamples][inputDim] — dims set by setTestData()
float** trainLabels; // expected outputs for the training set
float** testLabels; // expected outputs for the test set
float* outputError; // per-output error of the most recent forward/backward pass
float* outputCost; // per-output cost of the most recent pass
// CONSTRUCTORS & DESTRUCTORS
NeuralNetwork();
~NeuralNetwork();
// PUBLIC METHODS
void train(); // run training over the configured data (CPU or GPU path)
void test(int size, int numCopies); // evaluate on test data; prints/records results
int test2(int size); // alternate test routine; returns a count/score — see .cpp
void addLayer(Layer* aLayer); // append a layer; network takes a pointer, ownership TBD
void setCostFunction(CostFunction* aCostFunction);
void setTrainingData(float** input, float** output, int numberSamples);
void setTestData(float** input, float** output, int numberSamples);
void setLearningRate(float aRate); // forwarded to layers — TODO confirm
void setInput(float* anInput); // load one sample into the input layer
float* getOutput(); // pointer to the output layer's activations
void feedForward(); // forward pass through all layers
// NOTE(review): "propogate" is a typo for "propagate"; renaming would break
// existing callers, so the spelling is kept.
void propogateBack(float* correctOutput); // backward pass / weight update
void printTopology(); // debug dump of the layer structure
// CUDA METHODS
void loadGPU(); // allocate device buffers and copy host data to the GPU
void unloadGPU(); // free device buffers
// STATIC METHODS
// Random-initialization helpers for layer parameters.
static float** GenerateWeights(int dimX, int dimY);
static float* GenerateBias(int size);
};
}
#endif