-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathFullyConnected.java
More file actions
134 lines (128 loc) · 5.63 KB
/
FullyConnected.java
File metadata and controls
134 lines (128 loc) · 5.63 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
/*
By Brendan C. Reidy
Created 12/10/2019
Last Modified 4/22/2020
Fully Connected Layer Object:
Responsible for handling mathematical operations involved with a single fully connected layer
*/
/*
 * Fully connected (dense) layer.
 * Stores a weight matrix and bias vector, performs the forward pass
 * (dot product + bias + activation) and in-place SGD weight updates
 * during backpropagation. Keeps an independent snapshot of the
 * best-seen weights/bias for serialization.
 */
public class FullyConnected implements Layer {
    // Weights are clamped to [-WEIGHT_CLIP, WEIGHT_CLIP] after each update to avoid blow-up.
    private static final float WEIGHT_CLIP = 10f;

    public String name = "Fully Connected";        // Display name of the layer
    private ActivationFunction activationFunction; // Activation applied after the affine transform
    private Layer previousLayer;                   // Previous layer in the network (set by NeuralNetwork)
    private int length;                            // Number of neurons in this layer
    private float learningRate;                    // SGD learning rate
    public float[][] weights;                      // Weight matrix (mutated in place during training)
    public float[] bias;                           // Bias vector (mutated in place during training)
    private float[] neurons;                       // Current activations of this layer
    private float[][] bestWeights;                 // Deep-copied snapshot of the best weights seen
    private float[] bestBias;                      // Deep-copied snapshot of the best bias seen

    /**
     * Creates an untrained layer of the given size.
     * Weights stay null until setPreviousLayer() is called (the fan-in is
     * unknown before then); NeuralNetwork wires this up internally.
     *
     * @param aLength                number of neurons in this layer
     * @param activationFunctionName name resolved via ActivationFunctions.getByName
     */
    public FullyConnected(int aLength, String activationFunctionName)
    {
        this.activationFunction = ActivationFunctions.getByName(activationFunctionName);
        this.length = aLength;
        this.neurons = new float[aLength];
        this.weights = null; // Allocated in setPreviousLayer() once fan-in is known
        this.bias = NeuralNetwork.GenerateBias2D(aLength);
        this.learningRate = 0.1f;
    }

    /**
     * Creates a layer from pre-trained weights and bias.
     *
     * @param aWeights               pre-trained weight matrix (used directly, not copied)
     * @param aBias                  pre-trained bias vector; its length defines the layer size
     * @param activationFunctionName name resolved via ActivationFunctions.getByName
     */
    public FullyConnected(float[][] aWeights, float[] aBias, String activationFunctionName)
    {
        this.activationFunction = ActivationFunctions.getByName(activationFunctionName);
        this.length = aBias.length;
        this.neurons = new float[aBias.length];
        this.weights = aWeights;
        this.bias = aBias;
        this.learningRate = 0.1f;
    }

    /**
     * Wires this layer to its predecessor, allocating weights if needed and
     * initializing the best-weight snapshot.
     *
     * @param aLayer the previous layer; ignored (with an error message) if null
     */
    public void setPreviousLayer(Layer aLayer)
    {
        if(aLayer == null)
        {
            System.out.println("[FATAL] Error in setting previous layer: previous is null");
            return;
        }
        this.previousLayer = aLayer;
        if(this.weights == null)
            this.weights = NeuralNetwork.GenerateWeights2D(previousLayer.size(), this.length);
        // BUG FIX: snapshot must be a deep copy — plain assignment aliased the live
        // arrays, so the "best" state silently tracked every training update.
        this.bestWeights = deepCopy(this.weights);
        this.bestBias = this.bias.clone();
    }

    /** Sets the neuron activations directly (generally used for the input layer). */
    public void setNeurons(float[] aNeurons)
    {
        this.neurons = aNeurons;
    }

    /** Returns the current neuron activations (generally read from the output layer). */
    public float[] getNeurons()
    {
        return this.neurons;
    }

    /**
     * Forward pass: neurons = activation(input · weights + bias).
     *
     * @param aLayer activations of the previous layer
     * @return this layer's activations
     */
    public float[] feedForward(float[] aLayer)
    {
        this.neurons = MatrixMath.dotProduct(aLayer, this.weights);
        this.neurons = MatrixMath.sum(this.neurons, this.bias);
        this.neurons = activationFunction.activate(this.neurons);
        return this.neurons;
    }

    /** Returns the live weight matrix (not a copy). */
    public float[][] getWeights()
    {
        return this.weights;
    }

    /**
     * Backpropagation step: given this layer's error, updates weights and bias
     * in place and returns the accumulated error for the previous layer.
     *
     * @param currentError error signal for each neuron in this layer
     * @return error signal for each neuron in the previous layer
     */
    public float[] propagateBack(float[] currentError)
    {
        float[] previousError = new float[previousLayer.size()];
        float[] activationError = activationFunction.activationError(this.neurons);
        float[] prevNeurons = previousLayer.getNeurons(); // Hoisted: invariant across both loops
        for(int i = 0; i < this.length; i++)
        {
            float neuronError = activationError[i] * currentError[i]; // Error w.r.t. this neuron
            for(int j = 0; j < previousLayer.size(); j++)
            {
                previousError[j] += weights[i][j] * neuronError;               // Accumulate error for layer n-1
                weights[i][j] += neuronError * prevNeurons[j] * learningRate;  // SGD weight update
                // Clamp to keep weights from exploding
                if(weights[i][j] > WEIGHT_CLIP)
                    weights[i][j] = WEIGHT_CLIP;
                else if(weights[i][j] < -WEIGHT_CLIP)
                    weights[i][j] = -WEIGHT_CLIP;
            }
            bias[i] += neuronError * learningRate; // SGD bias update
        }
        return previousError;
    }

    /**
     * Snapshots the current weights and bias as the best seen so far.
     * BUG FIX: takes deep copies — the original aliased the live arrays, so
     * subsequent training overwrote the "best" state and saveWeightsToFile /
     * saveBiasToFile could never persist anything but the latest values.
     */
    public void setBest()
    {
        this.bestWeights = deepCopy(this.weights);
        this.bestBias = this.bias.clone();
    }

    /** Sets the learning rate used by propagateBack(). */
    public void setLearningRate(float aLearningRate)
    {
        this.learningRate = aLearningRate;
    }

    /** Total number of neurons (sizeX * sizeY). */
    public int size()
    {
        return sizeX() * sizeY();
    }

    /** Width of the layer (its length). */
    public int sizeX()
    {
        return this.length;
    }

    /** Height of the layer — always 1 for a fully connected layer. */
    public int sizeY()
    {
        return 1;
    }

    /** Depth of the layer — always 1 for a fully connected layer. */
    public int sizeZ()
    {
        return 1;
    }

    /** Returns the important properties of the layer. */
    @Override
    public String toString()
    {
        return this.name + ":" +
            "\n\t Activation Function: " + this.activationFunction.toString() +
            "\n\t Size: " + this.length +
            "\n\t Learning Rate: " + this.learningRate;
    }

    /** Saves the best-seen weights (flattened) to the given file. */
    public void saveWeightsToFile(String aFileName)
    {
        MatrixIO.saveToFile(MatrixIO.flattenArray(this.bestWeights), aFileName);
    }

    /** Saves the best-seen bias to the given file. */
    public void saveBiasToFile(String aFileName)
    {
        MatrixIO.saveToFile(this.bestBias, aFileName);
    }

    /** Returns a row-by-row deep copy of a 2D float matrix (null-safe). */
    private static float[][] deepCopy(float[][] src)
    {
        if(src == null)
            return null;
        float[][] copy = new float[src.length][];
        for(int i = 0; i < src.length; i++)
            copy[i] = src[i].clone();
        return copy;
    }
}