-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathRELU.java
More file actions
46 lines (45 loc) · 1.03 KB
/
RELU.java
File metadata and controls
46 lines (45 loc) · 1.03 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
/*
By Brendan C. Reidy
Created 12/10/2019
Last Modified 12/10/2019
ReLU Activation Function
Activates neurons with the ReLU function; 'deactivates' them with the inverse derivative
*/
public class RELU implements ActivationFunction {
    // Display/lookup name of this activation function.
    private final String name = "RELU";

    /**
     * ReLU for a single value: f(x) = max(0, x).
     *
     * @param x pre-activation input
     * @return {@code x} when {@code x > 0}, otherwise 0
     */
    public float relu(float x) {
        return (x > 0) ? x : 0f;
    }

    /**
     * Derivative of ReLU, expressed in terms of the activation output
     * {@code y}. The original {@code y > 0} and {@code y == 0} branches both
     * returned 1, so they are folded into a single {@code y >= 0} check
     * (identical behavior, two fewer branches).
     *
     * <p>NOTE(review): returning 1 at {@code y == 0} is a valid subgradient
     * choice, but the common convention is 0 — confirm this is intentional.
     *
     * @param y post-activation output of {@link #relu(float)}
     * @return 1 when {@code y >= 0}, otherwise 0
     */
    public float inverseReluDerivative(float y) {
        return (y >= 0) ? 1f : 0f;
    }

    /**
     * Applies ReLU to every element of {@code aLayer} IN PLACE.
     *
     * @param aLayer layer activations; mutated in place
     * @return the same array instance, after activation
     */
    public float[] activate(float[] aLayer) {
        for (int i = 0; i < aLayer.length; i++) {
            aLayer[i] = relu(aLayer[i]);
        }
        return aLayer;
    }

    /**
     * Replaces every element of {@code aLayer} IN PLACE with the ReLU
     * derivative evaluated at that element (used for back-propagating error).
     *
     * @param aLayer layer activations; mutated in place
     * @return the same array instance, after the derivative is applied
     */
    public float[] activationError(float[] aLayer) {
        for (int i = 0; i < aLayer.length; i++) {
            aLayer[i] = inverseReluDerivative(aLayer[i]);
        }
        return aLayer;
    }

    /** @return the name of this activation function ({@code "RELU"}) */
    public String getName() {
        return this.name;
    }

    @Override
    public String toString() {
        return this.name;
    }
}