//
// NeuralNetwork.hpp
// NeuralNet
//
// Created by Eshaan Arnav on 9/7/20.
// Copyright © 2020 Eshaan. All rights reserved.
//
#ifndef NeuralNetwork_hpp
#define NeuralNetwork_hpp

#include <cstdio>
#include <vector>
#include <iostream>
#include <ctime>
#include <cmath>

using std::vector; // a header should not pull in all of namespace std
// A single fully connected layer with sigmoid activation.
class Layer {
public:
    double* weights;      // manually managed weight array, released by free_vars()
    double* bias;         // manually managed bias array
    int starting_node;    // number of nodes feeding into this layer
    int ending_node;      // number of nodes this layer outputs
    double e = 2.7182818; // Euler's number, used by sigmoid
    double lr;            // learning rate for gradient updates
    Layer (int starting_node, int ending_node, double lr);
    // Forward pass: maps an input vector to this layer's activations.
    vector<double> predict (vector<double> input);
    // Backward pass for one example; returns the gradient handed back to the previous layer.
    vector<double> backprop (vector<double> inputs, vector<double> outputs, vector<double> target);
    // Releases the manually allocated weight and bias arrays.
    void free_vars ();
    // Logistic activation: 1 / (1 + e^-x).
    double sigmoid (double x);
};
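// A minimal sketch of the forward pass the declarations above imply: each
// output node takes a weighted sum of the inputs plus its bias, squashed
// through the sigmoid. The real definitions live outside this header (in a
// NeuralNetwork.cpp that is not shown), so this stand-in is illustrative
// only; its name and the flat, row-major weight layout are assumptions.
inline vector<double> layer_predict_sketch (const Layer& l, const vector<double>& input) {
    vector<double> out (l.ending_node, 0.0);
    for (int j = 0; j < l.ending_node; ++j) {
        double sum = l.bias[j]; // start from the node's bias
        for (int i = 0; i < l.starting_node; ++i) {
            sum += l.weights[j * l.starting_node + i] * input[i]; // assumed layout
        }
        out[j] = 1.0 / (1.0 + std::exp(-sum)); // logistic sigmoid
    }
    return out;
}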
// A feed-forward network built from sigmoid Layers.
class NeuralNet {
private:
    Layer *layers;       // dynamically allocated array of layers
    int layout_size = 0; // number of entries in the network layout
    int output_layout;   // size of the output layer
    double lr;           // learning rate shared by every layer
    // Elementwise helpers for the loss and gradient computations.
    double mean (vector<double> array);
    vector<double> subtract (vector<double> arrayOne, vector<double> arrayTwo);
    vector<double> square (vector<double> arrayOne);
    vector<double> multiply (vector<double> arrayOne, double two);
public:
    // Builds the network from a layout of node counts, e.g. {2, 3, 1}.
    NeuralNet (vector<int> layout, double lr);
    // Forward propagate: returns the output layer's activations.
    vector<double> predict (vector<double> input);
    // Performs one step of stochastic gradient descent and applies the
    // gradient to the weights.
    double backprop (vector<double> input, vector<double> expected_output);
    // Given the gradient of any function with respect to the network's
    // output, computes the weight and bias gradients and applies them.
    void apply_grad (vector<double> input, vector<double> grad);
};

#endif /* NeuralNetwork_hpp */
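A minimal usage sketch, assuming the member functions declared above are defined in a matching NeuralNetwork.cpp (not shown here). The {2, 3, 1} layout, the 0.1 learning rate, and the training pair are illustrative values, not taken from the repository:

#include "NeuralNetwork.hpp"

int main () {
    // Illustrative network: 2 inputs, a hidden layer of 3 nodes, 1 output.
    NeuralNet net ({2, 3, 1}, 0.1);
    vector<double> input = {1.0, 0.0};
    vector<double> target = {1.0};
    // One step of stochastic gradient descent toward the target.
    double error = net.backprop (input, target);
    // Or supply a custom gradient with respect to the output; here, the
    // derivative of squared error, 2 * (output - target).
    vector<double> output = net.predict (input);
    vector<double> grad = { 2.0 * (output[0] - target[0]) };
    net.apply_grad (input, grad);
    std::cout << "error: " << error << std::endl;
    return 0;
}

apply_grad is what makes the second pattern possible: any differentiable loss can drive the update, as long as the caller supplies its gradient with respect to the network's output.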