// NeuralNetwork.cpp : implementation of the NeuralNetwork class
// (constructor, forward/backward propagation, training, and data helpers)
#include "NeuralNetwork.hpp"
#include <cmath>   // tanhf, std::sqrt
#include <cstdlib> // rand, RAND_MAX
#include <fstream>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>
NeuralNetwork::NeuralNetwork(std::vector<uint> topology, Scalar learningRate)
{
    this->topology = topology;
    this->learningRate = learningRate;
    for (uint i = 0; i < topology.size(); i++) {
        // initialize neuron layers; every layer except the output layer
        // gets one extra element that acts as the bias neuron (fixed at 1.0)
        if (i == topology.size() - 1)
            neuronLayers.push_back(new RowVector(topology[i]));
        else
            neuronLayers.push_back(new RowVector(topology[i] + 1));
        // initialize cache and delta vectors with the same width as the
        // corresponding neuron layer (note: not neuronLayers.size(), which
        // is the number of layers, not the width of this layer)
        cacheLayers.push_back(new RowVector(neuronLayers.back()->size()));
        deltas.push_back(new RowVector(neuronLayers.back()->size()));
        // vector.back() gives a handle to the most recently added element;
        // coeffRef gives a mutable reference to the coefficient at that index
        // (needed here because the layers are stored through pointers)
        if (i != topology.size() - 1) {
            neuronLayers.back()->coeffRef(topology[i]) = 1.0;
            cacheLayers.back()->coeffRef(topology[i]) = 1.0;
        }
        // initialize the weights matrix connecting layer i-1 to layer i
        if (i > 0) {
            if (i != topology.size() - 1) {
                weights.push_back(new Matrix(topology[i - 1] + 1, topology[i] + 1));
                weights.back()->setRandom();
                // the bias column only forwards the bias neuron unchanged
                weights.back()->col(topology[i]).setZero();
                weights.back()->coeffRef(topology[i - 1], topology[i]) = 1.0;
            }
            else {
                weights.push_back(new Matrix(topology[i - 1] + 1, topology[i]));
                weights.back()->setRandom();
            }
        }
    }
}
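// Worked example (illustrative): for topology = {2, 3, 1} the constructor
// builds
//     neuronLayers: 1x3 (2 inputs + bias), 1x4 (3 hidden + bias), 1x1 (output)
//     weights[0]:   3x4, with column 3 zeroed except a 1.0 at (2, 3), so the
//                   bias of the hidden layer is carried through unchanged
//     weights[1]:   4x1, fully random
// cacheLayers and deltas mirror the neuronLayers dimensions.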
void NeuralNetwork::propagateForward(RowVector& input)
{
    // set the input into the input layer
    // block returns a view onto part of the given vector or matrix;
    // it takes 4 arguments: startRow, startCol, blockRows, blockCols
    neuronLayers.front()->block(0, 0, 1, neuronLayers.front()->size() - 1) = input;
    // propagate the data forward: each layer is the previous layer
    // multiplied by the connecting weight matrix
    for (uint i = 1; i < topology.size(); i++) {
        (*neuronLayers[i]) = (*neuronLayers[i - 1]) * (*weights[i - 1]);
        // cache the pre-activation values; updateWeights needs them to
        // evaluate the derivative of the activation function
        (*cacheLayers[i]) = (*neuronLayers[i]);
    }
    // apply the activation function to the hidden layers
    // unaryExpr returns a new expression rather than modifying in place, so
    // the result must be assigned back (std::ptr_fun was also removed in
    // C++17; a lambda works everywhere)
    for (uint i = 1; i < topology.size() - 1; i++) {
        neuronLayers[i]->block(0, 0, 1, topology[i]) =
            neuronLayers[i]->block(0, 0, 1, topology[i])
                .unaryExpr([](Scalar x) { return activationFunction(x); });
    }
}
void NeuralNetwork::calcErrors(RowVector& output)
{
    // error of the output layer: target minus prediction
    (*deltas.back()) = output - (*neuronLayers.back());
    // hidden-layer errors are computed differently: walk backwards from the
    // last hidden layer to the first, propagating each layer's error
    // through the transposed weight matrix
    for (uint i = topology.size() - 2; i > 0; i--) {
        (*deltas[i]) = (*deltas[i + 1]) * (weights[i]->transpose());
    }
}
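//Activation function derivative
// d/dx tanh(x) = 1 - tanh(x)^2; updateWeights evaluates this on the cached
// pre-activation values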
Scalar activationFunctionDerivative(Scalar x)
{
    return 1 - tanhf(x) * tanhf(x);
}
//Training
void NeuralNetwork::train(std::vector<RowVector*> input_data, std::vector<RowVector*> output_data)
{
    for (uint i = 0; i < input_data.size(); i++) {
        std::cout << "Input to neural network is : " << *input_data[i] << std::endl;
        propagateForward(*input_data[i]);
        std::cout << "Expected output is : " << *output_data[i] << std::endl;
        std::cout << "Output produced is : " << *neuronLayers.back() << std::endl;
        propagateBackward(*output_data[i]);
        // the square root of the mean squared error, i.e. the RMSE
        std::cout << "RMSE : "
                  << std::sqrt((*deltas.back()).dot(*deltas.back()) / deltas.back()->size())
                  << std::endl;
    }
}
void NeuralNetwork::updateWeights()
{
    // topology.size() - 1 == weights.size(), so this loop walks over the
    // weight matrices from the first hidden layer up to the output layer
    for (uint i = 0; i < topology.size() - 1; i++) {
        // weights[i] connects layer i to layer i+1; if layer i+1 is the
        // output layer it has no bias neuron and every column is a real
        // neuron, otherwise the last column feeds the bias neuron and is
        // skipped (the two cases only differ in the column bound)
        uint lastCol = (i == topology.size() - 2) ? weights[i]->cols()
                                                  : weights[i]->cols() - 1;
        for (uint c = 0; c < lastCol; c++) {
            for (uint r = 0; r < weights[i]->rows(); r++) {
                // gradient step: learning rate * error at the target neuron
                // * activation derivative at its cached pre-activation value
                // * output of the source neuron
                weights[i]->coeffRef(r, c) += learningRate
                    * deltas[i + 1]->coeffRef(c)
                    * activationFunctionDerivative(cacheLayers[i + 1]->coeffRef(c))
                    * neuronLayers[i]->coeffRef(r);
            }
        }
    }
}
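// In formula form, the update above is the standard delta rule: with eta the
// learning rate, delta_c the error at target neuron c, z_c its cached
// pre-activation value, and a_r the output of source neuron r,
//     w_rc += eta * delta_c * f'(z_c) * a_r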
//Back-propagation
void NeuralNetwork::propagateBackward(RowVector& output)
{
    calcErrors(output);
    updateWeights();
}
//Activation function
// you can substitute your own activation function here!
Scalar activationFunction(Scalar x)
{
    return tanhf(x);
}
//Loading data
void ReadCSV(std::string filename, std::vector<RowVector*>& data)
{
    data.clear();
    std::ifstream file(filename);
    std::string line, word;
    // use the first line to determine the number of columns in the file
    getline(file, line, '\n');
    std::stringstream ss(line);
    std::vector<Scalar> parsed_vec;
    while (getline(ss, word, ',')) {
        parsed_vec.push_back(Scalar(std::stof(word)));
    }
    uint cols = parsed_vec.size();
    data.push_back(new RowVector(cols));
    for (uint i = 0; i < cols; i++) {
        // a RowVector has a single row, so the row index must be 0
        data.back()->coeffRef(0, i) = parsed_vec[i];
    }
    // read the remaining lines of the file
    if (file.is_open()) {
        while (getline(file, line, '\n')) {
            std::stringstream ss(line);
            data.push_back(new RowVector(cols));
            uint i = 0;
            while (getline(ss, word, ',')) {
                data.back()->coeffRef(i) = Scalar(std::stof(word));
                i++;
            }
        }
    }
}
//Generating training data
void genData(std::string filename)
{
    // writes 1000 training pairs: inputs (x, y) drawn uniformly from [0, 1]
    // into "<filename>-in" and the target 2x + 10 + y into "<filename>-out"
    std::ofstream file1(filename + "-in");
    std::ofstream file2(filename + "-out");
    for (uint r = 0; r < 1000; r++) {
        Scalar x = rand() / Scalar(RAND_MAX);
        Scalar y = rand() / Scalar(RAND_MAX);
        file1 << x << ", " << y << std::endl;
        file2 << 2 * x + 10 + y << std::endl;
    }
    file1.close();
    file2.close();
}
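// A minimal usage sketch (illustrative, not part of the original file). It
// assumes the typedefs from NeuralNetwork.hpp (Scalar, RowVector, uint); a
// separate translation unit would normally hold main(), so it is left
// commented out here:
//
// int main()
// {
//     NeuralNetwork n({ 2, 3, 1 }, 0.005);
//     std::vector<RowVector*> in_dat, out_dat;
//     genData("test");
//     ReadCSV("test-in", in_dat);
//     ReadCSV("test-out", out_dat);
//     n.train(in_dat, out_dat);
//     return 0;
// }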