#include "NeuralNetwork.h"
#include <cstdlib>
#include <ctime>
#include <stdexcept>
NeuralNetwork::NeuralNetwork(std::vector<uint32_t> layers_lengths, func_ptr activations, func_ptr activations_derivatives)
    : activations(activations), activations_derivatives(activations_derivatives)
{
    if (layers_lengths.size() < 2)
        throw std::invalid_argument("The NN must have at least two layers, for inputs and outputs");
    for (uint32_t length : layers_lengths)
        if (length)
            neurons.push_back(std::vector<double>(length));
        else
            throw std::invalid_argument("Each layer must have at least one neuron");
    //weights[i][ii] holds the incoming weights of neuron ii in layer i + 1,
    //with its bias stored in the last slot
    weights.resize(layers_lengths.size() - 1);
    srand(static_cast<unsigned>(std::time(nullptr)));
    for (size_t i = 0; i < layers_lengths.size() - 1; i++)
    {
        weights[i].resize(layers_lengths[i + 1]);
        for (size_t ii = 0; ii < layers_lengths[i + 1]; ii++)
        {
            weights[i][ii].resize(layers_lengths[i] + 1/*for bias*/);
            for (size_t iii = 0; iii < weights[i][ii].size() - 1/*initially, bias = 0.0*/; iii++)
            {
                //give the rest of the weights a random value between -0.1 and 0.1
                weights[i][ii][iii] = rand() / (double)RAND_MAX * 0.2 - 0.1;
            }
        }
    }
}
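//Forward propagation: each neuron takes the weighted sum of the previous
//layer's outputs plus its own bias, then applies its layer's activation:
//    a[l][j] = f_l( b[l][j] + sum_k( w[l][j][k] * a[l - 1][k] ) )
//The loop below implements exactly this, reading the bias from the last
//slot of each weight vector.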
void NeuralNetwork::forward_pass(std::vector<double> inputs)
{
    if (neurons[0].size() != inputs.size())
        throw std::invalid_argument("Invalid inputs count");
    neurons[0] = inputs;
    for (size_t layer = 1; layer < neurons.size(); layer++)
    {
        for (size_t neuron = 0; neuron < neurons[layer].size(); neuron++)
        {
            //multiply the current neuron's inputs by its weights and sum the products
            neurons[layer][neuron] = weights[layer - 1][neuron].back();//bias
            for (size_t i = 0; i < neurons[layer - 1].size(); i++)
                neurons[layer][neuron] += neurons[layer - 1][i] * weights[layer - 1][neuron][i];
            //apply the activation function to the neuron's value
            neurons[layer][neuron] = activations[layer - 1](neurons[layer][neuron]);
        }
    }
}
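//Backward propagation (gradient descent on the squared error): an output
//neuron's delta is (a - y) * f'(a); a hidden neuron's delta is the weighted
//sum of the next layer's deltas times f'(a); each weight then moves against
//its gradient by learning_rate * delta * input (the bias input being 1).
//Note that the derivative functions receive the already-activated value, so
//they must be written in terms of the output (e.g. a * (1 - a) for sigmoid).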
void NeuralNetwork::backward_pass(std::vector<double> desired_outputs, double learning_rate)
{
    if (neurons.back().size() != desired_outputs.size())
        throw std::invalid_argument("Invalid desired outputs count");
    //this will hold the error function's derivative (delta) of every non-input neuron
    std::vector<std::vector<double>> errors(weights.size());
    //calculate the output neurons' deltas
    for (size_t output = 0; output < neurons.back().size(); output++)
        errors.back().push_back((neurons.back()[output] - desired_outputs[output]) * activations_derivatives.back()(neurons.back()[output]));
    //calculate the hidden neurons' deltas, propagating backwards layer by layer
    for (size_t layer = neurons.size() - 2; layer > 0; layer--)
    {
        for (size_t neuron = 0; neuron < neurons[layer].size(); neuron++)
        {
            double weighted_errors = 0.0;
            for (size_t i = 0; i < errors[layer].size(); i++)
                weighted_errors += errors[layer][i] * weights[layer][i][neuron];
            errors[layer - 1].push_back(weighted_errors * activations_derivatives[layer - 1](neurons[layer][neuron]));
        }
    }
    //tune the weights of all neurons
    for (size_t i = 0; i < weights.size(); i++)
        for (size_t ii = 0; ii < weights[i].size(); ii++)
        {
            for (size_t iii = 0; iii < weights[i][ii].size() - 1; iii++)
                weights[i][ii][iii] -= learning_rate * errors[i][ii] * neurons[i][iii];
            //the bias is tuned with the neuron's own delta (its input is the constant 1)
            weights[i][ii].back() -= learning_rate * errors[i][ii];
        }
}
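/*
A minimal usage sketch (illustration only, not part of this file). It assumes
func_ptr, declared in NeuralNetwork.h, is an indexable container of
double (*)(double) with one entry per non-input layer, e.g.
std::vector<double (*)(double)>, which matches how the code above indexes it
and calls .back() on it. Requires <cmath> for std::exp.

    double sigmoid(double x) { return 1.0 / (1.0 + std::exp(-x)); }
    //written in terms of the activated output, as backward_pass expects
    double sigmoid_derivative(double a) { return a * (1.0 - a); }

    int main()
    {
        NeuralNetwork nn({ 2, 3, 1 },
            { sigmoid, sigmoid },
            { sigmoid_derivative, sigmoid_derivative });
        //one training step on the sample (1, 0) -> 1
        nn.forward_pass({ 1.0, 0.0 });
        nn.backward_pass({ 1.0 }, 0.1);
    }
*/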