nn.cpp
#include "nn.h"   // assumed to declare NN and bring std::vector (and friends) into scope

#include <cmath>    // sqrt
#include <cstdlib>  // rand, RAND_MAX
#include <utility>  // swap
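// Construct a network with the given layer sizes; the flat weight buffer
// holds every weight and bias, laid out layer by layer.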
NN::NN(vector<int> arch) : arch(arch) {
    weights = vector<float>(calculateArch());
}
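// Copy another network's architecture and weights into this one.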
NN* NN::cloneFrom(NN &nn) {
    arch = nn.arch;
    weights = nn.weights;
    return this;
}
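// Smooth squashing function x / sqrt(1 + x^2): maps any real input into (-1, 1).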
float NN::activation(float x) {
    return x / sqrt(1 + x * x);
}
// Total number of parameters: each layer transition needs a full weight
// matrix (arch[i-1] * arch[i]) plus one bias per output neuron (arch[i]).
int NN::calculateArch() {
    int n = 0;
    for(size_t i = 1; i < arch.size(); i++)
        n += arch[i-1] * arch[i] + arch[i];
    return n;
}
// In-place forward pass over the member buffers: `input` must already hold
// the arch[0] input values; the result is left in `output` and returned.
const vector<float>& NN::run() {
    int weightId = 0;
    for(size_t lvl = 1; lvl < arch.size(); lvl++) {
        // assign() zeroes the whole buffer; resize() alone would keep stale
        // values from a previous layer or run and corrupt the sums below.
        output.assign(arch[lvl], 0.0f);
        for(int j = 0; j < arch[lvl]; j++) {
            for(int i = 0; i < arch[lvl-1]; i++) {
                output[j] += input[i] * weights[weightId];
                weightId++;
            }
            // Add the bias for this neuron, then squash.
            output[j] = activation(output[j] + weights[weightId]);
            weightId++;
        }
        input = output;
    }
    swap(input, output);
    input.clear();
    return output;
}
// Stateless forward pass: takes the input vector by value and returns the
// activations of the final layer without touching the member buffers.
vector<float> NN::run(vector<float> input) {
    int weightId = 0;
    for(size_t lvl = 1; lvl < arch.size(); lvl++) {
        vector<float> output(arch[lvl]);  // value-initialized to zero
        for(int j = 0; j < arch[lvl]; j++) {
            for(int i = 0; i < arch[lvl-1]; i++) {
                output[j] += input[i] * weights[weightId];
                weightId++;
            }
            output[j] = activation(output[j] + weights[weightId]);  // bias + squash
            weightId++;
        }
        input = output;
    }
    return input;
}
// Perturb every weight by uniform noise drawn from [-range, range].
void NN::addRandom(float range) {
    for (size_t i = 0; i < weights.size(); i++) {
        // rand()/RAND_MAX is uniform in [0, 1]; scale to [0, 2*range], then shift.
        weights[i] += static_cast<float>(rand()) * range * 2.0f / static_cast<float>(RAND_MAX) - range;
    }
}
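// Example usage (a minimal sketch; names and values here are illustrative,
// and it assumes nn.h declares the interface exactly as defined above):
//
//   NN net({2, 4, 1});                          // 2 inputs, one hidden layer of 4, 1 output
//   net.addRandom(0.5f);                        // jitter all weights within [-0.5, 0.5]
//   vector<float> out = net.run({0.3f, -0.7f}); // stateless forward pass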