-
Notifications
You must be signed in to change notification settings - Fork 0
/
neural_net.h
132 lines (109 loc) · 2.77 KB
/
neural_net.h
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
// Identifiers beginning with an underscore followed by an uppercase letter
// are reserved for the implementation ([lex.name]), so the guard must not
// start with '_'.
#ifndef NEURAL_NET_H_
#define NEURAL_NET_H_
#include "includes.h"
#include "util.h"
// Selects the leaky-ReLU activation path below.  Note: the code tests this
// with #ifdef, so any defined value (even 0) enables it; undefine to get the
// sigmoid path instead.
#define RELU 1
// Fixed upper bound on hidden layers; sizes the per-layer index tables.
#define MAX_HIDDEN_LAYERS 100
// A fully-connected feed-forward neural network with a configurable number
// of hidden layers.  Neurons are addressed by a single flat index; links are
// stored in dense num_neurons_ x num_neurons_ adjacency/weight tables.
// Supports plain backprop, AdaGrad, and simulated-annealing training, plus
// save/restore of the full state to disk.
class neural_net {
public:
  // num_hidden_neurons: array of num_hidden_layers layer widths.
  // file_name: path used by write_to_disk()/read_from_disk().
  neural_net (int num_input_neurons, int num_output_neurons, int num_hidden_layers, int *num_hidden_neurons, char *file_name);
  // Inference: evaluates the network, writing num_output_neurons_ floats
  // into output_values.
  void forward_propagation (float *input_values, float *output_values);
  // Training methods.  Each runs a forward pass (results in output_values)
  // and then adjusts weights toward actual_output_values.
  void back_propagation (float *input_values, float *actual_output_values, float *output_values);
  void adagrad (float *input_values, float *actual_output_values, float *output_values);
  // Randomly perturbs weights, scaled by `factor`.
  void simulated_annealing (float factor);
  void zero_link ();
  void cleanup ();
  // Snapshot / rollback of graph + weights (see graph_save_/weights_save_).
  void stash_state ();
  void backtrack_state ();
  // Utilities
  void print ();
  int num_links ();
  void write_to_disk ();
  int read_from_disk ();
private:
  int num_neurons_;                // total neuron count across all groups
  int num_input_neurons_;
  int num_output_neurons_;
  int num_hidden_layers_;
  int num_neurons_per_layer_[MAX_HIDDEN_LAYERS];
  // Flat-index bases for each neuron group.
  int index_input_;
  int index_bias_;
  int index_hidden_[MAX_HIDDEN_LAYERS];
  int index_output_;
  int num_links_;                  // size of the dense link tables below
  bool *graph_;                    // adjacency: graph_[link_index(n1,n2)]
  bool *graph_save_;               // snapshot for backtrack_state()
  float *weights_;
  float *weights_save_;            // snapshot for backtrack_state()
  float *steps_;                   // per-link accumulator (AdaGrad)
  float *values_;                  // per-neuron activations
  float *errors_;                  // per-neuron error terms (backprop)
  char *output_file_;              // persistence path from the constructor
  enum neuron_type {
    type_input,
    type_bias,
    type_hidden,
    type_output
  };
  // Maps (type, layer, num) to the neuron's flat index.  `layer` is only
  // meaningful for type_hidden; all other types require layer == 0.
  int neuron_index (int type, int layer, int num) {
    switch (type) {
    case type_input:
      assert (layer == 0);
      assert (num >= 0 && num < num_input_neurons_);
      return index_input_ + num;
    case type_bias:
      assert (layer == 0);
      assert (num == 0);
      return index_bias_;
    case type_hidden:
      // Bounds-check the layer BEFORE indexing the per-layer tables; the
      // original read num_neurons_per_layer_[layer] unchecked.
      assert (layer >= 0 && layer < num_hidden_layers_);
      assert (num >= 0 && num < num_neurons_per_layer_[layer]);
      return index_hidden_[layer] + num;
    case type_output:
      assert (layer == 0);
      assert (num >= 0 && num < num_output_neurons_);
      return index_output_ + num;
    default:
      assert (false);
      return 0;
    }
  }
  // Maps an ordered neuron pair (n1 -> n2, n1 < n2) to its slot in the
  // dense link tables (row-major: n1 * num_neurons_ + n2).
  int link_index (int n1, int n2) {
    assert (n1 >= 0);
    assert (n1 < n2);
    int index = n1 * num_neurons_ + n2;
    assert (index >= 0);
    assert (index < num_links_);
    return index;
  }
  // Enables the n1 -> n2 link and gives it a small random initial weight.
  void create_link (int n1, int n2) {
    int index = link_index (n1, n2);
    graph_[index] = true;
    weights_[index] = frand () / 100.f;
  }
  // Leaky ReLU (slope 0.01 for negative inputs) when RELU is defined,
  // otherwise the logistic sigmoid.
  float activation_function (float x) {
#ifdef RELU
    if (x < 0.0f)
      return x * 0.01f;
    return x;
#else
    return (1 / (1 + expf (-x)));
#endif
  }
  // Derivative of the activation.  NOTE(review): the two branches expect
  // different arguments — the leaky-ReLU branch takes the pre-activation x,
  // while the sigmoid branch (x * (1 - x)) takes the activation OUTPUT.
  // Callers must pass the matching quantity; verify at the call sites.
  float activation_function_derivative (float x) {
#ifdef RELU
    if (x < 0.0f)
      return 0.01f;
    return 1.f;
#else
    return x * (1.f - x);
#endif
  }
  void remove_neuron (int n);
  bool neuron_is_dead (int n);
  bool neuron_has_source (int n);
  bool neuron_has_destination (int n);
  // Learning-rate constants for the three training methods.
  const float back_propagation_step_size_ = 0.001f;
  const float adagrad_step_size_ = 0.001f;
  const float annealing_step_size_ = 0.01f;
  // Disallow copy/assignment (declared, never defined).
  neural_net (const neural_net &);
  neural_net & operator= (const neural_net &);
};
#endif