-
Notifications
You must be signed in to change notification settings - Fork 6
Expand file tree
/
Copy pathnet.cpp
More file actions
145 lines (121 loc) · 3.4 KB
/
net.cpp
File metadata and controls
145 lines (121 loc) · 3.4 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
// net.cpp - copyright (C) 2001-2011 by Patrick Hanevold
#include <iostream>
#include <math.h>
#include <stdlib.h>
#include "net.h"
#include "genetic.h"
using namespace std;
// Uniform-ish random integer in [0, range) — modulo of rand(),
// so large ranges carry a slight modulo bias (fine for GA use).
inline int irand( int range ){
    return rand() % range;
}
// Create a neural network
// input: inputs = number of inputs
// outputs = number of outputs
// layers = the number of layers of neurons to have in the neural net
// a & b = normalization values for the neurons (min & max)
Net::Net(int inputs, int outputs, int layers, double a, double b){
this->layers = layers;
this->outputs = outputs;
typedef Layer* layerptr;
// Allocate the layer-pointer array and the final output buffer.
layer = new layerptr[layers];
output = new double[outputs];
// Layer widths halve at each level, but a layer is never narrower
// than the requested number of outputs.
int ip = inputs;
int op = inputs/2;
for(int n=0; n<layers; n++){
layer[n] = new Layer(ip,op>outputs?op:outputs,a,b);
ip/=2;
op/=2;
}
// The first layer gets dummy input neurons with no upstream wiring;
// every later layer reads from the layer before it.
for(int n=0; n<layer[0]->inputs; n++) layer[0]->input[n] = new Neuron((Neuron**)0,0,0,a,b);
for(int n=1; n<layers; n++) layer[n]->setInputs(layer[n-1]);
// Build a flat index over every neuron in the net so the mutation
// and breeding operators can pick neurons uniformly at random.
neurons = 0;
for(int n=0; n<layers; n++) neurons+=layer[n]->outputs;
typedef Neuron* nptr;
neuron = new nptr[neurons];
int ni=0;
for(int n=0; n<layers; n++){
for(int i=0; i<layer[n]->outputs; i++) neuron[ni++] = &layer[n]->neuron[i];
}
// Sanity check: the flat index must cover exactly `neurons` entries.
if(ni!=neurons){ cout << "!!!!!!!!!!!!!!" << endl; exit(0); }
// Fresh-net bookkeeping: age/score for fitness, [min,max] is the
// output normalization window until setCalibration() is called.
age = 30;//irand(10);
score=0;
max=1;
min=0;
calibrated = false;
// Magic marker value; presumably a debug canary — TODO confirm use.
name = 0x12345678;
}
// Reset every layer to fresh random weights.
// input: a & b = normalization values (min & max)
// Also clears all fitness bookkeeping and the calibration window.
void Net::init(double a, double b){
    for(int i = 0; i < layers; i++){
        layer[i]->init(a, b);
    }
    age = 0;
    score = 0;
    max = 1;
    min = 0;
    calibrated = false;
}
// mutate a single neuron with another by randomized blending
void Net::mix(){
neuron[irand(neurons)]->mix(neuron[irand(neurons)]);
}
// fire all neurons in the net (basic propagation algorithm)
void Net::run(){
for(int n=0; n<layers; n++) layer[n]->run();
for(int n=0; n<outputs; n++) output[n] = (layer[layers-1]->neuron[n].val-min)/(max-min);
}
// Mutate by blending one randomly chosen neuron of this net with a
// randomly chosen neuron from another net in the population.
// input: net = the other neural network
void Net::mix(Net *net){
    Neuron *mine = neuron[irand(neurons)];
    Neuron *theirs = net->neuron[irand(net->neurons)];
    mine->mix(theirs);
}
// Mutate the net by "radiation": blend noise into a random number of
// randomly chosen neurons, spreading the total deviation across them.
// input: dev = radiation factor (total noise budget)
void Net::mutate(double dev){
    const int cnt = rand() % neurons;   // 0..neurons-1 neurons hit
    for(int i = 0; i < cnt; i++){
        neuron[rand() % neurons]->mutate(dev / double(cnt));
    }
    // Mutation invalidates all accumulated fitness and calibration.
    age = 0;
    score = 0;
    min = 0;
    max = 1;
    calibrated = false;
}
// Average fitness score per unit of age.
// Returns 0 for a net with age == 0 (freshly initialized, mutated or
// bred) — previously this computed 0.0/0.0, yielding NaN that silently
// poisons any fitness comparison made before the net has been scored.
double Net::getScore(){
    if(age == 0) return 0;
    return score / double(age);
}
// Copy the weights of another structurally identical net into this one
// and reset the fitness bookkeeping. Aborts if the topologies differ.
Net &Net::operator=(Net &s){
    const bool same_shape = (neurons == s.neurons) && (layers == s.layers);
    if(!same_shape){
        cerr << "bad!" << endl;
        exit(EXIT_FAILURE);
    }
    for(int i = 0; i < layers; i++){
        *layer[i] = *s.layer[i];
    }
    age = 0;
    score = 0;
    min = 0;
    max = 1;
    calibrated = false;
    return *this;
}
// Uniform-ish random double in [-0.5, 0.5) with 1/10000 resolution.
inline double drand(){
    const int r = rand() % 10000;       // 0..9999
    return (double(r) - 5000.0) / 10000.0;
}
// let this neural net be the result of breeding. (blending and light mutation)
// input: a & b = neural networks to have intercourse
void Net::breed(Net *a, Net *b){
    const double w = drand() + .5;      // blend weight, roughly [0, 1)
    for(int i = 0; i < neurons; i++){
        neuron[i]->breed(w, a->neuron[i], b->neuron[i]);
    }
    // Light mutation on the offspring. mutate() already resets the
    // bookkeeping, but it is reset again below for good measure.
    mutate(drand() + .5);
    age = 0;
    score = 0;
    min = 0;
    max = 1;
    calibrated = false;
}
// Set the output normalization window used by run().
// input: min & max = observed output extremes
// NOTE(review): both stored bounds are shifted UP by (max-min)/2:
// this->min becomes (min+max)/2 and this->max becomes max+(max-min)/2,
// preserving the window width (max-min) but recentering it. Looks
// deliberate but is undocumented — confirm the intended window against
// the normalization in run().
void Net::setCalibration(double min, double max){
this->min = min-(min-max)*.5;
this->max = max-(min-max)*.5;
calibrated = true;
}