-
Notifications
You must be signed in to change notification settings - Fork 6
Expand file tree
/
Copy pathlayer.cpp
More file actions
63 lines (49 loc) · 1.43 KB
/
layer.cpp
File metadata and controls
63 lines (49 loc) · 1.43 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
// layer.cpp - copyright (C) 2001-2011 by Patrick Hanevold
#include <iostream>
#include <math.h>
#include <stdlib.h>
#include "layer.h"
#include "neuron.h"
using namespace std;
// Return a pseudo-random integer in the half-open range [0, range).
// NOTE(review): modulo introduces a slight bias unless range divides RAND_MAX+1.
inline int irand( int range ){
	return rand() % range;
}
// Create a layer of neurons.
// input: inputs  = number of inputs to each neuron
//        outputs = number of neurons (= number of outputs)
//        a & b   = normalization values for the neurons (min & max)
// Allocates the input-pointer array (filled in later by setInputs) and the
// neuron array, then constructs each neuron with a random mode in [0,3).
Layer::Layer(int inputs, int outputs, double a, double b){
	this->inputs = inputs;
	this->outputs = outputs;
	input = new Neuron*[inputs];
	neuron = new Neuron[outputs];
	for(int i = 0; i < outputs; ++i){
		neuron[i].construct(input, inputs, rand() % 3, a, b);
	}
}
// Release the layer's heap allocations.
// Fixes two defects in the original:
//  - `delete neuron` on memory from `new Neuron[...]` is undefined behavior;
//    the array form `delete []` is required so every element is destroyed.
//  - the `input` pointer array allocated in the constructor was never freed
//    (the pointed-to Neurons are owned by other layers, so only the array
//    itself is released here).
Layer::~Layer(){
	delete [] neuron;
	delete [] input;
}
// Re-initialize every neuron in the layer with random weights.
// input: a & b = normalization values (min & max); each neuron also
//        receives a fresh random mode in [0,3).
void Layer::init(double a, double b){
	for(int i = 0; i < outputs; ++i){
		neuron[i].init(rand() % 3, a, b);
	}
}
// Wire another layer's output neurons up as the inputs of this layer.
// input: l = the upstream layer whose neurons feed this one
// NOTE(review): assumes l->outputs <= this->inputs (capacity of input[]) —
// confirm at call sites; no bounds check is performed here.
void Layer::setInputs(Layer *l){
	for(int i = 0; i < l->outputs; ++i)
		input[i] = &l->neuron[i];
}
// Copy assignment: copies every neuron of s into this layer, element by
// element. Both layers must have the same number of outputs; a mismatch is
// a fatal configuration error and terminates the program.
Layer &Layer::operator=(Layer &s){
	if(s.outputs != outputs){
		cerr << "Layer::operator = cannot copy layer!" << endl;
		exit(EXIT_FAILURE);
	}
	for(int i = 0; i < outputs; ++i){
		neuron[i] = s.neuron[i];
	}
	return *this;
}
// Fire every neuron in the layer once, in index order.
void Layer::run(){
	for(int i = 0; i < outputs; ++i){
		neuron[i].run();
	}
}