Perceptron.cpp
#include "Perceptron.h"
#include "Edge.h"
using namespace std;
namespace NeNet
{
Perceptron::Perceptron(Type type, u_int layer, u_int index) :
    _type(type),
    _layer(layer),
    _index(index),
    _delta(0),
    _output(0)
{
    /* Initialize the activation functions.
     *
     * The sigmoid activation function is used for input, hidden, and
     * output neurons; every other neuron type gets the identity
     * function.
     */
    if (_type == INPUT || _type == HIDDEN || _type == OUTPUT)
    {
        _activationFun = [](double x) {
            return 1.0 / (1.0 + exp(-x));
        };
        auto actFun = _activationFun;
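        /* Derivation of the closed form used below: for
         * s(x) = 1 / (1 + e^(-x)),
         *   s'(x) = e^(-x) / (1 + e^(-x))^2 = s(x) * (1 - s(x)),
         * so the derivative can be expressed through the activation
         * function itself, which is why actFun is captured by the lambda.
         */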
        _activationFunDer = [actFun](double x) {
            return actFun(x) * (1.0 - actFun(x));
        };
    }
    else
    {
        _activationFun = [](double x) {
            return x;
        };
        _activationFunDer = [](double x) {
            return 1.0;
        };
    }
    /* Initialize the error functions.
     *
     * The squared-error function E(y, t) = (y - t)^2 is used, where y is
     * the network output and t the target output from the sample.
     */
    _errorFun = [](double networkOutput, double sampleOut) {
        return pow(networkOutput - sampleOut, 2);
    };
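    /* Its derivative with respect to the network output y:
     * dE/dy = 2 * (y - t).
     */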
    _errorFunDer = [](double networkOutput, double sampleOut) {
        return 2 * (networkOutput - sampleOut);
    };
}
void Perceptron::processInputs()
{
    // Forward pass: sum the weighted outputs of all predecessor edges,
    // apply the activation function, and push the result to successors.
    _weightedSum = 0;
    for (const auto &predecessor : _predecessors)
    {
        _weightedSum += predecessor.lock()->getWeightedValue();
    }
    _output = _activationFun(_weightedSum);
    for (const auto &successor : _successors)
    {
        successor.lock()->setValue(_output);
    }
}
void Perceptron::calculateDelta(double sampleOutput)
{
    if (_type == OUTPUT)
    {
        // Output neuron: delta is the error derivative scaled by the
        // activation derivative at the weighted sum.
        _delta = _errorFunDer(_output, sampleOutput) * _activationFunDer(_weightedSum);
        for (const auto &edge : _predecessors)
        {
            // The gradient of the error w.r.t. an incoming weight is the
            // edge's input value times this neuron's delta.
            edge.lock()->setError(edge.lock()->getValue() * _delta);
        }
    }
    else
    {
        // Hidden neuron: accumulate the deltas propagated back from the
        // successors, weighted by the connecting edges. The activation
        // derivative is folded into _delta so that getSuccessorDelta()
        // returns a fully formed delta for earlier layers as well.
        _delta = 0;
        for (const auto &edge : _successors)
        {
            _delta += edge.lock()->getSuccessorDelta() * edge.lock()->getWeight();
        }
        _delta *= _activationFunDer(_weightedSum);
        for (const auto &edge : _predecessors)
        {
            edge.lock()->setError(edge.lock()->getValue() * _delta);
        }
    }
}
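/* Note: the per-edge error set in calculateDelta() is the gradient
 * dE/dw for that edge, so a gradient-descent step elsewhere in the
 * library would presumably apply w <- w - learningRate * error. That
 * update is not part of this file.
 */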
}
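
A quick standalone sanity check of the two derivatives hard-coded above, comparing the analytic forms against central finite differences. This is a minimal sketch that does not touch the NeNet classes; the test values x, y, and t are arbitrary.

#include <cmath>
#include <cstdio>

int main()
{
    // Same closed forms as in Perceptron's constructor.
    auto sigmoid = [](double x) { return 1.0 / (1.0 + std::exp(-x)); };
    auto sigmoidDer = [&](double x) { return sigmoid(x) * (1.0 - sigmoid(x)); };
    auto errorDer = [](double y, double t) { return 2.0 * (y - t); };

    const double h = 1e-6;          // finite-difference step
    const double x = 0.7;           // arbitrary pre-activation value
    const double y = 0.9, t = 0.3;  // arbitrary output/target pair

    // Central differences of sigmoid(x) and of (y - t)^2 in y.
    double numActDer = (sigmoid(x + h) - sigmoid(x - h)) / (2.0 * h);
    double numErrDer = (std::pow(y + h - t, 2) - std::pow(y - h - t, 2)) / (2.0 * h);

    std::printf("sigmoid' analytic=%.8f numeric=%.8f\n", sigmoidDer(x), numActDer);
    std::printf("error'   analytic=%.8f numeric=%.8f\n", errorDer(y, t), numErrDer);
    return 0;
}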