##########################################################
# AI::NNFlex::Backprop
##########################################################
# Backprop with simple (non adaptive) momentum
##########################################################
# Versions
# ========
#
# 1.0   20050121    CColbourn   New module
#
# 1.1   20050201    CColbourn   Added call to activation
#                               function slope instead
#                               of hardcoded 1-y*y
#
# 1.2   20050218    CColbourn   Mod'd to change weight
#                               indexing to array for
#                               nnflex 0.16
#
# 1.3   20050307    CColbourn   packaged as a subclass of NNFLex
#
# 1.4   20050313    CColbourn   modified the slope function call
#                               to avoid using eval
#
# 1.5   20050314    CColbourn   applied fahlman constant
#                               Renamed Backprop.pm, see CHANGES
#
##########################################################
# ToDo
# ----
#
#
##########################################################
#

package AI::NNFlex::Backprop;
use AI::NNFlex;
use AI::NNFlex::Feedforward;
use base qw(AI::NNFlex::Feedforward AI::NNFlex);
use strict;

##########################################################
# AI::NNFlex::Backprop::calc_error
##########################################################
sub calc_error
{
    my $network = shift;

    my $outputPatternRef = shift;
    my @outputPattern = @$outputPatternRef;

    my @debug = @{$network->{'debug'}};

    if (scalar @debug > 0)
    {$network->dbug ("Output pattern @outputPattern received by Backprop",4);}

    my $outputLayer = $network->{'layers'}->[-1]->{'nodes'};

    if (scalar @$outputLayer != scalar @outputPattern)
    {
        $network->dbug ("Wrong number of output values, net has ".scalar @$outputLayer." nodes",0);
        return 0;
    }

    # Now calculate the error on each output node (activation minus
    # target), applying the node's error transformation function if
    # one is defined
    my $counter=0;
    foreach (@$outputLayer)
    {
        my $value = $_->{'activation'} - $outputPattern[$counter];

        if ($_->{'errorfunction'})
        {
            my $errorfunction = $_->{'errorfunction'};
            $value = $network->$errorfunction($value);
        }

        $_->{'error'} = $value;
        $counter++;
        if (scalar @debug > 0)
        {$network->dbug ("Error on output node $_ = ".$_->{'error'},4);}
    }
}

########################################################
# AI::NNFlex::Backprop::learn
########################################################
sub learn
{
    my $network = shift;

    my $outputPatternRef = shift;

    # If learn has been called with a Dataset object (i.e. the call
    # was made the wrong way round), redirect to the dataset's own
    # learn method
    if ($outputPatternRef =~/Dataset/)
    {
        return ($outputPatternRef->learn($network))
    }

    # Set a default value on the Fahlman constant
    if (!$network->{'fahlmanconstant'})
    {
        $network->{'fahlmanconstant'} = 0.1;
    }

    my @outputPattern = @$outputPatternRef;

    $network->calc_error($outputPatternRef);

    # Calculate & apply dWs - output layer first, then the hidden
    # layers if the network has more than two layers
    $network->hiddenToOutput;
    if (scalar @{$network->{'layers'}} > 2)
    {
        $network->hiddenOrInputToHidden;
    }

    # calculate network sqErr
    my $Err = $network->RMSErr($outputPatternRef);
    return $Err;
}

#########################################################
# AI::NNFlex::Backprop::hiddenToOutput
#########################################################
sub hiddenToOutput
{
    my $network = shift;

    my @debug = @{$network->{'debug'}};

    my $outputLayer = $network->{'layers'}->[-1]->{'nodes'};

    foreach my $node (@$outputLayer)
    {
        my $connectedNodeCounter=0;
        foreach my $connectedNode (@{$node->{'connectedNodesWest'}->{'nodes'}})
        {
            # Momentum term - a fraction of the previous weight change
            # on this connection, if there was one
            my $momentum = 0;
            if ($network->{'momentum'})
            {
                if ($node->{'connectedNodesWest'}->{'lastdelta'}->[$connectedNodeCounter])
                {
                    $momentum = ($network->{'momentum'})*($node->{'connectedNodesWest'}->{'lastdelta'}->[$connectedNodeCounter]);
                }
            }
            if (scalar @debug > 0)
            {$network->dbug("Learning rate is ".$network->{'learningrate'},4);}

            # Delta rule: dW = learning rate * output node error
            #                  * sending node activation (+ momentum)
            my $deltaW = (($network->{'learningrate'}) * ($node->{'error'}) * ($connectedNode->{'activation'}));
            $deltaW = $deltaW+$momentum;
            $node->{'connectedNodesWest'}->{'lastdelta'}->[$connectedNodeCounter] = $deltaW;

            if (scalar @debug > 0)
            {$network->dbug("Applying delta $deltaW on hiddenToOutput $connectedNode to $node",4);}

            $node->{'connectedNodesWest'}->{'weights'}->[$connectedNodeCounter] -= $deltaW;
            $connectedNodeCounter++;
        }
    }
}

######################################################
# AI::NNFlex::Backprop::hiddenOrInputToHidden
######################################################
sub hiddenOrInputToHidden
{
    my $network = shift;

    my @layers = @{$network->{'layers'}};

    my @debug = @{$network->{'debug'}};

    # remove the last element (the output layer) from the stack
    # because we've already calculated dW on that
    pop @layers;

    if (scalar @debug > 0)
    {$network->dbug("Starting Backprop of error on ".scalar @layers." hidden layers",4);}

    foreach my $layer (reverse @layers)
    {
        foreach my $node (@{$layer->{'nodes'}})
        {
            my $connectedNodeCounter=0;
            if (!$node->{'connectedNodesWest'}) {last}

            # Backpropagate the error: sum each eastward node's error
            # weighted by the connection from this node to it
            my $nodeError;
            foreach my $connectedNode (@{$node->{'connectedNodesEast'}->{'nodes'}})
            {
                $nodeError += ($connectedNode->{'error'}) * ($connectedNode->{'connectedNodesWest'}->{'weights'}->[$connectedNodeCounter]);
                $connectedNodeCounter++;
            }

            if (scalar @debug > 0)
            {$network->dbug("Hidden node $node error = $nodeError",4);}

            # Apply error function
            if ($node->{'errorfunction'})
            {
                my $functioncall = $node->{'errorfunction'};
                $nodeError = $network->$functioncall($nodeError);
            }

            $node->{'error'} = $nodeError;

            # update the weights from nodes inputting to here
            $connectedNodeCounter=0;
            foreach my $westNodes (@{$node->{'connectedNodesWest'}->{'nodes'}})
            {
                # Momentum term (note: lastdelta is keyed by the west
                # node reference here, unlike the array indexing used
                # in hiddenToOutput)
                my $momentum = 0;
                if ($network->{'momentum'})
                {
                    if($node->{'connectedNodesWest'}->{'lastdelta'}->{$westNodes})
                    {
                        $momentum = ($network->{'momentum'})*($node->{'connectedNodesWest'}->{'lastdelta'}->{$westNodes});
                    }
                }

                # get the slope from the activation function component
                my $value = $node->{'activation'};

                my $functionSlope = $node->{'activationfunction'}."_slope";
                $value = $network->$functionSlope($value);

                # Add the Fahlman constant
                $value += $network->{'fahlmanconstant'};

                # dW = slope * node error * learning rate
                #      * sending node activation (+ momentum)
                $value = $value * $node->{'error'} * $network->{'learningrate'} * $westNodes->{'activation'};

                my $dW = $value;
                $dW = $dW + $momentum;
                if (scalar @debug > 0)
                {$network->dbug("Applying deltaW $dW to inputToHidden connection from $westNodes to $node",4);}

                $node->{'connectedNodesWest'}->{'lastdelta'}->{$westNodes} = $dW;

                $node->{'connectedNodesWest'}->{'weights'}->[$connectedNodeCounter] -= $dW;
                if (scalar @debug > 0)
                {$network->dbug("Weight now ".$node->{'connectedNodesWest'}->{'weights'}->[$connectedNodeCounter],4);}
                $connectedNodeCounter++;
            }
        }
    }
}

#########################################################
# AI::NNFlex::Backprop::RMSErr
#########################################################
sub RMSErr
{
    my $network = shift;

    my $outputPatternRef = shift;
    my @outputPattern = @$outputPatternRef;

    my @debug = @{$network->{'debug'}};

    my $sqrErr;

    my $outputLayer = $network->{'layers'}->[-1]->{'nodes'};

    if (scalar @$outputLayer != scalar @outputPattern)
    {
        $network->dbug("Wrong number of output values, net has ".scalar @$outputLayer." nodes",0);
        return 0;
    }

    # Now calculate the error - sum the squared difference between
    # each output node's activation and its target value
    my $counter=0;
    foreach (@$outputLayer)
    {
        my $value = $_->{'activation'} - $outputPattern[$counter];

        $sqrErr += $value * $value;
        $counter++;
        if (scalar @debug > 0)
        {$network->dbug("Error on output node $_ = ".$_->{'error'},4);}
    }

    # Return the square root of the summed squared error
    my $error = sqrt($sqrErr);

    return $error;
}

1;

=pod

=head1 NAME

AI::NNFlex::Backprop - a fast, pure perl backprop Neural Net simulator

=head1 SYNOPSIS

 use AI::NNFlex::Backprop;

 my $network = AI::NNFlex::Backprop->new(config parameter=>value);

 $network->add_layer(nodes=>x,activationfunction=>'function');

 $network->init();


 use AI::NNFlex::Dataset;

 my $dataset = AI::NNFlex::Dataset->new([
                        [INPUTARRAY],[TARGETOUTPUT],
                        [INPUTARRAY],[TARGETOUTPUT]]);

 my $sqrError = 10;

 while ($sqrError > 0.01)
 {
        $sqrError = $dataset->learn($network);
 }

 $network->lesion({'nodes'=>PROBABILITY,'connections'=>PROBABILITY});

 $network->dump_state(filename=>'badgers.wts');

 $network->load_state(filename=>'badgers.wts');

 my $outputsRef = $dataset->run($network);

 my $outputsRef = $network->output(layer=>2,round=>1);

=head1 DESCRIPTION

AI::NNFlex::Backprop is a class for building feedforward, backpropagation neural nets. It inherits various constructs from AI::NNFlex & AI::NNFlex::Feedforward, but is documented here as a standalone.

The code should be simple enough to use for teaching purposes, but a more minimal implementation of a backprop network is included in the example file bp.pl. This is derived from Phil Brierley's freely available Java code at www.philbrierley.com.

AI::NNFlex::Backprop leans towards teaching NN and cognitive modelling applications. Future modules are likely to include more biologically plausible nets like De Vries & Principe's Gamma model.

Full documentation for AI::NNFlex::Dataset can be found in the module's own perldoc. It's documented here for convenience only.

=head1 CONSTRUCTOR

=head2 AI::NNFlex::Backprop->new( parameter => value );

Parameters:

 randomweights=>MAXIMUM VALUE FOR INITIAL WEIGHT

 fixedweights=>WEIGHT TO USE FOR ALL CONNECTIONS

 debug=>[LIST OF CODES FOR MODULES TO DEBUG]

 learningrate=>the learning rate of the network

 momentum=>the momentum value (momentum learning only)

 round=>0 or 1 - 1 sets the network to round output values to
        the nearest of 1, -1 or 0

 fahlmanconstant=>0.1


The following parameters are optional:

 randomweights

 fixedweights

 debug

 round

 momentum

 fahlmanconstant


If randomweights is not specified the network will default to a random value from 0 to 1.

If momentum is not specified the network will default to vanilla (non momentum) backprop.

The Fahlman constant modifies the slope of the error curve. 0.1 is the usual value, and typically speeds up training considerably. If no Fahlman constant is set, the network will default to 0.1.

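For example, a minimal sketch of a constructor call using the parameters above (the particular values are illustrative, not recommendations):

 # Backprop network with momentum; all values below are examples
 # only - tune them for your own problem
 my $network = AI::NNFlex::Backprop->new(
                learningrate=>0.2,
                momentum=>0.6,
                fahlmanconstant=>0.1,
                randomweights=>1,
                round=>1);
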
=head2 AI::NNFlex::Dataset

 new ( [[INPUT VALUES],[OUTPUT VALUES],
        [INPUT VALUES],[OUTPUT VALUES],..])

=head2 INPUT VALUES

These should be comma separated values. They can be applied to the network with ::run or ::learn

=head2 OUTPUT VALUES

These are the intended or target output values. Comma separated. These will be used by ::learn

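For example, a minimal sketch of a dataset for the XOR problem (the values are illustrative):

 # Each pair is an input pattern followed by its target output
 my $dataset = AI::NNFlex::Dataset->new([
        [0,0],[0],
        [0,1],[1],
        [1,0],[1],
        [1,1],[0]]);
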
=head1 METHODS

This is a short list of the main methods implemented in AI::NNFlex::Backprop.

=head2 AI::NNFlex::Backprop

=head2 add_layer

Syntax:

 $network->add_layer( nodes=>NUMBER OF NODES IN LAYER,
                      persistentactivation=>RETAIN ACTIVATION BETWEEN PASSES,
                      decay=>RATE OF ACTIVATION DECAY PER PASS,
                      randomactivation=>MAXIMUM STARTING ACTIVATION,
                      threshold=>NYI,
                      activationfunction=>"ACTIVATION FUNCTION",
                      errorfunction=>'ERROR TRANSFORMATION FUNCTION',
                      randomweights=>MAX VALUE OF STARTING WEIGHTS);


The activation function must be defined in AI::NNFlex::Mathlib. Valid predefined activation functions are tanh & linear.

The error transformation function defines a transform that is done on the error value. It must be a valid function in AI::NNFlex::Mathlib. Using a non linear transformation function on the error value can sometimes speed up training.

The following parameters are optional:

 persistentactivation

 decay

 randomactivation

 threshold

 errorfunction

 randomweights

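For example, a minimal sketch of building a small three layer network (the layer sizes and activation functions are illustrative):

 # 2 input nodes, 2 hidden nodes, 1 output node, all using tanh
 $network->add_layer(nodes=>2, activationfunction=>'tanh');
 $network->add_layer(nodes=>2, activationfunction=>'tanh');
 $network->add_layer(nodes=>1, activationfunction=>'tanh');

 # connections and weights are only created once init is called
 $network->init();
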
=head2 init

Syntax:

 $network->init();

Initialises connections between nodes, sets initial weights and loads external components. Implements connections backwards and forwards from each node in each layer to each node in the preceding and following layers, and initialises weights values on all connections.

=head2 lesion

 $network->lesion ({'nodes'=>PROBABILITY,'connections'=>PROBABILITY})

Damages the network.

B<PROBABILITY>

A value between 0 and 1, denoting the probability of a given node or connection being damaged.

Note: this method may be called on a per network, per node or per layer basis using the appropriate object.

=head2 AI::NNFlex::Dataset

=head2 learn

 $dataset->learn($network)

'Teaches' the network the dataset using the network's defined learning algorithm. Returns sqrError.

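For example, a minimal sketch of a training loop with both an error target and an upper bound on the number of passes (the thresholds are illustrative):

 # Stop when the error is small enough, or after 2000 passes
 my $sqrError = 10;
 my $count = 0;
 while ($sqrError > 0.01 && $count < 2000)
 {
        $sqrError = $dataset->learn($network);
        $count++;
 }
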
=head2 run

 $dataset->run($network)

Runs the dataset through the network and returns a reference to an array of output patterns.

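For example, a minimal sketch of examining the returned output patterns, assuming (as in the bundled examples) that each pattern is itself an array reference:

 my $outputsRef = $dataset->run($network);

 # Print each output pattern on its own line
 foreach my $pattern (@$outputsRef)
 {
        print join(",", @$pattern)."\n";
 }
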
=head1 EXAMPLES

See the code in ./examples. For any given version of NNFlex, xor.pl will contain the latest functionality.


=head1 PREREQs

None. AI::NNFlex::Backprop should run OK on any recent version of Perl 5.


=head1 ACKNOWLEDGEMENTS

Phil Brierley, for his excellent free Java code, which solved my backprop problem

Dr Martin Le Voi, for help with concepts of NN in the early stages

Dr David Plaut, for help with the project that this code was originally intended for.

Graciliano M. Passos for suggestions & improved code (see SEE ALSO).

Dr Scott Fahlman, whose very readable paper 'An empirical study of learning speed in backpropagation networks' (1988) has driven many of the improvements made so far.

=head1 SEE ALSO

 AI::NNFlex

 AI::NNEasy - Developed by Graciliano M. Passos
        Shares some common code with NNFlex.


=head1 TODO


=head1 CHANGES


=head1 COPYRIGHT

Copyright (c) 2004-2005 Charles Colbourn. All rights reserved. This program is free software; you can redistribute it and/or modify it under the same terms as Perl itself.

=head1 CONTACT

 charlesc@nnflex.g0n.net

=cut