line | stmt | bran | cond | sub | pod | time | code
1 | | | | | | | ##########################################################
2 | | | | | | | # AI::NNFlex::Feedforward
3 | | | | | | | ##########################################################
4 | | | | | | | # This is the first propagation module for NNFlex
5 | | | | | | | #
6 | | | | | | | ##########################################################
7 | | | | | | | # Versions
8 | | | | | | | # ========
9 | | | | | | | #
10 | | | | | | | # 1.0 20040910 CColbourn New module
11 | | | | | | | #
12 | | | | | | | # 1.1 20050116 CColbourn Added call to
13 | | | | | | | # datasets where run
14 | | | | | | | # is erroneously called
15 | | | | | | | # with a dataset
16 | | | | | | | #
17 | | | | | | | # 1.2 20050206 CColbourn Fixed a bug where
18 | | | | | | | # transfer function
19 | | | | | | | # was called on every
20 | | | | | | | # input to a node
21 | | | | | | | # instead of total
22 | | | | | | | #
23 | | | | | | | # 1.3 20050218 CColbourn Changed to reflect
24 | | | | | | | # new weight indexing
25 | | | | | | | # (arrays) in nnflex 0.16
26 | | | | | | | #
27 | | | | | | | # 1.4 20050302 CColbourn Fixed a problem that allowed
28 | | | | | | | # activation to flow even if a
29 | | | | | | | # node was lesioned off
30 | | | | | | | #
31 | | | | | | | # 1.5 20050308 CColbourn Made a separate class as part
32 | | | | | | | # of NNFlex-0.2
33 | | | | | | | #
34 | | | | | | | # 1.6 20050313 CColbourn altered syntax of activation
35 | | | | | | | # function call to get rid of
36 | | | | | | | # eval
37 | | | | | | | #
38 | | | | | | | ##########################################################
39 | | | | | | | # ToDo
40 | | | | | | | # ----
41 | | | | | | | #
42 | | | | | | | #
43 | | | | | | | ###########################################################
44 | | | | | | | #
45 | | | | | | | package AI::NNFlex::Feedforward;
46 | | | | | | |
47 | 4 | | | 4 | | 20 | use strict;
 | 4 | | | | | 7 |
 | 4 | | | | | 3048 |
48 | | | | | | |
49 | | | | | | |
50 | | | | | | | ###########################################################
51 | | | | | | | # AI::NNFlex::Feedforward::run
52 | | | | | | | ###########################################################
53 | | | | | | | #
54 | | | | | | | #This class contains the run method only. The run method performs
55 | | | | | | | #Feedforward (i.e. west to east) activation flow on the network.
56 | | | | | | | #
57 | | | | | | | #This class is internal to the NNFlex package, and is included
58 | | | | | | | #in the NNFlex namespace by a require on the networktype parameter.
59 | | | | | | | #
60 | | | | | | | #syntax:
61 | | | | | | | # $network->run([0,1,1,1,0,1,1]);
62 | | | | | | | #
63 | | | | | | | #
64 | | | | | | | ###########################################################
65 | | | | | | | sub run
66 | | | | | | | {
67 | 22 | | | 22 | 0 | 30 |     my $network = shift;
68 | | | | | | |
69 | 22 | | | | | 27 |     my $inputPatternRef = shift;
70 | | | | | | |
71 | | | | | | |     # if this is an incorrect dataset call translate it
72 | 22 | 50 | | | | 93 |     if ($inputPatternRef =~/Dataset/)
73 | | | | | | |     {
74 | 0 | | | | | 0 |         return ($inputPatternRef->run($network))
75 | | | | | | |     }
76 | | | | | | |
77 | | | | | | |
78 | 22 | | | | | 45 |     my @inputPattern = @$inputPatternRef;
79 | | | | | | |
80 | 22 | | | | | 26 |     my @debug = @{$network->{'debug'}};
 | 22 | | | | | 46 |
81 | 22 | 50 | | | | 67 |     if (scalar @debug> 0)
 | 0 | | | | | 0 |
82 | | | | | | |         {$network->dbug ("Input pattern @inputPattern received by Feedforward",3);}
83 | | | | | | |
84 | | | | | | |
85 | | | | | | |     # First of all apply the activation pattern to the input units (checking
86 | | | | | | |     # that the pattern has the right number of values)
87 | | | | | | |
88 | 22 | | | | | 47 |     my $inputLayer = $network->{'layers'}->[0]->{'nodes'};
89 | | | | | | |
90 | 22 | 50 | | | | 51 |     if (scalar @$inputLayer != scalar @inputPattern)
91 | | | | | | |     {
92 | 0 | | | | | 0 |         $network->dbug("Wrong number of input values",0);
93 | 0 | | | | | 0 |         return 0;
94 | | | | | | |     }
95 | | | | | | |
96 | | | | | | |     # Now apply the activation
97 | 22 | | | | | 31 |     my $counter=0;
98 | 22 | | | | | 38 |     foreach (@$inputLayer)
99 | | | | | | |     {
100 | 44 | 50 | | | | 102 |         if ($_->{'active'})
101 | | | | | | |         {
102 | | | | | | |
103 | 44 | 50 | | | | 138 |             if ($_->{'persistentactivation'})
104 | | | | | | |             {
105 | 0 | | | | | 0 |                 $_->{'activation'} +=$inputPattern[$counter];
106 | 0 | 0 | | | | 0 |                 if (scalar @debug> 0)
 | 0 | | | | | 0 |
107 | | | | | | |                     {$network->dbug("Applying ".$inputPattern[$counter]." to $_",3);}
108 | | | | | | |             }
109 | | | | | | |             else
110 | | | | | | |             {
111 | 44 | | | | | 64 |                 $_->{'activation'} =$inputPattern[$counter];
112 | 44 | 50 | | | | 98 |                 if (scalar @debug> 0)
 | 0 | | | | | 0 |
113 | | | | | | |                     {$network->dbug("Applying ".$inputPattern[$counter]." to $_",3);}
114 | | | | | | |
115 | | | | | | |             }
116 | | | | | | |         }
117 | 44 | | | | | 75 |         $counter++;
118 | | | | | | |     }
119 | | | | | | |
120 | | | | | | |
121 | | | | | | |     # Now flow activation through the network starting with the second layer
122 | 22 | | | | | 29 |     foreach my $layer (@{$network->{'layers'}})
 | 22 | | | | | 46 |
123 | | | | | | |     {
124 | 38 | 100 | | | | 284 |         if ($layer eq $network->{'layers'}->[0]){next}
 | 22 | | | | | 43 |
125 | | | | | | |
126 | 16 | | | | | 17 |         foreach my $node (@{$layer->{'nodes'}})
 | 16 | | | | | 28 |
127 | | | | | | |         {
128 | 32 | | | | | 37 |             my $totalActivation;
129 | | | | | | |             # Set the node to 0 if not persistent
130 | 32 | 50 | | | | 70 |             if (!($node->{'persistentactivation'}))
131 | | | | | | |             {
132 | 32 | | | | | 45 |                 $node->{'activation'} =0;
133 | | | | | | |             }
134 | | | | | | |
135 | | | | | | |             # Decay the node (note that if decay is not set this
136 | | | | | | |             # will have no effect, hence no if).
137 | 32 | | | | | 283 |             $node->{'activation'} -= $node->{'decay'};
138 | 32 | | | | | 35 |             my $nodeCounter=0;
139 | 32 | | | | | 34 |             foreach my $connectedNode (@{$node->{'connectedNodesWest'}->{'nodes'}})
 | 32 | | | | | 60 |
140 | | | | | | |             {
141 | 176 | 50 | | | | 283 |                 if (scalar @debug> 0)
 | 0 | | | | | 0 |
142 | | | | | | |                     {$network->dbug("Flowing from ".$connectedNode->{'nodeid'}." to ".$node->{'nodeid'},3);}
143 | | | | | | |
144 | 176 | | | | | 163 |                 my $weight = ${$node->{'connectedNodesWest'}->{'weights'}}[$nodeCounter];
 | 176 | | | | | 278 |
145 | 176 | | | | | 201 |                 my $activation = $connectedNode->{'activation'};
146 | 176 | 50 | | | | 276 |                 if (scalar @debug> 0)
 | 0 | | | | | 0 |
147 | | | | | | |                     {$network->dbug("Weight & activation: $weight - $activation",3);}
148 | | | | | | |
149 | | | | | | |
150 | 176 | | | | | 209 |                 $totalActivation += $weight*$activation;
151 | 176 | | | | | 224 |                 $nodeCounter++;
152 | | | | | | |             }
153 | | | | | | |
154 | 32 | 50 | | | | 80 |             if ($node->{'active'})
155 | | | | | | |             {
156 | 32 | | | | | 33 |                 my $value = $totalActivation;
157 | | | | | | |
158 | 32 | | | | | 47 |                 my $function = $node->{'activationfunction'};
159 | | | | | | |                 #my $functionCall ="\$value = \$network->$function(\$value);";
160 | | | | | | |
161 | | | | | | |                 #eval($functionCall);
162 | 32 | | | | | 1242 |                 $value = $network->$function($value);
163 | 32 | | | | | 71 |                 $node->{'activation'} = $value;
164 | | | | | | |             }
165 | 32 | 50 | | | | 111 |             if (scalar @debug> 0)
 | 0 | | | | | 0 |
166 | | | | | | |                 {$network->dbug("Final activation of ".$node->{'nodeid'}." = ".$node->{'activation'},3);}
167 | | | | | | |         }
168 | | | | | | |     }
169 | | | | | | |
170 | | | | | | |
171 | | | | | | |
172 | 22 | | | | | 94 |     return $network->output;
173 | | | | | | |
174 | | | | | | | }
175 | | | | | | |
176 | | | | | | |
177 | | | | | | |
178 | | | | | | |
179 | | | | | | |
180 | | | | | | | 1;
181 | | | | | | |
182 | | | | | | | =pod
183 | | | | | | |
184 | | | | | | | =head1 NAME
185 | | | | | | |
186 | | | | | | | AI::NNFlex::Feedforward - methods for feedforward neural networks
187 | | | | | | |
188 | | | | | | | =head1 SYNOPSIS
189 | | | | | | |
190 | | | | | | | use AI::NNFlex::Feedforward;
191 | | | | | | |
192 | | | | | | | $network->run([array of inputs]);
193 | | | | | | |
194 | | | | | | | =head1 DESCRIPTION
195 | | | | | | |
196 | | | | | | | AI::NNFlex::Feedforward provides a run method to flow activation through an NNFlex network in west to east feedforward style.
197 | | | | | | |
198 | | | | | | | =head1 CONSTRUCTOR
199 | | | | | | |
200 | | | | | | | None
201 | | | | | | |
202 | | | | | | | =head1 METHODS
203 | | | | | | |
204 | | | | | | | =head1 AI::NNFlex::Feedforward::run
205 | | | | | | |
206 | | | | | | | Takes a reference to an array of input values for the network. Returns the network output (the result of $network->output), or 0 if the input pattern does not match the size of the input layer.
207 | | | | | | |
208 | | | | | | | =head1 SEE ALSO
209 | | | | | | |
210 | | | | | | |
211 | | | | | | | AI::NNFlex
212 | | | | | | | AI::NNFlex::Backprop
213 | | | | | | | AI::NNFlex::Dataset
214 | | | | | | |
215 | | | | | | |
216 | | | | | | | =head1 CHANGES
217 | | | | | | |
218 | | | | | | |
219 | | | | | | |
220 | | | | | | | =head1 COPYRIGHT
221 | | | | | | |
222 | | | | | | | Copyright (c) 2004-2005 Charles Colbourn. All rights reserved. This program is free software; you can redistribute it and/or modify it under the same terms as Perl itself.
223 | | | | | | |
224 | | | | | | | =head1 CONTACT
225 | | | | | | |
226 | | | | | | | charlesc@nnflex.g0n.net
227 | | | | | | |
228 | | | | | | | =cut
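
The SYNOPSIS and DESCRIPTION above show only the run call itself. For context, here is a minimal usage sketch; the AI::NNFlex::Backprop constructor and the add_layer/init parameters are assumptions drawn from the wider AI::NNFlex distribution rather than from the code listed above, so treat the parameter names as illustrative.

    use strict;
    use warnings;
    use AI::NNFlex::Backprop;

    # Assumed constructor and layer parameters from the main AI::NNFlex
    # distribution; adjust to the installed version.
    my $network = AI::NNFlex::Backprop->new(learningrate => 0.1);
    $network->add_layer(nodes => 2, activationfunction => 'tanh');
    $network->add_layer(nodes => 1, activationfunction => 'linear');
    $network->init();

    # Feedforward::run applies the pattern to the input layer, flows
    # activation west to east, and returns the network output.
    my $output = $network->run([1, 0]);
    print "@$output\n";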
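
Line 72 also lets run accept an AI::NNFlex::Dataset object: anything matching /Dataset/ is redirected to the dataset's own run method, which pushes each stored pattern through the network in turn. A sketch, assuming the alternating input/target constructor format of AI::NNFlex::Dataset from the main distribution and the $network built in the previous sketch:

    use AI::NNFlex::Dataset;

    # Assumed constructor format: alternating input and target patterns.
    my $dataset = AI::NNFlex::Dataset->new([
        [0, 0], [0],
        [0, 1], [1],
    ]);

    # run() matches its argument against /Dataset/ and translates this
    # call into $dataset->run($network), returning the collected outputs.
    my $results = $network->run($dataset);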
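
The heart of the propagation is the loop over connectedNodesWest (lines 139-152): each node accumulates $totalActivation as the weighted sum of the activations of the nodes to its west, and line 162 then passes that sum through the node's configured activation function. A standalone sketch of the same calculation, using hypothetical numbers and a plain sigmoid in place of the configurable transfer function:

    use strict;
    use warnings;

    # Hypothetical data: activations of the western (previous) layer and
    # the weights on the connections into a single eastern node.
    my @west_activations = (0.2, 0.9, 0.5);
    my @weights          = (0.4, -0.6, 1.1);

    # Accumulate the weighted sum, as the $totalActivation loop does.
    my $total = 0;
    $total += $weights[$_] * $west_activations[$_] for 0 .. $#weights;

    # Apply a transfer function (sigmoid here, purely for illustration).
    my $activation = 1 / (1 + exp(-$total));
    print "node activation = $activation\n";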