| line |
stmt |
bran |
cond |
sub |
pod |
time |
code |
|
1
|
|
|
|
|
|
|
package AI::FANN::Evolving; |
|
2
|
3
|
|
|
3
|
|
53582
|
use strict; |
|
|
3
|
|
|
|
|
9
|
|
|
|
3
|
|
|
|
|
108
|
|
|
3
|
3
|
|
|
3
|
|
13
|
use warnings; |
|
|
3
|
|
|
|
|
8
|
|
|
|
3
|
|
|
|
|
98
|
|
|
4
|
3
|
|
|
3
|
|
2332
|
use AI::FANN ':all'; |
|
|
0
|
|
|
|
|
|
|
|
|
0
|
|
|
|
|
|
|
|
5
|
|
|
|
|
|
|
use List::Util 'shuffle'; |
|
6
|
|
|
|
|
|
|
use File::Temp 'tempfile'; |
|
7
|
|
|
|
|
|
|
use AI::FANN::Evolving::Gene; |
|
8
|
|
|
|
|
|
|
use AI::FANN::Evolving::Chromosome; |
|
9
|
|
|
|
|
|
|
use AI::FANN::Evolving::Experiment; |
|
10
|
|
|
|
|
|
|
use AI::FANN::Evolving::Factory; |
|
11
|
|
|
|
|
|
|
use Algorithm::Genetic::Diploid; |
|
12
|
|
|
|
|
|
|
use base qw'Algorithm::Genetic::Diploid::Base'; |
|
13
|
|
|
|
|
|
|
|
|
14
|
|
|
|
|
|
|
our $VERSION = '0.4'; |
|
15
|
|
|
|
|
|
|
our $AUTOLOAD; |
|
16
|
|
|
|
|
|
|
my $log = __PACKAGE__->logger; |
|
17
|
|
|
|
|
|
|
|
|
18
|
|
|
|
|
|
|
# Lookup tables of the FANN enum constants that may be assigned (and
# mutated) per category. Entries that are commented out are deliberately
# excluded; the per-entry notes explain why.
my %enum = (
	'train' => {
		# FANN_TRAIN_INCREMENTAL excluded: only batch training is wanted
		'FANN_TRAIN_BATCH'     => FANN_TRAIN_BATCH,
		'FANN_TRAIN_RPROP'     => FANN_TRAIN_RPROP,
		'FANN_TRAIN_QUICKPROP' => FANN_TRAIN_QUICKPROP,
	},
	'activationfunc' => {
		'FANN_LINEAR'                    => FANN_LINEAR,
		# FANN_THRESHOLD excluded: can not be used during training
		# FANN_THRESHOLD_SYMMETRIC excluded: can not be used during training
		# FANN_SIGMOID excluded: range is between 0 and 1
		# FANN_SIGMOID_STEPWISE excluded: range is between 0 and 1
		'FANN_SIGMOID_SYMMETRIC'          => FANN_SIGMOID_SYMMETRIC,
		'FANN_SIGMOID_SYMMETRIC_STEPWISE' => FANN_SIGMOID_SYMMETRIC_STEPWISE,
		# FANN_GAUSSIAN excluded: range is between 0 and 1
		'FANN_GAUSSIAN_SYMMETRIC'         => FANN_GAUSSIAN_SYMMETRIC,
		'FANN_GAUSSIAN_STEPWISE'          => FANN_GAUSSIAN_STEPWISE,
		# FANN_ELLIOT excluded: range is between 0 and 1
		'FANN_ELLIOT_SYMMETRIC'           => FANN_ELLIOT_SYMMETRIC,
		# FANN_LINEAR_PIECE excluded: range is between 0 and 1
		'FANN_LINEAR_PIECE_SYMMETRIC'     => FANN_LINEAR_PIECE_SYMMETRIC,
		'FANN_SIN_SYMMETRIC'              => FANN_SIN_SYMMETRIC,
		'FANN_COS_SYMMETRIC'              => FANN_COS_SYMMETRIC,
		# FANN_SIN excluded: range is between 0 and 1
		# FANN_COS excluded: range is between 0 and 1
	},
	'errorfunc' => {
		'FANN_ERRORFUNC_LINEAR' => FANN_ERRORFUNC_LINEAR,
		'FANN_ERRORFUNC_TANH'   => FANN_ERRORFUNC_TANH,
	},
	'stopfunc' => {
		'FANN_STOPFUNC_MSE' => FANN_STOPFUNC_MSE,
		# FANN_STOPFUNC_BIT excluded
	}
);
|
54
|
|
|
|
|
|
|
|
|
55
|
|
|
|
|
|
|
# Flatten the nested enum tables into a single name-to-value lookup so a
# symbolic constant name can be resolved regardless of its category.
my %constant = map { %{ $_ } } values %enum;
|
61
|
|
|
|
|
|
|
|
|
62
|
|
|
|
|
|
|
# Package-wide default configuration, applied by _init() when the
# corresponding constructor argument is not supplied. Mutable through the
# defaults() class method.
my %default = (
	'error'               => 0.0001,                 # target MSE at which training stops
	'epochs'              => 5000,                   # maximum number of training epochs
	'train_type'          => 'ordinary',             # 'ordinary' or 'cascade'
	'epoch_printfreq'     => 100,                    # progress report interval (epochs)
	'neuron_printfreq'    => 0,                      # progress report interval (cascade neurons)
	'neurons'             => 15,                     # hidden layer size
	'activation_function' => FANN_SIGMOID_SYMMETRIC, # input-to-output mapping
);
|
71
|
|
|
|
|
|
|
|
|
72
|
|
|
|
|
|
|
=head1 NAME |
|
73
|
|
|
|
|
|
|
|
|
74
|
|
|
|
|
|
|
AI::FANN::Evolving - artificial neural network that evolves |
|
75
|
|
|
|
|
|
|
|
|
76
|
|
|
|
|
|
|
=head1 METHODS |
|
77
|
|
|
|
|
|
|
|
|
78
|
|
|
|
|
|
|
=over |
|
79
|
|
|
|
|
|
|
|
|
80
|
|
|
|
|
|
|
=item new |
|
81
|
|
|
|
|
|
|
|
|
82
|
|
|
|
|
|
|
Constructor requires 'file', or 'data' and 'neurons' arguments. Optionally takes |
|
83
|
|
|
|
|
|
|
'connection_rate' argument for sparse topologies. Returns a wrapper around L<AI::FANN>.
|
84
|
|
|
|
|
|
|
|
|
85
|
|
|
|
|
|
|
=cut |
|
86
|
|
|
|
|
|
|
|
|
87
|
|
|
|
|
|
|
sub new {
	my $class = shift;
	my %args  = @_;
	my $self  = bless {}, $class;
	$self->_init(%args);

	# de-serialize from a file
	if ( my $file = $args{'file'} ) {
		$self->{'ann'} = AI::FANN->new_from_file($file);
		$log->debug("instantiating from file $file");
		return $self;
	}

	# build new topology from input data
	elsif ( my $data = $args{'data'} ) {
		$log->debug("instantiating from data $data");
		$data = $data->to_fann if $data->isa('AI::FANN::Evolving::TrainData');

		# hidden layer size: explicit argument, or number of inputs + 1
		my $neurons = $args{'neurons'} || ( $data->num_inputs + 1 );
		my @sizes = ( $data->num_inputs, $neurons, $data->num_outputs );

		# sparse or fully connected topology
		$self->{'ann'} = $args{'connection_rate'}
			? AI::FANN->new_sparse( $args{'connection_rate'}, @sizes )
			: AI::FANN->new_standard(@sizes);

		return $self;
	}

	# build new ANN using the argument as a template
	elsif ( my $ann = $args{'ann'} ) {
		$log->debug("instantiating from template $ann");

		# copy the wrapper properties
		%{ $self } = %{ $ann };

		# instantiate a network with the same dimensions
		$self->{'ann'} = AI::FANN->new_standard(
			$ann->num_inputs,
			$ann->num_inputs + 1,
			$ann->num_outputs,
		);

		# copy the AI::FANN level properties onto the new network
		$ann->template( $self->{'ann'} );
		return $self;
	}
	else {
		die "Need 'file', 'data' or 'ann' argument!";
	}
}
|
148
|
|
|
|
|
|
|
|
|
149
|
|
|
|
|
|
|
=item template |
|
150
|
|
|
|
|
|
|
|
|
151
|
|
|
|
|
|
|
Uses the object as a template for the properties of the argument, e.g. |
|
152
|
|
|
|
|
|
|
$ann1->template($ann2) applies the properties of $ann1 to $ann2 |
|
153
|
|
|
|
|
|
|
|
|
154
|
|
|
|
|
|
|
=cut |
|
155
|
|
|
|
|
|
|
|
|
156
|
|
|
|
|
|
|
sub template {
	my ( $self, $other ) = @_;

	# apply the scalar-valued settings of $self to $other
	$log->debug("copying over simple properties");
	my %scalar_properties = __PACKAGE__->_scalar_properties;
	for my $prop ( keys %scalar_properties ) {
		my $value = $self->$prop;
		$other->$prop($value);
	}

	# apply the list-valued settings
	$log->debug("copying over list properties");
	my %list_properties = __PACKAGE__->_list_properties;
	for my $prop ( keys %list_properties ) {
		my @values = $self->$prop;
		$other->$prop(@values);
	}

	# apply the per-neuron settings, layer by layer
	$log->debug("copying over layer properties");
	my %layer_properties = __PACKAGE__->_layer_properties;
	for my $prop ( keys %layer_properties ) {
		for my $layer ( 0 .. $self->num_layers - 1 ) {
			for my $neuron ( 0 .. $self->layer_num_neurons($layer) - 1 ) {
				my $value = $self->$prop( $layer, $neuron );
				$other->$prop( $layer, $neuron, $value );
			}
		}
	}
	return $self;
}
|
188
|
|
|
|
|
|
|
|
|
189
|
|
|
|
|
|
|
=item recombine |
|
190
|
|
|
|
|
|
|
|
|
191
|
|
|
|
|
|
|
Recombines (exchanges) properties between the two objects at the provided rate, e.g. |
|
192
|
|
|
|
|
|
|
$ann1->recombine($ann2,0.5) means that on average half of the object properties are |
|
193
|
|
|
|
|
|
|
exchanged between $ann1 and $ann2 |
|
194
|
|
|
|
|
|
|
|
|
195
|
|
|
|
|
|
|
=cut |
|
196
|
|
|
|
|
|
|
|
|
197
|
|
|
|
|
|
|
sub recombine {
	my ( $self, $other, $rr ) = @_;

	# exchange the scalar properties at rate $rr
	my %scalar_properties = __PACKAGE__->_scalar_properties;
	for my $prop ( keys %scalar_properties ) {
		if ( rand(1) < $rr ) {
			my $vals = $self->$prop;
			my $valo = $other->$prop;
			$other->$prop($vals);
			$self->$prop($valo);
		}
	}

	# exchange the list properties at rate $rr
	my %list_properties = __PACKAGE__->_list_properties;
	for my $prop ( keys %list_properties ) {
		if ( rand(1) < $rr ) {
			my @values = $self->$prop;
			my @valueo = $other->$prop;
			$other->$prop(@values);
			$self->$prop(@valueo);
		}
	}

	# Exchange the layer properties at rate $rr. The previous implementation
	# copied these one-way from $self to $other and ignored the recombination
	# rate, which contradicted the documented contract of exchanging
	# properties between the two objects.
	my %layer_properties = __PACKAGE__->_layer_properties;
	for my $prop ( keys %layer_properties ) {
		for my $i ( 0 .. $self->num_layers - 1 ) {
			for my $j ( 0 .. $self->layer_num_neurons($i) - 1 ) {
				if ( rand(1) < $rr ) {
					my $vals = $self->$prop($i,$j);
					my $valo = $other->$prop($i,$j);
					$other->$prop( $i, $j, $vals );
					$self->$prop( $i, $j, $valo );
				}
			}
		}
	}
	return $self;
}
|
234
|
|
|
|
|
|
|
|
|
235
|
|
|
|
|
|
|
=item mutate |
|
236
|
|
|
|
|
|
|
|
|
237
|
|
|
|
|
|
|
Mutates the object by the provided mutation rate |
|
238
|
|
|
|
|
|
|
|
|
239
|
|
|
|
|
|
|
=cut |
|
240
|
|
|
|
|
|
|
|
|
241
|
|
|
|
|
|
|
sub mutate {
	my ( $self, $mu ) = @_;
	$log->debug("going to mutate at rate $mu");

	# mutate the simple (scalar) properties; a handler that is a code
	# reference mutates the value numerically, otherwise it names an enum
	# whose states are sampled by _mutate_enum
	$log->debug("mutating scalar properties");
	my %scalar_properties = __PACKAGE__->_scalar_properties;
	for my $prop ( keys %scalar_properties ) {
		my $handler = $scalar_properties{$prop};
		my $val = $self->$prop;
		if ( ref $handler ) {
			$self->$prop( $handler->($val,$mu) );
		}
		else {
			$self->$prop( _mutate_enum($handler,$val,$mu) );
		}
	}

	# mutate the list properties
	$log->debug("mutating list properties");
	my %list_properties = __PACKAGE__->_list_properties;
	for my $prop ( keys %list_properties ) {
		my $handler = $list_properties{$prop};
		my @values = $self->$prop;
		if ( ref $handler ) {
			$self->$prop( map { $handler->($_,$mu) } @values );
		}
		else {
			$self->$prop( map { _mutate_enum($handler,$_,$mu) } @values );
		}
	}

	# Mutate the layer properties. The previous implementation iterated
	# 1 .. num_layers (off-by-one; template() and recombine() use the
	# 0-based 0 .. num_layers - 1) and dropped the ($i,$j) indices when
	# writing the mutated value back, so the setter call was malformed.
	$log->debug("mutating layer properties");
	my %layer_properties = __PACKAGE__->_layer_properties;
	for my $prop ( keys %layer_properties ) {
		my $handler = $layer_properties{$prop};
		for my $i ( 0 .. $self->num_layers - 1 ) {
			for my $j ( 0 .. $self->layer_num_neurons($i) - 1 ) {
				my $val = $self->$prop($i,$j);
				if ( ref $handler ) {
					$self->$prop( $i, $j, $handler->($val,$mu) );
				}
				else {
					$self->$prop( $i, $j, _mutate_enum($handler,$val,$mu) );
				}
			}
		}
	}
	return $self;
}
|
292
|
|
|
|
|
|
|
|
|
293
|
|
|
|
|
|
|
# Mutates a floating-point value by scaling it with a factor drawn
# uniformly from [1-$mu, 1+$mu). Returns the (possibly) mutated value.
sub _mutate_double {
	my ( $value, $mu ) = @_;

	# A zero mutation rate must be the identity. The guard also avoids
	# Perl's rand(0), which behaves as rand(1) and would otherwise scale
	# the value by a factor in [1,2) even at rate 0.
	return $value unless $mu;
	my $scale = 1 + ( rand( 2 * $mu ) - $mu );
	return $value * $scale;
}
|
298
|
|
|
|
|
|
|
|
|
299
|
|
|
|
|
|
|
# Mutates an integer value: with probability $mu the value is stepped by
# +1 or -1, re-drawing the step while it would flip the sign of the value.
sub _mutate_int {
	my ( $value, $mu ) = @_;
	return $value unless rand(1) < $mu;

	# random +/-1 step; keep drawing while the step would cross zero
	my $step = int(rand(2)) * 2 - 1;
	$step = int(rand(2)) * 2 - 1 while ( $value < 0 ) xor ( ( $value + $step ) < 0 );
	return $value + $step;
}
|
310
|
|
|
|
|
|
|
|
|
311
|
|
|
|
|
|
|
# Mutates an enum-valued property: with probability $mu, picks a random
# alternative state from the named enum table; otherwise (or when no
# alternative exists) returns the value unchanged.
sub _mutate_enum {
	my ( $enum_name, $value, $mu ) = @_;
	return $value unless rand(1) < $mu;
	my @alternatives = grep { $_ != $value } values %{ $enum{$enum_name} };
	my ($candidate) = shuffle @alternatives;
	return defined $candidate ? $candidate : $value;
}
|
319
|
|
|
|
|
|
|
|
|
320
|
|
|
|
|
|
|
# List-valued ANN properties that take part in mutation/recombination,
# mapped to their mutation handler (code ref => numeric, string => enum name).
sub _list_properties {
	return (
		# cascade_activation_functions => 'activationfunc',
		cascade_activation_steepnesses => \&_mutate_double,
	);
}
|
326
|
|
|
|
|
|
|
|
|
327
|
|
|
|
|
|
|
# Per-layer/per-neuron ANN properties that take part in
# mutation/recombination. Currently all entries are disabled, so this
# returns an empty list.
sub _layer_properties {
	return (
		# neuron_activation_function  => 'activationfunc',
		# neuron_activation_steepness => \&_mutate_double,
	);
}
|
333
|
|
|
|
|
|
|
|
|
334
|
|
|
|
|
|
|
# Scalar-valued ANN properties that take part in mutation/recombination.
# A code-ref handler mutates numerically; a string handler names the enum
# (see %enum) whose states the property may take.
sub _scalar_properties {
	return (
		training_algorithm                  => 'train',
		train_error_function                => 'errorfunc',
		train_stop_function                 => 'stopfunc',
		learning_rate                       => \&_mutate_double,
		learning_momentum                   => \&_mutate_double,
		quickprop_decay                     => \&_mutate_double,
		quickprop_mu                        => \&_mutate_double,
		rprop_increase_factor               => \&_mutate_double,
		rprop_decrease_factor               => \&_mutate_double,
		rprop_delta_min                     => \&_mutate_double,
		rprop_delta_max                     => \&_mutate_double,
		cascade_output_change_fraction      => \&_mutate_double,
		cascade_candidate_change_fraction   => \&_mutate_double,
		cascade_output_stagnation_epochs    => \&_mutate_int,
		cascade_candidate_stagnation_epochs => \&_mutate_int,
		cascade_max_out_epochs              => \&_mutate_int,
		cascade_max_cand_epochs             => \&_mutate_int,
		cascade_num_candidate_groups        => \&_mutate_int,
		bit_fail_limit                      => \&_mutate_double, # 'fann_type'
		cascade_weight_multiplier           => \&_mutate_double, # 'fann_type'
		cascade_candidate_limit             => \&_mutate_double, # 'fann_type'
	);
}
|
359
|
|
|
|
|
|
|
|
|
360
|
|
|
|
|
|
|
=item defaults |
|
361
|
|
|
|
|
|
|
|
|
362
|
|
|
|
|
|
|
Getter/setter to influence default ANN configuration |
|
363
|
|
|
|
|
|
|
|
|
364
|
|
|
|
|
|
|
=cut |
|
365
|
|
|
|
|
|
|
|
|
366
|
|
|
|
|
|
|
sub defaults {
	my $self = shift;
	my %args = @_;
	while ( my ( $key, $value ) = each %args ) {
		$log->info("setting $key to $value");

		# activation functions are supplied by name; translate to the
		# numeric FANN constant
		$value = $constant{$value} if $key eq 'activation_function';
		$default{$key} = $value;
	}
	return %default;
}
|
378
|
|
|
|
|
|
|
|
|
379
|
|
|
|
|
|
|
# Seeds the wrapper-level settings from the constructor arguments, falling
# back to the package-wide %default for anything not supplied.
sub _init {
	my ( $self, %args ) = @_;
	my @settings = qw(error epochs train_type epoch_printfreq neuron_printfreq neurons activation_function);
	$self->{$_} = $args{$_} // $default{$_} for @settings;
	return $self;
}
|
387
|
|
|
|
|
|
|
|
|
388
|
|
|
|
|
|
|
=item clone |
|
389
|
|
|
|
|
|
|
|
|
390
|
|
|
|
|
|
|
Clones the object |
|
391
|
|
|
|
|
|
|
|
|
392
|
|
|
|
|
|
|
=cut |
|
393
|
|
|
|
|
|
|
|
|
394
|
|
|
|
|
|
|
sub clone {
	my $self = shift;
	$log->debug("cloning...");

	# Detach the AI::FANN reference before delegating to
	# Algorithm::Genetic::Diploid::Base's cloning method, which round-trips
	# through YAML; an XS data structure cannot be stringified that way.
	my $ann   = delete $self->{'ann'};
	my $clone = $self->SUPER::clone;

	# Clone the ANN itself by saving it to a temp file in "FANN/FLO" format
	# and reading it back in, then delete the file. Guard against the case
	# where no network has been attached yet.
	if ( $ann ) {
		my ( $fh, $file ) = tempfile();
		close $fh;
		$ann->save($file);

		# call AI::FANN directly rather than __PACKAGE__->new_from_file,
		# which only worked by falling through AUTOLOAD delegation
		$clone->{'ann'} = AI::FANN->new_from_file($file);
		unlink $file;

		# re-attach the original ANN to the invocant
		$self->{'ann'} = $ann;
	}
	return $clone;
}
|
419
|
|
|
|
|
|
|
|
|
420
|
|
|
|
|
|
|
=item train |
|
421
|
|
|
|
|
|
|
|
|
422
|
|
|
|
|
|
|
Trains the AI on the provided data object |
|
423
|
|
|
|
|
|
|
|
|
424
|
|
|
|
|
|
|
=cut |
|
425
|
|
|
|
|
|
|
|
|
426
|
|
|
|
|
|
|
sub train {
	my ( $self, $data ) = @_;

	if ( $self->train_type eq 'cascade' ) {
		$log->debug("cascade training");

		# set learning curve
		$self->cascade_activation_functions( $self->activation_function );

		# cascade training grows the network up to $self->neurons neurons,
		# reporting every neuron_printfreq additions, until error is reached
		$self->{'ann'}->cascadetrain_on_data(
			$data,
			$self->neurons,
			$self->neuron_printfreq,
			$self->error,
		);
		return;
	}

	$log->debug("normal training");

	# set learning curves
	$self->hidden_activation_function( $self->activation_function );
	$self->output_activation_function( $self->activation_function );

	# fixed-topology training for up to $self->epochs epochs, reporting
	# every epoch_printfreq epochs, until error is reached
	$self->{'ann'}->train_on_data(
		$data,
		$self->epochs,
		$self->epoch_printfreq,
		$self->error,
	);
}
|
458
|
|
|
|
|
|
|
|
|
459
|
|
|
|
|
|
|
=item enum_properties |
|
460
|
|
|
|
|
|
|
|
|
461
|
|
|
|
|
|
|
Returns a hash whose keys are names of enums and values the possible states for the |
|
462
|
|
|
|
|
|
|
enum |
|
463
|
|
|
|
|
|
|
|
|
464
|
|
|
|
|
|
|
=cut |
|
465
|
|
|
|
|
|
|
|
|
466
|
|
|
|
|
|
|
=item error |
|
467
|
|
|
|
|
|
|
|
|
468
|
|
|
|
|
|
|
Getter/setter for the error rate. Default is 0.0001 |
|
469
|
|
|
|
|
|
|
|
|
470
|
|
|
|
|
|
|
=cut |
|
471
|
|
|
|
|
|
|
|
|
472
|
|
|
|
|
|
|
sub error {
	my $self = shift;

	# setter
	if ( @_ ) {
		my $value = shift;
		$log->debug("setting error threshold to $value");
		return $self->{'error'} = $value;
	}

	# getter
	$log->debug("getting error threshold");
	return $self->{'error'};
}
|
484
|
|
|
|
|
|
|
|
|
485
|
|
|
|
|
|
|
=item epochs |
|
486
|
|
|
|
|
|
|
|
|
487
|
|
|
|
|
|
|
Getter/setter for the number of training epochs, default is 5000
|
488
|
|
|
|
|
|
|
|
|
489
|
|
|
|
|
|
|
=cut |
|
490
|
|
|
|
|
|
|
|
|
491
|
|
|
|
|
|
|
sub epochs {
	my $self = shift;

	# setter
	if ( @_ ) {
		my $value = shift;
		$log->debug("setting training epochs to $value");
		return $self->{'epochs'} = $value;
	}

	# getter
	$log->debug("getting training epochs");
	return $self->{'epochs'};
}
|
503
|
|
|
|
|
|
|
|
|
504
|
|
|
|
|
|
|
=item epoch_printfreq |
|
505
|
|
|
|
|
|
|
|
|
506
|
|
|
|
|
|
|
Getter/setter for the number of epochs after which progress is printed. Default is 100
|
507
|
|
|
|
|
|
|
|
|
508
|
|
|
|
|
|
|
=cut |
|
509
|
|
|
|
|
|
|
|
|
510
|
|
|
|
|
|
|
sub epoch_printfreq {
	my $self = shift;

	# setter
	if ( @_ ) {
		my $value = shift;
		$log->debug("setting epoch printfreq to $value");
		return $self->{'epoch_printfreq'} = $value;
	}

	# getter
	$log->debug("getting epoch printfreq");
	return $self->{'epoch_printfreq'};
}
|
522
|
|
|
|
|
|
|
|
|
523
|
|
|
|
|
|
|
=item neurons |
|
524
|
|
|
|
|
|
|
|
|
525
|
|
|
|
|
|
|
Getter/setter for the number of neurons. Default is 15 |
|
526
|
|
|
|
|
|
|
|
|
527
|
|
|
|
|
|
|
=cut |
|
528
|
|
|
|
|
|
|
|
|
529
|
|
|
|
|
|
|
sub neurons {
	my $self = shift;

	# setter
	if ( @_ ) {
		my $value = shift;
		$log->debug("setting neurons to $value");
		return $self->{'neurons'} = $value;
	}

	# getter
	$log->debug("getting neurons");
	return $self->{'neurons'};
}
|
541
|
|
|
|
|
|
|
|
|
542
|
|
|
|
|
|
|
=item neuron_printfreq |
|
543
|
|
|
|
|
|
|
|
|
544
|
|
|
|
|
|
|
Getter/setter for the number of cascading neurons after which progress is printed. |
|
545
|
|
|
|
|
|
|
Default is 0
|
546
|
|
|
|
|
|
|
|
|
547
|
|
|
|
|
|
|
=cut |
|
548
|
|
|
|
|
|
|
|
|
549
|
|
|
|
|
|
|
sub neuron_printfreq {
	my $self = shift;

	# setter
	if ( @_ ) {
		my $value = shift;
		$log->debug("setting neuron printfreq to $value");
		return $self->{'neuron_printfreq'} = $value;
	}

	# getter
	$log->debug("getting neuron printfreq");
	return $self->{'neuron_printfreq'};
}
|
561
|
|
|
|
|
|
|
|
|
562
|
|
|
|
|
|
|
=item train_type |
|
563
|
|
|
|
|
|
|
|
|
564
|
|
|
|
|
|
|
Getter/setter for the training type: 'cascade' or 'ordinary'. Default is ordinary |
|
565
|
|
|
|
|
|
|
|
|
566
|
|
|
|
|
|
|
=cut |
|
567
|
|
|
|
|
|
|
|
|
568
|
|
|
|
|
|
|
sub train_type {
	my $self = shift;

	# setter; the value is lower-cased so 'Cascade' etc. are accepted
	if ( @_ ) {
		my $value = lc shift;
		$log->debug("setting train type to $value");
		return $self->{'train_type'} = $value;
	}

	# getter
	$log->debug("getting train type");
	return $self->{'train_type'};
}
|
580
|
|
|
|
|
|
|
|
|
581
|
|
|
|
|
|
|
=item activation_function |
|
582
|
|
|
|
|
|
|
|
|
583
|
|
|
|
|
|
|
Getter/setter for the function that maps inputs to outputs. default is |
|
584
|
|
|
|
|
|
|
FANN_SIGMOID_SYMMETRIC |
|
585
|
|
|
|
|
|
|
|
|
586
|
|
|
|
|
|
|
=back |
|
587
|
|
|
|
|
|
|
|
|
588
|
|
|
|
|
|
|
=cut |
|
589
|
|
|
|
|
|
|
|
|
590
|
|
|
|
|
|
|
sub activation_function {
	my $self = shift;

	# setter
	if ( @_ ) {
		my $value = shift;
		$log->debug("setting activation function to $value");
		return $self->{'activation_function'} = $value;
	}

	# getter
	$log->debug("getting activation function");
	return $self->{'activation_function'};
}
|
602
|
|
|
|
|
|
|
|
|
603
|
|
|
|
|
|
|
# Traps method calls that need to be delegated to the wrapped AI::FANN
# object (instance calls) or the AI::FANN package (class calls). We don't
# check whether AI::FANN implements the method: if it doesn't, the user
# gets the normal unknown-method error, which they will have to resolve.
sub AUTOLOAD {
	my $self = shift;
	my $method = $AUTOLOAD;
	$method =~ s/.+://;

	# ignore all-caps methods (DESTROY and friends)
	return if $method =~ /^[A-Z]+$/;

	# determine whether to invoke on an object or a package
	my $invocant = ref $self ? $self->{'ann'} : 'AI::FANN';

	# Translate symbolic constant names (e.g. 'FANN_TRAIN_RPROP') to their
	# numeric values and pass ALL arguments through. The previous
	# implementation forwarded at most one argument, silently dropping the
	# rest, which broke delegated multi-argument calls such as the
	# per-layer/per-neuron accessors used by template().
	my @args = map {
		( defined $_ && !ref $_ && exists $constant{$_} ) ? $constant{$_} : $_
	} @_;
	return $invocant->$method(@args);
}
|
637
|
|
|
|
|
|
|
|
|
638
|
|
|
|
|
|
|
1; |