File Coverage

blib/lib/AI/ANN/Evolver.pm
Criterion     Covered    Total        %
statement           8       10     80.0
branch            n/a
condition         n/a
subroutine          4        4    100.0
pod               n/a
total              12       14     85.7


line stmt bran cond sub pod time code
1             #!/usr/bin/perl
2             package AI::ANN::Evolver;
3             BEGIN {
4 2     2   10238 $AI::ANN::Evolver::VERSION = '0.008';
5             }
6             # ABSTRACT: an evolver for an artificial neural network simulator
7              
8 2     2   18 use strict;
  2         3  
  2         66  
9 2     2   135 use warnings;
  2         64  
  2         60  
10              
11 2     2   1210 use Moose;
  0            
  0            
12              
13             use AI::ANN;
14             use Storable qw(dclone);
15             use Math::Libm qw(tan);
16              
17              
18             has 'max_value' => (is => 'rw', isa => 'Num', default => 1);
19             has 'min_value' => (is => 'rw', isa => 'Num', default => 0);
20             has 'mutation_chance' => (is => 'rw', isa => 'Num', default => 0);
21             has 'mutation_amount' => (is => 'rw', isa => 'CodeRef', default => sub{sub{2 * rand() - 1}});
22             has 'add_link_chance' => (is => 'rw', isa => 'Num', default => 0);
23             has 'kill_link_chance' => (is => 'rw', isa => 'Num', default => 0);
24             has 'sub_crossover_chance' => (is => 'rw', isa => 'Num', default => 0);
25             has 'gaussian_tau' => (is => 'rw', isa => 'CodeRef', default => sub{sub{1/sqrt(2*sqrt(shift))}});
26             has 'gaussian_tau_prime' => (is => 'rw', isa => 'CodeRef', default => sub{sub{1/sqrt(2*shift)}});
27              
28             around BUILDARGS => sub {
29             my $orig = shift;
30             my $class = shift;
31             my %data;
32             if ( @_ == 1 && ref $_[0] eq 'HASH' ) {
33             %data = %{$_[0]};
34             } else {
35             %data = @_;
36             }
37             if (exists $data{'mutation_amount'} && ref $data{'mutation_amount'} ne 'CODE') { # don't clobber the default
38             my $range = $data{'mutation_amount'};
39             $data{'mutation_amount'} = sub { $range * (rand() * 2 - 1) };
40             }
41             return $class->$orig(%data);
42             };
43              
44              
45             sub crossover {
46             my $self = shift;
47             my $network1 = shift;
48             my $network2 = shift;
49             my $class = ref($network1);
50             my $inputcount = $network1->input_count();
51             my $minvalue = $network1->minvalue();
52             my $maxvalue = $network1->maxvalue();
53             my $afunc = $network1->afunc();
54             my $dafunc = $network1->dafunc();
55             # They better have the same number of inputs
56             $inputcount == $network2->input_count() || return -1;
57             my $networkdata1 = $network1->get_internals();
58             my $networkdata2 = $network2->get_internals();
59             my $neuroncount = $#{$networkdata1}; # last index, not a count
60             # They better also have the same number of neurons
61             $neuroncount == $#{$networkdata2} || return -1;
62             my $networkdata3 = [];
63              
64             for (my $i = 0; $i <= $neuroncount; $i++) {
65             if (rand() < $self->{'sub_crossover_chance'}) {
66             $networkdata3->[$i] = { 'inputs' => [], 'neurons' => [] };
67             $networkdata3->[$i]->{'iamanoutput'} =
68             $networkdata1->[$i]->{'iamanoutput'};
69             for (my $j = 0; $j < $inputcount; $j++) {
70             $networkdata3->[$i]->{'inputs'}->[$j] =
71             (rand() > 0.5) ?
72             $networkdata1->[$i]->{'inputs'}->[$j] :
73             $networkdata2->[$i]->{'inputs'}->[$j];
74             # Note to self: Don't get any silly ideas about dclone()ing
75             # these, that's a good way to waste half an hour debugging.
76             }
77             for (my $j = 0; $j <= $neuroncount; $j++) {
78             $networkdata3->[$i]->{'neurons'}->[$j] =
79             (rand() > 0.5) ?
80             $networkdata1->[$i]->{'neurons'}->[$j] :
81             $networkdata2->[$i]->{'neurons'}->[$j];
82             }
83             } else {
84             $networkdata3->[$i] = dclone(
85             (rand() > 0.5) ?
86             $networkdata1->[$i] :
87             $networkdata2->[$i] );
88             }
89             }
90             my $network3 = $class->new ( 'inputs' => $inputcount,
91             'data' => $networkdata3,
92             'minvalue' => $minvalue,
93             'maxvalue' => $maxvalue,
94             'afunc' => $afunc,
95             'dafunc' => $dafunc);
96             return $network3;
97             }
98              
99              
100             sub mutate {
101             my $self = shift;
102             my $network = shift;
103             my $class = ref($network);
104             my $networkdata = $network->get_internals();
105             my $inputcount = $network->input_count();
106             my $minvalue = $network->minvalue();
107             my $maxvalue = $network->maxvalue();
108             my $afunc = $network->afunc();
109             my $dafunc = $network->dafunc();
110             my $neuroncount = $#{$networkdata}; # N.B. this is the last
111             # index, not the neuron count.
112             $networkdata = dclone($networkdata); # For safety.
113             for (my $i = 0; $i <= $neuroncount; $i++) {
114             # First each input/neuron pair
115             for (my $j = 0; $j < $inputcount; $j++) {
116             my $weight = $networkdata->[$i]->{'inputs'}->[$j];
117             if (defined $weight && $weight != 0) {
118             if (rand() < $self->{'mutation_chance'}) {
119             $weight += &{$self->{'mutation_amount'}}(); # mutation_amount is a coderef
120             if ($weight > $self->{'max_value'}) {
121             $weight = $self->{'max_value'};
122             }
123             if ($weight < $self->{'min_value'}) {
124             $weight = $self->{'min_value'} + 0.000001;
125             }
126             }
127             if (abs($weight) < abs(&{$self->{'mutation_amount'}}())) { # sampled threshold
128             if (rand() < $self->{'kill_link_chance'}) {
129             $weight = undef;
130             }
131             }
132             } else {
133             if (rand() < $self->{'add_link_chance'}) {
134             $weight = abs(&{$self->{'mutation_amount'}}());
135             # We want to Do The Right Thing. Here, that means to
136             # detect whether the user is using weights in (0, x), and
137             # if so make sure we don't accidentally give them a
138             # negative weight, because that will become 0.000001.
139             # Instead, we'll generate a positive only value at first
140             # (it's easier) and then, if the user will accept negative
141             # weights, we'll let that happen.
142             if ($self->{'min_value'} < 0) {
143             $weight = -$weight if rand() < 0.5; # flip the sign half the time
144             }
145             # Of course, we have to check to be sure...
146             if ($weight > $self->{'max_value'}) {
147             $weight = $self->{'max_value'};
148             }
149             if ($weight < $self->{'min_value'}) {
150             $weight = $self->{'min_value'} + 0.000001;
151             }
152             # But we /don't/ need to do a kill_link_chance roll just yet.
153             }
154             }
155             # This would be a bloody nightmare if we hadn't done that dclone
156             # magic before. But look how easy it is!
157             $networkdata->[$i]->{'inputs'}->[$j] = $weight;
158             }
159             # Now each neuron/neuron pair
160             for (my $j = 0; $j <= $neuroncount; $j++) {
161             # As a reminder to those cursed with the duty of maintaining this code:
162             # This should be an exact copy of the code above, except that 'inputs'
163             # would be replaced with 'neurons'.
164             my $weight = $networkdata->[$i]->{'neurons'}->[$j];
165             if (defined $weight && $weight != 0) {
166             if (rand() < $self->{'mutation_chance'}) {
167             $weight += &{$self->{'mutation_amount'}}(); # mutation_amount is a coderef
168             if ($weight > $self->{'max_value'}) {
169             $weight = $self->{'max_value'};
170             }
171             if ($weight < $self->{'min_value'}) {
172             $weight = $self->{'min_value'} + 0.000001;
173             }
174             }
175             if (abs($weight) < abs(&{$self->{'mutation_amount'}}())) { # sampled threshold
176             if (rand() < $self->{'kill_link_chance'}) {
177             $weight = undef;
178             }
179             }
180              
181             } else {
182             if (rand() < $self->{'add_link_chance'}) {
183             $weight = abs(&{$self->{'mutation_amount'}}());
184             # We want to Do The Right Thing. Here, that means to
185             # detect whether the user is using weights in (0, x), and
186             # if so make sure we don't accidentally give them a
187             # negative weight, because that will become 0.000001.
188             # Instead, we'll generate a positive only value at first
189             # (it's easier) and then, if the user will accept negative
190             # weights, we'll let that happen.
191             if ($self->{'min_value'} < 0) {
192             $weight = -$weight if rand() < 0.5; # flip the sign half the time
193             }
194             # Of course, we have to check to be sure...
195             if ($weight > $self->{'max_value'}) {
196             $weight = $self->{'max_value'};
197             }
198             if ($weight < $self->{'min_value'}) {
199             $weight = $self->{'min_value'} + 0.000001;
200             }
201             # But we /don't/ need to do a kill_link_chance roll just yet.
202             }
203             }
204             # This would be a bloody nightmare if we hadn't done that dclone
205             # magic before. But look how easy it is!
206             $networkdata->[$i]->{'neurons'}->[$j] = $weight;
207             }
208             # That was rather tiring, and that's only for the first neuron!!
209             }
210             # All done. Let's pack it back into an object and let someone else deal
211             # with it.
212             $network = $class->new ( 'inputs' => $inputcount,
213             'data' => $networkdata,
214             'minvalue' => $minvalue,
215             'maxvalue' => $maxvalue,
216             'afunc' => $afunc,
217             'dafunc' => $dafunc);
218             return $network;
219             }
220              
221              
222             sub mutate_gaussian {
223             my $self = shift;
224             my $network = shift;
225             my $class = ref($network);
226             my $networkdata = $network->get_internals();
227             my $inputcount = $network->input_count();
228             my $minvalue = $network->minvalue();
229             my $maxvalue = $network->maxvalue();
230             my $afunc = $network->afunc();
231             my $dafunc = $network->dafunc();
232             my $neuroncount = $#{$networkdata}; # N.B. this is the last
233             # index, not the neuron count.
234             $networkdata = dclone($networkdata); # For safety.
235             for (my $i = 0; $i <= $neuroncount; $i++) {
236             my $n = 0;
237             for (my $j = 0; $j < $inputcount; $j++) {
238             my $weight = $networkdata->[$i]->{'inputs'}->[$j];
239             $n++ if $weight;
240             }
241             for (my $j = 0; $j <= $neuroncount; $j++) {
242             my $weight = $networkdata->[$i]->{'neurons'}->[$j];
243             $n++ if $weight;
244             }
245             next if $n == 0;
246             my $tau = &{$self->{'gaussian_tau'}}($n);
247             my $tau_prime = &{$self->{'gaussian_tau_prime'}}($n);
248             my $random1 = 2 * rand() - 1;
249             for (my $j = 0; $j < $inputcount; $j++) {
250             my $weight = $networkdata->[$i]->{'inputs'}->[$j];
251             next unless $weight;
252             my $random2 = 2 * rand() - 1;
253             $networkdata->[$i]->{'eta_inputs'}->[$j] *= exp($tau_prime*$random1+$tau*$random2);
254             $networkdata->[$i]->{'inputs'}->[$j] += $networkdata->[$i]->{'eta_inputs'}->[$j]*$random2;
255             }
256             for (my $j = 0; $j <= $neuroncount; $j++) {
257             my $weight = $networkdata->[$i]->{'neurons'}->[$j];
258             next unless $weight;
259             my $random2 = 2 * rand() - 1;
260             $networkdata->[$i]->{'eta_neurons'}->[$j] *= exp($tau_prime*$random1+$tau*$random2);
261             $networkdata->[$i]->{'neurons'}->[$j] += $networkdata->[$i]->{'eta_neurons'}->[$j]*$random2;
262             }
263             }
264             # All done. Let's pack it back into an object and let someone else deal
265             # with it.
266             $network = $class->new ( 'inputs' => $inputcount,
267             'data' => $networkdata,
268             'minvalue' => $minvalue,
269             'maxvalue' => $maxvalue,
270             'afunc' => $afunc,
271             'dafunc' => $dafunc);
272             return $network;
273             }
274              
275             __PACKAGE__->meta->make_immutable;
276              
277             1;
278              
279             __END__
280             =pod
281              
282             =head1 NAME
283              
284             AI::ANN::Evolver - an evolver for an artificial neural network simulator
285              
286             =head1 VERSION
287              
288             version 0.008
289              
290             =head1 METHODS
291              
292             =head2 new
293              
    AI::ANN::Evolver->new( {
        mutation_chance      => $mutationchance,
        mutation_amount      => $mutationamount,
        add_link_chance      => $addlinkchance,
        kill_link_chance     => $killlinkchance,
        sub_crossover_chance => $subcrossoverchance,
        min_value            => $minvalue,
        max_value            => $maxvalue,
    } )
298              
299             All values have a sane default.
300              
=over 4

=item mutation_chance

The chance that calling mutate() will add a random value on a per-link
basis. It only affects existing (nonzero) links.

=item mutation_amount

The maximum change that any single mutation can introduce. It affects the
result of successful mutation_chance rolls, the maximum value after an
add_link_chance roll, and the maximum strength of a link that can be
deleted by kill_link_chance rolls. It can either add or subtract. A plain
number is wrapped by BUILDARGS into a coderef that returns a value in
(-amount, amount).

=item add_link_chance

The chance that, during a mutate() call, each pair of unconnected neurons,
or each unconnected neuron => input pair, will spontaneously develop a
connection. This should be extremely small, as it is not an overall chance
but a chance for each connection that does not yet exist. If you wish to
ensure that your neural net does not become recursive, this must be zero.

=item kill_link_chance

The chance that, during a mutate() call, each pair of connected neurons
with a weight smaller than mutation_amount, or each neuron => input pair
with a weight smaller than mutation_amount, will be disconnected. If
add_link_chance is zero, this should also be zero, or your network will
just fizzle out.

=item sub_crossover_chance

The chance that, during a crossover() call, each neuron will, rather than
being inherited whole from one parent, have each element within it
inherited individually.

=item min_value

The smallest acceptable weight. It must be less than or equal to zero. If
a value would be decremented below min_value, it will instead become an
epsilon above min_value. This is so that we don't accidentally set a
weight to zero, thereby killing the link.

=item max_value

The largest acceptable weight. It must be greater than zero.

=item gaussian_tau, gaussian_tau_prime

The terms of the gaussian mutation method. They are coderefs which accept
one parameter, n, the number of nonzero-weight inputs to the given neuron.

=back
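
For example, a minimal construction sketch (the specific values here are
arbitrary illustrations, not recommendations):

    use AI::ANN::Evolver;

    my $evolver = AI::ANN::Evolver->new( {
        mutation_chance  => 0.05,    # mutate roughly 5% of existing links
        mutation_amount  => 0.25,    # plain number; wrapped into a coderef
        add_link_chance  => 0.0001,  # per missing connection, so keep tiny
        kill_link_chance => 0.0001,
        min_value        => -2,
        max_value        => 2,
    } );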
330              
331             =head2 crossover
332              
    $evolver->crossover( $network1, $network2 )
334              
Returns a $network3 consisting of a shuffling of $network1 and $network2.
As long as the same neurons in $network1 and $network2 are outputs,
$network3 will always have those same outputs.
This method, at least when sub_crossover_chance is nonzero, expects
neurons to be labeled from zero to n.
You probably don't want to do this. It is the least effective way to
evolve neural networks: because of the hidden intermediate steps, two
networks can produce exactly the same outputs from completely different
internal representations, in which case their offspring is unlikely to
resemble either parent.
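
For example (a sketch; it assumes $net1 and $net2 are AI::ANN networks
with identical input and neuron counts):

    my $evolver = AI::ANN::Evolver->new( { sub_crossover_chance => 0.5 } );
    my $child = $evolver->crossover( $net1, $net2 );
    # crossover() returns -1 rather than a network if the parents'
    # topologies don't match
    die "mismatched parents" unless ref $child;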
344              
345             =head2 mutate
346              
    $evolver->mutate($network)
348              
Returns a mutated copy of $network, built according to the parameters set
for $evolver. The original is not modified.
If you keep running totals of how many times each threshold is rolled
against (mutation_chance, kill_link_chance, add_link_chance), you can use
them to normalize your probabilities. For example, if you want links to be
killed about as often as they are added, keep running counters and let:

    $kill_link_chance = $add_link_chance * $add_link_counter / $kill_link_counter

This will probably make kill_link_chance much larger than add_link_chance,
but in doing so will make links be added at overall the same rate as they
are killed. Since new links tend to be killed particularly quickly, it may
be wise to add an additional optional multiplier to mutation_amount just
for new links.
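
For example, a sketch of that bookkeeping ($add_rolls and $kill_rolls are
hypothetical counters you would maintain yourself):

    my $mutant = $evolver->mutate($network);
    # rebalance so links are killed about as often as they are added
    if ($kill_rolls > 0) {
        $evolver->kill_link_chance(
            $evolver->add_link_chance() * $add_rolls / $kill_rolls );
    }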
362              
363             =head2 mutate_gaussian
364              
    $evolver->mutate_gaussian($network)
366              
Returns a version of $network modified according to the Gaussian mutation
rules discussed in X. Yao, Evolving Artificial Neural Networks, and X. Yao
and Y. Liu, Fast Evolution Strategies. Uses the gaussian_tau and
gaussian_tau_prime values from the initializer if they are present, or
sane defaults proposed in those papers. Both are functions of n, the
number of nonzero-weight inputs to the given neuron.
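
For example, a sketch that supplies the default terms explicitly (these
coderefs match the defaults shown in the source above):

    my $evolver = AI::ANN::Evolver->new( {
        gaussian_tau       => sub { 1 / sqrt( 2 * sqrt( $_[0] ) ) },
        gaussian_tau_prime => sub { 1 / sqrt( 2 * $_[0] ) },
    } );
    my $mutant = $evolver->mutate_gaussian($network);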
373              
374             =head1 AUTHOR
375              
376             Dan Collins <DCOLLINS@cpan.org>
377              
378             =head1 COPYRIGHT AND LICENSE
379              
380             This software is Copyright (c) 2011 by Dan Collins.
381              
382             This is free software, licensed under:
383              
384             The GNU General Public License, Version 3, June 2007
385              
386             =cut
387