package AI::ActivationFunctions;

use strict;
use warnings;
use Exporter 'import';

our $VERSION  = '0.01';
our $ABSTRACT = 'Activation functions for neural networks in Perl';

# Complete list of exportable functions
our @EXPORT_OK = qw(
    relu prelu leaky_relu
    sigmoid tanh softmax
    elu swish gelu
    relu_derivative sigmoid_derivative
);

our %EXPORT_TAGS = (
    all         => \@EXPORT_OK,
    basic       => [qw(relu prelu leaky_relu sigmoid tanh softmax)],
    advanced    => [qw(elu swish gelu)],
    derivatives => [qw(relu_derivative sigmoid_derivative)],
);

# ReLU
sub relu {
    my ($x) = @_;
    return $x > 0 ? $x : 0;
}

# PReLU
sub prelu {
    my ($x, $alpha) = @_;
    $alpha //= 0.01;
    return $x > 0 ? $x : $alpha * $x;
}

# Leaky ReLU
sub leaky_relu {
    my ($x) = @_;
    return prelu($x, 0.01);
}

# Sigmoid
sub sigmoid {
    my ($x) = @_;
    return 1 / (1 + exp(-$x));
}

# Tanh
sub tanh {
    my ($x) = @_;
    my $e2x = exp(2 * $x);
    return ($e2x - 1) / ($e2x + 1);
}

# Softmax for an array reference
sub softmax {
    my ($array) = @_;

    return undef unless ref($array) eq 'ARRAY';

    # Find the maximum (subtracted below for numerical stability)
    my $max = $array->[0];
    foreach my $val (@$array) {
        $max = $val if $val > $max;
    }

    # Compute the exponentials
    my @exp_vals;
    my $sum = 0;
    foreach my $val (@$array) {
        my $exp_val = exp($val - $max);
        push @exp_vals, $exp_val;
        $sum += $exp_val;
    }

    # Normalize
    return [map { $_ / $sum } @exp_vals];
}

# ELU (Exponential Linear Unit)
sub elu {
    my ($x, $alpha) = @_;
    $alpha //= 1.0;
    return $x > 0 ? $x : $alpha * (exp($x) - 1);
}

# Swish (Google)
sub swish {
    my ($x) = @_;
    return $x * sigmoid($x);
}

# GELU (Gaussian Error Linear Unit)
sub gelu {
    my ($x) = @_;
    return 0.5 * $x * (1 + tanh(sqrt(2/3.141592653589793) *
        ($x + 0.044715 * $x**3)));
}

# Derivative of ReLU
sub relu_derivative {
    my ($x) = @_;
    return $x > 0 ? 1 : 0;
}

# Derivative of sigmoid
sub sigmoid_derivative {
    my ($x) = @_;
    my $s = sigmoid($x);
    return $s * (1 - $s);
}

1;

=head1 NAME

AI::ActivationFunctions - Activation functions for neural networks in Perl

=head1 VERSION

Version 0.01

=head1 ABSTRACT

Activation functions for neural networks in Perl

=head1 SYNOPSIS

    use AI::ActivationFunctions qw(relu prelu sigmoid);

    my $result       = relu(-5);        # returns 0
    my $prelu_result = prelu(-2, 0.1);  # returns -0.2

    # The functions take scalars; apply them element-wise with map
    my @results = map { relu($_) } (-2, -1, 0, 1, 2); # (0, 0, 0, 1, 2)

=head1 DESCRIPTION

This module provides various activation functions commonly used in neural
networks and machine learning. It includes basic functions like ReLU and
sigmoid, as well as advanced functions like GELU and Swish.

=head1 FUNCTIONS

=head2 Basic Functions

A short combined example follows this list.

=over 4

=item * relu($input)

Rectified Linear Unit. Returns max(0, $input).

=item * prelu($input, $alpha=0.01)

Parametric ReLU. Returns $input if $input > 0, else $alpha * $input.

=item * leaky_relu($input)

Leaky ReLU, equivalent to prelu($input, 0.01).

=item * sigmoid($input)

Sigmoid function: 1 / (1 + exp(-$input)).

=item * tanh($input)

Hyperbolic tangent function.

=item * softmax(\@array)

Softmax for probability distributions. Takes an array reference and returns
a reference to an array of values that sum to 1; returns undef if the
argument is not an array reference.

=back
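
Here is a brief combined sketch (illustrative values, which follow from the
definitions above):

    use AI::ActivationFunctions qw(:basic);

    my $h     = relu(0.5);          # 0.5 (positive inputs pass through)
    my $l     = leaky_relu(-2);     # -0.02 (slope 0.01 for negatives)
    my $s     = sigmoid(0);         # 0.5
    my $t     = tanh(0);            # 0
    my $probs = softmax([1, 2, 3]); # arrayref of probabilities summing to 1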

=head2 Advanced Functions

A short combined example follows this list.

=over 4

=item * elu($input, $alpha=1.0)

Exponential Linear Unit. Returns $input if $input > 0, else
$alpha * (exp($input) - 1).

=item * swish($input)

Swish activation function: $input * sigmoid($input).

=item * gelu($input)

Gaussian Error Linear Unit, computed with the common tanh approximation
(used in transformers like BERT and GPT).

=back
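
A minimal sketch of the advanced functions, using the defaults above:

    use AI::ActivationFunctions qw(:advanced);

    my $e = elu(-1);   # exp(-1) - 1, about -0.632 (default alpha = 1.0)
    my $s = swish(0);  # 0 * sigmoid(0) = 0
    my $g = gelu(0);   # 0 (GELU passes through the origin)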

=head2 Derivatives

A worked backpropagation sketch follows this list.

=over 4

=item * relu_derivative($input)

Derivative of ReLU for backpropagation: 1 if $input > 0, else 0.

=item * sigmoid_derivative($input)

Derivative of sigmoid for backpropagation:
sigmoid($input) * (1 - sigmoid($input)).

=back
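
As a sketch of how these fit into backpropagation, here is one
gradient-descent step for a single sigmoid unit under squared-error loss
(the variable names are illustrative, not part of the module):

    use AI::ActivationFunctions qw(sigmoid sigmoid_derivative);

    my ($w, $x, $target, $lr) = (0.5, 1.0, 1.0, 0.1);
    my $z    = $w * $x;
    my $y    = sigmoid($z);              # forward pass
    my $grad = ($y - $target)            # dLoss/dy for 0.5 * (y - t)**2
             * sigmoid_derivative($z)    # dy/dz
             * $x;                       # dz/dw
    $w -= $lr * $grad;                   # weight update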

=head1 EXPORT

By default nothing is exported. You can import specific functions or tags:

    use AI::ActivationFunctions qw(relu prelu); # specific functions
    use AI::ActivationFunctions qw(:basic);     # basic functions
    use AI::ActivationFunctions qw(:all);       # all functions
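
The remaining tags defined in %EXPORT_TAGS work the same way, for example:

    use AI::ActivationFunctions qw(:advanced);    # elu, swish, gelu
    use AI::ActivationFunctions qw(:derivatives); # relu_derivative, sigmoid_derivative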
|
216
|
|
|
|
|
|
|
|
|
217
|
|
|
|
|
|
|
=head1 SEE ALSO |
|
218
|
|
|
|
|
|
|
|
|
219
|
|
|
|
|
|
|
=over 4 |
|
220
|
|
|
|
|
|
|
|
|
221
|
|
|
|
|
|
|
=item * L - Perl Data Language for numerical computing |
|
222
|
|
|
|
|
|
|
|
|
223
|
|
|
|
|
|
|
=item * L - Perl interface to TensorFlow |
|
224
|
|
|
|
|
|
|
|
|
225
|
|
|
|
|
|
|
=item * L - Perl interface to Apache MXNet |
|
226
|
|
|
|
|
|
|
|
|
227
|
|
|
|
|
|
|
=back |
|
228
|
|
|
|
|
|
|
|
|
229
|
|
|
|
|
|
|
=head1 AUTHOR |
|
230
|
|
|
|
|
|
|
|
|
231
|
|
|
|
|
|
|
Your Name |
|
232
|
|
|
|
|
|
|
|
|
233
|
|
|
|
|
|
|
=head1 LICENSE |
|
234
|
|
|
|
|
|
|
|
|
235
|
|
|
|
|
|
|
This library is free software; you can redistribute it and/or modify |
|
236
|
|
|
|
|
|
|
it under the same terms as Perl itself. |
|
237
|
|
|
|
|
|
|
|
|
238
|
|
|
|
|
|
|
=cut |