line |
stmt |
bran |
cond |
sub |
pod |
time |
code |
1
|
|
|
|
|
|
|
package Statistics::MaxEntropy; |
2
|
|
|
|
|
|
|
|
3
|
|
|
|
|
|
|
##---------------------------------------------------------------------------## |
4
|
|
|
|
|
|
|
## Author: |
5
|
|
|
|
|
|
|
## Hugo WL ter Doest terdoest@cs.utwente.nl |
6
|
|
|
|
|
|
|
## Description: |
7
|
|
|
|
|
|
|
## Object-oriented implementation of |
8
|
|
|
|
|
|
|
## Generalised Iterative Scaling algorithm, |
9
|
|
|
|
|
|
|
## Improved Iterative Scaling algorithm, and |
10
|
|
|
|
|
|
|
## Feature Induction algorithm |
11
|
|
|
|
|
|
|
## for inducing maximum entropy probability distributions |
12
|
|
|
|
|
|
|
## Keywords: |
13
|
|
|
|
|
|
|
## Maximum Entropy Modeling |
14
|
|
|
|
|
|
|
## Kullback-Leibler Divergence |
15
|
|
|
|
|
|
|
## Exponential models |
16
|
|
|
|
|
|
|
## |
17
|
|
|
|
|
|
|
##---------------------------------------------------------------------------## |
18
|
|
|
|
|
|
|
## Copyright (C) 1998 Hugo WL ter Doest terdoest@cs.utwente.nl |
19
|
|
|
|
|
|
|
## |
20
|
|
|
|
|
|
|
## This library is free software; you can redistribute it and/or modify |
21
|
|
|
|
|
|
|
## it under the terms of the GNU General Public License as published by |
22
|
|
|
|
|
|
|
## the Free Software Foundation; either version 2 of the License, or |
23
|
|
|
|
|
|
|
## (at your option) any later version. |
24
|
|
|
|
|
|
|
## |
25
|
|
|
|
|
|
|
## This library is distributed in the hope that it will be useful, |
26
|
|
|
|
|
|
|
## but WITHOUT ANY WARRANTY; without even the implied warranty of |
27
|
|
|
|
|
|
|
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
28
|
|
|
|
|
|
|
## GNU General Public License for more details. |
29
|
|
|
|
|
|
|
## |
30
|
|
|
|
|
|
|
## You should have received a copy of the GNU Library General Public |
31
|
|
|
|
|
|
|
## License along with this program; if not, write to the Free Software |
32
|
|
|
|
|
|
|
## Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. |
33
|
|
|
|
|
|
|
##---------------------------------------------------------------------------## |
34
|
|
|
|
|
|
|
|
35
|
|
|
|
|
|
|
|
36
|
|
|
|
|
|
|
##---------------------------------------------------------------------------## |
37
|
|
|
|
|
|
|
## Globals |
38
|
|
|
|
|
|
|
##---------------------------------------------------------------------------## |
39
|
2
|
|
|
|
|
449
|
use vars qw($VERSION |
40
|
|
|
|
|
|
|
@ISA |
41
|
|
|
|
|
|
|
@EXPORT |
42
|
|
|
|
|
|
|
$VECTOR_PACKAGE |
43
|
|
|
|
|
|
|
|
44
|
|
|
|
|
|
|
$debug |
45
|
|
|
|
|
|
|
$SAMPLE_size |
46
|
|
|
|
|
|
|
$NEWTON_max_it |
47
|
|
|
|
|
|
|
$KL_max_it |
48
|
|
|
|
|
|
|
$KL_min |
49
|
|
|
|
|
|
|
$NEWTON_min |
50
|
|
|
|
|
|
|
$cntrl_c_pressed |
51
|
|
|
|
|
|
|
$cntrl_backslash_pressed |
52
|
2
|
|
|
2
|
|
1881
|
); |
|
2
|
|
|
|
|
4
|
|
53
|
|
|
|
|
|
|
|
54
|
|
|
|
|
|
|
|
55
|
|
|
|
|
|
|
##---------------------------------------------------------------------------## |
56
|
|
|
|
|
|
|
## Require libraries |
57
|
|
|
|
|
|
|
##---------------------------------------------------------------------------## |
58
|
2
|
|
|
2
|
|
15
|
use strict; |
|
2
|
|
|
|
|
5
|
|
|
2
|
|
|
|
|
92
|
|
59
|
2
|
|
|
2
|
|
7205
|
use diagnostics -verbose; |
|
2
|
|
|
|
|
524347
|
|
|
2
|
|
|
|
|
26
|
|
60
|
2
|
|
|
2
|
|
8184
|
use Statistics::SparseVector; |
|
2
|
|
|
|
|
7
|
|
|
2
|
|
|
|
|
157
|
|
61
|
|
|
|
|
|
|
$VECTOR_PACKAGE = "Statistics::SparseVector"; |
62
|
2
|
|
|
2
|
|
1824
|
use POSIX; |
|
2
|
|
|
|
|
15995
|
|
|
2
|
|
|
|
|
18
|
|
63
|
2
|
|
|
2
|
|
7506
|
use Carp; |
|
2
|
|
|
|
|
7
|
|
|
2
|
|
|
|
|
121
|
|
64
|
2
|
|
|
2
|
|
4585
|
use Data::Dumper; |
|
2
|
|
|
|
|
17023
|
|
|
2
|
|
|
|
|
8204
|
|
65
|
|
|
|
|
|
|
require Exporter; |
66
|
|
|
|
|
|
|
require AutoLoader; |
67
|
|
|
|
|
|
|
|
68
|
|
|
|
|
|
|
@ISA = qw(Exporter AutoLoader); |
69
|
|
|
|
|
|
|
# Items to export into callers namespace by default. Note: do not export |
70
|
|
|
|
|
|
|
# names by default without a very good reason. Use EXPORT_OK instead. |
71
|
|
|
|
|
|
|
# Do not simply export all your public functions/methods/constants. |
72
|
|
|
|
|
|
|
@EXPORT = qw($KL_min |
73
|
|
|
|
|
|
|
$NEWTON_min |
74
|
|
|
|
|
|
|
$debug |
75
|
|
|
|
|
|
|
$nr_add |
76
|
|
|
|
|
|
|
$KL_max_it |
77
|
|
|
|
|
|
|
$NEWTON_max_it |
78
|
|
|
|
|
|
|
$SAMPLE_size |
79
|
|
|
|
|
|
|
); |
80
|
|
|
|
|
|
|
|
81
|
|
|
|
|
|
|
$VERSION = '0.9';

# Default values for the configurable parameters (all overridable by
# the caller, since they are exported package globals).
$NEWTON_max_it = 20;     # iteration cap (Newton loop; see NEWTON_min)
$NEWTON_min    = 0.001;  # convergence threshold for the Newton loop
$KL_max_it     = 100;    # iteration cap (KL/scaling loop)
$KL_min        = 0.001;  # convergence threshold for the KL loop
$debug         = 0;      # extra diagnostics when true
$SAMPLE_size   = 250;    # the size of MC samples

# Interrupt bookkeeping: the handlers below only set these flags (and
# die, for Ctrl-C); DESTROY inspects them to write an emergency dump.
$cntrl_c_pressed         = 0;
$cntrl_backslash_pressed = 0;
$SIG{INT}  = \&catch_cntrl_c;
$SIG{QUIT} = \&catch_cntrl_backslash;
95
|
|
|
|
|
|
|
|
96
|
|
|
|
|
|
|
|
97
|
|
|
|
|
|
|
# Returns a true value when $candidate looks like a decimal floating
# point number: optional sign, at least one digit, optional fraction,
# optional exponent. (The match result is returned directly, so in
# list context a successful match yields the captured groups.)
sub is_float {
    my ($candidate) = @_;

    return ($candidate =~ /^([+-]?)(?=\d|\.\d)\d*(\.\d*)?([Ee]([+-]?\d+))?$/);
}
103
|
|
|
|
|
|
|
|
104
|
|
|
|
|
|
|
|
105
|
|
|
|
|
|
|
# Signal handler for SIGINT (Ctrl-C): record that the interrupt
# happened (DESTROY checks this flag to emit an emergency dump) and
# abort the current computation by dying.
sub catch_cntrl_c {
    my ($signame) = shift;

    $cntrl_c_pressed = 1;
    # NOTE(review): the message text appears truncated in the original
    # source; preserved byte-for-byte to keep behaviour identical.
    die " pressed\n";
}
112
|
|
|
|
|
|
|
|
113
|
|
|
|
|
|
|
|
114
|
|
|
|
|
|
|
# Signal handler for SIGQUIT (Ctrl-\, originally a core-dump request):
# just raise a flag; the main loops poll it, execution continues.
sub catch_cntrl_backslash {
    my ($signame) = shift;

    $cntrl_backslash_pressed = 1;
}
120
|
|
|
|
|
|
|
|
121
|
|
|
|
|
|
|
|
122
|
|
|
|
|
|
|
# Constructor: creates a new event space with default settings.
# When $arg is given it is taken to be an events file name and read in.
# Callable both as Class->new() and as $instance->new().
sub new {
    my ($this, $arg) = @_;

    my $class = ref($this) || $this;    # support $self->new($something)
    my $self  = bless {}, $class;

    # defaults
    $self->{SCALER}      = "gis";
    $self->{SAMPLING}    = "corpus";
    $self->{NR_CLASSES}  = 0;
    $self->{NR_EVENTS}   = 0;
    $self->{NR_FEATURES} = 0;

    $self->read($arg) if $arg;          # $arg is a filename
    return $self;
}
141
|
|
|
|
|
|
|
|
142
|
|
|
|
|
|
|
|
143
|
|
|
|
|
|
|
# decides how to sample, "enum", "mc", or "corpus" |
144
|
|
|
|
|
|
|
sub sample { |
145
|
186
|
|
|
186
|
0
|
323
|
my($self) = @_; |
146
|
|
|
|
|
|
|
|
147
|
186
|
|
|
|
|
343
|
my($sample); |
148
|
|
|
|
|
|
|
|
149
|
186
|
100
|
|
|
|
1378
|
if ($self->{SAMPLING} eq "mc") { |
|
|
100
|
|
|
|
|
|
150
|
18
|
|
|
|
|
114
|
$sample = $self->new(); |
151
|
18
|
|
|
|
|
64
|
$sample->{SCALER} = $self->{SCALER}; |
152
|
18
|
|
|
|
|
67
|
$sample->{NR_FEATURES} = $self->{NR_FEATURES}; |
153
|
|
|
|
|
|
|
# refer to the parameters of $self |
154
|
18
|
|
|
|
|
53
|
$sample->{PARAMETERS} = $self->{PARAMETERS}; |
155
|
18
|
|
|
|
|
51
|
$sample->{NEED_CORRECTION_FEATURE} = 1; |
156
|
18
|
|
|
|
|
90
|
$sample->{CORRECTION_PARAMETER} = $self->{CORRECTION_PARAMETER}; |
157
|
18
|
|
|
|
|
53
|
$sample->{E_REF} = $self->{E_REF}; |
158
|
18
|
|
|
|
|
44
|
$sample->{THIS_IS_A_SAMPLE} = 1; |
159
|
18
|
|
|
|
|
105
|
$sample->mc($self); |
160
|
18
|
|
|
|
|
66
|
$sample->{CLASSES_CHANGED} = 1; |
161
|
18
|
|
|
|
|
102
|
$sample->prepare_model(); |
162
|
|
|
|
|
|
|
} |
163
|
|
|
|
|
|
|
elsif ($self->{SAMPLING} eq "enum") { |
164
|
53
|
|
|
|
|
298
|
$sample = $self->new(); |
165
|
53
|
|
|
|
|
175
|
$sample->{SCALER} = $self->{SCALER}; |
166
|
53
|
|
|
|
|
127
|
$sample->{SAMPLING} = "enum"; |
167
|
53
|
|
|
|
|
156
|
$sample->{NR_FEATURES} = $self->{NR_FEATURES}; |
168
|
53
|
|
|
|
|
157
|
$sample->{PARAMETERS} = $self->{PARAMETERS}; |
169
|
53
|
|
|
|
|
129
|
$sample->{NEED_CORRECTION_FEATURE} = 1; |
170
|
53
|
|
|
|
|
251
|
$sample->{CORRECTION_PARAMETER} = $self->{CORRECTION_PARAMETER}; |
171
|
53
|
|
|
|
|
149
|
$sample->{E_REF} = $self->{E_REF}; |
172
|
53
|
|
|
|
|
123
|
$sample->{THIS_IS_A_SAMPLE} = 1; |
173
|
53
|
|
|
|
|
162
|
$sample->{M} = $self->{NR_FEATURES}; |
174
|
|
|
|
|
|
|
} |
175
|
|
|
|
|
|
|
else { # "corpus" |
176
|
115
|
|
|
|
|
216
|
$sample = $self; |
177
|
|
|
|
|
|
|
} |
178
|
186
|
|
|
|
|
901
|
$sample->prepare_sample(); |
179
|
186
|
|
|
|
|
781
|
return($sample); |
180
|
|
|
|
|
|
|
} |
181
|
|
|
|
|
|
|
|
182
|
|
|
|
|
|
|
|
183
|
|
|
|
|
|
|
# Invalidate all cached state so the next prepare_model() call
# recomputes everything from scratch.
sub clear {
    my ($self) = @_;

    undef $self->{PARAMETERS_INITIALISED};
    $self->{PARAMETERS_CHANGED} = 1;
    $self->{CLASSES_CHANGED}    = 1;
}
191
|
|
|
|
|
|
|
|
192
|
|
|
|
|
|
|
|
193
|
|
|
|
|
|
|
|
194
|
|
|
|
|
|
|
# Destructor: if the user hit Ctrl-C, write an emergency dump of the
# model before the object disappears.
sub DESTROY {
    my ($self) = @_;

    $self->dump() if $cntrl_c_pressed;
}
201
|
|
|
|
|
|
|
|
202
|
|
|
|
|
|
|
|
203
|
|
|
|
|
|
|
# Read an events file into $self; dies (via $self->die) on
# inconsistent lines.
# Format: first non-comment line holds the tab-separated feature names
# (stored in reverse order); every following non-comment line is
# "<frequency> <feature bit string>". Comment lines contain '#'.
sub read {
    my ($self, $file) = @_;

    my ($features, $feature_names);

    $feature_names = "";
    open(my $events_fh, '<', $file) ||
        $self->die("Could not open $file\n");
    print "Opened $file\n";

    # read the names of the features, skipping comment lines
    do {
        $feature_names = <$events_fh>;
    } until ($feature_names !~ /\#.*/);
    chomp $feature_names;
    $self->{FEATURE_NAMES} = [split(/\t/, $feature_names)];

    # read the bit vectors
    while (<$events_fh>) {
        if (!/\#.*/) {
            chomp;

            ($self->{FREQ}[$self->{NR_CLASSES}], $features) = split;
            # BUG FIX: the original tested $self->{FREQ} (the array
            # reference, always true) against 0; the frequency of the
            # class just read is what must be checked.
            if ($self->{FREQ}[$self->{NR_CLASSES}] == 0) {
                $self->die("Class $self->{NR_CLASSES} has zero probability\n");
            }
            $self->{NR_EVENTS} += $self->{FREQ}[$self->{NR_CLASSES}];

            # the first event determines the number of features ...
            if ($self->{NR_CLASSES} == 0) {
                $self->{NR_FEATURES} = length($features);
            }
            # ... which every later event must agree with
            else {
                if (length($features) != $self->{NR_FEATURES}) {
                    $self->die("Events file corrupt (class $self->{NR_CLASSES})\n");
                }
            }

            # create and initialise the bit vector for this class
            $self->{CLASSES}[$self->{NR_CLASSES}] =
                $VECTOR_PACKAGE->new_Bin($self->{NR_FEATURES}, $features);
            $self->{NR_CLASSES}++;
        }
    }
    close($events_fh);

    print "Read $self->{NR_EVENTS} events, $self->{NR_CLASSES} classes, " .
          "and $self->{NR_FEATURES} features\n";
    print "Closed $file\n";

    $self->{FILENAME}           = $file;
    $self->{CLASSES_CHANGED}    = 1;
    $self->{PARAMETERS_CHANGED} = 1;
}
262
|
|
|
|
|
|
|
|
263
|
|
|
|
|
|
|
|
264
|
|
|
|
|
|
|
# Read an initial parameter distribution from $file, one parameter per
# line (lines containing '#' are skipped). Dies via $self->die when
# the number of parameters does not match NR_FEATURES.
sub read_parameters {
    my ($self, $file) = @_;

    my $i = 0;

    open(my $distr_fh, '<', $file) ||
        $self->die("Could not open $file\n");
    print "Opened $file\n";

    while (<$distr_fh>) {
        if (!/\#.*/) {
            chomp;
            $self->{PARAMETERS}[$i++] = $_;
        }
    }
    close($distr_fh);

    if ($i != $self->{NR_FEATURES}) {
        $self->die("Initial distribution file corrupt\n");
    }
    print "Read $i parameters; closed $file\n";
    $self->{PARAMETERS_CHANGED} = 1;
}
290
|
|
|
|
|
|
|
|
291
|
|
|
|
|
|
|
|
292
|
|
|
|
|
|
|
# Write the current parameter values to $file, one per line in feature
# order; features marked in FEATURE_IGNORE are written as the literal
# string "IGNORED".
sub write_parameters {
    my ($self, $file) = @_;

    # three-argument open with a lexical handle (the original used a
    # bareword handle and two-argument open)
    open(my $distr_fh, '>', $file) ||
        $self->die("Could not open $file\n");
    print "Opened $file\n";

    for (my $i = 0; $i < $self->{NR_FEATURES}; $i++) {
        if ($self->{FEATURE_IGNORE}{$i}) {
            print {$distr_fh} "IGNORED\n";
        }
        else {
            print {$distr_fh} "$self->{PARAMETERS}[$i]\n";
        }
    }

    close($distr_fh);
    print "Closed $file\n";
}
315
|
|
|
|
|
|
|
|
316
|
|
|
|
|
|
|
|
317
|
|
|
|
|
|
|
# Write the features together with their parameters.
# Layout: NR_FEATURES, then the scaler name; for "gis" additionally M
# and the correction parameter; then one "<name>\t<parameter>" line
# per feature (names are stored reversed, hence the reverse index);
# the last line is a bitmask marking which features are active (1) or
# ignored (0).
sub write_parameters_with_names {
    my ($self, $file) = @_;

    my ($x, $bitmask);

    # three-argument open with a lexical handle (was 2-arg + bareword)
    open(my $distr_fh, '>', $file) ||
        $self->die("Could not open $file\n");
    print "Opened $file\n";

    # preamble
    print {$distr_fh} "$self->{NR_FEATURES}\n";
    print {$distr_fh} "$self->{SCALER}\n";
    if ($self->{SCALER} eq "gis") {
        print {$distr_fh} "$self->{M}\n";
        print {$distr_fh} "$self->{CORRECTION_PARAMETER}\n";
    }

    # feature names with parameters; build the bitmask as we go
    $bitmask = "";
    for ($x = 0; $x < $self->{NR_FEATURES}; $x++) {
        print {$distr_fh} "$self->{FEATURE_NAMES}[$self->{NR_FEATURES} - $x - 1]\t" .
                          "$self->{PARAMETERS}[$x]\n";
        if ($self->{FEATURE_IGNORE}{$x}) {
            $bitmask = "0" . $bitmask;
        }
        else {
            $bitmask = "1" . $bitmask;
        }
    }
    print {$distr_fh} "$bitmask\n";

    close($distr_fh);
    print "Closed $file\n";
}
357
|
|
|
|
|
|
|
|
358
|
|
|
|
|
|
|
|
359
|
|
|
|
|
|
|
# Give every feature parameter a random value in [1, 2); for the "gis"
# scaler the correction parameter gets a value in [0, 1).
sub random_parameters {
    my ($self) = @_;

    srand();
    foreach my $f (0 .. $self->{NR_FEATURES} - 1) {
        $self->{PARAMETERS}[$f] = rand() + 1;
    }
    $self->{CORRECTION_PARAMETER} = rand()
        if $self->{SCALER} eq "gis";
    $self->{PARAMETERS_CHANGED} = 1;
}
374
|
|
|
|
|
|
|
|
375
|
|
|
|
|
|
|
|
376
|
|
|
|
|
|
|
# Set every feature parameter (and, for the "gis" scaler, the
# correction parameter) to the constant $val.
sub set_parameters_to {
    my ($self, $val) = @_;

    foreach my $f (0 .. $self->{NR_FEATURES} - 1) {
        $self->{PARAMETERS}[$f] = $val;
    }
    $self->{CORRECTION_PARAMETER} = $val
        if $self->{SCALER} eq "gis";
    $self->{PARAMETERS_CHANGED} = 1;
}
390
|
|
|
|
|
|
|
|
391
|
|
|
|
|
|
|
|
392
|
|
|
|
|
|
|
# Initialise the parameters exactly once; subsequent calls (e.g. when
# scale() is re-run after feature induction) must not reset them.
sub init_parameters {
    my ($self) = @_;

    return if $self->{PARAMETERS_INITIALISED};

    if ($self->{SAMPLING} eq "mc") {
        # random start; otherwise bits would be flipped with prob. 1
        $self->random_parameters();
    }
    else {
        # NOTE(review): the original had identical "gis"/other
        # branches, both setting the parameters to 0 — collapsed here.
        $self->set_parameters_to(0);
    }
    $self->{PARAMETERS_INITIALISED} = 1;
}
413
|
|
|
|
|
|
|
|
414
|
|
|
|
|
|
|
|
415
|
|
|
|
|
|
|
# Sanity checks on the model: make sure \tilde{p} << q_0 (warn about
# classes with zero weight) and detect constant feature functions —
# a feature active in no class, or in every class, carries no
# information and is marked to be ignored.
sub check {
    my ($self) = @_;

    # classes that received zero weight under the current model
    foreach my $x (0 .. $self->{NR_CLASSES} - 1) {
        if ($self->{CLASS_EXP_WEIGHTS}[$x] == 0) {
            print "Initial distribution not ok; class $x\n";
            print $self->{CLASS_EXP_WEIGHTS}[$x], "\t",
                  $self->{CLASSES}[$x]->to_Bin(), "\n";
        }
    }

    # constant features: \sum_x f(x) == 0 or == number of classes
    foreach my $f (0 .. $self->{NR_FEATURES} - 1) {
        my $active = 0;
        foreach my $x (0 .. $self->{NR_CLASSES} - 1) {
            $active += $self->{CLASSES}[$x]->bit_test($f);
        }
        if (!$active || ($active == $self->{NR_CLASSES})) {
            print "Feature ", $f + 1, " is constant ($active), and will be ignored\n";
            $self->{FEATURE_IGNORE}{$f} = 1;
        }
    }
}
444
|
|
|
|
|
|
|
|
445
|
|
|
|
|
|
|
|
446
|
|
|
|
|
|
|
# Write the event space to $file in the same format read() accepts:
# one line of tab-separated feature names, then one
# "<frequency>\t<bit string>" line per class. Useful after new
# features have been induced.
sub write {
    my ($self, $file) = @_;

    my ($x, $f);

    # prologue (three-argument open with a lexical handle)
    open(my $events_fh, '>', $file) ||
        $self->die("Could not open $file\n");
    print "Opened $file\n";

    # header line with the feature names.
    # BUG FIX: FEATURE_NAMES is an array reference (read() stores
    # [split ...]); the original joined the reference itself, which
    # would print the stringified ref instead of the names.
    print {$events_fh} join("\t", @{$self->{FEATURE_NAMES}}), "\n";

    # the events themselves
    for ($x = 0; $x < $self->{NR_CLASSES}; $x++) {
        print {$events_fh} $self->{FREQ}[$x], "\t";
        print {$events_fh} $self->{CLASSES}[$x]->to_Bin(), "\n";
    }

    # close the file and tell you did that
    close($events_fh);
    print "Wrote $self->{NR_EVENTS} events, $self->{NR_CLASSES} classes, " .
          "and $self->{NR_FEATURES} features\n";
    print "Closed $file\n";
}
473
|
|
|
|
|
|
|
|
474
|
|
|
|
|
|
|
|
475
|
|
|
|
|
|
|
# Read a dump produced by dump() and evaluate it back into an object.
# Returns the restored object ($VAR1 from the Data::Dumper output).
sub undump {
    my ($class, $file) = @_;

    my $VAR1;

    # open, slurp, and close the file
    open(my $undump_fh, '<', $file) ||
        croak "Could not open $file\n";
    print "Opened $file\n";
    # local $/ restores the record separator automatically (the
    # original assigned undef/"\n" to the global $/ by hand)
    my $image = do { local $/; <$undump_fh> };
    close($undump_fh);

    # SECURITY NOTE: string eval executes arbitrary code from $file —
    # only undump files you produced yourself with dump().
    eval $image;
    print "Undumped $VAR1->{NR_EVENTS} events, $VAR1->{NR_CLASSES} classes, " .
          "and $VAR1->{NR_FEATURES} features\n";
    print "Closed $file\n";
    return($VAR1);
}
498
|
|
|
|
|
|
|
|
499
|
|
|
|
|
|
|
|
500
|
|
|
|
|
|
|
# Dump the event space to $file (a fresh temp file when none is given)
# with Data::Dumper, restorable later via undump(). For the corpus
# (not a sample) a human-readable ranking of the features by parameter
# value is recorded in FEATURE_SORTED first, so it ends up in the dump.
sub dump {
    my ($self, $file) = @_;

    my (%features, $f);

    if (!$file) {
        # NOTE(review): POSIX::tmpnam is race-prone; File::Temp would
        # be safer, but is kept to preserve the original behaviour.
        $file = POSIX::tmpnam();
    }
    # three-argument open with a lexical handle (was 2-arg + bareword)
    open(my $dump_fh, '>', $file) ||
        croak "Could not open $file\n";
    print "Opened $file\n";

    # build something that we can sort — ONLY FOR THE CORPUS
    if (!$self->{THIS_IS_A_SAMPLE} && $self->{PARAMETERS}) {
        for ($f = 0; $f < $self->{NR_FEATURES}; $f++) {
            # feature names are stored in reverse order
            $features{$self->{FEATURE_NAMES}[$self->{NR_FEATURES} - $f - 1]} =
                $self->{PARAMETERS}[$f];
        }
        if ($self->{NEED_CORRECTION_FEATURE} && ($self->{SCALER} eq "gis")) {
            $features{"correction$self->{M}"} =
                $self->{CORRECTION_PARAMETER};
        }
        # rank by parameter value, descending; ties broken by name
        # (descending) — equivalent to the original if/else comparator
        $self->{FEATURE_SORTED} =
            join(' > ',
                 sort { $features{$b} <=> $features{$a} or $b cmp $a }
                 keys(%features));
    }

    my $dumper = Data::Dumper->new([$self]);
    print {$dump_fh} $dumper->Dump();
    print "Dumped $self->{NR_EVENTS} events, $self->{NR_CLASSES} classes, " .
          "and $self->{NR_FEATURES} features\n";

    close($dump_fh);
    print "Closed $file\n";
}
547
|
|
|
|
|
|
|
|
548
|
|
|
|
|
|
|
|
549
|
|
|
|
|
|
|
# Fatal-error method: log $msg plus a timestamp, write an emergency
# dump of the model, then abort via croak with the same message.
sub die {
    my ($self, $msg) = @_;

    $self->log($msg);
    $self->log(time());
    $self->dump();
    croak $msg;
}
559
|
|
|
|
|
|
|
|
560
|
|
|
|
|
|
|
|
561
|
|
|
|
|
|
|
# Append $msg to the in-memory log (kept so an emergency dump carries
# some history) and echo it to STDOUT.
sub log {
    my ($self, $msg) = @_;

    $self->{LOG} .= $msg;
    print $msg;
}
569
|
|
|
|
|
|
|
|
570
|
|
|
|
|
|
|
|
571
|
|
|
|
|
|
|
# Recompute everything that depends on the set of classes, but only
# when CLASSES_CHANGED is set: M (the maximum number of features
# active in any class, needed by both gis and iis), the per-class GIS
# correction feature, and — for the corpus — the observed feature
# expectations.
sub active_features {
    my ($self) = @_;

    return unless $self->{CLASSES_CHANGED};

    # M and (for gis) whether a correction feature is needed at all
    $self->{M} = 0;
    $self->{NEED_CORRECTION_FEATURE} = 0;
    foreach my $i (0 .. $self->{NR_CLASSES} - 1) {
        my $norm = $self->{CLASSES}[$i]->Norm();
        if ($norm > $self->{M}) {
            # higher nr_features_active found
            $self->{M} = $norm;
            $self->{NEED_CORRECTION_FEATURE} = 1;
        }
    }
    print "M = $self->{M}\n" if $debug;

    # per-class correction feature value (gis only)
    undef $self->{M_FEATURES_ACTIVE};
    foreach my $i (0 .. $self->{NR_CLASSES} - 1) {
        if ($self->{SCALER} eq "gis") {
            $self->{CORRECTION_FEATURE}[$i] =
                $self->{M} - $self->{CLASSES}[$i]->Norm();
        }
    }
    print "M = $self->{M}\n" if $debug;

    # observed feature expectations — for the corpus, not for samples
    $self->E_reference() unless $self->{THIS_IS_A_SAMPLE};

    undef $self->{CLASSES_CHANGED};
}
613
|
|
|
|
|
|
|
|
614
|
|
|
|
|
|
|
|
615
|
|
|
|
|
|
|
# Recompute the class weights (and the partition function Z) from the
# current parameters. Work is only done when PARAMETERS_CHANGED is
# set; active_features() is always given the chance to refresh its
# class-dependent state first.
sub prepare_model {
    my ($self) = @_;

    $self->active_features();
    return unless $self->{PARAMETERS_CHANGED};

    $self->{Z} = 0;
    foreach my $x (0 .. $self->{NR_CLASSES} - 1) {
        # log weight = sum of the parameters of the active features
        my $log_weight = 0;
        foreach my $f ($self->{CLASSES}[$x]->indices()) {
            $log_weight += $self->{PARAMETERS}[$f];
        }
        # gis additionally weighs the correction feature
        if ($self->{NEED_CORRECTION_FEATURE} && ($self->{SCALER} eq "gis")) {
            $log_weight += $self->{CORRECTION_FEATURE}[$x] *
                           $self->{CORRECTION_PARAMETER};
        }
        $self->{CLASS_LOG_WEIGHTS}[$x] = $log_weight;
        $self->{CLASS_EXP_WEIGHTS}[$x] = exp($log_weight);
        $self->{Z} += $self->{CLASS_EXP_WEIGHTS}[$x];
    }
    print "prepare_model: \$Z is not a number: $self->{Z}\n"
        unless is_float($self->{Z});

    # entropies only make sense for the corpus, not for samples
    $self->entropies() unless $self->{THIS_IS_A_SAMPLE};
    $self->check();
    undef $self->{PARAMETERS_CHANGED};
}
647
|
|
|
|
|
|
|
|
648
|
|
|
|
|
|
|
|
649
|
|
|
|
|
|
|
# Compute the expectations the scaler needs: the loglinear feature
# expectations for "gis", the A_{mj} table otherwise (iis).
sub prepare_sample {
    my ($self) = @_;

    if ($self->{SCALER} eq "gis") {
        $self->E_loglinear();
    }
    else {
        $self->A();    # A_{mj}
    }
}
661
|
|
|
|
|
|
|
|
662
|
|
|
|
|
|
|
|
663
|
|
|
|
|
|
|
# Feature expectations under the current MaxEnt (loglinear) model,
# stored in E_LOGLIN; slot NR_FEATURES holds the correction feature.
# "enum" sampling walks all 2^NR_FEATURES vectors (recomputing Z on
# the way); corpus/mc sampling sums over the known classes and relies
# on the Z computed by prepare_model().
sub E_loglinear {
    my ($self) = @_;

    undef $self->{E_LOGLIN};

    if ($self->{SAMPLING} eq "enum") {
        my $vec = $VECTOR_PACKAGE->new($self->{NR_FEATURES});
        $self->{Z} = 0;
        foreach my $x (0 .. 2 ** $self->{NR_FEATURES} - 1) {
            my $weight = $self->weight($vec);
            foreach my $f ($vec->indices()) {
                $self->{E_LOGLIN}[$f] += $weight;
            }
            $self->{E_LOGLIN}[$self->{NR_FEATURES}] +=
                $weight * ($self->{M} - $vec->Norm());
            $self->{Z} += $weight;
            $vec->increment();
        }
    }
    else { # either corpus or mc sample
        foreach my $x (0 .. $self->{NR_CLASSES} - 1) {
            my $class_weight = $self->{CLASS_EXP_WEIGHTS}[$x];
            foreach my $f ($self->{CLASSES}[$x]->indices()) {
                $self->{E_LOGLIN}[$f] += $class_weight;
            }
            if ($self->{NEED_CORRECTION_FEATURE}) {
                $self->{E_LOGLIN}[$self->{NR_FEATURES}] +=
                    $class_weight * ($self->{M} - $self->{CLASSES}[$x]->Norm());
            }
        }
    }

    # normalise by the partition function
    foreach my $f (0 .. $self->{NR_FEATURES}) {
        $self->{E_LOGLIN}[$f] /= $self->{Z};
    }
}
707
|
|
|
|
|
|
|
|
708
|
|
|
|
|
|
|
|
709
|
|
|
|
|
|
|
# Observed (reference) feature expectations from the corpus:
# E_REF[f] = sum over classes of freq * f(class) / NR_EVENTS.
# For "gis" the correction feature's expectation goes in slot
# NR_FEATURES. Slots whose sum is zero are left untouched.
sub E_reference {
    my ($self) = @_;

    my @sum;
    foreach my $x (0 .. $self->{NR_CLASSES} - 1) {
        foreach my $f ($self->{CLASSES}[$x]->indices()) {
            $sum[$f] += $self->{FREQ}[$x];
        }
        if ($self->{SCALER} eq "gis") {
            $sum[$self->{NR_FEATURES}] +=
                $self->{CORRECTION_FEATURE}[$x] * $self->{FREQ}[$x];
        }
    }
    foreach my $f (0 .. $self->{NR_FEATURES}) {
        if ($sum[$f]) {
            $self->{E_REF}[$f] = $sum[$f] / $self->{NR_EVENTS};
        }
    }
}
732
|
|
|
|
|
|
|
|
733
|
|
|
|
|
|
|
|
734
|
|
|
|
|
|
|
# compute several entropies |
735
|
|
|
|
|
|
|
sub entropies { |
736
|
254
|
|
|
254
|
0
|
455
|
my($self) = @_; |
737
|
|
|
|
|
|
|
|
738
|
254
|
|
|
|
|
498
|
my ($i, |
739
|
|
|
|
|
|
|
$w, |
740
|
|
|
|
|
|
|
$log_w, |
741
|
|
|
|
|
|
|
$w_ref, |
742
|
|
|
|
|
|
|
$log_w_ref); |
743
|
|
|
|
|
|
|
|
744
|
254
|
|
|
|
|
729
|
$self->{H_p} = 0; |
745
|
254
|
|
|
|
|
530
|
$self->{H_cross} = 0; |
746
|
254
|
|
|
|
|
438
|
$self->{H_p_ref} = 0; |
747
|
254
|
|
|
|
|
512
|
$self->{KL} = 0; |
748
|
254
|
|
|
|
|
1071
|
for ($i = 0; $i < $self->{NR_CLASSES}; $i++) { |
749
|
25400
|
|
|
|
|
35794
|
$w = $self->{CLASS_EXP_WEIGHTS}[$i]; |
750
|
|
|
|
|
|
|
# we don't know whether $p > 0 |
751
|
25400
|
|
|
|
|
33604
|
$log_w = $self->{CLASS_LOG_WEIGHTS}[$i]; |
752
|
25400
|
|
|
|
|
44242
|
$w_ref = $self->{FREQ}[$i]; |
753
|
|
|
|
|
|
|
# we know that $p_ref > 0 |
754
|
25400
|
|
|
|
|
31285
|
$log_w_ref = log($w_ref); |
755
|
|
|
|
|
|
|
# update the sums |
756
|
25400
|
|
|
|
|
34555
|
$self->{H_p} -= $w * $log_w; |
757
|
25400
|
|
|
|
|
30818
|
$self->{H_cross} -= $w_ref * $log_w; |
758
|
25400
|
|
|
|
|
33681
|
$self->{KL} += $w_ref * ($log_w_ref - $log_w); |
759
|
25400
|
|
|
|
|
32189
|
$self->{H_p_ref} -= $w_ref * $log_w_ref; |
760
|
25400
|
50
|
|
|
|
83271
|
if ($w == 0) { |
761
|
0
|
|
|
|
|
0
|
$self->log("entropies: skipping event $i (p^n($i) = 0)\n"); |
762
|
|
|
|
|
|
|
} |
763
|
|
|
|
|
|
|
} |
764
|
|
|
|
|
|
|
# normalise |
765
|
254
|
|
|
|
|
948
|
$self->{H_p} = $self->{H_p} / $self->{Z} + log($self->{Z}); |
766
|
254
|
|
|
|
|
810
|
$self->{H_cross} = $self->{H_cross} / $self->{NR_EVENTS} + log($self->{Z}); |
767
|
254
|
|
|
|
|
1261
|
$self->{KL} = $self->{KL} / $self->{NR_EVENTS} - log($self->{NR_EVENTS}) + |
768
|
|
|
|
|
|
|
log($self->{Z}); |
769
|
254
|
|
|
|
|
899
|
$self->{H_p_ref} = $self->{H_p_ref} / $self->{NR_EVENTS} + log($self->{NR_EVENTS}); |
770
|
254
|
|
|
|
|
888
|
$self->{L} = -$self->{H_cross}; |
771
|
|
|
|
|
|
|
} |
772
|
|
|
|
|
|
|
|
773
|
|
|
|
|
|
|
|
774
|
|
|
|
|
|
|
# unnormalised density |
775
|
|
|
|
|
|
|
sub weight { |
776
|
1869824
|
|
|
1869824
|
0
|
2681633
|
my($self, $bitvec) = @_; |
777
|
|
|
|
|
|
|
|
778
|
1869824
|
|
|
|
|
2038577
|
my ($f, |
779
|
|
|
|
|
|
|
$sum); |
780
|
|
|
|
|
|
|
|
781
|
1869824
|
|
|
|
|
2260843
|
$sum = 0; |
782
|
1869824
|
|
|
|
|
4972494
|
for $f ($bitvec->indices()) { |
783
|
15198208
|
50
|
|
|
|
33547900
|
if (!$self->{FEATURE_IGNORE}{$f}) { |
784
|
15198208
|
|
|
|
|
28054995
|
$sum += $self->{PARAMETERS}[$f]; |
785
|
|
|
|
|
|
|
} |
786
|
|
|
|
|
|
|
} |
787
|
1869824
|
100
|
66
|
|
|
12456305
|
if ($self->{NEED_CORRECTION_FEATURE} && ($self->{SCALER} eq "gis")) { |
788
|
817152
|
|
|
|
|
2568051
|
$sum += ($self->{M} - $bitvec->Norm()) * $self->{CORRECTION_PARAMETER}; |
789
|
|
|
|
|
|
|
} |
790
|
1869824
|
|
|
|
|
4919175
|
return(exp($sum)); |
791
|
|
|
|
|
|
|
} |
792
|
|
|
|
|
|
|
|
793
|
|
|
|
|
|
|
|
794
|
|
|
|
|
|
|
# computes the `a' coefficients of |
795
|
|
|
|
|
|
|
# \sum_{m=0}^{M} a_{m,j}^{(n)} e^{\alpha^{(n)}_j m} |
796
|
|
|
|
|
|
|
# according to the current distribution |
797
|
|
|
|
|
|
|
sub A { |
798
|
63
|
|
|
63
|
0
|
156
|
my($self) = @_; |
799
|
|
|
|
|
|
|
|
800
|
63
|
|
|
|
|
127
|
my($f, |
801
|
|
|
|
|
|
|
$m, |
802
|
|
|
|
|
|
|
$x, |
803
|
|
|
|
|
|
|
$weight, |
804
|
|
|
|
|
|
|
$vec, |
805
|
|
|
|
|
|
|
$class); |
806
|
|
|
|
|
|
|
|
807
|
63
|
|
|
|
|
167
|
undef $self->{A}; |
808
|
63
|
|
|
|
|
673
|
undef $self->{C}; |
809
|
63
|
100
|
|
|
|
462
|
if ($self->{SAMPLING} eq "enum") { |
810
|
12
|
|
|
|
|
30
|
undef $self->{Z}; |
811
|
12
|
|
|
|
|
97
|
$vec = $VECTOR_PACKAGE->new($self->{NR_FEATURES}); |
812
|
12
|
|
|
|
|
70
|
for ($x = 0; $x < 2 ** $self->{NR_FEATURES}; $x++) { |
813
|
1052672
|
|
|
|
|
2382178
|
$weight = $self->weight($vec); |
814
|
1052672
|
|
|
|
|
3285892
|
for $f ($vec->indices()) { |
815
|
9064448
|
|
|
|
|
27177088
|
$self->{A}{$vec->Norm()}{$f} += $weight; |
816
|
9064448
|
|
|
|
|
29927704
|
$self->{C}{$vec->Norm()}{$f}++; |
817
|
|
|
|
|
|
|
} |
818
|
1052672
|
|
|
|
|
3191756
|
$self->{Z} += $weight; |
819
|
1052672
|
50
|
|
|
|
2454598
|
print "Z = $self->{Z}" unless is_float($self->{Z}); |
820
|
1052672
|
|
|
|
|
3581869
|
$vec->increment(); |
821
|
|
|
|
|
|
|
} |
822
|
|
|
|
|
|
|
} |
823
|
|
|
|
|
|
|
else { # mc or corpus |
824
|
51
|
|
|
|
|
315
|
for ($class = 0; $class < $self->{NR_CLASSES}; $class++) { |
825
|
5090
|
|
|
|
|
15506
|
for $f ($self->{CLASSES}[$class]->indices()) { |
826
|
13480
|
|
|
|
|
41665
|
$self->{A}{$self->{CLASSES}[$class]->Norm()}{$f} += |
827
|
|
|
|
|
|
|
$self->{CLASS_EXP_WEIGHTS}[$class]; |
828
|
13480
|
|
|
|
|
45476
|
$self->{C}{$self->{CLASSES}[$class]->Norm()}{$f}++; |
829
|
|
|
|
|
|
|
} |
830
|
|
|
|
|
|
|
} |
831
|
|
|
|
|
|
|
} |
832
|
|
|
|
|
|
|
} |
833
|
|
|
|
|
|
|
|
834
|
|
|
|
|
|
|
|
835
|
|
|
|
|
|
|
# |
836
|
|
|
|
|
|
|
# Monte Carlo sampling with the Metropolis update |
837
|
|
|
|
|
|
|
# |
838
|
|
|
|
|
|
|
|
839
|
|
|
|
|
|
|
# returns heads up with probability $load |
840
|
|
|
|
|
|
|
sub loaded_die { |
841
|
4100
|
|
|
4100
|
0
|
5303
|
my($load) = @_; |
842
|
|
|
|
|
|
|
|
843
|
4100
|
100
|
|
|
|
11533
|
(rand() <= $load) ? 1 : 0; |
844
|
|
|
|
|
|
|
} |
845
|
|
|
|
|
|
|
|
846
|
|
|
|
|
|
|
|
847
|
|
|
|
|
|
|
# samples from the probability distribution of $other to create $self |
848
|
|
|
|
|
|
|
# we use the so-called Metropolis update R = h(new)/h(old) |
849
|
|
|
|
|
|
|
# Metropolis algorithm \cite{neal:probabilistic} |
850
|
|
|
|
|
|
|
sub mc { |
851
|
18
|
|
|
18
|
0
|
30
|
my($self, $other, $type) = @_; |
852
|
|
|
|
|
|
|
|
853
|
18
|
|
|
|
|
30
|
my($R, |
854
|
|
|
|
|
|
|
$weight, |
855
|
|
|
|
|
|
|
$state, |
856
|
|
|
|
|
|
|
$old_weight, |
857
|
|
|
|
|
|
|
$k, |
858
|
|
|
|
|
|
|
%events |
859
|
|
|
|
|
|
|
); |
860
|
|
|
|
|
|
|
|
861
|
18
|
|
|
|
|
2441
|
srand(); |
862
|
|
|
|
|
|
|
# take some class from the sample space as initial state |
863
|
18
|
|
|
|
|
162
|
$state = $VECTOR_PACKAGE->new($self->{NR_FEATURES}); |
864
|
|
|
|
|
|
|
# make sure there are no constant features! |
865
|
18
|
|
|
|
|
106
|
$state->Fill(); |
866
|
18
|
|
|
|
|
121
|
$events{$state->to_Bin()}++; |
867
|
18
|
|
|
|
|
104
|
$state->Empty(); |
868
|
18
|
|
|
|
|
88
|
$weight = 0; |
869
|
|
|
|
|
|
|
# iterate |
870
|
18
|
|
|
|
|
37
|
$k = 0; |
871
|
|
|
|
|
|
|
|
872
|
18
|
|
66
|
|
|
28
|
do { |
873
|
4100
|
|
|
|
|
5115
|
$old_weight = $weight; |
874
|
4100
|
100
|
|
|
|
10603
|
if ($state->bit_flip($k)) { |
875
|
1423
|
|
|
|
|
2713
|
$weight += $self->{PARAMETERS}[$k]; |
876
|
|
|
|
|
|
|
} |
877
|
|
|
|
|
|
|
else { |
878
|
2677
|
|
|
|
|
4382
|
$weight -= $self->{PARAMETERS}[$k]; |
879
|
|
|
|
|
|
|
} |
880
|
4100
|
|
|
|
|
6362
|
$R = exp($weight - $old_weight); |
881
|
4100
|
100
|
|
|
|
10003
|
if (!loaded_die(1 < $R ? 1 : $R)) { # stay at the old state |
|
|
100
|
|
|
|
|
|
882
|
1787
|
|
|
|
|
4265
|
$state->bit_flip($k); |
883
|
1787
|
|
|
|
|
2405
|
$weight = $old_weight; |
884
|
|
|
|
|
|
|
} |
885
|
|
|
|
|
|
|
else { # add state |
886
|
2313
|
|
|
|
|
6914
|
$events{$state->to_Bin()}++; |
887
|
|
|
|
|
|
|
} |
888
|
4100
|
50
|
|
|
|
10812
|
if ($debug) { |
889
|
0
|
|
|
|
|
0
|
print $state->to_Bin(),"\t",scalar(keys(%events)),"\t$R\n"; |
890
|
|
|
|
|
|
|
} |
891
|
|
|
|
|
|
|
# next component |
892
|
4100
|
|
|
|
|
28705
|
$k = ($k + 1) % $self->{NR_FEATURES}; |
893
|
|
|
|
|
|
|
} until ((scalar(keys(%events)) == $SAMPLE_size) || |
894
|
|
|
|
|
|
|
(scalar(keys(%events)) == 2 ** $self->{NR_FEATURES})); |
895
|
|
|
|
|
|
|
|
896
|
18
|
|
|
|
|
419
|
for (keys(%events)) { |
897
|
1800
|
|
|
|
|
1921
|
push @{$self->{CLASSES}}, |
|
1800
|
|
|
|
|
6784
|
|
898
|
|
|
|
|
|
|
$VECTOR_PACKAGE->new_Bin($self->{NR_FEATURES}, $_); |
899
|
|
|
|
|
|
|
} |
900
|
18
|
|
|
|
|
219
|
$self->{NR_CLASSES} = scalar(keys(%events)) - 1; |
901
|
|
|
|
|
|
|
|
902
|
18
|
|
|
|
|
47
|
$self->{CLASSES_CHANGED} = 1; |
903
|
18
|
|
|
|
|
712
|
$self->{PARAMETERS_CHANGED} = 1; |
904
|
|
|
|
|
|
|
} |
905
|
|
|
|
|
|
|
|
906
|
|
|
|
|
|
|
|
907
|
|
|
|
|
|
|
# |
908
|
|
|
|
|
|
|
# IIS |
909
|
|
|
|
|
|
|
# |
910
|
|
|
|
|
|
|
|
911
|
|
|
|
|
|
|
# Newton estimation according to (Abney 1997), Appendix B |
912
|
|
|
|
|
|
|
sub C_func { |
913
|
0
|
|
|
0
|
0
|
0
|
my($self, $j, $x) = @_; |
914
|
|
|
|
|
|
|
|
915
|
0
|
|
|
|
|
0
|
my($m, |
916
|
|
|
|
|
|
|
$s0, |
917
|
|
|
|
|
|
|
$s1, |
918
|
|
|
|
|
|
|
$a_x_m); |
919
|
|
|
|
|
|
|
|
920
|
0
|
|
|
|
|
0
|
$s0 = - $self->{NR_EVENTS} * $self->{E_REF}[$j]; |
921
|
0
|
|
|
|
|
0
|
$s1 = 0; |
922
|
0
|
|
|
|
|
0
|
for ($m = 1; $m <= $self->{M}; $m++) { |
923
|
0
|
0
|
|
|
|
0
|
if ($self->{"C"}{$m}{$j}) { |
924
|
0
|
|
|
|
|
0
|
$a_x_m = $self->{"C"}{$m}{$j} * exp($x * $m); |
925
|
0
|
|
|
|
|
0
|
$s0 += $a_x_m; |
926
|
0
|
|
|
|
|
0
|
$s1 += $m * $a_x_m; |
927
|
|
|
|
|
|
|
} |
928
|
|
|
|
|
|
|
} |
929
|
0
|
0
|
|
|
|
0
|
print "sum_func not a number: $s0\n" |
930
|
|
|
|
|
|
|
unless is_float($s0); |
931
|
0
|
0
|
|
|
|
0
|
print "sum_deriv not a number: $s1\n" |
932
|
|
|
|
|
|
|
unless is_float($s1); |
933
|
|
|
|
|
|
|
|
934
|
0
|
0
|
|
|
|
0
|
if ($s1 == 0) { |
935
|
0
|
|
|
|
|
0
|
return(0); |
936
|
|
|
|
|
|
|
} |
937
|
|
|
|
|
|
|
else { |
938
|
0
|
|
|
|
|
0
|
return($s0 / $s1); |
939
|
|
|
|
|
|
|
} |
940
|
|
|
|
|
|
|
} |
941
|
|
|
|
|
|
|
|
942
|
|
|
|
|
|
|
|
943
|
|
|
|
|
|
|
# Newton estimation according to (Della Pietra et al. 1997) |
944
|
|
|
|
|
|
|
sub A_func { |
945
|
2813
|
|
|
2813
|
0
|
4055
|
my($self, $j, $x) = @_; |
946
|
|
|
|
|
|
|
|
947
|
2813
|
|
|
|
|
2828
|
my($m, |
948
|
|
|
|
|
|
|
$sum_func, |
949
|
|
|
|
|
|
|
$sum_deriv, |
950
|
|
|
|
|
|
|
$a_x_m); |
951
|
|
|
|
|
|
|
|
952
|
2813
|
|
|
|
|
4858
|
$sum_func = -$self->{E_REF}[$j] * $self->{Z}; |
953
|
2813
|
|
|
|
|
2886
|
$sum_deriv = 0; |
954
|
2813
|
|
|
|
|
6639
|
for ($m = 1; $m <= $self->{M}; $m++) { |
955
|
26606
|
100
|
|
|
|
66999
|
if ($self->{"A"}{$m}{$j}) { |
956
|
24589
|
|
|
|
|
48328
|
$a_x_m = $self->{"A"}{$m}{$j} * exp($x * $m); |
957
|
24589
|
|
|
|
|
27129
|
$sum_func += $a_x_m; |
958
|
24589
|
|
|
|
|
59637
|
$sum_deriv += $m * $a_x_m; |
959
|
|
|
|
|
|
|
} |
960
|
|
|
|
|
|
|
} |
961
|
2813
|
50
|
|
|
|
4831
|
if ($sum_deriv == 0) { |
962
|
0
|
|
|
|
|
0
|
return(0); |
963
|
|
|
|
|
|
|
} |
964
|
|
|
|
|
|
|
else { |
965
|
2813
|
|
|
|
|
16068
|
return($sum_func / $sum_deriv); |
966
|
|
|
|
|
|
|
} |
967
|
|
|
|
|
|
|
} |
968
|
|
|
|
|
|
|
|
969
|
|
|
|
|
|
|
|
970
|
|
|
|
|
|
|
# solves \alpha from |
971
|
|
|
|
|
|
|
# \sum_{m=0}^{M} a_{m,j}^{(n)} e^{\alpha^{(n)}_j m}=0 |
972
|
|
|
|
|
|
|
sub iis_estimate_with_newton { |
973
|
764
|
|
|
764
|
0
|
1114
|
my($self, $i) = @_; |
974
|
|
|
|
|
|
|
|
975
|
764
|
|
|
|
|
861
|
my($x, |
976
|
|
|
|
|
|
|
$old_x, |
977
|
|
|
|
|
|
|
$deriv_res, |
978
|
|
|
|
|
|
|
$func_res, |
979
|
|
|
|
|
|
|
$k); |
980
|
|
|
|
|
|
|
|
981
|
|
|
|
|
|
|
# $x = log(0) |
982
|
764
|
|
|
|
|
849
|
$x = 0; |
983
|
764
|
|
|
|
|
826
|
$k = 0; |
984
|
|
|
|
|
|
|
|
985
|
|
|
|
|
|
|
# do newton's method |
986
|
764
|
|
66
|
|
|
872
|
do { |
987
|
|
|
|
|
|
|
# save old x |
988
|
2813
|
|
|
|
|
3184
|
$old_x = $x; |
989
|
|
|
|
|
|
|
# compute new x |
990
|
2813
|
100
|
|
|
|
5496
|
if ($self->{SAMPLING} eq "enum") { |
991
|
|
|
|
|
|
|
# (DDL 1997) |
992
|
955
|
|
|
|
|
2146
|
$x -= $self->A_func($i, $x); |
993
|
|
|
|
|
|
|
} |
994
|
|
|
|
|
|
|
else { |
995
|
|
|
|
|
|
|
# sample -> (Abney 1997) |
996
|
1858
|
|
|
|
|
3581
|
$x -= $self->A_func($i, $x); |
997
|
|
|
|
|
|
|
} |
998
|
|
|
|
|
|
|
} until ((abs($x - $old_x) <= $NEWTON_min) || |
999
|
|
|
|
|
|
|
($k++ > $NEWTON_max_it)); |
1000
|
764
|
50
|
|
|
|
1578
|
if ($debug) { |
1001
|
0
|
|
|
|
|
0
|
print "Estimated gamma_$i with Newton's method: $x\n"; |
1002
|
|
|
|
|
|
|
} |
1003
|
764
|
|
|
|
|
1913
|
return($x); |
1004
|
|
|
|
|
|
|
} |
1005
|
|
|
|
|
|
|
|
1006
|
|
|
|
|
|
|
|
1007
|
|
|
|
|
|
|
# updates parameter $i |
1008
|
|
|
|
|
|
|
sub gamma { |
1009
|
186
|
|
|
186
|
0
|
528
|
my($self, $sample) = @_; |
1010
|
|
|
|
|
|
|
|
1011
|
186
|
|
|
|
|
372
|
my($f); |
1012
|
|
|
|
|
|
|
|
1013
|
186
|
|
|
|
|
681
|
for $f (0..$self->{NR_FEATURES} - 1) { |
1014
|
2161
|
50
|
|
|
|
5504
|
if (!$self->{FEATURE_IGNORE}{$f}) { |
1015
|
2161
|
100
|
|
|
|
4359
|
if ($self->{SCALER} eq "gis") { |
1016
|
1397
|
|
|
|
|
5434
|
$self->{PARAMETERS}[$f] += |
1017
|
|
|
|
|
|
|
log($self->{E_REF}[$f] / $sample->{E_LOGLIN}[$f]) / $sample->{M}; |
1018
|
|
|
|
|
|
|
} |
1019
|
|
|
|
|
|
|
else { |
1020
|
764
|
|
|
|
|
1977
|
$self->{PARAMETERS}[$f] += |
1021
|
|
|
|
|
|
|
$sample->iis_estimate_with_newton($f); |
1022
|
|
|
|
|
|
|
} |
1023
|
|
|
|
|
|
|
} |
1024
|
|
|
|
|
|
|
} |
1025
|
|
|
|
|
|
|
|
1026
|
186
|
100
|
66
|
|
|
1297
|
if (($self->{SCALER} eq "gis") && ($self->{NEED_CORRECTION_FEATURE})) { |
1027
|
123
|
|
|
|
|
720
|
$self->{CORRECTION_PARAMETER} += |
1028
|
|
|
|
|
|
|
log($self->{E_REF}[$self->{NR_FEATURES}] / |
1029
|
|
|
|
|
|
|
$sample->{E_LOGLIN}[$self->{NR_FEATURES}]) / $self->{M}; |
1030
|
|
|
|
|
|
|
} |
1031
|
|
|
|
|
|
|
} |
1032
|
|
|
|
|
|
|
|
1033
|
|
|
|
|
|
|
|
1034
|
|
|
|
|
|
|
# the iterative scaling algorithms |
1035
|
|
|
|
|
|
|
sub scale { |
1036
|
18
|
|
|
18
|
1
|
63
|
my($self, $sampling, $scaler) = @_; |
1037
|
|
|
|
|
|
|
|
1038
|
18
|
|
|
|
|
32
|
my($k, |
1039
|
|
|
|
|
|
|
$i, |
1040
|
|
|
|
|
|
|
$kl, |
1041
|
|
|
|
|
|
|
$old_kl, |
1042
|
|
|
|
|
|
|
$diff, |
1043
|
|
|
|
|
|
|
$sample, |
1044
|
|
|
|
|
|
|
$old_correction_parameter, |
1045
|
|
|
|
|
|
|
@old_parameters); |
1046
|
|
|
|
|
|
|
|
1047
|
18
|
100
|
|
|
|
110
|
if ($sampling) { |
1048
|
6
|
|
|
|
|
14
|
$self->{SAMPLING} = $sampling; |
1049
|
|
|
|
|
|
|
} |
1050
|
18
|
100
|
|
|
|
59
|
if ($scaler) { |
1051
|
6
|
|
|
|
|
14
|
$self->{SCALER} = $scaler; |
1052
|
|
|
|
|
|
|
} |
1053
|
|
|
|
|
|
|
|
1054
|
18
|
|
|
|
|
95
|
$self->init_parameters(); |
1055
|
18
|
|
|
|
|
73
|
$self->prepare_model(); |
1056
|
18
|
|
|
|
|
618
|
$self->log("($self->{SCALER}, $self->{SAMPLING}): H(p_ref)=$self->{H_p_ref}\nit.\tD(p_ref||p)\t\tH(p)\t\t\tL(p_ref,p)\t\ttime\n0\t$self->{KL}\t$self->{H_p}\t$self->{L}\t" . time() . "\n"); |
1057
|
18
|
|
|
|
|
33
|
$k = 0; |
1058
|
18
|
|
|
|
|
44
|
$kl = 1e99; |
1059
|
18
|
|
66
|
|
|
44
|
do { |
|
|
|
66
|
|
|
|
|
1060
|
|
|
|
|
|
|
# store parameters for reverting if converging stops |
1061
|
186
|
|
|
|
|
389
|
@old_parameters = @{$self->{PARAMETERS}}; |
|
186
|
|
|
|
|
1896
|
|
1062
|
186
|
|
|
|
|
616
|
$old_correction_parameter = $self->{CORRECTION_PARAMETER}; |
1063
|
186
|
100
|
|
|
|
505
|
if ($sample) { |
1064
|
168
|
|
|
|
|
810
|
$sample->DESTROY(); |
1065
|
|
|
|
|
|
|
} |
1066
|
186
|
|
|
|
|
894
|
$sample = $self->sample(); |
1067
|
186
|
|
|
|
|
996
|
$self->gamma($sample); |
1068
|
186
|
|
|
|
|
558
|
$self->{PARAMETERS_CHANGED} = 1; |
1069
|
186
|
|
|
|
|
811
|
$self->prepare_model(); |
1070
|
186
|
|
|
|
|
766
|
$diff = $kl - $self->{KL}; |
1071
|
186
|
|
|
|
|
514
|
$kl = $self->{KL}; |
1072
|
|
|
|
|
|
|
|
1073
|
186
|
|
|
|
|
366
|
$k++; |
1074
|
186
|
|
|
|
|
4972
|
$self->log("$k\t$self->{KL}\t$self->{H_p}\t$self->{L}\t" . time() . "\n"); |
1075
|
186
|
50
|
|
|
|
628
|
if ($debug) { |
1076
|
0
|
|
|
|
|
0
|
$self->check(); |
1077
|
|
|
|
|
|
|
} |
1078
|
186
|
100
|
|
|
|
643
|
if ($diff < 0) { |
1079
|
10
|
|
|
|
|
88
|
$self->log("Scaling is not converging (anymore); will revert parameters!\n"); |
1080
|
|
|
|
|
|
|
# restore old parameters |
1081
|
10
|
|
|
|
|
42
|
$self->{PARAMETERS} = \@old_parameters; |
1082
|
10
|
|
|
|
|
30
|
$self->{CORRECTION_PARAMETER} = $old_correction_parameter; |
1083
|
10
|
|
|
|
|
22
|
$self->{PARAMETERS_CHANGED} = 1; |
1084
|
10
|
|
|
|
|
57
|
$self->prepare_model(); |
1085
|
|
|
|
|
|
|
} |
1086
|
186
|
50
|
|
|
|
2159
|
if ($cntrl_backslash_pressed) { |
1087
|
0
|
|
|
|
|
0
|
$self->dump(); |
1088
|
0
|
|
|
|
|
0
|
$cntrl_backslash_pressed = 0; |
1089
|
|
|
|
|
|
|
} |
1090
|
|
|
|
|
|
|
} until ($diff <= $KL_min || ($k > $KL_max_it) || ($diff < 0)); |
1091
|
|
|
|
|
|
|
} |
1092
|
|
|
|
|
|
|
|
1093
|
|
|
|
|
|
|
|
1094
|
|
|
|
|
|
|
# |
1095
|
|
|
|
|
|
|
# Field Induction Algorithm |
1096
|
|
|
|
|
|
|
# |
1097
|
|
|
|
|
|
|
|
1098
|
|
|
|
|
|
|
# add feature $g to $self |
1099
|
|
|
|
|
|
|
sub add_feature { |
1100
|
28
|
|
|
28
|
0
|
80
|
my($self, $candidates, $g) = @_; |
1101
|
|
|
|
|
|
|
|
1102
|
28
|
|
|
|
|
35
|
my($i); |
1103
|
|
|
|
|
|
|
|
1104
|
28
|
|
|
|
|
53
|
$self->{NR_FEATURES}++; |
1105
|
28
|
|
|
|
|
123
|
for ($i = 0; $i < $self->{NR_CLASSES}; $i++) { |
1106
|
2800
|
|
|
|
|
11360
|
$self->{CLASSES}[$i]->Interval_Substitute($candidates->{CANDIDATES}[$i], |
1107
|
|
|
|
|
|
|
$self->{CLASSES}[$i]->Size(), |
1108
|
|
|
|
|
|
|
0, $g, 1); |
1109
|
|
|
|
|
|
|
} |
1110
|
28
|
100
|
|
|
|
144
|
if ($self->{SCALER} eq "gis") { |
1111
|
14
|
|
|
|
|
48
|
$self->{PARAMETERS}[$self->{NR_FEATURES} - 1] = 1; |
1112
|
|
|
|
|
|
|
} |
1113
|
|
|
|
|
|
|
else { |
1114
|
14
|
|
|
|
|
81
|
$self->{PARAMETERS}[$self->{NR_FEATURES} - 1] = $candidates->{ALPHA}[$g]; |
1115
|
|
|
|
|
|
|
} |
1116
|
28
|
|
|
|
|
46
|
unshift @{$self->{FEATURE_NAMES}}, $candidates->{CANDIDATE_NAMES}[$g]; |
|
28
|
|
|
|
|
145
|
|
1117
|
28
|
|
|
|
|
73
|
$self->{PARAMETERS_CHANGED} = 1; |
1118
|
28
|
|
|
|
|
57
|
$self->{CLASSES_CHANGED} = 1; |
1119
|
28
|
|
|
|
|
133
|
$self->prepare_model(); |
1120
|
|
|
|
|
|
|
} |
1121
|
|
|
|
|
|
|
|
1122
|
|
|
|
|
|
|
|
1123
|
|
|
|
|
|
|
# remove feature $g |
1124
|
|
|
|
|
|
|
sub remove_feature { |
1125
|
20
|
|
|
20
|
0
|
58
|
my($self, $g) = @_; |
1126
|
|
|
|
|
|
|
|
1127
|
20
|
|
|
|
|
1639
|
my($i |
1128
|
|
|
|
|
|
|
); |
1129
|
|
|
|
|
|
|
|
1130
|
20
|
|
|
|
|
83
|
for ($i = 0; $i < $self->{NR_CLASSES}; $i++) { |
1131
|
|
|
|
|
|
|
# substitute offset $g length 1 by nothing |
1132
|
2000
|
|
|
|
|
7094
|
$self->{CLASSES}[$i]->Interval_Substitute($self->{CLASSES}[$i], |
1133
|
|
|
|
|
|
|
$g, 1, 0, 0); |
1134
|
|
|
|
|
|
|
} |
1135
|
20
|
|
|
|
|
42
|
splice(@{$self->{PARAMETERS}}, $g, 1); |
|
20
|
|
|
|
|
86
|
|
1136
|
20
|
|
|
|
|
38
|
splice(@{$self->{FEATURE_NAMES}}, $self->{NR_FEATURES} - 1 - $g, 1); |
|
20
|
|
|
|
|
67
|
|
1137
|
20
|
|
|
|
|
46
|
$self->{NR_FEATURES}--; |
1138
|
20
|
|
|
|
|
45
|
$self->{PARAMETERS_CHANGED} = 1; |
1139
|
20
|
|
|
|
|
40
|
$self->{CLASSES_CHANGED} = 1; |
1140
|
20
|
|
|
|
|
103
|
$self->prepare_model(); |
1141
|
|
|
|
|
|
|
} |
1142
|
|
|
|
|
|
|
|
1143
|
|
|
|
|
|
|
|
1144
|
|
|
|
|
|
|
# checks for $event, if not there adds it, otherwise increases its {FREQ} |
1145
|
|
|
|
|
|
|
sub add_event { |
1146
|
0
|
|
|
0
|
0
|
0
|
my($self, $event) = @_; |
1147
|
|
|
|
|
|
|
|
1148
|
0
|
|
|
|
|
0
|
my($i, |
1149
|
|
|
|
|
|
|
$found); |
1150
|
|
|
|
|
|
|
|
1151
|
0
|
|
|
|
|
0
|
$found = 0; |
1152
|
0
|
|
|
|
|
0
|
for ($i = 0; $i < $self->{NR_CLASSES}; $i++) { |
1153
|
0
|
|
|
|
|
0
|
$found = ($event->Compare($self->{CLASSES}[$i]) == 0); |
1154
|
0
|
0
|
|
|
|
0
|
if ($found) { |
1155
|
0
|
|
|
|
|
0
|
$self->{FREQ}[$i]++; |
1156
|
0
|
|
|
|
|
0
|
last; |
1157
|
|
|
|
|
|
|
} |
1158
|
|
|
|
|
|
|
} |
1159
|
0
|
0
|
|
|
|
0
|
if (!$found) { |
1160
|
0
|
|
|
|
|
0
|
$self->{CLASSES}[$self->{NR_CLASSES}] = $event->Clone(); |
1161
|
0
|
|
|
|
|
0
|
$self->{FREQ}[$self->{NR_CLASSES}] = 1; |
1162
|
0
|
|
|
|
|
0
|
$self->{NR_CLASSES}++; |
1163
|
|
|
|
|
|
|
} |
1164
|
0
|
|
|
|
|
0
|
$self->{NR_EVENTS}++; |
1165
|
|
|
|
|
|
|
} |
1166
|
|
|
|
|
|
|
|
1167
|
|
|
|
|
|
|
|
1168
|
|
|
|
|
|
|
# computes the gain for all $candidates |
1169
|
|
|
|
|
|
|
sub gain { |
1170
|
8
|
|
|
8
|
0
|
21
|
my($self, $candidates) = @_; |
1171
|
|
|
|
|
|
|
|
1172
|
8
|
|
|
|
|
15
|
my($c, |
1173
|
|
|
|
|
|
|
$x, |
1174
|
|
|
|
|
|
|
$kl, |
1175
|
|
|
|
|
|
|
$below, |
1176
|
|
|
|
|
|
|
$above, |
1177
|
|
|
|
|
|
|
$sum_p_ref, |
1178
|
|
|
|
|
|
|
$sum_p); |
1179
|
|
|
|
|
|
|
|
1180
|
8
|
|
|
|
|
29
|
$candidates->{MAX_GAIN} = 0; |
1181
|
8
|
|
|
|
|
18
|
$candidates->{BEST_CAND} = 0; |
1182
|
8
|
|
|
|
|
46
|
for ($c = 0; $c < $candidates->{NR_CANDIDATES}; $c++) { |
1183
|
24
|
100
|
|
|
|
129
|
if (!$candidates->{ADDED}{$c}) { |
1184
|
20
|
|
|
|
|
36
|
$sum_p_ref = 0; |
1185
|
20
|
|
|
|
|
24927
|
$sum_p = 0; |
1186
|
20
|
|
|
|
|
105
|
for ($x = 0; $x < $self->{NR_CLASSES}; $x++) { |
1187
|
2000
|
100
|
|
|
|
5924
|
if ($candidates->{CANDIDATES}[$x]->bit_test($c)) { |
1188
|
234
|
|
|
|
|
433
|
$sum_p += $self->{CLASS_EXP_WEIGHTS}[$x]; |
1189
|
234
|
|
|
|
|
945
|
$sum_p_ref += $self->{FREQ}[$x]; |
1190
|
|
|
|
|
|
|
} |
1191
|
|
|
|
|
|
|
} |
1192
|
20
|
|
|
|
|
58
|
$sum_p /= $self->{Z}; |
1193
|
20
|
|
|
|
|
44
|
$sum_p_ref /= $self->{NR_EVENTS}; |
1194
|
20
|
|
|
|
|
51
|
$above = $sum_p_ref * (1 - $sum_p); |
1195
|
20
|
|
|
|
|
38
|
$below = $sum_p * (1 - $sum_p_ref); |
1196
|
20
|
50
|
|
|
|
73
|
if ($above * $below > 0) { |
1197
|
20
|
|
|
|
|
129
|
$candidates->{ALPHA}[$c] = log($above / $below); |
1198
|
|
|
|
|
|
|
} |
1199
|
|
|
|
|
|
|
else { |
1200
|
0
|
|
|
|
|
0
|
$self->die("Cannot take log of negative/zero value: $above / $below\n"); |
1201
|
|
|
|
|
|
|
} |
1202
|
|
|
|
|
|
|
# temporarily add feature to classes and compute $gain |
1203
|
20
|
|
|
|
|
43
|
$kl = $self->{KL}; |
1204
|
20
|
|
|
|
|
97
|
$self->add_feature($candidates, $c); |
1205
|
20
|
|
|
|
|
128
|
$candidates->{GAIN}[$c] = $kl - $self->{KL}; |
1206
|
20
|
|
|
|
|
495
|
$self->log("G($c, $candidates->{ALPHA}[$c]) = $candidates->{GAIN}[$c]\n"); |
1207
|
20
|
100
|
|
|
|
123
|
if (($candidates->{MAX_GAIN} <= $candidates->{GAIN}[$c])) { |
1208
|
8
|
|
|
|
|
29
|
$candidates->{MAX_GAIN} = $candidates->{GAIN}[$c]; |
1209
|
8
|
|
|
|
|
22
|
$candidates->{BEST_CAND} = $c; |
1210
|
|
|
|
|
|
|
} |
1211
|
|
|
|
|
|
|
# remove the feature |
1212
|
20
|
|
|
|
|
110
|
$self->remove_feature($self->{NR_FEATURES} - 1); |
1213
|
|
|
|
|
|
|
} |
1214
|
|
|
|
|
|
|
} |
1215
|
|
|
|
|
|
|
} |
1216
|
|
|
|
|
|
|
|
1217
|
|
|
|
|
|
|
|
1218
|
|
|
|
|
|
|
# adds the $n best candidates |
1219
|
|
|
|
|
|
|
sub fi { |
1220
|
4
|
|
|
4
|
1
|
28
|
my($self, $scaler, $candidates, $n, $sample) = @_; |
1221
|
|
|
|
|
|
|
|
1222
|
4
|
|
|
|
|
7
|
my ($i, |
1223
|
|
|
|
|
|
|
$kl); |
1224
|
|
|
|
|
|
|
|
1225
|
4
|
|
|
|
|
23
|
$self->log("(fi, $scaler, $sample, $n)\n"); |
1226
|
4
|
50
|
|
|
|
12
|
if ($scaler) { |
1227
|
4
|
|
|
|
|
14
|
$self->{SCALER} = $scaler; |
1228
|
|
|
|
|
|
|
} |
1229
|
4
|
50
|
|
|
|
13
|
if ($sample) { |
1230
|
4
|
|
|
|
|
8
|
$self->{SAMPLING} = $sample; |
1231
|
|
|
|
|
|
|
} |
1232
|
|
|
|
|
|
|
|
1233
|
4
|
50
|
|
|
|
26
|
if ($self->{NR_CLASSES} != $candidates->{NR_CLASSES}) { |
1234
|
0
|
|
|
|
|
0
|
$self->die("Candidates have the wrong number of events\n"); |
1235
|
|
|
|
|
|
|
} |
1236
|
|
|
|
|
|
|
|
1237
|
4
|
|
|
|
|
21
|
$self->scale(); |
1238
|
4
|
|
|
|
|
13
|
$kl = $self->{KL}; |
1239
|
4
|
50
|
|
|
|
37
|
$n = ($n > $candidates->{NR_CANDIDATES}) ? $candidates->{NR_CANDIDATES} : $n; |
1240
|
4
|
|
|
|
|
14
|
for ($i = 0; $i < $n; $i++) { |
1241
|
8
|
|
|
|
|
39450
|
$self->gain($candidates); |
1242
|
8
|
|
|
|
|
55
|
$self->add_feature($candidates, $candidates->{BEST_CAND}); |
1243
|
8
|
|
|
|
|
60
|
$candidates->{ADDED}{$candidates->{BEST_CAND}} = 1; |
1244
|
8
|
|
|
|
|
97
|
$self->log("Adding candidate $candidates->{BEST_CAND}\n"); |
1245
|
8
|
|
|
|
|
53
|
$self->scale(); |
1246
|
8
|
|
|
|
|
118
|
$self->log("Actual gain: " . ($self->{KL} - $kl) . "\n"); |
1247
|
8
|
|
|
|
|
44
|
$kl = $self->{KL}; |
1248
|
|
|
|
|
|
|
} |
1249
|
4
|
|
|
|
|
25
|
return(1); |
1250
|
|
|
|
|
|
|
} |
1251
|
|
|
|
|
|
|
|
1252
|
|
|
|
|
|
|
|
1253
|
|
|
|
|
|
|
1; |
1254
|
|
|
|
|
|
|
|
1255
|
|
|
|
|
|
|
__END__ |