package WWW::Bookmark::TagStripper;

use HTML::Parser;

# Start/end-tag handler: keep a nesting count per tag name (+1 on start,
# -1 on end) so text() knows which elements the parser is currently inside.
sub tag($$$)
{
    my $pkg = shift;
    my($tag, $num) = @_;
    $pkg->{INSIDE}->{$tag} += $num;
}

# Text handler: skip script/style content, remember the first <title> text,
# and accumulate everything else with runs of newlines collapsed to a space.
sub text($$)
{
    my $pkg = shift;
    return if $pkg->{INSIDE}->{script} || $pkg->{INSIDE}->{style};
    $pkg->{TITLE} = $_[0] if( !$pkg->{TITLE} && $pkg->{INSIDE}->{title} );
    $_[0] =~ s/\n+/ /og;
    $pkg->{TEXT} .= $_[0];
}

sub strip {
    my $pkg = shift;
    $pkg->{PARSER} =
        HTML::Parser->new(
            api_version     => 3,
            handlers        => [
                start => [\&tag,  "self, tagname, '+1'"],
                end   => [\&tag,  "self, tagname, '-1'"],
                text  => [\&text, "self, dtext"],
            ],
            marked_sections => 1,
        );
    $pkg->{PARSER}->parse(shift);
    $pkg->{TEXT}  = $pkg->{PARSER}->{TEXT};
    $pkg->{TITLE} = $pkg->{PARSER}->{TITLE};
    1;
}

sub new() {
    bless { TEXT => '', TITLE => '' }, shift;
}
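
# Minimal usage sketch (illustrative, not part of the original listing;
# $html is assumed to hold a fetched HTML document):
#
#   my $stripper = WWW::Bookmark::TagStripper->new;
#   $stripper->strip($html);
#   print $stripper->{TITLE}, "\n";   # first <title> text, if any
#   print $stripper->{TEXT},  "\n";   # body text with scripts/styles removed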

######################################################################

package WWW::Bookmark::Crawler;

use 5.006;
use strict;
use warnings;
use Carp;

our $VERSION = '0.01';

use OurNet::FuzzyIndex;
use HTML::LinkExtor;
use LWP::UserAgent;
use HTTP::Request::Common;
use Set::Scalar;

# SOURCE may be an exported bookmark HTML file or an array ref of URLs;
# anything else is treated as "no source".
sub new($$) {
    my ($pkg, $arg) = @_;
    my $self = {
        SOURCE =>
            -f $arg->{SOURCE} ?
                $arg->{SOURCE} :
                ( ref($arg->{SOURCE}) eq "ARRAY" ? $arg->{SOURCE} : '' ),
        DBNAME  => $arg->{DBNAME},
        PEEK    => $arg->{PEEK},
        PROXY   => $arg->{PROXY},
        TIMEOUT => $arg->{TIMEOUT} || 10,
    };
    $self->{TOKENIZER} = ref($arg->{TOKENIZER}) ? $arg->{TOKENIZER} : \&tokenizer;

    if(ref($self->{SOURCE}) eq "ARRAY"){
        $self->{_LINKS} = $self->{SOURCE};
    }
    elsif($self->{SOURCE}){
        my $p = HTML::LinkExtor->new();
        $p->parse_file($self->{SOURCE});
        $self->{_LINKS} = [ map {$_->[2]} grep {$_->[0] eq 'a' && $_->[1] eq 'href'} $p->links ];
    }

    bless $self, $pkg;
}

# Default tokenizer: split on whitespace and expand each chunk into the
# index keys produced by OurNet::FuzzyIndex::parse.
sub tokenizer($) {
    caller eq __PACKAGE__ or croak(q/It's private!/);
    my(@t);
    for my $tok (grep {$_} split /\s+/o, $_[0]){
        my %words = OurNet::FuzzyIndex::parse($tok, 0);
        foreach my $m (keys %words){
            push @t, map {"$m$_"} grep { $_ !~ /^[\s\t\d]+$/ } keys %{$words{$m}};
            push @t, $m;
        }
        push @t, $_[0] unless @t;
    }
    return @t;
}

sub crawl($) {
    my $pkg = shift;
    my $ua = LWP::UserAgent->new;
    $ua->agent("WWW::Bookmark::Crawler $VERSION");
    $ua->proxy('http', $pkg->{PROXY}) if $pkg->{PROXY};
    $ua->timeout($pkg->{TIMEOUT});
    local $| = 1;
    open (DB, ">".$pkg->{DBNAME}) or croak("cannot write to index file");
    local $SIG{INT} = sub { close DB; exit };

    for my $L (@{$pkg->{_LINKS}}){
        my $request  = GET($L);
        my $response = $ua->request($request);
        $response->is_success or next;

        my $stripper = WWW::Bookmark::TagStripper->new();
        $stripper->strip($response->content);

        if($pkg->{PEEK}){
            print "{\n $L\n";
            print " ".$stripper->{TITLE}."\n}\n";
        }

        print DB
            $L, "\x02", $stripper->{TITLE}, "\x02",
            join(qq/\x01/, $pkg->{TOKENIZER}->($stripper->{TEXT}) ), "\n";
    }
    close DB;
}
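
# Each index line written by crawl() has the form (shown for illustration):
#
#   <url>\x02<title>\x02<token>\x01<token>\x01...
#
# _loadDB() below splits on the same \x02 / \x01 separators when reading it back.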

sub _loadDB($) {
    my $pkg = shift;
    my $L;
    my ($url, $title, $keywords);
    my $cnt = 0;
    $pkg->{_dbloaded} = 1;
    open (DB, $pkg->{DBNAME}) or croak("index file error");
    while($L = <DB>){
        next unless $L =~ /\x02/o;
        chomp $L;
        ($url, $title, $keywords) = split /\x02/, $L;
        $pkg->{_URLS}->[$cnt]   = $url;
        $pkg->{_TITLES}->[$cnt] = $title;
        foreach my $k (keys %{ { map {$_,1} split /\x01/, $keywords } }){
            push @{$pkg->{_KEYWORDS}->{$k}}, $cnt;
        }
        $cnt++;
    }
    close DB;
}

sub query($) {
    my $pkg = shift;
    my $query = shift || croak("Query?");

    $pkg->_loadDB unless $pkg->{_dbloaded};

    my @queries = keys %{{
        map {$_,1}
        sort { @{$pkg->{_KEYWORDS}->{$a}} <=> @{$pkg->{_KEYWORDS}->{$b}} }
        $pkg->{TOKENIZER}->($query)
    }};

    my $seta = Set::Scalar->new(@{$pkg->{_KEYWORDS}->{$queries[0]}});

    for my $i (1..$#queries){
        my $setb = Set::Scalar->new(@{$pkg->{_KEYWORDS}->{$queries[$i]}});
        $seta = $seta->intersection($setb);
    }

    map {{
        URL => $pkg->{_URLS}->[$_], TITLE => $pkg->{_TITLES}->[$_]
    }} $seta->elements;
}

sub peek()     { $_[0]->{PEEK} = 1 }

sub nopeek()   { $_[0]->{PEEK} = 0 }

sub proxy($)   { $_[0]->{PROXY} = $_[1] }

sub timeout($) { $_[0]->{TIMEOUT} = $_[1] || 10 }

1;
__END__
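
A minimal end-to-end sketch (illustrative only; the file names and the query
string are assumptions, not part of the listing):

    use WWW::Bookmark::Crawler;

    my $crawler = WWW::Bookmark::Crawler->new({
        SOURCE  => 'bookmarks.html',   # exported bookmark file, or an array ref of URLs
        DBNAME  => 'bookmarks.idx',    # flat-file index written by crawl()
        PEEK    => 1,                  # print each URL and title while crawling
        TIMEOUT => 10,
    });

    $crawler->crawl;

    for my $hit ($crawler->query('perl html parser')) {
        print "$hit->{TITLE}\n  $hit->{URL}\n";
    }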