package WWW::Crawler::Mojo;
use strict;
use warnings;
use Mojo::Base 'Mojo::EventEmitter';
use WWW::Crawler::Mojo::Job;
use WWW::Crawler::Mojo::Queue::Memory;
use WWW::Crawler::Mojo::UserAgent;
use WWW::Crawler::Mojo::ScraperUtil qw{
  collect_urls_css html_handler_presets reduce_html_handlers resolve_href decoded_body};
use Mojo::Message::Request;

our $VERSION = '0.24';
has clock_speed => 0.25;
has html_handlers => sub { html_handler_presets() };
has max_conn => 1;
has max_conn_per_host => 1;
has queue => sub { WWW::Crawler::Mojo::Queue::Memory->new };
has 'shuffle';
has ua => sub { WWW::Crawler::Mojo::UserAgent->new };
has ua_name =>
  "www-crawler-mojo/$VERSION (+https://github.com/jamadam/www-crawler-mojo)";

sub crawl {
  my ($self) = @_;

  $self->init;

  die 'No job is given' unless ($self->queue->length);

  $self->emit('start');

  Mojo::IOLoop->start unless Mojo::IOLoop->is_running;
}

sub init {
  my ($self) = @_;

  $self->on('empty', sub { say "Queue is drained out."; $self->stop })
    unless $self->has_subscribers('empty');
  $self->on(
    'error',
    sub {
      say
        sprintf("An error occurred during crawling %s: %s", $_[2]->url, $_[1]);
    }
  ) unless $self->has_subscribers('error');
  $self->on('res', sub { $_[1]->() }) unless $self->has_subscribers('res');

  $self->ua->transactor->name($self->ua_name);
  $self->ua->max_redirects(5);

  $self->{_loop_ids} = [];

  my $id = Mojo::IOLoop->recurring(
    $self->clock_speed => sub {
      my $queue = $self->queue;

      if (!$queue->length) {
        $self->emit('empty') unless ($self->ua->active_conn);
        return;
      }
      return if $self->ua->active_conn >= $self->max_conn;
      return
        if $self->ua->active_host($queue->next->url)
        >= $self->max_conn_per_host;
      $self->process_job($queue->dequeue);
    }
  );

  push(@{$self->{_loop_ids}}, $id);

  if ($self->shuffle) {
    my $id = Mojo::IOLoop->recurring(
      $self->shuffle => sub {
        $self->queue->shuffle;
      }
    );

    push(@{$self->{_loop_ids}}, $id);
  }
}

sub process_job {
  my ($self, $job) = @_;

  my @args = ($job->method || 'get', $job->url);
  push(@args, form => $job->tx_params->to_hash) if $job->tx_params;
  my $tx = $self->ua->build_tx(@args);

  $self->emit('req', $job, $tx->req);

  $self->ua->start(
    $tx => sub {
      my ($ua, $tx) = @_;

      $job->redirect(_urls_redirect($tx));

      my $res = $tx->res;

      unless ($res->code) {
        my $msg = $res->error ? $res->error->{message} : 'Unknown error';
        $self->emit('error', $msg, $job);
        return;
      }

      $self->emit('res', sub { $self->scrape($res, $job, $_[0]) }, $job, $res);

      $job->close;
    }
  );
}

sub say_start {
  my $self = shift;

  print <<"EOF";
----------------------------------------
Crawling is starting with @{[ $self->queue->next->url ]}
Max Connection : @{[ $self->max_conn ]}
User Agent : @{[ $self->ua_name ]}
----------------------------------------
EOF
}

sub scrape {
  my ($self, $res, $job, $contexts) = @_;
  my @ret;

  return unless $res->headers->content_length && $res->body;

  my $base = $job->url;
  my $type = $res->headers->content_type;

  if ($type && $type =~ qr{^(text|application)/(html|xml|xhtml)}) {
    if ((my $base_tag = $res->dom->at('base[href]'))) {
      $base = resolve_href($base, $base_tag->attr('href'));
    }
    my $dom = Mojo::DOM->new(decoded_body($res));
    my $handlers = reduce_html_handlers($self->html_handlers, $contexts);
    for my $selector (sort keys %{$handlers}) {
      $dom->find($selector)->each(
        sub {
          my $dom = shift;
          for ($handlers->{$selector}->($dom)) {
            push(@ret, $self->_make_child($_, $dom, $job, $base)) if $_;
          }
        }
      );
    }
  }

  if ($type && $type =~ qr{text/css}) {
    for (collect_urls_css(decoded_body($res))) {
      push(@ret, $self->_make_child($_, $job->url, $job, $base));
    }
  }

  return @ret;
}

sub stop {
  my $self = shift;
  while (my $id = shift @{$self->{_loop_ids}}) {
    Mojo::IOLoop->remove($id);
  }
  Mojo::IOLoop->stop;
}

sub _make_child {
  my ($self, $url, $context, $job, $base) = @_;

  return unless $url;
  ($url, my $method, my $params) = @$url if (ref $url);

  my $resolved = resolve_href($base, $url);

  return unless ($resolved->scheme =~ qr{^(http|https|ftp|ws|wss)$});

  my $child
    = $job->child(url => $resolved, literal_uri => $url, context => $context);

  $child->method($method) if $method;

  if ($params) {
    if ($method eq 'GET') {
      $child->url->query->append($params);
    }
    else {
      $child->tx_params($params);
    }
  }

  return $child;
}

sub enqueue {
  my ($self, @jobs) = @_;
  return
    map { $self->queue->enqueue(WWW::Crawler::Mojo::Job->upgrade($_)) } @jobs;
}

sub requeue {
  my ($self, @jobs) = @_;
  return
    map { $self->queue->requeue(WWW::Crawler::Mojo::Job->upgrade($_)) } @jobs;
}

sub _urls_redirect {
  my $tx = shift;
  my @urls;
  @urls = _urls_redirect($tx->previous) if ($tx->previous);
  unshift(@urls, $tx->req->url->userinfo(undef));
  return @urls;
}

1;

=head1 NAME

WWW::Crawler::Mojo - A web crawling framework for Perl

=head1 SYNOPSIS

    use strict;
    use warnings;
    use WWW::Crawler::Mojo;

    my $bot = WWW::Crawler::Mojo->new;

    $bot->on(res => sub {
        my ($bot, $scrape, $job, $res) = @_;

        $bot->enqueue($_) for $scrape->('#context');
    });

    $bot->enqueue('http://example.com/');
    $bot->crawl;

=head1 DESCRIPTION

L<WWW::Crawler::Mojo> is a web crawling framework for those who are familiar
with L<Mojo>::* APIs.

Although the module is so far only well tested for "focused crawls", you can
also use it for endless crawling by taking special care of memory usage.
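
For example, a focused crawl can be kept bounded by only enqueuing links that
stay on the starting host. This is a minimal sketch, not part of the
distribution; the host check and the C<%seen> hash used for de-duplication are
illustrative assumptions.

    my %seen;
    $bot->on(res => sub {
        my ($bot, $scrape, $job, $res) = @_;
        for my $new_job ($scrape->()) {
            next unless $new_job->url->host && $new_job->url->host eq 'example.com';
            next if $seen{$new_job->url->to_string}++;
            $bot->enqueue($new_job);
        }
    });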

=head1 ATTRIBUTES

L<WWW::Crawler::Mojo> inherits all attributes from L<Mojo::EventEmitter> and
implements the following new ones.

=head2 clock_speed

The interval of the main event loop in seconds. Defaults to 0.25.

    $bot->clock_speed(2);
    my $clock = $bot->clock_speed; # 2

=head2 html_handlers

Sets the HTML handlers for the scraper. Defaults to
WWW::Crawler::Mojo::ScraperUtil::html_handler_presets.

    $bot->html_handlers( {
        'a[href]'  => sub { return $_[0]->{href} },
        'img[src]' => sub { return $_[0]->{src} },
    } );
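
Each handler receives the matched L<Mojo::DOM> element and returns zero or more
URLs to follow. The presets can also be extended in place; for example, to
follow lazily loaded images as well (a sketch; the C<data-src> selector is an
illustrative assumption, not one of the presets):

    my $handlers = $bot->html_handlers;
    $handlers->{'img[data-src]'} = sub { return $_[0]->{'data-src'} };
    $bot->html_handlers($handlers);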

=head2 max_conn

The maximum number of simultaneous connections. Defaults to 1.

    $bot->max_conn(5);
    say $bot->max_conn; # 5

=head2 max_conn_per_host

The maximum number of simultaneous connections per host. Defaults to 1.

    $bot->max_conn_per_host(5);
    say $bot->max_conn_per_host; # 5

=head2 queue

A job queue instance, a L<WWW::Crawler::Mojo::Queue::Memory> object by default.

    $bot->queue(WWW::Crawler::Mojo::Queue::Memory->new);
    $bot->queue->enqueue($job);

=head2 shuffle

An interval in seconds at which the job queue gets shuffled. It is also
evaluated as a boolean to enable or disable the feature. Defaults to undef,
meaning disabled.

    $bot->shuffle(5);
    say $bot->shuffle; # 5
297
|
|
|
|
|
|
|
|
298
|
|
|
|
|
|
|
=head2 ua |
299
|
|
|
|
|
|
|
|
300
|
|
|
|
|
|
|
A L instance. |
301
|
|
|
|
|
|
|
|
302
|
|
|
|
|
|
|
my $ua = $bot->ua; |
303
|
|
|
|
|
|
|
$bot->ua(WWW::Crawler::Mojo::UserAgent->new); |
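
The default user agent builds on L<Mojo::UserAgent>, so the usual knobs are
available, for example (a sketch):

    $bot->ua->connect_timeout(10);
    $bot->ua->request_timeout(30);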
304
|
|
|
|
|
|
|
|
305
|
|
|
|
|
|
|
=head2 ua_name |
306
|
|
|
|
|
|
|
|
307
|
|
|
|
|
|
|
Name of crawler for User-Agent header. |
308
|
|
|
|
|
|
|
|
309
|
|
|
|
|
|
|
$bot->ua_name('my-bot/0.01 (+https://example.com/)'); |
310
|
|
|
|
|
|
|
say $bot->ua_name; # 'my-bot/0.01 (+https://example.com/)' |
311
|
|
|
|
|
|
|
|
312
|
|
|
|
|
|
|
=head1 EVENTS |
313
|
|
|
|
|
|
|
|
314
|
|
|
|
|
|
|
L inherits all events from L and |
315
|
|
|
|
|
|
|
implements the following new ones. |
316
|
|
|
|
|
|
|
|
317
|
|
|
|
|
|
|
=head2 req |
318
|
|
|
|
|
|
|
|
319
|
|
|
|
|
|
|
Emitted right before crawler perform request to servers. The callback takes 3 |
320
|
|
|
|
|
|
|
arguments. |

    $bot->on(req => sub {
        my ($bot, $job, $req) = @_;

        # DO NOTHING
    });
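
Since C<$req> is the outgoing L<Mojo::Message::Request>, the handler can also
adjust it before it is sent, for example to add a header (a sketch; the header
is illustrative):

    $bot->on(req => sub {
        my ($bot, $job, $req) = @_;
        $req->headers->header('Accept-Language' => 'en');
    });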

=head2 res

Emitted when the crawler receives a response from a server. The callback takes
4 arguments.

    $bot->on(res => sub {
        my ($bot, $scrape, $job, $res) = @_;
        if (...) {
            $bot->enqueue($_) for $scrape->();
        } else {
            # DO NOTHING
        }
    });

=head3 $bot

A L<WWW::Crawler::Mojo> instance.

=head3 $scrape

A scraper code reference for the current document. It optionally takes a CSS
selector to narrow the scraping context and returns new jobs.

    for my $job ($scrape->($context)) {
        $bot->enqueue($job);
    }

Optionally you can specify the scraping target container with a CSS selector,
or an array reference of selectors.

    @jobs = $scrape->('#container');
    @jobs = $scrape->(['#container1', '#container2']);

=head3 $job

A L<WWW::Crawler::Mojo::Job> instance.

=head3 $res

A L<Mojo::Message::Response> instance.

=head2 empty

Emitted when the queue length reaches zero and no connections are active
anymore.

    $bot->on(empty => sub {
        my ($bot) = @_;
        say "Queue is drained out.";
    });
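
Note that the default handler installed by init, which prints a message and
stops the crawler when the queue is drained, is skipped once you subscribe to
this event yourself, so stop the crawler explicitly if that is what you want:

    $bot->on(empty => sub {
        my ($bot) = @_;
        say "Queue is drained out.";
        $bot->stop;
    });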

=head2 error

Emitted when the user agent gets no status code for a request, typically
because of network errors or unresponsive servers.

    $bot->on(error => sub {
        my ($bot, $error, $job) = @_;
        say "error: $error";
        if (...) { # e.g. until the failure occurs 3 times
            $bot->requeue($job);
        }
    });

Note that server errors such as 404 or 500 cannot be caught with this event.
Use the res event for that case instead.
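
Since HTTP-level failures still arrive through the res event, they can be
handled there instead; a rough sketch (the retry counter is an illustrative
assumption):

    my %fails;
    $bot->on(res => sub {
        my ($bot, $scrape, $job, $res) = @_;
        if ($res->code >= 400) {
            $bot->requeue($job) if ++$fails{$job->url->to_string} < 3;
            return;
        }
        $bot->enqueue($_) for $scrape->();
    });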

=head2 start

Emitted right before crawling starts.

    $bot->on(start => sub {
        my $self = shift;
        ...
    });
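
For example, the start event is a convenient place to print the start-up banner
provided by say_start:

    $bot->on(start => sub { shift->say_start });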

=head1 METHODS

L<WWW::Crawler::Mojo> inherits all methods from L<Mojo::EventEmitter> and
implements the following new ones.

=head2 crawl

Starts the crawling loop.

    $bot->crawl;

=head2 init

Initializes crawler settings.

    $bot->init;

=head2 process_job

Processes a single job: builds the transaction, performs the request and emits
the req and res events.

    $bot->process_job($job);

=head2 say_start

Prints a start-up message to STDOUT.

    $bot->say_start;

=head2 scrape

Parses a web page or CSS document and discovers links in it. This performs the
actual scraping. With the optional third argument you can specify a CSS
selector (or an array reference of selectors) for a container to collect URLs
within.

    $bot->scrape($res, $job);
    $bot->scrape($res, $job, $selector);
    $bot->scrape($res, $job, [$selector1, $selector2]);

=head2 stop

Stops crawling.

    $bot->stop;

=head2 enqueue

Appends one or more URLs or L<WWW::Crawler::Mojo::Job> objects. Returns the
jobs actually added.

    my @jobs = $bot->enqueue('http://example.com/index1.html');

OR

    my @jobs = $bot->enqueue($job1, $job2);

OR

    my @jobs = $bot->enqueue(
        'http://example.com/index1.html',
        'http://example.com/index2.html',
        'http://example.com/index3.html',
    );
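
Jobs can also be constructed explicitly when you need something other than a
plain GET request; a rough sketch, assuming the L<WWW::Crawler::Mojo::Job>
constructor accepts a C<url> attribute:

    use Mojo::URL;
    use WWW::Crawler::Mojo::Job;

    my $job = WWW::Crawler::Mojo::Job->new(url => Mojo::URL->new('http://example.com/form'));
    $job->method('post');
    $bot->enqueue($job);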

=head2 requeue

Appends one or more URLs or jobs for retry. This accepts the same arguments as
the enqueue method. Returns the jobs actually added.

    $bot->on(error => sub {
        my ($bot, $msg, $job) = @_;
        if (...) { # e.g. until the failure occurs 3 times
            $bot->requeue($job);
        }
    });

=head1 EXAMPLE

L<https://github.com/jamadam/www-crawler-mojo>

=head1 AUTHOR

Keita Sugama, E<lt>sugama@jamadam.comE<gt>

=head1 COPYRIGHT AND LICENSE

Copyright (C) jamadam

This program is free software; you can redistribute it and/or
modify it under the same terms as Perl itself.

=cut