line |
stmt |
bran |
cond |
sub |
pod |
time |
code |
1
|
|
|
|
|
|
|
package GitHub::RSS; |
2
|
1
|
|
|
1
|
|
718
|
use strict; |
|
1
|
|
|
|
|
3
|
|
|
1
|
|
|
|
|
31
|
|
3
|
1
|
|
|
1
|
|
16
|
use 5.010; |
|
1
|
|
|
|
|
4
|
|
4
|
1
|
|
|
1
|
|
684
|
use Moo 2; |
|
1
|
|
|
|
|
12434
|
|
|
1
|
|
|
|
|
8
|
|
5
|
1
|
|
|
1
|
|
2179
|
use Filter::signatures; |
|
1
|
|
|
|
|
26956
|
|
|
1
|
|
|
|
|
6
|
|
6
|
1
|
|
|
1
|
|
36
|
use feature 'signatures'; |
|
1
|
|
|
|
|
3
|
|
|
1
|
|
|
|
|
31
|
|
7
|
1
|
|
|
1
|
|
5
|
no warnings 'experimental::signatures'; |
|
1
|
|
|
|
|
3
|
|
|
1
|
|
|
|
|
34
|
|
8
|
1
|
|
|
1
|
|
532
|
use PerlX::Maybe; |
|
1
|
|
|
|
|
2433
|
|
|
1
|
|
|
|
|
4
|
|
9
|
|
|
|
|
|
|
|
10
|
1
|
|
|
1
|
|
851
|
use IO::Socket::SSL; |
|
1
|
|
|
|
|
72486
|
|
|
1
|
|
|
|
|
9
|
|
11
|
1
|
|
|
1
|
|
680
|
use Net::GitHub; |
|
1
|
|
|
|
|
314704
|
|
|
1
|
|
|
|
|
47
|
|
12
|
1
|
|
|
1
|
|
4148
|
use DBI; |
|
1
|
|
|
|
|
23392
|
|
|
1
|
|
|
|
|
60
|
|
13
|
1
|
|
|
1
|
|
1508
|
use JSON; |
|
1
|
|
|
|
|
8613
|
|
|
1
|
|
|
|
|
6
|
|
14
|
|
|
|
|
|
|
|
15
|
1
|
|
|
1
|
|
806
|
use Data::Dumper; |
|
1
|
|
|
|
|
6023
|
|
|
1
|
|
|
|
|
2230
|
|
16
|
|
|
|
|
|
|
|
17
|
|
|
|
|
|
|
our $VERSION = '0.04'; |
18
|
|
|
|
|
|
|
|
19
|
|
|
|
|
|
|
=head1 NAME |
20
|
|
|
|
|
|
|
|
21
|
|
|
|
|
|
|
GitHub::RSS - collect data from Github.com for feeding into RSS |
22
|
|
|
|
|
|
|
|
23
|
|
|
|
|
|
|
=head1 SYNOPSIS |
24
|
|
|
|
|
|
|
|
25
|
|
|
|
|
|
|
my $gh = GitHub::RSS->new( |
26
|
|
|
|
|
|
|
dbh => { |
27
|
|
|
|
|
|
|
dsn => "dbi:SQLite:dbname=$store", |
28
|
|
|
|
|
|
|
}, |
29
|
|
|
|
|
|
|
); |
30
|
|
|
|
|
|
|
|
31
|
|
|
|
|
|
|
my $last_updated = $gh->last_check; |
32
|
|
|
|
|
|
|
$gh->fetch_and_store( $github_user => $github_repo, $last_updated ); |
33
|
|
|
|
|
|
|
if( $verbose ) { |
34
|
|
|
|
|
|
|
print "Updated from $last_updated to " . $gh->last_check, "\n"; |
35
|
|
|
|
|
|
|
}; |
36
|
|
|
|
|
|
|
|
37
|
|
|
|
|
|
|
=head1 DESCRIPTION |
38
|
|
|
|
|
|
|
|
39
|
|
|
|
|
|
|
This module provides a cache database for GitHub issues and scripts to |
40
|
|
|
|
|
|
|
periodically update the database from GitHub. |
41
|
|
|
|
|
|
|
|
42
|
|
|
|
|
|
|
This is mainly used for creating an RSS feed from the database, hence the |
43
|
|
|
|
|
|
|
name. |
44
|
|
|
|
|
|
|
|
45
|
|
|
|
|
|
|
=head1 METHODS |
46
|
|
|
|
|
|
|
|
47
|
|
|
|
|
|
|
=head2 C<< ->new >> |
48
|
|
|
|
|
|
|
|
49
|
|
|
|
|
|
|
my $gh = GitHub::RSS->new( |
50
|
|
|
|
|
|
|
dbh => { |
51
|
|
|
|
|
|
|
dsn => 'dbi:SQLite:dbname=db/issues.sqlite', |
52
|
|
|
|
|
|
|
}, |
53
|
|
|
|
|
|
|
); |
54
|
|
|
|
|
|
|
|
55
|
|
|
|
|
|
|
Constructs a new GitHub::RSS instance |
56
|
|
|
|
|
|
|
|
57
|
|
|
|
|
|
|
=over 4 |
58
|
|
|
|
|
|
|
|
59
|
|
|
|
|
|
|
=item * |
60
|
|
|
|
|
|
|
|
61
|
|
|
|
|
|
|
B<gh> - instance of L<Net::GitHub> |
62
|
|
|
|
|
|
|
|
63
|
|
|
|
|
|
|
=cut |
64
|
|
|
|
|
|
|
|
65
|
|
|
|
|
|
|
has 'gh' => ( |
66
|
|
|
|
|
|
|
is => 'ro', |
67
|
|
|
|
|
|
|
default => sub( $self ) { |
68
|
|
|
|
|
|
|
Net::GitHub->new( |
69
|
|
|
|
|
|
|
maybe access_token => $self->token |
70
|
|
|
|
|
|
|
), |
71
|
|
|
|
|
|
|
}, |
72
|
|
|
|
|
|
|
); |
73
|
|
|
|
|
|
|
|
74
|
|
|
|
|
|
|
=item * |
75
|
|
|
|
|
|
|
|
76
|
|
|
|
|
|
|
B<token_file> - name and path of the JSON-format token file containing the |
77
|
|
|
|
|
|
|
GitHub API token. By default, that file is searched for under the name |
78
|
|
|
|
|
|
|
C<github.credentials> in C<.>, C<$ENV{XDG_DATA_HOME}>, C<$ENV{USERPROFILE}> |
79
|
|
|
|
|
|
|
and C<$ENV{HOME}>. |
80
|
|
|
|
|
|
|
|
81
|
|
|
|
|
|
|
=cut |
82
|
|
|
|
|
|
|
|
83
|
|
|
|
|
|
|
has 'token_file' => ( |
84
|
|
|
|
|
|
|
is => 'lazy', |
85
|
|
|
|
|
|
|
default => \&_find_gh_token_file, |
86
|
|
|
|
|
|
|
); |
87
|
|
|
|
|
|
|
|
88
|
|
|
|
|
|
|
=item * |
89
|
|
|
|
|
|
|
|
90
|
|
|
|
|
|
|
B<token> - GitHub API token. If this is missing, it will be attempted to read |
91
|
|
|
|
|
|
|
it from the C<token_file>. |
92
|
|
|
|
|
|
|
|
93
|
|
|
|
|
|
|
=cut |
94
|
|
|
|
|
|
|
|
95
|
|
|
|
|
|
|
has 'token' => ( |
96
|
|
|
|
|
|
|
is => 'lazy', |
97
|
|
|
|
|
|
|
default => \&_read_gh_token, |
98
|
|
|
|
|
|
|
); |
99
|
|
|
|
|
|
|
|
100
|
|
|
|
|
|
|
=item * |
101
|
|
|
|
|
|
|
|
102
|
|
|
|
|
|
|
B<default_user> - name of the GitHub user whose repos will be read |
103
|
|
|
|
|
|
|
|
104
|
|
|
|
|
|
|
=cut |
105
|
|
|
|
|
|
|
|
106
|
|
|
|
|
|
|
has default_user => ( |
107
|
|
|
|
|
|
|
is => 'ro', |
108
|
|
|
|
|
|
|
); |
109
|
|
|
|
|
|
|
|
110
|
|
|
|
|
|
|
=item * |
111
|
|
|
|
|
|
|
|
112
|
|
|
|
|
|
|
B<default_repo> - name of the GitHub repo whose issues will be read |
113
|
|
|
|
|
|
|
|
114
|
|
|
|
|
|
|
=cut |
115
|
|
|
|
|
|
|
|
116
|
|
|
|
|
|
|
has default_repo => ( |
117
|
|
|
|
|
|
|
is => 'ro', |
118
|
|
|
|
|
|
|
); |
119
|
|
|
|
|
|
|
|
120
|
|
|
|
|
|
|
=item * |
121
|
|
|
|
|
|
|
|
122
|
|
|
|
|
|
|
B<dbh> - premade database handle or alternatively a hashref containing |
123
|
|
|
|
|
|
|
the L<DBI> arguments |
124
|
|
|
|
|
|
|
|
125
|
|
|
|
|
|
|
dbh => $dbh, |
126
|
|
|
|
|
|
|
|
127
|
|
|
|
|
|
|
or alternatively |
128
|
|
|
|
|
|
|
|
129
|
|
|
|
|
|
|
dbh => { |
130
|
|
|
|
|
|
|
user => 'scott', |
131
|
|
|
|
|
|
|
password => 'tiger', |
132
|
|
|
|
|
|
|
dsn => 'dbi:SQLite:dbname=db/issues.sqlite', |
133
|
|
|
|
|
|
|
} |
134
|
|
|
|
|
|
|
|
135
|
|
|
|
|
|
|
=cut |
136
|
|
|
|
|
|
|
|
137
|
|
|
|
|
|
|
has dbh => ( |
138
|
|
|
|
|
|
|
is => 'ro', |
139
|
|
|
|
|
|
|
required => 1, |
140
|
|
|
|
|
|
|
coerce => \&_build_dbh, |
141
|
|
|
|
|
|
|
); |
142
|
|
|
|
|
|
|
|
143
|
0
|
|
|
0
|
|
|
# Coercion for the 'dbh' attribute: accept either a ready-made DBI
# database handle or a hashref of connection arguments (dsn, user,
# password, options) from which a new connection is opened.
# Returns a DBI::db handle; dies on any other input.
sub _build_dbh( $args ) {
    require Scalar::Util;
    # Accept any blessed DBI handle, including DBI::db subclasses;
    # an exact ref() string comparison would reject those.
    return $args if Scalar::Util::blessed($args) && $args->isa('DBI::db');
    ref($args) eq 'HASH' or die 'Not a DB handle nor a hashref';
    return DBI->connect( @{$args}{qw/dsn user password options/} );
}
148
|
|
|
|
|
|
|
|
149
|
|
|
|
|
|
|
=item * |
150
|
|
|
|
|
|
|
|
151
|
|
|
|
|
|
|
B<fetch_additional_pages> - number of additional pages to fetch from GitHub. |
152
|
|
|
|
|
|
|
This is relevant when catching up a database for a repository with many issues. |
153
|
|
|
|
|
|
|
|
154
|
|
|
|
|
|
|
=back |
155
|
|
|
|
|
|
|
|
156
|
|
|
|
|
|
|
=cut |
157
|
|
|
|
|
|
|
|
158
|
|
|
|
|
|
|
has fetch_additional_pages => ( |
159
|
|
|
|
|
|
|
is => 'ro', |
160
|
|
|
|
|
|
|
default => '1', |
161
|
|
|
|
|
|
|
); |
162
|
|
|
|
|
|
|
|
163
|
0
|
|
|
0
|
|
|
# Locate the GitHub credentials file.
# Searches, in order, the current directory, $XDG_DATA_HOME,
# $USERPROFILE and $HOME for a file named 'github.credentials'.
# An alternative environment hashref can be passed in (useful for
# testing); it defaults to \%ENV.
# Returns the file name, or undef if no candidate was found.
sub _find_gh_token_file( $self, $env=undef ) {
    $env //= \%ENV;

    my $token_file;

    # This should use File::User
    for my $candidate_dir ('.',
                           # Read the search locations from the passed-in
                           # environment; previously this ignored $env and
                           # always consulted the global %ENV.
                           $env->{XDG_DATA_HOME},
                           $env->{USERPROFILE},
                           $env->{HOME}
    ) {
        next unless defined $candidate_dir;
        if( -f "$candidate_dir/github.credentials" ) {
            $token_file = "$candidate_dir/github.credentials";
            last
        };
    };

    return $token_file
}
183
|
|
|
|
|
|
|
|
184
|
0
|
|
|
0
|
|
|
# Read the GitHub API token from the JSON-format credentials file.
# Falls back to the 'token_file' attribute when no file name is passed.
# Returns the token string, or nothing when no credentials file is
# configured - we then run without a known account.
sub _read_gh_token( $self, $token_file=undef ) {
    my $file = $token_file // $self->token_file;

    # No credentials file at all: run anonymously
    return unless $file;

    open my $fh, '<', $file
        or die "Couldn't open file '$file': $!";
    binmode $fh;
    my $json = do { local $/; <$fh> };
    my $credentials = decode_json( $json );
    return $credentials->{token};
}
200
|
|
|
|
|
|
|
|
201
|
0
|
|
|
|
|
|
# Fetch every issue of a repository, following GitHub's pagination
# until no further result pages remain.
# Returns the list of raw issue hashrefs.
sub fetch_all_issues( $self,
                      $user = $self->default_user,
                      $repo = $self->default_repo,
                      $since=undef ) {
    my @result = $self->fetch_issues( $user, $repo, $since );
    my $gh = $self->gh;
    push @result, $gh->issue->next_page
        while $gh->issue->has_next_page;
    return @result;
}
212
|
|
|
|
|
|
|
|
213
|
0
|
|
|
|
|
|
# Fetch one page of issues for a repository, oldest update first and
# including closed issues. Pass $since to restrict the result to issues
# updated at or after that timestamp.
sub fetch_issues( $self,
                  $user = $self->default_user,
                  $repo = $self->default_repo,
                  $since=undef ) {
    my %query = (
        sort      => 'updated',
        direction => 'asc', # so we can interrupt any time
        state     => 'all', # so we find issues that got closed
        # Only send 'since' when we actually have a cutoff timestamp
        ( defined $since ? ( since => $since ) : () ),
    );
    return $self->gh->issue->repos_issues( $user => $repo, \%query );
}
226
|
|
|
|
|
|
|
|
227
|
|
|
|
|
|
|
=head2 C<< ->fetch_issue_comments >> |
228
|
|
|
|
|
|
|
|
229
|
|
|
|
|
|
|
=cut |
230
|
|
|
|
|
|
|
|
231
|
0
|
|
|
|
|
|
# Fetch the comments of a single issue, identified by its issue number.
# Returns the list of raw comment hashrefs as delivered by the API.
sub fetch_issue_comments( $self, $issue_number,
                          $user=$self->default_user,
                          $repo=$self->default_repo
                        ) {
    # Shouldn't this loop as well, just like with the issues?!
    my $issue_api = $self->gh->issue;
    return $issue_api->comments( $user, $repo, $issue_number );
}
238
|
|
|
|
|
|
|
|
239
|
0
|
|
|
0
|
0
|
|
# Bulk-upsert rows into the given table via REPLACE INTO.
# The column list is taken from the first row; all rows are expected to
# share the same keys. Dies with diagnostic output when the database
# reports an error.
sub write_data( $self, $table, @rows) {
    # Nothing to do for an empty row set; this also avoids dereferencing
    # a missing first row when deriving the column list
    return unless @rows;

    my @columns = sort keys %{ $rows[0] };
    my $statement = sprintf q{replace into "%s" (%s) values (%s)},
        $table,
        join( ",", map qq{"$_"}, @columns ),
        join( ",", ('?') x (0+@columns))
        ;
    my $sth = $self->dbh->prepare( $statement );

    # Collect per-tuple errors outside the eval so we can report them;
    # the original collected them into a lexical scoped to the eval and
    # could never inspect them.
    my @errors;
    my $ok = eval {
        $sth->execute_for_fetch(sub { @rows ? [ @{ shift @rows }{@columns} ] : () }, \@errors);
        1;
    };
    if( !$ok or @errors ) {
        die "Error writing to table '$table': "
            . ( $@ || '' )
            . Dumper( \@errors );
    };
}
254
|
|
|
|
|
|
|
|
255
|
0
|
|
|
0
|
0
|
|
# Store a set of issues and their comments in the database.
# Each issue's 'user' sub-structure is flattened into the user_id /
# user_login / user_gravatar_id columns; all remaining nested
# structures are serialized to JSON strings before writing.
sub store_issues_comments( $self, $user, $repo, $issues ) {
    # Turn every reference-valued column of a row into its JSON text
    my $squish = sub( $row ) {
        for my $value (values %$row) {
            $value = encode_json( $value ) if ref $value;
        }
    };

    for my $issue (@$issues) {
        # Copy the interesting author information into flat columns
        my $author = $issue->{user};
        @{ $issue }{qw( user_id user_login user_gravatar_id )}
            = @{ $author }{qw( id login gravatar_id )};

        # Squish all structure into JSON, for later
        $squish->( $issue );
    }

    for my $issue (@$issues) {
        my @comments = $self->fetch_issue_comments( $issue->{number}, $user => $repo );

        # Squish all structure into JSON, for later
        $squish->( $_ ) for @comments;

        $self->write_data( 'comment' => @comments )
            if @comments;
    }

    # We wrote the comments first so we will refetch them if there is a
    # problem when writing the issue
    $self->write_data( 'issue' => @$issues );
}
287
|
|
|
|
|
|
|
|
288
|
|
|
|
|
|
|
=head2 C<< ->fetch_and_store($user, $repo, $since) >> |
289
|
|
|
|
|
|
|
|
290
|
|
|
|
|
|
|
my $since = $gh->last_check; |
291
|
|
|
|
|
|
|
$gh->fetch_and_store($user, $repo, $since) |
292
|
|
|
|
|
|
|
|
293
|
|
|
|
|
|
|
Fetches all issues and comments modified after the C<$since> timestamp. |
294
|
|
|
|
|
|
|
If C<$since> is missing or C<undef>, all issues will be retrieved. |
295
|
|
|
|
|
|
|
|
296
|
|
|
|
|
|
|
=cut |
297
|
|
|
|
|
|
|
|
298
|
0
|
|
|
|
|
|
# Fetch all issues (and their comments) modified after the $since
# timestamp and store them in the database. When more result pages are
# available, repeats the fetch with an advanced cutoff, for up to
# 'fetch_additional_pages' extra rounds (undef means unlimited).
sub fetch_and_store( $self,
                     $user = $self->default_user,
                     $repo = $self->default_repo,
                     $since = undef) {
    my $dbh = $self->dbh;
    my $gh = $self->gh;

    my $can_fetch_more = $self->fetch_additional_pages;

    # Structured loop instead of the previous label + goto construct
    while( 1 ) {
        my @issues = $self->fetch_issues( $user => $repo, $since );
        my $has_more = $gh->issue->has_next_page;
        $self->store_issues_comments( $user => $repo, \@issues );

        last unless $has_more and (!defined($can_fetch_more) or $can_fetch_more-- > 0);

        # Continue from the newest modification we have seen so far
        $since = $issues[-1]->{updated_at};
    }
}
317
|
|
|
|
|
|
|
|
318
|
0
|
|
|
|
|
|
# Explicitly re-fetch the given issue numbers from GitHub and refresh
# their database entries, including their comments.
sub refetch_issues( $self,
                    $user = $self->default_user,
                    $repo = $self->default_repo,
                    @issue_numbers) {
    my $dbh = $self->dbh;
    my $gh  = $self->gh;

    my @refreshed;
    for my $number (@issue_numbers) {
        push @refreshed, scalar $gh->issue->issue( $user => $repo, $number );
    }
    $self->store_issues_comments( $user => $repo, \@refreshed );
}
328
|
|
|
|
|
|
|
|
329
|
0
|
|
|
0
|
0
|
|
# Re-inflate the named JSON-encoded columns of a database row back into
# Perl data structures, in place. False values (undef, '', 0) are left
# untouched.
sub inflate_fields( $self, $item, @fields ) {
    for my $field (@fields) {
        my $raw = $item->{$field};
        $item->{$field} = decode_json( $raw )
            if $raw;
    }
}
334
|
|
|
|
|
|
|
|
335
|
0
|
|
|
0
|
0
|
|
# Return all issues and comments updated at or after $since, ordered
# for feed generation. The JSON columns 'user' and 'closed_by' are
# inflated back into Perl data structures.
sub issues_and_comments( $self, $since ) {
    my $rows = $self->dbh->selectall_arrayref(<<'SQL', { Slice => {}}, $since, $since);
    select
        i.id
      , i.user
      , i.html_url
      , i.body
      , i.created_at
      , i.updated_at
      , i.title as issue_title
      , i.number as issue_number
    from issue i
    where i.updated_at >= ?
    union all
    select
        c.id
      , c.user
      , c.html_url
      , c.body
      , c.created_at
      , c.updated_at
      , i.title as issue_title
      , i.number as issue_number
    from comment c
    join issue i on c.issue_url=i.url
    where c.updated_at >= ?
    order by i.updated_at, html_url
SQL
    for my $row (@$rows) {
        $self->inflate_fields( $row, qw(user closed_by));
    }
    return @$rows;
}
368
|
|
|
|
|
|
|
|
369
|
0
|
|
|
0
|
0
|
|
# Return all open issues that have at least one comment containing a
# fenced diff block, with the JSON columns 'user' and 'closed_by'
# inflated back into Perl data structures.
sub issues_with_patches( $self ) {
    my $rows = $self->dbh->selectall_arrayref(<<'SQL', { Slice => {}});
    select distinct
          i.* -- later, expand to explicit list
    from issue i
    join comment c on c.issue_url=i.url
    where c.body like '%```diff%'
      and i.state = 'open'
    order by i.url
SQL
    for my $row (@$rows) {
        $self->inflate_fields( $row, qw(user closed_by));
    }
    return @$rows;
}
384
|
|
|
|
|
|
|
|
385
|
0
|
|
|
0
|
0
|
|
# Look up a single stored issue by its issue number.
# Returns the raw database row as a hashref, or undef if the issue is
# not in the database.
sub issue( $self, $issue ) {
    my $rows = $self->dbh->selectall_arrayref(<<'SQL', { Slice => {}}, $issue);
    select
        * -- later, expand to explicit list
    from issue i
    where i.number = ?
    order by i.url
SQL
    return $rows->[0];
}
394
|
|
|
|
|
|
|
|
395
|
0
|
|
|
0
|
0
|
|
# Return all stored comments for the given issue number, ordered by
# their URL. Each comment is the raw database row as a hashref.
sub comments( $self, $issue ) {
    my $rows = $self->dbh->selectall_arrayref(<<'SQL', { Slice => {}}, $issue);
    select
        c.* -- later, expand to explicit list
    from comment c
    join issue i on c.issue_url=i.url
    where i.number = ?
    order by c.url
SQL
    return @$rows;
}
405
|
|
|
|
|
|
|
|
406
|
|
|
|
|
|
|
=head2 C<< ->last_check >> |
407
|
|
|
|
|
|
|
|
408
|
|
|
|
|
|
|
my $since = $gh->last_check; |
409
|
|
|
|
|
|
|
|
410
|
|
|
|
|
|
|
Returns the timestamp of the last stored modification or C<undef> |
411
|
|
|
|
|
|
|
if no issue or comment is stored. |
412
|
|
|
|
|
|
|
|
413
|
|
|
|
|
|
|
=cut |
414
|
|
|
|
|
|
|
|
415
|
|
|
|
|
|
|
|
416
|
0
|
|
|
|
|
|
# Return the timestamp of the most recently updated stored issue, or
# undef when the database is empty.
# NOTE(review): the $user / $repo parameters are currently unused here;
# they are kept for interface symmetry with the fetch methods.
sub last_check( $self,
                $user = $self->default_user,
                $repo = $self->default_repo ) {
    my $last = $self->dbh->selectall_arrayref(<<'SQL', { Slice => {} });
    select
        max(updated_at) as updated_at
    from issue
SQL
    return undef unless @$last; # empty DB
    return $last->[0]->{updated_at};
}
430
|
|
|
|
|
|
|
|
431
|
|
|
|
|
|
|
1; |