line | stmt | bran | cond | sub | pod | time | code
   1 |      |      |      |     |     |      | package Text::Wikispaces2Markdown;
   2 |    2 |      |      |   2 |     | 1947 | use strict;
     |    2 |      |      |     |     |    4 |
     |    2 |      |      |     |     |  730 |
   3 |      |      |      |     |     |      | our $VERSION = "0.1";
   4 |      |      |      |     |     |      |
   5 |      |      |      |     |     |      | =head1 NAME
   6 |      |      |      |     |     |      |
   7 |      |      |      |     |     |      | Text::Wikispaces2Markdown - convert wikispaces pages to markdown
   8 |      |      |      |     |     |      |
   9 |      |      |      |     |     |      | =head1 SYNOPSIS
  10 |      |      |      |     |     |      |
  11 |      |      |      |     |     |      | Do a rough conversion of Wikispaces.com markup into Markdown.
  12 |      |      |      |     |     |      |
  13 |      |      |      |     |     |      | =head1 METHODS
  14 |      |      |      |     |     |      |
  15 |      |      |      |     |     |      | =head2 convert
  16 |      |      |      |     |     |      |
  17 |      |      |      |     |     |      | Does the conversion using dumb regexp rules. Would fare better with a proper parser.
  18 |      |      |      |     |     |      |
  19 |      |      |      |     |     |      | =cut
  20 |      |      |      |     |     |      |
  21 |      |      |      |     |     |      | sub convert {
  22 |    1 |   50 |      |   1 |   1 |   79 |     shift if ( $_[0] eq __PACKAGE__ ); # oops, called in OOP fashion.
  23 |      |      |      |     |     |      |
  24 |      |      |      |     |     |      |     # Parameters:
  25 |    1 |      |      |     |     |    3 |     my $text = shift; # text to be parsed
  26 |      |      |      |     |     |      |
  27 |    1 |      |      |     |     |   21 |     my @lines = split /\n/, $text;
  28 |      |      |      |     |     |      |
  29 |    1 |      |      |     |     |    4 |     for my $i (0..$#lines) {
  30 |   65 |      |      |     |     |   96 |         $_ = $lines[$i];
  31 |      |      |      |     |     |      |         # convert links
  32 |   65 |      |      |     |     |  178 |         $lines[$i] =~ s/\[\[(.*?)\|(.*?)]]/[$2]($1)/g;
  33 |      |      |      |     |     |      |         # convert italic
  34 |   65 |      |      |     |     |  116 |         $lines[$i] =~ s{(?<!:)//}{_}g; # FIXME: very crude avoidance of URLs; will break in code blocks
  35 |      |      |      |     |     |      |         # convert ToC
  36 |   65 |      |      |     |     |   94 |         $lines[$i] =~ s/\Q[[toc]]/{{toc}}/g;
  37 |      |      |      |     |     |      |         # convert nested lists
  38 |   65 |      |      |     |     |  115 |         $lines[$i] =~ s/^([#*])\1+/(' ' x length $&) . $1/meg;
     |    2 |      |      |     |     |    9 |
  39 |      |      |      |     |     |      |
  40 |      |      |      |     |     |      |         # convert ordered lists
  41 |   65 |      |      |     |     |  101 |         $lines[$i] =~ s/^(\s*)#/$1 . '1.'/meg;
     |    4 |      |      |     |     |   11 |
  42 |      |      |      |     |     |      |         # add a line before lists, as (annoyingly) required by Markdown
  43 |   65 |  100 |   66 |     |     |  276 |         if ($lines[$i] =~ /^[*0-9]/ and $i > 0 and $lines[$i-1] !~ /^\s*$|^\s*[*0-9]/) {
     |      |      |  100 |     |     |      |
  44 |    4 |      |      |     |     |   11 |             substr($lines[$i], 0, 0, "\n");
  45 |      |      |      |     |     |      |         }
  46 |      |      |      |     |     |      |         # convert headings
  47 |   65 |  100 |      |     |     |  174 |         if ($lines[$i] =~ s/^(=+)(.+?)=*$/('#' x length $1) . ' ' . $2/me) {
     |   10 |      |      |     |     |   46 |
  48 |      |      |      |     |     |      |             # make sure headings are preceded by a blank line
  49 |   10 |  100 |  100 |     |     |   52 |             substr($lines[$i], 0, 0, "\n") if $i > 0 and $lines[$i-1] !~ /^\s*$/;
  50 |      |      |      |     |     |      |             # remove explicit anchors to headings (e.g. '=[[#Reliability]] Reliability='; assumes you run a {{toc}})
  51 |      |      |      |     |     |      |             # $lines[$i] =~ s/\[\[#(.*?)]] \1/[[$1]]/;
  52 |      |      |      |     |     |      |             # remove anchors anyway - they seem to be an odd artefact of Wikispaces. Be careful in the strange case in which anchors are named something other than the heading's 'a' name
  53 |   10 |      |      |     |     |   33 |             $lines[$i] =~ s/\[\[#(.*?)]] (.*)/$2/;
  54 |      |      |      |     |     |      |         }
  55 |      |      |      |     |     |      |
  56 |      |      |      |     |     |      |
  57 |      |      |      |     |     |      |
  58 |      |      |      |     |     |      |     }
  59 |      |      |      |     |     |      |
  60 |    1 |   50 |      |     |     |   40 |     return (join "\n", @lines) . ($text =~ /(\n+)\z/? $1 : ''); # append the last \n if any, which would be lost by the initial split
  61 |      |      |      |     |     |      | }
  62 |      |      |      |     |     |      |
  63 |      |      |      |     |     |      | =head1 AUTHORS
  64 |      |      |      |     |     |      |
  65 |      |      |      |     |     |      | Dan Dascalescu (dandv), L<http://dandascalescu.com>
  66 |      |      |      |     |     |      |
  67 |      |      |      |     |     |      | =head1 License
  68 |      |      |      |     |     |      |
  69 |      |      |      |     |     |      | This module is licensed under the same terms as Perl itself.
  70 |      |      |      |     |     |      |
  71 |      |      |      |     |     |      | =cut
  72 |      |      |      |     |     |      |
  73 |      |      |      |     |     |      | 1;
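
A minimal usage sketch of the module covered above. It assumes only what the listing shows: convert() takes a single string of Wikispaces markup, tolerates both plain-function and class-method calls (the guard at line 22), and returns the converted text. The sample wikitext is invented here purely to exercise the documented rules; the exact Markdown output is not asserted.

#!/usr/bin/perl
use strict;
use warnings;
use Text::Wikispaces2Markdown;

# Invented sample input touching the rules in the listing:
# [[url|label]] links, //italic//, [[toc]], '=' headings, '#'/'*' lists.
my $wikitext = <<'WIKI';
[[toc]]
=Overview=
This page uses //italic// text and a [[http://example.com|link]].
* first bullet
** nested bullet
# first numbered item
WIKI

# Plain-function style ...
my $markdown = Text::Wikispaces2Markdown::convert($wikitext);

# ... or class-method style; the guard at line 22 strips the package name.
my $also_markdown = Text::Wikispaces2Markdown->convert($wikitext);

print $markdown;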