ikiwiki/IkiWiki/Render.pm

#!/usr/bin/perl
package IkiWiki;
use warnings;
use strict;
use File::Spec;
use IkiWiki;
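
# Convert the [[WikiLink]] markup in a page's content into html links.
# A link prefixed with a backslash is left as literal markup.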
sub linkify ($$) { #{{{
	my $content=shift;
	my $page=shift;

	$content =~ s{(\\?)$config{wiki_link_regexp}}{
		$2 ? ( $1 ? "[[$2|$3]]" : htmllink($page, titlepage($3), 0, 0, pagetitle($2)))
		   : ( $1 ? "[[$3]]" : htmllink($page, titlepage($3)))
	}eg;

	return $content;
} #}}}
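
# HTML sanitizer, used when the sanitize option is enabled. The
# HTML::Scrubber object is built once and cached in $_scrubber.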
my $_scrubber;
sub scrubber { #{{{
	return $_scrubber if defined $_scrubber;

	eval q{use HTML::Scrubber};
	# Lists based on http://feedparser.org/docs/html-sanitization.html
	$_scrubber = HTML::Scrubber->new(
		allow => [qw{
			a abbr acronym address area b big blockquote br
			button caption center cite code col colgroup dd del
			dfn dir div dl dt em fieldset font form h1 h2 h3 h4
			h5 h6 hr i img input ins kbd label legend li map
			menu ol optgroup option p pre q s samp select small
			span strike strong sub sup table tbody td textarea
			tfoot th thead tr tt u ul var
		}],
		default => [undef, { map { $_ => 1 } qw{
			abbr accept accept-charset accesskey action
			align alt axis border cellpadding cellspacing
			char charoff charset checked cite class
			clear cols colspan color compact coords
			datetime dir disabled enctype for frame
			headers height href hreflang hspace id ismap
			label lang longdesc maxlength media method
			multiple name nohref noshade nowrap prompt
			readonly rel rev rows rowspan rules scope
			selected shape size span src start summary
			tabindex target title type usemap valign
			value vspace width
		}}],
	);
	return $_scrubber;
} # }}}
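
# Convert the source content of the given type to html. Only .mdwn
# (markdown) is supported; the output is scrubbed if sanitize is enabled.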
sub htmlize ($$) { #{{{
	my $type=shift;
	my $content=shift;

	if (! $INC{"/usr/bin/markdown"}) {
		# Load the markdown script as a library. Defining
		# $blosxom::version makes it behave as a blosxom plugin,
		# so it only defines Markdown::Markdown instead of
		# running standalone.
		no warnings 'once';
		$blosxom::version="is a proper perl module too much to ask?";
		use warnings 'all';
		do "/usr/bin/markdown";
	}

	if ($type eq '.mdwn') {
		$content=Markdown::Markdown($content);
	}
	else {
		error("htmlization of $type not supported");
	}

	if ($config{sanitize}) {
		$content=scrubber()->scrub($content);
	}

	return $content;
} #}}}
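
# Return { url, page } entries for each page that links to the given
# page, with common leading directories trimmed from the displayed name.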
sub backlinks ($) { #{{{
	my $page=shift;

	my @links;
	foreach my $p (keys %links) {
		next if bestlink($page, $p) eq $page;
		if (grep { length $_ && bestlink($p, $_) eq $page } @{$links{$p}}) {
			my $href=File::Spec->abs2rel(htmlpage($p), dirname($page));

			# Trim common dir prefixes from both pages.
			my $p_trimmed=$p;
			my $page_trimmed=$page;
			my $dir;
			1 while (($dir)=$page_trimmed=~m!^([^/]+/)!) &&
				defined $dir &&
				$p_trimmed=~s/^\Q$dir\E// &&
				$page_trimmed=~s/^\Q$dir\E//;

			push @links, { url => $href, page => $p_trimmed };
		}
	}

	return sort { $a->{page} cmp $b->{page} } @links;
} #}}}
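
# Return breadcrumb links for the directories leading up to a page,
# starting with the wiki's top level.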
sub parentlinks ($) { #{{{
	my $page=shift;

	my @ret;
	my $pagelink="";
	my $path="";
	my $skip=1;
	foreach my $dir (reverse split("/", $page)) {
		if (! $skip) {
			$path.="../";
			unshift @ret, { url => "$path$dir.html", page => $dir };
		}
		else {
			$skip=0;
		}
	}
	unshift @ret, { url => length $path ? $path : ".", page => $config{wikiname} };
	return @ret;
} #}}}
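
# Expand [[command param="value"]] preprocessor directives in a page's
# content by calling the matching registered preprocess hook. A
# backslash before the directive escapes it.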
sub preprocess ($$) { #{{{
	my $page=shift;
	my $content=shift;

	my $handle=sub {
		my $escape=shift;
		my $command=shift;
		my $params=shift;
		if (length $escape) {
			return "[[$command $params]]";
		}
		elsif (exists $hooks{preprocess}{$command}) {
			my %params;
			while ($params =~ /(\w+)=\"([^"]+)"(\s+|$)/g) {
				$params{$1}=$2;
			}
			return $hooks{preprocess}{$command}{call}->(page => $page, %params);
		}
		else {
			return "[[$command not processed]]";
		}
	};

	$content =~ s{(\\?)$config{wiki_processor_regexp}}{$handle->($1, $2, $3)}eg;
	return $content;
} #}}}
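
# Record that a page depends on the pages matched by a globlist, so it
# gets rebuilt when any of them change.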
sub add_depends ($$) { #{{{
	my $page=shift;
	my $globlist=shift;

	if (! exists $depends{$page}) {
		$depends{$page}=$globlist;
	}
	else {
		$depends{$page}=globlist_merge($depends{$page}, $globlist);
	}
} # }}}
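
# Merge two globlists into one.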
sub globlist_merge ($$) { #{{{
	my $a=shift;
	my $b=shift;

	my $ret="";
	# Only add negated globs if they are not matched by the other globlist.
	foreach my $i ((map { [ $a, $_ ] } split(" ", $b)),
		(map { [ $b, $_ ] } split(" ", $a))) {
		if ($i->[1]=~/^!(.*)/) {
			if (! globlist_match($1, $i->[0])) {
				$ret.=" ".$i->[1];
			}
		}
		else {
			$ret.=" ".$i->[1];
		}
	}

	return $ret;
} #}}}
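
# Fill out the page template (page.tmpl) to produce the complete html
# page for already-htmlized content.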
sub genpage ($$$) { #{{{
	my $content=shift;
	my $page=shift;
	my $mtime=shift;

	my $title=pagetitle(basename($page));

	my $template=HTML::Template->new(blind_cache => 1,
		filename => "$config{templatedir}/page.tmpl");

	if (length $config{cgiurl}) {
		$template->param(editurl => cgiurl(do => "edit", page => $page));
		$template->param(prefsurl => cgiurl(do => "prefs"));
		if ($config{rcs}) {
			$template->param(recentchangesurl => cgiurl(do => "recentchanges"));
		}
	}

	if (length $config{historyurl}) {
		my $u=$config{historyurl};
		$u=~s/\[\[file\]\]/$pagesources{$page}/g;
		$template->param(historyurl => $u);
	}

	if ($config{hyperestraier}) {
		$template->param(hyperestraierurl => cgiurl());
	}

	$template->param(
		title => $title,
		wikiname => $config{wikiname},
		parentlinks => [parentlinks($page)],
		content => $content,
		backlinks => [backlinks($page)],
		discussionlink => htmllink($page, "Discussion", 1, 1),
		mtime => scalar(gmtime($mtime)),
		styleurl => styleurl($page),
	);

	return $template->output;
} #}}}
sub check_overwrite ($$) { #{{{
	# Important security check. Make sure to call this before saving
	# any files to the destination directory.
	my $dest=shift;
	my $src=shift;

	if (! exists $renderedfiles{$src} && -e $dest && ! $config{rebuild}) {
		error("$dest already exists and was rendered from ".
			join(" ",(grep { $renderedfiles{$_} eq $dest } keys
				%renderedfiles)).
			", before, so not rendering from $src");
	}
} #}}}
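
# Return the modification time of a file.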
sub mtime ($) { #{{{
	my $file=shift;

	return (stat($file))[9];
} #}}}
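
# Scan a page's content for WikiLinks; returns the pages linked to,
# plus the page's discussion page.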
sub findlinks ($$) { #{{{
	my $content=shift;
	my $page=shift;

	my @links;
	while ($content =~ /(?<!\\)$config{wiki_link_regexp}/g) {
		push @links, titlepage($2);
	}
	# Discussion links are a special case since they're not in the text
	# of the page, but on its template.
	return @links, "$page/discussion";
} #}}}
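
# Render one source file into the destination directory: pages of a
# known type are linkified, preprocessed and htmlized; anything else is
# copied through unchanged.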
sub render ($) { #{{{
	my $file=shift;

	my $type=pagetype($file);
	my $srcfile=srcfile($file);
	if ($type ne 'unknown') {
		my $content=readfile($srcfile);
		my $page=pagename($file);

		$links{$page}=[findlinks($content, $page)];
		delete $depends{$page};

		$content=linkify($content, $page);
		$content=preprocess($page, $content);
		$content=htmlize($type, $content);

		check_overwrite("$config{destdir}/".htmlpage($page), $page);
		writefile(htmlpage($page), $config{destdir},
			genpage($content, $page, mtime($srcfile)));
		$oldpagemtime{$page}=time;
		$renderedfiles{$page}=htmlpage($page);
	}
	else {
		my $content=readfile($srcfile, 1);
		$links{$file}=[];
		delete $depends{$file};
		check_overwrite("$config{destdir}/$file", $file);
		writefile($file, $config{destdir}, $content, 1);
		$oldpagemtime{$file}=time;
		$renderedfiles{$file}=$file;
	}
} #}}}
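
# Remove a rendered file, and any parent directories that are left empty.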
sub prune ($) { #{{{
	my $file=shift;

	unlink($file);
	my $dir=dirname($file);
	while (rmdir($dir)) {
		$dir=dirname($dir);
	}
} #}}}
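
# Generate the template and config used by the hyperestraier estseek.cgi
# search interface, and symlink the cgi into place.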
sub estcfg () { #{{{
	my $estdir="$config{wikistatedir}/hyperestraier";
	my $cgi=basename($config{cgiurl});
	$cgi=~s/\..*$//;

	open(TEMPLATE, ">$estdir/$cgi.tmpl") ||
		error("write $estdir/$cgi.tmpl: $!");
	print TEMPLATE misctemplate("search",
		"<!--ESTFORM-->\n\n<!--ESTRESULT-->\n\n<!--ESTINFO-->\n\n");
	close TEMPLATE;

	open(TEMPLATE, ">$estdir/$cgi.conf") ||
		error("write $estdir/$cgi.conf: $!");
	my $template=HTML::Template->new(
		filename => "$config{templatedir}/estseek.conf"
	);
	eval q{use Cwd 'abs_path'};
	$template->param(
		index => $estdir,
		tmplfile => "$estdir/$cgi.tmpl",
		destdir => abs_path($config{destdir}),
		url => $config{url},
	);
	print TEMPLATE $template->output;
	close TEMPLATE;

	$cgi="$estdir/".basename($config{cgiurl});
	unlink($cgi);
	symlink("/usr/lib/estraier/estseek.cgi", $cgi) ||
		error("symlink $cgi: $!");
} # }}}
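
# Run an estcmd subcommand against the wiki's search index, optionally
# feeding it a list of filenames on stdin.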
sub estcmd ($;@) { #{{{
	my @params=split(' ', shift);
	push @params, "-cl", "$config{wikistatedir}/hyperestraier";
	if (@_) {
		push @params, "-";
	}

	my $pid=open(CHILD, "|-");
	if ($pid) {
		# parent
		foreach (@_) {
			print CHILD "$_\n";
		}
		close(CHILD) || error("estcmd @params exited nonzero: $?");
	}
	else {
		# child
		open(STDOUT, ">", "/dev/null"); # shut it up (closing won't work)
		exec("estcmd", @params) || error("can't run estcmd");
	}
} #}}}
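
# The main rebuild loop: scan the srcdir and underlaydir for pages,
# render anything new or changed, and then rerender pages affected by
# added or removed links, changed backlinks, or dependencies.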
sub refresh () { #{{{
	# find existing pages
	my %exists;
	my @files;
	eval q{use File::Find};
	find({
		no_chdir => 1,
		wanted => sub {
			if (/$config{wiki_file_prune_regexp}/) {
				$File::Find::prune=1;
			}
			elsif (! -d $_ && ! -l $_) {
				my ($f)=/$config{wiki_file_regexp}/; # untaint
				if (! defined $f) {
					warn("skipping bad filename $_\n");
				}
				else {
					$f=~s/^\Q$config{srcdir}\E\/?//;
					push @files, $f;
					$exists{pagename($f)}=1;
				}
			}
		},
	}, $config{srcdir});
	find({
		no_chdir => 1,
		wanted => sub {
			if (/$config{wiki_file_prune_regexp}/) {
				$File::Find::prune=1;
			}
			elsif (! -d $_ && ! -l $_) {
				my ($f)=/$config{wiki_file_regexp}/; # untaint
				if (! defined $f) {
					warn("skipping bad filename $_\n");
				}
				else {
					# Don't add files that are in the
					# srcdir.
					$f=~s/^\Q$config{underlaydir}\E\/?//;
					if (! -e "$config{srcdir}/$f" &&
						! -l "$config{srcdir}/$f") {
						push @files, $f;
						$exists{pagename($f)}=1;
					}
				}
			}
		},
	}, $config{underlaydir});
	my %rendered;

	# check for added or removed pages
	my @add;
	foreach my $file (@files) {
		my $page=pagename($file);
		if (! $oldpagemtime{$page}) {
			debug("new page $page") unless exists $pagectime{$page};
			push @add, $file;
			$links{$page}=[];
			$pagesources{$page}=$file;
			$pagectime{$page}=mtime(srcfile($file))
				unless exists $pagectime{$page};
		}
	}
	my @del;
	foreach my $page (keys %oldpagemtime) {
		if (! $exists{$page}) {
			debug("removing old page $page");
			push @del, $pagesources{$page};
			prune($config{destdir}."/".$renderedfiles{$page});
			delete $renderedfiles{$page};
			$oldpagemtime{$page}=0;
			delete $pagesources{$page};
		}
	}
	# render any updated files
	foreach my $file (@files) {
		my $page=pagename($file);

		if (! exists $oldpagemtime{$page} ||
			mtime(srcfile($file)) > $oldpagemtime{$page}) {
			debug("rendering changed file $file");
			render($file);
			$rendered{$file}=1;
		}
	}

	# if any files were added or removed, check to see if each page
	# needs an update due to linking to them or inlining them.
	# TODO: inefficient; pages may get rendered above and again here;
	# problem is the bestlink may have changed and we won't know until
	# now
	if (@add || @del) {
		FILE: foreach my $file (@files) {
			my $page=pagename($file);
			foreach my $f (@add, @del) {
				my $p=pagename($f);
				foreach my $link (@{$links{$page}}) {
					if (bestlink($page, $link) eq $p) {
						debug("rendering $file, which links to $p");
						render($file);
						$rendered{$file}=1;
						next FILE;
					}
				}
			}
		}
	}
	# Handle backlinks; if a page has added/removed links, update the
	# pages it links to. Also handles rebuilding dependent pages.
	# TODO: inefficient; pages may get rendered above and again here;
	# problem is the backlinks could be wrong in the first pass render
	# above
	if (%rendered || @del) {
		foreach my $f (@files) {
			my $p=pagename($f);
			if (exists $depends{$p}) {
				foreach my $file (keys %rendered, @del) {
					next if $f eq $file;
					my $page=pagename($file);
					if (globlist_match($page, $depends{$p})) {
						debug("rendering $f, which depends on $page");
						render($f);
						$rendered{$f}=1;
						last;
					}
				}
			}
		}
		my %linkchanged;
		foreach my $file (keys %rendered, @del) {
			my $page=pagename($file);
			if (exists $links{$page}) {
				foreach my $link (map { bestlink($page, $_) } @{$links{$page}}) {
					if (length $link &&
						(! exists $oldlinks{$page} ||
						! grep { bestlink($page, $_) eq $link } @{$oldlinks{$page}})) {
						$linkchanged{$link}=1;
					}
				}
			}
			if (exists $oldlinks{$page}) {
				foreach my $link (map { bestlink($page, $_) } @{$oldlinks{$page}}) {
					if (length $link &&
						(! exists $links{$page} ||
						! grep { bestlink($page, $_) eq $link } @{$links{$page}})) {
						$linkchanged{$link}=1;
					}
				}
			}
		}
		foreach my $link (keys %linkchanged) {
			my $linkfile=$pagesources{$link};
			if (defined $linkfile) {
				debug("rendering $linkfile, to update its backlinks");
				render($linkfile);
				$rendered{$linkfile}=1;
			}
		}
	}
	if ($config{hyperestraier} && (%rendered || @del)) {
		debug("updating hyperestraier search index");
		if (%rendered) {
			estcmd("gather -cm -bc -cl -sd",
				map { $config{destdir}."/".$renderedfiles{pagename($_)} }
				keys %rendered);
		}
		if (@del) {
			estcmd("purge -cl");
		}

		debug("generating hyperestraier cgi config");
		estcfg();
	}
} #}}}
1