2006-04-25 01:09:26 +02:00
|
|
|
#!/usr/bin/perl
|
|
|
|
|
2006-03-23 07:51:15 +01:00
|
|
|
package IkiWiki;
|
|
|
|
|
|
|
|
use warnings;
|
|
|
|
use strict;
|
|
|
|
use File::Spec;
|
2006-05-02 08:53:33 +02:00
|
|
|
use IkiWiki;
|
2006-03-23 07:51:15 +01:00
|
|
|
|
|
|
|
sub linkify ($$) { #{{{
	# Expand [[WikiLink]] markup in $content into html links, resolved
	# relative to $page.  A backslash before a link escapes it: the
	# backslash is dropped and the [[...]] text is left literal.
	my $content=shift;
	my $page=shift;

	$content =~ s{(\\?)$config{wiki_link_regexp}}{
		# Copy the captures into lexicals immediately: titlepage()
		# and pagetitle() run their own regexes, which would clobber
		# $2/$3 between argument evaluations in the original
		# one-expression form.
		my ($escape, $text, $link)=($1, $2, $3);
		if ($text) {
			# [[text|link]] form.
			$escape
				? "[[$text|$link]]"
				: htmllink($page, titlepage($link), 0, 0, pagetitle($text));
		}
		else {
			# Plain [[link]] form.
			$escape
				? "[[$link]]"
				: htmllink($page, titlepage($link));
		}
	}eg;

	return $content;
} #}}}
|
|
|
|
|
2006-04-25 05:18:21 +02:00
|
|
|
# Cached HTML::Scrubber instance; built once on first use.
my $_scrubber;
sub scrubber { #{{{
	# Return a shared HTML::Scrubber configured with a whitelist of
	# safe tags and attributes, constructing it lazily on first call.
	return $_scrubber if defined $_scrubber;

	eval q{use HTML::Scrubber};
	# Fail with a clear message if the module is missing, rather than
	# letting the ->new below die with "Can't locate object method".
	error("HTML::Scrubber is required for html sanitization: $@") if $@;
	# Lists based on http://feedparser.org/docs/html-sanitization.html
	$_scrubber = HTML::Scrubber->new(
		allow => [qw{
			a abbr acronym address area b big blockquote br
			button caption center cite code col colgroup dd del
			dfn dir div dl dt em fieldset font form h1 h2 h3 h4
			h5 h6 hr i img input ins kbd label legend li map
			menu ol optgroup option p pre q s samp select small
			span strike strong sub sup table tbody td textarea
			tfoot th thead tr tt u ul var
		}],
		# Rule for all tags: strip any attribute not in this list.
		default => [undef, { map { $_ => 1 } qw{
			abbr accept accept-charset accesskey action
			align alt axis border cellpadding cellspacing
			char charoff charset checked cite class
			clear cols colspan color compact coords
			datetime dir disabled enctype for frame
			headers height href hreflang hspace id ismap
			label lang longdesc maxlength media method
			multiple name nohref noshade nowrap prompt
			readonly rel rev rows rowspan rules scope
			selected shape size span src start summary
			tabindex target title type usemap valign
			value vspace width
		}}],
	);
	return $_scrubber;
} # }}}
|
|
|
|
|
2006-03-23 07:51:15 +01:00
|
|
|
sub htmlize ($$) { #{{{
	# Convert page source of the given type to html.  Currently only
	# markdown (.mdwn) is supported; anything else is a fatal error.
	# Runs the result through the scrubber when sanitization is enabled.
	my $type=shift;
	my $content=shift;

	# Load /usr/bin/markdown (the Markdown.pl script) the first time
	# through.  It is not a proper module, so it is pulled in with
	# "do"; %INC is checked to avoid re-loading on every call.
	if (! $INC{"/usr/bin/markdown"}) {
		# Markdown.pl expects to run under blosxom and pokes at
		# $blosxom::version; setting it keeps it quiet.  The "once"
		# warning is disabled only for that single assignment.
		no warnings 'once';
		$blosxom::version="is a proper perl module too much to ask?";
		use warnings 'all';
		do "/usr/bin/markdown";
	}

	if ($type eq '.mdwn') {
		$content=Markdown::Markdown($content);
	}
	else {
		error("htmlization of $type not supported");
	}

	# Optionally strip unsafe html (see scrubber()).
	if ($config{sanitize}) {
		$content=scrubber()->scrub($content);
	}

	return $content;
} #}}}
|
|
|
|
|
|
|
|
sub backlinks ($) { #{{{
	# Return, sorted by page name, a list of { url, page } hashes for
	# every page that links to $page.  The url is relative to $page's
	# directory; the page name has the directory prefix shared with
	# $page trimmed off for display.
	my $page=shift;

	my @links;
	foreach my $p (keys %links) {
		# Don't count the page linking to itself (via name resolution).
		next if bestlink($page, $p) eq $page;
		if (grep { length $_ && bestlink($p, $_) eq $page } @{$links{$p}}) {
			my $href=File::Spec->abs2rel(htmlpage($p), dirname($page));

			# Trim common dir prefixes from both pages.
			# The chained &&s strip one leading "dir/" component
			# from both names per iteration, stopping as soon as
			# the prefixes diverge.
			my $p_trimmed=$p;
			my $page_trimmed=$page;
			my $dir;
			1 while (($dir)=$page_trimmed=~m!^([^/]+/)!) &&
			        defined $dir &&
			        $p_trimmed=~s/^\Q$dir\E// &&
			        $page_trimmed=~s/^\Q$dir\E//;

			push @links, { url => $href, page => $p_trimmed };
		}
	}

	return sort { $a->{page} cmp $b->{page} } @links;
} #}}}
|
|
|
|
|
|
|
|
sub parentlinks ($) { #{{{
	# Build the breadcrumb trail for $page: a list of { url, page }
	# hashes, starting with the wiki's top page and descending through
	# each parent directory.  Urls are relative to $page's location.
	my $page=shift;

	# The final path component is the page itself, not a parent.
	my @parents=split("/", $page);
	pop @parents;

	my @ret;
	my $path="";
	# Walk from the nearest parent up to the root, so the nearest one
	# gets "../", the next "../../", and so on.
	foreach my $dir (reverse @parents) {
		$path.="../";
		unshift @ret, { url => "$path$dir.html", page => $dir };
	}
	# The wiki top page; "." when $page is already at the top level.
	unshift @ret, { url => length $path ? $path : ".", page => $config{wikiname} };
	return @ret;
} #}}}
|
|
|
|
|
2006-04-04 22:57:46 +02:00
|
|
|
sub preprocess ($$) { #{{{
	# Expand [[command param="value" ...]] preprocessor directives in
	# $content by dispatching to registered preprocess hooks.  A
	# backslash before a directive escapes it, leaving the [[...]]
	# text literal (minus the backslash).
	my $page=shift;
	my $content=shift;

	my $handle=sub {
		my $escape=shift;	# "\" if the directive was escaped, else ""
		my $command=shift;	# directive name
		my $params=shift;	# raw parameter string
		if (length $escape) {
			# Escaped: reproduce the directive as plain text.
			return "[[$command $params]]";
		}
		elsif (exists $hooks{preprocess}{$command}) {
			# Parse key="value" pairs out of the parameter string.
			# NOTE(review): values cannot contain embedded quotes
			# and empty values ("") are not matched — confirm this
			# is intentional.
			my %params;
			while ($params =~ /(\w+)=\"([^"]+)"(\s+|$)/g) {
				$params{$1}=$2;
			}
			return $hooks{preprocess}{$command}{call}->(page => $page, %params);
		}
		else {
			# No hook registered for this directive.
			return "[[$command not processed]]";
		}
	};

	# $1=escape, $2=command, $3=params per wiki_processor_regexp.
	$content =~ s{(\\?)$config{wiki_processor_regexp}}{$handle->($1, $2, $3)}eg;
	return $content;
} #}}}
|
|
|
|
|
2006-05-02 04:34:33 +02:00
|
|
|
sub add_depends ($$) { #{{{
	# Record that $page depends on the pages matched by $globlist, so
	# it gets rebuilt when any of them change.  Merges with any globs
	# already registered for the page.
	my ($page, $globlist)=@_;

	if (exists $depends{$page}) {
		$depends{$page}=globlist_merge($depends{$page}, $globlist);
	}
	else {
		$depends{$page}=$globlist;
	}
} # }}}
|
2006-03-24 02:16:32 +01:00
|
|
|
|
2006-05-02 17:22:49 +02:00
|
|
|
sub globlist_merge ($$) { #{{{
	# Merge two space-separated glob lists into one.  Plain globs from
	# both lists are always kept; a negated glob (!foo) is only kept
	# when the *other* list does not match it, so that it cannot cancel
	# out pages the other list wants.
	my $x=shift;
	my $y=shift;

	my $merged="";
	# Pair every glob with the opposite list it must be checked against.
	foreach my $pair ((map { [ $x, $_ ] } split(" ", $y)),
	                  (map { [ $y, $_ ] } split(" ", $x))) {
		my ($other, $glob)=@$pair;
		if ($glob=~/^!(.*)/) {
			# Negated glob: drop it if the other list matches it.
			$merged.=" ".$glob unless globlist_match($1, $other);
		}
		else {
			$merged.=" ".$glob;
		}
	}

	return $merged;
} #}}}
|
|
|
|
|
2006-03-23 22:00:51 +01:00
|
|
|
sub genpage ($$$) { #{{{
	# Wrap already-htmlized $content in the page template, filling in
	# the title, navigation links, backlinks and so on.  Returns the
	# complete html page as a string.
	my $content=shift;
	my $page=shift;
	my $mtime=shift;	# epoch seconds; displayed as last-modified time

	my $title=pagetitle(basename($page));

	my $template=HTML::Template->new(blind_cache => 1,
		filename => "$config{templatedir}/page.tmpl");

	# Edit/prefs (and recentchanges when an rcs is in use) links only
	# make sense when a cgi url is configured.
	if (length $config{cgiurl}) {
		$template->param(editurl => cgiurl(do => "edit", page => $page));
		$template->param(prefsurl => cgiurl(do => "prefs"));
		if ($config{rcs}) {
			$template->param(recentchangesurl => cgiurl(do => "recentchanges"));
		}
	}

	if (length $config{historyurl}) {
		# historyurl is a url template; [[file]] expands to the
		# page's source file name.
		my $u=$config{historyurl};
		$u=~s/\[\[file\]\]/$pagesources{$page}/g;
		$template->param(historyurl => $u);
	}
	if ($config{hyperestraier}) {
		# Search form posts to the wiki cgi when the hyperestraier
		# search engine is enabled.
		$template->param(hyperestraierurl => cgiurl());
	}

	$template->param(
		title => $title,
		wikiname => $config{wikiname},
		parentlinks => [parentlinks($page)],
		content => $content,
		backlinks => [backlinks($page)],
		discussionlink => htmllink($page, "Discussion", 1, 1),
		mtime => scalar(gmtime($mtime)),
		styleurl => styleurl($page),
	);

	return $template->output;
} #}}}
|
|
|
|
|
|
|
|
sub check_overwrite ($$) { #{{{
	# Important security check. Make sure to call this before saving
	# any files to the source directory.
	# Dies (via error) when $dest exists but is not known to have been
	# rendered from $src, unless a full rebuild is in progress.
	my ($dest, $src)=@_;

	# Fine if we rendered this source before, or if we're rebuilding
	# everything, or if the destination doesn't exist yet.
	return if exists $renderedfiles{$src};
	return unless -e $dest;
	return if $config{rebuild};

	my @rendered_from=grep { $renderedfiles{$_} eq $dest } keys %renderedfiles;
	error("$dest already exists and was rendered from ".
		join(" ", @rendered_from).
		", before, so not rendering from $src");
} #}}}
|
|
|
|
|
|
|
|
sub mtime ($) { #{{{
	# Return the last-modification time of $file in epoch seconds
	# (undef when the file cannot be stat'd).
	my $file=shift;

	my @st=stat($file);
	return $st[9];
} #}}}
|
|
|
|
|
|
|
|
sub findlinks ($$) { #{{{
	# Scan $content for unescaped [[WikiLinks]] and return the list of
	# link targets (normalized through titlepage).
	my ($content, $page)=@_;

	my @found;
	while ($content =~ /(?<!\\)$config{wiki_link_regexp}/g) {
		# $2 is the link-target capture inside wiki_link_regexp.
		push @found, titlepage($2);
	}
	# Discussion links are a special case since they're not in the text
	# of the page, but on its template.
	return @found, "$page/discussion";
} #}}}
|
|
|
|
|
|
|
|
sub render ($) { #{{{
	# Render one source file into the destination directory.  Files of
	# a known page type are linkified, preprocessed, htmlized and
	# wrapped in the page template; anything else is copied through
	# unchanged.  Updates the global link/dependency/render bookkeeping
	# as a side effect.
	my $file=shift;

	my $type=pagetype($file);
	my $srcfile=srcfile($file);
	if ($type ne 'unknown') {
		my $content=readfile($srcfile);
		my $page=pagename($file);

		# Record outgoing links, and clear old dependencies so the
		# preprocess hooks can re-register them.
		$links{$page}=[findlinks($content, $page)];
		delete $depends{$page};

		$content=linkify($content, $page);
		$content=preprocess($page, $content);
		$content=htmlize($type, $content);

		check_overwrite("$config{destdir}/".htmlpage($page), $page);
		writefile(htmlpage($page), $config{destdir},
			genpage($content, $page, mtime($srcfile)));
		$oldpagemtime{$page}=time;
		$renderedfiles{$page}=htmlpage($page);
	}
	else {
		# Not a wiki page; copy it through verbatim.  The extra "1"
		# argument presumably selects raw/binary mode — confirm
		# against readfile/writefile in IkiWiki.pm.
		my $content=readfile($srcfile, 1);
		$links{$file}=[];
		delete $depends{$file};
		check_overwrite("$config{destdir}/$file", $file);
		writefile($file, $config{destdir}, $content, 1);
		$oldpagemtime{$file}=time;
		$renderedfiles{$file}=$file;
	}
} #}}}
|
|
|
|
|
|
|
|
sub prune ($) { #{{{
	# Delete $file, then remove each newly-empty parent directory,
	# walking upward until a non-empty (or unremovable) one is hit.
	my $file=shift;

	unlink($file);
	my $dir=dirname($file);
	# rmdir fails on a non-empty directory, which ends the climb.
	$dir=dirname($dir) while rmdir($dir);
} #}}}
|
|
|
|
|
2006-03-30 00:21:23 +02:00
|
|
|
sub estcfg () { #{{{
	# Generate the hyperestraier search cgi's template and config files
	# under the wiki state dir, and symlink the estseek cgi into place.
	my $estdir="$config{wikistatedir}/hyperestraier";
	my $cgi=basename($config{cgiurl});
	$cgi=~s/\..*$//;

	# Lexical filehandles and 3-arg open instead of the one global
	# bareword handle; close is checked so buffered write errors on
	# these generated files are not silently dropped.
	open(my $tmplfh, ">", "$estdir/$cgi.tmpl") ||
		error("write $estdir/$cgi.tmpl: $!");
	print $tmplfh misctemplate("search",
		"<!--ESTFORM-->\n\n<!--ESTRESULT-->\n\n<!--ESTINFO-->\n\n");
	close($tmplfh) ||
		error("write $estdir/$cgi.tmpl: $!");

	open(my $conffh, ">", "$estdir/$cgi.conf") ||
		error("write $estdir/$cgi.conf: $!");
	my $template=HTML::Template->new(
		filename => "$config{templatedir}/estseek.conf"
	);
	eval q{use Cwd 'abs_path'};
	$template->param(
		index => $estdir,
		tmplfile => "$estdir/$cgi.tmpl",
		destdir => abs_path($config{destdir}),
		url => $config{url},
	);
	print $conffh $template->output;
	close($conffh) ||
		error("write $estdir/$cgi.conf: $!");

	# Replace any stale symlink with a fresh one to the estseek cgi.
	$cgi="$estdir/".basename($config{cgiurl});
	unlink($cgi);
	symlink("/usr/lib/estraier/estseek.cgi", $cgi) ||
		error("symlink $cgi: $!");
} # }}}
|
|
|
|
|
|
|
|
sub estcmd ($;@) { #{{{
	# Run an estcmd subcommand against the wiki's hyperestraier index.
	# First argument: space-separated subcommand and flags; any further
	# arguments are fed to estcmd on its stdin, one per line.
	my @params=split(' ', shift);
	push @params, "-cl", "$config{wikistatedir}/hyperestraier";
	if (@_) {
		# Tell estcmd to read its file list from stdin.
		push @params, "-";
	}

	my $pid=open(my $child, "|-");
	defined $pid || error("can't fork: $!");
	if ($pid) {
		# parent: feed the list down the pipe, then reap the child.
		foreach (@_) {
			print $child "$_\n";
		}
		close($child) || error("estcmd @params exited nonzero: $?");
	}
	else {
		# child
		# Redirect stdout to /dev/null to shut estcmd up (closing
		# won't work).  The original used a 2-arg read-mode
		# open(STDOUT, "/dev/null"), which opens STDOUT for
		# *reading*; write mode is what's intended here.
		open(STDOUT, ">", "/dev/null");
		exec("estcmd", @params) || error("can't run estcmd");
	}
} #}}}
|
|
|
|
|
2006-03-23 07:51:15 +01:00
|
|
|
sub refresh () { #{{{
	# Top-level rebuild pass: scan srcdir and underlaydir for source
	# files, work out what was added/removed/changed, render everything
	# that needs it (including pages affected via links, dependencies
	# and backlinks), and update the search index if enabled.

	# find existing pages
	my %exists;
	my @files;
	eval q{use File::Find};
	find({
		no_chdir => 1,
		wanted => sub {
			if (/$config{wiki_file_prune_regexp}/) {
				$File::Find::prune=1;
			}
			elsif (! -d $_ && ! -l $_) {
				my ($f)=/$config{wiki_file_regexp}/; # untaint
				if (! defined $f) {
					warn("skipping bad filename $_\n");
				}
				else {
					# Store paths relative to srcdir.
					$f=~s/^\Q$config{srcdir}\E\/?//;
					push @files, $f;
					$exists{pagename($f)}=1;
				}
			}
		},
	}, $config{srcdir});
	# Same scan over the underlay, which provides default pages.
	find({
		no_chdir => 1,
		wanted => sub {
			if (/$config{wiki_file_prune_regexp}/) {
				$File::Find::prune=1;
			}
			elsif (! -d $_ && ! -l $_) {
				my ($f)=/$config{wiki_file_regexp}/; # untaint
				if (! defined $f) {
					warn("skipping bad filename $_\n");
				}
				else {
					# Don't add files that are in the
					# srcdir.
					$f=~s/^\Q$config{underlaydir}\E\/?//;
					if (! -e "$config{srcdir}/$f" &&
					    ! -l "$config{srcdir}/$f") {
						push @files, $f;
						$exists{pagename($f)}=1;
					}
				}
			}
		},
	}, $config{underlaydir});

	my %rendered;

	# check for added or removed pages
	my @add;
	foreach my $file (@files) {
		my $page=pagename($file);
		if (! $oldpagemtime{$page}) {
			debug("new page $page") unless exists $pagectime{$page};
			push @add, $file;
			$links{$page}=[];
			$pagesources{$page}=$file;
			# Preserve an existing creation time across rebuilds.
			$pagectime{$page}=mtime(srcfile($file))
				unless exists $pagectime{$page};
		}
	}
	my @del;
	foreach my $page (keys %oldpagemtime) {
		if (! $exists{$page}) {
			debug("removing old page $page");
			push @del, $pagesources{$page};
			prune($config{destdir}."/".$renderedfiles{$page});
			delete $renderedfiles{$page};
			# Mtime 0 marks the page as gone but remembered.
			$oldpagemtime{$page}=0;
			delete $pagesources{$page};
		}
	}

	# render any updated files
	foreach my $file (@files) {
		my $page=pagename($file);

		if (! exists $oldpagemtime{$page} ||
		    mtime(srcfile($file)) > $oldpagemtime{$page}) {
			debug("rendering changed file $file");
			render($file);
			$rendered{$file}=1;
		}
	}

	# if any files were added or removed, check to see if each page
	# needs an update due to linking to them or inlining them.
	# TODO: inefficient; pages may get rendered above and again here;
	# problem is the bestlink may have changed and we won't know until
	# now
	if (@add || @del) {
		FILE: foreach my $file (@files) {
			my $page=pagename($file);
			foreach my $f (@add, @del) {
				my $p=pagename($f);
				foreach my $link (@{$links{$page}}) {
					if (bestlink($page, $link) eq $p) {
						debug("rendering $file, which links to $p");
						render($file);
						$rendered{$file}=1;
						next FILE;
					}
				}
			}
		}
	}

	# Handle backlinks; if a page has added/removed links, update the
	# pages it links to. Also handles rebuilding dependent pages.
	# TODO: inefficient; pages may get rendered above and again here;
	# problem is the backlinks could be wrong in the first pass render
	# above
	if (%rendered || @del) {
		# Re-render any page whose dependency globlist matches a
		# page that changed or was deleted.
		foreach my $f (@files) {
			my $p=pagename($f);
			if (exists $depends{$p}) {
				foreach my $file (keys %rendered, @del) {
					next if $f eq $file;
					my $page=pagename($file);
					if (globlist_match($page, $depends{$p})) {
						debug("rendering $f, which depends on $page");
						render($f);
						$rendered{$f}=1;
						last;
					}
				}
			}
		}

		# Collect every link target that was added or removed by
		# the pages rendered/deleted this pass; those targets need
		# their backlink lists refreshed.
		my %linkchanged;
		foreach my $file (keys %rendered, @del) {
			my $page=pagename($file);

			# Links present now but not before.
			if (exists $links{$page}) {
				foreach my $link (map { bestlink($page, $_) } @{$links{$page}}) {
					if (length $link &&
					    (! exists $oldlinks{$page} ||
					     ! grep { bestlink($page, $_) eq $link } @{$oldlinks{$page}})) {
						$linkchanged{$link}=1;
					}
				}
			}
			# Links present before but not now.
			if (exists $oldlinks{$page}) {
				foreach my $link (map { bestlink($page, $_) } @{$oldlinks{$page}}) {
					if (length $link &&
					    (! exists $links{$page} ||
					     ! grep { bestlink($page, $_) eq $link } @{$links{$page}})) {
						$linkchanged{$link}=1;
					}
				}
			}
		}
		foreach my $link (keys %linkchanged) {
			my $linkfile=$pagesources{$link};
			if (defined $linkfile) {
				debug("rendering $linkfile, to update its backlinks");
				render($linkfile);
				$rendered{$linkfile}=1;
			}
		}
	}

	# Finally, keep the hyperestraier full-text index in sync with
	# whatever was rendered or deleted.
	if ($config{hyperestraier} && (%rendered || @del)) {
		debug("updating hyperestraier search index");
		if (%rendered) {
			estcmd("gather -cm -bc -cl -sd",
				map { $config{destdir}."/".$renderedfiles{pagename($_)} }
				keys %rendered);
		}
		if (@del) {
			estcmd("purge -cl");
		}

		debug("generating hyperestraier cgi config");
		estcfg();
	}
} #}}}
|
|
|
|
|
|
|
|
1
|