#!/usr/bin/perl

package IkiWiki;

use warnings;
use strict;
use IkiWiki;

my (%backlinks, %rendered, %scanned);
our %brokenlinks;
my $links_calculated=0;
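
# Calculates the backlinks and brokenlinks hashes from %links.
# Memoized; does nothing once links have been calculated.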
sub calculate_links () {
	return if $links_calculated;
	%backlinks=%brokenlinks=();
	foreach my $page (keys %links) {
		foreach my $link (@{$links{$page}}) {
			my $bestlink=bestlink($page, $link);
			if (length $bestlink) {
				$backlinks{$bestlink}{$page}=1
					if $bestlink ne $page;
			}
			else {
				push @{$brokenlinks{$link}}, $page;
			}
		}
	}
	$links_calculated=1;
}
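
# Returns the names of all pages that link to the given page.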
sub backlink_pages ($) {
	my $page=shift;
	calculate_links();
	return keys %{$backlinks{$page}};
}
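
# Returns the backlinks of a page as a list of hashes with url and
# page fields, with common directory prefixes trimmed from the titles.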
sub backlinks ($) {
	my $page=shift;

	my @links;
	foreach my $p (backlink_pages($page)) {
		my $href=urlto($p, $page);

		# Trim common dir prefixes from both pages.
		my $p_trimmed=$p;
		my $page_trimmed=$page;
		my $dir;
		1 while (($dir)=$page_trimmed=~m!^([^/]+/)!) &&
		        defined $dir &&
		        $p_trimmed=~s/^\Q$dir\E// &&
		        $page_trimmed=~s/^\Q$dir\E//;

		push @links, { url => $href, page => pagetitle($p_trimmed) };
	}
	return @links;
}
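
# Fills out the page template for a page and its htmlized content,
# running the indexhtml, templatefile, pagetemplate, and format hooks,
# and returns the full html of the page.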
sub genpage ($$) {
	my $page=shift;
	my $content=shift;

	run_hooks(indexhtml => sub {
		shift->(page => $page, destpage => $page, content => $content);
	});

	my $templatefile;
	run_hooks(templatefile => sub {
		return if defined $templatefile;
		my $file=shift->(page => $page);
		if (defined $file && defined template_file($file)) {
			$templatefile=$file;
		}
	});
	my $template;
	if (defined $templatefile) {
		$template=template_depends($templatefile, $page,
			blind_cache => 1);
	}
	else {
		# no explicit depends as special case
		$template=template('page.tmpl',
			blind_cache => 1);
	}

	my $actions=0;
	if (length $config{cgiurl}) {
		if (IkiWiki->can("cgi_editpage")) {
			$template->param(editurl => cgiurl(do => "edit", page => $page));
			$actions++;
		}
	}
	if (defined $config{historyurl} && length $config{historyurl}) {
		my $u=$config{historyurl};
		my $p=uri_escape_utf8($pagesources{$page}, '^A-Za-z0-9\-\._~/');
		$u=~s/\[\[file\]\]/$p/g;
		$template->param(historyurl => $u);
		$actions++;
	}
	if ($config{discussion}) {
		if ($page !~ /.*\/\Q$config{discussionpage}\E$/i &&
		    (length $config{cgiurl} ||
		     exists $links{$page."/".$config{discussionpage}})) {
			$template->param(discussionlink => htmllink($page, $page, $config{discussionpage}, noimageinline => 1, forcesubpage => 1));
			$actions++;
		}
	}
	if ($actions) {
		$template->param(have_actions => 1);
	}
	templateactions($template, $page);

	my @backlinks=sort { $a->{page} cmp $b->{page} } backlinks($page);
	my ($backlinks, $more_backlinks);
	if (@backlinks <= $config{numbacklinks} || ! $config{numbacklinks}) {
		$backlinks=\@backlinks;
		$more_backlinks=[];
	}
	else {
		$backlinks=[@backlinks[0..$config{numbacklinks}-1]];
		$more_backlinks=[@backlinks[$config{numbacklinks}..$#backlinks]];
	}

	$template->param(
		title => $page eq 'index'
			? $config{wikiname}
			: pagetitle(basename($page)),
		wikiname => $config{wikiname},
		content => $content,
		backlinks => $backlinks,
		more_backlinks => $more_backlinks,
		mtime => displaytime($pagemtime{$page}),
		ctime => displaytime($pagectime{$page}, undef, 1),
		baseurl => baseurl($page),
		html5 => $config{html5},
		responsive_layout => $config{responsive_layout},
	);

	run_hooks(pagetemplate => sub {
		shift->(page => $page, destpage => $page, template => $template);
	});

	$content=$template->output;

	run_hooks(format => sub {
		$content=shift->(
			page => $page,
			content => $content,
		);
	});

	return $content;
}
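
# Scans a source file for links and other metadata, running the scan
# hooks. Each file is only scanned once, and only during the scan
# phase.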
sub scan ($) {
	my $file=shift;
	return if $phase > PHASE_SCAN || $scanned{$file};
	$scanned{$file}=1;

	debug(sprintf(gettext("scanning %s"), $file));

	my $type=pagetype($file);
	if (defined $type) {
		my $srcfile=srcfile($file);
		my $content=readfile($srcfile);
		my $page=pagename($file);
		will_render($page, htmlpage($page), 1);

		if ($config{discussion}) {
			# Discussion links are a special case since they're
			# not in the text of the page, but on its template.
			$links{$page}=[ $page."/".lc($config{discussionpage}) ];
		}
		else {
			$links{$page}=[];
		}
		delete $typedlinks{$page};

		# Preprocess in scan-only mode.
		preprocess($page, $page, $content, 1);

		run_hooks(scan => sub {
			shift->(
				page => $page,
				content => $content,
			);
		});
	}
	else {
		will_render($file, $file, 1);
	}
}
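
# Copies data between two already opened file handles, using large
# block reads and retrying when a read is interrupted by a signal.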
sub fast_file_copy (@) {
	my $srcfile=shift;
	my $destfile=shift;
	my $srcfd=shift;
	my $destfd=shift;
	my $cleanup=shift;

	my $blksize = 16384;
	my ($len, $buf, $written);
	while ($len = sysread $srcfd, $buf, $blksize) {
		if (! defined $len) {
			next if $! =~ /^Interrupted/;
			error("failed to read $srcfile: $!", $cleanup);
		}
		my $offset = 0;
		while ($len) {
			defined($written = syswrite $destfd, $buf, $len, $offset)
				or error("failed to write $destfile: $!", $cleanup);
			$len -= $written;
			$offset += $written;
		}
	}
}
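
# Renders a file into the destdir: pages are htmlized and run through
# genpage; other files are hardlinked (if enabled) or copied verbatim.
# Each file is only rendered once per refresh.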
sub render ($$) {
	my $file=shift;
	return if $rendered{$file};
	debug(shift);
	$rendered{$file}=1;

	my $type=pagetype($file);
	my $srcfile=srcfile($file);
	if (defined $type) {
		my $page=pagename($file);
		delete $depends{$page};
		delete $depends_simple{$page};
		will_render($page, htmlpage($page), 1);
		return if $type=~/^_/;

		my $content=htmlize($page, $page, $type,
			linkify($page, $page,
			preprocess($page, $page,
			filter($page, $page,
			readfile($srcfile)))));

		my $output=htmlpage($page);
		writefile($output, $config{destdir}, genpage($page, $content));
	}
	else {
		delete $depends{$file};
		delete $depends_simple{$file};
		will_render($file, $file, 1);

		if ($config{hardlink}) {
			# only hardlink if owned by same user
			my @stat=stat($srcfile);
			if ($stat[4] == $>) {
				prep_writefile($file, $config{destdir});
				unlink($config{destdir}."/".$file);
				if (link($srcfile, $config{destdir}."/".$file)) {
					return;
				}
			}
			# if hardlink fails, fall back to copying
		}

		my $srcfd=readfile($srcfile, 1, 1);
		writefile($file, $config{destdir}, undef, 1, sub {
			fast_file_copy($srcfile, $file, $srcfd, @_);
		});
	}
}
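
# Removes a file, and then any parent directories that are left empty,
# stopping at the optional second parameter.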
sub prune ($;$) {
	my $file=shift;
	my $up_to=shift;

	unlink($file);
	my $dir=dirname($file);
	while ((! defined $up_to || $dir =~ m{^\Q$up_to\E\/}) && rmdir($dir)) {
		$dir=dirname($dir);
	}
}

sub srcdir_check () {
	# security check, avoid following symlinks in the srcdir path by default
	my $test=$config{srcdir};
	while (length $test) {
		if (-l $test && ! $config{allow_symlinks_before_srcdir}) {
			error(sprintf(gettext("symlink found in srcdir path (%s) -- set allow_symlinks_before_srcdir to allow this"), $test));
		}
		unless ($test=~s/\/+$//) {
			$test=dirname($test);
		}
	}
}

# Finds all files in the srcdir, and the underlaydirs.
# Returns the files, and their corresponding pages.
#
# When run in only_underlay mode, adds only the underlay files to
# the files and pages passed in.
sub find_src_files (;$$$) {
	my $only_underlay=shift;
	my @files;
	if (defined $_[0]) {
		@files=@{shift()};
	}
	my %pages;
	if (defined $_[0]) {
		%pages=%{shift()};
	}

	eval q{use File::Find};
	error($@) if $@;
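
	# File::Find in no_chdir mode joins the directory onto each
	# filename before it is decoded from utf-8, which breaks if the
	# srcdir path itself has the unicode flag set. So instead of
	# passing the srcdir to find, chdir into each directory and pass
	# "." (pure ascii), chdirring back afterwards since configured
	# paths may be relative. This is safe because srcdir_check has
	# refused symlinks in the srcdir path.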
	eval q{use Cwd};
	die $@ if $@;
	my $origdir=getcwd();
	my $abssrcdir=Cwd::abs_path($config{srcdir});

	@IkiWiki::underlayfiles=();

	my ($page, $underlay);
	my $helper=sub {
		my $file=decode_utf8($_);

		return if -l $file || -d _;
		$file=~s/^\.\///;
		return if ! length $file;
		$page = pagename($file);
		if (! exists $pagesources{$page} &&
		    file_pruned($file)) {
			$File::Find::prune=1;
			return;
		}

		my ($f) = $file =~ /$config{wiki_file_regexp}/; # untaint
		if (! defined $f) {
			warn(sprintf(gettext("skipping bad filename %s"), $file)."\n");
			return;
		}

		if ($underlay) {
			# avoid underlaydir override attacks; see security.mdwn
			if (! -l "$abssrcdir/$f" && ! -e _) {
				if (! $pages{$page}) {
					push @files, $f;
					push @IkiWiki::underlayfiles, $f;
					$pages{$page}=1;
				}
			}
		}
		else {
			push @files, $f;
			if ($pages{$page}) {
				debug(sprintf(gettext("%s has multiple possible source pages"), $page));
			}
			$pages{$page}=1;
		}
	};

	unless ($only_underlay) {
		chdir($config{srcdir}) || die "chdir $config{srcdir}: $!";
		find({
			no_chdir => 1,
			wanted => $helper,
		}, '.');
		chdir($origdir) || die "chdir $origdir: $!";
	}

	$underlay=1;
	foreach (@{$config{underlaydirs}}, $config{underlaydir}) {
		if (chdir($_)) {
			find({
				no_chdir => 1,
				wanted => $helper,
			}, '.');
			chdir($origdir) || die "chdir: $!";
		}
	}

	return \@files, \%pages;
}

# Given a hash of files that have changed, and a hash of files that were
# deleted, should return the same results as find_src_files, with the same
# sanity checks. But a lot faster!
sub process_changed_files ($$) {
	my $changed_raw=shift;
	my $deleted_raw=shift;

	my @files;
	my %pages;

	foreach my $file (keys %$changed_raw) {
		my $page = pagename($file);
		next if ! exists $pagesources{$page} && file_pruned($file);
		my ($f) = $file =~ /$config{wiki_file_regexp}/; # untaint
		if (! defined $f) {
			warn(sprintf(gettext("skipping bad filename %s"), $file)."\n");
			next;
		}
		push @files, $f;
		if ($pages{$page}) {
			debug(sprintf(gettext("%s has multiple possible source pages"), $page));
		}
		$pages{$page}=1;
	}

	# So far, we only have the changed files. Now add in all the old
	# files that were not changed or deleted, excluding ones that came
	# from the underlay.
	my %old_underlay;
	foreach my $f (@IkiWiki::underlayfiles) {
		$old_underlay{$f}=1;
	}
	foreach my $page (keys %pagesources) {
		my $f=$pagesources{$page};
		unless ($old_underlay{$f} || exists $pages{$page} || exists $deleted_raw->{$f}) {
			$pages{$page}=1;
			push @files, $f;
		}
	}

	# add in the underlay
	find_src_files(1, \@files, \%pages);
}
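
# Given a list of source files, returns the new normal and internal
# files among them, recording each page's source file and initializing
# its ctime (queried from the rcs when configured).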
sub find_new_files ($) {
	my $files=shift;
	my @new;
	my @internal_new;

	my $times_noted;

	foreach my $file (@$files) {
		my $page=pagename($file);

		if ($config{rcs} && $config{gettime} &&
		    -e "$config{srcdir}/$file") {
			if (! $times_noted) {
				debug(sprintf(gettext("querying %s for file creation and modification times.."), $config{rcs}));
				$times_noted=1;
			}

			eval {
				my $ctime=rcs_getctime($file);
				if ($ctime > 0) {
					$pagectime{$page}=$ctime;
				}
			};
			if ($@) {
				print STDERR $@;
			}
			my $mtime;
			eval {
				$mtime=rcs_getmtime($file);
			};
			if ($@) {
				print STDERR $@;
			}
			elsif ($mtime > 0) {
				utime($mtime, $mtime, "$config{srcdir}/$file");
			}
		}

		if (exists $pagesources{$page} && $pagesources{$page} ne $file) {
			# the page has changed its type
			$forcerebuild{$page}=1;
		}
		$pagesources{$page}=$file;
		if (! $pagemtime{$page}) {
			if (isinternal($page)) {
				push @internal_new, $file;
			}
			else {
				push @new, $file;
			}
			$pagecase{lc $page}=$page;
			if (! exists $pagectime{$page}) {
				my $ctime=(srcfile_stat($file, 1))[10];
				$pagectime{$page}=$ctime if defined $ctime;
			}
		}
	}

	return \@new, \@internal_new;
}
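
# Given the hash of all current pages, returns the source files of
# normal and internal pages that no longer exist, resetting their
# stored state. (The files are actually removed later, by remove_del.)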
sub find_del_files ($) {
	my $pages=shift;
	my @del;
	my @internal_del;

	foreach my $page (keys %pagesources) {
		if (! $pages->{$page}) {
			if (isinternal($page)) {
				push @internal_del, $pagesources{$page};
			}
			else {
				push @del, $pagesources{$page};
			}
			$links{$page}=[];
			delete $typedlinks{$page};
			$renderedfiles{$page}=[];
			$pagemtime{$page}=0;
		}
	}

	return \@del, \@internal_del;
}
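
# Removes the rendered files and remaining state of deleted pages.
# This is kept separate from find_del_files so it can be run after
# backlinks are calculated; backlink calculation still sees the
# deleted pages, but everything afterwards does not.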
sub remove_del (@) {
	foreach my $file (@_) {
		my $page=pagename($file);
		if (! isinternal($page)) {
			debug(sprintf(gettext("removing obsolete %s"), $page));
		}

		foreach my $old (@{$oldrenderedfiles{$page}}) {
			prune($config{destdir}."/".$old, $config{destdir});
		}

		foreach my $source (keys %destsources) {
			if ($destsources{$source} eq $page) {
				delete $destsources{$source};
			}
		}

		delete $pagecase{lc $page};
		$delpagesources{$page}=$pagesources{$page};
		delete $pagesources{$page};
	}
}
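
# Given a list of source files, returns the normal and internal files
# that were modified (or force-rebuilt) since they were last rendered.
# Changed internal pages are scanned here as a side effect.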
sub find_changed ($) {
	my $files=shift;
	my @changed;
	my @internal_changed;
	foreach my $file (@$files) {
		my $page=pagename($file);
		my ($srcfile, @stat)=srcfile_stat($file, 1);
		if (defined $srcfile &&
		    (! exists $pagemtime{$page} ||
		     $stat[9] > $pagemtime{$page} ||
		     $forcerebuild{$page})) {
			$pagemtime{$page}=$stat[9];

			if (isinternal($page)) {
				my $content = readfile($srcfile);

				# Preprocess internal page in scan-only mode.
				preprocess($page, $page, $content, 1);

				run_hooks(scan => sub {
					shift->(
						page => $page,
						content => $content,
					);
				});

				push @internal_changed, $file;
			}
			else {
				push @changed, $file;
			}
		}
	}
	return \@changed, \@internal_changed;
}
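
# For each changed or deleted page, records what its old links
# resolved to, so changed link targets can be detected later.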
sub calculate_old_links ($$) {
	my ($changed, $del)=@_;
	my %oldlink_targets;
	foreach my $file (@$changed, @$del) {
		my $page=pagename($file);
		if (exists $oldlinks{$page}) {
			foreach my $l (@{$oldlinks{$page}}) {
				$oldlink_targets{$page}{$l}=bestlink($page, $l);
			}
		}
	}
	return \%oldlink_targets;
}
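
# Clears the dependency and rendered-files state of an internal page,
# which has no html output of its own.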
sub derender_internal ($) {
	my $file=shift;
	my $page=pagename($file);
	delete $depends{$page};
	delete $depends_simple{$page};
	foreach my $old (@{$renderedfiles{$page}}) {
		delete $destsources{$old};
	}
	$renderedfiles{$page}=[];
}
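
# Rebuilds all pages that link to the page provided by the given file.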
sub render_linkers ($) {
	my $f=shift;
	my $p=pagename($f);
	foreach my $page (keys %{$backlinks{$p}}) {
		my $file=$pagesources{$page};
		render($file, sprintf(gettext("building %s, which links to %s"), $file, $p));
	}
}
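
# Removes output files that were rendered before but are no longer
# produced by their page.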
sub remove_unrendered () {
	foreach my $src (keys %rendered) {
		my $page=pagename($src);
		foreach my $file (@{$oldrenderedfiles{$page}}) {
			if (! grep { $_ eq $file } @{$renderedfiles{$page}}) {
				debug(sprintf(gettext("removing %s, no longer built by %s"), $file, $page));
				prune($config{destdir}."/".$file, $config{destdir});
			}
		}
	}
}
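
# Returns true if the two typed link hashes differ in any way.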
sub link_types_changed ($$) {
	# each is of the form { type => { link => 1 } }
	my $new = shift;
	my $old = shift;

	return 0 if !defined $new && !defined $old;
	return 1 if (!defined $new && %$old) || (!defined $old && %$new);

	while (my ($type, $links) = each %$new) {
		foreach my $link (keys %$links) {
			return 1 unless exists $old->{$type}{$link};
		}
	}

	while (my ($type, $links) = each %$old) {
		foreach my $link (keys %$links) {
			return 1 unless exists $new->{$type}{$link};
		}
	}

	return 0;
}
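
# Compares each changed or deleted page's links against the old link
# targets, returning the pages whose backlinks changed and the pages
# whose outgoing links changed.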
sub calculate_changed_links ($$$) {
	my ($changed, $del, $oldlink_targets)=@_;

	my (%backlinkchanged, %linkchangers);

	foreach my $file (@$changed, @$del) {
		my $page=pagename($file);

		if (exists $links{$page}) {
			foreach my $l (@{$links{$page}}) {
				my $target=bestlink($page, $l);
				if (! exists $oldlink_targets->{$page}{$l} ||
				    $target ne $oldlink_targets->{$page}{$l}) {
					$backlinkchanged{$target}=1;
					$linkchangers{lc($page)}=1;
				}
				delete $oldlink_targets->{$page}{$l};
			}
		}
		if (exists $oldlink_targets->{$page} &&
		    %{$oldlink_targets->{$page}}) {
			foreach my $target (values %{$oldlink_targets->{$page}}) {
				$backlinkchanged{$target}=1;
			}
			$linkchangers{lc($page)}=1;
		}

		# we currently assume that changing the type of a link doesn't
		# change backlinks
		if (!exists $linkchangers{lc($page)}) {
			if (link_types_changed($typedlinks{$page}, $oldtypedlinks{$page})) {
				$linkchangers{lc($page)}=1;
			}
		}
	}

	return \%backlinkchanged, \%linkchangers;
}
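
# Renders any not yet rendered file whose registered dependencies
# (content, presence, or links) match something that changed this run.
# Returns 1 after building a file, so the caller loops until nothing
# more needs building.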
sub render_dependent ($$$$$$$) {
	my ($files, $new, $internal_new, $del, $internal_del,
		$internal_changed, $linkchangers)=@_;

	my @changed=(keys %rendered, @$del);
	my @exists_changed=(@$new, @$del);

	my %lc_changed = map { lc(pagename($_)) => 1 } @changed;
	my %lc_exists_changed = map { lc(pagename($_)) => 1 } @exists_changed;

	foreach my $p ("templates/page.tmpl", keys %{$depends_simple{""}}) {
		if ($rendered{$p} || grep { $_ eq $p } @$del) {
			foreach my $f (@$files) {
				next if $rendered{$f};
				render($f, sprintf(gettext("building %s, which depends on %s"), $f, $p));
			}
			return 0;
		}
	}

	foreach my $f (@$files) {
		next if $rendered{$f};
		my $p=pagename($f);
		my $reason = undef;

		if (exists $depends_simple{$p} && ! defined $reason) {
			foreach my $d (keys %{$depends_simple{$p}}) {
				if (($depends_simple{$p}{$d} & $IkiWiki::DEPEND_CONTENT &&
				     $lc_changed{$d})
				    ||
				    ($depends_simple{$p}{$d} & $IkiWiki::DEPEND_PRESENCE &&
				     $lc_exists_changed{$d})
				    ||
				    ($depends_simple{$p}{$d} & $IkiWiki::DEPEND_LINKS &&
				     $linkchangers->{$d})
				) {
					$reason = $d;
					last;
				}
			}
		}

		if (exists $depends{$p} && ! defined $reason) {
			foreach my $dep (keys %{$depends{$p}}) {
				my $sub=pagespec_translate($dep);
				next unless defined $sub;

				# only consider internal files
				# if the page explicitly depends
				# on such files
				my $internal_dep=$dep =~ /(?:internal|comment|comment_pending)\(/;

				my $in=sub {
					my $list=shift;
					my $type=shift;
					foreach my $file (@$list) {
						next if $file eq $f;
						my $page=pagename($file);
						if ($sub->($page, location => $p)) {
							if ($type == $IkiWiki::DEPEND_LINKS) {
								next unless $linkchangers->{lc($page)};
							}
							$reason=$page;
							return 1;
						}
					}
					return undef;
				};

				if ($depends{$p}{$dep} & $IkiWiki::DEPEND_CONTENT) {
					last if $in->(\@changed, $IkiWiki::DEPEND_CONTENT);
					last if $internal_dep && (
						$in->($internal_new, $IkiWiki::DEPEND_CONTENT) ||
						$in->($internal_del, $IkiWiki::DEPEND_CONTENT) ||
						$in->($internal_changed, $IkiWiki::DEPEND_CONTENT)
					);
				}
				if ($depends{$p}{$dep} & $IkiWiki::DEPEND_PRESENCE) {
					last if $in->(\@exists_changed, $IkiWiki::DEPEND_PRESENCE);
					last if $internal_dep && (
						$in->($internal_new, $IkiWiki::DEPEND_PRESENCE) ||
						$in->($internal_del, $IkiWiki::DEPEND_PRESENCE)
					);
				}
				if ($depends{$p}{$dep} & $IkiWiki::DEPEND_LINKS) {
					last if $in->(\@changed, $IkiWiki::DEPEND_LINKS);
					last if $internal_dep && (
						$in->($internal_new, $IkiWiki::DEPEND_LINKS) ||
						$in->($internal_del, $IkiWiki::DEPEND_LINKS) ||
						$in->($internal_changed, $IkiWiki::DEPEND_LINKS)
					);
				}
			}
		}

		if (defined $reason) {
			render($f, sprintf(gettext("building %s, which depends on %s"), $f, $reason));
			return 1;
		}
	}

	return 0;
}
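
# Rebuilds the pages whose backlink lists changed, to update them.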
sub render_backlinks ($) {
	my $backlinkchanged=shift;
	foreach my $link (keys %$backlinkchanged) {
		my $linkfile=$pagesources{$link};
		if (defined $linkfile) {
			render($linkfile, sprintf(gettext("building %s, to update its backlinks"), $linkfile));
		}
	}
}
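
# Runs a plugin's generator for an autofile, unless the file already
# exists, was deleted, or has ever been tried before. Returns 1 if the
# file was generated.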
sub gen_autofile ($$$) {
	my $autofile=shift;
	my $pages=shift;
	my $del=shift;

	if (file_pruned($autofile)) {
		return;
	}

	my ($file)="$config{srcdir}/$autofile" =~ /$config{wiki_file_regexp}/; # untaint
	if (! defined $file) {
		return;
	}

	# Remember autofiles that were tried, and never try them again later.
	if (exists $wikistate{$autofiles{$autofile}{plugin}}{autofile}{$autofile}) {
		return;
	}
	$wikistate{$autofiles{$autofile}{plugin}}{autofile}{$autofile}=1;

	if (srcfile($autofile, 1) || file_pruned($autofile)) {
		return;
	}

	if (-l $file || -d _ || -e _) {
		return;
	}

	my $page = pagename($file);
	if ($pages->{$page}) {
		return;
	}

	if (grep { $_ eq $autofile } @$del) {
		return;
	}

	$autofiles{$autofile}{generator}->();
	$pages->{$page}=1;
	return 1;
}
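
# The main rebuild loop: finds new, changed, and deleted files, scans
# them all, and then renders everything affected.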
sub refresh () {
	$phase = PHASE_SCAN;

	srcdir_check();
	run_hooks(refresh => sub { shift->() });
	my ($files, $pages, $new, $internal_new, $del, $internal_del, $changed, $internal_changed);
	my $want_find_changes=$config{only_committed_changes} &&
		exists $IkiWiki::hooks{rcs}{rcs_find_changes} &&
		exists $IkiWiki::hooks{rcs}{rcs_get_current_rev};
	if (! $config{rebuild} && $want_find_changes && defined $IkiWiki::lastrev && length $IkiWiki::lastrev) {
		my ($changed_raw, $del_raw);
		($changed_raw, $del_raw, $IkiWiki::lastrev) = $IkiWiki::hooks{rcs}{rcs_find_changes}{call}->($IkiWiki::lastrev);
		($files, $pages)=process_changed_files($changed_raw, $del_raw);
	}
	else {
		($files, $pages)=find_src_files();
	}
	if ($want_find_changes) {
		if (! defined($IkiWiki::lastrev) || ! length $IkiWiki::lastrev) {
			$IkiWiki::lastrev=$IkiWiki::hooks{rcs}{rcs_get_current_rev}{call}->();
		}
	}
	($new, $internal_new)=find_new_files($files);
	($del, $internal_del)=find_del_files($pages);
	($changed, $internal_changed)=find_changed($files);
	my %existingfiles;
	run_hooks(needsbuild => sub {
		my $ret=shift->($changed, [@$del, @$internal_del]);
		if (ref $ret eq 'ARRAY' && $ret != $changed) {
			if (! %existingfiles) {
				foreach my $f (@$files) {
					$existingfiles{$f}=1;
				}
			}
			@$changed=grep $existingfiles{$_}, @$ret;
		}
	});
	my $oldlink_targets=calculate_old_links($changed, $del);

	foreach my $file (@$changed) {
		scan($file);
	}

	foreach my $autofile (keys %autofiles) {
		if (gen_autofile($autofile, $pages, $del)) {
			push @{$files}, $autofile;
			push @{$new}, $autofile if find_new_files([$autofile]);
			push @{$changed}, $autofile if find_changed([$autofile]);

			scan($autofile);
		}
	}

	calculate_links();

	# At this point it becomes OK to start matching pagespecs.
	$phase = PHASE_RENDER;

	# Save some memory: we no longer need to keep track of which pages
	# we've scanned
	%scanned = ();
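
	# Now that backlinks have been calculated with the deleted pages
	# still in view, the deleted pages' state can be removed.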
	remove_del(@$del, @$internal_del);

	foreach my $file (@$changed) {
		render($file, sprintf(gettext("building %s"), $file));
	}
	foreach my $file (@$internal_new, @$internal_del, @$internal_changed) {
		derender_internal($file);
	}

	run_hooks(build_affected => sub {
		my %affected = shift->();
		while (my ($page, $message) = each %affected) {
			next unless exists $pagesources{$page};
			render($pagesources{$page}, $message);
		}
	});

	my ($backlinkchanged, $linkchangers)=calculate_changed_links($changed,
		$del, $oldlink_targets);

	foreach my $file (@$new, @$del) {
		render_linkers($file);
	}

	if (@$changed || @$internal_changed ||
	    @$del || @$internal_del || @$internal_new) {
		1 while render_dependent($files, $new, $internal_new,
			$del, $internal_del, $internal_changed,
			$linkchangers);
	}

	render_backlinks($backlinkchanged);
	remove_unrendered();

	if (@$del || @$internal_del) {
		run_hooks(delete => sub { shift->(@$del, @$internal_del) });
	}
	if (%rendered) {
		run_hooks(rendered => sub { shift->(keys %rendered) });
		run_hooks(change => sub { shift->(keys %rendered) }); # back-compat
	}
	my %all_changed = map { $_ => 1 }
		@$new, @$changed, @$del,
		@$internal_new, @$internal_changed, @$internal_del;
	run_hooks(changes => sub { shift->(keys %all_changed) });
}
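
# Removes all files previously rendered to the destdir.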
sub clean_rendered {
	lockwiki();
	loadindex();
	remove_unrendered();
	foreach my $page (keys %oldrenderedfiles) {
		foreach my $file (@{$oldrenderedfiles{$page}}) {
			prune($config{destdir}."/".$file, $config{destdir});
		}
	}
}
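
# Builds a single page and prints the html to stdout, for the --render
# command line mode.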
sub commandline_render () {
	lockwiki();
	loadindex();
	unlockwiki();

	# This function behaves as though it's in the render phase;
	# all other files are assumed to have been scanned last time.
	$phase = PHASE_RENDER;

	my $srcfile=possibly_foolish_untaint($config{render});
	my $file=$srcfile;
	$file=~s/\Q$config{srcdir}\E\/?//;

	my $type=pagetype($file);
	die sprintf(gettext("ikiwiki: cannot build %s"), $srcfile)."\n" unless defined $type;
	my $content=readfile($srcfile);
	my $page=pagename($file);
	$pagesources{$page}=$file;
	$content=filter($page, $page, $content);
	$content=preprocess($page, $page, $content);
	$content=linkify($page, $page, $content);
	$content=htmlize($page, $page, $type, $content);
	$pagemtime{$page}=(stat($srcfile))[9];
	$pagectime{$page}=$pagemtime{$page} if ! exists $pagectime{$page};

	print genpage($page, $content);
	exit 0;
}

1