Merge branch 'dependency-types'

Conflicts:
	doc/todo/dependency_types.mdwn
master
Joey Hess 2009-10-13 15:15:54 -04:00
commit ef7bddbc36
47 changed files with 1191 additions and 548 deletions

View File

@ -17,17 +17,23 @@ use vars qw{%config %links %oldlinks %pagemtime %pagectime %pagecase
%forcerebuild %loaded_plugins};
use Exporter q{import};
our @EXPORT = qw(hook debug error template htmlpage add_depends pagespec_match
pagespec_match_list bestlink htmllink readfile writefile
pagetype srcfile pagename displaytime will_render gettext urlto
targetpage add_underlay pagetitle titlepage linkpage
newpagefile inject add_link
our @EXPORT = qw(hook debug error template htmlpage deptype
add_depends pagespec_match pagespec_match_list bestlink
htmllink readfile writefile pagetype srcfile pagename
displaytime will_render gettext urlto targetpage
add_underlay pagetitle titlepage linkpage newpagefile
inject add_link
%config %links %pagestate %wikistate %renderedfiles
%pagesources %destsources);
our $VERSION = 3.00; # plugin interface version, next is ikiwiki version
our $version='unknown'; # VERSION_AUTOREPLACE done by Makefile, DNE
our $installdir='/usr'; # INSTALLDIR_AUTOREPLACE done by Makefile, DNE
# Page dependency types.
our $DEPEND_CONTENT=1;
our $DEPEND_PRESENCE=2;
our $DEPEND_LINKS=4;
# Optimisation.
use Memoize;
memoize("abs2rel");
@ -1523,18 +1529,28 @@ sub loadindex () {
$links{$page}=$d->{links};
$oldlinks{$page}=[@{$d->{links}}];
}
if (exists $d->{depends_simple}) {
if (ref $d->{depends_simple} eq 'ARRAY') {
# old format
$depends_simple{$page}={
map { $_ => 1 } @{$d->{depends_simple}}
};
}
elsif (exists $d->{depends_simple}) {
$depends_simple{$page}=$d->{depends_simple};
}
if (exists $d->{dependslist}) {
# old format
$depends{$page}={
map { $_ => 1 } @{$d->{dependslist}}
map { $_ => $DEPEND_CONTENT }
@{$d->{dependslist}}
};
}
elsif (exists $d->{depends} && ! ref $d->{depends}) {
# old format
$depends{$page}={$d->{depends} => $DEPEND_CONTENT };
}
elsif (exists $d->{depends}) {
$depends{$page}={$d->{depends} => 1};
$depends{$page}=$d->{depends};
}
if (exists $d->{state}) {
$pagestate{$page}=$d->{state};
@ -1580,11 +1596,11 @@ sub saveindex () {
};
if (exists $depends{$page}) {
$index{page}{$src}{dependslist} = [ keys %{$depends{$page}} ];
$index{page}{$src}{depends} = $depends{$page};
}
if (exists $depends_simple{$page}) {
$index{page}{$src}{depends_simple} = [ keys %{$depends_simple{$page}} ];
$index{page}{$src}{depends_simple} = $depends_simple{$page};
}
if (exists $pagestate{$page}) {
@ -1752,23 +1768,50 @@ sub rcs_receive () {
$hooks{rcs}{rcs_receive}{call}->();
}
sub add_depends ($$) {
sub add_depends ($$;$) {
my $page=shift;
my $pagespec=shift;
my $deptype=shift || $DEPEND_CONTENT;
# Is the pagespec a simple page name?
if ($pagespec =~ /$config{wiki_file_regexp}/ &&
$pagespec !~ /[\s*?()!]/) {
# a simple dependency, which can be matched by string eq
$depends_simple{$page}{lc $pagespec} = 1;
$pagespec !~ /[\s*?()!]/) {
$depends_simple{$page}{lc $pagespec} |= $deptype;
return 1;
}
return unless pagespec_valid($pagespec);
# Add explicit dependencies for influences.
my $sub=pagespec_translate($pagespec);
return if $@;
foreach my $p (keys %pagesources) {
my $r=$sub->($p, location => $page);
my $i=$r->influences;
foreach my $k (keys %$i) {
$depends_simple{$page}{lc $k} |= $i->{$k};
}
last if $r->influences_static;
}
$depends{$page}{$pagespec} = 1;
$depends{$page}{$pagespec} |= $deptype;
return 1;
}
sub deptype (@) {
my $deptype=0;
foreach my $type (@_) {
if ($type eq 'presence') {
$deptype |= $DEPEND_PRESENCE;
}
elsif ($type eq 'links') {
$deptype |= $DEPEND_LINKS;
}
elsif ($type eq 'content') {
$deptype |= $DEPEND_CONTENT;
}
}
return $deptype;
}
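# Illustrative sketch, not part of this diff: with the optional third
# argument to add_depends() and the deptype() helper above, a plugin that
# only needs to know whether matching pages exist can register a cheaper
# presence dependency, and dependency types can be combined as a bitmask:
#
#     add_depends($params{page}, "blog/posts/*", deptype("presence"));
#     add_depends($params{page}, "*", deptype("presence", "links"));
#
# Omitting the third argument keeps the old behaviour ($DEPEND_CONTENT).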
sub file_pruned ($;$) {
my $file=shift;
if (@_) {
@ -1876,10 +1919,10 @@ sub pagespec_translate ($) {
}gx) {
my $word=$1;
if (lc $word eq 'and') {
$code.=' &&';
$code.=' &';
}
elsif (lc $word eq 'or') {
$code.=' ||';
$code.=' |';
}
elsif ($word eq "(" || $word eq ")" || $word eq "!") {
$code.=' '.$word;
@ -1925,27 +1968,87 @@ sub pagespec_match ($$;@) {
}
sub pagespec_match_list ($$;@) {
my $pages=shift;
my $spec=shift;
my @params=@_;
my $page=shift;
my $pagespec=shift;
my %params=@_;
my $sub=pagespec_translate($spec);
error "syntax error in pagespec \"$spec\""
if $@ || ! defined $sub;
my @ret;
my $r;
foreach my $page (@$pages) {
$r=$sub->($page, @params);
push @ret, $page if $r;
# Backwards compatibility with old calling convention.
if (ref $page) {
print STDERR "warning: a plugin (".caller().") is using pagespec_match_list in an obsolete way, and needs to be updated\n";
$params{list}=$page;
$page=$params{location}; # ugh!
}
if (! @ret && defined $r && $r->isa("IkiWiki::ErrorReason")) {
error(sprintf(gettext("cannot match pages: %s"), $r));
my $sub=pagespec_translate($pagespec);
error "syntax error in pagespec \"$pagespec\""
if $@ || ! defined $sub;
my @candidates;
if (exists $params{list}) {
@candidates=exists $params{filter}
? grep { ! $params{filter}->($_) } @{$params{list}}
: @{$params{list}};
}
else {
return @ret;
@candidates=exists $params{filter}
? grep { ! $params{filter}->($_) } keys %pagesources
: keys %pagesources;
}
if (defined $params{sort}) {
my $f;
if ($params{sort} eq 'title') {
$f=sub { pagetitle(basename($a)) cmp pagetitle(basename($b)) };
}
elsif ($params{sort} eq 'title_natural') {
eval q{use Sort::Naturally};
if ($@) {
error(gettext("Sort::Naturally needed for title_natural sort"));
}
$f=sub { Sort::Naturally::ncmp(pagetitle(basename($a)), pagetitle(basename($b))) };
}
elsif ($params{sort} eq 'mtime') {
$f=sub { $pagemtime{$b} <=> $pagemtime{$a} };
}
elsif ($params{sort} eq 'age') {
$f=sub { $pagectime{$b} <=> $pagectime{$a} };
}
else {
error sprintf(gettext("unknown sort type %s"), $params{sort});
}
@candidates = sort { &$f } @candidates;
}
@candidates=reverse(@candidates) if $params{reverse};
$depends{$page}{$pagespec} |= ($params{deptype} || $DEPEND_CONTENT);
# clear params, remainder is passed to pagespec
my $num=$params{num};
delete @params{qw{num deptype reverse sort filter list}};
my @matches;
my $firstfail;
my $count=0;
my $accum=IkiWiki::SuccessReason->new();
foreach my $p (@candidates) {
my $r=$sub->($p, %params, location => $page);
error(sprintf(gettext("cannot match pages: %s"), $r))
if $r->isa("IkiWiki::ErrorReason");
$accum |= $r;
if ($r) {
push @matches, $p;
last if defined $num && ++$count == $num;
}
}
# Add simple dependencies for accumulated influences.
my $i=$accum->influences;
foreach my $k (keys %$i) {
$depends_simple{$page}{lc $k} |= $i->{$k};
}
return @matches;
}
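# Illustrative sketch, not part of this diff: the rewritten
# pagespec_match_list() takes the dependent page first, then the pagespec,
# then named options, and records the dependency itself, so callers no
# longer need a separate add_depends(). The option names below mirror the
# ones used by the converted plugins later in this commit:
#
#     my @posts=pagespec_match_list($params{page}, $params{pages},
#         deptype => deptype("presence"),    # dependency type to record
#         sort => "age",                     # age, mtime, title, title_natural
#         reverse => yesno($params{reverse}),
#         num => 10,                         # stop after ten matches
#         filter => sub { $_[0] eq $params{page} }, # skip the page itself
#     );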
sub pagespec_valid ($) {
@ -1965,37 +2068,66 @@ sub glob2re ($) {
package IkiWiki::FailReason;
use overload (
'""' => sub { ${$_[0]} },
'""' => sub { $_[0][0] },
'0+' => sub { 0 },
'!' => sub { bless $_[0], 'IkiWiki::SuccessReason'},
'&' => sub { $_[0]->merge_influences($_[1], 1); $_[0] },
'|' => sub { $_[1]->merge_influences($_[0]); $_[1] },
fallback => 1,
);
our @ISA = 'IkiWiki::SuccessReason';
package IkiWiki::SuccessReason;
use overload (
'""' => sub { $_[0][0] },
'0+' => sub { 1 },
'!' => sub { bless $_[0], 'IkiWiki::FailReason'},
'&' => sub { $_[1]->merge_influences($_[0], 1); $_[1] },
'|' => sub { $_[0]->merge_influences($_[1]); $_[0] },
fallback => 1,
);
sub new {
my $class = shift;
my $value = shift;
return bless \$value, $class;
return bless [$value, {@_}], $class;
}
sub influences {
my $this=shift;
$this->[1]={@_} if @_;
my %i=%{$this->[1]};
delete $i{""};
return \%i;
}
sub influences_static {
return ! $_[0][1]->{""};
}
sub merge_influences {
my $this=shift;
my $other=shift;
my $anded=shift;
if (! $anded || (($this || %{$this->[1]}) &&
($other || %{$other->[1]}))) {
foreach my $influence (keys %{$other->[1]}) {
$this->[1]{$influence} |= $other->[1]{$influence};
}
}
else {
# influence blocker
$this->[1]={};
}
}
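# Illustrative sketch, not part of this diff: match_* functions built on
# this API pass their influences to the constructor as page => deptype
# pairs after the reason string; the special "" key marks the influences
# as non-static, so add_depends() keeps probing other pages for more:
#
#     return IkiWiki::SuccessReason->new("$page links to $link",
#         $page => $IkiWiki::DEPEND_LINKS, "" => 1);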
package IkiWiki::ErrorReason;
our @ISA = 'IkiWiki::FailReason';
package IkiWiki::SuccessReason;
use overload (
'""' => sub { ${$_[0]} },
'0+' => sub { 1 },
'!' => sub { bless $_[0], 'IkiWiki::FailReason'},
fallback => 1,
);
sub new {
my $class = shift;
my $value = shift;
return bless \$value, $class;
};
package IkiWiki::PageSpec;
sub derel ($$) {
@ -2045,27 +2177,30 @@ sub match_link ($$;@) {
my $from=exists $params{location} ? $params{location} : '';
my $links = $IkiWiki::links{$page};
return IkiWiki::FailReason->new("$page has no links") unless $links && @{$links};
return IkiWiki::FailReason->new("$page has no links", "" => 1)
unless $links && @{$links};
my $bestlink = IkiWiki::bestlink($from, $link);
foreach my $p (@{$links}) {
if (length $bestlink) {
return IkiWiki::SuccessReason->new("$page links to $link")
return IkiWiki::SuccessReason->new("$page links to $link", $page => $IkiWiki::DEPEND_LINKS, "" => 1)
if $bestlink eq IkiWiki::bestlink($page, $p);
}
else {
return IkiWiki::SuccessReason->new("$page links to page $p matching $link")
return IkiWiki::SuccessReason->new("$page links to page $p matching $link", $page => $IkiWiki::DEPEND_LINKS, "" => 1)
if match_glob($p, $link, %params);
my ($p_rel)=$p=~/^\/?(.*)/;
$link=~s/^\///;
return IkiWiki::SuccessReason->new("$page links to page $p_rel matching $link")
return IkiWiki::SuccessReason->new("$page links to page $p_rel matching $link", $page => $IkiWiki::DEPEND_LINKS, "" => 1)
if match_glob($p_rel, $link, %params);
}
}
return IkiWiki::FailReason->new("$page does not link to $link");
return IkiWiki::FailReason->new("$page does not link to $link", "" => 1);
}
sub match_backlink ($$;@) {
return match_link($_[1], $_[0], @_);
my $ret=match_link($_[1], $_[0], @_);
$ret->influences($_[1] => $IkiWiki::DEPEND_LINKS);
return $ret;
}
sub match_created_before ($$;@) {
@ -2077,14 +2212,14 @@ sub match_created_before ($$;@) {
if (exists $IkiWiki::pagectime{$testpage}) {
if ($IkiWiki::pagectime{$page} < $IkiWiki::pagectime{$testpage}) {
return IkiWiki::SuccessReason->new("$page created before $testpage");
return IkiWiki::SuccessReason->new("$page created before $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
}
else {
return IkiWiki::FailReason->new("$page not created before $testpage");
return IkiWiki::FailReason->new("$page not created before $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
}
}
else {
return IkiWiki::ErrorReason->new("$testpage does not exist");
return IkiWiki::ErrorReason->new("$testpage does not exist", $testpage => $IkiWiki::DEPEND_PRESENCE);
}
}
@ -2097,14 +2232,14 @@ sub match_created_after ($$;@) {
if (exists $IkiWiki::pagectime{$testpage}) {
if ($IkiWiki::pagectime{$page} > $IkiWiki::pagectime{$testpage}) {
return IkiWiki::SuccessReason->new("$page created after $testpage");
return IkiWiki::SuccessReason->new("$page created after $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
}
else {
return IkiWiki::FailReason->new("$page not created after $testpage");
return IkiWiki::FailReason->new("$page not created after $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
}
}
else {
return IkiWiki::ErrorReason->new("$testpage does not exist");
return IkiWiki::ErrorReason->new("$testpage does not exist", $testpage => $IkiWiki::DEPEND_PRESENCE);
}
}

View File

@ -23,19 +23,15 @@ sub preprocess (@) {
my %params=@_;
$params{pages}="*" unless defined $params{pages};
# Needs to update whenever a page is added or removed, so
# register a dependency.
add_depends($params{page}, $params{pages});
my @broken;
foreach my $link (keys %IkiWiki::brokenlinks) {
next if $link =~ /.*\/\Q$config{discussionpage}\E/i && $config{discussion};
my @pages;
foreach my $page (@{$IkiWiki::brokenlinks{$link}}) {
push @pages, $page
if pagespec_match($page, $params{pages}, location => $params{page});
}
my @pages=pagespec_match_list($params{page}, $params{pages},
list => $IkiWiki::brokenlinks{$link},
# needs to update when links on a page change
deptype => deptype("links")
);
next unless @pages;
my $page=$IkiWiki::brokenlinks{$link}->[0];

View File

@ -24,8 +24,6 @@ use IkiWiki 3.00;
use Time::Local;
use POSIX;
my %cache;
my %linkcache;
my $time=time;
my @now=localtime($time);
@ -67,23 +65,50 @@ sub month_days {
sub format_month (@) {
my %params=@_;
my $pagespec = $params{pages};
my $year = $params{year};
my $month = $params{month};
my $pmonth = $params{pmonth};
my $nmonth = $params{nmonth};
my $pyear = $params{pyear};
my $nyear = $params{nyear};
my %linkcache;
foreach my $p (pagespec_match_list($params{page},
"creation_year($params{year}) and creation_month($params{month}) and ($params{pages})",
# add presence dependencies to update
# month calendar when pages are added/removed
deptype => deptype("presence"))) {
my $mtime = $IkiWiki::pagectime{$p};
my @date = localtime($mtime);
my $mday = $date[3];
my $month = $date[4] + 1;
my $year = $date[5] + 1900;
my $mtag = sprintf("%02d", $month);
# Only one posting per day is being linked to.
$linkcache{"$year/$mtag/$mday"} = $p;
}
my $pmonth = $params{month} - 1;
my $nmonth = $params{month} + 1;
my $pyear = $params{year};
my $nyear = $params{year};
# Adjust for January and December
if ($params{month} == 1) {
$pmonth = 12;
$pyear--;
}
if ($params{month} == 12) {
$nmonth = 1;
$nyear++;
}
# Add padding.
$pmonth=sprintf("%02d", $pmonth);
$nmonth=sprintf("%02d", $nmonth);
my @list;
my $calendar="\n";
# When did this month start?
my @monthstart = localtime(timelocal(0,0,0,1,$month-1,$year-1900));
my @monthstart = localtime(timelocal(0,0,0,1,$params{month}-1,$params{year}-1900));
my $future_dom = 0;
my $today = 0;
if ($year == $now[5]+1900 && $month == $now[4]+1) {
if ($params{year} == $now[5]+1900 && $params{month} == $now[4]+1) {
$future_dom = $now[3]+1;
$today = $now[3];
}
@ -99,24 +124,30 @@ sub format_month (@) {
# Calculate URLs for monthly archives.
my ($url, $purl, $nurl)=("$monthname",'','');
if (exists $cache{$pagespec}{"$year/$month"}) {
if (exists $pagesources{"$archivebase/$params{year}/$params{month}"}) {
$url = htmllink($params{page}, $params{destpage},
"$archivebase/$year/".sprintf("%02d", $month),
"$archivebase/$params{year}/".$params{month},
noimageinline => 1,
linktext => " $monthname ");
}
add_depends($params{page}, "$archivebase/$year/".sprintf("%02d", $month));
if (exists $cache{$pagespec}{"$pyear/$pmonth"}) {
add_depends($params{page}, "$archivebase/$params{year}/$params{month}",
deptype("presence"));
if (exists $pagesources{"$archivebase/$pyear/$pmonth"}) {
$purl = htmllink($params{page}, $params{destpage},
"$archivebase/$pyear/" . sprintf("%02d", $pmonth),
linktext => " $pmonthname ");
"$archivebase/$pyear/$pmonth",
noimageinline => 1,
linktext => "\&larr");
}
add_depends($params{page}, "$archivebase/$pyear/".sprintf("%02d", $pmonth));
if (exists $cache{$pagespec}{"$nyear/$nmonth"}) {
add_depends($params{page}, "$archivebase/$pyear/$pmonth",
deptype("presence"));
if (exists $pagesources{"$archivebase/$nyear/$nmonth"}) {
$nurl = htmllink($params{page}, $params{destpage},
"$archivebase/$nyear/" . sprintf("%02d", $nmonth),
linktext => " $nmonthname ");
"$archivebase/$nyear/$nmonth",
noimageinline => 1,
linktext => "\&rarr");
}
add_depends($params{page}, "$archivebase/$nyear/".sprintf("%02d", $nmonth));
add_depends($params{page}, "$archivebase/$nyear/$nmonth",
deptype("presence"));
# Start producing the month calendar
$calendar=<<EOF;
@ -137,7 +168,7 @@ EOF
my %downame;
my %dowabbr;
for my $dow ($week_start_day..$week_start_day+6) {
my @day=localtime(timelocal(0,0,0,$start_day++,$month-1,$year-1900));
my @day=localtime(timelocal(0,0,0,$start_day++,$params{month}-1,$params{year}-1900));
my $downame = POSIX::strftime("%A", @day);
my $dowabbr = POSIX::strftime("%a", @day);
$downame{$dow % 7}=$downame;
@ -158,7 +189,7 @@ EOF
# At this point, either the first is a week_start_day, in which case
# nothing has been printed, or else we are in the middle of a row.
for (my $day = 1; $day <= month_days(year => $year, month => $month);
for (my $day = 1; $day <= month_days(year => $params{year}, month => $params{month});
$day++, $wday++, $wday %= 7) {
# At this point, on a week_start_day, we close out a row,
# and start a new one -- unless it is week_start_day on the
@ -169,8 +200,7 @@ EOF
}
my $tag;
my $mtag = sprintf("%02d", $month);
if (defined $cache{$pagespec}{"$year/$mtag/$day"}) {
if (defined $linkcache{"$params{year}/$params{month}/$day"}) {
if ($day == $today) {
$tag='month-calendar-day-this-day';
}
@ -179,9 +209,9 @@ EOF
}
$calendar.=qq{\t\t<td class="$tag $downame{$wday}">};
$calendar.=htmllink($params{page}, $params{destpage},
pagename($linkcache{"$year/$mtag/$day"}),
"linktext" => "$day");
push @list, pagename($linkcache{"$year/$mtag/$day"});
$linkcache{"$params{year}/$params{month}/$day"},
noimageinline => 1,
"linktext" => "$day");
$calendar.=qq{</td>\n};
}
else {
@ -207,58 +237,62 @@ EOF
</table>
EOF
# Add dependencies to update the calendar whenever pages
# matching the pagespec are added or removed.
add_depends($params{page}, $params{pages});
# Explicitly add all currently linked pages as dependencies, so
# that if they are removed, the calendar will be sure to be updated.
foreach my $p (@list) {
add_depends($params{page}, $p);
}
return $calendar;
}
sub format_year (@) {
my %params=@_;
my @post_months;
foreach my $p (pagespec_match_list($params{page},
"creation_year($params{year}) and ($params{pages})",
# add presence dependencies to update
# year calendar's links to months when
# pages are added/removed
deptype => deptype("presence"))) {
my $mtime = $IkiWiki::pagectime{$p};
my @date = localtime($mtime);
my $month = $date[4] + 1;
my $pagespec = $params{pages};
my $year = $params{year};
my $month = $params{month};
my $pmonth = $params{pmonth};
my $nmonth = $params{nmonth};
my $pyear = $params{pyear};
my $nyear = $params{nyear};
$post_months[$month]++;
}
my $calendar="\n";
my $pyear = $params{year} - 1;
my $nyear = $params{year} + 1;
my $thisyear = $now[5]+1900;
my $future_month = 0;
$future_month = $now[4]+1 if ($year == $now[5]+1900);
$future_month = $now[4]+1 if $params{year} == $thisyear;
my $archivebase = 'archives';
$archivebase = $config{archivebase} if defined $config{archivebase};
$archivebase = $params{archivebase} if defined $params{archivebase};
# calculate URLs for previous and next years
my ($url, $purl, $nurl)=("$year",'','');
if (exists $cache{$pagespec}{"$year"}) {
my ($url, $purl, $nurl)=("$params{year}",'','');
if (exists $pagesources{"$archivebase/$params{year}"}) {
$url = htmllink($params{page}, $params{destpage},
"$archivebase/$year",
linktext => "$year");
"$archivebase/$params{year}",
noimageinline => 1,
linktext => "$params{year}");
}
add_depends($params{page}, "$archivebase/$year");
if (exists $cache{$pagespec}{"$pyear"}) {
add_depends($params{page}, "$archivebase/$params{year}", deptype("presence"));
if (exists $pagesources{"$archivebase/$pyear"}) {
$purl = htmllink($params{page}, $params{destpage},
"$archivebase/$pyear",
noimageinline => 1,
linktext => "\&larr;");
}
add_depends($params{page}, "$archivebase/$pyear");
if (exists $cache{$pagespec}{"$nyear"}) {
add_depends($params{page}, "$archivebase/$pyear", deptype("presence"));
if (exists $pagesources{"$archivebase/$nyear"}) {
$nurl = htmllink($params{page}, $params{destpage},
"$archivebase/$nyear",
noimageinline => 1,
linktext => "\&rarr;");
}
add_depends($params{page}, "$archivebase/$nyear");
add_depends($params{page}, "$archivebase/$nyear", deptype("presence"));
# Start producing the year calendar
$calendar=<<EOF;
@ -273,35 +307,32 @@ sub format_year (@) {
</tr>
EOF
for ($month = 1; $month <= 12; $month++) {
my @day=localtime(timelocal(0,0,0,15,$month-1,$year-1900));
for (my $month = 1; $month <= 12; $month++) {
my @day=localtime(timelocal(0,0,0,15,$month-1,$params{year}-1900));
my $murl;
my $monthname = POSIX::strftime("%B", @day);
my $monthabbr = POSIX::strftime("%b", @day);
$calendar.=qq{\t<tr>\n} if ($month % $params{months_per_row} == 1);
my $tag;
my $mtag=sprintf("%02d", $month);
if ($month == $params{month}) {
if ($cache{$pagespec}{"$year/$mtag"}) {
$tag = 'this_month_link';
}
else {
$tag = 'this_month_nolink';
}
if ($month == $params{month} && $thisyear == $params{year}) {
$tag = 'year-calendar-this-month';
}
elsif ($cache{$pagespec}{"$year/$mtag"}) {
$tag = 'month_link';
elsif ($pagesources{"$archivebase/$params{year}/$mtag"}) {
$tag = 'year-calendar-month-link';
}
elsif ($future_month && $month >= $future_month) {
$tag = 'month_future';
$tag = 'year-calendar-month-future';
}
else {
$tag = 'month_nolink';
$tag = 'year-calendar-month-nolink';
}
if ($cache{$pagespec}{"$year/$mtag"}) {
if ($pagesources{"$archivebase/$params{year}/$mtag"} &&
$post_months[$mtag]) {
$murl = htmllink($params{page}, $params{destpage},
"$archivebase/$year/$mtag",
"$archivebase/$params{year}/$mtag",
noimageinline => 1,
linktext => "$monthabbr");
$calendar.=qq{\t<td class="$tag">};
$calendar.=$murl;
@ -310,7 +341,8 @@ EOF
else {
$calendar.=qq{\t<td class="$tag">$monthabbr</td>\n};
}
add_depends($params{page}, "$archivebase/$year/$mtag");
add_depends($params{page}, "$archivebase/$params{year}/$mtag",
deptype("presence"));
$calendar.=qq{\t</tr>\n} if ($month % $params{months_per_row} == 0);
}
@ -324,74 +356,57 @@ EOF
sub preprocess (@) {
my %params=@_;
my $thisyear=1900 + $now[5];
my $thismonth=1 + $now[4];
$params{pages} = "*" unless defined $params{pages};
$params{type} = "month" unless defined $params{type};
$params{month} = sprintf("%02d", $params{month}) if defined $params{month};
$params{week_start_day} = 0 unless defined $params{week_start_day};
$params{months_per_row} = 3 unless defined $params{months_per_row};
$params{year} = $thisyear unless defined $params{year};
$params{month} = $thismonth unless defined $params{month};
if (! defined $params{year} || ! defined $params{month}) {
# Record that the calendar next changes at midnight.
$params{month} = sprintf("%02d", $params{month});
if ($params{type} eq 'month' && $params{year} == $thisyear
&& $params{month} == $thismonth) {
# calendar for current month, updates next midnight
$pagestate{$params{destpage}}{calendar}{nextchange}=($time
+ (60 - $now[0]) # seconds
+ (59 - $now[1]) * 60 # minutes
+ (23 - $now[2]) * 60 * 60 # hours
);
$params{year} = 1900 + $now[5] unless defined $params{year};
$params{month} = 1 + $now[4] unless defined $params{month};
}
elsif ($params{type} eq 'month' &&
(($params{year} == $thisyear && $params{month} > $thismonth) ||
$params{year} > $thisyear)) {
# calendar for upcoming month, updates 1st of that month
$pagestate{$params{destpage}}{calendar}{nextchange}=
timelocal(0, 0, 0, 1, $params{month}-1, $params{year});
}
elsif ($params{type} eq 'year' && $params{year} == $thisyear) {
# calendar for current year, updates 1st of next month
$pagestate{$params{destpage}}{calendar}{nextchange}=
timelocal(0, 0, 0, 1, $thismonth+1-1, $params{year});
}
elsif ($params{type} eq 'year' && $params{year} > $thisyear) {
# calendar for upcoming year, updates 1st of that year
$pagestate{$params{destpage}}{calendar}{nextchange}=
timelocal(0, 0, 0, 1, 1-1, $params{year});
}
else {
# calendar for past month or year, does not need
# to update any more
delete $pagestate{$params{destpage}}{calendar};
}
# Calculate month names for next month, and previous months
my $pmonth = $params{month} - 1;
my $nmonth = $params{month} + 1;
my $pyear = $params{year} - 1;
my $nyear = $params{year} + 1;
# Adjust for January and December
if ($params{month} == 1) {
$pmonth = 12;
$pyear--;
}
if ($params{month} == 12) {
$nmonth = 1;
$nyear++;
}
$params{pmonth}=$pmonth;
$params{nmonth}=$nmonth;
$params{pyear} =$pyear;
$params{nyear} =$nyear;
my $calendar="\n";
my $pagespec=$params{pages};
my $page =$params{page};
if (! defined $cache{$pagespec}) {
foreach my $p (pagespec_match_list([keys %pagesources], $pagespec)) {
my $mtime = $IkiWiki::pagectime{$p};
my $src = $pagesources{$p};
my @date = localtime($mtime);
my $mday = $date[3];
my $month = $date[4] + 1;
my $year = $date[5] + 1900;
my $mtag = sprintf("%02d", $month);
# Only one posting per day is being linked to.
$linkcache{"$year/$mtag/$mday"} = "$src";
$cache{$pagespec}{"$year"}++;
$cache{$pagespec}{"$year/$mtag"}++;
$cache{$pagespec}{"$year/$mtag/$mday"}++;
}
}
if ($params{type} =~ /month/i) {
my $calendar="";
if ($params{type} eq 'month') {
$calendar=format_month(%params);
}
elsif ($params{type} =~ /year/i) {
elsif ($params{type} eq 'year') {
$calendar=format_year(%params);
}

View File

@ -29,11 +29,10 @@ sub preprocess_if (@) {
}
my $result=0;
if ((exists $params{all} && lc $params{all} eq "no") ||
# An optimisation to avoid needless looping over every page
# and adding of dependencies for simple uses of some of the
# tests.
$params{test} =~ /^([\s\!()]*((enabled|sourcepage|destpage|included)\([^)]*\)|(and|or))[\s\!()]*)+$/) {
if (! IkiWiki::yesno($params{all}) ||
# An optimisation to avoid needless looping over every page
# for simple uses of some of the tests.
$params{test} =~ /^([\s\!()]*((enabled|sourcepage|destpage|included)\([^)]*\)|(and|or))[\s\!()]*)+$/) {
add_depends($params{page}, "($params{test}) and $params{page}");
$result=pagespec_match($params{page}, $params{test},
location => $params{page},
@ -41,17 +40,12 @@ sub preprocess_if (@) {
destpage => $params{destpage});
}
else {
add_depends($params{page}, $params{test});
foreach my $page (keys %pagesources) {
if (pagespec_match($page, $params{test},
location => $params{page},
sourcepage => $params{page},
destpage => $params{destpage})) {
$result=1;
last;
}
}
$result=pagespec_match_list($params{page}, $params{test},
# stop after first match
num => 1,
sourcepage => $params{page},
destpage => $params{destpage},
);
}
my $ret;

View File

@ -58,7 +58,7 @@ sub preprocess (@) {
$pagestate{$params{page}}{edittemplate}{$params{match}}=$link;
return "" if ($params{silent} && IkiWiki::yesno($params{silent}));
add_depends($params{page}, $link);
add_depends($params{page}, $link, deptype("presence"));
return sprintf(gettext("edittemplate %s registered for %s"),
htmllink($params{page}, $params{destpage}, $link),
$params{match});

View File

@ -613,11 +613,15 @@ sub rcs_getctime ($) {
# Remove srcdir prefix
$file =~ s/^\Q$config{srcdir}\E\/?//;
my @raw_lines = run_or_die('git', 'log', '--reverse', '--follow',
my @raw_lines = run_or_die('git', 'log',
'--follow', '--no-merges',
'--pretty=raw', '--raw', '--abbrev=40', '--always', '-c',
'-r', '--', $file);
my $first = parse_diff_tree("", \@raw_lines);
my $ctime = $first->{'author_epoch'};
my @ci;
while (my $parsed = parse_diff_tree("", \@raw_lines)) {
push @ci, $parsed;
}
my $ctime = $ci[$#ci]->{'author_epoch'};
debug("ctime for '$file': ". localtime($ctime));
return $ctime;

View File

@ -170,7 +170,7 @@ sub preprocess (@) {
my $b = bestlink($params{page}, $params{link});
if (length $b) {
add_depends($params{page}, $b);
add_depends($params{page}, $b, deptype("presence"));
$imgtag=htmllink($params{page}, $params{destpage},
$params{link}, linktext => $imgtag,
noimageinline => 1);

View File

@ -195,41 +195,38 @@ sub preprocess_inline (@) {
@list = map { bestlink($params{page}, $_) }
split ' ', $params{pagenames};
if (yesno($params{reverse})) {
@list=reverse(@list);
}
foreach my $p (@list) {
add_depends($params{page}, $p, deptype($quick ? "presence" : "content"));
}
}
else {
add_depends($params{page}, $params{pages});
my $num=0;
if ($params{show}) {
$num=$params{show};
}
if ($params{feedshow} && $num < $params{feedshow}) {
$num=$params{feedshow};
}
if ($params{skip}) {
$num+=$params{skip};
}
@list = pagespec_match_list(
[ grep { $_ ne $params{page} } keys %pagesources ],
$params{pages}, location => $params{page});
if (exists $params{sort} && $params{sort} eq 'title') {
@list=sort { pagetitle(basename($a)) cmp pagetitle(basename($b)) } @list;
}
elsif (exists $params{sort} && $params{sort} eq 'title_natural') {
eval q{use Sort::Naturally};
if ($@) {
error(gettext("Sort::Naturally needed for title_natural sort"));
}
@list=sort { Sort::Naturally::ncmp(pagetitle(basename($a)), pagetitle(basename($b))) } @list;
}
elsif (exists $params{sort} && $params{sort} eq 'mtime') {
@list=sort { $pagemtime{$b} <=> $pagemtime{$a} } @list;
}
elsif (! exists $params{sort} || $params{sort} eq 'age') {
@list=sort { $pagectime{$b} <=> $pagectime{$a} } @list;
}
else {
error sprintf(gettext("unknown sort type %s"), $params{sort});
}
}
if (yesno($params{reverse})) {
@list=reverse(@list);
@list = pagespec_match_list($params{page}, $params{pages},
deptype => deptype($quick ? "presence" : "content"),
filter => sub { $_[0] eq $params{page} },
sort => exists $params{sort} ? $params{sort} : "age",
reverse => yesno($params{reverse}),
num => $num,
);
}
if (exists $params{skip}) {
@list=@list[$params{skip} .. scalar @list - 1];
@list=@list[$params{skip} .. $#list];
}
my @feedlist;
@ -247,15 +244,12 @@ sub preprocess_inline (@) {
@list=@list[0..$params{show} - 1];
}
# Explicitly add all currently displayed pages as dependencies, so
# that if they are removed or otherwise changed, the inline will be
# sure to be updated.
foreach my $p ($#list >= $#feedlist ? @list : @feedlist) {
add_depends($params{page}, $p);
}
if ($feeds && exists $params{feedpages}) {
@feedlist=pagespec_match_list(\@feedlist, $params{feedpages}, location => $params{page});
@feedlist = pagespec_match_list(
$params{page}, "($params{pages}) and ($params{feedpages})",
deptype => deptype($quick ? "presence" : "content"),
list => \@feedlist,
);
}
my ($feedbase, $feednum);

View File

@ -28,10 +28,6 @@ sub preprocess (@) {
$params{pages}="*" unless defined $params{pages};
# Needs to update whenever a page is added or removed, so
# register a dependency.
add_depends($params{page}, $params{pages});
# Can't just return the linkmap here, since the htmlscrubber
# scrubs out all <object> tags (with good reason!)
# Instead, insert a placeholder tag, which will be expanded during
@ -55,12 +51,11 @@ sub genmap ($) {
my %params=%{$maps{$mapnum}};
# Get all the items to map.
my %mapitems = ();
foreach my $item (keys %links) {
if (pagespec_match($item, $params{pages}, location => $params{page})) {
$mapitems{$item}=urlto($item, $params{destpage});
}
}
my %mapitems = map { $_ => urlto($_, $params{destpage}) }
pagespec_match_list($params{page}, $params{pages},
# update when a page is added or removed, or its
# links change
deptype => deptype("presence", "links"));
my $dest=$params{page}."/linkmap.png";

View File

@ -84,7 +84,7 @@ sub preprocess (@) {
foreach my $plugin (@pluginlist) {
$result .= '<li class="listdirectives">';
my $link=linkpage($config{directive_description_dir}."/".$plugin);
add_depends($params{page}, $link);
add_depends($params{page}, $link, deptype("presence"));
$result .= htmllink($params{page}, $params{destpage}, $link);
$result .= '</li>';
}

View File

@ -28,12 +28,16 @@ sub preprocess (@) {
my %params=@_;
$params{pages}="*" unless defined $params{pages};
# Needs to update whenever a page is added or removed (or in some
# cases, when its content changes, if show= is specified).
my $deptype=deptype(exists $params{show} ? "content" : "presence");
my $common_prefix;
# Get all the items to map.
my %mapitems;
foreach my $page (pagespec_match_list([keys %pagesources],
$params{pages}, location => $params{page})) {
foreach my $page (pagespec_match_list($params{page}, $params{pages},
deptype => $deptype)) {
if (exists $params{show} &&
exists $pagestate{$page} &&
exists $pagestate{$page}{meta}{$params{show}}) {
@ -67,16 +71,6 @@ sub preprocess (@) {
$common_prefix=IkiWiki::dirname($common_prefix);
}
# Needs to update whenever a page is added or removed (or in some
# cases, when its content changes, if show=title), so register a
# dependency.
add_depends($params{page}, $params{pages});
# Explicitly add all currently shown pages, to detect when pages
# are removed.
foreach my $item (keys %mapitems) {
add_depends($params{page}, $item);
}
# Create the map.
my $parent="";
my $indent=0;
@ -84,12 +78,12 @@ sub preprocess (@) {
my $addparent="";
my $map = "<div class='map'>\n";
# Return empty div if %mapitems is empty
if (!scalar(keys %mapitems)) {
if (! keys %mapitems) {
# return empty div for empty map
$map .= "</div>\n";
return $map;
}
else { # continue populating $map
else {
$map .= "<ul>\n";
}

View File

@ -195,7 +195,7 @@ sub preprocess (@) {
if (! length $link) {
error gettext("redir page not found")
}
add_depends($page, $link);
add_depends($page, $link, deptype("presence"));
$value=urlto($link, $page);
$value.='#'.$redir_anchor if defined $redir_anchor;
@ -291,21 +291,21 @@ sub match {
if (defined $val) {
if ($val=~/^$re$/i) {
return IkiWiki::SuccessReason->new("$re matches $field of $page");
return IkiWiki::SuccessReason->new("$re matches $field of $page", $page => $IkiWiki::DEPEND_CONTENT, "" => 1);
}
else {
return IkiWiki::FailReason->new("$re does not match $field of $page");
return IkiWiki::FailReason->new("$re does not match $field of $page", "" => 1);
}
}
else {
return IkiWiki::FailReason->new("$page does not have a $field");
return IkiWiki::FailReason->new("$page does not have a $field", "" => 1);
}
}
package IkiWiki::PageSpec;
sub match_title ($$;@) {
IkiWiki::Plugin::meta::match("title", @_);
IkiWiki::Plugin::meta::match("title", @_);
}
sub match_author ($$;@) {

View File

@ -23,24 +23,34 @@ sub preprocess (@) {
my %params=@_;
$params{pages}="*" unless defined $params{pages};
# Needs to update whenever a page is added or removed, so
# register a dependency.
add_depends($params{page}, $params{pages});
# Needs to update whenever a link changes, on any page
# since any page could link to one of the pages we're
# considering as orphans.
add_depends($params{page}, "*", deptype("links"));
my @orphans;
foreach my $page (pagespec_match_list(
[ grep { ! IkiWiki::backlink_pages($_) && $_ ne 'index' }
keys %pagesources ],
$params{pages}, location => $params{page})) {
# If the page has a link to some other page, it's
# indirectly linked to a page via that page's backlinks.
next if grep {
length $_ &&
($_ !~ /\/\Q$config{discussionpage}\E$/i || ! $config{discussion}) &&
bestlink($page, $_) !~ /^(\Q$page\E|)$/
} @{$links{$page}};
push @orphans, $page;
}
my @orphans=pagespec_match_list($params{page}, $params{pages},
# update when orphans are added/removed
deptype => deptype("presence"),
filter => sub {
my $page=shift;
# Filter out pages that other pages link to.
return 1 if IkiWiki::backlink_pages($page);
# Toplevel index is assumed to never be orphaned.
return 1 if $page eq 'index';
# If the page has a link to some other page, it's
# indirectly linked via that page's backlinks.
return 1 if grep {
length $_ &&
($_ !~ /\/\Q$config{discussionpage}\E$/i || ! $config{discussion}) &&
bestlink($page, $_) !~ /^(\Q$page\E|)$/
} @{$links{$page}};
return 0;
},
);
return gettext("All pages have other pages linking to them.") unless @orphans;
return "<ul>\n".

View File

@ -20,20 +20,20 @@ sub getsetup () {
sub preprocess (@) {
my %params=@_;
$params{pages}="*" unless defined $params{pages};
my $pages=defined $params{pages} ? $params{pages} : "*";
# Needs to update count whenever a page is added or removed, so
# register a dependency.
add_depends($params{page}, $params{pages});
my @pages;
if ($params{pages} eq "*") {
@pages=keys %pagesources;
# Just get a list of all the pages, and count the items in it.
# Use a presence dependency to only update when pages are added
# or removed.
if ($pages eq '*') {
# optimisation to avoid needing to try matching every page
add_depends($params{page}, $pages, deptype("presence"));
return scalar keys %pagesources;
}
else {
@pages=pagespec_match_list([keys %pagesources], $params{pages}, location => $params{page});
}
return $#pages+1;
return scalar pagespec_match_list($params{page}, $pages,
deptype => deptype("presence"));
}
1

View File

@ -35,22 +35,28 @@ sub preprocess (@) {
$params{pages}="*" unless defined $params{pages};
my $style = ($params{style} or 'cloud');
# Needs to update whenever a page is added or removed, so
# register a dependency.
add_depends($params{page}, $params{pages});
add_depends($params{page}, $params{among}) if exists $params{among};
my %counts;
my $max = 0;
foreach my $page (pagespec_match_list([keys %links],
$params{pages}, location => $params{page})) {
foreach my $page (pagespec_match_list($params{page}, $params{pages},
# update when a displayed page is added/removed
deptype => deptype("presence"))) {
use IkiWiki::Render;
my @backlinks = IkiWiki::backlink_pages($page);
if (exists $params{among}) {
@backlinks = pagespec_match_list(\@backlinks,
$params{among}, location => $params{page});
# only consider backlinks from the among pages
@backlinks = pagespec_match_list(
$params{page}, $params{among},
# update whenever links on those pages change
deptype => deptype("links"),
list => \@backlinks
);
}
else {
# update when any page with links changes,
# in case the links point to our displayed pages
add_depends($params{page}, "*", deptype("links"));
}
$counts{$page} = scalar(@backlinks);

View File

@ -30,11 +30,16 @@ sub preprocess (@) {
return "";
}
my $deptype;
if (! exists $params{time} || $params{time} ne 'mtime') {
$params{timehash} = \%IkiWiki::pagectime;
# need to update when pages are added or removed
$deptype = deptype("presence");
}
else {
$params{timehash} = \%IkiWiki::pagemtime;
# need to update when pages are changed
$deptype = deptype("content");
}
if (! exists $params{formula}) {
@ -48,12 +53,11 @@ sub preprocess (@) {
error gettext("unknown formula");
}
add_depends($params{page}, $params{pages});
my @list=sort { $params{timehash}->{$b} <=> $params{timehash}->{$a} }
pagespec_match_list(
[ grep { $_ ne $params{page} } keys %pagesources],
$params{pages}, location => $params{page});
pagespec_match_list($params{page}, $params{pages},
deptype => $deptype,
filter => sub { $_[0] eq $params{page} },
);
my @data=eval qq{IkiWiki::Plugin::postsparkline::formula::$formula(\\\%params, \@list)};
if ($@) {

View File

@ -36,16 +36,12 @@ sub preprocess (@) {
$fill.="%";
}
elsif (defined $params{totalpages} and defined $params{donepages}) {
add_depends($params{page}, $params{totalpages});
add_depends($params{page}, $params{donepages});
my @pages=keys %pagesources;
my $totalcount=0;
my $donecount=0;
foreach my $page (@pages) {
$totalcount++ if pagespec_match($page, $params{totalpages}, location => $params{page});
$donecount++ if pagespec_match($page, $params{donepages}, location => $params{page});
}
my $totalcount=pagespec_match_list(
$params{page}, $params{totalpages},
deptype => deptype("presence"));
my $donecount=pagespec_match_list(
$params{page}, $params{donepages},
deptype => deptype("presence"));
if ($totalcount == 0) {
$fill = "100%";

View File

@ -7,7 +7,7 @@ use strict;
use IkiWiki;
use Encode;
my %backlinks;
my (%backlinks, %rendered);
our %brokenlinks;
my $links_calculated=0;
@ -147,6 +147,8 @@ sub genpage ($$) {
sub scan ($) {
my $file=shift;
debug(sprintf(gettext("scanning %s"), $file));
my $type=pagetype($file);
if (defined $type) {
my $srcfile=srcfile($file);
@ -202,8 +204,11 @@ sub fast_file_copy (@) {
}
}
sub render ($) {
sub render ($$) {
my $file=shift;
return if $rendered{$file};
debug(shift);
$rendered{$file}=1;
my $type=pagetype($file);
my $srcfile=srcfile($file);
@ -273,7 +278,8 @@ sub srcdir_check () {
}
sub find_src_files () {
my (@files, %pages);
my @files;
my %pages;
eval q{use File::Find};
error($@) if $@;
find({
@ -334,19 +340,14 @@ sub find_src_files () {
},
}, $dir);
};
# Returns a list of all source files found, and a hash of
# the corresponding page names.
return \@files, \%pages;
}
sub refresh () {
srcdir_check();
run_hooks(refresh => sub { shift->() });
my ($files, $exists)=find_src_files();
sub find_new_files ($) {
my $files=shift;
my @new;
my @internal_new;
my (%rendered, @add, @del, @internal);
# check for added or removed pages
foreach my $file (@$files) {
my $page=pagename($file);
if (exists $pagesources{$page} && $pagesources{$page} ne $file) {
@ -356,10 +357,10 @@ sub refresh () {
$pagesources{$page}=$file;
if (! $pagemtime{$page}) {
if (isinternal($page)) {
push @internal, $file;
push @internal_new, $file;
}
else {
push @add, $file;
push @new, $file;
if ($config{getctime} && -e "$config{srcdir}/$file") {
eval {
my $time=rcs_getctime("$config{srcdir}/$file");
@ -376,10 +377,19 @@ sub refresh () {
}
}
}
return \@new, \@internal_new;
}
sub find_del_files ($) {
my $pages=shift;
my @del;
my @internal_del;
foreach my $page (keys %pagemtime) {
if (! $exists->{$page}) {
if (! $pages->{$page}) {
if (isinternal($page)) {
push @internal, $pagesources{$page};
push @internal_del, $pagesources{$page};
}
else {
debug(sprintf(gettext("removing old page %s"), $page));
@ -400,8 +410,13 @@ sub refresh () {
}
}
# find changed and new files
my @needsbuild;
return \@del, \@internal_del;
}
sub find_changed ($) {
my $files=shift;
my @changed;
my @internal_changed;
foreach my $file (@$files) {
my $page=pagename($file);
my ($srcfile, @stat)=srcfile_stat($file);
@ -409,138 +424,55 @@ sub refresh () {
$stat[9] > $pagemtime{$page} ||
$forcerebuild{$page}) {
$pagemtime{$page}=$stat[9];
if (isinternal($page)) {
push @internal, $file;
# Preprocess internal page in scan-only mode.
preprocess($page, $page, readfile($srcfile), 1);
push @internal_changed, $file;
}
else {
push @needsbuild, $file;
push @changed, $file;
}
}
}
run_hooks(needsbuild => sub { shift->(\@needsbuild) });
return \@changed, \@internal_changed;
}
# scan and render files
foreach my $file (@needsbuild) {
debug(sprintf(gettext("scanning %s"), $file));
scan($file);
}
calculate_links();
foreach my $file (@needsbuild) {
debug(sprintf(gettext("building %s"), $file));
render($file);
$rendered{$file}=1;
}
foreach my $file (@internal) {
# internal pages are not rendered
sub calculate_old_links ($$) {
my ($changed, $del)=@_;
my %oldlink_targets;
foreach my $file (@$changed, @$del) {
my $page=pagename($file);
delete $depends{$page};
delete $depends_simple{$page};
foreach my $old (@{$renderedfiles{$page}}) {
delete $destsources{$old};
}
$renderedfiles{$page}=[];
}
# rebuild pages that link to added or removed pages
if (@add || @del) {
foreach my $f (@add, @del) {
my $p=pagename($f);
foreach my $page (keys %{$backlinks{$p}}) {
my $file=$pagesources{$page};
next if $rendered{$file};
debug(sprintf(gettext("building %s, which links to %s"), $file, $p));
render($file);
$rendered{$file}=1;
if (exists $oldlinks{$page}) {
foreach my $l (@{$oldlinks{$page}}) {
$oldlink_targets{$page}{$l}=bestlink($page, $l);
}
}
}
return \%oldlink_targets;
}
if (%rendered || @del || @internal) {
my @changed=(keys %rendered, @del);
my %lcchanged = map { lc(pagename($_)) => 1 } @changed;
# rebuild dependent pages
foreach my $f (@$files) {
next if $rendered{$f};
my $p=pagename($f);
my $reason = undef;
if (exists $depends_simple{$p}) {
foreach my $d (keys %{$depends_simple{$p}}) {
if (exists $lcchanged{$d}) {
$reason = $d;
last;
}
}
}
if (exists $depends{$p} && ! defined $reason) {
D: foreach my $d (keys %{$depends{$p}}) {
my $sub=pagespec_translate($d);
next if $@ || ! defined $sub;
# only consider internal files
# if the page explicitly depends
# on such files
foreach my $file (@changed, $d =~ /internal\(/ ? @internal : ()) {
next if $file eq $f;
my $page=pagename($file);
if ($sub->($page, location => $p)) {
$reason = $page;
last D;
}
}
}
}
if (defined $reason) {
debug(sprintf(gettext("building %s, which depends on %s"), $f, $reason));
render($f);
$rendered{$f}=1;
}
}
# handle backlinks; if a page has added/removed links,
# update the pages it links to
my %linkchanged;
foreach my $file (@changed) {
my $page=pagename($file);
if (exists $links{$page}) {
foreach my $link (map { bestlink($page, $_) } @{$links{$page}}) {
if (length $link &&
(! exists $oldlinks{$page} ||
! grep { bestlink($page, $_) eq $link } @{$oldlinks{$page}})) {
$linkchanged{$link}=1;
}
}
}
if (exists $oldlinks{$page}) {
foreach my $link (map { bestlink($page, $_) } @{$oldlinks{$page}}) {
if (length $link &&
(! exists $links{$page} ||
! grep { bestlink($page, $_) eq $link } @{$links{$page}})) {
$linkchanged{$link}=1;
}
}
}
}
foreach my $link (keys %linkchanged) {
my $linkfile=$pagesources{$link};
if (defined $linkfile) {
next if $rendered{$linkfile};
debug(sprintf(gettext("building %s, to update its backlinks"), $linkfile));
render($linkfile);
$rendered{$linkfile}=1;
}
}
sub derender_internal ($) {
my $file=shift;
my $page=pagename($file);
delete $depends{$page};
delete $depends_simple{$page};
foreach my $old (@{$renderedfiles{$page}}) {
delete $destsources{$old};
}
$renderedfiles{$page}=[];
}
# remove no longer rendered files
sub render_linkers ($) {
my $f=shift;
my $p=pagename($f);
foreach my $page (keys %{$backlinks{$p}}) {
my $file=$pagesources{$page};
render($file, sprintf(gettext("building %s, which links to %s"), $file, $p));
}
}
sub remove_unrendered () {
foreach my $src (keys %rendered) {
my $page=pagename($src);
foreach my $file (@{$oldrenderedfiles{$page}}) {
@ -550,9 +482,184 @@ sub refresh () {
}
}
}
}
if (@del) {
run_hooks(delete => sub { shift->(@del) });
sub calculate_changed_links ($$$) {
my ($changed, $del, $oldlink_targets)=@_;
my (%backlinkchanged, %linkchangers);
foreach my $file (@$changed, @$del) {
my $page=pagename($file);
if (exists $links{$page}) {
foreach my $l (@{$links{$page}}) {
my $target=bestlink($page, $l);
if (! exists $oldlink_targets->{$page}{$l} ||
$target ne $oldlink_targets->{$page}{$l}) {
$backlinkchanged{$target}=1;
$linkchangers{lc($page)}=1;
}
delete $oldlink_targets->{$page}{$l};
}
}
if (exists $oldlink_targets->{$page} &&
%{$oldlink_targets->{$page}}) {
foreach my $target (values %{$oldlink_targets->{$page}}) {
$backlinkchanged{$target}=1;
}
$linkchangers{lc($page)}=1;
}
}
return \%backlinkchanged, \%linkchangers;
}
sub render_dependent ($$$$$$$) {
my ($files, $new, $internal_new, $del, $internal_del,
$internal_changed, $linkchangers)=@_;
my @changed=(keys %rendered, @$del);
my @exists_changed=(@$new, @$del);
my %lc_changed = map { lc(pagename($_)) => 1 } @changed;
my %lc_exists_changed = map { lc(pagename($_)) => 1 } @exists_changed;
foreach my $f (@$files) {
next if $rendered{$f};
my $p=pagename($f);
my $reason = undef;
if (exists $depends_simple{$p}) {
foreach my $d (keys %{$depends_simple{$p}}) {
if (($depends_simple{$p}{$d} & $IkiWiki::DEPEND_CONTENT &&
$lc_changed{$d})
||
($depends_simple{$p}{$d} & $IkiWiki::DEPEND_PRESENCE &&
$lc_exists_changed{$d})
||
($depends_simple{$p}{$d} & $IkiWiki::DEPEND_LINKS &&
$linkchangers->{$d})
) {
$reason = $d;
last;
}
}
}
if (exists $depends{$p} && ! defined $reason) {
foreach my $dep (keys %{$depends{$p}}) {
my $sub=pagespec_translate($dep);
next if $@ || ! defined $sub;
# only consider internal files
# if the page explicitly depends
# on such files
my $internal_dep=$dep =~ /internal\(/;
my $in=sub {
my $list=shift;
my $type=shift;
foreach my $file (@$list) {
next if $file eq $f;
my $page=pagename($file);
if ($sub->($page, location => $p)) {
if ($type == $IkiWiki::DEPEND_LINKS) {
next unless $linkchangers->{lc($page)};
}
return $page;
}
}
return undef;
};
if ($depends{$p}{$dep} & $IkiWiki::DEPEND_CONTENT) {
last if $reason =
$in->(\@changed, $IkiWiki::DEPEND_CONTENT);
last if $internal_dep && ($reason =
$in->($internal_new, $IkiWiki::DEPEND_CONTENT) ||
$in->($internal_del, $IkiWiki::DEPEND_CONTENT) ||
$in->($internal_changed, $IkiWiki::DEPEND_CONTENT));
}
if ($depends{$p}{$dep} & $IkiWiki::DEPEND_PRESENCE) {
last if $reason =
$in->(\@exists_changed, $IkiWiki::DEPEND_PRESENCE);
last if $internal_dep && ($reason =
$in->($internal_new, $IkiWiki::DEPEND_PRESENCE) ||
$in->($internal_del, $IkiWiki::DEPEND_PRESENCE));
}
if ($depends{$p}{$dep} & $IkiWiki::DEPEND_LINKS) {
last if $reason =
$in->(\@changed, $IkiWiki::DEPEND_LINKS);
last if $internal_dep && ($reason =
$in->($internal_new, $IkiWiki::DEPEND_LINKS) ||
$in->($internal_del, $IkiWiki::DEPEND_LINKS) ||
$in->($internal_changed, $IkiWiki::DEPEND_LINKS));
}
}
}
if (defined $reason) {
render($f, sprintf(gettext("building %s, which depends on %s"), $f, $reason));
return 1;
}
}
return 0;
}
sub render_backlinks ($) {
my $backlinkchanged=shift;
foreach my $link (keys %$backlinkchanged) {
my $linkfile=$pagesources{$link};
if (defined $linkfile) {
render($linkfile, sprintf(gettext("building %s, to update its backlinks"), $linkfile));
}
}
}
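# The monolithic refresh() has been split into the helpers above; it now
# just sequences them: find new, deleted and changed files, scan and
# render the changed pages, work out which links and backlinks changed,
# keep calling render_dependent() until no further pages are triggered,
# then update backlinks and remove files that are no longer rendered.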
sub refresh () {
srcdir_check();
run_hooks(refresh => sub { shift->() });
my ($files, $pages)=find_src_files();
my ($new, $internal_new)=find_new_files($files);
my ($del, $internal_del)=find_del_files($pages);
my ($changed, $internal_changed)=find_changed($files);
run_hooks(needsbuild => sub { shift->($changed) });
my $oldlink_targets=calculate_old_links($changed, $del);
foreach my $file (@$changed) {
scan($file);
}
calculate_links();
foreach my $file (@$changed) {
render($file, sprintf(gettext("building %s"), $file));
}
foreach my $file (@$internal_new, @$internal_del, @$internal_changed) {
derender_internal($file);
}
my ($backlinkchanged, $linkchangers)=calculate_changed_links($changed,
$del, $oldlink_targets);
foreach my $file (@$new, @$del) {
render_linkers($file);
}
if (@$changed || @$internal_changed ||
@$del || @$internal_del || @$internal_new) {
1 while render_dependent($files, $new, $internal_new,
$del, $internal_del, $internal_changed,
$linkchangers);
}
render_backlinks($backlinkchanged);
remove_unrendered();
if (@$del) {
run_hooks(delete => sub { shift->(@$del) });
}
if (%rendered) {
run_hooks(change => sub { shift->(keys %rendered) });

debian/NEWS
View File

@ -1,3 +1,13 @@
ikiwiki (3.2009XXXX) UNRELEASED; urgency=low
To take advantage of significant performance improvements, all
wikis need to be rebuilt on upgrade to this version. If you
listed your wiki in /etc/ikiwiki/wikilist this will be done
automatically when the Debian package is upgraded. Or use
ikiwiki-mass-rebuild to force a rebuild.
-- Joey Hess <joeyh@debian.org> Mon, 05 Oct 2009 16:48:59 -0400
ikiwiki (3.1415926) unstable; urgency=low
In order to fix a performance bug, all wikis need to be rebuilt on

debian/changelog
View File

@ -1,11 +1,44 @@
ikiwiki (3.20091010) UNRELEASED; urgency=low
ikiwiki (3.2009XXXX) UNRELEASED; urgency=low
* Added support framework for multiple types of dependencies, including
dependencies that are only affected by page presence or link changes.
* Rebuild wikis on upgrade to this version to get improved dependency
info.
* pagecount, calendar, postsparkline, progress: Use a presence dependency,
which makes these directives much less expensive to use, since page
edits will no longer trigger an unnecessary update.
* map: Use a presence dependency unless show= is specified.
This makes maps efficient enough that they can be used on sidebars!
* inline: Use a presence dependency in quick mode.
* brokenlinks: Use a link dependency.
This makes it much more efficient, only updating when really necessary.
* orphans, pagestats: Use a combination of presence and link dependencies.
This makes them more efficient. It also fixes a longstanding bug,
where if only a small set of pages were considered by orphans/pagestats,
changes to links on other pages failed to cause an update.
* linkmap: Use a combination of presence and link dependencies.
This makes the map be regenerated much less frequently in many cases,
so larger maps are more practical to use now.
* Plugins providing PageSpec `match_*` functions should pass additional
influence information when creating result objects. This allows correctly
handling many more complicated dependencies.
* API change: `pagespec_match_list` has completely changed its interface.
The old interface will be removed soon, and a warning will be printed
if any plugins try to use it.
* Transitive dependencies are now correctly supported.
* ikiwiki-calendar: New command automates creation of archive pages
using the calendar plugin.
* calendar: Fix midnight rebuild trigger of calendars with explicit
month/year.
* calendar: Fix bug in next/previous year/month links, which sometimes
linked to an archive page from the wrong year, or were missing.
* git: --getctime will now follow renames back to the original creation
of a file.
* calendar: Fix CSS for year calendar to match the plugin documentation.
* Added minimal default CSS for calendar plugin, just highlighting the
current day.
-- Joey Hess <joeyh@debian.org> Sun, 11 Oct 2009 15:54:45 -0400
-- Joey Hess <joeyh@debian.org> Fri, 09 Oct 2009 19:53:50 -0400
ikiwiki (3.20091009) unstable; urgency=low
@ -19,7 +52,7 @@ ikiwiki (3.20091009) unstable; urgency=low
* mirrorlist: Display nothing if list is empty.
* Fix a bug that could lead to duplicate links being recorded
for tags.
* Optimize away most expensive file prune calls, when refreshing,
* Optimize away most expensive file prune checks, when refreshing,
by only checking new files.
-- Joey Hess <joeyh@debian.org> Fri, 09 Oct 2009 19:53:50 -0400

debian/postinst
View File

@ -4,7 +4,7 @@ set -e
# Change this when some incompatible change is made that requires
# rebuilding all wikis.
firstcompat=3.1415926
firstcompat=3.20091010
if [ "$1" = configure ] && \
dpkg --compare-versions "$2" lt "$firstcompat"; then

View File

@ -2,10 +2,6 @@ It seems that the [[ikiwiki/directive/inline]] directive doesn't generate wikili
\[[!inline pages="bugs/* and !*/discussion and backlink(bugs)" feeds=no postform=no archive=yes show="10"]]
But here it is:
[[!inline pages="bugs/* and !*/discussion and backlink(bugs)" feeds=no postform=no archive=yes show="10"]]
and note that it only included the 'normal' wikilinks (and also note that this page is not marked done even though the done page is inlined).
One might also wonder if inline would make this page link to any internal links on those inlined pages too, but I think
that would be overkill.

View File

@ -1,13 +1,29 @@
* Has bugs updating things if the bestlink of a page changes due to
adding/removing a page. For example, if Foo/Bar links to "Baz", which is
Foo/Baz, and Foo/Bar/Baz gets added, it will update the links in Foo/Bar
to point to it, but will forget to update the linkbacks in Foo/Baz.
to point to it, but will forget to update the backlinks in Foo/Baz.
* And if Foo/Bar/Baz is then removed, it forgets to update Foo/Bar to link
back to Foo/Baz.
The buggy code is in `refresh()`, when it determines what
links, on what pages, have changed. It only looks at
changed/added/deleted pages when doing this. But when Foo/Bar/Baz
is added, Foo/Bar is not changed -- so the change in its
backlinks is not noticed.
As of 1.33, this is still true. The buggy code is the %linkchanged
calculation in refresh(), which doesn't detect that the link has changed in
this case.
To fix this, it needs to consider, when rebuilding Foo/Bar for the changed
links, what oldlinks Foo/Bar had. If one of the oldlinks linked to
Foo/Baz, and now links to Foo/Bar/Baz, it could then rebuild Foo/Baz.
Still true in 1.43 although the code is much different now..
Problem is that in order to do that, it needs to be able to tell that
the oldlinks linked to Foo/Baz. Which would mean either calculating
all links before the scan phase, or keeping a copy of the backlinks
from the last build, and using that. The first option would be a lot
of work for this minor issue.. it might be less expensive to just rebuild
*all* pages that Foo/Bar links to.
Keeping a copy of the backlinks has some merit. It could also be
incrementally updated.
* And if Foo/Bar/Baz is then removed, Foo/Bar gets a broken link,
instead of changing back to linking to Foo/Baz.
This old bug still exists as of 031d1bf5046ab77c796477a19967e7c0c512c417.

View File

@ -65,7 +65,7 @@ Downsides here:
modification to plugins/brokenlinks causes an unnecessary update of
plugins, and could be solved by adding more dependency types.)
--[[Joey]]
[[done]] --[[Joey]]
> Some questions/comments... I've thought about this a lot for [[todo/tracking_bugs_with_dependencies]].
>

View File

@ -1,4 +1,4 @@
[[!pagestats pages="./tags/*"]]
[[!pagestats pages="./tags/*" among="./posts/*"]]
Welcome to my blog.

View File

@ -1,3 +1,3 @@
[[!pagestats pages="./tags/*"]]
[[!pagestats pages="./tags/*" among="./posts/*"]]
On the right you can see the tag cloud for this blog.

View File

@ -4,7 +4,7 @@ ikiwiki-calendar - create calendar archive pages
# SYNOPSIS
ikiwiki-calendar [-f] your.setup [pagespec] [year]
ikiwiki-calendar [-f] your.setup [pagespec] [startyear [endyear]]
# DESCRIPTION
@ -22,13 +22,13 @@ default is all pages. To limit it to only posts in a blog,
use something like "posts/* and !*/Discussion".
It defaults to creating calendar pages for the current
year, as well as the previous year, and the next year.
If you specify a year, it will create pages for that year.
year. If you specify a year, it will create pages for that year.
Specify a second year to create pages for a span of years.
Existing pages will not be overwritten by this command by default.
Use the `-f` switch to force it to overwrite any existing pages.
## CRONTAB
# CRONTAB
While this command only needs to be run once a year to update
the archive pages for each new year, you are recommended to set up

View File

@ -86,19 +86,15 @@ Here are some less often needed parameters:
if raw is set to "yes", the page will be included raw, without additional
markup around it, as if it were a literal part of the source of the
inlining page.
* `sort` - Controls how inlined pages are sorted. The default, "age" is to
sort newest created pages first. Setting it to "title" will sort pages by
title, and "mtime" sorts most recently modified pages first. If
[[!cpan Sort::Naturally]] is installed, `sort` can be set to "title_natural"
to sort by title with numbers treated as such ("1 2 9 10 20" instead of
"1 10 2 20 9").
* `sort` - Controls how inlined pages are [[sorted|pagespec/sorting]].
The default is to sort the newest created pages first.
* `reverse` - If set to "yes", causes the sort order to be reversed.
* `feedshow` - Specify the maximum number of matching pages to include in
the rss/atom feeds. The default is the same as the `show` value above.
* `feedonly` - Only generate the feed, do not display the pages inline on
the page.
* `quick` - Build archives in quick mode, without reading page contents for
metadata. By default, this also turns off generation of any feeds.
metadata. This also turns off generation of any feeds.
* `timeformat` - Use this to specify how to display the time or date for pages
in the blog. The format string is passed to the strftime(3) function.
* `feedpages` - A [[PageSpec]] of inlined pages to include in the rss/atom

View File

@ -9,9 +9,7 @@ Only links between mapped pages will be shown; links pointing to or from
unmapped pages will be omitted. If the pages to include are not specified,
the links between all pages (and other files) in the wiki are mapped. For
best results, only a small set of pages should be mapped, since otherwise
the map can become very large, unwieldy, and complicated. Also, the map is
rebuilt whenever one of the mapped pages is changed, which can make the
wiki a bit slow.
the map can become very large, unwieldy, and complicated.
Here are descriptions of all the supported parameters to the `linkmap`
directive:

View File

@ -12,13 +12,13 @@ And here's how to create a table of all the pages on the wiki:
\[[!pagestats style="table"]]
The optional `among` parameter limits display to pages that match a
[[ikiwiki/PageSpec]]. For instance, to display a cloud of tags used on blog
entries, you could use:
The optional `among` parameter limits the pages whose outgoing links are
considered. For instance, to display a cloud of tags used on blog
entries, while ignoring other pages that use those tags, you could use:
\[[!pagestats pages="tags/*" among="blog/posts/*"]]
or to display a cloud of tags related to Linux, you could use:
Or to display a cloud of tags related to Linux, you could use:
\[[!pagestats pages="tags/* and not tags/linux" among="tagged(linux)"]]

View File

@ -0,0 +1,11 @@
Some [[directives|ikiwiki/directive]] that use
[[PageSpecs|ikiwiki/pagespec]] allow
specifying the order that matching pages are shown in. The following sort
orders can be specified.
* `age` - List pages from the most recently created to the oldest.
* `mtime` - List pages with the most recently modified first.
* `title` - Order by title.
* `title_natural` - Only available if [[!cpan Sort::Naturally]] is
installed. Orders by title, but numbers in the title are treated
as such ("1 2 9 10 20" instead of "1 10 2 20 9").

View File

@ -1,7 +1,7 @@
Most of ikiwiki's [[features]] are implemented as plugins. Many of these
plugins are included with ikiwiki.
[[!pagestats pages="plugins/type/* and !plugins/type/slow"]]
[[!pagestats pages="plugins/type/* and !plugins/type/slow" among="plugins/*"]]
There's documentation if you want to [[write]] your own plugins, or you can
[[install]] plugins [[contributed|contrib]] by others.
@ -13,7 +13,5 @@ will fit most uses of ikiwiki.
## Plugin directory
[[!inline pages="plugins/* and !plugins/type/* and !plugins/write and
!plugins/write/* and !plugins/contrib and !plugins/install and !*/Discussion"
feedpages="created_after(plugins/graphviz)" archive="yes" sort=title
rootpage="plugins/contrib" postformtext="Add a new plugin named:" show=0]]
[[!map pages="plugins/* and !plugins/type/* and !plugins/write and
!plugins/write/* and !plugins/contrib and !plugins/install and !*/Discussion"]]

View File

@ -1,6 +1,4 @@
These plugins are provided by third parties and are not currently
included in ikiwiki. See [[install]] for installation help.
[[!inline pages="plugins/contrib/* and !*/Discussion"
feedpages="created_after(plugins/contrib/navbar)" archive="yes"
rootpage="plugins/contrib" postformtext="Add a new plugin named:" show=0]]
[[!map pages="plugins/contrib/* and !*/Discussion"]]

View File

@ -10,5 +10,6 @@ Here's a list of orphaned pages on this wiki:
[[!orphans pages="* and !news/* and !todo/* and !bugs/* and !users/* and
!recentchanges and !examples/* and !tips/* and !sandbox/* and !templates/* and
!forum/* and !*.js and
!wikiicons/* and !plugins/*"]]
"""]]

View File

@ -16,6 +16,10 @@ will turn off the sidebar altogether.
Warning: Any change to the sidebar will cause a rebuild of the whole wiki,
since every page includes a copy that has to be updated. This can
especially be a problem if the sidebar includes [[inline]] or [[map]]
directives, since any changes to pages inlined or mapped onto the sidebar
especially be a problem if the sidebar includes an [[ikiwiki/directive/inline]]
directive, since any changes to pages inlined into the sidebar
will change the sidebar and cause a full wiki rebuild.
Instead, if you include a [[ikiwiki/directive/map]] directive on the sidebar,
and it does not use the `show` parameter, only adding or removing pages
included in the map will cause a full rebuild. Modifying pages will not.

View File

@ -609,21 +609,60 @@ page created from it. (Ie, it appends ".html".)
Use this when constructing the filename of a html file. Use `urlto` when
generating a link to a page.
#### `add_depends($$;@)`
#### `deptype(@)`
Use this function to generate ikiwiki's internal representation of a
dependency type from one or more of these keywords:
* `content` is the default. Any change to the content
of a page triggers the dependency.
* `presence` is only triggered by a change to the presence
of a page.
* `links` is only triggered by a change to the links of a page.
This includes when a link is added, removed, or changes what
it points to due to other changes. It does not include the
addition or removal of a duplicate link.
If multiple types are specified, they are combined.
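As a rough sketch, a plugin that only needs to know which matching pages
exist and what they link to might combine the keywords like this, and pass
the result to `add_depends` or `pagespec_match_list` (both described below);
the page variable and pagespec are purely illustrative:

    # combined dependency type: presence and links, but not full content
    my $deptype=deptype("presence", "links");
    add_depends($params{page}, "tagged(news)", $deptype);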
#### `pagespec_match_list($$;@)`
Passed a page name, and [[ikiwiki/PageSpec]], returns a list of pages
in the wiki that match the [[ikiwiki/PageSpec]].
The page will automatically be made to depend on the specified
[[ikiwiki/PageSpec]], so `add_depends` does not need to be called. This
is often significantly more efficient than calling `add_depends` and
`pagespec_match` in a loop. You should use this anytime a plugin
needs to match a set of pages and do something based on that list.
Unlike pagespec_match, this may throw an error if there is an error in
the pagespec.
Additional named parameters can be specified:
* `deptype` optionally specifies the type of dependency to add. Use the
`deptype` function to generate a dependency type.
* `filter` is a reference to a function that is called and passed a page,
and returns true if the page should be filtered out of the list.
* `sort` specifies a sort order for the list. See
[[ikiwiki/PageSpec/sorting]] for the available sort methods.
* `reverse` if true, sorts in reverse.
* `num` if nonzero, specifies the maximum number of matching pages that
will be returned.
* `list` makes it only match among the specified list of pages.
Default is to match among all pages in the wiki.
Any other named parameters are passed on to `pagespec_match`, to further
limit the match.
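For instance, a plugin might gather a limited, sorted set of matching pages
along these lines (a sketch only; the pagespec and parameter values are
examples, not requirements):

    my @posts=pagespec_match_list($params{page}, "blog/* and !*/Discussion",
        deptype => deptype("presence"), # rebuild only when posts come or go
        sort => "age", num => 10);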
#### `add_depends($$;$)`
Makes the specified page depend on the specified [[ikiwiki/PageSpec]].
By default, dependencies are full content dependencies, meaning that the
page will be updated whenever anything matching the PageSpec is modified.
This default can be overridden by additional named parameters, which can be
used to indicate weaker types of dependencies:
* `presence` if set to true, only the presence of a matching page triggers
the dependency.
* `links` if set to true, any change to links on a matching page
triggers the dependency. This includes when a link is added, removed,
or changes what it points to due to other changes. It does not include
the addition or removal of a duplicate link.
This can be overridden by passing a `deptype` value as the third parameter.
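A short sketch of both calling forms (the page and pagespec values are
illustrative):

    # full content dependency (the default)
    add_depends($page, "index");
    # weaker dependency: only the presence of matching pages matters
    add_depends($page, "tagged(linux)", deptype("presence"));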
#### `pagespec_match($$;@)`
@ -639,19 +678,6 @@ The most often used is "location", which specifies the location the
PageSpec should match against. If not passed, relative PageSpecs will match
relative to the top of the wiki.
#### `pagespec_match_list($$;@)`
Passed a reference to a list of page names, and [[ikiwiki/PageSpec]],
returns the set of pages that match the [[ikiwiki/PageSpec]].
Additional named parameters can be passed, to further limit the match.
The most often used is "location", which specifies the location the
PageSpec should match against. If not passed, relative PageSpecs will match
relative to the top of the wiki.
Unlike pagespec_match, this may throw an error if there is an error in
the pagespec.
#### `bestlink($$)`
Given a page and the text of a link on the page, determine which
@ -982,6 +1008,15 @@ an IkiWiki::FailReason object if the match fails. If the match cannot be
attempted at all, for any page, it can instead return an
IkiWiki::ErrorReason object explaining why.
When constructing these objects, you should also include information about
any pages whose contents or other metadata influenced the result of the
match. Do this by passing a list of pages, followed by `deptype` values.
For example, "backlink(foo)" is influenced by the contents of page foo;
"link(foo)" and "title(bar)" are influenced by the contents of any page
they match; "created_before(foo)" is influenced by the metadata of foo;
while "glob(*)" is not influenced by the contents of any page.
### Setup plugins
The ikiwiki setup file is loaded using a pluggable mechanism. If you look

View File

@ -403,7 +403,6 @@ span.color {
}
/* Used by the highlight plugin. */
pre.hl { color:#000000; background-color:#ffffff; }
.hl.num { color:#2928ff; }
.hl.esc { color:#ff00ff; }
@ -419,3 +418,7 @@ pre.hl { color:#000000; background-color:#ffffff; }
.hl.kwb { color:#830000; }
.hl.kwc { color:#000000; font-weight:bold; }
.hl.kwd { color:#010181; }
/* For the calendar plugin. */
.month-calendar-day-this-day { background-color: #eee; }
.year-calendar-this-month { background-color: #eee; }

View File

@ -280,6 +280,7 @@ sigh.
that the page links to, which is just what link dependencies are
triggered on.
[[done]]
----
### the removal problem

View File

@ -23,7 +23,7 @@ essentially three pieces needed for a complete translation:
file, and in preprocessor directives.
1. The [[basewiki]] needs to be translated. The
[[plugins/contrib/po]] ikiwiki plugin will allow translating
[[plugins/po]] ikiwiki plugin will allow translating
wikis using po files and can be used for this.
There is now a website, [l10n.ikiwiki.info](http://l10n.ikiwiki.info)

View File

@ -15,7 +15,8 @@ GetOptions(
) || usage();
my $setup=shift || usage();
my $pagespec=shift || "*";
my $year=shift || 1900+(localtime(time))[5];
my $startyear=shift || 1900+(localtime(time))[5];
my $endyear=shift || $startyear;
%config=IkiWiki::defaultconfig();
IkiWiki::Setup::load($setup);
@ -43,7 +44,7 @@ sub writearchive ($$;$) {
}
}
foreach my $y ($year-1, $year, $year+1) {
foreach my $y ($startyear..$endyear) {
writearchive("calendaryear.tmpl", $y);
foreach my $m (qw{01 02 03 04 05 06 07 08 09 10 11 12}) {
writearchive("calendarmonth.tmpl", $y, $m);

View File

@ -299,7 +299,7 @@ sub oldloadindex {
$pagemtime{$page}=$items{mtime}[0];
$oldlinks{$page}=[@{$items{link}}];
$links{$page}=[@{$items{link}}];
$depends{$page}={ $items{depends}[0] => 1 } if exists $items{depends};
$depends{$page}={ $items{depends}[0] => $IkiWiki::DEPEND_CONTENT } if exists $items{depends};
$destsources{$_}=$page foreach @{$items{dest}};
$renderedfiles{$page}=[@{$items{dest}}];
$pagecase{lc $page}=$page;

t/add_depends.t (new file, mode 100755)
View File

@ -0,0 +1,62 @@
#!/usr/bin/perl
use warnings;
use strict;
use Test::More tests => 38;
BEGIN { use_ok("IkiWiki"); }
%config=IkiWiki::defaultconfig();
$config{srcdir}=$config{destdir}="/dev/null";
IkiWiki::checkconfig();
$pagesources{"foo$_"}="foo$_.mdwn" for 0..9;
# avoids adding an unparseable pagespec
ok(! add_depends("foo0", "foo and (bar"));
ok(! add_depends("foo0", "foo another"));
# simple and not-so-simple dependencies split
ok(add_depends("foo0", "*"));
ok(add_depends("foo0", "bar"));
ok(add_depends("foo0", "BAZ"));
ok(exists $IkiWiki::depends_simple{foo0}{"bar"});
ok(exists $IkiWiki::depends_simple{foo0}{"baz"}); # lowercase
ok(! exists $IkiWiki::depends_simple{foo0}{"*"});
ok(! exists $IkiWiki::depends{foo0}{"bar"});
ok(! exists $IkiWiki::depends{foo0}{"baz"});
# default dependencies are content dependencies
ok($IkiWiki::depends{foo0}{"*"} & $IkiWiki::DEPEND_CONTENT);
ok(! ($IkiWiki::depends{foo0}{"*"} & ($IkiWiki::DEPEND_PRESENCE | $IkiWiki::DEPEND_LINKS)));
ok($IkiWiki::depends_simple{foo0}{"bar"} & $IkiWiki::DEPEND_CONTENT);
ok(! ($IkiWiki::depends_simple{foo0}{"bar"} & ($IkiWiki::DEPEND_PRESENCE | $IkiWiki::DEPEND_LINKS)));
# adding other dep types standalone
ok(add_depends("foo2", "*", deptype("presence")));
ok(add_depends("foo2", "bar", deptype("links")));
ok($IkiWiki::depends{foo2}{"*"} & $IkiWiki::DEPEND_PRESENCE);
ok(! ($IkiWiki::depends{foo2}{"*"} & ($IkiWiki::DEPEND_CONTENT | $IkiWiki::DEPEND_LINKS)));
ok($IkiWiki::depends_simple{foo2}{"bar"} & $IkiWiki::DEPEND_LINKS);
ok(! ($IkiWiki::depends_simple{foo2}{"bar"} & ($IkiWiki::DEPEND_PRESENCE | $IkiWiki::DEPEND_CONTENT)));
# adding combined dep types
ok(add_depends("foo2", "baz", deptype("links", "presence")));
ok($IkiWiki::depends_simple{foo2}{"baz"} & $IkiWiki::DEPEND_LINKS);
ok($IkiWiki::depends_simple{foo2}{"baz"} & $IkiWiki::DEPEND_PRESENCE);
ok(! ($IkiWiki::depends_simple{foo2}{"baz"} & $IkiWiki::DEPEND_CONTENT));
# adding dep types to existing dependencies should merge the flags
ok(add_depends("foo2", "baz"));
ok($IkiWiki::depends_simple{foo2}{"baz"} & $IkiWiki::DEPEND_LINKS);
ok($IkiWiki::depends_simple{foo2}{"baz"} & $IkiWiki::DEPEND_PRESENCE);
ok(($IkiWiki::depends_simple{foo2}{"baz"} & $IkiWiki::DEPEND_CONTENT));
ok(add_depends("foo2", "bar", deptype("presence"))); # had only links before
ok($IkiWiki::depends_simple{foo2}{"bar"} & ($IkiWiki::DEPEND_LINKS | $IkiWiki::DEPEND_PRESENCE));
ok(! ($IkiWiki::depends_simple{foo2}{"bar"} & $IkiWiki::DEPEND_CONTENT));
ok(add_depends("foo0", "bar", deptype("links"))); # had only content before
ok($IkiWiki::depends{foo0}{"*"} & ($IkiWiki::DEPEND_CONTENT | $IkiWiki::DEPEND_LINKS));
ok(! ($IkiWiki::depends{foo0}{"*"} & $IkiWiki::DEPEND_PRESENCE));
# content is the default if unknown types are entered
ok(add_depends("foo9", "*", deptype("monkey")));
ok($IkiWiki::depends{foo9}{"*"} & $IkiWiki::DEPEND_CONTENT);
ok(! ($IkiWiki::depends{foo9}{"*"} & ($IkiWiki::DEPEND_PRESENCE | $IkiWiki::DEPEND_LINKS)));

View File

@ -1,7 +1,7 @@
#!/usr/bin/perl
use warnings;
use strict;
use Test::More tests => 54;
use Test::More tests => 64;
BEGIN { use_ok("IkiWiki"); }
@ -88,3 +88,24 @@ ok(! pagespec_match("foo", "no_such_function(foo)"), "foo");
my $ret=pagespec_match("foo", "(invalid");
ok(! $ret, "syntax error");
ok($ret =~ /syntax error/, "error message");
$ret=pagespec_match("foo", "bar or foo");
ok($ret, "simple match");
is($ret, "foo matches foo", "stringified return");
my $i=pagespec_match("foo", "link(bar)")->influences;
is(join(",", keys %$i), 'foo', "link is influenced by the page with the link");
$i=pagespec_match("bar", "backlink(foo)")->influences;
is(join(",", keys %$i), 'foo', "backlink is influenced by the page with the link");
$i=pagespec_match("bar", "backlink(foo)")->influences;
is(join(",", keys %$i), 'foo', "backlink is influenced by the page with the link");
$i=pagespec_match("bar", "created_before(foo)")->influences;
is(join(",", keys %$i), 'foo', "created_before is influenced by the comparison page");
$i=pagespec_match("bar", "created_after(foo)")->influences;
is(join(",", keys %$i), 'foo', "created_after is influenced by the comparison page");
$i=pagespec_match("foo", "link(baz) and created_after(bar)")->influences;
is(join(",", sort keys %$i), 'bar,foo', "influences add up over AND");
$i=pagespec_match("foo", "link(baz) and created_after(bar)")->influences;
is(join(",", sort keys %$i), 'bar,foo', "influences add up over OR");
$i=pagespec_match("foo", "!link(baz) and !created_after(bar)")->influences;
is(join(",", sort keys %$i), 'bar,foo', "influences unaffected by negation");

View File

@ -0,0 +1,112 @@
#!/usr/bin/perl
use warnings;
use strict;
use Test::More tests => 88;
BEGIN { use_ok("IkiWiki"); }
%config=IkiWiki::defaultconfig();
$config{srcdir}=$config{destdir}="/dev/null";
IkiWiki::checkconfig();
%pagesources=(
foo => "foo.mdwn",
foo2 => "foo2.mdwn",
foo3 => "foo3.mdwn",
bar => "bar.mdwn",
"post/1" => "post/1.mdwn",
"post/2" => "post/2.mdwn",
"post/3" => "post/3.mdwn",
);
$links{foo}=[qw{post/1 post/2}];
$links{foo2}=[qw{bar}];
$links{foo3}=[qw{bar}];
is_deeply([pagespec_match_list("foo", "bar")], ["bar"]);
is_deeply([sort(pagespec_match_list("foo", "* and !post/*"))], ["bar", "foo", "foo2", "foo3"]);
is_deeply([sort(pagespec_match_list("foo", "post/*"))], ["post/1", "post/2", "post/3"]);
is_deeply([pagespec_match_list("foo", "post/*", sort => "title", reverse => 1)],
["post/3", "post/2", "post/1"]);
is_deeply([pagespec_match_list("foo", "post/*", sort => "title", num => 2)],
["post/1", "post/2"]);
is_deeply([pagespec_match_list("foo", "post/*", sort => "title", num => 50)],
["post/1", "post/2", "post/3"]);
is_deeply([pagespec_match_list("foo", "post/*", sort => "title",
filter => sub { $_[0] =~ /3/}) ],
["post/1", "post/2"]);
my $r=eval { pagespec_match_list("foo", "beep") };
ok(eval { pagespec_match_list("foo", "beep") } == 0);
ok(! $@, "does not fail with error when unable to match anything");
eval { pagespec_match_list("foo", "this is not a legal pagespec!") };
ok($@, "fails with error when pagespec bad");
# A pagespec that requires page metadata should add influences
# as an explicit dependency. In the case of a link, a links dependency.
foreach my $spec ("* and link(bar)", "* or link(bar)") {
pagespec_match_list("foo2", $spec, deptype => deptype("presence"));
ok($IkiWiki::depends{foo2}{$spec} & $IkiWiki::DEPEND_PRESENCE);
ok(! ($IkiWiki::depends{foo2}{$spec} & ($IkiWiki::DEPEND_CONTENT | $IkiWiki::DEPEND_LINKS)));
ok($IkiWiki::depends_simple{foo2}{foo2} == $IkiWiki::DEPEND_LINKS);
%IkiWiki::depends_simple=();
%IkiWiki::depends=();
pagespec_match_list("foo3", $spec, deptype => deptype("links"));
ok($IkiWiki::depends{foo3}{$spec} & $IkiWiki::DEPEND_LINKS);
ok(! ($IkiWiki::depends{foo3}{$spec} & ($IkiWiki::DEPEND_CONTENT | $IkiWiki::DEPEND_PRESENCE)));
ok($IkiWiki::depends_simple{foo3}{foo3} == $IkiWiki::DEPEND_LINKS);
%IkiWiki::depends_simple=();
%IkiWiki::depends=();
}
# a pagespec with backlink() will add as an influence the page with the links
foreach my $spec ("bar or (backlink(foo) and !*.png)", "backlink(foo)") {
pagespec_match_list("foo2", $spec, deptype => deptype("presence"));
ok($IkiWiki::depends{foo2}{$spec} & $IkiWiki::DEPEND_PRESENCE);
ok(! ($IkiWiki::depends{foo2}{$spec} & ($IkiWiki::DEPEND_CONTENT | $IkiWiki::DEPEND_LINKS)));
ok($IkiWiki::depends_simple{foo2}{foo} == $IkiWiki::DEPEND_LINKS);
%IkiWiki::depends_simple=();
%IkiWiki::depends=();
pagespec_match_list("foo2", $spec, deptype => deptype("links"));
ok($IkiWiki::depends{foo2}{$spec} & $IkiWiki::DEPEND_LINKS);
ok(! ($IkiWiki::depends{foo2}{$spec} & ($IkiWiki::DEPEND_PRESENCE | $IkiWiki::DEPEND_CONTENT)));
ok($IkiWiki::depends_simple{foo2}{foo} == $IkiWiki::DEPEND_LINKS);
%IkiWiki::depends_simple=();
%IkiWiki::depends=();
pagespec_match_list("foo2", $spec, deptype => deptype("presence", "links"));
ok($IkiWiki::depends{foo2}{$spec} & $IkiWiki::DEPEND_PRESENCE);
ok($IkiWiki::depends{foo2}{$spec} & $IkiWiki::DEPEND_LINKS);
ok(! ($IkiWiki::depends{foo2}{$spec} & $IkiWiki::DEPEND_CONTENT));
ok($IkiWiki::depends_simple{foo2}{foo} == $IkiWiki::DEPEND_LINKS);
%IkiWiki::depends_simple=();
%IkiWiki::depends=();
pagespec_match_list("foo2", $spec);
ok($IkiWiki::depends{foo2}{$spec} & $IkiWiki::DEPEND_CONTENT);
ok(! ($IkiWiki::depends{foo2}{$spec} & ($IkiWiki::DEPEND_PRESENCE | $IkiWiki::DEPEND_LINKS)));
ok($IkiWiki::depends_simple{foo2}{foo} == $IkiWiki::DEPEND_LINKS);
%IkiWiki::depends_simple=();
%IkiWiki::depends=();
}
# Hard fails due to a glob, etc., will block influences of other anded terms.
foreach my $spec ("nosuchpage and link(bar)", "link(bar) and nosuchpage",
"link(bar) and */Discussion", "*/Discussion and link(bar)",
"!foo2 and link(bar)", "link(bar) and !foo2") {
pagespec_match_list("foo2", $spec, deptype => deptype("presence"));
ok($IkiWiki::depends{foo2}{$spec} & $IkiWiki::DEPEND_PRESENCE);
ok(! ($IkiWiki::depends{foo2}{$spec} & ($IkiWiki::DEPEND_CONTENT | $IkiWiki::DEPEND_LINKS)));
ok(! exists $IkiWiki::depends_simple{foo2}{foo2}, "no influence from $spec");
%IkiWiki::depends_simple=();
%IkiWiki::depends=();
}
# A hard fail will not block influences of other ored terms.
foreach my $spec ("nosuchpage or link(bar)", "link(bar) or nosuchpage",
"link(bar) or */Discussion", "*/Discussion or link(bar)",
"!foo2 or link(bar)", "link(bar) or !foo2",
"link(bar) or (!foo2 and !foo1)") {
pagespec_match_list("foo2", $spec, deptype => deptype("presence"));
ok($IkiWiki::depends{foo2}{$spec} & $IkiWiki::DEPEND_PRESENCE);
ok(! ($IkiWiki::depends{foo2}{$spec} & ($IkiWiki::DEPEND_CONTENT | $IkiWiki::DEPEND_LINKS)));
ok($IkiWiki::depends_simple{foo2}{foo2} == $IkiWiki::DEPEND_LINKS);
%IkiWiki::depends_simple=();
%IkiWiki::depends=();
}

View File

@ -0,0 +1,84 @@
#!/usr/bin/perl
use warnings;
use strict;
use Test::More tests => 138;
BEGIN { use_ok("IkiWiki"); }
# Note that new objects have to be constructed freshly for each test, since
# object states are mutated as they are combined.
sub S { IkiWiki::SuccessReason->new("match", @_) }
sub F { IkiWiki::FailReason->new("no match", @_) }
sub E { IkiWiki::ErrorReason->new("error in matching", @_) }
ok(S() eq "match");
ok(F() eq "no match");
ok(E() eq "error in matching");
ok(S());
ok(! F());
ok(! E());
ok(!(! S()));
ok(!(!(! F())));
ok(!(!(! E)));
ok(S() | F());
ok(F() | S());
ok(!(F() | E()));
ok(!(!S() | F() | E()));
ok(S() & S() & S());
ok(!(S() & E()));
ok(!(S() & F()));
ok(!(S() & F() & E()));
ok(S() & (F() | F() | S()));
# influence merging tests
foreach my $test (
['$s | $f' => 1], # OR merges
['! $s | ! $f' => 1], # OR merges with negated terms too
['!(!(!$s)) | $f' => 1],# OR merges with multiple negation too
['$s | $f | E()' => 1], # OR merges, even though E() has no influences
['$s | E() | $f' => 1], # ditto
['E() | $s | $f' => 1], # ditto
['!$s | !$f | E()' => 1],# negated terms also do not block merges
['!$s | E() | $f' => 1],# ditto
['E() | $s | !$f' => 1],# ditto
['$s & $f' => 1], # AND merges if both items have influences
['!$s & $f' => 1], # AND merges negated terms too
['$s & !$f' => 1], # AND merges negated terms too
['$s & $f & E()' => 0], # AND fails to merge since E() has no influences
['$s & E() & $f' => 0], # ditto
['E() & $s & $f' => 0], # ditto
) {
my $op=$test->[0];
my $influence=$test->[1];
my $s=S(foo => 1, bar => 1);
is($s->influences->{foo}, 1);
is($s->influences->{bar}, 1);
my $f=F(bar => 2, baz => 1);
is($f->influences->{bar}, 2);
is($f->influences->{baz}, 1);
my $c = eval $op;
ok(ref $c);
if ($influence) {
is($c->influences->{foo}, 1, "foo ($op)");
is($c->influences->{bar}, (1 | 2), "bar ($op)");
is($c->influences->{baz}, 1, "baz ($op)");
}
else {
ok(! %{$c->influences}, "no influence for ($op)");
}
}
my $s=S(foo => 0, bar => 1);
$s->influences(baz => 1);
ok(! $s->influences->{foo}, "removed 0 influence");
ok(! $s->influences->{bar}, "removed 1 influence");
ok($s->influences->{baz}, "set influence");
ok($s->influences_static);
$s=S(foo => 0, bar => 1);
$s->influences(baz => 1, "" => 1);
ok(! $s->influences_static);

View File

@ -1,7 +1,7 @@
#!/usr/bin/perl
use warnings;
use strict;
use Test::More tests => 10;
use Test::More tests => 11;
BEGIN { use_ok("IkiWiki"); }
@ -19,3 +19,5 @@ ok(IkiWiki::yesno("NO") == 0);
ok(IkiWiki::yesno("1") == 1);
ok(IkiWiki::yesno("0") == 0);
ok(IkiWiki::yesno("mooooooooooo") == 0);
ok(IkiWiki::yesno(undef) == 0);

View File

@ -0,0 +1,11 @@
Some [[directives|ikiwiki/directive]] that use
[[PageSpecs|ikiwiki/pagespec]] allow
specifying the order that matching pages are shown in. The following sort
orders can be specified.
* `age` - List pages from the most recently created to the oldest.
* `mtime` - List pages with the most recently modified first.
* `title` - Order by title.
* `title_natural` - Only available if [[!cpan Sort::Naturally]] is
installed. Orders by title, but numbers in the title are treated
as such ("1 2 9 10 20" instead of "1 10 2 20 9").