aggregate: Revert use of forking to not save state, that was not the right
approach.

branch: master
parent: 72bbc41520
commit: 1f6591f0a6
--- a/IkiWiki/Plugin/aggregate.pm
+++ b/IkiWiki/Plugin/aggregate.pm
@@ -39,27 +39,11 @@ sub checkconfig () { #{{{
 		}
 
 		loadstate();
-		my @feeds=needsaggregate();
-		return unless @feeds;
-
-		# Fork a child process to handle the aggregation.
-		# The parent process will then handle building the
-		# result. This avoids messy code to clear state
-		# accumulated while aggregating.
-		defined(my $pid = fork) or error("Can't fork: $!");
-		if (! $pid) {
-			IkiWiki::loadindex();
-			aggregate(@feeds);
-			expire();
-			savestate();
-			exit 0;
-		}
-		waitpid($pid,0);
-		if ($?) {
-			error "aggregation failed with code $?";
-		}
-		$IkiWiki::forcerebuild{$_->{sourcepage}}=1
-			foreach @feeds;
+		IkiWiki::loadindex();
+		aggregate();
+		expire();
+		savestate();
+		clearstate();
 
 		IkiWiki::unlockwiki();
 	}
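The fork removed above is the crux of the revert: a forked child cannot change its parent's in-memory state, so everything the child accumulated was discarded at exit 0, and the one flag that mattered, forcing the feed's source page to be rebuilt, had to be replayed by the parent (the removed %forcerebuild loop). A minimal standalone sketch of that pitfall; the hash and page name here are illustrative, not from the commit:

	#!/usr/bin/perl
	use strict;
	use warnings;

	my %forcerebuild;
	defined(my $pid = fork) or die "Can't fork: $!";
	if (! $pid) {
		# Child: flag a page for rebuild, then exit. Only the
		# child's copy of the hash is modified.
		$forcerebuild{"feeds.mdwn"}=1;
		exit 0;
	}
	waitpid($pid, 0);
	# Prints 0: the parent never sees the child's change.
	print scalar(keys %forcerebuild), "\n";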
@@ -148,7 +132,7 @@ sub loadstate () { #{{{
 	return if $state_loaded;
 	$state_loaded=1;
 	if (-e "$config{wikistatedir}/aggregate") {
-		open(IN, "$config{wikistatedir}/aggregate") ||
+		open(IN, "<", "$config{wikistatedir}/aggregate") ||
 			die "$config{wikistatedir}/aggregate: $!";
 		while (<IN>) {
 			$_=IkiWiki::possibly_foolish_untaint($_);
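Alongside the revert, this hunk (and the matching one in savestate below) moves from two-argument to three-argument open. Two-argument open parses mode characters out of the filename string, so a value with a leading '>' or trailing '|' silently changes what the call does; the three-argument form fixes the mode independently of the name. A contrived illustration with made-up filenames:

	use strict;
	use warnings;

	my $name = ">surprise";       # a filename that looks like a mode
	open(my $bad, $name);         # 2-arg: creates/truncates "surprise" for writing
	open(my $good, "<", $name)    # 3-arg: really looks for a file named ">surprise"
		or warn "no such file: $!";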
@@ -186,7 +170,7 @@ sub savestate () { #{{{
 	error($@) if $@;
 	my $newfile="$config{wikistatedir}/aggregate.new";
 	my $cleanup = sub { unlink($newfile) };
-	open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
+	open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
 	foreach my $data (values %feeds, values %guids) {
 		if ($data->{remove}) {
 			if ($data->{name}) {
@@ -228,6 +212,12 @@ sub savestate () { #{{{
 		error("rename $newfile: $!", $cleanup);
 } #}}}
 
+sub clearstate () { #{{{
+	%feeds=();
+	%guids=();
+	$state_loaded=0;
+} #}}}
+
 sub expire () { #{{{
 	foreach my $feed (values %feeds) {
 		next unless $feed->{expireage} || $feed->{expirecount};
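clearstate() is new because the revert moves aggregation back into the main process: the exiting child used to throw away the accumulated state for free, so it now has to be dropped explicitly. The resulting lifecycle, assembled from this commit's hunks (locking and error handling elided):

	loadstate();     # read %feeds/%guids from the on-disk state, set $state_loaded
	aggregate();     # fetch feeds that are due, accumulating new state
	expire();        # apply per-feed expireage/expirecount limits
	savestate();     # write state to aggregate.new, rename into place
	clearstate();    # empty %feeds/%guids, reset $state_loaded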
@@ -259,12 +249,7 @@ sub expire () { #{{{
 	}
 } #}}}
 
-sub needsaggregate () { #{{{
-	return values %feeds if $config{rebuild};
-	return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
-} #}}}
-
-sub aggregate (@) { #{{{
+sub aggregate () { #{{{
 	eval q{use XML::Feed};
 	error($@) if $@;
 	eval q{use URI::Fetch};
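The removed needsaggregate() helper is not lost; its interval test reappears inline in the next hunk. The two shapes are equivalent, sketched here for comparison:

	# Before: filter the due feeds up front, then pass them in.
	my @feeds=needsaggregate();
	return unless @feeds;
	aggregate(@feeds);

	# After: aggregate() walks every feed and skips ones not yet due.
	foreach my $feed (values %feeds) {
		next unless $config{rebuild} ||
			time - $feed->{lastupdate} >= $feed->{updateinterval};
		# ... fetch and process $feed ...
	}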
@@ -272,12 +257,15 @@ sub aggregate (@) { #{{{
 	eval q{use HTML::Entities};
 	error($@) if $@;
 
-	foreach my $feed (@_) {
+	foreach my $feed (values %feeds) {
+		next unless $config{rebuild} ||
+			time - $feed->{lastupdate} >= $feed->{updateinterval};
 		$feed->{lastupdate}=time;
 		$feed->{newposts}=0;
 		$feed->{message}=sprintf(gettext("processed ok at %s"),
 			displaytime($feed->{lastupdate}));
 		$feed->{error}=0;
+		$IkiWiki::forcerebuild{$feed->{sourcepage}}=1;
 
 		debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));
 
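The added $IkiWiki::forcerebuild{$feed->{sourcepage}}=1 line is the payoff of the revert: with aggregation back in-process, flagging the feed's source page for rebuild is a direct hash store at the moment the feed is handled, rather than state a child process would have lost.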
--- a/debian/changelog
+++ b/debian/changelog
@@ -6,14 +6,14 @@ ikiwiki (2.31) UNRELEASED; urgency=low
     that contributes to a page's content and using the youngest of them all,
     as well as special cases for things like the version plugin, and it's just
     too complex to do.
-  * aggregate: Forking a child broke the one state that mattered: Forcing
-    the aggregating page to be rebuilt. Fix this.
   * cgi hooks are now run before ikiwiki state is loaded.
   * This allows locking the wiki before loading state, which avoids some
     tricky locking code when saving a web edit.
   * poll: This plugin turns out to have edited pages w/o doing any locking.
     Oops. Convert it from a cgi to a sessioncgi hook, which will work
     much better.
+  * aggregate: Revert use of forking to not save state, that was not the right
+    approach.
 
  -- Joey Hess <joeyh@debian.org>  Sat, 02 Feb 2008 23:36:31 -0500