Avoid multiple ikiwiki CGI processes piling up, eating all memory, and thrashing.
Fixed by making the CGI wrapper wait on a cgilock. If you had to set Apache's MaxClients low to avoid ikiwiki thrashing your server, you can now turn it up to a high value. The downside is that any CGI call that doesn't need to call lockwiki (for example, do=search) will also be serialised by this lock, so only one can run at a time. There are few such calls, and all of them call loadindex, so each still eats gobs of memory; serialising them therefore still seems acceptable.
master
parent
6611f3a2d9
commit
9a48669f1e
|
@ -44,6 +44,7 @@ EOF
|
|||
}
|
||||
|
||||
my $check_commit_hook="";
|
||||
my $pre_exec="";
|
||||
if ($config{post_commit}) {
|
||||
# Optimise checking !commit_hook_enabled() ,
|
||||
# so that ikiwiki does not have to be started if the
|
||||
|
@ -67,6 +68,19 @@ EOF
|
|||
}
|
||||
EOF
|
||||
}
|
||||
elsif ($config{cgi}) {
|
||||
# Avoid more than one ikiwiki cgi running at a time by
|
||||
# taking a cgi lock. Since ikiwiki uses several MB of
|
||||
# memory, a pile up of processes could cause thrashing
|
||||
# otherwise.
|
||||
$pre_exec=<<"EOF";
|
||||
{
|
||||
int fd=open("$config{wikistatedir}/cgilock", O_CREAT | O_RDWR);
|
||||
if (fd != -1)
|
||||
flock(fd, LOCK_EX);
|
||||
}
|
||||
EOF
|
||||
}
|
||||
|
||||
$Data::Dumper::Indent=0; # no newlines
|
||||
my $configstring=Data::Dumper->Dump([\%config], ['*config']);
|
||||
|
@ -122,6 +136,7 @@ $envsave
|
|||
exit(1);
|
||||
}
|
||||
|
||||
$pre_exec
|
||||
execl("$this", "$this", NULL);
|
||||
perror("exec $this");
|
||||
exit(1);
|
||||
|
|
Loading…
Reference in New Issue