#!/usr/bin/perl
# Page inlining and blogging.
package IkiWiki::Plugin::inline;
use warnings;
use strict;
use Encode;
use IkiWiki 3.00;
use URI;
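# Package-level state: %knownfeeds maps a feed's identity to its file
# suffix, %page_numfeeds counts feeds per destination page so multiple
# feeds get unique filenames, @inline stashes rendered inline content
# until the format hook substitutes it back in, and $nested tracks
# recursion depth when inlined pages themselves contain inlines.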
my %knownfeeds;
my %page_numfeeds;
my @inline;
my $nested=0;
sub import {
hook(type => "getopt", id => "inline", call => \&getopt);
hook(type => "getsetup", id => "inline", call => \&getsetup);
hook(type => "checkconfig", id => "inline", call => \&checkconfig);
hook(type => "sessioncgi", id => "inline", call => \&sessioncgi);
hook(type => "preprocess", id => "inline",
call => \&IkiWiki::preprocess_inline, scan => 1);
hook(type => "pagetemplate", id => "inline",
call => \&IkiWiki::pagetemplate_inline);
hook(type => "format", id => "inline", call => \&format, first => 1);
# Hook into the rendered stage to do the pinging, since it runs late.
# This ensures each page only pings once and prevents slow
# pings from interrupting page builds.
hook(type => "rendered", id => "inline", call => \&IkiWiki::pingurl);
}
sub getopt () {
eval q{use Getopt::Long};
error($@) if $@;
Getopt::Long::Configure('pass_through');
GetOptions(
"rss!" => \$config{rss},
"atom!" => \$config{atom},
"allowrss!" => \$config{allowrss},
"allowatom!" => \$config{allowatom},
"pingurl=s" => sub {
push @{$config{pingurl}}, $_[1];
},
);
}
sub getsetup () {
return
plugin => {
safe => 1,
rebuild => undef,
section => "core",
},
rss => {
type => "boolean",
example => 0,
description => "enable rss feeds by default?",
safe => 1,
rebuild => 1,
},
atom => {
type => "boolean",
example => 0,
description => "enable atom feeds by default?",
safe => 1,
rebuild => 1,
},
allowrss => {
type => "boolean",
example => 0,
description => "allow rss feeds to be used?",
safe => 1,
rebuild => 1,
},
allowatom => {
type => "boolean",
example => 0,
description => "allow atom feeds to be used?",
safe => 1,
rebuild => 1,
},
pingurl => {
type => "string",
example => "http://rpc.technorati.com/rpc/ping",
description => "urls to ping (using XML-RPC) on feed update",
safe => 1,
rebuild => 0,
},
}
sub checkconfig () {
if (($config{rss} || $config{atom}) && ! length $config{url}) {
error(gettext("Must specify url to wiki with --url when using --rss or --atom"));
}
if ($config{rss}) {
push @{$config{wiki_file_prune_regexps}}, qr/\.rss$/;
}
if ($config{atom}) {
push @{$config{wiki_file_prune_regexps}}, qr/\.atom$/;
}
if (! exists $config{pingurl}) {
$config{pingurl}=[];
}
}
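# The preprocess hook stashes rendered inline content in @inline and
# emits a placeholder div (<div class="inline" id="0"></div>); this
# format hook swaps the real content back in after the page body has
# been htmlized and sanitized. It registers with first => 1 so that
# other format hooks see the inlined content too.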
sub format (@) {
my %params=@_;
# Fill in the inline content generated earlier. This is actually an
# optimisation: the inlined pages were already htmlized and sanitized
# by preprocess_inline, so substituting them in at format time avoids
# processing them a second time.
$params{content}=~s{<div class="inline" id="([^"]+)"></div>}{
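# deleting the array slice returns the stashed content,
# which becomes the replacement for the placeholder div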
delete @inline[$1,]
}eg;
return $params{content};
}
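# Handle the blog post form: turn the submitted title into a unique
# page name under the page the form was submitted from, then hand
# off to the editpage plugin to create it.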
sub sessioncgi ($$) {
my $q=shift;
my $session=shift;
if ($q->param('do') eq 'blog') {
my $page=titlepage(decode_utf8($q->param('title')));
$page=~s/(\/)/"__".ord($1)."__"/eg; # don't create subdirs
# if the page already exists, munge it to be unique
my $from=$q->param('from');
my $add="";
while (exists $IkiWiki::pagecase{lc($from."/".$page.$add)}) {
$add=1 unless length $add;
$add++;
}
$q->param('page', "/$from/$page$add");
# now go create the page
$q->param('do', 'create');
# make sure the editpage plugin is loaded
if (IkiWiki->can("cgi_editpage")) {
IkiWiki::cgi_editpage($q, $session);
}
else {
error(gettext("page editing not allowed"));
}
exit;
}
}
# Back to the IkiWiki namespace for the rest; this code is very much
# internal to ikiwiki, even though it's separated out into a plugin.
package IkiWiki;
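# %toping records pages whose feeds were regenerated and so need
# pinging; %feedlinks holds the <link> tags for each page's header.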
my %toping;
my %feedlinks;
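# Handle the [[!inline]] directive: expand the PageSpec (or pagenames
# list) into a list of pages, render each through a template, optionally
# prepend a blog post form or feed links, and write out any rss/atom
# feed files. An illustrative use on a blog front page might be:
#   [[!inline pages="blog/* and !*/Discussion" show="10"]]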
sub preprocess_inline (@) {
my %params=@_;
if (! exists $params{pages} && ! exists $params{pagenames}) {
error gettext("missing pages parameter");
}
if (! defined wantarray) {
# Running in scan mode: only do the essentials
if (yesno($params{trail}) && IkiWiki::Plugin::trail->can("preprocess_trailitems")) {
# default to sorting age, the same as inline itself,
# but let the params override that
IkiWiki::Plugin::trail::preprocess_trailitems(sort => 'age', %params);
}
return;
}
if (yesno($params{trail}) && IkiWiki::Plugin::trail->can("preprocess_trailitems")) {
scalar IkiWiki::Plugin::trail::preprocess_trailitems(sort => 'age', %params);
}
my $raw=yesno($params{raw});
my $archive=yesno($params{archive});
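# A directive's rss/atom parameters are honored only when feeds are
# enabled sitewide (rss/atom) or explicitly allowed (allowrss/allowatom);
# otherwise the sitewide setting wins.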
my $rss=(($config{rss} || $config{allowrss}) && exists $params{rss}) ? yesno($params{rss}) : $config{rss};
my $atom=(($config{atom} || $config{allowatom}) && exists $params{atom}) ? yesno($params{atom}) : $config{atom};
my $quick=exists $params{quick} ? yesno($params{quick}) : 0;
my $feeds=exists $params{feeds} ? yesno($params{feeds}) : !$quick && ! $raw;
my $emptyfeeds=exists $params{emptyfeeds} ? yesno($params{emptyfeeds}) : 1;
my $feedonly=yesno($params{feedonly});
if (! exists $params{show} && ! $archive) {
$params{show}=10;
}
if (! exists $params{feedshow} && exists $params{show}) {
$params{feedshow}=$params{show};
}
my $desc;
if (exists $params{description}) {
$desc = $params{description};
}
else {
$desc = $config{wikiname};
}
my $actions=yesno($params{actions});
if (exists $params{template}) {
$params{template}=~s/[^-_a-zA-Z0-9]+//g;
}
else {
$params{template} = $archive ? "archivepage" : "inlinepage";
}
my @list;
if (exists $params{pagenames}) {
foreach my $p (qw(sort pages)) {
if (exists $params{$p}) {
error sprintf(gettext("the %s and %s parameters cannot be used together"),
"pagenames", $p);
}
}
@list = split ' ', $params{pagenames};
if (yesno($params{reverse})) {
@list=reverse(@list);
}
foreach my $p (@list) {
add_depends($params{page}, $p, deptype($quick ? "presence" : "content"));
}
@list = grep { exists $pagesources{$_} } @list;
}
else {
my $num=0;
if ($params{show}) {
$num=$params{show};
}
if ($params{feedshow} && $num < $params{feedshow} && $num > 0) {
$num=$params{feedshow};
}
if ($params{skip} && $num) {
$num+=$params{skip};
}
@list = pagespec_match_list($params{page}, $params{pages},
deptype => deptype($quick ? "presence" : "content"),
filter => sub { $_[0] eq $params{page} },
sort => exists $params{sort} ? $params{sort} : "age",
reverse => yesno($params{reverse}),
($num ? (num => $num) : ()),
);
}
if (exists $params{skip}) {
@list=@list[$params{skip} .. $#list];
}
my @feedlist;
if ($feeds) {
if (exists $params{feedshow} &&
$params{feedshow} && @list > $params{feedshow}) {
@feedlist=@list[0..$params{feedshow} - 1];
}
else {
@feedlist=@list;
}
}
if ($params{show} && @list > $params{show}) {
@list=@list[0..$params{show} - 1];
}
if ($feeds && exists $params{feedpages}) {
@feedlist = pagespec_match_list(
$params{page}, "($params{pages}) and ($params{feedpages})",
deptype => deptype($quick ? "presence" : "content"),
list => \@feedlist,
);
}
my ($feedbase, $feednum);
if ($feeds) {
# Ensure that multiple feeds on a page go to unique files.
# Feedfile can lead to conflicts if usedirs is not enabled,
# so avoid supporting it in that case.
delete $params{feedfile} if ! $config{usedirs};
# Tight limits on legal feedfiles, to avoid security issues
# and conflicts.
if (defined $params{feedfile}) {
if ($params{feedfile} =~ /\// ||
$params{feedfile} !~ /$config{wiki_file_regexp}/) {
error("illegal feedfile");
}
$params{feedfile}=possibly_foolish_untaint($params{feedfile});
}
$feedbase=targetpage($params{page}, "", $params{feedfile});
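# A feed's identity is its target file plus every directive parameter,
# so identical inlines share a feed file while differing inlines on the
# same page get numbered suffixes (e.g. index.rss, index.rss2, ...).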
my $feedid=join("\0", $feedbase, map { $_."\0".$params{$_} } sort keys %params);
if (exists $knownfeeds{$feedid}) {
$feednum=$knownfeeds{$feedid};
}
else {
if (exists $page_numfeeds{$params{destpage}}{$feedbase}) {
if ($feeds) {
$feednum=$knownfeeds{$feedid}=++$page_numfeeds{$params{destpage}}{$feedbase};
}
}
else {
$feednum=$knownfeeds{$feedid}="";
if ($feeds) {
$page_numfeeds{$params{destpage}}{$feedbase}=1;
}
}
}
}
my ($rssurl, $atomurl, $rssdesc, $atomdesc);
if ($feeds) {
if ($rss) {
$rssurl=abs2rel($feedbase."rss".$feednum, dirname(htmlpage($params{destpage})));
$rssdesc = sprintf(gettext("%s (RSS feed)"), $desc);
}
if ($atom) {
$atomurl=abs2rel($feedbase."atom".$feednum, dirname(htmlpage($params{destpage})));
$atomdesc = sprintf(gettext("%s (Atom feed)"), $desc);
}
}
my $ret="";
if (length $config{cgiurl} && ! $params{preview} && (exists $params{rootpage} ||
(exists $params{postform} && yesno($params{postform}))) &&
IkiWiki->can("cgi_editpage")) {
# Add a blog post form, with feed buttons.
my $formtemplate=template_depends("blogpost.tmpl", $params{page}, blind_cache => 1);
$formtemplate->param(cgiurl => IkiWiki::cgiurl());
$formtemplate->param(rootpage => rootpage(%params));
if ($feeds) {
if ($rss) {
$formtemplate->param(rssurl => $rssurl);
$formtemplate->param(rssdesc => $rssdesc);
}
if ($atom) {
$formtemplate->param(atomurl => $atomurl);
$formtemplate->param(atomdesc => $atomdesc);
}
}
if (exists $params{postformtext}) {
$formtemplate->param(postformtext =>
$params{postformtext});
}
else {
$formtemplate->param(postformtext =>
gettext("Add a new post titled:"));
}
if (exists $params{id}) {
$formtemplate->param(postformid =>
$params{id});
}
$ret.=$formtemplate->output;
# The post form includes the feed buttons, so
# emptyfeeds cannot be hidden.
$emptyfeeds=1;
}
elsif ($feeds && !$params{preview} && ($emptyfeeds || @feedlist)) {
# Add feed buttons.
my $linktemplate=template_depends("feedlink.tmpl", $params{page}, blind_cache => 1);
if ($rss) {
$linktemplate->param(rssurl => $rssurl);
$linktemplate->param(rssdesc => $rssdesc);
}
if ($atom) {
$linktemplate->param(atomurl => $atomurl);
$linktemplate->param(atomdesc => $atomdesc);
}
if (exists $params{id}) {
$linktemplate->param(id => $params{id});
}
$ret.=$linktemplate->output;
}
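# Unless only the feeds were requested, render each listed page,
# either through the template or (in raw mode) as bare page content.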
if (! $feedonly) {
my $template;
if (! $raw) {
# Cannot use wiki pages as templates here, because the template
# output is never sanitized, due to the format hook hack.
eval {
$template=template_depends($params{template}.".tmpl", $params{page},
blind_cache => 1);
};
if ($@) {
error sprintf(gettext("failed to process template %s"), $params{template}.".tmpl").": $@";
}
}
my $needcontent=$raw || (!($archive && $quick) && $template->query(name => 'content'));
foreach my $page (@list) {
my $file = $pagesources{$page};
my $type = pagetype($file);
if (! $raw) {
if ($needcontent) {
# Get the content before populating the
# template, since getting the content uses
# the same template if inlines are nested.
my $content=get_inline_content($page, $params{destpage});
$template->param(content => $content);
}
$template->param(pageurl => urlto($page, $params{destpage}));
$template->param(inlinepage => $page);
$template->param(title => pagetitle(basename($page)));
$template->param(ctime => displaytime($pagectime{$page}, $params{timeformat}, 1));
$template->param(mtime => displaytime($pagemtime{$page}, $params{timeformat}));
$template->param(first => 1) if $page eq $list[0];
$template->param(last => 1) if $page eq $list[$#list];
$template->param(html5 => $config{html5});
if ($actions) {
my $file = $pagesources{$page};
my $type = pagetype($file);
if ($config{discussion}) {
if ($page !~ /.*\/\Q$config{discussionpage}\E$/i &&
(length $config{cgiurl} ||
exists $pagesources{$page."/".lc($config{discussionpage})})) {
$template->param(have_actions => 1);
$template->param(discussionlink =>
htmllink($page,
$params{destpage},
$config{discussionpage},
noimageinline => 1,
forcesubpage => 1));
}
}
if (length $config{cgiurl} &&
defined $type &&
IkiWiki->can("cgi_editpage")) {
$template->param(have_actions => 1);
$template->param(editurl => cgiurl(do => "edit", page => $page));
}
}
run_hooks(pagetemplate => sub {
shift->(page => $page, destpage => $params{destpage},
template => $template,);
});
$ret.=$template->output;
$template->clear_params;
}
else {
if (defined $type) {
$ret.="\n".
linkify($page, $params{destpage},
preprocess($page, $params{destpage},
filter($page, $params{destpage},
readfile(srcfile($file)))));
}
else {
$ret.="\n".
readfile(srcfile($file));
}
}
}
}
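# Finally, write the feed files themselves and remember which pages
# need to be pinged once the build finishes.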
if ($feeds && ($emptyfeeds || @feedlist)) {
if ($rss) {
my $rssp=$feedbase."rss".$feednum;
will_render($params{destpage}, $rssp);
if (! $params{preview}) {
writefile($rssp, $config{destdir},
genfeed("rss",
$config{url}."/".$rssp, $desc, $params{guid}, $params{page}, @feedlist));
$toping{$params{destpage}}=1 unless $config{rebuild};
$feedlinks{$params{destpage}}.=qq{<link rel="alternate" type="application/rss+xml" title="$rssdesc" href="$rssurl" />};
}
}
if ($atom) {
my $atomp=$feedbase."atom".$feednum;
will_render($params{destpage}, $atomp);
if (! $params{preview}) {
writefile($atomp, $config{destdir},
genfeed("atom", $config{url}."/".$atomp, $desc, $params{guid}, $params{page}, @feedlist));
$toping{$params{destpage}}=1 unless $config{rebuild};
$feedlinks{$params{destpage}}.=qq{<link rel="alternate" type="application/atom+xml" title="$atomdesc" href="$atomurl" />};
}
}
}
clear_inline_content_cache();
return $ret if $raw || $nested;
push @inline, $ret;
return "<div class=\"inline\" id=\"$#inline\"></div>\n\n";
}
sub pagetemplate_inline (@) {
my %params=@_;
my $page=$params{page};
my $template=$params{template};
$template->param(feedlinks => $feedlinks{$page})
if exists $feedlinks{$page} && $template->query(name => "feedlinks");
}
{
my %inline_content;
my $cached_destpage="";
sub get_inline_content ($$) {
my $page=shift;
my $destpage=shift;
if (exists $inline_content{$page} && $cached_destpage eq $destpage) {
return $inline_content{$page};
}
my $file=$pagesources{$page};
my $type=pagetype($file);
my $ret="";
if (defined $type) {
$nested++;
$ret=htmlize($page, $destpage, $type,
linkify($page, $destpage,
preprocess($page, $destpage,
filter($page, $destpage,
readfile(srcfile($file))))));
$nested--;
if (isinternal($page)) {
# make inlined text of internal pages searchable
run_hooks(indexhtml => sub {
shift->(page => $page, destpage => $destpage,
content => $ret);
});
}
}
if ($cached_destpage ne $destpage) {
clear_inline_content_cache();
$cached_destpage=$destpage;
}
return $inline_content{$page}=$ret;
}
sub clear_inline_content_cache () {
%inline_content=();
}
}
# Format a timestamp as RFC 822, as used in rss feeds; the C locale
# keeps day and month names in English.
sub date_822 ($) {
my $time=shift;
my $lc_time=POSIX::setlocale(&POSIX::LC_TIME);
POSIX::setlocale(&POSIX::LC_TIME, "C");
my $ret=POSIX::strftime("%a, %d %b %Y %H:%M:%S %z", localtime($time));
POSIX::setlocale(&POSIX::LC_TIME, $lc_time);
return $ret;
}
# Companion formatter: RFC 3339 UTC timestamps, as used in atom feeds
# (genfeed calls this for cdate_3339, mdate_3339 and feeddate).
sub date_3339 ($) {
my $time=shift;
my $lc_time=POSIX::setlocale(&POSIX::LC_TIME);
POSIX::setlocale(&POSIX::LC_TIME, "C");
my $ret=POSIX::strftime("%Y-%m-%dT%H:%M:%SZ", gmtime($time));
POSIX::setlocale(&POSIX::LC_TIME, $lc_time);
return $ret;
}
sub absolute_urls ($$) {
# needed because rss sucks
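# (feed readers resolve relative urls against the feed's own url,
# not the inlined page's, so all links must be made absolute)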
my $html=shift;
my $baseurl=shift;
my $url=$baseurl;
$url=~s/[^\/]+$//;
my $urltop; # calculated if needed
my $ret="";
eval q{use HTML::Parser; use HTML::Tagset};
die $@ if $@;
my $p = HTML::Parser->new(api_version => 3);
$p->handler(default => sub { $ret.=join("", @_) }, "text");
$p->handler(start => sub {
my ($tagname, $pos, $text) = @_;
if (ref $HTML::Tagset::linkElements{$tagname}) {
while (4 <= @$pos) {
# use attribute sets from right to left
# to avoid invalidating the offsets
# when replacing the values
my ($k_offset, $k_len, $v_offset, $v_len) =
splice(@$pos, -4);
my $attrname = lc(substr($text, $k_offset, $k_len));
next unless grep { $_ eq $attrname } @{$HTML::Tagset::linkElements{$tagname}};
next unless $v_offset; # 0 v_offset means no value
my $v = substr($text, $v_offset, $v_len);
$v =~ s/^([\'\"])(.*)\1$/$2/;
eval q{use HTML::Entities};
my $dv = decode_entities($v);
if ($dv=~/^#/) {
$v=$baseurl.$v; # anchor
}
elsif ($dv=~/^(?!\w+:)[^\/]/) {
$v=$url.$v; # relative url
}
elsif ($dv=~/^\//) {
if (! defined $urltop) {
# what is the non path part of the url?
my $top_uri = URI->new($url);
$top_uri->path_query(""); # reset the path
$urltop = $top_uri->as_string;
}
$v=$urltop.$v; # url relative to top of site
}
$v =~ s/\"/&quot;/g; # since we quote with ""
substr($text, $v_offset, $v_len) = qq("$v");
}
}
$ret.=$text;
}, "tagname, tokenpos, text");
$p->parse($html);
$p->eof;
return $ret;
}
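# Fill in the enclosure fields of a feed item template. srcfile_stat
# returns the file name followed by stat(), so index 8 is the size;
# the MIME type is guessed with File::MimeInfo when it's available.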
sub genenclosure {
my $itemtemplate=shift;
my $url=shift;
my $file=shift;
return unless $itemtemplate->query(name => "enclosure");
my $size=(srcfile_stat($file))[8];
my $mime="unknown";
eval q{use File::MimeInfo};
if (! $@) {
$mime = mimetype($file);
}
$itemtemplate->param(
enclosure => $url,
type => $mime,
length => $size,
);
}
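# Generate an rss or atom feed for the given pages, rendering each
# item through the rssitem/atomitem template and the feed as a whole
# through the rsspage/atompage template.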
sub genfeed ($$$$$@) {
my $feedtype=shift;
my $feedurl=shift;
my $feeddesc=shift;
my $guid=shift;
my $page=shift;
my @pages=@_;
my $url=URI->new(encode_utf8(urlto($page,"",1)));
my $itemtemplate=template_depends($feedtype."item.tmpl", $page, blind_cache => 1);
my $content="";
my $lasttime = 0;
foreach my $p (@pages) {
my $u=URI->new(encode_utf8(urlto($p, "", 1)));
my $pcontent = absolute_urls(get_inline_content($p, $page), $url);
my $fancy_enclosure_seen = 0;
$itemtemplate->param(
title => pagetitle(basename($p)),
url => $u,
permalink => $u,
cdate_822 => date_822($pagectime{$p}),
mdate_822 => date_822($pagemtime{$p}),
cdate_3339 => date_3339($pagectime{$p}),
mdate_3339 => date_3339($pagemtime{$p}),
);
if (exists $pagestate{$p}) {
if (exists $pagestate{$p}{meta}{guid}) {
eval q{use HTML::Entities};
$itemtemplate->param(guid => HTML::Entities::encode_numeric($pagestate{$p}{meta}{guid}));
}
if (exists $pagestate{$p}{meta}{updated}) {
$itemtemplate->param(mdate_822 => date_822($pagestate{$p}{meta}{updated}));
$itemtemplate->param(mdate_3339 => date_3339($pagestate{$p}{meta}{updated}));
}
if (exists $pagestate{$p}{meta}{enclosure}) {
my $absurl = $pagestate{$p}{meta}{enclosure};
# XXX better way to compute relative to srcdir?
my $file = $absurl;
$file =~ s|^$config{url}/||;
genenclosure($itemtemplate, $absurl, $file);
$fancy_enclosure_seen = 1;
}
}
my $file=$pagesources{$p};
unless ($fancy_enclosure_seen || defined(pagetype($file))) {
genenclosure($itemtemplate, $u, $file);
$itemtemplate->param(simplepodcast => 1);
}
$itemtemplate->param(content => $pcontent);
run_hooks(pagetemplate => sub {
shift->(page => $p, destpage => $page,
template => $itemtemplate);
});
$content.=$itemtemplate->output;
$itemtemplate->clear_params;
$lasttime = $pagemtime{$p} if $pagemtime{$p} > $lasttime;
}
my $template=template_depends($feedtype."page.tmpl", $page, blind_cache => 1);
$template->param(
title => $page ne "index" ? pagetitle($page) : $config{wikiname},
wikiname => $config{wikiname},
pageurl => $url,
content => $content,
feeddesc => $feeddesc,
guid => $guid,
feeddate => date_3339($lasttime),
feedurl => $feedurl,
);
run_hooks(pagetemplate => sub {
shift->(page => $page, destpage => $page,
template => $template);
});
return $template->output;
}
sub pingurl (@) {
return unless @{$config{pingurl}} && %toping;
eval q{require RPC::XML::Client};
if ($@) {
debug(gettext("RPC::XML::Client not found, not pinging"));
return;
}
# daemonize here so slow pings don't slow down wiki updates
defined(my $pid = fork) or error("Can't fork: $!");
return if $pid;
chdir '/';
POSIX::setsid() or error("Can't start a new session: $!");
open STDIN, '/dev/null';
open STDOUT, '>/dev/null';
open STDERR, '>&STDOUT' or error("Can't dup stdout: $!");
# Don't need to keep a lock on the wiki as a daemon.
IkiWiki::unlockwiki();
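# Ping each configured server with the standard weblogUpdates.ping
# XML-RPC call, passing the page's title and url.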
foreach my $page (keys %toping) {
my $title=pagetitle(basename($page), 0);
my $url=urlto($page, "", 1);
foreach my $pingurl (@{$config{pingurl}}) {
debug("Pinging $pingurl for $page");
eval {
my $client = RPC::XML::Client->new($pingurl);
my $req = RPC::XML::request->new('weblogUpdates.ping',
$title, $url);
my $res = $client->send_request($req);
if (! ref $res) {
error("Did not receive response to ping");
}
my $r=$res->value;
if (! exists $r->{flerror} || $r->{flerror}) {
error("Ping rejected: ".(exists $r->{message} ? $r->{message} : "[unknown reason]"));
}
};
if ($@) {
error "Ping failed: $@";
}
}
}
exit 0; # daemon done
}
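# Determine where the blog post form will create new pages: the
# rootpage parameter when set (resolved via bestlink when possible),
# otherwise the page containing the inline directive.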
sub rootpage (@) {
my %params=@_;
my $rootpage;
if (exists $params{rootpage}) {
$rootpage=bestlink($params{page}, $params{rootpage});
if (!length $rootpage) {
$rootpage=$params{rootpage};
}
}
else {
$rootpage=$params{page};
}
return $rootpage;
}
1