#!/usr/bin/perl
# Page inlining and blogging.
package IkiWiki::Plugin::inline;

use warnings;
use strict;
use Encode;
use IkiWiki 3.00;
use URI;

my %knownfeeds;
my %page_numfeeds;
my @inline;
my $nested=0;

sub import {
    hook(type => "getopt", id => "inline", call => \&getopt);
    hook(type => "getsetup", id => "inline", call => \&getsetup);
    hook(type => "checkconfig", id => "inline", call => \&checkconfig);
    hook(type => "sessioncgi", id => "inline", call => \&sessioncgi);
    hook(type => "preprocess", id => "inline",
        call => \&IkiWiki::preprocess_inline, scan => 1);
    hook(type => "pagetemplate", id => "inline",
        call => \&IkiWiki::pagetemplate_inline);
    # inline's format hook is an optimisation hack: until it runs, the
    # inlined content is not present on the page, which can prevent other
    # format hooks that process that content (embed, color, cutpaste, ...)
    # from acting on it -- see Debian bug #509710. Hook order is otherwise
    # hash order, i.e. effectively random; normally that is fine, since
    # hooks should be order-independent, but not for inline. So this hook
    # is registered to run "first", the counterpart of the existing "last".
    # A simple first/middle/last scheme is enough for now: exactly one
    # plugin needs "last" and, so far, exactly one needs "first". Hash
    # order is deliberately preserved within each group, both so accidental
    # ordering assumptions cannot creep in and to avoid drastically
    # reshuffling the order an existing configuration already uses.
    hook(type => "format", id => "inline", call => \&format, first => 1);
    # Use the rendered hook to do pinging, since it's called late.
    # This ensures each page only pings once and prevents slow
    # pings interrupting page builds.
    hook(type => "rendered", id => "inline", call => \&IkiWiki::pingurl);
}

sub getopt () {
    eval q{use Getopt::Long};
    error($@) if $@;
    Getopt::Long::Configure('pass_through');
    GetOptions(
        "rss!" => \$config{rss},
        "atom!" => \$config{atom},
        "allowrss!" => \$config{allowrss},
        "allowatom!" => \$config{allowatom},
        "pingurl=s" => sub {
            push @{$config{pingurl}}, $_[1];
        },
    );
}
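
# A hedged usage sketch: with the switches registered above, feeds and
# pings can be toggled from the ikiwiki command line, e.g. (the setup file
# name is made up):
#   ikiwiki --setup wiki.setup --rss --atom \
#       --pingurl http://rpc.technorati.com/rpc/ping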

sub getsetup () {
    return
        plugin => {
            safe => 1,
            rebuild => undef,
            section => "core",
        },
        rss => {
            type => "boolean",
            example => 0,
            description => "enable rss feeds by default?",
            safe => 1,
            rebuild => 1,
        },
        atom => {
            type => "boolean",
            example => 0,
            description => "enable atom feeds by default?",
            safe => 1,
            rebuild => 1,
        },
        allowrss => {
            type => "boolean",
            example => 0,
            description => "allow rss feeds to be used?",
            safe => 1,
            rebuild => 1,
        },
        allowatom => {
            type => "boolean",
            example => 0,
            description => "allow atom feeds to be used?",
            safe => 1,
            rebuild => 1,
        },
        pingurl => {
            type => "string",
            example => "http://rpc.technorati.com/rpc/ping",
            description => "urls to ping (using XML-RPC) on feed update",
            safe => 1,
            rebuild => 0,
        },
}
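
# The same options expressed in a setup file -- a hedged sketch, not a
# complete ikiwiki.setup:
#   rss => 1,
#   atom => 1,
#   allowrss => 0,
#   allowatom => 0,
#   pingurl => ["http://rpc.technorati.com/rpc/ping"],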

sub checkconfig () {
    if (($config{rss} || $config{atom}) && ! length $config{url}) {
        error(gettext("Must specify url to wiki with --url when using --rss or --atom"));
    }
    if ($config{rss}) {
        push @{$config{wiki_file_prune_regexps}}, qr/\.rss$/;
    }
    if ($config{atom}) {
        push @{$config{wiki_file_prune_regexps}}, qr/\.atom$/;
    }
    if (! exists $config{pingurl}) {
        $config{pingurl}=[];
    }
}

sub format (@) {
    my %params=@_;

    # Fill in the inline content generated earlier. This is actually an
    # optimisation.
    $params{content}=~s{<div class="inline" id="([^"]+)"></div>}{
        delete @inline[$1,]
    }eg;
    return $params{content};
}
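
# Rough illustration of the hand-off above (hedged; the id is whatever
# index preprocess_inline pushed onto @inline): the page body carries a
# placeholder such as
#   <div class="inline" id="0"></div>
# until this format hook replaces it with $inline[0], after sanitization
# has run on the rest of the page.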

sub sessioncgi ($$) {
    my $q=shift;
    my $session=shift;

    if ($q->param('do') eq 'blog') {
        my $page=titlepage(decode_utf8($q->param('title')));
        $page=~s/(\/)/"__".ord($1)."__"/eg; # don't create subdirs
        # if the page already exists, munge it to be unique
        my $from=$q->param('from');
        my $add="";
        while (exists $IkiWiki::pagecase{lc($from."/".$page.$add)}) {
            $add=1 unless length $add;
            $add++;
        }
        $q->param('page', "/$from/$page$add");
        # now go create the page
        $q->param('do', 'create');
        # make sure the editpage plugin is loaded
        if (IkiWiki->can("cgi_editpage")) {
            IkiWiki::cgi_editpage($q, $session);
        }
        else {
            error(gettext("page editing not allowed"));
        }
        exit;
    }
}

# Back to ikiwiki namespace for the rest, this code is very much
# internal to ikiwiki even though it's separated into a plugin.
package IkiWiki;

my %toping;
my %feedlinks;
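
# Typical use from a wiki page, as handled by preprocess_inline below
# (a hedged example; the pagespec and parameter values are made up):
#   [[!inline pages="blog/* and !*/Discussion" show="10" feeds="yes" actions="yes"]]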

sub preprocess_inline (@) {
    my %params=@_;

    if (! exists $params{pages} && ! exists $params{pagenames}) {
        error gettext("missing pages parameter");
    }

    if (! defined wantarray) {
        # Running in scan mode: only do the essentials

        if (yesno($params{trail}) && IkiWiki::Plugin::trail->can("preprocess_trailitems")) {
            # default to sorting age, the same as inline itself,
            # but let the params override that
            IkiWiki::Plugin::trail::preprocess_trailitems(sort => 'age', %params);
        }

        return;
    }

    if (yesno($params{trail}) && IkiWiki::Plugin::trail->can("preprocess_trailitems")) {
        scalar IkiWiki::Plugin::trail::preprocess_trailitems(sort => 'age', %params);
    }

    my $raw=yesno($params{raw});
    my $archive=yesno($params{archive});
    my $rss=(($config{rss} || $config{allowrss}) && exists $params{rss}) ? yesno($params{rss}) : $config{rss};
    my $atom=(($config{atom} || $config{allowatom}) && exists $params{atom}) ? yesno($params{atom}) : $config{atom};
    my $quick=exists $params{quick} ? yesno($params{quick}) : 0;
    my $feeds=exists $params{feeds} ? yesno($params{feeds}) : !$quick && ! $raw;
    my $emptyfeeds=exists $params{emptyfeeds} ? yesno($params{emptyfeeds}) : 1;
    my $feedonly=yesno($params{feedonly});
    if (! exists $params{show} && ! $archive) {
        $params{show}=10;
    }
    if (! exists $params{feedshow} && exists $params{show}) {
        $params{feedshow}=$params{show};
    }
    my $title;
    if (exists $params{title}) {
        $title = $params{title};
    }
    else {
        $title = $params{page} ne "index" ? pagetitle($params{page}) : $config{wikiname};
    }
    my $desc;
    if (exists $params{description}) {
        $desc = $params{description};
    }
    else {
        $desc = $config{wikiname};
    }
    my $actions=yesno($params{actions});
    if (exists $params{template}) {
        $params{template}=~s/[^-_a-zA-Z0-9]+//g;
    }
    else {
        $params{template} = $archive ? "archivepage" : "inlinepage";
    }

    my @list;

    if (exists $params{pagenames}) {
        foreach my $p (qw(sort pages)) {
            if (exists $params{$p}) {
                error sprintf(gettext("the %s and %s parameters cannot be used together"),
                    "pagenames", $p);
            }
        }

        @list = split ' ', $params{pagenames};

        if (yesno($params{reverse})) {
            @list=reverse(@list);
        }

        foreach my $p (@list) {
            add_depends($params{page}, $p, deptype($quick ? "presence" : "content"));
        }

        @list = grep { exists $pagesources{$_} } @list;
    }
    else {
        my $num=0;
        if ($params{show}) {
            $num=$params{show};
        }
        if ($params{feedshow} && $num < $params{feedshow} && $num > 0) {
            $num=$params{feedshow};
        }
        if ($params{skip} && $num) {
            $num+=$params{skip};
        }

        @list = pagespec_match_list($params{page}, $params{pages},
            deptype => deptype($quick ? "presence" : "content"),
            filter => sub { $_[0] eq $params{page} },
            sort => exists $params{sort} ? $params{sort} : "age",
            reverse => yesno($params{reverse}),
            ($num ? (num => $num) : ()),
        );
    }

    if (exists $params{skip}) {
        @list=@list[$params{skip} .. $#list];
    }

    my @feedlist;
    if ($feeds) {
        if (exists $params{feedshow} &&
            $params{feedshow} && @list > $params{feedshow}) {
            @feedlist=@list[0..$params{feedshow} - 1];
        }
        else {
            @feedlist=@list;
        }
    }

    if ($params{show} && @list > $params{show}) {
        @list=@list[0..$params{show} - 1];
    }

    if ($feeds && exists $params{feedpages}) {
        @feedlist = pagespec_match_list(
            $params{page}, "($params{pages}) and ($params{feedpages})",
            deptype => deptype($quick ? "presence" : "content"),
            list => \@feedlist,
        );
    }

    my ($feedbase, $feednum);
    if ($feeds) {
        # Ensure that multiple feeds on a page go to unique files.

        # Feedfile can lead to conflicts if usedirs is not enabled,
        # so avoid supporting it in that case.
        delete $params{feedfile} if ! $config{usedirs};
        # Tight limits on legal feedfiles, to avoid security issues
        # and conflicts.
        if (defined $params{feedfile}) {
            if ($params{feedfile} =~ /\// ||
                $params{feedfile} !~ /$config{wiki_file_regexp}/) {
                error("illegal feedfile");
            }
            $params{feedfile}=possibly_foolish_untaint($params{feedfile});
        }
        $feedbase=targetpage($params{page}, "", $params{feedfile});

        my $feedid=join("\0", $feedbase, map { $_."\0".$params{$_} } sort keys %params);
        if (exists $knownfeeds{$feedid}) {
            $feednum=$knownfeeds{$feedid};
        }
        else {
            if (exists $page_numfeeds{$params{destpage}}{$feedbase}) {
                if ($feeds) {
                    $feednum=$knownfeeds{$feedid}=++$page_numfeeds{$params{destpage}}{$feedbase};
                }
            }
            else {
                $feednum=$knownfeeds{$feedid}="";
                if ($feeds) {
                    $page_numfeeds{$params{destpage}}{$feedbase}=1;
                }
            }
        }
    }
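
    # A hedged illustration of the naming above: with usedirs enabled, a
    # page "blog" containing two inline directives with feeds gets a
    # $feedbase of "blog/index.", so the first feed is written as
    # blog/index.rss (empty $feednum) and the second as blog/index.rss2.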

    my ($rssurl, $atomurl, $rssdesc, $atomdesc);
    if ($feeds) {
        if ($rss) {
            $rssurl=abs2rel($feedbase."rss".$feednum, dirname(htmlpage($params{destpage})));
            $rssdesc = sprintf(gettext("%s (RSS feed)"), $desc);
        }
        if ($atom) {
            $atomurl=abs2rel($feedbase."atom".$feednum, dirname(htmlpage($params{destpage})));
            $atomdesc = sprintf(gettext("%s (Atom feed)"), $desc);
        }
    }

    my $ret="";

    if (length $config{cgiurl} && ! $params{preview} && (exists $params{rootpage} ||
            (exists $params{postform} && yesno($params{postform}))) &&
            IkiWiki->can("cgi_editpage")) {
        # Add a blog post form, with feed buttons.
        my $formtemplate=template_depends("blogpost.tmpl", $params{page}, blind_cache => 1);
        $formtemplate->param(cgiurl => IkiWiki::cgiurl());
        $formtemplate->param(rootpage => rootpage(%params));
        if ($feeds) {
            if ($rss) {
                $formtemplate->param(rssurl => $rssurl);
                $formtemplate->param(rssdesc => $rssdesc);
            }
            if ($atom) {
                $formtemplate->param(atomurl => $atomurl);
                $formtemplate->param(atomdesc => $atomdesc);
            }
        }
        if (exists $params{postformtext}) {
            $formtemplate->param(postformtext =>
                $params{postformtext});
        }
        else {
            $formtemplate->param(postformtext =>
                gettext("Add a new post titled:"));
        }
        if (exists $params{id}) {
            $formtemplate->param(postformid =>
                $params{id});
        }
        $ret.=$formtemplate->output;

        # The post form includes the feed buttons, so
        # emptyfeeds cannot be hidden.
        $emptyfeeds=1;
    }
    elsif ($feeds && !$params{preview} && ($emptyfeeds || @feedlist)) {
        # Add feed buttons.
        my $linktemplate=template_depends("feedlink.tmpl", $params{page}, blind_cache => 1);
        if ($rss) {
            $linktemplate->param(rssurl => $rssurl);
            $linktemplate->param(rssdesc => $rssdesc);
        }
        if ($atom) {
            $linktemplate->param(atomurl => $atomurl);
            $linktemplate->param(atomdesc => $atomdesc);
        }
        if (exists $params{id}) {
            $linktemplate->param(id => $params{id});
        }
        $ret.=$linktemplate->output;
    }

    if (! $feedonly) {
        my $template;
        if (! $raw) {
            # cannot use wiki pages as templates; template not sanitized due to
            # format hook hack
            eval {
                $template=template_depends($params{template}.".tmpl", $params{page},
                    blind_cache => 1);
            };
            if ($@) {
                # gettext can clobber $@: like C's errno, $@ is fragile
                # and is reset by any later eval, and gettext() may call
                # define_gettext(), which uses eval. Copy it first so the
                # real template error survives into the message below
                # instead of silently disappearing.
                my $error = $@;
                error sprintf(gettext("failed to process template %s"), $params{template}.".tmpl").": $error";
            }
        }
        my $needcontent=$raw || (!($archive && $quick) && $template->query(name => 'content'));

        foreach my $page (@list) {
            my $file = $pagesources{$page};
            my $type = pagetype($file);
            if (! $raw) {
                if ($needcontent) {
                    # Get the content before populating the
                    # template, since getting the content uses
                    # the same template if inlines are nested.
                    my $content=get_inline_content($page, $params{destpage});
                    $template->param(content => $content);
                }
                $template->param(pageurl => urlto($page, $params{destpage}));
                $template->param(inlinepage => $page);
                $template->param(title => pagetitle(basename($page)));
                $template->param(ctime => displaytime($pagectime{$page}, $params{timeformat}, 1));
                $template->param(mtime => displaytime($pagemtime{$page}, $params{timeformat}));
                $template->param(first => 1) if $page eq $list[0];
                $template->param(last => 1) if $page eq $list[$#list];
                $template->param(html5 => $config{html5});

                if ($actions) {
                    my $file = $pagesources{$page};
                    my $type = pagetype($file);
                    if ($config{discussion}) {
                        if ($page !~ /.*\/\Q$config{discussionpage}\E$/i &&
                            (length $config{cgiurl} ||
                             exists $pagesources{$page."/".lc($config{discussionpage})})) {
                            $template->param(have_actions => 1);
                            $template->param(discussionlink =>
                                htmllink($page,
                                    $params{destpage},
                                    $config{discussionpage},
                                    noimageinline => 1,
                                    forcesubpage => 1));
                        }
                    }
                    if (length $config{cgiurl} &&
                        defined $type &&
                        IkiWiki->can("cgi_editpage")) {
                        $template->param(have_actions => 1);
                        $template->param(editurl => cgiurl(do => "edit", page => $page));
                    }
                }

                run_hooks(pagetemplate => sub {
                    shift->(page => $page, destpage => $params{destpage},
                        template => $template,);
                });

                $ret.=$template->output;
                $template->clear_params;
            }
            else {
                if (defined $type) {
                    $ret.="\n".
                        linkify($page, $params{destpage},
                            preprocess($page, $params{destpage},
                                filter($page, $params{destpage},
                                    readfile(srcfile($file)))));
                }
                else {
                    $ret.="\n".
                        readfile(srcfile($file));
                }
            }
        }
    }

    if ($feeds && ($emptyfeeds || @feedlist)) {
        if ($rss) {
            my $rssp=$feedbase."rss".$feednum;
            will_render($params{destpage}, $rssp);
            if (! $params{preview}) {
                writefile($rssp, $config{destdir},
                    genfeed("rss",
                        $config{url}."/".$rssp, $title, $desc, $params{guid}, $params{page}, @feedlist));
                $toping{$params{destpage}}=1 unless $config{rebuild};
                $feedlinks{$params{destpage}}.=qq{<link rel="alternate" type="application/rss+xml" title="$rssdesc" href="$rssurl" />};
            }
        }
        if ($atom) {
            my $atomp=$feedbase."atom".$feednum;
            will_render($params{destpage}, $atomp);
            if (! $params{preview}) {
                writefile($atomp, $config{destdir},
                    genfeed("atom", $config{url}."/".$atomp, $title, $desc, $params{guid}, $params{page}, @feedlist));
                $toping{$params{destpage}}=1 unless $config{rebuild};
                $feedlinks{$params{destpage}}.=qq{<link rel="alternate" type="application/atom+xml" title="$atomdesc" href="$atomurl" />};
            }
        }
    }

    clear_inline_content_cache();

    return $ret if $raw || $nested;
    push @inline, $ret;
    return "<div class=\"inline\" id=\"$#inline\"></div>\n\n";
}

sub pagetemplate_inline (@) {
    my %params=@_;
    my $page=$params{page};
    my $template=$params{template};

    $template->param(feedlinks => $feedlinks{$page})
        if exists $feedlinks{$page} && $template->query(name => "feedlinks");
}

{
    my %inline_content;
    my $cached_destpage="";

    sub get_inline_content ($$) {
        my $page=shift;
        my $destpage=shift;

        if (exists $inline_content{$page} && $cached_destpage eq $destpage) {
            return $inline_content{$page};
        }

        my $file=$pagesources{$page};
        my $type=pagetype($file);
        my $ret="";
        if (defined $type) {
            $nested++;
            $ret=htmlize($page, $destpage, $type,
                linkify($page, $destpage,
                    preprocess($page, $destpage,
                        filter($page, $destpage,
                            readfile(srcfile($file))))));
            $nested--;
            if (isinternal($page)) {
                # make inlined text of internal pages searchable
                run_hooks(indexhtml => sub {
                    shift->(page => $page, destpage => $destpage,
                        content => $ret);
                });
            }
        }

        if ($cached_destpage ne $destpage) {
            clear_inline_content_cache();
            $cached_destpage=$destpage;
        }
        return $inline_content{$page}=$ret;
    }

    sub clear_inline_content_cache () {
        %inline_content=();
    }
}

sub date_822 ($) {
    my $time=shift;

    my $lc_time=POSIX::setlocale(&POSIX::LC_TIME);
    POSIX::setlocale(&POSIX::LC_TIME, "C");
    my $ret=POSIX::strftime("%a, %d %b %Y %H:%M:%S %z", localtime($time));
    POSIX::setlocale(&POSIX::LC_TIME, $lc_time);
    return $ret;
}
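
# date_822() yields the RFC 822 date format used in RSS, e.g. something
# like "Sat, 27 Dec 2008 03:45:12 +0100" (the exact value depends on the
# local timezone); the locale is forced to "C" so day and month names are
# not translated.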

sub absolute_urls ($$) {
    # needed because rss sucks
    my $html=shift;
    my $baseurl=shift;

    my $url=$baseurl;
    $url=~s/[^\/]+$//;
    my $urltop; # calculated if needed

    # Relative links must be made absolute for the feeds, and there are
    # two kinds (see Debian bug #473987): links relative to the current
    # document (href="some/path"), which just get $url prepended, and
    # links relative to the http server root (href="/semi-abs/path").
    # Blindly prepending the page url to the latter would produce
    # http://host/path/to/this-doc//semi-abs/path, which is wrong. The
    # wiki's own base does not help either, since it can live below the
    # server root (e.g. url => http://host.name.mine/blog/manoj/), so the
    # non-path part of the url ($urltop) is calculated and prepended to
    # server-relative references instead. (Fix by Manoj Srivastava.)
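
    # A hedged illustration with a made-up base of http://host/blog/post.html
    # (so $url is http://host/blog/):
    #   href="#fn1"       -> http://host/blog/post.html#fn1
    #   href="img/a.png"  -> http://host/blog/img/a.png
    #   href="/style.css" -> http://host/style.css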

    my $ret="";

    eval q{use HTML::Parser; use HTML::Tagset};
    die $@ if $@;
    my $p = HTML::Parser->new(api_version => 3);
    $p->handler(default => sub { $ret.=join("", @_) }, "text");
    $p->handler(start => sub {
        my ($tagname, $pos, $text) = @_;
        if (ref $HTML::Tagset::linkElements{$tagname}) {
            while (4 <= @$pos) {
                # use attribute sets from right to left
                # to avoid invalidating the offsets
                # when replacing the values
                my ($k_offset, $k_len, $v_offset, $v_len) =
                    splice(@$pos, -4);
                my $attrname = lc(substr($text, $k_offset, $k_len));
                next unless grep { $_ eq $attrname } @{$HTML::Tagset::linkElements{$tagname}};
                next unless $v_offset; # 0 v_offset means no value
                my $v = substr($text, $v_offset, $v_len);
                $v =~ s/^([\'\"])(.*)\1$/$2/;
                eval q{use HTML::Entities};
                my $dv = decode_entities($v);
                if ($dv=~/^#/) {
                    $v=$baseurl.$v; # anchor
                }
                elsif ($dv=~/^(?!\w+:)[^\/]/) {
                    $v=$url.$v; # relative url
                }
                elsif ($dv=~/^\//) {
                    if (! defined $urltop) {
                        # what is the non path part of the url?
                        my $top_uri = URI->new($url);
                        $top_uri->path_query(""); # reset the path
                        $urltop = $top_uri->as_string;
                    }
                    $v=$urltop.$v; # url relative to top of site
                }
                $v =~ s/\"/&quot;/g; # since we quote with ""
                substr($text, $v_offset, $v_len) = qq("$v");
            }
        }
        $ret.=$text;
    }, "tagname, tokenpos, text");
    $p->parse($html);
    $p->eof;

    return $ret;
}

sub genenclosure {
    my $itemtemplate=shift;
    my $url=shift;
    my $file=shift;

    return unless $itemtemplate->query(name => "enclosure");

    my $size=(srcfile_stat($file))[8];
    my $mime="unknown";
    eval q{use File::MimeInfo};
    if (! $@) {
        $mime = mimetype($file);
    }
    $itemtemplate->param(
        enclosure => $url,
        type => $mime,
        length => $size,
    );
}
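
# A page can become a "fancy podcast" item by declaring an enclosure with
# the meta plugin, roughly like this (hedged example; the filename is made
# up):
#   [[!meta enclosure="episode1.mp3"]]
# Pages with no known page type (e.g. a bare .mp3 file inlined directly)
# instead take the "simple podcast" path in genfeed below.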

sub genfeed ($$$$$@) {
    my $feedtype=shift;
    my $feedurl=shift;
    my $feedtitle=shift;
    my $feeddesc=shift;
    my $guid=shift;
    my $page=shift;
    my @pages=@_;

    my $url=URI->new(encode_utf8(urlto($page,"",1)));

    my $itemtemplate=template_depends($feedtype."item.tmpl", $page, blind_cache => 1);
    my $content="";
    my $lasttime = 0;
    foreach my $p (@pages) {
        my $u=URI->new(encode_utf8(urlto($p, "", 1)));
        my $pcontent = absolute_urls(get_inline_content($p, $page), $url);
        my $fancy_enclosure_seen = 0;

        $itemtemplate->param(
            title => pagetitle(basename($p)),
            url => $u,
            permalink => $u,
            cdate_822 => date_822($pagectime{$p}),
            mdate_822 => date_822($pagemtime{$p}),
            cdate_3339 => date_3339($pagectime{$p}),
            mdate_3339 => date_3339($pagemtime{$p}),
        );

        if (exists $pagestate{$p}) {
            if (exists $pagestate{$p}{meta}{guid}) {
                eval q{use HTML::Entities};
                $itemtemplate->param(guid => HTML::Entities::encode_numeric($pagestate{$p}{meta}{guid}));
            }

            if (exists $pagestate{$p}{meta}{updated}) {
                $itemtemplate->param(mdate_822 => date_822($pagestate{$p}{meta}{updated}));
                $itemtemplate->param(mdate_3339 => date_3339($pagestate{$p}{meta}{updated}));
            }

            if (exists $pagestate{$p}{meta}{enclosure}) {
                my $absurl = $pagestate{$p}{meta}{enclosure};
                my $file = $pagestate{$p}{meta}{enclosurefile};
                genenclosure($itemtemplate, $absurl, $file);
                $fancy_enclosure_seen = 1;
            }
        }

        my $file=$pagesources{$p};
        unless ($fancy_enclosure_seen || defined(pagetype($file))) {
            genenclosure($itemtemplate, $u, $file);
            $itemtemplate->param(simplepodcast => 1);
        }

        $itemtemplate->param(content => $pcontent);

        run_hooks(pagetemplate => sub {
            shift->(page => $p, destpage => $page,
                template => $itemtemplate);
        });

        $content.=$itemtemplate->output;
        $itemtemplate->clear_params;

        $lasttime = $pagemtime{$p} if $pagemtime{$p} > $lasttime;
    }

    my $template=template_depends($feedtype."page.tmpl", $page, blind_cache => 1);
    $template->param(
        title => $feedtitle,
        wikiname => $config{wikiname},
        pageurl => $url,
        content => $content,
        feeddesc => $feeddesc,
        guid => $guid,
        feeddate => date_3339($lasttime),
        feeddate_822 => date_822($lasttime),
        feedurl => $feedurl,
    );
    run_hooks(pagetemplate => sub {
        shift->(page => $page, destpage => $page,
            template => $template);
    });

    return $template->output;
}

sub pingurl (@) {
    return unless @{$config{pingurl}} && %toping;

    eval q{require RPC::XML::Client};
    if ($@) {
        debug(gettext("RPC::XML::Client not found, not pinging"));
        return;
    }

    # daemonize here so slow pings don't slow down wiki updates
    defined(my $pid = fork) or error("Can't fork: $!");
    return if $pid;
    chdir '/';
    POSIX::setsid() or error("Can't start a new session: $!");
    open STDIN, '/dev/null';
    open STDOUT, '>/dev/null';
    open STDERR, '>&STDOUT' or error("Can't dup stdout: $!");

    # Don't need to keep a lock on the wiki as a daemon.
    IkiWiki::unlockwiki();

    foreach my $page (keys %toping) {
        my $title=pagetitle(basename($page), 0);
        my $url=urlto($page, "", 1);
        foreach my $pingurl (@{$config{pingurl}}) {
            debug("Pinging $pingurl for $page");
            eval {
                my $client = RPC::XML::Client->new($pingurl);
                my $req = RPC::XML::request->new('weblogUpdates.ping',
                    $title, $url);
                my $res = $client->send_request($req);
                if (! ref $res) {
                    error("Did not receive response to ping");
                }
                my $r=$res->value;
                if (! exists $r->{flerror} || $r->{flerror}) {
                    error("Ping rejected: ".(exists $r->{message} ? $r->{message} : "[unknown reason]"));
                }
            };
            if ($@) {
                error "Ping failed: $@";
            }
        }
    }

    exit 0; # daemon done
}

sub rootpage (@) {
    my %params=@_;

    my $rootpage;
    if (exists $params{rootpage}) {
        $rootpage=bestlink($params{page}, $params{rootpage});
        if (!length $rootpage) {
            $rootpage=$params{rootpage};
        }
    }
    else {
        $rootpage=$params{page};
    }
    return $rootpage;
}

1