#!/usr/bin/perl
# Ikiwiki metadata plugin.
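# Handles the [[!meta ...]] directive, storing page metadata (title, author,
# license, copyright, permalink, redir, openid delegation, etc.) in %pagestate
# and emitting matching <meta>/<link> headers via the pagetemplate hook.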
package IkiWiki::Plugin::meta;

use warnings;
use strict;
use IkiWiki 3.00;

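# <meta> and <link> header tags collected for each page, added to its
# template by the pagetemplate hook.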
my %metaheaders;

sub import {
	hook(type => "getsetup", id => "meta", call => \&getsetup);
	hook(type => "needsbuild", id => "meta", call => \&needsbuild);
	hook(type => "preprocess", id => "meta", call => \&preprocess, scan => 1);
	hook(type => "pagetemplate", id => "meta", call => \&pagetemplate);
}

sub getsetup () {
	return
		plugin => {
			safe => 1,
			rebuild => undef,
			section => "core",
		},
}
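# Clear stored meta state for pages about to be rebuilt; the directives
# re-add it while the page is scanned and preprocessed.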
sub needsbuild (@) {
	my $needsbuild=shift;
	foreach my $page (keys %pagestate) {
		if (exists $pagestate{$page}{meta}) {
			if (exists $pagesources{$page} &&
				grep { $_ eq $pagesources{$page} } @$needsbuild) {
				# remove state, it will be re-added
				# if the preprocessor directive is still
				# there during the rebuild
				delete $pagestate{$page}{meta};
			}
		}
	}
}
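# Sanitize html with the htmlscrubber plugin if it is enabled; otherwise
# pass it through unchanged.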
sub scrub ($$) {
	if (IkiWiki::Plugin::htmlscrubber->can("sanitize")) {
		return IkiWiki::Plugin::htmlscrubber::sanitize(
			content => shift, destpage => shift);
	}
	else {
		return shift;
	}
}
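# Check a url against htmlscrubber's safe url regexp, if available; when the
# htmlscrubber is not loaded, every url is considered safe.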
sub safeurl ($) {
	my $url=shift;
	if (exists $IkiWiki::Plugin::htmlscrubber::{safe_url_regexp} &&
	    defined $IkiWiki::Plugin::htmlscrubber::safe_url_regexp) {
		return $url=~/$IkiWiki::Plugin::htmlscrubber::safe_url_regexp/;
	}
	else {
		return 1;
	}
}
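# Render a fragment of wiki markup to html: preprocess, linkify, then
# htmlize it according to the page's source format.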
sub htmlize ($$$) {
	my $page = shift;
	my $destpage = shift;

	return IkiWiki::htmlize($page, $destpage, pagetype($pagesources{$page}),
		IkiWiki::linkify($page, $destpage,
		IkiWiki::preprocess($page, $destpage, shift)));
}
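# Handle a [[!meta ...]] directive. Called during both the scan pass and the
# preprocess pass; the scan pass (void context) only stores data in
# %pagestate, while html headers are generated during preprocessing.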
sub preprocess (@) {
	return "" unless @_;
	my %params=@_;
	my $key=shift;
	my $value=$params{$key};
	delete $params{$key};
	my $page=$params{page};
	delete $params{page};
	my $destpage=$params{destpage};
	delete $params{destpage};
	delete $params{preview};

	eval q{use HTML::Entities};
	# Always decode, even if encoding later, since it might not be
	# fully encoded.
	$value=decode_entities($value);

	# Metadata collection that needs to happen during the scan pass.
	if ($key eq 'title') {
		$pagestate{$page}{meta}{title}=$value;
		if (exists $params{sortas}) {
			$pagestate{$page}{meta}{titlesort}=$params{sortas};
		}
		else {
			delete $pagestate{$page}{meta}{titlesort};
		}
		return "";
	}
	elsif ($key eq 'description') {
		$pagestate{$page}{meta}{description}=$value;
		# fallthrough
	}
	elsif ($key eq 'guid') {
		$pagestate{$page}{meta}{guid}=$value;
		# fallthrough
	}
	elsif ($key eq 'license') {
		push @{$metaheaders{$page}}, '<link rel="license" href="#page_license" />';
		$pagestate{$page}{meta}{license}=$value;
		return "";
	}
	elsif ($key eq 'copyright') {
		push @{$metaheaders{$page}}, '<link rel="copyright" href="#page_copyright" />';
		$pagestate{$page}{meta}{copyright}=$value;
		return "";
	}
	elsif ($key eq 'link' && ! %params) {
		# hidden WikiLink
		add_link($page, $value);
		return "";
	}
	elsif ($key eq 'author') {
		$pagestate{$page}{meta}{author}=$value;
		if (exists $params{sortas}) {
			$pagestate{$page}{meta}{authorsort}=$params{sortas};
		}
		else {
			delete $pagestate{$page}{meta}{authorsort};
		}
		# fallthrough
	}
	elsif ($key eq 'authorurl') {
		$pagestate{$page}{meta}{authorurl}=$value if safeurl($value);
		# fallthrough
	}
	elsif ($key eq 'permalink') {
		$pagestate{$page}{meta}{permalink}=$value if safeurl($value);
		# fallthrough
	}
	elsif ($key eq 'date') {
		eval q{use Date::Parse};
		if (! $@) {
			my $time = str2time($value);
			$IkiWiki::pagectime{$page}=$time if defined $time;
		}
	}
	elsif ($key eq 'updated') {
		eval q{use Date::Parse};
		if (! $@) {
			my $time = str2time($value);
			$pagestate{$page}{meta}{updated}=$time if defined $time;
		}
	}

	if (! defined wantarray) {
		# avoid collecting duplicate data during scan pass
		return;
	}

	# Metadata handling that happens only during preprocessing pass.
	if ($key eq 'permalink') {
		if (safeurl($value)) {
			push @{$metaheaders{$page}}, scrub('<link rel="bookmark" href="'.encode_entities($value).'" />', $destpage);
		}
	}
	elsif ($key eq 'stylesheet') {
		my $rel=exists $params{rel} ? $params{rel} : "alternate stylesheet";
		my $title=exists $params{title} ? $params{title} : $value;
		# adding .css to the value prevents using any old web
		# editable page as a stylesheet
		my $stylesheet=bestlink($page, $value.".css");
		if (! length $stylesheet) {
			error gettext("stylesheet not found")
		}
		push @{$metaheaders{$page}}, '<link href="'.urlto($stylesheet, $page).
			'" rel="'.encode_entities($rel).
			'" title="'.encode_entities($title).
			"\" type=\"text/css\" />";
	}
	elsif ($key eq 'openid') {
		my $delegate=0; # both by default
		if (exists $params{delegate}) {
			$delegate = 1 if lc $params{delegate} eq 'openid';
			$delegate = 2 if lc $params{delegate} eq 'openid2';
		}
		if (exists $params{server} && safeurl($params{server})) {
			push @{$metaheaders{$page}}, '<link href="'.encode_entities($params{server}).
				'" rel="openid.server" />' if $delegate ne 2;
			push @{$metaheaders{$page}}, '<link href="'.encode_entities($params{server}).
				'" rel="openid2.provider" />' if $delegate ne 1;
		}
		if (safeurl($value)) {
			push @{$metaheaders{$page}}, '<link href="'.encode_entities($value).
				'" rel="openid.delegate" />' if $delegate ne 2;
			push @{$metaheaders{$page}}, '<link href="'.encode_entities($value).
				'" rel="openid2.local_id" />' if $delegate ne 1;
		}
		if (exists $params{"xrds-location"} && safeurl($params{"xrds-location"})) {
			push @{$metaheaders{$page}}, '<meta http-equiv="X-XRDS-Location" '.
				'content="'.encode_entities($params{"xrds-location"}).'" />';
		}
	}
	elsif ($key eq 'redir') {
		return "" if $page ne $destpage;
		my $safe=0;
		if ($value !~ /^\w+:\/\//) {
			my ($redir_page, $redir_anchor) = split /\#/, $value;

			my $link=bestlink($page, $redir_page);
			if (! length $link) {
				error gettext("redir page not found")
			}
			add_depends($page, $link, deptype("presence"));

			$value=urlto($link, $page);
			$value.='#'.$redir_anchor if defined $redir_anchor;
			$safe=1;

			# redir cycle detection
			$pagestate{$page}{meta}{redir}=$link;
			my $at=$page;
			my %seen;
			while (exists $pagestate{$at}{meta}{redir}) {
				if ($seen{$at}) {
					error gettext("redir cycle is not allowed")
				}
				$seen{$at}=1;
				$at=$pagestate{$at}{meta}{redir};
			}
		}
		else {
			$value=encode_entities($value);
		}
		my $delay=int(exists $params{delay} ? $params{delay} : 0);
		my $redir="<meta http-equiv=\"refresh\" content=\"$delay; URL=$value\" />";
		if (! $safe) {
			$redir=scrub($redir, $destpage);
		}
		push @{$metaheaders{$page}}, $redir;
	}
	elsif ($key eq 'link') {
		if (%params) {
			push @{$metaheaders{$page}}, scrub("<link href=\"".encode_entities($value)."\" ".
				join(" ", map {
					encode_entities($_)."=\"".encode_entities(decode_entities($params{$_}))."\""
				} keys %params).
				" />\n", $destpage);
		}
	}
	elsif ($key eq 'robots') {
		push @{$metaheaders{$page}}, '<meta name="robots"'.
			' content="'.encode_entities($value).'" />';
	}
	elsif ($key eq 'description') {
		push @{$metaheaders{$page}}, '<meta name="'.encode_entities($key).
			'" content="'.encode_entities($value).'" />';
	}
	else {
		push @{$metaheaders{$page}}, scrub('<meta name="'.encode_entities($key).
			'" content="'.encode_entities($value).'" />', $destpage);
	}

	return "";
}
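# Fill in template parameters (collected meta headers, title, author,
# license, etc.) from the stored metadata.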
sub pagetemplate (@) {
	my %params=@_;
	my $page=$params{page};
	my $destpage=$params{destpage};
	my $template=$params{template};

	if (exists $metaheaders{$page} && $template->query(name => "meta")) {
		# avoid duplicate meta lines
		my %seen;
		$template->param(meta => join("\n", grep { (! $seen{$_}) && ($seen{$_}=1) } @{$metaheaders{$page}}));
	}
	if (exists $pagestate{$page}{meta}{title} && $template->query(name => "title")) {
		$template->param(title => HTML::Entities::encode_numeric($pagestate{$page}{meta}{title}));
		$template->param(title_overridden => 1);
	}

	foreach my $field (qw{author authorurl permalink}) {
		$template->param($field => $pagestate{$page}{meta}{$field})
			if exists $pagestate{$page}{meta}{$field} && $template->query(name => $field);
	}

	foreach my $field (qw{description}) {
		$template->param($field => HTML::Entities::encode_numeric($pagestate{$page}{meta}{$field}))
			if exists $pagestate{$page}{meta}{$field} && $template->query(name => $field);
	}

	foreach my $field (qw{license copyright}) {
		if (exists $pagestate{$page}{meta}{$field} && $template->query(name => $field) &&
		    ($page eq $destpage || ! exists $pagestate{$destpage}{meta}{$field} ||
		     $pagestate{$page}{meta}{$field} ne $pagestate{$destpage}{meta}{$field})) {
			$template->param($field => htmlize($page, $destpage, $pagestate{$page}{meta}{$field}));
		}
	}
}
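# Build a sort key for a page and a meta field, preferring an explicit
# "<field>sort" value, then the field itself, then core page data.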
sub get_sort_key {
	my $page = shift;
	my $meta = shift;

	# e.g. titlesort (also makes sense for author)
	my $key = $pagestate{$page}{meta}{$meta . "sort"};
	return $key if defined $key;

	# e.g. title
	$key = $pagestate{$page}{meta}{$meta};
	return $key if defined $key;

	# fall back to closer-to-core things
	if ($meta eq 'title') {
		return pagetitle(IkiWiki::basename($page));
	}
	elsif ($meta eq 'date') {
		return $IkiWiki::pagectime{$page};
	}
	elsif ($meta eq 'updated') {
		return $IkiWiki::pagemtime{$page};
	}
	else {
		return '';
	}
}
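# Shared backend for the match_* PageSpec functions: glob-match a page's
# stored meta field.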
sub match {
	my $field=shift;
	my $page=shift;

	# turn glob into a safe regexp
	my $re=IkiWiki::glob2re(shift);

	my $val;
	if (exists $pagestate{$page}{meta}{$field}) {
		$val=$pagestate{$page}{meta}{$field};
	}
	elsif ($field eq 'title') {
		$val = pagetitle($page);
	}

	if (defined $val) {
		if ($val=~/^$re$/i) {
			return IkiWiki::SuccessReason->new("$re matches $field of $page", $page => $IkiWiki::DEPEND_CONTENT, "" => 1);
		}
		else {
			return IkiWiki::FailReason->new("$re does not match $field of $page", "" => 1);
		}
	}
	else {
		return IkiWiki::FailReason->new("$page does not have a $field", "" => 1);
	}
}
package IkiWiki::PageSpec;
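# PageSpec functions matching against stored meta fields,
# e.g. title(My Page*) or author(*Joey*).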
sub match_title ($$;@) {
	IkiWiki::Plugin::meta::match("title", @_);
}

sub match_author ($$;@) {
	IkiWiki::Plugin::meta::match("author", @_);
}

sub match_authorurl ($$;@) {
	IkiWiki::Plugin::meta::match("authorurl", @_);
}

sub match_license ($$;@) {
	IkiWiki::Plugin::meta::match("license", @_);
}

sub match_copyright ($$;@) {
	IkiWiki::Plugin::meta::match("copyright", @_);
}
package IkiWiki::SortSpec;
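# SortSpec backend for sort=meta(field): date and updated compare
# numerically, everything else compares as strings.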
sub cmp_meta {
	my $meta = shift;
	error(gettext("sort=meta requires a parameter")) unless defined $meta;

	if ($meta eq 'updated' || $meta eq 'date') {
		return IkiWiki::Plugin::meta::get_sort_key($a, $meta)
			<=>
			IkiWiki::Plugin::meta::get_sort_key($b, $meta);
	}

	return IkiWiki::Plugin::meta::get_sort_key($a, $meta)
		cmp
		IkiWiki::Plugin::meta::get_sort_key($b, $meta);
}

# A prototype of how sort=title could behave in 4.0 or something
sub cmp_meta_title {
	$_[0] = 'title';
	return cmp_meta(@_);
}

1