#!/usr/bin/perl

package IkiWiki;

use warnings;
use strict;
use Encode;
use Fcntl q{:flock};
use URI::Escape q{uri_escape_utf8};
use POSIX ();
use Storable;
use open qw{:utf8 :std};

use vars qw{%config %links %oldlinks %pagemtime %pagectime %pagecase
	%pagestate %wikistate %renderedfiles %oldrenderedfiles
	%pagesources %delpagesources %destsources %depends %depends_simple
	@mass_depends %hooks %forcerebuild %loaded_plugins %typedlinks
	%oldtypedlinks %autofiles @underlayfiles $lastrev $phase};

use Exporter q{import};
our @EXPORT = qw(hook debug error htmlpage template template_depends
	deptype add_depends pagespec_match pagespec_match_list bestlink
	htmllink readfile writefile pagetype srcfile pagename
	displaytime strftime_utf8 will_render gettext ngettext urlto targetpage
	add_underlay pagetitle titlepage linkpage newpagefile
	inject add_link add_autofile useragent
	%config %links %pagestate %wikistate %renderedfiles
	%pagesources %destsources %typedlinks);
our $VERSION = 3.00; # plugin interface version, next is ikiwiki version
our $version='unknown'; # VERSION_AUTOREPLACE done by Makefile, DNE
our $installdir='/usr'; # INSTALLDIR_AUTOREPLACE done by Makefile, DNE

# Page dependency types.
our $DEPEND_CONTENT=1;
our $DEPEND_PRESENCE=2;
our $DEPEND_LINKS=4;

# Phases of processing.
sub PHASE_SCAN () { 0 }
sub PHASE_RENDER () { 1 }
$phase = PHASE_SCAN;

# Optimisation.
use Memoize;
memoize("abs2rel");
memoize("sortspec_translate");
memoize("pagespec_translate");
memoize("template_file");

sub getsetup () {
	wikiname => {
		type => "string",
		default => "wiki",
		description => "name of the wiki",
		safe => 1,
		rebuild => 1,
	},
	adminemail => {
		type => "string",
		default => undef,
		example => 'me@example.com',
		description => "contact email for wiki",
		safe => 1,
		rebuild => 0,
	},
	adminuser => {
		type => "string",
		default => [],
		description => "users who are wiki admins",
		safe => 1,
		rebuild => 0,
	},
	banned_users => {
		type => "string",
		default => [],
		description => "users who are banned from the wiki",
		safe => 1,
		rebuild => 0,
	},
	srcdir => {
		type => "string",
		default => undef,
		example => "$ENV{HOME}/wiki",
		description => "where the source of the wiki is located",
		safe => 0, # path
		rebuild => 1,
	},
	destdir => {
		type => "string",
		default => undef,
		example => "/var/www/wiki",
		description => "where to build the wiki",
		safe => 0, # path
		rebuild => 1,
	},
	url => {
		type => "string",
		default => '',
		example => "http://example.com/wiki",
		description => "base url to the wiki",
		safe => 1,
		rebuild => 1,
	},
	cgiurl => {
		type => "string",
		default => '',
		example => "http://example.com/wiki/ikiwiki.cgi",
		description => "url to the ikiwiki.cgi",
		safe => 1,
		rebuild => 1,
	},
	reverse_proxy => {
		type => "boolean",
		default => 0,
		description => "do not adjust cgiurl if CGI is accessed via different URL",
		advanced => 0,
		safe => 1,
		rebuild => 0, # only affects CGI requests
	},
	cgi_wrapper => {
		type => "string",
		default => '',
		example => "/var/www/wiki/ikiwiki.cgi",
		description => "filename of cgi wrapper to generate",
		safe => 0, # file
		rebuild => 0,
	},
	cgi_wrappermode => {
		type => "string",
		default => '06755',
		description => "mode for cgi_wrapper (can safely be made suid)",
		safe => 0,
		rebuild => 0,
	},
	cgi_overload_delay => {
		type => "string",
		default => '',
		example => "10",
		description => "number of seconds to delay CGI requests when overloaded",
		safe => 1,
		rebuild => 0,
	},
	cgi_overload_message => {
		type => "string",
		default => '',
		example => "Please wait",
		description => "message to display when overloaded (may contain html)",
		safe => 1,
		rebuild => 0,
	},
	only_committed_changes => {
		type => "boolean",
		default => 0,
		description => "enable optimization of only refreshing committed changes?",
		safe => 1,
		rebuild => 0,
	},
	rcs => {
		type => "string",
		default => '',
		description => "rcs backend to use",
		safe => 0, # don't allow overriding
		rebuild => 0,
	},
	default_plugins => {
		type => "internal",
		default => [qw{mdwn link inline meta htmlscrubber passwordauth
				openid signinedit lockedit conditional
				recentchanges parentlinks editpage
				templatebody}],
		description => "plugins to enable by default",
		safe => 0,
		rebuild => 1,
	},
	add_plugins => {
		type => "string",
		default => [],
		description => "plugins to add to the default configuration",
		safe => 1,
		rebuild => 1,
	},
	disable_plugins => {
		type => "string",
		default => [],
		description => "plugins to disable",
		safe => 1,
		rebuild => 1,
	},
	templatedir => {
		type => "string",
		default => "$installdir/share/ikiwiki/templates",
		description => "additional directory to search for template files",
		advanced => 1,
		safe => 0, # path
		rebuild => 1,
	},
	underlaydir => {
		type => "string",
		default => "$installdir/share/ikiwiki/basewiki",
		description => "base wiki source location",
		advanced => 1,
		safe => 0, # path
		rebuild => 0,
	},
	underlaydirbase => {
		type => "internal",
		default => "$installdir/share/ikiwiki",
		description => "parent directory containing additional underlays",
		safe => 0,
		rebuild => 0,
	},
	wrappers => {
		type => "internal",
		default => [],
		description => "wrappers to generate",
		safe => 0,
		rebuild => 0,
	},
	underlaydirs => {
		type => "internal",
		default => [],
		description => "additional underlays to use",
		safe => 0,
		rebuild => 0,
	},
	verbose => {
		type => "boolean",
		example => 1,
		description => "display verbose messages?",
		safe => 1,
		rebuild => 0,
	},
	syslog => {
		type => "boolean",
		example => 1,
		description => "log to syslog?",
		safe => 1,
		rebuild => 0,
	},
	usedirs => {
		type => "boolean",
		default => 1,
		description => "create output files named page/index.html?",
		safe => 0, # changing requires manual transition
		rebuild => 1,
	},
	prefix_directives => {
		type => "boolean",
		default => 1,
		description => "use '!'-prefixed preprocessor directives?",
		safe => 0, # changing requires manual transition
		rebuild => 1,
	},
	indexpages => {
		type => "boolean",
		default => 0,
		description => "use page/index.mdwn source files",
		safe => 1,
		rebuild => 1,
	},
	discussion => {
		type => "boolean",
		default => 1,
		description => "enable Discussion pages?",
		safe => 1,
		rebuild => 1,
	},
	discussionpage => {
		type => "string",
		default => gettext("Discussion"),
		description => "name of Discussion pages",
		safe => 1,
		rebuild => 1,
	},
	html5 => {
		type => "boolean",
		default => 0,
		description => "use elements new in HTML5 like <section>?",
		advanced => 0,
		safe => 1,
		rebuild => 1,
	},
	sslcookie => {
		type => "boolean",
		default => 0,
		description => "only send cookies over SSL connections?",
		advanced => 1,
		safe => 1,
		rebuild => 0,
	},
	default_pageext => {
		type => "string",
		default => "mdwn",
		description => "extension to use for new pages",
		safe => 0, # not sanitized
		rebuild => 0,
	},
	htmlext => {
		type => "string",
		default => "html",
		description => "extension to use for html files",
		safe => 0, # not sanitized
		rebuild => 1,
	},
	timeformat => {
		type => "string",
		default => '%c',
		description => "strftime format string to display date",
		advanced => 1,
		safe => 1,
		rebuild => 1,
	},
	locale => {
		type => "string",
		default => undef,
		example => "en_US.UTF-8",
		description => "UTF-8 locale to use",
		advanced => 1,
		safe => 0,
		rebuild => 1,
	},
	userdir => {
		type => "string",
		default => "",
		example => "users",
		description => "put user pages below specified page",
		safe => 1,
		rebuild => 1,
	},
	numbacklinks => {
		type => "integer",
		default => 10,
		description => "how many backlinks to show before hiding excess (0 to show all)",
		safe => 1,
		rebuild => 1,
	},
	hardlink => {
		type => "boolean",
		default => 0,
		description => "attempt to hardlink source files? (optimisation for large files)",
		advanced => 1,
		safe => 0, # paranoia
		rebuild => 0,
	},
	umask => {
		type => "string",
		example => "public",
		description => "force ikiwiki to use a particular umask (keywords public, group or private, or a number)",
		advanced => 1,
		safe => 0, # paranoia
		rebuild => 0,
	},
	wrappergroup => {
		type => "string",
		example => "ikiwiki",
		description => "group for wrappers to run in",
		advanced => 1,
		safe => 0, # paranoia
		rebuild => 0,
	},
	libdirs => {
		type => "string",
		default => [],
		example => ["$ENV{HOME}/.local/share/ikiwiki"],
		description => "extra library and plugin directories",
		advanced => 1,
		safe => 0, # directory
		rebuild => 0,
	},
	libdir => {
		type => "string",
		default => "",
		example => "$ENV{HOME}/.ikiwiki/",
		description => "extra library and plugin directory (searched after libdirs)",
		advanced => 1,
		safe => 0, # directory
		rebuild => 0,
	},
	ENV => {
		type => "string",
		default => {},
		description => "environment variables",
		safe => 0, # paranoia
		rebuild => 0,
	},
	timezone => {
		type => "string",
		default => "",
		example => "US/Eastern",
		description => "time zone name",
		safe => 1,
		rebuild => 1,
	},
	include => {
		type => "string",
		default => undef,
		example => '^\.htaccess$',
		description => "regexp of normally excluded files to include",
		advanced => 1,
		safe => 0, # regexp
		rebuild => 1,
	},
	exclude => {
		type => "string",
		default => undef,
		example => '^(.*\.private|Makefile)$',
		description => "regexp of files that should be skipped",
		advanced => 1,
		safe => 0, # regexp
		rebuild => 1,
	},
	wiki_file_prune_regexps => {
		type => "internal",
		default => [qr/(^|\/)\.\.(\/|$)/, qr/^\//, qr/^\./, qr/\/\./,
			qr/\.x?html?$/, qr/\.ikiwiki-new$/,
			qr/(^|\/).svn\//, qr/.arch-ids\//, qr/{arch}\//,
			qr/(^|\/)_MTN\//, qr/(^|\/)_darcs\//,
			qr/(^|\/)CVS\//, qr/\.dpkg-tmp$/],
		description => "regexps of source files to ignore",
		safe => 0,
		rebuild => 1,
	},
	wiki_file_chars => {
		type => "string",
		description => "specifies the characters that are allowed in source filenames",
		default => "-[:alnum:]+/.:_",
		safe => 0,
		rebuild => 1,
	},
	wiki_file_regexp => {
		type => "internal",
		description => "regexp of legal source files",
		safe => 0,
		rebuild => 1,
	},
	web_commit_regexp => {
		type => "internal",
		default => qr/^web commit (by (.*?(?=: |$))|from ([0-9a-fA-F:.]+[0-9a-fA-F])):?(.*)/,
		description => "regexp to parse web commits from logs",
		safe => 0,
		rebuild => 0,
	},
	cgi => {
		type => "internal",
		default => 0,
		description => "run as a cgi",
		safe => 0,
		rebuild => 0,
	},
	cgi_disable_uploads => {
		type => "internal",
		default => 1,
		description => "whether CGI should accept file uploads",
		safe => 0,
		rebuild => 0,
	},
	post_commit => {
		type => "internal",
		default => 0,
		description => "run as a post-commit hook",
		safe => 0,
		rebuild => 0,
	},
	rebuild => {
		type => "internal",
		default => 0,
		description => "running in rebuild mode",
		safe => 0,
		rebuild => 0,
	},
	setup => {
		type => "internal",
		default => undef,
		description => "running in setup mode",
		safe => 0,
		rebuild => 0,
	},
	clean => {
		type => "internal",
		default => 0,
		description => "running in clean mode",
		safe => 0,
		rebuild => 0,
	},
	refresh => {
		type => "internal",
		default => 0,
		description => "running in refresh mode",
		safe => 0,
		rebuild => 0,
	},
	test_receive => {
		type => "internal",
		default => 0,
		description => "running in receive test mode",
		safe => 0,
		rebuild => 0,
	},
	wrapper_background_command => {
		type => "internal",
		default => '',
		description => "background shell command to run",
		safe => 0,
		rebuild => 0,
	},
	gettime => {
		type => "internal",
		description => "running in gettime mode",
		safe => 0,
		rebuild => 0,
	},
	w3mmode => {
		type => "internal",
		default => 0,
		description => "running in w3mmode",
		safe => 0,
		rebuild => 0,
	},
	wikistatedir => {
		type => "internal",
		default => undef,
		description => "path to the .ikiwiki directory holding ikiwiki state",
		safe => 0,
		rebuild => 0,
	},
	setupfile => {
		type => "internal",
		default => undef,
		description => "path to setup file",
		safe => 0,
		rebuild => 0,
	},
	setuptype => {
		type => "internal",
		default => "Yaml",
		description => "perl class to use to dump setup file",
		safe => 0,
		rebuild => 0,
	},
	allow_symlinks_before_srcdir => {
		type => "boolean",
		default => 0,
		description => "allow symlinks in the path leading to the srcdir (potentially insecure)",
		safe => 0,
		rebuild => 0,
	},
	cookiejar => {
		type => "string",
		default => { file => "$ENV{HOME}/.ikiwiki/cookies" },
		description => "cookie control",
		safe => 0, # hooks into perl module internals
		rebuild => 0,
	},
	useragent => {
		type => "string",
		default => "ikiwiki/$version",
		example => "Wget/1.13.4 (linux-gnu)",
		description => "set custom user agent string for outbound HTTP requests e.g. when fetching aggregated RSS feeds",
		safe => 0,
		rebuild => 0,
	},
	responsive_layout => {
		type => "boolean",
		default => 1,
		description => "theme has a responsive layout? (mobile-optimized)",
		safe => 1,
		rebuild => 1,
	},
	deterministic => {
		type => "boolean",
		default => 0,
		description => "try harder to produce deterministic output",
		safe => 1,
		rebuild => 1,
		advanced => 1,
	},
}

sub getlibdirs () {
	my @libdirs;
	if ($config{libdirs}) {
		@libdirs = @{$config{libdirs}};
	}
	if (length $config{libdir}) {
		push @libdirs, $config{libdir};
	}
	return @libdirs;
}

sub defaultconfig () {
	my %s=getsetup();
	my @ret;
	foreach my $key (keys %s) {
		push @ret, $key, $s{$key}->{default};
	}
	return @ret;
}
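
# A minimal bootstrap sketch (illustrative, not part of this module):
# callers such as the ikiwiki front-end typically seed %config from
# defaultconfig() and then override individual settings before calling
# checkconfig(). The paths shown are hypothetical examples.
#
#	%config=(defaultconfig(),
#		srcdir => "/home/user/wiki",
#		destdir => "/var/www/wiki",
#		url => "http://example.com/wiki",
#	);
#	checkconfig();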

# URL to top of wiki as a path starting with /, valid from any wiki page or
# the CGI; if that's not possible, an absolute URL. Either way, it ends with /
my $local_url;
# URL to CGI script, similar to $local_url
my $local_cgiurl;

sub checkconfig () {
	# locale stuff; avoid LC_ALL since it overrides everything
	if (defined $ENV{LC_ALL}) {
		$ENV{LANG} = $ENV{LC_ALL};
		delete $ENV{LC_ALL};
	}
	if (defined $config{locale}) {
		if (POSIX::setlocale(&POSIX::LC_ALL, $config{locale})) {
			$ENV{LANG}=$config{locale};
			define_gettext();
		}
	}

	if (! defined $config{wiki_file_regexp}) {
		$config{wiki_file_regexp}=qr/(^[$config{wiki_file_chars}]+$)/;
	}

	if (ref $config{ENV} eq 'HASH') {
		foreach my $val (keys %{$config{ENV}}) {
			$ENV{$val}=$config{ENV}{$val};
		}
	}
	if (defined $config{timezone} && length $config{timezone}) {
		$ENV{TZ}=$config{timezone};
	}
	elsif (defined $ENV{TZ} && length $ENV{TZ}) {
		$config{timezone}=$ENV{TZ};
	}
	else {
		eval q{use Config qw()};
		error($@) if $@;

		if ($Config::Config{d_gnulibc} && -e '/etc/localtime') {
			$config{timezone}=$ENV{TZ}=':/etc/localtime';
		}
		else {
			$config{timezone}=$ENV{TZ}='GMT';
		}
	}

	if ($config{w3mmode}) {
		eval q{use Cwd q{abs_path}};
		error($@) if $@;
		$config{srcdir}=possibly_foolish_untaint(abs_path($config{srcdir}));
		$config{destdir}=possibly_foolish_untaint(abs_path($config{destdir}));
		$config{cgiurl}="file:///\$LIB/ikiwiki-w3m.cgi/".$config{cgiurl}
			unless $config{cgiurl} =~ m!file:///!;
		$config{url}="file://".$config{destdir};
	}

	if ($config{cgi} && ! length $config{url}) {
		error(gettext("Must specify url to wiki with --url when using --cgi"));
	}

	if (defined $config{url} && length $config{url}) {
		eval q{use URI};
		my $baseurl = URI->new($config{url});

		$local_url = $baseurl->path . "/";
		$local_cgiurl = undef;

		if (length $config{cgiurl}) {
			my $cgiurl = URI->new($config{cgiurl});

			$local_cgiurl = $cgiurl->path;

			if ($cgiurl->scheme eq 'https' &&
				$baseurl->scheme eq 'http') {
				# We assume that the same content is available
				# over both http and https, because if it
				# wasn't, accessing the static content
				# from the CGI would be mixed-content,
				# which would be a security flaw.

				if ($cgiurl->authority ne $baseurl->authority) {
					# use protocol-relative URL for
					# static content
					$local_url = "$config{url}/";
					$local_url =~ s{^http://}{//};
				}
				# else use host-relative URL for static content

				# either way, CGI needs to be absolute
				$local_cgiurl = $config{cgiurl};
			}
			elsif ($cgiurl->scheme ne $baseurl->scheme) {
				# too far apart, fall back to absolute URLs
				$local_url = "$config{url}/";
				$local_cgiurl = $config{cgiurl};
			}
			elsif ($cgiurl->authority ne $baseurl->authority) {
				# slightly too far apart, fall back to
				# protocol-relative URLs
				$local_url = "$config{url}/";
				$local_url =~ s{^https?://}{//};
				$local_cgiurl = $config{cgiurl};
				$local_cgiurl =~ s{^https?://}{//};
			}
			# else keep host-relative URLs
		}

		$local_url =~ s{//$}{/};
	}
	else {
		$local_cgiurl = $config{cgiurl};
	}

	$config{wikistatedir}="$config{srcdir}/.ikiwiki"
		unless exists $config{wikistatedir} && defined $config{wikistatedir};

	if (defined $config{umask}) {
		my $u = possibly_foolish_untaint($config{umask});

		if ($u =~ m/^\d+$/) {
			umask($u);
		}
		elsif ($u eq 'private') {
			umask(077);
		}
		elsif ($u eq 'group') {
			umask(027);
		}
		elsif ($u eq 'public') {
			umask(022);
		}
		else {
			error(sprintf(gettext("unsupported umask setting %s"), $u));
		}
	}

	run_hooks(checkconfig => sub { shift->() });

	return 1;
}
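
# Illustrative examples of the URL computation above (assumed values):
# with url => "http://example.com/wiki" and
# cgiurl => "http://example.com/wiki/ikiwiki.cgi", scheme and authority
# match, so $local_url becomes "/wiki/" and $local_cgiurl
# "/wiki/ikiwiki.cgi". If the CGI were instead served over https from a
# different host, the CGI URL would stay absolute and $local_url would
# become protocol-relative ("//example.com/wiki/").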

sub listplugins () {
	my %ret;

	foreach my $dir (@INC, getlibdirs()) {
		next unless defined $dir && length $dir;
		foreach my $file (glob("$dir/IkiWiki/Plugin/*.pm")) {
			my ($plugin)=$file=~/.*\/(.*)\.pm$/;
			$ret{$plugin}=1;
		}
	}
	foreach my $dir (getlibdirs(), "$installdir/lib/ikiwiki") {
		next unless defined $dir && length $dir;
		foreach my $file (glob("$dir/plugins/*")) {
			$ret{basename($file)}=1 if -x $file;
		}
	}

	return keys %ret;
}

sub loadplugins () {
	foreach my $dir (getlibdirs()) {
		unshift @INC, possibly_foolish_untaint($dir);
	}

	foreach my $plugin (@{$config{default_plugins}}, @{$config{add_plugins}}) {
		loadplugin($plugin);
	}

	if ($config{rcs}) {
		if (exists $hooks{rcs}) {
			error(gettext("cannot use multiple rcs plugins"));
		}
		loadplugin($config{rcs});
	}
	if (! exists $hooks{rcs}) {
		loadplugin("norcs");
	}

	run_hooks(getopt => sub { shift->() });
	if (grep /^-/, @ARGV) {
		print STDERR "Unknown option (or missing parameter): $_\n"
			foreach grep /^-/, @ARGV;
		usage();
	}

	return 1;
}

sub loadplugin ($;$) {
	my $plugin=shift;
	my $force=shift;

	return if ! $force && grep { $_ eq $plugin} @{$config{disable_plugins}};

	foreach my $possiblytainteddir (getlibdirs(), "$installdir/lib/ikiwiki") {
		my $dir = possibly_foolish_untaint($possiblytainteddir);
		if (defined $dir && -x "$dir/plugins/$plugin") {
			eval { require IkiWiki::Plugin::external };
			if ($@) {
				my $reason=$@;
				error(sprintf(gettext("failed to load external plugin needed for %s plugin: %s"), $plugin, $reason));
			}
			import IkiWiki::Plugin::external "$dir/plugins/$plugin";
			$loaded_plugins{$plugin}=1;
			return 1;
		}
	}

	my $mod="IkiWiki::Plugin::".possibly_foolish_untaint($plugin);
	eval qq{use $mod};
	if ($@) {
		error("Failed to load plugin $mod: $@");
	}
	$loaded_plugins{$plugin}=1;
	return 1;
}
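
# Example (illustrative): loadplugin("mdwn") evaluates
# "use IkiWiki::Plugin::mdwn", while a plugin shipped as an executable,
# e.g. "$installdir/lib/ikiwiki/plugins/rst", is proxied through
# IkiWiki::Plugin::external instead. Plugins listed in disable_plugins
# are skipped unless $force is passed.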

sub error ($;$) {
	my $message=shift;
	my $cleaner=shift;
	log_message('err' => $message) if $config{syslog};
	if (defined $cleaner) {
		$cleaner->();
	}
	die $message."\n";
}

sub debug ($) {
	return unless $config{verbose};
	return log_message(debug => @_);
}

my $log_open=0;
my $log_failed=0;
sub log_message ($$) {
	my $type=shift;

	if ($config{syslog}) {
		require Sys::Syslog;
		if (! $log_open) {
			Sys::Syslog::setlogsock('unix');
			Sys::Syslog::openlog('ikiwiki', '', 'user');
			$log_open=1;
		}
		eval {
			my $message = "[$config{wikiname}] ".join(" ", @_);
			utf8::encode($message);
			Sys::Syslog::syslog($type, "%s", $message);
		};
		if ($@) {
			print STDERR "failed to syslog: $@" unless $log_failed;
			$log_failed=1;
			print STDERR "@_\n";
		}
		return $@;
	}
	elsif (! $config{cgi}) {
		return print "@_\n";
	}
	else {
		return print STDERR "@_\n";
	}
}
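
# Usage sketch: debug() and error() both funnel through log_message(),
# e.g.
#	debug("rendering $page");
# goes to syslog when $config{syslog} is set, to stdout when running
# outside a CGI, and to stderr otherwise.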

sub possibly_foolish_untaint ($) {
	my $tainted=shift;
	my ($untainted)=$tainted=~/(.*)/s;
	return $untainted;
}

sub basename ($) {
	my $file=shift;

	$file=~s!.*/+!!;
	return $file;
}

sub dirname ($) {
	my $file=shift;

	$file=~s!/*[^/]+$!!;
	return $file;
}

sub isinternal ($) {
	my $page=shift;
	return exists $pagesources{$page} &&
		$pagesources{$page} =~ /\._([^.]+)$/;
}

sub pagetype ($) {
	my $file=shift;

	if ($file =~ /\.([^.]+)$/) {
		return $1 if exists $hooks{htmlize}{$1};
	}
	my $base=basename($file);
	if (exists $hooks{htmlize}{$base} &&
	    $hooks{htmlize}{$base}{noextension}) {
		return $base;
	}
	return;
}

my %pagename_cache;

sub pagename ($) {
	my $file=shift;

	if (exists $pagename_cache{$file}) {
		return $pagename_cache{$file};
	}

	my $type=pagetype($file);
	my $page=$file;
	$page=~s/\Q.$type\E*$//
		if defined $type && !$hooks{htmlize}{$type}{keepextension}
			&& !$hooks{htmlize}{$type}{noextension};
	if ($config{indexpages} && $page=~/(.*)\/index$/) {
		$page=$1;
	}

	$pagename_cache{$file} = $page;
	return $page;
}
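
# Illustrative mappings (assuming an mdwn htmlize hook that keeps no
# extension): pagename("foo/bar.mdwn") is "foo/bar", and with
# indexpages enabled, pagename("foo/index.mdwn") is "foo".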

sub newpagefile ($$) {
	my $page=shift;
	my $type=shift;

	if (! $config{indexpages} || $page eq 'index') {
		return $page.".".$type;
	}
	else {
		return $page."/index.".$type;
	}
}

sub targetpage ($$;$) {
	my $page=shift;
	my $ext=shift;
	my $filename=shift;

	if (defined $filename) {
		return $page."/".$filename.".".$ext;
	}
	elsif (! $config{usedirs} || $page eq 'index') {
		return $page.".".$ext;
	}
	else {
		return $page."/index.".$ext;
	}
}

sub htmlpage ($) {
	my $page=shift;

	return targetpage($page, $config{htmlext});
}
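
# Illustrative mappings (assuming htmlext is "html"): with usedirs
# enabled, htmlpage("foo") is "foo/index.html"; with usedirs disabled
# it is "foo.html". The page "index" always maps to "index.html".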

sub srcfile_stat {
	my $file=shift;
	my $nothrow=shift;

	return "$config{srcdir}/$file", stat(_) if -e "$config{srcdir}/$file";
	foreach my $dir (@{$config{underlaydirs}}, $config{underlaydir}) {
		return "$dir/$file", stat(_) if -e "$dir/$file";
	}
	error("internal error: $file cannot be found in $config{srcdir} or underlay") unless $nothrow;
	return;
}

sub srcfile ($;$) {
	return (srcfile_stat(@_))[0];
}

sub add_literal_underlay ($) {
	my $dir=shift;

	if (! grep { $_ eq $dir } @{$config{underlaydirs}}) {
		unshift @{$config{underlaydirs}}, $dir;
	}
}

sub add_underlay ($) {
	my $dir = shift;

	if ($dir !~ /^\//) {
		$dir="$config{underlaydirbase}/$dir";
	}

	add_literal_underlay($dir);
	# why does it return 1? we just don't know
	return 1;
}

sub readfile ($;$$) {
	my $file=shift;
	my $binary=shift;
	my $wantfd=shift;

	if (-l $file) {
		error("cannot read a symlink ($file)");
	}

	local $/=undef;
	open (my $in, "<", $file) || error("failed to read $file: $!");
	binmode($in) if ($binary);
	return \*$in if $wantfd;
	my $ret=<$in>;
	# check for invalid utf-8, and toss it back to avoid crashes
	if (! utf8::valid($ret)) {
		$ret=encode_utf8($ret);
	}
	close $in || error("failed to read $file: $!");
	return $ret;
}

sub prep_writefile ($$) {
	my $file=shift;
	my $destdir=shift;

	my $test=$file;
	while (length $test) {
		if (-l "$destdir/$test") {
			error("cannot write to a symlink ($test)");
		}
		if (-f _ && $test ne $file) {
			# Remove conflicting file.
			foreach my $p (keys %renderedfiles, keys %oldrenderedfiles) {
				foreach my $f (@{$renderedfiles{$p}}, @{$oldrenderedfiles{$p}}) {
					if ($f eq $test) {
						unlink("$destdir/$test");
						last;
					}
				}
			}
		}
		$test=dirname($test);
	}

	my $dir=dirname("$destdir/$file");
	if (! -d $dir) {
		my $d="";
		foreach my $s (split(m!/+!, $dir)) {
			$d.="$s/";
			if (! -d $d) {
				mkdir($d) || error("failed to create directory $d: $!");
			}
		}
	}

	return 1;
}

sub writefile ($$$;$$) {
	my $file=shift; # can include subdirs
	my $destdir=shift; # directory to put file in
	my $content=shift;
	my $binary=shift;
	my $writer=shift;

	prep_writefile($file, $destdir);

	my $newfile="$destdir/$file.ikiwiki-new";
	if (-l $newfile) {
		error("cannot write to a symlink ($newfile)");
	}

	my $cleanup = sub { unlink($newfile) };
	open (my $out, '>', $newfile) || error("failed to write $newfile: $!", $cleanup);
	binmode($out) if ($binary);
	if ($writer) {
		$writer->(\*$out, $cleanup);
	}
	else {
		print $out $content or error("failed writing to $newfile: $!", $cleanup);
	}
	close $out || error("failed saving $newfile: $!", $cleanup);
	rename($newfile, "$destdir/$file") ||
		error("failed renaming $newfile to $destdir/$file: $!", $cleanup);

	return 1;
}
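
# Usage sketch: the write is atomic in the common case; content goes to
# "$file.ikiwiki-new" and is then rename()d over the destination, e.g.
#	writefile("foo/index.html", $config{destdir}, $html);
# A $writer callback may be passed instead of $content to stream output
# to the filehandle.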

my %cleared;
sub will_render ($$;$) {
	my $page=shift;
	my $dest=shift;
	my $clear=shift;

	# Important security check for independently created files.
	if (-e "$config{destdir}/$dest" && ! $config{rebuild} &&
	    ! grep { $_ eq $dest } (@{$renderedfiles{$page}}, @{$oldrenderedfiles{$page}}, @{$wikistate{editpage}{previews}})) {
		my $from_other_page=0;
		# Expensive, but rarely runs.
		foreach my $p (keys %renderedfiles, keys %oldrenderedfiles) {
			if (grep {
				$_ eq $dest ||
				dirname($_) eq $dest
			    } @{$renderedfiles{$p}}, @{$oldrenderedfiles{$p}}) {
				$from_other_page=1;
				last;
			}
		}

		error("$config{destdir}/$dest independently created, not overwriting with version from $page")
			unless $from_other_page;
	}

	# If $dest exists as a directory, remove conflicting files in it
	# rendered from other pages.
	if (-d _) {
		foreach my $p (keys %renderedfiles, keys %oldrenderedfiles) {
			foreach my $f (@{$renderedfiles{$p}}, @{$oldrenderedfiles{$p}}) {
				if (dirname($f) eq $dest) {
					unlink("$config{destdir}/$f");
					rmdir(dirname("$config{destdir}/$f"));
				}
			}
		}
	}

	if (! $clear || $cleared{$page}) {
		$renderedfiles{$page}=[$dest, grep { $_ ne $dest } @{$renderedfiles{$page}}];
	}
	else {
		foreach my $old (@{$renderedfiles{$page}}) {
			delete $destsources{$old};
		}
		$renderedfiles{$page}=[$dest];
		$cleared{$page}=1;
	}
	$destsources{$dest}=$page;

	return 1;
}

sub bestlink ($$) {
	my $page=shift;
	my $link=shift;

	my $cwd=$page;
	if ($link=~s/^\/+//) {
		# absolute links
		$cwd="";
	}
	$link=~s/\/$//;

	do {
		my $l=$cwd;
		$l.="/" if length $l;
		$l.=$link;

		if (exists $pagesources{$l}) {
			return $l;
		}
		elsif (exists $pagecase{lc $l}) {
			return $pagecase{lc $l};
		}
	} while $cwd=~s{/?[^/]+$}{};

	if (length $config{userdir}) {
		my $l = "$config{userdir}/".lc($link);
		if (exists $pagesources{$l}) {
			return $l;
		}
		elsif (exists $pagecase{lc $l}) {
			return $pagecase{lc $l};
		}
	}

	#print STDERR "warning: page $page, broken link: $link\n";
	return "";
}
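
# Illustrative sketch (not called anywhere): how bestlink() resolves a
# WikiLink the way the code above does. The page names are hypothetical.
sub _example_bestlink_usage {
	# From "sandbox/subpage", a link to "details" is tried as
	# "sandbox/subpage/details", then "sandbox/details", then
	# "details" (each also case-insensitively via %pagecase), and
	# finally as "$config{userdir}/details"; "" means a broken link.
	my $target = bestlink("sandbox/subpage", "details");
	return length $target ? $target : undef;
}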

sub isinlinableimage ($) {
	my $file=shift;

	return $file =~ /\.(png|gif|jpg|jpeg|svg)$/i;
}

sub pagetitle ($;$) {
	my $page=shift;
	my $unescaped=shift;

	if ($unescaped) {
		$page=~s/(__(\d+)__|_)/$1 eq '_' ? ' ' : chr($2)/eg;
	}
	else {
		$page=~s/(__(\d+)__|_)/$1 eq '_' ? ' ' : "&#$2;"/eg;
	}

	return $page;
}

sub titlepage ($) {
	my $title=shift;
	# support use w/o %config set
	my $chars = defined $config{wiki_file_chars} ? $config{wiki_file_chars} : "-[:alnum:]+/.:_";
	$title=~s/([^$chars]|_)/$1 eq ' ' ? '_' : "__".ord($1)."__"/eg;
	return $title;
}

sub linkpage ($) {
	my $link=shift;
	my $chars = defined $config{wiki_file_chars} ? $config{wiki_file_chars} : "-[:alnum:]+/.:_";
	$link=~s/([^$chars])/$1 eq ' ' ? '_' : "__".ord($1)."__"/eg;
	return $link;
}
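
# Illustrative sketch (not called anywhere) of the escaping round trip:
# titlepage() maps a human title to a safe page name ("_" for a space,
# "__NN__" for the codepoint of a disallowed character), and
# pagetitle() reverses it for display. The title is made up.
sub _example_title_roundtrip {
	my $file = titlepage("foo bar?");	# yields "foo_bar__63__"
	my $back = pagetitle($file, 1);		# yields "foo bar?" again
	return ($file, $back);
}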

sub cgiurl (@) {
	my %params=@_;

	my $cgiurl=$local_cgiurl;

	if (exists $params{cgiurl}) {
		$cgiurl=$params{cgiurl};
		delete $params{cgiurl};
	}

	unless (%params) {
		return $cgiurl;
	}

	return $cgiurl."?".
		join("&amp;", map $_."=".uri_escape_utf8($params{$_}), sort(keys %params));
}

sub cgiurl_abs (@) {
	eval q{use URI};
	URI->new_abs(cgiurl(@_), $config{cgiurl});
}
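
# Illustrative sketch (not called anywhere): building CGI URLs. The
# do/page values are made up. cgiurl() appends the parameters sorted
# and uri_escape_utf8-encoded to $local_cgiurl; cgiurl_abs() resolves
# the result against $config{cgiurl}.
sub _example_cgiurl_usage {
	# Relative form, for embedding in rendered pages:
	my $edit = cgiurl(do => "edit", page => "sandbox");
	# Absolute form, for use in emails and feeds:
	my $abs = cgiurl_abs(do => "edit", page => "sandbox");
	return ($edit, $abs);
}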

sub baseurl (;$) {
	my $page=shift;

	return $local_url if ! defined $page;

	$page=htmlpage($page);
	$page=~s/[^\/]+$//;
	$page=~s/[^\/]+\//..\//g;
	return $page;
}

sub urlabs ($$) {
	my $url=shift;
	my $urlbase=shift;

	return $url unless defined $urlbase && length $urlbase;

	eval q{use URI};
	URI->new_abs($url, $urlbase)->as_string;
}

sub abs2rel ($$) {
	# Work around very inefficient behavior in File::Spec if abs2rel
	# is passed two relative paths. It's much faster if paths are
	# absolute! (Debian bug #376658; fixed in debian unstable now)
	my $path="/".shift;
	my $base="/".shift;

	require File::Spec;
	my $ret=File::Spec->abs2rel($path, $base);
	$ret=~s/^// if defined $ret;
	return $ret;
}

sub displaytime ($;$$) {
	# Plugins can override this function to mark up the time to
	# display.
	my $time=formattime($_[0], $_[1]);
	if ($config{html5}) {
		return '<time datetime="'.date_3339($_[0]).'"'.
			($_[2] ? ' pubdate="pubdate"' : '').
			'>'.$time.'</time>';
	}
	else {
		return '<span class="date">'.$time.'</span>';
	}
}

sub formattime ($;$) {
	# Plugins can override this function to format the time.
	my $time=shift;
	my $format=shift;
	if (! defined $format) {
		$format=$config{timeformat};
	}

	return strftime_utf8($format, localtime($time));
}

my $strftime_encoding;
sub strftime_utf8 {
	# strftime didn't know about encodings in older Perl, so make sure
	# its output is properly treated as utf8.
	# Note that this does not handle utf-8 in the format string.
	my $result = POSIX::strftime(@_);

	if (Encode::is_utf8($result)) {
		return $result;
	}

	($strftime_encoding) = POSIX::setlocale(&POSIX::LC_TIME) =~ m#\.([^@]+)#
		unless defined $strftime_encoding;
	$strftime_encoding
		? Encode::decode($strftime_encoding, $result)
		: $result;
}

sub date_3339 ($) {
	my $time=shift;

	my $lc_time=POSIX::setlocale(&POSIX::LC_TIME);
	POSIX::setlocale(&POSIX::LC_TIME, "C");
	my $ret=POSIX::strftime("%Y-%m-%dT%H:%M:%SZ", gmtime($time));
	POSIX::setlocale(&POSIX::LC_TIME, $lc_time);
	return $ret;
}
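
# Illustrative sketch (not called anywhere) of the two time
# formatters: formattime() honors $config{timeformat} and the current
# locale, while date_3339() always emits UTC RFC 3339, as needed for
# <time datetime=...> and feeds. The sample outputs are only examples.
sub _example_time_formatting {
	my $now = time;
	my $human = formattime($now);	# locale-dependent, e.g. "Mon Jan  1 12:00:00 2024"
	my $machine = date_3339($now);	# e.g. "2024-01-01T12:00:00Z"
	return ($human, $machine);
}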

sub beautify_urlpath ($) {
	my $url=shift;

	# Ensure url is not an empty link, and if necessary,
	# add ./ to avoid colon confusion.
	if ($url !~ /^\// && $url !~ /^\.\.?\//) {
		$url="./$url";
	}

	if ($config{usedirs}) {
		$url =~ s!/index.$config{htmlext}$!/!;
	}

	return $url;
}

sub urlto ($;$$) {
	my $to=shift;
	my $from=shift;
	my $absolute=shift;

	if (! length $to) {
		$to = 'index';
	}

	if (! $destsources{$to}) {
		$to=htmlpage($to);
	}

	if ($absolute) {
		return $config{url}.beautify_urlpath("/".$to);
	}

	if (! defined $from) {
		my $u = $local_url || '';
		$u =~ s{/$}{};
		return $u.beautify_urlpath("/".$to);
	}

	my $link = abs2rel($to, dirname(htmlpage($from)));

	return beautify_urlpath($link);
}
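
# Illustrative sketch (not called anywhere) of the three forms urlto()
# can produce; the page names are hypothetical.
sub _example_urlto_usage {
	# Relative link from one page's html file to another's:
	my $rel = urlto("sandbox", "index");
	# Relative to $local_url when no "from" page is given:
	my $top = urlto("sandbox");
	# Fully absolute, prefixed with $config{url}:
	my $abs = urlto("sandbox", undef, 1);
	return ($rel, $top, $abs);
}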

sub isselflink ($$) {
	# Plugins can override this function to support special types
	# of selflinks.
	my $page=shift;
	my $link=shift;

	return $page eq $link;
}

sub htmllink ($$$;@) {
	my $lpage=shift; # the page doing the linking
	my $page=shift; # the page that will contain the link (different for inline)
	my $link=shift;
	my %opts=@_;

	$link=~s/\/$//;

	my $bestlink;
	if (! $opts{forcesubpage}) {
		$bestlink=bestlink($lpage, $link);
	}
	else {
		$bestlink="$lpage/".lc($link);
	}

	my $linktext;
	if (defined $opts{linktext}) {
		$linktext=$opts{linktext};
	}
	else {
		$linktext=pagetitle(basename($link));
	}

	return "<span class=\"selflink\">$linktext</span>"
		if length $bestlink && isselflink($page, $bestlink) &&
		   ! defined $opts{anchor};

	if (! $destsources{$bestlink}) {
		$bestlink=htmlpage($bestlink);

		if (! $destsources{$bestlink}) {
			my $cgilink = "";
			if (length $config{cgiurl}) {
				$cgilink = "<a href=\"".
					cgiurl(
						do => "create",
						page => $link,
						from => $lpage
					)."\" rel=\"nofollow\">?</a>";
			}
			return "<span class=\"createlink\">$cgilink$linktext</span>";
		}
	}

	$bestlink=abs2rel($bestlink, dirname(htmlpage($page)));
	$bestlink=beautify_urlpath($bestlink);

	if (! $opts{noimageinline} && isinlinableimage($bestlink)) {
		return "<img src=\"$bestlink\" alt=\"$linktext\" />";
	}

	if (defined $opts{anchor}) {
		$bestlink.="#".$opts{anchor};
	}

	my @attrs;
	foreach my $attr (qw{rel class title}) {
		if (defined $opts{$attr}) {
			push @attrs, " $attr=\"$opts{$attr}\"";
		}
	}

	return "<a href=\"$bestlink\"@attrs>$linktext</a>";
}
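
# Illustrative sketch (not called anywhere) of htmllink()'s main
# options; the page names are hypothetical. Depending on whether the
# target exists, the call yields a normal <a>, a "selflink" span, or a
# "createlink" span pointing at the CGI.
sub _example_htmllink_usage {
	return htmllink("sandbox", "sandbox", "details",
		linktext => "the details page",	# override pagetitle(basename($link))
		anchor => "section1",		# append #section1 to the target
		class => "special",		# extra attribute on the <a>
		noimageinline => 1,		# never inline the target as <img>
	);
}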

sub userpage ($) {
	my $user=shift;
	return length $config{userdir} ? "$config{userdir}/$user" : $user;
}

# Username to display for openid accounts.
sub openiduser ($) {
	my $user=shift;

	if (defined $user && $user =~ m!^https?://! &&
	    eval q{use Net::OpenID::VerifiedIdentity; 1} && !$@) {
		my $display;

		if (Net::OpenID::VerifiedIdentity->can("DisplayOfURL")) {
			$display = Net::OpenID::VerifiedIdentity::DisplayOfURL($user);
		}
		else {
			# backcompat with old version
			my $oid=Net::OpenID::VerifiedIdentity->new(identity => $user);
			$display=$oid->display;
		}

		# Convert "user.somehost.com" to "user [somehost.com]"
		# (also "user.somehost.co.uk")
		if ($display !~ /\[/) {
			$display=~s/^([-a-zA-Z0-9]+?)\.([-.a-zA-Z0-9]+\.[a-z]+)$/$1 [$2]/;
		}
		# Convert "http://somehost.com/user" to "user [somehost.com]".
		# (also "https://somehost.com/user/")
		if ($display !~ /\[/) {
			$display=~s/^https?:\/\/(.+)\/([^\/#?]+)\/?(?:[#?].*)?$/$2 [$1]/;
		}
		$display=~s!^https?://!!; # make sure this is removed
		eval q{use CGI 'escapeHTML'};
		error($@) if $@;
		return escapeHTML($display);
	}
	return;
}

# Username to display for emailauth accounts.
sub emailuser ($) {
	my $user=shift;
	if (defined $user && $user =~ m/(.+)@/) {
		my $nick=$1;
		# remove any characters not allowed in wiki files
		# support use w/o %config set
		my $chars = defined $config{wiki_file_chars} ? $config{wiki_file_chars} : "-[:alnum:]+/.:_";
		$nick=~s/[^$chars]/_/g;
		return $nick;
	}
	return;
}

# Some user information should not be exposed in commit metadata, etc.
# This generates a cloaked form of such information.
sub cloak ($) {
	my $user=shift;
	# cloak email address using http://xmlns.com/foaf/spec/#term_mbox_sha1sum
	if ($user=~m/(.+)@/) {
		my $nick=$1;
		eval q{use Digest::SHA};
		return $user if $@;
		return $nick.'@'.Digest::SHA::sha1_hex("mailto:$user");
	}
	else {
		return $user;
	}
}
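
# Illustrative sketch (not called anywhere): cloak() keeps the local
# part of an email address readable but replaces everything after the
# "@" with the FOAF mbox_sha1sum of "mailto:$user", so the address
# cannot be harvested from commit metadata. The address is made up.
sub _example_cloak_usage {
	# Keeps "jane", replaces "example.com" with the hex SHA-1 of
	# "mailto:jane@example.com".
	return cloak('jane@example.com');
}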

sub htmlize ($$$$) {
	my $page=shift;
	my $destpage=shift;
	my $type=shift;
	my $content=shift;

	my $oneline = $content !~ /\n/;

	if (exists $hooks{htmlize}{$type}) {
		$content=$hooks{htmlize}{$type}{call}->(
			page => $page,
			content => $content,
		);
	}
	else {
		error("htmlization of $type not supported");
	}

	run_hooks(sanitize => sub {
		$content=shift->(
			page => $page,
			destpage => $destpage,
			content => $content,
		);
	});

	if ($oneline) {
		# hack to get rid of enclosing junk added by markdown
		# and other htmlizers/sanitizers
		$content=~s/^<p>//i;
		$content=~s/<\/p>\n*$//i;
	}

	return $content;
}

sub linkify ($$$) {
	my $page=shift;
	my $destpage=shift;
	my $content=shift;

	run_hooks(linkify => sub {
		$content=shift->(
			page => $page,
			destpage => $destpage,
			content => $content,
		);
	});

	return $content;
}

our %preprocessing;
our $preprocess_preview=0;
sub preprocess ($$$;$$) {
	my $page=shift; # the page the data comes from
	my $destpage=shift; # the page the data will appear in (different for inline)
	my $content=shift;
	my $scan=shift;
	my $preview=shift;

	# Using local because it needs to be set within any nested calls
	# of this function.
	local $preprocess_preview=$preview if defined $preview;

	my $handle=sub {
		my $escape=shift;
		my $prefix=shift;
		my $command=shift;
		my $params=shift;
		$params="" if ! defined $params;

		if (length $escape) {
			return "[[$prefix$command $params]]";
		}
		elsif (exists $hooks{preprocess}{$command}) {
			return "" if $scan && ! $hooks{preprocess}{$command}{scan};
			# Note: preserve order of params, some plugins may
			# consider it significant.
			my @params;
			while ($params =~ m{
				(?:([-.\w]+)=)?		# 1: named parameter key?
				(?:
					"""(.*?)"""	# 2: triple-quoted value
				|
					"([^"]*?)"	# 3: single-quoted value
				|
					'''(.*?)'''	# 4: triple-single-quote
				|
					<<([a-zA-Z]+)\n	# 5: heredoc start
					(.*?)\n\5	# 6: heredoc value
				|
					(\S+)		# 7: unquoted value
				)
				(?:\s+|$)		# delimiter to next param
			}msgx) {
				my $key=$1;
				my $val;
				if (defined $2) {
					$val=$2;
					$val=~s/\r\n/\n/mg;
					$val=~s/^\n+//g;
					$val=~s/\n+$//g;
				}
				elsif (defined $3) {
					$val=$3;
				}
				elsif (defined $4) {
					$val=$4;
				}
				elsif (defined $7) {
					$val=$7;
				}
				elsif (defined $6) {
					$val=$6;
				}

				if (defined $key) {
					push @params, $key, $val;
				}
				else {
					push @params, $val, '';
				}
			}
			if ($preprocessing{$page}++ > 8) {
				# Avoid loops of preprocessed pages preprocessing
				# other pages that preprocess them, etc.
				return "[[!$command <span class=\"error\">".
					sprintf(gettext("preprocessing loop detected on %s at depth %i"),
						$page, $preprocessing{$page}).
					"</span>]]";
			}
			my $ret;
			if (! $scan) {
				$ret=eval {
					$hooks{preprocess}{$command}{call}->(
						@params,
						page => $page,
						destpage => $destpage,
						preview => $preprocess_preview,
					);
				};
				if ($@) {
					my $error=$@;
					chomp $error;
					eval q{use HTML::Entities};
					$error = encode_entities($error);
					$ret="[[!$command <span class=\"error\">".
						gettext("Error").": $error"."</span>]]";
				}
			}
			else {
				# use void context during scan pass
				eval {
					$hooks{preprocess}{$command}{call}->(
						@params,
						page => $page,
						destpage => $destpage,
						preview => $preprocess_preview,
					);
				};
				$ret="";
			}
			$preprocessing{$page}--;
			return $ret;
		}
		else {
			return "[[$prefix$command $params]]";
		}
	};

	my $regex;
	if ($config{prefix_directives}) {
		$regex = qr{
			(\\?)		# 1: escape?
			\[\[(!)		# directive open; 2: prefix
			([-\w]+)	# 3: command
			(		# 4: the parameters..
				\s+	# Must have space if parameters present
				(?:
					(?:[-.\w]+=)?		# named parameter key?
					(?:
						""".*?"""	# triple-quoted value
						|
						"[^"]*?"	# single-quoted value
						|
						'''.*?'''	# triple-single-quote
						|
						<<([a-zA-Z]+)\n	# 5: heredoc start
						(?:.*?)\n\5	# heredoc value
						|
						[^"\s\]]+	# unquoted value
					)
					\s*			# whitespace or end
								# of directive
				)
			*)?		# 0 or more parameters
			\]\]		# directive closed
		}sx;
	}
	else {
		$regex = qr{
			(\\?)		# 1: escape?
			\[\[(!?)	# directive open; 2: optional prefix
			([-\w]+)	# 3: command
			\s+
			(		# 4: the parameters..
				(?:
					(?:[-.\w]+=)?		# named parameter key?
					(?:
						""".*?"""	# triple-quoted value
						|
						"[^"]*?"	# single-quoted value
						|
						'''.*?'''	# triple-single-quote
						|
						<<([a-zA-Z]+)\n	# 5: heredoc start
						(?:.*?)\n\5	# heredoc value
						|
						[^"\s\]]+	# unquoted value
					)
					\s*			# whitespace or end
								# of directive
				)
			*)		# 0 or more parameters
			\]\]		# directive closed
		}sx;
	}

	$content =~ s{$regex}{$handle->($1, $2, $3, $4)}eg;
	return $content;
}
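
# Illustrative sketch (not called anywhere): given hypothetical
# wikitext
#	[[!meta title="My page" author='''A. Hacker''']]
# the parameter-parsing loop above yields the pairs below, which
# preprocess() passes to the hook registered for "meta" along with
# page, destpage and preview. A bare word parses as (word => '').
sub _example_preprocess_params {
	return (title => "My page", author => "A. Hacker");
}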

sub filter ($$$) {
	my $page=shift;
	my $destpage=shift;
	my $content=shift;

	run_hooks(filter => sub {
		$content=shift->(page => $page, destpage => $destpage,
			content => $content);
	});

	return $content;
}

sub check_canedit ($$$;$) {
	my $page=shift;
	my $q=shift;
	my $session=shift;
	my $nonfatal=shift;

	my $canedit;
	run_hooks(canedit => sub {
		return if defined $canedit;
		my $ret=shift->($page, $q, $session);
		if (defined $ret) {
			if ($ret eq "") {
				$canedit=1;
			}
			elsif (ref $ret eq 'CODE') {
				$ret->() unless $nonfatal;
				$canedit=0;
			}
			elsif (defined $ret) {
				error($ret) unless $nonfatal;
				$canedit=0;
			}
		}
	});
	return defined $canedit ? $canedit : 1;
}
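
# Illustrative sketch (not called anywhere) of the shape of a canedit
# hook as check_canedit() consumes it: returning "" allows the edit, a
# string is treated as a fatal error message, a coderef is invoked to
# handle the denial itself, and undef abstains. The "_example" id and
# the "locked/" prefix are made up.
sub _example_canedit_hook {
	hook(type => "canedit", id => "_example", call => sub {
		my ($page, $cgi, $session) = @_;
		return undef unless $page =~ /^locked\//;	# abstain
		return "" if defined $session->param("name");	# allow
		return sub { error("login required") };		# deny
	});
}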

sub check_content (@) {
	my %params=@_;

	return 1 if ! exists $hooks{checkcontent}; # optimisation

	if (exists $pagesources{$params{page}}) {
		my @diff;
		my %old=map { $_ => 1 }
			split("\n", readfile(srcfile($pagesources{$params{page}})));
		foreach my $line (split("\n", $params{content})) {
			push @diff, $line if ! exists $old{$line};
		}
		$params{diff}=join("\n", @diff);
	}

	my $ok;
	run_hooks(checkcontent => sub {
		return if defined $ok;
		my $ret=shift->(%params);
		if (defined $ret) {
			if ($ret eq "") {
				$ok=1;
			}
			elsif (ref $ret eq 'CODE') {
				$ret->() unless $params{nonfatal};
				$ok=0;
			}
			elsif (defined $ret) {
				error($ret) unless $params{nonfatal};
				$ok=0;
			}
		}
	});
	return defined $ok ? $ok : 1;
}

sub check_canchange (@) {
	my %params = @_;
	my $cgi = $params{cgi};
	my $session = $params{session};
	my @changes = @{$params{changes}};

	my %newfiles;
	foreach my $change (@changes) {
		# This untaint is safe because we check file_pruned and
		# wiki_file_regexp.
		my ($file)=$change->{file}=~/$config{wiki_file_regexp}/;
		$file=possibly_foolish_untaint($file);
		if (! defined $file || ! length $file ||
		    file_pruned($file)) {
			error(sprintf(gettext("bad file name %s"), $file));
		}

		my $type=pagetype($file);
		my $page=pagename($file) if defined $type;

		if ($change->{action} eq 'add') {
			$newfiles{$file}=1;
		}

		if ($change->{action} eq 'change' ||
		    $change->{action} eq 'add') {
			if (defined $page) {
				check_canedit($page, $cgi, $session);
				next;
			}
			else {
				if (IkiWiki::Plugin::attachment->can("check_canattach")) {
					IkiWiki::Plugin::attachment::check_canattach($session, $file, $change->{path});
					check_canedit($file, $cgi, $session);
					next;
				}
			}
		}
		elsif ($change->{action} eq 'remove') {
			# check_canremove tests to see if the file is present
			# on disk. This will fail when a single commit adds a
			# file and then removes it again. Avoid the problem
			# by not testing the removal in such pairs of changes.
			# (The add is still tested, just to make sure that
			# no data is added to the repo that a web edit
			# could not add.)
			next if $newfiles{$file};

			if (IkiWiki::Plugin::remove->can("check_canremove")) {
				IkiWiki::Plugin::remove::check_canremove(defined $page ? $page : $file, $cgi, $session);
				check_canedit(defined $page ? $page : $file, $cgi, $session);
				next;
			}
		}
		else {
			error "unknown action ".$change->{action};
		}

		error sprintf(gettext("you are not allowed to change %s"), $file);
	}
}

my $wikilock;

sub lockwiki () {
	# Take an exclusive lock on the wiki to prevent multiple concurrent
	# run issues. The lock will be dropped on program exit.
	if (! -d $config{wikistatedir}) {
		mkdir($config{wikistatedir});
	}
	open($wikilock, '>', "$config{wikistatedir}/lockfile") ||
		error ("cannot write to $config{wikistatedir}/lockfile: $!");
	if (! flock($wikilock, LOCK_EX | LOCK_NB)) {
		debug("failed to get lock; waiting...");
		if (! flock($wikilock, LOCK_EX)) {
			error("failed to get lock");
		}
	}
	return 1;
}

sub unlockwiki () {
	POSIX::close($ENV{IKIWIKI_CGILOCK_FD}) if exists $ENV{IKIWIKI_CGILOCK_FD};
	return close($wikilock) if $wikilock;
	return;
}

my $commitlock;

sub commit_hook_enabled () {
	open($commitlock, '+>', "$config{wikistatedir}/commitlock") ||
		error("cannot write to $config{wikistatedir}/commitlock: $!");
	if (! flock($commitlock, 1 | 4)) { # LOCK_SH | LOCK_NB to test
		close($commitlock) || error("failed closing commitlock: $!");
		return 0;
	}
	close($commitlock) || error("failed closing commitlock: $!");
	return 1;
}

sub disable_commit_hook () {
	open($commitlock, '>', "$config{wikistatedir}/commitlock") ||
		error("cannot write to $config{wikistatedir}/commitlock: $!");
	if (! flock($commitlock, 2)) { # LOCK_EX
		error("failed to get commit lock");
	}
	return 1;
}

sub enable_commit_hook () {
	return close($commitlock) if $commitlock;
	return;
}

sub loadindex () {
	%oldrenderedfiles=%pagectime=();
	my $rebuild=$config{rebuild};
	if (! $rebuild) {
		%pagesources=%pagemtime=%oldlinks=%links=%depends=
		%destsources=%renderedfiles=%pagecase=%pagestate=
		%depends_simple=%typedlinks=%oldtypedlinks=();
	}
	my $in;
	if (! open ($in, "<", "$config{wikistatedir}/indexdb")) {
		if (-e "$config{wikistatedir}/index") {
			system("ikiwiki-transition", "indexdb", $config{srcdir});
			open ($in, "<", "$config{wikistatedir}/indexdb") || return;
		}
		else {
			# gettime on first build
			$config{gettime}=1 unless defined $config{gettime};
			return;
		}
	}

	my $index=Storable::fd_retrieve($in);
	if (! defined $index) {
		return 0;
	}

	my $pages;
	if (exists $index->{version} && ! ref $index->{version}) {
		$pages=$index->{page};
		%wikistate=%{$index->{state}};
		# Handle plugins that got disabled by loading a new setup.
		if (exists $config{setupfile}) {
			require IkiWiki::Setup;
			IkiWiki::Setup::disabled_plugins(
				grep { ! $loaded_plugins{$_} } keys %wikistate);
		}
	}
	else {
		$pages=$index;
		%wikistate=();
	}

	foreach my $src (keys %$pages) {
		my $d=$pages->{$src};
		my $page;
		if (exists $d->{page} && ! $rebuild) {
			$page=$d->{page};
		}
		else {
			$page=pagename($src);
		}
		$pagectime{$page}=$d->{ctime};
		$pagesources{$page}=$src;
		if (! $rebuild) {
			$pagemtime{$page}=$d->{mtime};
			$renderedfiles{$page}=$d->{dest};
			if (exists $d->{links} && ref $d->{links}) {
				$links{$page}=$d->{links};
				$oldlinks{$page}=[@{$d->{links}}];
			}
			if (ref $d->{depends_simple} eq 'ARRAY') {
				# old format
				$depends_simple{$page}={
					map { $_ => 1 } @{$d->{depends_simple}}
				};
			}
			elsif (exists $d->{depends_simple}) {
				$depends_simple{$page}=$d->{depends_simple};
			}
			if (exists $d->{dependslist}) {
				# old format
				$depends{$page}={
					map { $_ => $DEPEND_CONTENT }
						@{$d->{dependslist}}
				};
			}
			elsif (exists $d->{depends} && ! ref $d->{depends}) {
				# old format
				$depends{$page}={$d->{depends} => $DEPEND_CONTENT };
			}
			elsif (exists $d->{depends}) {
				$depends{$page}=$d->{depends};
			}
			if (exists $d->{state}) {
				$pagestate{$page}=$d->{state};
			}
			if (exists $d->{typedlinks}) {
				$typedlinks{$page}=$d->{typedlinks};

				while (my ($type, $links) = each %{$typedlinks{$page}}) {
					next unless %$links;
					$oldtypedlinks{$page}{$type} = {%$links};
				}
			}
		}
		$oldrenderedfiles{$page}=[@{$d->{dest}}];
	}
	foreach my $page (keys %pagesources) {
		$pagecase{lc $page}=$page;
	}
	foreach my $page (keys %renderedfiles) {
		$destsources{$_}=$page foreach @{$renderedfiles{$page}};
	}
	$lastrev=$index->{lastrev};
	@underlayfiles=@{$index->{underlayfiles}} if ref $index->{underlayfiles};
	return close($in);
}

sub saveindex () {
	run_hooks(savestate => sub { shift->() });

	my @plugins=keys %loaded_plugins;

	if (! -d $config{wikistatedir}) {
		mkdir($config{wikistatedir});
	}
	my $newfile="$config{wikistatedir}/indexdb.new";
	my $cleanup = sub { unlink($newfile) };
	open (my $out, '>', $newfile) || error("cannot write to $newfile: $!", $cleanup);

	my %index;
	foreach my $page (keys %pagemtime) {
		next unless $pagemtime{$page};
		my $src=$pagesources{$page};

		$index{page}{$src}={
			page => $page,
			ctime => $pagectime{$page},
			mtime => $pagemtime{$page},
			dest => $renderedfiles{$page},
			links => $links{$page},
		};

		if (exists $depends{$page}) {
			$index{page}{$src}{depends} = $depends{$page};
		}

		if (exists $depends_simple{$page}) {
			$index{page}{$src}{depends_simple} = $depends_simple{$page};
		}

		if (exists $typedlinks{$page} && %{$typedlinks{$page}}) {
			$index{page}{$src}{typedlinks} = $typedlinks{$page};
		}

		if (exists $pagestate{$page}) {
			$index{page}{$src}{state}=$pagestate{$page};
		}
	}

	$index{state}={};
	foreach my $id (@plugins) {
		$index{state}{$id}={}; # used to detect disabled plugins
		foreach my $key (keys %{$wikistate{$id}}) {
			$index{state}{$id}{$key}=$wikistate{$id}{$key};
		}
	}

	$index{lastrev}=$lastrev;
	$index{underlayfiles}=\@underlayfiles;

	$index{version}="3";
	my $ret=Storable::nstore_fd(\%index, $out);
	return if ! defined $ret || ! $ret;
	close $out || error("failed saving to $newfile: $!", $cleanup);
	rename($newfile, "$config{wikistatedir}/indexdb") ||
		error("failed renaming $newfile to $config{wikistatedir}/indexdb", $cleanup);

	return 1;
}
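
# Illustrative sketch (not called anywhere): %pagestate and %wikistate
# are the per-page and wiki-wide data that loadindex()/saveindex()
# persist across runs for plugins. Keys are namespaced by plugin id;
# the "_example" id, page name and keys here are made up.
sub _example_state_usage {
	$pagestate{"somepage"}{_example}{word_count} = 42;	# saved per page
	$wikistate{_example}{last_run} = time;			# saved wiki-wide
}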

sub template_file ($) {
	my $name=shift;

	my $tpage=($name =~ s/^\///) ? $name : "templates/$name";
	my $template;
	if ($name !~ /\.tmpl$/ && exists $pagesources{$tpage}) {
		$template=srcfile($pagesources{$tpage}, 1);
		$name.=".tmpl";
	}
	else {
		$template=srcfile($tpage, 1);
	}

	if (defined $template) {
		return $template, $tpage, 1 if wantarray;
		return $template;
	}
	else {
		$name=~s:/::; # avoid path traversal
		foreach my $dir ($config{templatedir},
				"$installdir/share/ikiwiki/templates") {
			if (-e "$dir/$name") {
				$template="$dir/$name";
				last;
			}
		}
		if (defined $template) {
			return $template, $tpage if wantarray;
			return $template;
		}
	}

	return;
}

sub template_depends ($$;@) {
	my $name=shift;
	my $page=shift;

	my ($filename, $tpage, $untrusted)=template_file($name);
	if (! defined $filename) {
		error(sprintf(gettext("template %s not found"), $name))
	}

	if (defined $page && defined $tpage) {
		add_depends($page, $tpage);
	}

	my @opts=(
		filter => sub {
			my $text_ref = shift;
			${$text_ref} = decode_utf8(${$text_ref});
			run_hooks(readtemplate => sub {
				${$text_ref} = shift->(
					id => $name,
					page => $tpage,
					content => ${$text_ref},
					untrusted => $untrusted,
				);
			});
		},
		loop_context_vars => 1,
		die_on_bad_params => 0,
		parent_global_vars => 1,
		filename => $filename,
		@_,
		($untrusted ? (no_includes => 1) : ()),
	);
	return @opts if wantarray;

	require HTML::Template;
	return HTML::Template->new(@opts);
}

sub template ($;@) {
	template_depends(shift, undef, @_);
}
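
# Illustrative sketch (not called anywhere): filling a template. Using
# template_depends() instead of template() additionally records that
# the page must be rebuilt when the template changes. The template
# name, page name and parameter are made up.
sub _example_template_usage {
	my $tmpl = template_depends("note.tmpl", "somepage");
	$tmpl->param(content => "hello");	# HTML::Template object
	return $tmpl->output;
}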

sub templateactions ($$) {
	my $template=shift;
	my $page=shift;

	my $have_actions=0;
	my @actions;
	run_hooks(pageactions => sub {
		push @actions, map { { action => $_ } }
			grep { defined } shift->(page => $page);
	});
	$template->param(actions => \@actions);

	if ($config{cgiurl} && exists $hooks{auth}) {
		$template->param(prefsurl => cgiurl(do => "prefs"));
		$have_actions=1;
	}

	if ($have_actions || @actions) {
		$template->param(have_actions => 1);
	}
}

sub hook (@) {
	my %param=@_;

	if (! exists $param{type} || ! ref $param{call} || ! exists $param{id}) {
		error 'hook requires type, call, and id parameters';
	}

	return if $param{no_override} && exists $hooks{$param{type}}{$param{id}};

	$hooks{$param{type}}{$param{id}}=\%param;
	return 1;
}
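
# Illustrative sketch (not called anywhere) of hook registration as a
# plugin would do it; the "_example" id is made up. "first" and "last"
# order a hook relative to others of its type in run_hooks() below.
sub _example_hook_registration {
	hook(type => "preprocess", id => "_example", call => sub {
		my %params = @_;
		return "source page: $params{page}";
	});
	hook(type => "format", id => "_example", first => 1, call => sub {
		my %params = @_;
		return $params{content};
	});
}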

sub run_hooks ($$) {
	# Calls the given sub for each hook of the given type,
	# passing it the hook function to call.
	my $type=shift;
	my $sub=shift;

	if (exists $hooks{$type}) {
		my (@first, @middle, @last);
		foreach my $id (keys %{$hooks{$type}}) {
			if ($hooks{$type}{$id}{first}) {
				push @first, $id;
			}
			elsif ($hooks{$type}{$id}{last}) {
				push @last, $id;
			}
			else {
				push @middle, $id;
			}
		}
		foreach my $id (@first, @middle, @last) {
			$sub->($hooks{$type}{$id}{call});
		}
	}

	return 1;
}

sub rcs_update () {
	$hooks{rcs}{rcs_update}{call}->(@_);
}

sub rcs_prepedit ($) {
	$hooks{rcs}{rcs_prepedit}{call}->(@_);
}

sub rcs_commit (@) {
	$hooks{rcs}{rcs_commit}{call}->(@_);
}

sub rcs_commit_staged (@) {
	$hooks{rcs}{rcs_commit_staged}{call}->(@_);
}

sub rcs_add ($) {
	$hooks{rcs}{rcs_add}{call}->(@_);
}

sub rcs_remove ($) {
	$hooks{rcs}{rcs_remove}{call}->(@_);
}

sub rcs_rename ($$) {
	$hooks{rcs}{rcs_rename}{call}->(@_);
}

sub rcs_recentchanges ($) {
	$hooks{rcs}{rcs_recentchanges}{call}->(@_);
}

sub rcs_diff ($;$) {
	$hooks{rcs}{rcs_diff}{call}->(@_);
}

sub rcs_getctime ($) {
	$hooks{rcs}{rcs_getctime}{call}->(@_);
}

sub rcs_getmtime ($) {
	$hooks{rcs}{rcs_getmtime}{call}->(@_);
}

sub rcs_receive () {
	$hooks{rcs}{rcs_receive}{call}->();
}

sub add_depends ($$;$) {
	my $page=shift;
	my $pagespec=shift;
	my $deptype=shift || $DEPEND_CONTENT;

	# Is the pagespec a simple page name?
	if ($pagespec =~ /$config{wiki_file_regexp}/ &&
	    $pagespec !~ /[\s*?()!]/) {
		$depends_simple{$page}{lc $pagespec} |= $deptype;
		return 1;
	}

	# Add explicit dependencies for influences.
	my $sub=pagespec_translate($pagespec);
	return unless defined $sub;
	foreach my $p (keys %pagesources) {
		my $r=$sub->($p, location => $page);
		my $i=$r->influences;
		my $static=$r->influences_static;
		foreach my $k (keys %$i) {
			next unless $r || $static || $k eq $page;
			$depends_simple{$page}{lc $k} |= $i->{$k};
		}
		last if $static;
	}

	$depends{$page}{$pagespec} |= $deptype;
	return 1;
}

sub deptype (@) {
	my $deptype=0;
	foreach my $type (@_) {
		if ($type eq 'presence') {
			$deptype |= $DEPEND_PRESENCE;
		}
		elsif ($type eq 'links') {
			$deptype |= $DEPEND_LINKS;
		}
		elsif ($type eq 'content') {
			$deptype |= $DEPEND_CONTENT;
		}
	}
	return $deptype;
}
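
# Illustrative sketch (not called anywhere): recording dependencies
# with the weakest type that suffices, so fewer rebuilds are
# triggered. The page names and pagespecs are made up.
sub _example_dependency_usage {
	# Rebuild "blog" whenever the content of any matching page changes:
	add_depends("blog", "posts/*");
	# Rebuild only when pages matching the spec appear or disappear:
	add_depends("sitemap", "*", deptype("presence"));
	# Rebuild when matching pages' links (or presence) change:
	add_depends("linkmap", "*", deptype("links", "presence"));
}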

my $file_prune_regexp;
sub file_pruned ($) {
	my $file=shift;

	if (defined $config{include} && length $config{include}) {
		return 0 if $file =~ m/$config{include}/;
	}

	if (! defined $file_prune_regexp) {
		$file_prune_regexp='('.join('|', @{$config{wiki_file_prune_regexps}}).')';
		$file_prune_regexp=qr/$file_prune_regexp/;
	}
	return $file =~ m/$file_prune_regexp/;
}
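
# e.g. with the default prune regexps, file_pruned(".git/config") is
# true (dotfiles and VCS directories are pruned), while
# file_pruned("index.mdwn") is false.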

sub define_gettext () {
	# If translation is needed, redefine the gettext function to do it.
	# Otherwise, it becomes a quick no-op.
	my $gettext_obj;
	my $getobj;
	if ((exists $ENV{LANG} && length $ENV{LANG}) ||
	    (exists $ENV{LC_ALL} && length $ENV{LC_ALL}) ||
	    (exists $ENV{LC_MESSAGES} && length $ENV{LC_MESSAGES})) {
		$getobj=sub {
			$gettext_obj=eval q{
				use Locale::gettext q{textdomain};
				Locale::gettext->domain('ikiwiki')
			};
		};
	}

	no warnings 'redefine';
	*gettext=sub {
		$getobj->() if $getobj;
		if ($gettext_obj) {
			$gettext_obj->get(shift);
		}
		else {
			return shift;
		}
	};
	*ngettext=sub {
		$getobj->() if $getobj;
		if ($gettext_obj) {
			$gettext_obj->nget(@_);
		}
		else {
			return ($_[2] == 1 ? $_[0] : $_[1]);
		}
	};
}

sub gettext {
	define_gettext();
	gettext(@_);
}

sub ngettext {
	define_gettext();
	ngettext(@_);
}

sub yesno ($) {
	my $val=shift;

	return (defined $val && (lc($val) eq gettext("yes") || lc($val) eq "yes" || $val eq "1"));
}
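
# e.g. yesno("Yes") and yesno("1") are true, as is the locale's
# translation of "yes"; yesno(undef) and yesno("no") are false.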

sub inject {
	# Injects a new function into the symbol table to replace an
	# exported function.
	my %params=@_;

	# This is deep ugly perl foo, beware.
	no strict;
	no warnings;
	if (! defined $params{parent}) {
		$params{parent}='::';
		$params{old}=\&{$params{name}};
		$params{name}=~s/.*:://;
	}
	my $parent=$params{parent};
	foreach my $ns (grep /^\w+::/, keys %{$parent}) {
		$ns = $params{parent} . $ns;
		inject(%params, parent => $ns) unless $ns eq '::main::';
		*{$ns . $params{name}} = $params{call}
			if exists ${$ns}{$params{name}} &&
			   \&{${$ns}{$params{name}}} == $params{old};
	}
	use strict;
	use warnings;
}
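
# Usage sketch (my_gettext is hypothetical): a plugin can replace an
# exported function wiki-wide:
#   inject(name => "IkiWiki::gettext", call => sub { my_gettext(@_) });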

sub add_link ($$;$) {
	my $page=shift;
	my $link=shift;
	my $type=shift;

	push @{$links{$page}}, $link
		unless grep { $_ eq $link } @{$links{$page}};

	if (defined $type) {
		$typedlinks{$page}{$type}{$link} = 1;
	}
}
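
# e.g. add_link("index", "blog/foo") records the link once, no matter
# how often it is scanned; add_link("index", "blog/foo", "tag") also
# records it in %typedlinks as a tag-type link.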

sub add_autofile ($$$) {
	my $file=shift;
	my $plugin=shift;
	my $generator=shift;

	$autofiles{$file}{plugin}=$plugin;
	$autofiles{$file}{generator}=$generator;
}
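
# Usage sketch (names are hypothetical): register a file that a plugin
# can generate on demand:
#   add_autofile("tags/life.mdwn", "tag", \&gen_tag_page);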

sub useragent () {
	return LWP::UserAgent->new(
		cookie_jar => $config{cookiejar},
		env_proxy => 1, # respect proxy env vars
		agent => $config{useragent},
	);
}
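
# Note that LWP::UserAgent is not loaded here; callers are expected to
# bring it in, e.g.:
#   eval q{use LWP};
#   my $res=useragent()->get("http://example.com/feed");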

sub sortspec_translate ($$) {
	my $spec = shift;
	my $reverse = shift;

	my $code = "";
	my @data;
	while ($spec =~ m{
		\s*
		(-?)		# group 1: perhaps negated
		\s*
		(		# group 2: a word
			\w+\([^\)]*\)	# command(params)
			|
			[^\s]+		# or anything else
		)
		\s*
	}gx) {
		my $negated = $1;
		my $word = $2;
		my $params = undef;

		if ($word =~ m/^(\w+)\((.*)\)$/) {
			# command with parameters
			$params = $2;
			$word = $1;
		}
		elsif ($word !~ m/^\w+$/) {
			error(sprintf(gettext("invalid sort type %s"), $word));
		}

		if (length $code) {
			$code .= " || ";
		}

		if ($negated) {
			$code .= "-";
		}

		if (exists $IkiWiki::SortSpec::{"cmp_$word"}) {
			if (defined $params) {
				push @data, $params;
				$code .= "IkiWiki::SortSpec::cmp_$word(\$data[$#data])";
			}
			else {
				$code .= "IkiWiki::SortSpec::cmp_$word(undef)";
			}
		}
		else {
			error(sprintf(gettext("unknown sort type %s"), $word));
		}
	}

	if (! length $code) {
		# undefined sorting method... sort arbitrarily
		return sub { 0 };
	}

	if ($reverse) {
		$code="-($code)";
	}

	no warnings;
	return eval 'sub { '.$code.' }';
}
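
# e.g. sortspec_translate("age title", 0) returns a comparator that
# sorts primarily by age, breaking ties by title; a true second
# argument reverses the whole ordering.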

sub pagespec_translate ($) {
	my $spec=shift;

	# Convert spec to perl code.
	my $code="";
	my @data;
	while ($spec=~m{
		\s*		# ignore whitespace
		(		# 1: match a single word
			\!		# !
			|
			\(		# (
			|
			\)		# )
			|
			\w+\([^\)]*\)	# command(params)
			|
			[^\s()]+	# any other text
		)
		\s*		# ignore whitespace
	}gx) {
		my $word=$1;
		if (lc $word eq 'and') {
			$code.=' &';
		}
		elsif (lc $word eq 'or') {
			$code.=' |';
		}
		elsif ($word eq "(" || $word eq ")" || $word eq "!") {
			$code.=' '.$word;
		}
		elsif ($word =~ /^(\w+)\((.*)\)$/) {
			if (exists $IkiWiki::PageSpec::{"match_$1"}) {
				push @data, $2;
				$code.="IkiWiki::PageSpec::match_$1(\$page, \$data[$#data], \@_)";
			}
			else {
				push @data, qq{unknown function in pagespec "$word"};
				$code.="IkiWiki::ErrorReason->new(\$data[$#data])";
			}
		}
		else {
			push @data, $word;
			$code.=" IkiWiki::PageSpec::match_glob(\$page, \$data[$#data], \@_)";
		}
	}

	if (! length $code) {
		$code="IkiWiki::FailReason->new('empty pagespec')";
	}

	no warnings;
	return eval 'sub { my $page=shift; '.$code.' }';
}
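
# e.g. pagespec_translate("blog/* and !*/Discussion") compiles to a sub
# roughly equivalent to:
#   sub { my $page=shift;
#         match_glob($page, "blog/*", @_) & ! match_glob($page, "*/Discussion", @_) }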

sub pagespec_match ($$;@) {
	my $page=shift;
	my $spec=shift;
	my @params=@_;

	# Backwards compatibility with old calling convention.
	if (@params == 1) {
		unshift @params, 'location';
	}

	my $sub=pagespec_translate($spec);
	return IkiWiki::ErrorReason->new("syntax error in pagespec \"$spec\"")
		if ! defined $sub;
	return $sub->($page, @params);
}
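
# e.g. pagespec_match("blog/foo", "blog/*") returns a SuccessReason,
# which is true in boolean context; relative specs can be resolved by
# passing a location, e.g.
#   pagespec_match("blog/foo", "./foo", location => "blog/bar");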

# e.g. @pages = sort_pages("title", \@pages, reverse => "yes")
#
# Not exported yet, but could be in future if it is generally useful.
# Note that this signature is not the same as IkiWiki::SortSpec::sort_pages,
# which is "more internal".
sub sort_pages ($$;@) {
	my $sort = shift;
	my $list = shift;
	my %params = @_;
	$sort = sortspec_translate($sort, $params{reverse});
	return IkiWiki::SortSpec::sort_pages($sort, @$list);
}

sub pagespec_match_list ($$;@) {
	my $page=shift;
	my $pagespec=shift;
	my %params=@_;

	# Backwards compatibility with old calling convention.
	if (ref $page) {
		print STDERR "warning: a plugin (".caller().") is using pagespec_match_list in an obsolete way, and needs to be updated\n";
		$params{list}=$page;
		$page=$params{location}; # ugh!
	}

	my $sub=pagespec_translate($pagespec);
	error "syntax error in pagespec \"$pagespec\""
		if ! defined $sub;
	my $sort=sortspec_translate($params{sort}, $params{reverse})
		if defined $params{sort};

	my @candidates;
	if (exists $params{list}) {
		@candidates=exists $params{filter}
			? grep { ! $params{filter}->($_) } @{$params{list}}
			: @{$params{list}};
	}
	else {
		@candidates=exists $params{filter}
			? grep { ! $params{filter}->($_) } keys %pagesources
			: keys %pagesources;
	}

	# clear params, remainder is passed to pagespec
	$depends{$page}{$pagespec} |= ($params{deptype} || $DEPEND_CONTENT);
	my $num=$params{num};
	delete @params{qw{num deptype reverse sort filter list}};

	# when only the top matches will be returned, it's efficient to
	# sort before matching to pagespec
	if (defined $num && defined $sort) {
		@candidates=IkiWiki::SortSpec::sort_pages(
			$sort, @candidates);
	}

	my @matches;
	my $firstfail;
	my $count=0;
	my $accum=IkiWiki::SuccessReason->new();
	foreach my $p (@candidates) {
		my $r=$sub->($p, %params, location => $page);
		error(sprintf(gettext("cannot match pages: %s"), $r))
			if $r->isa("IkiWiki::ErrorReason");
		unless ($r || $r->influences_static) {
			$r->remove_influence($p);
		}
		$accum |= $r;
		if ($r) {
			push @matches, $p;
			last if defined $num && ++$count == $num;
		}
	}

	# Add simple dependencies for accumulated influences.
	my $i=$accum->influences;
	foreach my $k (keys %$i) {
		$depends_simple{$page}{lc $k} |= $i->{$k};
	}

	# when all matches will be returned, it's efficient to
	# sort after matching
	if (! defined $num && defined $sort) {
		return IkiWiki::SortSpec::sort_pages(
			$sort, @matches);
	}
	else {
		return @matches;
	}
}
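
# e.g. in a plugin:
#   my @posts=pagespec_match_list($params{page}, "blog/* and !*/Discussion",
#   	sort => "age", num => 10);
# matches, sorts, truncates, and records dependencies in one pass.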

sub pagespec_valid ($) {
	my $spec=shift;

	return defined pagespec_translate($spec);
}

sub glob2re ($) {
	my $re=quotemeta(shift);
	$re=~s/\\\*/.*/g;
	$re=~s/\\\?/./g;
	return qr/^$re$/i;
}
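
# e.g. glob2re("blog/*.mdwn") yields a case-insensitive regexp anchored
# at both ends, so "blog/Foo.mdwn" matches but "a/blog/foo.mdwn" does not.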

package IkiWiki::FailReason;

use overload (
	'""'	=> sub { $_[0][0] },
	'0+'	=> sub { 0 },
	'!'	=> sub { bless $_[0], 'IkiWiki::SuccessReason'},
	'&'	=> sub { $_[0]->merge_influences($_[1], 1); $_[0] },
	'|'	=> sub { $_[1]->merge_influences($_[0]); $_[1] },
	fallback => 1,
);

our @ISA = 'IkiWiki::SuccessReason';

package IkiWiki::SuccessReason;

# A blessed array-ref:
#
# [0]: human-readable reason for success (or, in FailReason subclass, failure)
# [1]{""}:
# - if absent or false, the influences of this evaluation are "static",
#   see the influences_static method
# - if true, they are dynamic (not static)
# [1]{any other key}:
# the dependency types of influences, as returned by the influences method

use overload (
	# in string context, it's the human-readable reason
	'""'	=> sub { $_[0][0] },
	# in boolean context, SuccessReason is 1 and FailReason is 0
	'0+'	=> sub { 1 },
	# negating a result gives the opposite result with the same influences
	'!'	=> sub { bless $_[0], 'IkiWiki::FailReason'},
	# A & B = (A ? B : A) with the influences of both
	'&'	=> sub { $_[1]->merge_influences($_[0], 1); $_[1] },
	# A | B = (A ? A : B) with the influences of both
	'|'	=> sub { $_[0]->merge_influences($_[1]); $_[0] },
	fallback => 1,
);

# SuccessReason->new("human-readable reason", page => deptype, ...)

sub new {
	my $class = shift;
	my $value = shift;
	return bless [$value, {@_}], $class;
}
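
# e.g. IkiWiki::SuccessReason->new("foo links to bar",
#	foo => $IkiWiki::DEPEND_LINKS, "" => 1)
# is a success influenced by foo's links, marked dynamic by "" => 1.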

# influences(): return a reference to a copy of the hash
# { page => dependency type } describing the pages that indirectly influenced
# this result, but would not cause a dependency through ikiwiki's core
# dependency logic.
#
# See [[todo/dependency_types]] for extensive discussion of what this means.
#
# influences(page => deptype, ...): remove all influences, replace them
# with the arguments, and return a reference to a copy of the new influences.

sub influences {
	my $this=shift;
	$this->[1]={@_} if @_;
	my %i=%{$this->[1]};
	delete $i{""};
	return \%i;
}

# True if this result has the same influences whichever page it matches.
# For instance, whether bar matches backlink(foo) is influenced only by
# the set of links in foo, so its only influence is { foo => DEPEND_LINKS },
# which does not mention bar anywhere.
#
# False if this result would have different influences when matching
# different pages. For instance, when testing whether link(foo) matches bar,
# { bar => DEPEND_LINKS } is an influence on that result, because changing
# bar's links could change the outcome; so its influences are not the same
# as when testing whether link(foo) matches baz.
#
# Static influences are one of the things that make pagespec_match_list
# more efficient than repeated calls to pagespec_match.

sub influences_static {
	return ! $_[0][1]->{""};
}

# Change the influences of $this to be the influences of "$this & $other"
# or "$this | $other".
#
# If both $this and $other are either successful or have influences,
# or this is an "or" operation, the result has all the influences from
# either of the arguments. It has dynamic influences if either argument
# has dynamic influences.
#
# If this is an "and" operation, and at least one argument is a
# FailReason with no influences, the result has no influences, and they
# are not dynamic. For instance, link(foo) matching bar is influenced
# by bar, but enabled(ddate) has no influences. Suppose ddate is disabled;
# then (link(foo) and enabled(ddate)) not matching bar is not influenced by
# bar, because it would be false however often you edit bar.

sub merge_influences {
	my $this=shift;
	my $other=shift;
	my $anded=shift;

	# This "if" is odd because it needs to avoid negating $this
	# or $other, which would alter the objects in-place. Be careful.
	if (! $anded || (($this || %{$this->[1]}) &&
	                 ($other || %{$other->[1]}))) {
		foreach my $influence (keys %{$other->[1]}) {
			$this->[1]{$influence} |= $other->[1]{$influence};
		}
	}
	else {
		# influence blocker
		$this->[1]={};
	}
}

# Change $this so it is not considered to be influenced by $torm.

sub remove_influence {
	my $this=shift;
	my $torm=shift;

	delete $this->[1]{$torm};
}

package IkiWiki::ErrorReason;

our @ISA = 'IkiWiki::FailReason';

package IkiWiki::PageSpec;

sub derel ($$) {
	my $path=shift;
	my $from=shift;

	if ($path =~ m!^\.(/|$)!) {
		if ($1) {
			$from=~s#/?[^/]+$## if defined $from;
			$path=~s#^\./##;
			$path="$from/$path" if defined $from && length $from;
		}
		else {
			$path = $from;
			$path = "" unless defined $path;
		}
	}

	return $path;
}
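
# e.g. derel("./sidebar", "blog/entry") is "blog/sidebar", and
# derel(".", "blog/entry") is "blog/entry"; paths not starting with "."
# pass through unchanged.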

my %glob_cache;

sub match_glob ($$;@) {
	my $page=shift;
	my $glob=shift;
	my %params=@_;

	$glob=derel($glob, $params{location});

	# Instead of converting the glob to a regex every time,
	# cache the compiled regex to save time.
	my $re=$glob_cache{$glob};
	unless (defined $re) {
		$glob_cache{$glob} = $re = IkiWiki::glob2re($glob);
	}
	if ($page =~ $re) {
		if (! IkiWiki::isinternal($page) || $params{internal}) {
			return IkiWiki::SuccessReason->new("$glob matches $page");
		}
		else {
			return IkiWiki::FailReason->new("$glob matches $page, but the page is an internal page");
		}
	}
	else {
		return IkiWiki::FailReason->new("$glob does not match $page");
	}
}
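
# e.g. match_glob("blog/foo", "blog/*") succeeds; internal pages only
# match when internal => 1 is passed, which is what match_internal does.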

sub match_internal ($$;@) {
	return match_glob(shift, shift, @_, internal => 1)
}

sub match_page ($$;@) {
	my $page=shift;
	my $match=match_glob($page, shift, @_);
	if ($match) {
		my $source=exists $IkiWiki::pagesources{$page} ?
			$IkiWiki::pagesources{$page} :
			$IkiWiki::delpagesources{$page};
		my $type=defined $source ? IkiWiki::pagetype($source) : undef;
		if (! defined $type) {
			return IkiWiki::FailReason->new("$page is not a page");
		}
	}
	return $match;
}

sub match_link ($$;@) {
	my $page=shift;
	my $link=lc(shift);
	my %params=@_;

	$link=derel($link, $params{location});
	my $from=exists $params{location} ? $params{location} : '';
	my $linktype=$params{linktype};
	my $qualifier='';
	if (defined $linktype) {
		$qualifier=" with type $linktype";
	}

	my $links = $IkiWiki::links{$page};
	return IkiWiki::FailReason->new("$page has no links", $page => $IkiWiki::DEPEND_LINKS, "" => 1)
		unless $links && @{$links};
	my $bestlink = IkiWiki::bestlink($from, $link);
	foreach my $p (@{$links}) {
		next unless (! defined $linktype || exists $IkiWiki::typedlinks{$page}{$linktype}{$p});

		if (length $bestlink) {
			if ($bestlink eq IkiWiki::bestlink($page, $p)) {
				return IkiWiki::SuccessReason->new("$page links to $link$qualifier", $page => $IkiWiki::DEPEND_LINKS, "" => 1);
			}
		}
		else {
			if (match_glob($p, $link, %params)) {
				return IkiWiki::SuccessReason->new("$page links to page $p$qualifier, matching $link", $page => $IkiWiki::DEPEND_LINKS, "" => 1);
			}
			my ($p_rel)=$p=~/^\/?(.*)/;
			$link=~s/^\///;
			if (match_glob($p_rel, $link, %params)) {
				return IkiWiki::SuccessReason->new("$page links to page $p_rel$qualifier, matching $link", $page => $IkiWiki::DEPEND_LINKS, "" => 1);
			}
		}
	}
	return IkiWiki::FailReason->new("$page does not link to $link$qualifier", $page => $IkiWiki::DEPEND_LINKS, "" => 1);
}
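
# e.g. match_link("index", "blog/foo") succeeds if index links to
# blog/foo; every return path carries index's links as an influence,
# flagged dynamic ("" => 1) since it varies with the page being tested.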

sub match_backlink ($$;@) {
	my $page=shift;
	my $testpage=shift;
	my %params=@_;
	if ($testpage eq '.') {
		$testpage = $params{'location'}
	}
	my $ret=match_link($testpage, $page, @_);
	$ret->influences($testpage => $IkiWiki::DEPEND_LINKS);
	return $ret;
}

sub match_created_before ($$;@) {
	my $page=shift;
	my $testpage=shift;
	my %params=@_;

	$testpage=derel($testpage, $params{location});

	if (exists $IkiWiki::pagectime{$testpage}) {
		if ($IkiWiki::pagectime{$page} < $IkiWiki::pagectime{$testpage}) {
			return IkiWiki::SuccessReason->new("$page created before $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
		}
		else {
			return IkiWiki::FailReason->new("$page not created before $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
		}
	}
	else {
		return IkiWiki::ErrorReason->new("$testpage does not exist", $testpage => $IkiWiki::DEPEND_PRESENCE);
	}
}

sub match_created_after ($$;@) {
	my $page=shift;
	my $testpage=shift;
	my %params=@_;

	$testpage=derel($testpage, $params{location});

	if (exists $IkiWiki::pagectime{$testpage}) {
		if ($IkiWiki::pagectime{$page} > $IkiWiki::pagectime{$testpage}) {
			return IkiWiki::SuccessReason->new("$page created after $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
		}
		else {
			return IkiWiki::FailReason->new("$page not created after $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
		}
	}
	else {
		return IkiWiki::ErrorReason->new("$testpage does not exist", $testpage => $IkiWiki::DEPEND_PRESENCE);
	}
}
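
# e.g. created_after(blog/first) matches pages with a ctime newer than
# blog/first's; both tests depend only on the presence of the named
# page, hence the DEPEND_PRESENCE influence.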

sub match_creation_day ($$;@) {
	my $page=shift;
	my $d=shift;
	if ($d !~ /^\d+$/) {
		return IkiWiki::ErrorReason->new("invalid day $d");
	}
	if ((localtime($IkiWiki::pagectime{$page}))[3] == $d) {
		return IkiWiki::SuccessReason->new('creation_day matched');
	}
	else {
		return IkiWiki::FailReason->new('creation_day did not match');
	}
}

sub match_creation_month ($$;@) {
	my $page=shift;
	my $m=shift;
	if ($m !~ /^\d+$/) {
		return IkiWiki::ErrorReason->new("invalid month $m");
	}
	if ((localtime($IkiWiki::pagectime{$page}))[4] + 1 == $m) {
		return IkiWiki::SuccessReason->new('creation_month matched');
	}
	else {
		return IkiWiki::FailReason->new('creation_month did not match');
	}
}

sub match_creation_year ($$;@) {
	my $page=shift;
	my $y=shift;
	if ($y !~ /^\d+$/) {
		return IkiWiki::ErrorReason->new("invalid year $y");
	}
	if ((localtime($IkiWiki::pagectime{$page}))[5] + 1900 == $y) {
		return IkiWiki::SuccessReason->new('creation_year matched');
	}
	else {
		return IkiWiki::FailReason->new('creation_year did not match');
	}
}
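
# e.g. creation_day(1) matches pages created on the first of any month;
# these tests use localtime, so they follow the server's timezone.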

sub match_user ($$;@) {
	shift;
	my $user=shift;
	my %params=@_;

	if (! exists $params{user}) {
		return IkiWiki::ErrorReason->new("no user specified");
	}

	my $regexp=IkiWiki::glob2re($user);

	if (defined $params{user} && $params{user}=~$regexp) {
		return IkiWiki::SuccessReason->new("user is $user");
	}
	elsif (! defined $params{user}) {
		return IkiWiki::FailReason->new("not logged in");
	}
	else {
		return IkiWiki::FailReason->new("user is $params{user}, not $user");
	}
}
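
# e.g. user(joe*) matches any logged-in user whose name starts with
# "joe"; the glob is matched case-insensitively via glob2re.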

sub match_admin ($$;@) {
	shift;
	shift;
	my %params=@_;

	if (! exists $params{user}) {
		return IkiWiki::ErrorReason->new("no user specified");
	}

	if (defined $params{user} && IkiWiki::is_admin($params{user})) {
		return IkiWiki::SuccessReason->new("user is an admin");
	}
	elsif (! defined $params{user}) {
		return IkiWiki::FailReason->new("not logged in");
	}
	else {
		return IkiWiki::FailReason->new("user is not an admin");
	}
}

sub match_ip ($$;@) {
	shift;
	my $ip=shift;
	my %params=@_;

	if (! exists $params{ip}) {
		return IkiWiki::ErrorReason->new("no IP specified");
	}

	my $regexp=IkiWiki::glob2re(lc $ip);

	if (defined $params{ip} && lc $params{ip}=~$regexp) {
		return IkiWiki::SuccessReason->new("IP is $ip");
	}
	else {
		return IkiWiki::FailReason->new("IP is $params{ip}, not $ip");
	}
}

package IkiWiki::SortSpec;

# This is in the SortSpec namespace so that the $a and $b that sort() uses
# are easily available in this namespace, for cmp functions to use them.
sub sort_pages {
	my $f=shift;
	sort $f @_
}

sub cmp_title {
	IkiWiki::pagetitle(IkiWiki::basename($a))
	cmp
	IkiWiki::pagetitle(IkiWiki::basename($b))
}

sub cmp_path { IkiWiki::pagetitle($a) cmp IkiWiki::pagetitle($b) }
sub cmp_mtime { $IkiWiki::pagemtime{$b} <=> $IkiWiki::pagemtime{$a} }
sub cmp_age { $IkiWiki::pagectime{$b} <=> $IkiWiki::pagectime{$a} }
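
# Plugins may define additional cmp_* subs in this namespace to add
# sort methods; e.g. a hypothetical cmp_size could compare source file
# sizes. Note that cmp_mtime and cmp_age both sort newest first.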

1