#!/usr/bin/perl -T
|
|
$ENV{PATH}="/usr/local/bin:/usr/bin:/bin";
|
|
|
|
package IkiWiki;
|
|
use warnings;
|
|
use strict;
|
|
use File::Spec;
|
|
use HTML::Template;
|
|
use lib '.'; # For use without installation, removed by Makefile.
|
|
|
|
use vars qw{%config %links %oldlinks %oldpagemtime %pagectime
|
|
%renderedfiles %pagesources %inlinepages};
|
|
|
|
sub usage () { #{{{
	# Print a usage summary to stderr and exit non-zero.
	die "usage: ikiwiki [options] source dest\n";
} #}}}
|
|
|
|
sub getconfig () { #{{{
	# Initialize the global %config.  When run directly, defaults are
	# set here and then overridden by command-line options.  When run
	# from a compiled wrapper, the wrapper instead passes the entire
	# pre-built config through the WRAPPED_OPTIONS environment variable.
	if (! exists $ENV{WRAPPED_OPTIONS}) {
		%config=(
			# Files matching this regexp are never treated as wiki pages.
			wiki_file_prune_regexp => qr{((^|/).svn/|\.\.|^\.|\/\.|\.html?$|\.rss$)},
			# Matches [[link]] and [[text|link]].
			wiki_link_regexp => qr/\[\[(?:([^\s\]\|]+)\|)?([^\s\]]+)\]\]/,
			# Matches [[directive parameters]] preprocessor syntax.
			wiki_processor_regexp => qr/\[\[(\w+)\s+([^\]]+)\]\]/,
			# Filenames must match this (capture group untaints the match).
			wiki_file_regexp => qr/(^[-A-Za-z0-9_.:\/+]+$)/,
			verbose => 0,
			wikiname => "wiki",
			default_pageext => ".mdwn",
			cgi => 0,
			svn => 1,
			url => '',
			cgiurl => '',
			historyurl => '',
			diffurl => '',
			anonok => 0,
			rss => 0,
			rebuild => 0,
			refresh => 0,
			getctime => 0,
			hyperestraier => 0,
			wrapper => undef,
			wrappermode => undef,
			srcdir => undef,
			destdir => undef,
			templatedir => "/usr/share/ikiwiki/templates",
			underlaydir => "/usr/share/ikiwiki/basewiki",
			setup => undef,
			adminuser => undef,
		);

		# Loaded lazily via string eval; the wrapper code path never
		# needs Getopt::Long at all.
		eval q{use Getopt::Long};
		GetOptions(
			"setup|s=s" => \$config{setup},
			"wikiname=s" => \$config{wikiname},
			"verbose|v!" => \$config{verbose},
			"rebuild!" => \$config{rebuild},
			"refresh!" => \$config{refresh},
			"getctime" => \$config{getctime},
			"wrappermode=i" => \$config{wrappermode},
			"svn!" => \$config{svn},
			"anonok!" => \$config{anonok},
			"hyperestraier" => \$config{hyperestraier},
			"rss!" => \$config{rss},
			"cgi!" => \$config{cgi},
			"url=s" => \$config{url},
			"cgiurl=s" => \$config{cgiurl},
			"historyurl=s" => \$config{historyurl},
			"diffurl=s" => \$config{diffurl},
			# Each --exclude pattern is OR'd onto the prune regexp.
			"exclude=s@" => sub {
				$config{wiki_file_prune_regexp}=qr/$config{wiki_file_prune_regexp}|$_[1]/;
			},
			"adminuser=s@" => sub {
				push @{$config{adminuser}}, $_[1]
			},
			# These directories are used in file operations under -T,
			# so their values must be untainted.
			"templatedir=s" => sub {
				$config{templatedir}=possibly_foolish_untaint($_[1])
			},
			"underlaydir=s" => sub {
				$config{underlaydir}=possibly_foolish_untaint($_[1])
			},
			# Optional value: plain --wrapper picks a default filename.
			"wrapper:s" => sub {
				$config{wrapper}=$_[1] ? $_[1] : "ikiwiki-wrap"
			},
		) || usage();

		if (! $config{setup}) {
			# Normal invocation requires exactly srcdir and destdir.
			usage() unless @ARGV == 2;
			$config{srcdir} = possibly_foolish_untaint(shift @ARGV);
			$config{destdir} = possibly_foolish_untaint(shift @ARGV);
			checkconfig();
		}
	}
	else {
		# wrapper passes a full config structure in the environment
		# variable
		eval possibly_foolish_untaint($ENV{WRAPPED_OPTIONS});
		checkconfig();
	}
} #}}}
|
|
|
|
sub checkconfig () { #{{{
	# Sanity-check %config and fill in settings derived from it.
	# Several features cannot work without knowing the wiki's url.
	error("Must specify url to wiki with --url when using --cgi\n")
		if $config{cgi} && ! length $config{url};
	error("Must specify url to wiki with --url when using --rss\n")
		if $config{rss} && ! length $config{url};
	error("Must specify --url when using --hyperestraier\n")
		if $config{hyperestraier} && ! length $config{url};

	# Per-wiki state (index, lockfile, userdb) lives inside srcdir.
	if (! exists $config{wikistatedir}) {
		$config{wikistatedir}="$config{srcdir}/.ikiwiki";
	}

	# Pull in the real RCS backend, or the no-op stub.
	if ($config{svn}) {
		require IkiWiki::Rcs::SVN;
		$config{rcs}=1;
	}
	else {
		require IkiWiki::Rcs::Stub;
		$config{rcs}=0;
	}
} #}}}
|
|
|
|
sub error ($) { #{{{
	# Fatal error reporting.  A cgi run emits a complete html error
	# page first, since the browser would otherwise show nothing.
	my @msg=@_;
	if ($config{cgi}) {
		print "Content-type: text/html\n\n";
		print misctemplate("Error", "<p>Error: @msg</p>");
	}
	die @msg;
} #}}}
|
|
|
|
sub possibly_foolish_untaint ($) { #{{{
	# Launder taint from a string, returning it verbatim.  The /s
	# modifier lets . span newlines; without it everything after the
	# first newline was silently dropped, corrupting multi-line data
	# (e.g. a serialized config passed through WRAPPED_OPTIONS).
	my $tainted=shift;
	my ($untainted)=$tainted=~/(.*)/s;
	return $untainted;
} #}}}
|
|
|
|
sub debug ($) { #{{{
	# Print a debug message, but only in --verbose mode.  CGI runs must
	# not pollute stdout (the http response), so they log to stderr.
	return unless $config{verbose};
	if ($config{cgi}) {
		print STDERR "@_\n";
	}
	else {
		print "@_\n";
	}
} #}}}
|
|
|
|
sub basename ($) { #{{{
	# Return the final path component of $file (empty for a trailing
	# slash).
	my $file=shift;

	my $slash=rindex($file, "/");
	return $slash < 0 ? $file : substr($file, $slash + 1);
} #}}}
|
|
|
|
sub dirname ($) { #{{{
	# Strip the final path component (and its leading slash) from
	# $file.  A path ending in "/" has no final component to strip and
	# is returned unchanged.
	my $file=shift;

	$file =~ s{ /? [^/]+ \z }{}x;
	return $file;
} #}}}
|
|
|
|
sub pagetype ($) { #{{{
	# Return the source-page extension of $page, or "unknown" for files
	# ikiwiki cannot render (only .mdwn is supported so far).
	my $page=shift;

	return $page =~ /\.mdwn$/ ? ".mdwn" : "unknown";
} #}}}
|
|
|
|
sub pagename ($) { #{{{
	# Convert a source filename into its wiki page name by stripping
	# the known extension.  The old pattern /\Q$type\E*$/ applied the
	# "*" quantifier to the extension's final character rather than the
	# whole extension; anchor and strip the extension exactly once.
	my $file=shift;

	my $type=pagetype($file);
	my $page=$file;
	$page=~s/\Q$type\E$// unless $type eq 'unknown';
	return $page;
} #}}}
|
|
|
|
sub htmlpage ($) { #{{{
	# Map a page name to the html file it renders to.
	my $page=shift;

	return "${page}.html";
} #}}}
|
|
|
|
sub srcfile ($) { #{{{
	# Locate $file in the wiki's srcdir, falling back to the underlay
	# (the shipped basewiki).  Dies if it exists in neither place.
	my $file=shift;

	foreach my $dir ($config{srcdir}, $config{underlaydir}) {
		return "$dir/$file" if -e "$dir/$file";
	}
	error("internal error: $file cannot be found");
} #}}}
|
|
|
|
sub readfile ($) { #{{{
	# Slurp and return the entire contents of $file.  Refuses symlinks
	# so a malicious checkout cannot trick ikiwiki into reading files
	# outside the wiki.  Uses a lexical filehandle and 3-arg open so a
	# filename beginning with ">", "|", etc. cannot change the open
	# mode (the old 2-arg open was a mode-injection footgun).
	my $file=shift;

	if (-l $file) {
		error("cannot read a symlink ($file)");
	}

	local $/=undef;
	open (my $in, '<', $file) || error("failed to read $file: $!");
	my $ret=<$in>;
	close $in;
	return $ret;
} #}}}
|
|
|
|
sub writefile ($$$) { #{{{
	# Write $content to $file under $destdir, creating intermediate
	# directories as needed.  Refuses to write through a symlink at any
	# point along the path, so a malicious checkout cannot redirect
	# output outside the destination tree.
	my $file=shift; # can include subdirs
	my $destdir=shift; # directory to put file in
	my $content=shift;

	# Walk up the path checking each component for symlinks.
	my $test=$file;
	while (length $test) {
		if (-l "$destdir/$test") {
			error("cannot write to a symlink ($test)");
		}
		$test=dirname($test);
	}

	my $dir=dirname("$destdir/$file");
	if (! -d $dir) {
		# mkdir -p equivalent: create each missing path component.
		my $d="";
		foreach my $s (split(m!/+!, $dir)) {
			$d.="$s/";
			if (! -d $d) {
				mkdir($d) || error("failed to create directory $d: $!");
			}
		}
	}

	# 3-arg open with a lexical handle (the old 2-arg bareword open was
	# a mode-injection footgun).  Also check close: buffered write
	# errors such as a full disk only surface then.
	open (my $out, '>', "$destdir/$file") ||
		error("failed to write $destdir/$file: $!");
	print $out $content;
	close($out) || error("failed saving $destdir/$file: $!");
} #}}}
|
|
|
|
sub bestlink ($$) { #{{{
	# Resolve the text of a wikilink on $page to an existing page.
	# Prefers a page under a subdirectory named after the source page,
	# then walks up the directory tree toward the wiki root looking for
	# a match.  Returns "" when the link is broken.
	my $page=shift;
	my $link=lc(shift);

	my $cwd=$page;
	while (1) {
		my $candidate=length $cwd ? "$cwd/$link" : $link;

		if (exists $links{$candidate}) {
			#debug("for $page, \"$link\", use $candidate");
			return $candidate;
		}
		last unless $cwd=~s!/?[^/]+$!!;
	}

	#print STDERR "warning: page $page, broken link: $link\n";
	return "";
} #}}}
|
|
|
|
sub isinlinableimage ($) { #{{{
	# True if $file looks like an image that can be displayed inline in
	# a page, judged by extension (case-insensitive).
	my $file=shift;

	$file =~ m{ \. (png|gif|jpg|jpeg) $ }ix;
} #}}}
|
|
|
|
sub pagetitle ($) { #{{{
	# Reverse of titlepage(): decode "__NNN__" escapes into html
	# character entities and turn underscores back into spaces.
	my $page=shift;
	$page =~ s{__(\d+)__}{&#$1;}g;
	$page =~ tr/_/ /;
	return $page;
} #}}}
|
|
|
|
sub titlepage ($) { #{{{
	# Encode a free-form title as a safe page name: spaces become
	# underscores, and any other disallowed character becomes __NNN__
	# (its ordinal value).
	my $title=shift;
	$title =~ tr/ /_/;
	$title =~ s{([^-A-Za-z0-9_:+\/.])}{"__".ord($1)."__"}eg;
	return $title;
} #}}}
|
|
|
|
sub cgiurl (@) { #{{{
	# Build an url to the wiki cgi with the given query parameters.
	# Parameters are sorted by name so the result is deterministic;
	# perl randomizes hash iteration order per process, which made
	# generated urls (and thus rendered pages) vary between runs.
	# NOTE(review): values are interpolated without url-escaping;
	# current callers pass known-safe values — verify for new callers.
	my %params=@_;

	return $config{cgiurl}."?".
		join("&", map "$_=$params{$_}", sort keys %params);
} #}}}
|
|
|
|
sub styleurl (;$) { #{{{
	# Return the url of the wiki stylesheet: absolute when no page is
	# given, otherwise relative to $page (one "../" per directory level
	# of the page).
	my $page=shift;

	return "$config{url}/style.css" unless defined $page;

	$page =~ s{[^/]+$}{};            # drop the page's own name
	my $updirs = ($page =~ tr{/}{}); # one parent hop per directory
	return ("../" x $updirs)."style.css";
} #}}}
|
|
|
|
sub htmllink ($$;$$$) { #{{{
	# Generate the html for a link from $page to $link: an inline <img>
	# for image links, a normal <a> for existing pages, or a "?" create
	# link (via the cgi) when the target page does not exist yet.
	my $page=shift;
	my $link=shift;
	my $noimageinline=shift; # don't turn links into inline html images
	my $forcesubpage=shift; # force a link to a subpage
	my $linktext=shift; # set to force the link text to something

	my $bestlink;
	if (! $forcesubpage) {
		$bestlink=bestlink($page, $link);
	}
	else {
		$bestlink="$page/".lc($link);
	}

	$linktext=pagetitle(basename($link)) unless defined $linktext;

	# A page linking to itself renders as plain text, not a link.
	return $linktext if length $bestlink && $page eq $bestlink;

	# TODO BUG: %renderedfiles may not have it, if the linked to page
	# was also added and isn't yet rendered! Note that this bug is
	# masked by the bug mentioned below that makes all new files
	# be rendered twice.
	if (! grep { $_ eq $bestlink } values %renderedfiles) {
		$bestlink=htmlpage($bestlink);
	}
	if (! grep { $_ eq $bestlink } values %renderedfiles) {
		# Still not rendered: the target is missing, so offer to
		# create it through the cgi.
		return "<span><a href=\"".
			cgiurl(do => "create", page => $link, from =>$page).
			"\">?</a>$linktext</span>"
	}

	# Make the href relative to the linking page's directory.
	$bestlink=File::Spec->abs2rel($bestlink, dirname($page));

	if (! $noimageinline && isinlinableimage($bestlink)) {
		return "<img src=\"$bestlink\" alt=\"$linktext\" />";
	}
	return "<a href=\"$bestlink\">$linktext</a>";
} #}}}
|
|
|
|
sub indexlink () { #{{{
	# An html link to the wiki's top page.
	my $url=$config{url};
	my $name=$config{wikiname};
	return "<a href=\"$url\">$name</a>";
} #}}}
|
|
|
|
sub lockwiki () { #{{{
	# Take an exclusive lock on the wiki to prevent multiple concurrent
	# run issues. The lock will be dropped on program exit.
	# The WIKILOCK handle stays a package bareword because unlockwiki()
	# closes it.
	if (! -d $config{wikistatedir}) {
		mkdir($config{wikistatedir});
	}
	open(WIKILOCK, '>', "$config{wikistatedir}/lockfile") ||
		error ("cannot write to $config{wikistatedir}/lockfile: $!");
	# 2 | 4 == LOCK_EX | LOCK_NB: try for the lock without blocking.
	if (! flock(WIKILOCK, 2 | 4)) {
		debug("wiki seems to be locked, waiting for lock");
		my $wait=600; # arbitrary, but don't hang forever to
		              # prevent process pileup
		# Loop bound now uses $wait instead of a duplicated literal,
		# so the timeout and the error message cannot drift apart.
		for (1..$wait) {
			return if flock(WIKILOCK, 2 | 4);
			sleep 1;
		}
		error("wiki is locked; waited $wait seconds without lock being freed (possible stuck process or stale lock?)");
	}
} #}}}
|
|
|
|
sub unlockwiki () { #{{{
	# Release the lock taken by lockwiki (closing the handle drops the
	# flock; this also happens implicitly on program exit).
	close WIKILOCK;
} #}}}
|
|
|
|
sub loadindex () { #{{{
	# Load the saved index of page metadata (mtime, ctime, source file,
	# rendered file, links) from the previous run into the globals.
	# Each index line is space-separated key=value items; repeated keys
	# accumulate into a list (used for link=).  Silently does nothing
	# when no index exists yet (first run).  Uses a lexical handle and
	# 3-arg open instead of the old 2-arg bareword open.
	open (my $in, '<', "$config{wikistatedir}/index") || return;
	while (<$in>) {
		$_=possibly_foolish_untaint($_);
		chomp;
		my %items;
		$items{link}=[];
		foreach my $i (split(/ /, $_)) {
			my ($item, $val)=split(/=/, $i, 2);
			push @{$items{$item}}, $val;
		}

		next unless exists $items{src}; # skip bad lines for now

		my $page=pagename($items{src}[0]);
		# On --rebuild, only ctimes are kept; everything else is
		# regenerated from scratch.
		if (! $config{rebuild}) {
			$pagesources{$page}=$items{src}[0];
			$oldpagemtime{$page}=$items{mtime}[0];
			$oldlinks{$page}=[@{$items{link}}];
			$links{$page}=[@{$items{link}}];
			$inlinepages{$page}=join(" ", @{$items{inlinepage}})
				if exists $items{inlinepage};
			$renderedfiles{$page}=$items{dest}[0];
		}
		$pagectime{$page}=$items{ctime}[0];
	}
	close $in;
} #}}}
|
|
|
|
sub saveindex () { #{{{
	# Write the page metadata index that loadindex reads on the next
	# run.  Pages with no recorded mtime (i.e. deleted) are skipped.
	# Uses a lexical handle with 3-arg open, and checks close so
	# buffered write errors (e.g. disk full) are not silently lost.
	if (! -d $config{wikistatedir}) {
		mkdir($config{wikistatedir});
	}
	open (my $out, '>', "$config{wikistatedir}/index") ||
		error("cannot write to $config{wikistatedir}/index: $!");
	foreach my $page (keys %oldpagemtime) {
		next unless $oldpagemtime{$page};
		my $line="mtime=$oldpagemtime{$page} ".
			"ctime=$pagectime{$page} ".
			"src=$pagesources{$page} ".
			"dest=$renderedfiles{$page}";
		$line.=" link=$_" foreach @{$links{$page}};
		if (exists $inlinepages{$page}) {
			$line.=" inlinepage=$_" foreach split " ", $inlinepages{$page};
		}
		print $out $line."\n";
	}
	close($out) || error("failed saving $config{wikistatedir}/index: $!");
} #}}}
|
|
|
|
sub misctemplate ($$) { #{{{
	# Render a standalone page (e.g. an error page) via misc.tmpl,
	# returning the finished html.
	my ($title, $pagebody)=@_;

	my $tmpl=HTML::Template->new(
		filename => "$config{templatedir}/misc.tmpl"
	);
	$tmpl->param(
		wikiname => $config{wikiname},
		title => $title,
		pagebody => $pagebody,
		indexlink => indexlink(),
		styleurl => styleurl(),
	);
	return $tmpl->output;
}#}}}
|
|
|
|
sub userinfo_get ($$) { #{{{
	# Look up one field of a user's record in the user database.
	# Returns "" when the database, the user, or the field is missing.
	my ($user, $field)=@_;

	eval q{use Storable}; # loaded lazily; most runs never touch userdb
	my $userdata=eval{ Storable::lock_retrieve("$config{wikistatedir}/userdb") };
	return "" unless defined $userdata && ref $userdata;
	return "" unless exists $userdata->{$user} && ref $userdata->{$user};
	return "" unless exists $userdata->{$user}->{$field};
	return $userdata->{$user}->{$field};
} #}}}
|
|
|
|
sub userinfo_set ($$$) { #{{{
	# Set one field of an existing user's record and save the database.
	# Returns "" if the user does not exist, otherwise the (true)
	# result of storing the updated database.
	my ($user, $field, $value)=@_;

	eval q{use Storable}; # loaded lazily
	my $userdata=eval{ Storable::lock_retrieve("$config{wikistatedir}/userdb") };
	return "" unless defined $userdata && ref $userdata &&
		exists $userdata->{$user} && ref $userdata->{$user};

	$userdata->{$user}->{$field}=$value;
	# Keep the userdb private while writing; it holds user data.
	my $oldmask=umask(077);
	my $ret=Storable::lock_store($userdata, "$config{wikistatedir}/userdb");
	umask($oldmask);
	return $ret;
} #}}}
|
|
|
|
sub userinfo_setall ($$) { #{{{
	# Replace (or create) a user's entire record and save the database.
	# Creates the database itself on first use.
	my ($user, $info)=@_;

	eval q{use Storable}; # loaded lazily
	my $userdata=eval{ Storable::lock_retrieve("$config{wikistatedir}/userdb") };
	$userdata={} unless defined $userdata && ref $userdata;

	$userdata->{$user}=$info;
	# Keep the userdb private while writing; it holds user data.
	my $oldmask=umask(077);
	my $ret=Storable::lock_store($userdata, "$config{wikistatedir}/userdb");
	umask($oldmask);
	return $ret;
} #}}}
|
|
|
|
sub is_admin ($) { #{{{
	# True if the named user appears in the --adminuser list.
	my $user_name=shift;

	return grep { $user_name eq $_ } @{$config{adminuser}};
} #}}}
|
|
|
|
sub glob_match ($$) { #{{{
	# Case-insensitively match $page against a shell-style glob with
	# "*" and "?" wildcards.
	my $page=shift;
	my $glob=shift;

	# Escape regex metacharacters, then translate the wildcards and
	# restore the literal slashes that quotemeta escaped.
	my $re=quotemeta($glob);
	$re =~ s/\\\*/.*/g;
	$re =~ s/\\\?/./g;
	$re =~ s!\\/!/!g;

	$page=~/^$re$/i;
} #}}}
|
|
|
|
sub globlist_match ($$) { #{{{
	# Match $page against a space-separated list of globs.  A glob
	# prefixed with "!" is a veto: if any negated glob matches, the
	# whole list fails regardless of the other entries.
	my $page=shift;
	my @globlist=split(" ", shift);

	# Vetoes are checked first.
	foreach my $glob (@globlist) {
		if ($glob=~/^!(.*)/) {
			return 0 if glob_match($page, $1);
		}
	}

	# Then any positive match wins.
	foreach my $glob (@globlist) {
		return 1 if glob_match($page, $glob);
	}

	return 0;
} #}}}
|
|
|
|
sub main () { #{{{
	# Entry point: load config, then dispatch on the mode of operation.
	getconfig();

	if ($config{cgi}) {
		# CGI request handling (edit, signin, recentchanges, ...).
		lockwiki();
		loadindex();
		require IkiWiki::CGI;
		cgi();
	}
	elsif ($config{setup}) {
		# --setup: drive everything from a setup file.
		# NOTE: this branch takes no lock itself.
		require IkiWiki::Setup;
		setup();
	}
	elsif ($config{wrapper}) {
		# --wrapper: generate a wrapper binary.
		lockwiki();
		require IkiWiki::Wrapper;
		gen_wrapper();
	}
	else {
		# Normal operation: compile the wiki from srcdir to destdir.
		lockwiki();
		loadindex();
		require IkiWiki::Render;
		rcs_update();
		rcs_getctime() if $config{getctime};
		refresh();
		saveindex();
	}
} #}}}
|
|
|
|
# Run the program.
main;
|