#!/usr/bin/perl -T

use warnings;
use strict;
use File::Find;
use Memoize;
use File::Spec;

$ENV{PATH}="/usr/local/bin:/usr/bin:/bin";

BEGIN {
	$blosxom::version="is a proper perl module too much to ask?";
	do "/usr/bin/markdown";
}

my ($srcdir, $destdir, %links, %oldlinks, %oldpagemtime, %renderedfiles,
	%pagesources);
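# Wikilinks take the form [[pagename]]; whitespace is not allowed inside
# the brackets.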
my $wiki_link_regexp=qr/\[\[([^\s]+)\]\]/;
my $wiki_file_regexp=qr/(^[-A-Za-z0-9_.:\/+]+$)/;
my $wiki_file_prune_regexp=qr!((^|/).svn/|\.\.|^\.|\/\.|\.html?$)!;
my $verbose=0;
my $wikiname="wiki";
my $default_pagetype=".mdwn";
my $cgi=0;
my $url="";
my $cgiurl="";
my $svn=1;

sub usage {
	die "usage: ikiwiki [options] source dest\n";
}

sub error ($) {
	if ($cgi) {
		print "Content-type: text/html\n\n";
		print "Error: @_\n";
		exit 1;
	}
	else {
		die @_;
	}
}

sub debug ($) {
	print "@_\n" if $verbose;
}

sub mtime ($) {
	my $page=shift;

	return (stat($page))[9];
}
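
# Blindly untaints a variable. Under perl's taint mode (the -T switch on
# the shebang line), externally derived data cannot be used in unsafe
# operations until it has been laundered through a regexp capture like
# the one below.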
sub possibly_foolish_untaint ($) {
	my $tainted=shift;
	my ($untainted)=$tainted=~/(.*)/;
	return $untainted;
}

sub basename {
	my $file=shift;

	$file=~s!.*/!!;
	return $file;
}

sub dirname {
	my $file=shift;

	$file=~s!/?[^/]+$!!;
	return $file;
}

sub pagetype ($) {
	my $page=shift;

	if ($page =~ /\.mdwn$/) {
		return ".mdwn";
	}
	else {
		return "unknown";
	}
}

sub pagename ($) {
	my $file=shift;

	my $type=pagetype($file);
	my $page=$file;
	$page=~s/\Q$type\E*$// unless $type eq 'unknown';
	return $page;
}

sub htmlpage ($) {
	my $page=shift;

	return $page.".html";
}

sub readfile ($) {
	my $file=shift;

	local $/=undef;
	open (IN, "$file") || error("failed to read $file: $!");
	my $ret=<IN>;
	close IN;
	return $ret;
}

sub writefile ($$) {
	my $file=shift;
	my $content=shift;

	my $dir=dirname($file);
	if (! -d $dir) {
		my $d="";
		foreach my $s (split(m!/+!, $dir)) {
			$d.="$s/";
			if (! -d $d) {
				mkdir($d) || error("failed to create directory $d: $!");
			}
		}
	}

	open (OUT, ">$file") || error("failed to write $file: $!");
	print OUT $content;
	close OUT;
}

sub findlinks {
	my $content=shift;

	my @links;
	while ($content =~ /$wiki_link_regexp/g) {
		push @links, lc($1);
	}
	return @links;
}

# Given a page and the text of a link on the page, determine which existing
# page that link best points to. Prefers pages under a subdirectory with
# the same name as the source page, failing that goes down the directory tree
# to the base looking for matching pages.
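# For example, a link "baz" on the page "foo/bar" is resolved by checking
# for a known page "foo/bar/baz", then "foo/baz", then "baz", in that order.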
sub bestlink ($$) {
	my $page=shift;
	my $link=lc(shift);

	my $cwd=$page;
	do {
		my $l=$cwd;
		$l.="/" if length $l;
		$l.=$link;

		if (exists $links{$l}) {
			#debug("for $page, \"$link\", use $l");
			return $l;
		}
	} while $cwd=~s!/?[^/]+$!!;

	#print STDERR "warning: page $page, broken link: $link\n";
	return "";
}

sub isinlinableimage ($) {
	my $file=shift;

	$file=~/\.(png|gif|jpg|jpeg)$/;
}
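
# Converts a link on a page into html. A link to a page that does not
# exist yet becomes a "?" create link pointing at the cgi; for example,
# [[sandbox]] might render as <a href="sandbox.html">sandbox</a> once
# the sandbox page exists.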
sub htmllink ($$) {
	my $page=shift;
	my $link=shift;

	my $bestlink=bestlink($page, $link);

	return $link if $page eq $bestlink;

	# TODO BUG: %renderedfiles may not have it, if the linked to page
	# was also added and isn't yet rendered! Note that this bug is
	# masked by the bug mentioned below that makes all new files
	# be rendered twice.
	if (! grep { $_ eq $bestlink } values %renderedfiles) {
		$bestlink=htmlpage($bestlink);
	}
	if (! grep { $_ eq $bestlink } values %renderedfiles) {
		return "<a href=\"$cgiurl?do=create&page=$link&from=$page\">?</a>$link";
	}

	$bestlink=File::Spec->abs2rel($bestlink, dirname($page));

	if (isinlinableimage($bestlink)) {
		return "<img src=\"$bestlink\">";
	}
	return "<a href=\"$bestlink\">$link</a>";
}

sub linkify ($$) {
	my $content=shift;
	my $file=shift;

	$content =~ s/$wiki_link_regexp/htmllink(pagename($file), $1)/eg;

	return $content;
}

sub htmlize ($$) {
	my $type=shift;
	my $content=shift;

	if ($type eq '.mdwn') {
		return Markdown::Markdown($content);
	}
	else {
		error("htmlization of $type not supported");
	}
}
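
# Appends a list of linkbacks (other pages that link to this one) to the
# bottom of the page's content.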
sub linkbacks ($$) {
	my $content=shift;
	my $page=shift;

	my @links;
	foreach my $p (keys %links) {
		next if bestlink($page, $p) eq $page;
		if (grep { length $_ && bestlink($p, $_) eq $page } @{$links{$p}}) {
			my $href=File::Spec->abs2rel(htmlpage($p), dirname($page));

			# Trim common dir prefixes from both pages.
			my $p_trimmed=$p;
			my $page_trimmed=$page;
			my $dir;
			1 while (($dir)=$page_trimmed=~m!^([^/]+/)!) &&
			        defined $dir &&
			        $p_trimmed=~s/^\Q$dir\E// &&
			        $page_trimmed=~s/^\Q$dir\E//;

			push @links, "<a href=\"$href\">$p_trimmed</a>";
		}
	}

	$content.="<hr><p>Links: ".join(" ", sort @links)."</p>\n" if @links;
	return $content;
}
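
# Wraps the page content in a minimal html template: a title, a breadcrumb
# trail of parent pages, and Edit/RecentChanges action links when a cgi
# url is configured.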
sub finalize ($$) {
	my $content=shift;
	my $page=shift;

	my $title=basename($page);
	$title=~s/_/ /g;

	my $pagelink="";
	my $path="";
	foreach my $dir (reverse split("/", $page)) {
		if (length($pagelink)) {
			$pagelink="<a href=\"$path$dir.html\">$dir</a>/ $pagelink";
		}
		else {
			$pagelink=$dir;
		}
		$path.="../";
	}
	$path=~s/\.\.\/$/index.html/;
	$pagelink="<a href=\"$path\">$wikiname</a>/ $pagelink";

	my @actions;
	if (length $cgiurl) {
		push @actions, "<a href=\"$cgiurl?do=edit&page=$page\">Edit</a>";
		push @actions, "<a href=\"$cgiurl?do=recentchanges\">RecentChanges</a>";
	}

	$content="<html>\n<head><title>$title</title></head>\n<body>\n".
	         "<h1>$pagelink</h1>\n".
	         "@actions\n<hr>\n".
	         $content.
	         "</body>\n</html>\n";

	return $content;
}

sub render ($) {
	my $file=shift;

	my $type=pagetype($file);
	my $content=readfile("$srcdir/$file");
	if ($type ne 'unknown') {
		my $page=pagename($file);

		$links{$page}=[findlinks($content)];

		$content=linkify($content, $file);
		$content=htmlize($type, $content);
		$content=linkbacks($content, $page);
		$content=finalize($content, $page);

		writefile("$destdir/".htmlpage($page), $content);
		$oldpagemtime{$page}=time;
		$renderedfiles{$page}=htmlpage($page);
	}
	else {
		$links{$file}=[];
		writefile("$destdir/$file", $content);
		$oldpagemtime{$file}=time;
		$renderedfiles{$file}=$file;
	}
}
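
# The .index file caches state between runs. Each line holds, space
# separated: a page's mtime when last rendered, its source file, the file
# it was rendered to, and the pages it links to. For example (values are
# illustrative):
#   1117484055 index.mdwn index.html sandbox todo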
sub loadindex () {
	open (IN, "$srcdir/.index") || return;
	while (<IN>) {
		$_=possibly_foolish_untaint($_);
		chomp;
		my ($mtime, $file, $rendered, @links)=split(' ', $_);
		my $page=pagename($file);
		$pagesources{$page}=$file;
		$oldpagemtime{$page}=$mtime;
		$oldlinks{$page}=[@links];
		$links{$page}=[@links];
		$renderedfiles{$page}=$rendered;
	}
	close IN;
}

sub saveindex () {
	open (OUT, ">$srcdir/.index") || error("cannot write to .index: $!");
	foreach my $page (keys %oldpagemtime) {
		print OUT "$oldpagemtime{$page} $pagesources{$page} $renderedfiles{$page} ".
			join(" ", @{$links{$page}})."\n"
				if $oldpagemtime{$page};
	}
	close OUT;
}
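
# The rcs_* functions wrap subversion; each is a no-op when $srcdir is
# not an svn working copy.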
sub rcs_update () {
	if (-d "$srcdir/.svn") {
		if (system("svn", "update", "--quiet", $srcdir) != 0) {
			warn("svn update failed\n");
		}
	}
}

sub rcs_commit ($) {
	my $message=shift;

	if (-d "$srcdir/.svn") {
		if (system("svn", "commit", "--quiet", "-m",
		           possibly_foolish_untaint($message), $srcdir) != 0) {
			warn("svn commit failed\n");
		}
	}
}

sub rcs_add ($) {
	my $file=shift;

	if (-d "$srcdir/.svn") {
		my $parent=dirname($file);
		while (! -d "$srcdir/$parent/.svn") {
			$file=$parent;
			$parent=dirname($file);
		}

		if (system("svn", "add", "--quiet", "$srcdir/$file") != 0) {
			warn("svn add failed\n");
		}
	}
}

sub rcs_recentchanges ($) {
	my $num=shift;
	my @ret;

	eval q{use Date::Parse};
	eval q{use Time::Duration};

	if (-d "$srcdir/.svn") {
		my $info=`LANG=C svn info $srcdir`;
		my ($svn_url)=$info=~/^URL: (.*)$/m;

		# FIXME: currently assumes that the wiki is somewhere
		# under trunk in svn, doesn't support other layouts.
		my ($svn_base)=$svn_url=~m!(/trunk(?:/.*)?)$!;

		my $div=qr/^--------------------+$/;
		my $infoline=qr/^r(\d+)\s+\|\s+([^\s]+)\s+\|\s+(\d+-\d+-\d+\s+\d+:\d+:\d+\s+[-+]?\d+).*/;
		my $state='start';
		my ($rev, $user, $when, @pages, $message);
		foreach (`LANG=C svn log -v '$svn_url'`) {
			chomp;
			if ($state eq 'start' && /$div/) {
				$state='header';
			}
			elsif ($state eq 'header' && /$infoline/) {
				$rev=$1;
				$user=$2;
				$when=concise(ago(time - str2time($3)));
			}
			elsif ($state eq 'header' && /^\s+[A-Z]\s+\Q$svn_base\E\/(.+)$/) {
				push @pages, pagename($1);
			}
			elsif ($state eq 'header' && /^$/) {
				$state='body';
			}
			elsif ($state eq 'body' && /$div/) {
				push @ret, { rev => $rev, user => $user,
					when => $when, message => $message,
					pages => [@pages] };
				return @ret if @ret >= $num;

				$state='header';
				$message=$rev=$user=$when=undef;
				@pages=();
			}
			elsif ($state eq 'body') {
				$message.="$_<br>\n";
			}
		}
	}

	return @ret;
}

sub prune ($) {
	my $file=shift;

	unlink($file);
	my $dir=dirname($file);
	while (rmdir($dir)) {
		$dir=dirname($dir);
	}
}
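
# Scans the source directory and rebuilds whatever is out of date. This
# takes several passes: render changed files, re-render pages whose links
# were affected by added or removed pages, and finally update linkbacks
# on pages whose incoming links changed.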
sub refresh () {
	# Find existing pages.
	my %exists;
	my @files;
	find({
		no_chdir => 1,
		wanted => sub {
			if (/$wiki_file_prune_regexp/) {
				$File::Find::prune=1;
			}
			elsif (! -d $_) {
				my ($f)=/$wiki_file_regexp/; # untaint
				if (! defined $f) {
					warn("skipping bad filename $_\n");
				}
				else {
					$f=~s/^\Q$srcdir\E\/?//;
					push @files, $f;
					$exists{pagename($f)}=1;
				}
			}
		},
	}, $srcdir);

	my %rendered;

	# check for added or removed pages
	my @add;
	foreach my $file (@files) {
		my $page=pagename($file);
		if (! $oldpagemtime{$page}) {
			debug("new page $page");
			push @add, $file;
			$links{$page}=[];
			$pagesources{$page}=$file;
		}
	}
	my @del;
	foreach my $page (keys %oldpagemtime) {
		if (! $exists{$page}) {
			debug("removing old page $page");
			push @del, $renderedfiles{$page};
			prune($destdir."/".$renderedfiles{$page});
			delete $renderedfiles{$page};
			$oldpagemtime{$page}=0;
			delete $pagesources{$page};
		}
	}

	# render any updated files
	foreach my $file (@files) {
		my $page=pagename($file);

		if (! exists $oldpagemtime{$page} ||
		    mtime("$srcdir/$file") > $oldpagemtime{$page}) {
			debug("rendering changed file $file");
			render($file);
			$rendered{$file}=1;
		}
	}

	# if any files were added or removed, check to see if each page
	# needs an update due to linking to them
	# TODO: inefficient; pages may get rendered above and again here;
	# problem is the bestlink may have changed and we won't know until
	# now
	if (@add || @del) {
FILE:		foreach my $file (@files) {
			my $page=pagename($file);
			foreach my $f (@add, @del) {
				my $p=pagename($f);
				foreach my $link (@{$links{$page}}) {
					if (bestlink($page, $link) eq $p) {
						debug("rendering $file, which links to $p");
						render($file);
						$rendered{$file}=1;
						next FILE;
					}
				}
			}
		}
	}

	# handle linkbacks; if a page has added/removed links, update the
	# pages it links to
	# TODO: inefficient; pages may get rendered above and again here;
	# problem is the linkbacks could be wrong in the first pass render
	# above
	if (%rendered) {
		my %linkchanged;
		foreach my $file (keys %rendered, @del) {
			my $page=pagename($file);
			if (exists $links{$page}) {
				foreach my $link (@{$links{$page}}) {
					$link=bestlink($page, $link);
					if (length $link &&
					    ! exists $oldlinks{$page} ||
					    ! grep { $_ eq $link } @{$oldlinks{$page}}) {
						$linkchanged{$link}=1;
					}
				}
			}
			if (exists $oldlinks{$page}) {
				foreach my $link (@{$oldlinks{$page}}) {
					$link=bestlink($page, $link);
					if (length $link &&
					    ! exists $links{$page} ||
					    ! grep { $_ eq $link } @{$links{$page}}) {
						$linkchanged{$link}=1;
					}
				}
			}
		}
		foreach my $link (keys %linkchanged) {
			my $linkfile=$pagesources{$link};
			if (defined $linkfile) {
				debug("rendering $linkfile, to update its linkbacks");
				render($linkfile);
			}
		}
	}
}

# Generates a C wrapper program for running ikiwiki in a specific way.
# The wrapper may be safely made suid.
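# For example, running ikiwiki with --wrapper --cgi and a --url set
# produces an ikiwiki-wrap binary that can then be installed as the
# wiki's cgi program.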
sub gen_wrapper ($$) {
	my ($svn, $rebuild)=@_;

	eval q{use Cwd 'abs_path'};
	$srcdir=abs_path($srcdir);
	$destdir=abs_path($destdir);
	my $this=abs_path($0);
	if (! -x $this) {
		error("$this doesn't seem to be executable");
	}

	my @params=($srcdir, $destdir, "--wikiname=$wikiname");
	push @params, "--verbose" if $verbose;
	push @params, "--rebuild" if $rebuild;
	push @params, "--nosvn" if !$svn;
	push @params, "--cgi" if $cgi;
	push @params, "--url=$url" if $url;
	push @params, "--cgiurl=$cgiurl" if $cgiurl;
	my $params=join(" ", @params);
	my $call='';
	foreach my $p ($this, $this, @params) {
		$call.=qq{"$p", };
	}
	$call.="NULL";

	my @envsave;
	push @envsave, qw{REMOTE_ADDR QUERY_STRING REQUEST_METHOD REQUEST_URI
	                  CONTENT_TYPE CONTENT_LENGTH GATEWAY_INTERFACE} if $cgi;
	my $envsave="";
	foreach my $var (@envsave) {
		$envsave.=<<"EOF"
	if ((s=getenv("$var")))
		asprintf(&newenviron[i++], "%s=%s", "$var", s);
EOF
	}

	open(OUT, ">ikiwiki-wrap.c") || error("failed to write ikiwiki-wrap.c: $!");
	print OUT <<"EOF";
/* A wrapper for ikiwiki, can be safely made suid. */
#define _GNU_SOURCE
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <string.h>

extern char **environ;

int main (int argc, char **argv) {
	/* Sanitize environment. */
	char *s;
	char *newenviron[$#envsave+3];
	int i=0;
$envsave
	newenviron[i++]="HOME=$ENV{HOME}";
	newenviron[i]=NULL;
	environ=newenviron;

	if (argc == 2 && strcmp(argv[1], "--params") == 0) {
		printf("$params\\n");
		exit(0);
	}

	execl($call);
	perror("failed to run $this");
	exit(1);
}
EOF
	close OUT;
	if (system("gcc", "ikiwiki-wrap.c", "-o", "ikiwiki-wrap") != 0) {
		error("failed to compile ikiwiki-wrap.c");
	}
	unlink("ikiwiki-wrap.c");
	print "successfully generated ikiwiki-wrap\n";
	exit 0;
}
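
# The cgi interface. Dispatches on the "do" parameter; for example, a
# request for "$cgiurl?do=edit&page=index" brings up the edit form for
# the index page. Supported actions: recentchanges, create, edit, save.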
sub cgi () {
	eval q{use CGI};
	my $q=CGI->new;

	my $do=$q->param('do');
	if (! defined $do || ! length $do) {
		error("\"do\" parameter missing");
	}

	if ($do eq 'recentchanges') {
		my $list="<ul>\n";
		foreach my $change (rcs_recentchanges(100)) {
			$list.="<li>";
			$list.=join(", ", map { htmllink("index", $_) } @{$change->{pages}});
			$list.="<br>\n";
			$list.="changed ".$change->{when}." by ".
			       htmllink("index", $change->{user}).
			       ": <i>".$change->{message}."</i>\n";
			$list.="</li>\n";
		}
		$list.="</ul>\n";

		print $q->header,
		      $q->start_html("RecentChanges"),
		      $q->h1("<a href=\"$url\">$wikiname</a>/ RecentChanges"),
		      $list,
		      $q->end_form,
		      $q->end_html;
		return;
	}

	my ($page)=$q->param('page')=~/$wiki_file_regexp/;
	if (! defined $page || ! length $page || $page ne $q->param('page') ||
	    $page=~/$wiki_file_prune_regexp/ || $page=~/^\//) {
		error("bad page name");
	}
	$page=lc($page);

	my $action=$q->request_uri;
	$action=~s/\?.*//;

	if ($do eq 'create') {
		if (exists $pagesources{lc($page)}) {
			# hmm, someone else made the page in the meantime?
			print $q->redirect("$url/".htmlpage($page));
		}

		my @page_locs;
		my ($from)=$q->param('from')=~/$wiki_file_regexp/;
		if (! defined $from || ! length $from ||
		    $from ne $q->param('from') ||
		    $from=~/$wiki_file_prune_regexp/ || $from=~/^\//) {
			@page_locs=$page;
		}
		else {
			my $dir=$from."/";
			$dir=~s![^/]+/$!!;
			push @page_locs, $dir.$page;
			push @page_locs, "$from/$page";
			while (length $dir) {
				$dir=~s![^/]+/$!!;
				push @page_locs, $dir.$page;
			}
		}

		$q->param("do", "save");
		print $q->header,
		      $q->start_html("Creating $page"),
		      $q->h1("<a href=\"$url\">$wikiname</a>/ Creating $page"),
		      $q->start_form(-action => $action),
		      $q->hidden('do'),
		      "Select page location:",
		      $q->popup_menu('page', \@page_locs),
		      $q->textarea(-name => 'content',
		               -default => "",
		               -rows => 20,
		               -columns => 80),
		      $q->br,
		      "Optional comment about this change:",
		      $q->br,
		      $q->textfield(-name => "comments", -size => 80),
		      $q->br,
		      $q->submit("Save Page"),
		      $q->end_form,
		      $q->end_html;
	}
	elsif ($do eq 'edit') {
		my $content="";
		if (exists $pagesources{lc($page)}) {
			$content=readfile("$srcdir/$pagesources{lc($page)}");
			$content=~s/\n/\r\n/g;
		}
		$q->param("do", "save");
		print $q->header,
		      $q->start_html("Editing $page"),
		      $q->h1("<a href=\"$url\">$wikiname</a>/ Editing $page"),
		      $q->start_form(-action => $action),
		      $q->hidden('do'),
		      $q->hidden('page'),
		      $q->textarea(-name => 'content',
		               -default => $content,
		               -rows => 20,
		               -columns => 80),
		      $q->br,
		      "Optional comment about this change:",
		      $q->br,
		      $q->textfield(-name => "comments", -size => 80),
		      $q->br,
		      $q->submit("Save Page"),
		      $q->end_form,
		      $q->end_html;
	}
	elsif ($do eq 'save') {
		my $file=$page.$default_pagetype;
		my $newfile=1;
		if (exists $pagesources{lc($page)}) {
			$file=$pagesources{lc($page)};
			$newfile=0;
		}

		my $content=$q->param('content');
		$content=~s/\r\n/\n/g;
		$content=~s/\r/\n/g;
		writefile("$srcdir/$file", $content);

		my $message="web commit from $ENV{REMOTE_ADDR}";
		if (defined $q->param('comments')) {
			$message.=": ".$q->param('comments');
		}

		if ($svn) {
			if ($newfile) {
				rcs_add($file);
			}
			# presumably the commit will trigger an update
			# of the wiki
			rcs_commit($message);
		}
		else {
			refresh();
		}

		print $q->redirect("$url/".htmlpage($page));
	}
	else {
		error("unknown do parameter");
	}
}

my $rebuild=0;
my $wrapper=0;
if (grep /^-/, @ARGV) {
	eval q{use Getopt::Long};
	GetOptions(
		"wikiname=s" => \$wikiname,
		"verbose|v" => \$verbose,
		"rebuild" => \$rebuild,
		"wrapper" => \$wrapper,
		"svn!" => \$svn,
		"cgi" => \$cgi,
		"url=s" => \$url,
		"cgiurl=s" => \$cgiurl,
	) || usage();
}
usage() unless @ARGV == 2;
($srcdir) = possibly_foolish_untaint(shift);
($destdir) = possibly_foolish_untaint(shift);

if ($cgi && ! length $url) {
	error("Must specify url to wiki with --url when using --cgi");
}

gen_wrapper($svn, $rebuild) if $wrapper;
memoize('pagename');
memoize('bestlink');
loadindex() unless $rebuild;
if ($cgi) {
	cgi();
}
else {
	rcs_update() if $svn;
	refresh();
	saveindex();
}