Merge branch 'master' into prefix-directives

Joey Hess 2008-02-05 15:59:42 -05:00
commit f92505d78b
117 changed files with 2527 additions and 2322 deletions

IkiWiki.pm

@@ -62,13 +62,14 @@ sub defaultconfig () { #{{{
 	cgi => 0,
 	post_commit => 0,
 	rcs => '',
-	notify => 0,
 	url => '',
 	cgiurl => '',
 	historyurl => '',
 	diffurl => '',
 	rss => 0,
 	atom => 0,
+	allowrss => 0,
+	allowatom => 0,
 	discussion => 1,
 	rebuild => 0,
 	refresh => 0,
@@ -76,7 +77,6 @@ sub defaultconfig () { #{{{
 	w3mmode => 0,
 	wrapper => undef,
 	wrappermode => undef,
-	svnrepo => undef,
 	svnpath => "trunk",
 	gitorigin_branch => "origin",
 	gitmaster_branch => "master",
@@ -90,7 +90,7 @@ sub defaultconfig () { #{{{
 	adminuser => undef,
 	adminemail => undef,
 	plugin => [qw{mdwn inline htmlscrubber passwordauth openid signinedit
-		lockedit conditional}],
+		lockedit conditional recentchanges}],
 	libdir => undef,
 	timeformat => '%c',
 	locale => undef,
@@ -281,6 +281,12 @@ sub pagetype ($) { #{{{
 	return;
 } #}}}

+sub isinternal ($) { #{{{
+	my $page=shift;
+	return exists $pagesources{$page} &&
+		$pagesources{$page} =~ /\._([^.]+)$/;
+} #}}}
+
 sub pagename ($) { #{{{
 	my $file=shift;
@@ -628,6 +634,20 @@ sub htmllink ($$$;@) { #{{{
 	return "<a href=\"$bestlink\"@attrs>$linktext</a>";
 } #}}}

+sub userlink ($) { #{{{
+	my $user=shift;
+
+	my $oiduser=eval { openiduser($user) };
+	if (defined $oiduser) {
+		return "<a href=\"$user\">$oiduser</a>";
+	}
+	else {
+		return htmllink("", "", escapeHTML(
+			length $config{userdir} ? $config{userdir}."/".$user : $user
+		), noimageinline => 1);
+	}
+} #}}}
+
 sub htmlize ($$$) { #{{{
 	my $page=shift;
 	my $type=shift;
@@ -913,7 +933,7 @@ sub loadindex () { #{{{
 	%oldrenderedfiles=%pagectime=();
 	if (! $config{rebuild}) {
 		%pagesources=%pagemtime=%oldlinks=%links=%depends=
-			%destsources=%renderedfiles=%pagecase=();
+			%destsources=%renderedfiles=%pagecase=%pagestate=();
 	}
 	open (my $in, "<", "$config{wikistatedir}/index") || return;
 	while (<$in>) {
@@ -979,7 +999,7 @@ sub saveindex () { #{{{
 	if (exists $pagestate{$page}) {
 		foreach my $id (@hookids) {
 			foreach my $key (keys %{$pagestate{$page}{$id}}) {
-				$line.=' '.$id.'_'.encode_entities($key)."=".encode_entities($pagestate{$page}{$id}{$key});
+				$line.=' '.$id.'_'.encode_entities($key)."=".encode_entities($pagestate{$page}{$id}{$key}, " \t\n");
 			}
 		}
 	}
@@ -1299,13 +1319,22 @@ sub match_glob ($$;@) { #{{{
 	$glob=~s/\\\?/./g;

 	if ($page=~/^$glob$/i) {
-		return IkiWiki::SuccessReason->new("$glob matches $page");
+		if (! IkiWiki::isinternal($page) || $params{internal}) {
+			return IkiWiki::SuccessReason->new("$glob matches $page");
+		}
+		else {
+			return IkiWiki::FailReason->new("$glob matches $page, but the page is an internal page");
+		}
 	}
 	else {
 		return IkiWiki::FailReason->new("$glob does not match $page");
 	}
 } #}}}

+sub match_internal ($$;@) { #{{{
+	return match_glob($_[0], $_[1], @_, internal => 1)
+} #}}}
+
 sub match_link ($$;@) { #{{{
 	my $page=shift;
 	my $link=lc(shift);
@@ -1401,19 +1430,4 @@ sub match_creation_year ($$;@) { #{{{
 	}
 } #}}}

-sub match_user ($$;@) { #{{{
-	shift;
-	my $user=shift;
-	my %params=@_;
-
-	return IkiWiki::FailReason->new('cannot match user')
-		unless exists $params{user};
-
-	if ($user eq $params{user}) {
-		return IkiWiki::SuccessReason->new("user is $user")
-	}
-	else {
-		return IkiWiki::FailReason->new("user is not $user");
-	}
-} #}}}
-
 1
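
The isinternal/match_internal pair hinges on a naming convention: a page is "internal" when its source file's extension starts with an underscore, as with the ._change pages the new recentchanges plugin generates. A minimal standalone sketch of that convention (the %pagesources entries here are hypothetical):

#!/usr/bin/perl
use strict;
use warnings;

# Hypothetical source-file map: one ordinary page, one internal page
# of the kind the recentchanges plugin writes.
my %pagesources=(
	"index"                    => "index.mdwn",
	"recentchanges/change_123" => "recentchanges/change_123._change",
);

sub isinternal {
	my $page=shift;
	return exists $pagesources{$page} &&
		$pagesources{$page} =~ /\._([^.]+)$/;
}

# A pagespec glob like "recentchanges/*" skips the change page;
# "internal(recentchanges/*)" (match_internal) opts back in.
print isinternal($_) ? "$_: internal\n" : "$_: regular\n"
	for sort keys %pagesources;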

IkiWiki/CGI.pm

@@ -84,53 +84,6 @@ sub decode_cgi_utf8 ($) { #{{{
 	}
 } #}}}

-sub cgi_recentchanges ($) { #{{{
-	my $q=shift;
-
-	# Optimisation: building recentchanges means calculating lots of
-	# links. Memoizing htmllink speeds it up a lot (can't be memoized
-	# during page builds as the return values may change, but they
-	# won't here.)
-	eval q{use Memoize};
-	error($@) if $@;
-	memoize("htmllink");
-
-	eval q{use Time::Duration};
-	error($@) if $@;
-
-	my $changelog=[rcs_recentchanges(100)];
-	foreach my $change (@$changelog) {
-		$change->{when} = concise(ago($change->{when}));
-
-		$change->{user} = userlink($change->{user});
-
-		my $is_excess = exists $change->{pages}[10]; # limit pages to first 10
-		delete @{$change->{pages}}[10 .. @{$change->{pages}}] if $is_excess;
-		$change->{pages} = [
-			map {
-				$_->{link} = htmllink("", "", $_->{page},
-					noimageinline => 1,
-					linktext => pagetitle($_->{page}));
-				$_;
-			} @{$change->{pages}}
-		];
-		push @{$change->{pages}}, { link => '...' } if $is_excess;
-	}
-
-	my $template=template("recentchanges.tmpl");
-	$template->param(
-		title => "RecentChanges",
-		indexlink => indexlink(),
-		wikiname => $config{wikiname},
-		changelog => $changelog,
-		baseurl => baseurl(),
-	);
-	run_hooks(pagetemplate => sub {
-		shift->(page => "", destpage => "", template => $template);
-	});
-	print $q->header(-charset => 'utf-8'), $template->output;
-} #}}}
-
 # Check if the user is signed in. If not, redirect to the signin form and
 # save their place to return to later.
 sub needsignin ($$) { #{{{
@@ -242,9 +195,6 @@ sub cgi_prefs ($$) { #{{{
 	$form->field(name => "do", type => "hidden");
 	$form->field(name => "email", size => 50, fieldset => "preferences");
-	$form->field(name => "subscriptions", size => 50,
-		fieldset => "preferences",
-		comment => "(".htmllink("", "", "ikiwiki/PageSpec", noimageinline => 1).")");
 	$form->field(name => "banned_users", size => 50,
 		fieldset => "admin");
@@ -256,8 +206,6 @@ sub cgi_prefs ($$) { #{{{
 	if (! $form->submitted) {
 		$form->field(name => "email", force => 1,
 			value => userinfo_get($user_name, "email"));
-		$form->field(name => "subscriptions", force => 1,
-			value => userinfo_get($user_name, "subscriptions"));
 		if (is_admin($user_name)) {
 			$form->field(name => "banned_users", force => 1,
 				value => join(" ", get_banned_users()));
@@ -274,11 +222,9 @@ sub cgi_prefs ($$) { #{{{
 		return;
 	}
 	elsif ($form->submitted eq 'Save Preferences' && $form->validate) {
-		foreach my $field (qw(email subscriptions)) {
-			if (defined $form->field($field)) {
-				userinfo_set($user_name, $field, $form->field($field)) ||
-					error("failed to set $field");
-			}
+		if (defined $form->field('email')) {
+			userinfo_set($user_name, 'email', $form->field('email')) ||
+				error("failed to set email");
 		}
 		if (is_admin($user_name)) {
 			set_banned_users(grep { ! is_admin($_) }
@@ -341,7 +287,7 @@ sub cgi_editpage ($$) { #{{{
 	if (exists $pagesources{$page} && $form->field("do") ne "create") {
 		$file=$pagesources{$page};
 		$type=pagetype($file);
-		if (! defined $type) {
+		if (! defined $type || $type=~/^_/) {
 			error(sprintf(gettext("%s is not an editable page"), $page));
 		}
 		if (! $form->submitted) {
@@ -411,6 +357,8 @@ sub cgi_editpage ($$) { #{{{
 			linkify($page, "",
 			preprocess($page, $page,
 			filter($page, $page, $content), 0, 1))));
+		# previewing may have created files on disk
+		saveindex();
 	}
 	elsif ($form->submitted eq "Save Page") {
 		$form->tmpl_param("page_preview", "");
@@ -470,7 +418,8 @@ sub cgi_editpage ($$) { #{{{
 		my @page_types;
 		if (exists $hooks{htmlize}) {
-			@page_types=keys %{$hooks{htmlize}};
+			@page_types=grep { !/^_/ }
+				keys %{$hooks{htmlize}};
 		}

 		$form->tmpl_param("page_select", 1);
@@ -501,7 +450,6 @@ sub cgi_editpage ($$) { #{{{
 		}

 		showform($form, \@buttons, $session, $q);
-		saveindex();
 	}
 	else {
 		# save page
@@ -579,7 +527,7 @@ sub cgi_editpage ($$) { #{{{
 			# Prevent deadlock with post-commit hook by
 			# signaling to it that it should not try to
-			# do anything (except send commit mails).
+			# do anything.
 			disable_commit_hook();
 			$conflict=rcs_commit($file, $message,
 				$form->field("rcsinfo"),
@@ -592,10 +540,6 @@ sub cgi_editpage ($$) { #{{{
 		# may have been committed while the post-commit hook was
 		# disabled.
 		require IkiWiki::Render;
-		# Reload index, since the first time it's loaded is before
-		# the wiki is locked, and things may have changed in the
-		# meantime.
-		loadindex();
 		refresh();
 		saveindex();
@@ -667,14 +611,9 @@ sub cgi (;$$) { #{{{
 		}
 	}

-	# Things that do not need a session.
-	if ($do eq 'recentchanges') {
-		cgi_recentchanges($q);
-		return;
-	}
-
 	# Need to lock the wiki before getting a session.
 	lockwiki();
+	loadindex();

 	if (! $session) {
 		$session=cgi_getsession($q);
@@ -726,32 +665,4 @@ sub cgi (;$$) { #{{{
 	}
 } #}}}

-sub userlink ($) { #{{{
-	my $user=shift;
-
-	eval q{use CGI 'escapeHTML'};
-	error($@) if $@;
-	if ($user =~ m!^https?://! &&
-	    eval q{use Net::OpenID::VerifiedIdentity; 1} && !$@) {
-		# Munge user-urls, as used by eg, OpenID.
-		my $oid=Net::OpenID::VerifiedIdentity->new(identity => $user);
-		my $display=$oid->display;
-		# Convert "user.somehost.com" to "user [somehost.com]".
-		if ($display !~ /\[/) {
-			$display=~s/^(.*?)\.([^.]+\.[a-z]+)$/$1 [$2]/;
-		}
-		# Convert "http://somehost.com/user" to "user [somehost.com]".
-		if ($display !~ /\[/) {
-			$display=~s/^https?:\/\/(.+)\/([^\/]+)$/$2 [$1]/;
-		}
-		$display=~s!^https?://!!; # make sure this is removed
-		return "<a href=\"$user\">".escapeHTML($display)."</a>";
-	}
-	else {
-		return htmllink("", "", escapeHTML(
-			length $config{userdir} ? $config{userdir}."/".$user : $user
-		), noimageinline => 1);
-	}
-} #}}}
-
 1

IkiWiki/Plugin/aggregate.pm

@@ -33,43 +33,62 @@ sub getopt () { #{{{
 sub checkconfig () { #{{{
 	if ($config{aggregate} && ! ($config{post_commit} &&
 	                             IkiWiki::commit_hook_enabled())) {
-		if (! IkiWiki::lockwiki(0)) {
-			debug("wiki is locked by another process, not aggregating");
-			exit 1;
+		# See if any feeds need aggregation.
+		loadstate();
+		my @feeds=needsaggregate();
+		return unless @feeds;
+		if (! lockaggregate()) {
+			debug("an aggregation process is already running");
+			return;
 		}
+		# force a later rebuild of source pages
+		$IkiWiki::forcerebuild{$_->{sourcepage}}=1
+			foreach @feeds;

 		# Fork a child process to handle the aggregation.
-		# The parent process will then handle building the result.
-		# This avoids messy code to clear state accumulated while
-		# aggregating.
+		# The parent process will then handle building the
+		# result. This avoids messy code to clear state
+		# accumulated while aggregating.
 		defined(my $pid = fork) or error("Can't fork: $!");
 		if (! $pid) {
-			loadstate();
 			IkiWiki::loadindex();
-			aggregate();
+
+			# Aggregation happens without the main wiki lock
+			# being held. This allows editing pages etc while
+			# aggregation is running.
+			aggregate(@feeds);
+
+			IkiWiki::lockwiki;
+			# Merge changes, since aggregation state may have
+			# changed on disk while the aggregation was happening.
+			mergestate();
 			expire();
 			savestate();
+			IkiWiki::unlockwiki;
 			exit 0;
 		}
 		waitpid($pid,0);
 		if ($?) {
 			error "aggregation failed with code $?";
 		}

-		IkiWiki::unlockwiki();
+		clearstate();
+		unlockaggregate();
 	}
 } #}}}

 sub needsbuild (@) { #{{{
 	my $needsbuild=shift;

-	loadstate(); # if not already loaded
+	loadstate();

 	foreach my $feed (values %feeds) {
-		if (grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
-			# Mark all feeds originating on this page as removable;
-			# preprocess will unmark those that still exist.
-			remove_feeds($feed->{sourcepage});
+		if (exists $pagesources{$feed->{sourcepage}} &&
+		    grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
+			# Mark all feeds originating on this page as
+			# not yet seen; preprocess will unmark those that
+			# still exist.
+			markunseen($feed->{sourcepage});
 		}
 	}
 } # }}}
@@ -102,8 +121,7 @@ sub preprocess (@) { #{{{
 	$feed->{updateinterval}=defined $params{updateinterval} ? $params{updateinterval} * 60 : 15 * 60;
 	$feed->{expireage}=defined $params{expireage} ? $params{expireage} : 0;
 	$feed->{expirecount}=defined $params{expirecount} ? $params{expirecount} : 0;
-	delete $feed->{remove};
-	delete $feed->{expired};
+	delete $feed->{unseen};
 	$feed->{lastupdate}=0 unless defined $feed->{lastupdate};
 	$feed->{numposts}=0 unless defined $feed->{numposts};
 	$feed->{newposts}=0 unless defined $feed->{newposts};
@@ -133,11 +151,22 @@ sub delete (@) { #{{{
 	# Remove feed data for removed pages.
 	foreach my $file (@files) {
 		my $page=pagename($file);
-		remove_feeds($page);
+		markunseen($page);
+	}
+} #}}}
+
+sub markunseen ($) { #{{{
+	my $page=shift;
+
+	foreach my $id (keys %feeds) {
+		if ($feeds{$id}->{sourcepage} eq $page) {
+			$feeds{$id}->{unseen}=1;
+		}
 	}
 } #}}}

 my $state_loaded=0;
+
 sub loadstate () { #{{{
 	return if $state_loaded;
 	$state_loaded=1;
@@ -176,32 +205,13 @@ sub loadstate () { #{{{
 sub savestate () { #{{{
 	return unless $state_loaded;
+	garbage_collect();

 	eval q{use HTML::Entities};
 	error($@) if $@;
 	my $newfile="$config{wikistatedir}/aggregate.new";
 	my $cleanup = sub { unlink($newfile) };
 	open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
 	foreach my $data (values %feeds, values %guids) {
-		if ($data->{remove}) {
-			if ($data->{name}) {
-				foreach my $guid (values %guids) {
-					if ($guid->{feed} eq $data->{name}) {
-						$guid->{remove}=1;
-					}
-				}
-			}
-			else {
-				unlink pagefile($data->{page})
-					if exists $data->{page};
-			}
-			next;
-		}
-		elsif ($data->{expired} && exists $data->{page}) {
-			unlink pagefile($data->{page});
-			delete $data->{page};
-			delete $data->{md5};
-		}
-
 		my @line;
 		foreach my $field (keys %$data) {
 			if ($field eq "name" || $field eq "feed" ||
@@ -222,6 +232,69 @@ sub savestate () { #{{{
 		error("rename $newfile: $!", $cleanup);
 } #}}}

+sub garbage_collect () { #{{{
+	foreach my $name (keys %feeds) {
+		# remove any feeds that were not seen while building the pages
+		# that used to contain them
+		if ($feeds{$name}->{unseen}) {
+			delete $feeds{$name};
+		}
+	}
+
+	foreach my $guid (values %guids) {
+		# any guid whose feed is gone should be removed
+		if (! exists $feeds{$guid->{feed}}) {
+			unlink pagefile($guid->{page})
+				if exists $guid->{page};
+			delete $guids{$guid->{guid}};
+		}
+		# handle expired guids
+		elsif ($guid->{expired} && exists $guid->{page}) {
+			unlink pagefile($guid->{page});
+			delete $guid->{page};
+			delete $guid->{md5};
+		}
+	}
+} #}}}
+
+sub mergestate () { #{{{
+	# Load the current state in from disk, and merge into it
+	# values from the state in memory that might have changed
+	# during aggregation.
+	my %myfeeds=%feeds;
+	my %myguids=%guids;
+	clearstate();
+	loadstate();
+
+	# All that can change in feed state during aggregation is a few
+	# fields.
+	foreach my $name (keys %myfeeds) {
+		if (exists $feeds{$name}) {
+			foreach my $field (qw{message lastupdate numposts
+			                      newposts error}) {
+				$feeds{$name}->{$field}=$myfeeds{$name}->{$field};
+			}
+		}
+	}
+
+	# New guids can be created during aggregation.
+	# It's also possible that guids were removed from the on-disk state
+	# while the aggregation was in process. That would only happen if
+	# their feed was also removed, so any removed guids added back here
+	# will be garbage collected later.
+	foreach my $guid (keys %myguids) {
+		if (! exists $guids{$guid}) {
+			$guids{$guid}=$myguids{$guid};
+		}
+	}
+} #}}}
+
+sub clearstate () { #{{{
+	%feeds=();
+	%guids=();
+	$state_loaded=0;
+} #}}}
+
 sub expire () { #{{{
 	foreach my $feed (values %feeds) {
 		next unless $feed->{expireage} || $feed->{expirecount};
@@ -253,7 +326,12 @@ sub expire () { #{{{
 	}
 } #}}}

-sub aggregate () { #{{{
+sub needsaggregate () { #{{{
+	return values %feeds if $config{rebuild};
+	return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
+} #}}}
+
+sub aggregate (@) { #{{{
 	eval q{use XML::Feed};
 	error($@) if $@;
 	eval q{use URI::Fetch};
@@ -261,15 +339,12 @@ sub aggregate () { #{{{
 	eval q{use HTML::Entities};
 	error($@) if $@;

-	foreach my $feed (values %feeds) {
-		next unless $config{rebuild} ||
-			time - $feed->{lastupdate} >= $feed->{updateinterval};
+	foreach my $feed (@_) {
 		$feed->{lastupdate}=time;
 		$feed->{newposts}=0;
 		$feed->{message}=sprintf(gettext("processed ok at %s"),
 			displaytime($feed->{lastupdate}));
 		$feed->{error}=0;
-		$IkiWiki::forcerebuild{$feed->{sourcepage}}=1;

 		debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));
@@ -477,18 +552,6 @@ sub htmlabs ($$) { #{{{
 	return $ret;
 } #}}}

-sub remove_feeds () { #{{{
-	my $page=shift;
-
-	my %removed;
-	foreach my $id (keys %feeds) {
-		if ($feeds{$id}->{sourcepage} eq $page) {
-			$feeds{$id}->{remove}=1;
-			$removed{$id}=1;
-		}
-	}
-} #}}}
-
 sub pagefile ($) { #{{{
 	my $page=shift;
@@ -499,4 +562,26 @@ sub htmlfn ($) { #{{{
 	return shift().".".$config{htmlext};
 } #}}}

+my $aggregatelock;
+
+sub lockaggregate () { #{{{
+	# Take an exclusive lock to prevent multiple concurrent aggregators.
+	# Returns true if the lock was acquired.
+	if (! -d $config{wikistatedir}) {
+		mkdir($config{wikistatedir});
+	}
+	open($aggregatelock, '>', "$config{wikistatedir}/aggregatelock") ||
+		error ("cannot open to $config{wikistatedir}/aggregatelock: $!");
+	if (! flock($aggregatelock, 2 | 4)) { # LOCK_EX | LOCK_NB
+		close($aggregatelock) || error("failed closing aggregatelock: $!");
+		return 0;
+	}
+	return 1;
+} #}}}
+
+sub unlockaggregate () { #{{{
+	return close($aggregatelock) if $aggregatelock;
+	return;
+} #}}}
+
 1
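
lockaggregate spells LOCK_EX | LOCK_NB as the literal 2 | 4; the same non-blocking exclusive lock, written as a standalone sketch with the named Fcntl constants (the /tmp path is a stand-in for $config{wikistatedir}/aggregatelock):

#!/usr/bin/perl
use strict;
use warnings;
use Fcntl qw(:flock);	# LOCK_EX == 2, LOCK_NB == 4

my $lockfile="/tmp/aggregatelock";	# stand-in path for this sketch
open(my $lock, '>', $lockfile) or die "open $lockfile: $!";
if (! flock($lock, LOCK_EX | LOCK_NB)) {
	# another process holds the lock; give up instead of blocking
	close($lock);
	exit 1;
}
# ... do the exclusive work here; the lock is released when $lock
# is closed or the process exits
close($lock);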

IkiWiki/Plugin/calendar.pm

@@ -390,7 +390,8 @@ sub needsbuild (@) { #{{{
 			# the current day
 			push @$needsbuild, $pagesources{$page};
 		}
-		if (grep { $_ eq $pagesources{$page} } @$needsbuild) {
+		if (exists $pagesources{$page} &&
+		    grep { $_ eq $pagesources{$page} } @$needsbuild) {
 			# remove state, will be re-added if
 			# the calendar is still there during the
 			# rebuild

IkiWiki/Plugin/ddate.pm

@@ -18,6 +18,10 @@ sub checkconfig () { #{{{
 sub IkiWiki::displaytime ($;$) { #{{{
 	my $time=shift;
+	my $format=shift;
+	if (! defined $format) {
+		$format=$config{timeformat};
+	}
 	eval q{
 		use DateTime;
 		use DateTime::Calendar::Discordian;
@@ -27,7 +31,7 @@ sub IkiWiki::displaytime ($;$) { #{{{
 	}
 	my $dt = DateTime->from_epoch(epoch => $time);
 	my $dd = DateTime::Calendar::Discordian->from_object(object => $dt);
-	return $dd->strftime($IkiWiki::config{timeformat});
+	return $dd->strftime($format);
 } #}}}

 5

IkiWiki/Plugin/edittemplate.pm

@@ -21,7 +21,8 @@ sub needsbuild (@) { #{{{
 	foreach my $page (keys %pagestate) {
 		if (exists $pagestate{$page}{edittemplate}) {
-			if (grep { $_ eq $pagesources{$page} } @$needsbuild) {
+			if (exists $pagesources{$page} &&
+			    grep { $_ eq $pagesources{$page} } @$needsbuild) {
 				# remove state, it will be re-added
 				# if the preprocessor directive is still
 				# there during the rebuild

IkiWiki/Plugin/inline.pm

@@ -34,6 +34,8 @@ sub getopt () { #{{{
 	GetOptions(
 		"rss!" => \$config{rss},
 		"atom!" => \$config{atom},
+		"allowrss!" => \$config{allowrss},
+		"allowatom!" => \$config{allowatom},
 	);
 }
@@ -91,11 +93,10 @@ sub preprocess_inline (@) { #{{{
 	}
 	my $raw=yesno($params{raw});
 	my $archive=yesno($params{archive});
-	my $rss=($config{rss} && exists $params{rss}) ? yesno($params{rss}) : $config{rss};
-	my $atom=($config{atom} && exists $params{atom}) ? yesno($params{atom}) : $config{atom};
+	my $rss=(($config{rss} || $config{allowrss}) && exists $params{rss}) ? yesno($params{rss}) : $config{rss};
+	my $atom=(($config{atom} || $config{allowatom}) && exists $params{atom}) ? yesno($params{atom}) : $config{atom};
 	my $quick=exists $params{quick} ? yesno($params{quick}) : 0;
 	my $feeds=exists $params{feeds} ? yesno($params{feeds}) : !$quick;
-	$feeds=0 if $params{preview};
 	my $feedonly=yesno($params{feedonly});
 	if (! exists $params{show} && ! $archive) {
 		$params{show}=10;
@@ -182,7 +183,7 @@ sub preprocess_inline (@) { #{{{
 	my $atomurl=basename(atompage($params{destpage}).$feednum) if $feeds && $atom;

 	my $ret="";
-	if ($config{cgiurl} && (exists $params{rootpage} ||
+	if ($config{cgiurl} && ! $params{preview} && (exists $params{rootpage} ||
 			(exists $params{postform} && yesno($params{postform})))) {
 		# Add a blog post form, with feed buttons.
 		my $formtemplate=template("blogpost.tmpl", blind_cache => 1);
@@ -201,7 +202,7 @@ sub preprocess_inline (@) { #{{{
 		}
 		$ret.=$formtemplate->output;
 	}
-	elsif ($feeds) {
+	elsif ($feeds && !$params{preview}) {
 		# Add feed buttons.
 		my $linktemplate=template("feedlink.tmpl", blind_cache => 1);
 		$linktemplate->param(rssurl => $rssurl) if $rss;
@@ -231,6 +232,8 @@ sub preprocess_inline (@) { #{{{
 			$template->param(pageurl => urlto(bestlink($params{page}, $page), $params{destpage}));
 			$template->param(title => pagetitle(basename($page)));
 			$template->param(ctime => displaytime($pagectime{$page}, $params{timeformat}));
+			$template->param(first => 1) if $page eq $list[0];
+			$template->param(last => 1) if $page eq $list[$#list];

 			if ($actions) {
 				my $file = $pagesources{$page};
@@ -286,18 +289,22 @@ sub preprocess_inline (@) { #{{{
 		if ($rss) {
 			my $rssp=rsspage($params{destpage}).$feednum;
 			will_render($params{destpage}, $rssp);
-			writefile($rssp, $config{destdir},
-				genfeed("rss", $rssurl, $desc, $params{destpage}, @list));
-			$toping{$params{destpage}}=1 unless $config{rebuild};
-			$feedlinks{$params{destpage}}=qq{<link rel="alternate" type="application/rss+xml" title="RSS" href="$rssurl" />};
+			if (! $params{preview}) {
+				writefile($rssp, $config{destdir},
+					genfeed("rss", $rssurl, $desc, $params{destpage}, @list));
+				$toping{$params{destpage}}=1 unless $config{rebuild};
+				$feedlinks{$params{destpage}}=qq{<link rel="alternate" type="application/rss+xml" title="RSS" href="$rssurl" />};
+			}
 		}
 		if ($atom) {
 			my $atomp=atompage($params{destpage}).$feednum;
 			will_render($params{destpage}, $atomp);
-			writefile($atomp, $config{destdir},
-				genfeed("atom", $atomurl, $desc, $params{destpage}, @list));
-			$toping{$params{destpage}}=1 unless $config{rebuild};
-			$feedlinks{$params{destpage}}=qq{<link rel="alternate" type="application/atom+xml" title="Atom" href="$atomurl" />};
+			if (! $params{preview}) {
+				writefile($atomp, $config{destdir},
+					genfeed("atom", $atomurl, $desc, $params{destpage}, @list));
+				$toping{$params{destpage}}=1 unless $config{rebuild};
+				$feedlinks{$params{destpage}}=qq{<link rel="alternate" type="application/atom+xml" title="Atom" href="$atomurl" />};
+			}
 		}
 	}

IkiWiki/Plugin/meta.pm

@@ -6,13 +6,7 @@ use warnings;
 use strict;
 use IkiWiki 2.00;

-my %meta;
-my %title;
-my %permalink;
-my %author;
-my %authorurl;
-my %license;
-my %copyright;
+my %metaheaders;

 sub import { #{{{
 	hook(type => "needsbuild", id => "meta", call => \&needsbuild);
@@ -24,7 +18,8 @@ sub needsbuild (@) { #{{{
 	my $needsbuild=shift;
 	foreach my $page (keys %pagestate) {
 		if (exists $pagestate{$page}{meta}) {
-			if (grep { $_ eq $pagesources{$page} } @$needsbuild) {
+			if (exists $pagesources{$page} &&
+			    grep { $_ eq $pagesources{$page} } @$needsbuild) {
 				# remove state, it will be re-added
 				# if the preprocessor directive is still
 				# there during the rebuild
@@ -71,16 +66,16 @@ sub preprocess (@) { #{{{
 	# Metadata collection that needs to happen during the scan pass.
 	if ($key eq 'title') {
-		$title{$page}=HTML::Entities::encode_numeric($value);
+		$pagestate{$page}{meta}{title}=HTML::Entities::encode_numeric($value);
 	}
 	elsif ($key eq 'license') {
-		push @{$meta{$page}}, '<link rel="license" href="#page_license" />';
-		$license{$page}=$value;
+		push @{$metaheaders{$page}}, '<link rel="license" href="#page_license" />';
+		$pagestate{$page}{meta}{license}=$value;
 		return "";
 	}
 	elsif ($key eq 'copyright') {
-		push @{$meta{$page}}, '<link rel="copyright" href="#page_copyright" />';
-		$copyright{$page}=$value;
+		push @{$metaheaders{$page}}, '<link rel="copyright" href="#page_copyright" />';
+		$pagestate{$page}{meta}{copyright}=$value;
 		return "";
 	}
 	elsif ($key eq 'link' && ! %params) {
@@ -89,11 +84,11 @@ sub preprocess (@) { #{{{
 		return "";
 	}
 	elsif ($key eq 'author') {
-		$author{$page}=$value;
+		$pagestate{$page}{meta}{author}=$value;
 		# fallthrough
 	}
 	elsif ($key eq 'authorurl') {
-		$authorurl{$page}=$value;
+		$pagestate{$page}{meta}{authorurl}=$value;
 		# fallthrough
 	}
@@ -111,8 +106,8 @@ sub preprocess (@) { #{{{
 		}
 	}
 	elsif ($key eq 'permalink') {
-		$permalink{$page}=$value;
-		push @{$meta{$page}}, scrub('<link rel="bookmark" href="'.encode_entities($value).'" />');
+		$pagestate{$page}{meta}{permalink}=$value;
+		push @{$metaheaders{$page}}, scrub('<link rel="bookmark" href="'.encode_entities($value).'" />');
 	}
 	elsif ($key eq 'stylesheet') {
 		my $rel=exists $params{rel} ? $params{rel} : "alternate stylesheet";
@@ -123,17 +118,17 @@ sub preprocess (@) { #{{{
 		if (! length $stylesheet) {
 			return "[[meta ".gettext("stylesheet not found")."]]";
 		}
-		push @{$meta{$page}}, '<link href="'.urlto($stylesheet, $page).
+		push @{$metaheaders{$page}}, '<link href="'.urlto($stylesheet, $page).
 			'" rel="'.encode_entities($rel).
 			'" title="'.encode_entities($title).
 			"\" type=\"text/css\" />";
 	}
 	elsif ($key eq 'openid') {
 		if (exists $params{server}) {
-			push @{$meta{$page}}, '<link href="'.encode_entities($params{server}).
+			push @{$metaheaders{$page}}, '<link href="'.encode_entities($params{server}).
 				'" rel="openid.server" />';
 		}
-		push @{$meta{$page}}, '<link href="'.encode_entities($value).
+		push @{$metaheaders{$page}}, '<link href="'.encode_entities($value).
 			'" rel="openid.delegate" />';
 	}
 	elsif ($key eq 'redir') {
@@ -172,11 +167,11 @@ sub preprocess (@) { #{{{
 		if (! $safe) {
 			$redir=scrub($redir);
 		}
-		push @{$meta{$page}}, $redir;
+		push @{$metaheaders{$page}}, $redir;
 	}
 	elsif ($key eq 'link') {
 		if (%params) {
-			push @{$meta{$page}}, scrub("<link href=\"".encode_entities($value)."\" ".
+			push @{$metaheaders{$page}}, scrub("<link href=\"".encode_entities($value)."\" ".
 				join(" ", map {
 					encode_entities($_)."=\"".encode_entities(decode_entities($params{$_}))."\""
 				} keys %params).
@@ -184,7 +179,7 @@ sub preprocess (@) { #{{{
 		}
 	}
 	else {
-		push @{$meta{$page}}, scrub('<meta name="'.encode_entities($key).
+		push @{$metaheaders{$page}}, scrub('<meta name="'.encode_entities($key).
 			'" content="'.encode_entities($value).'" />');
 	}
@@ -197,32 +192,80 @@ sub pagetemplate (@) { #{{{
 	my $destpage=$params{destpage};
 	my $template=$params{template};

-	if (exists $meta{$page} && $template->query(name => "meta")) {
+	if (exists $metaheaders{$page} && $template->query(name => "meta")) {
 		# avoid duplicate meta lines
 		my %seen;
-		$template->param(meta => join("\n", grep { (! $seen{$_}) && ($seen{$_}=1) } @{$meta{$page}}));
+		$template->param(meta => join("\n", grep { (! $seen{$_}) && ($seen{$_}=1) } @{$metaheaders{$page}}));
 	}
-	if (exists $title{$page} && $template->query(name => "title")) {
-		$template->param(title => $title{$page});
+	if (exists $pagestate{$page}{meta}{title} && $template->query(name => "title")) {
+		$template->param(title => $pagestate{$page}{meta}{title});
 		$template->param(title_overridden => 1);
 	}
-	$template->param(permalink => $permalink{$page})
-		if exists $permalink{$page} && $template->query(name => "permalink");
-	$template->param(author => $author{$page})
-		if exists $author{$page} && $template->query(name => "author");
-	$template->param(authorurl => $authorurl{$page})
-		if exists $authorurl{$page} && $template->query(name => "authorurl");
-
-	if (exists $license{$page} && $template->query(name => "license") &&
-	    ($page eq $destpage || ! exists $license{$destpage} ||
-	     $license{$page} ne $license{$destpage})) {
-		$template->param(license => htmlize($page, $destpage, $license{$page}));
+
+	foreach my $field (qw{author authorurl permalink}) {
+		$template->param($field => $pagestate{$page}{meta}{$field})
+			if exists $pagestate{$page}{meta}{$field} && $template->query(name => $field);
 	}
-	if (exists $copyright{$page} && $template->query(name => "copyright") &&
-	    ($page eq $destpage || ! exists $copyright{$destpage} ||
-	     $copyright{$page} ne $copyright{$destpage})) {
-		$template->param(copyright => htmlize($page, $destpage, $copyright{$page}));
+
+	foreach my $field (qw{license copyright}) {
+		if (exists $pagestate{$page}{meta}{$field} && $template->query(name => $field) &&
+		    ($page eq $destpage || ! exists $pagestate{$destpage}{meta}{$field} ||
+		     $pagestate{$page}{meta}{$field} ne $pagestate{$destpage}{meta}{$field})) {
+			$template->param($field => htmlize($page, $destpage, $pagestate{$page}{meta}{$field}));
+		}
 	}
 } # }}}

+sub match { #{{{
+	my $field=shift;
+	my $page=shift;
+
+	# turn glob into a safe regexp
+	my $re=quotemeta(shift);
+	$re=~s/\\\*/.*/g;
+	$re=~s/\\\?/./g;
+
+	my $val;
+	if (exists $pagestate{$page}{meta}{$field}) {
+		$val=$pagestate{$page}{meta}{$field};
+	}
+	elsif ($field eq 'title') {
+		$val=pagetitle($page);
+	}
+
+	if (defined $val) {
+		if ($val=~/^$re$/i) {
+			return IkiWiki::SuccessReason->new("$re matches $field of $page");
+		}
+		else {
+			return IkiWiki::FailReason->new("$re does not match $field of $page");
+		}
+	}
+	else {
+		return IkiWiki::FailReason->new("$page does not have a $field");
+	}
+} #}}}
+
+package IkiWiki::PageSpec;
+
+sub match_title ($$;@) { #{{{
+	IkiWiki::Plugin::meta::match("title", @_);
+} #}}}
+
+sub match_author ($$;@) { #{{{
+	IkiWiki::Plugin::meta::match("author", @_);
+} #}}}
+
+sub match_authorurl ($$;@) { #{{{
+	IkiWiki::Plugin::meta::match("authorurl", @_);
+} #}}}
+
+sub match_license ($$;@) { #{{{
+	IkiWiki::Plugin::meta::match("license", @_);
+} #}}}
+
+sub match_copyright ($$;@) { #{{{
+	IkiWiki::Plugin::meta::match("copyright", @_);
+} #}}}
+
 1

IkiWiki/Plugin/openid.pm

@@ -164,4 +164,31 @@ sub getobj ($$) { #{{{
 	);
 } #}}}

+package IkiWiki;
+
+# This is not used by this plugin, but this seems the best place to put it.
+# Used elsewhere to pretty-display the name of an openid user.
+sub openiduser ($) { #{{{
+	my $user=shift;
+
+	if ($user =~ m!^https?://! &&
+	    eval q{use Net::OpenID::VerifiedIdentity; 1} && !$@) {
+		my $oid=Net::OpenID::VerifiedIdentity->new(identity => $user);
+		my $display=$oid->display;
+		# Convert "user.somehost.com" to "user [somehost.com]".
+		if ($display !~ /\[/) {
+			$display=~s/^(.*?)\.([^.]+\.[a-z]+)$/$1 [$2]/;
+		}
+		# Convert "http://somehost.com/user" to "user [somehost.com]".
+		if ($display !~ /\[/) {
+			$display=~s/^https?:\/\/(.+)\/([^\/]+)$/$2 [$1]/;
+		}
+		$display=~s!^https?://!!; # make sure this is removed
+		eval q{use CGI 'escapeHTML'};
+		error($@) if $@;
+		return escapeHTML($display);
+	}
+	return;
+}
+
 1
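
The display munging in openiduser amounts to two regexp rewrites plus scheme stripping. A standalone sketch of just that munging, fed sample strings where Net::OpenID::VerifiedIdentity's ->display would normally supply the input:

#!/usr/bin/perl
use strict;
use warnings;

# Sample inputs are hypothetical; the first is a host-style display
# string, the second a URL-style one.
for my $display ("joey.example.com", "http://example.com/joey") {
	my $d=$display;
	# Convert "user.somehost.com" to "user [somehost.com]".
	$d=~s/^(.*?)\.([^.]+\.[a-z]+)$/$1 [$2]/ if $d !~ /\[/;
	# Convert "http://somehost.com/user" to "user [somehost.com]".
	$d=~s/^https?:\/\/(.+)\/([^\/]+)$/$2 [$1]/ if $d !~ /\[/;
	$d=~s!^https?://!!;
	print "$display => $d\n";
	# joey.example.com => joey [example.com]
	# http://example.com/joey => joey [example.com]
}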

IkiWiki/Plugin/poll.pm

@@ -7,7 +7,7 @@ use IkiWiki 2.00;
 sub import { #{{{
 	hook(type => "preprocess", id => "poll", call => \&preprocess);
-	hook(type => "cgi", id => "poll", call => \&cgi);
+	hook(type => "sessioncgi", id => "poll", call => \&sessioncgi);
 } # }}}

 sub yesno ($) { #{{{
@@ -74,8 +74,9 @@ sub preprocess (@) { #{{{
 	return "<div class=poll>$ret</div>";
 } # }}}

-sub cgi ($) { #{{{
+sub sessioncgi ($$) { #{{{
 	my $cgi=shift;
+	my $session=shift;
 	if (defined $cgi->param('do') && $cgi->param('do') eq "poll") {
 		my $choice=$cgi->param('choice');
 		if (! defined $choice) {
@@ -92,7 +93,6 @@ sub cgi ($) { #{{{
 		# Did they vote before? If so, let them change their vote,
 		# and check for dups.
-		my $session=IkiWiki::cgi_getsession();
 		my $choice_param="poll_choice_${page}_$num";
 		my $oldchoice=$session->param($choice_param);
 		if (defined $oldchoice && $oldchoice eq $choice) {

IkiWiki/Plugin/prettydate.pm

@@ -63,6 +63,10 @@ sub checkconfig () { #{{{
 sub IkiWiki::displaytime ($;$) { #{{{
 	my $time=shift;
+	my $format=shift;
+	if (! defined $format) {
+		$format=$config{prettydateformat};
+	}

 	eval q{use Date::Format};
 	error($@) if $@;
@@ -93,7 +97,6 @@ sub IkiWiki::displaytime ($;$) { #{{{
 	$t=~s{\%A-}{my @yest=@t; $yest[6]--; strftime("%A", \@yest)}eg;

-	my $format=$config{prettydateformat};
 	$format=~s/\%X/$t/g;

 	return strftime($format, \@t);
 } #}}}

IkiWiki/Plugin/recentchanges.pm (new file)

@@ -0,0 +1,169 @@
#!/usr/bin/perl
package IkiWiki::Plugin::recentchanges;

use warnings;
use strict;
use IkiWiki 2.00;

sub import { #{{{
	hook(type => "checkconfig", id => "recentchanges", call => \&checkconfig);
	hook(type => "refresh", id => "recentchanges", call => \&refresh);
	hook(type => "pagetemplate", id => "recentchanges", call => \&pagetemplate);
	hook(type => "htmlize", id => "_change", call => \&htmlize);
	hook(type => "cgi", id => "recentchanges", call => \&cgi);
} #}}}

sub checkconfig () { #{{{
	$config{recentchangespage}='recentchanges' unless defined $config{recentchangespage};
	$config{recentchangesnum}=100 unless defined $config{recentchangesnum};
} #}}}

sub refresh ($) { #{{{
	my %seen;

	# add new changes
	foreach my $change (IkiWiki::rcs_recentchanges($config{recentchangesnum})) {
		$seen{store($change, $config{recentchangespage})}=1;
	}

	# delete old and excess changes
	foreach my $page (keys %pagesources) {
		if ($page =~ /\._change$/ && ! $seen{$page}) {
			unlink($config{srcdir}.'/'.$pagesources{$page});
		}
	}
} #}}}

# Enable the recentchanges link on wiki pages.
sub pagetemplate (@) { #{{{
	my %params=@_;
	my $template=$params{template};
	my $page=$params{page};
	if ($config{rcs} && $page ne $config{recentchangespage} &&
	    $template->query(name => "recentchangesurl")) {
		$template->param(recentchangesurl => urlto($config{recentchangespage}, $page));
		$template->param(have_actions => 1);
	}
} #}}}

# Pages with extension _change have plain html markup, pass through.
sub htmlize (@) { #{{{
	my %params=@_;
	return $params{content};
} #}}}

sub cgi ($) { #{{{
	my $cgi=shift;
	if (defined $cgi->param('do') && $cgi->param('do') eq "recentchanges_link") {
		# This is a link from a change page to some
		# other page. Since the change pages are only generated
		# once, statically, links on them won't be updated if the
		# page they link to is deleted, or newly created, or
		# changes for whatever reason. So this CGI handles that
		# dynamic linking stuff.
		my $page=$cgi->param("page");
		if (!defined $page) {
			error("missing page parameter");
		}

		IkiWiki::loadindex();

		my $link=bestlink("", $page);
		if (! length $link) {
			print "Content-type: text/html\n\n";
			print IkiWiki::misctemplate(gettext("missing page"),
				"<p>".
				sprintf(gettext("The page %s does not exist."),
					htmllink("", "", $page)).
				"</p>");
		}
		else {
			IkiWiki::redirect($cgi, $config{url}."/".htmlpage($link));
		}

		exit;
	}
}

sub store ($$$) { #{{{
	my $change=shift;

	my $page="$config{recentchangespage}/change_".IkiWiki::titlepage($change->{rev});

	# Optimisation to avoid re-writing pages. Assumes commits never
	# change (or that any changes are not important).
	return $page if exists $pagesources{$page} && ! $config{rebuild};

	# Limit pages to first 10, and add links to the changed pages.
	my $is_excess = exists $change->{pages}[10];
	delete @{$change->{pages}}[10 .. @{$change->{pages}}] if $is_excess;
	$change->{pages} = [
		map {
			if (length $config{cgiurl}) {
				$_->{link} = "<a href=\"".
					IkiWiki::cgiurl(
						do => "recentchanges_link",
						page => $_->{page}
					).
					"\">".
					IkiWiki::pagetitle($_->{page}).
					"</a>"
			}
			else {
				$_->{link} = IkiWiki::pagetitle($_->{page});
			}
			$_->{baseurl}="$config{url}/" if length $config{url};

			$_;
		} @{$change->{pages}}
	];
	push @{$change->{pages}}, { link => '...' } if $is_excess;

	# See if the committer is an openid.
	$change->{author}=$change->{user};
	my $oiduser=eval { IkiWiki::openiduser($change->{user}) };
	if (defined $oiduser) {
		$change->{authorurl}=$change->{user};
		$change->{user}=$oiduser;
	}
	elsif (length $config{cgiurl}) {
		$change->{authorurl} = IkiWiki::cgiurl(
			do => "recentchanges_link",
			page => (length $config{userdir} ? "$config{userdir}/" : "").$change->{author},
		);
	}

	# escape wikilinks and preprocessor stuff in commit messages
	if (ref $change->{message}) {
		foreach my $field (@{$change->{message}}) {
			if (exists $field->{line}) {
				$field->{line} =~ s/(?<!\\)\[\[/\\\[\[/g;
			}
		}
	}

	# Fill out a template with the change info.
	my $template=template("change.tmpl", blind_cache => 1);
	$template->param(
		%$change,
		commitdate => displaytime($change->{when}, "%X %x"),
		wikiname => $config{wikiname},
	);
	IkiWiki::run_hooks(pagetemplate => sub {
		shift->(page => $page, destpage => $page, template => $template);
	});

	my $file=$page."._change";
	writefile($file, $config{srcdir}, $template->output);
	utime $change->{when}, $change->{when}, "$config{srcdir}/$file";

	return $page;
} #}}}

sub updatechanges ($$) { #{{{
	my $subdir=shift;
	my @changes=@{shift()};
} #}}}

1
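
The do=recentchanges_link handler above receives links of the form store() builds with IkiWiki::cgiurl. A standalone sketch of roughly that URL construction, assuming a hypothetical cgiurl:

#!/usr/bin/perl
use strict;
use warnings;
use URI::Escape ();

# Hypothetical wiki CGI url; IkiWiki::cgiurl does the equivalent joining.
my $cgiurl="http://example.com/ikiwiki.cgi";
my %params=(do => "recentchanges_link", page => "index");
my $url=$cgiurl."?".join("&",
	map { $_."=".URI::Escape::uri_escape_utf8($params{$_}) } sort keys %params);
print "$url\n";	# http://example.com/ikiwiki.cgi?do=recentchanges_link&page=index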

IkiWiki/Plugin/teximg.pm

@@ -82,8 +82,8 @@ sub create ($$$) { #{{{
 		$logurl = urlto($imglog, $params->{destpage});
 	}
 	else {
-		$imgurl="$params->{page}/$digest.png";
-		$logurl="$params->{page}/$digest.log";
+		$imgurl=$params->{page}."/$digest.png";
+		$logurl=$params->{page}."/$digest.log";
 	}
 	if (-e "$config{destdir}/$imglink" ||

IkiWiki/Plugin/version.pm

@@ -18,7 +18,8 @@ sub needsbuild (@) { #{{{
 			if ($pagestate{$page}{version}{shown} ne $IkiWiki::version) {
 				push @$needsbuild, $pagesources{$page};
 			}
-			if (grep { $_ eq $pagesources{$page} } @$needsbuild) {
+			if (exists $pagesources{$page} &&
+			    grep { $_ eq $pagesources{$page} } @$needsbuild) {
 				# remove state, will be re-added if
 				# the version is still shown during the
 				# rebuild

IkiWiki/Rcs/Stub.pm

@@ -37,6 +37,7 @@ sub rcs_recentchanges ($) {
 	# Examine the RCS history and generate a list of recent changes.
 	# The data structure returned for each change is:
 	# {
+	#	rev => # the RCS's id for this commit
 	#	user => # name of user who made the change,
 	#	committype => # either "web" or the name of the rcs,
 	#	when => # time when the change was made,
@@ -56,13 +57,6 @@ sub rcs_recentchanges ($) {
 	# }
 }

-sub rcs_notify () {
-	# This function is called when a change is committed to the wiki,
-	# and ikiwiki is running as a post-commit hook from the RCS.
-	# It should examine the repository to somehow determine what pages
-	# changed, and then send emails to users subscribed to those pages.
-}
-
 sub rcs_getctime ($) {
 	# Optional, used to get the page creation time from the RCS.
 	error gettext("getctime not implemented");
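
Concretely, a backend implementing this interface would return a list of hashes shaped like the following sketch (all values illustrative):

# One element of the list returned by rcs_recentchanges:
my $change = {
	rev        => "1234",			# the RCS's id for this commit
	user       => "joey",
	committype => "svn",			# or "web" for web commits
	when       => 1202241582,		# absolute epoch time, per this change
	message    => [
		{ line => "first line of commit message" },
	],
	pages      => [
		{
			page    => "index",
			diffurl => "http://example.com/diff?r=1234",	# hypothetical
		},
	],
};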

IkiWiki/Rcs/bzr.pm 100644 (new file)

@@ -0,0 +1,166 @@
#!/usr/bin/perl

use warnings;
use strict;
use IkiWiki;
use Encode;
use open qw{:utf8 :std};

package IkiWiki;

sub bzr_log ($) { #{{{
	my $out = shift;
	my @infos = ();
	my $key = undef;

	while (<$out>) {
		my $line = $_;
		my ($value);
		if ($line =~ /^message:/) {
			$key = "message";
			$infos[$#infos]{$key} = "";
		}
		elsif ($line =~ /^(modified|added|renamed|renamed and modified|removed):/) {
			$key = "files";
			unless (defined($infos[$#infos]{$key})) { $infos[$#infos]{$key} = ""; }
		}
		elsif (defined($key) and $line =~ /^  (.*)/) {
			$infos[$#infos]{$key} .= $1;
		}
		elsif ($line eq "------------------------------------------------------------\n") {
			$key = undef;
			push (@infos, {});
		}
		else {
			chomp $line;
			($key, $value) = split /: +/, $line, 2;
			$infos[$#infos]{$key} = $value;
		}
	}
	close $out;

	return @infos;
} #}}}

sub rcs_update () { #{{{
	my @cmdline = ("bzr", $config{srcdir}, "update");
	if (system(@cmdline) != 0) {
		warn "'@cmdline' failed: $!";
	}
} #}}}

sub rcs_prepedit ($) { #{{{
	return "";
} #}}}

sub rcs_commit ($$$;$$) { #{{{
	my ($file, $message, $rcstoken, $user, $ipaddr) = @_;

	if (defined $user) {
		$user = possibly_foolish_untaint($user);
	}
	elsif (defined $ipaddr) {
		$user = "Anonymous from ".possibly_foolish_untaint($ipaddr);
	}
	else {
		$user = "Anonymous";
	}

	$message = possibly_foolish_untaint($message);
	if (! length $message) {
		$message = "no message given";
	}

	my @cmdline = ("bzr", "commit", "-m", $message, "--author", $user,
	               $config{srcdir}."/".$file);
	if (system(@cmdline) != 0) {
		warn "'@cmdline' failed: $!";
	}

	return undef; # success
} #}}}

sub rcs_add ($) { # {{{
	my ($file) = @_;

	my @cmdline = ("bzr", "add", "$config{srcdir}/$file");
	if (system(@cmdline) != 0) {
		warn "'@cmdline' failed: $!";
	}
} #}}}

sub rcs_recentchanges ($) { #{{{
	my ($num) = @_;

	eval q{use CGI 'escapeHTML'};
	error($@) if $@;

	my @cmdline = ("bzr", "log", "-v", "--show-ids", "--limit", $num,
	               $config{srcdir});
	open (my $out, "@cmdline |");

	eval q{use Date::Parse};
	error($@) if $@;

	my @ret;
	foreach my $info (bzr_log($out)) {
		my @pages = ();
		my @message = ();

		foreach my $msgline (split(/\n/, $info->{message})) {
			push @message, { line => $msgline };
		}

		foreach my $file (split(/\n/, $info->{files})) {
			my ($filename, $fileid) = split(/[ \t]+/, $file);
			my $diffurl = $config{'diffurl'};
			$diffurl =~ s/\[\[file\]\]/$filename/go;
			$diffurl =~ s/\[\[file-id\]\]/$fileid/go;
			$diffurl =~ s/\[\[r2\]\]/$info->{revno}/go;

			push @pages, {
				page => pagename($filename),
				diffurl => $diffurl,
			};
		}

		my $user = $info->{"committer"};
		if (defined($info->{"author"})) { $user = $info->{"author"}; }
		$user =~ s/\s*<.*>\s*$//;
		$user =~ s/^\s*//;

		push @ret, {
			rev => $info->{"revno"},
			user => $user,
			committype => "bzr",
			when => time - str2time($info->{"timestamp"}),
			message => [@message],
			pages => [@pages],
		};
	}

	return @ret;
} #}}}

sub rcs_getctime ($) { #{{{
	my ($file) = @_;

	# XXX filename passes through the shell here, should try to avoid
	# that just in case
	my @cmdline = ("bzr", "log", "--limit", '1', "$config{srcdir}/$file");
	open (my $out, "@cmdline |");

	my @log = bzr_log($out);

	if (@log < 1) {
		return 0;
	}

	eval q{use Date::Parse};
	error($@) if $@;

	my $ctime = str2time($log[0]->{"timestamp"});
	return $ctime;
} #}}}

1

IkiWiki/Rcs/git.pm

@@ -247,8 +247,6 @@ sub _parse_diff_tree ($@) { #{{{
 		last;
 	}

-	debug("No detail in diff-tree output") if !defined $ci{'details'};
-
 	return \%ci;
 } #}}}
@@ -374,7 +372,7 @@ sub rcs_recentchanges ($) { #{{{
 		my ($sha1, $when) = (
 			$ci->{'sha1'},
-			time - $ci->{'author_epoch'}
+			$ci->{'author_epoch'}
 		);

 		my (@pages, @messages);
@@ -421,50 +419,6 @@ sub rcs_recentchanges ($) { #{{{
 	return @rets;
 } #}}}

-sub rcs_notify () { #{{{
-	# Send notification mail to subscribed users.
-	#
-	# In usual Git usage, hooks/update script is presumed to send
-	# notification mails (see git-receive-pack(1)). But we prefer
-	# hooks/post-update to support IkiWiki commits coming from a
-	# cloned repository (through command line) because post-update
-	# is called _after_ each ref in repository is updated (update
-	# hook is called _before_ the repository is updated). Since
-	# post-update hook does not accept command line arguments, we
-	# don't have an $ENV variable in this function.
-	#
-	# Here, we rely on a simple fact: we can extract all parts of the
-	# notification content by parsing the "HEAD" commit (which also
-	# triggers a refresh of IkiWiki pages).
-	my $ci = git_commit_info('HEAD');
-	return if !defined $ci;
-
-	my @changed_pages = map { $_->{'file'} } @{ $ci->{'details'} };
-
-	my ($user, $message);
-	if (@{ $ci->{'comment'} }[0] =~ m/$config{web_commit_regexp}/) {
-		$user = defined $2 ? "$2" : "$3";
-		$message = $4;
-	}
-	else {
-		$user = $ci->{'author_username'};
-		$message = join "\n", @{ $ci->{'comment'} };
-	}
-
-	my $sha1 = $ci->{'sha1'};
-
-	require IkiWiki::UserInfo;
-	send_commit_mails(
-		sub {
-			$message;
-		},
-		sub {
-			join "\n", run_or_die('git', 'diff', "${sha1}^", $sha1);
-		}, $user, @changed_pages
-	);
-} #}}}
-
 sub rcs_getctime ($) { #{{{
 	my $file=shift;
 	# Remove srcdir prefix

IkiWiki/Rcs/mercurial.pm

@@ -142,7 +142,7 @@ sub rcs_recentchanges ($) { #{{{
 			rev => $info->{"changeset"},
 			user => $user,
 			committype => "mercurial",
-			when => time - str2time($info->{"date"}),
+			when => str2time($info->{"date"}),
 			message => [@message],
 			pages => [@pages],
 		};
@@ -151,10 +151,6 @@ sub rcs_recentchanges ($) { #{{{
 	return @ret;
 } #}}}

-sub rcs_notify () { #{{{
-	# TODO
-} #}}}
-
 sub rcs_getctime ($) { #{{{
 	my ($file) = @_;

IkiWiki/Rcs/monotone.pm

@@ -342,10 +342,10 @@ sub rcs_commit ($$$;$$) { #{{{
 		return $conflict;
 	}
 	if (defined($config{mtnsync}) && $config{mtnsync}) {
-		if (system("mtn", "--root=$config{mtnrootdir}", "sync",
+		if (system("mtn", "--root=$config{mtnrootdir}", "push",
 		           "--quiet", "--ticker=none", "--key",
 		           $config{mtnkey}) != 0) {
-			debug("monotone sync failed");
+			debug("monotone push failed");
 		}
 	}
@@ -416,7 +416,7 @@ sub rcs_recentchanges ($) { #{{{
 				$committype = "monotone";
 			}
 		} elsif ($cert->{name} eq "date") {
-			$when = time - str2time($cert->{value}, 'UTC');
+			$when = str2time($cert->{value}, 'UTC');
 		} elsif ($cert->{name} eq "changelog") {
 			my $messageText = $cert->{value};
 			# split the changelog into multiple
@@ -431,10 +431,28 @@ sub rcs_recentchanges ($) { #{{{
 		my @changed_files = get_changed_files($automator, $rev);
 		my $file;

+		my ($out, $err) = $automator->call("parents", $rev);
+		my @parents = ($out =~ m/^($sha1_pattern)$/);
+		my $parent = $parents[0];
+
 		foreach $file (@changed_files) {
-			push @pages, {
-				page => pagename($file),
-			} if length $file;
+			next unless length $file;
+
+			if (defined $config{diffurl} and (@parents == 1)) {
+				my $diffurl=$config{diffurl};
+				$diffurl=~s/\[\[r1\]\]/$parent/g;
+				$diffurl=~s/\[\[r2\]\]/$rev/g;
+				$diffurl=~s/\[\[file\]\]/$file/g;
+				push @pages, {
+					page => pagename($file),
+					diffurl => $diffurl,
+				};
+			}
+			else {
+				push @pages, {
+					page => pagename($file),
+				}
+			}
 		}

 		push @ret, {
@@ -452,54 +470,6 @@ sub rcs_recentchanges ($) { #{{{
 	return @ret;
 } #}}}

-sub rcs_notify () { #{{{
-	debug("The monotone rcs_notify function is currently untested. Use at own risk!");
-
-	if (! exists $ENV{REV}) {
-		error(gettext("REV is not set, not running from mtn post-commit hook, cannot send notifications"));
-	}
-	if ($ENV{REV} !~ m/($sha1_pattern)/) { # sha1 is untainted now
-		error(gettext("REV is not a valid revision identifier, cannot send notifications"));
-	}
-	my $rev = $1;
-
-	check_config();
-
-	my $automator = Monotone->new();
-	$automator->open(undef, $config{mtnrootdir});
-
-	my $certs = [read_certs($automator, $rev)];
-	my $user;
-	my $message;
-	my $when;
-
-	foreach my $cert (@$certs) {
-		if ($cert->{signature} eq "ok" && $cert->{trust} eq "trusted") {
-			if ($cert->{name} eq "author") {
-				$user = $cert->{value};
-			} elsif ($cert->{name} eq "date") {
-				$when = $cert->{value};
-			} elsif ($cert->{name} eq "changelog") {
-				$message = $cert->{value};
-			}
-		}
-	}
-
-	my @changed_pages = get_changed_files($automator, $rev);
-
-	$automator->close();
-
-	require IkiWiki::UserInfo;
-	send_commit_mails(
-		sub {
-			return $message;
-		},
-		sub {
-			`mtn --root=$config{mtnrootdir} au content_diff -r $rev`;
-		},
-		$user, @changed_pages);
-} #}}}
-
 sub rcs_getctime ($) { #{{{
 	my $file=shift;
@@ -604,4 +574,3 @@ __DATA__
 			return true
 		end
 	}
-EOF
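
The diffurl handling added here follows the convention the svn and bzr backends already use: the configured URL is a template with [[file]], [[r1]] and [[r2]] placeholders filled in per changed file. A standalone sketch with a hypothetical diffurl:

#!/usr/bin/perl
use strict;
use warnings;

# Hypothetical configured template, standing in for $config{diffurl}.
my $config_diffurl="http://example.com/diff?file=[[file]];r1=[[r1]];r2=[[r2]]";

my ($parent, $rev, $file)=("abc123", "def456", "index.mdwn");
my $diffurl=$config_diffurl;
$diffurl=~s/\[\[r1\]\]/$parent/g;
$diffurl=~s/\[\[r2\]\]/$rev/g;
$diffurl=~s/\[\[file\]\]/$file/g;
print "$diffurl\n";	# http://example.com/diff?file=index.mdwn;r1=abc123;r2=def456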

IkiWiki/Rcs/svn.pm

@@ -171,7 +171,7 @@ sub rcs_recentchanges ($) { #{{{
 		my $rev = $logentry->{revision};
 		my $user = $logentry->{author};

-		my $when=time - str2time($logentry->{date}, 'UTC');
+		my $when=str2time($logentry->{date}, 'UTC');

 		foreach my $msgline (split(/\n/, $logentry->{msg})) {
 			push @message, { line => $msgline };
@@ -203,7 +203,8 @@ sub rcs_recentchanges ($) { #{{{
 				diffurl => $diffurl,
 			} if length $file;
 		}
-		push @ret, { rev => $rev,
+		push @ret, {
+			rev => $rev,
 			user => $user,
 			committype => $committype,
 			when => $when,
@@ -216,44 +217,6 @@ sub rcs_recentchanges ($) { #{{{
 	return @ret;
 } #}}}

-sub rcs_notify () { #{{{
-	if (! exists $ENV{REV}) {
-		error(gettext("REV is not set, not running from svn post-commit hook, cannot send notifications"));
-	}
-	my $rev=int(possibly_foolish_untaint($ENV{REV}));
-
-	my $user=`svnlook author $config{svnrepo} -r $rev`;
-	chomp $user;
-
-	my $message=`svnlook log $config{svnrepo} -r $rev`;
-	if ($message=~/$config{web_commit_regexp}/) {
-		$user=defined $2 ? "$2" : "$3";
-		$message=$4;
-	}
-
-	my @changed_pages;
-	foreach my $change (`svnlook changed $config{svnrepo} -r $rev`) {
-		chomp $change;
-		if (length $config{svnpath}) {
-			if ($change =~ /^[A-Z]+\s+\Q$config{svnpath}\E\/(.*)/) {
-				push @changed_pages, $1;
-			}
-		}
-		else {
-			push @changed_pages, $change;
-		}
-	}
-
-	require IkiWiki::UserInfo;
-	send_commit_mails(
-		sub {
-			return $message;
-		},
-		sub {
-			`svnlook diff $config{svnrepo} -r $rev --no-diff-deleted`;
-		}, $user, @changed_pages);
-} #}}}
-
 sub rcs_getctime ($) { #{{{
 	my $file=shift;

IkiWiki/Rcs/tla.pm

@@ -120,7 +120,7 @@ sub rcs_recentchanges ($) {
 			split(/ /, "$newfiles $modfiles .arch-ids/fake.id");

 		my $sdate = $head->get("Standard-date");
-		my $when = time - str2time($sdate, 'UTC');
+		my $when = str2time($sdate, 'UTC');

 		my $committype = "web";
 		if (defined $summ && $summ =~ /$config{web_commit_regexp}/) {
@@ -145,7 +145,8 @@ sub rcs_recentchanges ($) {
 				diffurl => $diffurl,
 			} if length $file;
 		}
-		push @ret, { rev => $change,
+		push @ret, {
+			rev => $change,
 			user => $user,
 			committype => $committype,
 			when => $when,
@@ -159,51 +160,6 @@ sub rcs_recentchanges ($) {
 	return @ret;
 }

-sub rcs_notify () { #{{{
-	# FIXME: Not set
-	if (! exists $ENV{ARCH_VERSION}) {
-		error("ARCH_VERSION is not set, not running from tla post-commit hook, cannot send notifications");
-	}
-	my $rev=int(possibly_foolish_untaint($ENV{REV}));
-
-	eval q{use Mail::Header};
-	error($@) if $@;
-	open(LOG, $ENV{"ARCH_LOG"});
-	my $head = Mail::Header->new(\*LOG);
-	close(LOG);
-
-	my $user = $head->get("Creator");
-
-	my $newfiles = $head->get("New-files");
-	my $modfiles = $head->get("Modified-files");
-	my $remfiles = $head->get("Removed-files");
-
-	my @changed_pages = grep { !/(^.*\/)?\.arch-ids\/.*\.id$/ }
-		split(/ /, "$newfiles $modfiles $remfiles .arch-ids/fake.id");
-
-	require IkiWiki::UserInfo;
-	send_commit_mails(
-		sub {
-			my $message = $head->get("Summary");
-			if ($message =~ /$config{web_commit_regexp}/) {
-				$user=defined $2 ? "$2" : "$3";
-				$message=$4;
-			}
-		},
-		sub {
-			my $logs = `tla logs -d $config{srcdir}`;
-			my @changesets = reverse split(/\n/, $logs);
-			my $i;
-
-			for($i=0;$i<$#changesets;$i++) {
-				last if $changesets[$i] eq $rev;
-			}
-
-			my $revminusone = $changesets[$i+1];
-			`tla diff -d $ENV{ARCH_TREE_ROOT} $revminusone`;
-		}, $user, @changed_pages);
-} #}}}
-
 sub rcs_getctime ($) { #{{{
 	my $file=shift;
 	eval q{use Date::Parse};


@ -82,12 +82,9 @@ sub genpage ($$) { #{{{
if (length $config{cgiurl}) { if (length $config{cgiurl}) {
$template->param(editurl => cgiurl(do => "edit", page => pagetitle($page, 1))); $template->param(editurl => cgiurl(do => "edit", page => pagetitle($page, 1)));
$template->param(prefsurl => cgiurl(do => "prefs")); $template->param(prefsurl => cgiurl(do => "prefs"));
if ($config{rcs}) {
$template->param(recentchangesurl => cgiurl(do => "recentchanges"));
}
$actions++; $actions++;
} }
if (length $config{historyurl}) { if (length $config{historyurl}) {
my $u=$config{historyurl}; my $u=$config{historyurl};
$u=~s/\[\[file\]\]/$pagesources{$page}/g; $u=~s/\[\[file\]\]/$pagesources{$page}/g;
@ -196,6 +193,7 @@ sub render ($) { #{{{
my $page=pagename($file); my $page=pagename($file);
delete $depends{$page}; delete $depends{$page};
will_render($page, htmlpage($page), 1); will_render($page, htmlpage($page), 1);
return if $type=~/^_/;
my $content=htmlize($page, $type, my $content=htmlize($page, $type,
linkify($page, $page, linkify($page, $page,
@ -205,7 +203,6 @@ sub render ($) { #{{{
my $output=htmlpage($page); my $output=htmlpage($page);
writefile($output, $config{destdir}, genpage($page, $content)); writefile($output, $config{destdir}, genpage($page, $content));
utime($pagemtime{$page}, $pagemtime{$page}, $config{destdir}."/".$output);
} }
else { else {
my $srcfd=readfile($srcfile, 1, 1); my $srcfd=readfile($srcfile, 1, 1);
@ -231,7 +228,6 @@ sub render ($) { #{{{
} }
} }
}); });
utime($pagemtime{$file}, $pagemtime{$file}, $config{destdir}."/".$file);
} }
} #}}} } #}}}
@ -256,6 +252,8 @@ sub refresh () { #{{{
$test=dirname($test); $test=dirname($test);
} }
} }
run_hooks(refresh => sub { shift->() });
# find existing pages # find existing pages
my %exists; my %exists;
@ -314,15 +312,19 @@ sub refresh () { #{{{
}, $dir); }, $dir);
}; };
my %rendered; my (%rendered, @add, @del, @internal);
# check for added or removed pages # check for added or removed pages
my @add;
foreach my $file (@files) { foreach my $file (@files) {
my $page=pagename($file); my $page=pagename($file);
$pagesources{$page}=$file; $pagesources{$page}=$file;
if (! $pagemtime{$page}) { if (! $pagemtime{$page}) {
push @add, $file; if (isinternal($page)) {
push @internal, $file;
}
else {
push @add, $file;
}
$pagecase{lc $page}=$page; $pagecase{lc $page}=$page;
if ($config{getctime} && -e "$config{srcdir}/$file") { if ($config{getctime} && -e "$config{srcdir}/$file") {
$pagectime{$page}=rcs_getctime("$config{srcdir}/$file"); $pagectime{$page}=rcs_getctime("$config{srcdir}/$file");
@ -332,11 +334,15 @@ sub refresh () { #{{{
} }
} }
} }
my @del;
foreach my $page (keys %pagemtime) { foreach my $page (keys %pagemtime) {
if (! $exists{$page}) { if (! $exists{$page}) {
debug(sprintf(gettext("removing old page %s"), $page)); if (isinternal($page)) {
push @del, $pagesources{$page}; push @internal, $pagesources{$page};
}
else {
debug(sprintf(gettext("removing old page %s"), $page));
push @del, $pagesources{$page};
}
$links{$page}=[]; $links{$page}=[];
$renderedfiles{$page}=[]; $renderedfiles{$page}=[];
$pagemtime{$page}=0; $pagemtime{$page}=0;
@ -361,7 +367,14 @@ sub refresh () { #{{{
$mtime > $pagemtime{$page} || $mtime > $pagemtime{$page} ||
$forcerebuild{$page}) { $forcerebuild{$page}) {
$pagemtime{$page}=$mtime; $pagemtime{$page}=$mtime;
push @needsbuild, $file; if (isinternal($page)) {
push @internal, $file;
# Preprocess internal page in scan-only mode.
preprocess($page, $page, readfile(srcfile($file)), 1);
}
else {
push @needsbuild, $file;
}
} }
} }
run_hooks(needsbuild => sub { shift->(\@needsbuild) }); run_hooks(needsbuild => sub { shift->(\@needsbuild) });
@ -377,6 +390,15 @@ sub refresh () { #{{{
render($file); render($file);
$rendered{$file}=1; $rendered{$file}=1;
} }
foreach my $file (@internal) {
# internal pages are not rendered
my $page=pagename($file);
delete $depends{$page};
foreach my $old (@{$renderedfiles{$page}}) {
delete $destsources{$old};
}
$renderedfiles{$page}=[];
}
# rebuild pages that link to added or removed pages # rebuild pages that link to added or removed pages
if (@add || @del) { if (@add || @del) {
@ -392,13 +414,17 @@ sub refresh () { #{{{
} }
} }
if (%rendered || @del) { if (%rendered || @del || @internal) {
my @changed=(keys %rendered, @del);
# rebuild dependant pages # rebuild dependant pages
foreach my $f (@files) { foreach my $f (@files) {
next if $rendered{$f}; next if $rendered{$f};
my $p=pagename($f); my $p=pagename($f);
if (exists $depends{$p}) { if (exists $depends{$p}) {
foreach my $file (keys %rendered, @del) { # only consider internal files
# if the page explicitly depends on such files
foreach my $file (@changed, $depends{$p}=~/internal\(/ ? @internal : ()) {
next if $f eq $file; next if $f eq $file;
my $page=pagename($file); my $page=pagename($file);
if (pagespec_match($page, $depends{$p}, location => $p)) { if (pagespec_match($page, $depends{$p}, location => $p)) {
@ -414,7 +440,7 @@ sub refresh () { #{{{
# handle backlinks; if a page has added/removed links, # handle backlinks; if a page has added/removed links,
# update the pages it links to # update the pages it links to
my %linkchanged; my %linkchanged;
foreach my $file (keys %rendered, @del) { foreach my $file (@changed) {
my $page=pagename($file); my $page=pagename($file);
if (exists $links{$page}) { if (exists $links{$page}) {
@ -436,6 +462,7 @@ sub refresh () { #{{{
} }
} }
} }
foreach my $link (keys %linkchanged) { foreach my $link (keys %linkchanged) {
my $linkfile=$pagesources{$link}; my $linkfile=$pagesources{$link};
if (defined $linkfile) { if (defined $linkfile) {


@ -92,91 +92,4 @@ sub set_banned_users (@) { #{{{
return userinfo_store($userinfo); return userinfo_store($userinfo);
} #}}} } #}}}
sub commit_notify_list ($@) { #{{{
my $committer=shift;
my @pages = map pagename($_), @_;
my @ret;
my $userinfo=userinfo_retrieve();
foreach my $user (keys %{$userinfo}) {
next if $user eq $committer;
if (exists $userinfo->{$user}->{subscriptions} &&
length $userinfo->{$user}->{subscriptions} &&
exists $userinfo->{$user}->{email} &&
length $userinfo->{$user}->{email} &&
grep { pagespec_match($_,
$userinfo->{$user}->{subscriptions},
user => $committer) }
map pagename($_), @_) {
push @ret, $userinfo->{$user}->{email};
}
}
return @ret;
} #}}}
sub send_commit_mails ($$$@) { #{{{
my $messagesub=shift;
my $diffsub=shift;
my $user=shift;
my @changed_pages=@_;
return unless @changed_pages;
my @email_recipients=commit_notify_list($user, @changed_pages);
if (@email_recipients) {
# TODO: if a commit spans multiple pages, this will send
# subscribers a diff that might contain pages they did not
# sign up for. Should separate the diff per page and
# reassemble into one mail with just the pages subscribed to.
my $diff=$diffsub->();
my $message=$messagesub->();
my $pagelist;
if (@changed_pages > 2) {
$pagelist="$changed_pages[0] $changed_pages[1] ...";
}
else {
$pagelist.=join(" ", @changed_pages);
}
#translators: The three variables are the name of the wiki,
#translators: A list of one or more pages that were changed,
#translators: And the name of the user making the change.
#translators: This is used as the subject of a commit email.
my $subject=sprintf(gettext("update of %s's %s by %s"),
$config{wikiname}, $pagelist, $user);
my $template=template("notifymail.tmpl");
$template->param(
wikiname => $config{wikiname},
diff => $diff,
user => $user,
message => $message,
);
# Daemonize, in case the mail sending takes a while.
defined(my $pid = fork) or error("Can't fork: $!");
return if $pid;
setsid() or error("Can't start a new session: $!");
chdir '/';
open STDIN, '/dev/null';
open STDOUT, '>/dev/null';
open STDERR, '>&STDOUT' or error("Can't dup stdout: $!");
unlockwiki(); # don't need to keep a lock on the wiki
eval q{use Mail::Sendmail};
error($@) if $@;
foreach my $email (@email_recipients) {
sendmail(
To => $email,
From => "$config{wikiname} <$config{adminemail}>",
Subject => $subject,
Message => $template->output,
);
}
exit 0; # daemon process done
}
} #}}}
1 1


@ -36,22 +36,6 @@ sub gen_wrapper () { #{{{
addenv("$var", s); addenv("$var", s);
EOF EOF
} }
if ($config{rcs} eq "svn" && $config{notify}) {
# Support running directly as hooks/post-commit by passing
# $2 in REV in the environment.
$envsave.=<<"EOF"
if (argc == 3)
addenv("REV", argv[2]);
else if ((s=getenv("REV")))
addenv("REV", s);
EOF
}
if ($config{rcs} eq "tla" && $config{notify}) {
$envsave.=<<"EOF"
if ((s=getenv("ARCH_VERSION")))
addenv("ARCH_VERSION", s);
EOF
}
$Data::Dumper::Indent=0; # no newlines $Data::Dumper::Indent=0; # no newlines
my $configstring=Data::Dumper->Dump([\%config], ['*config']); my $configstring=Data::Dumper->Dump([\%config], ['*config']);

debian/NEWS

@ -1,4 +1,4 @@
ikiwiki (2.21) unstable; urgency=low ikiwiki (2.31) unstable; urgency=low
ikiwiki now has a new syntax for preprocessor directives, using the ikiwiki now has a new syntax for preprocessor directives, using the
prefix '!': prefix '!':
@ -31,6 +31,29 @@ ikiwiki (2.21) unstable; urgency=low
-- Josh Triplett <josh@freedesktop.org> Sat, 26 Jan 2008 16:26:47 -0800 -- Josh Triplett <josh@freedesktop.org> Sat, 26 Jan 2008 16:26:47 -0800
ikiwiki (2.30) unstable; urgency=low
Ever feel that ikiwiki's handling of RecentChanges wasn't truly in the
spirit of a wiki compiler? Well, that's changed. The RecentChanges page is
now a static page, not a CGI. Users can subscribe to its rss/atom feeds.
Custom RecentChanges pages can be easily set up that display only changes
to a subset of pages, or only changes by a subset of users.
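For instance, a custom page might contain something like this (a sketch,
assuming the default recentchangespage setting and the new author()
pagespec provided by the meta plugin; the user name is made up):

    [[inline pages="internal(recentchanges/change_*) and author(joey)" template=recentchanges show=0]]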
All wikis need to be rebuilt on upgrade to this version. If you listed your
wiki in /etc/ikiwiki/wikilist this will be done automatically when the
Debian package is upgraded. Or use ikiwiki-mass-rebuild to force a rebuild.
With this excellent new RecentChanges support, the mail notification system
is showing its age (and known to be variously buggy and underimplemented for
various VCSes), and so ikiwiki's support for sending commit mails is REMOVED
from this version. If you were subscribed to commit mails, you should be
able to accomplish the same thing by subscribing to a RecentChanges feed.
The "svnrepo" and "notify" fields in setup files are no longer used, and
silently ignored. You may want to remove them from your setup file.
-- Joey Hess <joeyh@debian.org> Tue, 29 Jan 2008 17:18:31 -0500
ikiwiki (2.20) unstable; urgency=low ikiwiki (2.20) unstable; urgency=low
The template plugin has begun to htmlize the variables passed to templates. The template plugin has begun to htmlize the variables passed to templates.

debian/changelog

@ -1,4 +1,45 @@
ikiwiki (2.21) UNRELEASED; urgency=low ikiwiki (2.31) UNRELEASED; urgency=low
* Revert preservation of input file modification times in output files,
since this leads to too many problems with web caching, especially with
inlined pages. Properly solving this would involve tracking every page
that contributes to a page's content and using the youngest of them all,
as well as special cases for things like the version plugin, and it's just
too complex to do.
* aggregate: Forking a child broke the one state that mattered: Forcing
the aggregating page to be rebuilt. Fix this.
* cgi hooks are now run before ikiwiki state is loaded.
* This allows locking the wiki before loading state, which avoids some
tricky locking code when saving a web edit.
* poll: This plugin turns out to have edited pages w/o doing any locking.
Oops. Convert it from a cgi to a sessioncgi hook, which will work
much better.
* recentchanges: Improve handling of links on the very static changes pages
by thunking to the CGI, which can redirect to the page, or allow it to be
created if it doesn't exist.
* recentchanges: Expire all *._change pages, even if the directory
they're in has changed.
* aggregate: Lots of changes; aggregation can now run without locking the
wiki, and there is a separate aggregatelock to prevent multiple concurrent
aggregation runs.
* monotone changes by Brian May:
- On commits, replace "mtn sync" bidirectional with "mtn push" single
direction. No need to pull changes when doing a commit. mtn sync
is still called in rcs_update.
- Support for viewing differences via patches using viewmtn.
* inline: When previewing, still call will_render on rss/atom files,
just avoid actually writing the files. This is necessary because ikiwiki
saves state after a preview (in case it actually *did* write files),
and if will_render isn't called its security checks will get upset
when the page is saved. Thanks to Edward Betts for his help tracking this
tricky bug down.
* inline: Add new `allowrss` and `allowatom` config options. These can be
used if you want a wiki that doesn't default to generating rss or atom
feeds, but that does allow them to be turned on for specific blogs.
-- Joey Hess <joeyh@debian.org> Sat, 02 Feb 2008 23:36:31 -0500
ikiwiki (2.30) unstable; urgency=low
[ Joey Hess ] [ Joey Hess ]
* Old versions of git-init don't support --git-dir or GIT_DIR with * Old versions of git-init don't support --git-dir or GIT_DIR with
@ -21,7 +62,32 @@ ikiwiki (2.21) UNRELEASED; urgency=low
the underlay to support either setting of prefix_directives. Add NEWS the underlay to support either setting of prefix_directives. Add NEWS
entry with migration information. entry with migration information.
-- Joey Hess <joeyh@debian.org> Fri, 11 Jan 2008 15:09:37 -0500 [ Joey Hess ]
* inline: The template can check for FIRST and LAST, which will be
set for the first and last inlined page. Useful for templates that build
tables and the like.
* prettydate,ddate: Don't ignore time formats passed to displaytime
function.
* Pages with extensions starting with "_" are internal-use, and will
not be rendered or web-edited, or matched by normal pagespecs.
* Add "internal()" pagespec that matches internal-use pages.
* RecentChanges is now a static html page, that's updated whenever a commit
is made to the wiki. It's built as a blog using inline, so it can have
an rss feed that users can subscribe to.
* Removed support for sending commit notification mails. Along with it went
the svnrepo and notify settings, though both will be ignored if left in
setup files. Also gone with it is the "user()" pagespec.
* Add refresh hook.
* meta: Add pagespec functions to match against title, author, authorurl,
license, and copyright. This can be used to create custom RecentChanges.
* meta: To support the pagespec functions, metadata about pages has to be
retained as pagestate.
* Fix encoding bug when pagestate values contained spaces.
* Add support for bzr, written by Jelmer Vernooij. Thanks also to bma for
his independent work on bzr support.
* Copyright file updates.
-- Joey Hess <joeyh@debian.org> Sat, 02 Feb 2008 17:41:57 -0500
ikiwiki (2.20) unstable; urgency=low ikiwiki (2.20) unstable; urgency=low

debian/control

@ -13,7 +13,7 @@ Vcs-Browser: http://git.ikiwiki.info/?p=ikiwiki
Package: ikiwiki Package: ikiwiki
Architecture: all Architecture: all
Depends: ${perl:Depends}, markdown, libhtml-template-perl, libhtml-parser-perl, liburi-perl Depends: ${perl:Depends}, markdown, libhtml-template-perl, libhtml-parser-perl, liburi-perl
Recommends: gcc | c-compiler, libc6-dev | libc-dev, subversion | git-core | tla | mercurial, libxml-simple-perl, libnet-openid-consumer-perl, liblwpx-paranoidagent-perl, libtimedate-perl, libhtml-scrubber-perl, libcgi-formbuilder-perl (>= 3.05), libtime-duration-perl, libcgi-session-perl (>= 4.14-1), libmail-sendmail-perl Recommends: gcc | c-compiler, libc6-dev | libc-dev, subversion | git-core | tla | bzr (>= 0.91) | mercurial | monotone, libxml-simple-perl, libnet-openid-consumer-perl, liblwpx-paranoidagent-perl, libtimedate-perl, libhtml-scrubber-perl, libcgi-formbuilder-perl (>= 3.05), libtime-duration-perl, libcgi-session-perl (>= 4.14-1), libmail-sendmail-perl
Suggests: viewvc | gitweb | viewcvs, hyperestraier, librpc-xml-perl, libtext-wikiformat-perl, python, python-docutils, polygen, tidy, libxml-feed-perl, libmailtools-perl, perlmagick, libfile-mimeinfo-perl, libcrypt-ssleay-perl, liblocale-gettext-perl (>= 1.05-1), libtext-typography-perl, libtext-csv-perl, libdigest-sha1-perl, graphviz Suggests: viewvc | gitweb | viewcvs, hyperestraier, librpc-xml-perl, libtext-wikiformat-perl, python, python-docutils, polygen, tidy, libxml-feed-perl, libmailtools-perl, perlmagick, libfile-mimeinfo-perl, libcrypt-ssleay-perl, liblocale-gettext-perl (>= 1.05-1), libtext-typography-perl, libtext-csv-perl, libdigest-sha1-perl, graphviz
Conflicts: ikiwiki-plugin-table Conflicts: ikiwiki-plugin-table
Replaces: ikiwiki-plugin-table Replaces: ikiwiki-plugin-table

debian/copyright

@ -1,15 +1,21 @@
Files: * Files: *
Copyright: © 2006-2007 Joey Hess <joey@ikiwiki.info> Copyright: © 2006-2008 Joey Hess <joey@ikiwiki.info>
License: GPL-2+ License: GPL-2+
The full text of the GPL is distributed as doc/GPL in ikiwiki's source, The full text of the GPL is distributed as doc/GPL in ikiwiki's source,
and is distributed in /usr/share/common-licenses/GPL-2 on Debian systems. and is distributed in /usr/share/common-licenses/GPL-2 on Debian systems.
Files: templates/*, underlays/basewiki/*, ikiwiki.setup Files: templates/*, underlays/basewiki/*, ikiwiki.setup
Copyright: © 2006-2007 Joey Hess <joey@ikiwiki.info> Copyright: © 2006-2008 Joey Hess <joey@ikiwiki.info>
License: other License: other
Redistribution and use in source and compiled forms, with or without Redistribution and use in source and compiled forms, with or without
modification, are permitted under any circumstances. No warranty. modification, are permitted under any circumstances. No warranty.
Files: bzr.pm
Copyright:
© 2008 Jelmer Vernooij <jelmer@samba.org>
© 2006 Emanuele Aina
License: GPL-2+
Files: git.pm Files: git.pm
Copyright: © 2006-2007 Recai Oktaş <roktas@debian.org> Copyright: © 2006-2007 Recai Oktaş <roktas@debian.org>
License: GPL-2+ License: GPL-2+

debian/postinst

@ -4,7 +4,7 @@ set -e
# Change this when some incompatible change is made that requires # Change this when some incompatible change is made that requires
# rebuilding all wikis. # rebuilding all wikis.
firstcompat=2.1 firstcompat=2.30
if [ "$1" = configure ] && \ if [ "$1" = configure ] && \
dpkg --compare-versions "$2" lt "$firstcompat"; then dpkg --compare-versions "$2" lt "$firstcompat"; then


@ -1,258 +1,56 @@
#Ikiwiki plugin for the Monotone revision control system. The Monotone module still lacks support for setting up a post-commit hook,
so commits made via monotone will not automatically update the wiki.
I've just made a patch to the ikiwiki code that allows it to use the [[rcs/Monotone]] revision control system. It is available at: Here for future reference is the most recent version of support for
that I've been sent. It's not yet working; there are path issues. --[[Joey]]
<http://www.cse.unsw.edu.au/~willu/monotone-ikiwiki.diff>
At the moment it is basically complete. At present rcs_notify() is implemented but untested, the rest is implemented and tested.
The current version of the patch handles conflicts through the web interface. It is still not perfect as it will break if there is a rename that conflicts with a web change (but so will the other Rcs plugins I think). It also commits a revision with conflict markers if there is a conflict requiring such markers... ick.
Note: This patch requires a rather recent Monotone perl module (18 August 2007 or later). It is available from the monotone repository here: <http://viewmtn.angrygoats.net/branch/changes/net.venge.monotone>.
> The setup instructions to add 40 lines of code to monotonerc is pretty frightening stuff.
> Is there some way this can be automated? --[[Joey]]
>> I've committed a bunch of this to monotone so that in future it could be removed.
>> I've also just fixed this so it is in a separate, automagically generated, rc file.
>>> Fair enough. Didn't realize you were a monotone committer. :-)
>>>> I am, but still a little newish. Feedback is good. In particular, this is my first major bit of Perl.
> Having rcs_commit return a warning message when there's an unresolved conflict
> isn't right; that message will populate the page edit box. You might want
> to use the error() function here?
>> It should never reach that case, so I have changed that to error.
> There's an incomplete comment ending with "note, this relies on the fact that"
>> erg... sorry, fixed.
[[tag patch]]
>> I've [[accepted|done]] this patch, thank you!
>>> Thanks for committing it. I hate keeping my own diffs. :)
>> I did make a few changes. Please review, and make sure it still works
>> (a test case like we have for some of the other RCSes would be nice..)
>>> Tested. It still works at least as well as it did. I'll try to get to a test case soon.
>>> In checking the source I noticed a few bogus comments I left in when editing,
>>> and a bug in page adding.
>>> Here is a small patch for them:
>>>> applied
Here is another patch. It fixes a FIXME you added. I was using $file within backticks because
I was getting an error trying to do it right. I've figured out the error, and now do it right. This
should also speed things up (very slightly).
> applied
>> BTW, will all the monotone output parsing work if LANG != C?
>>> It should (he says crossing fingers).
>>> In the places where I do any complex parsing I'm using a special
>>> version of the mtn commands designed for scripting. They have a
>>> stable, easy to parse, output that doesn't get translated (I think).
>> Do monotone post-commit hooks actually use REV?
>>> Monotone post-commit hooks are written in Lua and can do
>>> what they please. Setting the REV environment var before
>>> calling Ikiwiki seems reasonable, but I've not written the
>>> Lua hook.
>>>> So the rcs_notify support is not just untested, but can't work
>>>> at all w/o further development.
I've just done this further development... The following patch adds support for diffurls.
I've also partially tested the commit message support. I was unable to get Ikiwiki to send
change emails at all (cgi or otherwise), so I tested by adding warn() calls. There were a
few things that needed to be fixed. Support is much closer now (including a simple default
monotone lua hook).
When I stick this diff inline into the page, I have to indent it by four spaces, and that's fine.
But markdown seems to still be interpreting parts of it (e.g. the diff url) in strange ways. I
think it is the square brackets.
Index: IkiWiki/Rcs/monotone.pm
===================================================================
--- IkiWiki/Rcs/monotone.pm (revision 4252)
+++ IkiWiki/Rcs/monotone.pm (working copy)
@@ -186,8 +186,9 @@
check_config();
if (defined($config{mtnsync}) && $config{mtnsync}) {
+ check_mergerc();
if (system("mtn", "--root=$config{mtnrootdir}", "sync",
- "--quiet", "--ticker=none",
+ "--quiet", "--ticker=none", "--rcfile", $config{mtnmergerc},
"--key", $config{mtnkey}) != 0) {
debug("monotone sync failed before update");
}
@@ -342,10 +343,10 @@
return $conflict;
}
if (defined($config{mtnsync}) && $config{mtnsync}) {
- if (system("mtn", "--root=$config{mtnrootdir}", "sync",
+ if (system("mtn", "--root=$config{mtnrootdir}", "push",
"--quiet", "--ticker=none", "--key",
$config{mtnkey}) != 0) {
- debug("monotone sync failed");
+ debug("monotone push failed");
}
}
@@ -431,10 +432,28 @@
my @changed_files = get_changed_files($automator, $rev);
my $file;
+ my ($out, $err) = $automator->call("parents", $rev);
+ my @parents = ($out =~ m/^($sha1_pattern)$/);
+ my $parent = $parents[0];
+
foreach $file (@changed_files) {
- push @pages, {
- page => pagename($file),
- } if length $file;
+ if (length($file)) {
+ if (defined $config{diffurl} and (@parents == 1)) {
+ my $diffurl=$config{diffurl};
+ $diffurl=~s/\[\[r1\]\]/$parent/g;
+ $diffurl=~s/\[\[r2\]\]/$rev/g;
+ $diffurl=~s/\[\[file\]\]/$file/g;
+ push @pages, {
+ page => pagename($file),
+ diffurl => $diffurl,
+ };
+ }
+ else {
+ push @pages, {
+ page => pagename($file),
+ };
+ }
+ }
}
push @ret, {
@@ -487,6 +506,18 @@
my @changed_pages = get_changed_files($automator, $rev);
+ my ($out, $err) = $automator->call("parents", $rev);
+ my @parents = ($out =~ m/^($sha1_pattern)$/);
+ my $parent = $parents[0];
+
+ my $diff;
+
+ if (@parents == 1) {
+ $automator->setOpts("r", $parent, "r", $rev);
+ ($out, $err) = $automator->call("content_diff");
+ $diff = $out;
+ }
+
$automator->close();
require IkiWiki::UserInfo;
@@ -495,7 +526,7 @@
return $message;
},
sub {
- `mtn --root=$config{mtnrootdir} au content_diff -r $rev`;
+ return $diff;
},
$user, @changed_pages);
} #}}}
@@ -604,4 +635,9 @@
return true
end
}
+ function note_netsync_revision_received(new_id, revision, certs, session_id)
+ if (program_exists_in_path("ikiwiki-netsync-hook")) then
+ execute("ikiwiki-netsync-hook", new_id)
+ end
+ end
EOF
Index: IkiWiki/Wrapper.pm
===================================================================
--- IkiWiki/Wrapper.pm (revision 4252)
+++ IkiWiki/Wrapper.pm (working copy)
@@ -46,6 +46,16 @@
addenv("REV", s);
EOF
}
+ if ($config{rcs} eq "monotone" && $config{notify}) {
+ # Support running directly as hooks/post-commit by passing
+ # $1 in REV in the environment.
+ $envsave.=<<"EOF"
+ if (argc == 2)
+ addenv("REV", argv[1]);
+ else if ((s=getenv("REV")))
+ addenv("REV", s);
+EOF
+ }
if ($config{rcs} eq "tla" && $config{notify}) {
$envsave.=<<"EOF"
if ((s=getenv("ARCH_VERSION")))
Index: doc/rcs/monotone.mdwn
===================================================================
--- doc/rcs/monotone.mdwn (revision 4252)
+++ doc/rcs/monotone.mdwn (working copy)
@@ -1,16 +1,13 @@
-[monotone](http://monotone.ca/) is a distributed revision control system.
-Ikiwiki supports storing a wiki in Monotone and editing it using the [[cgi]] interface.
+[Monotone](http://monotone.ca/) is a distributed revision control system.
+Ikiwiki supports storing a wiki in a Monotone repository and editing it using the [[cgi]] interface.
It will use the Monotone logs to generate the [[RecentChanges]] page.
-The monotone support requires the Monotone perl module (from the contrib/ directory
-in the monotone source) to be installed. In particular, it needs version 0.03 or higher of that module.
+The monotone support in Ikiwiki requires the Monotone perl module to be installed
+(available from the contrib/ directory in the monotone source).
+In particular, it needs version 0.03 or higher of that module.
The module is available from the monotone source repository at:
<http://viewmtn.angrygoats.net/branch/changes/net.venge.monotone>
-Monotone support works, but there are still a few minor missing bits (listed here so they are not forgotten):
+At present the [[post-commit]] hook support is implemented but only partially tested.
-* At the moment there are no links to display diffs between revisions. It shouldn't be hard to add links to a [ViewMTN](http://grahame.angrygoats.net/moinmoin/ViewMTN) instance, but it hasn't been done yet.
-* The [[post-commit]] hook support, so that Ikiwiki sends change notifications when people commit using Monotone rather than the web interface, is partially implemented and untested.
-* Documentation (this page) could be improved.
-
There is also a mismatch between the way Ikiwiki handles conflicts and the way Monotone handles conflicts. At present, if there is a conflict, then Ikiwiki will commit a revision with conflict markers before presenting it to the user. This is ugly, but there is no clean way to fix it at present.
Index: doc/ikiwiki.setup
===================================================================
--- doc/ikiwiki.setup (revision 4252)
+++ doc/ikiwiki.setup (working copy)
@@ -46,6 +46,8 @@
# Monotone stuff
#rcs => "monotone",
#mtnkey => "web\@machine.company.com",
+ #historyurl => "http://viewmtn.company.com/",
+ #diffurl => "http://viewmtn.company.com/revision/diff/[[r1]]/with/[[r2]]/[[file]]",
# Set if you want the wiki to sync on update and commit.
#mtnsync => 0,
# The path to your workspace (defaults to the srcdir itself)
@@ -88,6 +90,16 @@
# # Enable mail notifications of commits.
# notify => 1,
#},
+ #{
+ # # The monotone netsync revision received wrapper.
+ # # Note that you also need to install a lua
+ # # hook into monotone to make this work
+ # # see: http://ikiwiki.info/rcs/monotone/
+ # wrapper => "/usr/local/bin/ikiwiki-netsync-hook",
+ # wrappermode => "04755",
+ # # Enable mail notifications of commits.
+ # notify => 1,
+ #},
],
# Generate rss feeds for blogs?
<pre>
diff --git a/IkiWiki/Rcs/monotone.pm b/IkiWiki/Rcs/monotone.pm
index cde6029..34f8f96 100644
--- a/IkiWiki/Rcs/monotone.pm
+++ b/IkiWiki/Rcs/monotone.pm
@@ -186,8 +186,9 @@ sub rcs_update () { #{{{
check_config();
if (defined($config{mtnsync}) && $config{mtnsync}) {
+ check_mergerc();
if (system("mtn", "--root=$config{mtnrootdir}", "sync",
- "--quiet", "--ticker=none",
+ "--quiet", "--ticker=none", "--rcfile", $config{mtnmergerc},
"--key", $config{mtnkey}) != 0) {
debug("monotone sync failed before update");
}
@@ -604,4 +605,9 @@ __DATA__
return true
end
}
+ function note_netsync_revision_received(new_id, revision, certs, session_id)
+ if (program_exists_in_path("ikiwiki-netsync-hook")) then
+ execute("ikiwiki-netsync-hook", new_id)
+ end
+ end
EOF
diff --git a/IkiWiki/Wrapper.pm b/IkiWiki/Wrapper.pm
index 2103ea5..cff718c 100644
diff --git a/doc/ikiwiki.setup b/doc/ikiwiki.setup
index 1377315..0cbe27e 100644
--- a/doc/ikiwiki.setup
+++ b/doc/ikiwiki.setup
@@ -88,6 +88,16 @@ use IkiWiki::Setup::Standard {
# # Enable mail notifications of commits.
# notify => 1,
#},
+ #{
+ # # The monotone netsync revision received wrapper.
+ # # Note that you also need to install a lua
+ # # hook into monotone to make this work
+ # # see: http://ikiwiki.info/rcs/monotone/
+ # wrapper => "/usr/local/bin/ikiwiki-netsync-hook",
+ # wrappermode => "04755",
+ # # Enable mail notifications of commits.
+ # notify => 1,
+ #},
],
# Generate rss feeds for blogs?
</pre>


@ -0,0 +1,3 @@
When searching in ikiwiki, sometimes discussion pages turn up. However, they are only titled "discussion".
In order to know what topic they are discussing, you have to look at the URL. Shouldn't they be titled
"foo/discussion" or "discussion of foo" or something? Thanks, --[[perolofsson]]


@ -1,10 +1,12 @@
[[done]] (in this branch); fixed by removing email notification support!
I was surprised to receive two mails from ikiwiki about one web edit: I was surprised to receive two mails from ikiwiki about one web edit:
1 F Oct 30 To joey+ikiwiki update of ikiwiki's plugins/contrib/gallery.mdwn by http://arpitjain11.myopenid.com/ 1 F Oct 30 To joey+ikiwiki update of ikiwiki's plugins/contrib/gallery.mdwn by http://arpitjain11.myopenid.com/
1 F Oct 30 To joey+ikiwiki update of ikiwiki's plugins/contrib/gallery.mdwn by http://arpitjain11.myopenid.com/ 1 F Oct 30 To joey+ikiwiki update of ikiwiki's plugins/contrib/gallery.mdwn by http://arpitjain11.myopenid.com/
The first of these had the correct diff for the changes made by the web The first of these had the correct diff for the changes made by the web
edit (00259020061577316895370ee04cf00b634db98a). edit (00259020061577316895370ee04cf00b634db98a).
But the second had a diff for modifications I made to ikiwiki code But the second had a diff for modifications I made to ikiwiki code
around the same time (2a6e353c205a6c2c8b8e2eaf85fe9c585c1af0cd). around the same time (2a6e353c205a6c2c8b8e2eaf85fe9c585c1af0cd).
@ -38,3 +40,18 @@ diff for the first commit.
Ikiwiki's own locking prevents this from happening if both commits are web Ikiwiki's own locking prevents this from happening if both commits are web
edits. At least one of the two commits has to be a non-web commit. edits. At least one of the two commits has to be a non-web commit.
----
A related problem is that if two commits are made separately but then
pushed in together, the commit code only looks at the HEAD commit, which
is the second one. No notification is sent for the first.
----
Based on all of these problems with using the post-update hook, ikiwiki
should be changed to use the post-receive hook, which provides enough
information to avoid the assumptions that led to these problems.
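Such a hook might look something like this (an untested sketch; git feeds
a post-receive hook one "oldrev newrev refname" line per updated ref on
stdin, and a real hook would also need to handle ref creation and
deletion):

    #!/usr/bin/perl
    # Sketch of a post-receive driven updater: walk every commit in each
    # pushed range instead of assuming HEAD is the only new commit.
    use warnings;
    use strict;

    while (my $line = <STDIN>) {
        chomp $line;
        my ($oldrev, $newrev, $ref) = split ' ', $line;
        next unless $ref eq "refs/heads/master";
        # List the pushed commits oldest first, so they are processed
        # in the order they were made.
        my @revs = reverse split /\n/, `git rev-list $oldrev..$newrev`;
        foreach my $rev (@revs) {
            print "processing $rev\n"; # e.g. update the wiki for this commit
        }
    }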
Transitioning existing wikis to using a new hook will be interesting. Also,
this hook is only present in git >= 1.5.0.7.
--[[Joey]]


@ -12,7 +12,7 @@ which is exactly the same regular expression drawn out as a constant. It appear
> hole. It seems more likely that perl continues to have taint flag bugs > hole. It seems more likely that perl continues to have taint flag bugs
> even in 5.8. See also: [[prune_causing_taint_mode_failures]], > even in 5.8. See also: [[prune_causing_taint_mode_failures]],
> [[Insecure_dependency_in_mkdir]], > [[Insecure_dependency_in_mkdir]],
> [[Insecure_dependency_in_eval_while_running_with_-T_switch.mdwn]], > [[Insecure_dependency_in_eval_while_running_with_-T_switch]],
> and especially [[debbug 411786]] > and especially [[debbug 411786]]
> >
> The last of those was the last straw for me, and I disabled taint > The last of those was the last straw for me, and I disabled taint


@ -1,3 +1,5 @@
While ikiwiki is primarily a wiki compiler, which generates static html pages, it does use CGI for two important wiki features, online page editing and the [[RecentChanges]] display. While ikiwiki is primarily a wiki compiler, which generates static html
pages, it does use CGI for online page editing.
To enable CGI, you need to create and install an ikiwiki.cgi wrapper. [[Setup]] explains how to do this. To enable CGI, you need to create and install an ikiwiki.cgi wrapper.
[[Setup]] explains how to do this.


@ -32,20 +32,4 @@ Also. FreeBSD has ikiwiki in its
## revision control ## revision control
### git Ikiwiki is developed in a [[git_repository|git]].
Ikiwiki is developed in a git repository and can be checked out by
either of these commands:
git clone git://git.ikiwiki.info/
git clone http://git.ikiwiki.info/ikiwiki.git/
The gitweb is [here](http://git.ikiwiki.info/?p=ikiwiki).
Commits to this git repository are fed into [CIA](http://cia.vc), and can
be browsed, subscribed to etc on its
[project page](http://cia.vc/stats/project/ikiwiki).
### subversion
Ikiwiki's subversion repository is no longer available, use git instead.


@ -127,7 +127,7 @@ with that there's no new commit marker syntax to learn.
Nearly the definition of a wiki, although perhaps ikiwiki challenges how Nearly the definition of a wiki, although perhaps ikiwiki challenges how
much of that web gunk a wiki really needs. These features are optional much of that web gunk a wiki really needs. These features are optional
and can be enabled by enabling [[CGI]]. and can be enabled by enabling [[CGI]] and a [[Revision_Control_System|rcs]].
### User registration ### User registration
@ -161,11 +161,6 @@ Well, sorta. Rather than implementing YA history browser, it can link to
ikiwiki can use the [[HyperEstraier]] search engine to add powerful ikiwiki can use the [[HyperEstraier]] search engine to add powerful
full text search capabilities to your wiki. full text search capabilities to your wiki.
### Commit mails
ikiwiki can be configured to send you commit mails with diffs of changes
to selected pages.
### [[w3mmode]] ### [[w3mmode]]
Can be set up so that w3m can be used to browse a wiki and edit pages Can be set up so that w3m can be used to browse a wiki and edit pages

doc/git.mdwn

@ -0,0 +1,33 @@
Ikiwiki is developed in a git repository and can be checked out
like this:
git clone git://git.ikiwiki.info/
Or like this if your firewall only passes http traffic (slow):
git clone http://git.ikiwiki.info/ikiwiki.git/
The gitweb is [here](http://git.ikiwiki.info/?p=ikiwiki).
Commits to this git repository are fed into [CIA](http://cia.vc), and can
be browsed, subscribed to etc on its
[project page](http://cia.vc/stats/project/ikiwiki).
## branches
You are of course free to set up your own ikiwiki git repository with your
own [[patches|patch]].
Some of the branches included in the main repository include:
* `gallery` contains the [[todo/Gallery]] plugin. It's not yet merged
due to license issues.
* `html` is an unfinished attempt at making ikiwiki output HTML 4.01
instead of xhtml.
* `prefix-directives` changes the preprocessor directive syntax. It
is approximately one failing test case away from merging.
* `wikiwyg` adds [[todo/wikiwyg]] support. It is unmerged pending some
changes.
* `pristine-tar` contains deltas that
[pristine-tar](http://kitenet.net/~joey/code/pristine-tar)
can use to recreate released tarballs of ikiwiki.


@ -23,7 +23,6 @@ use IkiWiki::Setup::Standard {
#rcs => "svn", #rcs => "svn",
#historyurl => "http://svn.example.org/trunk/[[file]]", #historyurl => "http://svn.example.org/trunk/[[file]]",
#diffurl => "http://svn.example.org/trunk/[[file]]?root=wiki&amp;r1=[[r1]]&amp;r2=[[r2]]", #diffurl => "http://svn.example.org/trunk/[[file]]?root=wiki&amp;r1=[[r1]]&amp;r2=[[r2]]",
#svnrepo => "/svn/wiki",
#svnpath => "trunk", #svnpath => "trunk",
# Git stuff. # Git stuff.
@ -43,9 +42,16 @@ use IkiWiki::Setup::Standard {
#historyurl => "http://localhost:8000/log/tip/[[file]]", # hg serve'd local repository #historyurl => "http://localhost:8000/log/tip/[[file]]", # hg serve'd local repository
#diffurl => "http://localhost:8000/?fd=[[r2]];file=[[file]]", #diffurl => "http://localhost:8000/?fd=[[r2]];file=[[file]]",
# Bazaar stuff.
#rcs => "bzr",
#historyurl => "",
#diffurl => "http://example.com/revision?start_revid=[[r2]]#[[file]]-s", # using loggerhead
# Monotone stuff # Monotone stuff
#rcs => "monotone", #rcs => "monotone",
#mtnkey => "web\@machine.company.com", #mtnkey => "web\@machine.company.com",
#historyurl => "http://viewmtn.example.com/",
#diffurl => "http://viewmtn.example.com/revision/diff/[[r1]]/with/[[r2]]/[[file]]",
# Set if you want the wiki to sync on update and commit. # Set if you want the wiki to sync on update and commit.
#mtnsync => 0, #mtnsync => 0,
# The path to your workspace (defaults to the srcdir itself) # The path to your workspace (defaults to the srcdir itself)
@ -72,8 +78,6 @@ use IkiWiki::Setup::Standard {
# # what you want. # # what you want.
# wrapper => "/svn/wikirepo/hooks/post-commit", # wrapper => "/svn/wikirepo/hooks/post-commit",
# wrappermode => "04755", # wrappermode => "04755",
# # Enable mail notifications of commits.
# notify => 1,
# # Log to syslog since svn post-commit hooks # # Log to syslog since svn post-commit hooks
# # hide output and errors. # # hide output and errors.
# syslog => 1, # syslog => 1,
@ -85,15 +89,16 @@ use IkiWiki::Setup::Standard {
# # what you want. # # what you want.
# wrapper => "/git/wiki.git/hooks/post-update", # wrapper => "/git/wiki.git/hooks/post-update",
# wrappermode => "06755", # wrappermode => "06755",
# # Enable mail notifications of commits.
# notify => 1,
#}, #},
], ],
# Generate rss feeds for blogs? # Default to generating rss feeds for blogs?
rss => 1, #rss => 1,
# Generate atom feeds for blogs? # Default to generating atom feeds for blogs?
atom => 1, #atom => 1,
# Allow generating feeds even if not generated by default?
#allowrss => 1,
#allowatom => 1,
# Urls to ping with XML-RPC when rss feeds are updated # Urls to ping with XML-RPC when rss feeds are updated
#pingurl => [qw{http://rpc.technorati.com/rpc/ping}], #pingurl => [qw{http://rpc.technorati.com/rpc/ping}],
# Include discussion links on all pages? # Include discussion links on all pages?
@ -120,6 +125,9 @@ use IkiWiki::Setup::Standard {
#account_creation_password => "example", #account_creation_password => "example",
# Uncomment to force ikiwiki to run with a particular umask. # Uncomment to force ikiwiki to run with a particular umask.
#umask => 022, #umask => 022,
# Default settings for the recentchanges page.
#recentchangespage => "recentchanges",
#recentchangesnum => 100,
# To add plugins, list them here. # To add plugins, list them here.
#add_plugins => [qw{goodstuff search wikitext camelcase #add_plugins => [qw{goodstuff search wikitext camelcase


@ -52,10 +52,12 @@ directive:
Set to 0 to show all matching pages. Set to 0 to show all matching pages.
* `skip` - Specify a number of pages to skip displaying. Can be useful * `skip` - Specify a number of pages to skip displaying. Can be useful
to produce a feed that only shows archived pages. to produce a feed that only shows archived pages.
* `rss` - controls generation of an rss feed. On by default if the wiki is * `rss` - controls generation of an rss feed. If the wiki is configured to
configured to use rss feeds, set to "no" to disable. generate rss feeds by default, set to "no" to disable. If the wiki is
* `atom` - controls generation of an atom feed. On by default if the wiki is configured to `allowrss`, set to "yes" to enable.
configured to use atom feeds, set to "no" to disable. * `atom` - controls generation of an atom feed. If the wiki is configured to
generate atom feeds by default, set to "no" to disable. If the wiki is
configured to `allowatom`, set to "yes" to enable.
* `feeds` - controls generation of all types of feeds. Set to "no" to * `feeds` - controls generation of all types of feeds. Set to "no" to
disable generating any feeds. disable generating any feeds.
* `postform` - Set to "yes" to enable a form to post new pages to a [[blog]]. * `postform` - Set to "yes" to enable a form to post new pages to a [[blog]].
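For example, in a wiki configured with `allowrss => 1` (a sketch; the blog
location is made up), a single blog page could turn its feed on explicitly:

    [[inline pages="blog/* and !*/Discussion" show=10 rss=yes]]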

View File

@ -33,8 +33,13 @@ functions:
was created was created
* "`created_before(page)`" - match only pages created before the given page * "`created_before(page)`" - match only pages created before the given page
was created was created
* "`user(name)`" - only available in page subscription preferences, match * "`glob(someglob)`" - match pages that match the given glob. Just writing
only changes made by this user the glob by itself is actually a shorthand for this function.
* "`internal(glob)`" - like `glob()`, but matches even internal-use
pages that globs do not usually match.
* "`title(glob)`", "`author(glob)`", "`authorurl(glob)`",
"`license(glob)`", "`copyright(glob)`" - match pages that have the given
metadata, matching the specified glob.
For example, to match all pages in a blog that link to the page about music For example, to match all pages in a blog that link to the page about music
and were written in 2005: and were written in 2005:
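One way to write that (a sketch; creation_year() is one of ikiwiki's
creation-time matching functions):

    blog/* and link(music) and creation_year(2005)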


@ -14,7 +14,8 @@ with ikiwiki, and some [[tips]].
All wikis are supposed to have a [[SandBox]], so this one does too. All wikis are supposed to have a [[SandBox]], so this one does too.
This site generally runs the latest release of ikiwiki; currently, it runs ikiwiki [[version ]]. This site generally runs the latest release of ikiwiki; currently, it runs
ikiwiki [[version ]].
## developer resources ## developer resources


@ -405,6 +405,9 @@ I'm playing around with various ways that I can use subversion with ikiwiki.
> away without running the post-commit wrapper on commit, and all you lose > away without running the post-commit wrapper on commit, and all you lose
> is the ability to send commit notification emails. > is the ability to send commit notification emails.
> (And now that [[recentchanges]] includes rss, you can just subscribe to
> that, no need to worry about commit notification emails anymore.)
* Is it possible / sensible to have ikiwiki share a subversion repository with other data (either completely unrelated files or another ikiwiki instance)? This works in part but again the post-commit hook seems problematic. * Is it possible / sensible to have ikiwiki share a subversion repository with other data (either completely unrelated files or another ikiwiki instance)? This works in part but again the post-commit hook seems problematic.
--[[AdamShand]] --[[AdamShand]]


@ -0,0 +1,14 @@
ikiwiki.info has upgraded to the not yet released ikiwiki 2.30. This
version of ikiwiki drops support for subscribing to commit mail
notifications for pages. The idea is that you can subscribe to the new
[[RecentChanges]] feed instead. (Or create your own custom feed of only the
changes you're interested in, and subscribe to that.)
So if you were subscribed to mail notifications on here, you'll need to
change how you keep track of changes. Please let me know if there are any
missing features in the [[RecentChanges]] feeds.
Statically building the RecentChanges also has performance implications,
I'll keep an eye on [[server_speed]]..
--[[Joey]]


@ -1,15 +1,15 @@
Quick poll: Do you feel that ikiwiki is fast enough on this server, or Quick poll: Do you feel that ikiwiki is fast enough on this server, or
should I move it to my much beefier auxillary server? should I move it to my much beefier auxiliary server?
[[poll 38 "It's fast enough" 4 "It's too slow!" 4 "No opinion"]] [[poll 40 "It's fast enough" 5 "It's too slow!" 4 "No opinion"]]
If you have specifics on performance issues, you might mention them on the If you have specifics on performance issues, you might mention them on the
[[discussion]] page. [[discussion]] page.
The current server is a single processor 2.8 ghz Sempron machine shared The current server is a single processor 2.8 ghz Sempron machine shared
amoung 4 other xen instances, and often among 4 other xen instances, and often
[heavily loaded](http://bluebird.kitenet.net/munin/kitenet.net/wren.kitenet.net-load.html) [heavily loaded](http://bluebird.kitenet.net/munin/kitenet.net/wren.kitenet.net-load.html)
by extraneous stuff like spamassassin and compiles. The auxillary server is by extraneous stuff like spamassassin and compiles. The auxiliary server is
a dual processor, dual core 2 ghz Opteron shared with other xen instances a dual processor, dual core 2 ghz Opteron shared with other xen instances
(exact number not available from provider), but with (exact number not available from provider), but with
[little other load](http://bluebird.kitenet.net/munin/kitenet.net/bluebird.kitenet.net-load.html). [little other load](http://bluebird.kitenet.net/munin/kitenet.net/bluebird.kitenet.net-load.html).


@ -1,26 +0,0 @@
ikiwiki 2.10 released with [[toggle text="these changes"]]
[[toggleable text="""
* Tidy ctime debug output for git.
* French translation update. Closes: #[445923](http://bugs.debian.org/445923)
* Fix --get-ctime with git, needed to remove srcdir from filename.
* In the cgi edit path, reload the index file before rendering. A bug
showed up where a web edit that added a page caused a near-concurrent
web edit to fail in will\_render. While it would be hard to reproduce this,
my analysis is that the failing cgi started first, loaded the index file
(prior to locking) then the other cgi created the new page and rendered
it, and then the failing cgi choked on the new file when \_it\_ tried to
render it. Ensuring that the index file is loaded after taking the lock
will avoid this bug.
* Fix strange stderr-hiding code in the git module, allow error messages
to be passed on to stderr. Also fixes a potential bug, since git error
meesages were treated as if they came from git stdout.
* Add a "createlink" class attribute to the span for wikilinks pointing
to not-yet-existing pages. I don't have a useful style defined for that
though.
* Rewritten rst plugin by madduck is a python program that communicates with
ikiwiki via XML RPC. This should be much faster than the old plugin that
had to fork python for every rst page render. Note that if you use
the rst plugin, you now need to have the RPC::XML perl module installed.
* Danish translation from Jonas Smedegaard. Closes: #[446952](http://bugs.debian.org/446952)
* Support git authors of the form "joey &lt;joey&gt;", which is common when
importing from a svn repo."""]]


@ -1,23 +0,0 @@
ikiwiki 2.11 released with [[toggle text="these changes"]]
[[toggleable text="""
* Correct a pair of logic errors that triggered if svnpath was empty.
* If gitorigin\_branch is set to the empty string, don't push or pull.
Useful for laptop clones of remote wikis.
* Add a calendar plugin, contributed by Manoj Srivastava.
* Reformat calendar plugin to ikiwiki conventions.
* The calendar plugin made *every* page depend on every other page,
which seemed a wee tiny little bit overkill. Fixed the dependency
calculations (I hope.)
* Removed manual ctime statting code, and just have the calendar plugin use
%pagectime.
* Ikiwiki has moved into a git repository.
* postsparkline: Avoid a confusing error message if no pages match
and instead show an empty graph.
* Add handling of feeds for nested inlines, as well as support for a
single page containing two different feeds.
* Also fixed some places in inline that failed to use destpage correctly.
* ikiwiki-mass-rebuild: Patch from HenrikBrixAndersen to fix order
of permissions dropping code to work on FreeBSD.
* ikiwiki-mass-rebuild: Don't clear PATH from the environment.
* Run git-commit -q (though it doesn't do much good due to its stderr
abuse)."""]]


@ -1,31 +0,0 @@
ikiwiki 2.12 released with [[toggle text="these changes"]]
[[toggleable text="""
* [ Joey Hess ]
* Fix some issues with toggles in preview mode.
* Fix an aggregate plugin expiry bug. Over time, it's possible for the same
page name to be expired and reused for several distinct guids. When this
happened, the expiry code counted each past guid that had used that page
name as a currently existing page, and thus expired too many pages.
* Avoid a race in the git rcs\_commit function, by not assuming HEAD will
stay the same for the duration of the function.
* Avoid using commands like git-diff and instead use "git diff".
In some configurations, only the main git command is in the path.
* Improve the RecentChanges display for git merges, by passing -c instead
of -m to git-log, and by skipping display of commits that change no
pages.
* Don't truncate git commit messages to the first line in RecentChanges,
show the full message.
* map: Recent changes caused unnecessary ul's to be inserted for items
that were all at the same level, fix. Closes: #[449285](http://bugs.debian.org/449285)
* [ Josh Triplett ]
* Fix table plugin to not generate an unbalanced tbody tag with header=no
* Add xmlns attribute on html element in templates; pages can now
validate.
* [ Joey Hess ]
* In the example setup file, use mode 6755 for the git post-update hook.
It needs to be setgid if the master repo is a shared repository, so
that pushes into the working copy repository happen as the same group,
avoiding permissions problems.
* The first git commit legitimately has no parents. Avoid recentchanges
spewing uninitialised value warnings and debug messages about it.
Dummying up a parent of 0000000 allows gitweb to work too."""]]


@ -1,26 +0,0 @@
ikiwiki 2.13 released with [[toggle text="these changes"]]
[[toggleable text="""
* Add liblwpx-paranoidagent-perl to recommends of Debian package,
this is needed to do OpenID really securely.
* ikiwiki.setup is licensed same as the basewiki, not GPLed.
* inline: Add timeformat parameter to control how the ctime of
inlined pages is displayed. Closes: #[451019](http://bugs.debian.org/451019)
* Add wrappergroup config option, which can be used to cause wrappers
to be created owned by some group other than the default. Useful
when there's a shared repository with access controlled by a group,
to let ikiwiki run setgid to that group.
* ikiwiki-mass-rebuild: Run build with the user in all their groups.
* Correct markdown in example index page in setup. Closes: #[451469](http://bugs.debian.org/451469)
* Better error message when a setup file has a syntax error.
Closes: #[451666](http://bugs.debian.org/451666)
* Fix mercurial historyurl in example setup file.
* More compact output for the brokenlinks plugin.
* Allow trailing slashes after page names in wikilinks.
* Don't consider links to anchors on the same page to be self links.
Patch by Daniel Burrows. Closes: #[451729](http://bugs.debian.org/451729)
* When usedirs is disabled, link direct to index.html files, not to
directories, to improve browsing of file:// urls.
Patch by Daniel Burrows. Closes: #[451728](http://bugs.debian.org/451728)
* Allow html5 video and audio tags and their attributes in the htmlscrubber.
* toc: Handle html elements embedded inside a header, rather than
stopping collecting the header text at the first element."""]]


@ -1,20 +0,0 @@
This is a security fix release, upgrade is recommended.
News for ikiwiki 2.14:
This version of ikiwiki is more picky about symlinks in the path leading
to the srcdir, and will refuse to use a srcdir specified by such a path.
This was necessary to avoid some potential exploits, but could potentially
break (semi-)working wikis. If your wiki has a srcdir path containing a
symlink, you should change it to use a path that does not.
ikiwiki 2.14 released with [[toggle text="these changes"]]
[[toggleable text="""
* Let CC be used to control what compiler is used to build wrappers.
* Use 'cc' instead of gcc as the default compiler.
* Security fix: Ensure that there are no symlinks anywhere in the path
to the top of the srcdir. In certain unusual configurations, an attacker
who could commit to one of the parent directories of the srcdir could
use a symlink attack to cause ikiwiki to publish files elsewhere in the
filesystem. More details [[here|security#index29h2]]
"""]]


@ -1,13 +0,0 @@
ikiwiki 2.15 released with [[toggle text="these changes"]]
[[toggleable text="""
* Add a new ikiwiki-makerepo program, that automates setting up a repo
and importing existing content for svn, git, and mercurial. This makes
the setup process much simpler.
* Reorganised git documentation.
* Actually install the ikiwiki-update-wikilist program.
* Improve workaround for perl bug #376329. Rather than double-encoding,
which has been reported to cause encoding problems (though I haven't
reproduced them), just catch a failure of markdown, and retry.
(The crazy perl bug magically disappears on the retry.)
Closes: #[449379](http://bugs.debian.org/449379)
* Add umask configuration option. Closes: #[443329](http://bugs.debian.org/443329)"""]]


@ -1,80 +0,0 @@
News for ikiwiki 2.16:
Many of the pages in ikiwiki's basewiki have been moved and renamed in this
release, to avoid the basewiki including pages with names like "blog".
Redirection pages have been left behind for these moved pages temporarily,
and will be removed later.
The meta plugin no longer supports setting internal or external links
with "meta link". Instead, use "meta openid" for openid links, and use tags
for in-wiki invisible links between pages.
If you use the calendar plugin, ikiwiki is now smarter and your nightly
cron job to update the wiki doesn't need to rebuild everything. Just pass
--refresh to ikiwiki in the cron job and it will update only pages that
contain out of date calendars.
ikiwiki 2.16 released with [[toggle text="these changes"]]
[[toggleable text="""
* Major basewiki reorganisation. Most pages moved into ikiwiki/ subdirectory
to avoid polluting the main namespace, and some were further renamed.
* meta: Add redir support, based on a patch by Thomas Schwinge.
* Redirs added for moved basewiki pages. These will be removed in a future
release.
* Remove .otl file from sandbox to avoid build ugliness. Closes: #[454181](http://bugs.debian.org/454181)
* Finally implemented a simple per-page data storage mechanism for plugins,
via the %pagestate hash.
* Use pagestate in meta to detect potential redir loops.
* Added a version plugin that saves state about what's using it, to force
pages to rebuild when ikiwiki's version changes.
* The calendar plugin stores state about when it needs to be updated,
and forces rebuilds of the pages that contain calendars. So
running ikiwiki --refresh at midnight is now enough, no need for a full
wiki rebuild each midnight.
* calendar: Work around block html parsing bug in markdown 1.0.1 by
enclosing the calendar in an extra div.
* Fix file pruning code to work if ikiwiki is run with "." as the srcdir.
* Add an edittemplate plugin, allowing registering template pages, that
provide default content for new pages created using the web frontend.
* Change formbuilder hook to not be responsible for displaying a form,
so that more than one plugin can use this hook.
I believe this is a safe change, since only passwordauth uses this hook.
(If some other plugin already used it, it would have broken passwordauth!)
* Ensure that web edited pages always end in a newline.
* Avoid unnecessary stat calls to get mtime when rendering pages, use
cached value.
* Preserve input file modification times in output files.
* Allow dashes in preprocessor directive commands, and shortcuts.
* Htmlize parameters passed to the template preprocessor directive before
inserting them into the html template. This ensures that markdown
acts on them, even if the value is expanded inside a block-level html
element in the html template. Closes: #[454058](http://bugs.debian.org/454058)
* Use a div in the note template rather than a span.
* shortcut: Expand %S to the raw input text, not url-encoded.
* Don't increment feed numbers when an inline has no feeds. (Nis Martensen)
* Allow editing a page and deleting all content, while still disallowing
creating a new page that's entirely empty.
* meta: Drop support for "meta link", since supporting this for internal
links required meta to be run during scan, which complicated its data
storage, since it had to clear data stored during the scan pass to avoid
duplicating it during the normal preprocessing pass.
* If you used "meta link", you should switch to either "meta openid" (for
openid delegations), or tags (for internal, invisible links). I assume
that nobody really used "meta link" for external, non-openid links, since
the htmlscrubber ate those. (Tell me differently and I'll consider bringing
back that support.)
* meta: Improved data storage.
* meta: Drop the hackish filter hook that was used to clear
stored data before preprocessing, this hack was ugly, and broken (cf:
liw's disappearing openids).
* aggregate: Convert filter hook to a needsbuild hook.
* map: Don't inline images.
* brokenlinks: Don't list the same link multiple times. (%links might
contain multiple copies of the same link)
* git: Correct display of multiline commit messages in recentchanges.
* Re-organise dependencies and recommends now that recommends are installed
by default.
* Don't refuse to render files with ".." in their name. (Anchor the regexp.)
* Work around perl taint checking bug #411786, where perl sometimes randomly
sets the taint flag on untainted variables, by disabling taint checking
in the deb. This sucks."""]]

View File

@ -1,16 +0,0 @@
ikiwiki 2.17 released with [[toggle text="these changes"]]
[[toggleable text="""
* Improved parentlinks special case for index pages.
* redir: Support for specifying anchors.
* img: Avoid nesting images when linking to another image. Closes: #[457780](http://bugs.debian.org/457780)
* img: Allow the link parameter to point to an exterior url.
* conditional: Improve regexp testing for simple uses of pagespecs
that match only the page using the directive, adding 'included()'
and supporting negated pagespecs and added whitespace.
* map: Fix handling of common prefix to handle the case where it's
in a subdirectory. Patch by Larry Clapp.
* aggregate: Fix stupid mistake introduced when converting it to use
the needsbuild hook. This resulted in feeds not being removed when pages
were updated, and feeds sometimes being forgotten about.
* aggregate: Avoid uninitialised value warning when removing a feed that
has an expired guid."""]]

View File

@ -1,17 +0,0 @@
ikiwiki 2.18 released with [[toggle text="these changes"]]
[[toggleable text="""
* Split error messages for failures to drop real uid and gid.
* Retry dropping uid and gid, possibly this will help with the "Resource
temporarily unavailable" failures I've experienced under xen.
* Stop testing Encode::is\_utf8 in decode\_form\_utf8: That doesn't work.
* decode\_form\_utf8 only fixed the utf-8 encoding for fields that were
registered at the time it was called, which was before the
formbuilder\_setup hook. Fields added by the hook didn't get decoded.
But it can't be put after the hook either, since plugins using the hook
need to be able to use form values. To fix this dilemma, it's been changed
to a decode\_cgi\_utf8, which is called on the cgi query object, before the
form is set up, and decodes *all* cgi parameters.
* aggregate: Only save state if it was already loaded. This didn't use to
matter, but after recent changes, state is not always loaded, and saving
would kill it.
* table: Fix dependency tracking for external data files. Closes: #[458387](http://bugs.debian.org/458387)"""]]

View File

@ -1,17 +0,0 @@
ikiwiki 2.19 released with [[toggle text="these changes"]]
[[toggleable text="""
* Only try postsignin if no other action matched. Fixes a bug where the
user goes back from the signin screen and does something else.
* Improve behavior when trying to sign in with no cookies.
* Improved the canedit hook interface, allowing a callback function to be
returned (and not run in some cases) rather than the plugins directly
forcing a user to log in.
* opendiscussion: allow editing of the toplevel discussion page,
and, indirectly, allow creating new discussion pages.
* Add a prereq on Data::Dumper 2.11 or better, needed to dump q// objects.
* htmlscrubber: Further work around #365971 by adding tags for 'br/', 'hr/'
and 'p/'.
* aggregate: Include copyright statements from rss feed as meta copyright
directives.
* aggregate: Yet another state saving fix (sigh).
* aggregate: Add hack to support feeds with invalidly escaped html entities."""]]

View File

@ -0,0 +1,60 @@
News for ikiwiki 2.30:
Ever feel that ikiwiki's handling of RecentChanges wasn't truly in the
spirit of a wiki compiler? Well, that's changed. The RecentChanges page is
now a static page, not a CGI. Users can subscribe to its rss/atom feeds.
Custom RecentChanges pages can be easily set up that display only changes
to a subset of pages, or only changes by a subset of users.
All wikis need to be rebuilt on upgrade to this version. If you listed your
wiki in /etc/ikiwiki/wikilist this will be done automatically when the
Debian package is upgraded. Or use ikiwiki-mass-rebuild to force a rebuild.
With this excellent new RecentChanges support, the mail notification system
is showing its age (and known to be variously buggy and underimplemented for
various VCSes), and so ikiwiki's support for sending commit mails is REMOVED
from this version. If you were subscribed to commit mails, you should be
able to accomplish the same thing by subscribing to a RecentChanges feed.
The "svnrepo" and "notify" fields in setup files are no longer used, and
silently ignored. You may want to remove them from your setup file.
ikiwiki 2.30 released with [[toggle text="these changes"]]
[[toggleable text="""
* [ Joey Hess ]
* Old versions of git-init don't support --git-dir or GIT\_DIR with
--bare. Change ikiwiki-makerepo to use a method that should work with
those older versions too.
* aggregate: Don't let feeds set creation times for pages in the future.
* Add full parser for git diff-tree output (Brian Downing)
* aggregate: Fork a child process to handle the aggregation. This simplifies
the code, since that process can change internal state as needed, and
it will automatically be cleaned up for the parent process, which proceeds
to render the changes.
* [ Josh Triplett ]
* Add trailing comma to commented-out umask in sample ikiwiki.setup, so
that uncommenting it does not break the setup file.
* [ Joey Hess ]
* inline: The template can check for FIRST and LAST, which will be
set for the first and last inlined page. Useful for templates that build
tables and the like.
* prettydate,ddate: Don't ignore time formats passed to displaytime
function.
* Pages with extensions starting with "\_" are internal-use, and will
not be rendered or web-edited, or matched by normal pagespecs.
* Add "internal()" pagespec that matches internal-use pages.
* RecentChanges is now a static html page, that's updated whenever a commit
is made to the wiki. It's built as a blog using inline, so it can have
an rss feed that users can subscribe to.
* Removed support for sending commit notification mails. Along with it went
the svnrepo and notify settings, though both will be ignored if left in
setup files. Also gone with it is the "user()" pagespec.
* Add refresh hook.
* meta: Add pagespec functions to match against title, author, authorurl,
license, and copyright. This can be used to create custom RecentChanges.
* meta: To support the pagespec functions, metadata about pages has to be
retained as pagestate.
* Fix encoding bug when pagestate values contained spaces.
* Add support for bzr, written by Jelmer Vernooij. Thanks also to bma for
his independent work on bzr support.
* Copyright file updates."""]]

View File

@ -1,14 +0,0 @@
ikiwiki 2.8 released with [[toggle text="these changes"]]
[[toggleable text="""
* Redid the debian/copyright file, using the proposed new copyright file
format. Included many details not previously listed in the old file.
* inline: add feedonly option, set feedonly=yes to get only the feed button
but not inline the pages.
* meta: Support license and copyright information. The information will
be shown in the page footer. HTML will also be inserted that should
support the rel=license microformat as well as the HTML spec's
rel=copyright.
* table plugin: Actually specify the delimiter when parsing CSV.
* table plugin: The previous version broke WikiLinks inside quoted values.
Fix this by linkifying CSV data after parsing it, while DSV data is still
linkified before parsing."""]]

View File

@ -1,37 +0,0 @@
News for ikiwiki 2.9:
Since ikiwiki 2.0 was released, some limitations have been added to what
ikiwiki considers a WikiLink. In short, if there are any spaces in between
the brackets, ikiwiki no longer considers it to be a WikiLink. If your wiki
contains things meant to be WikiLinks that contain spaces, you will need to
fix them, by replacing the spaces with underscores.
WikiLinks have always been documented to not contain spaces, but bugs in
ikiwiki made it treat some text with spaces as WikiLinks. Most of these
bugs were fixed in version 2.2, and a final bug was fixed in this 2.9
release. These fixes are necessary to avoid ambiguity between
WikiLinks and PreProcessorDirectives. Apologies for any inconvenience
these bugs (and their fixes) may have caused.
ikiwiki 2.9 released with [[toggle text="these changes"]]
[[toggleable text="""
* Fix copyright and licence styling.
* tag: Add taglink preprocessor directive, supporting visible tag links.
Closes: #[443344](http://bugs.debian.org/443344)
* map: Fully specify paths to links to avoid issues when the bestlink
didn't point to the correct page.
* map: Render pages on the way to subpages whose parent pages
are not included in the map. Include special styling for such pages.
* map: Remove common prefixes and don't over-indent.
* Add class option to htmllink().
* img: Allow link=somepage to cause the image to link to a given page.
Slight overriding of link, as link=no is still used to disable the linking
entirely. Unless you have a page named "no"..
* Save index after previewing page edit, since even previewing can create
files in some situations, and this is appropriate in some cases, such as
the teximg plugin's error log file.
Such files will be automatically cleaned up at an appropriate later time.
* Don't allow whitespace in link text in a wikilink. This was still
allowed by the regexp in one case though not documented to work, and
was ambiguous with preprocessor directives.
* camelcase: Tighten regexp to avoid false positives. WikiWords are only
linkified now if they are preceded by whitespace."""]]

View File

@ -4,5 +4,8 @@ to duplicate work without coordination, here's a queue of suggested patches.
If you post a patch to the [[todo]] or [[bugs]] list, or elsewhere,
once it's ready to be applied, add a 'patch' tag so it will show up here.
If your patch is non-trivial and might need several iterations to get
right, please consider publishing a [[git]] branch.
[[inline pages="(todo/* or bugs/*) and link(patch) and !link(bugs/done) and
!link(todo/done) and !*/Discussion" rootpage="todo" archive="yes"]]

View File

@ -10,4 +10,4 @@ pages to search for broken links, default is search them all.
If this plugin is turned on, here's a list of broken links on this wiki:
[[brokenlinks pages="* and !recentchanges"]]

View File

@ -15,5 +15,6 @@ orphans.
Here's a list of orphaned pages on this wiki:
[[orphans pages="* and !news/* and !todo/* and !bugs/* and !users/* and
!recentchanges and !examples/* and !tips/* and !sandbox/* and
!wikiicons/* and !plugins/*"]]
"""]]

View File

@ -10,5 +10,5 @@ pages to count, default is to count them all.
This plugin is included in ikiwiki, but is not enabled by default.
If it is turned on it can tell us that this wiki includes
[[pagecount pages="* and !recentchanges"]]
pages, of which [[pagecount pages="*/Discussion"]] are discussion pages.

View File

@ -0,0 +1,26 @@
[[template id=plugin name=recentchanges core=1 author="[[Joey]]"]]
This plugin examines the [[revision_control_system|rcs]] history and
generates a page describing each recent change made to the wiki. These
pages can be joined together with [[inline]] to generate the
[[RecentChanges]] page.
Typically only the RecentChanges page will use the pages generated by this
plugin, but you can use it elsewhere too if you like. It's used like this:
\[[inline pages="internal(recentchanges/change_*)"
template=recentchanges show=0]]
Here's an example of how to show only changes to "bugs/*".
This matches against the title of the change, which includes a list of
modified pages.
\[[inline pages="internal(recentchanges/change_*) and title(*bugs/*)"
template=recentchanges show=0]]
Here's an example of how to show only changes that Joey didn't make.
(Joey commits sometimes as user `joey`, and sometimes via openid.)
\[[inline pages="internal(recentchanges/change_*) and
!author(joey) and !author(http://joey.kitenet.net*)"
template=recentchanges show=0]]

View File

@ -0,0 +1,17 @@
Thanks for that one, again, it's great!
One minor thing I noticed, seen on <http://www.bddebian.com/~wiki/recent_changes/>:
The links to user pages of e.g. *MichaelBanck* or *GianlucaGuida* don't work, as they're
being linked to <http://www.bddebian.com/~wiki/user/MichaelBanck>, whereas it should be
<http://www.bddebian.com/~wiki/user/michaelbanck>.
> I've fixed this.. --[[Joey]]
Another one. If you change the *recentchangespage* configuration option, it
seems to me that the pages from the old hierarchy will not be removed from
the disk. But then, changing
this should be a rather uncommon thing.
--[[tschwinge]]
> And fixed this, by making it look at all *._change pages, not just
> those in a specific directory, when deciding which to expire. --[[Joey]]

View File

@ -82,11 +82,19 @@ configuration. It's called early in the startup process. The
function is passed no values. It's ok for the function to call
`error()` if something isn't configured right.
### refresh
hook(type => "refresh", id => "foo", call => \&refresh);
This hook is called just before ikiwiki scans the wiki for changed files.
It's useful for plugins that need to create or modify a source page. The
function is passed no values.
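For example, a plugin that maintains a generated source page could use it like this (a minimal sketch; the plugin id, page name, and content are invented):

    hook(type => "refresh", id => "myplugin", call => sub {
        # (Re)write a source page before ikiwiki scans for changed files.
        writefile("myplugin_status.mdwn", $config{srcdir},
            "This page is regenerated on every refresh.\n");
    });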
### needsbuild
hook(type => "needsbuild", id => "foo", call => \&needsbuild);
This allows a plugin to manipulate the list of files that need to be
built when the wiki is refreshed. The function is passed a reference to an
array of pages that will be rebuilt, and can modify the array, either
adding or removing files from it.
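A sketch of a needsbuild hook that forces one extra page to rebuild on every refresh (the plugin id and page name are invented):

    hook(type => "needsbuild", id => "myplugin", call => sub {
        my $needsbuild=shift;   # reference to the array of source files
        push @$needsbuild, "sitemap.mdwn"
            unless grep { $_ eq "sitemap.mdwn" } @$needsbuild;
    });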
@ -214,8 +222,11 @@ source files that were rendered.
Use this to hook into ikiwiki's cgi script. Each registered cgi hook is
called in turn, and passed a CGI object. The hook should examine the
parameters, and if it will handle this CGI request, output a page
(including the http headers) and terminate the program.
Note that cgi hooks are called as early as possible, before any ikiwiki
state is loaded, and with no session information.
### auth
@ -470,8 +481,13 @@ If the destination directory doesn't exist, it will first be created.
Given a page name and a destination file name (not including the base
destination directory), register that the page will result in that file
being rendered.
It's important to call this before writing to any file in the destination
directory, and it's important to call it consistently every time, even if
the file isn't really written this time -- unless you delete any old
version of the file. In particular, in preview mode, this should still be
called even if the file isn't going to be written to during the preview.
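For instance, a plugin that emits an extra file next to a page's html might do something like this (a minimal sketch; the file name, `$css`, and the `$preview` flag are invented):

    # $css holds the generated content (invented for this sketch)
    will_render($page, "$page/extra.css");
    writefile("$page/extra.css", $config{destdir}, $css)
        unless $preview;    # still registered, even when previewing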
Ikiwiki uses this information to automatically clean up rendered files when
the page that rendered them goes away or is changed to no longer render
@ -523,6 +539,19 @@ destination file, as registered by `will_render`.
Passed a page and an extension, returns the filename that page will be
rendered to.
## Internal use pages
Sometimes it's useful to put pages in the wiki without the overhead of
having them be rendered to individual html files. Such internal use pages
are collected together to form the RecentChanges page, for example.
To make an internal use page, register a filename extension that starts
with "_". Internal use pages cannot be edited with the web interface,
generally shouldn't contain wikilinks or preprocessor directives (use
either on them with extreme caution), and are not matched by regular
PageSpecs glob patterns, but instead only by a special `internal()`
[[ikiwiki/PageSpec]].
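For example, the recentchanges support claims the `._change` extension by registering an htmlize hook for it; a plugin can do the same for its own internal pages (sketch):

    # Files ending in ._change are loaded as pages, but are never
    # rendered to standalone html or offered for web editing.
    hook(type => "htmlize", id => "_change", call => \&htmlize);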
## RCS plugins
ikiwiki's support for [[revision_control_systems|rcs]] also uses pluggable

View File

@ -1,7 +1,8 @@
If your wiki is kept in [[revision_control|rcs]], a post-commit hook is run
every time you commit a change to your repository.
ikiwiki generates the "post-commit hook" once you've uncommented the relevant
section (under wrappers) in the ikiwiki.setup.
The generated wrapper is a C program that is designed to safely be made
suid if necessary. It's hardcoded to run ikiwiki with the settings
@ -14,4 +15,5 @@ your wiki checkout and html directory. If so, you can safely make
the wrapper suid to a user who can write there (*not* to root!). You might
want to read [[Security]] first.
[[Setup]] explains setting this up from the start; see [[rcs/details]] to
know more.

doc/rcs/bzr.mdwn
View File

@ -0,0 +1,8 @@
[Bazaar](http://bazaar-vcs.org/) is a distributed revision control
system developed by Canonical Ltd. Ikiwiki supports storing a wiki in a
bzr repository.
Ikiwiki can run as a post-update hook to update a wiki whenever commits
come in. When running as a [[cgi]] with bzr, ikiwiki automatically
commits edited pages, and uses the bzr history to generate the
[[RecentChanges]] page.

View File

@ -352,3 +352,5 @@ merge again with a merger that inserts conflict markers. It commits this new
revision with conflict markers to the repository. It then returns the text to the
user for cleanup. This is less neat than it could be, in that a conflict marked
revision gets committed to the repository.
## [[bzr]]

View File

@ -22,7 +22,7 @@ but it works the best for typical ikiwiki use.
and git.
It is **paramount** that you **never** push to the non-bare repository
([this FAQ entry explains why](http://git.or.cz/gitwiki/GitFaq#head-b96f48bc9c925074be9f95c0fce69bcece5f6e73)).
Instead, if you want to work on the wiki from a remote machine, clone
the bare repository, using either the `git` transport (if available), or
`ssh`.
@ -39,7 +39,7 @@ should go to the bare repository, which has a `post-update` hook that uses
ikiwiki to pull the changes to the srcdir.
One setup that will work is to put all committers in a group (say,
"ikiwiki"), and use permissions to allow that group to commit to the bare git
repository. Make both the post-update hook and ikiwiki.cgi be setgid
to the group, as well as suid to the user who admins the wiki. The
`wrappergroup` [[setup_file_option|usage]] can be used to make the wrappers

View File

@ -1,16 +1,20 @@
[Monotone](http://monotone.ca/) is a distributed revision control system.
Ikiwiki supports storing a wiki in a Monotone repository and editing it
using the [[cgi]] interface. It will use the Monotone logs to generate the
[[RecentChanges]] page.
The monotone support in ikiwiki requires the Monotone perl module to be
installed. (It's available from the contrib/ directory in the monotone
source.) In particular, it needs version 0.03 or higher of that module.
The module is available from the monotone source repository at:
<http://viewmtn.angrygoats.net/branch/changes/net.venge.monotone>
Monotone support works, but there are still a few minor missing bits (listed here so they are not forgotten):
* The [[post-commit]] hook support is not yet completely implemented.
* Documentation (this page) could be improved.
There is also a mismatch between the way Ikiwiki handles conflicts and the
way Monotone handles conflicts. At present, if there is a conflict, then
Ikiwiki will commit a revision with conflict markers before presenting it
to the user. This is ugly, but there is no clean way to fix it at present.

View File

@ -1,3 +1,5 @@
[[meta title="RecentChanges"]]
Recent changes to this wiki:
[[inline pages="internal(recentchanges/change_*) and !*/Discussion"
template=recentchanges show=0]]

View File

@ -1,51 +1,19 @@
This is the SandBox, a page anyone can edit to try out this fab ikiwiki.
# Table of Contents
testhead
sandbox
<h2>testhead</h2>
----
Do re me fa so la te... git?
i see. i see. lalala hmmmmmmmm.
## sandbox
[[sandbox]] does that work? yep. yay!
# テスト。
Test. Korean. 나는 한국인, 백두산,동해
Test. Chinese. 我是中国人,泰山、黄河。
testing openid ... ignore.
Test. Проверка. Тэстенг. テスト ığüşöçİ ทดสอบ éphémère
Testing right-to-left text: (שרה) should be spelled shin (ש) resh (ר) heh (ה) from right to left.
Testing it in a comment...
Here's a paragraph.
Here's another one with *emphasised* text.
do ë characters work? Sure.
OpenID test. It works!!
Hupple hupple hupple hupple snork.
Exactly my point!
Test..
A [[nonexistingpage]]
There are Polish diacritical characters: ą, ć, ę, ł, ń, ó, ś, ż, ź.
Korean characters test : 한글테스트입니다.
# Header
## Subheader
@ -83,7 +51,7 @@ Bulleted list
* three
* four
* five
* six
----
[[template id=note text="this is generated by the [[plugins/haiku]] plugin"]]
@ -99,7 +67,8 @@ Bulleted list
* [GNU](http://www.gnu.org/)
* [Email](mailto:noone@invalid)
* [![ikiwiki logo](http://ikiwiki.info/logo/ikiwiki.png)](http://ikiwiki.info)
* <a href="http://www.google.com/">plain old html link</a>
-----
This sandbox is also a [[ikiwiki/blog]]!

View File

@ -0,0 +1,3 @@
# Test subpage
This is a test subpage. Isn't that special?

View File

@ -135,6 +135,12 @@ about using the git repositories.
ikiwiki-makerepo mercurial $SRCDIR
"""]]
[[toggle id=bazaar text="Bazaar"]]
[[toggleable id=bazaar text="""
REPOSITORY=$SRCDIR
ikiwiki-makerepo bzr $SRCDIR
"""]]
[[toggle id=tla text="TLA"]]
[[toggleable id=tla text="""
REPOSITORY=~/wikirepo
@ -180,8 +186,7 @@ about using the git repositories.
Once your wiki is checked in to the revision control system,
you should configure ikiwiki to use revision control. Edit your
ikiwiki.setup, and uncomment the lines for the revision control system
you chose to use. Uncomment the block for the wrapper for your revision
control system, and configure the wrapper path in that block
appropriately (for Git, it should be `$REPOSITORY/hooks/post-update`).

View File

@ -1,5 +1,6 @@
This map excludes discussion pages, as well as subpages that are in feeds.
[[map pages="* and !*/discussion and !recentchanges
and !bugs/* and !examples/*/* and !news/* and !tips/* and !plugins/*
and !sandbox/* and !todo/* and !users/*
and !*.css and !*.ico and !*.png and !*.svgz and !*.gif"]]

View File

@ -70,27 +70,49 @@ img {
border-style: none;
}
div.recentchanges {
border-style: solid;
border-width: 1px;
overflow: auto;
width: 100%;
background: #eee;
color: black !important;
}
.recentchanges .metadata {
padding: 0px 0.5em;
}
.recentchanges .changelog {
font-style: italic;
clear: both;
display: block;
padding: 1px 2px;
background: white !important;
color: black !important;
}
.recentchanges .desc {
display: none;
}
.recentchanges .committer {
float: left;
margin: 0;
width: 40%;
}
.recentchanges .committype {
float: left;
margin: 0;
width: 5%;
font-size: small;
}
.recentchanges .changedate {
float: left;
margin: 0;
width: 35%;
font-size: small;
}
.recentchanges .pagelinks {
float: right;
margin: 0;
width: 60%;
}
/* Used for adding a blog page. */

View File

@ -4,3 +4,6 @@ What's the rationale behind excluding ones own changes from the commit emails se
> Well, commit mails are intended to keep you informed of changes in the
> wiki, and I assumed you'd know about changes you made yourself.
> --[[Joey]]
> [[done]] -- commit mails removed; recentchanges feeds can be configured
> for whatever you like.

View File

@ -15,14 +15,14 @@ Cheers,
> that contains the full page name. Then you just use a modified
> `inlinepage.tmpl`, that uses that instead of the title. --[[Joey]]
diff --git a/IkiWiki/Plugin/inline.pm b/IkiWiki/Plugin/inline.pm
index 59eabb6..82913ba 100644
--- a/IkiWiki/Plugin/inline.pm
+++ b/IkiWiki/Plugin/inline.pm
@@ -229,6 +229,7 @@ sub preprocess_inline (@) { #{{{
$template->param(content => $content);
}
$template->param(pageurl => urlto(bestlink($params{page}, $page), $params{destpage}));
+ $template->param(page => $page);
$template->param(title => pagetitle(basename($page)));
$template->param(ctime => displaytime($pagectime{$page}, $params{timeformat}));

View File

@ -3,4 +3,8 @@ user to add a page to their subscribed list while editing. This would prove
particularly useful for [[todo]] and [bug](bugs) items, to allow users to receive
notifications for activity on their reports.
--[[JoshTriplett]]
I went and removed commit notification mails entirely, the idea is that you
subscribe using the [[RecentChanges]] rss feed, and filter it on your end.
Good enough? --[[Joey]]

View File

@ -0,0 +1,64 @@
The [[plugin/aggregate]] plugin's locking is suboptimal.
There should be no need to lock the wiki while aggregating -- it's annoying
that long aggregate runs can block edits from happening. However, not
locking would present problems. One is, if an aggregate run is happening,
and the feed is removed, it could continue adding pages for that feed.
Those pages would then become orphaned, and stick around, since the feed
that had created them is gone, and thus there's no indication that they
should be removed.
To fix that, garbage collect any pages that were created by
aggregation once their feed is gone.
Are there other things that could happen while it's aggregating that it
should check for?
Well, things like the feed url etc could change, and it
would have to merge in such changes before saving the aggregation state.
New feeds could also be added, feeds could be moved from one source page to
another.
Merging that feed info seems doable, just re-load the aggregation state
from disk, and set the `message`, `lastupdate`, `numposts`, and `error`
fields to their new values if the feed still exists.
----
Another part of the mess is that it needs to avoid stacking multiple
aggregate processes up if aggregation is very slow. Currently this is done
by taking the lock in nonblocking mode, and not aggregating if it's locked.
This has various problems, for example a page edit at the right time can
prevent aggregation from happening.
Adding another lock just for aggregation could solve this. Check that lock
(in checkconfig) and exit if another aggregator holds it.
----
The other part of the mess is that it currently does aggregation in
checkconfig, locking the wiki for that, and loading state, and then
dropping the lock, unloading state, and letting the render happen. Which
reloads state. That state reloading is tricky to do just right.
A simple fix: Move the aggregation to the new 'render' hook. Then state
would be loaded, and there would be no reason to worry about aggregating.
Or aggregation could be kept in checkconfig, like so:
* load aggregation state
* get list of feeds needing aggregation
* exit if none
* attempt to take aggregation lock, exit if another aggregation is happening
* fork a child process to do the aggregation
* load wiki state (needed for aggregation to run)
* aggregate
* lock wiki
* reload aggregation state
* merge in aggregation state changes
* unlock wiki
* drop aggregation lock
* force rebuild of sourcepages of feeds that were aggregated
* exit checkconfig and continue with usual refresh process
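A rough Perl sketch of that flow (the state and lock helpers are assumed names, not necessarily the plugin's real functions):

    loadstate();
    my @feeds=needsaggregate();
    exit 0 unless @feeds;
    exit 0 unless lockaggregate();  # another aggregator is running
    if (! fork()) {
        IkiWiki::loadindex();       # wiki state, needed to aggregate
        aggregate(@feeds);
        IkiWiki::lockwiki();
        loadstate();                # reload and merge state changes
        savestate();
        IkiWiki::unlockwiki();
        exit 0;
    }
    wait;
    unlockaggregate();
    # then force a rebuild of the aggregating source pages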
[[done]]

View File

@ -0,0 +1,5 @@
The new internal page feature is designed for something like
[[plugins/aggregate]].
How to transition to it though? Inlines of aggregated content would need to
change their pagespecs to use `internal()`.

View File

@ -9,4 +9,13 @@ a whole page into RecentChanges. Of course, it could only use _one_ of the
available markups, ie the default markdown. --[[Joey]]
To go along with this, the preview should show the formatted commit message.
--[[JoshTriplett]]
This is really easy to do now, but it would have to be limited to applying
markdown formatting (or whatever formatter is default I suppose) to the
content, and *not* to expanding any WikiLinks or preprocessor directives.
Especially with the new static RecentChanges, expanding even wikilinks
would be pretty tricky to do. Applying markdown formatting seems like a
reasonable thing; it would make commit messages that have the form of a
bulleted list be marked up nicely, and would also handle _emphasised_
words etc, and even http links. --[[Joey]]
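A sketch of that: run the commit message through htmlize with the default format, skipping linkify and preprocess so wikilinks and directives stay inert (variable names invented):

    my $html=IkiWiki::htmlize($page, "mdwn", $message);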

View File

@ -4,6 +4,8 @@ rcs_commit was only changed to work around bzr's lack of a switch to set the
username). bzr_log could probably be written better by someone better at perl,
and rcs_getctime and rcs_notify aren't written at all. --[[bma]]
(rcs_notify is not needed in this branch --[[Joey]])
#!/usr/bin/perl
use warnings;
@ -183,4 +185,10 @@ and rcs_getctime and rcs_notify aren't written at all. --[[bma]]
>>> It's new (in fact I'm not even sure that it made it in to 0.90, it might be in 0.91 due
>>> in a couple of weeks.
>>> I was just noting it for a future enhancement. --[[JamesWestby]]
> I've just posted another patch with support for bzr, including support for
> --author and a testsuite to git://git.samba.org/jelmer/ikiwiki.git. I hadn't
> seen this page earlier. --[[jelmer]]
> I used jelmer's patch --[[done]]! --[[Joey]]

View File

@ -0,0 +1,5 @@
[[Blog|ikiwiki/blog]] feeds and index pages show the posted time (ctime), while the actual blog entry pages only show the modified time.
The user has to look at the history link to find when a blog item was posted.
It would be nice if blog entry post pages could include the ctime. -- [[Edward_Betts]]

View File

@ -0,0 +1,7 @@
[[tag wishlist]]
Given that ikiwiki has a suggested use as a tool for developers, I was thinking it might be cool if ikiwiki had [Doxygen](http://www.doxygen.org/) support. I'm not exactly sure how the integration would work. Something along the lines of a plugin to support .dox files would be my first thought. I'd leave generating the documentation from any source files for a separate run of Doxygen - it'd be easier and you probably don't want the source being edited over the web.
#### Background ####
I have been involved with one project that uses Doxygen to generate their web pages and user docs, as well as their 'in code' documentation: <http://orca-robotics.sourceforge.net/orca/index.html>. This makes the whole system somewhat like ikiwiki, but without the cgi for public editing. I was thinking of trying to convince that project to move to ikiwiki, but they're not going to want to re-write all their documentation.

View File

@ -3,3 +3,6 @@ authentication (perhaps as a [[tip|tips]]), showing how to authenticate the
user for edits without requiring authentication for the entire wiki. (Ideally,
recentchanges should work without authentication as well, even though it goes
through the CGI.) --[[JoshTriplett]]
> (Now that recentchanges is a static page, it auths the same as other wiki
> pages.) --[[Joey]]

View File

@ -1,6 +1,6 @@
* Need to get post commit hook working (or an example of how to use it.)
* See below. --[[bma]]
* rcs_notify is not implemented (not needed in this branch --[[Joey]])
* Is the code sufficiently robust? It just warns when mercurial fails.
* When rcs_commit is called with a $user that is an openid, it will be
passed through to mercurial -u. Will mercurial choke on this?

View File

@ -0,0 +1,14 @@
It'd be nice if the mtime of the files ikiwiki renders matched the mtime of
the source files.
However, this turns out to be more complex than just calling utime() a few
times. If a page inlines other, younger pages, then having an older mtime
means that an old version of it will be kept in web caches, forcing
annoying shift-reloads to see the changed content (for example).
And it's not just inline. The template plugin means that a change to a
template can result in changes to how a page gets rendered. The version
plugin changes page content without any younger page being involved. And
editing one of the html templates and rebuilding the wiki can change every
page. All of these need to be reflected in the file mtime to avoid caching
problems.
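The simple part is just a couple of calls (a sketch; `$srcfile` and `$destfile` stand for the real paths):

    my $mtime=(stat($srcfile))[9];  # mtime of the source page
    utime($mtime, $mtime, $destfile);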

View File

@ -5,4 +5,13 @@ This would provide true "printable versions" of the wiki pages supporting it.
--[[JeremieKoenig]]
Could this be done by making the output format a plugin, similar to the way
pyblosxom works? Atom and RSS could then possibly be moved into plugins.
Presumably they'd have to work by converting HTML into some other format, as
trying to force all input languages to generate more than one output language
would be impractical to say the least.
--[[bma]]
[[tag wishlist]]

View File

@ -1,11 +1,11 @@
> Another useful feature might be to be able to choose a different [[template|wikitemplates]]
> file for some pages; [[blog|ikiwiki/blog]] pages would use a template different from the
> home page, even if both are managed in the same repository, etc.
Well, that would probably be fairly easy to add if it used [[pagespecs|ikiwiki/pagespec]] to
specify which pages use the non-default template.
Hmm, I think the [[pagetemplate|plugins/pagetemplate]] hook should allow one to get close enough to
this in a plugin now.
See also: [[Allow_per-page_template_selection]] -- same thing, really.

View File

@ -44,7 +44,6 @@ Remaining TODOs:
just for this bit of functionality?
* Debian news file.
* ikiwiki news file.
* Are commit emails still working?
--[[tschwinge]]

View File

@ -6,20 +6,6 @@ Suggestions of ideas for plugins:
> web-server-specific code to list all users, and openid can't feasibly do so
> at all. --[[JoshTriplett]]
* Support [[RecentChanges]] as a regular page containing a plugin that
updates each time there is a change, and statically builds the recent
changes list. (Would this be too expensive/inflexible? There might be
other ways to do it as a plugin, like making all links to RecentChanges
link to the cgi and have the cgi render it on demand.)
Or using an iframe
to inline the cgi, although firefox seems to render that nastily with
nested scroll bars. :-(
> Or just link to the equivalent in the version control system, if available;
> gitweb's shortlog or summary view would work nicely as a
> RecentChanges. --[[JoshTriplett]]
>>Why not fork the process? We wouldn't have to wait around for a response since we would assume the recent changes page was being generated correctly.
* It would be nice to be able to have a button to show "Differences" (or
"Show Diff") when editing a page. Is that an option that can be enabled?
Using a plugin?
@ -58,4 +44,4 @@ Suggestions of ideas for plugins:
* As I couldn't find another place to ask, I'll try here. I would like to install some contributed plugins, but can not find anywhere to download them.
> Not sure what you mean, the [[plugins/contrib]] page lists contributed plugins, and each of their pages tells where to download the plugin from.. --[[Joey]]

View File

@ -86,3 +86,59 @@ your pages. --Ethan
> backend.
>
> -- CharlesMauch
----
Here's a full design for redoing recentchanges, based on Ethan's ideas:
* Add a recentchanges plugin that has a preprocessor directive:
\[[recentchanges num=100 pages=* template=recentchanges.tmpl]]
If put on the [[recentchanges]] page, this would result in up to 100
recentchanges/change_$id.mdwn files being created.
* Which means the plugin has to store state and use a checkconfig hook
or the like to create the requested pages (and delete old ones) when
the wiki is rebuilt and when the post_commit hook is run.
* Then it's a simple matter of using inline on the recentchanges page
to display the changes. (With a special template to display nicely.)
* Rss/atom comes for free..
* So drop mail notifications.
* If someone wants to subscribe to notifications for only a subset
of pages, they can either filter the recentchanges in their rss
aggregator, or they can set up their own page that uses the recentchanges
directive for only the pages they want.
* The `rcs_notify` functions will be removed.
* To add diffs, another plugin can add a pagetemplate hook that calls
a `rcs_diff`. (optional)
* So to update the changes files, just call `rcs_recentchanges`, create
files for each new id, and delete files for each id that is no longer
included.
* The cgi support for recentchanges can be dropped, or moved to a different
plugin.
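A sketch of the update step described in that list (`format_change` is an invented helper that renders one change):

    my %seen;
    foreach my $change (IkiWiki::rcs_recentchanges(100)) {
        my $page="recentchanges/change_".$change->{rev};
        $seen{$page}=1;
        # each change file is only written once
        writefile("$page.mdwn", $config{srcdir}, format_change($change))
            unless -e "$config{srcdir}/$page.mdwn";
    }
    # files for ids no longer in %seen would be deleted here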
I'm unsure how fast this will all be, but by using regular pages, there's
caching, at least. The main slowdown might turn out to be the inlining and
not the generation of the changes pages. The current cgi recentchanges
code saves a tenth of a second or so by memoizing htmllink, an optimisation
that won't be available when using the more general inlining code.
An obvious optimisation, and one implied by this design, is that each change
file is only written once. This assumes that the data in them doesn't ever
change, which actually isn't true (svn commit messages can be changed), but
is probably close enough to true for our purposes.
Another optimisation would be to htmlize the change files when they're
written out -- avoids re-rendering a given file each time a new change is
made (thus doing 1/100th the work).
Links in the change files to the changed pages will need special handling.
These links should not generate backlinks. They probably shouldn't be
implemented as wikilinks at all. Instead, they should be raw, absolute
html links to the pages that were changed.
Only problem with this approach is that the links break if the changed
page later gets deleted. I think that's acceptable. It could link to
`ikiwiki.cgi?do=redir&page=foo`, but that's probably overkill.
--[[Joey]]
[[done]] !! (in this branch at least :-)

View File

@ -0,0 +1,9 @@
If one puts localized chars in wikilinks ikiwiki will escape them.
This works right from a technical point of view, but the URLs will become ugly.
So I made a patch which unaccents chars: <http://users.itk.ppke.hu/~cstamas/code/ikiwiki/unaccentpagetitlenames/>
This is a one-liner change, but requires a bit of reordering in the code.
[[cstamas]]
[[tag wishlist patch]]
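A sketch of the idea, assuming the Text::Unaccent CPAN module (the actual patch may differ):

    use Text::Unaccent;
    # Fold accented chars to ASCII before escaping, keeping URLs readable.
    $title=unac_string("UTF-8", $title);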

View File

@ -33,8 +33,7 @@ These options control the mode that ikiwiki operates in.
* --cgi
Enable [[CGI]] mode. In cgi mode ikiwiki runs as a cgi script, and
supports editing pages, signing in, and registration.
To use ikiwiki as a [[CGI]] program you need to use --wrapper or --setup
to generate a wrapper. The wrapper will generally need to run suid 6755 to
@ -133,11 +132,6 @@ configuration options of their own.
access controlled by a group, it makes sense for the ikiwiki wrappers
to run setgid to that group.
* --notify, --no-notify
Enable email notification of commits. This should be used when running
ikiwiki as a [[post-commit]] hook.
* --rcs=svn|git|.., --no-rcs
Enable or disable use of a [[revision_control_system|rcs]].
@ -146,16 +140,10 @@ configuration options of their own.
whatever the revision control system you select uses.
In [[CGI]] mode, with a revision control system enabled, pages edited via
the web will be committed.
No revision control is enabled by default.
* --svnrepo /svn/wiki
Specify the location of the svn repository for the wiki. This is required
for using --notify with [[Subversion|rcs/svn]].
* --svnpath trunk
Specify the path inside your svn repository where the wiki is located.
@ -165,13 +153,25 @@ configuration options of their own.
* --rss, --norss
If rss is set, ikiwiki will default to generating RSS feeds for pages
that inline a [[ikiwiki/blog]].
* --allowrss
If allowrss is set, and rss is not set, ikiwiki will not default to
generating RSS feeds, but setting `rss=yes` in the blog can override
this default and generate a feed.
* --atom, --noatom
If atom is set, ikiwiki will default to generating Atom feeds for pages
that inline a [[ikiwiki/blog]].
* --allowatom
If allowatom is set, and atom is not set, ikiwiki will not default to
generating Atom feeds, but setting `atom=yes` in the blog can override
this default and generate a feed.
* --pingurl URL

View File

@ -0,0 +1,4 @@
My watchlist:
[[inline pages="todo/allow_wiki_syntax_in_commit_messages" archive="yes" sort="mtime" atom="yes"]]

View File

@ -0,0 +1,7 @@
[[meta title="Per Olofsson"]]
Per Olofsson
* <pelle@dsv.su.se>
* <pelle@debian.org>
* <http://people.dsv.su.se/~pelle/>.

View File

@ -10,14 +10,11 @@ located in /usr/share/ikiwiki/templates by default.
* `page.tmpl` - Used for displaying all regular wiki pages.
* `misc.tmpl` - Generic template used for any page that doesn't
have a custom template.
* `recentchanges.tmpl` - Used for the RecentChanges page.
* `editpage.tmpl` - Create/edit page.
* `notifymail.tmpl` - Not a html template, this is used to
generate change notification mails for users who have subscribed to
changes to a page.
* `change.tmpl` - Used to create a page describing a change made to the wiki.
* `passwordmail.tmpl` - Not a html template, this is used to
generate the mail with the user's password in it.
* `rsspage.tmpl` - Used for generating rss feeds for [[blogs|ikiwiki/blog]].
* `rssitem.tmpl` - Used for generating individual items on rss feeds.
* `atompage.tmpl` - Used for generating atom feeds for blogs.
* `atomitem.tmpl` - Used for generating individual items on atom feeds.

View File

@ -7,7 +7,7 @@ repository="$3"
usage () {
echo "usage: ikiwiki-makerepo svn|git srcdir repository" >&2
echo "       ikiwiki-makerepo bzr|mercurial srcdir" >&2
exit 1
}
@ -20,7 +20,7 @@ if [ ! -d "$srcdir" ]; then
exit 1
fi
if [ "$rcs" != mercurial ] && [ "$rcs" != bzr ]; then
if [ -e "$repository" ]; then
echo "repository $repository already exists, aborting" >&2
exit 1
@ -73,6 +73,14 @@ mercurial)
hg commit -m "initial import"
echo "Directory $srcdir is now set up as a mercurial repository"
;;
bzr)
bzr init "$srcdir"
cd "$srcdir"
echo .ikiwiki > .bzrignore
bzr add * .bzrignore
bzr commit -m "initial import"
echo "Directory $srcdir is now set up as a bzr repository"
;;
*)
echo "Unsupported revision control system $rcs" >&2
usage

View File

@ -36,12 +36,10 @@ sub getconfig () { #{{{
"cgi!" => \$config{cgi}, "cgi!" => \$config{cgi},
"discussion!" => \$config{discussion}, "discussion!" => \$config{discussion},
"w3mmode!" => \$config{w3mmode}, "w3mmode!" => \$config{w3mmode},
"notify!" => \$config{notify},
"url=s" => \$config{url}, "url=s" => \$config{url},
"cgiurl=s" => \$config{cgiurl}, "cgiurl=s" => \$config{cgiurl},
"historyurl=s" => \$config{historyurl}, "historyurl=s" => \$config{historyurl},
"diffurl=s" => \$config{diffurl}, "diffurl=s" => \$config{diffurl},
"svnrepo" => \$config{svnrepo},
"svnpath" => \$config{svnpath}, "svnpath" => \$config{svnpath},
"adminemail=s" => \$config{adminemail}, "adminemail=s" => \$config{adminemail},
"timeformat=s" => \$config{timeformat}, "timeformat=s" => \$config{timeformat},
@ -123,7 +121,6 @@ sub main () { #{{{
gen_wrapper();
}
elsif ($config{cgi}) {
loadindex();
require IkiWiki::CGI;
cgi();
}
@ -132,10 +129,7 @@ sub main () { #{{{
commandline_render();
}
elsif ($config{post_commit} && ! commit_hook_enabled()) {
# do nothing
}
else {
lockwiki();
@ -143,7 +137,6 @@ sub main () { #{{{
require IkiWiki::Render;
rcs_update();
refresh();
rcs_notify() if $config{notify};
saveindex();
}
} #}}}

Some files were not shown because too many files have changed in this diff.