Merge branch 'master' into cvs

master
Amitai Schlair 2013-01-25 08:47:17 -05:00
commit 64370885cc
449 changed files with 10720 additions and 2425 deletions

1
.gitignore vendored
View File

@ -9,6 +9,7 @@ ikiwiki.out
ikiwiki-transition.out
ikiwiki-calendar.out
pm_to_blib
/MYMETA.yml
*.man
/po/cover_db
po/po2wiki_stamp

View File

@ -35,6 +35,7 @@ HTML::Tree
Sort::Naturally
Gravatar::URL
Net::INET6Glue
XML::Writer
=head1 AUTHOR

View File

@ -118,6 +118,22 @@ sub getsetup () {
safe => 0,
rebuild => 0,
},
cgi_overload_delay => {
type => "string",
default => '',
example => "10",
description => "number of seconds to delay CGI requests when overloaded",
safe => 1,
rebuild => 0,
},
cgi_overload_message => {
type => "string",
default => '',
example => "Please wait",
description => "message to display when overloaded (may contain html)",
safe => 1,
rebuild => 0,
},
rcs => {
type => "string",
default => '',
@ -1092,6 +1108,11 @@ sub cgiurl (@) {
join("&", map $_."=".uri_escape_utf8($params{$_}), keys %params);
}
sub cgiurl_abs (@) {
eval q{use URI};
URI->new_abs(cgiurl(@_), $config{cgiurl});
}
sub baseurl (;$) {
my $page=shift;

View File

@ -131,7 +131,7 @@ sub needsignin ($$) {
if (! defined $session->param("name") ||
! userinfo_get($session->param("name"), "regdate")) {
$session->param(postsignin => $ENV{QUERY_STRING});
$session->param(postsignin => $q->query_string);
cgi_signin($q, $session);
cgi_savesession($session);
exit;

View File

@ -113,8 +113,7 @@ sub launchaggregation () {
my @feeds=needsaggregate();
return unless @feeds;
if (! lockaggregate()) {
debug("an aggregation process is already running");
return;
error("an aggregation process is already running");
}
# force a later rebuild of source pages
$IkiWiki::forcerebuild{$_->{sourcepage}}=1
@ -201,7 +200,7 @@ sub migrate_to_internal {
if (-e $oldoutput) {
require IkiWiki::Render;
debug("removing output file $oldoutput");
IkiWiki::prune($oldoutput);
IkiWiki::prune($oldoutput, $config{destdir});
}
}

View File

@ -232,8 +232,9 @@ sub writefile ($$$;$$) {
}
# This is a wrapper around the real prune.
sub prune ($) {
sub prune ($;$) {
my $file=shift;
my $up_to=shift;
my @keys=IkiWiki::Plugin::amazon_s3::file2keys($file);
@ -250,7 +251,7 @@ sub prune ($) {
}
}
return $IkiWiki::Plugin::amazon_s3::subs{'IkiWiki::prune'}->($file);
return $IkiWiki::Plugin::amazon_s3::subs{'IkiWiki::prune'}->($file, $up_to);
}
1

View File

@ -148,7 +148,7 @@ sub formbuilder (@) {
$f=Encode::decode_utf8($f);
$f=~s/^$page\///;
if (IkiWiki::isinlinableimage($f) &&
UNIVERSAL::can("IkiWiki::Plugin::img", "import")) {
IkiWiki::Plugin::img->can("import")) {
$add.='[[!img '.$f.' align="right" size="" alt=""]]';
}
else {
@ -286,7 +286,7 @@ sub attachments_save {
}
return unless @attachments;
require IkiWiki::Render;
IkiWiki::prune($dir);
IkiWiki::prune($dir, $config{wikistatedir}."/attachments");
# Check the attachments in and trigger a wiki refresh.
if ($config{rcs}) {

View File

@ -5,6 +5,7 @@ use warnings;
use strict;
use IkiWiki;
use Encode;
use URI::Escape q{uri_escape_utf8};
use open qw{:utf8 :std};
sub import {
@ -242,8 +243,10 @@ sub rcs_recentchanges ($) {
# Skip source name in renames
$filename =~ s/^.* => //;
my $efilename = uri_escape_utf8($filename);
my $diffurl = defined $config{'diffurl'} ? $config{'diffurl'} : "";
$diffurl =~ s/\[\[file\]\]/$filename/go;
$diffurl =~ s/\[\[file\]\]/$efilename/go;
$diffurl =~ s/\[\[file-id\]\]/$fileid/go;
$diffurl =~ s/\[\[r2\]\]/$info->{revno}/go;

View File

@ -13,7 +13,7 @@
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
require 5.002;
package IkiWiki::Plugin::calendar;

View File

@ -301,7 +301,8 @@ sub editcomment ($$) {
my @buttons = (POST_COMMENT, PREVIEW, CANCEL);
my $form = CGI::FormBuilder->new(
fields => [qw{do sid page subject editcontent type author url}],
fields => [qw{do sid page subject editcontent type author
email url subscribe anonsubscribe}],
charset => 'utf-8',
method => 'POST',
required => [qw{editcontent}],
@ -346,18 +347,35 @@ sub editcomment ($$) {
$form->field(name => "type", value => $type, force => 1,
type => 'select', options => \@page_types);
$form->tmpl_param(username => $session->param('name'));
my $username=$session->param('name');
$form->tmpl_param(username => $username);
$form->field(name => "subscribe", type => 'hidden');
$form->field(name => "anonsubscribe", type => 'hidden');
if (IkiWiki::Plugin::notifyemail->can("subscribe")) {
if (defined $username) {
$form->field(name => "subscribe", type => "checkbox",
options => [gettext("email replies to me")]);
}
elsif (IkiWiki::Plugin::passwordauth->can("anonuser")) {
$form->field(name => "anonsubscribe", type => "checkbox",
options => [gettext("email replies to me")]);
}
}
if ($config{comments_allowauthor} and
! defined $session->param('name')) {
$form->tmpl_param(allowauthor => 1);
$form->field(name => 'author', type => 'text', size => '40');
$form->field(name => 'email', type => 'text', size => '40');
$form->field(name => 'url', type => 'text', size => '40');
}
else {
$form->tmpl_param(allowauthor => 0);
$form->field(name => 'author', type => 'hidden', value => '',
force => 1);
$form->field(name => 'email', type => 'hidden', value => '',
force => 1);
$form->field(name => 'url', type => 'hidden', value => '',
force => 1);
}
@ -425,10 +443,7 @@ sub editcomment ($$) {
$content .= " nickname=\"$nickname\"\n";
}
elsif (defined $session->remote_addr()) {
my $ip = $session->remote_addr();
if ($ip =~ m/^([.0-9]+)$/) {
$content .= " ip=\"$1\"\n";
}
$content .= " ip=\"".$session->remote_addr()."\"\n";
}
if ($config{comments_allowauthor}) {
@ -490,6 +505,20 @@ sub editcomment ($$) {
if ($form->submitted eq POST_COMMENT && $form->validate) {
IkiWiki::checksessionexpiry($cgi, $session);
if (IkiWiki::Plugin::notifyemail->can("subscribe")) {
my $subspec="comment($page)";
if (defined $username &&
length $form->field("subscribe")) {
IkiWiki::Plugin::notifyemail::subscribe(
$username, $subspec);
}
elsif (length $form->field("email") &&
length $form->field("anonsubscribe")) {
IkiWiki::Plugin::notifyemail::anonsubscribe(
$form->field("email"), $subspec);
}
}
$postcomment=1;
my $ok=IkiWiki::check_content(content => $form->field('editcontent'),
@ -575,7 +604,8 @@ sub editcomment ($$) {
sub getavatar ($) {
my $user=shift;
return undef unless defined $user;
my $avatar;
eval q{use Libravatar::URL};
if (! $@) {
@ -632,9 +662,11 @@ sub commentmoderation ($$) {
my $page=IkiWiki::dirname($f);
my $file="$config{srcdir}/$f";
my $filedir=$config{srcdir};
if (! -e $file) {
# old location
$file="$config{wikistatedir}/comments_pending/".$f;
$filedir="$config{wikistatedir}/comments_pending";
}
if ($action eq 'Accept') {
@ -649,7 +681,7 @@ sub commentmoderation ($$) {
}
require IkiWiki::Render;
IkiWiki::prune($file);
IkiWiki::prune($file, $filedir);
}
}

View File

@ -4,7 +4,6 @@ package IkiWiki::Plugin::conditional;
use warnings;
use strict;
use IkiWiki 3.00;
use UNIVERSAL;
sub import {
hook(type => "getsetup", id => "conditional", call => \&getsetup);

View File

@ -33,6 +33,7 @@ use warnings;
use strict;
use IkiWiki;
use URI::Escape q{uri_escape_utf8};
use File::chdir;
@ -313,7 +314,8 @@ sub rcs_recentchanges ($) {
$oldrev =~ s/INITIAL/0/;
$newrev =~ s/\(DEAD\)//;
my $diffurl = defined $config{diffurl} ? $config{diffurl} : "";
$diffurl=~s/\[\[file\]\]/$page/g;
my $epage = uri_escape_utf8($page);
$diffurl=~s/\[\[file\]\]/$epage/g;
$diffurl=~s/\[\[r1\]\]/$oldrev/g;
$diffurl=~s/\[\[r2\]\]/$newrev/g;
unshift @pages, {

View File

@ -3,6 +3,7 @@ package IkiWiki::Plugin::darcs;
use warnings;
use strict;
use URI::Escape q{uri_escape_utf8};
use IkiWiki;
sub import {
@ -336,7 +337,8 @@ sub rcs_recentchanges ($) {
foreach my $f (@files) {
my $d = defined $config{'diffurl'} ? $config{'diffurl'} : "";
$d =~ s/\[\[file\]\]/$f/go;
my $ef = uri_escape_utf8($f);
$d =~ s/\[\[file\]\]/$ef/go;
$d =~ s/\[\[hash\]\]/$hash/go;
push @pg, {

View File

@ -39,7 +39,7 @@ sub refresh () {
}
if ($delete) {
debug(sprintf(gettext("removing old preview %s"), $file));
IkiWiki::prune("$config{destdir}/$file");
IkiWiki::prune("$config{destdir}/$file", $config{destdir});
}
}
elsif (defined $mtime) {
@ -64,7 +64,8 @@ sub cgi_editpage ($$) {
decode_cgi_utf8($q);
my @fields=qw(do rcsinfo subpage from page type editcontent editmessage);
my @fields=qw(do rcsinfo subpage from page type editcontent
editmessage subscribe);
my @buttons=("Save Page", "Preview", "Cancel");
eval q{use CGI::FormBuilder};
error($@) if $@;
@ -157,6 +158,17 @@ sub cgi_editpage ($$) {
noimageinline => 1,
linktext => "FormattingHelp"));
my $cansubscribe=IkiWiki::Plugin::notifyemail->can("subscribe")
&& IkiWiki::Plugin::comments->can("import")
&& defined $session->param('name');
if ($cansubscribe) {
$form->field(name => "subscribe", type => "checkbox",
options => [gettext("email comments to me")]);
}
else {
$form->field(name => "subscribe", type => 'hidden');
}
my $previewing=0;
if ($form->submitted eq "Cancel") {
if ($form->field("do") eq "create" && defined $from) {
@ -448,6 +460,12 @@ sub cgi_editpage ($$) {
# caches and get the most recent version of the page.
redirect($q, $baseurl."?updated");
}
if ($cansubscribe && length $form->field("subscribe")) {
my $subspec="comment($page)";
IkiWiki::Plugin::notifyemail::subscribe(
$session->param('name'), $subspec);
}
}
exit;

View File

@ -132,7 +132,7 @@ sub filltemplate ($$) {
if ($@) {
# Indicate that the earlier preprocessor directive set
# up a template that doesn't work.
return "[[!pagetemplate ".gettext("failed to process template:")." $@]]";
return "[[!edittemplate ".gettext("failed to process template:")." $@]]";
}
$template->param(name => $page);

View File

@ -48,7 +48,6 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
section => "misc",
},
}
@ -140,7 +139,7 @@ sub match_mimetype ($$;@) {
my $mimeinfo_ok=! $@;
my $mimetype;
if ($mimeinfo_ok) {
my $mimetype=File::MimeInfo::Magic::magic($file);
$mimetype=File::MimeInfo::Magic::magic($file);
}
# Fall back to using file, which has a more complete

View File

@ -5,6 +5,7 @@ use warnings;
use strict;
use IkiWiki;
use Encode;
use URI::Escape q{uri_escape_utf8};
use open qw{:utf8 :std};
my $sha1_pattern = qr/[0-9a-fA-F]{40}/; # pattern to validate Git sha1sums
@ -340,8 +341,8 @@ sub parse_diff_tree ($) {
my $dt_ref = shift;
# End of stream?
return if !defined @{ $dt_ref } ||
!defined @{ $dt_ref }[0] || !length @{ $dt_ref }[0];
return if ! @{ $dt_ref } ||
!defined $dt_ref->[0] || !length $dt_ref->[0];
my %ci;
# Header line.
@ -468,13 +469,10 @@ sub git_sha1 (;$) {
# Ignore error since a non-existing file might be given.
my ($sha1) = run_or_non('git', 'rev-list', '--max-count=1', 'HEAD',
'--', $file);
if ($sha1) {
if (defined $sha1) {
($sha1) = $sha1 =~ m/($sha1_pattern)/; # sha1 is untainted now
}
else {
debug("Empty sha1sum for '$file'.");
}
return defined $sha1 ? $sha1 : q{};
return defined $sha1 ? $sha1 : '';
}
sub rcs_update () {
@ -617,9 +615,10 @@ sub rcs_recentchanges ($) {
my @pages;
foreach my $detail (@{ $ci->{'details'} }) {
my $file = $detail->{'file'};
my $efile = uri_escape_utf8($file);
my $diffurl = defined $config{'diffurl'} ? $config{'diffurl'} : "";
$diffurl =~ s/\[\[file\]\]/$file/go;
$diffurl =~ s/\[\[file\]\]/$efile/go;
$diffurl =~ s/\[\[sha1_parent\]\]/$ci->{'parent'}/go;
$diffurl =~ s/\[\[sha1_from\]\]/$detail->{'sha1_from'}/go;
$diffurl =~ s/\[\[sha1_to\]\]/$detail->{'sha1_to'}/go;

View File

@ -132,6 +132,7 @@ sub graph (@) {
}, "text");
$p->parse($src);
$p->eof;
$s=~s/\[ href= \]//g; # handle self-links
$params{src}=$s;
}
else {

View File

@ -29,6 +29,7 @@ sub import {
"irc", "ircs", "lastfm", "ldaps", "magnet", "mms",
"msnim", "notes", "rsync", "secondlife", "skype", "ssh",
"sftp", "smb", "sms", "snews", "webcal", "ymsgr",
"bitcoin", "git", "svn", "bzr", "darcs", "hg"
);
# data is a special case. Allow a few data:image/ types,
# but disallow data:text/javascript and everything else.

View File

@ -7,6 +7,7 @@ use strict;
use IkiWiki 3.00;
sub import {
hook(type => "checkconfig", id => "httpauth", call => \&checkconfig);
hook(type => "getsetup", id => "httpauth", call => \&getsetup);
hook(type => "auth", id => "httpauth", call => \&auth);
hook(type => "formbuilder_setup", id => "httpauth",
@ -37,6 +38,19 @@ sub getsetup () {
rebuild => 0,
},
}
sub checkconfig () {
if ($config{cgi} && defined $config{cgiauthurl} &&
keys %{$IkiWiki::hooks{auth}} < 2) {
# There are no other auth hooks registered, so avoid
# the normal signin form, and jump right to httpauth.
require IkiWiki::CGI;
inject(name => "IkiWiki::cgi_signin", call => sub ($$) {
my $cgi=shift;
redir_cgiauthurl($cgi, $cgi->query_string());
});
}
}
sub redir_cgiauthurl ($;@) {
my $cgi=shift;

View File

@ -19,14 +19,14 @@ sub import {
hook(type => "checkconfig", id => "inline", call => \&checkconfig);
hook(type => "sessioncgi", id => "inline", call => \&sessioncgi);
hook(type => "preprocess", id => "inline",
call => \&IkiWiki::preprocess_inline);
call => \&IkiWiki::preprocess_inline, scan => 1);
hook(type => "pagetemplate", id => "inline",
call => \&IkiWiki::pagetemplate_inline);
hook(type => "format", id => "inline", call => \&format, first => 1);
# Hook to change to do pinging since it's called late.
# This ensures each page only pings once and prevents slow
# pings interrupting page builds.
hook(type => "change", id => "inline", call => \&IkiWiki::pingurl);
hook(type => "rendered", id => "inline", call => \&IkiWiki::pingurl);
}
sub getopt () {
@ -155,6 +155,23 @@ sub preprocess_inline (@) {
if (! exists $params{pages} && ! exists $params{pagenames}) {
error gettext("missing pages parameter");
}
if (! defined wantarray) {
# Running in scan mode: only do the essentials
if (yesno($params{trail}) && IkiWiki::Plugin::trail->can("preprocess_trailitems")) {
# default to sorting age, the same as inline itself,
# but let the params override that
IkiWiki::Plugin::trail::preprocess_trailitems(sort => 'age', %params);
}
return;
}
if (yesno($params{trail}) && IkiWiki::Plugin::trail->can("preprocess_trailitems")) {
scalar IkiWiki::Plugin::trail::preprocess_trailitems(sort => 'age', %params);
}
my $raw=yesno($params{raw});
my $archive=yesno($params{archive});
my $rss=(($config{rss} || $config{allowrss}) && exists $params{rss}) ? yesno($params{rss}) : $config{rss};
@ -194,8 +211,7 @@ sub preprocess_inline (@) {
}
}
@list = map { bestlink($params{page}, $_) }
split ' ', $params{pagenames};
@list = split ' ', $params{pagenames};
if (yesno($params{reverse})) {
@list=reverse(@list);
@ -204,6 +220,8 @@ sub preprocess_inline (@) {
foreach my $p (@list) {
add_depends($params{page}, $p, deptype($quick ? "presence" : "content"));
}
@list = grep { exists $pagesources{$_} } @list;
}
else {
my $num=0;
@ -677,7 +695,6 @@ sub genfeed ($$$$$@) {
guid => $guid,
feeddate => date_3339($lasttime),
feedurl => $feedurl,
version => $IkiWiki::version,
);
run_hooks(pagetemplate => sub {
shift->(page => $page, destpage => $page,

View File

@ -144,9 +144,9 @@ sub renamepage (@) {
my $old=$params{oldpage};
my $new=$params{newpage};
$params{content} =~ s{(?<!\\)$link_regexp}{
if (! is_externallink($page, $2, $3)) {
my $linktext=$2;
$params{content} =~ s{(?<!\\)($link_regexp)}{
if (! is_externallink($page, $3, $4)) {
my $linktext=$3;
my $link=$linktext;
if (bestlink($page, linkpage($linktext)) eq $old) {
$link=pagetitle($new, 1);
@ -161,9 +161,12 @@ sub renamepage (@) {
$link="/$link";
}
}
defined $1
? ( "[[$1|$link".($3 ? "#$3" : "")."]]" )
: ( "[[$link". ($3 ? "#$3" : "")."]]" )
defined $2
? ( "[[$2|$link".($4 ? "#$4" : "")."]]" )
: ( "[[$link". ($4 ? "#$4" : "")."]]" )
}
else {
$1
}
}eg;

View File

@ -5,6 +5,7 @@ use warnings;
use strict;
use IkiWiki;
use Encode;
use URI::Escape q{uri_escape_utf8};
use open qw{:utf8 :std};
sub import {
@ -265,7 +266,8 @@ sub rcs_recentchanges ($) {
foreach my $file (split / /,$info->{files}) {
my $diffurl = defined $config{diffurl} ? $config{'diffurl'} : "";
$diffurl =~ s/\[\[file\]\]/$file/go;
my $efile = uri_escape_utf8($file);
$diffurl =~ s/\[\[file\]\]/$efile/go;
$diffurl =~ s/\[\[r2\]\]/$info->{changeset}/go;
push @pages, {

View File

@ -275,17 +275,23 @@ sub preprocess (@) {
push @{$metaheaders{$page}}, '<meta name="robots"'.
' content="'.encode_entities($value).'" />';
}
elsif ($key eq 'description') {
push @{$metaheaders{$page}}, '<meta name="'.
encode_entities($key).
elsif ($key eq 'description' || $key eq 'author') {
push @{$metaheaders{$page}}, '<meta name="'.$key.
'" content="'.encode_entities($value).'" />';
}
elsif ($key eq 'name') {
push @{$metaheaders{$page}}, scrub('<meta '.$key.'="'.
push @{$metaheaders{$page}}, scrub('<meta name="'.
encode_entities($value).
join(' ', map { "$_=\"$params{$_}\"" } keys %params).
' />', $page, $destpage);
}
elsif ($key eq 'keywords') {
# Make sure the keyword string is safe: only allow alphanumeric
# characters, space and comma and strip the rest.
$value =~ s/[^[:alnum:], ]+//g;
push @{$metaheaders{$page}}, '<meta name="keywords"'.
' content="'.encode_entities($value).'" />';
}
else {
push @{$metaheaders{$page}}, scrub('<meta name="'.
encode_entities($key).'" content="'.
@ -312,8 +318,9 @@ sub pagetemplate (@) {
$template->param(title_overridden => 1);
}
foreach my $field (qw{author authorurl}) {
$template->param($field => $pagestate{$page}{meta}{$field})
foreach my $field (qw{authorurl}) {
eval q{use HTML::Entities};
$template->param($field => HTML::Entities::encode_entities($pagestate{$page}{meta}{$field}))
if exists $pagestate{$page}{meta}{$field} && $template->query(name => $field);
}
@ -324,7 +331,7 @@ sub pagetemplate (@) {
}
}
foreach my $field (qw{description}) {
foreach my $field (qw{description author}) {
eval q{use HTML::Entities};
$template->param($field => HTML::Entities::encode_numeric($pagestate{$page}{meta}{$field}))
if exists $pagestate{$page}{meta}{$field} && $template->query(name => $field);

View File

@ -24,6 +24,19 @@ sub getsetup () {
safe => 1,
rebuild => 1,
},
mirrorlist_use_cgi => {
type => 'boolean',
example => 1,
description => "generate links that point to the mirrors' ikiwiki CGI",
safe => 1,
rebuild => 1,
},
}
sub checkconfig () {
if (! defined $config{mirrorlist_use_cgi}) {
$config{mirrorlist_use_cgi}=0;
}
}
sub pagetemplate (@) {
@ -46,7 +59,9 @@ sub mirrorlist ($) {
join(", ",
map {
qq{<a href="}.
$config{mirrorlist}->{$_}."/".urlto($page, "").
( $config{mirrorlist_use_cgi} ?
$config{mirrorlist}->{$_}."?do=goto&page=$page" :
$config{mirrorlist}->{$_}."/".urlto($page, "") ).
qq{">$_</a>}
} keys %{$config{mirrorlist}}
).

View File

@ -7,6 +7,7 @@ use IkiWiki;
use Monotone;
use Date::Parse qw(str2time);
use Date::Format qw(time2str);
use URI::Escape q{uri_escape_utf8};
my $sha1_pattern = qr/[0-9a-fA-F]{40}/; # pattern to validate sha1sums
my $mtn_version = undef;
@ -593,7 +594,8 @@ sub rcs_recentchanges ($) {
my $diffurl=$config{diffurl};
$diffurl=~s/\[\[r1\]\]/$parent/g;
$diffurl=~s/\[\[r2\]\]/$rev/g;
$diffurl=~s/\[\[file\]\]/$file/g;
my $efile = uri_escape_utf8($file);
$diffurl=~s/\[\[file\]\]/$efile/g;
push @pages, {
page => pagename($file),
diffurl => $diffurl,

View File

@ -0,0 +1,168 @@
#!/usr/bin/perl
package IkiWiki::Plugin::notifyemail;
use warnings;
use strict;
use IkiWiki 3.00;
sub import {
	# Wire this plugin into ikiwiki: setup metadata, the extra
	# preferences-form field, and the post-commit change notifier.
	hook(type => "getsetup", id => "notifyemail", call => \&getsetup);
	hook(type => "formbuilder", id => "notifyemail", call => \&formbuilder);
	hook(type => "changes", id => "notifyemail", call => \&notify);
}
sub getsetup () {
	# Plugin metadata only: safe to enable from the websetup
	# interface, and toggling it needs no wiki rebuild.
	my %plugin_info = (
		safe => 1,
		rebuild => 0,
	);
	return (plugin => \%plugin_info);
}
sub formbuilder (@) {
	# Add a "subscriptions" PageSpec field to the preferences form.
	# On initial display the stored value is loaded; on a validated
	# save it is written back to the userinfo database.
	my %opts = @_;
	my $prefs_form = $opts{form};
	return unless $prefs_form->title eq "preferences";
	my $who = $opts{session}->param("name");
	my $speclink = htmllink("", "", "ikiwiki/PageSpec", noimageinline => 1);
	$prefs_form->field(
		name => "subscriptions",
		size => 50,
		fieldset => "preferences",
		comment => "(".$speclink.")",
	);
	if (! $prefs_form->submitted) {
		# Initial display: show the user's saved subscriptions.
		$prefs_form->field(name => "subscriptions", force => 1,
			value => getsubscriptions($who));
	}
	elsif ($prefs_form->submitted eq "Save Preferences" &&
	       $prefs_form->validate &&
	       defined $prefs_form->field("subscriptions")) {
		setsubscriptions($who, $prefs_form->field('subscriptions'));
	}
}
sub getsubscriptions ($) {
	# Fetch the stored subscription PageSpec for a user; may be
	# undef or empty when none has been configured.
	my $who = shift;
	eval q{use IkiWiki::UserInfo};
	error $@ if $@;
	return IkiWiki::userinfo_get($who, "subscriptions");
}
sub setsubscriptions ($$) {
	# Persist a user's subscription PageSpec in the userinfo database.
	my ($who, $spec) = @_;
	eval q{use IkiWiki::UserInfo};
	error $@ if $@;
	IkiWiki::userinfo_set($who, "subscriptions", $spec);
}
# Called by other plugins to subscribe the user to a pagespec.
sub subscribe ($$) {
	my ($who, $extra) = @_;
	# Append with "or" so any existing subscriptions are kept.
	my $current = getsubscriptions($who);
	my $combined = length $current
		? $current." or ".$extra
		: $extra;
	setsubscriptions($who, $combined);
}
# Called by other plugins to subscribe an email to a pagespec.
sub anonsubscribe ($$) {
	my ($email, $extra) = @_;
	# Anonymous subscription is only possible when passwordauth can
	# map an email to a hidden user; otherwise this is a no-op.
	if (IkiWiki::Plugin::passwordauth->can("anonuser")) {
		my $who = IkiWiki::Plugin::passwordauth::anonuser($email);
		error(gettext("Cannot subscribe your email address without logging in."))
			unless defined $who;
		subscribe($who, $extra);
	}
}
sub notify (@) {
	# "changes" hook: mail each subscribed user about changed pages
	# matching their subscription pagespec.  Forks a daemon child so
	# slow mail delivery does not block the wiki build; the parent
	# returns immediately and the child exits when done.
	my @files=@_;
	return unless @files;

	eval q{use Mail::Sendmail};
	error $@ if $@;
	eval q{use IkiWiki::UserInfo};
	error $@ if $@;
	eval q{use URI};
	error($@) if $@;

	# Daemonize, in case the mail sending takes a while.
	defined(my $pid = fork) or error("Can't fork: $!");
	return if $pid; # parent
	chdir '/';
	# Detach from the terminal; failures to reopen std handles are
	# deliberately ignored (best effort in a background child).
	open STDIN, '<', '/dev/null';
	open STDOUT, '>', '/dev/null';
	# NOTE(review): relies on POSIX already being loaded by the
	# IkiWiki core -- confirm, or add an explicit "use POSIX ()".
	POSIX::setsid() or error("Can't start a new session: $!");
	open STDERR, '>&', STDOUT or error("Can't dup stdout: $!");
	# Don't need to keep a lock on the wiki as a daemon.
	IkiWiki::unlockwiki();

	my $userinfo=IkiWiki::userinfo_retrieve();
	exit 0 unless defined $userinfo;

	foreach my $user (keys %$userinfo) {
		my $pagespec=$userinfo->{$user}->{"subscriptions"};
		next unless defined $pagespec && length $pagespec;
		my $email=$userinfo->{$user}->{email};
		next unless defined $email && length $email;

		foreach my $file (@files) {
			my $page=pagename($file);
			next unless pagespec_match($page, $pagespec);
			# pagetype() returns undef for non-page files, so
			# cache it and guard every use below.
			my $pagetype=pagetype($file);
			my $content="";
			my $showcontent=defined $pagetype;
			if ($showcontent) {
				$content=eval { readfile(srcfile($file)) };
				$showcontent=0 if $@;
			}
			my $url;
			if (! IkiWiki::isinternal($page)) {
				$url=urlto($page, undef, 1);
			}
			elsif (defined $pagestate{$page}{meta}{permalink}) {
				# need to use permalink for an internal page
				$url=URI->new_abs($pagestate{$page}{meta}{permalink}, $config{url});
			}
			else {
				$url=$config{url}; # crummy fallback url
			}
			my $pagedesc=$page;
			if (defined $pagestate{$page}{meta}{title} &&
			    length $pagestate{$page}{meta}{title}) {
				$pagedesc=qq{"$pagestate{$page}{meta}{title}"};
			}
			my $subject=gettext("change notification:")." ".$pagedesc;
			# Guard against an uninitialized-value warning when
			# pagetype() returned undef above.
			if (defined $pagetype && $pagetype eq '_comment') {
				$subject=gettext("comment notification:")." ".$pagedesc;
			}
			my $prefsurl=IkiWiki::cgiurl_abs(do => 'prefs');
			if (IkiWiki::Plugin::passwordauth->can("anonusertoken")) {
				# Let anonymous subscribers manage their
				# subscription via a token link.
				my $token=IkiWiki::Plugin::passwordauth::anonusertoken($userinfo->{$user});
				$prefsurl=IkiWiki::cgiurl_abs(
					do => 'tokenauth',
					name => $user,
					token => $token,
				) if defined $token;
			}
			my $template=template("notifyemail.tmpl");
			$template->param(
				wikiname => $config{wikiname},
				url => $url,
				prefsurl => $prefsurl,
				showcontent => $showcontent,
				content => $content,
			);
			sendmail(
				To => $email,
				From => "$config{wikiname} <$config{adminemail}>",
				Subject => $subject,
				Message => $template->output,
			);
		}
	}
	exit 0; # daemon child
}
1

View File

@ -25,7 +25,7 @@ sub canedit ($$) {
my $cgi=shift;
my $session=shift;
return "" if $page=~/(\/|^)\Q$config{discussionpage}\E$/i;
return "" if $config{discussion} && $page=~/(\/|^)\Q$config{discussionpage}\E$/i;
return "" if pagespec_match($page, "postcomment(*)");
return undef;
}

View File

@ -100,9 +100,10 @@ sub formbuilder_setup (@) {
IkiWiki::openiduser($session->param("name"))) {
$form->field(name => "openid_identifier", disabled => 1,
label => htmllink("", "", "ikiwiki/OpenID", noimageinline => 1),
value => $session->param("name"),
size => length($session->param("name")), force => 1,
fieldset => "login");
value => "",
size => 1, force => 1,
fieldset => "login",
comment => $session->param("name"));
$form->field(name => "email", type => "hidden");
}
}

View File

@ -0,0 +1,594 @@
#!/usr/bin/perl
# Copyright 2011 Blars Blarson
# Released under GPL version 2
package IkiWiki::Plugin::osm;
use utf8;
use strict;
use warnings;
use IkiWiki 3.0;
sub import {
	# Ship the "osm" underlay and register every hook this plugin
	# needs: setup, two directives, output formatting, state saving
	# and a CGI endpoint.
	add_underlay("osm");
	hook(type => "getsetup", id => "osm", call => \&getsetup);
	hook(type => "preprocess", id => "osm", call => \&preprocess);
	hook(type => "preprocess", id => "waypoint", call => \&process_waypoint);
	hook(type => "format", id => "osm", call => \&format);
	hook(type => "savestate", id => "waypoint", call => \&savestate);
	hook(type => "cgi", id => "osm", call => \&cgi);
}
sub getsetup () {
	# Describe the plugin and its configuration options for the
	# websetup interface.  Options that change generated maps or
	# exports are marked rebuild => 1.
	return
		plugin => {
			safe => 1,
			rebuild => 1,
			section => "special-purpose",
		},
		osm_default_zoom => {
			type => "integer",
			example => "15",
			description => "the default zoom when you click on the map link",
			safe => 1,
			rebuild => 1,
		},
		osm_default_icon => {
			type => "string",
			example => "ikiwiki/images/osm.png",
			description => "the icon shown on links and on the main map",
			safe => 0,
			rebuild => 1,
		},
		osm_alt => {
			type => "string",
			example => "",
			description => "the alt tag of links, defaults to empty",
			safe => 0,
			rebuild => 1,
		},
		osm_format => {
			type => "string",
			example => "KML",
			description => "the output format for waypoints, can be KML, GeoJSON or CSV (one or many, comma-separated)",
			safe => 1,
			rebuild => 1,
		},
		osm_tag_default_icon => {
			type => "string",
			example => "icon.png",
			description => "the icon attached to a tag, displayed on the map for tagged pages",
			safe => 0,
			rebuild => 1,
		},
		osm_openlayers_url => {
			type => "string",
			example => "http://www.openlayers.org/api/OpenLayers.js",
			description => "Url for the OpenLayers.js file",
			safe => 0,
			rebuild => 1,
		},
		osm_layers => {
			type => "string",
			# NOTE(review): the example is a hash ref even though the
			# declared type is "string" -- confirm the expected shape
			# of this setting against the option's consumers.
			example => { 'OSM', 'GoogleSatellite' },
			description => "Layers to use in the map. Can be either the 'OSM' string or a type option for Google maps (GoogleNormal, GoogleSatellite, GoogleHybrid or GooglePhysical). It can also be an arbitrary URL in a syntax acceptable for OpenLayers.Layer.OSM.url parameter.",
			safe => 0,
			rebuild => 1,
		},
		osm_google_apikey => {
			type => "string",
			example => "",
			description => "Google maps API key, Google layer not used if missing, see https://code.google.com/apis/console/ to get an API key",
			safe => 1,
			rebuild => 1,
		},
}
sub register_rendered_files {
	# Declare the per-map export files (GeoJSON/CSV/KML) as rendered
	# products of $page.  Only done when the page is built standalone
	# ($page eq $dest), not when it is inlined into another page.
	my ($map, $page, $dest) = @_;
	return unless $page eq $dest;
	my %formats = get_formats();
	my %output_of = (
		GeoJSON => "pois.json",
		CSV     => "pois.txt",
		KML     => "pois.kml",
	);
	foreach my $fmt (qw(GeoJSON CSV KML)) {
		will_render($page, "$map/$output_of{$fmt}")
			if $formats{$fmt};
	}
}
sub preprocess {
	# Handle the [[!osm]] directive: record a map display in
	# pagestate and emit the placeholder div that the format hook
	# later fills in with the actual map.
	my %args = @_;
	my $page = $args{page};
	my $destpage = $args{destpage};
	# Location parameters are sanitized by scrub_lonlat() below.
	my $loc = $args{loc};
	my $lat = $args{lat};
	my $lon = $args{lon};
	my $href = $args{href};

	# Scrub everything that ends up in generated HTML.
	my $height = scrub($args{'height'} || "300px", $page, $destpage);
	my $width = scrub($args{'width'} || "500px", $page, $destpage);
	my $float = (defined($args{'right'}) && 'right')
		|| (defined($args{'left'}) && 'left');
	my $zoom = scrub($args{'zoom'} // $config{'osm_default_zoom'} // 15,
		$page, $destpage); # numeric range checked below
	my $map = scrub($args{'map'} || 'map', $page, $destpage);
	my $name = scrub($args{'name'} || $map, $page, $destpage);

	if (defined($lon) || defined($lat) || defined($loc)) {
		($lon, $lat) = scrub_lonlat($loc, $lon, $lat);
	}
	if ($zoom !~ /^\d\d?$/ || $zoom < 2 || $zoom > 18) {
		error("Bad zoom");
	}
	if (! defined $href || ! length $href) {
		# Default link target: this plugin's own CGI map view.
		$href=IkiWiki::cgiurl(
			do => "osm",
			map => $map,
		);
	}
	register_rendered_files($map, $page, $destpage);
	$pagestate{$page}{'osm'}{$map}{'displays'}{$name} = {
		height => $height,
		width => $width,
		float => $float,
		zoom => $zoom,
		fullscreen => 0,
		editable => defined($args{'editable'}),
		lat => $lat,
		lon => $lon,
		href => $href,
		google_apikey => $config{'osm_google_apikey'},
	};
	return "<div id=\"mapdiv-$name\"></div>";
}
# Handle the [[!waypoint]] directive: place a marker for this page on
# a map. Records the waypoint in %pagestate (exported to JSON/CSV/KML
# by savestate), optionally embeds a map display, and returns the
# marker-icon link unless hidden.
sub process_waypoint {
my %params=@_;
my $loc = $params{'loc'}; # sanitized below
my $lat = $params{'lat'}; # sanitized below
my $lon = $params{'lon'}; # sanitized below
my $page = $params{'page'}; # not sanitized?
my $dest = $params{'destpage'}; # not sanitized?
my $hidden = defined($params{'hidden'}); # sanitized here
my ($p) = $page =~ /(?:^|\/)([^\/]+)\/?$/; # shorter page name (basename)
my $name = scrub($params{'name'} || $p, $page, $dest); # sanitized here
my $desc = scrub($params{'desc'} || '', $page, $dest); # sanitized here
my $zoom = scrub($params{'zoom'} // $config{'osm_default_zoom'} // 15, $page, $dest); # sanitized below
my $icon = $config{'osm_default_icon'} || "ikiwiki/images/osm.png"; # sanitized: we trust $config
my $map = scrub($params{'map'} || 'map', $page, $dest); # sanitized here
my $alt = $config{'osm_alt'} ? "alt=\"$config{'osm_alt'}\"" : ''; # sanitized: we trust $config
if ($zoom !~ /^\d\d?$/ || $zoom < 2 || $zoom > 18) {
error("Bad zoom");
}
# Unlike the osm directive, a waypoint requires coordinates.
($lon, $lat) = scrub_lonlat($loc, $lon, $lat);
if (!defined($lat) || !defined($lon)) {
error("Must specify lat and lon");
}
my $tag = $params{'tag'};
# Pick the first of this page's tags that has an icon attached,
# trying the tag both with and without the tagbase prefix.
foreach my $t (keys %{$typedlinks{$page}{'tag'}}) {
if ($icon = get_tag_icon($t)) {
$tag = $t;
last;
}
$t =~ s!/$config{'tagbase'}/!!;
if ($icon = get_tag_icon($t)) {
$tag = $t;
last;
}
}
$icon = urlto($icon, $dest, 1);
$tag = '' unless $tag;
register_rendered_files($map, $page, $dest);
$pagestate{$page}{'osm'}{$map}{'waypoints'}{$name} = {
page => $page,
desc => $desc,
icon => $icon,
tag => $tag,
lat => $lat,
lon => $lon,
# How to link back to the page from the map, not to be
# confused with the URL of the map itself sent to the
# embedded map below. Note: used in generated KML etc file,
# so must be absolute.
href => urlto($page),
};
my $mapurl = IkiWiki::cgiurl(
do => "osm",
map => $map,
lat => $lat,
lon => $lon,
zoom => $zoom,
);
my $output = '';
# embed=... also renders an inline map centered on this waypoint,
# by delegating to the osm directive handler.
if (defined($params{'embed'})) {
$output .= preprocess(%params,
href => $mapurl,
);
}
if (!$hidden) {
$output .= "<a href=\"$mapurl\"><img class=\"img\" src=\"$icon\" $alt /></a>";
}
return $output;
}
# Look up the icon attached to a tag: the file named after
# $config{osm_tag_default_icon} living under the tag's page.
# Returns the relative source path, or undef when no such file exists.
sub get_tag_icon($) {
	my $tag = shift;
	my $candidate = $tag . '/' . $config{'osm_tag_default_icon'};
	return srcfile($candidate) ? $candidate : undef;
}
# Parse and validate coordinates. Accepts either a combined location
# string ($loc, e.g. "45.5N, 10.25E", also DMS forms) or separate
# $lon/$lat values; converts degrees/minutes/seconds and N/S/E/W
# suffixes to signed decimal degrees.
# Returns ($lon, $lat); either may be undef if it was not supplied.
# Calls error() on unparsable input or out-of-range coordinates.
sub scrub_lonlat($$$) {
	my ($loc, $lon, $lat) = @_;
	if ($loc) {
		if ($loc =~ /^\s*(\-?\d+(?:\.\d*°?|(?:°?|\s)\s*\d+(?:\.\d*\'?|(?:\'|\s)\s*\d+(?:\.\d*)?\"?|\'?)°?)[NS]?)\s*\,?\;?\s*(\-?\d+(?:\.\d*°?|(?:°?|\s)\s*\d+(?:\.\d*\'?|(?:\'|\s)\s*\d+(?:\.\d*)?\"?|\'?)°?)[EW]?)\s*$/) {
			$lat = $1;
			$lon = $2;
		}
		else {
			error("Bad loc");
		}
	}
	if (defined($lat)) {
		if ($lat =~ /^(\-?)(\d+)(?:(\.\d*)°?|(?:°|\s)\s*(\d+)(?:(\.\d*)\'?|(?:\'|\s)\s*(\d+(?:\.\d*)?\"?)|\'?)|°?)\s*([NS])?\s*$/) {
			# degrees + decimal fraction + minutes/seconds
			$lat = $2 + ($3//0) + ((($4//0) + (($5//0) + (($6//0)/60.)))/60.);
			if (($1 eq '-') || (($7//'') eq 'S')) {
				$lat = - $lat;
			}
		}
		else {
			error("Bad lat");
		}
	}
	if (defined($lon)) {
		if ($lon =~ /^(\-?)(\d+)(?:(\.\d*)°?|(?:°|\s)\s*(\d+)(?:(\.\d*)\'?|(?:\'|\s)\s*(\d+(?:\.\d*)?\"?)|\'?)|°?)\s*([EW])?$/) {
			$lon = $2 + ($3//0) + ((($4//0) + (($5//0) + (($6//0)/60.)))/60.);
			if (($1 eq '-') || (($7//'') eq 'W')) {
				$lon = - $lon;
			}
		}
		else {
			error("Bad lon");
		}
	}
	# Only range-check coordinates that were actually supplied. The
	# previous unconditional check evaluated undef as 0, emitting
	# uninitialized-value warnings and never catching anything for a
	# missing coordinate.
	if (defined($lat) && ($lat < -90 || $lat > 90)) {
		error("Location out of range");
	}
	if (defined($lon) && ($lon < -180 || $lon > 180)) {
		error("Location out of range");
	}
	return ($lon, $lat);
}
# At save time, collect all waypoints recorded in %pagestate across
# all pages, derive linestrings connecting waypoints whose pages link
# to each other, clear the per-page osm state (it is rebuilt on the
# next parse, which drops removed waypoints), and write out the
# enabled export formats.
sub savestate {
my %waypoints = ();
my %linestrings = ();
# first pass: flatten waypoints into per-map hashes keyed by name
foreach my $page (keys %pagestate) {
if (exists $pagestate{$page}{'osm'}) {
foreach my $map (keys %{$pagestate{$page}{'osm'}}) {
foreach my $name (keys %{$pagestate{$page}{'osm'}{$map}{'waypoints'}}) {
debug("found waypoint $name");
$waypoints{$map}{$name} = $pagestate{$page}{'osm'}{$map}{'waypoints'}{$name};
}
}
}
}
# second pass: connect waypoints along wiki links
foreach my $page (keys %pagestate) {
if (exists $pagestate{$page}{'osm'}) {
foreach my $map (keys %{$pagestate{$page}{'osm'}}) {
# examine the links on this page
foreach my $name (keys %{$pagestate{$page}{'osm'}{$map}{'waypoints'}}) {
if (exists $links{$page}) {
foreach my $otherpage (@{$links{$page}}) {
# NOTE(review): %waypoints is keyed by waypoint *name*, but
# $otherpage is a linked *page* name; these only coincide
# when waypoint names default to the page basename — confirm.
if (exists $waypoints{$map}{$otherpage}) {
push(@{$linestrings{$map}}, [
[ $waypoints{$map}{$name}{'lon'}, $waypoints{$map}{$name}{'lat'} ],
[ $waypoints{$map}{$otherpage}{'lon'}, $waypoints{$map}{$otherpage}{'lat'} ]
]);
}
}
}
}
}
# clear the state, it will be regenerated on the next parse
# the idea here is to clear up removed waypoints...
# (assigning an empty list to this hash slot stores undef)
$pagestate{$page}{'osm'} = ();
}
}
my %formats = get_formats();
if ($formats{'GeoJSON'}) {
writejson(\%waypoints, \%linestrings);
}
if ($formats{'CSV'}) {
writecsvs(\%waypoints, \%linestrings);
}
if ($formats{'KML'}) {
writekml(\%waypoints, \%linestrings);
}
}
# Write one GeoJSON FeatureCollection per map (<destdir>/<map>/pois.json):
# a Point feature per waypoint (carrying the waypoint hash as its
# properties) plus a LineString feature per connecting line.
sub writejson($;$) {
	my ($waypoints, $linestrings) = @_;
	eval q{use JSON};
	error $@ if $@;
	foreach my $map (keys %$waypoints) {
		my @features;
		foreach my $name (keys %{$waypoints->{$map}}) {
			my $point = $waypoints->{$map}{$name};
			push @features, {
				"type" => "Feature",
				"geometry" => {
					"type" => "Point",
					"coordinates" => [ $point->{'lon'}, $point->{'lat'} ],
				},
				"properties" => $point,
			};
		}
		foreach my $line (@{$linestrings->{$map}}) {
			push @features, {
				"type" => "Feature",
				"geometry" => {
					"type" => "LineString",
					"coordinates" => $line,
				},
			};
		}
		my %collection = (
			"type" => "FeatureCollection",
			"features" => \@features,
		);
		writefile("pois.json", $config{destdir} . "/$map", to_json(\%collection));
	}
}
# Write one KML file per map (<destdir>/<map>/pois.kml): a shared
# <Style> per tag icon, a <Placemark> per waypoint, and a <Placemark>
# with a <LineString> per connecting line.
sub writekml($;$) {
my %waypoints = %{$_[0]};
my %linestrings = %{$_[1]};
eval q{use XML::Writer};
error $@ if $@;
foreach my $map (keys %waypoints) {
my $output;
# build the document in-memory so it can go through writefile
my $writer = XML::Writer->new( OUTPUT => \$output,
DATA_MODE => 1, DATA_INDENT => ' ', ENCODING => 'UTF-8');
$writer->xmlDecl();
$writer->startTag("kml", "xmlns" => "http://www.opengis.net/kml/2.2");
$writer->startTag("Document");
# first pass: get the icons
my %tags_map = (); # keep track of tags seen
foreach my $name (keys %{$waypoints{$map}}) {
my %options = %{$waypoints{$map}{$name}};
if (!$tags_map{$options{tag}}) {
debug("found new style " . $options{tag});
# NOTE(review): assigning () stores undef; the slot only
# becomes true via the hashref autovivified below, which is
# what makes this emit each Style exactly once — confirm.
$tags_map{$options{tag}} = ();
$writer->startTag("Style", id => $options{tag});
$writer->startTag("IconStyle");
$writer->startTag("Icon");
$writer->startTag("href");
$writer->characters($options{icon});
$writer->endTag();
$writer->endTag();
$writer->endTag();
$writer->endTag();
}
$tags_map{$options{tag}}{$name} = \%options;
}
# second pass: one Placemark per waypoint
foreach my $name (keys %{$waypoints{$map}}) {
my %options = %{$waypoints{$map}{$name}};
$writer->startTag("Placemark");
$writer->startTag("name");
$writer->characters($name);
$writer->endTag();
$writer->startTag("styleUrl");
$writer->characters('#' . $options{tag});
$writer->endTag();
#$writer->emptyTag('atom:link', href => $options{href});
# to make it easier for us as the atom:link parameter is
# hard to access from javascript
$writer->startTag('href');
$writer->characters($options{href});
$writer->endTag();
$writer->startTag("description");
$writer->characters($options{desc});
$writer->endTag();
$writer->startTag("Point");
$writer->startTag("coordinates");
# KML coordinate order is lon,lat
$writer->characters($options{lon} . "," . $options{lat});
$writer->endTag();
$writer->endTag();
$writer->endTag();
}
my $i = 0;
foreach my $linestring (@{$linestrings{$map}}) {
$writer->startTag("Placemark");
$writer->startTag("name");
$writer->characters("linestring " . $i++);
$writer->endTag();
$writer->startTag("LineString");
$writer->startTag("coordinates");
my $str = '';
foreach my $coord (@{$linestring}) {
$str .= join(',', @{$coord}) . " \n";
}
$writer->characters($str);
$writer->endTag();
$writer->endTag();
$writer->endTag();
}
$writer->endTag();
$writer->endTag();
$writer->end();
writefile("pois.kml", $config{destdir} . "/$map", $output);
}
}
# Write one tab-separated waypoint file per map (<destdir>/<map>/pois.txt)
# in the OpenLayers text-layer format. Linestrings cannot be expressed
# in this format; the second argument is accepted but ignored.
sub writecsvs($;$) {
	my ($waypoints) = @_;
	foreach my $map (keys %$waypoints) {
		my @lines = ("lat\tlon\ttitle\tdescription\ticon\ticonSize\ticonOffset\n");
		foreach my $name (keys %{$waypoints->{$map}}) {
			my %options = %{$waypoints->{$map}{$name}};
			# description column doubles as a link back to the page
			push @lines, join("\t",
				$options{'lat'},
				$options{'lon'},
				$name,
				$options{'desc'} . '<br /><a href="' . $options{'page'} . '">' . $name . "</a>",
				$options{'icon'}) . "\n";
		}
		writefile("pois.txt", $config{destdir} . "/$map", join('', @lines));
	}
}
# Pipe some data through the HTML scrubber when the htmlscrubber
# plugin is loaded; otherwise return it untouched.
# (Approach borrowed from the meta plugin.)
sub scrub($$$) {
	my ($content, $page, $destpage) = @_;
	if (! IkiWiki::Plugin::htmlscrubber->can("sanitize")) {
		return $content;
	}
	return IkiWiki::Plugin::htmlscrubber::sanitize(
		content => $content,
		page => $page,
		destpage => $destpage,
	);
}
# Format hook (pattern taken from toggle.pm): when the rendered page
# contains a map placeholder div, inject the map-loader javascript
# just before </body>, or append it when there is no body tag
# (e.g. in preview mode).
sub format (@) {
	my %params=@_;
	return $params{content}
		unless $params{content}=~m!<div[^>]*id="mapdiv-[^"]*"[^>]*>!g;
	unless ($params{content}=~s!</body>!include_javascript($params{page})."</body>"!em) {
		# no <body> tag, probably in preview mode
		$params{content} .= include_javascript($params{page});
	}
	return $params{content};
}
# Return the first (preferred) entry of the comma-separated
# osm_format setting, defaulting it to KML when unset or empty.
sub preferred_format() {
	$config{'osm_format'} = 'KML'
		unless defined $config{'osm_format'} && $config{'osm_format'};
	my @formats = split(/, */, $config{'osm_format'});
	return $formats[0];
}
# Return a hash mapping each enabled osm_format entry (GeoJSON, CSV,
# KML) to a true value, defaulting the setting to KML when unset.
sub get_formats() {
	$config{'osm_format'} = 'KML'
		unless defined $config{'osm_format'} && $config{'osm_format'};
	return map { $_ => 1 } split(/, */, $config{'osm_format'});
}
# Build the <script> markup for every map display recorded on $page
# in %pagestate. Returns the empty string when the page embeds no maps.
sub include_javascript ($) {
	my $page=shift;
	my $setup = '';
	if (exists $pagestate{$page}{'osm'}) {
		my $maps = $pagestate{$page}{'osm'};
		foreach my $map (keys %$maps) {
			my $displays = $maps->{$map}{'displays'};
			foreach my $name (keys %$displays) {
				$setup .= map_setup_code($map, $name, %{$displays->{$name}});
			}
		}
	}
	return '' unless $setup;
	return embed_map_code($page) .
		"<script type=\"text/javascript\" charset=\"utf-8\">$setup</script>";
}
# CGI endpoint (do=osm): serve a minimal standalone page containing a
# fullscreen, editable map. lat/lon/zoom are not read here; osm.js
# picks them out of the query string client-side (urlParams).
sub cgi($) {
	my $cgi=shift;
	my $do = $cgi->param('do');
	return unless defined $do && $do eq "osm";
	IkiWiki::loadindex();
	IkiWiki::decode_cgi_utf8($cgi);
	# restrict the map name so it is safe to interpolate into markup
	my $map = $cgi->param('map');
	if (!defined $map || $map !~ /^[a-z]*$/) {
		error("invalid map parameter");
	}
	print "Content-Type: text/html\r\n";
	print ("\r\n");
	print "<html><body>";
	print "<div id=\"mapdiv-$map\"></div>";
	print embed_map_code();
	print "<script type=\"text/javascript\" charset=\"utf-8\">";
	print map_setup_code($map, $map,
		lat => "urlParams['lat']",
		lon => "urlParams['lon']",
		zoom => "urlParams['zoom']",
		fullscreen => 1,
		editable => 1,
		google_apikey => $config{'osm_google_apikey'},
	);
	print "</script>";
	print "</body></html>";
	exit 0;
}
# HTML that loads the OpenLayers library, our osm.js glue code, and —
# when an API key is configured — the Google Maps API.
sub embed_map_code(;$) {
	my $page=shift;
	my $olurl = $config{osm_openlayers_url} || "http://www.openlayers.org/api/OpenLayers.js";
	my @scripts = (
		'<script src="'.$olurl.'" type="text/javascript" charset="utf-8"></script>'."\n",
		'<script src="'.urlto("ikiwiki/osm.js", $page).'" type="text/javascript" charset="utf-8"></script>'."\n",
	);
	if ($config{'osm_google_apikey'}) {
		push @scripts, '<script src="http://maps.google.com/maps?file=api&amp;v=2&amp;key='.$config{'osm_google_apikey'}.'&sensor=false" type="text/javascript" charset="utf-8"></script>';
	}
	return join('', @scripts);
}
# Emit the javascript call that initializes one map display: the
# options (plus export-file URLs, preferred format, map tile URL and
# layer list) are serialized to JSON and handed to mapsetup() in osm.js.
sub map_setup_code($;@) {
	my $map=shift;
	my $name=shift;
	my %options=@_;
	eval q{use JSON};
	error $@ if $@;
	$options{'format'} = preferred_format();
	my %formats = get_formats();
	my %export_for = (
		'GeoJSON' => [ 'jsonurl', "pois.json" ],
		'CSV' => [ 'csvurl', "pois.txt" ],
		'KML' => [ 'kmlurl', "pois.kml" ],
	);
	foreach my $format (keys %export_for) {
		next unless $formats{$format};
		my ($key, $file) = @{$export_for{$format}};
		$options{$key} = urlto($map."/".$file);
	}
	if ($config{osm_map_url}) {
		$options{'mapurl'} = $config{osm_map_url};
	}
	$options{'layers'} = $config{osm_layers};
	return "mapsetup('mapdiv-$name', " . to_json(\%options) . ");";
}
1;

View File

@ -96,6 +96,72 @@ sub setpassword ($$;$) {
else {
IkiWiki::userinfo_set($user, $field, $password);
}
# Setting the password clears any passwordless login token.
if ($field ne 'passwordless') {
IkiWiki::userinfo_set($user, "passwordless", "");
}
}
# Generate a token that can be used to log $user in, storing it in
# $tokenfield. A cgi session id is used so the token is as hard to
# guess as any cgi session. Unless $reversable is set, the token is
# stored via setpassword (hashed when Authen::Passphrase is available)
# and so cannot be read back later.
sub gentoken ($$;$) {
	my ($user, $tokenfield, $reversable) = @_;
	eval q{use CGI::Session};
	error($@) if $@;
	my $token = CGI::Session->new->id;
	if ($reversable) {
		IkiWiki::userinfo_set($user, $tokenfield, $token);
	}
	else {
		setpassword($user, $token, $tokenfield);
	}
	return $token;
}
# An anonymous user has no normal password, only a passwordless login
# token. Given an email address, this sets up such a user for that email,
# unless one already exists, and returns the username.
# Returns undef when a user exists for the email but has no
# passwordless token (i.e. it is a regular account).
sub anonuser ($) {
my $email=shift;
# Want a username for this email that won't overlap with any other.
my $user=$email;
$user=~s/@/_/g;
my $userinfo=IkiWiki::userinfo_retrieve();
# no such user yet (or corrupt entry): create the account with a
# readable (reversable) passwordless token
if (! exists $userinfo->{$user} || ! ref $userinfo->{$user}) {
if (IkiWiki::userinfo_setall($user, {
'email' => $email,
'regdate' => time})) {
gentoken($user, "passwordless", 1);
return $user;
}
else {
error(gettext("Error creating account."));
}
}
elsif (defined anonusertoken($userinfo->{$user})) {
return $user;
}
else {
return undef;
}
}
# Return the passwordless login token stored in a userinfo hash,
# or undef when none has been set.
sub anonusertoken ($) {
	my $userhash=shift;
	my $token = exists $userhash->{passwordless}
		? $userhash->{passwordless}
		: undef;
	return (defined $token && length $token) ? $token : undef;
}
sub formbuilder_setup (@) {
@ -277,20 +343,13 @@ sub formbuilder (@) {
if (! length $email) {
error(gettext("No email address, so cannot email password reset instructions."));
}
# Store a token that can be used once
# to log the user in. This needs to be hard
# to guess. Generating a cgi session id will
# make it as hard to guess as any cgi session.
eval q{use CGI::Session};
error($@) if $@;
my $token = CGI::Session->new->id;
setpassword($user_name, $token, "resettoken");
my $token=gentoken($user_name, "resettoken");
my $template=template("passwordmail.tmpl");
$template->param(
user_name => $user_name,
passwordurl => IkiWiki::cgiurl(
passwordurl => IkiWiki::cgiurl_abs(
'do' => "reset",
'name' => $user_name,
'token' => $token,
@ -329,7 +388,7 @@ sub formbuilder (@) {
elsif ($form->title eq "preferences") {
if ($form->submitted eq "Save Preferences" && $form->validate) {
my $user_name=$form->field('name');
if ($form->field("password") && length $form->field("password")) {
if (defined $form->field("password") && length $form->field("password")) {
setpassword($user_name, $form->field('password'));
}
}
@ -356,6 +415,22 @@ sub sessioncgi ($$) {
IkiWiki::cgi_prefs($q, $session);
exit;
}
elsif ($q->param('do') eq 'tokenauth') {
my $name=$q->param("name");
my $token=$q->param("token");
if (! defined $name || ! defined $token ||
! length $name || ! length $token) {
error(gettext("incorrect url"));
}
if (! checkpassword($name, $token, "passwordless")) {
error(gettext("access denied"));
}
$session->param("name", $name);
IkiWiki::cgi_prefs($q, $session);
exit;
}
elsif ($q->param("do") eq "register") {
# After registration, need to go somewhere, so show prefs page.
$session->param(postsignin => "do=prefs");

View File

@ -13,7 +13,7 @@ sub import {
hook(type => "needsbuild", id => "pinger", call => \&needsbuild);
hook(type => "preprocess", id => "ping", call => \&preprocess);
hook(type => "delete", id => "pinger", call => \&ping);
hook(type => "change", id => "pinger", call => \&ping);
hook(type => "rendered", id => "pinger", call => \&ping);
}
sub getsetup () {

View File

@ -23,7 +23,6 @@ use File::Copy;
use File::Spec;
use File::Temp;
use Memoize;
use UNIVERSAL;
my ($master_language_code, $master_language_name);
my %translations;
@ -48,7 +47,7 @@ sub import {
hook(type => "pagetemplate", id => "po", call => \&pagetemplate, last => 1);
hook(type => "rename", id => "po", call => \&renamepages, first => 1);
hook(type => "delete", id => "po", call => \&mydelete);
hook(type => "change", id => "po", call => \&change);
hook(type => "rendered", id => "po", call => \&rendered);
hook(type => "checkcontent", id => "po", call => \&checkcontent);
hook(type => "canremove", id => "po", call => \&canremove);
hook(type => "canrename", id => "po", call => \&canrename);
@ -428,7 +427,7 @@ sub mydelete (@) {
map { deletetranslations($_) } grep istranslatablefile($_), @deleted;
}
sub change (@) {
sub rendered (@) {
my @rendered=@_;
my $updated_po_files=0;
@ -1103,7 +1102,7 @@ sub deletetranslations ($) {
IkiWiki::rcs_remove($_);
}
else {
IkiWiki::prune("$config{srcdir}/$_");
IkiWiki::prune("$config{srcdir}/$_", $config{srcdir});
}
} @todelete;

View File

@ -23,11 +23,13 @@ sub getsetup () {
my %pagenum;
sub preprocess (@) {
my %params=(open => "yes", total => "yes", percent => "yes", @_);
my %params=(open => "yes", total => "yes", percent => "yes",
expandable => "no", @_);
my $open=IkiWiki::yesno($params{open});
my $showtotal=IkiWiki::yesno($params{total});
my $showpercent=IkiWiki::yesno($params{percent});
my $expandable=IkiWiki::yesno($params{expandable});
$pagenum{$params{page}}++;
my %choices;
@ -74,6 +76,19 @@ sub preprocess (@) {
$ret.="</form>\n";
}
}
if ($expandable && $open && exists $config{cgiurl}) {
$ret.="<p>\n";
$ret.="<form method=\"POST\" action=\"".IkiWiki::cgiurl()."\">\n";
$ret.="<input type=\"hidden\" name=\"do\" value=\"poll\" />\n";
$ret.="<input type=\"hidden\" name=\"num\" value=\"$pagenum{$params{page}}\" />\n";
$ret.="<input type=\"hidden\" name=\"page\" value=\"$params{page}\" />\n";
$ret.=gettext("Write in").": <input name=\"choice\" size=50 />\n";
$ret.="<input type=\"submit\" value=\"".gettext("vote")."\" />\n";
$ret.="</form>\n";
$ret.="</p>\n";
}
if ($showtotal) {
$ret.="<span>".gettext("Total votes:")." $total</span>\n";
}
@ -85,7 +100,7 @@ sub sessioncgi ($$) {
my $session=shift;
if (defined $cgi->param('do') && $cgi->param('do') eq "poll") {
my $choice=decode_utf8($cgi->param('choice'));
if (! defined $choice) {
if (! defined $choice || not length $choice) {
error("no choice specified");
}
my $num=$cgi->param('num');
@ -118,7 +133,14 @@ sub sessioncgi ($$) {
my $params=shift;
return "\\[[$prefix $params]]" if $escape;
if (--$num == 0) {
$params=~s/(^|\s+)(\d+)\s+"?\Q$choice\E"?(\s+|$)/$1.($2+1)." \"$choice\"".$3/se;
if ($params=~s/(^|\s+)(\d+)\s+"?\Q$choice\E"?(\s+|$)/$1.($2+1)." \"$choice\"".$3/se) {
}
elsif ($params=~/expandable=(\w+)/
&& IkiWiki::yesno($1)) {
$choice=~s/["\]\n\r]//g;
$params.=" 1 \"$choice\""
if length $choice;
}
if (defined $oldchoice) {
$params=~s/(^|\s+)(\d+)\s+"?\Q$oldchoice\E"?(\s+|$)/$1.($2-1 >=0 ? $2-1 : 0)." \"$oldchoice\"".$3/se;
}

View File

@ -165,6 +165,7 @@ sub store ($$$) {
# Limit pages to first 10, and add links to the changed pages.
my $is_excess = exists $change->{pages}[10];
delete @{$change->{pages}}[10 .. @{$change->{pages}}] if $is_excess;
my $has_diffurl=0;
$change->{pages} = [
map {
if (length $config{cgiurl}) {
@ -180,6 +181,9 @@ sub store ($$$) {
else {
$_->{link} = pagetitle($_->{page});
}
if (defined $_->{diffurl}) {
$has_diffurl=1;
}
$_;
} @{$change->{pages}}
@ -227,6 +231,8 @@ sub store ($$$) {
wikiname => $config{wikiname},
);
$template->param(has_diffurl => 1) if $has_diffurl;
$template->param(permalink => urlto($config{recentchangespage})."#change-".titlepage($change->{rev}))
if exists $config{url};

View File

@ -9,10 +9,12 @@ use HTML::Entities;
my $maxlines=200;
sub import {
add_underlay("javascript");
hook(type => "getsetup", id => "recentchangesdiff",
call => \&getsetup);
hook(type => "pagetemplate", id => "recentchangesdiff",
call => \&pagetemplate);
hook(type => "format", id => "recentchangesdiff.pm", call => \&format);
}
sub getsetup () {
@ -55,4 +57,24 @@ sub pagetemplate (@) {
}
}
sub format (@) {
my %params=@_;
if (! ($params{content}=~s!^(<body[^>]*>)!$1.include_javascript($params{page})!em)) {
# no <body> tag, probably in preview mode
$params{content}=include_javascript(undef).$params{content};
}
return $params{content};
}
# taken verbatim from toggle.pm
sub include_javascript ($) {
my $from=shift;
return '<script src="'.urlto("ikiwiki/ikiwiki.js", $from).
'" type="text/javascript" charset="utf-8"></script>'."\n".
'<script src="'.urlto("ikiwiki/toggle.js", $from).
'" type="text/javascript" charset="utf-8"></script>';
}
1

View File

@ -22,6 +22,13 @@ sub getsetup () {
},
}
sub allowed_dirs {
return grep { defined $_ } (
$config{srcdir},
$IkiWiki::Plugin::transient::transientdir,
);
}
sub check_canremove ($$$) {
my $page=shift;
my $q=shift;
@ -33,12 +40,22 @@ sub check_canremove ($$$) {
htmllink("", "", $page, noimageinline => 1)));
}
# Must exist on disk, and be a regular file.
# Must exist in either the srcdir or a suitable underlay (e.g.
# transient underlay), and be a regular file.
my $file=$pagesources{$page};
if (! -e "$config{srcdir}/$file") {
my $dir;
foreach my $srcdir (allowed_dirs()) {
if (-e "$srcdir/$file") {
$dir = $srcdir;
last;
}
}
if (! defined $dir) {
error(sprintf(gettext("%s is not in the srcdir, so it cannot be deleted"), $file));
}
elsif (-l "$config{srcdir}/$file" && ! -f _) {
elsif (-l "$dir/$file" && ! -f _) {
error(sprintf(gettext("%s is not a file"), $file));
}
@ -46,7 +63,7 @@ sub check_canremove ($$$) {
# This is sorta overkill, but better safe than sorry.
if (! defined pagetype($pagesources{$page})) {
if (IkiWiki::Plugin::attachment->can("check_canattach")) {
IkiWiki::Plugin::attachment::check_canattach($session, $page, "$config{srcdir}/$file");
IkiWiki::Plugin::attachment::check_canattach($session, $page, "$dir/$file");
}
else {
error("removal of attachments is not allowed");
@ -124,7 +141,7 @@ sub removal_confirm ($$@) {
my $f=IkiWiki::Plugin::attachment::is_held_attachment($page);
if (defined $f) {
require IkiWiki::Render;
IkiWiki::prune($f);
IkiWiki::prune($f, "$config{wikistatedir}/attachments");
}
}
}
@ -223,21 +240,34 @@ sub sessioncgi ($$) {
require IkiWiki::Render;
if ($config{rcs}) {
IkiWiki::disable_commit_hook();
foreach my $file (@files) {
IkiWiki::rcs_remove($file);
}
my $rcs_removed = 1;
foreach my $file (@files) {
foreach my $srcdir (allowed_dirs()) {
if (-e "$srcdir/$file") {
if ($srcdir eq $config{srcdir} && $config{rcs}) {
IkiWiki::rcs_remove($file);
$rcs_removed = 1;
}
else {
IkiWiki::prune("$srcdir/$file", $srcdir);
}
}
}
}
if ($config{rcs}) {
if ($rcs_removed) {
IkiWiki::rcs_commit_staged(
message => gettext("removed"),
session => $session,
);
}
IkiWiki::rcs_commit_staged(
message => gettext("removed"),
session => $session,
);
IkiWiki::enable_commit_hook();
IkiWiki::rcs_update();
}
else {
foreach my $file (@files) {
IkiWiki::prune("$config{srcdir}/$file");
}
}
IkiWiki::refresh();
IkiWiki::saveindex();

View File

@ -206,14 +206,22 @@ sub rename_start ($$$$) {
exit 0;
}
sub postrename ($;$$$) {
sub postrename ($$$;$$) {
my $cgi=shift;
my $session=shift;
my $src=shift;
my $dest=shift;
my $attachment=shift;
# Load saved form state and return to edit page.
my $postrename=CGI->new($session->param("postrename"));
# Load saved form state and return to edit page, using stored old
# cgi state. Or, if the rename was not started on the edit page,
# return to the renamed page.
my $postrename=$session->param("postrename");
if (! defined $postrename) {
IkiWiki::redirect($cgi, urlto(defined $dest ? $dest : $src));
exit;
}
my $oldcgi=CGI->new($postrename);
$session->clear("postrename");
IkiWiki::cgi_savesession($session);
@ -222,21 +230,21 @@ sub postrename ($;$$$) {
# They renamed the page they were editing. This requires
# fixups to the edit form state.
# Tweak the edit form to be editing the new page.
$postrename->param("page", $dest);
$oldcgi->param("page", $dest);
}
# Update edit form content to fix any links present
# on it.
$postrename->param("editcontent",
$oldcgi->param("editcontent",
renamepage_hook($dest, $src, $dest,
$postrename->param("editcontent")));
$oldcgi->param("editcontent")));
# Get a new edit token; old was likely invalidated.
$postrename->param("rcsinfo",
$oldcgi->param("rcsinfo",
IkiWiki::rcs_prepedit($pagesources{$dest}));
}
IkiWiki::cgi_editpage($postrename, $session);
IkiWiki::cgi_editpage($oldcgi, $session);
}
sub formbuilder (@) {
@ -291,16 +299,16 @@ sub sessioncgi ($$) {
my $session=shift;
my ($form, $buttons)=rename_form($q, $session, Encode::decode_utf8($q->param("page")));
IkiWiki::decode_form_utf8($form);
my $src=$form->field("page");
if ($form->submitted eq 'Cancel') {
postrename($session);
postrename($q, $session, $src);
}
elsif ($form->submitted eq 'Rename' && $form->validate) {
IkiWiki::checksessionexpiry($q, $session, $q->param('sid'));
# These untaints are safe because of the checks
# performed in check_canrename later.
my $src=$form->field("page");
my $srcfile=IkiWiki::possibly_foolish_untaint($pagesources{$src})
if exists $pagesources{$src};
my $dest=IkiWiki::possibly_foolish_untaint(titlepage($form->field("new_name")));
@ -324,7 +332,7 @@ sub sessioncgi ($$) {
IkiWiki::Plugin::attachment::is_held_attachment($src);
if ($held) {
rename($held, IkiWiki::Plugin::attachment::attachment_holding_location($dest));
postrename($session, $src, $dest, $q->param("attachment"))
postrename($q, $session, $src, $dest, $q->param("attachment"))
unless defined $srcfile;
}
@ -430,7 +438,7 @@ sub sessioncgi ($$) {
$renamesummary.=$template->output;
}
postrename($session, $src, $dest, $q->param("attachment"));
postrename($q, $session, $src, $dest, $q->param("attachment"));
}
else {
IkiWiki::showform($form, $buttons, $session, $q);

View File

@ -7,7 +7,7 @@ use IkiWiki 3.00;
sub import {
hook(type => "getsetup", id => "rsync", call => \&getsetup);
hook(type => "change", id => "rsync", call => \&postrefresh);
hook(type => "rendered", id => "rsync", call => \&postrefresh);
hook(type => "delete", id => "rsync", call => \&postrefresh);
}

View File

@ -73,11 +73,21 @@ sub shortcut_expand ($$@) {
add_depends($params{destpage}, "shortcuts");
my $text=join(" ", @params);
my $encoded_text=$text;
$encoded_text=~s/([^A-Za-z0-9])/sprintf("%%%02X", ord($1))/seg;
$url=~s{\%([sS])}{
$1 eq 's' ? $encoded_text : $text
$url=~s{\%([sSW])}{
if ($1 eq 's') {
my $t=$text;
$t=~s/([^A-Za-z0-9])/sprintf("%%%02X", ord($1))/seg;
$t;
}
elsif ($1 eq 'S') {
$text;
}
elsif ($1 eq 'W') {
my $t=Encode::encode_utf8($text);
$t=~s/([^A-Za-z0-9])/sprintf("%%%02X", ord($1))/seg;
$t;
}
}eg;
$text=~s/_/ /g;

View File

@ -26,7 +26,8 @@ sub import {
hook(type => "templatefile", id => "skeleton", call => \&templatefile);
hook(type => "pageactions", id => "skeleton", call => \&pageactions);
hook(type => "delete", id => "skeleton", call => \&delete);
hook(type => "change", id => "skeleton", call => \&change);
hook(type => "rendered", id => "skeleton", call => \&rendered);
hook(type => "changes", id => "skeleton", call => \&changes);
hook(type => "cgi", id => "skeleton", call => \&cgi);
hook(type => "auth", id => "skeleton", call => \&auth);
hook(type => "sessioncgi", id => "skeleton", call => \&sessioncgi);
@ -53,7 +54,6 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
section => "misc",
},
skeleton => {
type => "boolean",
@ -167,10 +167,16 @@ sub delete (@) {
debug("skeleton plugin told that files were deleted: @files");
}
sub change (@) {
sub rendered (@) {
my @files=@_;
debug("skeleton plugin told that changed files were rendered: @files");
debug("skeleton plugin told that files were rendered: @files");
}
sub changes (@) {
my @files=@_;
debug("skeleton plugin told that files were changed: @files");
}
sub cgi ($) {

View File

@ -5,6 +5,7 @@ use warnings;
use strict;
use IkiWiki;
use POSIX qw(setlocale LC_CTYPE);
use URI::Escape q{uri_escape_utf8};
sub import {
hook(type => "checkconfig", id => "svn", call => \&checkconfig);
@ -292,7 +293,8 @@ sub rcs_recentchanges ($) {
}
my $diffurl=defined $config{diffurl} ? $config{diffurl} : "";
$diffurl=~s/\[\[file\]\]/$file/g;
my $efile = uri_escape_utf8($file);
$diffurl=~s/\[\[file\]\]/$efile/g;
$diffurl=~s/\[\[r1\]\]/$rev - 1/eg;
$diffurl=~s/\[\[r2\]\]/$rev/g;

View File

@ -4,6 +4,7 @@ package IkiWiki::Plugin::tla;
use warnings;
use strict;
use IkiWiki;
use URI::Escape q{uri_escape_utf8};
sub import {
hook(type => "checkconfig", id => "tla", call => \&checkconfig);
@ -224,7 +225,8 @@ sub rcs_recentchanges ($) {
foreach my $file (@paths) {
my $diffurl=defined $config{diffurl} ? $config{diffurl} : "";
$diffurl=~s/\[\[file\]\]/$file/g;
my $efile = uri_escape_utf8($file);
$diffurl=~s/\[\[file\]\]/$efile/g;
$diffurl=~s/\[\[rev\]\]/$change/g;
push @pages, {
page => pagename($file),

View File

@ -0,0 +1,467 @@
#!/usr/bin/perl
# Copyright © 2008-2011 Joey Hess
# Copyright © 2009-2012 Simon McVittie <http://smcv.pseudorandom.co.uk/>
# Licensed under the GNU GPL, version 2, or any later version published by the
# Free Software Foundation
package IkiWiki::Plugin::trail;
use warnings;
use strict;
use IkiWiki 3.00;
# Register trail's hooks with ikiwiki. The four preprocess directives
# are registered with scan => 1 so that trail contents are collected
# during the scan pass as well as during rendering.
sub import {
	hook(type => "getsetup", id => "trail", call => \&getsetup);
	hook(type => "needsbuild", id => "trail", call => \&needsbuild);
	my @directives = (
		[trailoptions => \&preprocess_trailoptions],
		[trailitem => \&preprocess_trailitem],
		[trailitems => \&preprocess_trailitems],
		[traillink => \&preprocess_traillink],
	);
	foreach my $d (@directives) {
		hook(type => "preprocess", id => $d->[0],
			call => $d->[1], scan => 1);
	}
	hook(type => "pagetemplate", id => "trail", call => \&pagetemplate);
	hook(type => "build_affected", id => "trail", call => \&build_affected);
}
# Page state
#
# If a page $T is a trail, then it can have
#
# * $pagestate{$T}{trail}{contents}
# Reference to an array of lists each containing either:
# - [pagenames => "page1", "page2"]
# Those literal pages
# - [link => "link"]
# A link specification, pointing to the same page that [[link]]
# would select
# - [pagespec => "posts/*", "age", 0]
# A match by pagespec; the third array element is the sort order
# and the fourth is whether to reverse sorting
#
# * $pagestate{$T}{trail}{sort}
# A sorting order; if absent or undef, the trail is in the order given
# by the links that form it
#
# * $pagestate{$T}{trail}{circular}
# True if this trail is circular (i.e. going "next" from the last item is
# allowed, and takes you back to the first)
#
# * $pagestate{$T}{trail}{reverse}
# True if C<sort> is to be reversed.
#
# If a page $M is a member of a trail $T, then it has
#
# * $pagestate{$M}{trail}{item}{$T}[0]
# The page before this one in C<$T> at the last rebuild, or undef.
#
# * $pagestate{$M}{trail}{item}{$T}[1]
# The page after this one in C<$T> at the last refresh, or undef.
# Plugin metadata for websetup: safe to enable; rebuild cost unknown
# (undef), since enabling/disabling trail may change rendered pages.
sub getsetup () {
	my %setup = (
		plugin => {
			safe => 1,
			rebuild => undef,
		},
	);
	return %setup;
}
# Cache of pages' titles as they were at the previous build, keyed by
# page name; filled in by needsbuild() and consulted by trails_differ()
# so members can be rebuilt when a neighbour's title changes.
my %old_trail_titles;
# needsbuild hook: for each page holding trail state that is about to
# be rebuilt, remember its current title (so title changes can be
# detected later) and discard its trail state, which the preprocess
# hooks will repopulate during the rebuild.
sub needsbuild (@) {
	my $needsbuild = shift;

	PAGE: foreach my $page (keys %pagestate) {
		next PAGE unless exists $pagestate{$page}{trail};
		next PAGE unless exists $pagesources{$page};
		my $src = $pagesources{$page};
		next PAGE unless grep { $_ eq $src } @$needsbuild;

		# Remember its title, so we can know whether it changed.
		$old_trail_titles{$page} = title_of($page);

		# Remove state; it will be re-added if the preprocessor
		# directive is still there during the rebuild. {item} is
		# the only thing that's added for items, not trails, and
		# it's harmless to delete that - the item is being rebuilt
		# anyway.
		delete $pagestate{$page}{trail};
	}

	return $needsbuild;
}
# True once any preprocess hook has run in scan mode, so the later
# render-mode invocation can skip re-collecting trail contents.
my $scanned = 0;

# Handle [[!trailoptions]]: record the circular/sort/reverse options
# for this trail in its page state. Produces no output.
sub preprocess_trailoptions (@) {
	my %params = @_;
	my $page = $params{page};

	$pagestate{$page}{trail}{circular} = IkiWiki::yesno($params{circular})
		if exists $params{circular};

	$pagestate{$page}{trail}{sort} = $params{sort}
		if exists $params{sort};

	$pagestate{$page}{trail}{reverse} = $params{reverse}
		if exists $params{reverse};

	return "";
}
# Handle [[!trailitem link]]: add a single page (named by a wikilink)
# to this trail's contents. Produces no visible output.
#
# Fix: dropped the unused local `my $trail = $params{page};` that the
# original declared but never read.
sub preprocess_trailitem (@) {
	my $link = shift;
	shift;

	# Avoid collecting everything again in the preprocess (render)
	# stage if we already did in the scan stage. wantarray is defined
	# when called for output, undef when called for scanning.
	if (defined wantarray) {
		return "" if $scanned;
	}
	else {
		$scanned = 1;
	}

	my %params = @_;

	$link = linkpage($link);

	add_link($params{page}, $link, 'trail');
	push @{$pagestate{$params{page}}{trail}{contents}}, [link => $link];

	return "";
}
# Handle [[!trailitems]]: add members to this trail either by pagespec
# (pages=) or by a space-separated list of literal page names
# (pagenames=). Produces no visible output.
sub preprocess_trailitems (@) {
	my %params = @_;

	# Only collect once: the scan stage sets $scanned, and the later
	# render-stage call (list context) then becomes a no-op.
	if (defined wantarray) {
		return "" if $scanned;
	}
	else {
		$scanned = 1;
	}

	# Trail members from a pagespec ought to be in some sort of order,
	# and path is a nice obvious default.
	$params{sort} = 'path' unless exists $params{sort};
	$params{reverse} = 'no' unless exists $params{reverse};

	if (exists $params{pages}) {
		my $spec = ["pagespec" => $params{pages}, $params{sort},
			IkiWiki::yesno($params{reverse})];
		push @{$pagestate{$params{page}}{trail}{contents}}, $spec;
	}

	if (exists $params{pagenames}) {
		my @names = split ' ', $params{pagenames};
		push @{$pagestate{$params{page}}{trail}{contents}},
			[pagenames => @names];
	}

	return "";
}
# Handle [[!traillink text|page]]: add a member to this trail and emit
# an ordinary htmllink to it. Link text defaults to the captured text
# before "|", but an explicit text= parameter wins.
sub preprocess_traillink (@) {
	my $link = shift;
	shift;

	my %params = @_;
	my $trail = $params{page};
	# NOTE(review): if this match ever failed, $1/$2 would retain
	# values from an earlier match; presumably (.+) always matches a
	# non-empty $link here — confirm before relying on it.
	$link =~ qr{
			(?:
				([^\|]+)	# 1: link text
				\|		# followed by |
			)?			# optional

			(.+)			# 2: page to link to
		}x;

	my $linktext = $1;
	$link = linkpage($2);

	add_link($params{page}, $link, 'trail');

	# avoid collecting everything in the preprocess stage if we already
	# did in the scan stage
	my $already;
	if (defined wantarray) {
		$already = $scanned;
	}
	else {
		$scanned = 1;
	}

	push @{$pagestate{$params{page}}{trail}{contents}}, [link => $link] unless $already;

	# Bare captured link text is normalized like a page title; an
	# explicit text= parameter is used verbatim and takes precedence.
	if (defined $linktext) {
		$linktext = pagetitle($linktext);
	}

	if (exists $params{text}) {
		$linktext = $params{text};
	}

	if (defined $linktext) {
		return htmllink($trail, $params{destpage},
			$link, linktext => $linktext);
	}

	return htmllink($trail, $params{destpage}, $link);
}
# In-memory caches rebuilt by prerender() on each run.
#
# trail => [member1, member2]
my %trail_to_members;

# member => { trail => [prev, next] }
# e.g. if %trail_to_members = (
#	trail1 => ["member1", "member2"],
#	trail2 => ["member0", "member1"],
# )
#
# then $member_to_trails{member1} = {
#	trail1 => [undef, "member2"],
#	trail2 => ["member0", undef],
# }
my %member_to_trails;

# member => 1 for each member whose prev/next links (or their titles)
# changed since the last build, so it must be rebuilt.
my %rebuild_trail_members;
# Compare a member's old trail links ($old) with the freshly computed
# ones ($new); both are hashrefs of trail => [prev, next]. Returns true
# if any trail was added or removed, any prev/next neighbour changed,
# or the title of the trail or of a neighbour changed since the cached
# %old_trail_titles snapshot.
sub trails_differ {
	my ($old, $new) = @_;

	# True if $page's title was cached at rebuild time and has
	# since changed.
	my $title_changed = sub {
		my $page = shift;
		return exists $old_trail_titles{$page} &&
			title_of($page) ne $old_trail_titles{$page};
	};

	foreach my $trail (keys %$new) {
		return 1 unless exists $old->{$trail};
	}

	foreach my $trail (keys %$old) {
		return 1 unless exists $new->{$trail};
		return 1 if $title_changed->($trail);

		# Normalize undef neighbours to "" so string comparison
		# treats "no neighbour" consistently.
		my ($old_p, $old_n) = map { defined $_ ? $_ : "" } @{$old->{$trail}};
		my ($new_p, $new_n) = map { defined $_ ? $_ : "" } @{$new->{$trail}};

		return 1 if $old_p ne $new_p;
		return 1 if $title_changed->($old_p);
		return 1 if $old_n ne $new_n;
		return 1 if $title_changed->($old_n);
	}

	return 0;
}
# Guard so the expensive trail computation below runs at most once
# per ikiwiki run.
my $done_prerender = 0;

# Resolve every trail's contents specification (pagespecs, literal
# pagenames, links) into ordered member lists, filling in
# %trail_to_members and %member_to_trails, then flag in
# %rebuild_trail_members every member whose neighbours changed.
sub prerender {
	return if $done_prerender;

	%trail_to_members = ();
	%member_to_trails = ();

	foreach my $trail (keys %pagestate) {
		next unless exists $pagestate{$trail}{trail}{contents};

		my $members = [];
		my @contents = @{$pagestate{$trail}{trail}{contents}};

		# Expand each content item into zero or more member pages.
		foreach my $c (@contents) {
			if ($c->[0] eq 'pagespec') {
				# [pagespec => spec, sort, reverse]
				push @$members, pagespec_match_list($trail,
					$c->[1], sort => $c->[2],
					reverse => $c->[3]);
			}
			elsif ($c->[0] eq 'pagenames') {
				my @pagenames = @$c;
				shift @pagenames;
				foreach my $page (@pagenames) {
					if (exists $pagesources{$page}) {
						push @$members, $page;
					}
					else {
						# rebuild trail if it turns up
						add_depends($trail, $page, deptype("presence"));
					}
				}
			}
			elsif ($c->[0] eq 'link') {
				# Resolve like a wikilink; skip dangling links.
				my $best = bestlink($trail, $c->[1]);
				push @$members, $best if length $best;
			}
		}

		if (defined $pagestate{$trail}{trail}{sort}) {
			# re-sort the whole trail by the requested order
			@$members = pagespec_match_list($trail, 'internal(*)',
				list => $members,
				sort => $pagestate{$trail}{trail}{sort});
		}

		if (IkiWiki::yesno $pagestate{$trail}{trail}{reverse}) {
			@$members = reverse @$members;
		}

		# uniquify, keeping the first occurrence of each member
		my %seen;
		my @tmp;
		foreach my $member (@$members) {
			push @tmp, $member unless $seen{$member};
			$seen{$member} = 1;
		}
		$members = [@tmp];

		# Record each member's prev/next neighbours within this trail.
		for (my $i = 0; $i <= $#$members; $i++) {
			my $member = $members->[$i];
			my $prev;
			$prev = $members->[$i - 1] if $i > 0;
			my $next = $members->[$i + 1];

			$member_to_trails{$member}{$trail} = [$prev, $next];
		}

		# A circular trail wraps: last -> first and first -> last.
		if ((scalar @$members) > 1 && $pagestate{$trail}{trail}{circular}) {
			$member_to_trails{$members->[0]}{$trail}[0] = $members->[$#$members];
			$member_to_trails{$members->[$#$members]}{$trail}[1] = $members->[0];
		}

		$trail_to_members{$trail} = $members;
	}

	# Pages that used to be trail members but no longer are must be
	# rebuilt once more to drop their trail navigation.
	foreach my $member (keys %pagestate) {
		if (exists $pagestate{$member}{trail}{item} &&
			! exists $member_to_trails{$member}) {
			$rebuild_trail_members{$member} = 1;
			delete $pagestate{$member}{trail}{item};
		}
	}

	# New members, and members whose neighbours or neighbour titles
	# changed, must be rebuilt; then persist the fresh neighbour data.
	foreach my $member (keys %member_to_trails) {
		if (! exists $pagestate{$member}{trail}{item}) {
			$rebuild_trail_members{$member} = 1;
		}
		else {
			if (trails_differ($pagestate{$member}{trail}{item},
					$member_to_trails{$member})) {
				$rebuild_trail_members{$member} = 1;
			}
		}

		$pagestate{$member}{trail}{item} = $member_to_trails{$member};
	}

	$done_prerender = 1;
}
# build_affected hook: report pages needing a rebuild because their
# prev/next neighbours (or those neighbours' titles) changed. Returns
# a hash of page => human-readable reason.
sub build_affected {
	my %affected;

	# In principle we might not have done this yet, although in
	# practice at least the trail itself has probably changed, and its
	# template almost certainly contains TRAILS or TRAILLOOP,
	# triggering our prerender as a side-effect.
	prerender();

	my $reason = gettext("building %s, its previous or next page has changed");
	$affected{$_} = sprintf($reason, $_)
		foreach keys %rebuild_trail_members;

	return %affected;
}
# Display title for a page: the meta title if one was recorded in
# %pagestate, otherwise a title derived from the page's basename.
sub title_of ($) {
	my $page = shift;
	my $meta_title = $pagestate{$page}{meta}{title};
	return $meta_title if defined $meta_title;
	return pagetitle(IkiWiki::basename($page));
}
# Guard against infinite recursion: filling the TRAILS template below
# re-runs all pagetemplate hooks, including this one.
my $recursive = 0;

# pagetemplate hook: populate the TRAILS and TRAILLOOP template
# variables with prev/next navigation for every trail $page belongs to.
sub pagetemplate (@) {
	my %params = @_;
	my $page = $params{page};
	my $template = $params{template};

	return unless length $page;

	if ($template->query(name => 'trails') && ! $recursive) {
		prerender();

		# Render trails.tmpl via all pagetemplate hooks (with the
		# recursion guard set) and embed the result as TRAILS.
		$recursive = 1;
		my $inner = template("trails.tmpl", blind_cache => 1);
		IkiWiki::run_hooks(pagetemplate => sub {
			shift->(%params, template => $inner)
		});
		$template->param(trails => $inner->output);
		$recursive = 0;
	}

	if ($template->query(name => 'trailloop')) {
		prerender();

		my @trails;

		# sort backlinks by page name to have a consistent order
		foreach my $trail (sort keys %{$member_to_trails{$page}}) {
			my $members = $trail_to_members{$trail};
			my ($prev, $next) = @{$member_to_trails{$page}{$trail}};
			my ($prevurl, $nexturl, $prevtitle, $nexttitle);

			if (defined $prev) {
				$prevurl = urlto($prev, $page);
				$prevtitle = title_of($prev);
			}

			if (defined $next) {
				$nexturl = urlto($next, $page);
				$nexttitle = title_of($next);
			}

			# One loop row per trail containing this page.
			push @trails, {
				prevpage => $prev,
				prevtitle => $prevtitle,
				prevurl => $prevurl,
				nextpage => $next,
				nexttitle => $nexttitle,
				nexturl => $nexturl,
				trailpage => $trail,
				trailtitle => title_of($trail),
				trailurl => urlto($trail, $page),
			};
		}

		$template->param(trailloop => \@trails);
	}
}
1;

View File

@ -8,7 +8,7 @@ use IkiWiki 3.00;
sub import {
hook(type => "getsetup", id => "transient", call => \&getsetup);
hook(type => "checkconfig", id => "transient", call => \&checkconfig);
hook(type => "change", id => "transient", call => \&change);
hook(type => "rendered", id => "transient", call => \&rendered);
}
sub getsetup () {
@ -33,7 +33,7 @@ sub checkconfig () {
}
}
sub change (@) {
sub rendered (@) {
foreach my $file (@_) {
# If the corresponding file exists in the transient underlay
# and isn't actually being used, we can get rid of it.
@ -43,7 +43,7 @@ sub change (@) {
my $casualty = "$transientdir/$file";
if (srcfile($file) ne $casualty && -e $casualty) {
debug(sprintf(gettext("removing transient version of %s"), $file));
IkiWiki::prune($casualty);
IkiWiki::prune($casualty, $transientdir);
}
}
}

View File

@ -262,12 +262,13 @@ sub render ($$) {
}
}
sub prune ($) {
sub prune ($;$) {
my $file=shift;
my $up_to=shift;
unlink($file);
my $dir=dirname($file);
while (rmdir($dir)) {
while ((! defined $up_to || $dir =~ m{^\Q$up_to\E\/}) && rmdir($dir)) {
$dir=dirname($dir);
}
}
@ -447,7 +448,7 @@ sub remove_del (@) {
}
foreach my $old (@{$oldrenderedfiles{$page}}) {
prune($config{destdir}."/".$old);
prune($config{destdir}."/".$old, $config{destdir});
}
foreach my $source (keys %destsources) {
@ -537,7 +538,7 @@ sub remove_unrendered () {
foreach my $file (@{$oldrenderedfiles{$page}}) {
if (! grep { $_ eq $file } @{$renderedfiles{$page}}) {
debug(sprintf(gettext("removing %s, no longer built by %s"), $file, $page));
prune($config{destdir}."/".$file);
prune($config{destdir}."/".$file, $config{destdir});
}
}
}
@ -800,6 +801,14 @@ sub refresh () {
derender_internal($file);
}
run_hooks(build_affected => sub {
my %affected = shift->();
while (my ($page, $message) = each %affected) {
next unless exists $pagesources{$page};
render($pagesources{$page}, $message);
}
});
my ($backlinkchanged, $linkchangers)=calculate_changed_links($changed,
$del, $oldlink_targets);
@ -821,8 +830,13 @@ sub refresh () {
run_hooks(delete => sub { shift->(@$del, @$internal_del) });
}
if (%rendered) {
run_hooks(change => sub { shift->(keys %rendered) });
run_hooks(rendered => sub { shift->(keys %rendered) });
run_hooks(change => sub { shift->(keys %rendered) }); # back-compat
}
my %all_changed = map { $_ => 1 }
@$new, @$changed, @$del,
@$internal_new, @$internal_changed, @$internal_del;
run_hooks(changes => sub { shift->(keys %all_changed) });
}
sub clean_rendered {
@ -831,7 +845,7 @@ sub clean_rendered {
remove_unrendered();
foreach my $page (keys %oldrenderedfiles) {
foreach my $file (@{$oldrenderedfiles{$page}}) {
prune($config{destdir}."/".$file);
prune($config{destdir}."/".$file, $config{destdir});
}
}
}

View File

@ -93,12 +93,53 @@ EOF
# memory, a pile up of processes could cause thrashing
# otherwise. The fd of the lock is stored in
# IKIWIKI_CGILOCK_FD so unlockwiki can close it.
$pre_exec=<<"EOF";
#
# A lot of cgi wrapper processes can potentially build
# up and clog an otherwise unloaded web server. To
# partially avoid this, when a GET comes in and the lock
# is already held, rather than blocking a html page is
# constructed that retries. This is enabled by setting
# cgi_overload_delay.
if (defined $config{cgi_overload_delay} &&
$config{cgi_overload_delay} =~/^[0-9]+/) {
my $i=int($config{cgi_overload_delay});
$pre_exec.="#define CGI_OVERLOAD_DELAY $i\n"
if $i > 0;
my $msg=gettext("Please wait");
$msg=~s/"/\\"/g;
$pre_exec.='#define CGI_PLEASE_WAIT_TITLE "'.$msg."\"\n";
if (defined $config{cgi_overload_message} && length $config{cgi_overload_message}) {
$msg=$config{cgi_overload_message};
$msg=~s/"/\\"/g;
}
$pre_exec.='#define CGI_PLEASE_WAIT_BODY "'.$msg."\"\n";
}
$pre_exec.=<<"EOF";
lockfd=open("$config{wikistatedir}/cgilock", O_CREAT | O_RDWR, 0666);
if (lockfd != -1 && lockf(lockfd, F_LOCK, 0) == 0) {
char *fd_s=malloc(8);
sprintf(fd_s, "%i", lockfd);
setenv("IKIWIKI_CGILOCK_FD", fd_s, 1);
if (lockfd != -1) {
#ifdef CGI_OVERLOAD_DELAY
char *request_method = getenv("REQUEST_METHOD");
if (request_method && strcmp(request_method, "GET") == 0) {
if (lockf(lockfd, F_TLOCK, 0) == 0) {
set_cgilock_fd(lockfd);
}
else {
printf("Content-Type: text/html\\nRefresh: %i; URL=%s\\n\\n<html><head><title>%s</title><head><body><p>%s</p></body></html>",
CGI_OVERLOAD_DELAY,
getenv("REQUEST_URI"),
CGI_PLEASE_WAIT_TITLE,
CGI_PLEASE_WAIT_BODY);
exit(0);
}
}
else if (lockf(lockfd, F_LOCK, 0) == 0) {
set_cgilock_fd(lockfd);
}
#else
if (lockf(lockfd, F_LOCK, 0) == 0) {
set_cgilock_fd(lockfd);
}
#endif
}
EOF
}
@ -140,6 +181,12 @@ void addenv(char *var, char *val) {
newenviron[i++]=s;
}
set_cgilock_fd (int lockfd) {
char *fd_s=malloc(8);
sprintf(fd_s, "%i", lockfd);
setenv("IKIWIKI_CGILOCK_FD", fd_s, 1);
}
int main (int argc, char **argv) {
int lockfd=-1;
char *s;
@ -214,7 +261,7 @@ $set_background_command
EOF
my @cc=exists $ENV{CC} ? possibly_foolish_untaint($ENV{CC}) : 'cc';
push @cc, possibly_foolish_untaint($ENV{CFLAGS}) if exists $ENV{CFLAGS};
push @cc, split(' ', possibly_foolish_untaint($ENV{CFLAGS})) if exists $ENV{CFLAGS};
if (system(@cc, "$wrapper.c", "-o", "$wrapper.new") != 0) {
#translators: The parameter is a C filename.
error(sprintf(gettext("failed to compile %s"), "$wrapper.c"));

View File

@ -75,7 +75,7 @@ underlay_install:
install -d $(DESTDIR)$(PREFIX)/share/ikiwiki
for dir in `cd underlays && $(FIND) . -follow -type d`; do \
install -d $(DESTDIR)$(PREFIX)/share/ikiwiki/$$dir; \
for file in `$(FIND) underlays/$$dir -follow -maxdepth 1 -type f -not -name \\*.full.js -not -name \\*.full.css`; do \
for file in `$(FIND) underlays/$$dir -follow -maxdepth 1 -type f ! -name \\*.full.js ! -name \\*.full.css`; do \
cp -pRL $$file $(DESTDIR)$(PREFIX)/share/ikiwiki/$$dir 2>/dev/null || \
install -m 644 $$file $(DESTDIR)$(PREFIX)/share/ikiwiki/$$dir; \
done; \

View File

@ -36,7 +36,7 @@ IkiWiki::Setup::Automator->import(
cgiurl => "http://$domain/~$ENV{USER}/$wikiname_short/ikiwiki.cgi",
cgi_wrapper => "$ENV{HOME}/public_html/$wikiname_short/ikiwiki.cgi",
adminemail => "$ENV{USER}\@$domain",
add_plugins => [qw{goodstuff websetup comments blogspam calendar sidebar}],
add_plugins => [qw{goodstuff websetup comments blogspam calendar sidebar trail}],
disable_plugins => [qw{}],
libdir => "$ENV{HOME}/.ikiwiki",
rss => 1,

135
debian/changelog vendored
View File

@ -1,3 +1,138 @@
ikiwiki (3.20121213) UNRELEASED; urgency=low
* htmlscrubber: Allow the bitcoin URI scheme.
* htmlscrubber: Allow the URI schemes of major VCS's.
* aggregate: When run with --aggregate, if an aggregation is already
running, don't go on and --refresh.
* trail: Avoid excess dependencies between pages in the trail
and the page defining the trail. Thanks, smcv.
* opendiscussion: Don't allow editing discussion pages if discussion pages
are disabled. (smcv)
* poll: Add expandable option to allow users to easily add new choices to
a poll.
* trail: Avoid massive slowdown caused by pagetemplate hook when displaying
dynamic cgi pages, which cannot use trail anyway.
-- Joey Hess <joeyh@debian.org> Sat, 22 Dec 2012 16:15:24 -0400
ikiwiki (3.20121212) unstable; urgency=low
* filecheck: Fix bug that prevented File::MimeInfo::Magic from ever
being used.
* openid: Display openid in Preferences page as a comment, so it can be
selected in all browsers.
-- Joey Hess <joeyh@debian.org> Tue, 11 Dec 2012 12:12:12 -0400
ikiwiki (3.20121017) unstable; urgency=low
* recentchangesdiff: fix further breakage to the template from 3.20120725
-- Joey Hess <joeyh@debian.org> Tue, 16 Oct 2012 20:49:27 -0400
ikiwiki (3.20121016) unstable; urgency=low
* monochrome: New theme, contributed by Jon Dowland.
* rst: Ported to python 3, while still also being valid python 2.
Thanks, W. Trevor King
* Try to avoid a situation in which so many ikiwiki cgi wrapper programs
are running, all waiting on some long-running thing like a site rebuild,
that it prevents the web server from doing anything else. The current
approach only avoids this problem for GET requests; if multiple cgi's
run GETs on a site at the same time, one will display a "please wait"
page for a configurable number of seconds, which then redirects to retry.
To enable this protection, set cgi_overload_delay to the number of
seconds to wait. This is not enabled by default.
* Add back a 1em margin between archivepage divs.
* recentchangesdiff: Correct broken template that resulted in duplicate
diff icons being displayed, and bloated the recentchanges page with
inline diffs when the configuration should have not allowed them.
-- Joey Hess <joeyh@debian.org> Tue, 16 Oct 2012 15:14:19 -0400
ikiwiki (3.20120725) unstable; urgency=low
* recentchangesdiff: When diffurl is not set, provide inline diffs
in the recentchanges page, with visibility toggleable via javascript.
Thanks, Antoine Beaupré
* Split CFLAGS into words when building wrapper. Closes: #682237
* osm: Avoid calling urlto before generated files are registered.
Thanks, Philippe Gauthier and Antoine Beaupré
* osm: Add osm_openlayers_url configuration setting.
Thanks, Genevieve
* osm: osm_layers can be used to configured the layers displayed on the map.
Thanks, Antoine Beaupré
* comments: Remove ipv6 address specific code.
-- Joey Hess <joeyh@debian.org> Sat, 25 Aug 2012 10:58:42 -0400
ikiwiki (3.20120629) unstable; urgency=low
* mirrorlist: Add mirrorlist_use_cgi setting that avoids usedirs or
other config differences by linking to the mirror's CGI. (intrigeri)
-- Joey Hess <joeyh@debian.org> Fri, 29 Jun 2012 10:16:08 -0400
ikiwiki (3.20120516) unstable; urgency=high
* meta: Security fix; add missing sanitization of author and authorurl.
CVE-2012-0220 Thanks, Raúl Benencia
-- Joey Hess <joeyh@debian.org> Wed, 16 May 2012 19:51:27 -0400
ikiwiki (3.20120419) unstable; urgency=low
* Remove dead link from plugins/teximg. Closes: #664885
* inline: When the pagenames list includes pages that do not exist, skip
them.
* meta: Export author information in html <meta> tag. Closes: #664779
Thanks, Martin Michlmayr
* notifyemail: New plugin, sends email notifications about new and
changed pages, and allows subscribing to comments.
* Added a "changes" hook. Renamed the "change" hook to "rendered", but
the old hook name is called for now for back-compat.
* meta: Support keywords header. Closes: #664780
Thanks, Martin Michlmayr
* passwordauth: Fix url in password recovery email to be absolute.
* httpauth: When it's the only auth method, avoid a pointless and
confusing signin form, and go right to the httpauthurl.
* rename: Allow rename to be started not from the edit page; return to
the renamed page in this case.
* remove: Support removing of pages in the transient underlay. (smcv)
* inline, trail: The pagenames parameter is now a list of absolute
pagenames, not relative wikilink type names. This is necessary to fix
a bug, and makes pagenames more consistent with the pagespec used
in the pages parameter. (smcv)
* link: Fix renaming wikilinks that contain embedded urls.
* graphviz: Handle self-links.
* trail: Improve CSS, also display trail links at bottom of page,
and a bug fix. (smcv)
-- Joey Hess <joeyh@debian.org> Thu, 19 Apr 2012 15:32:07 -0400
ikiwiki (3.20120319) unstable; urgency=low
* osm: New plugin to embed an OpenStreetMap into a wiki page.
Supports waypoints, tags, and can even draw paths matching
wikilinks between pages containing waypoints.
Thanks to Blars Blarson and Antoine Beaupré, as well as the worldwide
OpenStreetMap community for this utter awesomeness.
* trail: New plugin to add navigation trails through pages via Next and
Previous links. Trails can easily be added to existing inlines by setting
trail=yes in the inline.
Thanks to Simon McVittie for his persistance developing this feature.
* Fix a snail mail address. Closes: #659158
* openid-jquery.js: Update URL of Wordpress favicon. Closes: #660549
* Drop the version attribute on the generator tag in Atom feeds
to make builds more reproducible. Closes: #661569 (Paul Wise)
* shortcut: Support Wikipedia's form of url-encoding for unicode
characters, which involves mojibake. Closes: #661198
* Add a few missing jquery UI icons to attachment upload widget underlay.
* URI escape filename when generating the diffurl.
* Add build-affected hook. Used by trail.
-- Joey Hess <joeyh@debian.org> Mon, 19 Mar 2012 14:24:43 -0400
ikiwiki (3.20120202) unstable; urgency=low
* mdwn: Added nodiscount setting, which can be used to avoid using the

2
debian/compat vendored
View File

@ -1 +1 @@
7
9

6
debian/control vendored
View File

@ -1,7 +1,7 @@
Source: ikiwiki
Section: web
Priority: optional
Build-Depends: perl, debhelper (>= 7.0.50)
Build-Depends: perl, debhelper (>= 9)
Build-Depends-Indep: dpkg-dev (>= 1.9.0), libxml-simple-perl,
libtext-markdown-discount-perl,
libtimedate-perl, libhtml-template-perl,
@ -10,7 +10,7 @@ Build-Depends-Indep: dpkg-dev (>= 1.9.0), libxml-simple-perl,
libfile-chdir-perl, libyaml-libyaml-perl, python-support
Maintainer: Joey Hess <joeyh@debian.org>
Uploaders: Josh Triplett <josh@freedesktop.org>
Standards-Version: 3.9.2
Standards-Version: 3.9.3
Homepage: http://ikiwiki.info/
Vcs-Git: git://git.ikiwiki.info/
@ -38,7 +38,7 @@ Suggests: viewvc | gitweb | viewcvs, libsearch-xapian-perl,
libsparkline-php, texlive, dvipng, libtext-wikicreole-perl,
libsort-naturally-perl, libtext-textile-perl, libhighlight-perl,
po4a (>= 0.35-1), gettext, libnet-inet6glue-perl,
libtext-multimarkdown-perl
libtext-multimarkdown-perl, libxml-writer-perl
Conflicts: ikiwiki-plugin-table
Replaces: ikiwiki-plugin-table
Provides: ikiwiki-plugin-table

14
debian/copyright vendored
View File

@ -1,4 +1,4 @@
Format: http://dep.debian.net/deps/dep5/
Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Source: native package
Files: *
@ -153,6 +153,14 @@ Files: IkiWiki/Plugin/rsync.pm
Copyright: © 2009 Amitai Schlair
License: BSD-2-clause
Files: IkiWiki/Plugin/osm.pm
Copyright: © 2011 Blars Blarson, Antoine Beaupré
License: GPL-2
Files: IkiWiki/Plugin/trail.pm
Copyright: 2009-2012 Simon McVittie <http://smcv.pseudorandom.co.uk/>
License: GPL-2+
Files: doc/logo/*
Copyright: © 2006 Recai Oktaş <roktas@debian.org>
License: GPL-2+
@ -240,6 +248,10 @@ Files: underlays/themes/goldtype/*
Copyright: © Lars Wirzenius
License: GPL-2+
Files: underlays/themes/monochrome/*
Copyright: © 2012 Jon Dowland
License: GPL-2+
License: BSD-2-clause
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions

View File

@ -1,3 +1,11 @@
ikiwiki works with anchors in various situations.
You can insert anchors directly in the body of a page and it will be used on the resulting HTML, for example:
<a name="anchor"></a>
... will make the link [[anchor#anchor]] work.
<a name="anchor"></a>
This page accumulates links to the concept of anchors.

View File

@ -30,3 +30,5 @@ Bulleted list
* item
[[ikiwiki/WikiLink]]
[[!calendar type="month" pages="blog/*"]]

View File

@ -20,6 +20,6 @@ Long-lived branches in the main git repository:
* `ignore` gets various branches merged to it that [[Joey]] wishes to ignore
when looking at everyone's unmerged changes.
* `pristine-tar` contains deltas that
[pristine-tar](http://kitenet.net/~joey/code/pristine-tar)
[pristine-tar](http://joeyh.name/code/pristine-tar)
can use to recreate released tarballs of ikiwiki
* `setup` contains the ikiwiki.setup file for this site

View File

@ -0,0 +1,28 @@
If you wish to install ikiwiki in your home directory (for example because you don't have root access), you need to set environment variables (such as PATH and PERL5LIB) to point to these directories that contain your personal copy of IkiWiki.
The CGI wrapper remembers PATH, but not the environment variable PERL5LIB. Consequently, it will look for plugins and so on in the usual system directories, not in your personal copy. This is particularly insidious if you have a system copy of a different version installed, as your CGI wrapper may then load in code from this version.
I think the CGI wrapper should remember PERL5LIB too.
-- Martin
Thanks a lot for pointing me to this location in the code. I had been looking for it for some time.
This brutal patch implement your solution as a temporary fix.
*** Wrapper.pm.old 2012-08-25 16:41:41.000000000 +0200
--- Wrapper.pm 2012-10-01 17:33:17.582956524 +0200
***************
*** 149,154 ****
--- 149,155 ----
$envsave
newenviron[i++]="HOME=$ENV{HOME}";
newenviron[i++]="PATH=$ENV{PATH}";
+ newenviron[i++]="PERL5LIB=$ENV{PERL5LIB}";
newenviron[i++]="WRAPPED_OPTIONS=$configstring";
#ifdef __TINYC__
As I am not sure that remembering `PERL5LIB` is a good idea, I think that a prettier solution will be to add a config variable (let's say `cgi_wrapper_perllib`) which, if fixed, contains the `PERL5LIB` value to include in the wrapper, or another (let's say `cgi_wrapper_remember_libdir`), which, if fixed, remember the current `PERL5LIB`.
-- Bruno

View File

@ -0,0 +1,11 @@
Hi folks,
This is a fairly fresh wiki. I recently noticed the Links: section at the bottom looked like this:
Links: index recentchanges/change 0b2f03d3d21a3bb21f6de75d8711c73df227e17c recentchanges/change 1c5b830b15c4f2f0cc97ecc0adfd60a1f1578918 recentchanges/change 20b20b91b90b28cdf2563eb959a733c6dfebea7a recentchanges/change 3377cedd66380ed416f59076d69f546bf12ae1e4 recentchanges/change 4c53d778870ea368931e7df2a40ea67d00130202 recentchanges/change 7a9f3c441a9ec7e189c9df322851afa21fd8b00c recentchanges/change 7dcaea1be47308ee27a18f893ff232a8370e348a recentchanges/change 963245d4e127159e12da436dea30941ec371c6be recentchanges/change cd489ff4abde8dd611f7e42596b93953b38b9e1c ...
All of those "recentchanges/ change xxxxxxx" links are clickable, but all yield 404 when clicked.
When I disable the CamelCase plugin and rebuild the wiki, all the Links other than index disappear, as they should. Re-enable CamelCase, and they're back.
This is a very simple wiki. Just fresh, only one page other than index (this one), and nothing at all fancy/weird about it.

View File

@ -0,0 +1,5 @@
If you look at [[todo/org mode]], the link to the Discussion page is not there (has a question mark), as if it didn't exist. But--through the search--I discovered that the Discussion page does exist actually: [[todo/org mode/Discussion]].
So, there is a bug that prevents a link to the existing Discussion page from appearing in the correct way on the corresponding main page. --Ivan Z.
Perhaps, this has something to do with the same piece of code/logic (concerning case-sensitivity) as the fixed [[bugs/unwanted discussion links on discussion pages]]? --Ivan Z.

View File

@ -0,0 +1,3 @@
If I use the linkmap directive twice on a single page, I get the same image appearing in both locations, even though the parameters for the two directives may have been different.
-- Martin

View File

@ -0,0 +1,11 @@
Say you are commenting on this report. The Navbar on top will look like
[ikiwiki](http://ikiwiki.info/)/ [bugs](http://ikiwiki.info/bugs/)/ commenting on Navbar does not link to page being commented on while commenting
while either of those two options would be better:
[ikiwiki](http://ikiwiki.info/)/ [bugs](http://ikiwiki.info/bugs/)/ commenting on [Navbar does not link to page being commented on while commenting](http://ikiwiki.info/bugs/Navbar_does_not_link_to_page_being_commented_on_while_commenting/)
[ikiwiki](http://ikiwiki.info/)/ [bugs](http://ikiwiki.info/bugs/)/ [Navbar does not link to page being commented on while commenting](http://ikiwiki.info/bugs/Navbar_does_not_link_to_page_being_commented_on_while_commenting/) / New comment
-- RichiH

View File

@ -1,13 +0,0 @@
I'm writing [pykipandoc plugin](https://github.com/temmen/pykipandoc/blob/master/pykipandoc), that work at least as pandoc-iki.
It works in compile mode, editing pages in web mode however results in
pandoc: : hGetContents: invalid argument (Invalid or incomplete multibyte or wide character)
I think that is because HTTP POST request building editpage doesn't correctly manage utf-8 contents: see strange chars in this form-data name="editcontent"?
This principle has guided pandoc’s decisions in finding syntax for tables, footnotes, and other extensions.
Please, any advice can be sent to [GitHub pykipandoc](https://github.com/temmen/pykipandoc) (some other info there on the [README](https://github.com/temmen/pykipandoc/blob/master/README.md)) and to [temmenel(at)gmail(dot)com](mailto:temmenel@gmail.com).
¡Thank you all!

View File

@ -0,0 +1,15 @@
[[!tag bugs wishlist]]
I accidentally made a typo spelling "surprises" and changed my URL from
<http://natalian.org/archives/2012/12/04/Singapore_banking_suprises/>
to
<http://natalian.org/archives/2012/12/04/Singapore_banking_surprises/>
Using the meta redir. However the meta redir now appears in the index of <http://natalian.org/>
Any ideas how to handle this situation?
> Well, you can adjust the inline's pagespec to exclude it, or even tag it
> with a tag that the pagespec is adjusted to exclude. --[[Joey]]

View File

@ -0,0 +1,47 @@
Saving a wiki page in ikwiki or
<tt>ikiwiki --setup wiki.setup --rebuild</tt> takes a **dozen minutes** on a tiny tiny wiki (10 user-added pages)!
I profiled ikiwiki with [[!cpan Devel::SmallProf]] : see [[users/mathdesc]] for details.
And I came to the conclusion that [[plugins/filecheck]] on attachment was the only cause.
It always falls through to the fallback code that runs the time-consuming `file` command, even though that fallback does not appear to succeed either.
<pre>
# Get the mime type.
#
# First, try File::Mimeinfo. This is fast, but doesn't recognise
# all files.
eval q{use File::MimeInfo::Magic};
my $mimeinfo_ok=! $@;
my $mimetype;
if ($mimeinfo_ok) {
my $mimetype=File::MimeInfo::Magic::magic($file);
}
# Fall back to using file, which has a more complete
# magic database.
if (! defined $mimetype) {
open(my $file_h, "-|", "file", "-bi", $file);
$mimetype=<$file_h>;
chomp $mimetype;
close $file_h;
}
if (! defined $mimetype || $mimetype !~s /;.*//) {
# Fall back to default value.
$mimetype=File::MimeInfo::Magic::default($file)
if $mimeinfo_ok;
if (! defined $mimetype) {
$mimetype="unknown";
}
}
</pre>
I found on [[plugins/filecheck/discussion/]] what [[users/DavidBremner/]] described as :
> no way to detect text/plain using File::MimeInfo::Magic::magic()
But I can't figure out whether my issue is broader and includes this, or not.
Any ideas , solve :) more that welcome.
> [[done]], as isbear noted in [[discussion]], there was a bug that
> prevented File::MimeInfo::Magic from ever being used. --[[Joey]]

View File

@ -0,0 +1,141 @@
##Foreword :
Disabling filecheck is not actually possible, because doing so causes attachment.pm to malfunction,
along with any pagespec that contains a *mimetype* condition.
attachment.pm imports filecheck "statically", so disabling it should actually be forbidden.
<pre>
sub import {
add_underlay("attachment");
add_underlay("javascript");
add_underlay("jquery");
hook(type => "getsetup", id => "attachment", call => \&getsetup);
hook(type => "checkconfig", id => "attachment", call => \&checkconfig);
hook(type => "formbuilder_setup", id => "attachment", call => \&formbuilder_setup);
hook(type => "formbuilder", id => "attachment", call => \&formbuilder, last => 1);
IkiWiki::loadplugin("filecheck");
}
</pre>
----
## How bad is it ?
So I tried on three pages to inline <tt>!mimetype(image/*)</tt> while I allowed attachment of <tt>mimetype(image/*)</tt>
My profiling tests in the bug report shows that most of the time is spend in the "Fallback using file" block code,
I tried to comment that block and see how it'll perform. Obviously this is much much faster ... but is the mimetype
discovered using only *File::MimeInfo* ?
Dumping some strings before return to STDERR, rebuilding . This is just a [[!toggle id="code-test" text="dumpdebug adding"]]
[[!toggleable id="code-test" text="""
<pre>
sub match_mimetype ($$;@) {
my $page=shift;
my $wanted=shift;
my %params=@_;
my $file=exists $params{file} ? $params{file} : IkiWiki::srcfile($IkiWiki::pagesources{$page});
if (! defined $file) {
return IkiWiki::ErrorReason->new("file does not exist");
}
# Get the mime type.
#
# First, try File::Mimeinfo. This is fast, but doesn't recognise
# all files.
eval q{use File::MimeInfo::Magic};
my $mimeinfo_ok=! $@;
my $mimetype;
print STDERR " --- match_mimetype (".$file.")\n";
if ($mimeinfo_ok) {
my $mimetype=File::MimeInfo::Magic::magic($file);
}
# Fall back to using file, which has a more complete
# magic database.
#if (! defined $mimetype) {
# open(my $file_h, "-|", "file", "-bi", $file);
# $mimetype=<$file_h>;
# chomp $mimetype;
# close $file_h;
#}
if (! defined $mimetype || $mimetype !~s /;.*//) {
# Fall back to default value.
$mimetype=File::MimeInfo::Magic::default($file)
if $mimeinfo_ok;
if (! defined $mimetype) {
$mimetype="unknown";
}
}
my $regexp=IkiWiki::glob2re($wanted);
if ($mimetype!~$regexp) {
print STDERR " xxx MIME unknown ($mimetype - $wanted - $regexp ) \n";
return IkiWiki::FailReason->new("file MIME type is $mimetype, not $wanted");
}
else {
print STDERR " vvv MIME found\n";
return IkiWiki::SuccessReason->new("file MIME type is $mimetype");
}
}
</pre>
"""]]
The results dump to stderr (or a file called... 'say *mime*) looks like this :
<pre>
--- match_mimetype (/usr/share/ikiwiki/attachment/ikiwiki/jquery.fileupload-ui.js)
xxx MIME unknown (text/plain - image/* - (?i-xsm:^image\/.*$) )
--- match_mimetype (/usr/share/ikiwiki/locale/fr/directives/ikiwiki/directive/fortune.mdwn)
xxx MIME unknown (text/plain - image/* - (?i-xsm:^image\/.*$) )
--- match_mimetype (/usr/share/ikiwiki/locale/fr/basewiki/shortcuts.mdwn)
xxx MIME unknown (text/plain - image/* - (?i-xsm:^image\/.*$)
--- match_mimetype (/usr/share/ikiwiki/smiley/smileys/alert.png)
xxx MIME unknown (application/octet-stream - image/* - (?i-xsm:^image\/.*$) )
--- match_mimetype (/usr/share/ikiwiki/attachment/ikiwiki/images/ui-bg_flat_75_ffffff_40x100.png)
xxx MIME unknown (application/octet-stream - image/* - (?i-xsm:^image\/.*$)
</pre>
<tt>---</tt> prepend signals the file on analysis<br/>
<tt>xxx</tt> prepend signals a returns failure : mime is unknown, the match is a failure<br/>
<tt>vvv</tt> prepend signals a return success.<br/>
This is nasty-scary results ! Something missed me or this mime-filecheck is plain nuts ?
*Question 1* : How many files have been analysed : **3055** (yet on a tiny tiny wiki)
<pre>grep "^ --- " mime | wc -l
3055
</pre>
*Question 2* : How many time it fails : *all the time*
<pre>
grep "^ xxx " mime | wc -l
3055
</pre>
*Question 1bis* : Doh btw , how many files have been re-analysed ? ** 2835 ** OMG !!
<pre>grep "^ --- " mime | sort -u | wc -l
220
</pre>
## Conclusion
- Only the system command *file -bi* works. While it **should** be easy on the CPU, it's also hard on the I/O -> VM :(
- Something nasty with the mime implementation and/or my system configuration -> Hints ? :D
- Need to cache during the rebuild: the same page need not be rechecked for its mime type while it's locked!
--mathdesc
> > if ($mimeinfo_ok) {
> > my $mimetype=File::MimeInfo::Magic::magic($file);
> > }
>
> That seems strange to me, `my` restricts scope of $mimetype to enclosing if block, thus, assigned value will be dropped - I think, it is the problem.
> Try removing that stray `my`.
>
> --isbear

View File

@ -0,0 +1,18 @@
Observed behavior:
When I create a link like \[[cmd_test]] , the link appears as 'cmd test'.
Expected behavior:
I would like to be able to create links with underscores. I realize this is a feature, and I searched for ways to escape the underscore so it would appear, but I didn't find any.
> as a workaround, you can use \[[cmd\_\_95\_\_test|cmd_test]] (which will link to a page named "cmd test" at the url location "cmd\_test") or \[[cmd\_\_95\_\_test]] (which will link to a page named "cmd\_test" at the url location "cmd\_\_95\_\_test"). i would, from my limited understanding of ikiwiki internals, consider the bug valid, and suggest that
>
> * explicit link text be not subject to de-escaping (why should it; this would be the short term solution)
> * escaped page names never be used in user visible parts of ikiwiki (in my opinion, a user should not need to know about those internals, especially as they are configuration dependant (wiki_file_regexp))
>
> note that in [[ikiwiki/wikilink]], that very behavior is documented; it says that "\[[foo\_bar|Sandbox]]" will show as "foo bar". (although you can't tell that apart from "foo\_bar" easily because it's a hyperlink).
>
> i assume that this behavior stems from times when wikilinks and [[ikiwiki/directive]]s were not distinguished by \[[ vs \[[! but by the use of whitespace in directives, so whitespace had to be avoided in wikilinks.
>
> --[[chrysn]]

View File

@ -22,3 +22,13 @@ Of course, the next time I rerun ikiwiki --setup, it will overwrite my wrapper-w
I made a logfile of all the args, env, and stdin/stdout to/from my wrapper. If you're interested, I'll email it to you. I wasn't able to attach it here.
-- [[terry|tjgolubi]]
I confirm that the supplied w3mmode setup appears not to work. When I try to edit a page and save it, w3m tries to access an URL beginning http://localhost/ . The HTML source of the edit page contains a BASE URL beginning with http://localhost. It should not. Maybe this is a result of changes a while back, where use of absolute URLs was enforced in various places in Ikiwiki.
-- Martin
The problem is that IkiWiki::CGI::cgitemplate() and IkiWiki::CGI::redirect() use Perl's CGI::url() to determine the absolute URL of the CGI script when it is being executed. url() generates an URL beginning http://localhost. As w3m's serverless CGI mode is rather unusual, presumably there's no provision for the URL of a CGI script beginning file:///, even if there's a way to specify that.
A quick workaround might be to force the use of $config{url} instead of $cgi->url as a base for URLs when w3mmode is set.
-- Martin

View File

@ -0,0 +1,11 @@
import os
os.environment['LANG'] = 'it_IT.utf-8'
Suona plausibile?
[GitHub pykipandoc](https://github.com/temmen/pykipandoc) -- Temmen
> The place to put contrib plugins is in [[plugins/contrib]].
>
> Closing this bug report as whatever it is that was fixed is apparently not an ikiwiki
> bug.. I guess. [[done]] --[[Joey]]

View File

@ -0,0 +1,15 @@
I think there's a bug in the code that determines if the cgiurl is relative
to the url. If one has a different port than the other, they're not
relative, and I hear Fil encountered an issue where the wrong port was then
used. --[[Joey]]
> I tested, setting cgiurl to a nonstandard port. After rebuilding,
> pages used the full url. So I don't see a bug here, or am missing
> something from my memory of the report (which was done the bad way, on
> IRC). [[done]] --[[Joey]]
> > Sorry about wittering on IRC instead of reporting proper bugs.
> >
> > The setup I have is nginx in front of apache, so that nginx is listening on port 80, apache is on port 81, and ikiwiki is being served by apache. After upgrading to 3.20120203 (backported to squeeze) I found that the URLs in the edit page all have the port set as :81 ... but now that I look at it more closely, that is the case for several ikiwiki-hosting controlled sites, but not for a few other sites that are also on the same machine, so it must be some difference between the settings for the sites, either in ikiwiki, or apache, or perhaps even nginx. Anyway, on the affected sites, explicitly including a port :80 in the cgiurl fixes the problem.
> > So, for the moment, this bug report is a bit useless, until I find out what is causing the ikiwiki-hosting sites to be befuddled, so it should probably stay closed -[[fil]]

View File

@ -0,0 +1,16 @@
smcv@vasks:~$ git clone git://git.ikiwiki.info/
Cloning into git.ikiwiki.info...
fatal: read error: Connection reset by peer
I tried this from a UK consumer ISP, my virtual server in the
UK, and vasks (aka alioth.debian.org) in the Netherlands,
with the same results. I can't update my clone from `origin`
either; for the moment I'm using the github mirror instead.
--[[smcv]]
> Strange.. The git-daemon was not running, but one child was running
> waiting on an upload-pack, but not accepting new connections. Nothing
> in the logs about what happened to the parent. The monitor that checks
> services are running was satisfied with the child.. I've made it
> restart if the parent pid is no longer running, which should avoid
> this problem in the future. --[[Joey]] [[done]]

View File

@ -1,4 +1,4 @@
[[!template id=gitbranch branch=GiuseppeBilotta/scanif author="Giuseppe Bilotta"]]
[[!template id=gitbranch branch=GiuseppeBilotta/scanif author="[[GiuseppeBilotta]]"]]
When a directive that should be run during scan preprocessing is inside
an if directive, it doesn't get called because the if preprocessing does

View File

@ -0,0 +1,27 @@
Definition lists do not look great here...
Here is an example.
<dl>
<dt>this is a term</dt>
<dd>and this is its definition.</dd>
</dl>
(This wiki doesn't support Markdown's extended definition lists, but still, this is valid markup.)
I believe `<dt>` should be made bold. I have added this to my `local.css`, and I would hate to add this all the time forever:
/* definition lists look better with the term in bold */
dt
{
font-weight: bold;
}
:) How does that look? I can provide a patch for the base wiki if you guys really want... ;) -- [[anarcat]]
> What you dislike seems to be the default rendering of definition lists by
> browsers. I don't think it's ikiwiki's place to override browser defaults
> for standard markup in the document body, at least not in the default
> antitheme. --[[Joey]]
> > How about in the actiontab theme then? :)

View File

@ -0,0 +1,57 @@
For some time now, in circumstances that I've had enormous troubles
trying to track, I've seen feeds getting removed by ikiwiki when
apparently unrelated pages got changed, with the message:
> removing somepath/somepage/somefeed, no longer built by some/unrelated/page
I've finally been able to find how and why it happens. The situation is
the following:
* page A has an inline directive that (directly) generates a feed F
* page B inlines A, thus (indirectly) generating F again
* page B is rendered after page A
The feed removal happens when changes are made to prevent B from
inlining A; for example, because B is a tag page and A is untagged B, or
because B includes A through a pagespec that no longer matches A. In
this case, this happens:
* page A is built, rendering F
* page B is built, _not_ rendering F, which it used to render
* F is removed because it is not built by B anymore
Note that although this issue is triggered (for me) from the changes I
proposed last year to allow feed generation from nested inlines
coalescing it to be page-based instead of destpage-based
(bb8f76a4a04686def8cc6f21bcca80cb2cc3b2c9 and
72c8f01b36c841b0e83a2ad7ad1365b9116075c5) there is potential for it
popping up in other cases.
Specifically, the logic for the removal of dependent pages currently
relies on the assumption that each output has a single generator. My
changes caused this assumption to be violated, hence the error, but
other cases may pop up for other plugins in the future.
I have a [patch] fixing this issue (for feeds specifically, i.e. only
the problem I am actually having) on top of my `mystuff` branch, but
since that also has heaps of other unrelated stuff, you may want to just
[pick it from my gitweb][gw].
[gw]: (http://git.oblomov.eu/ikiwiki/patch/671cb26cf50643827f258270d9ac8ad0b1388a65)
The patch changes the `will_render()` for feeds to be based on the page
rather than on the destpage, matching the fact that for nested inlines
it's the inner page that is ultimately responsible for generating the
feed.
I've noticed that it requires at least _two_ full rebuilds before the
index is again in a sensible state. (On the first rebuild, all feeds
from nested inlines are actually _removed_.)
While the patch is needed because there are legitimate cases in which
nested feeds are needed (for example, I have an index page that inlines
index pages for subsections of my site, and I want _those_ feeds to
be visible), there are other cases when one may want to skip feed
generation from nested inlines.
--[[GiuseppeBilotta]]

View File

@ -3,3 +3,5 @@
Whoops, somehow missed a spot on the last incarnation of this branch.
`find -not` doesn't work on NetBSD and `find !` runs equivalently
for me. Fixed in 9659272e25fac37f896991dab01a05b4f4c85ccb.
> [[done]] --[[Joey]]

View File

@ -0,0 +1,15 @@
The following code in our sandbox generates an empty graph:
[[!graph src=""""
google [ href="http://google.com/" ]
sandbox [ href=\[[SandBox]] ]
help [ href=\[[ikiwiki/formatting]] ]
newpage [ href=\[[NewPage]] ]
google -> sandbox -> help -> newpage -> help -> google;
""""]]
It is the exact same thing as on the [[ikiwiki/directive/graph/]] directive documentation, from the [[plugins/graphviz]] plugin. This is ikiwiki 3.20120203 on Debian wheezy and graphviz is installed (2.26.3-10). Note that the first demo actually works. See <http://mesh.openisp.ca/sandbox> --[[anarcat]]
> Looking at the example shows too many double quotes. [[fixed|done]]
> --[[Joey]]

View File

@ -0,0 +1,19 @@
If I make a comment from an ipv4 address
I see the commenter's ipv4 address logged in the comment file.
If I make a comment from an ipv6 address
I see nothing.
There is a sanity check in /usr/share/perl5/IkiWiki/Plugin/comments.pm
line 447 (according to today's version) there is an ipv4 specific regexp.
I removed the regexp and used the value without this added check and it fixed
the problem for me. Not sure if this is the best solution. --[[cstamas]]
[[patch]]
[[!tag ipv6]]
> [[done]] --[[Joey]]
> > Thank you! --[[cstamas]]

View File

@ -0,0 +1,14 @@
This is very minor. Noticed in nginx's logs that jquery-ui.min.css (the attachment plugin uses this) keeps referencing some png files that are not available in public_html/mywiki/ikiwiki/images/ These should be included in underlays/attachment/ikiwiki/images/ in the source repo and seem to be copied from /usr/local/share/ikiwiki/attachment/ikiwiki/images/ when I compile a new wiki. The complete list of images jquery-ui.min.css is looking for can be found here. https://github.com/jquery/jquery-ui/tree/1.8.14/themes/base/images
> Do you have a list of files that are *actually* used when ikiwiki is
> running? I don't want to include a lot of files that jquery only
> uses in other situations. The currently included files are exactly those
> that I see it try to use. --[[Joey]]
Fair enough. These 3 files are the only ones that appear consistently in nginx error logs.
ui-bg_glass_75_dadada_1x400.png
ui-icons_454545_256x240.png
ui-bg_glass_95_fef1ec_1x400.png
> Hmm, that's most of the missing ones. I just added them all. [[done]]
> --[[Joey]]

View File

@ -0,0 +1,18 @@
[[ikiwiki/directive/linkmap]]s display the file name instead of the pagetitle, showing unsightly underscore escapes and underscores instead of blanks to users.
the attached [[!taglink patch]] fixes this; from its commit message:
display the pagetitle() in linkmaps
without this patch, linkmaps display underscores and underscore escape
sequences in the rendered output.
this introduces a pageescape function, which invokes pagetitle() to get
rid of underscore escapes and wraps the resulting utf8 string
appropriately for inclusion in a dot file (using dot's html encoding
because it can represent the '\"' dyad properly, and because it doesn't
need special-casing of newlines).
the output will look much better (at least in my wikis) with the "[[bugs/pagetitle function does not respect meta titles]]" issue fixed.
the patch is stored in [[the patch.pl]] as created by git-format-patch. (btw, what's the preferred way to send patches, apart from creating a git branch somewhere?)

View File

@ -0,0 +1,68 @@
From efbb1121ffdc146f5c9a481a51f23ad151b9f240 Mon Sep 17 00:00:00 2001
From: chrysn <chrysn@fsfe.org>
Date: Thu, 15 Mar 2012 14:38:42 +0100
Subject: [PATCH] display the pagetitle() in linkmaps
without this patch, linkmaps display underscores and underscore escape
sequences in the rendered output.
this introduces a pageescape function, which invoces pagetitle() to get
rid of underscore escapes and wraps the resulting utf8 string
appropriately for inclusion in a dot file (using dot's html encoding
because it can represent the '\"' dyad properly, and because it doesn't
need special-casing of newlines).
---
IkiWiki/Plugin/linkmap.pm | 17 +++++++++++++++--
1 files changed, 15 insertions(+), 2 deletions(-)
diff --git a/IkiWiki/Plugin/linkmap.pm b/IkiWiki/Plugin/linkmap.pm
index ac26e07..b5ef1a1 100644
--- a/IkiWiki/Plugin/linkmap.pm
+++ b/IkiWiki/Plugin/linkmap.pm
@@ -5,6 +5,7 @@ use warnings;
use strict;
use IkiWiki 3.00;
use IPC::Open2;
+use HTML::Entities;
sub import {
hook(type => "getsetup", id => "linkmap", call => \&getsetup);
@@ -22,6 +23,18 @@ sub getsetup () {
my $mapnum=0;
+sub pageescape {
+ my $item = shift;
+ # encoding explicitly in case ikiwiki is configured to accept <> or &
+ # in file names
+ my $title = pagetitle($item, 1);
+ # it would not be necessary to encode *all* the html entities (<> would
+ # be sufficient, &" probably a good idea), as dot accepts utf8, but it
+ # isn't bad either
+ $title = encode_entities($title);
+ return("<$title>");
+}
+
sub preprocess (@) {
my %params=@_;
@@ -63,7 +76,7 @@ sub preprocess (@) {
my $show=sub {
my $item=shift;
if (! $shown{$item}) {
- print OUT "\"$item\" [shape=box,href=\"$mapitems{$item}\"];\n";
+ print OUT pageescape($item)." [shape=box,href=\"$mapitems{$item}\"];\n";
$shown{$item}=1;
}
};
@@ -74,7 +87,7 @@ sub preprocess (@) {
foreach my $endpoint ($item, $link) {
$show->($endpoint);
}
- print OUT "\"$item\" -> \"$link\";\n";
+ print OUT pageescape($item)." -> ".pageescape($link).";\n";
}
}
print OUT "}\n";
--
1.7.9.1

View File

@ -0,0 +1,34 @@
The [[ikiwiki/directive/listdirectives]] directive doesn't register a link between the page and the subpages. This is a problem because the [[ikiwiki/directive/orphans]] directive then marks the directives as orphans... Maybe it is a bug with the orphans directive, however... A simple workaround is to exclude those files from the orphans call... --[[anarcat]]
> There's a distinction between wikilinks (matched by `link()`,
> `backlink()` etc.) and other constructs that produce a
> hyperlink. Some directives count as a wikilink (like `tag`)
> but many don't (notably `inline`, `map`, `listdirectives`,
> and `orphans` itself). As documented in
> [[ikiwiki/directive/orphans]], orphans will tend to list
> pages that are only matched by inlines/maps, too.
>
> The rule of thumb seems to be that a link to a particular
> page counts as a wikilink, but a directive that lists
> pages matching some pattern does not; so I think
> `listdirectives` is working as intended here.
> `orphans` itself obviously shouldn't count as a wikilink,
> because that would defeat the point of it :-)
>
> Anything that uses a [[ikiwiki/pagespec]] to generate links,
> like `inline` and `map`, can't generate wikilinks, because
> wikilinks are gathered during the scan phase, and pagespecs
> can't be matched until after the scan phase has finished
> (otherwise, it'd be non-deterministic whether all wikilinks
> had been seen yet, and `link()` in pagespecs wouldn't work
> predictably).
>
> I suggest just using something like:
>
> \[[!orphans pages="* and !blog/* and !ikiwiki/directive/*"]]
>
> This wiki's example of listing [[plugins/orphans]] has a
> more elaborate pagespec, which avoids bugs, todo items etc.
> as well.
>
> --[[smcv]]

View File

@ -26,7 +26,19 @@ Is this a problem on my site or does anyone else see this?
>>> The right fix would probably be for `do=create` to allow replacing a page
>>> in the transient underlay without complaining (like the behaviour that
>>> `do=edit` normally has). That wouldn't help you unless [[plugins/autoindex]]
>>> `do=edit` normally has).
>>>> ... which it turns out it already does. --[[smcv]]
>>> That wouldn't help you unless [[plugins/autoindex]]
>>> defaulted to making transient pages (`autoindex_commit => 0`), but if we
>>> can fix [[removal_of_transient_pages]] then maybe that default can change?
>>> --[[smcv]]
>>>> It turns out that with `autoindex_commit => 0`, the failure mode is
>>>> different. The transient map is created when you attach the
>>>> attachment. When you save the page, it's written into the srcdir,
>>>> the map is deleted from the transientdir, and the ctime/mtime
>>>> in the indexdb are those of the file in the srcdir, but for some
>>>> reason the HTML output isn't re-generated (despite a refresh
>>>> happening). --[[smcv]]

View File

@ -0,0 +1,44 @@
In commit aaa72a3a8, Joey noted:
> bestlink returns '' if no existing page matches a link. This propigated
> through inline and other plugins, causing uninitialized value warnings, and
> in some cases (when filecheck was enabled) making the whole directive fail.
>
> Skipping the empty results fixes that, but this is papering over another
> problem: If the missing page is later added, there is not dependency
> information to know that the inline needs to be updated. Perhaps smcv will
> fix that later.
Potential ways this could be addressed:
* Add a presence dependency on everything the reference could match:
so if the `inline` is on `a/b/c` and the missing page is `m`,
add a `$depends_simple` `$DEPEND_PRESENCE` dependency on `a/b/c/m`,
`a/b/m`, `a/m`, `m` and (if configured) `$config{userdir}/m`
* Make the page names in `\[[!inline pagenames=...]]` count as wikilinks,
changing the behaviour of `link()` and backlinks, but causing appropriate
rebuilds via the special cases in `IkiWiki::Render`
* Extend the special cases in `IkiWiki::Render` to consider a superset of
wikilinks, to which `pagenames` would add its named pages, without
affecting `link()` and backlinks
(Note that `\[[!inline pages=...]]` cannot count as wikilinks, because
pagespecs can contain `link()`, so can't be evaluated until we know what
wikilinks exist, at which point it's too late to add more wikilinks.)
I think the presence dependency is probably the cleanest approach?
--[[smcv]]
> I think it was possibly a mistake to use wikilink style lookup for
> `pagenames`. --[[Joey]]
[[!tag patch]] [[!template id=gitbranch branch=smcv/literal-pagenames author="[[smcv]]"]]
>> I used the linking rules to make references to
>> "nearby" pages convenient, but if you'd prefer "absolute"
>> semantics, my `ready/literal-pagenames` branch does that. For
>> my main use-case for `pagenames` ([[plugins/contrib/album]])
>> it's fine either way. --[[smcv]]
>>> Ok, [[merged|done]]. I think it's more consistent this way. --[[Joey]]

View File

@ -1,6 +1,11 @@
[[!template id=gitbranch branch=smcv/ready/less-open author="[[smcv]]"]]
[[!tag patch]]
The [[plugins/opendiscussion]] plugin allows pages named according to
the `discussionpage` setting to be edited anonymously, even if
`discussion => 0` is set.
(If it respected the `discussion` option, the combination of
`opendiscussion` and `moderatedcomments` might be good for blogs.)
[[done]] --[[smcv]]

View File

@ -0,0 +1,26 @@
This would be great to see fixed. It's perplexing to have discussion => 0 in my configuration, not have any discussion links on my site, but still be able to add a discussion page by URL hacking something like this: /cgi-bin/ikiwiki/ikiwiki.cgi?page=posts%2Fdiscussion&do=edit.
spammers have figured that little trick out so I am consitently getting spammed checked into my git repository.
I'm not really sure if this patch introduced other problems, but it seems to have fixed my site:
0 mcclelland@chavez:~/.ikiwiki/IkiWiki/Plugin$ diff -u /usr/share/perl5/IkiWiki/Plugin/opendiscussion.pm opendiscussion.pm
--- /usr/share/perl5/IkiWiki/Plugin/opendiscussion.pm 2012-05-07 11:31:24.000000000 -0400
+++ opendiscussion.pm 2012-07-29 17:49:28.000000000 -0400
@@ -25,7 +25,7 @@
my $cgi=shift;
my $session=shift;
- return "" if $page=~/(\/|^)\Q$config{discussionpage}\E$/i;
+ return "" if $page=~/(\/|^)\Q$config{discussionpage}\E$/i && $config{discussion};
return "" if pagespec_match($page, "postcomment(*)");
return undef;
}
1 mcclelland@chavez:~/.ikiwiki/IkiWiki/Plugin$
If libdir is configured to be ~/.ikiwiki in your ikiwiki.settings file, and you are running Debian, you can do the following:
mkdir -p ~/.ikiwiki/IkiWiki/Plugin
cp /usr/share/perl5/IkiWiki/Plugin/opendiscussion.pm ~/.ikiwiki/IkiWiki/Plugin/
And then apply the patch above to ~/.ikiwiki/Ikiwiki/Plugin/opendiscussion.pm.

View File

@ -0,0 +1,14 @@
[[!template id=gitbranch branch=anarcat/master author="[[anarcat]]"]]
I know this sounds backwards, but it seems to me that the KML-generated map should be displayable on google maps. KML is the standard Google uses for google maps, and since we use it, we should interoperate with them. God knows why this is failing, but it is and should probably be fixed for the sake of interoperability: <https://maps.google.ca/maps?q=http:%2F%2Fwiki.reseaulibre.ca%2Fmap%2Fpois.kml> -- [[users/anarcat]]
> The KML only needs a Document tag because it uses "shared styles" -- don't ask me what this is. Here is a [[patch]]: [[https://reseaulibre.deuxpi.ca/0001-Add-Document-tag-to-OSM-plugin-KML-output.patch]] --[[deuxpi]]
> > I applied the patch to my master branch and tested it on the above URL: it works... mostly. The icons for the elements on the actual map seem incorrect (some are the proper icons, some others are the ugly default blue pin of google maps, weird) but I think this is a step in the right direction. Thus, this should be merged. -- [[anarcat]]
>>> I've cherry-picked this patch, but from the description it does not
>>> sound "fixed" enough to close this bug. (OTOH, perhaps only google can
>>> fix it, so it people are happy with the state of affairs I won't insist
>>> this bug be left open.) --[[Joey]]
> > > > I am happy with this right now, so let's mark this as [[done]]. I do agree this seems like a google bug, so let's move on. --[[anarcat]]

View File

@ -0,0 +1,32 @@
This is not a problem on Apache webservers because they, oddly enough, ignore trailing slashes on paths (maybe some `PATH_INFO` magic, no idea). But basically, in our wiki, the paths to the icon tags are generated with a trailing slash. An excerpt of our [KML file](http://wiki.reseaulibre.ca/map/pois.kml):
<Style id="/tag/up">
<IconStyle>
<Icon>
<href>http://wiki.reseaulibre.ca//tag/up/icon.png/</href>
</Icon>
</IconStyle>
</Style>
Notice the trailing `/` after the `icon.png`. This breaks display on nginx - the file that gets served isn't the icon, but the frontpage for some reason. I followed the [[setup instructions|tips/dot cgi]] for Nginx that I just had to write because there weren't any, so maybe I screwed up some part, but it does seem to me that the trailing slash is wrong regardless.
(Also notice how the style tag is being turned over backwards by the HTML sanitizer here, cute. :P)
I wrote a crude hack for this, but this strikes me as a similar problem to the one we found in [[bugs/osm linkto() usage breaks map rendering]]. However, I am at a loss how to fix this cleanly because we cannot `will_render()` the tag icons, as they are already generated out there! Weird. Anyways, here's the stupid [[patch]]:
[[!format diff """
diff --git a/IkiWiki/Plugin/osm.pm b/IkiWiki/Plugin/osm.pm
index a7baa5f..c9650d0 100644
--- a/IkiWiki/Plugin/osm.pm
+++ b/IkiWiki/Plugin/osm.pm
@@ -192,6 +192,7 @@ sub process_waypoint {
}
}
$icon = urlto($icon, $dest, 1);
+ $icon =~ s!/*$!!; # hack - urlto shouldn't be appending a slash in the first place
$tag = '' unless $tag;
register_rendered_files($map, $page, $dest);
$pagestate{$page}{'osm'}{$map}{'waypoints'}{$name} = {
"""]]
I'm not writing this to a branch out of sheer shame of my misunderstanding. ;) There also may be a workaround that could be done in Nginx too. --[[anarcat]]

View File

@ -0,0 +1,23 @@
[[!template id=gitbranch branch=anarcat/master author="[[anarcat]]"]]
Under some circumstances that remain unclear to me, the usage of `urlto()` in the revised version of the [[plugins/osm]] plugin breaks the map totally. The javascript console in Chromium tells me the following:
GET http://mesh.openisp.ca/map/pois.kml/ 404 (Not Found)
Indeed, that URL yields a 404. The proper URL is <http://mesh.openisp.ca/map/pois.kml>. --[[anarcat]]
## Proposed solution
The problem seems to be caused by `urlto()` being called for the `osm`
directive before the generated files are registered with `will_render()`
from the `waypoint` directive. Proposed patch adds a function that is
called from the `preprocess` hook for both directives that registers the
files.
Here is a [[patch]] to IkiWiki/Plugin/osm.pm: <https://reseaulibre.deuxpi.ca/0000-Fix-incorrect-URL-pointing-to-the-generated-waypoint.patch>
--[[deuxpi]]
I confirm the patch works, and I added it to my master branch. --[[anarcat]]
> [[applied|done]]. Thanks guys. --[[Joey]]

View File

@ -0,0 +1,5 @@
I have heard repeated reports on <http://mesh.openisp.ca/> that editing a page that has a waypoint in it will sometimes make that waypoint disappear from the main map. I have yet to understand why that happens or how, but multiple users have reported that.
A workaround is to rebuild the whole wiki, although sometimes re-editing the same page will bring the waypoint back on the map.
I have been able to reproduce this by simply creating a new node. It will not show up on the map until the wiki is rebuilt or the node is resaved. -- [[anarcat]]

View File

@ -279,3 +279,11 @@ So, looking at your meta branch: --[[Joey]]
>>>> for the po plugin, because I want to merge the po plugin soon.
>>>> If #2 gets tackled later, we will certainly have all kinds of fun.
>>>> no matter what is done for the po plugin. --[[Joey]]
>>>>> For the record: I've gotten used to the lack of this feature,
>>>>> and it now seems much less important to me than it was when
>>>>> initially developing the po plugin. So, I'm hereby officially
>>>>> removing this from my plate. If anyone else wants to start from
>>>>> scratch, or from my initial work, I'm happy to review the
>>>>> po-related part of things -- just drop me an email in this
>>>>> case. --[[intrigeri]]

View File

@ -25,3 +25,49 @@ pages, until this is fixed. --[[Joey]]
>>>> to affect by web edits. The `-f` check seems rather redundant,
>>>> surely if it's in `%pagesources` ikiwiki has already verified it's
>>>> safe. --[[Joey]]
----
[[!template id=gitbranch branch=smcv/ready/transient-rm author="[[Simon McVittie|smcv]]"]]
Here's a branch. It special-cases the `$transientdir`, but in such a way
that the special case could easily be extended to other locations where
deletion should be allowed.
It also changes `IkiWiki::prune()` to optionally stop pruning empty
parent directories at the point where you'd expect it to (for instance,
previously it would remove the `$transientdir` itself, if it turns out
to be empty), and updates callers.
The new `prune` API looks like this:
IkiWiki::prune("$config{srcdir}/$file", $config{srcdir});
with the second argument optional. I wonder whether it ought to look
more like `writefile`:
IkiWiki::prune($config{srcdir}, $file);
although that would be either an incompatible change to internal API
(forcing all callers to update to 2-argument), or being a bit
inconsistent between the one- and two-argument forms. Thoughts?
--[[smcv]]
> I've applied the branch as-is, so this bug is [[done]].
> `prune` is not an exported API so changing it would be ok..
> I think required 2-argument would be better, but have not checked
> all the call sites to see if the `$file` is available split out
> as that would need. --[[Joey]]
[[!template id=gitbranch branch=smcv/ready/prune author="[[Simon McVittie|smcv]]"]]
>> Try this, then? I had to make some changes to `attachment`
>> to make the split versions available. I suggest reviewing
>> patch-by-patch.
>>
>> I also tried to fix a related bug which I found while testing it:
>> the special case for renaming held attachments didn't seem to work.
>> (`smcv/wip/rename-held`.) Unfortunately, it seems that with that
>> change, the held attachment is committed to the `srcdir` when you
>> rename it, which doesn't seem to be the intention either? --[[smcv]]

View File

@ -0,0 +1,12 @@
When renaming a page here, ikiwiki destroyed unrelated links from unrelated pages. You can see the effect [here](http://mesh.openisp.ca/recentchanges/#diff-dc8dfa96efd3a4d649f571c3aa776f20b3ce0131), or by checking out the git tree (`git://mesh.openisp.ca/`) and looking at commit `dc8dfa96efd3a4d649f571c3aa776f20b3ce0131`.
The renamed page was `configuration/bat-hosts` to `configuration/batman/bat-hosts` and the deleted links were `\[[AUR | https://aur.archlinux.org/]]` and `\[[CHANGELOG|http://svn.dd-wrt.com:8000/browser/src/router/batman-adv/CHANGELOG]]`. --[[anarcat]]
> <del>Nevermind that, that commit was unrelated to the rename and probably an operator error.</del> - No, actually, I just reproduced this again - see [another example](http://mesh.openisp.ca/recentchanges/#diff-d67dc2f0fdc149b13122fd6cba887a01c693e949).
>> Looks like these all involve the wacky wikilink form that includes an
>> external url in the link. Fixed rename code to know about those.
>> [[done]] --[[Joey]]
>>> Phew!!! Thanks a *lot* for that one, it was really annoying! :) --[[anarcat]]

View File

@ -0,0 +1,3 @@
The [[/ikiwiki/directive/toc]] directive scrapes all headings from the page, including those in the sidebar. So, if the sidebar includes navigational headers, every page with a table of contents will display those navigational headers before the headers in that page's content.
I'd like some way to exclude the sidebar from the table of contents. As discussed via Jabber, perhaps toc could have a config option to ignore headers inside a nav tag or a tag with id="sidebar".

View File

@ -0,0 +1,95 @@
I've just modified the trail plugin to use only presence, and not
content dependencies. Using content dependencies, particularly to the page
that defines the trail, meant that every time that page changed, *every*
page in the trail gets rebuilt. This leads to users setting up sites that
have horrible performance, if the trail is defined in, for example, the top
page of a blog.
Unfortunately, this change to presence dependencies has
introduced a bug. Now when an existing trail is removed, the pages in the
trail don't get rebuilt to remove the trail (both html display and state).
> Actually, this particular case is usually OK. Suppose a trail `untrail`
> contains `untrail/a` (as is the case in the regression
> test I'm writing), and you build the wiki, then edit `untrail` to no
> longer be a trail, and refresh. `untrail` has changed, so it is
> rendered. Assuming that the template of either `untrail` or another
> changed page happens to contain the `TRAILS` variable (which is not
> guaranteed, but is highly likely), `I::P::t::prerender`
> is invoked. It notices that `untrail/a` was previously a trail
> member and is no longer, and rebuilds it with the diagnostic
> "building untrail/a, its previous or next page has changed".
>
> Strictly speaking, I should change `I::P::t::build_affected`
> so it calls `prerender`, so we're guaranteed to have done the
> recalculation. Fixed in my branch. --[[smcv]]
I think that to fix this bug, the plugin should use a hook to
force rebuilding of all the pages that were in the trail, when
the trail is removed (or changed).
> The case of "the trail is changed" is still broken:
> if the order of items changes, or the trail is removed,
> then the logic above means it's OK, but if you
> change the `\[[!meta title]]` of the trail, or anything else
> used in the prev/up/next bar, the items won't show that
> change. Fixed in my branch. --[[smcv]]
There's a difficulty in doing that: The needsbuild hook runs before the scan
hook, so before it has a chance to see if the trail directive is still there.
It'd need some changes to ikiwiki's hooks.
> That's what `build_affected` is for, and trail already used it. --s
(An improvement in this area would probably simplify other plugins, which
currently abuse the needsbuild hook to unset state, to handle the case
where the directive that resulted in that state is removed.)
I apologise for introducing a known bug, but the dependency mess was too
bad to leave as-is. And I have very little time (and regrettably, even less
power) to deal with it right now. :( --[[Joey]]
[[!template id=gitbranch branch=smcv/ready/trail author="[[Simon_McVittie|smcv]]"]]
[[!tag patch]]
> I believe my `ready/trail` branch fixes this. There are regression tests.
>
> Here is an analysis of how the trail pages interdepend.
>
> * If *trail* contains a page *member* which does exist, *member* depends
> on *trail*. This is so that if the trail directive is deleted from
> *trail*, or if *trail*'s "friendly" title or trail settings are changed,
> the trail navigation bar in *member* will pick up that change. This is
> now only a presence dependency, which isn't enough to make those happen
> correctly. [Edited to add: actually, the title is the only thing that
> can affect *member* without affecting the order of members.]
>
> * If *trail* contains consecutive pages *m1* and *m2* in that order,
> *m1* and *m2* depend on each other. This is so that if one's
> "friendly" title changes, the other is rebuilt. This is now only
> a presence dependency, which isn't enough to make those happen
> correctly. In my branch, I explicitly track the "friendly" title
> for every page that's edited and is involved in a trail somehow.
>
> * If *trail* has *member* in its `pagenames` but there is no page called
> *member*, then *trail* must be rebuilt if *member* is created. This
> was always a presence dependency, and is fine.
>
> In addition, the `trail` plugin remembers the maps
> { trail => next item in that trail } and { trail => previous item in
> that trail } for each page. If either changes, the page gets rebuilt
> by `build_affected`, with almost the same logic as is used to update
> pages that link to a changed page. My branch extends this to track the
> "friendly title" of each page involved in a trail, either by being
> the trail itself or a member (or both).
>
> I think it's true to say that the trail always depends on every member,
> even if it doesn't display them. This might mean that we can use
> "render the trail page" as an opportunity to work out whether any of
> its members are also going to need re-rendering?
> [Edited to add: actually, I didn't need this to be true, but I made the
> regression test check it anyway.]
>
> --[[smcv]]
>>> Thanks **very** much! [[done]] --[[Joey]]

View File

@ -0,0 +1,3 @@
When commenting on, or I think editing, a page that uses the trail
plugin, the trail is displayed across the top of the page. This should not
happen, probably. --[[Joey]]

View File

@ -0,0 +1,97 @@
[[!template id=gitbranch branch=smcv/trail author=smcv]] [[!tag patch]]
`t/trail.t` has some test suite failures. This is after applying
[[smcv]]'s patch that fixed some races that caused it to fail
sometimes. These remaining failures may also be intermittent,
although I can get them reliably on my laptop. I've added some debugging
output, which seems to point to an actual bug in the plugin AFAICS. --[[Joey]]
> I can reproduce this reliably at 0a23666ddd but not 3.20120203. Bisecting
> indicates that it regressed in aaa72a3a80f, "inline: When the pagenames list
> includes pages that do not exist, skip them".
>
> I don't think this is the bug noted in the commit message - the inline
> containing `sorting/new` uses `pages`, not `pagenames`. --[[smcv]]
>> It seems you removed `trail` support from `inline` in that commit.
>> Assuming that wasn't intentional, this is fixed in `smcv/trail`.
>> --[[smcv]]
>>> Looks like a bad merge of some kind. pulled, [[done]] --[[Joey]]
<pre>
ok 71 - expected n=sorting/end p=sorting/beginning in sorting/middle.html
not ok 72 - expected n=sorting/new p=sorting/middle in sorting/end.html
# Failed test 'expected n=sorting/new p=sorting/middle in sorting/end.html'
# at t/trail.t line 13.
# got: 'n=sorting/linked2 p=sorting/middle'
# expected: 'n=sorting/new p=sorting/middle'
not ok 73 - expected n=sorting/old p=sorting/end in sorting/new.html
# Failed test 'expected n=sorting/old p=sorting/end in sorting/new.html'
# at t/trail.t line 13.
# got: undef
# expected: 'n=sorting/old p=sorting/end'
not ok 74 - expected n=sorting/ancient p=sorting/new in sorting/old.html
# Failed test 'expected n=sorting/ancient p=sorting/new in sorting/old.html'
# at t/trail.t line 13.
# got: undef
# expected: 'n=sorting/ancient p=sorting/new'
not ok 75 - expected n=sorting/linked2 p=sorting/old in sorting/ancient.html
# Failed test 'expected n=sorting/linked2 p=sorting/old in sorting/ancient.html'
# at t/trail.t line 13.
# got: undef
# expected: 'n=sorting/linked2 p=sorting/old'
not ok 76 - expected n= p=sorting/ancient in sorting/linked2.html
# Failed test 'expected n= p=sorting/ancient in sorting/linked2.html'
# at t/trail.t line 13.
# got: 'n= p=sorting/end'
# expected: 'n= p=sorting/ancient'
ok 77
</pre>
Here, the "new" page does not seem to be included into the trail as expected.
Looking at the rendered page, there is no trail directive output on it either.
--[[Joey]]
<pre>
ok 90
not ok 91 - expected n=sorting/new p= in sorting/old.html
# Failed test 'expected n=sorting/new p= in sorting/old.html'
# at t/trail.t line 13.
# got: undef
# expected: 'n=sorting/new p='
not ok 92 - expected n=sorting/middle p=sorting/old in sorting/new.html
# Failed test 'expected n=sorting/middle p=sorting/old in sorting/new.html'
# at t/trail.t line 13.
# got: undef
# expected: 'n=sorting/middle p=sorting/old'
not ok 93 - expected n=sorting/linked2 p=sorting/new in sorting/middle.html
# Failed test 'expected n=sorting/linked2 p=sorting/new in sorting/middle.html'
# at t/trail.t line 13.
# got: 'n=sorting/linked2 p='
# expected: 'n=sorting/linked2 p=sorting/new'
ok 94 - expected n=sorting/linked p=sorting/middle in sorting/linked2.html
ok 95 - expected n=sorting/end p=sorting/linked2 in sorting/linked.html
ok 96 - expected n=sorting/a/c p=sorting/linked in sorting/end.html
ok 97 - expected n=sorting/beginning p=sorting/end in sorting/a/c.html
ok 98 - expected n=sorting/a/b p=sorting/a/c in sorting/beginning.html
not ok 99 - expected n=sorting/ancient p=sorting/beginning in sorting/a/b.html
# Failed test 'expected n=sorting/ancient p=sorting/beginning in sorting/a/b.html'
# at t/trail.t line 13.
# got: 'n=sorting/z/a p=sorting/beginning'
# expected: 'n=sorting/ancient p=sorting/beginning'
not ok 100 - expected n=sorting/z/a p=sorting/a/b in sorting/ancient.html
# Failed test 'expected n=sorting/z/a p=sorting/a/b in sorting/ancient.html'
# at t/trail.t line 13.
# got: undef
# expected: 'n=sorting/z/a p=sorting/a/b'
not ok 101 - expected n= p=sorting/ancient in sorting/z/a.html
# Failed test 'expected n= p=sorting/ancient in sorting/z/a.html'
# at t/trail.t line 13.
# got: 'n= p=sorting/a/b'
# expected: 'n= p=sorting/ancient'
ok 102
</pre>
Haven't investigated, but looks like the same sort of problem, a
page expected to be in the trail isn't. --[[Joey]]

View File

@ -1,7 +1,31 @@
mkdir -p ikiwiki-tag-test/raw/a_dir/ ikiwiki-tag-test/rendered/
echo '[[!taglink a_tag]]' > ikiwiki-tag-test/raw/a_dir/a_page.mdwn
echo '\[[!taglink a_tag]]' > ikiwiki-tag-test/raw/a_dir/a_page.mdwn
ikiwiki --verbose --plugin tag --plugin autoindex --plugin mdwn --set autoindex_commit=0 --set tagbase=tag --set tag_autocreate=1 --set tag_autocreate_commit=0 ikiwiki-tag-test/raw/ ikiwiki-tag-test/rendered/
ls -al ikiwiki-tag-test/raw/.ikiwiki/transient/
ls -al ikiwiki-tag-test/rendered/tag/
Shouldn't `ikiwiki-tag-test/raw/.ikiwiki/transient/tag.mdwn` and `ikiwiki-tag-test/rendered/tag/index.html` exist?
[[!tag patch]]
[[!template id=gitbranch branch=smcv/ready/autoindex author=smcv]]
[[!template id=gitbranch branch=smcv/ready/autoindex-more-often author=smcv]]
> To have a starting point to (maybe) change this, my `ready/autoindex`
> branch adds a regression test for the current behaviour, both with
> and without `autoindex_commit` enabled. It also fixes an unnecessary
> and potentially harmful special case for the transient directory.
>
> The fact that files in underlays (including transient files) don't
> trigger autoindexing is deliberate. However, this is the second
> request to change this behaviour: the first was
> [[!debbug 611068]], which has a patch from Tuomas Jormola.
> On that bug report, Joey explains why it's undesirable
> for the original behaviour of autoindex (when the
> index isn't transient).
>
> I'm not sure whether the same reasoning still applies when the
> index is transient, though (`autoindex_commit => 0`),
> because the index pages won't be cluttering up people's
> git repositories any more? My `autoindex-more` branch changes
> the logic so it will do what you want in the `autoindex_commit => 0`
> case, and amends the appropriate regression test. --[[smcv]]

View File

@ -46,4 +46,22 @@ and have it render like:
> there should give some strong hints how to fix this bug, though I haven't
> tried to apply the method yet. --[[Joey]]
>> As far as I can see, the smileys bug is solved by checking for code/pre. In
>> this case, however, this is not applicable. WikiLinks/directives *should* be
>> expanded before passing text to formatter, as their expansion may contain
>> markup. Directives should be processed before, as they may provide *partial*
>> markup (eg `template` ones), that have no sense except when in the page
>> context. Links should be processed before, because, at least multimarkdown may
>> try to expand them as anchor-links.
>>
>> For now, my partial solution is to restrict links to not have space at the
>> start, this way in many cases escaping in code may be done in natural way
>> and not break copypastability. For example, shell 'if \[[ condition ]];'
>> will work fine with this.
>>
>> Maybe directives can also be restricted to only be allowed on the line by
>> themselves (not separated by blank lines, however) or something similar.
>>
>> --[[isbear]]
[[!debbug 487397]]

View File

@ -1,3 +1,8 @@
in ikiwiki instances that don't reside in the git root directory (the only ones i know of are ikiwiki itself), reverts show the wrong link in the recentchanges (for example, in the ikiwiki main repository's 4530430 and its revert, the main index page was edited, but the revert shows doc/index as a link).
the expected behavior is to compensate for the modified root directory (i.e., show index instead of doc/index).
> This seems to work OK now - commit 84c4ca33 and its reversion both
> appear correctly in [[recentchanges]]. Looking at git history,
> Joey [[fixed this|done]] in commit 1b6c1895 before 3.20120203.
> --[[smcv]]

View File

@ -0,0 +1,13 @@
YAML::XS is not listed as a dep in the spec file which results in
```
HOME=/home/me /usr/bin/perl -Iblib/lib ikiwiki.in -dumpsetup ikiwiki.setup
Can't locate YAML/XS.pm in @INC (@INC contains: . blib/lib /usr/local/lib64/perl5 /usr/local/share/perl5 /usr/lib64/perl5/vendor_perl /usr/share/perl5/vendor_perl /usr/lib64/perl5 /usr/share/perl5) at (eval 39) line 2.
BEGIN failed--compilation aborted at (eval 39) line 2.
make: *** [ikiwiki.setup] Error 2
error: Bad exit status from /var/tmp/rpm-tmp.Sgq2QK (%build)
```
when trying to build
> Ok, added. [[done]] --[[Joey]]

Some files were not shown because too many files have changed in this diff Show More