commit ef003f48f4

 IkiWiki.pm | 79
@@ -18,15 +18,15 @@ use vars qw{%config %links %oldlinks %pagemtime %pagectime %pagecase

 use Exporter q{import};
 our @EXPORT = qw(hook debug error template htmlpage add_depends pagespec_match
-		bestlink htmllink readfile writefile pagetype srcfile pagename
-		displaytime will_render gettext urlto targetpage
-		add_underlay pagetitle titlepage linkpage newpagefile
-		inject
+		pagespec_match_list bestlink htmllink readfile writefile
+		pagetype srcfile pagename displaytime will_render gettext urlto
+		targetpage add_underlay pagetitle titlepage linkpage
+		newpagefile inject add_link
 		%config %links %pagestate %wikistate %renderedfiles
 		%pagesources %destsources);
 our $VERSION = 3.00; # plugin interface version, next is ikiwiki version
 our $version='unknown'; # VERSION_AUTOREPLACE done by Makefile, DNE
-our $installdir=''; # INSTALLDIR_AUTOREPLACE done by Makefile, DNE
+our $installdir='/usr'; # INSTALLDIR_AUTOREPLACE done by Makefile, DNE

 # Optimisation.
 use Memoize;
@@ -157,6 +157,13 @@ sub getsetup () {
 		safe => 0, # path
 		rebuild => 0,
 	},
+	underlaydirbase => {
+		type => "internal",
+		default => "$installdir/share/ikiwiki",
+		description => "parent directory containing additional underlays",
+		safe => 0,
+		rebuild => 0,
+	},
 	wrappers => {
 		type => "internal",
 		default => [],
@@ -715,7 +722,7 @@ sub add_underlay ($) {
 	my $dir=shift;

 	if ($dir !~ /^\//) {
-		$dir="$config{underlaydir}/../$dir";
+		$dir="$config{underlaydirbase}/$dir";
 	}

 	if (! grep { $_ eq $dir } @{$config{underlaydirs}}) {
@@ -1671,12 +1678,6 @@ sub rcs_receive () {
 	$hooks{rcs}{rcs_receive}{call}->();
 }

-sub safequote ($) {
-	my $s=shift;
-	$s=~s/[{}]//g;
-	return "q{$s}";
-}
-
 sub add_depends ($$) {
 	my $page=shift;
 	my $pagespec=shift;
@@ -1757,6 +1758,14 @@ sub inject {
 	use warnings;
 }

+sub add_link ($$) {
+	my $page=shift;
+	my $link=shift;
+
+	push @{$links{$page}}, $link
+		unless grep { $_ eq $link } @{$links{$page}};
+}
+
 sub pagespec_merge ($$) {
 	my $a=shift;
 	my $b=shift;
@@ -1770,6 +1779,7 @@ sub pagespec_translate ($) {

 	# Convert spec to perl code.
 	my $code="";
+	my @data;
 	while ($spec=~m{
 		\s*		# ignore whitespace
 		(		# 1: match a single word
@@ -1797,14 +1807,17 @@ sub pagespec_translate ($) {
 		}
 		elsif ($word =~ /^(\w+)\((.*)\)$/) {
 			if (exists $IkiWiki::PageSpec::{"match_$1"}) {
-				$code.="IkiWiki::PageSpec::match_$1(\$page, ".safequote($2).", \@_)";
+				push @data, $2;
+				$code.="IkiWiki::PageSpec::match_$1(\$page, \$data[$#data], \@_)";
 			}
 			else {
-				$code.="IkiWiki::FailReason->new(".safequote(qq{unknown function in pagespec "$word"}).")";
+				push @data, qq{unknown function in pagespec "$word"};
+				$code.="IkiWiki::ErrorReason->new(\$data[$#data])";
 			}
 		}
 		else {
-			$code.=" IkiWiki::PageSpec::match_glob(\$page, ".safequote($word).", \@_)";
+			push @data, $word;
+			$code.=" IkiWiki::PageSpec::match_glob(\$page, \$data[$#data], \@_)";
 		}
 	}

@@ -1827,11 +1840,35 @@ sub pagespec_match ($$;@) {
 	}

 	my $sub=pagespec_translate($spec);
-	return IkiWiki::FailReason->new("syntax error in pagespec \"$spec\"")
+	return IkiWiki::ErrorReason->new("syntax error in pagespec \"$spec\"")
 		if $@ || ! defined $sub;
 	return $sub->($page, @params);
 }

+sub pagespec_match_list ($$;@) {
+	my $pages=shift;
+	my $spec=shift;
+	my @params=@_;
+
+	my $sub=pagespec_translate($spec);
+	error "syntax error in pagespec \"$spec\""
+		if $@ || ! defined $sub;
+
+	my @ret;
+	my $r;
+	foreach my $page (@$pages) {
+		$r=$sub->($page, @params);
+		push @ret, $page if $r;
+	}
+
+	if (! @ret && defined $r && $r->isa("IkiWiki::ErrorReason")) {
+		error(sprintf(gettext("cannot match pages: %s"), $r));
+	}
+	else {
+		return @ret;
+	}
+}
+
 sub pagespec_valid ($) {
 	my $spec=shift;

@@ -1861,6 +1898,10 @@ sub new {
 	return bless \$value, $class;
 }

+package IkiWiki::ErrorReason;
+
+our @ISA = 'IkiWiki::FailReason';
+
 package IkiWiki::SuccessReason;

 use overload (
@@ -2021,7 +2062,7 @@ sub match_user ($$;@) {
 	my %params=@_;

 	if (! exists $params{user}) {
-		return IkiWiki::FailReason->new("no user specified");
+		return IkiWiki::ErrorReason->new("no user specified");
 	}

 	if (defined $params{user} && lc $params{user} eq lc $user) {
@@ -2041,7 +2082,7 @@ sub match_admin ($$;@) {
 	my %params=@_;

 	if (! exists $params{user}) {
-		return IkiWiki::FailReason->new("no user specified");
+		return IkiWiki::ErrorReason->new("no user specified");
 	}

 	if (defined $params{user} && IkiWiki::is_admin($params{user})) {
@@ -2061,7 +2102,7 @@ sub match_ip ($$;@) {
 	my %params=@_;

 	if (! exists $params{ip}) {
-		return IkiWiki::FailReason->new("no IP specified");
+		return IkiWiki::ErrorReason->new("no IP specified");
 	}

 	if (defined $params{ip} && lc $params{ip} eq lc $ip) {

@@ -9,6 +9,7 @@ my $defaulturl='http://test.blogspam.net:8888/';

 sub import {
 	hook(type => "getsetup", id => "blogspam", call => \&getsetup);
+	hook(type => "checkconfig", id => "blogspam", call => \&checkconfig);
 	hook(type => "checkcontent", id => "blogspam", call => \&checkcontent);
 }

@@ -43,17 +44,19 @@ sub getsetup () {
 	},
 }

-sub checkcontent (@) {
-	my %params=@_;
-
+sub checkconfig () {
+	# This is done at checkconfig time because printing an error
+	# if the module is missing when a spam is posted would not
+	# let the admin know about the problem.
 	eval q{
 		use RPC::XML;
 		use RPC::XML::Client;
 	};
-	if ($@) {
-		warn($@);
-		return undef;
-	}
+	error $@ if $@;
+}
+
+sub checkcontent (@) {
+	my %params=@_;

 	if (exists $config{blogspam_pagespec}) {
 		return undef
@@ -28,18 +28,17 @@ sub preprocess (@) {
 	add_depends($params{page}, $params{pages});

 	my %broken;
-	foreach my $page (keys %links) {
-		if (pagespec_match($page, $params{pages}, location => $params{page})) {
-			my $discussion=gettext("discussion");
-			my %seen;
-			foreach my $link (@{$links{$page}}) {
-				next if $seen{$link};
-				$seen{$link}=1;
-				next if $link =~ /.*\/\Q$discussion\E/i && $config{discussion};
-				my $bestlink=bestlink($page, $link);
-				next if length $bestlink;
-				push @{$broken{$link}}, $page;
-			}
+	foreach my $page (pagespec_match_list([keys %links],
+			$params{pages}, location => $params{page})) {
+		my $discussion=gettext("discussion");
+		my %seen;
+		foreach my $link (@{$links{$page}}) {
+			next if $seen{$link};
+			$seen{$link}=1;
+			next if $link =~ /.*\/\Q$discussion\E/i && $config{discussion};
+			my $bestlink=bestlink($page, $link);
+			next if length $bestlink;
+			push @{$broken{$link}}, $page;
 		}
 	}

@@ -369,8 +369,7 @@ sub preprocess (@) {
 	my $page =$params{page};

 	if (! defined $cache{$pagespec}) {
-		foreach my $p (keys %pagesources) {
-			next unless pagespec_match($p, $pagespec);
+		foreach my $p (pagespec_match_list([keys %pagesources], $pagespec)) {
 			my $mtime = $IkiWiki::pagectime{$p};
 			my $src = $pagesources{$p};
 			my @date = localtime($mtime);
|
|||
my $content=$params{content};
|
||||
|
||||
while ($content =~ /$link_regexp/g) {
|
||||
push @{$links{$page}}, linkpage($1) unless ignored($1)
|
||||
add_link($page, linkpage($1)) unless ignored($1)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -317,6 +317,13 @@ sub editcomment ($$) {
 			force => 1);
 	}

+	if (! defined $session->param('name')) {
+		# Make signinurl work and return here.
+		$form->tmpl_param(signinurl => IkiWiki::cgiurl(do => 'signin'));
+		$session->param(postsignin => $ENV{QUERY_STRING});
+		IkiWiki::cgi_savesession($session);
+	}
+
 	# The untaint is OK (as in editpage) because we're about to pass
 	# it to file_pruned anyway
 	my $page = $form->field('page');
@@ -276,8 +276,9 @@ sub cgi_editpage ($$) {

 	my @page_types;
 	if (exists $hooks{htmlize}) {
-		@page_types=grep { !/^_/ }
-			keys %{$hooks{htmlize}};
+		foreach my $key (grep { !/^_/ } keys %{$hooks{htmlize}}) {
+			push @page_types, [$key, $hooks{htmlize}{$key}{longname} || $key];
+		}
 	}

 	$form->tmpl_param("page_select", 1);
@@ -230,10 +230,13 @@ sub hook ($@) {
 }

 sub pagespec_match ($@) {
-	# convert pagespec_match's return object into a XML RPC boolean
+	# convert return object into a XML RPC boolean
 	my $plugin=shift;
 	my $page=shift;
 	my $spec=shift;

-	return RPC::XML::boolean->new(0 + IkiWiki::pagespec_march(@_));
+	return RPC::XML::boolean->new(0 + IkiWiki::pagespec_match(
+			$page, $spec, @_));
 }

 1
@@ -71,13 +71,13 @@ sub match_maxsize ($$;@) {
 	my $page=shift;
 	my $maxsize=eval{IkiWiki::Plugin::filecheck::parsesize(shift)};
 	if ($@) {
-		return IkiWiki::FailReason->new("unable to parse maxsize (or number too large)");
+		return IkiWiki::ErrorReason->new("unable to parse maxsize (or number too large)");
 	}

 	my %params=@_;
 	my $file=exists $params{file} ? $params{file} : $IkiWiki::pagesources{$page};
 	if (! defined $file) {
-		return IkiWiki::FailReason->new("no file specified");
+		return IkiWiki::ErrorReason->new("no file specified");
 	}

 	if (-s $file > $maxsize) {
@@ -92,13 +92,13 @@ sub match_minsize ($$;@) {
 	my $page=shift;
 	my $minsize=eval{IkiWiki::Plugin::filecheck::parsesize(shift)};
 	if ($@) {
-		return IkiWiki::FailReason->new("unable to parse minsize (or number too large)");
+		return IkiWiki::ErrorReason->new("unable to parse minsize (or number too large)");
 	}

 	my %params=@_;
 	my $file=exists $params{file} ? $params{file} : $IkiWiki::pagesources{$page};
 	if (! defined $file) {
-		return IkiWiki::FailReason->new("no file specified");
+		return IkiWiki::ErrorReason->new("no file specified");
 	}

 	if (-s $file < $minsize) {
@@ -116,14 +116,14 @@ sub match_mimetype ($$;@) {
 	my %params=@_;
 	my $file=exists $params{file} ? $params{file} : $IkiWiki::pagesources{$page};
 	if (! defined $file) {
-		return IkiWiki::FailReason->new("no file specified");
+		return IkiWiki::ErrorReason->new("no file specified");
 	}

 	# Use ::magic to get the mime type, the idea is to only trust
 	# data obtained by examining the actual file contents.
 	eval q{use File::MimeInfo::Magic};
 	if ($@) {
-		return IkiWiki::FailReason->new("failed to load File::MimeInfo::Magic ($@); cannot check MIME type");
+		return IkiWiki::ErrorReason->new("failed to load File::MimeInfo::Magic ($@); cannot check MIME type");
 	}
 	my $mimetype=File::MimeInfo::Magic::magic($file);
 	if (! defined $mimetype) {
@@ -149,12 +149,12 @@ sub match_virusfree ($$;@) {
 	my %params=@_;
 	my $file=exists $params{file} ? $params{file} : $IkiWiki::pagesources{$page};
 	if (! defined $file) {
-		return IkiWiki::FailReason->new("no file specified");
+		return IkiWiki::ErrorReason->new("no file specified");
 	}

 	if (! exists $IkiWiki::config{virus_checker} ||
 	    ! length $IkiWiki::config{virus_checker}) {
-		return IkiWiki::FailReason->new("no virus_checker configured");
+		return IkiWiki::ErrorReason->new("no virus_checker configured");
 	}

 	# The file needs to be fed into the virus checker on stdin,
@@ -162,7 +162,7 @@ sub match_virusfree ($$;@) {
 	# used, clamd would fail to read it.
 	eval q{use IPC::Open2};
 	error($@) if $@;
-	open (IN, "<", $file) || return IkiWiki::FailReason->new("failed to read file");
+	open (IN, "<", $file) || return IkiWiki::ErrorReason->new("failed to read file");
 	binmode(IN);
 	my $sigpipe=0;
 	$SIG{PIPE} = sub { $sigpipe=1 };
@@ -43,7 +43,7 @@ sub preprocess (@) {
 		return '';
 	}

-	push @{$links{$params{page}}}, $image;
+	add_link($params{page}, $image);
 	# optimisation: detect scan mode, and avoid generating the image
 	if (! defined wantarray) {
 		return;
@@ -183,13 +183,9 @@ sub preprocess_inline (@) {
 		$params{template} = $archive ? "archivepage" : "inlinepage";
 	}

-	my @list;
-	foreach my $page (keys %pagesources) {
-		next if $page eq $params{page};
-		if (pagespec_match($page, $params{pages}, location => $params{page})) {
-			push @list, $page;
-		}
-	}
+	my @list=pagespec_match_list(
+		[ grep { $_ ne $params{page} } keys %pagesources ],
+		$params{pages}, location => $params{page});

 	if (exists $params{sort} && $params{sort} eq 'title') {
 		@list=sort { pagetitle(basename($a)) cmp pagetitle(basename($b)) } @list;
@@ -346,7 +342,7 @@ sub preprocess_inline (@) {
 			my $content=get_inline_content($page, $params{destpage});
 			$template->param(content => $content);
 		}
-		$template->param(pageurl => urlto(bestlink($params{page}, $page), $params{destpage}));
+		$template->param(pageurl => urlto($page, $params{destpage}));
 		$template->param(inlinepage => $page);
 		$template->param(title => pagetitle(basename($page)));
 		$template->param(ctime => displaytime($pagectime{$page}, $params{timeformat}));
@@ -86,7 +86,7 @@ sub scan (@) {
 	my $content=$params{content};

 	while ($content =~ /(?<!\\)$link_regexp/g) {
-		push @{$links{$page}}, linkpage($2);
+		add_link($page, linkpage($2));
 	}
 }

@@ -32,32 +32,31 @@ sub preprocess (@) {

 	# Get all the items to map.
 	my %mapitems;
-	foreach my $page (keys %pagesources) {
-		if (pagespec_match($page, $params{pages}, location => $params{page})) {
-			if (exists $params{show} &&
-			    exists $pagestate{$page} &&
-			    exists $pagestate{$page}{meta}{$params{show}}) {
-				$mapitems{$page}=$pagestate{$page}{meta}{$params{show}};
-			}
-			else {
-				$mapitems{$page}='';
-			}
-			# Check for a common prefix.
-			if (! defined $common_prefix) {
-				$common_prefix=$page;
-			}
-			elsif (length $common_prefix &&
-			       $page !~ /^\Q$common_prefix\E(\/|$)/) {
-				my @a=split(/\//, $page);
-				my @b=split(/\//, $common_prefix);
-				$common_prefix="";
-				while (@a && @b && $a[0] eq $b[0]) {
-					if (length $common_prefix) {
-						$common_prefix.="/";
-					}
-					$common_prefix.=shift(@a);
-					shift @b;
-				}
+	foreach my $page (pagespec_match_list([keys %pagesources],
+			$params{pages}, location => $params{page})) {
+		if (exists $params{show} &&
+		    exists $pagestate{$page} &&
+		    exists $pagestate{$page}{meta}{$params{show}}) {
+			$mapitems{$page}=$pagestate{$page}{meta}{$params{show}};
+		}
+		else {
+			$mapitems{$page}='';
+		}
+		# Check for a common prefix.
+		if (! defined $common_prefix) {
+			$common_prefix=$page;
+		}
+		elsif (length $common_prefix &&
+		       $page !~ /^\Q$common_prefix\E(\/|$)/) {
+			my @a=split(/\//, $page);
+			my @b=split(/\//, $common_prefix);
+			$common_prefix="";
+			while (@a && @b && $a[0] eq $b[0]) {
+				if (length $common_prefix) {
+					$common_prefix.="/";
+				}
+				$common_prefix.=shift(@a);
+				shift @b;
+			}
 		}
 	}

@@ -8,7 +8,7 @@ use IkiWiki 3.00;

 sub import {
 	hook(type => "getsetup", id => "mdwn", call => \&getsetup);
-	hook(type => "htmlize", id => "mdwn", call => \&htmlize);
+	hook(type => "htmlize", id => "mdwn", call => \&htmlize, longname => "Markdown");
 }

 sub getsetup () {
@@ -110,7 +110,7 @@ sub preprocess (@) {
 	}
 	elsif ($key eq 'link' && ! %params) {
 		# hidden WikiLink
-		push @{$links{$page}}, $value;
+		add_link($page, $value);
 		return "";
 	}
 	elsif ($key eq 'author') {
@@ -35,9 +35,10 @@ sub preprocess (@) {

 	my @orphans;
 	my $discussion=gettext("discussion");
-	foreach my $page (keys %pagesources) {
-		next if $linkedto{$page} || $page eq 'index';
-		next unless pagespec_match($page, $params{pages}, location => $params{page});
+	foreach my $page (pagespec_match_list(
+			[ grep { ! $linkedto{$_} && $_ ne 'index' }
+				keys %pagesources ],
+			$params{pages}, location => $params{page})) {
 		# If the page has a link to some other page, it's
 		# indirectly linked to a page via that page's backlinks.
 		next if grep {
@@ -26,13 +26,9 @@ sub preprocess (@) {
 	# register a dependency.
 	add_depends($params{page}, $params{pages});

-	my @pages=keys %pagesources;
-	return $#pages+1 if $params{pages} eq "*"; # optimisation
-	my $count=0;
-	foreach my $page (@pages) {
-		$count++ if pagespec_match($page, $params{pages}, location => $params{page});
-	}
-	return $count;
+	my @pages=pagespec_match_list([keys %pagesources], $params{pages}, location => $params{page})
+		if $params{pages} ne "*"; # optimisation;
+	return $#pages+1;
 }

 1
@@ -41,12 +41,11 @@ sub preprocess (@) {

 	my %counts;
 	my $max = 0;
-	foreach my $page (keys %links) {
-		if (pagespec_match($page, $params{pages}, location => $params{page})) {
-			use IkiWiki::Render;
-			$counts{$page} = scalar(IkiWiki::backlinks($page));
-			$max = $counts{$page} if $counts{$page} > $max;
-		}
+	foreach my $page (pagespec_match_list([keys %links],
+			$params{pages}, location => $params{page})) {
+		use IkiWiki::Render;
+		$counts{$page} = scalar(IkiWiki::backlinks($page));
+		$max = $counts{$page} if $counts{$page} > $max;
 	}

 	if ($style eq 'table') {
@@ -50,15 +50,10 @@ sub preprocess (@) {

 	add_depends($params{page}, $params{pages});

-	my @list;
-	foreach my $page (keys %pagesources) {
-		next if $page eq $params{page};
-		if (pagespec_match($page, $params{pages}, location => $params{page})) {
-			push @list, $page;
-		}
-	}
-
-	@list = sort { $params{timehash}->{$b} <=> $params{timehash}->{$a} } @list;
+	my @list=sort { $params{timehash}->{$b} <=> $params{timehash}->{$a} }
+		pagespec_match_list(
+			[ grep { $_ ne $params{page} } keys %pagesources],
+			$params{pages}, location => $params{page});

 	my @data=eval qq{IkiWiki::Plugin::postsparkline::formula::$formula(\\\%params, \@list)};
 	if ($@) {
@@ -73,7 +73,7 @@ sub preprocess_tag (@) {
 		$tag=linkpage($tag);
 		$tags{$page}{$tag}=1;
 		# hidden WikiLink
-		push @{$links{$page}}, tagpage($tag);
+		add_link($page, tagpage($tag));
 	}

 	return "";
@@ -88,14 +88,14 @@ sub preprocess_taglink (@) {
 		if (/(.*)\|(.*)/) {
 			my $tag=linkpage($2);
 			$tags{$params{page}}{$tag}=1;
-			push @{$links{$params{page}}}, tagpage($tag);
+			add_link($params{page}, tagpage($tag));
 			return taglink($params{page}, $params{destpage}, $tag,
 				linktext => pagetitle($1));
 		}
 		else {
 			my $tag=linkpage($_);
 			$tags{$params{page}}{$tag}=1;
-			push @{$links{$params{page}}}, tagpage($tag);
+			add_link($params{page}, tagpage($tag));
 			return taglink($params{page}, $params{destpage}, $tag);
 		}
 	}
@@ -11,7 +11,7 @@ use Encode;

 sub import {
 	hook(type => "getsetup", id => "textile", call => \&getsetup);
-	hook(type => "htmlize", id => "txtl", call => \&htmlize);
+	hook(type => "htmlize", id => "txtl", call => \&htmlize, longname => "Textile");
 }

 sub getsetup () {
@@ -403,6 +403,7 @@ sub showform ($$) {
 		$form->reset(0); # doesn't really make sense here
 	}
 	else {
+		my $oldsetup=readfile($config{setupfile});
 		IkiWiki::Setup::dump($config{setupfile});

 		IkiWiki::saveindex();
@@ -426,16 +427,21 @@ sub showform ($$) {
 				"-refresh", "-wrappers", "-v");
 		}

+		close STDERR;
+		open(STDERR, ">&STDOUT");
 		my $ret=system(@command);
-		print "\n<pre>";
+		print "\n<\/pre>";
 		if ($ret != 0) {
 			print '<p class="error">'.
-				sprintf(gettext("<p class=\"error\">Error: %s exited nonzero (%s)"),
+				sprintf(gettext("Error: %s exited nonzero (%s). Discarding setup changes."),
 				join(" ", @command), $ret).
 				'</p>';
+			open(OUT, ">", $config{setupfile}) || error("$config{setupfile}: $!");
+			print OUT $oldsetup;
+			close OUT;
 		}

-		print $tail;
+		print $tail;
 		exit 0;
 	}
 }
@@ -1,3 +1,12 @@
+ikiwiki (3.12) unstable; urgency=low
+
+  You may want to run `ikiwiki-transition deduplinks /path/to/srcdir`
+  after upgrading to this version of ikiwiki. This command will
+  optimise your wiki's saved state, removing duplicate information
+  that can slow ikiwiki down.
+
+ -- Joey Hess <joeyh@debian.org>  Wed, 06 May 2009 00:25:06 -0400
+
 ikiwiki (3.01) unstable; urgency=low

   If your wiki uses git, and you have a `diffurl` configured in
@@ -1,4 +1,4 @@
-ikiwiki (3.11) unstable; urgency=low
+ikiwiki (3.14) UNRELEASED; urgency=low

   * Add new hooks: canremove, canrename, rename. (intrigeri)
   * rename: Refactor subpage rename handling code into rename hook. (intrigeri)
@@ -7,6 +7,62 @@ ikiwiki (3.11) unstable; urgency=low

  -- Joey Hess <joeyh@debian.org>  Mon, 20 Apr 2009 19:40:25 -0400

+ikiwiki (3.13) UNRELEASED; urgency=low
+
+  * ikiwiki-transition: If passed a nonexistant srcdir, or one not
+    containing .ikiwiki, abort with an error rather than creating it.
+  * Allow underlaydir to be overridden without messing up inclusion
+    of other underlays via add_underlay.
+  * More friendly display of markdown, textile in edit form selector
+    (jmtd)
+  * Allow curly braces to be used in pagespecs, and avoid a whole class
+    of potential security problems, by avoiding performing any string
+    interpolation on user-supplied data when translating pagespecs.
+
+ -- Joey Hess <joeyh@debian.org>  Wed, 06 May 2009 20:45:44 -0400
+
+ikiwiki (3.12) unstable; urgency=low
+
+  * Re-enable python-support and add python:Depends to control file.
+  * ikiwiki-makerepo: Avoid using abs_path, as it apparently
+    fails on nonexistant directories with some broken perl
+    versions.
+  * inline: Minor optimisation.
+  * add_link: New function, which plugins should use rather than
+    modifying %links directly, to avoid it accumulating duplicates.
+  * ikiwiki-transition: Add a deduplinks action, that can be used
+    to remove duplicate links and optimise a wiki w/o rebuilding it.
+  * external: Fix pagespec_match and pagespec_match_list.
+    Closes: #527281
+
+ -- Joey Hess <joeyh@debian.org>  Wed, 06 May 2009 00:31:16 -0400
+
+ikiwiki (3.11) unstable; urgency=low
+
+  * Avoid using python-support. Closes: #525086
+  * websetup: Display stderr in browser if ikiwiki setup fails.
+  * blogspam: Load RPC::XML library in checkconfig, so that an
+    error can be printed at that point if it's not available,
+    allowing the admin to see it during wiki setup.
+    Closes: #520015
+  * websetup: If setup fails, restore old setup file.
+  * relativedate: Deal with clock skew.
+  * Add IkiWiki::ErrorReason objects, and modify pagespecs to return
+    them in cases where they fail to match due to a configuration or syntax
+    error.
+  * pagespec_match_list: New API function, matches pages in a list
+    and throws an error if the pagespec is bad.
+  * inline, brokenlinks, calendar, linkmap, map, orphans, pagecount,
+    pagestate, postsparkline: Display a handy error message if the pagespec
+    is erronious.
+  * comments: Add link to comment post form to allow user to sign in
+    if they wish to, if the configuration makes signin optional
+    for commenting.
+  * Updated Danish translation from Jonas Smedegaard. Closes: #525751
+  * translation.mdwn: Typo fixes. Closes: #525753
+
+ -- Joey Hess <joeyh@debian.org>  Mon, 04 May 2009 15:45:10 -0400
+
 ikiwiki (3.10) unstable; urgency=low

   * darcs: Finally added support for this VCS, thanks to many
@@ -9,14 +9,14 @@ Build-Depends-Indep: dpkg-dev (>= 1.9.0), libxml-simple-perl,
 	libhtml-parser-perl, liburi-perl, perlmagick
 Maintainer: Joey Hess <joeyh@debian.org>
 Uploaders: Josh Triplett <josh@freedesktop.org>
-Standards-Version: 3.8.0
+Standards-Version: 3.8.1
 Homepage: http://ikiwiki.info/
 Vcs-Git: git://git.ikiwiki.info/
 Vcs-Browser: http://git.ikiwiki.info/?p=ikiwiki

 Package: ikiwiki
 Architecture: all
-Depends: ${misc:Depends}, ${perl:Depends},
+Depends: ${misc:Depends}, ${perl:Depends}, ${python:Depends},
 	libtext-markdown-perl | markdown,
 	libhtml-scrubber-perl, libhtml-template-perl,
 	libhtml-parser-perl, liburi-perl
@@ -82,3 +82,48 @@ I can't see any related entries. Any ideas?

 >>> Ubuntu Gutsy also has Perl 5.8.8-7, so probably it has the bug too.
 >>> --[[Paweł|ptecza]]
+
+>>>> I just got it while building my latest version of git.ikiwiki.info + my stuff.
+>>>> Only thing different in my version in IkiWiki.pm is that I moved a </a> over
+>>>> a word (for createlink), and disabled the lowercasing of created pages. Running
+>>>> Lenny's Perl. --[[simonraven]]
+
+>>>> Simon, I'm not clear what version of ikiwiki you're using.
+>>>> Since version 2.40, taint checking has been disabled by
+>>>> default due to the underlying perl bug. Unless you
+>>>> build ikiwiki with NOTAINT=0. --[[Joey]]
+
+>>>> Hi, nope not doing this. Um, sorry, v. 3.13. I've no idea why it suddenly started doing this.
+>>>> It wasn't before. I've been messing around IkiWiki.pm to see if I can set
+>>>> a umask for `mkdir`.
+
+line 775 and down:
+
+    + umask ($config{umask} || 0022);
+
+>>>> I figured it *might* be the `umask`, but I'll see in a few when / if it gets past that in the build. No; I keep getting garbage during the brokenlinks test
+
+<pre>
+t/basewiki_brokenlinks.....Insecure dependency in mkdir while running with -T switch at IkiWiki.pm line 776.
+
+# Failed test at t/basewiki_brokenlinks.t line 11.
+
+# Failed test at t/basewiki_brokenlinks.t line 19.
+
+broken links found
+<li>shortcut from <a href="./shortcuts/">shortcuts</a></li></ul>
+
+
+# Failed test at t/basewiki_brokenlinks.t line 25.
+Insecure dependency in mkdir while running with -T switch at IkiWiki.pm line 776.
+
+# Failed test at t/basewiki_brokenlinks.t line 11.
+
+# Failed test at t/basewiki_brokenlinks.t line 25.
+# Looks like you failed 5 tests of 12.
+dubious
+Test returned status 5 (wstat 1280, 0x500)
+</pre>
+
+>>>> I get this over and over... I haven't touched that AFAICT, at all. --[[simonraven]]
@@ -0,0 +1,3 @@
+If I have a <!--#include virtual="foo" --> in some file, it gets stripped, even though other HTML comments don't get stripped. I imagine it's some plugin doing it, or IkiWiki itself, or an IkiWiki dependency, but I haven't found where this is happening. I'm trying to implement a workaround for my sidebars forcing a rebuild of the wiki every day - I use the calendar plugin - when the day changes, by using SSI.
+
+> It is probably the [[plugins/htmlscrubber]] plugin. -- [[Jon]]
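If the htmlscrubber is indeed the culprit, a possible workaround — assuming the page carrying the SSI include is fully trusted — is to exempt it from scrubbing with the `htmlscrubber_skip` setup option. A sketch (the pagespec here is hypothetical; adjust it to your own sidebar page):

    # in the wiki's .setup file: leave this page's raw HTML alone
    htmlscrubber_skip => 'sidebar',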
@@ -4,17 +4,19 @@ If I click on "Czars in Russia", I'd like Ikiwiki to create "Czars\_in\_Russia.m

 > There's a simple patch that can do this:

-> --- a/IkiWiki.pm
-> +++ b/IkiWiki.pm
-> @@ -584,7 +584,7 @@ sub htmllink ($$$;@) {
-> return "<span class=\"createlink\"><a href=\"".
-> 	cgiurl(
-> 		do => "create",
-> -		page => pagetitle(lc($link), 1),
-> +		page => pagetitle($link, 1),
-> 		from => $lpage
-> 	).
-> 	"\">?</a>$linktext</span>"
+<pre>
+-- a/IkiWiki.pm
++++ b/IkiWiki.pm
+@@ -584,7 +584,7 @@ sub htmllink ($$$;@) {
+ return "<span class=\"createlink\"><a href=\"".
+ 	cgiurl(
+ 		do => "create",
+-		page => pagetitle(lc($link), 1),
++		page => pagetitle($link, 1),
+ 		from => $lpage
+ 	).
+ 	"\">?</a>$linktext</span>"
+</pre>

 > This is fine if you don't mind mixed or randomly cased filenames getting
 > created. Otoh, if the link happened to start a sentence and so had its
@@ -0,0 +1,43 @@
+I was just hovering over the '...' next to the backlinks on a page on
+<http://ikiwiki.info/>. In terms of the size of my browser window, this was
+towards the bottom-right of the screen.
+
+When I hovered over the '...', the additional backlinks float appeared. This
+caused the page length to grow down, meaning a horizontal scrollbar was added
+to the page. This meant the text reflowed, and the '...' moved outside of my
+mouse pointer region.
+
+This caused an infinite loop of box appears... text moves, box disappears...
+box re-appears.. which was not very visually pleasant.
+
+In general I think that the onhover float is a bit of bad UI. Even a truncated
+list of backlinks looks cluttered due to there being no delimiters. I moved to
+having an always-complete list of backlinks and having them as LI elements
+inside a UL to make it look neater, although I appreciate that would make some
+pages very long indeed.
+
+How about doing something a little like [[plugins/toggle]] for the excess
+items instead?
+
+-- [[Jon]]
+
+----
+
+An additional, related issue: if the box expands beyond the bottom of the
+page, you might move your mouse pointer to the scrollbar in order to move
+further down the list, but of course then you are outside the hover region.
+
+-- [[Jon]]
+
+> I agree, browser handling of this CSS is often not good.
+>
+> A toggle would be the perfect UI, but the heaviness of needing
+> to include 30 lines of javascript to do it, plus then it only working
+> with javascript enabled, is also not optimal.
+>
+> Another idea would be to make the "..." a link to the ikiwiki cgi.
+> The cgi could then have a mode that displays all the backlinks of a page
+> in a list.
+>
+> Yet another idea: Find some more refined CSS for handling a variable
+> size popup.. --[[Joey]]
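A rough sketch of that last CSS idea (hypothetical class names, not part of this commit; ikiwiki's actual stylesheet may use different selectors):

    /* keep the hover popup out of the normal flow and cap its size,
       so opening it cannot reflow the page under the pointer */
    .popup .balloon {
        position: absolute;
        max-width: 20em;
        max-height: 15em;
        overflow: auto; /* scroll inside the popup instead of growing */
    }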
@@ -0,0 +1,23 @@
+I'm editing /posts/foo. If I create a link to a subpage (in my case,
+"discussion"), and hit preview, it gets resolved to /discussion, not
+/posts/foo/discussion. If I hit commit, the latter happens. This seems like
+a bug. --liw
+
+> That would be a bug, but I cannot reproduce it. For example, I edited
+> <http://kitenet.net/~joey/blog/entry/wikis_out_of_disk/> and added a
+> discussion link and on preview it went to the page's discussion page. I
+> don't normally have a toplevel /discussion page, but I also tried adding
+> one, and the link still doesn't link to it. Testcase? --[[Joey]]
+
+>> I can reproduce this on <http://blog.liw.fi/posts/distributed-internet-witness-service/>:
+>> if I edit the page, then preview (no changes made), the "discussion" link at the bottom
+>> of the page points in the preview
+>> to <http://blog.liw.fi/discussion/>,
+>> whereas the saved page has it pointing to
+>> <http://blog.liw.fi/posts/distributed-internet-witness-service/discussion/>.
+>> I'll arrange so that you can edit the page to test this.
+>> --liw
+
+>> Joey suggested my wiki might be missing the FORCEBASEURL snippet from the misc.tmpl
+>> template, and he's right. Mea culpa: I had not diffed the various templates when upgrading
+>> and had missed that updated. [[done]] --liw
@@ -0,0 +1,24 @@
+The standalone 'markdown' utility is perfectly happy with an external link inside a `<h1>`, e.g.:
+
+    # Review of [Dwarf Fortress][]
+    ...
+    [Dwarf Fortress]: http://www.bay12games.com/dwarves/
+
+produces
+
+    <h1>Review of <a href="http://www.bay12games.com/dwarves/">Dwarf Fortress</a></h1>
+
+but when I try to use this construct in an ikiwiki page, I get
+
+    <h1>Review of [Dwarf Fortress][]</h1>
+
+It works fine with h2 and deeper. The square brackets also appear in the output of an [[ikiwiki/directive/inline]] directive in archive mode, I haven't tried non-archive mode.
+
+> I think you were confused by markdown's slightly wacky mix of square brackets and parens.
+> The url in a markdown link goes in parens, not square brackets. For example:
+
+    # [Google](http://google.com/)
+
+> [[done]] --[[Joey]]
+
+>> It works here but it definitely does *not* work on my wiki; but on further experimentation, I believe my problem is being caused by JasonBlevins' [h1title](http://code.jblevins.org/ikiwiki/plugins.git/plain/h1title.pm) plugin.
@@ -1,47 +1,44 @@
-Some elements of [HTML5](http://www.whatwg.org/specs/web-apps/current-work/multipage/) can be safely supported by ikiwiki. There are [several differences between HTML4 and HTMl5](http://www.w3.org/TR/html5-diff/). Unsupported new elements _should degrade gracefully_.
+Some elements of
+[HTML5](http://www.whatwg.org/specs/web-apps/current-work/multipage/) can be
+safely supported by ikiwiki. There are [several differences between HTML4 and
+HTML5](http://www.w3.org/TR/html5-diff/).
+
+> In the `origin/html` branch, there is an old work in progress to make
+> ikiwiki use html 4 instead of xhtml. If that could be brought forward and
+> finished then the plan has been to switch ikiwiki over to doing html 4.
+> I don't think it makes sense to try to make it support both xhtml and
+> html, it would complicate the code for no benefit.
+>
+> I think that is the best route toward supporting html 5 as well. Get
+> ikiwiki doing html 4 first and the changes needed to get to 5 from there
+> should be small. Probably just changing some doctypes and a few other
+> small changes which could be kept in a branch, or even shipped in ikiwiki
+> mainline as an alternate set of templates. Some of the changes, like
+> supporting new html 5 tags in the htmlscrubber, can be done in mainline.
+> (Like was already done for the html 5 video and audio tags.)
+>
+> This approach seems much more maintainable going foward than rolling a
+> html 5 branch immediatly and trying to keep that continually up-to-date
+> with mainline ikiwiki that is still using xhtml. --[[Joey]]

 * [HTML5 branch](http://git.webconverger.org/?p=ikiwiki;h=refs/heads/html5)
 * [ikiwiki instance with HTML5 templates](http://natalian.org)

-However as an [early adopter](http://en.wikipedia.org/wiki/Early_adopter) I would like to start using HTML5 as much as possible. The more pragmatic solution would be to use elements supported by the browsers of your readership I guess. I'm following other early adopters like [Anne](http://annevankesteren.nl/) for clues on how to proceed.
+# HTML5 Validation and t/html.t

-* [Initial patch](http://git.webconverger.org/?p=ikiwiki;a=commit;h=2e2bb3f74f5000b1269142d6f9bdf1bcb4075ca4)
+[validator.nu](http://validator.nu/) is the authorative HTML5 validator,
+however it is almost impossible to sanely introduce as a build dependency
+because of its insane Java requirements. :( I test locally via
+[cURL](http://wiki.whatwg.org/wiki/IDE), though Debian packages cannot be built
+with a network dependency.

 > I can't figure out how to pull from this repository.
 >> Sorry! I have fixed the cloneurl file to read `git clone git://webconverger.org/git/ikiwiki`

+In the future, hopefully ikiwiki can test for valid HTML5 using [Relax NG
+schema](http://syntax.whattf.org/) using a Debian package tool
+[rnv](http://packages.qa.debian.org/r/rnv.html).
+
-I'm unsure how to turn off the test validation by the very old [wdg-html-validator](http://packages.qa.debian.org/w/wdg-html-validator.html). So I have been unable to test my initial patches as I can't build ikiwiki. I would like to know how to edit the rules/Makefile to temporarily disable this.
+# HTML5 migration issues

-> Don't run "make test" ... --[[Joey]]
->> I don't quite grok debhelper7 [rules](http://git.ikiwiki.info/?p=ikiwiki;a=blob;f=debian/rules).
+# [article](http://www.whatwg.org/specs/web-apps/current-work/multipage/semantics.html#the-article-element) element

->>> Well, ok :-) `rm t/html.t` or, add an empty `override_dh_auto_test` rule.
->>> --[[Joey]]
+This element is poorly supported by browsers. As a workaround, `style.css` needs:

-[validator.nu](http://validator.nu/) incidentally is **the** HTML5 validator, however it is almost impossible to sanely introduce as a build dependency because of its insane Java requirements. :( I test locally via [cURL](http://wiki.whatwg.org/wiki/IDE), though Debian packages cannot be built with a network dependency.
+	article {
+		display: block;
+	}

-# Notes
+Internet Explorer will display it as a block, though you can't seem to be further control the style.

-* the [time element](http://www.whatwg.org/specs/web-apps/current-work/multipage/text-level-semantics.html#the-time-element) ideally needs the datatime= attribute set with iso8601 time
-* I suspect the migration to the new semantic elements of HTML5 like article, header & footer to take some time, due to browser support. Though they sure make the template code look much nicer.
-* `<br>` and too many `<div>`s usually indicates poor semantics.
+> YMMV, but I tend to find that kind of concern counterproductive.
+> --[[Joey]]

+# Validator complains with no h1-h6 in header
+
-* Many of the header `<span>`s should be proper [header elements](http://www.whatwg.org/specs/web-apps/current-work/multipage/semantics.html#the-h1,-h2,-h3,-h4,-h5,-and-h6-elements)
+> See [[todo/Option_to_make_title_an_h1__63__]] for why not. --[[Joey]]
+
+* [#509](http://bugzilla.validator.nu/show_bug.cgi?id=509)
+
+## Time element
+
+The [time element](http://www.whatwg.org/specs/web-apps/current-work/multipage/text-level-semantics.html#the-time-element) ideally needs the datatime= attribute set by a template variable with what [HTML5 defines as a valid datetime string](http://www.whatwg.org/specs/web-apps/current-work/multipage/infrastructure.html#valid-global-date-and-time-string).
+
+As a workaround:
+
+	au:~% grep timeformat natalian.setup
+	timeformat => '%Y-%m-%d',
@@ -0,0 +1,9 @@
+The [[ikiwiki/directive/inline]] directive applies a template to each page-to-be-inlined, but the loop over the pages is in the Perl, not the template itself. This means if I want to wrap a container `<div>` or a `<table>` or whatever around the entire set of inlined pages, I can't do it by just editing the template. In fact, I think the only way to do it without hacking any Perl is with a wrapper template directive, e.g.
+
+	\[[!template id="wrapinline" pages="..."]]
+
+with a template definition like
+
+	<div id="foo">\[[!inline ... pages="<TMPL_VAR raw_pages>"]]</div>
+
+It would be much more convenient if the loop over pages happened in the template, allowing me to just stick whatever markup I want around the loop.
@@ -0,0 +1,44 @@
+I want match pages which have actually curly braces in the names (like this one), but this matches a lot of pages without the braces in their names :( :
+
+	[[!inline show="3" feeds="no" archive="yes" pages="*_{*}_*"]]
+
+(note: the inline above has been restricted to 3 matches to keep this page
+concise. Hopefully it is still clear that this page is not in the output set,
+and the 3 pages in the output set do not contain curly braces in their
+titles).
+
+When escaped, it doesn't work at all:
+
+	[[!inline show="3" feeds="no" archive="yes" pages="*_\{*}_*"]]
+
+	[[!inline show="3" feeds="no" archive="yes" pages="*_{*\}_*"]]
+
+More tests:
+
+"\*{\*":
+
+	[[!inline show="3" feeds="no" archive="yes" pages="*{*"]]
+
+"\*\\{\*":
+
+	[[!inline show="3" feeds="no" archive="yes" pages="*\{*"]]
+
+> This is due to the current handling of quoting and escaping issues
+> when converting a pagespec to perl code. `safequote` is used to
+> safely quote an input string as a `q{}` quote, and it strips
+> curlies when doing so to avoid one being used to break out of the `q{}`.
+>
+> Alternative ways to handle it would be:
+>
+> * Escape curlies. But then you have to deal with backslashes
+>   in the user's input as they could try to defeat your escaping.
+>   Gets tricky.
+>
+> * Avoid exposing user input to interpolation as a string. One
+>   way that comes to mind is to have a local string lookup hash,
+>   and insert each user specified string into it, then use the hash
+>   to lookup the specified strings at runtime. [[done]]
+>
+> --[[Joey]]
+
+Thank you! I'll try it. --Ivan Z.
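A minimal standalone sketch of that lookup-table idea (a hypothetical illustration only; the commit's actual implementation is the `@data` array in `pagespec_translate` above):

	#!/usr/bin/perl
	# Store user input verbatim in an array; the generated code refers
	# to it only by index, so its content is never interpolated or
	# re-quoted, and curlies/backslashes in it cannot break out.
	use strict;
	use warnings;

	my @data;
	my $code="";
	push @data, q{some*glob_{with}_braces};	# arbitrary user input
	$code.="length(\$data[$#data])";	# generates: length($data[0])

	my $sub=eval "sub { $code }";	# the closure captures @data
	die $@ if $@;
	print $sub->(), "\n";	# prints 23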
@@ -231,3 +231,44 @@ So, looking at your meta branch: --[[Joey]]
 >> handling, since po contains a workaround for that, and it's probably
 >> acceptable to use potentially slow methods to handle this case.)
 >> --[[Joey]]
+
+>>> I'm glad to implement whatever decision we'll make, but I don't
+>>> clearly understand what this discussion's conclusion is. It seems
+>>> like we agree at least on one point: meta page titles shall not be
+>>> displayed all over the place by default; I have therefore disabled
+>>> `meta_overrides_page_title` by default in my `meta` branch.
+>>>
+>>> My next question is then: do we only want to satisfy the `po`
+>>> plugin needs? Or do we want to allow people who want this, such as
+>>> [[madduck]], to turn on a config switch so that meta page titles
+>>> are displayed as wikilinks titles? In the latter case, what level
+>>> of configurability do we want? I can think of a quite inelegant
+>>> way to implement full configurability, and provide a configuration
+>>> switch for every place where links are displayed, such as
+>>> wikilinks, parentlinks, etc., but I don't think the added bonus is
+>>> worth the complexity of it.
+>>>
+>>> I think we can roughly split the needs into three categories:
+>>>
+>>> 1. never display any modified page title in links; this is the
+>>>    current behaviour, and we should keep it as the default one
+>>> 2. display modified page titles only at well chosen places; that
+>>>    could be "manual" wikilinks, I mean those generated by the
+>>>    `link`, `camelcase` & al. plugins, the recentchanges page, and
+>>>    maybe a few other places; keep the usual pagename-based title
+>>>    for every other link, such as the parentlinks ones.
+>>>    The inter-page dependency problem remains, though. As a first
+>>>    step, I'm in favour of the "slow, but correct" implementation,
+>>>    with a big warning stating that enabling this option can make
+>>>    a wiki really sluggish; if someone really wants this to work
+>>>    fast, he/she'll implement a clever dependency handler :)
+>>> 3. display modified page titles all over the place; IMHO, we
+>>>    should implement only the bits needed so that the `po` plugin
+>>>    can set this up, rather than provide this as
+>>>    a user-configurable option.
+>>>
+>>> So my question is: do we want to implement the #2 case, or not?
+>>> I propose myself to only implement #1 and #3 to start with, but do
+>>> it in a way that leaves room for #2.
+>>>
+>>> --[[intrigeri]]
@@ -0,0 +1,25 @@
+[[!tag bugs wishlist]]
+
+Escaping pipe-symbol in [[taglink|ikwiki/directive/taglink]] targets doesn't work as I wanted:
+
+	[[!taglink smth_with_a_pipe|about_the_\|-symbol]]
+	[[!taglink smth_with_a_pipe|about_the_|-symbol]]
+
+as opposed to simple wikilinks:
+
+	[[a link to smth with a pipe|about the \|-symbol]]
+	[[a link to smth with a pipe|about the |-symbol]]
+
+And it seems to work in pagespecs:
+
+tagged:
+
+	[[!map pages="tagged(about the |-symbol)"]]
+
+	[[!map pages="tagged(about the \|-symbol)"]]
+
+link:
+
+	[[!map pages="link(about the |-symbol)"]]
+
+	[[!map pages="link(about the \|-symbol)"]]
@@ -0,0 +1,6 @@
+The [[plugins/remove]] plugin does not report an error if git rm fails. (It
+probably doesn't if other VCS backends fail too). This can happen for example
+if a page in your source directory is not a tracked file for whatever reason
+(in my case, due to renaming the files and forgetting to commit that change).
+
+-- [[Jon]]
@@ -0,0 +1,17 @@
+It may be that I'm simply misunderstanding something, but what is the rationale
+for having `tagged()` also match normal wikilinks?
+
+> It simply hasn't been implemented yet -- see the answer in [[todo/tag_pagespec_function]]. Tags and wikilinks share the same underlying implementation, although a reasonable expectation is that they are kept separate. --Ivan Z.
+
+The following situation. I have `tagbase => 'tag'`. On some pages, scattered
+over the whole wiki, I use `\[[!tag open_issue_gdb]]` to declare that this page
+contains information about an open issue with GDB. Then, I have a page
+`/tag/open_issues_gdb.mdwn` that essentially contains `\[[!map
+pages="tagged(open_issue_gdb)"]]`. So far, so good: this page indeed does list
+all pages that are tagged like this. But now, when I add in `/gdb.mdwn` a link
+to this page, like `\[[Open Issues|tag/open_issue_gdb]]`, then `/gdb.mdwn`
+itself shows up in the map on `tag/open_issues_gdb.mdwn`. In my understanding
+this is due to the wikilink being equal to a `\[[!tag ...]]`. What's the
+rationale on this, or what am I doing wrong, and how to achieve what I want?
+
+--[[tschwinge]]
@@ -14,3 +14,7 @@ The most recent version is always available at: <http://git.upsilon.cc/cgi-bin/g
 -- [[StefanoZacchiroli]]

 > Just letting me know about the change works -- updated. --[[Joey]]
+
+----
+
+Added small changes to embeddedmoose.css to work with ikiwiki 3.x. Figured here is as good as any until Josh can review and update if he so chooses: <http://bosboot.org/tb-embeddedmoose.css>. It removes annoying borders around the header and footer. -- [[TimBosse]]
@@ -15,6 +15,6 @@ Thanks
 > sure that page contains invalid utf-8 if the message is then printed.
 >
 > Another option is to use the `isutf8` program from
-> moreutils](http://kitenet.net/~joey/code/moreutils/),
+> [moreutils](http://kitenet.net/~joey/code/moreutils/),
 > and run it on each file, it will tell you the line number
 > and character position that is invalid. --[[Joey]]
@@ -0,0 +1,3 @@
+Re [Let's just rely on backlinks for this?](http://git.ikiwiki.info/?p=ikiwiki;a=blobdiff;f=doc/ikiwiki/pagespec/discussion.mdwn;h=4eed3722ccc744595fc8380e68e69dc9e1ad6450;hp=f4fdd764ed61c34c09af9df046a1a134b7d0ffe6;hb=a4ce0468f6f562ad4ec749f156528aa5b3f2fe39;hpb=23a4ee6d15dbd9b8e8c6588a829dd30a26a8de32) and [2](http://git.ikiwiki.info/?p=ikiwiki;a=blobdiff;f=doc/ikiwiki/wikilink/discussion.mdwn;h=0677ff7ded6a86be7d010dfd97affdb6b266832a;hp=274c8aaf1d075adbf3b76496e1945348aefe836a;hb=4f4666c4ae5cdf5e04ac42cc52a97d90e978adb0;hpb=a4ce0468f6f562ad4ec749f156528aa5b3f2fe39):
+
+I simply didn't notice there is an "add a new bug" form at [[bugs]], so I used the obvious way to create a page through the web-interface: to put first a wikilink pointing at the new page. --Ivan Z.
@@ -38,6 +38,7 @@ into [[Joey]]'s working tree. This is recommended. :-)
 * [[gmcmanus]] `git://github.com/gmcmanus/ikiwiki.git`
 * [[jelmer]] `git://git.samba.org/jelmer/ikiwiki.git`
 * [[hendry]] `git://webconverger.org/git/ikiwiki`
+* [[jon]] `git://github.com/jmtd/ikiwiki.git`

 ## branches

@@ -61,6 +61,13 @@ If this is not done explicitly, a user's plaintext password will be
 automatically converted to a hash when a user logs in for the first time
 after upgrade to ikiwiki 2.48.

+# deduplinks srcdir
+
+In the past, bugs in ikiwiki have allowed duplicate link information
+to be stored in its indexdb. This mode removes such duplicate information,
+which may speed up wikis afflicted by it. Note that rebuilding the wiki
+will have the same effect.
+
 # AUTHOR

 Josh Triplett <josh@freedesktop.org>, Joey Hess <joey@ikiwiki.info>
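A usage sketch for the new mode described above, as given in the NEWS entry (substitute your own wiki's srcdir for the placeholder path):

	ikiwiki-transition deduplinks /path/to/srcdir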
@@ -11,3 +11,64 @@ Question: Is there a way to generate a listing that shows *both* title and descr
 Is that possible?

 --Peter
+
+----
+
+The site I'm trying to set up right now (not really a wiki - no public editing) is divided into topics. Topics are pages that have `\[[!meta link="/topic"]]`. Topic pages contain an index of their subpages (done with `\[[!inline]]`); the subpages are the real content. I want a map in the sidebar that lists:
+
+* all of the topics;
+* all of the first-level subpages of the *current topic only*.
+
+That is, if the current page is "Topic A" or "Topic A/Page 1", then the map should look like
+
+	Topic A
+	  Page 1
+	  Page 2
+	  Page 3
+	Topic B
+	Topic C
+
+but if the current page is "Topic B" or one of its subpages, then the map should look like
+
+	Topic A
+	Topic B
+	  Page 1
+	  Page 2
+	  Page 3
+	Topic C
+
+On the top-level index page, or on any other page that is neither a topic nor a subpage of a topic, the map should list only the topics.
+
+Is there any way to do that? I don't mind mucking around with `\[[!meta]]` on every page if that's what it takes.
+
+-- Zack
+
+> I think that you're looking for this:
+>
+> `pages="((Topic*/* or Topic*) and ./*) or (Topic* and ! Topic*/*)"`
+>
+> Let's pull that [[PageSpec]] apart.
+>
+> * `(Topic*/* or Topic*)` matches all pages that are underneath a Topic
+>   page or are a topic page themselves.
+> * `and ./*` further adds the limitation that the pages have to be
+>   in the same directory as the page that is displaying the map. So,
+>   for `Topic_A/Page_1`, it will match `Topic_A/*`; for `Topic_A`,
+>   it will match `Topic_*` but not subpages.
+> * Finally, `Topic* and ! Topic*/*` matches all the toplevel topic pages,
+>   since we always want those to show up.
+>
+> I haven't tested that this works or displays, but I hope it gets you
+> on the right track. PS, be aware of
+> [[this_sidebar_issue|todo/Post-compilation_inclusion_of_the_sidebar]]!
+> --[[Joey]]
+
+>> Thanks, but this assumes that topic pages are named `Topic<something>`.
+>> They aren't. They are tagged with `\[[!meta link="/topic"]]`, and as
+>> far as I can tell there is no [[PageSpec]] notation for "subpages of a
+>> page that satisfies link(foo)"...
+>> -- Zack
+
+>>> I think that the ideas and code in
+>>> [[todo/tracking_bugs_with_dependencies]] might also handle this case.
+>>> --[[Joey]]
@@ -12,7 +12,7 @@ To scale the image, use height=x:
 	\[[!teximg code="\frac{1}{2}" height="17"]]
 	\[[!teximg code="\frac{1}{2}" height="8"]]

-If no height is choosen the default height 12 is used. Valid heights are: 8, 9,
+If no height is chosen the default height 12 is used. Valid heights are: 8, 9,
 10, 11, 12, 14, 17, 20. If another height is entered, the closest available
 height is used.

@@ -16,7 +16,7 @@ To sign up for an OpenID, visit one of the following identity providers:
 * [Videntity](http://videntity.org/)
 * [LiveJournal](http://www.livejournal.com/openid/)
 * [TrustBearer](https://openid.trustbearer.com/)
-* or any of the [many others out there](http://openiddirectory.com/openid-providers-c-1.html).
+* or any of the [many others out there](http://openiddirectory.com/openid-providers-c-1.html) (but not [Yahoo](http://openid.yahoo.com) [[yet|plugins/openid/discussion/#Yahoo_unsupported]]).

 Your OpenID is the URL that you are given when you sign up.
 [[!if test="enabled(openid)" then="""
|
@ -45,6 +45,7 @@ Projects & Organizations
|
|||
* [Pigro Network](http://www.pigro.net) is running a hg based ikiwiki. (And provides ikiwiki hosting for $10/m.)
|
||||
* [Cosin Homepage](http://cosin.ch) uses an Ikiwiki with a subversion repository.
|
||||
* [Bosco Free Orienteering Software](http://bosco.durcheinandertal.ch)
|
||||
* The [GNU Hurd](http://www.gnu.org/software/hurd/)'s web pages
|
||||
|
||||
Personal sites and blogs
|
||||
========================
|
||||
|
@ -119,7 +120,8 @@ Personal sites and blogs
|
|||
* [Bernd Zeimetz (bzed)](http://bzed.de/)
|
||||
* [Gaudenz Steinlin](http://gaudenz.durcheinandertal.ch)
|
||||
* [Simon Kjika'qawej C.](http://simonraven.kisikew.org/ikiwiki/) Please note it might change location at any time (likely wiki.k.o or under /wiki/ at simonraven.k.o).
|
||||
|
||||
* [NeoCarz Wiki](http://www.neocarz.com/wiki/) Yes - its actually Ikiwiki behind that! I'm using Nginx and XSL to transform the ikiwiki renderings thanks to the valid XHTML output of ikiwiki. Great work Joey!!
|
||||
* [Natalian - Kai Hendry's personal blog](http://natalian.org/)
|
||||
|
||||
Please feel free to add your own ikiwiki site!
|
||||
|
||||
|
|
|
@@ -1,3 +1,12 @@
+## Ikiwiki 3.12
+
+Joey, what about news for Ikiwiki 3.12? The changelog says it has been released
+6 days ago... :) --[[Paweł|ptecza]]
+
+---
+
+## Ikiwiki 2.14
+
 Hi Joey! Where can I find the source package for ikiwiki 2.14? I rather prefer
 `wget` than `git` to download it :) I've just checked
 [Debian page of ikiwiki source package](http://packages.debian.org/unstable/source/ikiwiki)
|
@@ -10,4 +10,4 @@ log back in, try out the OpenID signup process if you don't already have an
OpenID, and see how OpenID works for you. And let me know your feelings about
making such a switch. --[[Joey]]

[[!poll 61 "Accept only OpenID for logins" 18 "Accept only password logins" 36 "Accept both"]]
[[!poll 62 "Accept only OpenID for logins" 19 "Accept only password logins" 36 "Accept both"]]

@@ -1,44 +0,0 @@
ikiwiki 2.68 released with [[!toggle text="these changes"]]
[[!toggleable text="""
* Add support for checking pushes from untrusted git committers. This can be
  used to set up anonymous git pushes, and other similar things.
* format: New plugin, allows embedding differently formatted text inside a
  page (ie, otl inside a mdwn page, or syntax highlighted code inside a
  page).
* relativedate: New javascript-alicious plugin that makes all dates display
  relative, in a very nice way, if I say so myself.
* Optimise the no-op post-commit hook, to speed up web edits by a fraction
  of a second.
* git: Allow [[sha1\_commit]] to be used in the diffurl, to support cgit.
* shortcut: Fix display of shortcuts while previewing.
* Plugins that used to override displaytime should instead override
  formattime. displaytime will call that, and may wrap markup around the
  formatted time.
* Add an underlay for javascript, and add ikiwiki.js containing some utility
  code.
* toggle: Stop embedding the full toggle code on each page using it, and
  move it to toggle.js in the javascript underlay.
* recentchanges: Make feed links point back to anchors on the recentchanges
  page. (JasonBlevins)
* Fix issue with utf-8 in wikiname breaking session cookies, by
  entity-encoding the wikiname in the session cookie.
* Use the pure perl Data::Dumper when generating setup files to ensure that
  utf-8 characters are written out as such, and not as the encoded perl
  strings the C Data::Dumper produces.
* inline: Only the last feed link was put on the page, fix this to include
  all feed links. So rss will be included along with atom, and pages with
  multiple feeds will get links added for all feeds.
* tag: When tagpage is set, force the links created by tagging to point at
  the toplevel tagpage, and not closer subpages. The html links already went
  there, but internally the links were not recorded as absolute, which could
  cause confusing backlinks etc.
* Add an inject function, that can be used by plugins that want to
  replace one of ikiwiki's functions with their own version.
  (This is a scary thing that grubs through the symbol table, and replaces
  all exported occurrences of a function with the injected version.)
* external: RPC functions can be injected to replace exported functions.
* Updated French translation. Closes: #[502694](http://bugs.debian.org/502694)
* Updated Spanish translation from the ever vigilant Victor Moral.
* Updated Danish translation from Jonas Smedegaard. Closes: #[503117](http://bugs.debian.org/503117)
* Preserve syslog setting when doing `ikiwiki -setup foo -dumpsetup bar`
* Several fixes to --render mode."""]]

@@ -1,24 +0,0 @@
ikiwiki 2.69 released with [[!toggle text="these changes"]]
[[!toggleable text="""
* Avoid multiple ikiwiki cgi processes piling up, eating all memory,
  and thrashing, by making the cgi wrapper wait on a cgilock.
  If you had to set apache's MaxClients low to avoid ikiwiki thrashing your
  server, you can now turn it up to a high value.
* Stop busy-waiting in lockwiki, as this could delay ikiwiki from waking up
  for up to one second. The bailout code is no longer needed after above
  change.
* Remove support for unused optional wait parameter from lockwiki.
* aggregate: Try to query XML::Feed for the base url when derelevatising
  links. Since this needs the just released XML::Feed 0.3, as well
  as a not yet released XML::RSS, it will fall back to the old method
  if no xml:base info is available.
* meta: Plugin is now enabled by default since the basewiki uses it.
* txt: Do not encode quotes when filtering the txt, as that broke
  later parsing of any directives on the page.
* Fix the link() pagespec to match links that are internally recorded as
  absolute.
* Add rel=nofollow to recentchanges\_links for the same (weak) reasons it
  was earlier added to edit links.
* tag: Normalize tagbase so leading/trailing slashes in it don't break
  things.
* bzr: Fix dates for recentchanges."""]]

@@ -1,3 +0,0 @@
ikiwiki 2.70 released with [[!toggle text="these changes"]]
[[!toggleable text="""
* Avoid crash on malformed utf-8 discovered by intrigeri."""]]

@@ -1,28 +0,0 @@
ikiwiki 2.71 released with [[!toggle text="these changes"]]
[[!toggleable text="""
* comments: Blog-style comment support, contributed by Simon McVittie.
* htmlbalance: New plugin contributed by Simon McVittie.
* Change deb dependencies to list Text::Markdown before markdown (really
  this time).
* Improve escaping of wikilinks and preprocessor directives in content
  produced by aggregate and recentchanges.
* French translation update from Philippe Batailler. Closes: #[506250](http://bugs.debian.org/506250)
* Spanish translation update from Victor Moral.
* Fix handling of wrappergroup option.
* Correct --dumpsetup to include the srcdir in the setup file.
* German translation update from Kai Wasserbäch. Closes: #[507056](http://bugs.debian.org/507056)
* inline: Support emptyfeeds=no option to skip generating empty feeds.
* inline: Support feedfile option to change the filename of the feed
  generated.
* meta: Pass info to htmlscrubber so htmlscrubber\_skip can take effect.
* htmlbalance: don't compact whitespace, and set misc other options (smcv)
* rename: Fix double-escaping of page name in edit box.
* monotone: When getting the log, tell monotone how many entries
  we want, rather than closing the pipe, which it dislikes. (thm)
* Coding style change: Remove explicit vim folding markers.
* aggregate: If a feed fails to be downloaded, try again immediately
  next time aggregation is run, even if the usual time has not passed.
  Closes: #[508622](http://bugs.debian.org/508622) (Michael Gold)
* meta: Process meta date during scan pass so that the date will always
  affect sorting in inlines.
* Improve display of some openids (smcv)"""]]

@@ -1,9 +0,0 @@
ikiwiki 2.72 released with [[!toggle text="these changes"]]
[[!toggleable text="""
* Avoid comments in recentchanges being broken links (smcv)
* Add deprecation warning for GlobLists, which will stop working in 3.0.
* camelcase: Add camelcase\_ignore setting.
* googlecalendar: Add runtime deprecation warning.
* comments: Deal with users entering unqualified or partial urls.
* inline: Run format hook first, to ensure other format hooks can affect
  inlined content. Closes: #[509710](http://bugs.debian.org/509710)"""]]

@@ -1,15 +0,0 @@
ikiwiki 3.10 released with [[!toggle text="these changes"]]
[[!toggleable text="""
* darcs: Finally added support for this VCS, thanks to many
  contributors:
  - Thomas Schwinge wrote the original file, implementing only rcs\_commit.
  - Benjamin A'Lee contributed an alternative implementation.
  - Tuomo Valkonen contributed rcs\_getctime and stub rcs\_recentchanges.
  - Simon Michael contributed multiple changes.
  - Petr Ročkai fixed rcs\_recentchanges.
  - Sven M. Hallberg merged the above and added missing features.
* Add missing newline to Confirm Password prompt.
* Add missing permalink support to archivepage and titlepage templates.
* debian/control: Wrap fields.
* inline: Add author info to archive display.
* Add a microblog template that is useful for inlining microblogging posts."""]]

@@ -0,0 +1,23 @@
ikiwiki 3.11 released with [[!toggle text="these changes"]]
[[!toggleable text="""
* Avoid using python-support. Closes: #[525086](http://bugs.debian.org/525086)
* websetup: Display stderr in browser if ikiwiki setup fails.
* blogspam: Load RPC::XML library in checkconfig, so that an
  error can be printed at that point if it's not available,
  allowing the admin to see it during wiki setup.
  Closes: #[520015](http://bugs.debian.org/520015)
* websetup: If setup fails, restore old setup file.
* relativedate: Deal with clock skew.
* Add IkiWiki::ErrorReason objects, and modify pagespecs to return
  them in cases where they fail to match due to a configuration or syntax
  error.
* pagespec\_match\_list: New API function, matches pages in a list
  and throws an error if the pagespec is bad.
* inline, brokenlinks, calendar, linkmap, map, orphans, pagecount,
  pagestate, postsparkline: Display a handy error message if the pagespec
  is erroneous.
* comments: Add link to comment post form to allow user to sign in
  if they wish to, if the configuration makes signin optional
  for commenting.
* Updated Danish translation from Jonas Smedegaard. Closes: #[525751](http://bugs.debian.org/525751)
* translation.mdwn: Typo fixes. Closes: #[525753](http://bugs.debian.org/525753)"""]]

@@ -0,0 +1,19 @@
You may want to run `ikiwiki-transition deduplinks /path/to/srcdir`
after upgrading to this version of ikiwiki. This command will
optimise your wiki's saved state, removing duplicate information
that can slow ikiwiki down.

ikiwiki 3.12 released with [[!toggle text="these changes"]]
[[!toggleable text="""
* Re-enable python-support and add python:Depends to control file.
* ikiwiki-makerepo: Avoid using abs_path, as it apparently
  fails on nonexistent directories with some broken perl
  versions.
* inline: Minor optimisation.
* add_link: New function, which plugins should use rather than
  modifying %links directly, to avoid it accumulating duplicates.
* ikiwiki-transition: Add a deduplinks action, that can be used
  to remove duplicate links and optimise a wiki w/o rebuilding it.
* external: Fix pagespec_match and pagespec_match_list.
  Closes: #527281
"""]]

@@ -7,12 +7,18 @@ This plugin adds antispam support to ikiwiki, using the
appear to contain spam will be rejected; comments that look spammy will be
stored in a queue for moderation by an admin.

To check for and moderate comments, log in to the wiki as an admin,
go to your Preferences page, and click the "Comment Moderation" button.

The plugin requires the [[!cpan RPC::XML]] perl module.

You can control how content is tested via the `blogspam_options` setting.
The list of options is [here](http://blogspam.net/api/testComment.html#options).
By default, the options are configured in a way that is appropriate for
wiki content. This includes turning off some of the more problimatic tests.
wiki content. This includes turning off some of the more problematic tests.
An interesting option for testing is `fail`; by setting it (e.g.,
`blogspam_options => 'fail'`), *all* comments will be marked as SPAM, so that
you can check whether the interaction with blogspam.net works.
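
For instance, a setup file stanza for such a test might look like this
(a sketch; the `blogspam_pagespec` value shown is just an example, not
necessarily the default):

    add_plugins => [qw{blogspam}],
    blogspam_options => 'fail',
    blogspam_pagespec => 'postcomment(*)',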

The `blogspam_pagespec` setting is a [[ikiwiki/PageSpec]] that can be
used to configure which pages are checked for spam. The default is to check

@@ -6,8 +6,11 @@
Someone was just asking for it and I had written these two plugins already some months ago,
so I'm now publishing them here.

<http://www.schwinge.homeip.net/~thomas/tmp/copyright.pm>
<http://www.schwinge.homeip.net/~thomas/tmp/license.pm>
[`copyright.pm`](http://www.schwinge.homeip.net/~thomas/tmp/copyright.pm)
and
[`license.pm`](http://www.schwinge.homeip.net/~thomas/tmp/license.pm)

Usage instructions are found inside the two plugin files.

--[[tschwinge]]

@@ -28,6 +31,11 @@ those plugins with a current ikiwiki release, i.e. 2.61, but they appeared to do
nothing, really. Also, those example pages don't seem to use those plugins, even;
they set "copyright" and "license" properties using ordinary [[meta]] tags. Maybe
I'm missing something terribly obvious? --Peter

> Only obvious if you read the source :-). You need to put a file named "copyright.html"
> (respectively "license.html") in your wiki. Everything underneath that (in the wikilink sense) will use that
> content for the license or copyright. Saves putting \[[meta license="foo"]] in every page [[DavidBremner]]

By the way: these need not be *HTML* files; `copyright.mdwn`,
respectively `license.mdwn`, or any other format supported
by ikiwiki are likewise fine. --[[tschwinge]]

@@ -360,12 +360,30 @@ daring a timid "please pull"... or rather, please review again :)
> * What's the reasoning behind checking that the link plugin
>   is enabled? AFAICS, the same code in the scan hook should
>   also work when other link plugins like camelcase are used.
>>
>> That's right, fixed.
>>
> * In `pagetemplate` there is a comment that claims the code
>   relies on `genpage`, but I don't see how it does; it seems
>   to always add a discussion link?
>>
>> It relies on IkiWiki::Render's `genpage` as this function sets the
>> `discussionlink` template param iff it considers a discussion link
>> should appear on the current page. That's why I'm testing
>> `$template->param('discussionlink')`.
>>
> * Is there any real reason not to allow removing a translation?
>   I'm imagining a spammy translation, which an admin might not
>   be able to fix, but could remove.
>>
>> On the other hand, allowing one to "remove" a translation would
>> probably lead to misunderstandings, as such a "removed" translation
>> page would appear back as soon as it is "removed" (with no strings
>> translated, though). I think an admin would be in a position to
>> delete the spammy `.po` file by hand using whatever VCS is in use.
>> Not that I'd really care, but I am slightly in favour of the way
>> it currently works.
>>
> * Re the meta title escaping issue worked around by `change`.
>   I suppose this does not only affect meta, but other things
>   at scan time too. Also, handling it only on rebuild feels

@@ -383,4 +401,6 @@ daring a timid "please pull"... or rather, please review again :)
>
> --[[Joey]]
>>
>> I'll think about it soon.
>>
>> --[[intrigeri]]

@@ -6,4 +6,16 @@ It is scheduled for 2007-12-20 18:00 UTC.
See <http://umeet.uninet.edu/umeet2007/english/prog.html> for the complete program
and for information about how to join.

--[[tschwinge]]
--[[tschwinge]]

----
<a id="Yahoo_unsupported" />
[[!tag bugs]]

It looks like OpenID 2.0 (the only one supported by Yahoo) is not supported in ikiwiki. :( I signed up at http://openid.yahoo.com/ , and tried to login to my ikiwiki with the new ID (of the form: https://me.yahoo.com/a-username), but Yahoo told me:

> Sorry! You will not be able to login to this website as it is using an older version of the OpenID technology. Yahoo! only supports OpenID 2.0 because it is more secure. For more information, check out the OpenID documentation at [Yahoo! Developer Network](http://developer.yahoo.com/openid/).

-- Ivan Z.

They have more on OpenID 2.0 in [their FAQ](http://developer.yahoo.com/openid/faq.html). --Ivan Z.

@@ -77,3 +77,75 @@ as the script handler, or only on `mod_perl` to be installed and loaded.
* [mod_auth_tkt](http://www.openfusion.com.au/labs/mod_auth_tkt/) along with CPAN's
  `Apache::AuthTkt`
--[[intrigeri]]

I've more or less managed to implement something based on `mod_perl` and
`Apache::AuthenHook`, respectively in Debian packages `libapache2-mod-perl2`
and `libapache-authenhook-perl`.

In the Apache VirtualHost configuration, I have added the following:

    PerlLoadModule Apache::AuthenHook
    PerlModule My::IkiWikiBasicProvider

    <Location /test/>
        AuthType Basic
        AuthName "wiki"
        AuthBasicProvider My::IkiWikiBasicProvider
        Require valid-user
        ErrorDocument 401 /test/ikiwiki.cgi?do=signin
    </Location>
    <LocationMatch "^/test/(ikiwiki\.cgi$|.*\.css$|wikiicons/)">
        Satisfy any
    </LocationMatch>

The perl module lies in `/etc/apache2/My/IkiWikiBasicProvider.pm`:

    package My::IkiWikiBasicProvider;

    use warnings;
    use strict;
    use Apache2::Const -compile => qw(OK DECLINED HTTP_UNAUTHORIZED);
    use Storable;
    use Authen::Passphrase;

    sub userinfo_retrieve () {
        my $userinfo=eval{ Storable::lock_retrieve("/var/lib/ikiwiki/test/.ikiwiki/userdb") };
        return $userinfo;
    }

    sub handler {
        my ($r, $user, $password) = @_;
        my $field = "password";

        if (! defined $password || ! length $password) {
            return Apache2::Const::DECLINED;
        }
        my $userinfo = userinfo_retrieve();
        if (! length $user || ! defined $userinfo ||
            ! exists $userinfo->{$user} || ! ref $userinfo->{$user}) {
            return Apache2::Const::DECLINED;
        }
        my $ret=0;
        if (exists $userinfo->{$user}->{"crypt".$field}) {
            my $p = Authen::Passphrase->from_crypt($userinfo->{$user}->{"crypt".$field});
            $ret=$p->match($password);
        }
        elsif (exists $userinfo->{$user}->{$field}) {
            $ret=$password eq $userinfo->{$user}->{$field};
        }
        if ($ret) {
            return Apache2::Const::OK;
        }
        return Apache2::Const::DECLINED;
    }

    1;

This setup also allows people with the master password to create their own
account.

I'm not really fluent in Perl, and all this can probably be improved (*or
might destroy your computer as it is* and YMMV).

-- [[Lunar]]

@@ -1,7 +1,7 @@
## Nested plugins

Is it possible to use another plugin within your toggle plugin? For example,
I want to have toggleable table and try to use Victor Moral's table plugin,
I want to have toggleable table and try to use [[Victor Moral|users/victormoral]]'s [[table plugin|plugins/table]],
but no success. How can I do it?
--PTecza

@@ -15,3 +15,29 @@ but no success. How can I do it?


## [[bugs/Bug_when_toggling_in_a_preview_page]]

----

## Using toggle directives in a list item ##
Take this code snippet.

    * [[!toggle id="test" text="test"]]
      [[!toggleable id="test text="""toggle"""]]

In the HTML-output the `ul` and `div` overlap.

    <div id="content">
    <ul>
    <li><a class="toggle" href="#test.test">test</a>
    <div class="toggleable" id="test.-test"></li>
    </ul>

    <p>toggle</p>

    </div>

    </div>

Even after fixing this manually, the Javascript seems not to work, and `toggle` is shown unconditionally.

I do not know if this is due to the [[shortcoming with nested preprocessor directives|todo/nested_preprocessor_directives]] you mentioned in the beginning of this page. Maybe a note could be added to the main page of the plugin. --Paul

@@ -98,7 +98,7 @@ function is passed no values.

This allows a plugin to manipulate the list of files that need to be
built when the wiki is refreshed. The function is passed a reference to an
array of pages that will be rebuilt, and can modify the array, either
array of files that will be rebuilt, and can modify the array, either
adding or removing files from it.

### scan

@@ -107,8 +107,8 @@ adding or removing files from it.

This hook is called early in the process of building the wiki, and is used
as a first pass scan of the page, to collect metadata about the page. It's
mostly used to scan the page for [[WikiLinks|ikiwiki/WikiLink]], and add them to `%links`.
Present in IkiWiki 2.40 and later.
mostly used to scan the page for [[WikiLinks|ikiwiki/WikiLink]], and add
them to `%links`. Present in IkiWiki 2.40 and later.

The function is passed named parameters "page" and "content". Its return
value is ignored.
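
To illustrate, here is a minimal sketch of a scan hook for a hypothetical
"foolink" plugin. The `[[foo: ...]]` syntax and the regexp are purely
illustrative; only the `hook` registration and the `add_link` and
`linkpage` calls are real ikiwiki API:

    package IkiWiki::Plugin::foolink;

    use warnings;
    use strict;
    use IkiWiki 3.00;

    sub import {
        hook(type => "scan", id => "foolink", call => \&scan);
    }

    sub scan (@) {
        my %params=@_;
        my $page=$params{page};
        my $content=$params{content};

        # Record each referenced page in %links, using add_link so
        # repeated references do not accumulate duplicates.
        while ($content =~ /\[\[foo:\s*([^\]\s]+)\]\]/g) {
            add_link($page, linkpage($1));
        }
    }

    1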

@@ -151,11 +151,11 @@ parameter is set to a true value if the page is being previewed.
If `hook` is passed an optional "scan" parameter, set to a true value, this
makes the hook be called during the preliminary scan that ikiwiki makes of
updated pages, before beginning to render pages. This should be done if the
hook modifies data in `%links`. Note that doing so will make the hook be
run twice per page build, so avoid doing it for expensive hooks. (As an
optimisation, if your preprocessor hook is called in a void context, you
can assume it's being run in scan mode, and avoid doing expensive things at
that point.)
hook modifies data in `%links` (typically by calling `add_link`). Note that
doing so will make the hook be run twice per page build, so avoid doing it
for expensive hooks. (As an optimisation, if your preprocessor hook is
called in a void context, you can assume it's being run in scan mode, and
avoid doing expensive things at that point.)

Note that if the [[htmlscrubber]] is enabled, html in
preprocessor [[ikiwiki/directive]] output is sanitised, which may limit what

@@ -174,7 +174,8 @@ links. The function is passed named parameters "page", "destpage", and
and later.

Plugins that implement linkify must also implement a scan hook, that scans
for the links on the page and adds them to `%links`.
for the links on the page and adds them to `%links` (typically by calling
`add_link`).

### htmlize

@@ -197,6 +198,9 @@ value, then the id parameter specifies not a filename extension, but
a whole filename that can be htmlized. This is useful for files
like `Makefile` that have no extension.

If `hook` is passed an optional "longname" parameter, this value is used
when prompting a user to choose a page type on the edit page form.
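
For example (a sketch; the "Markdown" label is illustrative):

    hook(type => "htmlize", id => "mdwn", call => \&htmlize,
        longname => "Markdown");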

### pagetemplate

    hook(type => "pagetemplate", id => "foo", call => \&pagetemplate);

@@ -612,6 +616,19 @@ The most often used is "location", which specifies the location the
PageSpec should match against. If not passed, relative PageSpecs will match
relative to the top of the wiki.

#### `pagespec_match_list($$;@)`

Passed a reference to a list of page names, and a [[ikiwiki/PageSpec]],
returns the set of pages that match the [[ikiwiki/PageSpec]].

Additional named parameters can be passed, to further limit the match.
The most often used is "location", which specifies the location the
PageSpec should match against. If not passed, relative PageSpecs will match
relative to the top of the wiki.

Unlike pagespec_match, this may throw an error if there is an error in
the pagespec.
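
A minimal sketch of a call (the pagespec and location shown are just
examples):

    my @matching=pagespec_match_list([keys %pagesources],
        "blog/* and !*/Discussion", location => "index");

Since a bad pagespec is reported by throwing an error rather than via the
return value, callers that cannot tolerate dying may want to wrap the call
in `eval {}`.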

#### `bestlink($$)`

Given a page and the text of a link on the page, determine which

@@ -780,6 +797,11 @@ Optionally, a third parameter can be passed, to specify the preferred
filename of the page. For example, `targetpage("foo", "rss", "feed")`
will yield something like `foo/feed.rss`.

#### `add_link($$)`

This adds a link to `%links`, ensuring that duplicate links are not
added. Pass it the page that contains the link, and the link text.
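
For example (the page and link names are illustrative):

    # Record that page "blog/post" links to "tags/perl". Calling this
    # again with the same pair is a no-op, so hooks that run twice per
    # build do not grow duplicate entries in %links.
    add_link("blog/post", "tags/perl");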

## Miscellaneous

### Internal use pages

@@ -930,9 +952,12 @@ It's also possible to write plugins that add new functions to
IkiWiki::PageSpec package, that is named `match_foo`, where "foo()" is
how it will be accessed in a [[ikiwiki/PageSpec]]. The function will be passed
two parameters: The name of the page being matched, and the thing to match
against. It may also be passed additional, named parameters. It should return
an IkiWiki::SuccessReason object if the match succeeds, or an
IkiWiki::FailReason object if the match fails.
against. It may also be passed additional, named parameters.

It should return an IkiWiki::SuccessReason object if the match succeeds, or
an IkiWiki::FailReason object if the match fails. If the match cannot be
attempted at all, for any page, it can instead return an
IkiWiki::ErrorReason object explaining why.
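
A minimal sketch of such a function follows; the `maxdepth` name and
semantics are invented for illustration, not an existing pagespec function:

    package IkiWiki::PageSpec;

    # Usable in a pagespec as maxdepth(2): succeeds for pages at most
    # that many path components deep.
    sub match_maxdepth ($$;@) {
        my $page=shift;
        my $arg=shift;

        # A bad argument affects every page alike, so report it with
        # an ErrorReason rather than a plain failure.
        if ($arg !~ /^\d+$/) {
            return IkiWiki::ErrorReason->new("maxdepth wants a number");
        }

        my $depth=($page =~ tr{/}{}) + 1;
        if ($depth <= $arg) {
            return IkiWiki::SuccessReason->new("$page is only $depth deep");
        }
        return IkiWiki::FailReason->new("$page is deeper than $arg");
    }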

### Setup plugins

@@ -95,3 +95,9 @@ Does that mean that apache:apache should just own everything, and I should only
> Ikiwiki is designed so that you don't have to worry about this kind of permissions issue.
> Instead you can just configure the ikiwiki.cgi, in the setup file, to be suid to your
> user. Then there's no need to let the web server's user modify files at all. --[[Joey]]


## using a local wiki to preview changes: an srcdir needed?
I have read the hints about using a local wiki to preview changes, but I haven't understood: is it assumed that I should also have a separate "srcdir" for this local preview-wiki (as is done for the main wiki site), or could I point the local ikiwiki's "srcdir" at the working dir? Can something bad happen if I do this? I guess not, because--as I see it--the reason to have 2 repos for the main site was only to enable pushing to it, so it's a peculiarity of git, and not a requirement for the clean functioning of ikiwiki.

Ok, probably I have answered my own question, but I'll let this comment stay here, in case someone else is thinking about the same issue. --Ivan Z.

@@ -3,5 +3,4 @@
* How about some math?
* $\frac{1}{2} = \frac{3}{6}$



and teximg? [[!teximg code="\frac{1}{2}"]]

@@ -96,31 +96,49 @@ What is the syntax for specifying the adminuser as an openid user? I've tried a

----

I apologize if this is the incorrect forum for this question, but I am trying to get ikiwiki set up and running with git. I followed all the directions and all seems to work until I go back and try to make changes. The steps I am performing:
I apologize if this is the incorrect forum for this question, but I am
trying to get ikiwiki set up and running with git. I followed all the
directions and all seems to work until I go back and try to make changes.
The steps I am performing:

    cd $SRCDIR (e.g. ~/ikisrc)
    vim index.mdwn (add a couple lines)
    git commit -a -m 'test'
    git push

I then get a long error message which reads in part "You asked me to pull without telling me which branch you
want to merge with, and 'branch.master.merge' in your configuration file does not tell me either." From that point on, I get:
I then get a long error message which reads in part "You asked me to pull
without telling me which branch you want to merge with, and
'branch.master.merge' in your configuration file does not tell me either."
From that point on, I get:

    sws@odin:~/dev/ikisrc$ git push
    To /home/git/repos/myikiwiki.git
    ! [rejected] master -> master (non-fast forward)
    error: failed to push to '/home/git/repos/myikiwiki.git'

If I do a git clone ssh://odin/path/to/$REPOSITORY from another machine and try to edit I get the same error sequence. What am I doing wrong?
If I do a git clone ssh://odin/path/to/$REPOSITORY from another machine and
try to edit I get the same error sequence. What am I doing wrong?

> I don't know. The only time I have seen this message is when
> the master git repository was not bare. All current instructions and
> `ikiwiki-makerepo` have a proper bare repo used for the master
> repository, but perhaps you followed some old, broken instructions, or
> forgot to make it bare? --[[Joey]]

-----

I follow every steps of the setup procedure, change some templates and tried to modify some page through the web but was not able to do so. Every page seems to be locked by the adminuser user. When I remove the adminuser in the setup file, every ran fine. Did I miss something ? What is exactly the adminuser supposed to be allowed to ? Is he the only user allowed to modify pages ?
I follow every steps of the setup procedure, change some templates and
tried to modify some page through the web but was not able to do so. Every
page seems to be locked by the adminuser user. When I remove the adminuser
in the setup file, every ran fine. Did I miss something ? What is exactly
the adminuser supposed to be allowed to ? Is he the only user allowed to
modify pages ?

> This was a bug in ikiwiki that was fixed in version 2.44. --[[Joey]]

-----
I hope you guys can put up with an absolute newbie. I am fairly new to linux and completely new to Perl. I have just installed MoinMoin locally in my PC, running ubuntu 8.4 and was about to use it until I ran into your ikiwiki. I thought ikiwiki is a better fit for what I want to do, so am seriously considering installing it as well in ubuntu. Except that the install seems way beyond my understanding.

Do I need to install git first? Which git -- the git-core? Ubuntu's instructions on installing the git-core is: "sudo apt-get install git-core". Is that it? Do I need to do a git-init as well, or will the ikiwiki-makerepo handle that? If I have to do a git-init as well, what --share values should I specify?

@@ -165,3 +183,58 @@ I setup ikiwiki on a fedora 10 machine and I am using apache as my http server.
> ikiwiki respects the umask, so if your umask is one that causes things to
> be group writable, they will be. If you want to override that, there is
> also a `umask` setting in your setup file. --[[Joey]]

----

/etc/ikiwiki/auto.setup tries to get abs_path of a non-existent
"repository" path (in ikiwiki-makerepo), and that doesn't work in my perl:

<pre>
[mort@localhost ~]$ perl -e 'use Cwd q{abs_path}; print abs_path("/var")'
/var[mort@localhost ~]$ perl -e 'use Cwd q{abs_path}; print abs_path("/abcde")'
[mort@localhost ~]$
</pre>

Because of this, /etc/ikiwiki/auto.setup fails:

<pre>
$ ikiwiki -setup /etc/ikiwiki/auto.setup
What will the wiki be named? wiki
What revision control system to use? git
What wiki user (or openid) will be admin? mort


Setting up wiki ...
internal error finding repository abs_path
/etc/ikiwiki/auto.setup: failed to set up the repository with ikiwiki-makerepo

usage: ikiwiki [options] source dest
       ikiwiki --setup configfile
$ perl -v

This is perl, v5.8.8 built for i386-linux-thread-multi
(with 2 registered patches, see perl -V for more detail)

Copyright 1987-2007, Larry Wall

Perl may be copied only under the terms of either the Artistic License or the
GNU General Public License, which may be found in the Perl 5 source kit.

Complete documentation for Perl, including FAQ lists, should be found on
this system using "man perl" or "perldoc perl". If you have access to the
Internet, point your browser at http://www.perl.org/, the Perl Home Page.

$
</pre>

Couldn't ikiwiki's "make test" check for this, so that one knows something will go wrong?
-- Ivan Z.

> FWIW, I tried the same thing with perl 5.8.8 from Debian etch, and its
> Cwd does not have the problem. But I've modified `ikiwiki-makerepo` to
> avoid using `abs_path` this way anyhow. --[[Joey]]

Thank you! I'm not a Perl programmer, so what's your opinion: is this behavior a violation of the specification of abs_path, and should I report it to [ALTLinux](http://bugs.altlinux.org) (the distro)? --Ivan Z.

> That is not entirely clear to me from the documentation. It doesn't
> say the path has to exist, but doesn't say it cannot either. --[[Joey]]

@@ -60,7 +60,7 @@ This page controls what shortcut links the wiki supports.
* [[!shortcut name=man url="http://linux.die.net/man/%s"]]
* [[!shortcut name=ohloh url="http://www.ohloh.net/projects/%s"]]

To add a new shortcut, use the `shortcut`
To add a new shortcut, use the [[`shortcut`|ikiwiki/directive/shortcut]]
[[ikiwiki/directive]]. In the url, "%s" is replaced with the
text passed to the named shortcut, after url-encoding it, and '%S' is
replaced with the raw, non-encoded text. The optional `desc` parameter

@@ -1,3 +1,5 @@
I managed to install ikiwiki on eggplant farms, with most basic features except markdown.

I think ikiwiki is more suitable for a VPS/dedicated server. Shared hosting doesn't fit.

I just (2009/04/27) installed ikiwiki on DreamHost and the CPAN instructions here are unnecessarily complicated. I used "cpan" instead of "perl -MCPAN -e shell" and had no trouble with that portion of the install. --[[schmonz]]

@@ -24,21 +24,6 @@ git-fast-import statistics:

etc.

(Removed now dead info and blah blah.)

> Well, if this really is a script error, it's not really the script, but the wordpress XML dump, referring to a
> possible malformed or invalid unicode character in the dump file. This is what I can gather from other scripts.
> I'll be checking my dump file shortly.

>> This is only part of the problem... I'm not exactly sure what's going on, and it's getting late/early for me....

>>> I used --force for fast-import, but then everything seems deleted, so you end up doing a reset, checkout, add, *then* commit.
>>> Seems really odd. I edited the script however, maybe this is why... these are my changes:

    -print "data %d" % len(data)
    +print "data %d merge refs/heads/%s" % (len(data), branch)

>>> That control character is a ^q^0 in emacs, see git fast-import --help for more info.
>>> I'll be trying an import *without* that change, to see what happens.

>>>> I still have to do the above to preserve the changes done by this script... (removed previous note).

> It works fine.... The script is picky about having everything in proper UTF-8, **and** proper XML and HTML escaping. You need that to have a successful import. I let Emacs remove DOS line endings, and it works OK (if on *nix of some sort, of course). The thing with this `git fast-import` is that you have to `git reset` afterwards, (let's say you put them in posts/) `git checkout posts`, `git add posts`, then commit. I don't know if this is a characteristic of `git fast-import`, but this is the way I get my posts to exist on the filesystem. If I don't do this, then I lose the data. If you get that "Not updating..." error, then just --force the import in. --[[users/simonraven]]

@@ -23,3 +23,21 @@ On the [[/ikiwiki/pagespec]] page, it says "internal" pages aren't "first-class"

>> Oooh, I see, it's referring to an absolute path (relative to the site), right?
>> Thanks :).

>>> Right, PageSpecs are always absolute paths unless prefixed with `./`
>>> (somewhat confusingly since WikiLinks are always relative unless
>>> prefixed with `/` ...) --[[Joey]]

>> This is not working for me at all, all I get is some SHA1 hash all the time. I've tried variants of the `internal()` arg, and nothing gets spit out. --[[simonraven]]

>>> Sounds like [[!debbug 380212]]?
>>> If so, the fix is to use Text::Markdown, or markdown 1.0.2 instead of buggy
>>> old markdown 1.0.1. --[[Joey]]

>> `ii libtext-markdown-perl 1.0.21-1 Markdown and MultiMarkdown markup languages library`
>>
>> I'm using `Text::Markdown` due to its "multi-markdown" support. Yes, it does seem exactly like [[!debbug 380212]].
>> Maybe update it from CPAN + dh-make-perl (if there's a new one, that is) --[[simonraven]]
>> I've just built and installed `libtext-markdown-perl 1.0.21-1` from dh-make-perl & CPAN, and regenerated that page.. let's see what happens... no hashes, but nothing else either:
>>
>> "kijkaqawej: last checked 10 minutes ago (25 posts)" -- inside of a box, no display of posts.

@@ -63,6 +63,8 @@ The iki-fast-load ruby script from the u32 page is given below:
        pipe.puts
      end

> Would be nice to know where you could get "node-callbacks"... this thing is useless without it. --[[users/simonraven]]


Mediawiki.pm - A plugin which supports mediawiki format.

@@ -52,6 +52,6 @@ Note that the first part enables cgi server wide but depending on default
configuration, it may not be enough. The second part creates a specific
rule that allows `ikiwiki.cgi` to be executed.

**Warning:** I only use this on my development server (offline). I am not
sure of how secure this approach is. If you have any thought about it, feel
free to let me know.
**Warning:** I only use this lighttpd configuration on my development
server (offline). I am not sure of how secure this approach is.
If you have any thought about it, feel free to let me know.

@@ -0,0 +1,36 @@
## warning: lighttpd only or both?

Is your warning at the bottom (you don't know how secure it is) only about
lighttpd, or is it about the apache2 configuration as well?

> The latter. (Although I don't know why using lighttpd would lead
> to any additional security exposure anyway.) --[[Joey]]

I'm asking this because right now I want to set up an httpd solely for the
public use of ikiwiki on a general purpose computer (there are other things
there), and so I need to choose the more secure solution. --Ivan Z.

> AFAIU, my main simplest security measure should be running the public
> ikiwiki's cgi under a special user, but then: how do I push to the repo
> owned by that other user? I see, probably I should set up the public wiki
> under the special user (so that it was able to create the cgi-script with
> the desired permission), and then give my personal user the required
> permissions to make a git-push by, say, creating a special Unix group for
> this.

> Shouldn't there be a page here which would document a secure public and
> multi-user installation of ikiwiki (by "multi-user" I mean writable by a
> group of local Unix users)? If there isn't such yet, I started writing it
> with this discussion. --Ivan Z.

> I see, perhaps a simpler setup would not make use of a Unix group, but
> simply allow pushing to the public wiki (kept under a special user) through
> git+ssh. --Ivan Z.

>> Yes, it's certainly possible to configure git (and svn, etc) repositories so that
>> two users can both push to them. There should be plenty of docs out there
>> about doing that.
>>
>> The easiest way though is probably
>> to add your ssh key to the special user's `.ssh/authorized_keys`
>> and push that way. --[[Joey]]

@@ -2,7 +2,7 @@

I modified the script a bit so categories and tags would actually show up in the output file.


-----
<pre>
#!/usr/bin/env python

@@ -29,7 +29,7 @@ I modified the script a bit so categories and tags would actually show up in the
Usage: run --help as an argument with this script.

Notes:
 I added some extra bits to include the [[!tag foo]] stuff in the post,
 I added some extra bits to include the \[[!tag foo]] stuff in the post,
 as it wasn't before, at all. I'll diff the versions out so you can see
 the mess I made :).

@@ -66,7 +66,7 @@ def main(name, email, subdir, branch='master'):
        commit_msg = """Importing WordPress post "%s" [%s]""" % (x.title.string, x.guid.string)
        timestamp = time.mktime(time.strptime(x.find('wp:post_date_gmt').string, "%Y-%m-%d %H:%M:%S"))

        content = '[[!meta title="%s"]]\n\n' % (x.title.string.replace('"', r'\"'))
        content = '\[[!meta title="%s"]]\n\n' % (x.title.string.replace('"', r'\"'))
        content += x.find('content:encoded').string.replace('\r\n', '\n')

        # categories = x.findAll('category')

@@ -76,8 +76,8 @@ def main(name, email, subdir, branch='master'):
        """
        We do it differently here because we have duplicates otherwise.
        Take a look:
        <category><![CDATA[Health]]></category>
        <category domain="category" nicename="health"><![CDATA[Health]]></category>
        <category><![CDATA[Health]]></category>
        <category domain="category" nicename="health"><![CDATA[Health]]></category>

        If we do what the original did, we end up with all tags and cats doubled.
        Therefore we only pick out nicename="foo". Our 'True' below is our 'foo'.

@@ -90,14 +90,14 @@ def main(name, email, subdir, branch='master'):
            for cat in categories:
                # remove 'tags/' because we have a 'tagbase' set.
                # your choice: 'tag', or 'taglink'
                # content += "\n[[!tag %s]]" % (cat.string.replace(' ', '-'))
                content += "\n[[!taglink %s]]" % (cat.string.replace(' ', '-'))
                # content += "\n\[[!tag %s]]" % (cat.string.replace(' ', '-'))
                content += "\n\[[!taglink %s]]" % (cat.string.replace(' ', '-'))
                # print >>sys.stderr, cat.string.replace(' ', '-')

        # moved this thing down
        data = content.encode('ascii', 'html_replace')
        print "commit refs/heads/%s" % branch
        print "committer %s <%s> %d +0000" % (name, email, timestamp)
        print "committer %s <%s> %d +0000" % (name, email, timestamp)
        print "data %d" % len(commit_msg)
        print commit_msg
        print "M 644 inline %s" % os.path.join(subdir, "%s.mdwn" % stub)

@@ -111,3 +111,123 @@ if __name__ == "__main__":
        main(*sys.argv[1:])

</pre>
-----

I have another version of the script, which uses the `timestamp` from the script, and inserts that as a \[[!meta date="foodate"]]. I'm posting it here just in case I happen to be doing something to the httpd.

(Hopefully I've escaped everything properly; if I missed something, check the source.)

-----
<pre>
#!/usr/bin/env python

"""
Purpose:
 Wordpress-to-Ikiwiki import tool

Copyright:
 Copyright (C) 2007 Chris Lamb <chris@chris-lamb.co.uk>

 This program is free software: you can redistribute it and/or modify
 it under the terms of the GNU General Public License as published by
 the Free Software Foundation, either version 3 of the License, or
 (at your option) any later version.

 This program is distributed in the hope that it will be useful,
 but WITHOUT ANY WARRANTY; without even the implied warranty of
 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 GNU General Public License for more details.

 You should have received a copy of the GNU General Public License
 along with this program. If not, see <http://www.gnu.org/licenses/>.

Usage: run --help as an argument with this script.

Notes:
 I added some extra bits to include the \[[!tag foo]] stuff in the post,
 as it wasn't before, at all. I'll diff the versions out so you can see
 the mess I made :).

"""

import os, sys
import time
import re

from datetime import datetime
from BeautifulSoup import BeautifulSoup

import codecs, htmlentitydefs

codecs.register_error('html_replace', lambda x: (''.join([u'&%s;' \
    % htmlentitydefs.codepoint2name[ord(c)] for c in x.object[x.start:x.end]]), x.end))

def main(name, email, subdir, branch='master'):
    soup = BeautifulSoup(sys.stdin.read())

    # Regular expression to match stub in URL.
    stub_pattern = re.compile(r'.*\/(.+)\/$')

    for x in soup.findAll('item'):
        # Ignore draft posts
        if x.find('wp:status').string != 'publish': continue

        match = stub_pattern.match(x.guid.string)
        if match:
            stub = match.groups()[0]
        else:
            # Fall back to our own stubs
            stub = re.sub(r'[^a-zA-Z0-9_]', '-', x.title.string).lower()

        commit_msg = """Importing WordPress post "%s" [%s]""" % (x.title.string, x.guid.string)
        timestamp = time.mktime(time.strptime(x.find('wp:post_date_gmt').string, "%Y-%m-%d %H:%M:%S"))
        content = '\[[!meta title="%s"]]\n' % (x.title.string.replace('"', r'\"'))
        content += "\[[!meta date=\"%s\"]]\n" % datetime.fromtimestamp(timestamp)
        content += x.find('content:encoded').string.replace('\r\n', '\n')

        """
        We do it differently here because we have duplicates otherwise.
        Take a look:
        <category><![CDATA[Health]]></category>
        <category domain="category" nicename="health"><![CDATA[Health]]></category>

        If we do what the original did, we end up with all tags and cats doubled.
        Therefore we only pick out nicename="foo". Our 'True' below is our 'foo'.
        I'd much rather have the value of 'nicename', and tried, but my
        python skillz are extremely limited....
        """
        categories = x.findAll('category', nicename=True)
        if categories:
            content += "\n"
            for cat in categories:
                # remove 'tags/' because we have a 'tagbase' set.
                # your choice: 'tag', or 'taglink'
                # content += "\n\[[!tag %s]]" % (cat.string.replace(' ', '-'))
                content += "\n\[[!taglink %s]]" % (cat.string.replace(' ', '-'))
                # this is just debugging, and for fun
                # print >>sys.stderr, cat.string.replace(' ', '-')

        # moved this thing down
        data = content.encode('ascii', 'html_replace')
        print "commit refs/heads/%s" % branch
        print "committer %s <%s> %d +0000" % (name, email, timestamp)
        print "data %d" % len(commit_msg)
        print commit_msg
        print "M 644 inline %s" % os.path.join(subdir, "%s.mdwn" % stub)
        print "data %d" % len(data)
        print data

if __name__ == "__main__":
    if len(sys.argv) not in (4, 5):
        print >>sys.stderr, "%s: usage: %s name email subdir [branch] < wordpress-export.xml | git-fast-import " % (sys.argv[0], sys.argv[0])
    else:
        main(*sys.argv[1:])
</pre>
-----


[[!tag wordpress]]
[[!tag python]]
[[!tag conversion]]
[[!tag ikiwiki]]

@@ -28,3 +28,40 @@ NicolasLimare
> sidebar could be done as you describe using .shtml. --[[Joey]]

[[wishlist]]

> I have a plan for a way to avoid unnecessary rebuilds caused by the
> sidebar. The idea is to use wikistate to store what a sidebar renders to.
> Then in the needsbuild hook, render sidebar(s) and compare with their
> previous stored rendering. If a sidebar's rendered content has changed,
> then all pages that display that sidebar need to be forced to be rebuilt.
>
> Also, if there is no previous stored rendering for a sidebar, or
> if there is a stored rendering for a sidebar page that no longer exists, then
> the pages need to be rebuilt. (This should deal with the [[bugs/Building_a_sidebar_does_not_regenerate_the_subpages]] bug.)
>
> This would also save significant time, since the stored sidebar rendering
> could just be dumped into the page by the pagetemplate hook. Current code
> re-loads and renders the same sidebar file for every page built!
>
> The sticky part is (relative) links on the sidebar. These would need to
> be modified somehow depending on the page that the sidebar is placed on,
> to not break the link.
>
> Another wrinkle is changing subpage links on a sidebar. Suppose a sidebar
> links to page `foo`. If page `bar/foo` exists, the sidebar on page bar will,
> currently, link to that page, in preference to a toplevel `foo`.
> If `bar/foo` is removed, it will update to link to `foo`. With the new
> scheme, the stored sidebar rendering is not for page `foo`, and so
> the change of the `bar/foo` link will not be noticed or acted on.
> Granted, it's unlikely that anyone relies on the current behavior. You
> generally want links on a sidebar to link to the same place on every page
> that displays it. So finding some way to force all links on a sidebar to
> be handled absolutely and documenting that would avoid this problem.
>
> So, one way to handle both the above problems would be to use the
> pre-rendered sidebar for each page, but use a html parser to look for
> links in it, and munge them to work as relative links on the page the
> sidebar is being added to. Or, if the wiki's url is known, just do this
> once when rendering the sidebar, adding the full url to the links.
> (Maybe require `url` be set when using sidebar?)
> --[[Joey]]

@@ -1,4 +1,4 @@
It would be great if I could tell ikiwiki to automatically instantiate pages for each tag, according to a template, especially when `$tagbase` is set.
It would be great if I could tell ikiwiki to automatically instantiate pages for each [[tag|/tags]], according to a template, especially when `$tagbase` is set.

Tags are mainly specific to the object to which they’re stuck. However, I often use them the other way around, too: as concepts. And sometimes I’d like to see all pages related to a given concept (“tagged with a given tag”). The only way to do this with ikiwiki is to instantiate a page for each tag and slap a map on it. This is quite tedious and I’d really love to see Ikiwiki do so by default for all tags.

@@ -10,7 +10,7 @@ I would love to see this as well. -- dato

---

I have created a patch to tag.pm to add the option for auto-creating tag pages.
I have created a patch to [[tag.pm|plugins/tag]] to add the option for auto-creating tag pages.
A new setting, `tag_autocreate`, is used to enable or disable auto-creating tag pages.
The new tag file is created during the preprocess phase.
The new tag file is then compiled during the change phase.

@@ -96,7 +96,7 @@ _tag.pm from version 3.01_
+


This uses a template called `autotagpage.tmpl`; here is my template file:
This uses a [[template|wikitemplates]] called `autotagpage.tmpl`; here is my template file:

    \[[!inline pages="link(<TMPL_VAR TAG>)" archive="yes"]]

@@ -106,3 +106,20 @@ I am not sure if that is the best way to handle it.

[[!tag patch]]
-- Jeremy Schultz <jeremy.schultz@uleth.ca>

No, this doesn't help:

    +    # This refresh/saveindex is to fix the Tags link
    +    # With out this additional refresh/saveindex the tag link displays ?tag
    +    IkiWiki::refresh();
    +    IkiWiki::saveindex();

On the second extra pass, it doesn't notice that it has to update the "?"-link. If I run ikiwiki once more, it is updated. I don't know yet how this should be fixed, because I don't know the internals of ikiwiki well enough. Something inhibits detecting the need to update in refresh() in Render.pm; perhaps, this condition:

    if (! $pagemtime{$page}) {
    ...
        push @add, $file;
    ...
    }

is not satisfied for the newly created tag page. I shall put debug msgs into Render.pm to find out better how it works. --Ivan Z.

@@ -0,0 +1,31 @@
The [[blogspam plugin|plugins/blogspam]] is just great.

However, it lacks support in the web interface to [train comments as
SPAM](http://blogspam.net/api/classifyComment.html), when they were
erroneously identified as ham. It would be great to have such
support, also in the spirit of helping
[blogspam.net](http://blogspam.net) to get better and better.

What would constitute the most appropriate user interface is not entirely
clear to me in the general case (wiki page editing). The case of blog
comments looks easier: when the admin user is logged in (and if the
blogspam plugin is enabled), each comment can have an extra link "mark
as SPAM" which would both delete/revert the comment and submit it to
the configured blogspam server for training.

> Comments can't have an extra link when the admin user is logged
> in, because the admin user sees the same static pages as everyone
> else (non-admins still see the "remove" link provided by the remove
> plugin, too). Perhaps a better UI would be that the action of that
> link was overridden by the blogspam plugin to go to a form with
> a checkbox for "also submit as spam"? --[[smcv]]

Similarly, ham training can be plugged directly into the current
comment moderation interface. Each comment that gets approved by the
admin can be sent to blogspam.net as ham. If this is considered too
"aggressive", this behaviour could be made opt-in via a configuration
option.

-- [[Zack]]

[[!tag wishlist]]
@@ -0,0 +1,34 @@
At the moment the text area in the edit form has a fixed size of 20 rows.

On longer pages it's not very comfortable to edit pages with such a small box. The whole screen size should be used instead ([example](http://img3.imagebanana.com/img/bl10u9mb/editingtodo_1241804460828.png)).

> The whole screen width is used, via the following
> from style.css:
>
>     {
>         width: 100%;
>     }
>
> Perhaps you have replaced it with a modified style sheet that does not
> include that? --[[Joey]] [[!tag done]]

>> The screen shot was made with http://ikiwiki.info/ where I didn't change anything. The width is optimally used. The problem is the height.

>>> You confused me by talking about rows...
>>> I don't know how to allow CSS to resize a textarea
>>> to the full browser height. The obvious `height: 75%;`
>>> does not work, at least in firefox and epiphany.
>>>
>>> Ah, of course, if it did work, it'd make it be 75% of
>>> the full *page* height, and not the browser window height.
>>>
>>> According to
>>> [this page](http://stackoverflow.com/questions/632983/css-height-if-textarea-as-a-percentage-of-the-viewport-height):
>>>>> 50% of what? Parent says ‘auto’, which means base it on the height of the child content. Which depends on the height on the parent. Argh! etc.
>>>>>
>>>>> So you have to give its parent a percentage height. And the parent's parent, all the way up to the root.
>>> So, other than a javascript-based resizer, some very tricky and invasive CSS
>>> seems to be needed. Please someone let me know if you succeed in doing that.
>>> --[[Joey]]

>>>>>> The javascript approach would need to work something like this: you need to know about the "bottom-most" item on the edit page, and get a handle for that object in the DOM. You can then obtain the absolute position height-wise of this element and the absolute position of the bottom of the window to determine the pixel-difference. Then, you set the height of the textarea to (current height in px) + determined-value. This needs to be re-triggered on various resize events, at least for the window and probably for other elements too. I may have a stab at this at some point. -- [[Jon]]
@ -0,0 +1,13 @@

On the edit form when you are creating a new page, you are given an option of
page types that can be used. The string presented to the user here is not
particularly friendly: e.g., mdwn, txtl... it would be nice if the drop-down
contents were "Markdown", "Textile", etc. (the values in the option tags can
remain the same).

I've written a first-take set of patches for this. They are in
git://github.com/jmtd/ikiwiki.git in the branch "friendly_markup_names". [[!tag patch]]

-- [[Jon]]

[[merged|done]], TFTP! (I have not checked whether any other format plugins
would benefit from a longer name.) --[[Joey]]
@ -5,9 +5,9 @@ Using the tag functionality I could group some news items for including them int

The tagged items should not differ from the items that are not tagged.
I didn't find any way to hide the tag list or links, and I don't want to have to create a "hidden" page containing links to the pages and then use the backlink functionality, because this is more prone to errors. It's easier to forget adding a link on a second page than to forget adding a needed tag to a new newsitem.

> I found out that, using the meta plugin, it is possible to create the hidden link that I wanted.

> I found out that, using the [[meta plugin|plugins/meta]], it is possible to create the hidden link that I wanted.

-- [[users/Enno]]

>> Yes, meta link will not show up as a visible link on the page, while

>> Yes, [[meta link|ikiwiki/directive/meta]] will not show up as a visible link on the page, while
>> also not showing up in the list of tags of a page, so it seems to be what you
>> want. [[done]] --[[Joey]]
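For reference, the hidden link described above can be written with the meta directive along these lines (the page name is illustrative):

    \[[!meta link="hidden_newsitem_group"]]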
@ -0,0 +1,20 @@

[[!tag wishlist]]

Continuing the ideas in [[bugs/Inline doesn't wikilink to pages]].

I thought of a use case for another feature: making [[ikiwiki/directive/inline]] inherit the link relations of the included pages (optionally, say, with `inheritlinks=yes`). For example, if I want to list `elements/*` that have been linked to in any of `new_stuff/*`, I could try to write a [[ikiwiki/pagespec]] like
`elements/* and backlink(new_stuff/*)`.

This is not yet possible, as discussed in [[todo/tracking_bugs_with_dependencies]].

It would be possible to work around this limitation of pagespecs if one could create a page `all_new_stuff` with `\[[!inline pages="new_stuff/*" inheritlinks=yes]]`: then the desired pagespec could be expressed as `elements/* and backlink(all_new_stuff)`.

> Or, instead of specifying whether to inherit at the place of the inline, add more relations (`inline`, `backinline`) and relation composition (say, `*`, or the haskell-ish `$` in order not to confuse it with the glob `*`), and explicitly write in the pagespecs that you want to follow the inline relation backwards: `elements/* and backlink$backinline(all_new_stuff)` or, equivalently, if [["classes"|todo/tracking_bugs_with_dependencies]] are implemented in pagespecs: `elements/* and backlink(backinline(all_new_stuff))`. Of course, this suggestion requires the powerful extension to pagespecs, but it gives more flexibility and avoids redundant information: the same pagespec in two places -- the inline and the other matching construction.
>
> BTW, adding more relations -- the `inline` relation among them -- would satisfy [[the other feature request|bugs/Inline doesn't wikilink to pages]]. --Ivan Z.

This is not just an ugly workaround. The availability of this feature has some justification: the classes of pages you want to refer to "recursively" (in that kind of complex pagespec) tend to have some meaning themselves. So I might indeed want to have a page like `all_new_stuff`; it would be useful for me. And at the same time I would like to write pagespecs like `elements/* and backlink(all_new_stuff)` -- using the feature proposed in [[todo/tracking_bugs_with_dependencies/]] would be less clean, because then I would have to enter the same information in two places: the possibly complex pagespec in the inline. And having redundant information leads to inconsistency.

So in a sense, in some or most cases, it would indeed be cleaner to "store" the definition of a class of pages referred to in complex pagespecs as a separate object. And the most natural representation for this definition of a class of pages (adhering to the wiki principle that what you mean is entered/stored in its most natural representation, not through some hidden disconnected code) is making a page with an inline/map/or the like, so that at the same time you store the definition and you see what it is (the set of pages is displayed to you).

I would actually use it in my current "project" in ikiwiki: I edit a set of materials as a set of subpages `new_stuff/*`, and I also want a combined view of all of them (made through inline), and on another page I want to list what has been linked to in `new_stuff/*` and what hasn't. --Ivan Z.
@ -0,0 +1,49 @@

This is a fleshed-out todo based on discussions at
[[forum/managing_todo_lists]].

I would like to have TODO lists inside ikiwiki wikis. This would mean:

* a new markup plugin to support a language suitable for TODO lists (OPML,
  XOXO are two possible candidates)
* some javascript to provide interactive editing.

As [[chrysn]] pointed out on the forum page, this has some crossover with
[[structured page data]]. In particular, if the markup language chosen had a
concept of invalid markup (existing plugins just tend to ignore stuff that
isn't explicitly part of their markup), we would need to handle that sensibly.
Perhaps rejecting web edits and providing context help on why the edit was
rejected, although that sounds like a significant headache.

I have started working on this, albeit slowly. A proof of concept is at
<http://dev.jmtd.net/outliner/>.

There are two git repositories associated with my WIP: one contains the
javascript, the plugin, and the changes made to page templates; the other contains
the contents of that wiki-site (the test todos and the contents of bugs/,
which forms a sort-of todo list for the todo list :) ). I will endeavour to get
mirrors of those repos up on github or similar asap.

-- [[Jon]]

----

Just to report, the WIP plugin for this is now in a reasonably good state. I ended
up just inventing a new markup language -- for now, items are divided by newlines
and lists are one-dimensional, for simplicity. I got fed up thinking about how to
handle the structured data issues / needing a lot of boilerplate around items and
the implications for the "new item" dialogue.
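A minimal sketch of how a plugin for the markup just described could hook in (the hook id and the exact rendering are assumptions; real item editing would need the javascript side too):

    # hedged sketch: render one item per line as a flat list
    hook(type => "htmlize", id => "todolist", call => sub {
        my %params=@_;
        my $out="<ul class=\"todolist\">\n";
        foreach my $item (split(/\n/, $params{content})) {
            next unless length $item;
            # escape HTML metacharacters in the item text
            $item=~s/&/&amp;/g; $item=~s/</&lt;/g; $item=~s/>/&gt;/g;
            $out.="<li>$item</li>\n";
        }
        return $out."</ul>\n";
    });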
Still quite a lot to do, though!

-- [[Jon]]

I've pushed a copy of the work in progress, which consists of

* a change to page.tmpl
* a javascript underlay directory + javascript file
* a few CSS bits in a local.css
* a plugin

to <http://github.com/jmtd/ikiwiki_todolist/>

-- [[Jon]]
@ -0,0 +1,9 @@

[[!tag wishlist]]

As noted in [[todo/tag_pagespec_function]], there is a "misbehavior" of the `tagged()` pagespec: it matches even pages which merely have plain links to the tag page.

And in general, it would be quite useful to be able to distinguish different kinds of links: one more kind, in addition to "tag", is the "bug dependency" noted in [[todo/structured_page_data#another_kind_of_links]] and [[todo/tracking_bugs_with_dependencies#another_kind_of_links]].

The kinds of links could be distinguished by the `rel=` attribute. ([[Tags already receive a special rel-class|todo/rel_attribute_for_links]].) This means there is a general need for a syntax to specify user-defined rel-classes on a wikilink (bug deps would then simply use their special rel-class, either directly or through a special directive like `\[[!depends ]]`), and for a way to refer to them in pagespecs (in the forward and backward directions).

Besides pagespecs, the `rel=` attribute could be used for styles. --Ivan Z.
@ -0,0 +1,14 @@

Using the [[plugins/inline]] plugin, you can get an inline postform for
creating new pages.

It would be quite nice to have the flexibility to do this outside of the
inline directive.

I've got a proof-of-concept hacked inline comment submission example at
<http://dev.jmtd.net/comments/>, for example. I've just copied the HTML from
the post form and stuck it inside a [[plugins/toggle]].

(Before Simon completed the comments plugin, I thought this would be a
logical first step towards doing comment-like things with inlined pages.)

-- [[Jon]]
@ -90,8 +90,12 @@ diff -urNX ignorepats ikiwiki/IkiWiki/Plugin/relative.pm ikidev/IkiWiki/Plugin/r

[[!tag patch]]

> This looks really interesting. It reminds me of XPath and its conditionals.

> This looks really interesting. It reminds me of [[!wikipedia XPath]] and its conditionals.
> Those might actually work well adapted to pagespecs. For instance, to write
> "match any page with a child blah", you could just write *[blah], or if you
> don't want to use relative-by-default in the conditionals, *[./blah].
> -- [[JoshTriplett]]

> And it [[!taglink also_reminds_me|pagespec_in_DL_style]] of [[!wikipedia description logics]]: of course, given the relation `subpage`, one could write a description-logic-style formula which would define the class of pages that are ("existentially") in a given relation (`subpage` or `inverse(subpage)*subpage`) to a certain other class of pages (e.g., named "blah") ("existentially" means there must exist a page, e.g., named "blah", which is in the given relation to the candidate).

> Probably the model behind XPath is similar (although I don't know enough to say this definitely). --Ivan Z.
@ -82,6 +82,10 @@ See also:

> rather than all pages linked from a given page.
>
> The first use case is handled by having a template in the page creation. You could
> have some type of form to edit the data, but that's just sugar on top of the template.
> If you were going to have a web form to edit the data, I can imagine a few ways to do it:
>
@ -243,8 +247,9 @@ in a large number of other cases.

> would match `data_eq(Depends on,6)`, `data_link(Depends on,bugs/bugA)`, `data_link(Depends on,bugs/bugB)`
> or, if you applied the patch in [[todo/tracking_bugs_with_dependencies]], then you can use 'defined pagespecs'
> such as `data_link(Depends on,~openBugs)`. The ability to label links like this allows separation of
> such as `data_link(Depends on,~openBugs)`. <a id="another_kind_of_links" />The ability to label links like this allows separation of
> dependencies between bugs from arbitrary links.

>> This issue (the need for distinguished kinds of links) has also been brought up in other discussions: [[tracking_bugs_with_dependencies#another_kind_of_links]] (deps vs. links) and [[tag_pagespec_function]] (tags vs. links). --Ivan Z.
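To make the shape of this concrete, a hypothetical pagespec function for such labelled links might look roughly like this; the `data_link` name comes from the discussion above, and the page-state layout is purely an assumption:

    package IkiWiki::PageSpec;

    # hedged sketch: match pages whose stored data links include
    # the given target under the given label
    sub match_data_link ($$;@) {
        my $page=shift;
        my ($label, $target)=split(/,/, shift, 2);
        my @links=@{$IkiWiki::pagestate{$page}{data}{links}{$label} || []};
        if (grep { $_ eq $target } @links) {
            return IkiWiki::SuccessReason->new("$page has a '$label' link to $target");
        }
        return IkiWiki::FailReason->new("$page has no '$label' link to $target");
    }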

----

@ -12,7 +12,14 @@ match tagged pages independent of whatever the tagbase is set to.

>
> The only problem I see is it could be confusing if `tag(foo)` matched
> a page that just linked to the tag via a wikilink, w/o actually tagging it.
>

>> (My [[!taglink wishlist]].) Yes, this is confusing and not nice. I observed this misbehavior when I wanted to match two different lists of pages (only tagged, or linked in any way), but it didn't work. Would this feature require a complex patch? --Ivan Z.

>>> If you link to a page 'foo' which happens to be a tag, then the page you link from will turn up in the set of pages returned by tagged(foo). The only way to avoid this would be for the tag plugin to not use wikilinks as an implementation method. That itself would not be too hard to do, but there might be people relying on the older behaviour. A better alternative might be to have a "tag2" plugin (or a better name) which implements tagging entirely separately. -- [[Jon]]

>>>> I see; at least, your response is encouraging (that it's not hard). I could even find some work that offers similar features: [[structured page data#another_kind_of_links]] -- they envisage a pagespec like `data_link(Depends on,bugs/bugA)`, thus a "separation of dependencies between bugs from arbitrary links".

>>>> Indeed, having many relations that can be used in the formulas defining classes of objects (like pagespecs here) is a commonly imagined thing, so this would be a nice feature. (I'll be trying out the patches there first, probably.) In general, extending the language of pagespecs to something more powerful (like [[!wikipedia description logics]]) seems to be a nice possible feature. I saw more discussion of ideas [[!taglink about_the_extension_of_the_pagespec_language_in_the_direction_similar_to_description_logics|pagespec_in_DL_style]] elsewhere here. --Ivan Z.

> One other thing, perhaps it should be called `tagged()`? --[[Joey]]

[[!tag patch done]]
@ -1,4 +1,4 @@

Stuff still needing to be done with tags:

Stuff still needing to be done with [[/tags]]:

* It's unfortunate that the rss category (tag) support doesn't include
  a domain="" attribute in the category elements. That would let readers
@ -11,6 +11,8 @@ I like the idea of [[tips/integrated_issue_tracking_with_ikiwiki]], and I do so

>
>> I thought about this briefly, and got about that far.. glad you got
>> further. :-) --[[Joey]]

>> Or, one [[!taglink could_also_refer|pagespec_in_DL_style]] to the language of [[!wikipedia description logics]]: their formulas actually define classes of objects through quantified relations to other classes. --Ivan Z.

>
> Another option would be to go with a more functional syntax. The concept here would
> be to allow a pagespec to appear in a 'pagespec function' anywhere a page can. e.g.

@ -58,6 +60,7 @@ I like the idea of [[tips/integrated_issue_tracking_with_ikiwiki]], and I do so

>> So, equivalent example: `define(bugs, bugs/* and !*/Discussion) and define(openbugs, bugs and !link(done)) and openbugs and !link(openbugs)`
>>

>> Re recursion, it is avoided.. but building a pagespec that is O(N^X), where N is the
>> number of pages in the wiki, is not avoided. Probably need to add DOS prevention.
>> --[[Joey]]
@ -67,13 +70,15 @@ I like the idea of [[tips/integrated_issue_tracking_with_ikiwiki]], and I do so

>>>> Yeah, guess that'd work. :-)

> One quick further thought. All the above discussion assumes that 'dependency' is the
> <a id="another_kind_of_links" />One quick further thought. All the above discussion assumes that 'dependency' is the
> same as 'links to', which is not really true. For example, you'd like to be able to say
> "This bug does not depend upon [ [ link to other bug ] ]" and not have a dependency.
> Without having different types of links, I don't see how this would be possible.
>
> -- [[Will]]

>> I saw that this issue is targeted by the work on [[structured page data#another_kind_of_links]]. --Ivan Z.

Okie - I've had a quick attempt at this. Initial patch attached. This one doesn't quite work,
and there is still a lot of debugging stuff in there.
|
|||
I've lost track of the indent level, so I'm going back to not indented - I think this is a working [[patch]] taking into
|
||||
account all comments above (which doesn't mean it is above reproach :) ). --[[Will]]
|
||||
|
||||
> Very belated code review of last version of the patch:
|
||||
>
|
||||
> * `is_globlist` is no longer needed
|
||||
> * I don't understand why the pagespec match regexp is changed
|
||||
> from having flags `igx` to `ixgs`. Don't see why you
|
||||
> want `.` to match '\n` in it, and don't see any `.` in the regexp
|
||||
> anyway?
|
||||
> * Some changes of `@_` to `%params` in `pagespec_makeperl` do not
|
||||
> make sense to me. I don't see where \%params is defined and populated,
|
||||
> except with `\$params{specFunc}`.
|
||||
> * Seems that the only reason `match_glob` has to check for `~` is
|
||||
> because when a named spec appears in a pagespec, it is translated
|
||||
> to `match_glob("~foo")`. If, instead, `pagespec_makeperl` checked
|
||||
> for named specs, it could convert them into `check_named_spec("foo")`
|
||||
> and avoid that ugliness.
|
||||
> * The changes to `match_link` seem either unecessary, or incomplete.
|
||||
> Shouldn't it check for named specs and call
|
||||
> `check_named_spec_existential`?
|
||||
> * Generally, the need to modify `match_*` functions so that they
|
||||
> check for and handle named pagespecs seems suboptimal, if
|
||||
> only because there might be others people may want to use named
|
||||
> pagespecs with. It would be possible to move this check
|
||||
> to `pagespec_makeperl`, by having it check if the parameter
|
||||
> passed to a pagespec function looked like a named pagespec.
|
||||
> The only issue is that some pagespec functions take a parameter
|
||||
> that is not a page name at all, and it could be weird
|
||||
> if such a parameter were accidentially interpreted as a named
|
||||
> pagespec. (But, that seems unlikely to happen.)
|
||||
> * I need to check if your trick to avoid infinite recursion
|
||||
> works if there are two named specs that recursively
|
||||
> call one-another. I suspect it does, but will test this
|
||||
> myself..
|
||||
>
|
||||
> --[[Joey]]
|
||||
|
||||
----
|
||||
|
||||
diff --git a/IkiWiki.pm b/IkiWiki.pm
|
||||
|
|
|
@ -1,9 +1,9 @@

If you want to translate your wiki into another language, there are
essentailly three peices needed for a complete translation:
essentially three pieces needed for a complete translation:

1. The messages in the ikiwiki program itself need to be translated.
   Ikiwiki is internationalised, and most such messages are already marked
   with `gettext()`. THe source tarball includes a creates a `po/ikiwiki.pot`
   with `gettext()`. The source tarball includes a `po/ikiwiki.pot`
   that can be copied and translated as a po file. All very standard.

   Note that a few things in the source are not currently translated. These

@ -27,6 +27,6 @@ essentailly three peices needed for a complete translation:

   [[todo/l10n]], but until that's complete, you'd need to copy and
   translate the templates by hand.

1. The [[basewiki]] itself needs to be translated. Whether to only translate
   the page contents, or also translate the page names, is an open
   question.
1. The [[basewiki]] itself needs to be translated. The
   [[plugins/contrib/po]] ikiwiki plugin will allow translating
   wikis using po files and can be used for this.
@ -0,0 +1 @@

<http://bosboot.org>
@ -47,7 +47,7 @@ welcome.

  headers at top of a file (e.g., `title: Page Title` or
  `date: November 2, 2008 11:14 EST`).

* [pandoc][] - Markdown page processing via Pandoc. LaTeX and
* [pandoc][] - Markdown page processing via [Pandoc](http://johnmacfarlane.net/pandoc/) (a Haskell library for converting from one markup format to another). LaTeX and
  reStructuredText are optional.

* [path][] - Provides path-specific template conditionals such as
@ -16,5 +16,12 @@ of software for this task.

* slides at <http://www.staff.ncl.ac.uk/jon.dowland/unix/docs/>.

I am also working on some ikiwiki hacks: an alternative approach to
[[plugins/comments]]; a system for [[forum/managing_todo_lists]].
I am also working on some ikiwiki hacks:

* an alternative approach to [[plugins/comments]] (see
  [[todo/more flexible inline postform]] for one piece of the puzzle;
  <http://dev.jmtd.net/comments/> for some investigation into making the post
  form more integrated)
* a system for [[forum/managing_todo_lists]] (see also
  [[todo/interactive todo lists]] and <http://dev.jmtd.net/outliner/> for the
  current WIP).
@ -4,14 +4,8 @@

<tschwinge@gnu.org>
<http://www.thomas.schwinge.homeip.net/>

With respect to *[[ikiwiki]]* I'm currently working on...

* setting it up for the [GNU Hurd's web pages](http://www.gnu.org/software/hurd/);
* setting it up as a replacement for the GNU Hurd's previous wiki (TWiki): <http://bddebian.com/~wiki/>;

... and all that while trying to preserve the previous content's history,
which is stored in a CVS repository for the web pages and a RCS repository
for the wiki.

Read [About the TWiki to ikiwiki conversion](http://www.bddebian.com/~wiki/about_the_twiki_to_ikiwiki_conversion/).
I have converted the [GNU Hurd](http://www.gnu.org/software/hurd/)'s previous
web pages and previous wiki pages to a *[[ikiwiki]]* system; and all that while
preserving the previous content's history, which was stored in a CVS repository
for the HTML web pages and a TWiki RCS repository for the wiki; see
<http://www.gnu.org/software/hurd/colophon.html>.
@ -0,0 +1,46 @@

## Place for local templates

Where does one put any locally modified templates for an individual ikiwiki? --Ivan Z.

> You can put them wherever you like; the `templatedir` controls
> where ikiwiki looks for them. --[[Joey]]

Thank you for your response! My question arose out of my intention to make
custom templates for a wiki -- specifically suited for the kind of content
it will have -- so I would want to distribute them through
git together with the other content of the wiki. So, for this case, the
separation of conceptually ONE thing (the content, the templates, and the
config option which says to use these templates) into THREE separate
files/repos (the main content repo, the repo with templates, and the config
file) is not convenient: instead of distributing a single repo, I have to
tell people to take three things if they want to replicate this wiki. How
would you solve this inconvenience? Perhaps a default location of the
templates *inside* the source repo would do? --Ivan Z.

> I would avoid putting the templates in a subdirectory of the ikiwiki srcdir.
> (I'd also avoid putting the ikiwiki setup file there.)
> While it's safe to do either in some cases, there are configurations where
> it's unsafe. For example, a malicious user could use attachment handling to
> replace those files with their own, bad versions.
>
> So, two ideas for where to put the templatedir and ikiwiki setup:
>
> * The easiest option is to put your wiki content in a subdirectory
>   ("wiki", say) and point `srcdir` at that.
>   Then you can have another subdirectory for the wikitemplates,
>   and put the setup file at the top.
> * Another option, if using git, would be to have a separate branch,
>   in the same git repository, that holds wikitemplates and the setup file.
>   Then you check out the repository once to make the `srcdir` available,
>   and have a second checkout, of the other branch, to make the other stuff
>   available.
>
> Note that with either of these methods, you have to watch out if
> giving others direct commit access to the repository. They could
> still edit the setup file and templates, so only trusted users should
> be given access. (It is, however, perfectly safe to let people edit
> the wiki via the web, and it is even safe to configure
> [[tips/untrusted_git_push]] to such a repository.) --[[Joey]]

Thanks, that's a nice and simple idea: to have a subdirectory! I'll try it. --Ivan Z.
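A concrete reading of the first option, as a setup-file fragment (all paths here are illustrative):

    use IkiWiki::Setup::Standard {
        # content lives one level down, so web attachments cannot
        # overwrite the templates or this setup file
        srcdir => "/home/user/wikirepo/wiki",
        templatedir => "/home/user/wikirepo/templates",
        destdir => "/home/user/public_html/wiki",
    }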
A [[!taglink wish|wishlist]]: the ikiwiki program could be improved so that it follows the same logic as git in looking for its config: it could ascend directories until it finds an `.ikiwiki/` directory with `.ikiwiki/setup`, and then use that configuration. Right now I'm tired of always typing `ikiwiki --setup path/to/the/setup --refresh` when working in my working clone of the sources; I'd like to simply type `ikiwiki` instead, and let it find the setup file. The default location to look for templates could also be made a sibling of the setup file: `.ikiwiki/templates/`. --Ivan Z.
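A minimal sketch of the wished-for lookup (the function name and behaviour are assumptions, mirroring how git searches upward for `.git`):

    use Cwd q{getcwd};

    # hedged sketch: ascend from the current directory until an
    # .ikiwiki/setup file is found, or give up at the filesystem root
    sub find_setup {
        my $dir=getcwd();
        while (1) {
            return "$dir/.ikiwiki/setup" if -e "$dir/.ikiwiki/setup";
            return undef if $dir eq "/";
            $dir=~s{/[^/]*\z}{};
            $dir="/" unless length $dir;
        }
    }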
@ -15,7 +15,5 @@ use IkiWiki::Setup::Standard {

    userdir => "users",
    usedirs => 0,
    prefix_directives => 1,
    cgiurl => "http://me",
    url => "http://me",
    add_plugins => [qw{goodstuff version haiku polygen fortune}],
    }
@ -29,7 +29,7 @@ if [ "$rcs" != mercurial ] && [ "$rcs" != bzr ]; then

    echo "repository $repository already exists, aborting" >&2
    exit 1
    fi
    repository="$(perl -e 'use Cwd q{abs_path}; $r=shift; $r=~s/\/*$//; print abs_path($r)' "$repository")"
    repository="$(perl -e 'use Cwd; $r=shift; $r=getcwd.q{/}.$r if $r!~m!^/!; print $r' "$repository")"
    if [ -z "$repository" ]; then
    echo "internal error finding repository abs_path" >&2
    exit 1
@ -73,11 +73,7 @@ sub prefix_directives {

    }

    sub indexdb {
        my $dir=shift;
        if (! defined $dir) {
            usage();
        }
        $config{wikistatedir}=$dir."/.ikiwiki";
        setstatedir(shift);

        # Note: No lockwiki here because ikiwiki already locks it
        # before calling this.
@ -96,11 +92,7 @@ sub indexdb {

    }

    sub hashpassword {
        my $dir=shift;
        if (! defined $dir) {
            usage();
        }
        $config{wikistatedir}=$dir."/.ikiwiki";
        setstatedir(shift);

        eval q{use IkiWiki::UserInfo};
        eval q{use Authen::Passphrase::BlowfishCrypt};
@ -220,6 +212,35 @@ sub moveprefs {

        IkiWiki::Setup::dump($setup);
    }

    sub deduplinks {
        setstatedir(shift);
        IkiWiki::loadindex();
        foreach my $page (keys %links) {
            # rebuild each page's link list without duplicates
            my %l;
            $l{$_}=1 foreach @{$links{$page}};
            $links{$page}=[keys %l];
        }
        IkiWiki::saveindex();
    }

    sub setstatedir {
        my $dir=shift;

        if (! defined $dir) {
            usage();
        }

        if (! -d $dir) {
            error("ikiwiki-transition: $dir does not exist");
        }

        $config{wikistatedir}=$dir."/.ikiwiki";

        if (! -d $config{wikistatedir}) {
            error("ikiwiki-transition: $config{wikistatedir} does not exist");
        }
    }

    sub usage {
        print STDERR "Usage: ikiwiki-transition type ...\n";
        print STDERR "Currently supported transition subcommands:\n";
@ -229,6 +250,7 @@ sub usage {

        print STDERR "\tmoveprefs setupfile\n";
        print STDERR "\thashpassword srcdir\n";
        print STDERR "\tindexdb srcdir\n";
        print STDERR "\tdeduplinks srcdir\n";
        exit 1;
    }
@ -253,6 +275,9 @@ elsif ($mode eq 'setupformat') {

    elsif ($mode eq 'moveprefs') {
        moveprefs(@ARGV);
    }
    elsif ($mode eq 'deduplinks') {
        deduplinks(@ARGV);
    }
    else {
        usage();
    }
61 po/bg.po
@ -7,7 +7,7 @@ msgid ""

    msgstr ""
    "Project-Id-Version: ikiwiki-bg\n"
    "Report-Msgid-Bugs-To: \n"
    "POT-Creation-Date: 2009-04-04 14:59-0400\n"
    "POT-Creation-Date: 2009-05-06 12:58-0400\n"
    "PO-Revision-Date: 2007-01-12 01:19+0200\n"
    "Last-Translator: Damyan Ivanov <dam@modsodtsys.com>\n"
    "Language-Team: Bulgarian <dict@fsa-bg.org>\n"
@ -181,25 +181,25 @@ msgstr ""

    msgid "automatic index generation"
    msgstr ""

    #: ../IkiWiki/Plugin/blogspam.pm:105
    #: ../IkiWiki/Plugin/blogspam.pm:108
    msgid ""
    "Sorry, but that looks like spam to <a href=\"http://blogspam.net/"
    "\">blogspam</a>: "
    msgstr ""

    #: ../IkiWiki/Plugin/brokenlinks.pm:33 ../IkiWiki/Plugin/editpage.pm:233
    #: ../IkiWiki/Plugin/inline.pm:361 ../IkiWiki/Plugin/opendiscussion.pm:26
    #: ../IkiWiki/Plugin/inline.pm:357 ../IkiWiki/Plugin/opendiscussion.pm:26
    #: ../IkiWiki/Plugin/orphans.pm:37 ../IkiWiki/Render.pm:79
    #: ../IkiWiki/Render.pm:149
    msgid "discussion"
    msgstr "дискусия"

    #: ../IkiWiki/Plugin/brokenlinks.pm:49
    #: ../IkiWiki/Plugin/brokenlinks.pm:48
    #, perl-format
    msgid "%s from %s"
    msgstr ""

    #: ../IkiWiki/Plugin/brokenlinks.pm:56
    #: ../IkiWiki/Plugin/brokenlinks.pm:55
    msgid "There are no broken links!"
    msgstr "Няма „счупени” връзки!"
@ -216,55 +216,55 @@ msgstr ""

    msgid "Anonymous"
    msgstr ""

    #: ../IkiWiki/Plugin/comments.pm:326 ../IkiWiki/Plugin/editpage.pm:97
    #: ../IkiWiki/Plugin/comments.pm:333 ../IkiWiki/Plugin/editpage.pm:97
    msgid "bad page name"
    msgstr ""

    #: ../IkiWiki/Plugin/comments.pm:331
    #: ../IkiWiki/Plugin/comments.pm:338
    #, fuzzy, perl-format
    msgid "commenting on %s"
    msgstr "създаване на %s"

    #: ../IkiWiki/Plugin/comments.pm:349
    #: ../IkiWiki/Plugin/comments.pm:356
    #, perl-format
    msgid "page '%s' doesn't exist, so you can't comment"
    msgstr ""

    #: ../IkiWiki/Plugin/comments.pm:356
    #: ../IkiWiki/Plugin/comments.pm:363
    #, perl-format
    msgid "comments on page '%s' are closed"
    msgstr ""

    #: ../IkiWiki/Plugin/comments.pm:450
    #: ../IkiWiki/Plugin/comments.pm:457
    msgid "comment stored for moderation"
    msgstr ""

    #: ../IkiWiki/Plugin/comments.pm:452
    #: ../IkiWiki/Plugin/comments.pm:459
    msgid "Your comment will be posted after moderator review"
    msgstr ""

    #: ../IkiWiki/Plugin/comments.pm:465
    #: ../IkiWiki/Plugin/comments.pm:472
    msgid "Added a comment"
    msgstr ""

    #: ../IkiWiki/Plugin/comments.pm:469
    #: ../IkiWiki/Plugin/comments.pm:476
    #, perl-format
    msgid "Added a comment: %s"
    msgstr ""

    #: ../IkiWiki/Plugin/comments.pm:511 ../IkiWiki/Plugin/websetup.pm:236
    #: ../IkiWiki/Plugin/comments.pm:518 ../IkiWiki/Plugin/websetup.pm:236
    msgid "you are not logged in as an admin"
    msgstr ""

    #: ../IkiWiki/Plugin/comments.pm:562
    #: ../IkiWiki/Plugin/comments.pm:569
    msgid "Comment moderation"
    msgstr ""

    #: ../IkiWiki/Plugin/comments.pm:601
    #: ../IkiWiki/Plugin/comments.pm:608
    msgid "comment moderation"
    msgstr ""

    #: ../IkiWiki/Plugin/comments.pm:752
    #: ../IkiWiki/Plugin/comments.pm:759
    msgid "Comments"
    msgstr ""
@ -419,29 +419,29 @@ msgstr "шаблонът „%s” не е намерен"

    msgid "missing pages parameter"
    msgstr "липсващ параметър „id” на шаблона"

    #: ../IkiWiki/Plugin/inline.pm:200
    #: ../IkiWiki/Plugin/inline.pm:196
    msgid "Sort::Naturally needed for title_natural sort"
    msgstr ""

    #: ../IkiWiki/Plugin/inline.pm:211
    #: ../IkiWiki/Plugin/inline.pm:207
    #, perl-format
    msgid "unknown sort type %s"
    msgstr "непознат вид сортиране „%s”"

    #: ../IkiWiki/Plugin/inline.pm:314
    #: ../IkiWiki/Plugin/inline.pm:310
    msgid "Add a new post titled:"
    msgstr ""

    #: ../IkiWiki/Plugin/inline.pm:334
    #: ../IkiWiki/Plugin/inline.pm:330
    #, perl-format
    msgid "nonexistant template %s"
    msgstr ""

    #: ../IkiWiki/Plugin/inline.pm:369 ../IkiWiki/Render.pm:83
    #: ../IkiWiki/Plugin/inline.pm:365 ../IkiWiki/Render.pm:83
    msgid "Discussion"
    msgstr "Дискусия"

    #: ../IkiWiki/Plugin/inline.pm:600
    #: ../IkiWiki/Plugin/inline.pm:596
    msgid "RPC::XML::Client not found, not pinging"
    msgstr "модулът „RPC::XML::Client” не е намерен; източникът не е проверен"
@ -506,7 +506,7 @@ msgstr ""

    msgid "Get an OpenID"
    msgstr "Получаване на OpenID номер"

    #: ../IkiWiki/Plugin/orphans.pm:51
    #: ../IkiWiki/Plugin/orphans.pm:52
    msgid "All pages are linked to by other pages."
    msgstr "Всички страници имат връзки от други страници."
@ -903,9 +903,9 @@ msgid ""

    "to rebuild the wiki."
    msgstr ""

    #: ../IkiWiki/Plugin/websetup.pm:433
    #: ../IkiWiki/Plugin/websetup.pm:436
    #, perl-format
    msgid "<p class=\"error\">Error: %s exited nonzero (%s)"
    msgid "Error: %s exited nonzero (%s). Discarding setup changes."
    msgstr ""

    #: ../IkiWiki/Receive.pm:35
@ -986,12 +986,12 @@ msgstr "грешка при четене на „%s”: %s"

    msgid "you must enter a wikiname (that contains alphanumerics)"
    msgstr ""

    #: ../IkiWiki/Setup/Automator.pm:68
    #: ../IkiWiki/Setup/Automator.pm:71
    #, perl-format
    msgid "unsupported revision control system %s"
    msgstr ""

    #: ../IkiWiki/Setup/Automator.pm:94
    #: ../IkiWiki/Setup/Automator.pm:97
    msgid "failed to set up the repository with ikiwiki-makerepo"
    msgstr ""
@ -1068,6 +1068,11 @@ msgstr "открита е циклична завидимост при %s на

    msgid "yes"
    msgstr ""

    #: ../IkiWiki.pm:1860
    #, fuzzy, perl-format
    msgid "cannot match pages: %s"
    msgstr "грешка при четене на „%s”: %s"

    #: ../auto.setup:16
    msgid "What will the wiki be named?"
    msgstr ""
Some files were not shown because too many files have changed in this diff.