Merge branch 'master' into cvs

master
Amitai Schlair 2012-01-30 19:56:51 -06:00
commit 8e752d0976
31 changed files with 444 additions and 71 deletions

1
.gitignore vendored
View File

@ -19,4 +19,3 @@ po/underlays/directives/ikiwiki/directive/*.mdwn
po/underlays_copy_stamp
underlays/locale
/t/tmp/
doc/bugs/Pages_with_non-ascii_characters_like_*

View File

@ -28,7 +28,7 @@ CGI::Session
Mail::Sendmail
CGI
Data::Dumper
YAML
YAML::XS
JSON
RPC::XML

View File

@ -20,7 +20,7 @@ use Exporter q{import};
our @EXPORT = qw(hook debug error htmlpage template template_depends
deptype add_depends pagespec_match pagespec_match_list bestlink
htmllink readfile writefile pagetype srcfile pagename
displaytime will_render gettext ngettext urlto targetpage
displaytime strftime_utf8 will_render gettext ngettext urlto targetpage
add_underlay pagetitle titlepage linkpage newpagefile
inject add_link add_autofile
%config %links %pagestate %wikistate %renderedfiles
@ -1148,9 +1148,19 @@ sub formattime ($;$) {
$format=$config{timeformat};
}
return strftime_utf8($format, localtime($time));
}
my $strftime_encoding;
sub strftime_utf8 {
# strftime doesn't know about encodings, so make sure
# its output is properly treated as utf8
return decode_utf8(POSIX::strftime($format, localtime($time)));
# its output is properly treated as utf8.
# Note that this does not handle utf-8 in the format string.
$strftime_encoding = POSIX::setlocale(&POSIX::LC_TIME) =~ m#\.([^@]+)#
unless defined $strftime_encoding;
$strftime_encoding
? Encode::decode($strftime_encoding, POSIX::strftime(@_))
: POSIX::strftime(@_);
}
sub date_3339 ($) {

View File

@ -22,7 +22,6 @@ use warnings;
use strict;
use IkiWiki 3.00;
use Time::Local;
use POSIX ();
my $time=time;
my @now=localtime($time);
@ -123,10 +122,10 @@ sub format_month (@) {
}
# Find out month names for this, next, and previous months
my $monthabbrev=POSIX::strftime("%b", @monthstart);
my $monthname=POSIX::strftime("%B", @monthstart);
my $pmonthname=POSIX::strftime("%B", localtime(timelocal(0,0,0,1,$pmonth-1,$pyear-1900)));
my $nmonthname=POSIX::strftime("%B", localtime(timelocal(0,0,0,1,$nmonth-1,$nyear-1900)));
my $monthabbrev=strftime_utf8("%b", @monthstart);
my $monthname=strftime_utf8("%B", @monthstart);
my $pmonthname=strftime_utf8("%B", localtime(timelocal(0,0,0,1,$pmonth-1,$pyear-1900)));
my $nmonthname=strftime_utf8("%B", localtime(timelocal(0,0,0,1,$nmonth-1,$nyear-1900)));
my $archivebase = 'archives';
$archivebase = $config{archivebase} if defined $config{archivebase};
@ -182,7 +181,7 @@ EOF
my %dowabbr;
for my $dow ($week_start_day..$week_start_day+6) {
my @day=localtime(timelocal(0,0,0,$start_day++,$params{month}-1,$params{year}-1900));
my $downame = POSIX::strftime("%A", @day);
my $downame = strftime_utf8("%A", @day);
my $dowabbr = substr($downame, 0, 1);
$downame{$dow % 7}=$downame;
$dowabbr{$dow % 7}=$dowabbr;
@ -329,8 +328,8 @@ EOF
for (my $month = 1; $month <= 12; $month++) {
my @day=localtime(timelocal(0,0,0,15,$month-1,$params{year}-1900));
my $murl;
my $monthname = POSIX::strftime("%B", @day);
my $monthabbr = POSIX::strftime("%b", @day);
my $monthname = strftime_utf8("%B", @day);
my $monthabbr = strftime_utf8("%b", @day);
$calendar.=qq{\t<tr>\n} if ($month % $params{months_per_row} == 1);
my $tag;
my $mtag=sprintf("%02d", $month);

View File

@ -9,7 +9,6 @@ use warnings;
use strict;
use IkiWiki 3.00;
use Encode;
use POSIX qw(strftime);
use constant PREVIEW => "Preview";
use constant POST_COMMENT => "Post comment";
@ -460,7 +459,7 @@ sub editcomment ($$) {
}
$content .= " subject=\"$subject\"\n";
$content .= " date=\"" . decode_utf8(strftime('%Y-%m-%dT%H:%M:%SZ', gmtime)) . "\"\n";
$content .= " date=\"" . strftime_utf8('%Y-%m-%dT%H:%M:%SZ', gmtime) . "\"\n";
my $editcontent = $form->field('editcontent');
$editcontent="" if ! defined $editcontent;

View File

@ -118,10 +118,10 @@ sub IkiWiki::formattime ($;$) {
}
}
$t=~s{\%A-}{my @yest=@t; $yest[6]--; strftime("%A", \@yest)}eg;
$t=~s{\%A-}{my @yest=@t; $yest[6]--; strftime_utf8("%A", \@yest)}eg;
$format=~s/\%X/$t/g;
return strftime($format, \@t);
return strftime_utf8($format, \@t);
}
1

View File

@ -14,7 +14,10 @@ sub import {
sub gendump ($@) {
my $class=shift;
"#!/usr/bin/perl",
my $thisperl = eval q{use Config; $Config{perlpath}};
error($@) if $@;
"#!$thisperl",
"#",
(map { "# $_" } @_),
"use IkiWiki::Setup::Standard {",

View File

@ -11,10 +11,8 @@ sub loaddump ($$) {
my $class=shift;
my $content=shift;
eval q{use YAML::Any};
eval q{use YAML} if $@;
eval q{use YAML::XS};
die $@ if $@;
$YAML::Syck::ImplicitUnicode=1;
IkiWiki::Setup::merge(Load(encode_utf8($content)));
}
@ -35,12 +33,12 @@ sub dumpline ($$$$) {
my $type=shift;
my $prefix=shift;
eval q{use YAML::Old};
eval q{use YAML} if $@;
eval q{use YAML::XS};
die $@ if $@;
$YAML::UseHeader=0;
$YAML::XS::QuoteNumericStrings=0;
my $dump=Dump({$key => $value});
my $dump=decode_utf8(Dump({$key => $value}));
$dump=~s/^---\n//; # yaml header, we don't want
chomp $dump;
if (length $prefix) {
$dump=join("\n", map { $prefix.$_ } split(/\n/, $dump));

View File

@ -19,11 +19,14 @@ SED?=sed
# Additional configurable path variables.
W3M_CGI_BIN?=$(PREFIX)/lib/w3m/cgi-bin
SYSCONFDIR?=/etc/ikiwiki
MANDIR?=$(PREFIX)/share/man
tflag=$(shell if [ -n "$$NOTAINT" ] && [ "$$NOTAINT" != 1 ]; then printf -- "-T"; fi)
extramodules=$(shell if [ "$$PROFILE" = 1 ]; then printf -- "-d:NYTProf"; fi)
outprogs=ikiwiki.out ikiwiki-transition.out ikiwiki-calendar.out
scripts=ikiwiki-update-wikilist ikiwiki-makerepo
sysconfdir_scripts=ikiwiki-mass-rebuild ikiwiki-update-wikilist
PROBABLE_INST_LIB=$(shell \\
if [ "$(INSTALLDIRS)" = "perl" ]; then \\
@ -42,7 +45,7 @@ PROBABLE_INST_LIB=$(shell \\
ikiwiki.setup:
HOME=/home/me $(PERL) -Iblib/lib $(extramodules) $(tflag) ikiwiki.in -dumpsetup ikiwiki.setup
extra_build: $(outprogs) ikiwiki.setup docwiki
extra_build: $(outprogs) ikiwiki.setup docwiki sysconfdir
./mdwn2man ikiwiki 1 doc/usage.mdwn > ikiwiki.man
./mdwn2man ikiwiki-mass-rebuild 8 doc/ikiwiki-mass-rebuild.mdwn > ikiwiki-mass-rebuild.man
./mdwn2man ikiwiki-makerepo 1 doc/ikiwiki-makerepo.mdwn > ikiwiki-makerepo.man
@ -50,12 +53,15 @@ extra_build: $(outprogs) ikiwiki.setup docwiki
./mdwn2man ikiwiki-update-wikilist 1 doc/ikiwiki-update-wikilist.mdwn > ikiwiki-update-wikilist.man
./mdwn2man ikiwiki-calendar 1 doc/ikiwiki-calendar.mdwn > ikiwiki-calendar.man
$(MAKE) -C po
$(SED) -i.bkp "s/Version:.*/Version: $(VER)/" ikiwiki.spec
$(PERL) -pi.bkp -e "s/Version:.*/Version: $(VER)/" ikiwiki.spec
rm -f ikiwiki.spec.bkp
docwiki:
$(PERL) -Iblib/lib $(extramodules) $(tflag) ikiwiki.in -setup docwiki.setup -refresh
sysconfdir:
$(PERL) -pi -e "s|/etc/ikiwiki|$(SYSCONFDIR)|g" $(sysconfdir_scripts)
extra_clean:
$(PERL) -Iblib/lib $(extramodules) $(tflag) ikiwiki.in -setup docwiki.setup -clean
rm -f *.man $(outprogs) ikiwiki.setup plugins/*.pyc
@ -70,7 +76,7 @@ underlay_install:
for dir in `cd underlays && $(FIND) . -follow -type d`; do \
install -d $(DESTDIR)$(PREFIX)/share/ikiwiki/$$dir; \
for file in `$(FIND) underlays/$$dir -follow -maxdepth 1 -type f -not -name \\*.full.js -not -name \\*.full.css`; do \
cp -aL $$file $(DESTDIR)$(PREFIX)/share/ikiwiki/$$dir 2>/dev/null || \
cp -pRL $$file $(DESTDIR)$(PREFIX)/share/ikiwiki/$$dir 2>/dev/null || \
install -m 644 $$file $(DESTDIR)$(PREFIX)/share/ikiwiki/$$dir; \
done; \
done
@ -79,7 +85,7 @@ underlay_install:
install -d $(DESTDIR)$(PREFIX)/share/ikiwiki/directives/ikiwiki/directive
for file in doc/ikiwiki/directive/*; do \
if [ -f "$$file" ]; then \
cp -aL $$file $(DESTDIR)$(PREFIX)/share/ikiwiki/directives/ikiwiki/directive 2>/dev/null || \
cp -pRL $$file $(DESTDIR)$(PREFIX)/share/ikiwiki/directives/ikiwiki/directive 2>/dev/null || \
install -m 644 $$file $(DESTDIR)$(PREFIX)/share/ikiwiki/directives/ikiwiki/directive; \
fi \
done
@ -94,7 +100,7 @@ underlay_install:
elif echo "$$file" | grep -q base.css; then \
:; \
elif [ -f "$$file" ]; then \
cp -aL $$file $(DESTDIR)$(PREFIX)/share/ikiwiki/$$file 2>/dev/null || \
cp -pRL $$file $(DESTDIR)$(PREFIX)/share/ikiwiki/$$file 2>/dev/null || \
install -m 644 $$file $(DESTDIR)$(PREFIX)/share/ikiwiki/$$file; \
fi \
done; \
@ -106,7 +112,7 @@ extra_install: underlay_install
install -d $(DESTDIR)$(PREFIX)/share/ikiwiki/examples/$$dir; \
done
for file in `cd doc/examples; $(FIND) . -type f ! -regex '.*discussion.*'`; do \
cp -aL doc/examples/$$file $(DESTDIR)$(PREFIX)/share/ikiwiki/examples/$$file 2>/dev/null || \
cp -pRL doc/examples/$$file $(DESTDIR)$(PREFIX)/share/ikiwiki/examples/$$file 2>/dev/null || \
install -m 644 doc/examples/$$file $(DESTDIR)$(PREFIX)/share/ikiwiki/examples/$$file; \
done
@ -125,15 +131,15 @@ extra_install: underlay_install
install -m 755 $$file $(DESTDIR)$(PREFIX)/lib/ikiwiki/plugins; \
done
install -d $(DESTDIR)$(PREFIX)/share/man/man1
install -m 644 ikiwiki.man $(DESTDIR)$(PREFIX)/share/man/man1/ikiwiki.1
install -m 644 ikiwiki-makerepo.man $(DESTDIR)$(PREFIX)/share/man/man1/ikiwiki-makerepo.1
install -m 644 ikiwiki-transition.man $(DESTDIR)$(PREFIX)/share/man/man1/ikiwiki-transition.1
install -m 644 ikiwiki-update-wikilist.man $(DESTDIR)$(PREFIX)/share/man/man1/ikiwiki-update-wikilist.1
install -m 644 ikiwiki-calendar.man $(DESTDIR)$(PREFIX)/share/man/man1/ikiwiki-calendar.1
install -d $(DESTDIR)$(MANDIR)/man1
install -m 644 ikiwiki.man $(DESTDIR)$(MANDIR)/man1/ikiwiki.1
install -m 644 ikiwiki-makerepo.man $(DESTDIR)$(MANDIR)/man1/ikiwiki-makerepo.1
install -m 644 ikiwiki-transition.man $(DESTDIR)$(MANDIR)/man1/ikiwiki-transition.1
install -m 644 ikiwiki-update-wikilist.man $(DESTDIR)$(MANDIR)/man1/ikiwiki-update-wikilist.1
install -m 644 ikiwiki-calendar.man $(DESTDIR)$(MANDIR)/man1/ikiwiki-calendar.1
install -d $(DESTDIR)$(PREFIX)/share/man/man8
install -m 644 ikiwiki-mass-rebuild.man $(DESTDIR)$(PREFIX)/share/man/man8/ikiwiki-mass-rebuild.8
install -d $(DESTDIR)$(MANDIR)/man8
install -m 644 ikiwiki-mass-rebuild.man $(DESTDIR)$(MANDIR)/man8/ikiwiki-mass-rebuild.8
install -d $(DESTDIR)$(PREFIX)/sbin
install ikiwiki-mass-rebuild $(DESTDIR)$(PREFIX)/sbin
@ -150,10 +156,10 @@ extra_install: underlay_install
# These might fail if a regular user is installing into a home
# directory.
-install -d $(DESTDIR)/etc/ikiwiki
-install -m 0644 wikilist $(DESTDIR)/etc/ikiwiki
-install -m 0644 auto.setup $(DESTDIR)/etc/ikiwiki
-install -m 0644 auto-blog.setup $(DESTDIR)/etc/ikiwiki
-install -d $(DESTDIR)$(SYSCONFDIR)
-install -m 0644 wikilist $(DESTDIR)$(SYSCONFDIR)
-install -m 0644 auto.setup $(DESTDIR)$(SYSCONFDIR)
-install -m 0644 auto-blog.setup $(DESTDIR)$(SYSCONFDIR)
# The git/hg plugins want to chdir; so does Devel::Cover. Skip those tests
# to stop them hurting each other.

7
debian/changelog vendored
View File

@ -2,6 +2,13 @@ ikiwiki (3.20120116) UNRELEASED; urgency=low
* mdwn: Added nodiscount setting, which can be used to avoid using the
markdown discount engine, when maximum compatibility is needed.
* Switch to YAML::XS to work around insanity in YAML::Mo. Closes: #657533
* cvs: Ensure text files are added in non-binary mode. (Amitai Schlair)
* cvs: Various cleanups and testing. (Amitai Schlair)
* calendar, prettydate: Fix strftime encoding bug.
* shortcuts: Fixed a broken shortcut to wikipedia (accidentally
made into a shortcut to wikiMedia).
* Various portability improvements. (Amitai Schlair)
-- Joey Hess <joeyh@debian.org> Mon, 16 Jan 2012 13:41:14 -0400

4
debian/control vendored
View File

@ -7,7 +7,7 @@ Build-Depends-Indep: dpkg-dev (>= 1.9.0), libxml-simple-perl,
libtimedate-perl, libhtml-template-perl,
libhtml-scrubber-perl, wdg-html-validator,
libhtml-parser-perl, liburi-perl (>= 1.36), perlmagick, po4a (>= 0.34),
libfile-chdir-perl, libyaml-perl, python-support
libfile-chdir-perl, libyaml-libyaml-perl, python-support
Maintainer: Joey Hess <joeyh@debian.org>
Uploaders: Josh Triplett <josh@freedesktop.org>
Standards-Version: 3.9.2
@ -19,7 +19,7 @@ Architecture: all
Depends: ${misc:Depends}, ${perl:Depends}, ${python:Depends},
libtext-markdown-discount-perl,
libhtml-scrubber-perl, libhtml-template-perl,
libhtml-parser-perl, liburi-perl (>= 1.36), libyaml-perl, libjson-perl
libhtml-parser-perl, liburi-perl (>= 1.36), libyaml-libyaml-perl, libjson-perl
Recommends: gcc | c-compiler,
libc6-dev | libc-dev,
git (>= 1:1.7) | git-core (>= 1:1.5.0) | subversion | tla | bzr (>= 0.91) | mercurial | monotone (>= 0.38) | darcs,

View File

@ -0,0 +1,73 @@
Hello,
I studied this [[guy's problem|forum/Encoding_problem_in_french_with_ikiwiki-calendar]] and I propose here a (dirty) hack to correct it.
Bug summary: when using the [[calendar plugin|plugins/calendar]] in French (`LANG=fr_FR.UTF-8`), "Décembre" (French for "December") is rendered as "DÃ©cembre".
I managed to track this problem down to an encoding problem of `POSIX::strftime` in `Ikiwiki/Plugin/calendar.pm`. I used [[this guy's solution|http://www.perlmonks.org/?node_id=857018]] to solve the problem (the diff is printed below).
The problem is that I do not know Perl, encoding is one of the thing I would be happy not to dive into, and it is the first time I contribute to Ikiwiki: I copied and made a few changes to the code I found without understanding it. So I am not sure that my code is neat, or works in every situation. Feel free to (help me to) improve it!
Cheers,
Louis
> Yes, this seems basically right. I've applied a modified version of this.
> [[done]]
> --[[Joey]]
diff --git a/IkiWiki/Plugin/calendar.pm b/IkiWiki/Plugin/calendar.pm
index c7d2b7c..1345939 100644
--- a/IkiWiki/Plugin/calendar.pm
+++ b/IkiWiki/Plugin/calendar.pm
@@ -22,7 +22,14 @@ use warnings;
use strict;
use IkiWiki 3.00;
use Time::Local;
-use POSIX ();
+
+use POSIX qw/setlocale LC_TIME strftime/;
+use Encode;
+my ($strftime_encoding)= setlocale(LC_TIME)=~m#\.([^@]+)#;
+sub strftime_utf8 {
+# try to return an utf8 value from strftime
+ $strftime_encoding ? Encode::decode($strftime_encoding, &strftime) : &strftime;
+}
my $time=time;
my @now=localtime($time);
@@ -123,10 +130,10 @@ sub format_month (@) {
}
# Find out month names for this, next, and previous months
- my $monthabbrev=POSIX::strftime("%b", @monthstart);
- my $monthname=POSIX::strftime("%B", @monthstart);
- my $pmonthname=POSIX::strftime("%B", localtime(timelocal(0,0,0,1,$pmonth-1,$pyear-1900)));
- my $nmonthname=POSIX::strftime("%B", localtime(timelocal(0,0,0,1,$nmonth-1,$nyear-1900)));
+ my $monthabbrev=strftime_utf8("%b", @monthstart);
+ my $monthname=strftime_utf8("%B", @monthstart);
+ my $pmonthname=strftime_utf8("%B", localtime(timelocal(0,0,0,1,$pmonth-1,$pyear-1900)));
+ my $nmonthname=strftime_utf8("%B", localtime(timelocal(0,0,0,1,$nmonth-1,$nyear-1900)));
my $archivebase = 'archives';
$archivebase = $config{archivebase} if defined $config{archivebase};
@@ -182,7 +189,7 @@ EOF
my %dowabbr;
for my $dow ($week_start_day..$week_start_day+6) {
my @day=localtime(timelocal(0,0,0,$start_day++,$params{month}-1,$params{year}-1900));
- my $downame = POSIX::strftime("%A", @day);
+ my $downame = strftime_utf8("%A", @day);
my $dowabbr = substr($downame, 0, 1);
$downame{$dow % 7}=$downame;
$dowabbr{$dow % 7}=$dowabbr;
@@ -329,8 +336,8 @@ EOF
for (my $month = 1; $month <= 12; $month++) {
my @day=localtime(timelocal(0,0,0,15,$month-1,$params{year}-1900));
my $murl;
- my $monthname = POSIX::strftime("%B", @day);
- my $monthabbr = POSIX::strftime("%b", @day);
+ my $monthname = strftime_utf8("%B", @day);
+ my $monthabbr = strftime_utf8("%b", @day);
$calendar.=qq{\t<tr>\n} if ($month % $params{months_per_row} == 1);
my $tag;
my $mtag=sprintf("%02d", $month);

View File

@ -1 +0,0 @@
foo bar.

View File

@ -4,6 +4,7 @@ page fitting enough. Users of ikiwiki can ask questions here.
Note that for more formal bug reports or todo items, you can also edit the
[[bugs]] and [[todo]] pages.
## Current topics ##
[[!inline pages="forum/* and !forum/discussion and !forum/*/*"

View File

@ -0,0 +1,20 @@
Hi!
I'm using the ikiwiki calendar plugin.
My website is in french (locale fr_FR.UTF-8), and calendars that are generated by the plugin makes some encodi$
I don't know how the plugin generate translation for dates, but I've seen that there is no ikiwiki translation$
That's why I suppose (but I'm not sure) that it use date unix command to insert date into the html page, witho$
Could I have forgotten some options to make it nice or not?
Is someone could test it and verify if it works or not?
Thanks.
Zut
> This was discussed in [[bugs/Encoding_problem_in_calendar_plugin]]
> and is now fixed. --[[Joey]]

View File

@ -0,0 +1,14 @@
[[!comment format=mdwn
username="http://joey.kitenet.net/"
nickname="joey"
subject="comment 1"
date="2012-01-30T19:30:20Z"
content="""
Sure.. You're looking for the file `IkiWiki/Plugin/passwordauth.pm`
This line in particular is the text that gets modified and displayed to the user.
<pre>
name => \"account_creation_password\",
</pre>
"""]]

View File

@ -0,0 +1,10 @@
[[!comment format=mdwn
username="http://joey.kitenet.net/"
nickname="joey"
subject="comment 2"
date="2012-01-30T19:34:00Z"
content="""
Yes, good spotting, [[ikiwiki/directive/meta]] had a doubled quote in the openid example.
Otherwise, that example will work. You don't need anything installed on your server to add openid delegation to a page.
"""]]

View File

@ -1,12 +0,0 @@
[[!comment format=mdwn
username="https://me.yahoo.com/a/dh6LXMw6hZydhHOqgWKiORWXspNyeW9n1nk-#9ce8d"
nickname="shobo"
subject="comment 8"
date="2011-12-16T13:21:57Z"
content="""
URLs containing /ikiwiki.cgi/ are dynamically-generated pages; everything else is static.
How is this possible? Where can I find more about this?<a href=\"http://yourbookmaker.co.uk/bet365\"></a>
Shobo, Junior Software Developer
"""]]

View File

@ -0,0 +1,20 @@
Just thought people might like to know I've added a couple more plugins to contrib.
[[plugins/contrib/newpage]]: This plugin adds a new action to the "ACTIONS" section of a page; a button labelled "create" and an input field next to it.
The common way of creating a new page is to edit a different page and add a link to the new page. However, there are some situations where that is a nuisance; for example, where pages are listed using a map directive. The newpage plugin enables one to simply type the name of the new page, click the "create" button, and one is then taken to the standard IkiWiki create-page form.
[[plugins/contrib/jssearchfield]]: This plugin provides the [[plugins/contrib/ikiwiki/directive/jssearchfield]] directive. This
enables one to search the structured data ("field" values) of multiple pages.
This uses Javascript for the searching, which means that the entire thing
is self-contained and does not require a server or CGI access, unlike
the default IkiWiki search. This means that it can be used in places such
as ebook readers. The disadvantage is that because Javascript runs
in the browser, the searching is only as fast as the machine your browser
is running on.
Because this uses Javascript, the htmlscrubber must be turned off for any page where the directive is used.
This plugin depends on the [[!iki plugins/contrib/field]] plugin.
--[[KathrynAndersen]]

View File

@ -109,7 +109,7 @@ Supported fields:
\[[!meta openid="http://joeyh.myopenid.com/"
server="http://www.myopenid.com/server"
xrds-location="http://www.myopenid.com/xrds?username=joeyh.myopenid.com""]]
xrds-location="http://www.myopenid.com/xrds?username=joeyh.myopenid.com"]]
* link

View File

@ -164,8 +164,7 @@ Personal sites and blogs
* [Richard "RichiH" Hartmann](http://richardhartmann.de/blog) - I thought I had added myself a year ago. Oups :)
* [Jonas Smedegaard](http://dr.jones.dk/) multilingual "classic" website w/ blog
* [Siri Reiter](http://sirireiter.dk/) portfolio website with a blog (in danish)
* [L'Altro Wiki](http://laltromondo.dynalias.net/~iki/) Tutorials, reviews, miscellaneus articles in English and Italian, from the IRC network syrolnet.org
* [STUPiD](http://lhzhang.com/)
* [L'Altro Wiki](http://laltromondo.dynalias.net/~iki/) Tutorials, reviews, miscellaneus articles in English and Italian.
* gregoa's [p.r. - political rants](http://info.comodo.priv.at/pr/)
* [Michael Hammer](http://www.michael-hammer.at/)
* [Richardson Family Wiki](http://the4richardsons.com) A wiki, blog or some such nonsense for the family home page or something or other... I will eventually move the rest of my sites to ikiwiki. The source of the site is in git.

View File

@ -0,0 +1,42 @@
The `jssearchfield` directive is supplied by the [[!iki plugins/contrib/jssearchfield desc=jssearchfield]] plugin.
This enables one to search the structured data ("field" values) of
multiple pages. A search form is constructed, and the searching is
done with Javascript, which means that the entire thing is self-contained.
This depends on the [[!iki plugins/contrib/field]] plugin.
The pages to search are selected by a PageSpec given by the "pages"
parameter.
The fields to search are given by the "fields" parameter. By default,
the field name is given, and the user can type the search parameter for
that field into a text input field.
## OPTIONS
**pages**: A PageSpec to determine the pages to search through.
**fields**: The fields to put into the search form, and to display
in the results.
**tagfields**: Display the given fields as a list of tags that can
be selected from, rather than having a text input field. Every distinct
value of that field will be listed, so it is best used for things with
short values, like "Author" rather than long ones like "Description".
Note that "tagfields" must be a subset of "fields".
**sort**: A SortSpec to determine how the matching pages should be sorted; this is the "default" sort order that the results will be displayed in.
The search form also gives the option of "random" sort, which will
display the search results in random order.
## SEARCHING
The search form that is created by this directive contains the following:
* for each search field, a label, plus either a text input field, or a list of checkboxes with values next to them if the field is also a tagfield. Note that the lists of checkboxes are initially hidden; one must click on the triangle next to the label to display them.
* a "sort" toggle. One can select either "default" or "random".
* A "Search!" button, to trigger the search if needed (see below)
* A "Reset" button, which will clear all the values.
The searching is dynamic. As soon as a value is changed, either by tabbing out of the text field, or by selecting or de-selecting a checkbox, the search
results are updated. Furthermore, for tagfields, the tagfield lists
themselves are updated to reflect the current search results.

View File

@ -0,0 +1,35 @@
[[!template id=plugin name=jssearchfield author="[[rubykat]]"]]
[[!tag type/search]]
IkiWiki::Plugin::jssearchfield - Create a search form to search page field data.
This plugin provides the [[ikiwiki/directive/jssearchfield]] directive. This
enables one to search the structured data ("field" values) of multiple pages.
This uses Javascript for the searching, which means that the entire thing
is self-contained and does not require a server or CGI access, unlike
the default IkiWiki search. This means that it can be used in places such
as ebook readers. The disadvantage is that because Javascript runs
in the browser, the searching is only as fast as the machine your browser
is running on.
Because this uses Javascript, the htmlscrubber must be turned off for any page where the directive is used.
This plugin depends on the [[!iki plugins/contrib/field]] plugin.
## Activate the plugin
# activate the plugin
add_plugins => [qw{goodstuff field jssearchfield ....}],
# disable scrubbing for search page
htmlscrubber_skip => 'mysearchpage',
## PREREQUISITES
IkiWiki
IkiWiki::Plugin::field
HTML::Template
## DOWNLOAD
* browse at GitHub: <http://github.com/rubykat/ikiplugins/blob/master/IkiWiki/Plugin/jssearchfield.pm>
* git repo at git://github.com/rubykat/ikiplugins.git

View File

@ -0,0 +1,10 @@
How is this better than creating an inline with `rootpage` set,
which creates a similar new page form? I sometimes make the inline match
nothing, while still creating pages, in the odd cases where I have a map
or such displaying the pages. --[[Joey]]
> I wanted something that would automatically be available on every page, but only when editing was enabled.
> One of the sites I maintain as webmaster (<http://www.constrainttec.com/>) has a two-stage publication process. The "working" site is on an internal server, where it is set up as a wiki that authorized users in the company can edit. When they're satisfied with the changes they've made, the "working" site gets pushed (with git) to the "production" site, which is on a different server. The ikiwiki setup for the production site has editing completely disabled, because it is the site which is exposed to the outside world.
> For that site, I want all sign that it's a wiki to be hidden. Therefore using an inline directive would be unsuitable.
> --[[KathrynAndersen]]

View File

@ -59,3 +59,15 @@ copy [...]
> It does not, however, have a markdown to html converter -- for
> previewing it has to talk to the server with AJAX.
> --[[Joey]]
>> I've got pagedown working on my personal site (simon.kisikew.org) but I'm not sure how
>> I can inject the relevant &lt;div&gt;'s in the right place. They need to go **above**
>> the editing &lt;textarea&gt; . (Too bad about the licensing, it's rather nice.)
>> I had to do one minor change to it to have it inject itself into the page properly,
>> and that was to make this change in `Markdown.Editor.js`:
>>
>> `this.input = doc.getElementById("editcontent" + postfix);`
>>
>> on line 247. --[[simonraven]]
>>> Well, I re-figured out that I needed a TMPL_VAR FOO in the template(s). --[[simonraven]]

View File

@ -159,3 +159,14 @@ branch harder for you to pull.
Before I go writing a whole swack of test cases, could you merge
my latest? Through at least ad0e56cdcaaf76bc68d1b5c56e6845307b51c44a
there should be no functional change. --[[schmonz]]
Never mind, I was able to convince myself (by cloning `origin`
afresh and merging from `schmonz/cvs`). The history is a little
gross but the before-and-after diff looks right.
Bugs found and fixed so far:
* Stop treating text files as binary (`-kb`) on `rcs_add()`
(ac8eab29e8394aca4c0b23a6687ec947ea1ac869)
> Merged to current head. --[[Joey]]

View File

@ -15,7 +15,7 @@ This page controls what shortcut links the wiki supports.
* [[!shortcut name=archive url="http://web.archive.org/*/%S"]]
* [[!shortcut name=gmap url="https://maps.google.com/maps?q=%s"]]
* [[!shortcut name=gmsg url="https://groups.google.com/groups?selm=%s"]]
* [[!shortcut name=wikipedia url="https://en.wikimedia.org/wiki/%s"]]
* [[!shortcut name=wikipedia url="https://en.wikipedia.org/wiki/%s"]]
* [[!shortcut name=wikitravel url="https://wikitravel.org/en/%s"]]
* [[!shortcut name=wiktionary url="https://en.wiktionary.org/wiki/%s"]]
* [[!shortcut name=debbug url="http://bugs.debian.org/%S" desc="Debian bug #%s"]]

View File

@ -6,3 +6,84 @@ Lots of \[[!img ]] (~2200), lots of \[[!teximg ]] (~2700). A complete rebuild ta
We could use a big machine, with plenty of CPUs. Could some multi-threading support be added to ikiwiki, by forking out all the external heavy plugins (imagemagick, tex, ...) and/or by processing pages in parallel?
Disclaimer: I know nothing of the Perl approach to parallel processing.
> I agree that it would be lovely to be able to use multiple processors to speed up rebuilds on big sites (I have a big site myself), but, taking a quick look at what Perl threads entails, and taking into account what I've seen of the code of IkiWiki, it would take a massive rewrite to make IkiWiki thread-safe - the API would have to be completely rewritten - and then more work again to introduce threading itself. So my unofficial humble opinion is that it's unlikely to be done.
> Which is a pity, and I hope I'm mistaken about it.
> --[[KathrynAndersen]]
> > I have much less experience with the internals of Ikiwiki, much
> > less Multi-threading perl, but I agree that to make Ikiwiki thread
> > safe and to make the modifications to really take advantage of the
> > threads is probably beyond the realm of reasonable
> > expectations. Having said that, I wonder if there aren't ways to
> > make Ikiwiki perform better for these big cases where the only
> > option is to wait for it to grind through everything. Something
> > along the lines of doing all of the aggregation and dependency
> > heavy stuff early on, and then doing all of the page rendering
> > stuff at the end quasi-asynchronously? Or am I way off in the deep
> > end.
> >
> > From a practical perspective, it seems like these massive rebuild
> > situations represent a really small subset of ikiwiki builds. Most
> > sites are pretty small, and most sites need full rebuilds very
> > very infrequently. In that scope, 10 minute rebuilds aren't that
> > bad seeming. In terms of performance challenges, it's the one page
> > with 3-5 dependency that takes 10 seconds (say) to rebuild that's
> > a larger challenge for Ikiwiki as a whole. At the same time, I'd
> > be willing to bet that performance benefits for these really big
> > repositories for using fast disks (i.e. SSDs) could probably just
> > about meet the benefit of most of the threading/async work.
> >
> > --[[tychoish]]
>>> It's at this point that doing profiling for a particular site would come
>>> in, because it would depend on the site content and how exactly IkiWiki is
>>> being used as to what the performance bottlenecks would be. For the
>>> original poster, it would be image processing. For me, it tends to be
>>> PageSpecs, because I have a lot of maps and reports.
>>> But I sincerely don't think that Disk I/O is the main bottleneck, not when
>>> the original poster mentions CPU usage, and also in my experience, I see
>>> IkiWiki chewing up 100% CPU usage one CPU, while the others remain idle. I
>>> haven't noticed slowdowns due to waiting for disk I/O, whether that be a
>>> system with HD or SSD storage.
>>> I agree that large sites are probably not the most common use-case, but it
>>> can be a chicken-and-egg situation with large sites and complete rebuilds,
>>> since it can often be the case with a large site that rebuilding based on
>>> dependencies takes *longer* than rebuilding the site from scratch, simply
>>> because there are so many pages that are interdependent. It's not always
>>> the number of pages itself, but how the site is being used. If IkiWiki is
>>> used with the absolute minimum number of page-dependencies - that is, no
>>> maps, no sitemaps, no trails, no tags, no backlinks, no albums - then one
>>> can have a very large number of pages without having performance problems.
>>> But when you have a change in PageA affecting PageB which affects PageC,
>>> PageD, PageE and PageF, then performance can drop off horribly. And it's a
>>> trade-off, because having features that interlink pages automatically is
>>> really nifty and useful - but they have a price.
>>> I'm not really sure what the best solution is. Me, I profile my IkiWiki builds and try to tweak performance for them... but there's only so much I can do.
>>> --[[KathrynAndersen]]
>>>> IMHO, the best way to get a multithreaded ikiwiki is to rewrite it
>>>> in haskell, using as much pure code as possible. Many avenues
>>>> then would open up to taking advantage of haskell's ability to
>>>> parallize pure code.
>>>>
>>>> With that said, we already have some nice invariants that could be
>>>> used to parallelize page builds. In particular, we know that
>>>> page A never needs state built up while building page B, for any
>>>> pages A and B that don't have a dependency relationship -- and ikiwiki
>>>> tracks such dependency relationships, although not currently in a form
>>>> that makes it very easy (or fast..) to pick out such groups of
>>>> unrelated pages.
>>>>
>>>> OTOH, there are problems.. building page A can result in changes to
>>>> ikiwiki's state; building page B can result in other changes. All
>>>> such changes would have to be made thread-safely. And would the
>>>> resulting lock contention result in a program that ran any faster
>>>> once parallelized?
>>>>
>>>> Which is why [[rewrite_ikiwiki_in_haskell]], while pretty insane, is
>>>> something I keep thinking about. If only I had a spare year..
>>>> --[[Joey]]

View File

@ -62,8 +62,4 @@ Some other things to be scared about:
a bunch of haskell libraries. OTOH, it might be possible to build a
static binary at home and upload it, thus avoiding a messy installation
procedure entirely.
* I can barely code in haskell yet. I'm probably about 100x faster at
programming in perl. I need to get some more practical experience before
I'm fast and seasoned enough in haskell to attempt such a project.
(And so far, progress at learning has been slow and I have not managed
to write anything serious in haskell.) --[[Joey]]
--[[Joey]]

View File

@ -12,3 +12,46 @@ Congratulations for demonstrating that April fools jokes can still be subtle
>>> It doesn't really. I recently (re-)read about couchdb and thought that
>>> what it was trying to do had some comparisons with the thinking going on
>>> in [[todo/structured_page_data]]. -- [[Jon]]
-----
I'm torn about this idea, if it's actually serious. I'm very comfortable
programming in Perl, and have written quite a few modules for IkiWiki, and
it would be a huge pain to have to start from scratch all over again. On
the other hand, this could be a motivation for me to learn Haskell. My
only encounter with Haskell has been a brief time when I was using the
Xmonad window manager, but it looks like an interesting language.
Functional programming is cool.
There are a lot of interesting plusses for Haskell you note (in the parent
page), but it's true that the idea is horribly daunting (as [[Joey]] said
"If only I had a spare year"). Is there any way that you could "start
small"? Because nothing will ever happen if the task is too daunting to
even start.
> This seems destined to remain a thought experiment unless something like
> that can be done, or I get a serious case of second system disease.
>
> I've considered doing things like using the external plugin interface
> to run a separate haskell program, which would allow implementing
> arbitrary plugins in haskell (starting with a pandoc plugin..),
> and could perhaps grow to subsume the perl code. However, this would
> stick us with the perl data structures, which are not a very good fit
> for haskell. --[[Joey]]
On further thought... perhaps it would be easier to fork or contribute to
an existing Haskell-based wiki, such as <a
href="http://jaspervdj.be/hakyll">Hakyll</a>?
--[[KathrynAndersen]]
> As far as I know there are no other wikis (haskell or otherwise)
> that are wiki compilers. Since we know from experience that dealing
> with static compilation turns out to be one of the trickiest parts of
> ikiwiki, I'm doubtful about trying to bolt that into one. --[[Joey]]
>> Hakyll isn't a wiki but it does do static compilation. The missing
>> parts are: the web interface, the wiki link processing, and page
>> dependency stuff. -- [[tychoish]]
>>> (nods) Which is why I suggested it. I'm not sure whether it would be easier to "bolt on" those things than static compilation, but it could be worth looking at, at least. -- [[KathrynAndersen]]

View File

@ -108,11 +108,9 @@ sub getconfig () {
if (! defined $var || ! defined $val) {
die gettext("usage: --set-yaml var=value"), "\n";
}
eval q{use YAML::Any};
eval q{use YAML} if $@;
eval q{use YAML::XS; use Encode};
die $@ if $@;
eval q{$YAML::Syck::ImplicitUnicode=1};
$config{$var}=Load($val."\n");
$config{$var}=Load(encode_utf8($val)."\n");
},
"version" => sub {
print "ikiwiki version $IkiWiki::version\n";