* Added plugin system, currently only supporting PreProcessorDirectives.

* Added a pagecount plugin, enabled by default.
* Support PreProcessorDirectives with no parameters, ie "[[pagecount ]]".
* Fixed/optimised backlinks code, to avoid rebuilding pages to update
  backlinks when the backlinks hadn't really changed.
* Moved inline page support, rss generation etc into the inline plugin,
  enabled by default.
* Added brokenlinks plugin, not enabled by default, but rather handy.
* Fix several broken links in the doc wiki.
master
joey 2006-05-02 02:34:33 +00:00
parent 457d6bbbbf
commit 54d5308cd8
23 changed files with 466 additions and 197 deletions

View File

@ -0,0 +1,39 @@
#!/usr/bin/perl
# Provides a list of broken links.
package IkiWiki::Plugin::brokenlinks;

use warnings;
use strict;

sub import { #{{{
	IkiWiki::register_plugin("preprocess", "brokenlinks", \&preprocess);
} # }}}

# Expands a [[brokenlinks ]] preprocessor directive into an html list of
# links that do not resolve to an existing page.
# Named parameters: page (the page embedding the directive), pages (an
# optional globlist limiting which pages are scanned; defaults to all).
sub preprocess (@) { #{{{
	my %params=@_;
	$params{pages}="*" unless defined $params{pages};

	# Needs to update whenever a page is added or removed, so
	# register a dependency.
	IkiWiki::add_depends($params{page}, $params{pages});

	my @broken;
	# Iterate over the page names only. Iterating %IkiWiki::links
	# directly would also yield its values (array refs of links),
	# which are not page names.
	foreach my $page (keys %IkiWiki::links) {
		if (IkiWiki::globlist_match($page, $params{pages})) {
			foreach my $link (@{$IkiWiki::links{$page}}) {
				# Discussion links are not counted as broken.
				next if $link =~ /.*\/discussion/i;
				my $bestlink=IkiWiki::bestlink($page, $link);
				# A non-empty bestlink means the link resolves.
				next if length $bestlink;
				push @broken,
					IkiWiki::htmllink($page, $link, 1).
					" in ".
					IkiWiki::htmllink($params{page}, $page, 1);
			}
		}
	}

	return "There are no broken links!" unless @broken;
	return "<ul>\n".join("\n", map { "<li>$_</li>" } sort @broken)."</ul>\n";
} # }}}

1

View File

@ -0,0 +1,161 @@
#!/usr/bin/perl
# Page inlining and blogging.
package IkiWiki::Plugin::inline;
use warnings;
use strict;
# Called when the plugin module is loaded; hooks the inline
# preprocessor directive into ikiwiki's plugin registry.
sub import { #{{{
IkiWiki::register_plugin("preprocess", "inline", \&IkiWiki::preprocess_inline);
} # }}}
# Back to ikiwiki namespace for the rest, this code is very much
# internal to ikiwiki even though it's separated into a plugin.
package IkiWiki;
# Expands an [[inline ...]] preprocessor directive: embeds pages matching
# the "pages" globlist into the current page, optionally adds a blog post
# form and an rss link, and writes an rss feed for the inlined pages.
# Named parameters: page (the embedding page), pages (globlist, required),
# archive ("yes" lists page titles only; default "no"), show (maximum
# number of items, default 10 when not archiving), rootpage (when present,
# enables the blog post form).
sub preprocess_inline (@) { #{{{
my %params=@_;
# The pages parameter is required; expand to nothing without it.
if (! exists $params{pages}) {
return "";
}
if (! exists $params{archive}) {
$params{archive}="no";
}
# Unless building an archive, limit to the 10 newest pages by default.
if (! exists $params{show} && $params{archive} eq "no") {
$params{show}=10;
}
# Rebuild this page whenever a page matching the globlist changes.
add_depends($params{page}, $params{pages});
my $ret="";
if (exists $params{rootpage}) {
# Add a blog post form, with a rss link button.
my $formtemplate=HTML::Template->new(blind_cache => 1,
filename => "$config{templatedir}/blogpost.tmpl");
$formtemplate->param(cgiurl => $config{cgiurl});
$formtemplate->param(rootpage => $params{rootpage});
if ($config{rss}) {
$formtemplate->param(rssurl => rsspage(basename($params{page})));
}
$ret.=$formtemplate->output;
}
elsif ($config{rss}) {
# Add a rss link button.
my $linktemplate=HTML::Template->new(blind_cache => 1,
filename => "$config{templatedir}/rsslink.tmpl");
$linktemplate->param(rssurl => rsspage(basename($params{page})));
$ret.=$linktemplate->output;
}
# Archive mode shows only titles; normal mode inlines full page content.
my $template=HTML::Template->new(blind_cache => 1,
filename => (($params{archive} eq "no")
? "$config{templatedir}/inlinepage.tmpl"
: "$config{templatedir}/inlinepagetitle.tmpl"))
my @pages;
foreach my $page (blog_list($params{pages}, $params{show})) {
# A page containing the directive must not inline itself.
next if $page eq $params{page};
push @pages, $page;
$template->param(pagelink => htmllink($params{page}, $page));
$template->param(content => get_inline_content($params{page}, $page))
if $params{archive} eq "no";
$template->param(ctime => scalar(gmtime($pagectime{$page})));
$ret.=$template->output;
}
# TODO: should really add this to renderedfiles and call
# check_overwrite, but currently renderedfiles
# only supports listing one file per page.
if ($config{rss}) {
# Write the rss feed for the inlined pages straight to the destdir.
writefile(rsspage($params{page}), $config{destdir},
genrss($params{page}, @pages));
}
return $ret;
} #}}}
# Returns the pages matching a globlist, newest first, optionally
# truncated to a maximum number of items (0/undef means no limit).
sub blog_list ($$) { #{{{
	my ($globlist, $maxitems)=@_;

	# Select every known page that matches the globlist, then order
	# by creation time, most recent first.
	my @matches=grep { globlist_match($_, $globlist) } keys %pagesources;
	@matches=sort { $pagectime{$b} <=> $pagectime{$a} } @matches;

	# Trim to the requested number of items, if a limit was given.
	if ($maxitems && @matches > $maxitems) {
		splice @matches, $maxitems;
	}
	return @matches;
} #}}}
# Returns the htmlized, linkified content of a page, for embedding
# inside its parent page. Pages of unknown type yield "".
sub get_inline_content ($$) { #{{{
	my ($parentpage, $page)=@_;

	my $source=$pagesources{$page};
	my $type=pagetype($source);
	# Pages whose source type is unrecognised cannot be inlined.
	return "" if $type eq 'unknown';
	return htmlize($type, linkify(readfile(srcfile($source)), $parentpage));
} #}}}
# Formats an epoch time as an RFC 822 date string, as required by rss.
sub date_822 ($) { #{{{
	my ($epoch)=@_;

	# POSIX is loaded lazily; it is only needed when generating rss.
	eval q{use POSIX};
	return POSIX::strftime("%a, %d %b %Y %H:%M:%S %z", localtime($epoch));
} #}}}
# Rewrites relative <a href> and <img src> urls in an html fragment to
# absolute urls, relative to the given page url.
sub absolute_urls ($$) { #{{{
	# sucky sub because rss sucks
	my ($html, $base)=@_;

	# Strip the page filename, leaving the directory part of the url.
	$base=~s/[^\/]+$//;

	# Prefix the base to any link or image source that is not already
	# an absolute http url.
	$html=~s/<a\s+href="(?!http:\/\/)([^"]+)"/<a href="$base$1"/ig;
	$html=~s/<img\s+src="(?!http:\/\/)([^"]+)"/<img src="$base$1"/ig;
	return $html;
} #}}}
# Returns the filename of the rss feed corresponding to a page.
sub rsspage ($) { #{{{
	my $page=shift;

	return "$page.rss";
} #}}}
# Generates the rss feed for a page, with one feed item per inlined page.
# Takes the feed's page name followed by the list of pages to include;
# returns the rss document as a string.
sub genrss ($@) { #{{{
my $page=shift;
my @pages=@_;
my $url="$config{url}/".htmlpage($page);
my $template=HTML::Template->new(blind_cache => 1,
filename => "$config{templatedir}/rsspage.tmpl");
my @items;
foreach my $p (@pages) {
# Skip pages that have not been rendered yet; for the rest, build a
# feed item, making all urls absolute as rss requires.
push @items, {
itemtitle => pagetitle(basename($p)),
itemurl => "$config{url}/$renderedfiles{$p}",
itempubdate => date_822($pagectime{$p}),
itemcontent => absolute_urls(get_inline_content($page, $p), $url),
} if exists $renderedfiles{$p};
}
$template->param(
title => $config{wikiname},
pageurl => $url,
items => \@items,
);
return $template->output;
} #}}}
1

View File

@ -0,0 +1,29 @@
#!/usr/bin/perl
# Provides [[pagecount ]] to count the number of pages.
package IkiWiki::Plugin::pagecount;

use warnings;
use strict;

sub import { #{{{
	IkiWiki::register_plugin("preprocess", "pagecount", \&preprocess);
} # }}}

# Expands a [[pagecount ]] directive into the number of pages matching
# the optional "pages" globlist (all pages when it is omitted).
sub preprocess (@) { #{{{
	my %params=@_;
	$params{pages}="*" unless defined $params{pages};

	# Needs to update count whenever a page is added or removed, so
	# register a dependency.
	IkiWiki::add_depends($params{page}, $params{pages});

	my @pages=keys %IkiWiki::pagesources;
	# Optimisation: "*" matches everything, so skip per-page matching.
	return scalar @pages if $params{pages} eq "*";
	return scalar grep { IkiWiki::globlist_match($_, $params{pages}) } @pages;
} # }}}

1

View File

@ -0,0 +1,19 @@
#!/usr/bin/perl
# Ikiwiki skeleton plugin. Replace "skeleton" with the name of your plugin
# in the lines below, and flesh out the methods to make it do something.
package IkiWiki::Plugin::skeleton;
use warnings;
use strict;
# Called when the plugin module is loaded; registers the preprocessor
# directive with ikiwiki.
sub import { #{{{
IkiWiki::register_plugin("preprocess", "skeleton", \&preprocess);
} # }}}
# Called to expand a [[skeleton ...]] directive. Receives named
# parameters (including "page", the page embedding the directive) and
# returns the text to substitute into the page.
sub preprocess (@) { #{{{
my %params=@_;
return "skeleton plugin result";
} # }}}
1

View File

@ -122,18 +122,10 @@ sub parentlinks ($) { #{{{
return @ret; return @ret;
} #}}} } #}}}
sub rsspage ($) { #{{{
my $page=shift;
return $page.".rss";
} #}}}
sub preprocess ($$) { #{{{ sub preprocess ($$) { #{{{
my $page=shift; my $page=shift;
my $content=shift; my $content=shift;
my %commands=(inline => \&preprocess_inline);
my $handle=sub { my $handle=sub {
my $escape=shift; my $escape=shift;
my $command=shift; my $command=shift;
@ -141,12 +133,12 @@ sub preprocess ($$) { #{{{
if (length $escape) { if (length $escape) {
return "[[$command $params]]"; return "[[$command $params]]";
} }
elsif (exists $commands{$command}) { elsif (exists $plugins{preprocess}{$command}) {
my %params; my %params;
while ($params =~ /(\w+)=\"([^"]+)"(\s+|$)/g) { while ($params =~ /(\w+)=\"([^"]+)"(\s+|$)/g) {
$params{$1}=$2; $params{$1}=$2;
} }
return $commands{$command}->(page => $page, %params); return $plugins{preprocess}{$command}->(page => $page, %params);
} }
else { else {
return "[[bad directive $command]]"; return "[[bad directive $command]]";
@ -157,102 +149,17 @@ sub preprocess ($$) { #{{{
return $content; return $content;
} #}}} } #}}}
sub blog_list ($$) { #{{{ sub add_depends ($$) { #{{{
my $globlist=shift;
my $maxitems=shift;
my @list;
foreach my $page (keys %pagesources) {
if (globlist_match($page, $globlist)) {
push @list, $page;
}
}
@list=sort { $pagectime{$b} <=> $pagectime{$a} } @list;
return @list if ! $maxitems || @list <= $maxitems;
return @list[0..$maxitems - 1];
} #}}}
sub get_inline_content ($$) { #{{{
my $parentpage=shift;
my $page=shift; my $page=shift;
my $globlist=shift;
my $file=$pagesources{$page}; if (! exists $depends{$page}) {
my $type=pagetype($file); $depends{$page}=$globlist;
if ($type ne 'unknown') {
return htmlize($type, linkify(readfile(srcfile($file)), $parentpage));
} }
else { else {
return ""; $depends{$page}.=" ".$globlist;
} }
} #}}} } # }}}
sub preprocess_inline ($@) { #{{{
my %params=@_;
if (! exists $params{pages}) {
return "";
}
if (! exists $params{archive}) {
$params{archive}="no";
}
if (! exists $params{show} && $params{archive} eq "no") {
$params{show}=10;
}
if (! exists $depends{$params{page}}) {
$depends{$params{page}}=$params{pages};
}
else {
$depends{$params{page}}.=" ".$params{pages};
}
my $ret="";
if (exists $params{rootpage}) {
# Add a blog post form, with a rss link button.
my $formtemplate=HTML::Template->new(blind_cache => 1,
filename => "$config{templatedir}/blogpost.tmpl");
$formtemplate->param(cgiurl => $config{cgiurl});
$formtemplate->param(rootpage => $params{rootpage});
if ($config{rss}) {
$formtemplate->param(rssurl => rsspage(basename($params{page})));
}
$ret.=$formtemplate->output;
}
elsif ($config{rss}) {
# Add a rss link button.
my $linktemplate=HTML::Template->new(blind_cache => 1,
filename => "$config{templatedir}/rsslink.tmpl");
$linktemplate->param(rssurl => rsspage(basename($params{page})));
$ret.=$linktemplate->output;
}
my $template=HTML::Template->new(blind_cache => 1,
filename => (($params{archive} eq "no")
? "$config{templatedir}/inlinepage.tmpl"
: "$config{templatedir}/inlinepagetitle.tmpl"));
my @pages;
foreach my $page (blog_list($params{pages}, $params{show})) {
next if $page eq $params{page};
push @pages, $page;
$template->param(pagelink => htmllink($params{page}, $page));
$template->param(content => get_inline_content($params{page}, $page))
if $params{archive} eq "no";
$template->param(ctime => scalar(gmtime($pagectime{$page})));
$ret.=$template->output;
}
# TODO: should really add this to renderedfiles and call
# check_overwrite, but currently renderedfiles
# only supports listing one file per page.
if ($config{rss}) {
writefile(rsspage($params{page}), $config{destdir},
genrss($params{page}, @pages));
}
return $ret;
} #}}}
sub genpage ($$$) { #{{{ sub genpage ($$$) { #{{{
my $content=shift; my $content=shift;
@ -295,53 +202,6 @@ sub genpage ($$$) { #{{{
return $template->output; return $template->output;
} #}}} } #}}}
sub date_822 ($) { #{{{
my $time=shift;
eval q{use POSIX};
return POSIX::strftime("%a, %d %b %Y %H:%M:%S %z", localtime($time));
} #}}}
sub absolute_urls ($$) { #{{{
# sucky sub because rss sucks
my $content=shift;
my $url=shift;
$url=~s/[^\/]+$//;
$content=~s/<a\s+href="(?!http:\/\/)([^"]+)"/<a href="$url$1"/ig;
$content=~s/<img\s+src="(?!http:\/\/)([^"]+)"/<img src="$url$1"/ig;
return $content;
} #}}}
sub genrss ($@) { #{{{
my $page=shift;
my @pages=@_;
my $url="$config{url}/".htmlpage($page);
my $template=HTML::Template->new(blind_cache => 1,
filename => "$config{templatedir}/rsspage.tmpl");
my @items;
foreach my $p (@pages) {
push @items, {
itemtitle => pagetitle(basename($p)),
itemurl => "$config{url}/$renderedfiles{$p}",
itempubdate => date_822($pagectime{$p}),
itemcontent => absolute_urls(get_inline_content($page, $p), $url),
} if exists $renderedfiles{$p};
}
$template->param(
title => $config{wikiname},
pageurl => $url,
items => \@items,
);
return $template->output;
} #}}}
sub check_overwrite ($$) { #{{{ sub check_overwrite ($$) { #{{{
# Important security check. Make sure to call this before saving # Important security check. Make sure to call this before saving
# any files to the source directory. # any files to the source directory.
@ -400,6 +260,7 @@ sub render ($) { #{{{
else { else {
my $content=readfile($srcfile, 1); my $content=readfile($srcfile, 1);
$links{$file}=[]; $links{$file}=[];
delete $depends{$file};
check_overwrite("$config{destdir}/$file", $file); check_overwrite("$config{destdir}/$file", $file);
writefile($file, $config{destdir}, $content, 1); writefile($file, $config{destdir}, $content, 1);
$oldpagemtime{$file}=time; $oldpagemtime{$file}=time;
@ -588,6 +449,7 @@ FILE: foreach my $file (@files) {
my $p=pagename($f); my $p=pagename($f);
if (exists $depends{$p}) { if (exists $depends{$p}) {
foreach my $file (keys %rendered, @del) { foreach my $file (keys %rendered, @del) {
next if $f eq $file;
my $page=pagename($file); my $page=pagename($file);
if (globlist_match($page, $depends{$p})) { if (globlist_match($page, $depends{$p})) {
debug("rendering $f, which depends on $page"); debug("rendering $f, which depends on $page");
@ -606,8 +468,8 @@ FILE: foreach my $file (@files) {
if (exists $links{$page}) { if (exists $links{$page}) {
foreach my $link (map { bestlink($page, $_) } @{$links{$page}}) { foreach my $link (map { bestlink($page, $_) } @{$links{$page}}) {
if (length $link && if (length $link &&
! exists $oldlinks{$page} || (! exists $oldlinks{$page} ||
! grep { $_ eq $link } @{$oldlinks{$page}}) { ! grep { bestlink($page, $_) eq $link } @{$oldlinks{$page}})) {
$linkchanged{$link}=1; $linkchanged{$link}=1;
} }
} }
@ -615,8 +477,8 @@ FILE: foreach my $file (@files) {
if (exists $oldlinks{$page}) { if (exists $oldlinks{$page}) {
foreach my $link (map { bestlink($page, $_) } @{$oldlinks{$page}}) { foreach my $link (map { bestlink($page, $_) } @{$oldlinks{$page}}) {
if (length $link && if (length $link &&
! exists $links{$page} || (! exists $links{$page} ||
! grep { $_ eq $link } @{$links{$page}}) { ! grep { bestlink($page, $_) eq $link } @{$links{$page}})) {
$linkchanged{$link}=1; $linkchanged{$link}=1;
} }
} }

View File

@ -13,7 +13,9 @@ pure_install:: extra_install
extra_build: extra_build:
./ikiwiki doc html --templatedir=templates --underlaydir=basewiki \ ./ikiwiki doc html --templatedir=templates --underlaydir=basewiki \
--wikiname="ikiwiki" --verbose --nosvn --exclude=/discussion --wikiname="ikiwiki" --verbose --nosvn \
--exclude=/discussion --plugin=brokenlinks \
--plugin=pagecount
./mdwn2man doc/usage.mdwn > ikiwiki.man ./mdwn2man doc/usage.mdwn > ikiwiki.man
extra_clean: extra_clean:

View File

@ -6,6 +6,7 @@ contain spaces and parameters. The general form is:
This gets expanded before the rest of the page is processed, and can be used This gets expanded before the rest of the page is processed, and can be used
to transform the page in various ways. to transform the page in various ways.
Currently, these preprocessor directives are available: Note that if a preprocessor directive has no parameters, a space still must
be put after its name, to avoid confusion with a [[WikiLink]]. For example:
* "inline" to make a [[blog]] \\[[pagecount ]]

11
debian/changelog vendored
View File

@ -11,8 +11,17 @@ ikiwiki (1.1) UNRELEASED; urgency=low
* Which means that more than one blog is now supported to appear on a * Which means that more than one blog is now supported to appear on a
single page. (With some limitations, like only the last one getting an single page. (With some limitations, like only the last one getting an
rss file.) rss file.)
* Added plugin system, currently only supporting PreProcessorDirectives.
* Added a pagecount plugin, enabled by default.
* Support PreProcessorDirectives with no parameters, ie "[[pagecount ]]".
* Fixed/optimised backlinks code, to avoid rebuilding pages to update
backlinks when the backlinks hadn't really changed.
* Moved inline page support, rss generation etc into the inline plugin,
enabled by default.
* Added brokenlinks plugin, not enabled by default, but rather handy.
* Fix several broken links in the doc wiki.
-- Joey Hess <joeyh@debian.org> Mon, 1 May 2006 18:21:16 -0400 -- Joey Hess <joeyh@debian.org> Mon, 1 May 2006 21:01:12 -0400
ikiwiki (1.0) unstable; urgency=low ikiwiki (1.0) unstable; urgency=low

View File

@ -2,10 +2,11 @@
adding/removing a page. For example, if Foo/Bar links to "Baz", which is adding/removing a page. For example, if Foo/Bar links to "Baz", which is
Foo/Baz, and Foo/Bar/Baz gets added, it will update the links in Foo/Bar Foo/Baz, and Foo/Bar/Baz gets added, it will update the links in Foo/Bar
to point to it, but will forget to update the linkbacks in Foo/Baz. to point to it, but will forget to update the linkbacks in Foo/Baz.
And if Foo/Bar/Baz is then removed, it forgets to update Foo/Bar to link
* And if Foo/Bar/Baz is then removed, it forgets to update Foo/Bar to link
back to Foo/Baz. back to Foo/Baz.
-- is this still true? -- is this still true? (Yes (as of 1.0))
* If I try to do a web commit, to a svn+ssh repo, it fails with * If I try to do a web commit, to a svn+ssh repo, it fails with
"Host key verification failed." "Host key verification failed."
@ -31,4 +32,4 @@
line if --cgi is set, even if it's not yet running as a cgi line if --cgi is set, even if it's not yet running as a cgi
* if a page containing an rss feed happens to show up in an rss feed, * if a page containing an rss feed happens to show up in an rss feed,
the preprocessor directives won't be expanded (good) but are left in the preprocessor directives won't be expanded (good) but are left in
raw rather than removed (bad) raw rather than removed (bad).

View File

@ -61,7 +61,8 @@ Some of ikiwiki's features:
* [[BackLinks]] * [[BackLinks]]
Automatically included on pages. Rather faster than eg [[MoinMoin]] and always there to help with navigation. Automatically included on pages. Rather faster than eg MoinMoin and
always there to help with navigation.
* [[PageHistory]] * [[PageHistory]]
@ -115,6 +116,10 @@ Some of ikiwiki's features:
ikiwiki can be configured to send you commit mails with diffs of changes ikiwiki can be configured to send you commit mails with diffs of changes
to selected pages. to selected pages.
* [[Plugins]]
A plugin system allows extending ikiwiki in arbitrary ways.
---- ----
It also has some [[TODO]] items and [[Bugs]]. It also has some [[TODO]] items and [[Bugs]].

View File

@ -52,4 +52,6 @@ use IkiWiki::Setup::Standard {
#hyperestraier => 1, #hyperestraier => 1,
# Sanitize html? # Sanitize html?
sanitize => 1, sanitize => 1,
# To change the enabled plugins, edit this list
#plugins => [qw{pagecount inline brokenlinks}],
} }

27
doc/plugins.mdwn 100644
View File

@ -0,0 +1,27 @@
Plugins can be used to add additional features to ikiwiki. It's possible to
[[write]] your own plugins, or you can install and use plugins contributed by
others.
The ikiwiki package includes some standard plugins that are installed and
available by default. These include [[inline]], [[pagecount]], and [[brokenlinks]].
Of these, [[inline]] is enabled by default.
## Third party plugins
Plugins are perl modules and should be installed somewhere in the perl
module search path. See the @INC list at the end of the output of `perl -V`
for a list of the directories in that path. All plugins are in the
IkiWiki::Plugin namespace, so they go in a IkiWiki/Plugin subdirectory
inside the perl search path. For example, if your perl looks in
`/usr/local/lib/site_perl` for modules, you can locally install ikiwiki
plugins to `/usr/local/lib/site_perl/IkiWiki/Plugin`
Once a plugin is installed, you need to configure ikiwiki to load it using
the `--plugin` switch described in [[usage]], or the equivalent line in
[[ikiwiki.setup]].
## Plugin directory
Add your contributed plugins using this form:
[[inline pages="plugins/* !plugins/write !*/Discussion" archive="yes" rootpage="plugins/contrib" show="30"]]

View File

@ -0,0 +1,11 @@
This plugin generates a list of broken links on pages in the wiki. This is
a useful way to find pages that still need to be written, or links that
are written wrong.
The optional parameter "pages" can be a [[GlobList]] specifying the pages
to search for broken links, default is to search them all.
This plugin is included in ikiwiki, but is not enabled by default.
If it is turned on, here's a list of broken links on this wiki:
[[brokenlinks ]]

View File

@ -0,0 +1,4 @@
Allows including one wiki page inside another, generating blogs and RSS
feeds. See [[blog]] for details.
This plugin is enabled by default.

View File

@ -0,0 +1,11 @@
Provides a \\[[pagecount ]] [[PreProcessorDirective]] that is replaced with
the total number of pages currently in the wiki.
The optional parameter "pages" can be a [[GlobList]] specifying the pages
to count, default is to count them all.
This plugin is included in ikiwiki, but is not enabled by default.
If it is turned on it can tell us that this wiki includes
[[pagecount ]] pages, of which [[pagecount pages="*/Discussion"]] are
discussion pages.

View File

@ -0,0 +1,74 @@
ikiwiki [[plugins]] are written in perl. Each plugin is a perl module, in
the `IkiWiki::Plugin` namespace. The name of the plugin is typically in
lowercase, such as `IkiWiki::Plugin::inline`. Ikiwiki includes a
`IkiWiki::Plugin::skeleton` that can be fleshed out to make a useful
plugin. `IkiWiki::Plugin::pagecount` is another simple example.
## Note
One thing to keep in mind when writing a plugin is that ikiwiki is a wiki
*compiler*. So plugins influence pages when they are built, not when they
are loaded. A plugin that inserts the current time into a page, for
example, will insert the build time. Also, as a compiler, ikiwiki avoids
rebuilding pages unless they have changed, so a plugin that prints some
random or changing thing on a page will generate a static page that won't
change until ikiwiki rebuilds the page for some other reason, like the page
being edited.
## Registering plugins
Plugins should, when imported, call IkiWiki::register_plugin to hook into
ikiwiki. The function takes three parameters:
1. A method type. Use "preprocess" to register a [[PreProcessorDirective]]
2. A command name. This is the bit that will appear inside brackets in a
page.
3. A reference to a subroutine that is run when the plugin is used.
## Writing a [[PreProcessorDirective]]
For preprocessor commands, the subroutine is passed named parameters. A
"page" parameter gives the name of the page that embedded the preprocessor
command. All parameters included in the preprocessor command are included
as named parameters as well. Whatever the subroutine returns goes onto the
page in place of the command.
## Error handling in plugins
While a plugin can call ikiwiki's error routine for a fatal error, for
errors that aren't intended to halt the entire wiki build, including bad
parameters passed to a [[PreProcessorDirective]], etc, it's better to just
return the error message as the output of the plugin.
## Html issues
Note that if [[HTMLSanitization]] is enabled, html in
[[PreProcessorDirective]] output is sanitised, which may limit what your
plugin can do. Also, the rest of the page content is not in html format at
preprocessor time.
## Wiki configuration
A plugin can access the wiki's configuration via the `%IkiWiki::config` hash.
The best way to understand the contents of the hash is to look at
[[ikiwiki.setup]], which sets the hash content to configure the wiki.
## Wiki data
If your plugin needs to access data about other pages in the wiki. It can
use the following hashes, using a page name as the key:
* `%IkiWiki::links` lists the names of each page
that is linked to from that page in an array reference.
* `%IkiWiki::pagemtime` contains the last modification time of each page
* `%IkiWiki::pagectime` contains the creation time of each page
* `%IkiWiki::renderedfiles` contains the name of the file rendered by a
page
* `%IkiWiki::pagesources` contains the name of the source file for a page.
* `%IkiWiki::depends` contains a [[GlobList]] that is used to specify other
pages that a page depends on. If one of its dependencies is updated, the
page will also get rebuilt.
Many plugins will need to add dependencies to this hash; the best way to do
it is by using the IkiWiki::add_depends function, which takes as its
parameters the page name and a [[GlobList]] of dependencies to add.

View File

@ -1,7 +1,7 @@
A post-commit hook is run every time you commit a change to your subversion repository. To make the wiki be updated each time a commit is made, it can be run from (or as) a post-commit hook. A post-commit hook is run every time you commit a change to your subversion repository. To make the wiki be updated each time a commit is made, it can be run from (or as) a post-commit hook.
The best way to run ikiwiki in a [[Subversion]] post-commit hook is using The best way to run ikiwiki in a [[Subversion]] post-commit hook is using
a [[wrapper]], which can be generated using `ikiwiki --wrapper`. a wrapper, which can be generated using `ikiwiki --wrapper`.
First, set up the subversion checkout that ikiwiki will update and compile First, set up the subversion checkout that ikiwiki will update and compile
into your wiki at each subversion commit. Run ikiwiki a few times by hand into your wiki at each subversion commit. Run ikiwiki a few times by hand

View File

@ -62,7 +62,7 @@ this wiki, BTW.
## page locking can be bypassed via direct svn commits ## page locking can be bypassed via direct svn commits
A [[lock]]ed page can only be edited on the web by an admin, but A locked page can only be edited on the web by an admin, but
anyone who is allowed to commit direct to svn can bypass this. This is by anyone who is allowed to commit direct to svn can bypass this. This is by
design, although a subversion pre-commit hook could be used to prevent design, although a subversion pre-commit hook could be used to prevent
editing of locked pages when using subversion, if you really need to. editing of locked pages when using subversion, if you really need to.

View File

@ -7,9 +7,3 @@ Welcome to ikiwiki's todo list. Items are moved to [[todo/done]] when done.
# Full list of open items: # Full list of open items:
[[inline pages="todo/* !todo/done* !*/Discussion" archive="yes"]] [[inline pages="todo/* !todo/done* !*/Discussion" archive="yes"]]
----
Test:
[[inline pages="news/* !*/Discussion" rootpage="news" show="30"]]

View File

@ -1,7 +1,8 @@
* list of all missing pages * list of all missing pages
Could be a [[plugin]]. done
* list of registered users, with the names being links to any userpages. * list of registered users, with the names being links to any userpages.
Could be implemented with a [[preprocessordirective]], which suggests that there needs to be some sort of plugin interface for new preprocessordirectives. Although, how to let the wiki know that the page needs an update whever a new user is added? Might be a plugin, but how to let the wiki know that the page
needs an update whever a new user is added?

View File

@ -1,5 +1,3 @@
For one type of plugin, see [[todo/PluggableRenderers]].
A plugin system should ideally support things like: A plugin system should ideally support things like:
* [[todo/lists]] of pages, of missing pages / broken links, of registered users, etc
@ -11,22 +9,15 @@ A plugin system should ideally support things like:
* would it be useful to reimplement the hyperestradier search integration as a plugin? * would it be useful to reimplement the hyperestradier search integration as a plugin?
* Support [[RecentChanges]] as a regular page containing a plugin that updates each time there is a change, and statically builds the recent changes list. (Would this be too expensive/inflexible? There might be other ways to do it as a plugin, like making all links to RecentChanges link to the cgi and have the cgi render it on demand.) * Support [[RecentChanges]] as a regular page containing a plugin that updates each time there is a change, and statically builds the recent changes list. (Would this be too expensive/inflexible? There might be other ways to do it as a plugin, like making all links to RecentChanges link to the cgi and have the cgi render it on demand.)
* etc * etc
* For another type of plugin, see [[todo/PluggableRenderers]].
Another, separate plugin system that already (mostly) exists in ikiwiki is the RCS backend, which allows writing modules to drive other RCS systems than subversion. Another, separate plugin system that already (mostly) exists in ikiwiki is
the RCS backend, which allows writing modules to drive other RCS systems
than subversion.
## preprocessor plugins ## preprocessor plugins
Considering ikiwiki plugins, one idea I have is to make the [[PreProcessorDirective]]s be a plugin. A setting in the config file would enable various plugins, which are perl modules, that each provide one or more preprocessor directives. done
Since preprocessing happens before htmlization but after a page is loaded and linkified, it should be possible to use it to create something like a link map or lists, or a page index. Page inlining and rss generation is already done via preprocessor directives and seems natural as a plugin too.
Note that things like a link map or a broken link list page would need to
be updated whenever a set (or all) pages change; the %depends hash
already allows for pages to register this, although there could be some
strange behavior if mixing multiple directives some of which exclude pages
that others might want to include.
I need to look at the full range of things that other wikis use their plugin systems for, but preprocessor directives as plugins certainly seems useful, even if it's not a complete solution.
## case study: Moin Moin plugins ## case study: Moin Moin plugins

View File

@ -41,7 +41,7 @@ These options control the mode that ikiwiki is operating in.
* --wrapper [file] * --wrapper [file]
Generate a [[wrapper]] binary that is hardcoded to do action specified by Generate a wrapper binary that is hardcoded to do action specified by
the other options, using the specified input files and `destination` the other options, using the specified input files and `destination`
directory. The filename to use for the wrapper is optional. directory. The filename to use for the wrapper is optional.
@ -70,13 +70,6 @@ These options configure the wiki.
The name of the wiki, default is "wiki". The name of the wiki, default is "wiki".
* --fixctime
Pull last changed time for all pages out of the revision control system.
This rarely used option provides a way to get the real creation times of
items in weblogs, for example when building a wiki from a new subversion
checkout. It is unoptimised and quite slow.
* --templatedir * --templatedir
Specify the directory that the page [[templates]] are stored in. Specify the directory that the page [[templates]] are stored in.
@ -129,7 +122,7 @@ These options configure the wiki.
* --rss, --norss * --rss, --norss
If rss is set, ikiwiki will generate rss feeds for pages that inline If rss is set, ikiwiki will generate rss feeds for pages that inline
a blog. a [[blog]].
* --url http://url/ * --url http://url/
@ -137,7 +130,8 @@ These options configure the wiki.
* --cgiurl http://url/ikiwiki.cgi * --cgiurl http://url/ikiwiki.cgi
Specifies the url to the ikiwiki [[CGI]] script [[wrapper]]. Required when building the wiki for links to the cgi script to be generated. Specifies the url to the ikiwiki [[CGI]] script wrapper. Required when
building the wiki for links to the cgi script to be generated.
* --historyurl http://url/trunk/\[[file]]?root=wiki * --historyurl http://url/trunk/\[[file]]?root=wiki
@ -172,6 +166,11 @@ These options configure the wiki.
Enable [[HtmlSanitization]] of wiki content. On by default, disable with Enable [[HtmlSanitization]] of wiki content. On by default, disable with
--no-sanitize. --no-sanitize.
* --plugin name
Enables the use of the specified plugin in the wiki. See [[plugins]] for
details. Note that plugin names are case sensitive.
* --hyperestraier * --hyperestraier
Enables use of the [[HyperEstraier]] search engine for full test page Enables use of the [[HyperEstraier]] search engine for full test page
@ -179,7 +178,14 @@ These options configure the wiki.
* --verbose * --verbose
Be verbose about what it's doing. Be verbose about what is being done.
* --fixctime
Pull last changed time for all pages out of the revision control system.
This rarely used option provides a way to get the real creation times of
items in weblogs, for example when building a wiki from a new subversion
checkout. It is unoptimised and quite slow.
# AUTHOR # AUTHOR

24
ikiwiki
View File

@ -9,7 +9,7 @@ use HTML::Template;
use lib '.'; # For use without installation, removed by Makefile. use lib '.'; # For use without installation, removed by Makefile.
use vars qw{%config %links %oldlinks %oldpagemtime %pagectime use vars qw{%config %links %oldlinks %oldpagemtime %pagectime
%renderedfiles %pagesources %depends}; %renderedfiles %pagesources %depends %plugins};
sub usage () { #{{{ sub usage () { #{{{
die "usage: ikiwiki [options] source dest\n"; die "usage: ikiwiki [options] source dest\n";
@ -20,7 +20,7 @@ sub getconfig () { #{{{
%config=( %config=(
wiki_file_prune_regexp => qr{((^|/).svn/|\.\.|^\.|\/\.|\.html?$|\.rss$)}, wiki_file_prune_regexp => qr{((^|/).svn/|\.\.|^\.|\/\.|\.html?$|\.rss$)},
wiki_link_regexp => qr/\[\[(?:([^\s\]\|]+)\|)?([^\s\]]+)\]\]/, wiki_link_regexp => qr/\[\[(?:([^\s\]\|]+)\|)?([^\s\]]+)\]\]/,
wiki_processor_regexp => qr/\[\[(\w+)\s+([^\]]+)\]\]/, wiki_processor_regexp => qr/\[\[(\w+)\s+([^\]]*)\]\]/,
wiki_file_regexp => qr/(^[-[:alnum:]_.:\/+]+$)/, wiki_file_regexp => qr/(^[-[:alnum:]_.:\/+]+$)/,
verbose => 0, verbose => 0,
wikiname => "wiki", wikiname => "wiki",
@ -50,6 +50,7 @@ sub getconfig () { #{{{
setup => undef, setup => undef,
adminuser => undef, adminuser => undef,
adminemail => undef, adminemail => undef,
plugin => [qw{inline}],
); );
eval q{use Getopt::Long}; eval q{use Getopt::Long};
@ -90,6 +91,9 @@ sub getconfig () { #{{{
"wrapper:s" => sub { "wrapper:s" => sub {
$config{wrapper}=$_[1] ? $_[1] : "ikiwiki-wrap" $config{wrapper}=$_[1] ? $_[1] : "ikiwiki-wrap"
}, },
"plugin=s@" => sub {
push @{$config{plugin}}, $_[1];
}
) || usage(); ) || usage();
if (! $config{setup}) { if (! $config{setup}) {
@ -129,6 +133,14 @@ sub checkconfig () { #{{{
require IkiWiki::Rcs::Stub; require IkiWiki::Rcs::Stub;
$config{rcs}=0; $config{rcs}=0;
} }
foreach my $plugin (@{$config{plugin}}) {
$plugin="IkiWiki::Plugin::".possibly_foolish_untaint($plugin);
eval qq{use $plugin};
if ($@) {
error("Failed to load plugin $plugin: $@");
}
}
} #}}} } #}}}
sub error ($) { #{{{ sub error ($) { #{{{
@ -476,6 +488,14 @@ sub globlist_match ($$) { #{{{
return 0; return 0;
} #}}} } #}}}
sub register_plugin ($$$) { # {{{
my $type=shift;
my $command=shift;
my $function=shift;
$plugins{$type}{$command}=$function;
} # }}}
sub main () { #{{{ sub main () { #{{{
getconfig(); getconfig();