#!/usr/bin/perl
no lib '.';
use warnings;
use strict;
use FindBin; use lib $FindBin::Bin; # For use in nonstandard directory, munged by Makefile.
use IkiWiki;
use HTML::Entities;
my $regex = qr{
	(\\?)		# 1: escape?
	\[\[(!?)	# directive open; 2: optional prefix
	([-\w]+)	# 3: command
	(		# 4: the parameters (including initial whitespace)
		\s+
		(?:
			(?:[-\w]+=)?		# named parameter key?
			(?:
				""".*?"""	# triple-quoted value
				|
				"[^"]+"		# single-quoted value
				|
				[^\s\]]+	# unquoted value
			)
			\s*			# whitespace or end
						# of directive
		)
	*)		# 0 or more parameters
	\]\]		# directive closed
}sx;
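# The regex above matches both the old "[[directive ...]]" and the new
# "[[!directive ...]]" syntax, with parameter values that may be unquoted,
# "quoted", or """triple-quoted""". Illustrative matches (example pages only):
#   [[meta title="My page"]]
#   \[[tag foo]]   (leading backslash: an escaped, literal directive)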
sub handle_directive {
	my $escape = shift;
	my $prefix = shift;
	my $directive = shift;
	my $args = shift;

	if (length $escape) {
		return "${escape}[[${prefix}${directive}${args}]]"
	}
	if ($directive =~ m/^(if|more|table|template|toggleable)$/) {
		$args =~ s{$regex}{handle_directive($1, $2, $3, $4)}eg;
	}
	return "[[!${directive}${args}]]"
}
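# Rough behaviour of handle_directive (illustrative calls, not part of the
# original source):
#   handle_directive('', '', 'tag', ' foo')   returns  [[!tag foo]]
#   handle_directive('\\', '', 'tag', ' foo') returns  \[[tag foo]]  (escaped, left alone)
# For container directives (if, more, table, template, toggleable) any
# directives nested inside the parameters are rewritten recursively.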
sub prefix_directives {
	loadsetup(shift);

	IkiWiki::loadplugins();
	IkiWiki::checkconfig();
	IkiWiki::loadindex();

	if (! %pagesources) {
		error "ikiwiki has not built this wiki yet, cannot transition";
	}

	foreach my $page (values %pagesources) {
		next unless defined pagetype($page) &&
			-f $config{srcdir}."/".$page;
		my $content=readfile($config{srcdir}."/".$page);
		my $oldcontent=$content;
		$content=~s{$regex}{handle_directive($1, $2, $3, $4)}eg;
		if ($oldcontent ne $content) {
			writefile($page, $config{srcdir}, $content);
		}
	}
}
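# Typical invocation (path is illustrative):
#   ikiwiki-transition prefix_directives ~/wiki.setup
# Every source page under srcdir is rewritten in place, turning old-style
# [[directive ...]] markup into the current [[!directive ...]] form.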
sub indexdb {
	setstatedir(shift);

	# Note: No lockwiki here because ikiwiki already locks it
	# before calling this.
	if (! IkiWiki::oldloadindex()) {
		die "failed to load index\n";
	}
	if (! IkiWiki::saveindex()) {
		die "failed to save indexdb\n"
	}
	if (! IkiWiki::loadindex()) {
		die "transition failed, cannot load new indexdb\n";
	}
	if (! unlink("$config{wikistatedir}/index")) {
		die "unlink failed: $!\n";
	}
}
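# Converts the old plain-text .ikiwiki/index into the current indexdb format:
# the old index is read with oldloadindex (below), written back out with
# IkiWiki::saveindex, verified with IkiWiki::loadindex, and only then is the
# old index file removed. Typical invocation (illustrative):
#   ikiwiki-transition indexdb ~/wiki.setup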
sub hashpassword {
	setstatedir(shift);

	eval q{use IkiWiki::UserInfo};
	eval q{use Authen::Passphrase::BlowfishCrypt};
	if ($@) {
		error("ikiwiki-transition hashpassword: failed to load Authen::Passphrase, passwords not hashed");
	}

	IkiWiki::lockwiki();
	IkiWiki::loadplugin("passwordauth");
	my $userinfo = IkiWiki::userinfo_retrieve();
	foreach my $user (keys %{$userinfo}) {
		if (ref $userinfo->{$user} &&
		    exists $userinfo->{$user}->{password} &&
		    length $userinfo->{$user}->{password} &&
		    ! exists $userinfo->{$user}->{cryptpassword}) {
			IkiWiki::Plugin::passwordauth::setpassword($user, $userinfo->{$user}->{password});
		}
	}
}
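# Rewrites any account in the userdb that still has a plaintext "password"
# field but no "cryptpassword" field, letting the passwordauth plugin store a
# hashed password instead (hence the hard requirement on Authen::Passphrase
# above). Typical invocation (illustrative):
#   ikiwiki-transition hashpassword ~/wiki.setup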
sub aggregateinternal {
	loadsetup(shift);
	require IkiWiki::Plugin::aggregate;
	IkiWiki::checkconfig();
	IkiWiki::Plugin::aggregate::migrate_to_internal();
}
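# The actual work is delegated to the aggregate plugin's migrate_to_internal(),
# which presumably converts previously aggregated pages to the plugin's
# internal page form. Typical invocation (illustrative):
#   ikiwiki-transition aggregateinternal ~/wiki.setup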
sub setupformat {
	my $setup=shift;

	loadsetup($setup);
	IkiWiki::checkconfig();

	# unpack old-format wrappers setting into new fields
	my $cgi_seen=0;
	my $rcs_seen=0;
	foreach my $wrapper (@{$config{wrappers}}) {
		if ($wrapper->{cgi}) {
			if ($cgi_seen) {
				die "don't know what to do with second cgi wrapper ".$wrapper->{wrapper}."\n";
			}
			$cgi_seen++;
			print "setting cgi_wrapper to ".$wrapper->{wrapper}."\n";
			$config{cgi_wrapper}=$wrapper->{wrapper};
			$config{cgi_wrappermode}=$wrapper->{wrappermode}
				if exists $wrapper->{wrappermode};
		}
		elsif ($config{rcs}) {
			if ($rcs_seen) {
				die "don't know what to do with second rcs wrapper ".$wrapper->{wrapper}."\n";
			}
			$rcs_seen++;
			print "setting $config{rcs}_wrapper to ".$wrapper->{wrapper}."\n";
			$config{$config{rcs}."_wrapper"}=$wrapper->{wrapper};
			$config{$config{rcs}."_wrappermode"}=$wrapper->{wrappermode}
				if exists $wrapper->{wrappermode};
		}
		else {
			die "don't know what to do with wrapper ".$wrapper->{wrapper}."\n";
		}
	}

	IkiWiki::Setup::dump($setup);
}
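# Illustrative before/after for the wrappers conversion (paths are made up;
# the rewritten setup file is produced by IkiWiki::Setup::dump):
#   old:  wrappers => [ { cgi => 1, wrapper => "/var/www/wiki.cgi", wrappermode => "06755" } ],
#   new:  cgi_wrapper => "/var/www/wiki.cgi",
#         cgi_wrappermode => "06755",
# A non-cgi wrapper is assigned to the configured rcs instead, e.g. git_wrapper.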
sub moveprefs {
	my $setup=shift;

	loadsetup($setup);
	IkiWiki::checkconfig();

	eval q{use IkiWiki::UserInfo};
	error $@ if $@;

	foreach my $field (qw{allowed_attachments locked_pages}) {
		my $orig=$config{$field};
		foreach my $admin (@{$config{adminuser}}) {
			my $a=IkiWiki::userinfo_get($admin, $field);
			if (defined $a && length $a &&
			    # might already have been moved
			    (! defined $orig || $a ne $orig)) {
				if (defined $config{$field} &&
				    length $config{$field}) {
					$config{$field}=IkiWiki::pagespec_merge($config{$field}, $a);
				}
				else {
					$config{$field}=$a;
				}
			}
		}
	}

	my %banned=map { $_ => 1 } @{$config{banned_users}}, IkiWiki::get_banned_users();
	$config{banned_users}=[sort keys %banned];

	IkiWiki::Setup::dump($setup);
}
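# Moves settings that older versions kept in the admins' user preferences into
# the setup file: allowed_attachments and locked_pages pagespecs are merged in
# with pagespec_merge (below), and banned users are folded into banned_users.
# Typical invocation (illustrative):
#   ikiwiki-transition moveprefs ~/wiki.setup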
sub deduplinks {
	loadsetup(shift);
	IkiWiki::loadplugins();
	IkiWiki::checkconfig();
	IkiWiki::loadindex();
	foreach my $page (keys %links) {
		my %l;
		$l{$_}=1 foreach @{$links{$page}};
		$links{$page}=[keys %l]
	}
	IkiWiki::saveindex();
}
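# Older ikiwiki versions could push the same link onto @{$links{$page}} many
# times, which made link matching needlessly slow; this pass simply rebuilds
# each list via a hash, so e.g. ("foo", "bar", "foo") becomes ("foo", "bar")
# (order is not preserved). Typical invocation (illustrative):
#   ikiwiki-transition deduplinks ~/wiki.setup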
sub setstatedir {
	my $dirorsetup=shift;

	if (! defined $dirorsetup) {
		usage();
	}

	if (-d $dirorsetup) {
		$config{wikistatedir}=$dirorsetup."/.ikiwiki";
	}
	elsif (-f $dirorsetup) {
		loadsetup($dirorsetup);
	}
	else {
		error("ikiwiki-transition: $dirorsetup does not exist");
	}

	if (! -d $config{wikistatedir}) {
		error("ikiwiki-transition: $config{wikistatedir} does not exist");
	}
}
sub loadsetup {
	my $setup=shift;
	if (! defined $setup) {
		usage();
	}

	require IkiWiki::Setup;

	%config = IkiWiki::defaultconfig();
	IkiWiki::Setup::load($setup);
}
sub usage {
	print STDERR "Usage: ikiwiki-transition type ...\n";
	print STDERR "Currently supported transition subcommands:\n";
	print STDERR "\tprefix_directives setupfile ...\n";
	print STDERR "\taggregateinternal setupfile\n";
	print STDERR "\tsetupformat setupfile\n";
	print STDERR "\tmoveprefs setupfile\n";
	print STDERR "\thashpassword setupfile|srcdir\n";
	print STDERR "\tindexdb setupfile|srcdir\n";
	print STDERR "\tdeduplinks setupfile\n";
	exit 1;
}
usage() unless @ARGV;

my $mode=shift;
if ($mode eq 'prefix_directives') {
	prefix_directives(@ARGV);
}
elsif ($mode eq 'hashpassword') {
	hashpassword(@ARGV);
}
elsif ($mode eq 'indexdb') {
	indexdb(@ARGV);
}
elsif ($mode eq 'aggregateinternal') {
	aggregateinternal(@ARGV);
}
elsif ($mode eq 'setupformat') {
	setupformat(@ARGV);
}
elsif ($mode eq 'moveprefs') {
	moveprefs(@ARGV);
}
elsif ($mode eq 'deduplinks') {
	deduplinks(@ARGV);
}
else {
	usage();
}
package IkiWiki;
# A slightly modified version of the old loadindex function.
sub oldloadindex {
	%oldrenderedfiles=%pagectime=();
	if (! $config{rebuild}) {
		%pagesources=%pagemtime=%oldlinks=%links=%depends=
			%destsources=%renderedfiles=%pagecase=%pagestate=();
	}
	open (my $in, "<", "$config{wikistatedir}/index") || return;
	while (<$in>) {
		chomp;
		my %items;
		$items{link}=[];
		$items{dest}=[];
		foreach my $i (split(/ /, $_)) {
			my ($item, $val)=split(/=/, $i, 2);
			push @{$items{$item}}, decode_entities($val);
		}

		next unless exists $items{src}; # skip bad lines for now

		my $page=pagename($items{src}[0]);
		if (! $config{rebuild}) {
			$pagesources{$page}=$items{src}[0];
			$pagemtime{$page}=$items{mtime}[0];
			$oldlinks{$page}=[@{$items{link}}];
			$links{$page}=[@{$items{link}}];
			$depends{$page}={ $items{depends}[0] => $IkiWiki::DEPEND_CONTENT } if exists $items{depends};
			$destsources{$_}=$page foreach @{$items{dest}};
			$renderedfiles{$page}=[@{$items{dest}}];
			$pagecase{lc $page}=$page;
			foreach my $k (grep /_/, keys %items) {
				my ($id, $key)=split(/_/, $k, 2);
				$pagestate{$page}{decode_entities($id)}{decode_entities($key)}=$items{$k}[0];
			}
		}
		$oldrenderedfiles{$page}=[@{$items{dest}}];
		$pagectime{$page}=$items{ctime}[0];
	}

	# saveindex relies on %hooks being populated, else it won't save
	# the page state owned by a given hook. But no plugins are loaded
	# by this program, so populate %hooks with all hook ids that
	# currently have page state.
	foreach my $page (keys %pagemtime) {
		foreach my $id (keys %{$pagestate{$page}}) {
			$hooks{_dummy}{$id}=1;
		}
	}

	return close($in);
}
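# The old index is one line per page of space-separated, entity-encoded
# key=value pairs; keys handled above include src, mtime, ctime, dest, link,
# depends, and id_key pairs for plugin page state. An illustrative (made up)
# line:
#   mtime=1199739025 ctime=1199739025 src=index.mdwn dest=index.html link=sandbox link=blog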
# Used to be in IkiWiki/UserInfo, but only used here now.
sub get_banned_users () {
	my @ret;
	my $userinfo=userinfo_retrieve();
	foreach my $user (keys %{$userinfo}) {
		push @ret, $user if $userinfo->{$user}->{banned};
	}
	return @ret;
}
# Used to be in IkiWiki, but only used here (to migrate admin prefs into the
# setup file) now.
sub pagespec_merge ($$) {
	my $a=shift;
	my $b=shift;

	return $a if $a eq $b;
	return "($a) or ($b)";
}
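# e.g. pagespec_merge("foo/*", "bar/*") yields "(foo/*) or (bar/*)";
# identical pagespecs are returned unchanged.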
1