#!/usr/bin/perl
# Ikiwiki enhanced image handling plugin
# Christian Mock cm@tahina.priv.at 20061002
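#
# Example directive usage (illustrative; the parameters shown are the ones
# handled in preprocess() below):
#   [[!img photo.jpg size="200x200" alt="a photo" caption="A photo"]]
#   [[!img defaults size="200x200"]]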
package IkiWiki::Plugin::img;
use warnings;
use strict;
use IkiWiki 3.00;
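
# Per-page defaults set via a "[[!img defaults ...]]" directive; they are
# merged into the parameters of later img directives on the same page.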
my %imgdefaults;
sub import {
    hook(type => "getsetup", id => "img", call => \&getsetup);
    hook(type => "preprocess", id => "img", call => \&preprocess, scan => 1);
}
sub getsetup () {
    return
        plugin => {
            safe => 1,
            rebuild => undef,
        },
}
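
# preprocess handles each img directive.  It is registered with scan => 1,
# so it also runs during the scan pass, where it only records the link to
# the image and returns before any image processing (the wantarray check
# below).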
sub preprocess (@) {
    my ($image) = $_[0] =~ /$config{wiki_file_regexp}/; # untaint
    my %params=@_;

    if (exists $imgdefaults{$params{page}}) {
        foreach my $key (keys %{$imgdefaults{$params{page}}}) {
            if (! exists $params{$key}) {
                $params{$key}=$imgdefaults{$params{page}}->{$key};
            }
        }
    }

    if (! exists $params{size}) {
        $params{size}='full';
    }

    if ($image eq 'defaults') {
        $imgdefaults{$params{page}} = \%params;
        return '';
    }
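
    # Record the wikilink from this page to the image.  add_link()
    # deduplicates entries in %links, so repeated scans do not pile up
    # duplicate links.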
    add_link($params{page}, $image);

    # optimisation: detect scan mode, and avoid generating the image
    if (! defined wantarray) {
        return;
    }

    my $file = bestlink($params{page}, $image);
    my $srcfile = srcfile($file, 1);
    if (! length $file || ! defined $srcfile) {
        return htmllink($params{page}, $params{destpage}, $image);
    }
    my $dir = $params{page};
    my $base = IkiWiki::basename($file);
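
    # Image::Magick is loaded lazily, so a missing module only causes an
    # error when an image actually needs to be processed.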
    eval q{use Image::Magick};
    error gettext("Image::Magick is not installed") if $@;
    my $im = Image::Magick->new;
    my $imglink;
    my $r;

    my ($dwidth, $dheight);
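
    # size=WxH (either dimension may be left empty) generates a scaled copy
    # named "WxH-<basename>" under the page's directory; size=full (the
    # default) uses the source image unchanged.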
    if ($params{size} ne 'full') {
        add_depends($params{page}, $image);

        my ($w, $h) = ($params{size} =~ /^(\d*)x(\d*)$/);
        error sprintf(gettext('wrong size format "%s" (should be WxH)'), $params{size})
            unless (defined $w && defined $h &&
                (length $w || length $h));

        my $outfile = "$config{destdir}/$dir/${w}x${h}-$base";
        $imglink = "$dir/${w}x${h}-$base";

        will_render($params{page}, $imglink);
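
        # Reuse the previously rendered file if it is at least as new as
        # the source image; otherwise read the source and resize it below.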
        if (-e $outfile && (-M $srcfile >= -M $outfile)) {
            $r = $im->Read($outfile);
            error sprintf(gettext("failed to read %s: %s"), $outfile, $r) if $r;
        }
        else {
            $r = $im->Read($srcfile);
            error sprintf(gettext("failed to read %s: %s"), $file, $r) if $r;

            # don't resize any larger
            my ($rw, $rh) = ($w, $h);
            if ((length $rw && $rw > $im->Get("width")) ||
                (length $rh && $rh > $im->Get("height"))) {
                $rw=$im->Get("width");
                $rh=$im->Get("height");
            }

            $r = $im->Resize(geometry => "${rw}x${rh}");
            error sprintf(gettext("failed to resize: %s"), $r) if $r;

            # don't actually write file in preview mode
            if (! $params{preview}) {
                my @blob = $im->ImageToBlob();
                writefile($imglink, $config{destdir}, $blob[0], 1);
            }
            else {
                $imglink = $file;
            }
        }

        # since we don't really resize larger, set the display
        # size, so the browser can scale the image up if necessary
        if (length $w && length $h) {
            ($dwidth, $dheight)=($w, $h);
        }
        # avoid division by zero on 0x0 image
        elsif ($im->Get("width") == 0 || $im->Get("height") == 0) {
            ($dwidth, $dheight)=(0, 0);
        }
        # calculate unspecified size from the other one, preserving
        # aspect ratio
        elsif (length $w) {
            $dwidth=$w;
            $dheight=$w / $im->Get("width") * $im->Get("height");
        }
        elsif (length $h) {
            $dheight=$h;
            $dwidth=$h / $im->Get("height") * $im->Get("width");
        }
    }
    else {
        $r = $im->Read($srcfile);
        error sprintf(gettext("failed to read %s: %s"), $file, $r) if $r;
        $imglink = $file;
        $dwidth = $im->Get("width");
        $dheight = $im->Get("height");
    }
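
    # Normal builds compute URLs relative to the destination page with
    # urlto(); previews use absolute URLs under the configured site url,
    # since the rendered files may not have been written yet.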
    my ($fileurl, $imgurl);
    if (! $params{preview}) {
        $fileurl=urlto($file, $params{destpage});
        $imgurl=urlto($imglink, $params{destpage});
    }
    else {
        $fileurl="$config{url}/$file";
        $imgurl="$config{url}/$imglink";
    }

    if (! defined($im->Get("width")) || ! defined($im->Get("height"))) {
        error sprintf(gettext("failed to determine size of image %s"), $file)
    }
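
    # Build the <img> tag; alt, title, align, class and id parameters are
    # passed through as the corresponding HTML attributes when present.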
    my $imgtag='<img src="'.$imgurl.
        '" width="'.$dwidth.
        '" height="'.$dheight.'"'.
        (exists $params{alt} ? ' alt="'.$params{alt}.'"' : '').
        (exists $params{title} ? ' title="'.$params{title}.'"' : '').
        (exists $params{align} ? ' align="'.$params{align}.'"' : '').
        (exists $params{class} ? ' class="'.$params{class}.'"' : '').
        (exists $params{id} ? ' id="'.$params{id}.'"' : '').
        ' />';
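
    # link parameter: unset or "yes" links to the full-size image, an
    # absolute URL links there directly, and anything else is resolved as a
    # wikilink (a value matching no page leaves the image unlinked).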
    if (! defined $params{link} || lc($params{link}) eq 'yes') {
        $imgtag='<a href="'.$fileurl.'">'.$imgtag.'</a>';
    }
    elsif ($params{link} =~ /^\w+:\/\//) {
        $imgtag='<a href="'.$params{link}.'">'.$imgtag.'</a>';
    }
    else {
        my $b = bestlink($params{page}, $params{link});

        if (length $b) {
            add_depends($params{page}, $b);
            $imgtag=htmllink($params{page}, $params{destpage},
                $params{link}, linktext => $imgtag,
                noimageinline => 1);
        }
    }
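
    # A caption wraps the image in a single-cell table so a <caption>
    # element can be used.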
    if (exists $params{caption}) {
        return '<table class="img">'.
            '<caption>'.$params{caption}.'</caption>'.
            '<tr><td>'.$imgtag.'</td></tr>'.
            '</table>';
    }
    else {
        return $imgtag;
    }
}
1