diff --git a/IkiWiki/Plugin/cutpaste.pm b/IkiWiki/Plugin/cutpaste.pm
index 4a8817168..0f6ea0b1f 100644
--- a/IkiWiki/Plugin/cutpaste.pm
+++ b/IkiWiki/Plugin/cutpaste.pm
@@ -5,10 +5,9 @@ use warnings;
 use strict;
 use IkiWiki 3.00;
 
-my %savedtext;
-
 sub import {
	hook(type => "getsetup", id => "cutpaste", call => \&getsetup);
+	hook(type => "needsbuild", id => "cutpaste", call => \&needsbuild);
 	hook(type => "preprocess", id => "cut", call => \&preprocess_cut, scan => 1);
 	hook(type => "preprocess", id => "copy", call => \&preprocess_copy, scan => 1);
 	hook(type => "preprocess", id => "paste", call => \&preprocess_paste);
@@ -23,6 +22,22 @@ sub getsetup () {
 		},
 }
 
+sub needsbuild (@) {
+	my $needsbuild=shift;
+	foreach my $page (keys %pagestate) {
+		if (exists $pagestate{$page}{cutpaste}) {
+			if (exists $pagesources{$page} &&
+			    grep { $_ eq $pagesources{$page} } @$needsbuild) {
+				# remove state, will be re-added if
+				# the cut/copy directive is still present
+				# on rebuild.
+				delete $pagestate{$page}{cutpaste};
+			}
+		}
+	}
+	return $needsbuild;
+}
+
 sub preprocess_cut (@) {
 	my %params=@_;
 
@@ -32,8 +47,7 @@ sub preprocess_cut (@) {
 		}
 	}
 
-	$savedtext{$params{page}} = {} if not exists $savedtext{$params{"page"}};
-	$savedtext{$params{page}}->{$params{id}} = $params{text};
+	$pagestate{$params{page}}{cutpaste}{$params{id}} = $params{text};
 
 	return "" if defined wantarray;
 }
@@ -47,8 +61,7 @@ sub preprocess_copy (@) {
 		}
 	}
 
-	$savedtext{$params{page}} = {} if not exists $savedtext{$params{"page"}};
-	$savedtext{$params{page}}->{$params{id}} = $params{text};
+	$pagestate{$params{page}}{cutpaste}{$params{id}} = $params{text};
 
 	return IkiWiki::preprocess($params{page}, $params{destpage},
 		$params{text}) if defined wantarray;
@@ -63,15 +76,15 @@ sub preprocess_paste (@) {
 		}
 	}
 
-	if (! exists $savedtext{$params{page}}) {
+	if (! exists $pagestate{$params{page}}{cutpaste}) {
 		error gettext('no text was copied in this page');
 	}
-	if (! exists $savedtext{$params{page}}->{$params{id}}) {
+	if (! exists $pagestate{$params{page}}{cutpaste}{$params{id}}) {
 		error sprintf(gettext('no text was copied in this page with id %s'),
 			$params{id});
 	}
 
 	return IkiWiki::preprocess($params{page}, $params{destpage},
-		$savedtext{$params{page}}->{$params{id}});
+		$pagestate{$params{page}}{cutpaste}{$params{id}});
 }
 1;
diff --git a/debian/changelog b/debian/changelog
index 27e0fb103..d6eebea78 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -24,6 +24,8 @@ ikiwiki (3.20100832) UNRELEASED; urgency=low
     of the rst plugin.
   * git: When updating from remote, use git pull --prune, to avoid possible
     errors from conflicting obsolete remote branches.
+  * cutpaste: Fix bug that occurred in some cases involving inlines when
+    text was pasted on a page before being cut.
 
  -- Joey Hess <joeyh@debian.org>  Tue, 07 Sep 2010 12:08:05 -0400
 
diff --git a/doc/bugs/Error:_no_text_was_copied_in_this_page_--_missing_page_dependencies.mdwn b/doc/bugs/Error:_no_text_was_copied_in_this_page_--_missing_page_dependencies.mdwn
index 356f9155a..4535cf35d 100644
--- a/doc/bugs/Error:_no_text_was_copied_in_this_page_--_missing_page_dependencies.mdwn
+++ b/doc/bugs/Error:_no_text_was_copied_in_this_page_--_missing_page_dependencies.mdwn
@@ -24,3 +24,44 @@ This error shows up only for *news.html*, but not in *news/2010-07-31* or
 for the aggregation in *index.html* or its RSS and atom files.
 
 --[[tschwinge]]
+
+> So the cutpaste plugin, in order to support pastes
+> that come before the corresponding cut in the page,
+> relies on the scan hook being called for the page
+> before it is preprocessed.
+>
+> In the case of an inline, this doesn't happen if
+> the page in question has not changed.
+>
+> Really though, it's not just inline: it's potentially anything
+> that preprocesses content. None of those things guarantee that
+> scan gets re-run on it first.
+>
+> I think cutpaste is going beyond the intended use of scan hooks,
+> which is to gather link information, not do arbitrary data collection.
+> Requiring that scan be run repeatedly could be a lot more work.
+>
+> Using `%pagestate` to store the cut content when scanning would be
+> one way to fix this bug. It would mean storing potentially big chunks
+> of page content in the indexdb. --[[Joey]]
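+>
+>> To illustrate the failure mode, consider a (hypothetical) page that
+>> pastes before it cuts:
+>>
+>>     \[[!paste id=intro]]
+>>
+>>     \[[!cut id=intro text="text that is rendered above its definition"]]
+>>
+>> This only works when the scan pass has recorded the cut text before
+>> the preprocess pass expands the paste; an inline that preprocesses an
+>> unchanged page skips that scan, hence the error above.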
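+>
+>> For reference, `%pagestate` is the mechanism plugins use for state
+>> that must survive between runs: entries are keyed by page name, then
+>> plugin id, then arbitrary keys, and ikiwiki serializes them into the
+>> indexdb. A sketch of the pattern the patch above adopts:
+>>
+>>     # record at scan time; written out with the index after the run
+>>     $pagestate{$page}{cutpaste}{$id} = $text;
+>>     # a needsbuild hook drops stale state when the page changes
+>>     delete $pagestate{$page}{cutpaste};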