txt: Add a special case for robots.txt.

parent 09d4e9d6bb
commit a01028ae81

@@ -39,7 +39,14 @@ sub filter (@) {
 	my %params = @_;
 	my $content = $params{content};
 
-	if (defined $pagesources{$params{page}} && $pagesources{$params{page}} =~ /\.txt$/) {
+	if (defined $pagesources{$params{page}} &&
+	    $pagesources{$params{page}} =~ /\.txt$/) {
+		if ($pagesources{$params{page}} eq 'robots.txt' &&
+		    $params{page} eq $params{destpage}) {
+			will_render($params{page}, 'robots.txt');
+			writefile('robots.txt', $config{destdir}, $content);
+		}
+
 		encode_entities($content, "<>&");
 		if ($findurl) {
 			my $finder = URI::Find->new(sub {
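
The guard added above only fires when the source file is literally `robots.txt` and the page is being rendered as itself rather than inlined into another page (`$params{page} eq $params{destpage}`); in that case the raw content is also written straight into `$config{destdir}` via ikiwiki's `will_render` and `writefile` helpers. As a rough standalone illustration of that copy-through behaviour (not the plugin code itself; the directory names and helper function below are assumptions), plain Perl file I/O would look like this:

```perl
#!/usr/bin/perl
# Standalone sketch only: copy a robots.txt source file verbatim into an
# output directory, mirroring the special case added in the hunk above.
# The srcdir/destdir paths are illustrative assumptions, not ikiwiki config.
use strict;
use warnings;
use File::Spec;

sub copy_robots_txt {
	my ($srcdir, $destdir) = @_;
	my $src = File::Spec->catfile($srcdir, 'robots.txt');
	return unless -f $src;                 # nothing to do without a robots.txt

	open my $in, '<', $src or die "read $src: $!";
	my $content = do { local $/; <$in> };  # slurp the whole file
	close $in;

	my $dest = File::Spec->catfile($destdir, 'robots.txt');
	open my $out, '>', $dest or die "write $dest: $!";
	print $out $content;                   # copied intact: no entity encoding, no linkification
	close $out;
}

copy_robots_txt('srcdir', 'destdir');
```

The plugin itself still falls through to the entity encoding and URL-finding below, so a wiki page named "robots" is rendered as usual in addition to the verbatim copy.
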
@@ -2,6 +2,7 @@ ikiwiki (3.20100404) UNRELEASED; urgency=low
 
   * bzr: Fix bzr log parsing to work with bzr 2.0. (liw)
   * comments: Fix missing entity encoding in title.
+  * txt: Add a special case for robots.txt.
 
  -- Joey Hess <joeyh@debian.org>  Sun, 04 Apr 2010 12:17:11 -0400
 

@@ -12,3 +12,8 @@ The only exceptions are that [[WikiLinks|ikiwiki/WikiLink]] and
 [[directives|ikiwiki/directive]] are still expanded by
 ikiwiki, and that, if the [[!cpan URI::Find]] perl module is installed, URLs
 in the txt file are converted to hyperlinks.
+
+----
+
+As a special case, a file `robots.txt` will be copied intact into the
+`destdir`, as well as creating a wiki page named "robots".
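
The hyperlinking mentioned in this documentation hunk uses the [[!cpan URI::Find]] module: its callback receives each URI found in the text and returns the string to substitute for it, and `find()` rewrites the text in place. The plugin's own callback is truncated in the first hunk above, so the snippet below is only a hedged sketch of how such a conversion can look (the sample `$text` and the anchor markup are illustrative assumptions, not the plugin's exact output):

```perl
#!/usr/bin/perl
# Sketch: turn bare URLs in plain text into HTML links with URI::Find,
# as the txt plugin does when the module is available.
use strict;
use warnings;
use URI::Find;

my $text = "See http://ikiwiki.info/ for details.";

my $finder = URI::Find->new(sub {
	# Called for each URI found; the return value replaces it in $text.
	my ($uri, $orig_text) = @_;
	return qq|<a href="$uri">$orig_text</a>|;
});
$finder->find(\$text);    # rewrites $text in place

print "$text\n";
```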