more robust and faster handling of feedurls

master
joey 2006-07-30 06:14:44 +00:00
parent 8a5f9f6e00
commit dcaa4b013e
1 changed file with 30 additions and 28 deletions
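The change makes feed url autodiscovery a one-time cost: when the directive supplies no feedurl, preprocess now leaves it empty instead of falling back to the page url, and aggregate runs XML::Feed->find_feeds against $feed->{url} only while no feed url is known, storing the discovered url in $feed->{feedurl} so it need not be rediscovered on every poll. The FEED loop label and the inner loop over every discovered url are gone; a discovery or parse failure simply records the message and moves on to the next feed.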

@@ -74,7 +74,7 @@ sub preprocess (@) { #{{{
 	$dir=~s/^\/+//;
 	($dir)=$dir=~/$IkiWiki::config{wiki_file_regexp}/;
 	$feed->{dir}=$dir;
-	$feed->{feedurl}=defined $params{feedurl} ? $params{feedurl} : $params{url};
+	$feed->{feedurl}=defined $params{feedurl} ? $params{feedurl} : "";
 	$feed->{updateinterval}=defined $params{updateinterval} ? $params{updateinterval} * 60 : 15 * 60;
 	$feed->{expireage}=defined $params{expireage} ? $params{expireage} : 0;
 	$feed->{expirecount}=defined $params{expirecount} ? $params{expirecount} : 0;
@@ -180,7 +180,7 @@ sub aggregate () { #{{{
 	eval q{use HTML::Entities};
 	die $@ if $@;
-FEED:	foreach my $feed (values %feeds) {
+	foreach my $feed (values %feeds) {
 		next unless time - $feed->{lastupdate} >= $feed->{updateinterval};
 		$feed->{lastupdate}=time;
 		$feed->{newposts}=0;
@@ -188,36 +188,38 @@ FEED: foreach my $feed (values %feeds) {
 		IkiWiki::debug("checking feed ".$feed->{name}." ...");
-		my @urls=XML::Feed->find_feeds($feed->{feedurl});
-		if (! @urls) {
-			$feed->{message}="could not find feed at ".$feed->{feedurl};
-			IkiWiki::debug($feed->{message});
-			next FEED;
-		}
-		foreach my $url (@urls) {
-			my $f=eval{XML::Feed->parse(URI->new($url))};
-			if ($@) {
-				$feed->{message}="feed crashed XML::Feed! $@";
-				IkiWiki::debug($feed->{message});
-				next FEED;
-			}
-			if (! $f) {
-				$feed->{message}=XML::Feed->errstr;
-				IkiWiki::debug($feed->{message});
-				next FEED;
-			}
-			foreach my $entry ($f->entries) {
-				add_page(
-					feed => $feed,
-					title => defined $entry->title ? decode_entities($entry->title) : "untitled",
-					link => $entry->link,
-					content => $entry->content->body,
-					guid => defined $entry->id ? $entry->id : time."_".$feed->name,
-					ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
-				);
-			}
-		}
+		if (! length $feed->{feedurl}) {
+			my @urls=XML::Feed->find_feeds($feed->{url});
+			if (! @urls) {
+				$feed->{message}="could not find feed at ".$feed->{feedurl};
+				IkiWiki::debug($feed->{message});
+				next;
+			}
+			$feed->{feedurl}=pop @urls;
+		}
+		my $f=eval{XML::Feed->parse(URI->new($feed->{feedurl}))};
+		if ($@) {
+			$feed->{message}="feed crashed XML::Feed! $@";
+			IkiWiki::debug($feed->{message});
+			next;
+		}
+		if (! $f) {
+			$feed->{message}=XML::Feed->errstr;
+			IkiWiki::debug($feed->{message});
+			next;
+		}
+		foreach my $entry ($f->entries) {
+			add_page(
+				feed => $feed,
+				title => defined $entry->title ? decode_entities($entry->title) : "untitled",
+				link => $entry->link,
+				content => $entry->content->body,
+				guid => defined $entry->id ? $entry->id : time."_".$feed->name,
+				ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
+			);
+		}
 		$feed->{message}="processed ok";
 	}
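
For illustration, the same discover-once-then-reuse pattern in a small standalone script (a hypothetical sketch, not part of ikiwiki; the url, the die-based error handling, and the plain print loop are invented for the example):

#!/usr/bin/perl
# Hypothetical standalone sketch of the discover-once pattern used above;
# the url and error handling here are invented for illustration.
use strict;
use warnings;
use XML::Feed;
use URI;

my %feed=(url => "http://example.com/blog/", feedurl => "");

if (! length $feed{feedurl}) {
	# Autodiscovery costs an extra fetch and parse, so only run it while
	# no feed url is known yet, then remember what it found.
	my @urls=XML::Feed->find_feeds($feed{url});
	die "could not find feed at $feed{url}\n" unless @urls;
	$feed{feedurl}=pop @urls;
}

# Every later run can parse the remembered feed url directly.
my $f=eval { XML::Feed->parse(URI->new($feed{feedurl})) };
die "feed crashed XML::Feed! $@" if $@;
die XML::Feed->errstr."\n" unless $f;

print $_->title, "\n" foreach $f->entries;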