Hmmm. Why does XML::RSS::Parse choke on the PM RSS feed (/var/www/atrixnet.com/cron/savepmnewestnodes.pl ?? (When I say "choke", I mean hang and suck RAM like a pig and then produce output nothing like what the docs for that module would lead me to expect.)
The code below is part of a cron job that I use to collect the latest PM nodes and display them on my website (www.atrixnet.com
#!/usr/bin/perl -w
use strict; use warnings;
use constant SAVEAS => '/cgi-bin/dat/pmnewestnodes.htmlpart';
# auto-flush STDOUT
++$|;
# globals
use vars qw( $dbh );
# libraries
use DBI;
use XML::RSS::Parser;
use File::Util;
# connect to DB
$dbh = DBI->connect(
q[DBI:mysql:] .
qq[database=myrssfeeds;] .
qq[host=localhost;] .
qq[port=3306],
'rssbot', # username
'^r$$p@$$w0rD!', # password
{ 'RaiseError' => 0 }
) or die qq[Aborting! Failed to connect to database: $DBI::errstr];
# grab feed from DB
my($rss) = ($dbh->selectrow_array(
q[SELECT content FROM feeds WHERE feedurl = ?],
undef, 'http://perlmonks.org/index.pl?node_id=30175&xmlstyle=rss'
))[0] or die q{Couldn't get RSS from DB! } . $DBI::errstr;
# parse feed
$rss = XML::RSS::Parser->new()->parse_string($rss);
die $rss->query('/channel/title');
# html-ify content
my($output) = '';
foreach my $i ( $rss->query('//item') ) {
my($node) = $i->query('title');
print $node->text_content, "\n";
}
File::Util->new->write_file(
'filename' => SAVEAS,
'content' => $output
);
print $output;
print qq[DONE. RSS PARSED AND SAVED AS HTML IN "${\ SAVEAS }"\n];
# disconnect if not already disconnected
END { $dbh->disconnect() if defined $dbh }