package Local::SiteRobot;

use strict;
use warnings;

use HTML::LinkExtor;
use LWP::Simple;
use URI;

# Constructor.  Accepts the options DEPTH (maximum recursion depth,
# undef for unlimited), FOLLOW_REGEX (links must match this pattern
# to be followed), URLS (array reference of starting URLs) and
# VERBOSE (print progress messages to STDERR when true).
sub new {
    my $class   = shift;
    my %options = (
        DEPTH        => undef,
        FOLLOW_REGEX => '',
        URLS         => [],
        VERBOSE      => 0,
    );
    my %args = (%options, @_);
    foreach (keys %args) {
        die "Local::SiteRobot->new : Unknown argument option - $_"
            unless exists $options{$_};
    }
    my $self = bless \%args, (ref($class) || $class);
    $self->_verbose('Local::SiteRobot->new : Created new Local::SiteRobot object');
    return $self;
}

# Crawl from each starting URL in turn and return the list of all
# pages found.  Only http URLs are crawled.
sub crawl {
    my $self = shift;
    return unless @{ $self->{URLS} };
    my @pages;
    foreach my $url (@{ $self->{URLS} }) {
        my $uri = URI->new($url);
        next unless $uri->scheme;
        next unless $uri->scheme eq 'http';
        $self->_verbose('Local::SiteRobot->crawl : Crawling from URL ',
            $uri->canonical->as_string);
        my @found = $self->_crawl($uri->canonical->as_string);
        push @pages, @found;
        $self->_verbose('Local::SiteRobot->crawl : Crawling from URL ',
            $uri->canonical->as_string, ' returned ', scalar(@found), ' pages');
    }
    return @pages;
}

# Recursively fetch a page, extract its links and follow those that
# match FOLLOW_REGEX, up to DEPTH levels deep.  Returns the list of
# URLs visited.  Pages already seen are recorded (by canonical URL)
# in $self->{pages} so that each page is fetched only once.
sub _crawl {
    my ($self, $url, $depth) = @_;
    my @pages;
    my $uri = URI->new($url);
    $self->_verbose('Local::SiteRobot->_crawl : GET ', $uri->canonical->as_string);
    my $html = get($uri->canonical->as_string);
    return unless $html;

    # Stop recursing once the maximum depth has been reached.
    return $uri->canonical->as_string
        if defined $self->{DEPTH} and $self->{DEPTH} == ($depth || 0);

    $self->{pages}{ $uri->canonical->as_string } = 1;
    push @pages, $uri->canonical->as_string;

    # Extract every link from the page, resolving relative URLs
    # against the page's own canonical URL.
    my $linkextor = HTML::LinkExtor->new(undef, $uri->canonical->as_string);
    $linkextor->parse($html);
    foreach my $link ($linkextor->links) {
        my ($tag, %attr) = @{$link};
        next unless $tag eq 'a';
        next unless defined $attr{href};
        my $href = URI->new($attr{href})->canonical->as_string;
        next unless $href =~ /$self->{FOLLOW_REGEX}/;
        next if exists $self->{pages}{$href};    # skip pages already seen
        $self->{pages}{$href} = 1;
        push @pages, $self->_crawl($href, ($depth || 0) + 1);
    }
    return @pages;
}

# Print progress messages to STDERR when the VERBOSE option is set.
sub _verbose {
    my $self = shift;
    return unless $self->{VERBOSE};
    print STDERR @_, "\n";
}

1;

__END__
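For reference, here is a minimal usage sketch. It assumes the module above has been saved as Local/SiteRobot.pm somewhere on @INC; the option names (URLS, FOLLOW_REGEX, DEPTH and VERBOSE) come from the constructor shown above, while the starting URL and the pattern are placeholders to replace with the site you actually want to crawl.

#!/usr/bin/perl
use strict;
use warnings;

use Local::SiteRobot;

# The URL and FOLLOW_REGEX below are placeholders: restrict the
# crawl to the site being indexed by matching its own hostname.
my $robot = Local::SiteRobot->new(
    URLS         => ['http://www.example.com/'],
    FOLLOW_REGEX => '^http://www\.example\.com/',
    DEPTH        => 2,
    VERBOSE      => 1,
);

# crawl() returns the list of page URLs that were fetched.
my @pages = $robot->crawl;
print "$_\n" for @pages;

Leaving FOLLOW_REGEX at its default empty string means every extracted link matches, so without a DEPTH limit the robot would wander off the starting site; constraining the pattern to the site's own URL prefix keeps the crawl local.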