package Local::SiteRobot;

use HTML::LinkExtor;
use LWP::Simple;
use URI;
use strict;

sub new {
    my $class = shift;

    # Default options - a DEPTH of undef means crawl without limit
    my %options = (
        DEPTH        => undef,
        FOLLOW_REGEX => '',
        URLS         => [],
        VERBOSE      => 0
    );
    my %args = (%options, @_);
    foreach (keys %args) {
        die "Local::SiteRobot->new : Unknown argument option - $_"
            unless exists $options{$_};
    }
    my $self = bless \%args, (ref($class) || $class);
    $self->_verbose("Local::SiteRobot->new : Created new Local::SiteRobot object");
    return $self;
}

sub crawl {
    my $self = shift;
    return undef unless @{$self->{URLS}};
    my @pages;

    # Crawl from each starting URL in turn - only HTTP URLs are followed
    foreach my $url (@{$self->{URLS}}) {
        my $uri = URI->new($url);
        next unless $uri->scheme;
        next unless $uri->scheme eq 'http';
        $self->_verbose("Local::SiteRobot->crawl : Crawling from URL ", $uri->canonical->as_string);
        push (@pages, $self->_crawl($uri->canonical->as_string));
        $self->_verbose("Local::SiteRobot->crawl : Crawling from URL ", $uri->canonical->as_string, " returned ", scalar(@pages), " pages");
    }
    return @pages;
}

sub _crawl {
    my ($self, $url, $depth) = @_;
    my @pages;
    my $uri = URI->new($url);
    $self->_verbose("Local::SiteRobot->_crawl : GET ", $uri->canonical->as_string);
    my $html = get($uri->canonical->as_string);
    return unless $html;

    # Stop recursing once the configured DEPTH has been reached
    return $uri->canonical->as_string
        if ((defined $self->{DEPTH}) && ($self->{DEPTH} == ($depth || 0)));

    # Record this page as visited and include it in the results
    ${$self->{pages}}{$uri->canonical->as_string} = 1;
    push (@pages, $uri->canonical->as_string);

    # Extract anchor links, resolved against the current URL, and follow
    # those matching FOLLOW_REGEX that have not been seen before
    my $linkextor = HTML::LinkExtor->new(undef, $uri->canonical->as_string);
    $linkextor->parse($html);
    foreach my $link ($linkextor->links) {
        my ($tag, %attr) = @{$link};
        next unless ($tag eq 'a');
        next unless (defined $attr{'href'});
        my $href = URI->new($attr{'href'});
        next unless ($href->canonical->as_string =~ /$self->{FOLLOW_REGEX}/);
        next if exists ${$self->{pages}}{$href};
        ${$self->{pages}}{$href} = 1;
        push (@pages, $self->_crawl($href, ($depth || 0) + 1));
    }
    return @pages;
}

sub _verbose {
    my $self = shift;
    return unless $self->{VERBOSE};
    print STDERR @_, "\n";
}

1;

__END__
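
A minimal usage sketch, assuming the package above has been saved as Local/SiteRobot.pm somewhere in @INC; the starting URL, follow pattern and depth below are placeholder values, not part of the module:

#!/usr/bin/perl
use strict;
use Local::SiteRobot;

# Hypothetical site - substitute your own starting URL and follow pattern
my $robot = Local::SiteRobot->new(
    URLS         => [ 'http://www.example.com/' ],
    FOLLOW_REGEX => '^http://www\.example\.com/',  # only follow links within the site
    DEPTH        => 2,                             # limit recursion depth
    VERBOSE      => 1                              # progress messages to STDERR
);

# crawl() returns the canonical URLs of all pages reached
my @pages = $robot->crawl;
print "$_\n" foreach @pages;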

In reply to Local::SiteRobot - a simple web crawling module by rob_au
