#!/usr/bin/perl
use strict;
use warnings;
use WWW::Mechanize;

my $url = shift || die "Please pass in a base URL as the argument to $0\n";

my %visited;
my @links;
my $max_depth = 3;
my $depth     = 0;

# autocheck => 0 stops Mechanize from dying on a broken link;
# we check $mech->success ourselves below.
my $mech = WWW::Mechanize->new( autocheck => 0 );

# This helps prevent following off-site links.
# Note: it assumes the URL passed in represents the highest level
# in the website hierarchy that will be visited. I.e. passing
# http://www.example.com/dir/ will record a link to
# http://www.example.com/, but will not follow it and report
# subsequent links.
my( $base_uri ) = $url =~ m|^(.*/)|;

get_links( $url );

sub get_links {
    my @urls = @_;
    my @found_links;

    for my $link ( @urls ){
        # This prevents following off-site or off-parent links.
        # \Q...\E quotes any regex metacharacters in the base URI.
        next unless $link =~ m/^\Q$base_uri\E/;

        $mech->get( $link );
        next unless $mech->success;

        # Filter out links we've already visited, plus mailto: and
        # javascript: hrefs. Adjust to suit.
        my @new_links = grep { ++$visited{$_} == 1 && ! /^(?:mailto|javascript):/i }
                        map  { $_->url_abs() } $mech->links();

        # Accumulate across every URL at this depth, so the recursion
        # below follows all newly found links, not just the last page's.
        push @found_links, @new_links;
        push @links,       @new_links;
    }

    # Keep going, as long as we should.
    get_links( @found_links ) if @found_links && $depth++ < $max_depth;
}

# Instead of printing them, you could insert them into a database.
print "$_\n" for @links;
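
# A minimal sketch of the database alternative mentioned above, assuming
# DBD::SQLite and a single-column table named "links"; the store_links()
# helper, database file name, and table are hypothetical and not part of
# the original script. DBI is loaded at runtime inside the sub, so the
# crawler still runs without it if you stick with plain printing.
sub store_links {
    my @to_store = @_;
    require DBI;

    # Connect to (or create) a local SQLite file and make sure the table exists.
    my $dbh = DBI->connect( 'dbi:SQLite:dbname=links.db', '', '',
                            { RaiseError => 1, AutoCommit => 1 } );
    $dbh->do( 'CREATE TABLE IF NOT EXISTS links ( url TEXT PRIMARY KEY )' );

    # INSERT OR IGNORE silently skips URLs already stored on a previous run.
    my $sth = $dbh->prepare( 'INSERT OR IGNORE INTO links ( url ) VALUES ( ? )' );
    $sth->execute( "$_" ) for @to_store;

    $dbh->disconnect;
}

# Example usage: replace the print loop above with
#   store_links( @links );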