I started learning Perl 2 days ago so please don't bash.
I am trying to make a program that scrapes web links off a page using WWW::MECHANIZE. I am also trying to seperate the internal link (the ones that link to other parts of the site), from the externals links(The ones that link to other sites). Of course the code I made does not work correctly, the internal and external links are still mixed together after the script is run. Im %99 sure that my regex is incorrect.
Here is my code:
use WWW::Mechanize;
use Data::Dump qw(dump);
&crawl;
sub crawl{
my $browser = WWW::Mechanize->new;
my $url = 'https://www.centos.org/modules/news/article.php?storyid
+=384';
my $domain = 'centos.org';
my @all_links = getLinks($browser, $url);
my @internal_links = getLinks($browser, $url, qr/($domain|!http\:)
+/i);
my @external_links = getLinks($browser, $url, qr/(http\:&!$domain/
+i);
return 1;
}
sub getLinks{
my $browser = shift;
$browser->get( shift );
my @links = ();
my @current_page_links = $browser->find_all_links(url_regex => sh
+ift);
for(@current_page_links){
push(@links,"$_->[0]");
}
return @links;
}