use strict;
use warnings;
use diagnostics;
use HTML::TableExtract;
use WWW::Mechanize;
use Time::HiRes qw(sleep);    # import HiRes sleep so fractional delays actually work

# Output file for scraped donor records (one comma-joined row per line).
my $huffdata = "C:/huff_data.txt";

# Lexical filehandle, 3-arg open. Original used bareword MYFILE with '+>>'
# (read/append) but never read from the handle, so plain append is equivalent.
open my $out_fh, '>>', $huffdata
    or die "unable to open $huffdata $!";

my $start_url =
    "http://fundrace.huffingtonpost.com/neighbors.php?type=name&lname=SMITH";

# Fetch a donor-search results page, append its table rows to the output
# file, then recurse into every "more" pagination link on the page.
#
# Takes the page URL as an optional argument (defaults to the start URL).
# The original mutated the shared global $url inside the loop before each
# recursive call, so sibling iterations resolved links against the wrong
# base URL; passing the URL down fixes that while keeping the zero-arg
# call site working unchanged.
sub parse_and_save {
    my ($page_url) = @_;
    $page_url = $start_url unless defined $page_url;

    # Politeness delay: a fresh random 0-10s pause per request. (The
    # original computed rand(10) once at load time, so every request
    # slept for the same duration, and core sleep truncated the fraction.)
    sleep(rand(10));

    my $mech = WWW::Mechanize->new;
    $mech->get($page_url);

    my $te = HTML::TableExtract->new(
        headers => [qw(Donor Contribution Address)],
    );
    $te->parse( $mech->content );

    # One line per table row; the original omitted the newline, mashing
    # every scraped row onto a single line of the output file.
    for my $row ( $te->rows ) {
        print {$out_fh} join( ',', @$row ), "\n";
    }

    # Follow every pagination link. find_all_links returns the full list
    # (find_link yields only the first match). A page with no "more" link
    # simply ends this branch of the recursion -- the original die'd here,
    # which aborted the whole program on the deepest page.
    my @links = $mech->find_all_links( text_regex => qr/more/i );
    for my $link (@links) {
        # url_abs() takes no arguments; the original passed $/ by mistake.
        parse_and_save( $link->url_abs );
    }
    return;
}

parse_and_save();

# Check close on the write handle: buffered write errors surface here.
close $out_fh or die "unable to close $huffdata: $!";