#!/usr/bin/env perl
#
# Fetch a list of URLs from links.txt, extract the <div class="main-content">
# from each page, and (TODO) save it under a file named after the URL's last
# path segment.
use strict;
use warnings;
use feature 'say';    # a better "print"

use Mojo::UserAgent;  # was swallowed into a comment in the original paste

######################################################
# Base URL for qualifying relative links (see TODO below).
my $insert_str = "https://www.somesite.com";

# Get the pages to fetch from the links.txt file.
open my $link_fh, '<', 'links.txt'
    or die "couldn't open links.txt: $!";

my $ua = Mojo::UserAgent->new;

# Loop through all of the URLs.
while ( my $record = <$link_fh> ) {
    chomp $record;                   # strip the newline or the GET URL is invalid
    next if $record =~ /\A\s*\z/;    # skip blank lines

    say "Getting web site info for: $record";

    # Determine the new file name from the last path segment, since every
    # fetched page would otherwise be saved as index.html.
    # e.g. "https://host/foo/bar/" -> "bar.html"
    # (Replaces the original nested-rindex substr, which also broke on URLs
    # without a trailing slash.)
    my ($segment) = $record =~ m{ ([^/]+) /? \z }xms;
    my $new_file_name = ( $segment // 'index' ) . '.html';
    say "Should save the information to a new file as $new_file_name";

    # Fetch the page. Check success on the response object BEFORE asking for
    # the DOM: is_success lives on Mojo::Message::Response, not Mojo::DOM.
    my $res = $ua->get($record)->res;

    if ( $res->is_success ) {
        # Find the <div class="main-content">
        my $content = $res->dom->at('.main-content');

        # TODO Replace all of the links with fully qualified URLs
        #      (prefix relative hrefs with $insert_str).
        # TODO Save $content to a file named $new_file_name.
    }
    else {
        # Don't die: one bad URL shouldn't abort the whole run.
        warn "Failed to fetch $record (HTTP " . $res->code . ")\n";
        # TODO Send an email to admin letting them know of the issue.
    }
}

close $link_fh;
In reply to Web Scraping with Find / Replace by sjfranzen
| For: | Use: |
| & | &amp; |
| < | &lt; |
| > | &gt; |
| [ | &#91; |
| ] | &#93; |