use WWW::Mechanize;
use HTML::TreeBuilder;
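use strict;      # catch undeclared variables and call-syntax mistakes at compile time
use warnings;    # report runtime problems such as undefined values
# HTML::TreeBuilder is required by WWW::Mechanize's content( format => "text" ) call below.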
print "Please input the URL of the site to be searched \n";
my $url_name = ; # The user inputs the URL to be searched
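# Note: get() below expects an absolute URL including the scheme
# (for example "http://example.com"), so the full address should be entered.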
# Create an instance of the web crawler and fetch the starting page
our $webcrawler = WWW::Mechanize->new();
$webcrawler->get($url_name);
our @website_links = $webcrawler->links(); # links() returns the links found in the current page
# The HTML is stripped from the page content and the text is stored in an array of strings
our @stripped_html;
our $x = 0;
$stripped_html[$x] = $webcrawler->content( format => "text" );
$x = $x + 1;
my @visited_urls = ($url_name);
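# Note: links() returns WWW::Mechanize::Link objects rather than plain URL strings,
# so the loop below compares their url_abs() value against the visited list.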
# While the array still has elements (URLs), check the content for links and strip the HTML
while (@website_links) {
if ( grep { $_ eq $website_links[0]->url_abs } @visited_urls ) { # If the URL has already been visited, don't visit it again
shift @website_links;