Beefy Boxes and Bandwidth Generously Provided by pair Networks
laziness, impatience, and hubris

Link Checker

by tachyon (Chancellor)
on Oct 10, 2001 at 10:47 UTC ( [id://117944]=sourcecode: print w/replies, xml ) Need Help??
Category: Web Stuff
Author/Contact Info Dr James Freeman aka tachyon <>

This script is a website link checking tool. It extracts and checks *all* links for validity including anchors, http, ftp, mailto and image links.

Script performs a recursive search, width first to a user defined depth. External links are checked for validity but are not followed for obvious reasons - we don't want to check the whole web.

Broken anchors and links are reported along with the server error. All email addresses harvested are checked for RFC822 compliance and optionally against an MX or A DNS listing.

More details in pod

#!/usr/bin/perl -w

use strict;
use HTML::TokeParser;
use LWP::UserAgent;
use Net::FTP;
use Getopt::Std;

# Shared state used throughout the script:
#   @urls         - one arrayref of URLs per search-depth level
#   %emails       - harvested mailto links: "text<addr>" => page list
#   %seen         - checked links: URL => status string (pages appended on repeats)
#   %seen_anchors - checked anchors: page.anchor => status string
my ( @urls, %emails, %seen, %seen_anchors, %opts );
my $root    = pop @ARGV or useage();        # root of search
getopts( 'vrd:', \%opts );
my $level   = 0;
my $depth   = $opts{d} ? $opts{d} : 1;      # -d<n> search depth, default 1
my $verbose = $opts{v} ? 1 : 0;             # -v verbose progress output
my $res     = $opts{r} ? 1 : 0;             # -r resolve email domains via DNS
my $follow  = qr/(?:htm|html|cgi|pl|php|asp)/;  # link extensions worth following
@{$urls[$level]} = ( $root );
my ($domain)= $urls[0][0] =~ m|http://([^/]+)|i; # root domain marks "internal" links
my $rfc822  = init_rfc822();                # RFC822 address pattern read from DATA

my $ua      = LWP::UserAgent->new;
my $proxy   = '';                           # set to your proxy URL if required
$ua->proxy( 'http', $proxy ) if $proxy;     # only set when actually configured
# $ua->credentials( $netloc, $realm, $uname, $pass ); # uncomment if required
# get a DNS resolver object from Net::DNS and set nameservers
if ($res) {
    $^W = 0; # silence warnings about deprecated syntax from Net::DNS
    eval { require Net::DNS };
    $^W = 1;
    $res = $@ ? 0 : Net::DNS::Resolver->new;    # resolver object replaces the 1/0 flag
    warn "Net::DNS error $@\n" if $@;
    $res->nameservers( "", "" ) if $res;    # NOTE(review): fill in your nameservers here
}

# Width-first crawl: each pass over a level's URL list pushes newly found
# internal links onto the next level of @urls, down to the -d depth limit.
LEVEL:
for my $level_ref ( @urls ) {
    # NOTE(review): increment reconstructed - pushes below target $urls[$level],
    # i.e. the *next* level, and depth 1 processes only the root page as the POD says.
    last LEVEL if ++$level > $depth;
    print "\nLevel $level\n" if $verbose;

    PAGE:
    for my $url ( @$level_ref ) {
        $url .= '/' if $url =~ m|http://[^/]+$|i;   # add trailing / if forgotten
        my ($root) = $url =~ m|(http://.*/)|i;      # base for resolving relative links
        my $content = get( $url );
        print "  Checking links....\n" if $verbose;
        my $parser = HTML::TokeParser->new( \$content );
        my %anchors;    # same-page #fragment links, checked in one pass below

        LINK:
        while ( my $token = $parser->get_tag(qw( a img )) ) {
            my $link = $token->[1]{href} || $token->[1]{src} || next LINK;  # next unless hyperlink
            my $type = $token->[0];
            my $alt  = $token->[1]{alt} || '';
            my $text = $type eq 'a' ? $parser->get_trimmed_text('/a')
                                    : 'Image alt="'.$alt.'"';
            if ( $link =~ m|^\s*mailto:(.*)$|i ) {      # mail link - harvest only
                $emails{"$text<$1>"} .= defined $emails{"$text<$1>"} ? ", $url" : $url;
                next LINK;
            }
            elsif ( $link =~ m|\Q$domain\E| ) {         # internal link - check and follow
                do { seen( $link, $url ); next LINK } if defined $seen{$link};
                $seen{$link} = testHTTP( $link, $text, $url );
                push @{$urls[$level]}, $link
                    if $seen{$link} =~ /^OK.*$follow \(Link/;
            }
            elsif ( $link =~ m|^\s*http://|i ) {        # external http link - check only
                do { seen( $link, $url ); next LINK } if defined $seen{$link};
                $seen{$link} = testHTTP( $link, $text, $url );
            }
            elsif ( $link =~ m|^\s*ftp://|i ) {         # external ftp link - check only
                do { seen( $link, $url ); next LINK } if defined $seen{$link};
                $seen{$link} = testFTP( $link, $text, $url );
            }
            else {                                      # relative link
                $link =~ s|^\s*/||;                     # trim leading slash
                # bare #fragment: defer to the batch anchor check for this page
                do { $anchors{$url.$link}[0] = $text; next LINK } if $link =~ m|^#|;
                my $rel_root = $root;
                # move back up tree in response to ../ until we can't go any further
                while ( $link =~ s|^\.\./|| ) {
                    $rel_root =~ s|[^/]+/$|| unless $rel_root =~ m|http://[^/]+/$|i;
                }
                my $rel_link = $rel_root.$link;
                $rel_link =~ s|/\./|/|g;                # collapse /./ path segments
                do { seen( $rel_link, $url ); next LINK } if defined $seen{$rel_link};
                $seen{$rel_link} = testHTTP( $rel_link, $text, $url );
                push @{$urls[$level]}, $rel_link
                    if $seen{$rel_link} =~ /^OK.*$follow \(Link/;
            }
        }
        if (keys %anchors) {
            print "  Checking anchors...\n    Page: $url\n" if $verbose;
            check_anchors( $url, \%anchors, $content, $url );
        }
    }
}
# Report phase: anything whose status string does not start "OK" is broken.
my @dud_links = grep { not /^OK/ } values %seen;
# Schwartzian transform - due credit to RS :-)
# Extract the page a status string was found on, for sorting the report.
# NOTE(review): pattern fixed to match the "(On page: ... )" format that
# testHTTP/testFTP actually emit.
sub page { my $answer = pop; return $answer =~ /\(On page: (.*?) \)/ ? $1 : '' }
@dud_links = map  { $_->[0] }
             sort { $a->[1] cmp $b->[1] }
             map  { [ $_, page($_) ] } @dud_links;
print "\n\nBroken links $urls[0][0]\n";
@dud_links = ('No bad links!') unless @dud_links;
print "  $_\n" for @dud_links;

my @dud_anchors = grep { not /^OK/ } values %seen_anchors;
@dud_anchors = map  { $_->[0] }
               sort { $a->[1] cmp $b->[1] }
               map  { [ $_, page($_) ] } @dud_anchors;
print "\n\nBroken anchors $urls[0][0]\n";
@dud_anchors = ('No bad anchors!') unless @dud_anchors;
print "  $_\n" for @dud_anchors;

print "\n\nMailto links\n";
for (keys %emails) {
    # Keys look like "link text<user@host>"; pull out address and domain.
    my ( $email, $domain ) = $_ =~ m/<([^@]+@([^>]+))>/;
    my $status = check_rfc822($email) ? 'OK RFC822 - ' : 'Failed RFC822 - ';
    my $dns = dns_query($domain);   # 1 = listed, 0 = not listed, undef = check disabled
    $status .= ! defined $dns ? "No DNS check" : $dns ? "OK DNS" : "No DNS listing";
    print "  $status  email: $_  page(s): $emails{$_}\n";
}

sub check_rfc822 {
    # Return 1 if $email matches the RFC822 address pattern assembled by
    # init_rfc822() (file-global $rfc822), else 0.
    my $email = shift;
    return 1 if $email =~ m/^$rfc822$/o;
    return 0;
}

sub dns_query {
    # DNS-check $domain. Returns 1 when an MX or A record exists, 0 when
    # neither does, and undef when DNS checking is disabled (-r not given
    # or Net::DNS unavailable, in which case $res is false).
    my $domain = shift;
    return undef unless $res;
    for my $type ( 'MX', 'A' ) {
        my $packet = $res->send( $domain, $type );
        unless ($packet) {
            # A failed send would leave $packet undef; guard so we warn
            # and try the next record type rather than crash.
            warn $res->errorstring;
            next;
        }
        return 1 if $packet->header->ancount;
    }
    return 0;
}

sub seen {
    # Record that an already-checked $link also appears on $on_page by
    # appending the page to the link's existing status string in %seen,
    # so a broken link's report lists every page it occurs on.
    my ( $link, $on_page ) = @_;
    $seen{$link} .= ", $on_page";
}

sub get {
    # GET $url and return the response body (empty string on failure).
    my $url = shift;
    print "  Getting $url...." if $verbose;
    my $response = $ua->request( HTTP::Request->new( 'GET', $url ) );
    # Use the public accessors rather than poking HTTP::Response internals.
    print $response->message, ' ', $response->code, "\n" if $verbose;
    return $response->content;
}

sub testHTTP {
    # HEAD-request $url and return a status string of the form
    # "<message> <code> <url> (Link text: ... ) (On page: ... )".
    # If $url carries a #fragment, also fetch the page and verify the
    # anchor exists (results recorded via check_anchors).
    my ( $url, $text, $on_page ) = @_;
    my $response = $ua->request( HTTP::Request->new( 'HEAD', $url ) );
    my $answer   = $response->message . ' ' . $response->code
                 . " $url (Link text: $text ) (On page: $on_page )";
    print "    $answer\n" if $verbose;
    # if $url also includes an anchor, load the page and check the anchor exists
    if ( $url =~ m|^(.*/[^/]+)(#[^/]+)$| ) {
        my $page = $1;      # capture before get() can run a regex and clobber $1
        my %anchors;
        $anchors{$url}[0] = $text;
        my $old_verbose = $verbose;
        $verbose = 0;       # suppress chatter for this internal fetch
        my $content = get( $url );
        $verbose = $old_verbose;
        check_anchors( $page, \%anchors, $content, $on_page );
    }
    return $answer;
}

sub testFTP {
    # Verify an ftp:// link by anonymous login and a SIZE request on the
    # file path. Returns a status string in the same format as testHTTP.
    my ( $url, $text, $on_page ) = @_;
    my ( $host, $filepath ) = $url =~ m|ftp://([^/]+)(.*)$|;
    my $answer;
    if ( my $ftp = Net::FTP->new( $host ) ) {
        $ftp->login( 'anonymous', '' );
        my $size   = $ftp->size( $filepath );   # undef when the file is absent
        my $status = defined $size ? "OK" : "Not OK File does not exist";
        $answer = "$status FTP $url (Link text: $text ) (On page: $on_page )";
        $ftp->quit;     # don't leak the control connection
    }
    else {
        # Connection failed; Net::FTP leaves the reason in $@.
        $answer = "$@ FTP $url (Link text: $text ) (On page: $on_page )";
    }
    print "    $answer\n" if $verbose;
    return $answer;
}

sub check_anchors {
    # Check every "#fragment" link collected in %$anchor_ref against the
    # <a name="..."> tags actually present in $content. Each entry is
    # marked found (1) or missing (0) and the verdict is stored in
    # %seen_anchors keyed by on-page + anchor URL.
    my ( $url, $anchor_ref, $content, $on_page ) = @_;
    my $parser = HTML::TokeParser->new( \$content );
    while ( my $token = $parser->get_tag('a') ) {
        my $link = $token->[1]{name} || next;   # next unless a named anchor
        $link = "$url#$link";
        $$anchor_ref{$link}[1] = 1 if defined $$anchor_ref{$link};
    }
    for (sort keys %$anchor_ref) {
        $$anchor_ref{$_}[1] = defined $$anchor_ref{$_}[1] ? 1 : 0;
        my $status = $$anchor_ref{$_}[1] ? "OK" : "Not OK";
        my $answer = "$status ANCHOR $_ (Link text: $$anchor_ref{$_}[0] ) (On page: $on_page )";
        print "      $answer\n" if $verbose;
        $seen_anchors{$on_page.$_} = $answer;
    }
}

sub init_rfc822 {
    # Assemble the RFC822 address pattern from the lines in the DATA
    # section (Jeffrey Friedl's pattern from "Mastering Regular
    # Expressions") and return it precompiled.
    my $rfc_pat = '';
    while (<DATA>) {
        chomp;              # strip newline so the blank-line skip works and
                            # no literal newlines leak into the pattern
        next unless $_;     # skip blank lines
        $rfc_pat .= $_;
    }
    $rfc_pat = qr/$rfc_pat/;
    return $rfc_pat;
}

sub useage {
    # Print usage and stop - called when no root URL was supplied.
    print "
    Useage: -[v,r,d[depth]] <url>
        -v sets verbose mode
        -r resolve email address domains via DNS lookups
        -dn sets a search depth of 'n' levels where 'n' is a +ve integer
        <url> is the root url in which to start the search

    defaults are quiet mode and depth level 1
";
    exit 1;
}

=head1 NAME v0.001

=head1 SYNOPSIS -[v,r,d[depth]] <url>
        -v sets verbose mode
        -r resolve email address domains via DNS lookups
        -d[n] sets a search depth of 'n' levels where 'n' is a +ve int
        <url> is the root url in which to start the search

    defaults are quiet mode, no DNS lookup, and depth level 1 -vrd1000 > logfile.txt
    this will check your entire site (assuming a link depth <= 1000 :-
    and send the verbose output to the file logfile.txt


This script extracts and checks links for validity. A HEAD request is made
for each link found. A valid link will return status OK in the header.

Script performs a recursive search, width first to a user defined dept
+h. The 
default depth of 1 means just check the links on the root page. A 
value of 2 means check all the links on the root and child pages, a level of
3 will check links on root, children, and children's children. A depth of 4 gets
to the children's children's children's links and so on...
Links that have been checked are not checked again if they occur elsew
The pages on which repeat links occur are appended to the original che
+ck data 
so you can find all problem pages in the event of a broken link.

External http links (outside the root domain) are checked for validity
+ but are 
not followed for obvious reasons. External ftp links are checked to en
they point at a valid (accessible) file.

Anchors within a page are checked on block as an efficiency hack. Link
+s that 
point to anchors on pages other than the current working page are chec

Mailto links are harvested for (manual) checking. Their format is checked
against the RFC822 spec using the pattern developed by Jeffrey Friedl
in the ORA book "Mastering Regular Expressions". A check can also be made to
see if the domain can be looked up via DNS to help eliminate domain name typos.

In verbose mode you see all the links that are checked at each depth. 
normal mode only the broken links, anchors and mailto harvest are outp

There are a number of hardcoded options such as username, password, pr
DNS name servers. The $follow = qr// defines what sort of links to fol
Not all links need to be followed. You are unlikely to find more links
+ by 
following the link to a .jpg image for instance. By default links that
+ end 
in any of:

=item * .htm 
=item * .html 
=item * .cgi 
=item * .pl 
=item * .php 
=item * .asp 

are followed by default but you can add whatever you want. 

=head1 AUTHOR

Dr James Freeman aka tachyon <lt><gt>

=head1 LICENSE

This package is free software; you can redistribute it and/or modify i
+t under 
the terms of the "GNU General Public License".


This script is distributed in the hope that it will be useful, 
but WITHOUT ANY WARRANTY; without even the implied warranty of 

See the "GNU General Public License" for more details.


# this data is the RFC822 pattern - leave it alone!
Replies are listed 'Best First'.
Re: Link Checker
by merlyn (Sage) on Oct 10, 2001 at 11:06 UTC
Re: Link Checker
by ajt (Prior) on Oct 10, 2001 at 12:02 UTC
    There is also the W3C's free link checking service, freely available at It works quite well, and you can download the Perl source from CVS, and run it yourself - though I've only got it to run on Solaris and Linux so far, wouldn't run on NT...!
Re: Link Checker
by markjugg (Curate) on Sep 18, 2002 at 21:32 UTC
    I tried this script and merlyn's third iteration, but I found I much preferred linklint because it produces excellent cross referenced HTML reports, and allows you to analyze a site both from a local perspective as well as a remote perspective and has an option to produce a "orphaned files" report.


Log In?

What's my password?
Create A New User
Domain Nodelet?
Node Status?
node history
Node Type: sourcecode [id://117944]
and the web crawler heard nothing...

How do I use this?Last hourOther CB clients
Other Users?
Others studying the Monastery: (3)
As of 2024-06-25 14:58 GMT
Find Nodes?
    Voting Booth?

    No recent polls found

    erzuuli‥ 🛈The London Perl and Raku Workshop takes place on 26th Oct 2024. If your company depends on Perl, please consider sponsoring and/or attending.