#!/usr/bin/perl
use strict;
use warnings;

# Apache logs robots filter-outer
# Author:   Gary C. Wang
# Contact:  gary@quartertone.net
# Website:  www.quartertone.net
# Filename: norobotlog
#
# Usage: norobotlog [logfile_name]
#
# This script parses Apache log files and filters out entries from
# IP addresses that request the "robots.txt" file, which is commonly
# associated with webcrawlers and site indexers.
# Prior to usage, check the regexp to make sure it matches your log format.
# My log format is something like:
# 192.168.0.xx - - [11/Jul/2004:22:25:22 -0400] "GET /robots.txt HTTP/1.0" 200 78

my %robots;
my $ip_ptn = '((\d{1,3}\.){3}\d{1,3})';   # this regexp matches IP addresses
my @file   = <>;                          # slurp the log from stdin or the named file

# First, find out which IPs are associated with crawlers
foreach (@file) {
    # ----- Adjust this pattern to match your log file -----
    $robots{$1}++ if m/^$ip_ptn .+?robots\.txt/;
}

# Then weed those out, printing only the lines from IPs that never requested robots.txt
foreach (@file) {
    if (m/^$ip_ptn /) {
        print if !defined $robots{$1};
    }
}
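
# Example invocations, since the diamond operator (<>) reads either the named
# file or standard input. The log file names below are hypothetical; substitute
# your own paths:
#
#   norobotlog access.log > access.norobots.log
#   zcat access.log.1.gz | norobotlog | less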