#!/usr/bin/perl
#
# Deduplicate <article> elements in the XML files of a directory.
# For every FILE.xml in DATA_DIR a FILE.clean.xml is written in which
# any <article> whose serialized content (tags + text) has been seen
# before is removed.  Duplicate detection is by MD5 digest and is
# global across ALL files processed in one run (an article seen in an
# earlier file is also dropped from later files).
#
# Usage: script.pl DATA_DIR

# turn on perl safety features
use strict;
use warnings;

# initialize modules
use XML::Twig;
use Data::Dumper;           # NOTE(review): unused here; kept as a debugging aid
use DirHandle;
use Digest::MD5 qw(md5);    # BUG FIX: md5() is NOT exported by default;
                            # without qw(md5) the handler died at runtime

# check for working directory
my $dir = $ARGV[0] or die "Must specify directory on command line";

my %md5;        # md5(serialized element) => 1; shared dedup state across files
my $out_fh;     # current output handle, shared with the twig handler below

my @filepath_list = xmlfiles($dir);     # call xmlfiles subroutine to get
                                        # list of files from data dir

print "Processed files: \n";            # print list of processed files
foreach (@filepath_list) {
    print "$_\n";
}

for my $filepath (@filepath_list) {
    (my $outfile = $filepath) =~ s{\.xml$}{.clean.xml};   # dest. file
    # 3-arg open on a lexical handle; include $! so failures are diagnosable
    open $out_fh, '>', $outfile
        or die "cannot create output file '$outfile': $!";
    my $twig = XML::Twig->new(
        twig_handlers => { article => \&eliminate_dup },
    );
    $twig->parsefile($filepath);
    $twig->flush($out_fh);              # write remaining content, save memory
    # buffered write errors only surface at close on a write handle
    close $out_fh or die "cannot close '$outfile': $!";
}

exit;

# xmlfiles($dir) - return the sorted full paths of the plain files in
# $dir, skipping dot files.  (Name notwithstanding, it does not filter
# on the .xml extension; the caller's s{\.xml$}{...} tolerates that.)
sub xmlfiles {
    my $dir = shift;            # lexical: do NOT clobber the file-level $dir
    print $dir, "\n";
    my $dh = DirHandle->new($dir) or die "can't open directory '$dir': $!";
    return sort                 # sort pathnames
           grep { -f }          # choose only files
           map  { "$dir/$_" }   # create full paths
           grep { !/^\./ }      # filter out dot files
           $dh->read();         # read all filenames
}

# eliminate_dup($twig, $elt) - XML::Twig handler for <article>.
# Deletes the element if its serialized form was already seen (MD5
# match); otherwise records the digest and flushes the twig so output
# is streamed and memory stays bounded.
sub eliminate_dup {
    my ($t, $elt) = @_;
    my $elt_text = $elt->sprint;        # get text and tags
    my $digest   = md5($elt_text);      # binary 16-byte MD5 digest
    if ($md5{$digest}) {
        $elt->delete;                   # if md5 exists, remove element
    }
    else {
        $md5{$digest} = 1;              # store md5
        $t->flush($out_fh);             # flush memory to current output file
    }
}