#!/usr/bin/perl
use strict;
use warnings;

use Data::Dumper;
use List::Compare;
use File::Find::Rule;

sub get_files {
    my @dirs  = ('/home/tinyos/Monks/uniq');
    my $level = shift // 2;
    my @files = File::Find::Rule->file()
                                ->name('*.txt')
                                ->maxdepth($level)
                                ->in(@dirs);
    return @files;
}

my @files = get_files();
# print Dumper \@files if @files;

# file to compare against the rest
my $path2compare = '/home/tinyos/Monks/uniq/compare.txt';

open my $fh, '<', $path2compare
    or die "Can't open file $path2compare: $!";
chomp(my @array2compare = <$fh>);
close $fh or warn "File $path2compare close failed: $!";

# open each found file, load it into an array,
# compare against the reference lines, and store the result in a Hash of Arrays
my %HoA;
foreach my $path_to_file (@files) {
    open my $fh, '<', $path_to_file
        or die "Can't open file $path_to_file: $!";
    chomp(my @lines = <$fh>);
    close $fh or warn "File $path_to_file close failed: $!";

    my $lc = List::Compare->new('-u', \@array2compare, \@lines);

    # Get those items which appear at least once in both lists (their intersection).
    my @intersection = $lc->get_intersection;

    # Get those items which appear (at least once) only in the second list.
    # my @Ronly = $lc->get_complement;
    # print Dumper \@Ronly;
    # write to file here

    $HoA{$path_to_file} = \@intersection;
}

print Dumper \%HoA;

__END__
$ perl unique.pl
$VAR1 = {
          '/home/tinyos/Monks/uniq/compare.txt' => [
                                                     'Common line',
                                                     'Unique line file Original'
                                                   ],
          '/home/tinyos/Monks/uniq/unique1.txt' => [
                                                     'Common line'
                                                   ],
          '/home/tinyos/Monks/uniq/unique2.txt' => [
                                                     'Common line'
                                                   ]
        };

unique1.txt
Unique line file1
Common line

unique2.txt
Common line
Unique line file2

compare.txt
Unique line file Original
Common line