#!perl
use strict;
use warnings;

# Count, across all input files, how many files each distinct line appears in.
my %all;
foreach my $file (@ARGV) {
    my $hash = unique_in_file($file);
    # One increment per file, no matter how often the line repeats inside it.
    $all{$_}++ for keys %$hash;
}

# A line is "common" when it was seen in every file; don't hardcode the
# needed count, the number of files *will* change.
print "$_\n" for grep { $all{$_} == @ARGV } sort keys %all;

# unique_in_file($fname) -> hashref of { line => occurrence count }.
# Does the same as uniq(1) but for a single file. Dies if the file
# cannot be opened (the original magic-<> version silently skipped
# unreadable files and would treat a name like "cmd |" as a pipe).
sub unique_in_file {
    my $fname = shift;

    # Checked 3-arg open instead of `local @ARGV` + <>: the filename is
    # always treated as data, never as a pipe/mode specification.
    open my $fh, '<', $fname or die "Can't open $fname: $!";

    my %uniq;
    while (my $line = <$fh>) {
        chomp $line;
        # preprocess line here
        $uniq{$line}++;
    }
    close $fh;

    # could've returned an array as well
    return \%uniq;
}
In reply to Re: find common data in multiple files
by Dallaylaen
in thread find common data in multiple files
by mao9856
| For:  | Use:    |
| &     | &amp;   |
| <     | &lt;    |
| >     | &gt;    |
| [     | &#91;   |
| ]     | &#93;   |