Here you are pushing every line that has the same key into an array: `my $key = (split /\t/, $line)[0];`
There can only be unique keys in such a data structure, so store an array reference per key instead. UPDATE: `push @{ $hash{$key} }, $line;`
#!/usr/bin/perl
use strict;
use warnings;

use Data::Dumper;

# Build a hash of arrays: the first whitespace-separated field of each
# record (a RefSeq accession) is the key, and every input line sharing
# that key is pushed onto the key's array ref.  This preserves duplicate
# keys (e.g. NM_000043 maps to four probe-set lines) instead of letting
# later records overwrite earlier ones, which is what a plain
# $hash{$key} = $line assignment would do.
my %hash;
while ( my $line = <DATA> ) {
    chomp $line;
    next unless $line =~ /\S/;    # skip blank records defensively
    my $key = ( split /\s+/, $line )[0];
    # push autovivifies $hash{$key} as an array ref on first use
    push @{ $hash{$key} }, $line;
}

# Prints the structure shown in the thread's sample Dumper output.
print Dumper( \%hash );

__DATA__
NM_001127328 202502_at
NM_000018 200710_at
NM_000019 205412_at
NM_001111067 203935_at
NM_000023 210632_s_at
NM_000027 204332_s_at
NM_000027 204333_s_at
NM_000027 216064_s_at
NM_000029 202834_at
NM_000031 218487_at
NM_000031 218489_s_at
NM_000032 211560_s_at
NM_000036 206121_at
NM_000042 205216_s_at
NM_000043 204780_s_at
NM_000043 204781_s_at
NM_000043 215719_x_at
NM_000043 216252_x_at
NM_000044 211110_s_at
NM_000044 211621_at
$VAR1 = { 'NM_000032' => [ 'NM_000032 211560_s_at' ], 'NM_000044' => [ 'NM_000044 211110_s_at', 'NM_000044 211621_at' ], 'NM_001111067' => [ 'NM_001111067 203935_at' ], 'NM_000023' => [ 'NM_000023 210632_s_at' ], 'NM_001127328' => [ 'NM_001127328 202502_at' ], 'NM_000031' => [ 'NM_000031 218487_at', 'NM_000031 218489_s_at' ], 'NM_000043' => [ 'NM_000043 204780_s_at', 'NM_000043 204781_s_at', 'NM_000043 215719_x_at', 'NM_000043 216252_x_at' ], 'NM_000029' => [ 'NM_000029 202834_at' ], 'NM_000042' => [ 'NM_000042 205216_s_at' ], 'NM_000019' => [ 'NM_000019 205412_at' ], 'NM_000018' => [ 'NM_000018 200710_at' ], 'NM_000027' => [ 'NM_000027 204332_s_at', 'NM_000027 204333_s_at', 'NM_000027 216064_s_at' ], 'NM_000036' => [ 'NM_000036 206121_at' ] };
In reply to Re^3: Load a file into hash with duplicate keys
by umasuresh
in thread Load a file into hash with duplicate keys
by sophix
| For: | Use:    |
| ---- | ------- |
| &    | &amp;   |
| <    | &lt;    |
| >    | &gt;    |
| [    | &#91;   |
| ]    | &#93;   |