#!/usr/bin/perl
# transpose.pl
# by Tachyon
# Maybe look at Math::Matrix as an alternative.
#
# The main requirement is that you have as much free disk space for the
# temp files as the total input file size. You will be limited in the
# number of columns you can transpose by the number of open file
# descriptors your Perl will let you have. It is very easy to hack the
# logic to do N columns per pass at the expense of one full read of the
# input file per extra pass. Alternatively you could DBM or tie a hash
# to a file, use the keys as pseudo file handles, and just append data
# to the values.
#
# It should be really fast, as we make a single pass through the input
# data and then effectively just write it out (each temp file holds one
# full output line).
#
# Works, but the temp files seem wasteful to me.

use strict;

transpose90( "data.txt", "data-transpose.txt" );

sub transpose90 {
    my ( $infile, $outfile, $tmp ) = @_;
    $tmp ||= 'temp';
    open IN, $infile or die "Can't read $infile $!\n";

    # read the first line to find the number of columns,
    # then open one temp file per column
    local $_ = <IN>;
    chomp;
    my @data     = split ' ';
    my $num_cols = $#data;    # index of the last column
    my @fhs;
    open $fhs[$_], ">$tmp$_.txt"
        or die "Can't create temp file $tmp$_ $!\n" for 0 .. $num_cols;
    print {$fhs[$_]} $data[$_], "\t" for 0 .. $num_cols;

    # stream the rest of the input, appending each field to
    # the temp file for its column
    while ( <IN> ) {
        chomp;
        @data = split ' ';
        print {$fhs[$_]} $data[$_], "\t" for 0 .. $num_cols;
    }
    close IN;
    close $fhs[$_] for 0 .. $num_cols;

    # each temp file now holds one full output line; concatenate
    # them into the output file and clean up
    open OUT, ">$outfile" or die "Can't write $outfile $!\n";
    for ( 0 .. $num_cols ) {
        open IN, "$tmp$_.txt" or die "Can't read temp file $tmp$_ $!\n";
        print OUT scalar(<IN>), "\n";
        close IN;
        unlink "$tmp$_.txt";
    }
    close OUT;
}
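
For the "N columns per pass" hack mentioned in the header comment, here is a
minimal sketch, not part of the original script: the batch size $N is a
made-up filehandle budget you would tune for your system, and the file names
just mirror the example above. Each batch costs one extra full read of the
input, as the comment says.

#!/usr/bin/perl
# transpose-batched.pl - sketch of the N-columns-per-pass variant
use strict;

my ( $infile, $outfile, $tmp, $N )
    = ( "data.txt", "data-transpose.txt", "temp", 200 );

# one cheap read of the first line to count columns
open IN, $infile or die "Can't read $infile $!\n";
my $line = <IN>;
close IN;
my $num_cols = () = split ' ', $line;   # number of columns
$num_cols--;                            # index of the last column

# re-read the whole input once per batch of $N columns
for ( my $base = 0; $base <= $num_cols; $base += $N ) {
    my $last = $base + $N - 1;
    $last = $num_cols if $last > $num_cols;

    open IN, $infile or die "Can't read $infile $!\n";
    my @fhs;
    open $fhs[$_ - $base], ">$tmp$_.txt"
        or die "Can't create temp file $tmp$_ $!\n" for $base .. $last;
    while ( <IN> ) {
        chomp;
        my @data = split ' ';
        print {$fhs[$_ - $base]} $data[$_], "\t" for $base .. $last;
    }
    close IN;
    close $_ for @fhs;
}

# stitch the temp files together exactly as in transpose90()
open OUT, ">$outfile" or die "Can't write $outfile $!\n";
for ( 0 .. $num_cols ) {
    open IN, "$tmp$_.txt" or die "Can't read temp file $tmp$_ $!\n";
    print OUT scalar(<IN>), "\n";
    close IN;
    unlink "$tmp$_.txt";
}
close OUT;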
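
And the tied-hash alternative from the header comment could look something
like the sketch below, again just an illustration rather than part of the
original. It uses DB_File (chosen because Berkeley DB values can grow
arbitrarily large, unlike SDBM's per-record size limit) with the column
index as the key, so each value accumulates one output row.

#!/usr/bin/perl
# transpose-dbm.pl - sketch of the tied-hash variant, assuming DB_File
# is installed; the keys act as the pseudo file handles.
use strict;
use Fcntl;
use DB_File;

my ( $infile, $outfile, $dbfile )
    = ( "data.txt", "data-transpose.txt", "temp.db" );

tie my %col, 'DB_File', $dbfile, O_RDWR|O_CREAT, 0644, $DB_HASH
    or die "Can't tie $dbfile $!\n";

open IN, $infile or die "Can't read $infile $!\n";
while ( <IN> ) {
    chomp;
    my @data = split ' ';
    # append each field to the row being built for its column
    $col{$_} .= $data[$_] . "\t" for 0 .. $#data;
}
close IN;

open OUT, ">$outfile" or die "Can't write $outfile $!\n";
print OUT $col{$_}, "\n" for sort { $a <=> $b } keys %col;
close OUT;

untie %col;
unlink $dbfile;

Note that every .= on the tied hash goes through a FETCH/STORE round trip,
so this trades the file-descriptor limit for per-field overhead; the same
free-disk-space requirement applies.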