Though I have often needed a file-splitting utility, mostly because of upload quotas, I have rarely seen such a simple feature in archiving programs (outside the UNIX environment). This simple script splits a file into pieces and puts them back together:
use strict;
use Getopt::Std;
use File::Basename;

getopts('scf:p:');
our ($opt_s, $opt_c, $opt_f, $opt_p);

my $file = $opt_f;
my $size = $opt_p || 1;   # piece size in MB, defaults to 1
my $len  = 1024;          # read the input in 1024-byte blocks
my $c    = 1;             # number of the piece currently being written
my ($buf, $counter);

# we need either the original file (to split) or its first piece (to combine)
-f $file or -f "$file.1" or usage();
if ($opt_s) {
    $size = 1024 * $size;   # blocks per piece: N MB = N * 1024 blocks of 1024 bytes
    open IN,  "< $file"    or die "cannot open_r $file: $!";
    open OUT, "> $file.$c" or die "cannot open_w $file.$c: $!";
    binmode IN;
    binmode OUT;
    while (read(IN, $buf, $len)) {
        $counter++;
        if ($counter > $size) {   # current piece is full, start the next one
            $counter = 1;
            $c++;
            close OUT;
            open OUT, "> $file.$c" or die "cannot open_w $file.$c: $!";
            binmode OUT;
        }
        print OUT $buf;
    }
    close OUT;
} elsif ($opt_c) {
    # find the pieces (file.1, file.2, ...) in the current directory
    my @files = grep { -f and /^\Q$file\E\.\d+$/ } glob '*';
    my $newfile = "splitter_$file";
    open OUT, "> $newfile" or die "cannot open_w $newfile: $!";
    binmode OUT;
    # keep only the numeric suffixes so the pieces sort numerically
    s/^\Q$file\E\.// for @files;
    for my $n (sort { $a <=> $b } @files) {
        open IN, "< $file.$n" or die "cannot open_r $file.$n: $!";
        binmode IN;
        print OUT $buf while read(IN, $buf, $len);
        close IN;
    }
    close OUT;
} else {
    usage();
}
sub usage {
    my $pro = basename($0);
    print <<SQ;
$pro (-s|-c) [-p piece_size] -f filename
    -s  split a file into pieces
    -c  collect a file from pieces
    -p  piece size in MB (defaults to 1)
    -f  file to be processed
SQ
    exit 1;
}
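Assuming the script is saved as splitter.pl (the name and the example file are just for illustration), splitting a large archive into 5 MB pieces and reassembling it would look roughly like this:

perl splitter.pl -s -p 5 -f backup.tar    # produces backup.tar.1, backup.tar.2, ...
perl splitter.pl -c -f backup.tar         # writes the pieces back into splitter_backup.tar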