#!/usr/bin/perl
use 5.014;
use strict;
use warnings;
use autodie;
use AnyEvent;
use AnyEvent::HTTP;
use FileHandle;
use File::stat;
my $multiChunkSize=4194304; # Maximum size of a single block (in bytes)
my $multiLimit=6; # Maximum number of parallel HTTP requests
my ($wholeChunks,$chunkRemainder,$runID,$condvar,@offsets,@blocklist);
my $activeCount=0;
my $debug=1;
my $localfile="test";
# Byte size
sub byteSize {
    use bytes;
    my ($inval) = @_;
    return length($inval);
}
# Quotient remainder calculator
sub qrem {
    use integer;
    my ($dividend, $divisor) = @_;
    my $quotient  = $dividend / $divisor;
    my $remainder = $dividend % $divisor;
    return ($quotient, $remainder);
}
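# For example, qrem(10_485_760, 4_194_304) returns (2, 2_097_152):
# two whole 4 MiB chunks with a 2 MiB tail left over.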
# Get file size
sub fileSz {
    my ($file) = @_;
    my $stat = stat($file);
    return $stat->size;
}
# Get data
sub readData {
    my ($file, $length, $offset) = @_;
    my $fh = FileHandle->new;
    my $data;
    if ($debug) { say "Reading ${file} offset ${offset} for length ${length}"; }
    $fh->open($file, '<') or die "Cannot open ${file}: $!";
    binmode($fh);
    # read()'s fourth argument is an offset into $data, not into the file,
    # so seek to the requested file position before reading.
    seek($fh, $offset, 0);
    read($fh, $data, $length);
    if ($debug) { say "readData read " . byteSize($data); }
    $fh->close;
    return $data;
}
# Process
sub doProcess {
    return if $activeCount >= $multiLimit;
    my $offset = shift @offsets;
    return if !defined $offset;
    $activeCount++;
    if ($debug) { say "Active:${activeCount}, Offset:${offset}"; }
    $condvar->begin;
    # Read a full chunk; $multiChunkSize-1 would drop one byte per chunk,
    # since the offsets advance by $multiChunkSize.
    my $contentLength = $multiChunkSize;
    my $content = readData($localfile, $contentLength, $offset);
    # The HTTP hand-off of $content belongs here (see the sketch after this
    # sub). Until then, balance the condvar so recv() can return, free the
    # slot, and start the next chunk.
    $activeCount--;
    $condvar->end;
    doProcess();
}
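# --- Sketch only, not from the original script: one way the chunk could be
# --- handed to AnyEvent::HTTP. The URL and the Content-Range header are
# --- assumptions; if doProcess() called this, the callback below would take
# --- over the bookkeeping that doProcess() currently does inline.
sub sendChunk {
    my ($content, $offset) = @_;
    my $url = 'http://example.com/upload';    # hypothetical endpoint
    http_post $url, $content,
        headers => {
            'Content-Range' => sprintf('bytes %d-%d/*', $offset, $offset + byteSize($content) - 1),
        },
        sub {
            my ($body, $hdr) = @_;
            if ($debug) { say "Chunk at offset ${offset} done: $hdr->{Status}"; }
            $activeCount--;
            $condvar->end;    # matches the $condvar->begin in doProcess()
            doProcess();      # pull the next offset, if any remain
        };
}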
# Populate offsets
sub populateOffsets {
    my ($count, $offsetSize) = @_;
    die "populateOffsets needs a chunk count and a chunk size"
        if !defined $count || !defined $offsetSize;
    my $offset = 0;
    my @offsets;
    for my $i (1..$count) {
        push @offsets, $offset;
        $offset = $offset + $offsetSize;
    }
    return @offsets;
}
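# For example, a 10 MiB file with 4 MiB chunks gives two whole chunks, so
# populateOffsets(2, 4194304) returns (0, 4194304).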
######### MAIN
# Calculate how many whole chunks the file holds, plus the leftover bytes
my @chunks = qrem(fileSz($localfile), $multiChunkSize);
$wholeChunks    = $chunks[0];
$chunkRemainder = $chunks[1];
# Note: $chunkRemainder is not used yet; the final partial chunk would still
# need its own offset and shorter length.
@offsets = populateOffsets($wholeChunks, $multiChunkSize);
$condvar = AnyEvent->condvar;
# DO IT: start up to $multiLimit workers in parallel
for (1..$multiLimit) {
    doProcess();
}
$condvar->recv;