#! perl -slw
use strict;
use threads ( stack_size => 4096 );
use threads::shared;
use LWP::Simple;
use Time::HiRes qw[ time sleep ];

our $T ||= 200; ## This can be changed via the -T=nnn command-line switch

my $url = '...'; ## your url here

## This shared variable counts
## the number of running threads
my $running :shared = 0;

## This records the start time
my $start = time;

## For 1 to $T (default 200)
for( 1 .. $T ) {
    ## start a new thread
    async(
        ## running this sub
        sub{
            ## Increment the running-threads count
            { lock $running; ++$running };

            ## Make every thread wait until all threads are running,
            ## so that the download requests all hit the server at the same time
            sleep 0.001 while $running < $T;

            ## The number (1..$T) is passed in as $_ below
            my $id = shift;

            ## Get $url and store it in a file with $id as part of the name
            getstore( $url, qq[c:/test/dl.t.$id] );

            ## This thread is finished, so decrement the count
            lock $running; --$running;
        },
        $_ ## $_ (1..$T) becomes $id inside

    ## Detach means the threads go away as soon as they are done,
    ## rather than hanging around consuming resources, waiting to return
    ## a value to a join() that we have no interest in.
    )->detach;
}

## Now the main thread just sleeps until all the download threads have finished
sleep 1 while $running;

## And report how long the whole thing took
printf "Took %.3f seconds\n", time() - $start;
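A quick usage note (the script name dl.pl here is just an example, not from the code above): because the shebang line uses perl's -s switch, the default of 200 threads can be overridden on the command line with -T=nnn, for instance

    perl dl.pl -T=50

which, assuming you have filled in $url and the c:/test/ directory exists, fetches the url 50 times in parallel into c:/test/dl.t.1 through c:/test/dl.t.50 and prints the elapsed time.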