#!/usr/bin/perl
# Fetch a list of sites in parallel (up to 15 forked workers) and save each
# page body into its own temp file under a shared temp directory.
use strict;
use warnings;
use Parallel::ForkManager;
use HTTP::GHTTP;
use Time::HiRes qw[ time ];
use File::Temp qw(tempdir tempfile);
use File::Path;

my $start    = time;
my $pm       = Parallel::ForkManager->new(15);   # was indirect "new Class(...)" syntax
my $temp_dir = tempdir();

# BUG FIX: the original loop was `map { chomp; $_ }` with no <DATA> handle,
# so it iterated over an empty list. The grep also skips blank lines,
# comment/separator lines, and anything containing whitespace so that the
# transcript text kept below __DATA__ is not mistaken for a hostname.
for my $link (grep { length && !/^#/ && !/\s/ } map { chomp; $_ } <DATA>) {
    $pm->start and next;    # parent moves to the next link; child falls through

    my $getter = HTTP::GHTTP->new;
    $getter->set_uri("http://$link/");
    $getter->process_request;
    my $page = $getter->get_body;

    # BUG FIX: forked children inherit the parent's PRNG state, so every
    # child generated the SAME candidate filenames for "sappyXXXXXXXX" and
    # File::Temp died with "Have exceeded the maximum number of attempts
    # (10)" once the O_EXCL retries were exhausted (see transcript below).
    # Embedding the child PID ($$) in the template makes each child's
    # filename namespace disjoint.
    my $fh = File::Temp->new(
        TEMPLATE => "sappy" . $$ . "XXXXXXXX",
        DIR      => $temp_dir,
        UNLINK   => 0,
    ) or die "Could not make tempfile: $!";
    print {$fh} $page or die "Could not print to tempfile: $!";
    close $fh         or die "Could not close tempfile: $!";

    print "$link downloaded.\n";
    $pm->finish;
}
$pm->wait_all_children;

# rmtree is deliberately disabled so the downloaded pages survive the run;
# BUG FIX: the old message claimed the dir was removed, which was untrue.
#rmtree([$temp_dir]);
print "Temp dir '$temp_dir' kept (rmtree disabled)\n";
print 'Done in: ', time - $start, ' seconds.';

__DATA__
www.google.com
www.yahoo.com
www.amazon.com
www.ebay.com
www.perlmonks.com
news.yahoo.com
news.google.com
www.msn.com
www.slashdot.org
www.indymedia.org
www.sfgate.com
www.nytimes.com
www.cnn.com
####
blah@blah [534] perl -wT /home/sappy/dev/pfork.pl
www.amazon.com downloaded.
www.yahoo.com downloaded.
news.google.com downloaded.
www.google.com downloaded.
news.yahoo.com downloaded.
www.slashdot.org downloaded.
www.indymedia.org downloaded.
www.ebay.com downloaded.
www.cnn.com downloaded.
www.sfgate.com downloaded.
Error in tempfile() using /tmp/1yUjjuyGJn/sappyXXXXXXXX: Have exceeded the maximum number of attempts (10) to open temp file/dir at /home/sappy/dev/pfork.pl line 19
Error in tempfile() using /tmp/1yUjjuyGJn/sappyXXXXXXXX: Have exceeded the maximum number of attempts (10) to open temp file/dir at /home/sappy/dev/pfork.pl line 19
####
my $fh = File::Temp->new(TEMPLATE => "sappy" . $$ . "XXXXXXXX", DIR => $temp_dir, UNLINK => 0) or die "Could not make tempfile: $!";