use strict;
use warnings;
use threads;
use Thread::Queue;

# Pipeline: ReadURLS -> DownloadContent (xN) -> ParseContent (xN) -> WriteOut,
# with Thread::Queue objects as the hand-off points between stages.
my $nr_workers = 5;                    # side-by-side downloader/parser pairs; better taken as an argument
my $urlfile    = "url_planets.txt";    # input URL list (arguments would be nicer, not the current point)
my $outfile    = "planet_names.txt";   # destination for the parsed output

my $URLQueue     = Thread::Queue->new; # URLs waiting to be fetched
my $ContentQueue = Thread::Queue->new; # raw page content waiting to be parsed
my $ParsedQueue  = Thread::Queue->new; # parsed results waiting to be written

my @threadObjs;

# Reading thread; keep a reference to it in @threadObjs so it can be joined later.
# NOTE: pass a CODE ref (\&sub). The original bare &sub would CALL the sub
# immediately (with the caller's @_) and hand its *return value* to create(),
# instead of starting a thread that runs the sub.
push @threadObjs, threads->create( \&ReadURLS, $URLQueue, $urlfile );

# Set up the workers; any number of them can manipulate the shared queues.
for ( 1 .. $nr_workers ) {
    push @threadObjs, threads->create( \&DownloadContent, $ContentQueue, $URLQueue );
    push @threadObjs, threads->create( \&ParseContent, $ParsedQueue, $ContentQueue, qr!Rotations(.*)! );
}

push @threadObjs, threads->create( \&WriteOut, $ParsedQueue, $outfile );

# Join every child thread so perl can clean up after them, and so the main
# thread doesn't exit before they finish, causing an abrupt termination.
foreach my $thr (@threadObjs) {
    $thr->join();    # join() can return a value, but checking it adds overhead — only if you really need to
}

# At this point, barring some horrible catastrophe, $outfile holds the desired output.