Seeing you're having trouble with the forking, you could try this and see how it fares. I've rather over-commented the code. HTH.
#! perl -slw
use strict;
use warnings;
use threads qw[ yield ];
use threads::shared;    ## Required for the :shared attribute below.
use Thread::Queue;

## The command to run (DIR for testing).
my $COMMAND = 'dir /s';

## FIFOs in and out.
my $Qwork    = Thread::Queue->new;
my $Qresults = Thread::Queue->new;

## Track our workers (shared across threads; increments/decrements are
## NOTE(review): not atomic without lock() -- adequate here, confirm if
## the pool size grows).
my $workers : shared = 0;

## Worker thread body: pull work items off $Qwork, run $COMMAND on each,
## and push every line of merged stdout/stderr onto $Qresults.
sub worker {
    $workers++;                 ## Count 'em in.
    threads->self->detach;      ## No return value will be joined.

    ## Wait until we have something to do.
    yield until $Qwork->pending;

    ## Whilst there is work...
    while ( $Qwork->pending ) {
        ## ...grab some.
        my $task = $Qwork->dequeue;

        ## Run the command and capture stdout+stderr.
        ## NOTE(review): 2-arg piped open interpolates $task into a shell
        ## command line -- safe only for trusted work items like these
        ## drive letters.
        open my $in, "$COMMAND $task 2>&1 |"
            or warn "command $task failed: $!";

        ## Post the output back to the main thread, one line at a time.
        ## Prefix with the work item if it must be segregated.
        $Qresults->enqueue( $_ ) while <$in>;
        close $in;
    }
    $workers--;                 ## ...and count 'em out again.
}

## Start a pool of workers.
threads->new( \&worker ) for 1 .. 3;

## Give 'em something to do (drives to dir).
$Qwork->enqueue( $_ ) for qw[ D: P: Q: T: U: V: W: Z: ];

## Wait for the first result to arrive.
yield until $Qresults->pending;

## While workers are still running...
while ( $workers ) {
    ## Get a result if one is available. (Declared separately: `my $x = ...
    ## if COND;` is documented undefined behavior in perlsyn.)
    my $result;
    $result = $Qresults->dequeue if $Qresults->pending;

    ## Emit it. Use an explicit '%s' format: command output may contain
    ## '%', which must not be interpreted as printf directives. printf
    ## (unlike print) also ignores the $\ set by -l, so lines that already
    ## end in "\n" are not doubled.
    printf '%s', $result if defined $result;

    ## Give up the timeslice if workers are running
    ## but we have no results yet.
    yield while $workers and not $Qresults->pending;
}

## Drain anything the last worker queued after our final dequeue,
## so no output is silently dropped.
printf '%s', $Qresults->dequeue while $Qresults->pending;

## All done.
In reply to Re: Parallel tasks
by BrowserUk
in thread Parallel tasks
by john.goor
| For: | Use: |
| `&` | `&amp;` |
| `<` | `&lt;` |
| `>` | `&gt;` |
| `[` | `&#91;` |
| `]` | `&#93;` |