Here is what I would do: partition the URL list up front into separate Storable files, so when you fork you're only sharing a single filename with each child; later the parent process unifies the results of the child processes. Only the parent partitions the jobs, because only the parent spawns children.
#!/usr/bin/perl --
##
##
##
## perltidy -olq -csc -csci=3 -cscl="sub : BEGIN END " -otr -opr -ce -nibc -i=4 -pt=0 "-nsak=*"
## perltidy -olq -csc -csci=10 -cscl="sub : BEGIN END if " -otr -opr -ce -nibc -i=4 -pt=0 "-nsak=*"
## perltidy -olq -csc -csci=10 -cscl="sub : BEGIN END if while " -otr -opr -ce -nibc -i=4 -pt=0 "-nsak=*"
use strict;
use warnings;
use Data::Dump qw/ dd /;
use Storable qw/ lock_store lock_retrieve /;
Main( @ARGV );
exit( 0 );
sub Main {
    my @files = StorePartitionUrls( GetInitialUniqueUrls() );
    ForkThisStuff( @files );
    UnifyChildResults( 'Ohmy-unique-hostname-urls-storable', @files );
} ## end sub Main
sub GetInitialUniqueUrls {
    my @urls;
    ...;    ## however you actually collect the initial unique urls
    return \@urls;
} ## end sub GetInitialUniqueUrls
sub ForkThisStuff {    ## spawn kids with one file each, wait, whatever
    my @files = @_;
    for my $file ( @files ) {
        ## something forking here
        EachChildGetsItsOwn( $file );
    }
} ## end sub ForkThisStuff
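
## Just a sketch of one way the forking could go, not code from the original;
## EachChildGetsItsOwn, DoSomethingWithUrls and the "$file-results" naming are
## names I made up: each child loads its own partition, does the work, and
## writes its own results file, so parent and child only ever share a filename.
sub EachChildGetsItsOwn {
    my( $file ) = @_;
    defined( my $pid = fork() ) or die "fork failed: $!";
    if( $pid ) {
        waitpid( $pid, 0 );    ## parent waits here; could also collect pids and wait for all later
        return;
    }
    ## child: load its partition, do the work, store its own results, exit
    my $urls    = lock_retrieve( $file );
    my %results = map { $_ => DoSomethingWithUrls( $_ ) } @$urls;
    lock_store \%results, "$file-results";
    exit( 0 );    ## a child must never return into the parent's loop
} ## end sub EachChildGetsItsOwn

sub DoSomethingWithUrls {
    my( $url ) = @_;
    ...;    ## fetch/parse/whatever you actually do with a url
} ## end sub DoSomethingWithUrls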
sub StorePartitionUrls {
    my( $urls, $partition, $fnamet ) = @_;
    $partition ||= 100;
    $fnamet    ||= 'Ohmy-candidate-urls-%d-%d-storable';
    my @files;
    while( @$urls ){
        my @hundred = splice @$urls, 0, $partition;
        #~ my $file = "Ohmy-".int( @$urls ).'-'.int( @hundred ).'-storable';
        my $file = sprintf $fnamet, int( @$urls ), int( @hundred );
        lock_store \@hundred, $file;
        push @files, $file;
    }
    return @files;
} ## end sub StorePartitionUrls
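
## Also just a sketch, assuming each child wrote "$file-results" as in the
## EachChildGetsItsOwn sketch above; the parent merges them into one file.
sub UnifyChildResults {
    my( $unified, @files ) = @_;
    my %all;
    for my $file ( @files ){
        my $results = lock_retrieve( "$file-results" );
        %all = ( %all, %$results );    ## last child in wins on duplicate urls
    }
    lock_store \%all, $unified;
    return \%all;
} ## end sub UnifyChildResults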
__END__
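EachChildGetsItsOwn, DoSomethingWithUrls, UnifyChildResults and the "$file-results" naming are my guesses at how to fill in the blanks, so swap in whatever your real work sub and naming scheme look like. The nice part of doing it this way is that a child never has to talk back to its parent over a pipe or shared memory; it just writes its own results file and exits, and lock_store/lock_retrieve take care of the file locking.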