#! perl -slw
## Parallel log compressor: $WORKERS threads gzip *.log files appearing in
## $LOGDIR, removing each original once its .gz copy is written.
use strict;
use threads;
use Thread::Queue;

my $LOGDIR  = "./logs";
my $WORKERS = 4;

## The basis of the communications between the main thread and the workers.
my $Qwork = Thread::Queue->new;

## Worker body: dequeue filenames, gzip each one, delete the original.
## Terminates when it dequeues the literal sentinel string 'DONE'.
## Minimal error handling for posting: warn and skip the file.
sub compress_log {
    require Compress::Zlib;    # FIX: was mis-cased as 'Compress::ZLib'

    ## FIX: string comparison needs 'ne', not '!='. Under '!=' every
    ## non-numeric filename numified to 0 == 'DONE' (also 0), so the
    ## loop exited on the first real work item.
    while ( ( my $workitem = $Qwork->dequeue ) ne 'DONE' ) {

        ## FIX: lexical filehandles -- the bareword handles IN/OUT were
        ## package globals shared (and clobbered) across worker threads.
        ## Input opened :raw too, so -s/size and bytes agree on Win32.
        open my $in, '<:raw', $workitem
            or warn "open $workitem: $!" and next;
        open my $out, '>:raw', "$workitem.gz"
            or warn "open $workitem.gz: $!" and next;

        ## Slurp the file, compress it and write it out.
        ## FIX: the original do-block assigned $/ (globally, to boot) but
        ## never read the handle, so memGzip compressed a stringified
        ## scalar ref instead of the file contents.
        my $data = do { local $/; <$in> };
        print {$out} Compress::Zlib::memGzip($data);

        ## Buffered write errors surface at close -- keep the original
        ## if the .gz could not be written completely.
        close $out or warn "close $workitem.gz: $!" and next;
        close $in;
        unlink $workitem or warn "unlink $workitem: $!";
    }
    return;
}

## Set up a method of terminating the process cleanly.
## The main loop below will terminate on ^C/^Break (on win32).
## Set appropriate SIG values for your OS / usage.
my $monitoring = 1;
$SIG{INT} = $SIG{QUIT} = sub { $monitoring = 0; };

## Start the compressor threads.
my @workers = map { threads->new( \&compress_log ) } 1 .. $WORKERS;

## Monitor the directory for new .log files.
my %seen;    ## Track which files we have already queued.
while ($monitoring) {
    ## Grab the latest logs (excluding those we've already queued).
    my @newfiles = grep { !defined $seen{$_}++ } glob("$LOGDIR/*.log");

    ## And queue them for compression (skip empty enqueue calls).
    $Qwork->enqueue(@newfiles) if @newfiles;

    ## Sleep a while before looking again.
    sleep 10;
}

## Wait for existing work items to be processed here.
## The timeout chosen appropriate to your requirements.
my $timeout = 60;
sleep 1 while --$timeout and $Qwork->pending;

## Empty any remaining items from the work queue -- do something with them?
$Qwork->dequeue while $Qwork->pending;

## Post 'DONE' messages, one per worker, so each thread's loop ends.
$Qwork->enqueue( ('DONE') x $WORKERS );

## And wait for them to complete.
$_->join for @workers;

## Bye.
exit;