Can any of the Honorable and Wise Monks who dwell within these Gates please help me track down the memory leak in my script? Basically, it is meant to be a long-running (read: infinitely looping) application that watches a remote directory and transfers files when new ones appear. Everything works perfectly for about an hour, until it dies with an "Out of memory!" message. I have used Devel::Peek to try to track the leak down, but to no avail so far. I have included use strict and fixed variable scopes (at least insofar as I get no warnings at startup). I also tried putting most of the script into a subroutine (sub Prog{} below) in the hope that Perl would free its memory at the end of each iteration of the loop. The part that has me most baffled, though, is that I have a VERY similar Perl script that does the same thing with different data, and it has been running perfectly for about three weeks nonstop. I apologize if the script below is bulky; I really did try to remove the unnecessary stuff.
#!/usr/bin/perl
use warnings;
use strict;

use File::Copy;
use File::Basename;
use List::Compare;
use Net::FTP;
use Net::Netrc;

my $ANCHOR = $ENV{ANCHOR};

# Package globals shared between the main loop and the FTP subs.
our @globals;
our @ProdRec;
our @rmtDirList;
our @matches;
our @filesToMove;
our @filesToRetrv;
our $host;
our $userAcct;
our $ftpFailDir;
our $fileToGet;
our $remoteDir;
our $finalDir;

# Read and eval the config file, which populates @globals and @ProdRec.
open CONFIG, "$ANCHOR/apps/smops/config/ConfigAscatLvl2.txt"
    || print "Can't open config file\n";
my $config = join " ", <CONFIG>;
close CONFIG;
eval $config;
print "Couldn't evaluate the config file: $@\n" if $@;

for (;;) {
    &Prog;
}

sub Prog {
    my $prodNum = $globals[2]{'numOfProducts'};
    my $dirNum  = $globals[2]{'numOfGFTDirs'};
    for ( my $i = 0; $i < $dirNum; $i++ ) {
        my $gftDirName = $ProdRec[$i]{'remoteDir'};
    }
    my $loopSleep    = $globals[2]{'sleepTime'};
    my $resultsFound = 0;
    my $workingDir   = $globals[2]{'workingDir'};

    opendir( DIR1, $workingDir )
        || print "Cannot open working directory: $workingDir\n";
    my @wrkDirList = readdir(DIR1);

    # Collect the listings of all remote directories.
    for ( my $i = 0; $i < $dirNum; $i++ ) {
        my $remoteDir = $ProdRec[$i]{'remoteDir'};
        our $userAcct = $ProdRec[$i]{'userAcct'};
        our $host     = $ProdRec[$i]{'remoteServ'};
        my @currentDirList = &ftpDirList;
        push( @rmtDirList, @currentDirList );
    }

    # Files that exist remotely but not yet in the working directory.
    my $lc = List::Compare->new( \@rmtDirList, \@wrkDirList );
    my @onlyInRmtDir = $lc->get_unique;

    for ( my $i = 0; $i < $prodNum; $i++ ) {
        my $host       = $ProdRec[$i]{'remoteServ'};
        my $userAcct   = $ProdRec[$i]{'userAcct'};
        my $finalDir   = $ProdRec[$i]{'finalDir'};
        my $remoteDir  = $ProdRec[$i]{'remoteDir'};
        my $ftpFailDir = $ProdRec[$i]{'ftpFailDir'};
        if ( @onlyInRmtDir > 0 ) {
            my $grepString = $ProdRec[$i]{'grepString'};
            $resultsFound = 1;
            my @tempMatches = grep { /$grepString/ } @onlyInRmtDir;
            push( @matches, @tempMatches );
        }
        ftpFileGet();
        @matches = ();
        foreach my $getFile (@filesToMove) {
            copy( "$workingDir/$getFile", "$finalDir/$getFile" );
        }
        @filesToRetrv = ();
        @filesToMove  = ();
    }
    cleanUp();
    sleep $loopSleep;
}

sub ftpDirList {
    my $select = Net::Netrc->lookup( $host, $userAcct );
    my $pass   = $select->password();
    my $ftp    = Net::FTP->new($host);
    $ftp->login( $userAcct, $pass );
    $ftp->pasv;
    $ftp->binary;
    $ftp->cwd("$remoteDir");
    my @currentDirList = $ftp->ls();
    $ftp->quit();
    return @currentDirList;
}

sub ftpFileGet {
    my $workingDir = $globals[2]{'workingDir'};
    my $select     = Net::Netrc->lookup( $host, $userAcct );
    my $pass       = $select->password();
    my $ftp        = Net::FTP->new($host);
    $ftp->login( $userAcct, $pass );
    $ftp->pasv;
    $ftp->binary;
    chdir($workingDir);
    foreach my $fileToGet (@matches) {
        $ftp->cwd("$remoteDir");
        my $remoteFileSize = $ftp->size($fileToGet);
        my $localFileName  = "$fileToGet" . "$globals[1]{'transfer'}";
        my $ftpReturnVar   = $ftp->get( $fileToGet, $localFileName );
        next if !defined $ftpReturnVar;
        my $localFileSize = ( stat "$workingDir/$localFileName" )[7];
        if ( $remoteFileSize == $localFileSize ) {
            push( @filesToRetrv, $ftpReturnVar );
        }
    }
    $ftp->quit();
    return @filesToMove;
}

sub cleanUp {
    my $workingDir = $globals[2]{'workingDir'};
    opendir( DIR1, $workingDir )
        || print "Can't open $workingDir for Cleanup\n";
    while ( my $file = readdir DIR1 ) {
        next if -d "$workingDir/$file";
        my $TODAY  = time;
        my $hrtime = $globals[2]{'maxAge'};
        my $mtime  = ( stat "$workingDir/$file" )[9];
        if ( $TODAY - $hrtime > $mtime ) {
            unlink $file;
        }
    }
}
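In case it helps narrow things down, this is the sort of instrumentation I have been thinking of dropping in to see which of the package-level arrays keeps growing from one pass of the loop to the next. It is only a sketch: it assumes Devel::Size (and its total_size function) is installed, and report_sizes() is a made-up helper name, not part of the script above.

# Sketch only: print the element count and memory footprint of each
# package-level array once per pass, so any unbounded growth shows up.
# Assumes Devel::Size is available; report_sizes() is a hypothetical
# helper, not part of the script above.
use Devel::Size qw(total_size);

sub report_sizes {
    # Same package globals declared at the top of the script.
    our ( @rmtDirList, @matches, @filesToMove, @filesToRetrv );
    for my $pair (
        [ '@rmtDirList'   => \@rmtDirList   ],
        [ '@matches'      => \@matches      ],
        [ '@filesToMove'  => \@filesToMove  ],
        [ '@filesToRetrv' => \@filesToRetrv ],
    ) {
        my ( $name, $ref ) = @$pair;
        printf "%-14s %6d elements  %10d bytes\n",
            $name, scalar(@$ref), total_size($ref);
    }
}

# The main loop would then become:
# for (;;) { &Prog; report_sizes(); }

If one of those numbers climbs steadily over the hour or so that the script survives, that would at least tell me which structure to stare at. Any wisdom would be much appreciated.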