# this might be a memory hog for large files

# read in the items to process
my @items;
{
    local $/ = '***** SQL Statement *****';
    open( my $input, '<', $input_file_name )
        or die("Can't read from input: $!");
    @items = <$input>;
    close $input;
}

# process each item
foreach my $item (@items) {
    my ( $sql, $other ) =
        map { trim($_) } split( /\Q***** Bind Variables *****\E/, $item );
    next unless defined $other;    # skip any chunk with no bind-variables section
    my ($bindvars) = split( m/\n\n/, $other );
    get_file($sql)->print( "\n\n", $bindvars );
}

# clean up
close_files();

# get rid of leading/trailing whitespace.
sub trim {
    my $string = shift;
    $string =~ s/^\s+//;
    $string =~ s/\s+$//;
    return $string;
}

{
    use IO::File;

    my $i     = 0;
    my %files = ();

    # return the file handle associated with a given sql statement.
    sub get_file {
        my $sql = shift;

        if ( defined( $files{$sql} ) ) {
            return $files{$sql};
        }

        # we don't already know about it.
        my $file = IO::File->new( "SQL$i.txt", '>' );
        if ( !defined($file) ) {
            die "Error opening output file: $!";
        }
        $i++;

        $files{$sql} = $file;
        $file->print($sql);    # print the sql statement as a header.
        return $file;
    }

    # close all of the open file handles
    sub close_files {
        foreach my $file ( values %files ) {
            $file->close();
        }
    }
}
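
# A minimal, self-contained sketch (not part of the script above) showing how the
# custom record separator slices the input. The sample text here is hypothetical,
# but it follows the '***** SQL Statement *****' / '***** Bind Variables *****'
# layout the script expects; note that $/ is kept as part of each record, so the
# first record is just whatever precedes (and includes) the first marker.

use strict;
use warnings;

# Hypothetical two-statement sample in the assumed layout.
my $sample = <<'END';
***** SQL Statement *****
SELECT * FROM orders WHERE id = :1
***** Bind Variables *****
:1 = 42

***** SQL Statement *****
SELECT * FROM orders WHERE id = :1
***** Bind Variables *****
:1 = 99
END

# Read the sample via an in-memory filehandle (Perl 5.8+).
open my $fh, '<', \$sample or die "Can't open in-memory file: $!";
{
    local $/ = '***** SQL Statement *****';
    my @records = <$fh>;
    # Prints 3: the bare leading marker plus one record per statement,
    # which is why the main loop skips items with no bind-variables part.
    print scalar(@records), " raw records read\n";
}
close $fh;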