Here's a subroutine that does what you want, and optionally trims the file as well. It assumes an average line length of 72 bytes and seeks back (number of lines × 72) bytes from the end of the file. If that doesn't yield enough lines, it seeks back another block and tries again.
This is quite efficient, especially when you only want a few lines from the end of a file — even for huge files. It only gets slow when you want thousands of lines.
###########################################################
# Return the last $lines_wanted lines of $logfile, reading backwards in
# blocks so huge files are never slurped whole. If $TRIM is true, the file
# is rewritten (via trim_file) to contain only those lines.
#
# Args:    $logfile      - path to the log file
#          $lines_wanted - number of trailing lines to return
#          $TRIM         - if true, trim the file down to those lines
# Returns: list of lines (fewer if the file is shorter than requested);
#          empty list if the file cannot be opened (an HTML-flavored
#          error is printed, matching the original CGI-style behavior).
sub read_log {
    my $logfile      = shift;
    my $lines_wanted = shift;
    my $TRIM         = shift;

    my $lines_found = 0;
    my $BLOCK       = $lines_wanted * 72;   # assume ~72 bytes per line
    my @lines       = ();
    my $GO_BACK     = $BLOCK;

    if ( open my $log, '<', $logfile ) {
        my $size = -s $logfile;
        # Keep backing up one block at a time until we have enough lines.
        while ( $lines_found < $lines_wanted ) {
            if ( $GO_BACK >= $size ) {
                # The next block would reach past the start of the file:
                # read the whole file and stop. (The original code's
                # failed seek left the handle at EOF and returned nothing
                # for short files — this fixes that.)
                seek $log, 0, 0;
                @lines       = <$log>;
                $lines_found = scalar @lines;
                last;
            }
            seek $log, -$GO_BACK, 2;   # back up ~requested lines from EOF
            <$log>;                     # discard the partial first line
            @lines       = <$log>;      # read the rest of the lines
            $lines_found = scalar @lines;
            $GO_BACK += $BLOCK;
        }
        close $log;

        # If we overshot, drop the excess lines from the front.
        my $diff = $lines_found - $lines_wanted;
        splice @lines, 0, $diff if $diff > 0;

        trim_file( $logfile, \@lines ) if $TRIM;
        return @lines;
    }
    else {
        print "Couldn't open $logfile: $!<BR>";
        return;
    }
}
###########################################################
# Takes a file name and array of lines, backs up the existing file
# and trims it.
sub trim_file {
    # Replace $file's contents with the lines in $lines (an arrayref),
    # first moving the existing file aside to "$file.bak".
    #
    # Args:  $file  - path of the file to rewrite
    #        $lines - arrayref of lines (with trailing newlines) to write
    # Dies:  on missing arguments, failed rename, or failed open/close.
    my $file  = shift || die "No filename passed to trim_file()\n";
    my $lines = shift || die "No lines passed to trim_file()\n";

    my $backup = "$file.bak";
    unlink $backup if -e $backup;
    # Check the rename: a silent failure here would let the open below
    # clobber the file without any backup having been made.
    rename $file, $backup
        or die "Couldn't back up $file to $backup: $!\n";

    open my $out, '>', $file or die "Couldn't create $file:$!\n";
    print {$out} @$lines;
    # Buffered write errors (e.g. disk full) only surface at close.
    close $out or die "Couldn't write $file:$!\n";
    return 1;
}
|