Novice here. I wrote a script that downloads and zips log files from a server. Sometimes the files can be up to 40MB+ and the script takes a real long time to add big files to the zip file. Is there any way to increase the buffer or any way to make the script add the files faster? I use Activestate Perl on Win 2000. Here is my code...
#!/usr/local/ActivePerl-5.6/bin/perl -w
use File::glob;
use File::Copy;
use Net::FTP;
use Archive::Zip;
$root = "/";
#Servers and login info
$host_disco = "server";
$user_disco = "user";
$password_disco = "password";
$dirlogs_disco = "/folder/folder";
$host_gw = "server";
$user_gw = "user";
$password_gw = "password";
$dirlogs_gw = "/folder/folder";
$host_roc = "server";
$user_roc = "user";
$password_roc = "password";
$dirlogs_roc = "/folder/folder";
$host_err = "server";
$user_err = "user";
$password_err = "password";
$dirlogs_err = "/folder/folder";
$date = "";
$logname = "";
#Call 4 subroutines that backup the logs. Pass them server and directo
+ry info for each environment(Schema). Zip up the logs.
print("Working...\n");
#get a date timestamp
$date = datestamp();
$logname = "$date.zip";
my $schema = "Disco";
#calls sub.'s and passes server info to login
backuplogs($host_disco, $user_disco, $password_disco, $dirlogs_disco,
+$schema, $logname);
$schema = "GW";
backuplogs($host_gw, $user_gw, $password_gw, $dirlogs_gw, $schema, $lo
+gname);
$schema = "ROC";
backuplogs($host_roc, $user_roc, $password_roc, $dirlogs_roc, $schema,
+ $logname);
$schema = "ERR";
backuplogs($host_err, $user_err, $password_err, $dirlogs_err, $schema,
+ $logname);
sub backuplogs
{
my ($host, $user, $pass, $dirlogs, $schema, $logname) = @_;
my @files = "";
my $file1 = "";
#connect to the server
my $ftp = Net::FTP->new($host) or die "Can't open $host: $@\n";
$ftp->login($user, $pass) or die "Couldn't login: @{[ $ftp->message ]}
+";
$ftp->ascii();
$ftp->cwd($root) or die "Couldn't cwd to $root: @{[ $ftp->message ]}\n
+";
$ftp->cwd($dirlogs) or die "Couldn't cwd to $dirlogs: @{[ $ftp->messag
+e ]}\n";
$, = "\n";
@files = $ftp->ls;
LINE: foreach $file1 (@files)
{
#if this file then skip it
if($file1 =~ /mdctxuapp54_cb.stderr/)
{
next LINE;
}
#do this for all files that are .stderr or .log
if($file1 !~ /.stderr/ || $file1 !~ /.log/)
{ $ftp->get($file1) || die "Can't get files from $dirlogs :@{[ $ft
+p->message ]}\n";
$ftp->delete($file1); #remove them from the server
}
}
$ftp->close();
#zip the files
my $file = "";
my $zip = Archive::Zip->new();
while($file = <C:/data/*.log>)
{
#add all log files to zip file
my $filename = $file;
$filename =~ s/C.*\///;
$zip->addFile( $filename );
$zip->writeToFileNamed( $logname );
$filename = "";
}
while($file = <C:/data/*.stderr>)
{
#add all stderr files to zip file
my $filename = $file;
$filename =~ s/C.*\///;
$zip->addFile( $filename );
$zip->writeToFileNamed( $logname );
$filename = "";
}
#copy zip file to respective folder out on sharedrive
copy($logname, "G:/some folder/$schema");
copy($logname, "C:/data/some folder/$schema");
#delete local files
unlink <*.log>;
unlink <*.stderr>;
unlink <*.zip>;
print("$schema has been backed up.\n");
}
#get date timestamp
sub datestamp
{
my ($Second, $Minute, $Hour, $Day, $Month, $Year, $WeekDay, $DayOf
+Year, $IsDST) = localtime(time);
my $RealMonth = $Month + 1;
my $FullYear = $Year + 1900;
my $AMorPM = "";
if($Day < 10)
{
$Day = "0" . $Day;
}
if($RealMonth < 10)
{
$RealMonth = "0" . $RealMonth;
}
my $date = "";
$date = "$RealMonth\_$Day\_$FullYear\_$Hour$Minute";
return $date;
}
Edit by castaway - added readmore tag