use Logs;
use Net::FTP;
use Time::Local;
use Time::Timezone;
use POSIX;
use File::Temp qw/ tempfile /;
use DBI;

open (IN, "/home/reports/ftp/WSB/test.txt") or die "Can't open input file: $!";
#open (IN, "gzip -dc /tmp/arc/0620.gz |");

# temp files that will be bulk-loaded into MySQL via LOAD DATA INFILE
($cntfh, $cntfn) = tempfile('WSB_count_XXXX', DIR => '/tmp');
($urlfh, $urlfn) = tempfile('WSB_urls_XXXX', DIR => '/tmp');
chmod 0666, $urlfn;
chmod 0666, $cntfn;

my $dbh = DBI->connect('dbi:mysql:reports', 'root', 'rep0rts',
                       {RaiseError => 1, PrintError => 0})
    or Die("load data", "WSB", "Couldn't connect to database" . $DBI::errstr);

my $url_str = "load data infile '" . $urlfn . "' into table tmpurl fields terminated by '|' (tstamp,url,count)";
my $cnt_str = "load data infile '" . $cntfn . "' into table tmpwsb fields terminated by '|' (tstamp,op100,op700,total)";
print "$url_str\n";
print "$cnt_str\n";

@lines = <IN>;
$lines = @lines;
print "lines: $lines\n";

# parse records and increment counters
foreach $line (@lines) {
    ($time, $opcode, $url) = parse($line);
    if ($opcode eq "100") {
        $tstamp{$time}++;
        $op100{$time}++;
        if (length $url > 0) {
            my $tmpkey = $url . "<::>" . $time;
            $urls{$tmpkey}++;
        }
    }
    elsif ($opcode eq "700") {
        $tstamp{$time}++;
        $op700{$time}++;
        if (length $url > 0) {
            my $tmpkey = $url . "<::>" . $time;
            $urls{$tmpkey}++;
        }
    }
}

@time = sort { $a <=> $b } keys %tstamp;
print "Done parsing: $time[0]: $time[$#time]\n";

# write per-second counts, filling in zeroes for seconds with no records
for ($ii = $time[0]; $ii <= $time[$#time]; $ii++) {
    $tmpTime = strftime "%Y-%m-%d %H:%M:%S", localtime($ii);
    $tmp100 = $op100{$ii} || 0;
    $tmp700 = $op700{$ii} || 0;
    $tmpTotal = $tstamp{$ii} || 0;
    print $cntfh "$tmpTime|$tmp100|$tmp700|$tmpTotal\n";
}
print "Done writing counts\n";

# write per-URL counts for loading into the database
foreach $key (sort keys %urls) {
    my ($url, $tstamp) = split(/<::>/, $key);
    $tmpTime = strftime "%Y-%m-%d %H:%M:%S", localtime($tstamp);
    print $urlfh "$tmpTime|$url|$urls{$key}\n";
}
print "Done writing urls\n";

# flush the temp files before MySQL reads them
close $cntfh;
close $urlfh;

$dbh->do($cnt_str);
print "Done writing count DB\n";
$dbh->do($url_str);
print "Done writing url DB\n";
$dbh->disconnect();
print "just done\n";