# Dry-run duplicate cleaner: walk a directory tree and report files that
# share a basename with another file but do not carry the extension we
# want to keep.
my $working_dir = $ARGV[0]; # starting directory
my $extension = $ARGV[1]; # extension to save
# Fail loudly instead of globbing an undef path / comparing against undef.
die "usage: $0 <directory> <extension>\n"
    unless defined $working_dir && defined $extension;
dedup($working_dir);
exit 0;
# dedup($path) -- recursively scan $path; report every file whose
# basename matches more than one directory entry, unless its extension
# is the one to keep (file-scoped $extension). Deliberately a dry run:
# the unlink stays commented out.
sub dedup {
    my $path = shift;
    # \Q...\E backslash-escapes glob metacharacters (spaces, [, {, *)
    # that may occur in the path, so File::Glob treats them literally.
    my @files = glob("\Q$path\E/*");
    print "Checking [$path] ...\n";
    foreach my $entry (@files) {
        if (-d $entry) {
            dedup($entry) if -x $entry; # -x on a dir: traversable
            next;   # a directory is never a removal candidate itself
        }
        # Split "name.ext"; a file with no extension cannot clash here.
        my ($base, $ext) = $entry =~ m/(.*)\.([^.]+)\z/ or next;
        my @matches = glob("\Q$base\E*");
        print "\tremoving: $entry\n" if @matches > 1 && $ext ne $extension;
        # unlink $entry if @matches > 1 && $ext ne $extension;
    }
}
####
# Transpose whitespace-separated columns read from the input stream:
# field i of input line j becomes element j of output row i.
my @data;
while (my $line = <>) {
    my @fields = split ' ', $line;
    my $col = $. - 1;                            # 0-based input line number
    $data[$_][$col] = $fields[$_] for 0 .. $#fields;
}
# Emit each transposed row; elements are printed back-to-back with no
# separator, followed by a newline.
for my $row (@data) {
    print @{$row};
    print "\n";
}
##
##
use POSIX ":sys_wait_h"; # WNOHANG for non-blocking waitpid below
# PIDs of forked children believed to be still running; the SIGCHLD
# handler deletes entries as children are reaped.
my %kids = ();
# Signal Handler for SIGCHLD
# Reap every child that has already exited (non-blocking) and drop its
# PID from %kids so the parent's wait loop can terminate.
sub sigchld_handler {
    # Lexical $pid: the original assigned an undeclared package global,
    # which would be a compile error under strict.
    my $pid;
    while (($pid = waitpid(-1, WNOHANG)) > 0) {
        delete $kids{$pid};
    }
    # Re-install the handler: on old SysV systems the disposition resets
    # to default after delivery; harmless elsewhere.
    $SIG{CHLD} = \&sigchld_handler;
}
$SIG{CHLD} = \&sigchld_handler;
# You can repeat this for as many secondary
# processes as you need to merge the old indexes
# NOTE(review): narrow race -- a child can exit and be reaped by
# sigchld_handler before the parent stores $kids{$pid} = 1, leaving a
# stale entry that makes the wait loop below spin forever. Blocking
# SIGCHLD around the fork (POSIX sigprocmask) would close it.
for (1..3) {
    if (my $pid = fork) {
        # Parent Process keep track of forked children
        $kids{$pid} = 1;
    }
    else {
        # Child process
        # NOTE(review): a failed fork (undef) also lands in this branch,
        # so the sole parent process would sleep and exit here; checking
        # "defined $pid" would distinguish failure from the child.
        # ... do merging of old indexes here
        sleep(3);
        exit 0; # MUST EXIT HERE
    }
}
# Parent idles until the handler has emptied %kids; sleep() is
# interrupted by SIGCHLD, so each reap re-checks promptly.
while (keys %kids > 0) {
    sleep(1); # wait for all children to finish
}
exit 0;
##
##
# Three ways to extract the unique elements of @items. Distinct target
# arrays: the original declared "my @uniq" three times in one scope,
# which warns ("masks earlier declaration") and keeps only the last.

# 1) Hash-as-set. Straightforward, but the sort costs O(n log n) on the
#    key list and replaces input order with lexical order.
my %is_seen = map { $_ => 1 } @items;
my @uniq_sorted = sort keys %is_seen;

# 2) List::MoreUtils::uniq() -- keeps first-seen order.
#    (A core equivalent, List::Util::uniq, ships with Perl 5.26+.)
use List::MoreUtils qw(uniq);
my @uniq_ordered = uniq(@items);

# 3) Hand-rolled equivalent of uniq(): keep an element only the first
#    time its counter is seen at zero; preserves input order.
my %seen_count;
my @uniq_manual = grep { $seen_count{$_}++ == 0 } @items;
##
##
# AC_CHECK_PM(MODULE)
#----------------------------------------------------------
# Verify that the Perl module MODULE can be loaded; abort configure
# with an error if it is not found in @INC.
AC_DEFUN([AC_CHECK_PM],[
AC_MSG_CHECKING([for module $1])
dnl Run perl directly so "if" tests its exit status. The original
dnl wrapped the command in backquotes, which made the shell test the
dnl command's (empty) output instead -- working only by accident.
if perl -M$1 -e '' >/dev/null 2>&1 ; then
AC_MSG_RESULT([yes])
else
AC_MSG_RESULT([no])
AC_MSG_ERROR([missing perl module $1, not in @INC])
fi;dnl
])