#!/usr/bin/perl
# Merge several tab-delimited data files (one probe per row, sample values in
# columns) into a single consolidated table on stdout, keyed by probe ID.
# Works in two passes through an on-disk sort file so memory use stays bounded
# regardless of input size:
#   pass 1: tag every data row with its source-file index and spool to disk;
#   pass 2: external-sort the spool, then sweep it, merging the now-adjacent
#           rows for each probe into one output line.
use strict;
use warnings;

my @fileHeaders;    # file index -> arrayref of that file's sample (column) names
my %usedHeaders;    # sample name -> count, only for files that had data rows
my $sortname = "sortfile.tmp";
# If you have more than one disk, consider putting the sort file on
# a disk different from most of your data.
# Beware of disk space.

# Pin collation to bytewise ("C") so the shell sort below groups identical
# probe keys adjacently regardless of the user's locale, matching Perl's
# default string sort used for @samples.
$ENV{LC_ALL} = 'C';

open my $sortfh, ">", $sortname or die "Cannot create $sortname: $!\n";

# First pass: write tagged data rows to the sort file.
foreach my $filename (@ARGV) {
    open my $gse, "<", $filename or die "Cannot open $filename: $!\n";

    my $headerline = <$gse>;
    defined $headerline or die "$filename is empty\n";
    chomp($headerline);
    $headerline =~ s/\r$//;    # tolerate DOS line endings on the header
    my @thisfileHeaders = split /\t/, $headerline;
    shift @thisfileHeaders;    # drop the probe-ID column label
    push @fileHeaders, \@thisfileHeaders;

    # Zero-padded so the tag is a fixed-width, text-sortable token.
    my $filekey = sprintf "%04d", $#fileHeaders;

    while (<$gse>) {
        y/\r//d;                            # strip any stray CRs
        s/^([^\t]*)/$1\t$filekey/;          # probe<TAB>filekey<TAB>values...
        print {$sortfh} $_ or die "print to $sortname failed: $!\n";
    }
    if ($. > 1) {    # file contributed at least one data row past the header
        $usedHeaders{$_}++ for @thisfileHeaders;
    }
    close $gse;
}
close $sortfh or die "close $sortname failed: $!\n";
# Yes, close can fail. Buffered I/O, for one thing.

# List-form system bypasses the shell and uses the real temp-file name
# (the original hardcoded "sortfile.tmp" here, ignoring $sortname).
system('sort', '-o', $sortname, $sortname) == 0 or die "Sort failed!\n";

# Second pass: consolidate data from multiple files.
my $prevkey;
open my $sorted, "<", $sortname or die "Cannot open $sortname: $!\n";

my @samples = sort keys %usedHeaders;
print "Probe\t" . join("\t", @samples) . "\n";

my %data;
while (my $ligne = <$sorted>) {
    chomp($ligne);
    my @t      = split(/\t/, $ligne);
    my $probe  = shift @t;
    my $filenum = shift @t;
    # defined() rather than truthiness: a probe named "0" or "" must not
    # trigger a flush on every single line.
    if (!defined($prevkey) || $probe ne $prevkey) {
        dumpdata();
        $prevkey = $probe;
    }
    # Hash slice: map this file's values onto its own sample names.
    @data{ @{ $fileHeaders[$filenum] } } = @t;
}
dumpdata();    # flush the final probe
close $sorted;

# Emit the accumulated row for $prevkey (if any), then reset every sample
# slot to "" so missing values print as empty fields with no undef warnings.
sub dumpdata {
    if (defined($prevkey)) {
        print "$prevkey\t", join("\t", @data{@samples}), "\n";
    }
    $data{$_} = "" for @samples;
}