thanks ronald 
for the suggestion, which looks better but is just as inefficient as my
original. i timed both versions twice, traversing 878 files in 1006
directories of 839.441066 MB total size; both took 280 secs each time
around, which suggests the traversal is disk-bound rather than limited
by the data structure.
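
one minimal way to time a run is with the core Time::HiRes module (a
sketch, not the exact harness i used; $start_dir is a placeholder for
whatever root you point it at):

use strict;
use warnings;
use Time::HiRes qw(gettimeofday tv_interval);

my $start_dir = '.';          # placeholder root directory
my $t0 = [gettimeofday];
check_folders($start_dir);    # the sub shown below
printf "traversal took %.1f secs\n", tv_interval($t0);
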
to clarify, this is how i get the filenames, with the original code
embedded (commented out):

use File::Basename;    # basename() comes from here

sub check_folders {
    my ($dir) = @_;

    # $delimiter, $level and %file_extensions are globals set elsewhere
    opendir(my $folder, $dir) or die "cannot open $dir: $!";
    my @subfiles = readdir($folder);
    closedir($folder);

    foreach my $file (sort @subfiles) {
        my $specfile = $dir . $delimiter . $file;
        $level++;    # loop counter (counts every entry visited)

        if (-f $specfile) {
            my $ext = basename($specfile);
            $ext = "none" unless ($ext =~ s/.*(\.[^.]+)$/$1/);

            push @{ $file_extensions{$ext} }, $specfile;

            ## original was:
            # $extensions{$ext} = $level;
            # push(@complete, $ext, $specfile);
        }
        if (-d $specfile && $file !~ /^\.{1,2}$/) {
            check_folders($specfile);    # recurse into subdirectories
        }
    }
}
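
fwiw, the core File::Find module does the same recursive walk; a sketch
like this (untested against my tree; $start_dir is again a placeholder)
would be the stock way to build the same %file_extensions:

use strict;
use warnings;
use File::Find;

my %file_extensions;
my $start_dir = '.';    # placeholder; point it at the real root

find(sub {
    return unless -f $_;               # plain files only
    my ($ext) = $_ =~ /(\.[^.]+)$/;    # trailing extension, if any
    $ext = "none" unless defined $ext;
    push @{ $file_extensions{$ext} }, $File::Find::name;
}, $start_dir);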

foreach my $extension (sort keys %file_extensions) {
    print $extension . "\n\n";
    foreach my $file (@{ $file_extensions{$extension} }) {
        print $file . "\n";
    }
    print "\n\n";
}

## original:
foreach my $key (sort keys %extensions) {
    print "$key\n\n";
    # rescans all of @complete for every key
    for (my $i = 0; $i <= $#complete; $i++) {
        if ($complete[$i] eq $key) {
            print $complete[$i+1] . "\n";
        }
    }
    print "\n\n";
}


> You would have to tie each inner hash as a Tie::IxHash as well.

i tried that inside a for loop, but it did not work; never mind, though.
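
for the archive, the pattern i'd expect from the Tie::IxHash docs is
something like this (an untested sketch; the keys are placeholders):

use Tie::IxHash;

tie my %outer, 'Tie::IxHash';           # outer hash keeps insertion order
for my $key (qw(first second third)) {  # placeholder keys
    tie my %inner, 'Tie::IxHash';       # each inner hash needs its own tie
    $outer{$key} = \%inner;             # store a ref to the tied hash
}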

thanks again though
allan
