Advertisement
h3xx

simplify_static_dir

Jul 24th, 2011
165
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Perl 9.85 KB | None | 0 0
  1. #!/usr/bin/perl -w
  2. use strict;
  3.  
  4. our $VERSION = '1.2.0';
  5.  
  6. =pod
  7.  
  8. =head1 NAME
  9.  
  10. simplify_static_dir - optimize directories for size by combining identical
  11. files
  12.  
  13. =head1 SYNOPSIS
  14.  
  15. B<simplify_static_dir> [I<OPTIONS>] [I<DIR>]...
  16.  
  17. =head1 DESCRIPTION
  18.  
  19. The more files this script can process at once, the better. It maintains an
  20. internal hash of files indexed by their SHA1 checksum. When it finds a
  21. collision it removes the file with the least amount of file system links, then
  22. creates a hard link to the other in its place. The larger the number of files
  23. scanned, the more likely it is that there will be collisions.
  24.  
  25. There are failsafes in place, though. If somehow two files' SHA1 checksums are
  26. identical, but the file sizes do not match, this program will error out (and
  27. you can go ahead and report the collision; it may be worth money).
  28.  
  29. There are other features built in as well. Following the logic that unique
  30. data, once created, has the attribute of being unique as of that point in time,
  31. this script will copy the timestamp of the older file onto the newer file just
  32. before it makes the hard link. Therefore, many times, simplified directories
  33. will have the appearance of being older than they actually are.
  34.  
  35. From the perspective of any program reading files from the simplified
  36. directories, the files will look B<*> and behave the same way as the initial
  37. state.
  38.  
  39. B<*> Except for having an increased number of hard links.
  40.  
  41. =head1 OPTIONS
  42.  
  43. =over
  44.  
  45. =item B<-v>
  46.  
  47. Verbose output.
  48.  
  49. =item B<-f>
  50.  
  51. Print a sum of the number of freed bytes.
  52.  
  53. =item B<-m> I<REGEX>
  54.  
  55. Only match file paths matching I<REGEX>.
  56.  
  57. =item B<-M> I<REGEX>
  58.  
  59. Exclude file paths matching I<REGEX>.
  60.  
  61. =item B<-s>
  62.  
  63. Generate symlinks only.
  64.  
  65. =item B<-S>
  66.  
  67. Do not generate ANY symlinks.
  68.  
  69. =item B<-z>
  70.  
  71. Include zero-length files in search. Normally they are ignored (you don't save
  72. diskspace by hard-linking empty files).
  73.  
  74. =item B<--help>
  75.  
  76. Output this help message and exit.
  77.  
  78. =item B<--version>
  79.  
  80. Output version information and exit.
  81.  
  82. =back
  83.  
  84. By default, scans the current directory. Files not able to be hard-linked are
  85. symlinked by default.
  86.  
  87. =head1 CHANGES
  88.  
  89. =over
  90.  
  91. =item 1.1.0
  92.  
  93. Outputs GNU-style messages (a la `rm -v', `ln -v', etc.).
  94.  
  95. =item 1.1.1
  96.  
  97. Added B<-m> and B<-M> options.
  98.  
  99. =item 1.1.2
  100.  
  101. Added B<-z> option. Now the default behavior is to not process empty files.
  102. Because what's the point of freeing up no space?
  103.  
  104. =item 1.2.0
  105.  
  106. Uses L<Digest::SHA> instead of L<Digest::MD5>. MD5 has been broken.
  107.  
  108. =back
  109.  
  110. =head1 COPYRIGHT
  111.  
  112. Copyright (C) 2010 Dan Church.
  113.  
  114. License GPLv3+: GNU GPL version 3 or later (L<http://gnu.org/licenses/gpl.html>).
  115.  
  116. This is free software: you are free to change and redistribute it.
  117.  
  118. There is NO WARRANTY, to the extent permitted by law.
  119.  
  120. =head1 AUTHOR
  121.  
  122. Written by Dan Church S<E<lt>amphetamachine@gmail.comE<gt>>
  123.  
  124. =cut
  125.  
  126. use File::Find      qw/ find /;
  127. #use Digest::MD5    qw//;
  128. use Digest::SHA     qw//;
  129. use Getopt::Std     qw/ getopts /;
  130. use Pod::Text       qw//;
  131.  
  132. sub HELP_MESSAGE {
  133. #   my $fh = shift;
  134. #   print $fh <<EOF
  135. #Usage: $0 [DIRS]
  136. #Simplify a directory by hard-linking identical files.
  137. #
  138. #  -v           Verbose output.
  139. #  -f           Print a sum of the number of freed bytes.
  140. #  -m REGEX     Only match file paths matching REGEX.
  141. #  -M REGEX     Exclude file paths matching REGEX.
  142. #  -s           Generate symlinks only.
  143. #  -S           Do not generate ANY symlinks.
  144. #  -z           Include zero-length files in search.
  145. #
  146. #By default, scans the current directory. Files not able to be hard-linked are
  147. #symlinked by default.
  148. #EOF
  149. #;
  150.     my ($fh, $pod) = (shift, Pod::Text->new);
  151.     $pod->parse_from_file($0, $fh);
  152.  
  153.     exit 0;
  154. }
  155.  
  156. my %opts = (
  157.     'v' => 0,
  158.     'f' => 0,
  159.     'm' => '',
  160.     'M' => '',
  161.     's' => 0,
  162.     'S' => 0,
  163.     'z' => 0,
  164. );
  165.  
  166. &getopts('vfm:M:sSz', \%opts);
  167.  
  168. my %files;
  169.  
  170. # correct relative paths
  171. # OR if no directories given, search the current directory
  172. push @ARGV, $ENV{'PWD'} unless map { s#^([^/])#$ENV{'PWD'}/$1# } @ARGV;
  173.  
  174. my $freed_bytes = 0;
  175.  
  176. &find(\&findexec, @ARGV);
  177.  
  178. sub findexec {
  179.     # limit to or exclude file patterns specified by `-m' or `-M',
  180.     # respectively
  181.  
  182.     # truth table
  183.     # -m matches    | -M is used & matches  | !return?
  184.     # 0     | 0         | 0
  185.     # 0     | 1         | 0
  186.     # 1     | 0         | 1
  187.     # 1     | 1         | 0
  188.     # note: m// will match everything
  189.     unless ($File::Find::name =~ m/$opts{'m'}/ and
  190.         !(length $opts{'M'} and $File::Find::name =~ m/$opts{'M'}/)) {
  191.  
  192.         print STDERR "Skipping path `$File::Find::name'\n"
  193.             if $opts{'v'};
  194.         return;
  195.     }
  196.  
  197.     # make sure the file exists and it's not a link
  198.     if (-f $File::Find::name && ! -l $File::Find::name) {
  199.         #my $ctx = Digest::MD5->new;
  200.         my $ctx = Digest::SHA->new;
  201.         open FILE, "<$File::Find::name";
  202.         $ctx->addfile(*FILE);
  203.  
  204.         # save the hex digest because reading the value from
  205.         # Digest::* destroys it
  206.         my $digest = $ctx->hexdigest;
  207.  
  208.         # organize results from lstat into hash
  209.         my $entry = {};
  210.         (@{$entry}{qw/ name dev ino mode nlink uid gid rdev size
  211.                 atime mtime ctime blksize blocks /})
  212.             = ($File::Find::name, lstat $File::Find::name);
  213.  
  214.         # skip zero-length files if wanted (`-z')
  215.         # truth table:
  216.         # -z | non-zero length | return?
  217.         # 0  | 0               | 1
  218.         # 0  | 1               | 0
  219.         # 1  | 0               | 0
  220.         # 1  | 1               | 0
  221.         return unless $opts{'z'} or $entry->{'size'};
  222.  
  223.         # check to see if we've come across a file with the same crc
  224.         if (exists $files{$digest}) {
  225.             my $curr_entry = $files{$digest};
  226.  
  227.             # don't waste my time
  228.             return if $curr_entry->{'name'} eq $entry->{'name'} or
  229.                 $curr_entry->{'ino'} == $entry->{'ino'};
  230.  
  231.             # identical files should be the same size (I'm paranoid
  232.             # of checksumming procedures); if it's not, there's a
  233.             # problem with the checksumming procedure or this
  234.             # script is processing WAY too many files
  235.             # (addendum: perhaps I should congratulate the user on
  236.             # finding a collision in SHA-1)
  237.             if ($curr_entry->{'size'} != $entry->{'size'}) {
  238. die "ERROR: checksums identical for two non-identical files!!!:\n".
  239.     "files:\t`$curr_entry->{'name'}'\n".
  240.           "\t`$entry->{'name'}'\n".
  241.     "SHA1: ($digest)\n".
  242.     '(this is probably a limit of SHA1; try processing fewer files)';
  243.             }
  244.  
  245.             # find the oldest time stamp
  246.             my ($atime, $mtime) = @{(sort
  247.                 {$a->{'mtime'} <=> $b->{'mtime'}}
  248.                 ($entry, $curr_entry)
  249.             )[0]}{qw/ atime mtime /};
  250.  
  251.             # find the file less embedded in the file system
  252.             my ($less_linked, $more_linked) = sort
  253.                 {$a->{'nlink'} <=> $b->{'nlink'}}
  254.                 ($entry, $curr_entry);
  255.  
  256.             # hard-linkable files must exist on the same device and
  257.             # must not already be hard-linked
  258.             if ($curr_entry->{'dev'} == $entry->{'dev'} &&
  259.                 ! $opts{'s'}) {
  260. #               print "hard-linking $new_file\t=>$old_file\n";
  261.                 # attempt to unlink the file
  262.                 printf STDERR "removing file `%s'\n",
  263.                     $less_linked->{'name'} if $opts{'v'};
  264.                 unless (unlink $less_linked->{'name'}) {
  265.  
  266.                     # couldn't do it; try more-linked file
  267.  
  268.                     printf STDERR <<EOF
  269. Failed to remove file `%s': %s
  270. (using `%s')
  271. EOF
  272. ,
  273.                     $less_linked->{'name'},
  274.                     $!,
  275.                     $more_linked->{'name'}
  276.                         if $opts{'v'};
  277.  
  278.                     # if we can't do this, there's no point
  279.                     # in continuing
  280.                     unless (unlink $more_linked->{'name'}) {
  281. printf STDERR <<EOF
  282. Failed to remove file `%s' (second failure on match): %s
  283. Skipping...
  284. EOF
  285. ,
  286.                         $more_linked->{'name'},
  287.                         $!
  288.                             if $opts{'v'};
  289.  
  290.                         return;
  291.                     }
  292.  
  293.                     # the ol' switcheroo
  294.                     ($more_linked, $less_linked) =
  295.                     ($less_linked, $more_linked);
  296.  
  297.                 }
  298.  
  299.                 # we unlinked it or failed out
  300.                 $freed_bytes += $less_linked->{'size'}
  301.                     unless $less_linked->{'nlink'} > 1;
  302.  
  303.                 printf STDERR "hard linking `%s' => `%s'\n",
  304.                 $less_linked->{'name'}, $more_linked->{'name'}
  305.                 if $opts{'v'};
  306.  
  307.                 # hard link the files
  308.                 link $more_linked->{'name'},
  309.                 $less_linked->{'name'};
  310.  
  311.                 # update link count in our hash to reflect the
  312.                 # file system (referenced)
  313.                 ++$more_linked->{'nlink'};
  314.  
  315.                 # preserve older time stamp
  316.                 utime $atime, $mtime, $less_linked->{'name'};
  317.             } elsif (! $opts{'S'}) {
  318.                 # files are on different drives;
  319.                 # most that can be done is to soft-link them
  320.  
  321.                 unless (unlink $less_linked->{'name'}) {
  322.  
  323.                     # couldn't do it; try more-linked file
  324.  
  325.                     printf STDERR "couldn't remove file `%s' (using `%s')\n",
  326.                     $less_linked->{'name'},
  327.                     $more_linked->{'name'} if $opts{'v'};
  328.  
  329.                     # if we can't do this, there's no point
  330.                     # in continuing
  331.                     unlink $more_linked->{'name'}
  332.                         or return;
  333.  
  334.                     # the ol' switcheroo
  335.                     ($more_linked, $less_linked) =
  336.                     ($less_linked, $more_linked);
  337.  
  338.                 }
  339.  
  340.                 # we unlinked it or failed out
  341.                 $freed_bytes += $less_linked->{'size'};
  342.  
  343.                 printf STDERR "soft-linking %s => %s\n",
  344.                 $less_linked->{'name'}, $more_linked->{'name'}
  345.                 if $opts{'v'};
  346.  
  347.                 # create a soft link (TODO: relative links)
  348.                 symlink $more_linked->{'name'},
  349.                 $less_linked->{'name'};
  350.  
  351.                 # preserve older time stamp
  352.                 utime $atime, $mtime, $less_linked->{'name'};
  353.             }
  354.         } else {
  355.             # the file is unique (as far as we know)
  356.             # create a new entry in the hash table
  357.             $files{$digest} = $entry;
  358.         }
  359.     #} elsif (-l $File::Find::name) {
  360.     #   # do something to simplify symlinks
  361.     #   printf STDERR "FIXME: simplifying symlink `%s'\n",
  362.     #   $File::Find::name
  363.     #   if $opts{'v'};
  364.  
  365.     #   printf STDERR "symlink `%s' points to `%s'\n",
  366.     #   $File::Find::name, readlink $File::Find::name;
  367.     }
  368. }
  369.  
# report the total space reclaimed, both as an exact byte count and in
# human-readable units, when `-f' or `-v' was given
printf STDERR "freed %d bytes (%0.4G %s)\n",
    $freed_bytes, &hr_size($freed_bytes)
        if $opts{'f'} or $opts{'v'};
  373.  
  374. sub hr_size {
  375.     my $sz = shift;
  376.     my @sizes = qw/ B KB MB GB TB PB EB ZB YB /;
  377.     my $fact = 1024;
  378.     my $thresh = 0.1;
  379.     my @ret;
  380.     foreach my $exp (reverse 0 .. $#sizes) {
  381.         if ($sz > (1 - $thresh) * $fact ** $exp) {
  382.             @ret = ($sz / $fact ** $exp, $sizes[$exp]);
  383.             last;
  384.         }
  385.     }
  386.  
  387.     # default to ($sz, 'bytes')
  388.     @ret = ($sz, $sizes[0]) unless @ret;
  389.  
  390.     wantarray ? @ret : "@ret"
  391. }
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement