Not a member of Pastebin yet? Sign up — it unlocks many cool features!
- #!/bin/bash
- #
- DIR="$1" # The source of the millions of files
- TARDEST="$2" # Where the tarballs should be placed
- # Create the million-file segments
- rm -f /tmp/chunk.*
- find "$DIR" -type f | split -l 1000000 - /tmp/chunk.
- # Create corresponding tarballs
- for CHUNK in $(cd /tmp && echo chunk.*)
- do
- test -f "$CHUNK" || continue
- echo "Creating tarball for chunk '$CHUNK'" >&2
- tar cTf "/tmp/$CHUNK" "$TARDEST/$CHUNK.tar"
- rm -f "/tmp/$CHUNK"
- done
#!/bin/bash
#
# Tar up the files under a source tree in batches of one million paths,
# producing numbered .tgz archives (asdf.1.tgz, asdf.2.tgz, ...).
#
# Usage: script.sh [SOURCE_DIR] [DEST_DIR]
#        (defaults preserve the original hard-coded paths)
set -euo pipefail

main() {
  local src=${1:-/source/path}
  local dest=${2:-/destination/path}
  local ctr=0
  local -a asdf

  while readarray -n 1000000 -t asdf; do
    # BUGFIX: readarray returns 0 even at EOF, so the original
    # 'while readarray' looped forever — stop once nothing was read.
    (( ${#asdf[@]} > 0 )) || break
    ctr=$(( ctr + 1 ))
    tar czf "$dest/asdf.${ctr}.tgz" "${asdf[@]}"
    # If you don't want compression, use this instead:
    #tar cf "$dest/asdf.${ctr}.tar" "${asdf[@]}"
  done < <(find "$src" -not -type d)
  # BUGFIX: the original 'done <(find …)' is a bash syntax error; the
  # redirection from a process substitution must be 'done < <(…)'.
}

main "$@"
# NOTE(review): illustrative sketch only — NOT runnable as written:
#   - "{...}" is a placeholder function body, not valid bash.
#   - the line break before "|" needs a trailing backslash on the find
#     line (or the "|" moved up) to form one pipeline.
#   - readarray at the tail of a pipeline runs in a subshell, so the
#     "asdf" array would be lost when the pipeline ends — presumably the
#     intent is that -C "something" processes each batch as a callback
#     while reading; confirm, or use 'shopt -s lastpipe' / 'done < <(…)'.
function something() {...}
find /source/path -not -type d
| readarray -n 1000000 -t -C something asdf
- find /source/path -not -type d -print0
- | parallel -j4 -d '