#!/bin/bash
# Download images from reddit.com/r/earthporn
# Needs "jq" and "wget" to run correctly
# (wget can be replaced with curl; see the commented alternative in the loop below)
# Size limit of 2MB
# Use size as a proxy for resolution
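# A minimal dependency check (a sketch; it assumes the tools are on PATH
# rather than at hard-coded locations like /usr/local/bin):
for dep in jq curl wget; do
    command -v "$dep" >/dev/null 2>&1 || { echo "Missing dependency: $dep" >&2; exit 1; }
done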
SIZELIMIT=$((2*1024*1024))
# Destination folder
WPDIR=~/Pictures/Wallpapers
[ -d "$WPDIR" ] || mkdir -p "$WPDIR"
cd "$WPDIR" || exit 1
for type in hot new top; do
    curl -s \
        -H 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:69.0) Gecko/20100101 Firefox/69.0' \
        "https://www.reddit.com/r/earthporn/${type}.json?limit=100" |
        jq -r '.data.children[].data.url' |
        grep '\.jpg$' |
        while read -r url; do
            wget -q -c "$url"
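            # Alternative without wget (assumes curl is available;
            # -O keeps the remote filename, -C - resumes partial downloads):
            # curl -sO -C - "$url"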
        done
done
# Delete if smaller than $SIZELIMIT bytes
find . -type f -name '*.jpg' | while IFS= read -r i; do
    # Remove zero-byte leftovers from failed downloads
    if [ ! -s "$i" ]; then
        rm -f -- "$i"
        continue
    fi
    size=$(wc -c < "$i")
    if [ "$size" -lt "$SIZELIMIT" ]; then
        rm -f -- "$i"
    fi
done
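# The same size filter as a single find invocation (a sketch; it assumes a
# find with the -delete primary and the 'c' (bytes) size suffix, which both
# GNU and BSD find provide):
# find . -type f -name '*.jpg' -size -"$SIZELIMIT"c -delete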
# Only keep the latest 1000 images
# ls -1t | tail -n +1001 | xargs rm -f
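# A variant of the pruning step that tolerates spaces in filenames
# (a sketch; it assumes an xargs with -0 support, as on GNU and BSD, and
# that filenames contain no embedded newlines):
# ls -1t | tail -n +1001 | tr '\n' '\0' | xargs -0 rm -f --
#
# Example cron entry to refresh the wallpaper folder daily
# (the script path here is hypothetical):
# 0 6 * * * /path/to/earthporn-wallpapers.sh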