#!/bin/bash
# pururin: collect full-size image links from a gallery page into <name>.txt
read -r URL
SITE="http://pururin.com"
# Output file name: last path component of the input, minus its extension
File=$(echo "${URL}" | sed 's/.*\///; s/\..*//')
# Fetch the thumbs page, follow every /view/ link, keep the /f/ image URLs
curl -# "${SITE}/thumbs/${URL}" | grep '<li class="I0"' | tr '" ' '\n' | grep '^/view/' \
    | awk -v Z="$SITE" '{print Z $0}' | tr '\n' ' ' | xargs curl -# \
    | grep '<img class="b" src="' | tr '"' '\n' | grep '/f/' \
    | awk -v Z="$SITE" '{print Z $0}' >> "${File}.txt"
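
# Usage sketch, assuming the script above is saved as pururin.sh (the script
# name and the gallery path are hypothetical; the path is read from stdin):
#   echo "12345/example-gallery.html" | bash pururin.sh
# Result: example-gallery.txt filled with direct image URLs.
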
#!/bin/bash
# pururinAuto: same scrape as pururin, then download everything into its own directory
read -r URL
SITE="http://pururin.com"
File=$(echo "${URL}" | sed 's/.*\///; s/\..*//')
mkdir "${File}"
cd "${File}" || exit 1
curl -# "${SITE}/thumbs/${URL}" | grep '<li class="I0"' | tr '" ' '\n' | grep '^/view/' \
    | awk -v Z="$SITE" '{print Z $0}' | tr '\n' ' ' | xargs curl -# \
    | grep '<img class="b" src="' | tr '"' '\n' | grep '/f/' \
    | awk -v Z="$SITE" '{print Z $0}' >> "${File}.txt"
# Fetch the list one line at a time, retrying failed transfers
linkNum=$(wc -l < "${File}.txt")
n=1
while [ "$n" -le "$linkNum" ]
do
    sed -n "${n}{p;q;}" "${File}.txt" | xargs curl --retry 8 -g -# -O
    n=$(( n + 1 ))
done
cd ..
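
# Usage sketch, assuming pururinAuto.sh (name and path hypothetical):
#   echo "12345/example-gallery.html" | bash pururinAuto.sh
# creates example-gallery/ and downloads every image into it.
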
#!/bin/bash
# fakku: turn the reader's thumbnail list into full-size image links
read -r Media
FILE=$(echo "${Media}" | sed 's/.*\///')
# The thumbs appear in a JS assignment; unescape the slashes, prefix the
# scheme, and swap the thumbnail path for the full-size image path
curl -# "https://www.fakku.net/${Media}/read" | grep 'window.params.thumbs =' \
    | tr '"' '\n' | grep fakku \
    | sed 's/\\//g; s/^/https:/; s/thumbs/images/g; s/\.thumb//g' >> "${FILE}.txt"
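
# Usage sketch, assuming fakku.sh (name and path hypothetical):
#   echo "doujinshi/example-title" | bash fakku.sh
# writes the full-size links to example-title.txt.
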
#!/bin/bash
# AutoFakku: same scrape as fakku, then download every page into its own directory
read -r Media
FILE=$(echo "${Media}" | sed 's/.*\///')
mkdir "${FILE}"
cd "${FILE}" || exit 1
curl -# "https://www.fakku.net/${Media}/read" | grep 'window.params.thumbs =' \
    | tr '"' '\n' | grep fakku \
    | sed 's/\\//g; s/^/https:/; s/thumbs/images/g; s/\.thumb//g' >> "${FILE}.txt"
# Fetch the list one line at a time, retrying failed transfers
linkNum=$(wc -l < "${FILE}.txt")
n=1
while [ "$n" -le "$linkNum" ]
do
    sed -n "${n}{p;q;}" "${FILE}.txt" | xargs curl --retry 8 -g -# -O
    n=$(( n + 1 ))
done
cd ..
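
# Usage sketch, assuming AutoFakku.sh (name and path hypothetical):
#   echo "doujinshi/example-title" | bash AutoFakku.sh
# creates example-title/ and downloads every page into it.
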
#!/bin/bash
# sankakucomplex: walk the paginated tag search until no next-page link remains
read -r tags
tags=$(echo "$tags" | sed 's/ /%20/g')
number=1
URL="https://chan.sankakucomplex.com/?tags=${tags}&page="
echo "$URL"
newpage=2   # any non-zero value starts the loop
while [ "${newpage}" != 0 ]
do
    echo "$number"
    # Grab the preview links and the next-page marker in a single pass
    page=$(curl --retry 8 -# "${URL}${number}" | grep -e '^<span class="' -e "next-page-url" \
        | tr '" ' '\n' | grep -e "/data/preview/" -e "next-page-url" \
        | sed 's|//c.san|http://cs.san|g; s|preview/||g')
    newpage=$(echo "$page" | grep -c "page")
    echo "$page" | grep "/data/" >> "${tags}.txt"
    number=$(( number + 1 ))
    sleep 6   # throttle between page requests
done
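
# Usage sketch, assuming sankaku.sh (name and tags hypothetical). Spaces become
# %20, and the output file keeps them in its name:
#   echo "tag_one tag_two" | bash sankaku.sh    # writes tag_one%20tag_two.txt
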
#!/bin/bash
# e621: read the page count from the rel="last" link, then collect the sample URLs
num=1
read -r TAGS
site="https://e621.net/post/index/"
pages=$(curl -# "${site}${num}/${TAGS}" | grep 'rel="last" title="Last Page">' \
    | tr '/' '\n' | sed -n '4p')
while [ "$num" -le "$pages" ]
do
    curl -# "${site}${num}/${TAGS}" | sed 's/,"/\n/g' | grep 'sample_url":' \
        | tr '"' '\n' | grep '^http' >> "${TAGS}.txt"
    num=$(( num + 1 ))
done
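
# Usage sketch, assuming e621.sh (name and tag hypothetical). The tag is not
# URL-escaped by this script, so pass a single tag or pre-escape it:
#   echo "example_tag" | bash e621.sh    # writes example_tag.txt
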
#!/bin/bash
# konachan: read the last page number from the pager, then scrape each page
i=1
SITE="http://konachan.com/post?page="
read -r TAGS
TAGS=$(echo "$TAGS" | tr ' ' '+')
lastpage=$(curl -# "${SITE}1&tags=${TAGS}" | grep '"Last Page" />' | tr '=&' '\n' | grep '[0-9]')
while [ "$i" -le "$lastpage" ]
do
    curl --retry 10 -# "${SITE}${i}&tags=${TAGS}" | grep '</span></a></li> </ul>' \
        | tr ' ' '\n' | grep 'http' | grep -v -e 'src="http:' -e '/post/' \
        | tr '"' '\n' | grep 'http' >> "${TAGS}.txt"
    i=$(( i + 1 ))
done
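
# Usage sketch, assuming konachan.sh (name and tags hypothetical):
#   echo "tag_one tag_two" | bash konachan.sh    # writes tag_one+tag_two.txt
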
#!/bin/bash
# rule34: read the page count from the "Last" pager link, then collect the Image Only links
NUM=1
read -r TAGS
TAGS=$(echo "$TAGS" | sed 's/ /%20/g')
URL="http://rule34.paheal.net/post/list/"
PAGES=$(curl -# "${URL}${TAGS}/1" | tr ' ' '\n' | grep '>Last</a><br><<' \
    | tr '/' '\n' | grep '">' | tr '"' '\n' | grep '[0-9]')
while [ "$NUM" -le "$PAGES" ]
do
    curl -# "${URL}${TAGS}/${NUM}" | sed 's/href="/\n/g' | grep '">Image Only<' \
        | sed 's/">Image Only</\n/g' | grep '^http' >> "${TAGS}.txt"
    NUM=$(( NUM + 1 ))
done
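
# Usage sketch, assuming rule34.sh (name and tags hypothetical):
#   echo "tag_one tag_two" | bash rule34.sh    # writes tag_one%20tag_two.txt
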
#!/bin/bash
# gelbooru: paginate by post offset (42 posts per page), open each post, grab its image URL
i=0
SITE="http://gelbooru.com/"
read -r TAGS
TAGS=$(echo "${TAGS}" | tr ' ' '+')
URL="${SITE}index.php?page=post&s=list&tags=${TAGS}&pid="
# Offset of the last page, taken from the pager entry next to the '»' arrow
PAGES=$(curl "$URL" | tr '=" ' '\n' | grep -B 6 '»' | grep '[0-9]')
PAGES=$(( PAGES + 42 ))
# -lt instead of != so an offset that is not a multiple of 42 cannot loop forever
while [ "$i" -lt "$PAGES" ]
do
    curl "${URL}${i}" | tr ' ' '\n' | grep "index.php?page=post&s=view&" \
        | sed -E 's/"|amp;//g' | sed 's/href=//g' \
        | awk -v Z="$SITE" '{print Z $0}' | tr '\n' ' ' | xargs curl \
        | grep '<img alt="' | tr ' ' '\n' | grep "src" | tr '"?' '\n' \
        | grep 'http' >> "${TAGS}.txt"
    i=$(( i + 42 ))
done
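
# Usage sketch, assuming gelbooru.sh (name and tag hypothetical):
#   echo "example_tag" | bash gelbooru.sh    # writes example_tag.txt
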
#!/bin/bash
# listDownloader: download every URL in a link list, one line at a time
read -r FILE
linkNum=$(wc -l < "${FILE}")
n=1
while [ "$n" -le "$linkNum" ]
do
    sed -n "${n}{p;q;}" "${FILE}" | xargs curl -g -# -O
    n=$(( n + 1 ))
done
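
# Usage sketch, assuming listDownloader.sh (name hypothetical). Feed it any of
# the link lists produced by the scrapers above:
#   echo "example_tag.txt" | bash listDownloader.sh
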