Scripts: /u/BASH_SCRIPTS_FOR_YOU

Feb 7th, 2015
#!/bin/bash
# pururin - collect the direct image URLs of one gallery into <name>.txt
read -r URL                                          # gallery path under /thumbs/, read from stdin
SITE="http://pururin.com"
File=$(echo "${URL}" | sed 's/.*\///; s/\..*//')     # output name: last path component, extension stripped
# Pull the /view/ links off the thumbnail page, fetch each viewer page,
# and keep the full-size image URL from every one of them.
curl -# "${SITE}/thumbs/${URL}" | grep '<li class="I0"' | tr '" ' '\n' | grep '^/view/' | awk -v Z="$SITE" '{print Z $0}' | tr '\n' ' ' | xargs curl -# | grep '<img class="b" src="' | tr '"' '\n' | grep '/f/' | awk -v Z="$SITE" '{print Z $0}' >> "${File}.txt"
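A minimal usage sketch, assuming the script above is saved as pururin.sh; the gallery path is a made-up example, since the script only expects whatever follows /thumbs/ in the gallery URL:

# Hypothetical invocation - the path format is an assumption, not taken from the paste.
echo "gallery/12345/example-title.html" | bash pururin.sh
# Expected result: example-title.txt with one direct image URL per line.
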
#!/bin/bash
# pururinAuto - same scrape as pururin, but also downloads every image into its own directory
read -r URL
SITE="http://pururin.com"
File=$(echo "${URL}" | sed 's/.*\///; s/\..*//')
mkdir "${File}"
cd "${File}" || exit 1
# Build the link list exactly as the pururin script does.
curl -# "${SITE}/thumbs/${URL}" | grep '<li class="I0"' | tr '" ' '\n' | grep '^/view/' | awk -v Z="$SITE" '{print Z $0}' | tr '\n' ' ' | xargs curl -# | grep '<img class="b" src="' | tr '"' '\n' | grep '/f/' | awk -v Z="$SITE" '{print Z $0}' >> "${File}.txt"
# Fetch the list one line at a time, retrying failed transfers up to 8 times.
linkNum=$(wc -l < "${File}.txt")
linkNum=$(( linkNum + 1 ))
n=1
while [ "$n" != "$linkNum" ]
do sed -n "$n{p;q;}" "${File}.txt" | xargs curl --retry 8 -g -# -O; n=$(( n + 1 ))
done
cd ..
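The sed/xargs counter loop above fetches one URL per iteration; a plainer equivalent (a sketch, not part of the original paste) reads the generated list directly:

# Equivalent download loop: read the link list line by line and fetch each URL.
while read -r link
do curl --retry 8 -g -# -O "$link"
done < "${File}.txt"
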
#!/bin/bash
# fakku - collect the full-size image URLs of one gallery into <name>.txt
read -r Media                                 # gallery path, read from stdin
FILE=$(echo "${Media}" | sed 's/.*\///')      # output name: last path component
# The reader page embeds escaped thumbnail URLs in window.params.thumbs;
# strip the backslashes, add the scheme, and rewrite them to full-size image URLs.
curl -# "https://www.fakku.net/${Media}/read" | grep 'window.params.thumbs =' | tr '"' '\n' | grep fakku | sed 's/\\//g; s/^/https:/; s/thumbs/images/g; s/\.thumb//g' >> "${FILE}.txt"
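As with the pururin script, input comes from stdin; the gallery path and script file name below are placeholders:

# Hypothetical invocation - the path format is an assumption.
echo "doujinshi/example-title" | bash fakku.sh
# Expected result: example-title.txt with one image URL per line.
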
#!/bin/bash
# AutoFakku - same scrape as fakku, but also downloads every image into its own directory
read -r Media
FILE=$(echo "${Media}" | sed 's/.*\///')
mkdir "${FILE}"
cd "${FILE}" || exit 1
curl -# "https://www.fakku.net/${Media}/read" | grep 'window.params.thumbs =' | tr '"' '\n' | grep fakku | sed 's/\\//g; s/^/https:/; s/thumbs/images/g; s/\.thumb//g' >> "${FILE}.txt"
# Fetch the list one line at a time, retrying failed transfers up to 8 times.
linkNum=$(wc -l < "${FILE}.txt")
linkNum=$(( linkNum + 1 ))
n=1
while [ "$n" != "$linkNum" ]
do sed -n "$n{p;q;}" "${FILE}.txt" | xargs curl --retry 8 -g -# -O; n=$(( n + 1 ))
done
cd ..
#!/bin/bash
# sankakucomplex - walk every result page of a tag search and save the image URLs
read -r tags
tags=$(echo "$tags" | sed 's/ /%20/g')        # percent-encode the spaces between tags
number=1
URL="https://chan.sankakucomplex.com/?tags=${tags}&page="
echo "$URL"
newpage=2
# Keep going while the current page still links to a next page.
while [ "${newpage}" != 0 ]
do echo "$number"
page=$(curl --retry 8 -# "${URL}${number}" | grep -e '^<span class="' -e "next-page-url" | tr '" ' '\n' | grep -e "/data/preview/" -e "next-page-url" | sed 's/\/\/c.san/http:\/\/cs.san/g; s/preview\///g')
newpage=$(echo "$page" | grep -c "page")      # drops to 0 once there is no next-page-url
echo "$page" | grep "/data/" >> "${tags}".txt
number=$(( number + 1 ))
sleep 6                                       # be gentle with the server
done
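Tags are read from stdin and the script percent-encodes the spaces itself, so a multi-tag query can be typed as-is; the tag names and script file name below are placeholders:

# Hypothetical invocation.
echo "tag_one tag_two" | bash sankaku.sh
# Expected result: tag_one%20tag_two.txt, filled page by page with a 6-second pause between requests.
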
#!/bin/bash
# e621 - save the sample image URL of every post matching a tag search
num=1
read -r TAGS
site="https://e621.net/post/index/"
# Read the number of the last page from the "Last Page" pagination link on the first page.
pages=$(curl -# "$site$num/$TAGS" | grep 'rel="last" title="Last Page">' | tr '/' '\n' | sed -n '4p')
pages=$(( pages + 1 ))
while [ "$num" != "$pages" ]
do curl -# "$site$num/$TAGS" | sed 's/,"/\n/g' | grep 'sample_url":' | tr '"' '\n' | grep '^http' >> "$TAGS".txt
num=$(( num + 1 ))
done
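Unlike the sankakucomplex script, this one does not encode spaces, so a multi-word query would presumably have to be joined before being piped in; the tag and script file name below are placeholders:

# Hypothetical invocation.
echo "example_tag" | bash e621.sh
# Expected result: example_tag.txt with one sample image URL per matching post.
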
#!/bin/bash
# konachan - save the image URLs of every post matching a tag search
i=1
SITE="http://konachan.com/post?page="
read -r TAGS
TAGS=$(echo "$TAGS" | tr ' ' '+')             # join tags with '+'
# Read the number of the last page from the "Last Page" pagination link.
lastpage=$(curl -# "${SITE}1&tags=${TAGS}" | grep '"Last Page" />' | tr '=&' '\n' | grep '[0-9]')
lastpage=$(( lastpage + 1 ))
while [ "$i" != "$lastpage" ]
do curl --retry 10 -# "${SITE}${i}&tags=${TAGS}" | grep '</span></a></li> </ul>' | tr ' ' '\n' | grep 'http' | grep -v -e 'src="http:' -e '/post/' | tr '"' '\n' | grep 'http' >> "$TAGS".txt
i=$(( i + 1 ))
done
#!/bin/bash
# rule34 - save the image URLs of every post matching a tag search on rule34.paheal.net
NUM=1
read -r TAGS
TAGS=$(echo "$TAGS" | sed 's/ /%20/g')        # percent-encode the spaces between tags
URL="http://rule34.paheal.net/post/list/"
# Read the number of the last page out of the ">Last<" pagination link on the first page.
PAGES=$(curl -# "$URL$TAGS/1" | tr ' ' '\n' | grep '>Last</a><br>&lt;&lt;' | tr '/' '\n' | grep '">' | tr '"' '\n' | grep '[0-9]')
PAGES=$(( PAGES + 1 ))
while [ "$NUM" != "$PAGES" ]
do curl -# "$URL$TAGS/$NUM" | sed 's/href="/\n/g' | grep '">Image Only<' | sed 's/">Image Only</\n/g' | grep '^http' >> "$TAGS".txt
NUM=$(( NUM + 1 ))
done
#!/bin/bash
# gelbooru - save the image URL of every post matching a tag search
i=0
SITE="http://gelbooru.com/"
read -r TAGS
TAGS=$(echo "${TAGS}" | tr ' ' '+')           # join tags with '+'
URL="${SITE}index.php?page=post&s=list&tags=${TAGS}&pid="
# The listing is paginated by a pid offset; read the last offset from the first page.
PAGES=$(curl "$URL" | tr '=" ' '\n' | grep -B 6 '&raquo' | grep '[0-9]')
PAGES=$(( PAGES + 42 ))
# For each listing page: collect the post links, open each post, and keep its image URL.
while [ "$i" != "$PAGES" ]
do curl "$URL$i" | tr ' ' '\n' | grep "index.php?page=post&amp;s=view&amp" | sed -E 's/"|amp;//g' | sed 's/href=//g' | awk -v Z="$SITE" '{print Z $0}' | tr "\n" " " | xargs curl | grep '<img alt="' | tr ' ' '\n' | grep "src" | tr '"?' '\n' | grep 'http' >> "${TAGS}".txt
i=$(( i + 42 ))                               # the listing advances 42 posts per page
done
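Gelbooru paginates by a pid offset rather than a page number, which is why the counter advances in steps of 42. A usage sketch; the tag names and script file name are placeholders:

# Hypothetical invocation.
echo "tag_one tag_two" | bash gelbooru.sh
# Expected result: tag_one+tag_two.txt with one direct image URL per post.
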
#!/bin/bash
# listDownloader - download every URL listed in a file, one per line, into the current directory
read -r FILE                                  # name of the link list, read from stdin
linkNum=$(wc -l < "${FILE}")
linkNum=$(( linkNum + 1 ))
n=1
while [ "$n" != "$linkNum" ]
do sed -n "$n{p;q;}" "${FILE}" | xargs curl -g -# -O; n=$(( n + 1 ))
done
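This pairs with any of the list-building scripts above. A minimal sketch, assuming the script is saved as listDownloader.sh and a link list such as example_tag.txt already exists:

# Hypothetical invocation.
echo "example_tag.txt" | bash listDownloader.sh
# Every URL in example_tag.txt is fetched into the current directory.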