Bing revip
rizky07, Jun 30th, 2021
import re
import socket
import requests
import urllib2
import urllib
import os
import sys
import time
import cookielib
from bs4 import BeautifulSoup
from platform import system

banner = """ Tempik
/$$$$$$$ /$$$$$$ /$$$$$$ /$$$$$$ /$$$$$$$$
| $$__ $$ /$$__ $$ /$$__ $$ /$$__ $$|__ $$__/
| $$ \ $$| $$ \ $$| $$ \__/| $$ \ $$ | $$
| $$$$$$$ | $$$$$$$$| $$ | $$ | $$ | $$
| $$__ $$| $$__ $$| $$ | $$ | $$ | $$
| $$ \ $$| $$ | $$| $$ $$| $$ | $$ | $$
| $$$$$$$/| $$ | $$| $$$$$$/| $$$$$$/ | $$
|_______/ |__/ |__/ \______/ \______/ |__/
\t Coded By Nelo.F4
\t Explosion Squad Cyber
\t https://www.facebook.com/fadiel.s.new
\t https://t.me/Nelssshere
"""
print (banner)
headers = {"User-Agent": "Mozilla/5.0 (Windows NT 6.1; rv:57.0) Gecko/20100101 Firefox/57.0"}
print ("""
1. Bing Dorker Random Domains
2. Get Weblist From IP
3. Get IP From Website
4. Add HTTP On All Weblist
5. URL List Cleaner
6. Dork with allsite
""")

nels = raw_input("root@Nels:~# ")

class males():
    # Option 6: loop over the site: TLD list, scrape Bing result links,
    # and append each unique scheme://host pair to AllDomains.txt.
    def alldomains(self):
        iya = raw_input("List@Dork:~# ")
        iya = open(iya, 'r')
        dom = ['ac','site:ad','site:ae','site:af','site:ag','site:ai','site:al','site:am','site:an','site:ao','site:aq','site:ar','site:as','site:at','site:au','site:aw','site:ax','site:az','site:ba','site:bb','site:bd','site:be','site:bf','site:bg','site:bh','site:bi','site:bj','site:bm','site:bn','site:bo','site:br','site:bs','site:bt','site:bv','site:bw','site:by','site:bz','site:ca','site:cc','site:cd','site:cf','site:cg','site:ch','site:ci','site:ck','site:cl','site:cm','site:cn','site:co','site:cr','site:cu','site:cv','site:cx','site:cy','site:cz','site:de','site:dj','site:dk','site:dm','site:do','site:dz','site:ec','site:ee','site:eg','site:eh','site:er','site:es','site:et','site:eu','site:fi','site:fj','site:fk','site:fm','site:fo','site:fr','site:ga','site:gb','site:gd','site:ge','site:gf','site:gg','site:gh','site:gi','site:gl','site:gm','site:gn','site:gp','site:gq','site:gr','site:gs','site:gt','site:gu','site:gw','site:gy','site:hk','site:hm','site:hn','site:hr','site:ht','site:hu','site:id','site:ie','site:il','site:im','site:in','site:io','site:iq','site:is','site:it','site:je','site:jm','site:jo','site:jp','site:ke','site:kg','site:kh','site:ki','site:km','site:kn','site:kp','site:kr','site:kw','site:ky','site:kz','site:la','site:lb','site:lc','site:li','site:lk','site:lr','site:ls','site:lt','site:lu','site:lv','site:ly','site:ma','site:mc','site:md','site:me','site:mg','site:mh','site:mk','site:ml','site:mm','site:mn','site:mo','site:mp','site:mq','site:mr','site:ms','site:mt','site:mu','site:mv','site:mw','site:mx','site:my','site:mz','site:na','site:nc','site:ne','site:nf','site:ng','site:ni','site:nl','site:no','site:np','site:nr','site:nu','site:nz','site:om','site:pa','site:pe','site:pf','site:pg','site:ph','site:pk','site:pl','site:pm','site:pn','site:pr','site:ps','site:pt','site:pw','site:py','site:qa','site:re','site:ro','site:rs','site:ru','site:rw','site:sa','site:sb','site:sc','site:sd','site:se','site:sg','site:sh','site:si','site:sj','site:sk','site:sl','site:sm','site:sn','site:so','site:sr','site:st','site:su','site:sv','site:sy','site:sz','site:tc','site:td','site:tf','site:tg','site:th','site:tj','site:tk','site:tl','site:tm','site:tn','site:to','site:tp','site:tr','site:tt','site:tv','site:tw','site:tz','site:ua','site:ug','site:uk','site:um','site:us','site:uy','site:uz','site:va','site:vc','site:ve','site:vg','site:vi','site:vn','site:vu','site:wf','site:ws','site:ye','site:yt','site:za','site:zm','site:zw','site:com','site:net','site:org','site:biz','site:gov','site:mil','site:edu','site:info','site:int','site:tel','site:name','site:aero','site:asia','site:cat','site:coop','site:jobs','site:mobi','site:museum','site:pro','site:travel']
        for udah in dom:
            tam = []
            page = 1
            while page < 159:
                bing = "http://www.bing.com/search?q=+udah+' '+iya+&count=50&first="+str(page)
                rek = requests.get(bing,verify=False,headers=headers)
                eee = rek.content
                nemu = re.findall('<h2><a href="(.*?)"', eee)
                for o in nemu:
                    i = o.split('/')
                    if (i[0]+'//'+i[2]) in tam:
                        pass
                    else:
                        tam.append(i[0]+'//'+i[2])
                        print '[>>]',(i[0]+'//'+i[2])
                        with open('AllDomains.txt', 'a') as s:
                            s.writelines((i[0]+'//'+i[2])+'\n')
                page = page+50

    # Option 1: run a Bing search for every line in the given file and
    # append each unique scheme://host pair to Random.txt.
    def randomdomen(self):
        bo = raw_input("List@Dork:~# ")
        bo = open(bo, 'r')
        for oaja in bo:
            sa = []
            tu = 1
            while tu < 159:
                bing0 = "http://www.bing.com/search?q="+oaja+"+&count=50&first="+str(tu)
                iyoo = requests.get(bing0,verify=False,headers=headers)
                rrr = iyoo.content
                sip = re.findall('<h2><a href="(.*?)"', rrr)
                for i in sip:
                    o = i.split('/')
                    if (o[0]+'//'+o[2]) in sa:
                        pass
                    else:
                        sa.append(o[0]+'//'+o[2])
                        print '[>>]',(o[0]+'//'+o[2])
                        with open('Random.txt', 'a') as s:
                            s.writelines((o[0]+'//'+o[2])+'\n')
                tu = tu+50

    # Option 2: run a Bing "IP:" search for every address in the given list and
    # append each unique scheme://host pair to Grab.txt.
    def grabip(self):
        ooke = raw_input("List@IP:~# ")
        ooke = open(ooke, 'r')
        for zzz in ooke:
            bo = []
            lonk = 1
            while lonk < 299:
                bingung = "http://www.bing.com/search?q=IP%3A"+zzz+"+&count=50&first="+str(lonk)
                iyagw = requests.get(bingung,verify=False,headers=headers)
                gans = iyagw.content
                ya = re.findall('<h2><a href="(.*?)"', gans)
                for z in ya:
                    o = z.split('/')
                    if (o[0]+'//'+o[2]) in bo:
                        pass
                    else:
                        bo.append(o[0]+'//'+o[2])
                        print '[>>]',(o[0]+'//'+o[2])
                        with open('Grab.txt','a') as s:
                            s.writelines((o[0]+'//'+o[2])+'\n')
                lonk = lonk+50

    # Option 4: prepend http:// to every entry in the given list and write the result to HTTP.txt.
    def http(self):
        kep = raw_input("List@Sites:~# ")
        kep = open(kep, 'r')
        for i in kep:
            i = i.rstrip()
            print("http://"+i)
            with open('HTTP.txt', 'a') as o:
                o.write("http://" + i + '\n')
        print("[>>] D0N3! Check HTTP.txt")

    # Option 5: keep only the part before the first '/' of each entry and write it to Cleaner.txt.
    def clean(self):
        print ("URL LIST WITHOUT HTTP://")
        oh = raw_input("List@Sites:~#")
        oh = open(oh, 'r')
        for i in oh:
            i = i.rstrip()
            with open("Cleaner.txt", 'a') as f:
                f.write(i.split('/')[0] + '\n')
        print('[>>] D0N3! Check Cleaner.txt')

    # Option 3: reverse IP lookup via the hackertarget.com API; responses are appended to Gotcha.txt.
    def getip(self):
        hooh = raw_input("List@IP:~# ")
        hooh = open(hooh, 'r')
        for i in hooh.readlines():
            done = i.rstrip()
            try:
                done = done.rstrip()
                bine = requests.get('http://api.hackertarget.com/reverseiplookup/?q='+done)
                if '.' in bine.content:
                    print ("[>>]" + (bine.content))
                    with open('Gotcha.txt', 'a') as o:
                        o.writelines(bine.content + '\n')
                else:
                    pass
            except:
                pass


dahah = males()
if nels == '1':
    dahah.randomdomen()
elif nels == '2':
    dahah.grabip()
elif nels == '4':
    dahah.http()
elif nels == '5':
    dahah.clean()
elif nels == '3':
    dahah.getip()
elif nels == '6':
    dahah.alldomains()
else:
    print("?")