Advertisement
Guest User

Untitled

a guest
Mar 14th, 2013
11,007
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 4.34 KB | None | 0 0
  1. 130.14.29.111:80
  2. 130.14.29.120:80
  3. 130.159.235.31:80
  4. 14.198.198.220:8909
  5. 141.105.26.183:8000
  6. 160.79.35.27:80
  7. 164.77.196.75:80
  8. 164.77.196.78:45430
  9. 164.77.196.78:80
  10. 173.10.134.173:8081
  11. 174.132.145.80:80
  12. 174.137.152.60:8080
  13. 174.137.184.37:8080
  14. 174.142.125.161:80
  15.  
  16. total number of '0' = 8
  17. total number of 'x' = 6
  18. percentage = alive 57% , dead 43%  (8 of 14 alive, 6 of 14 dead)
  19.  
  20.  
  21. x 130.14.29.111:80
  22. 0 130.14.29.120:80
  23. 0 130.159.235.31:80
  24. 0 14.198.198.220:8909
  25. 0 141.105.26.183:8000
  26. 0 160.79.35.27:80
  27. x 164.77.196.75:80
  28. x 164.77.196.78:45430
  29. x 164.77.196.78:80
  30. 0 173.10.134.173:8081
  31. 0 174.132.145.80:80
  32. 0 174.137.152.60:8080
  33. x 174.137.184.37:8080
  34. x 174.142.125.161:80
  35.  
  36. import socket
  37. import urllib2
  38. import threading
  39. import sys
  40. import Queue
  41. import socket
  42.  
  43. socket.setdefaulttimeout(7)
  44.  
  45. print "Bobng's proxy checker. Using %s second timeout"%(socket.getdefaulttimeout())
  46.  
  47. #input_file = sys.argv[1]
  48. #proxy_type = sys.argv[2] #options: http,s4,s5
  49. #output_file = sys.argv[3]
  50. input_file = 'proxylist.txt'
  51. proxy_type = 'http'
  52. output_file = 'proxy_alive.txt'
  53.  
  54. url = "www.seemyip.com" # Don't put http:// in here, or any /'s
  55.  
  56. check_queue = Queue.Queue()
  57. output_queue = Queue.Queue()
  58. threads = 20
  59.  
  60. def writer(f,rq):
  61. while True:
  62. line = rq.get()
  63. f.write(line+'n')
  64.  
  65. def checker(q,oq):
  66. while True:
  67. proxy_info = q.get() #ip:port
  68. if proxy_info == None:
  69. print "Finished"
  70. #quit()
  71. return
  72. #print "Checking %s"%proxy_info
  73. if proxy_type == 'http':
  74. try:
  75.  
  76. listhandle = open("proxylist.txt").read().split('n')
  77.  
  78. for line in listhandle:
  79. saveAlive = open("proxy_alive.txt", 'a')
  80.  
  81. details = line.split(':')
  82. email = details[0]
  83. password = details[1].replace('n', '')
  84.  
  85.  
  86. proxy_handler = urllib2.ProxyHandler({'http':proxy_info})
  87. opener = urllib2.build_opener(proxy_handler)
  88. opener.addheaders = [('User-agent','Mozilla/5.0')]
  89. urllib2.install_opener(opener)
  90. req = urllib2.Request("http://www.google.com")
  91. sock=urllib2.urlopen(req, timeout= 7)
  92. rs = sock.read(1000)
  93. if '<title>Google</title>' in rs:
  94. oq.put(proxy_info)
  95. print '[+] alive proxy' , proxy_info
  96. saveAlive.write(line)
  97. saveAlive.close()
  98. except urllib2.HTTPError,e:
  99. print 'url open error? slow?'
  100. pass
  101. except Exception,detail:
  102. print '[-] bad proxy' ,proxy_info
  103.  
  104. else:
  105. # gotta be socks
  106. try:
  107. s = socks.socksocket()
  108. if proxy_type == "s4":
  109. t = socks.PROXY_TYPE_SOCKS4
  110. else:
  111. t = socks.PROXY_TYPE_SOCKS5
  112. ip,port = proxy_info.split(':')
  113. s.setproxy(t,ip,int(port))
  114. s.connect((url,80))
  115. oq.put(proxy_info)
  116. print proxy_info
  117. except Exception,error:
  118. print proxy_info
  119.  
  120. threading.Thread(target=writer,args=(open(output_file,"wb"),output_queue)).start()
  121. for i in xrange(threads):
  122. threading.Thread(target=checker,args=(check_queue,output_queue)).start()
  123. for line in open(input_file).readlines():
  124. check_queue.put(line.strip('n'))
  125. print "File reading done"
  126. for i in xrange(threads):
  127. check_queue.put(None)
  128. raw_input("PRESS ENTER TO QUIT")
  129. sys.exit(0)
  130.  
  131. Result format: a list of (ip_address, status) pairs, where '0' marks an alive proxy and 'x' a dead one, e.g. [ (ip_address_1, 'x'), (ip_address_2, '0'), ... ]
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement