import socket
import urllib2
import threading
import sys
import Queue
import socks  # SOCKS checks need the SocksiPy/PySocks module

socket.setdefaulttimeout(7)

print "Bobng's proxy checker. Using %s second timeout" % (socket.getdefaulttimeout())

#input_file = sys.argv[1]
#proxy_type = sys.argv[2]  # options: http, s4, s5
#output_file = sys.argv[3]
input_file = 'proxylist.txt'
proxy_type = 'http'
output_file = 'proxy_alive.txt'

url = "www.seemyip.com"  # Don't put http:// in here, or any /'s

check_queue = Queue.Queue()   # proxies waiting to be checked
output_queue = Queue.Queue()  # proxies that turned out to be alive
threads = 20
def writer(f, rq):
    # Pull live proxies off the result queue and append them to the output file.
    while True:
        line = rq.get()
        f.write(line + '\n')
        f.flush()
def checker(q, oq):
    while True:
        proxy_info = q.get()  # ip:port
        if proxy_info is None:
            print "Finished"
            return
        #print "Checking %s" % proxy_info
        if proxy_type == 'http':
            try:
                # Route a request through the proxy and look for Google's title
                # tag in the response. Using the opener directly (instead of
                # urllib2.install_opener) keeps each worker thread independent.
                proxy_handler = urllib2.ProxyHandler({'http': proxy_info})
                opener = urllib2.build_opener(proxy_handler)
                opener.addheaders = [('User-agent', 'Mozilla/5.0')]
                req = urllib2.Request("http://www.google.com")
                sock = opener.open(req, timeout=7)
                rs = sock.read(1000)
                if '<title>Google</title>' in rs:
                    oq.put(proxy_info)  # the writer thread saves it to the output file
                    print '[+] alive proxy', proxy_info
            except urllib2.HTTPError, e:
                print 'url open error? slow?'
            except Exception, detail:
                print '[-] bad proxy', proxy_info
        else:
            # gotta be socks
            try:
                s = socks.socksocket()
                if proxy_type == "s4":
                    t = socks.PROXY_TYPE_SOCKS4
                else:
                    t = socks.PROXY_TYPE_SOCKS5
                ip, port = proxy_info.split(':')
                s.setproxy(t, ip, int(port))
                # A successful TCP connect through the proxy counts as alive.
                s.connect((url, 80))
                oq.put(proxy_info)
                print '[+] alive proxy', proxy_info
            except Exception, error:
                print '[-] bad proxy', proxy_info
# The writer loops forever, so run it as a daemon thread so the program can exit.
writer_thread = threading.Thread(target=writer, args=(open(output_file, "w"), output_queue))
writer_thread.daemon = True
writer_thread.start()

for i in xrange(threads):
    threading.Thread(target=checker, args=(check_queue, output_queue)).start()

for line in open(input_file).readlines():
    check_queue.put(line.strip('\n'))
print "File reading done"

# One None per worker tells each checker thread to shut down.
for i in xrange(threads):
    check_queue.put(None)

raw_input("PRESS ENTER TO QUIT")
sys.exit(0)
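
Usage sketch, assuming the hard-coded defaults above (the script file name proxychecker.py and the addresses below are placeholders, not part of the paste): put one proxy per line, in ip:port form, into proxylist.txt, then run the script with Python 2. Live proxies are printed and appended to proxy_alive.txt by the writer thread.

    proxylist.txt:
        203.0.113.10:8080
        203.0.113.11:3128

    $ python proxychecker.py
    Bobng's proxy checker. Using 7 second timeout
    File reading done
    PRESS ENTER TO QUIT

To check SOCKS proxies instead, set proxy_type to 's4' or 's5' (or restore the commented-out sys.argv lines and pass the input file, proxy type and output file on the command line); that path needs the SocksiPy module that provides socks.socksocket.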