Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- '''
- Created on Aug 5, 2011
- @author: v0mit
- '''
- import urllib, urllib2, random, sys, urlparse
- from BeautifulSoup import BeautifulSoup
- from BeautifulSoup import BeautifulStoneSoup
# --- command line ----------------------------------------------------
# pyxsser.py <target> [<output>]; output defaults to vulnerable.txt.
argc = len(sys.argv)
if argc == 3:
    url = sys.argv[1]
    log_file = sys.argv[2]
elif argc == 2:
    url = sys.argv[1]
    log_file = "vulnerable.txt"
else:
    print("Usage: pyxsser.py <target> <output>")
    sys.exit(0)

# Host part of the target URL; later used to skip off-site links.
base_url = urlparse.urlsplit(url).netloc

header = """
______ __ _______ _____
| ___ \ \ \ / / ___/ ___|
| |_/ /_ _ \ V /\ `--.\ `--. ___ _ __
| __/| | | |/ \ `--. \`--. \/ _ \ '__|
| | | |_| / /^\ |\__/ /\__/ / __/ |
\_| \__, \/ \|____/\____/ \___|_|
__/ |
|___/ v0.2
v0mit@darkpy.net
"""
print(header)

# Random 8-char marker wrapped in HTML/JS meta-characters; if the whole
# string comes back unescaped in a response, the parameter is likely
# injectable.
marker = ""
for _ in range(8):
    marker += random.choice("abcdefghi1234567890")
injection_str = ";!--\"'<%s>=&{()}" % marker
# URL-encoded form used when building probe URLs ("=%3B%21--...").
encoded_injection_str = urllib.urlencode({"": injection_str})
class _http_handler():
    """Thin cookie-aware HTTP client built on urllib2 (Python 2).

    Fixes over the original:
    - the POST branch printed the error twice; now printed once,
      matching the GET branch,
    - the POST branch did not catch ValueError (bad URL) while the GET
      branch did; both branches are now handled identically,
    - the ValueError message was missing its closing parenthesis.
    """

    def __init__(self):
        # Opener with an in-memory cookie jar; also installed globally.
        self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor())
        urllib2.install_opener(self.opener)

    def request(self, url, data=None):
        """Fetch *url* (GET, or POST when *data* dict is given).

        Returns the response body as a string, or None on any
        network/URL error (the error is printed, not raised).
        """
        req = urllib2.Request(url)
        req.add_header('User-Agent', "PyXSSer.v0.2")
        if data is not None:
            data = urllib.urlencode(data)
        try:
            if data is not None:
                response = self.opener.open(req, data)
            else:
                response = self.opener.open(req)
        except urllib2.URLError as errno:
            print("[!]urllib2.URLError({0})\n".format(errno))
            return
        except ValueError as errno:
            print("[!]ValueError({0})\n".format(errno))
            return
        return response.read()
def get_forms(forms):
    """Distill parsed <form> tags into [action, [input_names]] pairs.

    Forms without an action attribute are skipped, as are text inputs
    without a name.

    Bug fix: the original tested ``input != None`` (always true inside
    the loop) instead of ``name != None``, so nameless inputs were
    recorded as the literal string "None". Also renamed the loop
    variable so it no longer shadows the ``input`` builtin.
    """
    valid_forms = []
    for form in forms:
        action = form.get("action")
        if action is None:
            continue
        names = []
        for field in form.findAll("input", attrs={"type": "text"}):
            name = field.get("name")
            if name is not None:
                names.append(str(name))
        valid_forms.append([str(action), names])
    return valid_forms
def parse_target(data):
    """Turn a fetched HTML page into a list of probe URLs.

    Harvests GET and POST forms plus anchor links from *data* and
    converts both into URLs that carry the injection string in one
    parameter each.
    """
    soup = BeautifulSoup(data)
    candidate_forms = soup.findAll("form", attrs={"method": "get"}) \
                    + soup.findAll("form", attrs={"method": "post"})
    probe_urls = generate_form_links(get_forms(candidate_forms))
    probe_urls += generate_links(get_links(soup.findAll("a")))
    return probe_urls
def generate_links(valid_links):
    """Expand a {url: set(param_names)} mapping into probe URLs.

    For every parameter of every URL, emit one URL where that parameter
    carries the encoded injection string; parameters before it are
    filled with "gl1", parameters after it with "gl2".

    Bug fix: the original appended ``buff_list[x:]`` (instead of
    ``buff_list[x+1:]``) after injecting, so the injected parameter was
    emitted a second time as ``=gl2`` — on most servers the later
    harmless value overrides the probe, defeating the test. Compare the
    correct ``[para+1:]`` slice in generate_form_links().
    """
    urls = []
    for link in valid_links:
        params = list(valid_links[link])
        for i in range(len(params)):
            new_url = link
            if not new_url.endswith("?"):
                new_url += "?"
            pieces = []
            for query in params[:i]:
                pieces.append("{0}=gl1".format(query))
            pieces.append("{0}{1}".format(params[i], encoded_injection_str))
            for query in params[i + 1:]:
                pieces.append("{0}=gl2".format(query))
            urls.append(new_url + "&".join(pieces))
    return urls
def generate_form_links(forms):
    """Build one probe URL per text input for every harvested form.

    *forms* is a list of ``[action, [input_names]]`` pairs as produced
    by get_forms(). The chosen parameter gets the encoded injection
    string; every other parameter is given the dummy value "1". Results
    are resolved against the target URL with urljoin.
    """
    urls = []
    for action, names in forms:
        # An action of "#" means "submit back to the current page".
        target = url if action == "#" else action
        if not target.endswith("?"):
            target = target + "?"
        for idx in range(len(names)):
            pieces = []
            for other in names[:idx]:
                pieces.append("{0}=1".format(other))
            pieces.append("{0}{1}".format(names[idx], encoded_injection_str))
            for other in names[idx + 1:]:
                pieces.append("{0}=1".format(other))
            urls.append(urlparse.urljoin(url, target + "&".join(pieces)))
    return urls
def get_links(links):
    """Collect same-site links with query strings from parsed <a> tags.

    Returns a dict mapping a normalized URL (scheme://host/path, no
    query) to the set of query parameter names seen for it. Links
    without a query string, and links pointing at a different host than
    the target, are ignored; relative links are resolved against the
    target URL.
    """
    urls = {}
    for anchor in links:
        href = anchor.get("href")
        if href is None:
            continue
        parts = urlparse.urlparse(href)
        if parts.query == "":
            continue
        if parts.netloc == "":
            # Relative link: resolve against the scan target.
            target = urlparse.urljoin(url, parts.path)
        elif parts.netloc == base_url:
            target = "{0}://{1}{2}".format(parts.scheme, parts.netloc, parts.path)
        else:
            # Off-site link — outside the scan scope.
            continue
        bucket = urls.setdefault(target, set())
        for q in queries(parts.query):
            bucket.add(str(q))
    return urls
def queries(queries):
    """Return the set of parameter names found in a raw query string."""
    # Each "&"-separated chunk is "name=value" (or just "name"); keep
    # only the part before the first "=".
    return set(chunk.split("=")[0] for chunk in queries.split("&"))
# --- main scan -------------------------------------------------------
# Fetch the target page, derive probe URLs, request each one and look
# for the unescaped injection string in the response.
h = _http_handler()
data = h.request(url)
if data is None:
    print("[!]Could not connect to target.")
    sys.exit(0)

urls = parse_target(data)
vulnerable = []
for url in urls:
    print("[+]Testing:{0}".format(url))
    # Fresh handler per probe so cookies from one request do not leak
    # into the next.
    h = _http_handler()
    data = h.request(url)
    if data is None:
        continue
    if injection_str in data:
        print("\n[!]Possible XSS found!")
        print("[+]URL:{0}".format(url))
        vulnerable.append(url)
        n = data.find(injection_str)
        print("[+]Injection string found @ {0}".format(n))
        print("[+]{0}\n".format(data[n:n + len(injection_str)]))

print("\n[+]Scan completed.")
if not vulnerable:
    print("[+]Nothing found.")
    sys.exit(0)

try:
    out = open(log_file, "wb")
except IOError as errno:
    # Bug fix: the format string was "[0}", which itself raises
    # ValueError instead of printing the IOError.
    print("\n[!]IOError: {0}".format(errno))
    sys.exit(0)
try:
    for found_url in vulnerable:
        out.write("{0}\n".format(found_url))
finally:
    # Guarantee the report file is closed even if a write fails.
    out.close()
print("[+]Report saved to: {0}".format(log_file))
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement