# Code:
import urllib, random, sys, time
from threading import Thread
# --- Configuration ---
# Custom host/URL list, e.g.:
# urls = ["http://google.com", "http://gmx.de", "further entries"]
# or urls = ["http://only_one.url"]
# None means: generate a random URL for every request instead.
urls = None
# Custom logins, e.g.:
# logins = ["horst", "detlef", "n0N4m3"]
# or logins = ["only_one_entry"]
# None means: generate a random login for every request instead.
logins = None
# Custom computer names, e.g.:
# pc_names = ["horst", "detlef", "msie6"]
# or pc_names = ["only_one_name"]
# None means: generate a random computer name for every request instead.
pc_names = None
# How many threads (i.e. simultaneous connections) should be started.
threads = 10
# How often progress should be printed (in seconds).
update_counter = 10
# --- Internal variables ---
# Lower-case ASCII letters 'a'..'z'.
a_z = [chr(x) for x in range(ord('a'),ord('z')+1)]
# Upper-case ASCII letters 'A'..'Z'.
A_Z = [x.upper() for x in a_z]
# Alphabet random strings are drawn from: digits, punctuation, letters.
chars = list("0123456789._-!^$%&+#?") + A_Z + a_z
# Number of successfully inserted junk entries (shared by all threads).
counter = 0
# Set to True to make every worker thread finish its loop and exit.
shutdown = False
def rand_string(length):
    """Return a random, percent-encoded string of *length* characters.

    Characters are drawn with repetition from the module-level `chars`
    alphabet; the joined string is URL-quoted via urllib.quote.
    """
    picked = [random.choice(chars) for _ in range(length)]
    return urllib.quote("".join(picked))
def generate_args():
    """Build the path + query string for one fake "add" request.

    Example of a resulting request URL:
    http://fami33.co.cc/index.php?action=add&a=15&u=%64&l=%65&p=%6D&c=%4D
    At most 18 "programs" exist, distinguished by their numeric IDs.
    Each of the u/l/c fields is taken from the user-configured list when
    one is set, otherwise a random string of plausible length is used.
    """
    prog_id = random.randrange(0, 18 + 1)
    # URL field.
    url_field = rand_string(random.randrange(3, 255)) if urls is None \
        else random.choice(urls)
    # Login field.
    login_field = rand_string(random.randrange(3, 255)) if logins is None \
        else random.choice(logins)
    # Password field is always random.
    password_field = rand_string(random.randrange(4, 30))
    # Computer-name field.
    name_field = rand_string(random.randrange(3, 100)) if pc_names is None \
        else random.choice(pc_names)
    query = urllib.urlencode((('action', 'add'), ('a', prog_id),
                              ('u', url_field), ('l', login_field),
                              ('p', password_field), ('c', name_field)))
    return 'index.php?' + query
def request(url, proxy):
    """Fire one GET request at *url*, optionally through *proxy*.

    proxy is either {} (direct connection) or {"http": "http://host:port"}.
    Returns True when the request completed (regardless of HTTP status),
    False on IOError (connection/proxy failure).  On a non-200 response,
    a failing proxy is removed from the global `proxies` list; a non-200
    response with a non-empty body over a direct connection raises
    Exception ("not an iStealer webpanel"), which propagates to the caller.
    NOTE(review): runs inside worker threads, but the mutation of the
    shared `proxies` list below is not synchronized.
    """
    try:
        page = urllib.urlopen(url, proxies = proxy)
        code, msg_len = page.code, len(page.read())
        page.close()
        if code != 200:
            print("HTTP error:" + str(code))
            if len(proxy) > 0:
                proxyname = proxy['http']
                # Search for and delete the proxy entry.
                if proxyname in proxies:
                    proxies.remove(proxyname)
                    print "Remove %s proxy from list" % proxy['http']
                # Not found - the proxy was probably prefixed with
                # 'http://' by this script - look it up without 'http://'.
                elif proxyname[7:] in proxies:
                    proxies.remove(proxy['http'][7:])
                    print "Remove %s proxy from list" % proxy['http']
            # Ignore error messages produced by proxies: only treat a
            # non-empty error body as fatal on a direct connection.
            elif msg_len > 0 and len(proxy) == 0 :
                raise Exception("This is not an iStealer webpanel!")
        return True
    except IOError, err:
        return False
def fill_thread():
    """Worker loop: keep inserting junk entries until `shutdown` is set.

    Sleeps `sleep_time` seconds between requests; when a proxy list is
    configured, each request goes through a randomly chosen proxy.
    Successful requests increment the global `counter`.
    """
    global counter
    while not shutdown:
        time.sleep(sleep_time)
        if proxies is None:
            chosen = {}
        else:
            addr = random.choice(proxies)
            # urllib expects the proxy URL to carry an explicit scheme.
            if not addr.startswith('http://'):
                addr = 'http://' + addr
            chosen = {"http": addr}
        target = "%s/%s" % (hostname, generate_args())
        if request(target, chosen):
            counter += 1
# --- Script entry point ---
try:
    # Target base URL and per-request delay come from the command line.
    hostname = sys.argv[1]
    sleep_time = int(sys.argv[2])
    proxies = None
    if len(sys.argv) >= 4:
        # Format of the proxy file, one proxy per line:
        # http://www.someproxy.com:port
        with open(sys.argv[3]) as proxyfile:
            # Drop lines too short to be a real proxy entry.
            proxies = [line for line in proxyfile.read().splitlines() if len(line) > 2]
    # Start the worker threads.
    for i in range(threads):
        Thread(target = fill_thread).start()
    # Main thread: periodically report progress.
    while True:
        time.sleep(update_counter)
        print "Junk entries inserted:", counter
except KeyboardInterrupt:
    print "Junk entries inserted: ", counter
except Exception, ex:
    # Anything else (missing argv, bad int, unreadable file, ...) is
    # reported together with the usage string.
    print "Error:", ex
    print "usage: noisteal.py http://hostname.xy " + \
          "sleep_time_sec [proxy_list_file]"
finally:
    # Signal every worker thread to stop.
    shutdown = True