parent
d8dfcd3e24
commit
9a14a5541a
15 changed files with 519 additions and 114 deletions
@@ -0,0 +1,47 @@
Contributing
************

.. contents:: Table of Contents
    :local:

Contributing
============

Want to help out? Awesome! This page will give you some ideas for features you can implement. Make sure to read the docs on `writing plugins <pappyplugins>`_ before starting, since most of these features should be implemented as plugins.

Feature Wishlist
================

This is a wish list of features that I haven't gotten around to implementing but that could probably be done without too much trouble. I would prefer that you try to implement things as plugins, because the core code tends to change pretty regularly. If you build something as a plugin, the worst-case scenario is that a few API calls break. If you implement something in the core, the worst-case scenario is that everything changes and your efforts are lost because the function you were modifying no longer exists.

If you need to save data to disk, just save a JSON object to disk for your plugin. When you submit the pull request, I will make changes to the plugin/schema to store the data in the data file.
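For example, a plugin could persist its state with something like this (a minimal sketch; the ``DATA_FILE`` path and the ``save_data``/``load_data`` helpers are hypothetical names for illustration, not part of the plugin API):

.. code-block:: python

    import json
    import os

    DATA_FILE = 'myplugin_data.json'  # hypothetical path for this plugin's state

    def save_data(data):
        # Dump the plugin's state to disk as JSON
        with open(DATA_FILE, 'w') as f:
            json.dump(data, f)

    def load_data():
        # Return the saved state, or an empty dict if nothing was saved yet
        if not os.path.exists(DATA_FILE):
            return {}
        with open(DATA_FILE, 'r') as f:
            return json.load(f)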

Anyway, here are some ideas for things you could implement:

* Creds management

  When doing a test, the client may give you a number of usernames and passwords. It would be great if you could implement a system to easily copy and paste usernames and passwords from the console so you don't have to keep opening creds.txt and copying from there. My suggestion is to add a command to copy a username or a password and let people tab-complete the username.

* Session management

  Add a system to manage sessions and easily swap between them. I already started on a sessions class in pappyproxy/sessions.py, which might help you get started.

* Scan history for easy findings

  Some findings are as simple as checking whether a header exists or not. Implement a plugin that goes through history and lists some of the easier-to-find issues. For example, you could search for things like:

  * Secure/HttpOnly flag not set on cookies (mainly session cookies)
  * Lack of HSTS headers
  * Password fields with autocomplete enabled

* Perform an SSL config check on a host (i.e. functionality similar to an `ssllabs scan <https://www.ssllabs.com/>`_ without having to go through a website)

  Find a library that checks a host for weak ciphers and similar issues, and print out any problems that are found.

* Add a SQLMap button

  Make it easy to pass a request to SQLMap to check for SQLi. Make sure you can configure which fields you do and don't want tested, and by default just answer either "yes, it looks like SQLi" or "no, it doesn't look like SQLi".

* Decoder functionality

  Add some commands for encoding/decoding text (there's a bare-bones plugin skeleton after this list). If you go after this, let me know, because I'm probably going to be pretty picky about how this is implemented. You'll have to do better than just a ``base64_decode <text>`` command.

* Additional macro templates

  Write some commands for generating additional types of macros. For example, let people generate an intercepting macro that does search/replace or modifies a header. Save as much typing as possible for common actions.

* Show requests/responses in real time as they go through the proxy

  Let people watch requests as they pass through the proxy. It's fine to implement this as an intercepting macro, since people watching the requests aren't going to notice that response streaming is disabled.

* Vim plugin to make editing HTTP messages easier

  Implement some functionality to make editing HTTP messages easier. It would be great to have a plugin that is automatically loaded into vim when using the interceptor/repeater to make editing requests easier. Look at Burp's request editor and try to implement anything you miss from it.

* Request Diff

  Add some way to compare requests, preferably with both a "diff" mode and a "just look at two at once" mode. You probably want to implement it as a vim plugin for consistency.
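
As a rough idea of how these plugins hang together, here is a bare-bones skeleton for the decoder item above, using the same ``load_cmds``/``set_cmds`` registration pattern as the debug plugin added in this commit (treat it as a sketch of the structure, not the finished feature; it's exactly the bare-minimum command the wishlist says to improve on):

.. code-block:: python

    import base64
    import shlex

    def b64_decode(line):
        # Decode the base64 string given as an argument to the command
        args = shlex.split(line)
        print base64.b64decode(args[0])

    def load_cmds(cmd):
        # Register the command with the console; the (function, None)
        # tuple format matches the debug plugin in this commit
        cmd.set_cmds({
            'base64_decode': (b64_decode, None),
        })
        cmd.add_aliases([
        ])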

Feel free to contact me with ideas if you want to add something to this list.
@@ -0,0 +1,113 @@
import re
import itertools

def clen(s):
    # Length of a string, not counting ANSI escape sequences
    ansi_escape = re.compile(r'\x1b[^m]*m')
    return len(ansi_escape.sub('', s))

class Colors:
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    # Effects
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'

    # Colors
    BLACK = '\033[30m'
    RED = '\033[31m'
    GREEN = '\033[32m'
    YELLOW = '\033[33m'
    BLUE = '\033[34m'
    MAGENTA = '\033[35m'
    CYAN = '\033[36m'
    WHITE = '\033[37m'

    # BG Colors
    BGBLACK = '\033[40m'
    BGRED = '\033[41m'
    BGGREEN = '\033[42m'
    BGYELLOW = '\033[43m'
    BGBLUE = '\033[44m'
    BGMAGENTA = '\033[45m'
    BGCYAN = '\033[46m'
    BGWHITE = '\033[47m'

    # Light Colors
    LBLACK = '\033[90m'
    LRED = '\033[91m'
    LGREEN = '\033[92m'
    LYELLOW = '\033[93m'
    LBLUE = '\033[94m'
    LMAGENTA = '\033[95m'
    LCYAN = '\033[96m'
    LWHITE = '\033[97m'

class Styles:

    ################
    # Request tables
    TABLE_HEADER = Colors.BOLD+Colors.UNDERLINE
    VERB_GET = Colors.CYAN
    VERB_POST = Colors.YELLOW
    VERB_OTHER = Colors.BLUE
    STATUS_200 = Colors.CYAN
    STATUS_300 = Colors.MAGENTA
    STATUS_400 = Colors.YELLOW
    STATUS_500 = Colors.RED
    PATH_COLORS = [Colors.CYAN, Colors.BLUE]

    KV_KEY = Colors.GREEN
    KV_VAL = Colors.ENDC


def verb_color(verb):
    # Color-code the HTTP verb for request tables
    if verb == 'GET':
        return Styles.VERB_GET
    elif verb == 'POST':
        return Styles.VERB_POST
    else:
        return Styles.VERB_OTHER

def scode_color(scode):
    # Color-code the status code by its first digit; scode may be None
    # or empty, so check it before indexing
    if scode and scode[0] == '2':
        return Styles.STATUS_200
    elif scode and scode[0] == '3':
        return Styles.STATUS_300
    elif scode and scode[0] == '4':
        return Styles.STATUS_400
    elif scode and scode[0] == '5':
        return Styles.STATUS_500
    else:
        return Colors.ENDC

def path_formatter(path, width=-1):
    # Truncate the path to fit in `width` characters (ending in '...')
    # and color alternating path segments
    if width != -1 and len(path) > width:
        path = path[:width]
        path = path[:-3]+'...'
    parts = path.split('/')
    colparts = []
    for p, c in zip(parts, itertools.cycle(Styles.PATH_COLORS)):
        colparts.append(c+p+Colors.ENDC)
    return '/'.join(colparts)

def host_color(host):
    # Give each unique host a different color (ish)
    if not host:
        return Colors.RED
    hostcols = [Colors.RED,
                Colors.GREEN,
                Colors.YELLOW,
                Colors.BLUE,
                Colors.MAGENTA,
                Colors.CYAN,
                Colors.LRED,
                Colors.LGREEN,
                Colors.LYELLOW,
                Colors.LBLUE,
                Colors.LMAGENTA,
                Colors.LCYAN]
    return hostcols[hash(host) % len(hostcols)]
@@ -0,0 +1,155 @@
import gc
import shlex
import code
import crochet
import os
import resource
import random
import datetime
from pappyproxy.http import Request, post_request
from pappyproxy.util import PappyException
from pappyproxy.requestcache import RequestCache
from pappyproxy.console import print_requests
from pappyproxy.pappy import heapstats, cons
from twisted.internet import defer

def cache_info(line):
    c = Request.cache
    print 'Cache has %d/%d slots filled' % (len(c._cached_reqs), c._cache_size)
    print 'Hit score: {0:.2f} ({1}/{2})'.format(c.hit_ratio, c.hits, c.hits+c.misses)
    print ''
    if line != 'q':
        rl = [v for k, v in Request.cache._cached_reqs.iteritems()]
        rs = sorted(rl, key=lambda r: Request.cache._last_used[r.reqid], reverse=True)
        print_requests(rs)

def memory_info(line):
    try:
        import psutil
    except ImportError:
        raise PappyException('This command requires the psutil package')
    proc = psutil.Process(os.getpid())
    mem = proc.memory_info().rss
    megabyte = (float(mem)/1024)/1024
    print 'Memory usage: {0:.2f} Mb ({1} bytes)'.format(megabyte, mem)

def heap_info(line):
    if heapstats is None:
        raise PappyException('Command requires the guppy library')
    size = heapstats.heap().size
    print 'Heap usage: {0:.2f} Mb'.format(size/(1024.0*1024.0))
    print heapstats.heap()

def limit_info(line):
    rsrc = resource.RLIMIT_AS
    soft, hard = resource.getrlimit(rsrc)
    print 'Soft limit starts as:', soft
    print 'Hard limit starts as:', hard
    if line:
        limit_mb = int(line)
        limit_bytes = limit_mb*1024*1024 # RLIMIT_AS is specified in bytes
        print 'Setting limit to %s Mb' % limit_mb
        resource.setrlimit(rsrc, (limit_bytes, hard))
        soft, hard = resource.getrlimit(rsrc)
        print 'Soft limit is now:', soft
        print 'Hard limit is now:', hard

def graph_randobj(line):
    try:
        import objgraph
    except ImportError:
        raise PappyException('This command requires the objgraph library')
    args = shlex.split(line)
    if len(args) > 1:
        fname = args[1]
    else:
        fname = 'chain.png'
    print 'Getting random %s object...' % args[0]
    obj = random.choice(objgraph.by_type(args[0]))
    print 'Creating chain...'
    chain = objgraph.find_backref_chain(obj, objgraph.is_proper_module)
    print 'Saving chain...'
    objgraph.show_chain(chain, filename=fname)


def heapdo(line):
    if heapstats is None:
        raise PappyException('Command requires the guppy library')
    h = heapstats.heap()
    # Drop into an interactive session with `h` bound to the heap snapshot
    code.interact(local=locals())

def collect(line):
    gc.collect()

@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def loadblock(line):
    args = shlex.split(line)
    yield Request.cache.load(args[0], int(args[1]))

@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def big_fucking_data_file(line):
    print "Generating some giant fucking requests"
    for i in range(1000):
        if i % 20 == 0:
            print 'Generated %d' % i
        r = post_request('https://www.google.com')
        r.body = 'A'*(1024*1024)
        yield r.async_deep_save()

def time_cmd(line):
    print 'Timing `%s`...' % line
    start = datetime.datetime.now()
    cons.onecmd(line.strip())
    end = datetime.datetime.now()
    total_time = (end-start).total_seconds()
    print '`{0}` took {1:.3f} seconds'.format(line, total_time)

def cache_data(line):
    args = shlex.split(line)
    reqid = args[0]
    cached = reqid in Request.cache._cached_reqs
    if reqid in Request.cache._last_used:
        last_used = Request.cache._last_used[reqid]
    else:
        last_used = 'NOT IN _last_used'
    in_all = reqid in Request.cache.all_ids
    in_unmangled = reqid in Request.cache.unmangled_ids
    try:
        ordered_ids_pos = Request.cache.ordered_ids.index(reqid)
    except ValueError:
        ordered_ids_pos = 'Not in ordered_ids'
    in_inmem = reqid in Request.cache.inmem_reqs

    print ''
    print 'Cache data about request %s ----------' % reqid
    print 'Cached: %s' % cached
    print 'Last used: %s' % last_used
    print 'In all_ids: %s' % in_all
    print 'In unmangled: %s' % in_unmangled
    print 'Ordered id pos: %s' % ordered_ids_pos
    print 'Is inmem: %s' % in_inmem
    print ''


def check_cache(line):
    Request.cache.assert_ids()

def load_cmds(cmd):
    cmd.set_cmds({
        'cacheinfo': (cache_info, None),
        'heapinfo': (heap_info, None),
        'memlimit': (limit_info, None),
        'heapdo': (heapdo, None),
        'gccollect': (collect, None),
        'graphobj': (graph_randobj, None),
        'meminfo': (memory_info, None),
        'bigdata': (big_fucking_data_file, None),
        'checkcache': (check_cache, None),
        'loadblock': (loadblock, None),
        'time': (time_cmd, None),
        'cachedata': (cache_data, None),
    })
    cmd.add_aliases([
    ])
@@ -0,0 +1,23 @@
from twisted.internet import defer

"""
Schema v7

Creates an index for requests on start time in the data file. This will make
iterating through history a bit faster.
"""

update_queries = [
    """
    CREATE INDEX ind_start_time ON requests(start_datetime);
    """,

    """
    UPDATE schema_meta SET version=7;
    """
    ]

@defer.inlineCallbacks
def update(dbpool):
    for query in update_queries:
        yield dbpool.runQuery(query)