Version 0.2.2

Rob Glew 2016-01-26 16:23:40 -06:00
parent d8dfcd3e24
commit 9a14a5541a
15 changed files with 519 additions and 114 deletions

155 pappyproxy/plugins/debug.py Normal file

@@ -0,0 +1,155 @@
import gc
import shlex
import code
import crochet
import os
import resource
import random
import datetime
from pappyproxy.http import Request, post_request
from pappyproxy.util import PappyException
from pappyproxy.requestcache import RequestCache
from pappyproxy.console import print_requests
from pappyproxy.pappy import heapstats, cons
from twisted.internet import defer

def cache_info(line):
    c = Request.cache
    print 'Cache has %d/%d slots filled' % (len(c._cached_reqs), c._cache_size)
    print 'Hit score: {0:.2f} ({1}/{2})'.format(c.hit_ratio, c.hits, c.hits+c.misses)
    print ''
    if line != 'q':
        rl = [v for k, v in Request.cache._cached_reqs.iteritems()]
        rs = sorted(rl, key=lambda r: Request.cache._last_used[r.reqid], reverse=True)
        print_requests(rs)

def memory_info(line):
    try:
        import psutil
    except ImportError:
        raise PappyException('This command requires the psutil package')
    proc = psutil.Process(os.getpid())
    mem = proc.memory_info().rss
    megabyte = (float(mem)/1024)/1024
    print 'Memory usage: {0:.2f} Mb ({1} bytes)'.format(megabyte, mem)

def heap_info(line):
    if heapstats is None:
        raise PappyException('Command requires the guppy library')
    size = heapstats.heap().size
    print 'Heap usage: {0:.2f} Mb'.format(size/(1024.0*1024.0))
    print heapstats.heap()

def limit_info(line):
    rsrc = resource.RLIMIT_AS
    soft, hard = resource.getrlimit(rsrc)
    print 'Soft limit starts as:', soft
    print 'Hard limit starts as:', hard
    if line:
        limit_mb = int(line)
        limit_bytes = int(line)*1024*1024  # RLIMIT_AS is given in bytes
        print 'Setting limit to %s Mb' % limit_mb
        resource.setrlimit(rsrc, (limit_bytes, hard))
        soft, hard = resource.getrlimit(rsrc)
        print 'Soft limit is now:', soft
        print 'Hard limit is now:', hard

def graph_randobj(line):
    try:
        import objgraph
    except ImportError:
        raise PappyException('This command requires the objgraph library')
    args = shlex.split(line)
    if len(args) > 1:
        fname = args[1]
    else:
        fname = 'chain.png'
    print 'Getting random %s object...' % args[0]
    obj = random.choice(objgraph.by_type(args[0]))
    print 'Creating chain...'
    chain = objgraph.find_backref_chain(obj, objgraph.is_proper_module)
    print 'Saving chain...'
    objgraph.show_chain(chain, filename=fname)

def heapdo(line):
    if heapstats is None:
        raise PappyException('Command requires the guppy library')
    h = heapstats.heap()
    code.interact(local=locals())

def collect(line):
    gc.collect()

@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def loadblock(line):
    args = shlex.split(line)
    yield Request.cache.load(args[0], int(args[1]))

@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def big_fucking_data_file(line):
    print "Generating some giant fucking requests"
    for i in range(1000):
        if i % 20 == 0:
            print 'Generated %d' % i
        r = post_request('https://www.google.com')
        r.body = 'A'*(1024*1024)
        yield r.async_deep_save()

def time_cmd(line):
    print 'Timing `%s`...' % line
    start = datetime.datetime.now()
    cons.onecmd(line.strip())
    end = datetime.datetime.now()
    total_time = (end-start).total_seconds()
    print '`{0}` took {1:.3f} seconds'.format(line, total_time)

def cache_data(line):
    args = shlex.split(line)
    reqid = args[0]
    cached = reqid in Request.cache._cached_reqs
    if reqid in Request.cache._last_used:
        last_used = Request.cache._last_used[reqid]
    else:
        last_used = 'NOT IN _last_used'
    in_all = reqid in Request.cache.all_ids
    in_unmangled = reqid in Request.cache.unmangled_ids
    try:
        ordered_ids_pos = Request.cache.ordered_ids.index(reqid)
    except ValueError:
        ordered_ids_pos = 'Not in ordered_ids'
    in_inmem = reqid in Request.cache.inmem_reqs
    print ''
    print 'Cache data about request %s ----------' % reqid
    print 'Cached: %s' % cached
    print 'Last used: %s' % last_used
    print 'In all_ids: %s' % in_all
    print 'In unmangled: %s' % in_unmangled
    print 'Ordered id pos: %s' % ordered_ids_pos
    print 'Is inmem: %s' % in_inmem
    print ''

def check_cache(line):
    Request.cache.assert_ids()

def load_cmds(cmd):
    cmd.set_cmds({
        'cacheinfo': (cache_info, None),
        'heapinfo': (heap_info, None),
        'memlimit': (limit_info, None),
        'heapdo': (heapdo, None),
        'gccollect': (collect, None),
        'graphobj': (graph_randobj, None),
        'meminfo': (memory_info, None),
        'bigdata': (big_fucking_data_file, None),
        'checkcache': (check_cache, None),
        'loadblock': (loadblock, None),
        'time': (time_cmd, None),
        'cachedata': (cache_data, None),
    })
    cmd.add_aliases([
    ])
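
For context, the registration pattern above is all a command plugin needs: each handler receives the raw argument string typed after the command, and load_cmds wires command names to handlers through cmd.set_cmds. Below is a minimal sketch of another plugin in the same style; the hello command, its output, and the alias tuple format are illustrative assumptions, not part of this commit.

# hypothetical plugin sketch -- not part of this commit
from pappyproxy.util import PappyException

def hello(line):
    # `line` holds everything typed after the command name
    if not line:
        raise PappyException('Usage: hello <name>')
    print 'Hello, %s!' % line

def load_cmds(cmd):
    # same registration pattern as debug.py above:
    # command name -> (handler function, autocomplete function or None)
    cmd.set_cmds({
        'hello': (hello, None),
    })
    cmd.add_aliases([
        ('hello', 'hi'),  # assumed (command, alias) tuple format
    ])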


@@ -8,6 +8,7 @@ from pappyproxy.util import PappyException
from pappyproxy.http import Request
from twisted.internet import defer
from pappyproxy.plugin import main_context_ids
from pappyproxy.colors import Colors, Styles, verb_color, scode_color, path_formatter, host_color
###################
## Helper functions
@@ -21,8 +22,7 @@ def view_full_message(request, headers_only=False):
def print_request_extended(request):
    # Prints extended info for the request
    title = "Request Info (reqid=%s)" % request.reqid
    print title
    print '-'*len(title)
    print Styles.TABLE_HEADER + title + Colors.ENDC
    reqlen = len(request.body)
    reqlen = '%d bytes' % reqlen
    rsplen = 'No response'
@@ -34,6 +34,7 @@ def print_request_extended(request):
    if request.response:
        response_code = str(request.response.response_code) + \
                        ' ' + request.response.response_text
        response_code = scode_color(response_code) + response_code + Colors.ENDC
        rsplen = len(request.response.body)
        rsplen = '%d bytes' % rsplen
@@ -59,24 +60,31 @@ def print_request_extended(request):
        time_made_str = request.time_start.strftime('%a, %b %d, %Y, %I:%M:%S %p')
    else:
        time_made_str = '--'
    verb = verb_color(request.verb) + request.verb + Colors.ENDC
    host = host_color(request.host) + request.host + Colors.ENDC
    print 'Made on %s' % time_made_str
    print 'ID: %s' % request.reqid
    print 'Verb: %s' % request.verb
    print 'Host: %s' % request.host
    print 'Path: %s' % request.full_path
    print 'Status Code: %s' % response_code
    print 'Request Length: %s' % reqlen
    print 'Response Length: %s' % rsplen
    print_pairs = []
    print_pairs.append(('Made on', time_made_str))
    print_pairs.append(('ID', request.reqid))
    print_pairs.append(('Verb', verb))
    print_pairs.append(('Host', host))
    print_pairs.append(('Path', path_formatter(request.full_path)))
    print_pairs.append(('Status Code', response_code))
    print_pairs.append(('Request Length', reqlen))
    print_pairs.append(('Response Length', rsplen))
    if request.response and request.response.unmangled:
        print 'Unmangled Response Length: %s bytes' % len(request.response.unmangled.full_response)
    print 'Time: %s' % time_str
    print 'Port: %s' % request.port
    print 'SSL: %s' % is_ssl
    print 'Mangled: %s' % mangle_str
    print 'Tags: %s' % (', '.join(request.tags))
        print_pairs.append(('Unmangled Response Length', len(request.response.unmangled.full_response)))
    print_pairs.append(('Time', time_str))
    print_pairs.append(('Port', request.port))
    print_pairs.append(('SSL', is_ssl))
    print_pairs.append(('Mangled', mangle_str))
    print_pairs.append(('Tags', ', '.join(request.tags)))
    if request.plugin_data:
        print 'Plugin Data: %s' % (request.plugin_data)
        print_pairs.append(('Plugin Data', request.plugin_data))
    for k, v in print_pairs:
        print Styles.KV_KEY+str(k)+': '+Styles.KV_VAL+str(v)
def print_tree(tree):
    # Prints a tree. Takes in a sorted list of path tuples
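
The new output path in this hunk collects (key, value) pairs in print_pairs and renders them with Styles.KV_KEY / Styles.KV_VAL plus the verb_color, scode_color, path_formatter, and host_color helpers, instead of hand-formatting each print. A rough sketch of that idea with stand-in ANSI values follows; the real constants and helpers live in pappyproxy/colors.py and are not shown in this diff, so everything below is assumed for illustration only.

# illustrative stand-ins; actual definitions in pappyproxy/colors.py may differ
class Colors:
    ENDC = '\033[0m'    # assumed: reset all attributes
    GREEN = '\033[92m'
    RED = '\033[91m'

class Styles:
    KV_KEY = '\033[1m'  # assumed: bold keys
    KV_VAL = '\033[0m'  # assumed: plain values

def scode_color(code):
    # assumed behavior: green for 2xx responses, red for everything else
    return Colors.GREEN if code.startswith('2') else Colors.RED

# the pattern adopted by the diff: build pairs, then print them uniformly
status = scode_color('200 OK') + '200 OK' + Colors.ENDC
print_pairs = [('Verb', 'GET'), ('Status Code', status)]
for k, v in print_pairs:
    print Styles.KV_KEY + str(k) + ': ' + Styles.KV_VAL + str(v)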