Version 0.2.4

Rob Glew 2016-01-28 14:53:34 -06:00
parent d805eabeec
commit 0b6a63ddbb
12 changed files with 281 additions and 26 deletions

View file

@@ -145,7 +145,7 @@ def load_cmds(cmd):
'gccollect': (collect, None),
'graphobj': (graph_randobj, None),
'meminfo': (memory_info, None),
'bigdata': (big_fucking_data_file, None),
'genbigdata': (big_fucking_data_file, None),
'checkcache': (check_cache, None),
'loadblock': (loadblock, None),
'time': (time_cmd, None),
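For context, every plugin file touched in this commit registers its commands the same way: load_cmds(cmd) hands the console a dict mapping command names to (handler, completer) tuples via cmd.set_cmds, plus a list of (command, shorthand) pairs via cmd.add_aliases. A minimal sketch of that shape, with a made-up 'hello' command used purely for illustration:

# Hypothetical plugin; only the set_cmds/add_aliases shape mirrors this commit.
def hello(line):
    """Print a greeting. Usage: hello [name]"""
    print 'Hello, %s!' % (line.strip() or 'world')

def load_cmds(cmd):
    cmd.set_cmds({
        'hello': (hello, None),   # None means no tab completer
    })
    cmd.add_aliases([
        ('hello', 'hi'),          # 'hi' becomes shorthand for 'hello'
    ])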

View file

@@ -1,3 +1,4 @@
import HTMLParser
import StringIO
import base64
import clipboard
@@ -42,7 +43,13 @@ def gzip_decode_helper(s):
dec_data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(s))
dec_data = dec_data.read()
return dec_data
def html_encode_helper(s):
return ''.join(['&#x{0:x};'.format(ord(c)) for c in s])
def html_decode_helper(s):
return HTMLParser.HTMLParser().unescape(s)
def _code_helper(line, func, copy=True):
args = shlex.split(line)
if not args:
@@ -110,6 +117,22 @@ def asciihex_encode(line):
Results are copied to the clipboard.
"""
print_maybe_bin(_code_helper(line, asciihex_encode_helper))
def html_decode(line):
"""
Decode an HTML-encoded string.
If no string is given, will decode the contents of the clipboard.
Results are copied to the clipboard.
"""
print_maybe_bin(_code_helper(line, html_decode_helper))
def html_encode(line):
"""
Encode a string and escape HTML control characters.
If no string is given, will encode the contents of the clipboard.
Results are copied to the clipboard.
"""
print_maybe_bin(_code_helper(line, html_encode_helper))
def gzip_decode(line):
"""
@@ -175,6 +198,22 @@ def asciihex_encode_raw(line):
"""
print _code_helper(line, asciihex_encode_helper, copy=False)
def html_decode_raw(line):
"""
Same as html_decode but the output will never be printed as a hex dump and
results will not be copied. It is suggested you redirect the output
to a file.
"""
print _code_helper(line, html_decode_helper, copy=False)
def html_encode_raw(line):
"""
Same as html_encode but the output will never be printed as a hex dump and
results will not be copied. It is suggested you redirect the output
to a file.
"""
print _code_helper(line, html_encode_helper, copy=False)
def gzip_decode_raw(line):
"""
Same as gzip_decode but the output will never be printed as a hex dump and
@@ -199,6 +238,8 @@ def load_cmds(cmd):
'asciihex_encode': (asciihex_encode, None),
'url_decode': (url_decode, None),
'url_encode': (url_encode, None),
'html_decode': (html_decode, None),
'html_encode': (html_encode, None),
'gzip_decode': (gzip_decode, None),
'gzip_encode': (gzip_encode, None),
'base64_decode_raw': (base64_decode_raw, None),
@@ -207,6 +248,8 @@ def load_cmds(cmd):
'asciihex_encode_raw': (asciihex_encode_raw, None),
'url_decode_raw': (url_decode_raw, None),
'url_encode_raw': (url_encode_raw, None),
'html_decode_raw': (html_decode_raw, None),
'html_encode_raw': (html_encode_raw, None),
'gzip_decode_raw': (gzip_decode_raw, None),
'gzip_encode_raw': (gzip_encode_raw, None),
})
@@ -217,6 +260,8 @@ def load_cmds(cmd):
('asciihex_encode', 'ahe'),
('url_decode', 'urld'),
('url_encode', 'urle'),
('html_decode', 'htmld'),
('html_encode', 'htmle'),
('gzip_decode', 'gzd'),
('gzip_encode', 'gze'),
('base64_decode_raw', 'b64dr'),
@@ -225,6 +270,8 @@ def load_cmds(cmd):
('asciihex_encode_raw', 'aher'),
('url_decode_raw', 'urldr'),
('url_encode_raw', 'urler'),
('html_decode_raw', 'htmldr'),
('html_encode_raw', 'htmler'),
('gzip_decode_raw', 'gzdr'),
('gzip_encode_raw', 'gzer'),
])
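To illustrate what the new html_encode/html_decode commands (aliases htmle/htmld) do under the hood, here is the helper pair from this diff exercised on its own. The example strings are mine; the helpers themselves are copied from the commit (Python 2, matching the codebase):

import HTMLParser

def html_encode_helper(s):
    # every character becomes a hex numeric entity, e.g. '<' -> '&#x3c;'
    return ''.join(['&#x{0:x};'.format(ord(c)) for c in s])

def html_decode_helper(s):
    return HTMLParser.HTMLParser().unescape(s)

payload = '<script>alert(1)</script>'
encoded = html_encode_helper(payload)
print encoded                                  # '&#x3c;&#x73;&#x63;...' (every char escaped)
print html_decode_helper(encoded) == payload   # True: the round trip is lossless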

View file

@@ -2,10 +2,10 @@ import crochet
import pappyproxy
import shlex
from pappyproxy.console import confirm, load_reqlist
from pappyproxy.util import PappyException
from pappyproxy.http import Request
from pappyproxy.console import confirm, load_reqlist, Capturing
from pappyproxy.util import PappyException, remove_color
from pappyproxy.requestcache import RequestCache
from pappyproxy.pappy import cons
from twisted.internet import defer
from twisted.enterprise import adbapi
@@ -114,7 +114,12 @@ def merge_datafile(line):
print 'Added %d requests' % count
finally:
other_dbpool.close()
def run_without_color(line):
with Capturing() as output:
cons.onecmd(line.strip())
print remove_color(output.val)
def load_cmds(cmd):
cmd.set_cmds({
'clrmem': (clrmem, None),
@@ -122,7 +127,8 @@ def load_cmds(cmd):
'sv': (save, None),
'export': (export, None),
'log': (log, None),
'merge': (merge_datafile, None)
'merge': (merge_datafile, None),
'nocolor': (run_without_color, None),
})
cmd.add_aliases([
#('rpy', ''),
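The new nocolor command relies on two helpers that are imported above but whose bodies are not part of this diff: Capturing, which collects whatever a console command prints and exposes it as .val, and remove_color, which strips ANSI escape codes. A rough sketch of how such helpers are typically written follows; this is an assumption about their shape, not the actual pappyproxy implementation:

# Hypothetical stand-ins for pappyproxy.console.Capturing and
# pappyproxy.util.remove_color; the real versions may differ.
import re
import sys
import StringIO

class Capturing(object):
    """Context manager that redirects stdout into a buffer exposed as self.val."""
    def __enter__(self):
        self._stdout = sys.stdout
        sys.stdout = self._buf = StringIO.StringIO()
        return self
    def __exit__(self, *args):
        self.val = self._buf.getvalue()
        sys.stdout = self._stdout

ANSI_COLOR_RE = re.compile(r'\x1b\[[0-9;]*m')

def remove_color(s):
    """Strip ANSI color escape sequences from a string."""
    return ANSI_COLOR_RE.sub('', s)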

View file

@@ -178,9 +178,7 @@ def list_reqs(line):
def view_request_info(line):
"""
View information about a request
Usage: view_request_info <reqid> [u]
If 'u' is given as an additional argument, the unmangled version
of the request will be displayed.
Usage: view_request_info <reqid(s)>
"""
args = shlex.split(line)
reqids = args[0]
@@ -197,9 +195,7 @@ def view_request_info(line):
def view_request_headers(line):
"""
View the headers of the request
Usage: view_request_headers <reqid> [u]
If 'u' is given as an additional argument, the unmangled version
of the request will be displayed.
Usage: view_request_headers <reqid(s)>
"""
args = shlex.split(line)
reqid = args[0]
@@ -208,10 +204,10 @@ def view_request_headers(line):
for req in reqs:
if len(reqs) > 1:
print 'Request %s:' % req.reqid
print ''
view_full_message(req, True)
if len(reqs) > 1:
print '-'*30
print ''
@crochet.wait_for(timeout=None)
@@ -219,9 +215,7 @@ def view_request_headers(line):
def view_full_request(line):
"""
View the full data of the request
Usage: view_full_request <reqid> [u]
If 'u' is given as an additional argument, the unmangled version
of the request will be displayed.
Usage: view_full_request <reqid(s)>
"""
args = shlex.split(line)
reqid = args[0]
@@ -230,18 +224,36 @@ def view_full_request(line):
for req in reqs:
if len(reqs) > 1:
print 'Request %s:' % req.reqid
print ''
view_full_message(req)
if len(reqs) > 1:
print '-'*30
print ''
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def view_request_bytes(line):
"""
View the raw bytes of the request. Use this if you want to redirect output to a file.
Usage: view_request_bytes <reqid(s)>
"""
args = shlex.split(line)
reqid = args[0]
reqs = yield load_reqlist(reqid)
for req in reqs:
if len(reqs) > 1:
print 'Request %s:' % req.reqid
print req.full_message
if len(reqs) > 1:
print '-'*30
print ''
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def view_response_headers(line):
"""
View the headers of the response
Usage: view_response_headers <reqid>
Usage: view_response_headers <reqid(s)>
"""
reqs = yield load_reqlist(line)
for req in reqs:
@@ -269,7 +281,22 @@ def view_full_response(line):
else:
print "Request %s does not have a response" % req.reqid
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def view_response_bytes(line):
"""
View the full data of the response associated with a request
Usage: view_response_bytes <reqid(s)>
"""
reqs = yield load_reqlist(line)
for req in reqs:
if req.response:
if len(reqs) > 1:
print '-'*15 + (' %s ' % req.reqid) + '-'*15
print req.response.full_message
else:
print "Request %s does not have a response" % req.reqid
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def dump_response(line):
@@ -317,8 +344,10 @@ def load_cmds(cmd):
'view_request_info': (view_request_info, None),
'view_request_headers': (view_request_headers, None),
'view_full_request': (view_full_request, None),
'view_request_bytes': (view_request_bytes, None),
'view_response_headers': (view_response_headers, None),
'view_full_response': (view_full_response, None),
'view_response_bytes': (view_response_bytes, None),
'site_map': (site_map, None),
'dump_response': (dump_response, None),
})
@@ -327,8 +356,10 @@ def load_cmds(cmd):
('view_request_info', 'viq'),
('view_request_headers', 'vhq'),
('view_full_request', 'vfq'),
('view_request_bytes', 'vbq'),
('view_response_headers', 'vhs'),
('site_map', 'sm'),
('view_full_response', 'vfs'),
('view_response_bytes', 'vbs'),
('site_map', 'sm'),
#('dump_response', 'dr'),
])
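The two new byte-dump commands repeat the same per-request loop already used by view_request_headers and view_full_request. Purely as a sketch, not part of this commit, that loop could be hoisted into one helper; the names it leans on (load_reqlist, defer, req.full_message) all appear in the surrounding code:

# Hypothetical refactor sketch; the commit itself keeps the loops inline.
from twisted.internet import defer
from pappyproxy.console import load_reqlist

@defer.inlineCallbacks
def _print_each_request(reqid_arg, print_one):
    # Shared loop: print each matching request, with separators when more than one matches.
    reqs = yield load_reqlist(reqid_arg)
    for req in reqs:
        if len(reqs) > 1:
            print 'Request %s:' % req.reqid
        print_one(req)
        if len(reqs) > 1:
            print '-'*30
            print ''

# view_request_bytes would then reduce to something like:
#     yield _print_each_request(shlex.split(line)[0], lambda req: sys.stdout.write(req.full_message + '\n'))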