Version 0.3.0, move large amount of code to Puppy
This commit is contained in:
parent
76d20774a5
commit
f9737dbdd8
169 changed files with 6463 additions and 43862 deletions
BIN
pappyproxy/interface/__pycache__/context.cpython-35.pyc
Normal file
BIN
pappyproxy/interface/__pycache__/context.cpython-35.pyc
Normal file
Binary file not shown.
BIN
pappyproxy/interface/__pycache__/decode.cpython-35.pyc
Normal file
BIN
pappyproxy/interface/__pycache__/decode.cpython-35.pyc
Normal file
Binary file not shown.
BIN
pappyproxy/interface/__pycache__/macros.cpython-35.pyc
Normal file
BIN
pappyproxy/interface/__pycache__/macros.cpython-35.pyc
Normal file
Binary file not shown.
BIN
pappyproxy/interface/__pycache__/mangle.cpython-35.pyc
Normal file
BIN
pappyproxy/interface/__pycache__/mangle.cpython-35.pyc
Normal file
Binary file not shown.
BIN
pappyproxy/interface/__pycache__/misc.cpython-35.pyc
Normal file
BIN
pappyproxy/interface/__pycache__/misc.cpython-35.pyc
Normal file
Binary file not shown.
BIN
pappyproxy/interface/__pycache__/tags.cpython-35.pyc
Normal file
BIN
pappyproxy/interface/__pycache__/tags.cpython-35.pyc
Normal file
Binary file not shown.
BIN
pappyproxy/interface/__pycache__/test.cpython-35.pyc
Normal file
BIN
pappyproxy/interface/__pycache__/test.cpython-35.pyc
Normal file
Binary file not shown.
BIN
pappyproxy/interface/__pycache__/view.cpython-35.pyc
Normal file
BIN
pappyproxy/interface/__pycache__/view.cpython-35.pyc
Normal file
Binary file not shown.
245
pappyproxy/interface/context.py
Normal file
245
pappyproxy/interface/context.py
Normal file
|
@ -0,0 +1,245 @@
|
|||
from itertools import groupby
|
||||
|
||||
from ..proxy import InvalidQuery, time_to_nsecs
|
||||
from ..colors import Colors, Styles
|
||||
|
||||
# class BuiltinFilters(object):
|
||||
# _filters = {
|
||||
# 'not_image': (
|
||||
# ['path nctr "(\.png$|\.jpg$|\.gif$)"'],
|
||||
# 'Filter out image requests',
|
||||
# ),
|
||||
# 'not_jscss': (
|
||||
# ['path nctr "(\.js$|\.css$)"'],
|
||||
# 'Filter out javascript and css files',
|
||||
# ),
|
||||
# }
|
||||
|
||||
# @staticmethod
|
||||
# @defer.inlineCallbacks
|
||||
# def get(name):
|
||||
# if name not in BuiltinFilters._filters:
|
||||
# raise PappyException('%s not a bult in filter' % name)
|
||||
# if name in BuiltinFilters._filters:
|
||||
# filters = [pappyproxy.context.Filter(f) for f in BuiltinFilters._filters[name][0]]
|
||||
# for f in filters:
|
||||
# yield f.generate()
|
||||
# defer.returnValue(filters)
|
||||
# raise PappyException('"%s" is not a built-in filter' % name)
|
||||
|
||||
# @staticmethod
|
||||
# def list():
|
||||
# return [k for k, v in BuiltinFilters._filters.iteritems()]
|
||||
|
||||
# @staticmethod
|
||||
# def help(name):
|
||||
# if name not in BuiltinFilters._filters:
|
||||
# raise PappyException('"%s" is not a built-in filter' % name)
|
||||
# return pappyproxy.context.Filter(BuiltinFilters._filters[name][1])
|
||||
|
||||
|
||||
# def complete_filtercmd(text, line, begidx, endidx):
|
||||
# strs = [k for k, v in pappyproxy.context.Filter._filter_functions.iteritems()]
|
||||
# strs += [k for k, v in pappyproxy.context.Filter._async_filter_functions.iteritems()]
|
||||
# return autocomplete_startswith(text, strs)
|
||||
|
||||
# def complete_builtin_filter(text, line, begidx, endidx):
|
||||
# all_names = BuiltinFilters.list()
|
||||
# if not text:
|
||||
# ret = all_names[:]
|
||||
# else:
|
||||
# ret = [n for n in all_names if n.startswith(text)]
|
||||
# return ret
|
||||
|
||||
# @crochet.wait_for(timeout=None)
|
||||
# @defer.inlineCallbacks
|
||||
# def builtin_filter(line):
|
||||
# if not line:
|
||||
# raise PappyException("Filter name required")
|
||||
|
||||
# filters_to_add = yield BuiltinFilters.get(line)
|
||||
# for f in filters_to_add:
|
||||
# print f.filter_string
|
||||
# yield pappyproxy.pappy.main_context.add_filter(f)
|
||||
# defer.returnValue(None)
|
||||
|
||||
def filtercmd(client, args):
    """
    Apply a filter to the current context

    Usage: filter <filter string>
    See README.md for information on filter strings
    """
    try:
        # Split args into phrases on "OR" separators. groupby tags the "OR"
        # tokens themselves with k=True; those groups are dropped, leaving a
        # list of condition phrases.
        phrases = [list(group) for k, group in groupby(args, lambda x: x == "OR") if not k]
        for phrase in phrases:
            # we do before/after by id not by timestamp: the user supplies a
            # request id, which is resolved to that request's start time in
            # nanoseconds before the phrase is applied
            if phrase[0] in ('before', 'b4', 'after', 'af') and len(phrase) > 1:
                r = client.req_by_id(phrase[1], headers_only=True)
                phrase[1] = str(time_to_nsecs(r.time_start))
        client.context.apply_phrase(phrases)
    except InvalidQuery as e:
        # Bad filter syntax is a user error: report it, don't traceback
        print(e)
|
||||
|
||||
def filter_up(client, args):
    """
    Undo the most recently applied filter.

    Usage: filter_up
    """
    client.context.pop_phrase()
|
||||
|
||||
def filter_clear(client, args):
    """
    Drop every filter from the context (the scope is ignored).

    Usage: filter_clear
    """
    client.context.set_query([])
|
||||
|
||||
def filter_list(client, args):
    """
    Show the filters that make up the current context.

    Usage: filter_list
    """
    from ..util import print_query
    print_query(client.context.query)
|
||||
|
||||
def scope_save(client, args):
    """
    Save the current context as the scope; persists between launches.

    Usage: scope_save
    """
    client.set_scope(client.context.query)
|
||||
|
||||
def scope_reset(client, args):
    """
    Replace the current context with the saved scope (view in-scope items).

    Usage: scope_reset
    """
    scope = client.get_scope()
    if scope.is_custom:
        # A custom scope function has no filter representation to load
        print("Proxy is using a custom function to check scope. Cannot set context to scope.")
        return
    client.context.set_query(scope.filter)
|
||||
|
||||
def scope_delete(client, args):
    """
    Clear the scope so that every request/response pair is in scope.

    Usage: scope_delete
    """
    client.set_scope([])
|
||||
|
||||
def scope_list(client, args):
|
||||
"""
|
||||
Print the filters that make up the scope
|
||||
Usage: scope_list
|
||||
"""
|
||||
from ..util import print_query
|
||||
result = client.get_scope()
|
||||
if result.is_custom:
|
||||
print("Proxy is using a custom function to check scope")
|
||||
return
|
||||
print_query(result.filter)
|
||||
|
||||
def list_saved_queries(client, args):
|
||||
from ..util import print_query
|
||||
queries = client.all_saved_queries()
|
||||
print('')
|
||||
for q in queries:
|
||||
print(Styles.TABLE_HEADER + q.name + Colors.ENDC)
|
||||
print_query(q.query)
|
||||
print('')
|
||||
|
||||
def save_query(client, args):
|
||||
from ..util import print_query
|
||||
if len(args) != 1:
|
||||
print("Must give name to save filters as")
|
||||
return
|
||||
client.save_query(args[0], client.context.query)
|
||||
print('')
|
||||
print(Styles.TABLE_HEADER + args[0] + Colors.ENDC)
|
||||
print_query(client.context.query)
|
||||
print('')
|
||||
|
||||
def load_query(client, args):
|
||||
from ..util import print_query
|
||||
if len(args) != 1:
|
||||
print("Must give name of query to load")
|
||||
return
|
||||
new_query = client.load_query(args[0])
|
||||
client.context.set_query(new_query)
|
||||
print('')
|
||||
print(Styles.TABLE_HEADER + args[0] + Colors.ENDC)
|
||||
print_query(new_query)
|
||||
print('')
|
||||
|
||||
def delete_query(client, args):
    """
    Delete a saved filter query by name.

    Usage: delete_query <name>
    """
    if len(args) != 1:
        print("Must give name of filter")
        return
    client.delete_query(args[0])
|
||||
|
||||
# @crochet.wait_for(timeout=None)
|
||||
# @defer.inlineCallbacks
|
||||
# def filter_prune(line):
|
||||
# """
|
||||
# Delete all out of context requests from the data file.
|
||||
# CANNOT BE UNDONE!! Be careful!
|
||||
# Usage: filter_prune
|
||||
# """
|
||||
# # Delete filtered items from datafile
|
||||
# print ''
|
||||
# print 'Currently active filters:'
|
||||
# for f in pappyproxy.pappy.main_context.active_filters:
|
||||
# print '> %s' % f.filter_string
|
||||
|
||||
# # We copy so that we're not removing items from a set we're iterating over
|
||||
# act_reqs = yield pappyproxy.pappy.main_context.get_reqs()
|
||||
# inact_reqs = set(Request.cache.req_ids()).difference(set(act_reqs))
|
||||
# message = 'This will delete %d/%d requests. You can NOT undo this!! Continue?' % (len(inact_reqs), (len(inact_reqs) + len(act_reqs)))
|
||||
# #print message
|
||||
# if not confirm(message, 'n'):
|
||||
# defer.returnValue(None)
|
||||
|
||||
# for reqid in inact_reqs:
|
||||
# try:
|
||||
# req = yield pappyproxy.http.Request.load_request(reqid)
|
||||
# yield req.deep_delete()
|
||||
# except PappyException as e:
|
||||
# print e
|
||||
# print 'Deleted %d requests' % len(inact_reqs)
|
||||
# defer.returnValue(None)
|
||||
|
||||
###############
|
||||
## Plugin hooks
|
||||
|
||||
def load_cmds(cmd):
|
||||
cmd.set_cmds({
|
||||
#'filter': (filtercmd, complete_filtercmd),
|
||||
'filter': (filtercmd, None),
|
||||
'filter_up': (filter_up, None),
|
||||
'filter_list': (filter_list, None),
|
||||
'filter_clear': (filter_clear, None),
|
||||
'scope_list': (scope_list, None),
|
||||
'scope_delete': (scope_delete, None),
|
||||
'scope_reset': (scope_reset, None),
|
||||
'scope_save': (scope_save, None),
|
||||
'list_saved_queries': (list_saved_queries, None),
|
||||
# 'filter_prune': (filter_prune, None),
|
||||
# 'builtin_filter': (builtin_filter, complete_builtin_filter),
|
||||
'save_query': (save_query, None),
|
||||
'load_query': (load_query, None),
|
||||
'delete_query': (delete_query, None),
|
||||
})
|
||||
cmd.add_aliases([
|
||||
('filter', 'f'),
|
||||
('filter', 'fl'),
|
||||
('filter_up', 'fu'),
|
||||
('filter_list', 'fls'),
|
||||
('filter_clear', 'fc'),
|
||||
('scope_list', 'sls'),
|
||||
('scope_reset', 'sr'),
|
||||
('list_saved_queries', 'sqls'),
|
||||
# ('builtin_filter', 'fbi'),
|
||||
('save_query', 'sq'),
|
||||
('load_query', 'lq'),
|
||||
('delete_query', 'dq'),
|
||||
])
|
326
pappyproxy/interface/decode.py
Normal file
326
pappyproxy/interface/decode.py
Normal file
|
@ -0,0 +1,326 @@
|
|||
import html
|
||||
import base64
|
||||
import datetime
|
||||
import gzip
|
||||
import shlex
|
||||
import string
|
||||
import urllib
|
||||
|
||||
from ..util import hexdump, printable_data, copy_to_clipboard, clipboard_contents, encode_basic_auth, parse_basic_auth
|
||||
from ..console import CommandError
|
||||
from io import StringIO
|
||||
|
||||
def print_maybe_bin(s):
    """Print *s* (bytes) as text when fully printable, else as a hex dump."""
    is_binary = any(chr(byte) not in string.printable for byte in s)
    if is_binary:
        print(hexdump(s))
    else:
        print(s.decode())
|
||||
|
||||
def asciihex_encode_helper(s):
    """Hex-encode *s* (bytes): each byte becomes exactly two hex digits.

    The previous '{0:x}' format dropped the leading zero for bytes below
    0x10, producing odd-length output that the pairwise decoder
    (asciihex_decode_helper) cannot round-trip; '{0:02x}' zero-pads.
    """
    return ''.join('{0:02x}'.format(c) for c in s).encode()
|
||||
|
||||
def asciihex_decode_helper(s):
    """Decode an ascii-hex byte string (e.g. b'6162' -> b'ab')."""
    try:
        # Pair up hex digits (bytes iteration yields ints, hence chr()).
        pairs = zip(s[0::2], s[1::2])
        decoded = [chr(int(chr(hi) + chr(lo), 16)) for hi, lo in pairs]
        return ''.join(decoded).encode()
    except Exception as e:
        raise CommandError(e)
|
||||
|
||||
def gzip_encode_helper(s):
    """Gzip-compress *s* (bytes) and return the compressed bytes.

    Fixes a crash: the module imports the StringIO *class*
    (``from io import StringIO``), so the original ``StringIO.StringIO()``
    raised AttributeError -- and GzipFile needs a bytes buffer anyway.
    """
    from io import BytesIO
    out = BytesIO()
    with gzip.GzipFile(fileobj=out, mode="w") as f:
        f.write(s)
    return out.getvalue()
|
||||
|
||||
def gzip_decode_helper(s):
    """Gzip-decompress *s* (bytes) and return the decompressed bytes.

    Fixes a crash: ``StringIO.StringIO(s)`` raised AttributeError (the
    module imports the class, not the module) and a gzip stream is bytes,
    so a BytesIO buffer is required.
    """
    from io import BytesIO
    with gzip.GzipFile('', 'rb', 9, BytesIO(s)) as f:
        return f.read()
|
||||
|
||||
def base64_decode_helper(s):
    """Base64-decode *s* (str or bytes), retrying with padding appended.

    In Python 3 ``b64decode`` raises ``binascii.Error`` (a ValueError) on
    bad padding, not TypeError, so the original retry loop never ran; it
    also appended str padding to bytes input. Both are fixed here.

    Raises CommandError when no amount of padding makes *s* decodable.
    """
    import binascii
    pad = '=' if isinstance(s, str) else b'='
    try:
        return base64.b64decode(s)
    except (TypeError, binascii.Error):
        for i in range(1, 5):
            try:
                return base64.b64decode(s + pad * i)
            except (TypeError, binascii.Error):
                pass
    raise CommandError("Unable to base64 decode string")
|
||||
|
||||
def url_decode_helper(s):
    """Percent-decode *s* (bytes) and return the decoded bytes."""
    text = s.decode()
    return urllib.parse.unquote(text).encode()
|
||||
|
||||
def url_encode_helper(s):
    """Percent-encode special characters in *s* (bytes); spaces become '+'."""
    text = s.decode()
    return urllib.parse.quote_plus(text).encode()
|
||||
|
||||
def html_encode_helper(s):
    """Encode every byte of *s* as an HTML hex entity (b'a' -> b'&#x61;')."""
    entities = ['&#x{0:x};'.format(byte) for byte in s]
    return ''.join(entities).encode()
|
||||
|
||||
def html_decode_helper(s):
    """Decode HTML entities in *s* (bytes) back into bytes."""
    text = s.decode()
    return html.unescape(text).encode()
|
||||
|
||||
def _code_helper(args, func, copy=True):
|
||||
if len(args) == 0:
|
||||
s = clipboard_contents().encode()
|
||||
print('Will decode:')
|
||||
print(printable_data(s))
|
||||
s = func(s)
|
||||
if copy:
|
||||
try:
|
||||
copy_to_clipboard(s)
|
||||
except Exception as e:
|
||||
print('Result cannot be copied to the clipboard. Result not copied.')
|
||||
raise e
|
||||
return s
|
||||
else:
|
||||
s = func(args[0].encode())
|
||||
if copy:
|
||||
try:
|
||||
copy_to_clipboard(s)
|
||||
except Exception as e:
|
||||
print('Result cannot be copied to the clipboard. Result not copied.')
|
||||
raise e
|
||||
return s
|
||||
|
||||
def base64_decode(client, args):
|
||||
"""
|
||||
Base64 decode a string.
|
||||
If no string is given, will decode the contents of the clipboard.
|
||||
Results are copied to the clipboard.
|
||||
"""
|
||||
print_maybe_bin(_code_helper(args, base64_decode_helper))
|
||||
|
||||
def base64_encode(client, args):
|
||||
"""
|
||||
Base64 encode a string.
|
||||
If no string is given, will encode the contents of the clipboard.
|
||||
Results are copied to the clipboard.
|
||||
"""
|
||||
print_maybe_bin(_code_helper(args, base64.b64encode))
|
||||
|
||||
def url_decode(client, args):
|
||||
"""
|
||||
URL decode a string.
|
||||
If no string is given, will decode the contents of the clipboard.
|
||||
Results are copied to the clipboard.
|
||||
"""
|
||||
print_maybe_bin(_code_helper(args, url_decode_helper))
|
||||
|
||||
def url_encode(client, args):
|
||||
"""
|
||||
URL encode special characters in a string.
|
||||
If no string is given, will encode the contents of the clipboard.
|
||||
Results are copied to the clipboard.
|
||||
"""
|
||||
print_maybe_bin(_code_helper(args, url_encode_helper))
|
||||
|
||||
def asciihex_decode(client, args):
|
||||
"""
|
||||
Decode an ascii hex string.
|
||||
If no string is given, will decode the contents of the clipboard.
|
||||
Results are copied to the clipboard.
|
||||
"""
|
||||
print_maybe_bin(_code_helper(args, asciihex_decode_helper))
|
||||
|
||||
def asciihex_encode(client, args):
|
||||
"""
|
||||
Convert all the characters in a line to hex and combine them.
|
||||
If no string is given, will encode the contents of the clipboard.
|
||||
Results are copied to the clipboard.
|
||||
"""
|
||||
print_maybe_bin(_code_helper(args, asciihex_encode_helper))
|
||||
|
||||
def html_decode(client, args):
|
||||
"""
|
||||
Decode an html encoded string.
|
||||
If no string is given, will decode the contents of the clipboard.
|
||||
Results are copied to the clipboard.
|
||||
"""
|
||||
print_maybe_bin(_code_helper(args, html_decode_helper))
|
||||
|
||||
def html_encode(client, args):
|
||||
"""
|
||||
Encode a string and escape html control characters.
|
||||
If no string is given, will encode the contents of the clipboard.
|
||||
Results are copied to the clipboard.
|
||||
"""
|
||||
print_maybe_bin(_code_helper(args, html_encode_helper))
|
||||
|
||||
def gzip_decode(client, args):
|
||||
"""
|
||||
Un-gzip a string.
|
||||
If no string is given, will decompress the contents of the clipboard.
|
||||
Results are copied to the clipboard.
|
||||
"""
|
||||
print_maybe_bin(_code_helper(args, gzip_decode_helper))
|
||||
|
||||
def gzip_encode(client, args):
|
||||
"""
|
||||
Gzip a string.
|
||||
If no string is given, will decompress the contents of the clipboard.
|
||||
Results are NOT copied to the clipboard.
|
||||
"""
|
||||
print_maybe_bin(_code_helper(args, gzip_encode_helper, copy=False))
|
||||
|
||||
def base64_decode_raw(client, args):
|
||||
"""
|
||||
Same as base64_decode but the output will never be printed as a hex dump and
|
||||
results will not be copied. It is suggested you redirect the output
|
||||
to a file.
|
||||
"""
|
||||
print(_code_helper(args, base64_decode_helper, copy=False))
|
||||
|
||||
def base64_encode_raw(client, args):
|
||||
"""
|
||||
Same as base64_encode but the output will never be printed as a hex dump and
|
||||
results will not be copied. It is suggested you redirect the output
|
||||
to a file.
|
||||
"""
|
||||
print(_code_helper(args, base64.b64encode, copy=False))
|
||||
|
||||
def url_decode_raw(client, args):
|
||||
"""
|
||||
Same as url_decode but the output will never be printed as a hex dump and
|
||||
results will not be copied. It is suggested you redirect the output
|
||||
to a file.
|
||||
"""
|
||||
print(_code_helper(args, url_decode_helper, copy=False))
|
||||
|
||||
def url_encode_raw(client, args):
|
||||
"""
|
||||
Same as url_encode but the output will never be printed as a hex dump and
|
||||
results will not be copied. It is suggested you redirect the output
|
||||
to a file.
|
||||
"""
|
||||
print(_code_helper(args, url_encode_helper, copy=False))
|
||||
|
||||
def asciihex_decode_raw(client, args):
|
||||
"""
|
||||
Same as asciihex_decode but the output will never be printed as a hex dump and
|
||||
results will not be copied. It is suggested you redirect the output
|
||||
to a file.
|
||||
"""
|
||||
print(_code_helper(args, asciihex_decode_helper, copy=False))
|
||||
|
||||
def asciihex_encode_raw(client, args):
|
||||
"""
|
||||
Same as asciihex_encode but the output will never be printed as a hex dump and
|
||||
results will not be copied. It is suggested you redirect the output
|
||||
to a file.
|
||||
"""
|
||||
print(_code_helper(args, asciihex_encode_helper, copy=False))
|
||||
|
||||
def html_decode_raw(client, args):
|
||||
"""
|
||||
Same as html_decode but the output will never be printed as a hex dump and
|
||||
results will not be copied. It is suggested you redirect the output
|
||||
to a file.
|
||||
"""
|
||||
print(_code_helper(args, html_decode_helper, copy=False))
|
||||
|
||||
def html_encode_raw(client, args):
|
||||
"""
|
||||
Same as html_encode but the output will never be printed as a hex dump and
|
||||
results will not be copied. It is suggested you redirect the output
|
||||
to a file.
|
||||
"""
|
||||
print(_code_helper(args, html_encode_helper, copy=False))
|
||||
|
||||
def gzip_decode_raw(client, args):
|
||||
"""
|
||||
Same as gzip_decode but the output will never be printed as a hex dump and
|
||||
results will not be copied. It is suggested you redirect the output
|
||||
to a file.
|
||||
"""
|
||||
print(_code_helper(args, gzip_decode_helper, copy=False))
|
||||
|
||||
def gzip_encode_raw(client, args):
|
||||
"""
|
||||
Same as gzip_encode but the output will never be printed as a hex dump and
|
||||
results will not be copied. It is suggested you redirect the output
|
||||
to a file.
|
||||
"""
|
||||
print(_code_helper(args, gzip_encode_helper, copy=False))
|
||||
|
||||
def unix_time_decode_helper(line):
|
||||
unix_time = int(line.strip())
|
||||
dtime = datetime.datetime.fromtimestamp(unix_time)
|
||||
return dtime.strftime('%Y-%m-%d %H:%M:%S')
|
||||
|
||||
def unix_time_decode(client, args):
|
||||
print(_code_helper(args, unix_time_decode_helper))
|
||||
|
||||
def http_auth_encode(client, args):
|
||||
if len(args) != 2:
|
||||
raise CommandError('Usage: http_auth_encode <username> <password>')
|
||||
username, password = args
|
||||
print(encode_basic_auth(username, password))
|
||||
|
||||
def http_auth_decode(client, args):
    """Decode an HTTP basic auth value and print the username and password.

    Fixes a NameError: the original called the undefined
    ``decode_basic_auth``; the helper actually imported at module top is
    ``parse_basic_auth``. Assumes it returns a (username, password) pair,
    matching how the original unpacked the result -- TODO confirm in ..util.
    """
    if len(args) != 1:
        raise CommandError('Usage: http_auth_decode <encoded value>')
    username, password = parse_basic_auth(args[0])
    print(username)
    print(password)
|
||||
|
||||
def load_cmds(cmd):
|
||||
cmd.set_cmds({
|
||||
'base64_decode': (base64_decode, None),
|
||||
'base64_encode': (base64_encode, None),
|
||||
'asciihex_decode': (asciihex_decode, None),
|
||||
'asciihex_encode': (asciihex_encode, None),
|
||||
'url_decode': (url_decode, None),
|
||||
'url_encode': (url_encode, None),
|
||||
'html_decode': (html_decode, None),
|
||||
'html_encode': (html_encode, None),
|
||||
'gzip_decode': (gzip_decode, None),
|
||||
'gzip_encode': (gzip_encode, None),
|
||||
'base64_decode_raw': (base64_decode_raw, None),
|
||||
'base64_encode_raw': (base64_encode_raw, None),
|
||||
'asciihex_decode_raw': (asciihex_decode_raw, None),
|
||||
'asciihex_encode_raw': (asciihex_encode_raw, None),
|
||||
'url_decode_raw': (url_decode_raw, None),
|
||||
'url_encode_raw': (url_encode_raw, None),
|
||||
'html_decode_raw': (html_decode_raw, None),
|
||||
'html_encode_raw': (html_encode_raw, None),
|
||||
'gzip_decode_raw': (gzip_decode_raw, None),
|
||||
'gzip_encode_raw': (gzip_encode_raw, None),
|
||||
'unixtime_decode': (unix_time_decode, None),
|
||||
'httpauth_encode': (http_auth_encode, None),
|
||||
'httpauth_decode': (http_auth_decode, None)
|
||||
})
|
||||
cmd.add_aliases([
|
||||
('base64_decode', 'b64d'),
|
||||
('base64_encode', 'b64e'),
|
||||
('asciihex_decode', 'ahd'),
|
||||
('asciihex_encode', 'ahe'),
|
||||
('url_decode', 'urld'),
|
||||
('url_encode', 'urle'),
|
||||
('html_decode', 'htmld'),
|
||||
('html_encode', 'htmle'),
|
||||
('gzip_decode', 'gzd'),
|
||||
('gzip_encode', 'gze'),
|
||||
('base64_decode_raw', 'b64dr'),
|
||||
('base64_encode_raw', 'b64er'),
|
||||
('asciihex_decode_raw', 'ahdr'),
|
||||
('asciihex_encode_raw', 'aher'),
|
||||
('url_decode_raw', 'urldr'),
|
||||
('url_encode_raw', 'urler'),
|
||||
('html_decode_raw', 'htmldr'),
|
||||
('html_encode_raw', 'htmler'),
|
||||
('gzip_decode_raw', 'gzdr'),
|
||||
('gzip_encode_raw', 'gzer'),
|
||||
('unixtime_decode', 'uxtd'),
|
||||
('httpauth_encode', 'hae'),
|
||||
('httpauth_decode', 'had'),
|
||||
])
|
150
pappyproxy/interface/macros.py
Normal file
150
pappyproxy/interface/macros.py
Normal file
|
@ -0,0 +1,150 @@
|
|||
from ..util import load_reqlist
|
||||
from ..macros import macro_from_requests, MacroTemplate, load_macros
|
||||
from ..colors import Colors
|
||||
|
||||
macro_dict = {}
|
||||
int_macro_dict = {}
|
||||
int_conns = {}
|
||||
|
||||
def generate_macro(client, args):
|
||||
if len(args) == 0:
|
||||
print("usage: gma [name] [reqids]")
|
||||
return
|
||||
macro_name = args[0]
|
||||
|
||||
reqs = [r for r in load_reqlist(client, ','.join(args[1:]))]
|
||||
script_string = macro_from_requests(reqs)
|
||||
fname = MacroTemplate.template_filename('macro', macro_name)
|
||||
with open(fname, 'w') as f:
|
||||
f.write(script_string)
|
||||
print("Macro written to {}".format(fname))
|
||||
|
||||
def generate_int_macro(client, args):
|
||||
if len(args) == 0:
|
||||
print("usage: gima [name] [reqids]")
|
||||
return
|
||||
macro_name = args[0]
|
||||
|
||||
reqs = [r for r in load_reqlist(client, ','.join(args[1:]))]
|
||||
|
||||
script_string = macro_from_requests(reqs, template='intmacro')
|
||||
fname = MacroTemplate.template_filename('intmacro', macro_name)
|
||||
with open(fname, 'w') as f:
|
||||
f.write(script_string)
|
||||
print("Macro written to {}".format(fname))
|
||||
|
||||
def load_macros_cmd(client, args):
|
||||
global macro_dict
|
||||
|
||||
load_dir = '.'
|
||||
if len(args) > 0:
|
||||
load_dir = args[0]
|
||||
|
||||
_stop_all_int_macros()
|
||||
|
||||
loaded_macros, loaded_int_macros = load_macros(load_dir, client)
|
||||
for macro in loaded_macros:
|
||||
macro_dict[macro.name] = macro
|
||||
print("Loaded {} ({})".format(macro.name, macro.file_name))
|
||||
for macro in loaded_int_macros:
|
||||
int_macro_dict[macro.name] = macro
|
||||
print("Loaded {} ({})".format(macro.name, macro.file_name))
|
||||
|
||||
def complete_run_macro(text, line, begidx, endidx):
    """Tab-completion for run_macro: loaded macro names starting with *text*.

    Fixes a NameError: the original imported ``autocomplete_starts_with``
    but called ``autocomplete_startswith``; import and call now agree
    (``autocomplete_startswith`` is the spelling used elsewhere in this
    package -- TODO confirm against ..util).
    """
    from ..util import autocomplete_startswith

    global macro_dict
    return autocomplete_startswith(text, macro_dict.keys())
|
||||
|
||||
def run_macro(client, args):
    """Run a previously loaded macro by name.

    Usage: rma [macro name] [macro args...]

    An unknown name is reported rather than raising a bare KeyError, to
    match the print-and-return error style of the sibling commands.
    """
    global macro_dict
    if len(args) == 0:
        print("usage: rma [macro name]")
        return
    try:
        macro = macro_dict[args[0]]
    except KeyError:
        print("%s is not a loaded macro" % args[0])
        return
    macro.execute(client, args[1:])
|
||||
|
||||
def complete_run_int_macro(text, line, begidx, endidx):
    """Tab-completion for run_int_macro: loaded intercepting macro names.

    Fixes a NameError: the original imported ``autocomplete_starts_with``
    but called ``autocomplete_startswith``; import and call now agree
    (TODO confirm spelling against ..util).
    """
    from ..util import autocomplete_startswith

    global int_macro_dict
    return autocomplete_startswith(text, int_macro_dict.keys())
|
||||
|
||||
def run_int_macro(client, args):
    """
    Start a loaded intercepting macro by name.

    Usage: rim [macro name] [macro args...]

    Opens a dedicated proxy connection for the macro and records it in
    int_conns so it can be stopped later.
    """
    global int_macro_dict
    global int_conns
    if len(args) == 0:
        print("usage: rim [macro name]")
        return
    if args[0] in int_conns:
        print("%s is already running!" % args[0])
        return
    # NOTE(review): an unknown name raises KeyError here -- confirm that is
    # the intended behavior for this interactive command
    macro = int_macro_dict[args[0]]
    macro.init(args[1:])
    conn = client.new_conn()
    int_conns[args[0]] = conn
    conn.intercept(macro)
    print("Started %s" % args[0])
|
||||
|
||||
def complete_stop_int_macro(text, line, begidx, endidx):
    """Tab-completion for stop_int_macro: names of running intercept macros.

    Fixes a NameError: the original imported ``autocomplete_starts_with``
    but called ``autocomplete_startswith``; import and call now agree
    (TODO confirm spelling against ..util).
    """
    from ..util import autocomplete_startswith

    global int_conns
    return autocomplete_startswith(text, int_conns.keys())
|
||||
|
||||
def stop_int_macro(client, args):
    """
    Stop a running intercepting macro by name, or stop all of them when no
    name is given.

    Usage: stop_int_macro [macro name]
    """
    global int_conns
    if len(args) > 0:
        # Close the proxy connection backing this macro and forget it.
        # NOTE(review): an unknown name raises KeyError here -- presumably
        # acceptable for an interactive command, but confirm.
        conn = int_conns[args[0]]
        conn.close()
        del int_conns[args[0]]
        print("Stopped %s" % args[0])
    else:
        _stop_all_int_macros()
|
||||
|
||||
def _stop_all_int_macros():
    """Close every running intercepting-macro connection and clear the table.

    Fixes a RuntimeError: the original deleted entries from ``int_conns``
    while iterating ``int_conns.items()``, which Python 3 forbids; we
    iterate over a snapshot instead.
    """
    global int_conns
    for k, conn in list(int_conns.items()):
        conn.close()
        del int_conns[k]
        print("Stopped %s" % k)
|
||||
|
||||
def list_macros(client, args):
    """
    List the loaded macros and intercepting macros; running intercepting
    macros are marked RUNNING.

    Usage: lsma
    """
    global macro_dict
    global int_macro_dict
    global int_conns
    if len(macro_dict) > 0:
        print('Loaded Macros:')
        for k, m in macro_dict.items():
            print(' '+k)

    if len(int_macro_dict) > 0:
        print('Loaded Intercepting Macros:')
        for k, m in int_macro_dict.items():
            pstr = ' '+k
            # A macro with a live connection in int_conns is currently running
            if k in int_conns:
                pstr += ' (' + Colors.GREEN + 'RUNNING' + Colors.ENDC + ')'
            print(pstr)
|
||||
|
||||
def load_cmds(cmd):
|
||||
cmd.set_cmds({
|
||||
'generate_macro': (generate_macro, None),
|
||||
'generate_int_macro': (generate_int_macro, None),
|
||||
'load_macros': (load_macros_cmd, None),
|
||||
'run_macro': (run_macro, complete_run_macro),
|
||||
'run_int_macro': (run_int_macro, complete_run_int_macro),
|
||||
'stop_int_macro': (stop_int_macro, complete_stop_int_macro),
|
||||
'list_macros': (list_macros, None),
|
||||
})
|
||||
cmd.add_aliases([
|
||||
('generate_macro', 'gma'),
|
||||
('generate_int_macro', 'gima'),
|
||||
('load_macros', 'lma'),
|
||||
('run_macro', 'rma'),
|
||||
('run_int_macro', 'rim'),
|
||||
('stop_int_macro', 'sim'),
|
||||
('list_macros', 'lsma'),
|
||||
])
|
325
pappyproxy/interface/mangle.py
Normal file
325
pappyproxy/interface/mangle.py
Normal file
|
@ -0,0 +1,325 @@
|
|||
import curses
|
||||
import os
|
||||
import subprocess
|
||||
import tempfile
|
||||
import threading
|
||||
from ..macros import InterceptMacro
|
||||
from ..proxy import MessageError, parse_request, parse_response
|
||||
from ..colors import url_formatter
|
||||
|
||||
edit_queue = []
|
||||
|
||||
class InterceptorMacro(InterceptMacro):
    """
    A class representing a macro that modifies requests as they pass through
    the proxy.

    Each mangle_* method writes the original message to a temp file, queues
    it for editing in the console's editor, then parses the edited file back
    into a message. A canceled edit returns the original message unchanged;
    an emptied file means "drop the message" (return None).

    Bug fixed: the drop checks compared binary file contents to '' (str),
    which is never equal to bytes in Python 3, so dropping never worked;
    the sentinel is now b''.
    """
    def __init__(self):
        InterceptMacro.__init__(self)
        self.name = "InterceptorMacro"

    def mangle_request(self, request):
        """Let the user edit an intercepted request; return the result."""
        # Write original request to the temp file
        with tempfile.NamedTemporaryFile(delete=False) as tf:
            tfName = tf.name
            tf.write(request.full_message())

        mangled_req = request
        front = False
        while True:
            # Have the console edit the file
            event = edit_file(tfName, front=front)
            event.wait()
            if event.canceled:
                return request

            # Create new mangled request from edited file
            with open(tfName, 'rb') as f:
                text = f.read()

            os.remove(tfName)

            # Check if dropped (file read in binary mode, so compare bytes)
            if text == b'':
                return None

            try:
                mangled_req = parse_request(text)
            except MessageError as e:
                # Unparsable edit: re-open the editor at the front of the queue
                print("could not parse request: %s" % str(e))
                front = True
                continue
            # Routing information is not part of the message text; carry it over
            mangled_req.dest_host = request.dest_host
            mangled_req.dest_port = request.dest_port
            mangled_req.use_tls = request.use_tls
            break
        return mangled_req

    def mangle_response(self, request, response):
        """Let the user edit an intercepted response; return the result."""
        # Write original response to the temp file
        with tempfile.NamedTemporaryFile(delete=False) as tf:
            tfName = tf.name
            tf.write(response.full_message())

        mangled_rsp = response
        while True:
            # Have the console edit the file (responses always jump the queue)
            event = edit_file(tfName, front=True)
            event.wait()
            if event.canceled:
                return response

            # Create new mangled response from edited file
            with open(tfName, 'rb') as f:
                text = f.read()

            os.remove(tfName)

            # Check if dropped (file read in binary mode, so compare bytes)
            if text == b'':
                return None

            try:
                mangled_rsp = parse_response(text)
            except MessageError as e:
                print("could not parse response: %s" % str(e))
                continue
            break
        return mangled_rsp

    def mangle_websocket(self, request, response, message):
        """Let the user edit an intercepted websocket message; return it."""
        # Write original message to the temp file, preceded by a one-line
        # direction header that is stripped before re-parsing
        with tempfile.NamedTemporaryFile(delete=False) as tf:
            tfName = tf.name
            tf.write(b"# ")
            if message.to_server:
                tf.write(b"OUTGOING to")
            else:
                tf.write(b"INCOMING from")
            desturl = 'ws' + url_formatter(request)[4:] # replace http:// with ws://
            tf.write(b' ' + desturl.encode())
            tf.write(b" -- Note that this line is ignored\n")
            tf.write(message.message)

        mangled_msg = message
        while True:
            # Have the console edit the file
            event = edit_file(tfName, front=True)
            event.wait()
            if event.canceled:
                return message

            # Create new mangled message from edited file
            with open(tfName, 'rb') as f:
                text = f.read()
                # Drop the direction header line written above
                _, text = text.split(b'\n', 1)

            os.remove(tfName)

            # Check if dropped (file read in binary mode, so compare bytes)
            if text == b'':
                return None

            mangled_msg.message = text
            # if messages can be invalid, check for it here and continue if invalid
            break
        return mangled_msg
|
||||
|
||||
|
||||
class EditEvent:
    """Completion signal for a queued file edit; can be canceled.

    Wraps a threading.Event (exposed as ``e``) plus a ``canceled`` flag so
    waiters can tell a finished edit from an abandoned one.
    """

    def __init__(self):
        self.e = threading.Event()
        self.canceled = False

    def cancel(self):
        """Mark the edit as abandoned and release any waiters."""
        self.canceled = True
        self.e.set()

    def set(self):
        """Mark the edit as finished, releasing any waiters."""
        self.e.set()

    def wait(self):
        """Block until the edit is finished or canceled."""
        self.e.wait()
|
||||
|
||||
###############
|
||||
## Helper funcs
|
||||
|
||||
def edit_file(fname, front=False):
    """
    Queue *fname* for editing in the console's editor.

    Returns an EditEvent that is set once the file has been edited and the
    editor closed (or canceled). When *front* is true the file jumps to the
    head of the queue.
    """
    global edit_queue
    # Adds the filename to the edit queue. Returns an event that is set once
    # the file is edited and the editor is closed
    #e = threading.Event()
    e = EditEvent()
    if front:
        # NOTE(review): this rebinds the module-global list rather than
        # mutating it in place -- any other module holding a reference to the
        # old list will not see this entry; confirm that is intended
        edit_queue = [(fname, e, threading.current_thread())] + edit_queue
    else:
        edit_queue.append((fname, e, threading.current_thread()))
    return e
|
||||
|
||||
def execute_repeater(client, reqid):
    """
    Launch the vim-based repeater for the given request id.

    Blocks until the spawned vim process exits. Requires the client to have
    a message address (maddr) so the vim plugin can talk back to the proxy.
    """
    #script_loc = os.path.join(pappy.session.config.pappy_dir, "plugins", "vim_repeater", "repeater.vim")
    maddr = client.maddr
    if maddr is None:
        print("Client has no message address, cannot run repeater")
        return
    # Split a prefixed request id into its storage and bare id components
    storage, reqid = client.parse_reqid(reqid)
    # The repeater vim script ships alongside this module
    script_loc = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                              "repeater", "repeater.vim")
    args = (["vim", "-S", script_loc, "-c", "RepeaterSetup %s %s %s"%(reqid, storage.storage_id, client.maddr)])
    subprocess.call(args)
|
||||
|
||||
class CloudToButt(InterceptMacro):
    """Example intercepting macro: rewrites request/response bodies and
    websocket messages with fixed byte-string substitutions."""

    def __init__(self):
        InterceptMacro.__init__(self)
        self.name = 'cloudtobutt'
        self.intercept_requests = True
        self.intercept_responses = True
        self.intercept_ws = True

    def mangle_request(self, request):
        # foo -> bar, in both capitalizations
        body = request.body.replace(b"foo", b"bar")
        request.body = body.replace(b"Foo", b"Bar")
        return request

    def mangle_response(self, request, response):
        # cloud -> butt, in both capitalizations
        body = response.body.replace(b"cloud", b"butt")
        response.body = body.replace(b"Cloud", b"Butt")
        return response

    def mangle_websocket(self, request, response, wsm):
        # world -> zawarudo, then shout every zawarudo (including pre-existing
        # ones -- the second replace runs over the result of the first)
        text = wsm.message.replace(b"world", b"zawarudo")
        wsm.message = text.replace(b"zawarudo", b"ZAWARUDO")
        return wsm
|
||||
|
||||
def repeater(client, args):
    """
    Open a request in the repeater
    Usage: repeater <reqid>
    """
    # This is not async on purpose. start_editor acts up if this is called
    # with inline callbacks. As a result, check_reqid and get_unmangled
    # cannot be async
    reqid = args[0]
    # Fetch the request up-front so an invalid id fails here rather than
    # inside vim.
    req = client.req_by_id(reqid)
    execute_repeater(client, reqid)
|
||||
|
||||
def intercept(client, args):
    """
    Intercept requests and/or responses and edit them with before passing them along
    Usage: intercept <reqid>
    """
    # Shared with edit_file(): the interceptor loop below pops queued files
    # and opens them in an editor one at a time.
    global edit_queue

    # Argument keywords selecting which message kinds to intercept
    req_names = ('req', 'request', 'requests')
    rsp_names = ('rsp', 'response', 'responses')
    ws_names = ('ws', 'websocket')

    mangle_macro = InterceptorMacro()
    if any(a in req_names for a in args):
        mangle_macro.intercept_requests = True
    if any(a in rsp_names for a in args):
        mangle_macro.intercept_responses = True
    if any(a in ws_names for a in args):
        mangle_macro.intercept_ws = True
    if not args:
        # No arguments: default to intercepting requests only
        mangle_macro.intercept_requests = True

    # Build the human-readable summary of what is being intercepted
    intercepting = []
    if mangle_macro.intercept_requests:
        intercepting.append('Requests')
    if mangle_macro.intercept_responses:
        intercepting.append('Responses')
    if mangle_macro.intercept_ws:
        intercepting.append('Websocket Messages')
    if not mangle_macro.intercept_requests and not mangle_macro.intercept_responses and not mangle_macro.intercept_ws:
        intercept_str = 'NOTHING WHY ARE YOU DOING THIS' # WHYYYYYYYY
    else:
        intercept_str = ', '.join(intercepting)

    ## Interceptor loop
    # Take over the terminal with curses; getch() is non-blocking so the
    # loop can poll both the keyboard and the edit queue.
    stdscr = curses.initscr()
    curses.noecho()
    curses.cbreak()
    stdscr.nodelay(True)

    conn = client.new_conn()
    try:
        conn.intercept(mangle_macro)
        editnext = False
        while True:
            stdscr.addstr(0, 0, "Currently intercepting: %s" % intercept_str)
            stdscr.clrtoeol()
            stdscr.addstr(1, 0, "%d item(s) in queue." % len(edit_queue))
            stdscr.clrtoeol()
            if editnext:
                stdscr.addstr(2, 0, "Waiting for next item... Press 'q' to quit or 'b' to quit waiting")
            else:
                stdscr.addstr(2, 0, "Press 'n' to edit the next item or 'q' to quit interceptor.")
            stdscr.clrtoeol()

            c = stdscr.getch()
            if c == ord('q'):
                # Cleanup happens in the finally block below
                return
            elif c == ord('n'):
                editnext = True
            elif c == ord('b'):
                editnext = False

            if editnext and edit_queue:
                editnext = False
                (to_edit, event, t) = edit_queue.pop(0)
                editor = 'vi'
                if 'EDITOR' in os.environ:
                    editor = os.environ['EDITOR']
                additional_args = []
                if editor == 'vim':
                    # prevent adding additional newline
                    additional_args.append('-b')
                # Blocks until the editor exits, then wakes the waiting
                # worker thread and waits for it to finish with the message.
                subprocess.call([editor, to_edit] + additional_args)
                stdscr.clear()
                event.set()
                t.join()
    finally:
        conn.close()
        # Now that the connection is closed, make sure the rest of the threads finish/error out
        while len(edit_queue) > 0:
            (fname, event, t) = edit_queue.pop(0)
            event.cancel()
            t.join()
        # Restore normal terminal behavior
        curses.nocbreak()
        stdscr.keypad(0)
        curses.echo()
        curses.endwin()
|
||||
|
||||
###############
|
||||
## Plugin hooks
|
||||
|
||||
def test_macro(client, args):
    """Run the CloudToButt demo intercept macro until the user presses enter.

    Registered as the `c2b` command; useful for manually testing that
    intercepting macros are wired up correctly.
    """
    c2b = CloudToButt()
    # BUG FIX: a second, unused connection was previously opened here
    # (conn = client.new_conn()) and never closed, leaking a connection.
    with client.new_conn() as conn:
        conn.intercept(c2b)
        print("intercept started")
        input("Press enter to quit...")
    print("past raw input")
|
||||
|
||||
def load_cmds(cmd):
    """Register this module's interceptor/repeater commands and aliases."""
    commands = {
        'intercept': (intercept, None),
        'c2b': (test_macro, None),
        'repeater': (repeater, None),
    }
    aliases = [
        ('intercept', 'ic'),
        ('repeater', 'rp'),
    ]
    cmd.set_cmds(commands)
    cmd.add_aliases(aliases)
|
||||
|
187
pappyproxy/interface/misc.py
Normal file
187
pappyproxy/interface/misc.py
Normal file
|
@ -0,0 +1,187 @@
|
|||
import argparse
|
||||
import sys
|
||||
import tempfile
|
||||
import subprocess
|
||||
from ..util import copy_to_clipboard, confirm, printable_data, Capturing, load_reqlist
|
||||
from ..console import CommandError
|
||||
from ..proxy import InterceptMacro
|
||||
from ..colors import url_formatter, verb_color, Colors, scode_color
|
||||
|
||||
class WatchMacro(InterceptMacro):
    """Intercepting macro that prints in-context traffic without altering it."""

    def __init__(self, client):
        InterceptMacro.__init__(self)
        self.name = "WatchMacro"
        # Needed to check whether each message is in the active context
        self.client = client

    def mangle_request(self, request):
        if self.client.is_in_context(request):
            line = ("> "
                    + verb_color(request.method) + request.method + Colors.ENDC + " "
                    + url_formatter(request, colored=True))
            print(line)

        return request

    def mangle_response(self, request, response):
        if self.client.is_in_context(request):
            response_code = str(response.status_code) + ' ' + response.reason
            response_code = scode_color(response_code) + response_code + Colors.ENDC
            line = ("< "
                    + verb_color(request.method) + request.method + Colors.ENDC + ' '
                    + url_formatter(request, colored=True)
                    + " \u2192 "
                    + response_code)
            print(line)

        return response

    def mangle_websocket(self, request, response, message):
        if self.client.is_in_context(request):
            direction = ">" if message.to_server else "<"
            line = (direction
                    + "ws(b={}) ".format(message.is_binary)
                    + printable_data(message.message))
            print(line)

        return message
|
||||
|
||||
def message_address(client, args):
    """Print the client's message address; with -c, also copy it to the clipboard."""
    msg_addr = client.maddr
    if msg_addr is None:
        print("Client has no message address")
        return
    print(msg_addr)
    if args and args[0] == "-c":
        try:
            copy_to_clipboard(msg_addr.encode())
            print("Copied to clipboard!")
        except:
            # Best effort: clipboard support varies by platform
            print("Could not copy address to clipboard")
|
||||
|
||||
def ping(client, args):
    """Print the proxy's response to a ping message."""
    result = client.ping()
    print(result)
|
||||
|
||||
def watch(client, args):
    """Print in-context traffic (requests, responses, websocket messages)
    as it passes through the proxy, until the user presses enter."""
    macro = WatchMacro(client)
    for attr in ('intercept_requests', 'intercept_responses', 'intercept_ws'):
        setattr(macro, attr, True)

    with client.new_conn() as conn:
        conn.intercept(macro)
        print("Watching requests. Press <Enter> to quit...")
        input()
|
||||
|
||||
def submit(client, cargs):
    """
    Resubmit some requests, optionally with modified headers and cookies.

    Usage: submit <reqid(s)> [-h] [-m] [-u] [-p] [-o REQID] [-c [COOKIES [COOKIES ...]]] [-d [HEADERS [HEADERS ...]]]
    """
    #Usage: submit reqids [-h] [-m] [-u] [-p] [-o REQID] [-c [COOKIES [COOKIES ...]]] [-d [HEADERS [HEADERS ...]]]

    if len(cargs) == 0:
        raise CommandError("Missing request id(s)")

    parser = argparse.ArgumentParser(prog="submit", usage=submit.__doc__)
    #parser.add_argument('reqids')
    parser.add_argument('-m', '--inmem', action='store_true', help='Store resubmitted requests in memory without storing them in the data file')
    parser.add_argument('-u', '--unique', action='store_true', help='Only resubmit one request per endpoint (different URL parameters are different endpoints)')
    parser.add_argument('-p', '--uniquepath', action='store_true', help='Only resubmit one request per endpoint (ignoring URL parameters)')
    parser.add_argument('-c', '--cookies', nargs='*', help='Apply a cookie to requests before submitting')
    parser.add_argument('-d', '--headers', nargs='*', help='Apply a header to requests before submitting')
    parser.add_argument('-o', '--copycookies', help='Copy the cookies used in another request')

    # NOTE(review): reqids is parsed but currently unused; the requests
    # actually submitted come from the context below — TODO confirm intent.
    reqids = cargs[0]
    args = parser.parse_args(cargs[1:])

    headers = {}
    cookies = {}
    clear_cookies = False

    # Headers given as key=value pairs
    if args.headers:
        for h in args.headers:
            k, v = h.split('=', 1)
            headers[k] = v

    # -o: replace all cookies with those from another request
    if args.copycookies:
        reqid = args.copycookies
        req = client.req_by_id(reqid)
        clear_cookies = True
        for k, v in req.cookie_iter():
            cookies[k] = v

    # Cookies given as key=value pairs (applied after any copied cookies)
    if args.cookies:
        for c in args.cookies:
            k, v = c.split('=', 1)
            cookies[k] = v

    if args.unique and args.uniquepath:
        raise CommandError('Both -u and -p cannot be given as arguments')

    # Get requests to submit
    #reqs = [r.copy() for r in client.in_context_requests()]
    reqs = client.in_context_requests()

    # Apply cookies and headers
    for req in reqs:
        if clear_cookies:
            req.headers.delete("Cookie")
        for k, v in cookies.items():
            req.set_cookie(k, v)
        for k, v in headers.items():
            req.headers.set(k, v)

    conf_message = "You're about to submit %d requests, continue?" % len(reqs)
    if not confirm(conf_message):
        return

    # Filter unique paths
    if args.uniquepath or args.unique:
        endpoints = set()
        new_reqs = []
        for r in reqs:
            # BUG FIX: this previously tested `unique_path_and_args`, an
            # undefined name, raising NameError whenever -u/-p was used.
            # -u keys the endpoint on the full URL including parameters;
            # -p ignores parameters.
            if args.unique:
                s = r.url.geturl()
            else:
                s = r.url.geturl(include_params=False)

            if not s in endpoints:
                new_reqs.append(r)
                endpoints.add(s)
        reqs = new_reqs

    # Tag and send them
    for req in reqs:
        req.tags.add('resubmitted')
        sys.stdout.write(client.get_reqid(req) + " ")
        sys.stdout.flush()

        storage = client.disk_storage.storage_id
        if args.inmem:
            storage = client.inmem_storage.storage_id

        client.submit(req, storage=storage)
    sys.stdout.write("\n")
    sys.stdout.flush()
|
||||
|
||||
|
||||
def run_with_less(client, args):
    """Run a console command, capture its output, and page it through less -R."""
    with Capturing() as output:
        client.console.run_args(args)
    with tempfile.NamedTemporaryFile() as tf:
        tf.write(output.val.encode())
        # BUG FIX: flush the buffered write so less sees the full output
        # (previously less could read an empty/truncated file).
        tf.flush()
        subprocess.call(['less', '-R', tf.name])
|
||||
|
||||
def load_cmds(cmd):
    """Register this module's misc commands with the console."""
    commands = {
        'maddr': (message_address, None),
        'ping': (ping, None),
        'submit': (submit, None),
        'watch': (watch, None),
        'less': (run_with_less, None),
    }
    cmd.set_cmds(commands)
|
0
pappyproxy/interface/repeater/__init__.py
Normal file
0
pappyproxy/interface/repeater/__init__.py
Normal file
1607
pappyproxy/interface/repeater/repeater.py
Normal file
1607
pappyproxy/interface/repeater/repeater.py
Normal file
File diff suppressed because it is too large
Load diff
20
pappyproxy/interface/repeater/repeater.vim
Normal file
20
pappyproxy/interface/repeater/repeater.vim
Normal file
|
@ -0,0 +1,20 @@
|
|||
" Vim-side glue for the Pappy repeater: defines the commands the proxy
" invokes and forwards them to the bundled repeater.py script.
if !has('python')
    echo "Vim must support python in order to use the repeater"
    finish
endif

" Settings to make life easier
set hidden

" Absolute path to repeater.py, resolved relative to this script
let s:pyscript = resolve(expand('<sfile>:p:h') . '/repeater.py')

" Re-runs repeater.py for every action; the python script dispatches on
" the arguments passed to the command.
function! RepeaterAction(...)
    execute 'pyfile ' . s:pyscript
endfunc

command! -nargs=* RepeaterSetup call RepeaterAction('setup', <f-args>)
command! RepeaterSubmitBuffer call RepeaterAction('submit')

" Bind forward to <leader>f
nnoremap <leader>f :RepeaterSubmitBuffer<CR>
|
||||
|
64
pappyproxy/interface/tags.py
Normal file
64
pappyproxy/interface/tags.py
Normal file
|
@ -0,0 +1,64 @@
|
|||
from ..console import CommandError
|
||||
from ..util import confirm, load_reqlist
|
||||
|
||||
def tag_cmd(client, args):
    """Add a tag to the given requests ('*' = all in-context requests)."""
    if not args:
        raise CommandError("Usage: tag <tag> [reqid1] [reqid2] ...")
    if not args[0]:
        raise CommandError("Tag cannot be empty")
    tag = args[0]
    # No explicit ids means tag everything in context
    reqids = ','.join(args[1:]) if len(args) > 1 else '*'
    reqs = list(load_reqlist(client, reqids, headers_only=True))
    if len(reqs) > 10:
        # Guard against accidentally tagging a huge set of requests
        if not confirm("You are about to tag {} requests with \"{}\". Continue?".format(len(reqs), tag)):
            return
    for reqh in reqs:
        client.add_tag(client.get_reqid(reqh), tag)
|
||||
|
||||
def untag_cmd(client, args):
    """Remove a tag from the given requests ('*' = all in-context requests)."""
    if not args:
        raise CommandError("Usage: untag <tag> [reqid1] [reqid2] ...")
    if not args[0]:
        raise CommandError("Tag cannot be empty")
    tag = args[0]
    # No explicit ids means untag everything in context
    reqids = ','.join(args[1:]) if len(args) > 1 else '*'
    reqs = list(load_reqlist(client, reqids, headers_only=True))
    if len(reqs) > 10:
        # Guard against accidentally untagging a huge set of requests
        if not confirm("You are about to remove the \"{}\" tag from {} requests. Continue?".format(tag, len(reqs))):
            return
    for reqh in reqs:
        client.remove_tag(client.get_reqid(reqh), tag)
|
||||
|
||||
def clrtag_cmd(client, args):
    """Remove ALL tags from the given requests ('*' = all in-context requests)."""
    if not args:
        raise CommandError("Usage: clrtag [reqid1] [reqid2] ...")
    # No explicit ids means clear tags from everything in context
    reqids = ','.join(args[1:]) if len(args) > 1 else '*'
    reqs = list(load_reqlist(client, reqids, headers_only=True))
    if len(reqs) > 5:
        # Destructive: confirm before clearing tags from many requests
        if not confirm("You are about to clear ALL TAGS from {} requests. Continue?".format(len(reqs))):
            return
    for reqh in reqs:
        client.clear_tag(client.get_reqid(reqh))
|
||||
|
||||
def load_cmds(cmd):
    """Register the tagging commands with the console."""
    commands = {
        'clrtag': (clrtag_cmd, None),
        'untag': (untag_cmd, None),
        'tag': (tag_cmd, None),
    }
    cmd.set_cmds(commands)
|
7
pappyproxy/interface/test.py
Normal file
7
pappyproxy/interface/test.py
Normal file
|
@ -0,0 +1,7 @@
|
|||
|
||||
def test_cmd(client, args):
|
||||
print("args:", ', '.join(args))
|
||||
print("ping:", client.ping())
|
||||
|
||||
def load_cmds(cons):
    """Register the test command with the console."""
    cons.set_cmd("test", test_cmd)
|
741
pappyproxy/interface/view.py
Normal file
741
pappyproxy/interface/view.py
Normal file
|
@ -0,0 +1,741 @@
|
|||
import datetime
|
||||
import json
|
||||
import pygments
|
||||
import pprint
|
||||
import re
|
||||
import shlex
|
||||
import urllib
|
||||
|
||||
from ..util import print_table, print_request_rows, get_req_data_row, datetime_string, maybe_hexdump, load_reqlist
|
||||
from ..colors import Colors, Styles, verb_color, scode_color, path_formatter, color_string, url_formatter, pretty_msg, pretty_headers
|
||||
from ..console import CommandError
|
||||
from pygments.formatters import TerminalFormatter
|
||||
from pygments.lexers.data import JsonLexer
|
||||
from pygments.lexers.html import XmlLexer
|
||||
from urllib.parse import parse_qs, unquote
|
||||
|
||||
###################
|
||||
## Helper functions
|
||||
|
||||
def view_full_message(request, headers_only=False, try_ws=False):
    # Print a request in full. With headers_only, print just the headers;
    # with try_ws, render the websocket session (handshake + messages) if
    # the request has any websocket messages.
    def _print_message(mes):
        # Render a single websocket message: colored direction marker,
        # mangled flag, metadata line, then a (possibly hexdumped) body.
        print_str = ''
        if mes.to_server == False:
            print_str += Colors.BLUE
            print_str += '< Incoming'
        else:
            print_str += Colors.GREEN
            print_str += '> Outgoing'
        print_str += Colors.ENDC
        if mes.unmangled:
            print_str += ', ' + Colors.UNDERLINE + 'mangled' + Colors.ENDC
        # T+ offset relative to the start of the request, when known
        t_plus = "??"
        if request.time_start:
            t_plus = mes.timestamp - request.time_start
        print_str += ', binary = %s, T+%ss\n' % (mes.is_binary, t_plus.total_seconds())

        print_str += Colors.ENDC
        print_str += maybe_hexdump(mes.message).decode()
        print_str += '\n'
        return print_str

    if headers_only:
        print(pretty_headers(request))
    else:
        if try_ws and request.ws_messages:
            print_str = ''
            print_str += Styles.TABLE_HEADER
            print_str += "Websocket session handshake\n"
            print_str += Colors.ENDC
            print_str += pretty_msg(request)
            print_str += '\n'
            print_str += Styles.TABLE_HEADER
            print_str += "Websocket session \n"
            print_str += Colors.ENDC
            for wsm in request.ws_messages:
                print_str += _print_message(wsm)
                if wsm.unmangled:
                    # Show the original (unmangled) message between
                    # separator bars
                    print_str += Colors.YELLOW
                    print_str += '-'*10
                    print_str += Colors.ENDC
                    print_str += ' vv UNMANGLED vv '
                    print_str += Colors.YELLOW
                    print_str += '-'*10
                    print_str += Colors.ENDC
                    print_str += '\n'
                    print_str += _print_message(wsm.unmangled)
                    print_str += Colors.YELLOW
                    print_str += '-'*20 + '-'*len(' ^^ UNMANGLED ^^ ')
                    print_str += '\n'
                    print_str += Colors.ENDC
            print(print_str)
        else:
            print(pretty_msg(request))
|
||||
|
||||
def print_request_extended(client, request):
    # Prints extended info for the request
    title = "Request Info (reqid=%s)" % client.get_reqid(request)
    print(Styles.TABLE_HEADER + title + Colors.ENDC)
    reqlen = len(request.body)
    reqlen = '%d bytes' % reqlen
    rsplen = 'No response'

    # Which parts of the exchange were mangled, if any
    mangle_str = 'Nothing mangled'
    if request.unmangled:
        mangle_str = 'Request'

    if request.response:
        response_code = str(request.response.status_code) + \
            ' ' + request.response.reason
        response_code = scode_color(response_code) + response_code + Colors.ENDC
        rsplen = request.response.content_length
        rsplen = '%d bytes' % rsplen

        if request.response.unmangled:
            if mangle_str == 'Nothing mangled':
                mangle_str = 'Response'
            else:
                mangle_str += ' and Response'
    else:
        response_code = ''

    # Round-trip time, when both timestamps are present
    time_str = '--'
    if request.time_end is not None and request.time_start is not None:
        time_delt = request.time_end - request.time_start
        time_str = "%.2f sec" % time_delt.total_seconds()

    if request.use_tls:
        is_ssl = 'YES'
    else:
        # Highlight plaintext traffic in red
        is_ssl = Colors.RED + 'NO' + Colors.ENDC

    if request.time_start:
        time_made_str = datetime_string(request.time_start)
    else:
        time_made_str = '--'

    verb = verb_color(request.method) + request.method + Colors.ENDC
    host = color_string(request.dest_host)

    colored_tags = [color_string(t) for t in request.tags]

    # Build the key/value rows in display order, then print them
    print_pairs = []
    print_pairs.append(('Made on', time_made_str))
    print_pairs.append(('ID', client.get_reqid(request)))
    print_pairs.append(('URL', url_formatter(request, colored=True)))
    print_pairs.append(('Host', host))
    print_pairs.append(('Path', path_formatter(request.url.path)))
    print_pairs.append(('Verb', verb))
    print_pairs.append(('Status Code', response_code))
    print_pairs.append(('Request Length', reqlen))
    print_pairs.append(('Response Length', rsplen))
    if request.response and request.response.unmangled:
        print_pairs.append(('Unmangled Response Length', request.response.unmangled.content_length))
    print_pairs.append(('Time', time_str))
    print_pairs.append(('Port', request.dest_port))
    print_pairs.append(('SSL', is_ssl))
    print_pairs.append(('Mangled', mangle_str))
    print_pairs.append(('Tags', ', '.join(colored_tags)))

    for k, v in print_pairs:
        print(Styles.KV_KEY+str(k)+': '+Styles.KV_VAL+str(v))
|
||||
|
||||
def pretty_print_body(fmt, body):
    """Pretty print a message body.

    fmt: one of 'json', 'form', 'text', 'xml' (case-insensitive).
    body: raw bytes of the message body.
    Raises CommandError for an unknown format or an unparseable body.
    """
    try:
        bstr = body.decode()
        if fmt.lower() == 'json':
            d = json.loads(bstr.strip())
            s = json.dumps(d, indent=4, sort_keys=True)
            print(pygments.highlight(s, JsonLexer(), TerminalFormatter()))
        elif fmt.lower() == 'form':
            qs = parse_qs(bstr, keep_blank_values=True)
            for k, vs in qs.items():
                for v in vs:
                    s = Colors.GREEN
                    s += '%s: ' % unquote(k)
                    s += Colors.ENDC
                    if v == '':
                        # Make blank values stand out
                        s += Colors.RED
                        s += 'EMPTY'
                        s += Colors.ENDC
                    else:
                        s += unquote(v)
                    print(s)
        elif fmt.lower() == 'text':
            print(bstr)
        elif fmt.lower() == 'xml':
            import xml.dom.minidom
            # FIX: previously assigned to a local named `xml`, shadowing the
            # package that was just imported; use a distinct name instead.
            dom = xml.dom.minidom.parseString(bstr)
            print(pygments.highlight(dom.toprettyxml(), XmlLexer(), TerminalFormatter()))
        else:
            raise CommandError('"%s" is not a valid format' % fmt)
    except CommandError as e:
        # Propagate our own errors unchanged
        raise e
    except Exception as e:
        raise CommandError('Body could not be parsed as "{}": {}'.format(fmt, e))
|
||||
|
||||
def print_params(client, req, params=None):
    # Print the URL parameters, body/POST parameters, and cookies of a
    # request. If `params` is given, only show keys in that collection
    # (the body section is always printed in full).
    if not req.url.parameters() and not req.body:
        # NOTE(review): prints the notice but still falls through to the
        # cookie section below — confirm that is intended.
        print('Request %s has no url or data parameters' % client.get_reqid(req))
        print('')
    if req.url.parameters():
        print(Styles.TABLE_HEADER + "Url Params" + Colors.ENDC)
        for k, v in req.url.param_iter():
            if params is None or (params and k in params):
                print(Styles.KV_KEY+str(k)+': '+Styles.KV_VAL+str(v))
        print('')
    if req.body:
        print(Styles.TABLE_HEADER + "Body/POST Params" + Colors.ENDC)
        # Body is rendered with the format guessed from Content-Type
        pretty_print_body(guess_pretty_print_fmt(req), req.body)
        print('')
    if 'cookie' in req.headers:
        print(Styles.TABLE_HEADER + "Cookies" + Colors.ENDC)
        for k, v in req.cookie_iter():
            if params is None or (params and k in params):
                print(Styles.KV_KEY+str(k)+': '+Styles.KV_VAL+str(v))
        print('')
    # multiform request when we support it
|
||||
|
||||
def guess_pretty_print_fmt(msg):
    """Pick a pretty-print format ('json', 'form', 'xml' or 'text') from
    the message's Content-Type header; default to 'text'."""
    if 'content-type' in msg.headers:
        ctype = msg.headers.get('content-type')
        if 'json' in ctype:
            return 'json'
        if 'www-form' in ctype:
            return 'form'
        if 'application/xml' in ctype:
            return 'xml'
    return 'text'
|
||||
|
||||
def print_tree(tree):
    """Print a site tree. Takes a sorted list of path tuples."""
    _print_tree_helper(tree, 0, [])
|
||||
|
||||
def _get_tree_prefix(depth, print_bars, last):
    # Build the box-drawing prefix for one tree row.
    # depth: nesting level (0 = root, no prefix).
    # print_bars: per-level flags; True means an ancestor at that level has
    # more siblings below, so a vertical bar must be drawn.
    # last: True if this row is the final child of its parent.
    if depth == 0:
        return u''
    else:
        ret = u''
        pb = print_bars + [True]
        for i in range(depth):
            if pb[i]:
                ret += u'\u2502 '
            else:
                ret += u' '
        # Corner for the last child, tee otherwise
        if last:
            ret += u'\u2514\u2500 '
        else:
            ret += u'\u251c\u2500 '
        return ret
|
||||
|
||||
def _print_tree_helper(tree, depth, print_bars):
    # Takes in a tree and prints it at the given depth
    # `tree` is a sorted list of path tuples; rows sharing a first element
    # form a subtree, printed recursively with the first element stripped.
    if tree == [] or tree == [()]:
        return
    # Drop leading empty tuples (fully-consumed paths)
    while tree[0] == ():
        tree = tree[1:]
        if tree == [] or tree == [()]:
            return
    # Single remaining leaf: print it as the last child and stop
    if len(tree) == 1 and len(tree[0]) == 1:
        print(_get_tree_prefix(depth, print_bars + [False], True) + tree[0][0])
        return

    curkey = tree[0][0]
    subtree = []
    for row in tree:
        if row[0] != curkey:
            # Key changed: flush the accumulated subtree for the previous key
            if curkey == '':
                curkey = '/'
            print(_get_tree_prefix(depth, print_bars, False) + curkey)
            if depth == 0:
                _print_tree_helper(subtree, depth+1, print_bars + [False])
            else:
                _print_tree_helper(subtree, depth+1, print_bars + [True])
            curkey = row[0]
            subtree = []
        # Strip the shared first element before recursing
        subtree.append(row[1:])
    # Flush the final key's subtree (last child at this level)
    if curkey == '':
        curkey = '/'
    print(_get_tree_prefix(depth, print_bars, True) + curkey)
    _print_tree_helper(subtree, depth+1, print_bars + [False])
|
||||
|
||||
|
||||
def add_param(found_params, kind: str, k: str, v: str, reqid: str):
    """Record one observed parameter occurrence.

    found_params maps param name -> {kind -> [(reqid, value), ...]}.
    kind is e.g. 'Url Parameter', 'POST Parameter' or 'Cookie'.
    """
    if type(k) is not str:
        raise Exception("BAD")
    by_kind = found_params.setdefault(k, {})
    entry = (reqid, v)
    if kind in by_kind:
        by_kind[kind].append(entry)
    else:
        by_kind[kind] = [entry]
|
||||
|
||||
def print_param_info(param_info):
    """Print each parameter with its observed values and the request ids
    that used each value (as built by add_param)."""
    for name, kinds in param_info.items():
        print(Styles.TABLE_HEADER + name + Colors.ENDC)
        for param_type, valpairs in kinds.items():
            print(param_type)
            # Group request ids by observed value
            value_ids = {}
            for reqid, val in valpairs:
                value_ids.setdefault(val, []).append(reqid)
            for val, ids in value_ids.items():
                # Cap the id list at 15 entries to keep output readable
                if len(ids) <= 15:
                    idstr = ', '.join(ids)
                else:
                    idstr = ', '.join(ids[:15]) + '...'
                if val == '':
                    printstr = (Colors.RED + 'BLANK' + Colors.ENDC + 'x%d (%s)') % (len(ids), idstr)
                else:
                    printstr = (Colors.GREEN + '%s' + Colors.ENDC + 'x%d (%s)') % (val, len(ids), idstr)
                print(printstr)
        print('')
|
||||
|
||||
def path_tuple(url):
    """Split a URL's path into a tuple of its '/'-separated segments."""
    segments = url.path.split('/')
    return tuple(segments)
|
||||
|
||||
####################
|
||||
## Command functions
|
||||
|
||||
def list_reqs(client, args):
    """
    List the most recent in-context requests. By default shows the most recent 25
    Usage: list [a|num]

    If `a` is given, all the in-context requests are shown. If a number is given,
    that many requests will be shown.
    """
    print_count = 25
    if len(args) > 0:
        if args[0][0].lower() == 'a':
            # 0 means "no limit" for max_results
            print_count = 0
        else:
            try:
                print_count = int(args[0])
            except:
                print("Please enter a valid argument for list")
                return

    reqs = client.in_context_requests(headers_only=True, max_results=print_count)
    rows = [get_req_data_row(req, client=client) for req in reqs]
    print_request_rows(rows)
|
||||
|
||||
def view_full_request(client, args):
    """
    View the full data of the request
    Usage: view_full_request <reqid(s)>
    """
    if not args:
        raise CommandError("Request id is required")
    for req in load_reqlist(client, args[0]):
        print('-- Request id=%s --------------------' % req.db_id)
        view_full_message(req, try_ws=True)
|
||||
|
||||
def view_full_response(client, args):
    """
    View the full data of the response associated with a request
    Usage: view_full_response <reqid>
    """
    if not args:
        raise CommandError("Request id is required")
    reqs = load_reqlist(client, args[0])
    for req in reqs:
        if not req.response:
            # BUG FIX: previously formatted an undefined name `reqid`
            # (NameError); use the request's actual id instead.
            print("-- Request {} does not have an associated response".format(client.get_reqid(req)))
        else:
            print('-- Request id=%s --------------------' % req.db_id)
            view_full_message(req.response)
|
||||
|
||||
def view_request_headers(client, args):
    """
    View the headers of the request
    Usage: view_request_headers <reqid(s)>
    """
    if not args:
        raise CommandError("Request id is required")
    for req in load_reqlist(client, args[0], headers_only=True):
        print('-- Request id=%s --------------------' % req.db_id)
        view_full_message(req, headers_only=True)
|
||||
|
||||
def view_response_headers(client, args):
    """
    View the full data of the response associated with a request
    Usage: view_full_response <reqid>
    """
    if not args:
        raise CommandError("Request id is required")
    reqs = load_reqlist(client, args[0], headers_only=True)
    for req in reqs:
        if not req.response:
            # BUG FIX: previously formatted an undefined name `reqid`
            # (NameError); use the request's actual id instead.
            print("-- Request {} does not have an associated response".format(client.get_reqid(req)))
        else:
            print('-- Request id=%s --------------------' % req.db_id)
            view_full_message(req.response, headers_only=True)
|
||||
|
||||
def view_request_info(client, args):
    """
    View information about request
    Usage: view_request_info <reqid(s)>
    """
    if not args:
        raise CommandError("Request id is required")
    reqs = load_reqlist(client, args[0], headers_only=True)
    for req in reqs:
        print_request_extended(client, req)
        print('')
    # (removed an unreachable duplicate "if not args" check that
    # previously followed the loop — args was already validated above)
|
||||
|
||||
def pretty_print_request(client, args):
    """
    Print the body of the request pretty printed.
    Usage: pretty_print_request <format> <reqid(s)>
    """
    if len(args) < 2:
        raise CommandError("Usage: pretty_print_request <format> <reqid(s)>")
    print_type = args[0]
    for req in load_reqlist(client, args[1]):
        print('-- Request id=%s --------------------' % req.db_id)
        try:
            pretty_print_body(print_type, req.body)
        except Exception as e:
            # Report the parse failure but keep going with other requests
            print(str(e))
|
||||
|
||||
def pretty_print_response(client, args):
    """
    Print the body of the response pretty printed.
    Usage: pretty_print_response <format> <reqid(s)>
    """
    if len(args) < 2:
        raise CommandError("Usage: pretty_print_request <format> <reqid(s)>")
    print_type = args[0]
    reqs = load_reqlist(client, args[1])
    for req in reqs:
        print('-- Request id=%s --------------------' % req.db_id)
        if not req.response:
            # BUG FIX: previously formatted an undefined name `reqid`
            # (NameError); use the request's actual id instead.
            print("request {} does not have an associated response".format(client.get_reqid(req)))
            continue
        try:
            pretty_print_body(print_type, req.response.body)
        except Exception as e:
            # Report the parse failure but keep going with other requests
            print(str(e))
|
||||
|
||||
def print_params_cmd(client, args):
    """
    View the parameters of a request
    Usage: print_params <reqid(s)> [key 1] [key 2] ...
    """
    if not args:
        raise CommandError("Request id is required")
    keys = args[1:] if len(args) > 1 else None

    for req in load_reqlist(client, args[0]):
        print('-- Request id=%s --------------------' % req.db_id)
        print_params(client, req, keys)
|
||||
|
||||
def get_param_info(client, args):
    # Summarize every URL/POST/cookie parameter observed in the given
    # requests. Usage: <reqid(s)> [ct] [key ...]; 'ct' switches key matching
    # from exact to case-insensitive substring ("contains").
    if len(args) == 0:
        raise CommandError("Request ID(s) required")
    reqs = load_reqlist(client, args[0])
    args = args[1:]

    if args and args[0] == 'ct':
        contains = True
        args = args[1:]
    else:
        contains = False

    # Remaining args are the keys to report; None means report all keys
    if args:
        params = tuple(args)
    else:
        params = None

    def check_key(k, params, contains):
        # Decide whether parameter name `k` should be included in the report
        if contains:
            for p in params:
                if p.lower() in k.lower():
                    return True
        else:
            if params is None or k in params:
                return True
        return False

    found_params = {}

    for req in reqs:
        prefixed_id = client.get_reqid(req)
        for k, v in req.url.param_iter():
            # Sanity check: URL param keys are expected to be str
            if type(k) is not str:
                raise Exception("BAD")
            if check_key(k, params, contains):
                add_param(found_params, 'Url Parameter', k, v, prefixed_id)
        for k, v in req.param_iter():
            if check_key(k, params, contains):
                add_param(found_params, 'POST Parameter', k, v, prefixed_id)
        for k, v in req.cookie_iter():
            if check_key(k, params, contains):
                add_param(found_params, 'Cookie', k, v, prefixed_id)
    print_param_info(found_params)
|
||||
|
||||
def find_urls(client, args):
    """
    Print every URL found in the given requests and their responses.

    Usage: urls [reqid(s)]

    With no ids, searches every request in the current context.
    """
    if len(args) > 0:
        reqs = load_reqlist(client, args[0])
    else:
        reqs = client.in_context_requests_iter() # update to take reqlist

    # Raw bytes literal so regex escapes like \w and \. reach the regex
    # engine verbatim; compiled once instead of on every findall call.
    url_regexp = re.compile(rb'((?:http|ftp|https)://(?:[\w_-]+(?:(?:\.[\w_-]+)+))(?:[\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?)')
    urls = set()
    for req in reqs:
        urls |= set(url_regexp.findall(req.full_message()))
        if req.response:
            urls |= set(url_regexp.findall(req.response.full_message()))
    for url in sorted(urls):
        print(url.decode())
def site_map(client, args):
    """
    Print the site map. Only includes requests in the current context.

    Usage: site_map
    """
    # 'p' argument prints flat paths instead of a tree.
    paths = len(args) > 0 and args[0] == 'p'
    all_reqs = client.in_context_requests(headers_only=True)

    # Group the context's requests by destination host.
    reqs_by_host = {}
    for req in all_reqs:
        reqs_by_host.setdefault(req.dest_host, []).append(req)

    for host, reqs in reqs_by_host.items():
        # Collect unique paths that actually resolved (skip 404s and
        # requests without a response).
        path_set = {path_tuple(req.url) for req in reqs
                    if req.response and req.response.status_code != 404}
        tree = sorted(path_set)
        print(host)
        if paths:
            for p in tree:
                print('/'.join(list(p)))
        else:
            print_tree(tree)
        print("")
def save_request(client, args):
    """Write the raw message of the given request(s) to a file."""
    if not args:
        raise CommandError("Request id is required")
    for req in load_reqlist(client, args[0]):
        # NOTE(review): when several request ids are given together with an
        # explicit filename, each request overwrites the previous file.
        if len(args) >= 2:
            fname = args[1]
        else:
            fname = "req_%s" % client.get_reqid(req)

        with open(fname, 'wb') as f:
            f.write(req.full_message())
        print('Request written to {}'.format(fname))
def save_response(client, args):
    """Write the raw response message of the given request(s) to a file."""
    if not args:
        raise CommandError("Request id(s) is required")
    for req in load_reqlist(client, args[0]):
        if not req.response:
            print('Request {} does not have a response'.format(req.reqid))
            continue
        rsp = req.response
        # NOTE(review): with an explicit filename and several requests,
        # each response overwrites the previous file.
        if len(args) >= 2:
            fname = args[1]
        else:
            fname = "rsp_%s" % client.get_reqid(req)

        with open(fname, 'wb') as f:
            f.write(rsp.full_message())
        print('Response written to {}'.format(fname))
def dump_response(client, args):
    """
    Dump the data of the response to a file.

    Usage: dump_response <id> <filename>

    With no filename, the last path segment of the request URL is used.
    """
    # dump the data of a response
    if not args:
        raise CommandError("Request id(s) is required")
    reqs = load_reqlist(client, args[0])
    for req in reqs:
        if req.response:
            rsp = req.response
            if len(args) >= 2:
                fname = args[1]
            else:
                fname = req.url.path.split('/')[-1]
                if not fname:
                    # Path ended in '/' (or was empty), so there is no
                    # basename; fall back to a name derived from the id
                    # instead of trying to open('').
                    fname = "rsp_%s" % client.get_reqid(req)

            with open(fname, 'wb') as f:
                f.write(rsp.body)
            print('Response body written to {}'.format(fname))
        else:
            print('Request {} does not have a response'.format(req.reqid))
def get_surrounding_lines(s, n, lines):
    """
    Return the slice of *s* around index *n*, extending until *lines*
    newlines have been passed in each direction (or the edge of the
    string is reached).
    """
    # Walk left from n until enough newlines have been crossed or we
    # reach the start of the string.
    lo = n
    seen = 0
    while lo > 0 and seen < lines:
        if s[lo] == '\n':
            seen += 1
        lo -= 1

    # Walk right from n the same way, toward the end of the string.
    hi = n
    seen = 0
    while hi < len(s) and seen < lines:
        if s[hi] == '\n':
            seen += 1
        hi += 1

    return s[lo:hi]
def print_search_header(reqid, locstr):
    """Print a styled header line introducing search results for one request."""
    print("{}Result(s) for request {} ({}){}".format(
        Styles.TABLE_HEADER, reqid, locstr, Colors.ENDC))
def highlight_str(s, substr):
    """Return *s* with every occurrence of *substr* color-highlighted."""
    marked = ''.join(
        (Colors.BGYELLOW, Colors.BLACK, Colors.BOLD, substr, Colors.ENDC))
    return s.replace(substr, marked)
def search_message(mes, substr, lines, reqid, locstr):
    """
    Print every regex match of *substr* in *mes* with *lines* lines of
    surrounding context each. A header identifying the request is printed
    before the first match only; nothing is printed if there is no match.
    """
    header_printed = False
    for match in re.finditer(substr, mes):
        if not header_printed:
            print_search_header(reqid, locstr)
            header_printed = True
        context = get_surrounding_lines(mes, match.start(), lines)
        # Truncate each line so one enormous line can't flood the terminal.
        context = '\n'.join(line[:500] for line in context.split('\n'))
        print(highlight_str(context, substr))
        print('-'*50)
def search(client, args):
    """
    Search in-context requests, responses, and websocket messages for a
    string and print the matches with surrounding context.

    Usage: search <string> [context lines]
    """
    if not args:
        # Guard added for consistency with the other commands; the
        # original raised IndexError on args[0].
        raise CommandError("Search string is required")
    search_str = args[0]
    lines = 2
    if len(args) > 1:
        lines = int(args[1])
    for req in client.in_context_requests_iter():
        reqid = client.get_reqid(req)
        # Binary messages that can't be decoded are silently skipped.
        try:
            mes = req.full_message().decode()
            search_message(mes, search_str, lines, reqid, "Request")
        except UnicodeDecodeError:
            pass
        if req.response:
            try:
                mes = req.response.full_message().decode()
                search_message(mes, search_str, lines, reqid, "Response")
            except UnicodeDecodeError:
                pass

        wsheader_printed = False
        for wsm in req.ws_messages:
            if search_str in wsm.message:
                # Print the section header once, and only when something
                # actually matches. The original printed it for every
                # websocket message regardless of a match, and called
                # print_search_header with three arguments although it
                # takes two (a TypeError at runtime).
                if not wsheader_printed:
                    print_search_header(reqid, "Websocket Messages")
                    wsheader_printed = True
                print(highlight_str(wsm.message, search_str))
|
||||
# @crochet.wait_for(timeout=None)
|
||||
# @defer.inlineCallbacks
|
||||
# def view_request_bytes(line):
|
||||
# """
|
||||
# View the raw bytes of the request. Use this if you want to redirect output to a file.
|
||||
# Usage: view_request_bytes <reqid(s)>
|
||||
# """
|
||||
# args = shlex.split(line)
|
||||
# if not args:
|
||||
# raise CommandError("Request id is required")
|
||||
# reqid = args[0]
|
||||
|
||||
# reqs = yield load_reqlist(reqid)
|
||||
# for req in reqs:
|
||||
# if len(reqs) > 1:
|
||||
# print 'Request %s:' % req.reqid
|
||||
# print req.full_message
|
||||
# if len(reqs) > 1:
|
||||
# print '-'*30
|
||||
# print ''
|
||||
|
||||
# @crochet.wait_for(timeout=None)
|
||||
# @defer.inlineCallbacks
|
||||
# def view_response_bytes(line):
|
||||
# """
|
||||
# View the full data of the response associated with a request
|
||||
# Usage: view_request_bytes <reqid(s)>
|
||||
# """
|
||||
# reqs = yield load_reqlist(line)
|
||||
# for req in reqs:
|
||||
# if req.response:
|
||||
# if len(reqs) > 1:
|
||||
# print '-'*15 + (' %s ' % req.reqid) + '-'*15
|
||||
# print req.response.full_message
|
||||
# else:
|
||||
# print "Request %s does not have a response" % req.reqid
|
||||
|
||||
|
||||
###############
|
||||
## Plugin hooks
|
||||
|
||||
def load_cmds(cmd):
    """Register this module's commands and their aliases with *cmd*."""
    # Command name -> (handler, completer). No completers are provided here.
    cmd.set_cmds({
        'list': (list_reqs, None),
        'view_full_request': (view_full_request, None),
        'view_full_response': (view_full_response, None),
        'view_request_headers': (view_request_headers, None),
        'view_response_headers': (view_response_headers, None),
        'view_request_info': (view_request_info, None),
        'pretty_print_request': (pretty_print_request, None),
        'pretty_print_response': (pretty_print_response, None),
        'print_params': (print_params_cmd, None),
        'param_info': (get_param_info, None),
        'urls': (find_urls, None),
        'site_map': (site_map, None),
        'dump_response': (dump_response, None),
        'save_request': (save_request, None),
        'save_response': (save_response, None),
        'search': (search, None),
        # 'view_request_bytes': (view_request_bytes, None),
        # 'view_response_bytes': (view_response_bytes, None),
    })
    # (command, alias) pairs; a command may have several aliases
    # (e.g. 'vfq' and 'kjq' both map to view_full_request).
    cmd.add_aliases([
        ('list', 'ls'),
        ('view_full_request', 'vfq'),
        ('view_full_request', 'kjq'),
        ('view_request_headers', 'vhq'),
        ('view_response_headers', 'vhs'),
        ('view_full_response', 'vfs'),
        ('view_full_response', 'kjs'),
        ('view_request_info', 'viq'),
        ('pretty_print_request', 'ppq'),
        ('pretty_print_response', 'pps'),
        ('print_params', 'pprm'),
        ('param_info', 'pri'),
        ('site_map', 'sm'),
        ('save_request', 'savereq'),
        ('save_response', 'saversp'),
        # ('view_request_bytes', 'vbq'),
        # ('view_response_bytes', 'vbs'),
        # #('dump_response', 'dr'),
    ])
|
Loading…
Add table
Add a link
Reference in a new issue