Version 0.2.13
This commit is contained in:
parent 54c1f5e409
commit f8795a4c48
20 changed files with 2425 additions and 875 deletions
@@ -3,6 +3,7 @@ import pappyproxy

 from pappyproxy.util import PappyException, confirm, autocomplete_startswith
 from pappyproxy.http import Request
+from pappyproxy.context import save_context, delete_saved_context, get_saved_context, get_all_saved_contexts
 from twisted.internet import defer

 class BuiltinFilters(object):
@@ -157,9 +158,9 @@ def filter_prune(line):
     act_reqs = yield pappyproxy.pappy.main_context.get_reqs()
     inact_reqs = set(Request.cache.req_ids()).difference(set(act_reqs))
     message = 'This will delete %d/%d requests. You can NOT undo this!! Continue?' % (len(inact_reqs), (len(inact_reqs) + len(act_reqs)))
-    print message
-    # if not confirm(message, 'n'):
-    #     defer.returnValue(None)
+    #print message
+    if not confirm(message, 'n'):
+        defer.returnValue(None)

     for reqid in inact_reqs:
         try:
@@ -170,6 +171,55 @@ def filter_prune(line):
     print 'Deleted %d requests' % len(inact_reqs)
     defer.returnValue(None)

+@defer.inlineCallbacks
+def _save_filters_to(key):
+    if key == '':
+        raise PappyException("Must give name to save filters as")
+    strs = pappyproxy.plugin.get_active_filter_strings()
+    yield save_context(key, strs, pappyproxy.http.dbpool)
+    defer.returnValue(strs)
+
+@crochet.wait_for(timeout=None)
+@defer.inlineCallbacks
+def save_filter_set(line):
+    if line == '':
+        raise PappyException("Must give name to save filters as")
+    strs = yield _save_filters_to(line)
+    print 'Filters saved to %s:' % line
+    for s in strs:
+        print '  %s' % s
+
+@crochet.wait_for(timeout=None)
+@defer.inlineCallbacks
+def load_filter_set(line):
+    if line == '':
+        raise PappyException("Must give name to save filters as")
+    strs = yield get_saved_context(line, pappyproxy.http.dbpool)
+    yield _save_filters_to('_')
+    pappyproxy.pappy.main_context.set_filters([])
+    for s in strs:
+        yield pappyproxy.pappy.main_context.add_filter_string(s)
+    print 'Set the context to:'
+    for s in strs:
+        print '  %s' % s
+
+def delete_filter_set(line):
+    if line == '':
+        raise PappyException("Must give name to save filters as")
+    delete_saved_context(line, pappyproxy.http.dbpool)
+
+@crochet.wait_for(timeout=None)
+@defer.inlineCallbacks
+def list_filter_set(line):
+    print 'Saved contexts:'
+    contexts = yield get_all_saved_contexts(pappyproxy.http.dbpool)
+    for k in sorted(contexts.keys()):
+        v = contexts[k]
+        print '%s' % k
+        for s in v:
+            print '  %s' % s
+        print ''
+
 ###############
 ## Plugin hooks

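The helpers above persist the active filter strings under a name and read them back later. As a rough sketch of the round-trip they implement (illustrative only; the function name and the key 'logged_in' are made up, and it assumes the code lives in this plugin module where these names are already imported):

    @crochet.wait_for(timeout=None)
    @defer.inlineCallbacks
    def _example_roundtrip():
        # Save the filters currently applied to the main context under a name
        strs = pappyproxy.plugin.get_active_filter_strings()
        yield save_context('logged_in', strs, pappyproxy.http.dbpool)
        # ...and read them back later; load_filter_set() does the same, after
        # first stashing the previous filters under the key '_'
        saved = yield get_saved_context('logged_in', pappyproxy.http.dbpool)
        defer.returnValue(saved)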
@@ -185,6 +235,10 @@ def load_cmds(cmd):
         'filter_up': (filter_up, None),
         'builtin_filter': (builtin_filter, complete_builtin_filter),
         'filter': (filtercmd, complete_filtercmd),
+        'save_context': (save_filter_set, None),
+        'load_context': (load_filter_set, None),
+        'delete_context': (delete_filter_set, None),
+        'list_contexts': (list_filter_set, None),
     })
     cmd.add_aliases([
         #('filter_prune', ''),
@@ -198,4 +252,8 @@ def load_cmds(cmd):
         ('builtin_filter', 'fbi'),
         ('filter', 'f'),
         ('filter', 'fl'),
+        ('save_context', 'sc'),
+        ('load_context', 'lc'),
+        ('delete_context', 'dc'),
+        ('list_contexts', 'cls'),
     ])
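Together with the command registrations above, these aliases expose the feature from the console: something like "sc logged_in" saves the current filters under the name logged_in, "lc logged_in" loads them back (the filters in effect beforehand are stashed under "_"), "cls" lists every saved context, and "dc logged_in" deletes one. The context name is illustrative; the alias-to-command mapping is exactly the one registered above.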
@@ -111,7 +111,7 @@ def complete_run_int_macro(text, line, begidx, endidx):
     running = []
     not_running = []
     for macro in loaded_int_macros:
-        if macro.name in [m.name for k, m in active_intercepting_macros().iteritems()]:
+        if macro.name in [m.name for m in active_intercepting_macros()]:
             running.append(macro)
         else:
             not_running.append(macro)
@@ -151,7 +151,7 @@ def complete_stop_int_macro(text, line, begidx, endidx):
     running = []
     not_running = []
     for macro in loaded_int_macros:
-        if macro.name in [m.name for k, m in active_intercepting_macros().iteritems()]:
+        if macro.name in [m.name for m in active_intercepting_macros()]:
             running.append(macro)
         else:
             not_running.append(macro)
@@ -187,7 +187,7 @@ def list_int_macros(line):
     running = []
     not_running = []
    for macro in loaded_int_macros:
-        if macro.name in [m.name for k, m in active_intercepting_macros().iteritems()]:
+        if macro.name in [m.name for m in active_intercepting_macros()]:
             running.append(macro)
         else:
             not_running.append(macro)
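All three hunks above make the same adjustment: active_intercepting_macros() is no longer treated as a name-keyed dict, so the .iteritems() unpacking goes away and the membership test iterates over the macro objects directly. A minimal before/after sketch of just that expression (the surrounding completer code is unchanged):

    # before: iterate (key, macro) pairs of a dict
    [m.name for k, m in active_intercepting_macros().iteritems()]
    # after: iterate the macro objects themselves
    [m.name for m in active_intercepting_macros()]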
@@ -27,8 +27,10 @@ class MangleInterceptMacro(InterceptMacro):
         self.name = 'Pappy Interceptor Macro'
         self.intercept_requests = False
         self.intercept_responses = False
+        self.intercept_ws = False
         self.async_req = True
         self.async_rsp = True
+        self.async_ws = True

     def __repr__(self):
         return "<MangleInterceptingMacro>"
@@ -99,6 +101,38 @@ class MangleInterceptMacro(InterceptMacro):
             retrsp = mangled_rsp

         defer.returnValue(retrsp)

+    @defer.inlineCallbacks
+    def async_mangle_ws(self, request, message):
+        # This function gets called to mangle/edit websocket messages passed through the proxy
+
+        retmsg = message
+        # Write original message to the temp file
+        with tempfile.NamedTemporaryFile(delete=False) as tf:
+            tfName = tf.name
+            tf.write(retmsg.contents)
+
+        # Have the console edit the file
+        yield edit_file(tfName, front=True)
+
+        # Create new mangled message from edited file
+        with open(tfName, 'r') as f:
+            text = f.read()
+
+        os.remove(tfName)
+
+        # Check if dropped
+        if text == '':
+            pappyproxy.proxy.log('Websocket message dropped!')
+            defer.returnValue(None)
+
+        mangled_message = message.copy()
+        mangled_message.contents = text
+
+        if mangled_message.contents != message.contents:
+            retmsg = mangled_message
+
+        defer.returnValue(retmsg)
+

 ###############
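async_mangle_ws gives intercepting macros the same hook for websocket messages that already exists for requests and responses. As a rough sketch of a user macro built on that hook (not part of this commit; the class name and transformation are made up, and it assumes the InterceptMacro base class is importable from pappyproxy.macros):

    from twisted.internet import defer
    from pappyproxy.macros import InterceptMacro

    class UppercaseWsMacro(InterceptMacro):
        # Hypothetical macro: upper-cases every websocket message it sees
        def __init__(self):
            InterceptMacro.__init__(self)
            self.name = 'Uppercase websocket messages'
            self.intercept_ws = True
            self.async_ws = True

        @defer.inlineCallbacks
        def async_mangle_ws(self, request, message):
            mangled = message.copy()
            mangled.contents = mangled.contents.upper()
            yield defer.succeed(None)   # placeholder for real asynchronous work
            defer.returnValue(mangled)

    # Registration would go through add_intercepting_macro(), as the intercept()
    # hunk below does for the built-in mangle macro.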
@@ -155,29 +189,38 @@ def intercept(line):
     args = shlex.split(line)
     intercept_requests = False
     intercept_responses = False
+    intercept_ws = True

     req_names = ('req', 'request', 'requests')
     rsp_names = ('rsp', 'response', 'responses')
+    ws_names = ('ws', 'websocket')

     if any(a in req_names for a in args):
         intercept_requests = True
     if any(a in rsp_names for a in args):
         intercept_responses = True
+    if any(a in ws_names for a in args):
+        intercept_ws = True
     if not args:
         intercept_requests = True

-    if intercept_requests and intercept_responses:
-        intercept_str = 'Requests and responses'
-    elif intercept_requests:
-        intercept_str = 'Requests'
-    elif intercept_responses:
-        intercept_str = 'Responses'
-    else:
-        intercept_str = 'NOTHING'
+    intercepting = []
+    if intercept_requests:
+        intercepting.append('Requests')
+    if intercept_responses:
+        intercepting.append('Responses')
+    if intercept_ws:
+        intercepting.append('Websocket Messages')
+    if not intercept_requests and not intercept_responses and not intercept_ws:
+        intercept_str = 'NOTHING'
+    else:
+        intercept_str = ', '.join(intercepting)

     mangle_macro = MangleInterceptMacro()
     mangle_macro.intercept_requests = intercept_requests
     mangle_macro.intercept_responses = intercept_responses
+    mangle_macro.intercept_ws = intercept_ws

     add_intercepting_macro('pappy_intercept', mangle_macro)

@@ -200,7 +200,7 @@ def submit(line):
     """
     Resubmit some requests, optionally with modified headers and cookies.

-    Usage: submit reqids [-h] [-m] [-u] [-p] [-c [COOKIES [COOKIES ...]]] [-d [HEADERS [HEADERS ...]]]
+    Usage: submit reqids [-h] [-m] [-u] [-p] [-o REQID] [-c [COOKIES [COOKIES ...]]] [-d [HEADERS [HEADERS ...]]]
     """

     parser = argparse.ArgumentParser(prog="submit", usage=submit.__doc__)
@@ -210,16 +210,25 @@ def submit(line):
     parser.add_argument('-p', '--uniquepath', action='store_true', help='Only resubmit one request per endpoint (ignoring URL parameters)')
     parser.add_argument('-c', '--cookies', nargs='*', help='Apply a cookie to requests before submitting')
     parser.add_argument('-d', '--headers', nargs='*', help='Apply a header to requests before submitting')
+    parser.add_argument('-o', '--copycookies', help='Copy the cookies used in another request')
     args = parser.parse_args(shlex.split(line))

     headers = {}
     cookies = {}
+    clear_cookies = False

     if args.headers:
         for h in args.headers:
             k, v = h.split('=', 1)
             headers[k] = v

+    if args.copycookies:
+        reqid = args.copycookies
+        req = yield Request.load_request(reqid)
+        clear_cookies = True
+        for k, v in req.cookies.all_pairs():
+            cookies[k] = v
+
     if args.cookies:
         for c in args.cookies:
             k, v = c.split('=', 1)
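The new -o/--copycookies option takes another request id, loads that request, and applies its cookies to everything being resubmitted; clear_cookies is set so the existing cookies are wiped first (see the next hunk). For example, an invocation along the lines of "submit 1,2,5 -o 12" would resubmit requests 1, 2 and 5 using the cookies from request 12 (the ids are illustrative).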
@@ -232,23 +241,9 @@ def submit(line):

     reqs = yield load_reqlist(args.reqids)

-    if args.unique or args.uniquepath:
-        endpoints = set()
-        new_reqs = []
-        for r in reqs:
-            if args.unique:
-                s = r.url
-            else:
-                s = r.path
-
-            if not s in endpoints:
-                new_reqs.append(r.copy())
-                endpoints.add(s)
-        reqs = new_reqs
-    else:
-        reqs = [r.copy() for r in reqs]
-
     for req in reqs:
+        if clear_cookies:
+            req.cookies.clear()
         newsession.apply_req(req)

     conf_message = "You're about to submit %d requests, continue?" % len(reqs)
@@ -258,12 +253,9 @@ def submit(line):
     for r in reqs:
         r.tags.add('resubmitted')

-    if args.inmem:
-        yield async_submit_requests(reqs)
-        for req in reqs:
-            add_to_history(req)
-    else:
-        yield async_submit_requests(reqs, save=True)
+    save = not args.inmem
+    yield async_submit_requests(reqs, save=save, save_in_mem=args.inmem,
+                                unique_paths=args.uniquepath, unique_path_and_args=args.unique)

 def load_cmds(cmd):
     cmd.set_cmds({
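The last two hunks of submit() remove the in-function dedup and in-memory branches: filtering by path or by full URL, and the choice of whether to save, now ride along on the async_submit_requests() call itself. Restated with comments (the same call as above, with values taken straight from the parsed arguments):

    yield async_submit_requests(reqs,
                                save=not args.inmem,                 # persist unless submitting in-memory only
                                save_in_mem=args.inmem,
                                unique_paths=args.uniquepath,        # one request per path
                                unique_path_and_args=args.unique)    # one request per path + query string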
@@ -4,10 +4,11 @@ import json
 import pappyproxy
 import pygments
 import pprint
+import re
 import shlex
 import urllib

-from pappyproxy.util import PappyException, utc2local, load_reqlist, print_table, print_request_rows, get_req_data_row
+from pappyproxy.util import PappyException, utc2local, load_reqlist, print_table, print_request_rows, get_req_data_row, datetime_string, maybe_hexdump
 from pappyproxy.http import Request, repeatable_parse_qs
 from twisted.internet import defer
 from pappyproxy.plugin import async_main_context_ids
@@ -19,11 +20,58 @@ from pygments.lexers.html import XmlLexer
 ###################
 ## Helper functions

-def view_full_message(request, headers_only=False):
+def view_full_message(request, headers_only=False, try_ws=False):
+    def _print_message(mes):
+        print_str = ''
+        if mes.direction == 'INCOMING':
+            print_str += Colors.BLUE
+            print_str += '< Incoming'
+        elif mes.direction == 'OUTGOING':
+            print_str += Colors.GREEN
+            print_str += '> Outgoing'
+        else:
+            print_str += Colors.RED
+            print_str += '? ERROR: Unknown direction'
+        if mes.unmangled:
+            print_str += ', mangled'
+        print_str += ', binary = %s\n' % mes.is_binary
+        print_str += Colors.ENDC
+        print_str += maybe_hexdump(mes.contents)
+        print_str += '\n'
+        return print_str
+
     if headers_only:
         print request.headers_section_pretty
     else:
-        print request.full_message_pretty
+        if try_ws and request.websocket_messages:
+            print_str = ''
+            print_str += Styles.TABLE_HEADER
+            print_str += "Websocket session handshake\n"
+            print_str += Colors.ENDC
+            print_str += request.full_message_pretty
+            print_str += '\n'
+            print_str += Styles.TABLE_HEADER
+            print_str += "Websocket session \n"
+            print_str += Colors.ENDC
+            for mes in request.websocket_messages:
+                print_str += _print_message(mes)
+                if mes.unmangled:
+                    print_str += Colors.YELLOW
+                    print_str += '-'*10
+                    print_str += Colors.ENDC
+                    print_str += ' ^^ UNMANGLED ^^ '
+                    print_str += Colors.YELLOW
+                    print_str += '-'*10
+                    print_str += Colors.ENDC
+                    print_str += '\n'
+                    print_str += _print_message(mes.unmangled)
+                    print_str += Colors.YELLOW
+                    print_str += '-'*20 + '-'*len(' ^^ UNMANGLED ^^ ')
+                    print_str += '\n'
+                    print_str += Colors.ENDC
+            print print_str
+        else:
+            print request.full_message_pretty

 def print_request_extended(request):
     # Prints extended info for the request
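view_full_message() keeps its old behavior by default; only when try_ws is passed and the request carries websocket_messages does it print the handshake followed by each message (with the unmangled original underneath, when one exists). A caller elsewhere in the plugins would use it roughly like this (the request id is illustrative, and the fragment assumes an @defer.inlineCallbacks context as used throughout these files):

    req = yield Request.load_request('12')
    view_full_message(req, try_ws=True)   # handshake plus websocket session, if any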
@@ -63,8 +111,7 @@ def print_request_extended(request):
         is_ssl = 'NO'

     if request.time_start:
-        dtobj = utc2local(request.time_start)
-        time_made_str = dtobj.strftime('%a, %b %d, %Y, %I:%M:%S %p')
+        time_made_str = datetime_string(request.time_start)
     else:
         time_made_str = '--'

@@ -325,7 +372,7 @@ def view_full_request(line):
     for req in reqs:
         if len(reqs) > 1:
             print 'Request %s:' % req.reqid
-        view_full_message(req)
+        view_full_message(req, try_ws=True)
         if len(reqs) > 1:
             print '-'*30
             print ''
@@ -498,6 +545,20 @@ def get_param_info(line):
             add_param(found_params, 'Cookie', k, v, req.reqid)
     print_param_info(found_params)

+@crochet.wait_for(timeout=None)
+@defer.inlineCallbacks
+def find_urls(line):
+    args = shlex.split(line)
+    reqs = yield load_reqlist(args[0])
+
+    url_regexp = r'((?:http|ftp|https)://(?:[\w_-]+(?:(?:\.[\w_-]+)+))(?:[\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?)'
+    urls = set()
+    for req in reqs:
+        urls |= set(re.findall(url_regexp, req.full_message))
+        if req.response:
+            urls |= set(re.findall(url_regexp, req.response.full_message))
+    for url in sorted(urls):
+        print url
+
 @crochet.wait_for(timeout=None)
 @defer.inlineCallbacks
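find_urls() scans each request and its response (when present) with a single URL regular expression and prints the sorted, de-duplicated matches. A quick illustration of what that expression pulls out of a blob of text (the sample data is made up; the pattern is the one defined above):

    import re

    url_regexp = r'((?:http|ftp|https)://(?:[\w_-]+(?:(?:\.[\w_-]+)+))(?:[\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?)'
    sample = 'see https://example.com/a?x=1 and http://test.example.org/path.'
    print sorted(set(re.findall(url_regexp, sample)))
    # ['http://test.example.org/path', 'https://example.com/a?x=1']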
@@ -568,6 +629,7 @@ def load_cmds(cmd):
         'param_info': (get_param_info, None),
         'site_map': (site_map, None),
         'dump_response': (dump_response, None),
+        'urls': (find_urls, None),
     })
     cmd.add_aliases([
         ('list', 'ls'),