Version 0.2.13

This commit is contained in:
Rob Glew 2016-10-12 14:07:13 -05:00
parent 54c1f5e409
commit f8795a4c48
20 changed files with 2425 additions and 875 deletions

View file

@@ -54,7 +54,7 @@ class CommServer(LineReceiver):
def action_error_handler(self, error, result):
if debug:
print error.getTraceback()
return_data = {'success': False, 'message': str(error.getErrorMessage())}
return_data = {'success': False, 'message': str(error.getTraceback())}
return_data.update(result)
error.trap(Exception)
self.sendLine(json.dumps(return_data))
@@ -85,7 +85,7 @@ class CommServer(LineReceiver):
raise PappyException("Request with given ID does not exist, cannot fetch associated response.")
if req.response:
rsp = yield Response.load_response(req.response.rspid)
rsp = req.response
dat = json.loads(rsp.to_json())
else:
dat = {}
@@ -100,9 +100,7 @@ class CommServer(LineReceiver):
req.host = data['host'].encode('utf-8')
req.port = data['port']
req.is_ssl = data['is_ssl']
yield Request.submit_request(req,
save_request=True,
intercepting_macros=active_intercepting_macros())
yield req.async_submit(mangle=True)
if 'tags' in data:
req.tags = set(data['tags'])
yield req.async_deep_save()

View file

@@ -48,10 +48,10 @@ class PappyConfig(object):
.. data:: listeners
The list of active listeners. It is a list of tuples of the format (port, interface)
The list of active listeners. It is a list of dictionaries of the form `{"port": 8000, "interface": "127.0.0.1"}`
Not modifiable after startup. Configured in the ``config.json`` file for the project.
:Default: ``[(8000, '127.0.0.1')]``
:Default: ``[]``
.. data:: socks_proxy
@@ -149,7 +149,7 @@ class PappyConfig(object):
self.debug_to_file = False
self.debug_verbosity = 0
self.listeners = [(8000, '127.0.0.1')]
self.listeners = []
self.socks_proxy = None
self.http_proxy = None
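Under the new dictionary format, a project's config.json would declare listeners roughly like this (a sketch; the proxy_listeners key and the values shown are assumptions based on the docstring above):

{
    "data_file": "./data.db",
    "proxy_listeners": [
        {"port": 8000, "interface": "127.0.0.1"}
    ]
}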

View file

@@ -1,8 +1,9 @@
import crochet
import re
import shlex
import json
from .http import Request, RepeatableDict
from .http import Request, Response, RepeatableDict
from twisted.internet import defer
from util import PappyException
@@ -53,6 +54,15 @@ class Context(object):
self.active_filters.append(filt)
self.cache_reset()
@defer.inlineCallbacks
def add_filter_string(self, filtstr):
"""
Add a filter to the context by filter string
"""
f = Filter(filtstr)
yield f.generate()
self.add_filter(f)
def filter_up(self):
"""
Removes the last filter that was applied to the context.
@@ -452,6 +462,15 @@ def gen_filter_by_inverse(args):
def f(req):
return not filt(req)
defer.returnValue(f)
def gen_filter_by_websocket(args):
def f(req):
if not req.response:
return False
if Response.is_ws_upgrade(req.response):
return True
return False
return f
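A filter generator returns a predicate over requests, so the new websocket filter can be applied by hand like this (a usage sketch; the reqs list is hypothetical, and the "websocket"/"ws" keys registered further down expose the same filter from the console):

filt = gen_filter_by_websocket([])  # this generator ignores its args
ws_reqs = [r for r in reqs if filt(r)]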
@defer.inlineCallbacks
def filter_reqs(reqids, filters):
@@ -568,6 +587,64 @@ def async_set_tag(tag, reqs):
Request.cache.add(req)
reset_context_caches()
@defer.inlineCallbacks
def save_context(name, filter_strings, dbpool):
"""
Saves the filter strings to the datafile using their name
"""
rows = yield dbpool.runQuery(
"""
SELECT id FROM saved_contexts WHERE context_name=?;
""", (name,)
)
list_str = json.dumps(filter_strings)
if len(rows) > 0:
yield dbpool.runQuery(
"""
UPDATE saved_contexts SET filter_strings=?
WHERE context_name=?;
""", (list_str, name)
)
else:
yield dbpool.runQuery(
"""
INSERT INTO saved_contexts (context_name, filter_strings)
VALUES (?,?);
""", (name, list_str)
)
@defer.inlineCallbacks
def delete_saved_context(name, dbpool):
yield dbpool.runQuery(
"""
DELETE FROM saved_contexts WHERE context_name=?;
""", (name,)
)
@defer.inlineCallbacks
def get_saved_context(name, dbpool):
rows = yield dbpool.runQuery(
"""
SELECT filter_strings FROM saved_contexts WHERE context_name=?;
""", (name,)
)
if len(rows) == 0:
raise PappyException("Saved context with name %s does not exist" % name)
filter_strs = json.loads(rows[0][0])
defer.returnValue(filter_strs)
@defer.inlineCallbacks
def get_all_saved_contexts(dbpool):
rows = yield dbpool.runQuery(
"""
SELECT context_name, filter_strings FROM saved_contexts;
""",
)
all_strs = {}
for row in rows:
all_strs[row[0]] = json.loads(row[1])
defer.returnValue(all_strs)
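A round-trip sketch for these helpers, assuming a Twisted dbpool and an inlineCallbacks caller (the context name and filter string are illustrative):

@defer.inlineCallbacks
def example(dbpool):
    # INSERT or UPDATE the filter list stored under "logins"
    yield save_context('logins', ['host ct example.com'], dbpool)
    # read it back; raises PappyException for an unknown name
    strs = yield get_saved_context('logins', dbpool)
    defer.returnValue(strs)  # ['host ct example.com']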
@crochet.wait_for(timeout=180.0)
@defer.inlineCallbacks
def set_tag(tag, reqs):
@@ -660,6 +737,9 @@ class Filter(object):
"saved": gen_filter_by_saved,
"svd": gen_filter_by_saved,
"websocket": gen_filter_by_websocket,
"ws": gen_filter_by_websocket,
}
_async_filter_functions = {

File diff suppressed because it is too large

View file

@@ -92,9 +92,11 @@ class InterceptMacro(object):
self.short_name = None
self.intercept_requests = False
self.intercept_responses = False
self.intercept_ws = False
self.async_req = False
self.async_rsp = False
self.async_ws = False
def __repr__(self):
return "<InterceptingMacro (%s)>" % self.name
@@ -108,6 +110,9 @@ class InterceptMacro(object):
def mangle_response(self, request):
return request.response
def mangle_ws(self, request, message):
return message
@defer.inlineCallbacks
def async_mangle_request(self, request):
defer.returnValue(request)
@@ -115,6 +120,10 @@ class InterceptMacro(object):
@defer.inlineCallbacks
def async_mangle_response(self, request):
defer.returnValue(request.response)
@defer.inlineCallbacks
def async_mangle_ws(self, request, message):
defer.returnValue(message)
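To illustrate the new hook, a minimal sketch of a file-based intercepting macro that reverses text websocket messages (mangle_ws's signature comes from this diff; the module name is hypothetical and the message attributes follow the mangle code further down):

# int_wsreverse.py -- hypothetical intercepting macro
MACRO_NAME = 'Reverse websocket messages'

def mangle_ws(request, message):
    # request is the handshake request, message the websocket message
    if not message.is_binary:
        mangled = message.copy()
        mangled.contents = message.contents[::-1]  # reverse the payload
        return mangled
    return message  # returning None would drop the message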
class FileInterceptMacro(InterceptMacro):
"""
@@ -165,6 +174,9 @@ class FileInterceptMacro(InterceptMacro):
if hasattr(self.source, 'mangle_response') and \
hasattr(self.source, 'async_mangle_response'):
raise PappyException('Intercepting macro in %s cannot define both mangle_response and async_mangle_response' % self.filename)
if hasattr(self.source, 'mangle_ws') and \
hasattr(self.source, 'async_mangle_ws'):
raise PappyException('Intercepting macro in %s cannot define both mangle_ws and async_mangle_ws' % self.filename)
else:
self.source = None
@@ -187,6 +199,15 @@ class FileInterceptMacro(InterceptMacro):
else:
self.intercept_responses = False
if self.source and hasattr(self.source, 'mangle_ws'):
self.intercept_ws = True
self.async_ws = False
elif self.source and hasattr(self.source, 'async_mangle_ws'):
self.intercept_ws = True
self.async_ws = True
else:
self.intercept_ws = False
def init(self, args):
if hasattr(self.source, 'init'):
self.source.init(args)
@@ -203,6 +224,12 @@ class FileInterceptMacro(InterceptMacro):
return rsp
return request.response
def mangle_ws(self, request, message):
if hasattr(self.source, 'mangle_ws'):
mangled_ws = self.source.mangle_ws(request, message)
return mangled_ws
return message
@defer.inlineCallbacks
def async_mangle_request(self, request):
if hasattr(self.source, 'async_mangle_request'):
@@ -275,6 +302,73 @@ class MacroTemplate(object):
def template_argstring(cls, template):
return cls._template_data[template][2]
## Other functions
@defer.inlineCallbacks
def async_mangle_ws(self, request, message):
if hasattr(self.source, 'async_mangle_ws'):
mangled_ws = yield self.source.async_mangle_ws(request, message)
defer.returnValue(mangled_ws)
defer.returnValue(message)
class MacroTemplate(object):
_template_data = {
'macro': ('macro.py.template',
'Generic macro template',
'[reqids]',
'macro_{fname}.py',
gen_template_args_macro),
'intmacro': ('intmacro.py.template',
'Generic intercepting macro template',
'',
'int_{fname}.py',
gen_template_generator_noargs('intmacro')),
'modheader': ('macro_header.py.template',
'Modify a header in the request and the response if it exists.',
'',
'int_{fname}.py',
gen_template_generator_noargs('modheader')),
'resubmit': ('macro_resubmit.py.template',
'Resubmit all in-context requests',
'',
'macro_{fname}.py',
gen_template_generator_noargs('resubmit')),
}
@classmethod
def fill_template(cls, template, subs):
loader = FileSystemLoader(session.config.pappy_dir+'/templates')
env = Environment(loader=loader)
template = env.get_template(cls._template_data[template][0])
return template.render(zip=zip, **subs)
@classmethod
@defer.inlineCallbacks
def fill_template_args(cls, template, args=[]):
ret = cls._template_data[template][4](args)
if isinstance(ret, defer.Deferred):
ret = yield ret
defer.returnValue(ret)
@classmethod
def template_filename(cls, template, fname):
return cls._template_data[template][3].format(fname=fname)
@classmethod
def template_list(cls):
return [k for k, v in cls._template_data.iteritems()]
@classmethod
def template_description(cls, template):
return cls._template_data[template][1]
@classmethod
def template_argstring(cls, template):
return cls._template_data[template][2]
## Other functions
def load_macros(loc):
@@ -376,7 +470,7 @@ def mangle_request(request, intmacros):
defer.returnValue((request, False))
cur_req = request.copy()
for k, macro in intmacros.iteritems():
for macro in intmacros:
if macro.intercept_requests:
if macro.async_req:
cur_req = yield macro.async_mangle_request(cur_req.copy())
@@ -389,7 +483,8 @@ def mangle_request(request, intmacros):
mangled = False
if not cur_req == request or \
not cur_req.host == request.host or \
not cur_req.port == request.port:
not cur_req.port == request.port or \
not cur_req.is_ssl == request.is_ssl:
# copy unique data to new request and clear it off old one
cur_req.unmangled = request
cur_req.unmangled.is_unmangled_version = True
@@ -415,10 +510,10 @@ def mangle_response(request, intmacros):
defer.returnValue(False)
old_rsp = request.response
# We copy so that changes to request.response doesn't mangle the original response
request.response = request.response.copy()
for k, macro in intmacros.iteritems():
for macro in intmacros:
if macro.intercept_responses:
# We copy so that changes to request.response doesn't mangle the original response
request.response = request.response.copy()
if macro.async_rsp:
request.response = yield macro.async_mangle_response(request)
else:
@@ -437,3 +532,31 @@ def mangle_response(request, intmacros):
else:
request.response = old_rsp
defer.returnValue(mangled)
@defer.inlineCallbacks
def mangle_websocket_message(message, request, intmacros):
# Mangle messages with list of intercepting macros
if not intmacros:
defer.returnValue((message, False))
cur_msg = message.copy()
for macro in intmacros:
if macro.intercept_ws:
if macro.async_ws:
cur_msg = yield macro.async_mangle_ws(request, cur_msg.copy())
else:
cur_msg = macro.mangle_ws(request, cur_msg.copy())
if cur_msg is None:
defer.returnValue((None, True))
mangled = False
if not cur_msg == message:
# copy unique data to the new message and clear it off the old one
cur_msg.unmangled = message
cur_msg.unmangled.is_unmangled_version = True
mangled = True
else:
# return the original message
cur_msg = message
defer.returnValue((cur_msg, mangled))

View file

@@ -9,8 +9,13 @@ The :class:`pappyproxy.pappy.PappySession` object for the current session. Mainly
used for accessing the session's config information.
"""
import argparse
import crochet
import txaio
crochet.no_setup()
txaio.use_twisted()
import argparse
import datetime
import os
import schema.update
@@ -33,7 +38,6 @@ from twisted.internet.error import CannotListenError
from twisted.internet.protocol import ServerFactory
from twisted.internet.threads import deferToThread
crochet.no_setup()
main_context = context.Context()
all_contexts = [main_context]
@@ -107,7 +111,8 @@ class PappySession(object):
listen_strs = []
self.ports = []
for listener in self.config.listeners:
server_factory = proxy.ProxyServerFactory(save_all=True)
#server_factory = proxy.ProxyServerFactory(save_all=True)
server_factory = proxy.ProxyProtocolFactory()
try:
if 'forward_host_ssl' in listener and listener['forward_host_ssl']:
server_factory.force_ssl = True
@@ -284,8 +289,8 @@ def inturrupt_handler(signal, frame):
quit_confirm_time = datetime.datetime.now() + datetime.timedelta(0, 10)
else:
d = session.cleanup()
d.addCallback(lambda _: reactor.stop())
d.addCallback(lambda _: os._exit(1)) # Sorry blocking threads :(
d.addBoth(lambda _: reactor.stop())
d.addBoth(lambda _: os._exit(1)) # Sorry blocking threads :(
if __name__ == '__main__':
start()

View file

@@ -13,8 +13,6 @@ import stat
import crochet
from twisted.internet import defer
from .proxy import add_intercepting_macro as proxy_add_intercepting_macro
from .proxy import remove_intercepting_macro as proxy_remove_intercepting_macro
from .colors import Colors
from .util import PappyException
@@ -95,7 +93,7 @@ def add_intercepting_macro(name, macro):
passed along.
"""
for factory in pappyproxy.pappy.session.server_factories:
proxy_add_intercepting_macro(name, macro, factory.intercepting_macros)
factory.add_intercepting_macro(macro, name=name)
def remove_intercepting_macro(name):
"""
@@ -105,17 +103,18 @@ def remove_intercepting_macro(name):
macro you would like to stop.
"""
for factory in pappyproxy.pappy.session.server_factories:
proxy_remove_intercepting_macro(name, factory.intercepting_macros)
factory.remove_intercepting_macro(name=name)
def active_intercepting_macros():
"""
Returns a list of the active intercepting macro objects. Modifying
this list will not affect which macros are active.
"""
ret = {}
for factory in pappyproxy.pappy.session.server_factories:
for k, v in factory.intercepting_macros.iteritems():
ret[k] = v
# every factory should have the same int macros so screw it we'll
# just use the macros from the first one
ret = []
if len(pappyproxy.pappy.session.server_factories) > 0:
ret = pappyproxy.pappy.session.server_factories[0].get_macro_list()
return ret
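Plugin code consuming this API now iterates a list rather than a dict, e.g. (sketch):

running = [m.name for m in active_intercepting_macros()]
if not running:
    print 'No intercepting macros are active'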
def in_memory_reqs():
@@ -181,6 +180,16 @@ def add_to_history(req):
pappyproxy.http.Request.cache.add(req)
pappyproxy.context.reset_context_caches()
def get_active_filter_strings():
"""
Returns a list of filter strings representing the currently active filters
"""
filts = pappyproxy.pappy.main_context.active_filters
strs = []
for f in filts:
strs.append(f.filter_string)
return strs
def run_cmd(cmd):
"""
Run a command as if you typed it into the console. Try and use
@@ -217,3 +226,32 @@ def require_modules(*largs):
return func(*args, **kwargs)
return wr2
return wr
def set_context_to_saved(name):
"""
Sets the current context to the context saved under the given name.
Raises PappyException if name does not exist
"""
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def delete_saved_context(name):
"""
Deletes the saved context with the given name.
Raises PappyException if name does not exist
"""
def save_current_context(name):
"""
Saves the current context under the given name.
"""
def save_context(name, filter_strs):
"""
Takes a list of filter strings and saves it as a context under the given name.
:param name: The name to save the context under
:type name: string
:param filter_strs: The in-order list of filter strings of the context to save.
:type filter_strs: List of strings
"""

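Together these give plugins a snapshot/restore workflow along these lines (a usage sketch; the context name is illustrative):

save_current_context('before_scan')   # persist the active filter strings
# ... swap filters around, run a scan, etc. ...
set_context_to_saved('before_scan')   # raises PappyException if missing
delete_saved_context('before_scan')   # remove the saved entry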
View file

@@ -3,6 +3,7 @@ import pappyproxy
from pappyproxy.util import PappyException, confirm, autocomplete_startswith
from pappyproxy.http import Request
from pappyproxy.context import save_context, delete_saved_context, get_saved_context, get_all_saved_contexts
from twisted.internet import defer
class BuiltinFilters(object):
@@ -157,9 +158,9 @@ def filter_prune(line):
act_reqs = yield pappyproxy.pappy.main_context.get_reqs()
inact_reqs = set(Request.cache.req_ids()).difference(set(act_reqs))
message = 'This will delete %d/%d requests. You can NOT undo this!! Continue?' % (len(inact_reqs), (len(inact_reqs) + len(act_reqs)))
print message
# if not confirm(message, 'n'):
# defer.returnValue(None)
#print message
if not confirm(message, 'n'):
defer.returnValue(None)
for reqid in inact_reqs:
try:
@@ -170,6 +171,55 @@ def filter_prune(line):
print 'Deleted %d requests' % len(inact_reqs)
defer.returnValue(None)
@defer.inlineCallbacks
def _save_filters_to(key):
if key == '':
raise PappyException("Must give name to save filters as")
strs = pappyproxy.plugin.get_active_filter_strings()
yield save_context(key, strs, pappyproxy.http.dbpool)
defer.returnValue(strs)
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def save_filter_set(line):
if line == '':
raise PappyException("Must give name to save filters as")
strs = yield _save_filters_to(line)
print 'Filters saved to %s:' % line
for s in strs:
print ' %s' % s
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def load_filter_set(line):
if line == '':
raise PappyException("Must give name of the filter set to load")
strs = yield get_saved_context(line, pappyproxy.http.dbpool)
yield _save_filters_to('_')
pappyproxy.pappy.main_context.set_filters([])
for s in strs:
yield pappyproxy.pappy.main_context.add_filter_string(s)
print 'Set the context to:'
for s in strs:
print ' %s' % s
def delete_filter_set(line):
if line == '':
raise PappyException("Must give name of the filter set to delete")
delete_saved_context(line, pappyproxy.http.dbpool)
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def list_filter_set(line):
print 'Saved contexts:'
contexts = yield get_all_saved_contexts(pappyproxy.http.dbpool)
for k in sorted(contexts.keys()):
v = contexts[k]
print '%s' % k
for s in v:
print ' %s' % s
print ''
###############
## Plugin hooks
@@ -185,6 +235,10 @@ def load_cmds(cmd):
'filter_up': (filter_up, None),
'builtin_filter': (builtin_filter, complete_builtin_filter),
'filter': (filtercmd, complete_filtercmd),
'save_context': (save_filter_set, None),
'load_context': (load_filter_set, None),
'delete_context': (delete_filter_set, None),
'list_contexts': (list_filter_set, None),
})
cmd.add_aliases([
#('filter_prune', ''),
@@ -198,4 +252,8 @@ def load_cmds(cmd):
('builtin_filter', 'fbi'),
('filter', 'f'),
('filter', 'fl'),
('save_context', 'sc'),
('load_context', 'lc'),
('delete_context', 'dc'),
('list_contexts', 'cls'),
])
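From the console, the new commands and their aliases would be used roughly like this (session sketch; the filter string is illustrative):

pappy> f host ct example.com
pappy> sc logins     # save the current filters as "logins"
pappy> lc logins     # restore that filter set later
pappy> cls           # list every saved filter set
pappy> dc logins     # delete the saved set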

View file

@@ -111,7 +111,7 @@ def complete_run_int_macro(text, line, begidx, endidx):
running = []
not_running = []
for macro in loaded_int_macros:
if macro.name in [m.name for k, m in active_intercepting_macros().iteritems()]:
if macro.name in [m.name for m in active_intercepting_macros()]:
running.append(macro)
else:
not_running.append(macro)
@@ -151,7 +151,7 @@ def complete_stop_int_macro(text, line, begidx, endidx):
running = []
not_running = []
for macro in loaded_int_macros:
if macro.name in [m.name for k, m in active_intercepting_macros().iteritems()]:
if macro.name in [m.name for m in active_intercepting_macros()]:
running.append(macro)
else:
not_running.append(macro)
@@ -187,7 +187,7 @@ def list_int_macros(line):
running = []
not_running = []
for macro in loaded_int_macros:
if macro.name in [m.name for k, m in active_intercepting_macros().iteritems()]:
if macro.name in [m.name for m in active_intercepting_macros()]:
running.append(macro)
else:
not_running.append(macro)

View file

@@ -27,8 +27,10 @@ class MangleInterceptMacro(InterceptMacro):
self.name = 'Pappy Interceptor Macro'
self.intercept_requests = False
self.intercept_responses = False
self.intercept_ws = False
self.async_req = True
self.async_rsp = True
self.async_ws = True
def __repr__(self):
return "<MangleInterceptingMacro>"
@@ -99,6 +101,38 @@ class MangleInterceptMacro(InterceptMacro):
retrsp = mangled_rsp
defer.returnValue(retrsp)
@defer.inlineCallbacks
def async_mangle_ws(self, request, message):
# This function gets called to mangle/edit websocket messages passed through the proxy
retmsg = message
# Write original message to the temp file
with tempfile.NamedTemporaryFile(delete=False) as tf:
tfName = tf.name
tf.write(retmsg.contents)
# Have the console edit the file
yield edit_file(tfName, front=True)
# Create new mangled message from edited file
with open(tfName, 'r') as f:
text = f.read()
os.remove(tfName)
# Check if dropped
if text == '':
pappyproxy.proxy.log('Websocket message dropped!')
defer.returnValue(None)
mangled_message = message.copy()
mangled_message.contents = text
if mangled_message.contents != message.contents:
retmsg = mangled_message
defer.returnValue(retmsg)
###############
@@ -155,29 +189,38 @@ def intercept(line):
args = shlex.split(line)
intercept_requests = False
intercept_responses = False
intercept_ws = False
req_names = ('req', 'request', 'requests')
rsp_names = ('rsp', 'response', 'responses')
ws_names = ('ws', 'websocket')
if any(a in req_names for a in args):
intercept_requests = True
if any(a in rsp_names for a in args):
intercept_responses = True
if any(a in ws_names for a in args):
intercept_ws = True
if not args:
intercept_requests = True
if intercept_requests and intercept_responses:
intercept_str = 'Requests and responses'
elif intercept_requests:
intercept_str = 'Requests'
elif intercept_responses:
intercept_str = 'Responses'
else:
intercepting = []
if intercept_requests:
intercepting.append('Requests')
if intercept_responses:
intercepting.append('Responses')
if intercept_ws:
intercepting.append('Websocket Messages')
if not intercept_requests and not intercept_responses and not intercept_ws:
intercept_str = 'NOTHING'
else:
intercept_str = ', '.join(intercepting)
mangle_macro = MangleInterceptMacro()
mangle_macro.intercept_requests = intercept_requests
mangle_macro.intercept_responses = intercept_responses
mangle_macro.intercept_ws = intercept_ws
add_intercepting_macro('pappy_intercept', mangle_macro)
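With the websocket keywords wired in, the command accepts them alongside the old ones (console sketch, using the existing ic alias for intercept):

pappy> ic ws         # intercept only websocket messages
pappy> ic req rsp    # intercept requests and responses
pappy> ic            # no arguments: defaults to intercepting requests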

View file

@@ -200,7 +200,7 @@ def submit(line):
"""
Resubmit some requests, optionally with modified headers and cookies.
Usage: submit reqids [-h] [-m] [-u] [-p] [-c [COOKIES [COOKIES ...]]] [-d [HEADERS [HEADERS ...]]]
Usage: submit reqids [-h] [-m] [-u] [-p] [-o REQID] [-c [COOKIES [COOKIES ...]]] [-d [HEADERS [HEADERS ...]]]
"""
parser = argparse.ArgumentParser(prog="submit", usage=submit.__doc__)
@@ -210,16 +210,25 @@ def submit(line):
parser.add_argument('-p', '--uniquepath', action='store_true', help='Only resubmit one request per endpoint (ignoring URL parameters)')
parser.add_argument('-c', '--cookies', nargs='*', help='Apply a cookie to requests before submitting')
parser.add_argument('-d', '--headers', nargs='*', help='Apply a header to requests before submitting')
parser.add_argument('-o', '--copycookies', help='Copy the cookies used in another request')
args = parser.parse_args(shlex.split(line))
headers = {}
cookies = {}
clear_cookies = False
if args.headers:
for h in args.headers:
k, v = h.split('=', 1)
headers[k] = v
if args.copycookies:
reqid = args.copycookies
req = yield Request.load_request(reqid)
clear_cookies = True
for k, v in req.cookies.all_pairs():
cookies[k] = v
if args.cookies:
for c in args.cookies:
k, v = c.split('=', 1)
@@ -232,23 +241,9 @@ def submit(line):
reqs = yield load_reqlist(args.reqids)
if args.unique or args.uniquepath:
endpoints = set()
new_reqs = []
for r in reqs:
if args.unique:
s = r.url
else:
s = r.path
if not s in endpoints:
new_reqs.append(r.copy())
endpoints.add(s)
reqs = new_reqs
else:
reqs = [r.copy() for r in reqs]
for req in reqs:
if clear_cookies:
req.cookies.clear()
newsession.apply_req(req)
conf_message = "You're about to submit %d requests, continue?" % len(reqs)
@@ -258,12 +253,9 @@ def submit(line):
for r in reqs:
r.tags.add('resubmitted')
if args.inmem:
yield async_submit_requests(reqs)
for req in reqs:
add_to_history(req)
else:
yield async_submit_requests(reqs, save=True)
save = not args.inmem
yield async_submit_requests(reqs, save=save, save_in_mem=args.inmem,
unique_paths=args.uniquepath, unique_path_and_args=args.unique)
def load_cmds(cmd):
cmd.set_cmds({

View file

@@ -4,10 +4,11 @@ import json
import pappyproxy
import pygments
import pprint
import re
import shlex
import urllib
from pappyproxy.util import PappyException, utc2local, load_reqlist, print_table, print_request_rows, get_req_data_row
from pappyproxy.util import PappyException, utc2local, load_reqlist, print_table, print_request_rows, get_req_data_row, datetime_string, maybe_hexdump
from pappyproxy.http import Request, repeatable_parse_qs
from twisted.internet import defer
from pappyproxy.plugin import async_main_context_ids
@@ -19,11 +20,58 @@ from pygments.lexers.html import XmlLexer
###################
## Helper functions
def view_full_message(request, headers_only=False):
def view_full_message(request, headers_only=False, try_ws=False):
def _print_message(mes):
print_str = ''
if mes.direction == 'INCOMING':
print_str += Colors.BLUE
print_str += '< Incoming'
elif mes.direction == 'OUTGOING':
print_str += Colors.GREEN
print_str += '> Outgoing'
else:
print_str += Colors.RED
print_str += '? ERROR: Unknown direction'
if mes.unmangled:
print_str += ', mangled'
print_str += ', binary = %s\n' % mes.is_binary
print_str += Colors.ENDC
print_str += maybe_hexdump(mes.contents)
print_str += '\n'
return print_str
if headers_only:
print request.headers_section_pretty
else:
print request.full_message_pretty
if try_ws and request.websocket_messages:
print_str = ''
print_str += Styles.TABLE_HEADER
print_str += "Websocket session handshake\n"
print_str += Colors.ENDC
print_str += request.full_message_pretty
print_str += '\n'
print_str += Styles.TABLE_HEADER
print_str += "Websocket session \n"
print_str += Colors.ENDC
for mes in request.websocket_messages:
print_str += _print_message(mes)
if mes.unmangled:
print_str += Colors.YELLOW
print_str += '-'*10
print_str += Colors.ENDC
print_str += ' ^^ UNMANGLED ^^ '
print_str += Colors.YELLOW
print_str += '-'*10
print_str += Colors.ENDC
print_str += '\n'
print_str += _print_message(mes.unmangled)
print_str += Colors.YELLOW
print_str += '-'*20 + '-'*len(' ^^ UNMANGLED ^^ ')
print_str += '\n'
print_str += Colors.ENDC
print print_str
else:
print request.full_message_pretty
def print_request_extended(request):
# Prints extended info for the request
@@ -63,8 +111,7 @@ def print_request_extended(request):
is_ssl = 'NO'
if request.time_start:
dtobj = utc2local(request.time_start)
time_made_str = dtobj.strftime('%a, %b %d, %Y, %I:%M:%S %p')
time_made_str = datetime_string(request.time_start)
else:
time_made_str = '--'
@@ -325,7 +372,7 @@ def view_full_request(line):
for req in reqs:
if len(reqs) > 1:
print 'Request %s:' % req.reqid
view_full_message(req)
view_full_message(req, try_ws=True)
if len(reqs) > 1:
print '-'*30
print ''
@@ -498,6 +545,20 @@ def get_param_info(line):
add_param(found_params, 'Cookie', k, v, req.reqid)
print_param_info(found_params)
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def find_urls(line):
args = shlex.split(line)
reqs = yield load_reqlist(args[0])
url_regexp = r'((?:http|ftp|https)://(?:[\w_-]+(?:(?:\.[\w_-]+)+))(?:[\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?)'
urls = set()
for req in reqs:
urls |= set(re.findall(url_regexp, req.full_message))
if req.response:
urls |= set(re.findall(url_regexp, req.response.full_message))
for url in sorted(urls):
print url
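A quick check of what the regexp extracts (sketch; the sample string is made up, url_regexp as bound above):

import re
sample = 'see https://example.com/a?b=c and http://foo.bar-baz.com.'
print re.findall(url_regexp, sample)
# ['https://example.com/a?b=c', 'http://foo.bar-baz.com']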
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
@@ -568,6 +629,7 @@ def load_cmds(cmd):
'param_info': (get_param_info, None),
'site_map': (site_map, None),
'dump_response': (dump_response, None),
'urls': (find_urls, None),
})
cmd.add_aliases([
('list', 'ls'),

File diff suppressed because it is too large

View file

@@ -0,0 +1,39 @@
from twisted.internet import defer
"""
Schema v8
Creates tables for saved contexts and for websocket messages. Saved contexts
are saved as a JSON list of filter strings.
"""
update_queries = [
"""
CREATE TABLE saved_contexts (
id INTEGER PRIMARY KEY AUTOINCREMENT,
context_name TEXT UNIQUE,
filter_strings TEXT
);
""",
"""
CREATE TABLE websocket_messages (
id INTEGER PRIMARY KEY AUTOINCREMENT,
parent_request INTEGER REFERENCES requests(id),
unmangled_id INTEGER REFERENCES websocket_messages(id),
is_binary INTEGER,
direction INTEGER,
time_sent REAL,
contents BLOB
);
""",
"""
UPDATE schema_meta SET version=8;
"""
]
@defer.inlineCallbacks
def update(dbpool):
for query in update_queries:
yield dbpool.runQuery(query)

View file

@@ -1,34 +1,29 @@
import sys
{% include 'macroheader.py.template' %}
from pappyproxy.http import submit_requests
def run_macro(args):
# Get IDs of in-context requests
reqids = main_context_ids()
reqids.reverse() # Resubmit earliest first
reqs = []
# Create session jar (uncomment jar functions to use)
#jar = Session() # Create a cookie jar
# Iterate over each request and submit it
to_submit = []
for rid in reqids:
print rid,
sys.stdout.flush()
r = request_by_id(rid)
r = r.copy()
req = request_by_id(rid).copy()
#jar.apply_req(r) # Apply headers/cookies from the cookie jar
#####################
# Modify request here
###################
# Modify `req` here
r.submit()
#jar.save_rsp(r.response, save_all=True) # Update the cookie jar from the response
#r.save() # Save the request to the data file
reqs.append(r)
print ''
# Store the requests in memory
set_tag('resubmit', reqs)
req.tags.add('resubmit')
to_submit.append(req)
#############################
# Modify resubmit params here
submit_requests(to_submit,
mangle=False,
save=False,
save_in_mem=False,
unique_paths=False,
unique_paths_and_args=False,
)

View file

@@ -24,12 +24,15 @@ def by_lines_and_full_helper(Type, id_attr, load_func, header_lines, data=''):
# after calling update() on it,
# created by serializing and unserializing to json)
print '-'*30
t_lines = Type()
for l in header_lines:
t_lines.add_line(l)
t_lines.add_data('\r\n'.join(header_lines) + '\r\n')
# for l in header_lines:
# t_lines.add_line(l)
if data:
t_lines.add_data(data)
print '-'*30
t_fulls = '\r\n'.join(header_lines)+'\r\n'
t_fulls += data
@@ -396,10 +399,10 @@ def test_message_build():
'Content-Length: 100\r\n\r\n')
raw += 'A'*100
m = http.HTTPMessage()
m.add_line('foobar')
m.add_line('a: b')
m.add_line('Content-Length: 100')
m.add_line('')
m.add_data('foobar\r\n')
m.add_data('a: b\r\n')
m.add_data('Content-Length: 100\r\n')
m.add_data('\r\n')
assert not m.complete
m.add_data('A'*50)
assert not m.complete
@@ -421,10 +424,10 @@ def test_message_build_chunked():
'Content-Length: 100\r\n\r\n')
raw += 'A'*100
m = http.HTTPMessage()
m.add_line('foobar')
m.add_line('a: b')
m.add_line('Transfer-Encoding: chunked')
m.add_line('')
m.add_data('foobar\r\n')
m.add_data('a: b\r\n')
m.add_data('Transfer-Encoding: chunked\r\n')
m.add_data('\r\n')
assert not m.complete
m.add_data('%x\r\n' % 50)
m.add_data('A'*50)
@@ -588,7 +591,7 @@ def test_headers_end():
]
r = http.Request()
for l in header_lines:
r.add_line(l)
r.add_data(l+'\r\n')
assert not r.complete
assert r.headers_complete
@@ -761,17 +764,18 @@ def test_request_update_content_length():
'Content-Length: 4\r\n\r\n'
'AAAAAAAAAA'), update_content_length=True)
assert r.complete
assert r.full_request == (('GET / HTTP/1.1\r\n'
'Content-Length: 10\r\n\r\n'
'AAAAAAAAAA'))
def test_request_blank_url_params():
r = http.Request()
r.add_line('GET /this/??-asdf/ HTTP/1.1')
r.add_data('GET /this/??-asdf/ HTTP/1.1\r\n')
assert r.full_request == ('GET /this/??-asdf/ HTTP/1.1\r\n\r\n')
r = http.Request()
r.add_line('GET /this/??-asdf/?a=b&c&d=ef HTTP/1.1')
r.add_data('GET /this/??-asdf/?a=b&c&d=ef HTTP/1.1\r\n')
assert r.full_request == ('GET /this/??-asdf/?a=b&c&d=ef HTTP/1.1\r\n\r\n')
assert r.url_params['?-asdf/?a'] == 'b'
assert r.url_params['c'] == None
@@ -784,26 +788,26 @@ def test_request_blank():
def test_request_blank_headers():
r = http.Request(('GET / HTTP/1.1\r\n'
'Header: \r\n'
'Header2:\r\n'))
'Header2:\r\n\r\n'))
assert r.headers['header'] == ''
assert r.headers['header2'] == ''
def test_request_blank_cookies():
r = http.Request(('GET / HTTP/1.1\r\n'
'Cookie: \r\n'))
'Cookie: \r\n\r\n'))
assert r.cookies[''] == ''
r = http.Request(('GET / HTTP/1.1\r\n'
'Cookie: a=b; ; c=d\r\n'))
'Cookie: a=b; ; c=d\r\n\r\n'))
assert r.cookies[''] == ''
r = http.Request(('GET / HTTP/1.1\r\n'
'Cookie: a=b; foo; c=d\r\n'))
'Cookie: a=b; foo; c=d\r\n\r\n'))
assert r.cookies['foo'] == ''
def test_request_set_url():
r = http.Request('GET / HTTP/1.1\r\n')
r = http.Request('GET / HTTP/1.1\r\n\r\n')
r.url = 'www.AAAA.BBBB'
assert r.host == 'www.AAAA.BBBB'
assert r.port == 80
@@ -830,7 +834,7 @@ def test_request_set_url():
assert r.is_ssl
def test_request_set_url_params():
r = http.Request('GET / HTTP/1.1\r\n')
r = http.Request('GET / HTTP/1.1\r\n\r\n')
r.url = 'www.AAAA.BBBB?a=b&c=d#foo'
assert r.url_params.all_pairs() == [('a','b'), ('c','d')]
assert r.fragment == 'foo'
@@ -889,13 +893,16 @@ def test_request_absolute_url():
r = http.Request(('GET /foo/path HTTP/1.1\r\n'
'Host: www.example.faketld\r\n\r\n'))
assert r.full_message == ('GET /foo/path HTTP/1.1\r\n'
'Host: www.example.faketld\r\n\r\n')
'Host: www.example.faketld\r\n'
'\r\n')
r.path_type = http.PATH_ABSOLUTE
assert r.full_message == ('GET http://www.example.faketld/foo/path HTTP/1.1\r\n'
'Host: www.example.faketld\r\n\r\n')
'Host: www.example.faketld\r\n'
'\r\n')
r.is_ssl = True
assert r.full_message == ('GET https://www.example.faketld/foo/path HTTP/1.1\r\n'
'Host: www.example.faketld\r\n\r\n')
'Host: www.example.faketld\r\n'
'\r\n')
def test_proxy_auth():
r = http.Request(('GET /foo/path HTTP/1.1\r\n'
@@ -1075,9 +1082,9 @@ def test_response_chunked_gzip():
def test_response_early_completion():
r = http.Response()
r.start_line = 'HTTP/1.1 200 OK'
r.add_line('Content-Length: 0')
r.add_data('Content-Length: 0\r\n')
assert not r.complete
r.add_line('')
r.add_data('\r\n')
assert r.complete
def test_response_cookies():
@@ -1117,6 +1124,7 @@ def test_response_repeated_cookies():
'Set-Cookie: foo=buzz\r\n'
'\r\n'))
expected_pairs = [('foo', 'bar'), ('baz', 'buzz'), ('foo', 'buzz')]
assert r.complete
check_response_cookies(expected_pairs, r)
def test_repeated_response_headers():
@@ -1175,6 +1183,7 @@ def test_response_update_modified_headers():
'content-length: 4\r\n\r\n'
'AAAA')
assert r.headers['content-length'] == '4'
assert r.complete
def test_response_update_cookies():
r = http.Response()
@@ -1204,6 +1213,7 @@ def test_response_update_content_length():
'Content-Length: 4\r\n\r\n'
'AAAAAAAAAA'), update_content_length=True)
assert r.complete
assert r.full_response == (('HTTP/1.1 200 OK\r\n'
'Content-Length: 10\r\n\r\n'
'AAAAAAAAAA'))
@@ -1273,7 +1283,7 @@ def test_response_blank():
def test_response_blank_headers():
r = http.Response(('HTTP/1.1 200 OK\r\n'
'Header: \r\n'
'Header2:\r\n'))
'Header2:\r\n\r\n'))
assert r.headers['header'] == ''
assert r.headers['header2'] == ''
@@ -1311,7 +1321,7 @@ def test_response_add_cookie():
def test_response_set_cookie():
r = http.Response(('HTTP/1.1 200 OK\r\n'
'Content-Length: 0\r\n'))
'Content-Length: 0\r\n\r\n'))
r.set_cookie(http.ResponseCookie('foo=bar'))
assert r.full_response == ('HTTP/1.1 200 OK\r\n'
'Content-Length: 0\r\n'
@@ -1344,3 +1354,4 @@ def test_response_short_statusline():
assert r.response_text == ''
assert r.version == 'HTTP/1.1'
assert r.response_code == 407

View file

@@ -1,5 +1,6 @@
import StringIO
import datetime
import hashlib
import re
import string
import sys
@@ -34,6 +35,11 @@ class PappyStringTransport(StringTransport):
def finish(self):
# Called when a finishable producer finishes
self.producerState = 'stopped'
def pop_value(self):
v = self.value()
self.clear()
return v
def registerProducer(self, producer, streaming):
StringTransport.registerProducer(self, producer, streaming)
@@ -43,13 +49,14 @@ class PappyStringTransport(StringTransport):
self.producer.resumeProducing()
def loseConnection(self):
StringTransport.loseconnection(self)
assert False
StringTransport.loseConnection(self)
self.complete_deferred.callback(None)
def startTLS(self, context, factory):
pass
def printable_data(data):
def printable_data(data, colors=True):
"""
Return ``data`` with unprintable characters replaced by periods.
@@ -61,15 +68,17 @@ def printable_data(data):
colored = False
for c in data:
if c in string.printable:
if colored:
if colored and colors:
chars.append(Colors.ENDC)
colored = False
colored = False
chars.append(c)
else:
if not colored:
if (not colored) and colors:
chars.append(Styles.UNPRINTABLE_DATA)
colored = True
colored = True
chars.append('.')
if colors:
chars.append(Colors.ENDC)
return ''.join(chars)
def remove_color(s):
@@ -93,6 +102,11 @@ def hexdump(src, length=16):
lines.append("%04x %-*s %s\n" % (c, length*3, hex, printable))
return ''.join(lines)
def maybe_hexdump(s):
if any(c not in string.printable for c in s):
return hexdump(s)
return s
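Usage sketch: printable payloads pass through untouched, while anything containing unprintable bytes is rendered with hexdump():

print maybe_hexdump('plain text')      # printed as-is
print maybe_hexdump('binary\x00\xff')  # falls back to a hexdump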
# Taken from http://stackoverflow.com/questions/16571150/how-to-capture-stdout-output-from-a-python-function-call
# then modified
class Capturing():
@@ -356,3 +370,40 @@ def autocomplete_startswith(text, lst, allow_spaces=False):
ret = [s for s in ret if ' ' not in s]
return ret
def short_data(data):
l = 1024
if len(data) > l:
return printable_data(data[:l], colors=False)
else:
return printable_data(data, colors=False)
def print_traceback():
import traceback; print '\n'.join([l.strip() for l in traceback.format_stack()])
def traceback_on_call(obj, func):
old = getattr(obj, func)
def patched(*args, **kwargs):
print_traceback()
old(*args, **kwargs)
setattr(obj, func, patched)
def sha1(data):
m = hashlib.sha1()
m.update(data)
return m.digest()
def datetime_string(dt):
dtobj = utc2local(dt)
time_made_str = dtobj.strftime('%a, %b %d, %Y, %I:%M:%S.%f %p')
return time_made_str
def html_escape(s, quote=None):
'''Replace special characters "&", "<" and ">" to HTML-safe sequences.
If the optional flag quote is true, the quotation mark character (")
is also translated.'''
s = s.replace("&", "&amp;") # Must be done first!
s = s.replace("<", "&lt;")
s = s.replace(">", "&gt;")
if quote:
s = s.replace('"', "&quot;")
return s