A fork of pappy proxy

import argparse
import crochet
import pappyproxy
import shlex
import sys

from pappyproxy.colors import Colors, Styles, path_formatter, host_color, scode_color, verb_color
from pappyproxy.util import PappyException, remove_color, confirm, load_reqlist, Capturing
from pappyproxy.macros import InterceptMacro
from pappyproxy.requestcache import RequestCache
from pappyproxy.session import Session
from pappyproxy.pappy import session
from pappyproxy.plugin import add_intercepting_macro, remove_intercepting_macro, add_to_history
from pappyproxy.http import async_submit_requests, Request

from twisted.internet import defer
from twisted.enterprise import adbapi

class PrintStreamInterceptMacro(InterceptMacro):
    """
    Intercepting macro that prints requests and responses as they go through
    the proxy
    """

    def __init__(self):
        InterceptMacro.__init__(self)
        self.name = 'Pappy Interceptor Macro'
        self.intercept_requests = False
        self.intercept_responses = False
        self.async_req = False
        self.async_rsp = False

    def __repr__(self):
        return "<PrintStreamInterceptingMacro>"

    @staticmethod
    def _print_request(req):
        s = verb_color(req.verb)+'> '+req.verb+' '+Colors.ENDC
        s += req.url_color
        s += ', len=' + str(len(req.body))
        print s
        sys.stdout.flush()

    @staticmethod
    def _print_response(req):
        response_code = str(req.response.response_code) + \
                        ' ' + req.response.response_text
        s = scode_color(response_code)
        s += '< '
        s += response_code
        s += Colors.ENDC
        s += ' '
        s += req.url_color
        s += ', len=' + str(len(req.response.body))
        print s
        sys.stdout.flush()

    def mangle_request(self, request):
        PrintStreamInterceptMacro._print_request(request)
        return request

    def mangle_response(self, request):
        PrintStreamInterceptMacro._print_response(request)
        return request.response
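
# Example output from the two helpers above (colors stripped); the format is
# assumed from how the strings are built in _print_request/_print_response:
#   > GET http://example.com/index.html, len=0
#   < 200 OK http://example.com/index.html, len=1256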

@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def clrmem(line):
    """
    Delete all in-memory only requests
    Usage: clrmem
    """
    to_delete = list(pappyproxy.http.Request.cache.inmem_reqs)
    for r in to_delete:
        try:
            yield r.deep_delete()
        except PappyException as e:
            print str(e)
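
# (The @crochet.wait_for/@defer.inlineCallbacks pair used above and on several
# commands below is what bridges the blocking console and Twisted:
# inlineCallbacks turns the generator into a function returning a Deferred,
# and crochet.wait_for blocks the calling thread until that Deferred fires.)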

def gencerts(line):
    """
    Generate CA cert and private CA file
    Usage: gencerts [/path/to/put/certs/in]
    """
    dest_dir = line or pappyproxy.pappy.session.config.cert_dir
    message = "This will overwrite any existing certs in %s. Are you sure?" % dest_dir
    if not confirm(message, 'n'):
        return False
    print "Generating certs to %s" % dest_dir
    pappyproxy.proxy.generate_ca_certs(dest_dir)
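
# Example (from the docstring and the fallback above): `gencerts` with no
# argument regenerates the CA certificate into session.config.cert_dir, while
# `gencerts /tmp/pappy-certs` writes it into /tmp/pappy-certs instead.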

def log(line):
    """
    Display the log in real time. Honestly it probably doesn't work.
    Usage: log [verbosity (default is 1)]
    verbosity=1: Show connections as they're made/lost, some additional info
    verbosity=3: Show full requests/responses as they are processed by the proxy
    """
    try:
        verbosity = int(line.strip())
    except:
        verbosity = 1
    pappyproxy.pappy.session.config.debug_verbosity = verbosity
    raw_input()
    pappyproxy.pappy.session.config.debug_verbosity = 0
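
# The next command, save (exposed as 'sv' in load_cmds below), has no
# docstring; judging from its body, it loads the given request id(s) and
# writes each one to the data file via async_deep_save.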

@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def save(line):
    args = shlex.split(line)
    reqids = args[0]
    reqs = yield load_reqlist(reqids)
    for req in reqs:
        yield req.async_deep_save()

@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def export(line):
    """
    Write the full request or response for the given request id(s) to a file.
    Usage: export [req|rsp] <reqid(s)>
    """
    args = shlex.split(line)
    if len(args) < 2:
        print 'Requires req/rsp and request id(s)'
        defer.returnValue(None)
    if args[0] not in ('req', 'rsp'):
        raise PappyException('Request or response not specified')
    reqs = yield load_reqlist(args[1])
    for req in reqs:
        try:
            if args[0] == 'req':
                fname = 'req_%s.txt' % req.reqid
                with open(fname, 'w') as f:
                    f.write(req.full_request)
                print 'Full request written to %s' % fname
            elif args[0] == 'rsp':
                fname = 'rsp_%s.txt' % req.reqid
                with open(fname, 'w') as f:
                    f.write(req.full_response)
                print 'Full response written to %s' % fname
        except PappyException as e:
            print 'Unable to export %s: %s' % (req.reqid, e)
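
# Example (based on the docstring above): `export req 12` writes the raw
# request with id 12 to req_12.txt in the current directory, and
# `export rsp 12` writes its response to rsp_12.txt.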

@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def merge_datafile(line):
    """
    Add all the requests/responses from another data file to the current one
    """

    def set_text_factory(conn):
        conn.text_factory = str

    line = line.strip()
    other_dbpool = adbapi.ConnectionPool("sqlite3", line,
                                         check_same_thread=False,
                                         cp_openfun=set_text_factory,
                                         cp_max=1)
    try:
        count = 0
        other_cache = RequestCache(cust_dbpool=other_dbpool)
        yield other_cache.load_ids()
        for req_d in other_cache.req_it():
            count += 1
            req = yield req_d
            r = req.copy()
            yield r.async_deep_save()
        print 'Added %d requests' % count
    finally:
        other_dbpool.close()
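
# Note: set_text_factory makes the other database's sqlite3 connections return
# byte strings instead of unicode objects, and cp_max=1 keeps the temporary
# adbapi pool to a single connection; both appear to be there so the copied
# requests round-trip cleanly into the current data file.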

def watch_proxy(line):
    print 'Watching proxy... press ENTER to exit'
    macro = PrintStreamInterceptMacro()
    macro.intercept_requests = True
    macro.intercept_responses = True
    try:
        add_intercepting_macro('pappy_watch_proxy', macro)
        raw_input()
    finally:
        try:
            remove_intercepting_macro('pappy_watch_proxy')
        except PappyException:
            pass
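
# 'watch' temporarily registers the PrintStreamInterceptMacro defined at the
# top of this file; the finally block ensures the macro is removed and the
# proxy stops echoing traffic even if the command exits abnormally.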

def run_without_color(line):
    with Capturing() as output:
        session.cons.onecmd(line.strip())
    print remove_color(output.val)

def version(line):
    import pappyproxy
    print pappyproxy.__version__

@crochet.wait_for(timeout=180.0)
@defer.inlineCallbacks
def submit(line):
    """
    Resubmit some requests, optionally with modified headers and cookies.

    Usage: submit reqids [-h] [-m] [-u] [-p] [-o REQID] [-c [COOKIES [COOKIES ...]]] [-d [HEADERS [HEADERS ...]]]
    """
    parser = argparse.ArgumentParser(prog="submit", usage=submit.__doc__)
    parser.add_argument('reqids')
    parser.add_argument('-m', '--inmem', action='store_true', help='Store resubmitted requests in memory without storing them in the data file')
    parser.add_argument('-u', '--unique', action='store_true', help='Only resubmit one request per endpoint (different URL parameters are different endpoints)')
    parser.add_argument('-p', '--uniquepath', action='store_true', help='Only resubmit one request per endpoint (ignoring URL parameters)')
    parser.add_argument('-c', '--cookies', nargs='*', help='Apply a cookie to requests before submitting')
    parser.add_argument('-d', '--headers', nargs='*', help='Apply a header to requests before submitting')
    parser.add_argument('-o', '--copycookies', help='Copy the cookies used in another request')
    args = parser.parse_args(shlex.split(line))

    headers = {}
    cookies = {}
    clear_cookies = False

    if args.headers:
        for h in args.headers:
            k, v = h.split('=', 1)
            headers[k] = v

    if args.copycookies:
        reqid = args.copycookies
        req = yield Request.load_request(reqid)
        clear_cookies = True
        for k, v in req.cookies.all_pairs():
            cookies[k] = v

    if args.cookies:
        for c in args.cookies:
            k, v = c.split('=', 1)
            cookies[k] = v

    if args.unique and args.uniquepath:
        raise PappyException('Cannot use both -u and -p')

    newsession = Session(cookie_vals=cookies, header_vals=headers)

    reqs = yield load_reqlist(args.reqids)
    for req in reqs:
        if clear_cookies:
            req.cookies.clear()
        newsession.apply_req(req)

    conf_message = "You're about to submit %d requests, continue?" % len(reqs)
    if not confirm(conf_message):
        defer.returnValue(None)

    for r in reqs:
        r.tags.add('resubmitted')

    save = not args.inmem
    yield async_submit_requests(reqs, save=save, save_in_mem=args.inmem,
                                unique_paths=args.uniquepath, unique_path_and_args=args.unique)
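
# Example (an assumed invocation; the reqid syntax is whatever load_reqlist
# accepts):
#   submit 12,13 -m -d User-Agent=Pappy -c session=abc123
# would resubmit requests 12 and 13 in memory only, overriding the User-Agent
# header and the session cookie on each request before it is sent.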

def load_cmds(cmd):
    cmd.set_cmds({
        'clrmem': (clrmem, None),
        'gencerts': (gencerts, None),
        'sv': (save, None),
        'export': (export, None),
        'log': (log, None),
        'merge': (merge_datafile, None),
        'nocolor': (run_without_color, None),
        'watch': (watch_proxy, None),
        'version': (version, None),
        'submit': (submit, None)
    })
    cmd.add_aliases([
        #('rpy', ''),
    ])
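
# load_cmds is the entry point the pappy console uses when it loads this
# plugin module: set_cmds maps each console command name to a (handler,
# completer) pair (the None values appear to mean no tab completion), and
# add_aliases can register alternate names for those commands.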