Release 0.0.1

See README diff for changes
This commit is contained in:
Rob Glew 2015-11-23 22:44:31 -06:00
parent 6633423420
commit 4e6801e4d8
39 changed files with 917 additions and 443 deletions

3
pappyproxy/.coveragerc Normal file
View file

@ -0,0 +1,3 @@
[run]
omit = tests/*, schema/*, console.py, vim_repeater/*

9
pappyproxy/Makefile Normal file
View file

@ -0,0 +1,9 @@
install-third-party:
pip install -e ..
test:
py.test -rw --twisted --cov-config .coveragerc --cov=. tests/
test-verbose:
py.test -v -rw --twisted --cov-config .coveragerc --cov-report term-missing --cov=. tests/

0
pappyproxy/__init__.py Normal file
View file

7
pappyproxy/__main__.py Normal file
View file

@ -0,0 +1,7 @@
import pappy
from twisted.internet import reactor

if __name__ == '__main__':
    # Schedule the proxy's entry point for when the reactor is up;
    # reactor.run() then blocks until the application exits.
    reactor.callWhenRunning(pappy.main)
    reactor.run()

111
pappyproxy/comm.py Normal file
View file

@ -0,0 +1,111 @@
import base64
import json
import pappyproxy
from twisted.protocols.basic import LineReceiver
from twisted.internet import defer
from util import PappyException
"""
comm.py
Handles creating a listening server bound to localhost that other processes can
use to interact with the proxy.
"""
comm_port = 0
debug = True
def set_comm_port(port):
    # Record the port the comm server is listening on; module-level state
    # read by clients that connect back to the proxy.
    global comm_port
    comm_port = port
class CommServer(LineReceiver):
def __init__(self):
    # LineReceiver defaults to CRLF; this protocol is newline-delimited JSON.
    self.delimiter = '\n'
    # Maps the 'action' field of an incoming JSON command to its handler.
    self.action_handlers = {
        'ping': self.action_ping,
        'get_request': self.action_get_request,
        'get_response': self.action_get_response,
        'submit': self.action_submit_request,
    }
def lineReceived(self, line):
    """Parse one newline-terminated JSON command and dispatch it.

    Expected shape: {"action": <name>, ...handler-specific fields...}.
    Handler results are sent back asynchronously through the
    callback/errback pair; parse or validation errors get an immediate
    {"success": false} reply on this connection.
    """
    if line == '':
        return
    try:
        command_data = json.loads(line)
        command = command_data['action']
        valid = False
        if command in self.action_handlers:
            valid = True
            # Base payload the result handler merges handler output into.
            result = {'success': True}
            func_defer = self.action_handlers[command](command_data)
            func_defer.addCallback(self.action_result_handler, result)
            func_defer.addErrback(self.action_error_handler, result)
        if not valid:
            raise PappyException('%s is an invalid command' % command_data['action'])
    except PappyException as e:
        return_data = {'success': False, 'message': str(e)}
        self.sendLine(json.dumps(return_data))
def action_result_handler(self, data, result):
    # Merge the handler's returned dict into the base result
    # ({'success': True}) and send it to the client as one JSON line.
    result.update(data)
    self.sendLine(json.dumps(result))
def action_error_handler(self, error, result):
if debug:
print error.getTraceback()
return_data = {'success': False, 'message': 'Debug mode enabled, traceback on main terminal'}
else:
return_data = {'success': False, 'message': str(error.getErrorMessage())}
result.update(result)
self.sendLine(json.dumps(return_data))
error.trap(Exception)
return True
def action_ping(self, data):
    # Liveness check; resolves immediately.
    return defer.succeed({'ping': 'pong'})
@defer.inlineCallbacks
def action_get_request(self, data):
    """Load the request identified by data['reqid'] and return its JSON dict.

    Fix: a non-numeric reqid used to raise an uncaught ValueError from
    int(); both missing and malformed ids now produce the same
    PappyException reply.
    """
    try:
        reqid = int(data['reqid'])
    except (KeyError, ValueError):
        raise PappyException("Request with given ID does not exist")
    req = yield pappyproxy.http.Request.load_request(reqid)
    dat = json.loads(req.to_json())
    defer.returnValue(dat)
@defer.inlineCallbacks
def action_get_response(self, data):
    """Return the JSON dict of the response tied to request data['reqid'].

    Returns {} when the request has no response.  Fix: a non-numeric reqid
    used to raise an uncaught ValueError from int(); both missing and
    malformed ids now produce the same PappyException reply.
    """
    try:
        reqid = int(data['reqid'])
    except (KeyError, ValueError):
        raise PappyException("Request with given ID does not exist, cannot fetch associated response.")
    req = yield pappyproxy.http.Request.load_request(reqid)
    if req.response:
        rsp = yield pappyproxy.http.Response.load_response(req.response.rspid)
        dat = json.loads(rsp.to_json())
    else:
        dat = {}
    defer.returnValue(dat)
@defer.inlineCallbacks
def action_submit_request(self, data):
    """Submit a request through the proxy and persist the result.

    Expects base64-encoded 'full_request' plus 'port' and 'is_ssl'.
    Returns the saved request (and response, if one arrived) as JSON dicts.
    """
    try:
        req = pappyproxy.http.Request(base64.b64decode(data['full_request']))
        req.port = data['port']
        req.is_ssl = data['is_ssl']
    except:
        # NOTE(review): bare except also hides programming errors; narrowing
        # it would change what callers see, so it is preserved as-is.
        raise PappyException("Error parsing request")
    req_sub = yield req.submit_self()
    yield req_sub.deep_save()
    retdata = {}
    retdata['request'] = json.loads(req_sub.to_json())
    if req_sub.response:
        retdata['response'] = json.loads(req_sub.response.to_json())
    defer.returnValue(retdata)

71
pappyproxy/config.py Normal file
View file

@ -0,0 +1,71 @@
import imp
import json
import os
import shutil
PAPPY_DIR = os.path.dirname(os.path.realpath(__file__))
CERT_DIR = PAPPY_DIR
DATAFILE = 'data.db'
DEBUG_DIR = None
DEBUG_TO_FILE = False
DEBUG_VERBOSITY = 0
LISTENERS = [(8000, '127.0.0.1')]
SSL_CA_FILE = 'certificate.crt'
SSL_PKEY_FILE = 'private.key'
def get_default_config():
    """Read the packaged default configuration and return it as a dict."""
    config_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                               'default_user_config.json')
    with open(config_path) as conf_file:
        return json.load(conf_file)
def load_settings(proj_config):
    """Apply a project-config dict to this module's global settings.

    Only keys present in proj_config override the defaults; string values
    may use {PAPPYDIR} to refer to the package directory.
    """
    global CERT_DIR
    global DATAFILE
    global DEBUG_DIR
    global DEBUG_TO_FILE
    global DEBUG_VERBOSITY
    global LISTENERS
    global PAPPY_DIR
    global SSL_CA_FILE
    global SSL_PKEY_FILE

    # Substitution dictionary
    subs = {}
    subs['PAPPYDIR'] = PAPPY_DIR

    # Data file settings
    if 'data_file' in proj_config:
        DATAFILE = proj_config["data_file"].format(**subs)

    # Debug settings
    if 'debug_dir' in proj_config:
        if proj_config['debug_dir']:
            # A non-empty debug_dir implies logging to file.
            DEBUG_TO_FILE = True
            DEBUG_DIR = proj_config["debug_dir"].format(**subs)

    # Cert directory settings
    if 'cert_dir' in proj_config:
        CERT_DIR = proj_config["cert_dir"].format(**subs)

    # Listener settings
    if "proxy_listeners" in proj_config:
        LISTENERS = []
        for l in proj_config["proxy_listeners"]:
            LISTENERS.append((l['port'], l['interface']))
def load_from_file(fname):
    """Load settings from fname, first creating it from the packaged
    default config if it does not exist yet."""
    # Make sure we have a config file
    if not os.path.isfile(fname):
        print "Copying default config to %s" % fname
        default_config_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                           'default_user_config.json')
        shutil.copyfile(default_config_file, fname)

    # Load local project config
    with open(fname, 'r') as f:
        proj_config = json.load(f)
    load_settings(proj_config)

792
pappyproxy/console.py Normal file
View file

@ -0,0 +1,792 @@
import cmd2
import crochet
import curses
import datetime
import os
import pappyproxy
import pygments
import shlex
import string
import subprocess
import sys
import termios
import time
from twisted.internet import defer, reactor
from pappyproxy.util import PappyException
from pygments.lexers import get_lexer_for_mimetype
from pygments.formatters import TerminalFormatter
"""
console.py
Functions and classes involved with interacting with console input and output
"""
# http://www.termsys.demon.co.uk/vtansi.htm#cursor
SAVE_CURSOR = '\x1b[7'
UNSAVE_CURSOR = '\x1b[8'
LINE_UP = '\x1b[1A'
LINE_ERASE = '\x1b[2K'
PRINT_LINE = '\x1b[1i'
edit_queue = []
def print_pappy_errors(func):
def catch(*args, **kwargs):
try:
func(*args, **kwargs)
except PappyException as e:
print str(e)
return catch
class ProxyCmd(cmd2.Cmd):
def __init__(self, *args, **kwargs):
self.alerts = []
self.prompt = 'itsPappyTime> '
self.debug = True
cmd2.Cmd.__init__(self, *args, **kwargs)
def add_alert(self, alert):
self.alerts.append(alert)
def postcmd(self, stop, line):
for l in self.alerts:
print '[!] ', l
self.alerts = []
return stop
def help_view_request_info(self):
    # Fix: this was defined as help_view_request_headers, colliding with the
    # identically named help method for view_request_headers later in the
    # class (which silently shadowed this one) and leaving help_viq's call
    # to help_view_request_info broken with an AttributeError.
    print ("View information about request\n"
           "Usage: view_request_info <reqid> [u]"
           "If 'u' is given as an additional argument, the unmangled version "
           "of the request will be displayed.")
@print_pappy_errors
@crochet.wait_for(timeout=30.0)
@defer.inlineCallbacks
def do_view_request_info(self, line):
args = shlex.split(line)
try:
reqid = int(args[0])
showid = reqid
except:
raise PappyException("Enter a valid number for the request id")
req = yield pappyproxy.http.Request.load_request(reqid)
showreq = req
show_unmangled = False
if len(args) > 1 and args[1][0].lower() == 'u':
if not req.unmangled:
raise PappyException("Request was not mangled")
show_unmangled = True
showreq = req.unmangled
print ''
print_request_extended(showreq)
print ''
def help_view_request_headers(self):
print ("View the headers of the request\n"
"Usage: view_request_headers <reqid> [u]"
"If 'u' is given as an additional argument, the unmangled version "
"of the request will be displayed.")
@print_pappy_errors
@crochet.wait_for(timeout=30.0)
@defer.inlineCallbacks
def do_view_request_headers(self, line):
args = shlex.split(line)
try:
reqid = int(args[0])
showid = reqid
except:
raise PappyException("Enter a valid number for the request id")
req = yield pappyproxy.http.Request.load_request(reqid)
showreq = req
show_unmangled = False
if len(args) > 1 and args[1][0].lower() == 'u':
if not req.unmangled:
raise PappyException("Request was not mangled")
show_unmangled = True
showreq = req.unmangled
if show_unmangled:
print 'UNMANGLED --------------------'
print ''
view_full_request(showreq, True)
def help_view_full_request(self):
print ("View the full data of the request\n"
"Usage: view_full_request <reqid> [u]\n"
"If 'u' is given as an additional argument, the unmangled version "
"of the request will be displayed.")
@print_pappy_errors
@crochet.wait_for(timeout=30.0)
@defer.inlineCallbacks
def do_view_full_request(self, line):
args = shlex.split(line)
try:
reqid = int(args[0])
showid = reqid
except:
raise PappyException("Enter a valid number for the request id")
req = yield pappyproxy.http.Request.load_request(reqid)
showreq = req
show_unmangled = False
if len(args) > 1 and args[1][0].lower() == 'u':
if not req.unmangled:
raise PappyException("Request was not mangled")
show_unmangled = True
showreq = req.unmangled
if show_unmangled:
print 'UNMANGLED --------------------'
print ''
view_full_request(showreq)
def help_view_response_headers(self):
print ("View the headers of the response\n"
"Usage: view_response_headers <reqid>")
@print_pappy_errors
@crochet.wait_for(timeout=30.0)
@defer.inlineCallbacks
def do_view_response_headers(self, line):
args = shlex.split(line)
try:
reqid = int(args[0])
showid = reqid
except:
raise PappyException("Enter a valid number for the request id")
req = yield pappyproxy.http.Request.load_request(reqid)
showrsp = req.response
show_unmangled = False
if len(args) > 1 and args[1][0].lower() == 'u':
if not req.response.unmangled:
raise PappyException("Response was not mangled")
show_unmangled = True
showrsp = req.response.unmangled
if show_unmangled:
print ''
print 'UNMANGLED --------------------'
print ''
view_full_response(showrsp, True)
def help_view_full_response(self):
print ("View the full data of the response associated with a request\n"
"Usage: view_full_response <reqid>")
@print_pappy_errors
@crochet.wait_for(timeout=30.0)
@defer.inlineCallbacks
def do_view_full_response(self, line):
args = shlex.split(line)
try:
reqid = int(args[0])
showid = reqid
except:
raise PappyException("Enter a valid number for the request id")
req = yield pappyproxy.http.Request.load_request(reqid)
showrsp = req.response
show_unmangled = False
if len(args) > 1 and args[1][0].lower() == 'u':
if not req.response.unmangled:
raise PappyException("Response was not mangled")
show_unmangled = True
showrsp = req.response.unmangled
if show_unmangled:
print ''
print 'UNMANGLED --------------------'
print ''
view_full_response(showrsp)
def help_list(self):
print ("List request/response pairs in the current context\n"
"Usage: list")
@print_pappy_errors
def do_list(self, line):
args = shlex.split(line)
if len(args) > 0:
if args[0][0].lower() == 'a':
print_count = -1
else:
try:
print_count = int(args[0])
except:
print "Please enter a valid argument for list"
return
else:
print_count = 25
pappyproxy.context.sort()
if print_count > 0:
to_print = pappyproxy.context.active_requests[:]
to_print = sorted(to_print, key=lambda x: x.reqid, reverse=True)
to_print = to_print[:print_count]
print_requests(to_print)
else:
print_requests(pappyproxy.context.active_requests)
def help_filter(self):
print ("Apply a filter to the current context\n"
"Usage: filter <filter string>\n"
"See README.md for information on filter strings")
@print_pappy_errors
def do_filter(self, line):
if not line:
raise PappyException("Filter string required")
filter_to_add = pappyproxy.context.Filter(line)
pappyproxy.context.add_filter(filter_to_add)
def help_filter_clear(self):
print ("Reset the context so that it contains no filters (ignores scope)\n"
"Usage: filter_clear")
@print_pappy_errors
@crochet.wait_for(timeout=30.0)
@defer.inlineCallbacks
def do_filter_clear(self, line):
pappyproxy.context.active_filters = []
yield pappyproxy.context.reload_from_storage()
def help_filter_list(self):
print ("Print the filters that make up the current context\n"
"Usage: filter_list")
@print_pappy_errors
def do_filter_list(self, line):
for f in pappyproxy.context.active_filters:
print f.filter_string
def help_scope_save(self):
print ("Set the scope to be the current context. Saved between launches\n"
"Usage: scope_save")
@print_pappy_errors
@crochet.wait_for(timeout=30.0)
@defer.inlineCallbacks
def do_scope_save(self, line):
pappyproxy.context.save_scope()
yield pappyproxy.context.store_scope(pappyproxy.http.dbpool)
def help_scope_reset(self):
print ("Set the context to be the scope (view in-scope items)\n"
"Usage: scope_reset")
@print_pappy_errors
@crochet.wait_for(timeout=30.0)
@defer.inlineCallbacks
def do_scope_reset(self, line):
yield pappyproxy.context.reset_to_scope()
def help_scope_delete(self):
print ("Delete the scope so that it contains all request/response pairs\n"
"Usage: scope_delete")
@print_pappy_errors
@crochet.wait_for(timeout=30.0)
@defer.inlineCallbacks
def do_scope_delete(self, line):
pappyproxy.context.set_scope([])
yield pappyproxy.context.store_scope(pappyproxy.http.dbpool)
def help_scope_list(self):
print ("Print the filters that make up the scope\n"
"Usage: scope_list")
@print_pappy_errors
def do_scope_list(self, line):
pappyproxy.context.print_scope()
def help_repeater(self):
print ("Open a request in the repeater\n"
"Usage: repeater <reqid>")
@print_pappy_errors
def do_repeater(self, line):
args = shlex.split(line)
try:
reqid = int(args[0])
except:
raise PappyException("Enter a valid number for the request id")
repid = reqid
if len(args) > 1 and args[1][0].lower() == 'u':
umid = get_unmangled(reqid)
if umid is not None:
repid = umid
pappyproxy.repeater.start_editor(repid)
def help_submit(self):
print "Submit a request again (NOT IMPLEMENTED)"
@print_pappy_errors
@crochet.wait_for(timeout=30.0)
@defer.inlineCallbacks
def do_submit(self, line):
pass
# reqid = int(line)
# req = yield http.Request.load_request(reqid)
# rsp = yield req.submit()
# print printable_data(rsp.full_response)
def help_intercept(self):
print ("Intercept requests and/or responses and edit them with before passing them along\n"
"Usage: intercept <reqid>")
@print_pappy_errors
def do_intercept(self, line):
global edit_queue
args = shlex.split(line)
intercept_requests = False
intercept_responses = False
req_names = ('req', 'request', 'requests')
rsp_names = ('rsp', 'response', 'responses')
if any(a in req_names for a in args):
intercept_requests = True
if any(a in rsp_names for a in args):
intercept_responses = True
if intercept_requests and intercept_responses:
intercept_str = 'Requests and responses'
elif intercept_requests:
intercept_str = 'Requests'
elif intercept_responses:
intercept_str = 'Responses'
else:
intercept_str = 'NOTHING'
pappyproxy.mangle.set_intercept_requests(intercept_requests)
pappyproxy.mangle.set_intercept_responses(intercept_responses)
## Interceptor loop
stdscr = curses.initscr()
curses.noecho()
curses.cbreak()
try:
editnext = False
stdscr.nodelay(True)
while True:
stdscr.addstr(0, 0, "Currently intercepting: %s" % intercept_str)
stdscr.clrtoeol()
stdscr.addstr(1, 0, "%d item(s) in queue." % len(edit_queue))
stdscr.clrtoeol()
if editnext:
stdscr.addstr(2, 0, "Waiting for next item... Press 'q' to quit or 'b' to quit waiting")
else:
stdscr.addstr(2, 0, "Press 'n' to edit the next item or 'q' to quit interceptor.")
stdscr.clrtoeol()
c = stdscr.getch()
if c == ord('q'):
break
elif c == ord('n'):
editnext = True
elif c == ord('b'):
editnext = False
if editnext and edit_queue:
editnext = False
(to_edit, deferred) = edit_queue.pop(0)
editor = 'vi'
if 'EDITOR' in os.environ:
editor = os.environ['EDITOR']
subprocess.call([editor, to_edit])
stdscr.clear()
deferred.callback(None)
finally:
curses.nocbreak()
stdscr.keypad(0)
curses.echo()
curses.endwin()
pappyproxy.mangle.set_intercept_requests(False)
pappyproxy.mangle.set_intercept_responses(False)
# Send remaining requests along
while len(edit_queue) > 0:
(fname, deferred) = edit_queue.pop(0)
deferred.callback(None)
def help_gencerts(self):
print ("Generate CA cert and private CA file\n"
"Usage: gencerts [/path/to/put/certs/in]")
@print_pappy_errors
def do_gencerts(self, line):
dest_dir = line or pappyproxy.config.CERT_DIR
print "This will overwrite any existing certs in %s. Are you sure?" % dest_dir
print "(y/N)",
answer = raw_input()
if not answer or answer[0].lower() != 'y':
return False
print "Generating certs to %s" % dest_dir
pappyproxy.proxy.generate_ca_certs(dest_dir)
def help_log(self):
print ("View the log\n"
"Usage: log [verbosity (default is 1)]\n"
"verbosity=1: Show connections as they're made/lost, some additional info\n"
"verbosity=3: Show full requests/responses as they are processed by the proxy")
@print_pappy_errors
def do_log(self, line):
try:
verbosity = int(line.strip())
except:
verbosity = 1
pappyproxy.config.DEBUG_VERBOSITY = verbosity
raw_input()
pappyproxy.config.DEBUG_VERBOSITY = 0
@print_pappy_errors
def do_testerror(self, line):
raise PappyException("Test error")
@print_pappy_errors
def do_EOF(self):
print "EOF"
return True
### ABBREVIATIONS
def help_ls(self):
self.help_list()
@print_pappy_errors
def do_ls(self, line):
self.onecmd('list %s' % line)
def help_sr(self):
self.help_scope_reset()
@print_pappy_errors
def do_sr(self, line):
self.onecmd('scope_reset %s' % line)
def help_sls(self):
self.help_scope_list()
@print_pappy_errors
def do_sls(self, line):
self.onecmd('scope_list %s' % line)
def help_viq(self):
self.help_view_request_info()
@print_pappy_errors
def do_viq(self, line):
self.onecmd('view_request_info %s' % line)
def help_vhq(self):
self.help_view_request_headers()
@print_pappy_errors
def do_vhq(self, line):
self.onecmd('view_request_headers %s' % line)
def help_vfq(self):
self.help_view_full_request()
@print_pappy_errors
def do_vfq(self, line):
self.onecmd('view_full_request %s' % line)
def help_vhs(self):
self.help_view_response_headers()
@print_pappy_errors
def do_vhs(self, line):
self.onecmd('view_response_headers %s' % line)
def help_vfs(self):
self.help_view_full_response()
@print_pappy_errors
def do_vfs(self, line):
self.onecmd('view_full_response %s' % line)
def help_fl(self):
self.help_filter()
@print_pappy_errors
def do_fl(self, line):
self.onecmd('filter %s' % line)
def help_f(self):
self.help_filter()
@print_pappy_errors
def do_f(self, line):
self.onecmd('filter %s' % line)
def help_fls(self):
self.help_filter_list()
@print_pappy_errors
def do_fls(self, line):
self.onecmd('filter_list %s' % line)
def help_fc(self):
self.help_filter_clear()
@print_pappy_errors
def do_fc(self, line):
self.onecmd('filter_clear %s' % line)
def help_rp(self):
self.help_repeater()
@print_pappy_errors
def do_rp(self, line):
self.onecmd('repeater %s' % line)
def help_ic(self):
self.help_intercept()
@print_pappy_errors
def do_ic(self, line):
self.onecmd('intercept %s' % line)
def cmd_failure(cmd):
    # Generic failure callback for console commands.
    print "FAILURE"

def edit_file(fname, front=False):
    global edit_queue
    # Adds the filename to the edit queue. Returns a deferred that is fired once
    # the file is edited and the editor is closed
    d = defer.Deferred()
    if front:
        # front=True jumps the queue so this file is edited next.
        edit_queue = [(fname, d)] + edit_queue
    else:
        edit_queue.append((fname, d))
    return d
def print_table(coldata, rows):
    """Print rows as a space-padded text table on stdout.

    coldata: list of dicts describing the columns:
        name:  heading to give column
        width: (optional) maximum width before truncating. 0 for unlimited
    rows: list of tuples with the data to print.
    """
    # Collect headings; the header row is only printed if at least one
    # column actually has a name.
    headers = []
    for data in coldata:
        if 'name' in data:
            headers.append(data['name'])
        else:
            headers.append('')
    if any(h != '' for h in headers):
        rows = [headers] + rows
    # Compute each column's print width: the widest cell, capped by the
    # optional per-column 'width'.
    widths = []
    for i in range(len(coldata)):
        col = coldata[i]
        if 'width' in col and col['width'] > 0:
            maxwidth = col['width']
        else:
            maxwidth = 0
        colwidth = 0
        for row in rows:
            printstr = str(row[i])
            if len(printstr) > colwidth:
                colwidth = len(printstr)
        if maxwidth > 0 and colwidth > maxwidth:
            widths.append(maxwidth)
        else:
            widths.append(colwidth)
    # Print rows
    padding = 2
    for row in rows:
        for (col, width) in zip(row, widths):
            printstr = str(col)
            if len(printstr) > width:
                # Truncate to the column width, ending with '...'.  (The
                # original reached this result via a redundant loop that
                # performed the same truncation three times.)
                printstr = printstr[:width - 3] + '...'
            sys.stdout.write(printstr)
            sys.stdout.write(' ' * (width - len(printstr)))
            sys.stdout.write(' ' * padding)
        sys.stdout.write('\n')
    sys.stdout.flush()
def printable_data(data):
    """Return data with every non-printable character replaced by '.'
    so raw bytes can be shown without mangling the terminal."""
    return ''.join(c if c in string.printable else '.' for c in data)
@crochet.wait_for(timeout=30.0)
@defer.inlineCallbacks
def get_unmangled(reqid):
    # Synchronously (via crochet) look up a request and return the id of
    # its unmangled version, or None if it was never mangled.
    req = yield pappyproxy.http.Request.load_request(reqid)
    if req.unmangled:
        defer.returnValue(req.unmangled.reqid)
    else:
        defer.returnValue(None)
def view_full_request(request, headers_only=False):
    # Print either just the header block or the entire raw request, with
    # non-printable bytes replaced so they don't mangle the terminal.
    if headers_only:
        print printable_data(request.raw_headers)
    else:
        print printable_data(request.full_request)
def view_full_response(response, headers_only=False):
def check_type(response, against):
if 'Content-Type' in response.headers and against in response.headers['Content-Type']:
return True
return False
if headers_only:
print printable_data(response.raw_headers)
else:
print response.raw_headers,
to_print = printable_data(response.raw_data)
if 'content-type' in response.headers:
try:
lexer = get_lexer_for_mimetype(response.headers['content-type'].split(';')[0])
to_print = pygments.highlight(to_print, lexer, TerminalFormatter())
except ClassNotFound:
pass
print to_print
def print_requests(requests):
    """Print a summary table (one row per request) via print_table."""
    # Print a table with info on all the requests in the list
    cols = [
        {'name':'ID'},
        {'name':'Verb'},
        {'name': 'Host'},
        {'name':'Path', 'width':40},
        {'name':'S-Code'},
        {'name':'Req Len'},
        {'name':'Rsp Len'},
        {'name':'Time'},
        {'name':'Mngl'},
    ]
    rows = []
    for request in requests:
        rid = request.reqid
        method = request.verb
        host = request.headers['host']
        path = request.full_path
        reqlen = len(request.raw_data)
        rsplen = 'N/A'
        mangle_str = '--'
        # Mangle column: 'q' = request mangled, 's' = response mangled.
        if request.unmangled:
            mangle_str = 'q'
        if request.response:
            response_code = str(request.response.response_code) + \
                ' ' + request.response.response_text
            rsplen = len(request.response.raw_data)
            if request.response.unmangled:
                if mangle_str == '--':
                    mangle_str = 's'
                else:
                    mangle_str += '/s'
        else:
            response_code = ''
        time_str = '--'
        if request.time_start and request.time_end:
            time_delt = request.time_end - request.time_start
            time_str = "%.2f" % time_delt.total_seconds()
        # NOTE(review): port and is_ssl are computed but never added to the
        # row; presumably leftovers or planned columns — confirm.
        port = request.port
        if request.is_ssl:
            is_ssl = 'YES'
        else:
            is_ssl = 'NO'
        rows.append([rid, method, host, path, response_code,
                     reqlen, rsplen, time_str, mangle_str])
    print_table(cols, rows)
def print_request_extended(request):
# Prints extended info for the request
title = "Request Info (reqid=%d)" % request.reqid
print title
print '-'*len(title)
reqlen = len(request.raw_data)
reqlen = '%d bytes' % reqlen
rsplen = 'No response'
mangle_str = 'Nothing mangled'
if request.unmangled:
mangle_str = 'Request'
if request.response:
response_code = str(request.response.response_code) + \
' ' + request.response.response_text
rsplen = len(request.response.raw_data)
rsplen = '%d bytes' % rsplen
if request.response.unmangled:
if mangle_str == 'Nothing mangled':
mangle_str = 'Response'
else:
mangle_str += ' and Response'
else:
response_code = ''
time_str = '--'
if request.time_start and request.time_end:
time_delt = request.time_end - request.time_start
time_str = "%.2f sec" % time_delt.total_seconds()
port = request.port
if request.is_ssl:
is_ssl = 'YES'
else:
is_ssl = 'NO'
if request.time_start:
time_made_str = request.time_start.strftime('%a, %b %d, %Y, %I:%M:%S %p')
else:
time_made_str = '--'
print 'Made on %s' % time_made_str
print 'ID: %d' % request.reqid
print 'Verb: %s' % request.verb
print 'Host: %s' % request.host
print 'Path: %s' % request.full_path
print 'Status Code: %s' % response_code
print 'Request Length: %s' % reqlen
print 'Response Length: %s' % rsplen
if request.response.unmangled:
print 'Unmangled Response Length: %s bytes' % len(request.response.unmangled.full_response)
print 'Time: %s' % time_str
print 'Port: %s' % request.port
print 'SSL: %s' % is_ssl
print 'Mangled: %s' % mangle_str

505
pappyproxy/context.py Normal file
View file

@ -0,0 +1,505 @@
from pappyproxy import http
from twisted.internet import defer
from util import PappyException
import shlex
"""
context.py
Functions and classes involved with managing the current context and filters
"""
scope = []
base_filters = []
active_filters = []
active_requests = []
class FilterParseError(PappyException):
    # Raised when a filter string cannot be parsed into a predicate.
    pass
class Filter(object):
    """A compiled filter: parses a filter string into a predicate callable
    over request objects."""

    def __init__(self, filter_string):
        self.filter_func = self.from_filter_string(filter_string)
        self.filter_string = filter_string

    def __call__(self, *args, **kwargs):
        return self.filter_func(*args, **kwargs)

    @staticmethod
    def from_filter_string(filter_string):
        """Parse "<field> <relation> [val1 [relation2 val2]]" into a predicate.

        Raises PappyException / FilterParseError on invalid input.

        Fix: when fewer than five tokens were given, the final fallback
        branch assigned ``comp2 = None`` instead of ``val2 = None`` —
        clobbering a valid second relation (for exactly four tokens) and
        leaving val2 undefined.
        """
        args = shlex.split(filter_string)
        field = args[0]
        relation = args[1]
        new_filter = None
        negate = False
        # A leading 'n' negates the relation (e.g. 'nct' = does-not-contain).
        if relation[0] == 'n' and len(relation) > 1:
            negate = True
            relation = relation[1:]

        # Raises exception if invalid
        comparer = get_relation(relation)

        if len(args) > 2:
            val1 = args[2]
        elif relation not in ('ex',):
            raise PappyException('%s requires a value' % relation)
        else:
            val1 = None
        if len(args) > 3:
            comp2 = args[3]
        else:
            comp2 = None
        if len(args) > 4:
            val2 = args[4]
        else:
            val2 = None

        if field in ("all",):
            new_filter = gen_filter_by_all(comparer, val1, negate)
        elif field in ("host", "domain", "hs", "dm"):
            new_filter = gen_filter_by_host(comparer, val1, negate)
        elif field in ("path", "pt"):
            new_filter = gen_filter_by_path(comparer, val1, negate)
        elif field in ("body", "bd", "data", "dt"):
            new_filter = gen_filter_by_body(comparer, val1, negate)
        elif field in ("verb", "vb"):
            new_filter = gen_filter_by_verb(comparer, val1, negate)
        elif field in ("param", "pm"):
            if len(args) > 4:
                comparer2 = get_relation(comp2)
                new_filter = gen_filter_by_params(comparer, val1,
                                                  comparer2, val2, negate)
            else:
                new_filter = gen_filter_by_params(comparer, val1,
                                                  negate=negate)
        elif field in ("header", "hd"):
            if len(args) > 4:
                comparer2 = get_relation(comp2)
                new_filter = gen_filter_by_headers(comparer, val1,
                                                   comparer2, val2, negate)
            else:
                new_filter = gen_filter_by_headers(comparer, val1,
                                                   negate=negate)
        elif field in ("rawheaders", "rh"):
            new_filter = gen_filter_by_raw_headers(comparer, val1, negate)
        elif field in ("sentcookie", "sck"):
            if len(args) > 4:
                comparer2 = get_relation(comp2)
                new_filter = gen_filter_by_submitted_cookies(comparer, val1,
                                                             comparer2, val2, negate)
            else:
                new_filter = gen_filter_by_submitted_cookies(comparer, val1,
                                                             negate=negate)
        elif field in ("setcookie", "stck"):
            if len(args) > 4:
                comparer2 = get_relation(comp2)
                new_filter = gen_filter_by_set_cookies(comparer, val1,
                                                       comparer2, val2, negate)
            else:
                new_filter = gen_filter_by_set_cookies(comparer, val1,
                                                       negate=negate)
        elif field in ("statuscode", "sc", "responsecode"):
            new_filter = gen_filter_by_response_code(comparer, val1, negate)
        elif field in ("responsetime", "rt"):
            # TODO: response-time filtering is not wired up yet.
            pass
        else:
            raise FilterParseError("%s is not a valid field" % field)

        if new_filter is not None:
            return new_filter
        else:
            raise FilterParseError("Error creating filter")
def filter_reqs(requests, filters):
    """Return the requests that pass every filter in *filters*.

    Each filter is a predicate; a request survives only if all of them
    return True for it.
    """
    # Could definitely be more efficient, but it stays like this until
    # it impacts performance
    for filt in filters:
        requests = [req for req in requests if filt(req)]
    return requests
def cmp_is(a, b):
    """Relation 'is': values are equal when stringified."""
    return str(a) == str(b)

def cmp_contains(a, b):
    """Relation 'contains': case-insensitive substring test."""
    return b.lower() in a.lower()

def cmp_exists(a, b=None):
    """Relation 'exists': the value is present (non-None)."""
    return a is not None

def cmp_len_eq(a, b):
    """Relation 'Leq': length equals int(b)."""
    return len(a) == int(b)

def cmp_len_gt(a, b):
    """Relation 'Lgt': length greater than int(b)."""
    return int(b) < len(a)

def cmp_len_lt(a, b):
    """Relation 'Llt': length less than int(b)."""
    return len(a) < int(b)

def cmp_eq(a, b):
    """Relation 'eq': numeric equality."""
    return int(a) == int(b)

def cmp_gt(a, b):
    """Relation 'gt': numerically greater than."""
    return int(b) < int(a)

def cmp_lt(a, b):
    """Relation 'lt': numerically less than."""
    return int(a) < int(b)
def gen_filter_by_attr(comparer, val, attr, negate=False):
    """
    Filters by an attribute whose name is shared by the request and response
    objects
    """
    def f(req):
        # Evaluate the comparer on the request and (if present) the response;
        # the filter matches when either side matches.
        hits = [comparer(getattr(req, attr), val)]
        if req.response:
            hits.append(comparer(getattr(req.response, attr), val))
        matched = any(hits)
        return (not matched) if negate else matched
    return f
def gen_filter_by_all(comparer, val, negate=False):
    """Match when the full request text — or, if present, the full response
    text — satisfies the comparer."""
    def f(req):
        matched = comparer(req.full_request, val)
        if req.response:
            matched = comparer(req.response.full_response, val) or matched
        return (not matched) if negate else matched
    return f
def gen_filter_by_host(comparer, val, negate=False):
    """Build a predicate that matches a request by its host attribute."""
    def f(req):
        hit = comparer(req.host, val)
        return (not hit) if negate else hit
    return f
def gen_filter_by_body(comparer, val, negate=False):
    # Body filters compare against the raw (undecoded) entity data of the
    # request and, if present, the response.
    return gen_filter_by_attr(comparer, val, 'raw_data', negate=negate)

def gen_filter_by_raw_headers(comparer, val, negate=False):
    # Compares against the full header block as a single string.
    return gen_filter_by_attr(comparer, val, 'raw_headers', negate=negate)
def gen_filter_by_response_code(comparer, val, negate=False):
    """Build a predicate on the response's status code; a request with no
    response never matches (before negation)."""
    def f(req):
        rsp = req.response
        hit = comparer(rsp.response_code, val) if rsp else False
        return (not hit) if negate else hit
    return f
def gen_filter_by_path(comparer, val, negate=False):
    """Build a predicate that matches a request by its path attribute."""
    def f(req):
        hit = comparer(req.path, val)
        return (not hit) if negate else hit
    return f
def gen_filter_by_responsetime(comparer, val, negate=False):
    """Build a predicate over a request's response time (req.rsptime)."""
    def f(req):
        hit = comparer(req.rsptime, val)
        return (not hit) if negate else hit
    return f
def gen_filter_by_verb(comparer, val, negate=False):
    """Build a predicate that matches a request by its HTTP verb."""
    def f(req):
        hit = comparer(req.verb, val)
        return (not hit) if negate else hit
    return f
def check_repeatable_dict(d, comparer1, val1, comparer2=None, val2=None, negate=False):
    """Scan every key/value pair of a repeatable dict.

    With one comparer, a pair matches when its key OR value satisfies
    (comparer1, val1).  With two comparers, the key must satisfy the first
    and the value the second.  Returns whether any pair matched, inverted
    when negate is set.
    """
    found = False
    for key, value in d.all_pairs():
        if comparer2:
            if comparer1(key, val1) and comparer2(value, val2):
                found = True
                break
        else:
            # We check if the first value matches either
            if comparer1(key, val1) or comparer1(value, val1):
                found = True
                break
    return (not found) if negate else found
def gen_filter_by_repeatable_dict_attr(attr, keycomparer, keyval, valcomparer=None,
                                       valval=None, negate=False, check_req=True,
                                       check_rsp=True):
    # Generic filter over a "repeatable dict" attribute (e.g. headers,
    # cookies) found on the request and/or its response; delegates the
    # per-pair matching to check_repeatable_dict.
    def f(req):
        matched = False
        d = getattr(req, attr)
        if check_req and check_repeatable_dict(d, keycomparer, keyval, valcomparer, valval):
            matched = True
        if check_rsp and req.response:
            d = getattr(req.response, attr)
            if check_repeatable_dict(d, keycomparer, keyval, valcomparer, valval):
                matched = True
        if negate:
            return not matched
        else:
            return matched
    return f
def gen_filter_by_headers(keycomparer, keyval, valcomparer=None, valval=None,
                          negate=False):
    # Header filter: checks both request and response headers.
    return gen_filter_by_repeatable_dict_attr('headers', keycomparer, keyval,
                                              valcomparer, valval, negate=negate)

def gen_filter_by_submitted_cookies(keycomparer, keyval, valcomparer=None,
                                    valval=None, negate=False):
    # Sent-cookie filter: request side only (check_rsp=False).
    return gen_filter_by_repeatable_dict_attr('cookies', keycomparer, keyval,
                                              valcomparer, valval, negate=negate,
                                              check_rsp=False)
def gen_filter_by_set_cookies(keycomparer, keyval, valcomparer=None,
                              valval=None, negate=False):
    # Match when the response sets a cookie whose name satisfies keycomparer
    # (and, if a value comparer is given, whose value satisfies it too).
    # NOTE(review): the 'negate' parameter is accepted but never applied in
    # this function — looks like a bug; preserved as-is, confirm intent.
    def f(req):
        if not req.response:
            return False
        for k, c in req.response.cookies.all_pairs():
            if keycomparer(c.key, keyval):
                if not valcomparer:
                    return True
                else:
                    if valcomparer(c.val, valval):
                        return True
        return False
    return f
def gen_filter_by_get_params(keycomparer, keyval, valcomparer=None, valval=None,
                             negate=False):
    # Match when any GET (query-string) parameter key satisfies keycomparer;
    # if a value comparer is given, the matching key's value must satisfy it
    # as well.
    def f(req):
        matched = False
        for k, v in req.get_params.all_pairs():
            if keycomparer(k, keyval):
                if not valcomparer:
                    matched = True
                else:
                    if valcomparer(v, valval):
                        matched = True
        if negate:
            return not matched
        else:
            return matched
    return f
def gen_filter_by_post_params(keycomparer, keyval, valcomparer=None, valval=None,
                              negate=False):
    # Same matching rules as gen_filter_by_get_params, but over the request
    # body's POST parameters.
    def f(req):
        matched = False
        for k, v in req.post_params.all_pairs():
            if keycomparer(k, keyval):
                if not valcomparer:
                    matched = True
                else:
                    if valcomparer(v, valval):
                        matched = True
        if negate:
            return not matched
        else:
            return matched
    return f
def gen_filter_by_params(keycomparer, keyval, valcomparer=None, valval=None,
                         negate=False):
    """Return a filter matching requests with a matching GET *or* POST param.

    negate inverts the combined result; the sub-filters are deliberately
    built without negate, otherwise we would get double negatives.
    """
    # PERF: build the two sub-filters once, not on every request checked
    f1 = gen_filter_by_post_params(keycomparer, keyval, valcomparer, valval)
    f2 = gen_filter_by_get_params(keycomparer, keyval, valcomparer, valval)
    def f(req):
        # Evaluate both, matching the original non-short-circuit behavior
        post_match = f1(req)
        get_match = f2(req)
        matched = post_match or get_match
        if negate:
            return not matched
        return matched
    return f
def get_relation(s):
    """Return the comparison function named by the filter-string token *s*.

    Raises PappyException for not-yet-implemented relations and
    FilterParseError for unknown ones.
    """
    if s in ("is",):
        return cmp_is
    elif s in ("contains", "ct"):
        return cmp_contains
    elif s in ("containsr", "ctr"):
        # TODO
        raise PappyException("Contains (regexp) is not implemented yet. Sorry.")
    elif s in ("exists", "ex"):
        return cmp_exists
    # BUG FIX: these branches were written as `s in ("Leq")` etc. -- a plain
    # parenthesized STRING, not a tuple -- so substring tokens like "eq",
    # "gt" and "lt" wrongly matched the length relations. One-element tuples
    # (with the trailing comma) restore exact-token matching.
    elif s in ("Leq",):
        return cmp_len_eq
    elif s in ("Lgt",):
        return cmp_len_gt
    elif s in ("Llt",):
        return cmp_len_lt
    elif s in ("eq",):
        return cmp_eq
    elif s in ("gt",):
        return cmp_gt
    elif s in ("lt",):
        return cmp_lt
    raise FilterParseError("Invalid relation: %s" % s)
@defer.inlineCallbacks
def init():
    # Initial population of the in-memory request list from storage
    yield reload_from_storage()
@defer.inlineCallbacks
def reload_from_storage():
    # Rebuild the active request list by querying storage with the currently
    # active filters
    global active_requests
    active_requests = yield http.Request.load_from_filters(active_filters)
def add_filter(filt):
    # Add a filter to the active set and immediately drop any in-memory
    # requests that no longer pass
    global active_requests
    global active_filters
    active_filters.append(filt)
    active_requests = filter_reqs(active_requests, active_filters)
def add_request(req):
    # Track a new request, but only if it passes the active filters
    global active_requests
    if passes_filters(req, active_filters):
        active_requests.append(req)
def filter_recheck():
    """Re-apply the active filters to every tracked request, dropping any
    that no longer pass (e.g. after a response was attached)."""
    global active_requests
    global active_filters
    active_requests = [req for req in active_requests
                       if passes_filters(req, active_filters)]
def passes_filters(request, filters):
    """Return True iff *request* satisfies every filter in *filters*.

    An empty filter list matches everything.
    """
    return all(filt(request) for filt in filters)
def sort(key=None):
    """Sort the active request list; default ordering is by request id."""
    global active_requests
    sort_key = key if key else (lambda r: r.reqid)
    active_requests = sorted(active_requests, key=sort_key)
def in_scope(request):
    # True if the request passes every scope filter
    global scope
    return passes_filters(request, scope)
def set_scope(filters):
    # Replace the scope with the given filter list
    global scope
    scope = filters
def save_scope():
    # Snapshot the active filters as the new scope (copy, so later edits to
    # the active filters don't change the scope)
    global active_filters
    global scope
    scope = active_filters[:]
@defer.inlineCallbacks
def reset_to_scope():
    # Discard the active filters, fall back to the scope (copied), and
    # reload the request list from storage
    global active_filters
    global scope
    active_filters = scope[:]
    yield reload_from_storage()
def print_scope():
    # Print each scope filter's original filter string, one per line
    global scope
    for f in scope:
        print f.filter_string
@defer.inlineCallbacks
def store_scope(dbpool):
    """Replace the persisted scope in the datafile with the in-memory scope."""
    # Delete the old scope
    yield dbpool.runQuery(
        """
        DELETE FROM scope
        """
    )
    # Insert the new scope, preserving filter order
    for order, filt in enumerate(scope):
        yield dbpool.runQuery(
            """
            INSERT INTO scope (filter_order, filter_string) VALUES (?, ?);
            """,
            (order, filt.filter_string)
        )
@defer.inlineCallbacks
def load_scope(dbpool):
    """Load the persisted scope filters from the datafile into the module
    global `scope`."""
    global scope
    rows = yield dbpool.runQuery(
        """
        SELECT filter_order, filter_string FROM scope;
        """,
    )
    # Restore the saved ordering before rebuilding the filter objects
    ordered = sorted(rows, key=lambda row: int(row[0]))
    scope = [Filter(row[1]) for row in ordered]

View file

@ -0,0 +1,7 @@
{
"data_file": "./data.db",
"cert_dir": "{PAPPYDIR}/certs",
"proxy_listeners": [
{"port": 8000, "interface": "127.0.0.1"}
]
}

1229
pappyproxy/http.py Normal file

File diff suppressed because it is too large Load diff

121
pappyproxy/mangle.py Normal file
View file

@ -0,0 +1,121 @@
import os
import string
import subprocess
import tempfile
import http
import pappyproxy
from twisted.internet import defer
active_requests = {}
intercept_requests = False
intercept_responses = False
def set_intercept_requests(val):
    # Toggle interactive editing of proxied requests
    global intercept_requests
    intercept_requests = val
def set_intercept_responses(val):
    # Toggle interactive editing of proxied responses
    global intercept_responses
    intercept_responses = val
@defer.inlineCallbacks
def mangle_request(request, connection_id):
    # This function gets called to mangle/edit requests passed through the proxy.
    # Returns the request to forward (original or mangled), or None if dropped.
    global intercept_requests
    # Work on a copy so the original wire request is preserved
    orig_req = http.Request(request.full_request)
    orig_req.port = request.port
    orig_req.is_ssl = request.is_ssl
    retreq = orig_req
    if pappyproxy.context.in_scope(orig_req):
        if intercept_requests: # if we want to mangle...
            # Write original request to the temp file
            with tempfile.NamedTemporaryFile(delete=False) as tf:
                tfName = tf.name
                tf.write(orig_req.full_request)
            # Have the console edit the file (blocks this deferred until done)
            yield pappyproxy.console.edit_file(tfName)
            # Create new mangled request from edited file
            with open(tfName, 'r') as f:
                mangled_req = http.Request(f.read(), update_content_length=True)
            mangled_req.is_ssl = orig_req.is_ssl
            mangled_req.port = orig_req.port
            os.remove(tfName)
            # Check if dropped (an emptied file means "drop this request")
            if mangled_req.full_request == '':
                pappyproxy.proxy.log('Request dropped!')
                defer.returnValue(None)
            # Check if it changed
            if mangled_req.full_request != orig_req.full_request:
                # Set the object's metadata so the unmangled original is kept
                mangled_req.unmangled = orig_req
                retreq = mangled_req
        # Add our request to the context
        pappyproxy.context.add_request(retreq)
    else:
        pappyproxy.proxy.log('Out of scope! Request passed along unharmed', id=connection_id)
    # Remember the in-flight request so mangle_response can pair it up
    active_requests[connection_id] = retreq
    retreq.submitted = True
    defer.returnValue(retreq)
@defer.inlineCallbacks
def mangle_response(response, connection_id):
    # This function gets called to mangle/edit responses passed through the proxy.
    # Returns the paired request with its (possibly mangled) response attached,
    # or None if the response was dropped.
    global intercept_responses
    #response = string.replace(response, 'cloud', 'butt')
    #response = string.replace(response, 'Cloud', 'Butt')
    # Pair the response with the request recorded by mangle_request
    myreq = active_requests[connection_id]
    orig_rsp = http.Response(response.full_response)
    retrsp = orig_rsp
    if pappyproxy.context.in_scope(myreq):
        if intercept_responses: # If we want to mangle...
            # Write original response to the temp file
            with tempfile.NamedTemporaryFile(delete=False) as tf:
                tfName = tf.name
                tf.write(orig_rsp.full_response)
            # Have the console edit the file (blocks this deferred until done)
            yield pappyproxy.console.edit_file(tfName, front=True)
            # Create new mangled response from edited file
            with open(tfName, 'r') as f:
                mangled_rsp = http.Response(f.read(), update_content_length=True)
            os.remove(tfName)
            # Check if dropped (an emptied file means "drop this response")
            if mangled_rsp.full_response == '':
                pappyproxy.proxy.log('Response dropped!')
                defer.returnValue(None)
            if mangled_rsp.full_response != orig_rsp.full_response:
                # Keep the unmangled original attached to the mangled copy
                mangled_rsp.unmangled = orig_rsp
                retrsp = mangled_rsp
        # Persist the request (and its unmangled original) before attaching
        if not myreq.reqid:
            myreq.save()
        if myreq.unmangled:
            myreq.unmangled.save()
        myreq.response = retrsp
    else:
        pappyproxy.proxy.log('Out of scope! Response passed along unharmed', id=connection_id)
    # NOTE(review): the entry is removed here on the success path, so
    # connection_lost() may see the id already gone -- confirm callers
    del active_requests[connection_id]
    myreq.response = retrsp
    pappyproxy.context.filter_recheck()
    defer.returnValue(myreq)
def connection_lost(connection_id):
    """Forget the in-flight request for a closed proxy connection.

    BUG FIX: mangle_response() already removes the entry when a response
    completes, so an unconditional `del` here could raise KeyError; pop()
    with a default keeps this idempotent.
    """
    active_requests.pop(connection_id, None)

119
pappyproxy/pappy.py Executable file
View file

@ -0,0 +1,119 @@
#!/usr/bin/env python2
import argparse
import cmd2
import crochet
import imp
import os
import schema.update
import shutil
import sys
import sqlite3
import tempfile
from pappyproxy import console
from pappyproxy import config
from pappyproxy import comm
from pappyproxy import http
from pappyproxy import context
from pappyproxy import proxy
from twisted.enterprise import adbapi
from twisted.internet import reactor, defer
from twisted.internet.threads import deferToThread
from twisted.internet.protocol import ServerFactory
crochet.no_setup()
def parse_args():
    """Parse sys.argv and return a settings dictionary.

    Currently the only setting is 'lite' (bool), enabled with -l/--lite.
    """
    parser = argparse.ArgumentParser(description='An intercepting proxy for testing web applications.')
    parser.add_argument('-l', '--lite', help='Run the proxy in "lite" mode', action='store_true')
    args = parser.parse_args(sys.argv[1:])
    # store_true already yields a bool; normalize defensively
    return {'lite': bool(args.lite)}
def set_text_factory(conn):
    """adbapi connection-open hook: return sqlite TEXT values as plain byte
    strings instead of unicode objects."""
    conn.text_factory = str
def delete_datafile():
    # Remove the temporary datafile created for a "lite" mode session
    print 'Deleting temporary datafile'
    os.remove(config.DATAFILE)
@defer.inlineCallbacks
def main():
settings = parse_args()
if settings['lite']:
conf_settings = config.get_default_config()
conf_settings['debug_dir'] = None
conf_settings['debug_to_file'] = False
with tempfile.NamedTemporaryFile(delete=False) as tf:
conf_settings['data_file'] = tf.name
print 'Temporary datafile is %s' % tf.name
delete_data_on_quit = True
config.load_settings(conf_settings)
else:
# Initialize config
config.load_from_file('./config.json')
delete_data_on_quit = False
# If the data file doesn't exist, create it with restricted permissions
if not os.path.isfile(config.DATAFILE):
with os.fdopen(os.open(config.DATAFILE, os.O_CREAT, 0o0600), 'r') as f:
pass
dbpool = adbapi.ConnectionPool("sqlite3", config.DATAFILE,
check_same_thread=False,
cp_openfun=set_text_factory,
cp_max=1)
yield schema.update.update_schema(dbpool)
http.init(dbpool)
yield context.init()
# Run the proxy
if config.DEBUG_DIR and os.path.exists(config.DEBUG_DIR):
shutil.rmtree(config.DEBUG_DIR)
print 'Removing old debugging output'
factory = ServerFactory()
factory.protocol = proxy.ProxyServer
listen_strs = []
for listener in config.LISTENERS:
reactor.listenTCP(listener[0], factory, interface=listener[1])
listener_str = 'port %d' % listener[0]
if listener[1] not in ('127.0.0.1', 'localhost'):
listener_str += ' (bound to %s)' % listener[1]
listen_strs.append(listener_str)
if listen_strs:
print 'Proxy is listening on %s' % (', '.join(listen_strs))
com_factory = ServerFactory()
com_factory.protocol = comm.CommServer
# Make the port different for every instance of pappy, then pass it to
# anything we run. Otherwise we can only have it running once on a machine
comm_port = reactor.listenTCP(0, com_factory, interface='127.0.0.1')
comm.set_comm_port(comm_port.getHost().port)
sys.argv = [sys.argv[0]] # cmd2 tries to parse args
d = deferToThread(console.ProxyCmd().cmdloop)
d.addCallback(lambda ignored: reactor.stop())
if delete_data_on_quit:
d.addCallback(lambda ignored: delete_datafile())
# Load the scope
yield context.load_scope(http.dbpool)
context.reset_to_scope()
def start():
    # Entry point: schedule main() and hand control to the twisted reactor
    reactor.callWhenRunning(main)
    reactor.run()
if __name__ == '__main__':
start()

384
pappyproxy/proxy.py Normal file
View file

@ -0,0 +1,384 @@
import datetime
import gzip
import os
import random
import re
import schema.update
import shutil
import string
import StringIO
import sys
import urlparse
import zlib
from OpenSSL import SSL
from pappyproxy import config
from pappyproxy import console
from pappyproxy import context
from pappyproxy import http
from pappyproxy import mangle
from pappyproxy.util import PappyException
from twisted.enterprise import adbapi
from twisted.internet import reactor, ssl
from twisted.internet.protocol import ClientFactory
from twisted.protocols.basic import LineReceiver
from twisted.internet import defer
from OpenSSL import crypto
next_connection_id = 1
cached_certs = {}
def get_next_connection_id():
    # Hand out a unique, monotonically increasing id for each proxied
    # connection (used to pair requests/responses and tag log lines)
    global next_connection_id
    ret_id = next_connection_id
    next_connection_id += 1
    return ret_id
def log(message, id=None, symbol='*', verbosity_level=1):
    # Write a debug line to stdout and/or log files depending on config.
    # Lines tagged with a connection id go to a per-connection log file;
    # untagged lines go to a shared debug.log.
    if config.DEBUG_TO_FILE and not os.path.exists(config.DEBUG_DIR):
        os.makedirs(config.DEBUG_DIR)
    if id:
        debug_str = '[%s](%d) %s' % (symbol, id, message)
        if config.DEBUG_TO_FILE:
            with open(config.DEBUG_DIR+'/connection_%d.log' % id, 'a') as f:
                f.write(debug_str+'\n')
    else:
        debug_str = '[%s] %s' % (symbol, message)
        if config.DEBUG_TO_FILE:
            with open(config.DEBUG_DIR+'/debug.log', 'a') as f:
                f.write(debug_str+'\n')
    # Only echo to the console when the configured verbosity is high enough
    if config.DEBUG_VERBOSITY >= verbosity_level:
        print debug_str
def log_request(request, id=None, symbol='*', verbosity_level=3):
    """Log a raw request/response string one CRLF-separated line at a time."""
    for line in request.split('\r\n'):
        log(line, id, symbol, verbosity_level)
class ClientTLSContext(ssl.ClientContextFactory):
    # TLS context used when connecting out to remote HTTPS servers
    isClient = 1
    def getContext(self):
        return SSL.Context(SSL.TLSv1_METHOD)
class ProxyClient(LineReceiver):
    """Protocol for the outbound connection to the remote server: sends the
    (possibly mangled) request and incrementally parses the response."""

    def __init__(self, request):
        self.factory = None
        self._response_sent = False
        # Guards against writing the request to the transport more than once
        self._sent = False
        self.request = request
        # Fired with the raw request bytes once they have been sent
        self.data_defer = defer.Deferred()
        # Response object built up incrementally as data arrives
        self._response_obj = http.Response()

    def log(self, message, symbol='*', verbosity_level=1):
        # Tag every log line with this connection's id
        log(message, id=self.factory.connection_id, symbol=symbol, verbosity_level=verbosity_level)

    def lineReceived(self, *args, **kwargs):
        # Feed status/header lines to the response parser; once the headers
        # are complete, switch to raw mode to read the body
        line = args[0]
        if line is None:
            line = ''
        self._response_obj.add_line(line)
        self.log(line, symbol='r<', verbosity_level=3)
        if self._response_obj.headers_complete:
            if self._response_obj.complete:
                self.handle_response_end()
                return
            self.log("Headers end, length given, waiting for data", verbosity_level=3)
            self.setRawMode()

    def rawDataReceived(self, *args, **kwargs):
        # Body data: hand it to the response parser until it reports complete
        data = args[0]
        if not self._response_obj.complete:
            if data:
                s = console.printable_data(data)
                dlines = s.split('\n')
                for l in dlines:
                    self.log(l, symbol='<rd', verbosity_level=3)
            self._response_obj.add_data(data)

        if self._response_obj.complete:
            self.handle_response_end()

    def connectionMade(self):
        self._connection_made()

    @defer.inlineCallbacks
    def _connection_made(self):
        # Mangle (optionally), persist, then write the request to the wire
        self.log('Connection established, sending request...', verbosity_level=3)
        # Make sure to add errback
        lines = self.request.full_request.splitlines()
        for l in lines:
            self.log(l, symbol='>r', verbosity_level=3)
        mangled_request = yield mangle.mangle_request(self.request,
                                                      self.factory.connection_id)
        if mangled_request is None:
            # Request was dropped during interception
            self.transport.loseConnection()
            return
        if context.in_scope(mangled_request):
            yield mangled_request.deep_save()
        if not self._sent:
            self.transport.write(mangled_request.full_request)
            self._sent = True
            self.data_defer.callback(mangled_request.full_request)

    def handle_response_end(self, *args, **kwargs):
        # Full response parsed: close the remote connection and hand the
        # response to the factory for mangling/relaying
        self.log("Remote response finished, returning data to original stream")
        self.transport.loseConnection()
        assert self._response_obj.full_response
        self.factory.return_response(self._response_obj)
class ProxyClientFactory(ClientFactory):
    """Builds a ProxyClient for one outbound request and delivers the
    (possibly mangled) request/response pair through data_defer."""

    def __init__(self, request):
        self.request = request
        #self.proxy_server = None
        self.connection_id = -1
        # Fired with the request/response pair once the response is complete
        self.data_defer = defer.Deferred()
        # Timestamps recorded for request timing metadata
        self.start_time = datetime.datetime.now()
        self.end_time = None

    def log(self, message, symbol='*', verbosity_level=1):
        log(message, id=self.connection_id, symbol=symbol, verbosity_level=verbosity_level)

    def buildProtocol(self, addr):
        p = ProxyClient(self.request)
        p.factory = self
        return p

    def clientConnectionFailed(self, connector, reason):
        self.log("Connection failed with remote server: %s" % reason.getErrorMessage())

    def clientConnectionLost(self, connector, reason):
        self.log("Connection lost with remote server: %s" % reason.getErrorMessage())

    @defer.inlineCallbacks
    def return_response(self, response):
        # Called by ProxyClient once the full response has been parsed;
        # mangles (optionally), persists in-scope pairs, then fires data_defer
        self.end_time = datetime.datetime.now()
        log_request(console.printable_data(response.full_response), id=self.connection_id, symbol='<m', verbosity_level=3)
        mangled_reqrsp_pair = yield mangle.mangle_response(response, self.connection_id)
        if mangled_reqrsp_pair:
            log_request(console.printable_data(mangled_reqrsp_pair.response.full_response),
                        id=self.connection_id, symbol='<', verbosity_level=3)
            mangled_reqrsp_pair.time_start = self.start_time
            mangled_reqrsp_pair.time_end = self.end_time
            if context.in_scope(mangled_reqrsp_pair):
                yield mangled_reqrsp_pair.deep_save()
            self.data_defer.callback(mangled_reqrsp_pair)
class ProxyServer(LineReceiver):
    """Browser-facing protocol: parses the incoming request, answers CONNECT
    by starting TLS with a per-host generated certificate, and forwards
    other requests to the remote server via ProxyClientFactory."""

    def log(self, message, symbol='*', verbosity_level=1):
        log(message, id=self.connection_id, symbol=symbol, verbosity_level=verbosity_level)

    def __init__(self, *args, **kwargs):
        global next_connection_id
        self.connection_id = get_next_connection_id()
        # Request currently being parsed from the browser
        self._request_obj = http.Request()
        # True while answering a CONNECT handshake
        self._connect_response = False
        # Whether the parsed request should be forwarded upstream
        self._forward = True
        # URI from a CONNECT request, re-applied to subsequent tunneled requests
        self._connect_uri = None

    def lineReceived(self, *args, **kwargs):
        line = args[0]
        self.log(line, symbol='>', verbosity_level=3)
        self._request_obj.add_line(line)
        if self._request_obj.verb.upper() == 'CONNECT':
            # CONNECT is answered locally, never forwarded
            self._connect_response = True
            self._forward = False
            self._connect_uri = self._request_obj.url
        if self._request_obj.headers_complete:
            self.setRawMode()
        if self._request_obj.complete:
            self.setLineMode()
            try:
                self.full_request_received()
            except PappyException as e:
                print str(e)

    def rawDataReceived(self, *args, **kwargs):
        # Request body bytes after the headers
        data = args[0]
        self._request_obj.add_data(data)
        self.log(data, symbol='d>', verbosity_level=3)
        if self._request_obj.complete:
            try:
                self.full_request_received()
            except PappyException as e:
                print str(e)

    def full_request_received(self, *args, **kwargs):
        global cached_certs
        self.log('End of request', verbosity_level=3)
        if self._connect_response:
            self.log('Responding to browser CONNECT request', verbosity_level=3)
            okay_str = 'HTTP/1.1 200 Connection established\r\n\r\n'
            self.transport.write(okay_str)
            # Generate a cert for the hostname, cached per host for reuse
            if not self._request_obj.host in cached_certs:
                log("Generating cert for '%s'" % self._request_obj.host,
                    verbosity_level=3)
                (pkey, cert) = generate_cert(self._request_obj.host,
                                             config.CERT_DIR)
                cached_certs[self._request_obj.host] = (pkey, cert)
            else:
                log("Using cached cert for %s" % self._request_obj.host, verbosity_level=3)
                (pkey, cert) = cached_certs[self._request_obj.host]
            ctx = ServerTLSContext(
                private_key=pkey,
                certificate=cert,
            )
            self.transport.startTLS(ctx, self.factory)
        if self._forward:
            self.log("Forwarding to %s on %d" % (self._request_obj.host, self._request_obj.port))
            factory = ProxyClientFactory(self._request_obj)
            factory.proxy_server = self
            factory.connection_id = self.connection_id
            factory.data_defer.addCallback(self.send_response_back)
            if self._request_obj.is_ssl:
                self.log("Accessing over SSL...", verbosity_level=3)
                reactor.connectSSL(self._request_obj.host, self._request_obj.port, factory, ClientTLSContext())
            else:
                self.log("Accessing over TCP...", verbosity_level=3)
                reactor.connectTCP(self._request_obj.host, self._request_obj.port, factory)
        # Reset per-request variables
        self.log("Resetting per-request data", verbosity_level=3)
        self._connect_response = False
        self._forward = True
        self._request_obj = http.Request()
        if self._connect_uri:
            # Requests tunneled through CONNECT keep the original host/port
            self._request_obj.url = self._connect_uri
        self.setLineMode()

    def send_response_back(self, response):
        # Relay the final (possibly mangled) response to the browser
        if response is not None:
            self.transport.write(response.response.full_response)
        self.transport.loseConnection()

    def connectionLost(self, reason):
        self.log('Connection lost with browser: %s' % reason.getErrorMessage())
class ServerTLSContext(ssl.ContextFactory):
    """TLS context factory for the browser-facing side, built from a
    per-host generated certificate and key."""

    def __init__(self, private_key, certificate):
        self.private_key = private_key
        self.certificate = certificate
        self.sslmethod = SSL.TLSv1_METHOD
        self.cacheContext()

    def cacheContext(self):
        # Build the OpenSSL context once and reuse it for every handshake
        ctx = SSL.Context(self.sslmethod)
        ctx.use_certificate(self.certificate)
        ctx.use_privatekey(self.private_key)
        self._context = ctx

    def __getstate__(self):
        # The OpenSSL context object is not picklable; drop it and rebuild
        # on unpickle via __setstate__
        d = self.__dict__.copy()
        del d['_context']
        return d

    def __setstate__(self, state):
        self.__dict__ = state
        self.cacheContext()

    def getContext(self):
        """Create an SSL context.
        """
        return self._context
def generate_cert_serial():
    """Return a random 20-byte (160-bit) serial number for a generated cert."""
    serial_bits = 8 * 20
    return random.getrandbits(serial_bits)
def load_certs_from_dir(cert_dir):
    """Read the CA certificate and CA private key PEM blobs from cert_dir.

    Raises PappyException when either file cannot be opened.
    """
    try:
        with open(cert_dir + '/' + config.SSL_CA_FILE, 'rt') as ca_file:
            ca_raw = ca_file.read()
    except IOError:
        raise PappyException("Could not load CA cert!")
    try:
        with open(cert_dir + '/' + config.SSL_PKEY_FILE, 'rt') as key_file:
            ca_key_raw = key_file.read()
    except IOError:
        raise PappyException("Could not load CA private key!")
    return (ca_raw, ca_key_raw)
def generate_cert(hostname, cert_dir):
    # Create a leaf certificate for `hostname` signed by the CA stored in
    # cert_dir. Returns (private_key, certificate).
    (ca_raw, ca_key_raw) = load_certs_from_dir(cert_dir)
    ca_cert = crypto.load_certificate(crypto.FILETYPE_PEM, ca_raw)
    ca_key = crypto.load_privatekey(crypto.FILETYPE_PEM, ca_key_raw)

    key = crypto.PKey()
    key.generate_key(crypto.TYPE_RSA, 2048)
    cert = crypto.X509()
    cert.get_subject().CN = hostname
    cert.set_serial_number(generate_cert_serial())
    cert.gmtime_adj_notBefore(0)
    # Valid for ten years from now
    cert.gmtime_adj_notAfter(10*365*24*60*60)
    cert.set_issuer(ca_cert.get_subject())
    cert.set_pubkey(key)
    cert.sign(ca_key, "sha256")

    return (key, cert)
def generate_ca_certs(cert_dir):
    # Generate the self-signed CA certificate and private key used to sign
    # per-host certs, writing both PEM files into cert_dir with 0600 perms.
    # Make directory if necessary
    if not os.path.exists(cert_dir):
        os.makedirs(cert_dir)

    # Private key
    print "Generating private key... ",
    key = crypto.PKey()
    key.generate_key(crypto.TYPE_RSA, 2048)
    with os.fdopen(os.open(cert_dir+'/'+config.SSL_PKEY_FILE, os.O_WRONLY | os.O_CREAT, 0o0600), 'w') as f:
        f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, key))
    print "Done!"

    # Hostname doesn't matter since it's a client cert
    print "Generating client cert... ",
    cert = crypto.X509()
    cert.get_subject().C  = 'US' # Country name
    cert.get_subject().ST = 'Michigan' # State or province name
    cert.get_subject().L  = 'Ann Arbor' # Locality name
    cert.get_subject().O  = 'Pappy Proxy' # Organization name
    #cert.get_subject().OU = '' # Organizational unit name
    cert.get_subject().CN = 'Pappy Proxy' # Common name
    cert.set_serial_number(generate_cert_serial())
    cert.gmtime_adj_notBefore(0)
    # Valid for ten years
    cert.gmtime_adj_notAfter(10*365*24*60*60)
    # Self-signed: the CA is its own issuer
    cert.set_issuer(cert.get_subject())
    cert.add_extensions([
        crypto.X509Extension("basicConstraints", True,
                             "CA:TRUE, pathlen:0"),
        crypto.X509Extension("keyUsage", True,
                             "keyCertSign, cRLSign"),
        crypto.X509Extension("subjectKeyIdentifier", False, "hash",
                             subject=cert),
    ])
    cert.set_pubkey(key)
    cert.sign(key, 'sha256')
    with os.fdopen(os.open(cert_dir+'/'+config.SSL_CA_FILE, os.O_WRONLY | os.O_CREAT, 0o0600), 'w') as f:
        f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
    print "Done!"

9
pappyproxy/repeater.py Normal file
View file

@ -0,0 +1,9 @@
import subprocess
import os
from pappyproxy import comm
def start_editor(reqid):
    # Launch vim with the repeater plugin loaded, telling it which request
    # to edit and which local comm port to talk back to
    script_loc = os.path.join(os.path.dirname(__file__), "vim_repeater", "repeater.vim")
    #print "RepeaterSetup %d %d"%(reqid, comm_port)
    subprocess.call(["vim", "-S", script_loc, "-c", "RepeaterSetup %d %d"%(reqid, comm.comm_port)])

View file

View file

@ -0,0 +1,54 @@
from twisted.internet import defer
"""
Schema v1
Description:
The initial schema for the first version of the proxy. It includes the creation
of the schema_meta table and other data tables.
"""
update_queries = [
"""
CREATE TABLE responses (
id INTEGER PRIMARY KEY AUTOINCREMENT,
full_response BLOB NOT NULL,
unmangled_id INTEGER REFERENCES responses(id)
);
""",
"""
CREATE TABLE requests (
id INTEGER PRIMARY KEY AUTOINCREMENT,
full_request BLOB NOT NULL,
tag TEXT,
submitted INTEGER NOT NULL,
response_id INTEGER REFERENCES responses(id),
unmangled_id INTEGER REFERENCES requests(id),
start_datetime TEXT,
end_datetime TEXT
);
""",
"""
CREATE TABLE schema_meta (
version INTEGER NOT NULL
);
""",
"""
CREATE TABLE scope (
filter_order INTEGER NOT NULL,
filter_string TEXT NOT NULL
);
""",
"""
INSERT INTO schema_meta (version) VALUES (1);
""",
]
@defer.inlineCallbacks
def update(dbpool):
    # Run each v1 DDL statement in order through the adbapi pool
    for query in update_queries:
        yield dbpool.runQuery(query)

View file

@ -0,0 +1,37 @@
from pappyproxy import http
from twisted.internet import defer
"""
Schema v2
Description:
Adds support for specifying the port of a request and specify its port. This
lets requests that have the port/ssl settings specified in the CONNECT request
maintain that information.
"""
update_queries = [
"""
ALTER TABLE requests ADD COLUMN port INTEGER;
""",
"""
ALTER TABLE requests ADD COLUMN is_ssl INTEGER;
""",
"""
UPDATE schema_meta SET version=2;
""",
]
@defer.inlineCallbacks
def update(dbpool):
    # Apply the v2 schema changes, then rewrite every stored request so the
    # new port/is_ssl columns get populated
    for query in update_queries:
        yield dbpool.runQuery(query)

    # Load each request and save them again for any request that specified a port
    # or protocol in the host header.
    http.init(dbpool)
    reqs = yield http.Request.load_from_filters([])
    for req in reqs:
        yield req.deep_save()

View file

@ -0,0 +1,58 @@
import os
import glob
import imp
from twisted.internet import reactor
from twisted.enterprise import adbapi
from twisted.internet import defer
@defer.inlineCallbacks
def get_schema_version(dbpool):
    # Return the datafile's current schema version, or 0 when the
    # schema_meta table does not exist yet (fresh datafile)
    schema_exists = yield dbpool.runQuery("SELECT name FROM sqlite_master WHERE type='table' AND name='schema_meta';")
    if not schema_exists:
        # If we get an empty list, we have no schema
        defer.returnValue(0)
    else:
        schema_version_result = yield dbpool.runQuery("SELECT version FROM schema_meta;")

        # There should only be one row in the meta table
        assert(len(schema_version_result) == 1)

        # Return the retrieved version
        version = schema_version_result[0][0]
        defer.returnValue(version)
def add_schema_files(schemas):
    # Finds and imports all schema_*.py files into the list as
    # (module_name, module) tuples.
    # NOTE(review): `imp` is deprecated (removed in Python 3.12); fine for
    # the Python 2 runtime this project targets, but worth confirming.
    module_files = glob.glob(os.path.dirname(os.path.abspath(__file__)) + "/schema_*.py")
    for mod in module_files:
        module_name = os.path.basename(os.path.splitext(mod)[0])
        newmod = imp.load_source('%s'%module_name, mod)
        schemas.append( (module_name, newmod) )
@defer.inlineCallbacks
def update_schema(dbpool):
# Update the database schema to the latest version
schema_version = yield get_schema_version(dbpool)
if schema_version == 0:
verbose_update = False
else:
verbose_update = True
schemas = []
add_schema_files(schemas)
schemas = sorted(schemas, key=lambda tup: tup[0])
for i in range(schema_version, len(schemas)):
# schemas[0] is v1, schemas[1] is v2, etc
if verbose_update:
print "Updating datafaile schema to version %d" % (i+1)
yield schemas[i][1].update(dbpool)
@defer.inlineCallbacks
def main():
    # Stand-alone entry point: migrate ./data.db then stop the reactor
    dbpool = adbapi.ConnectionPool("sqlite3", "data.db", check_same_thread=False)
    yield update_schema(dbpool)
    reactor.stop()
if __name__ == '__main__':
reactor.callWhenRunning(main)
reactor.run()

View file

View file

@ -0,0 +1,402 @@
import pytest
from pappyproxy import context
from pappyproxy.http import Request, Response, ResponseCookie
@pytest.fixture
def http_request():
    # A bare GET / request used as the base fixture for the filter tests
    return Request('GET / HTTP/1.1\r\n')
def test_filter_reqs():
    # TODO: exercise context.filter_reqs once suitable fixtures exist
    pass
def test_gen_filter_by_all_request():
    """gen_filter_by_all should match the needle anywhere in the request:
    verb, path, body data, header keys/values and cookies."""
    f = context.gen_filter_by_all(context.cmp_contains, 'hello')
    fn = context.gen_filter_by_all(context.cmp_contains, 'hello', negate=True)

    # Nowhere
    r = Request('GET / HTTP/1.1\r\n')
    assert not f(r)
    assert fn(r)

    # Verb
    r = Request('hello / HTTP/1.1\r\n')
    assert f(r)
    assert not fn(r)

    # Path
    r = Request('GET /hello HTTP/1.1\r\n')
    assert f(r)
    assert not fn(r)

    # Data
    r = Request('GET / HTTP/1.1\r\n')
    r.raw_data = 'hello'
    assert f(r)
    assert not fn(r)

    # Header key
    r = Request('GET / HTTP/1.1\r\n')
    r.headers['hello'] = 'goodbye'
    assert f(r)
    assert not fn(r)

    # Header value
    r = Request('GET / HTTP/1.1\r\n')
    r.headers['goodbye'] = 'hello'
    assert f(r)
    assert not fn(r)

    # Nowhere in headers
    r = Request('GET / HTTP/1.1\r\n')
    r.headers['goodbye'] = 'for real'
    assert not f(r)
    assert fn(r)

    # Cookie key
    r = Request('GET / HTTP/1.1\r\n')
    r.cookies['hello'] = 'world'
    r.update_from_objects()
    assert f(r)
    assert not fn(r)

    # Cookie value
    r = Request('GET / HTTP/1.1\r\n')
    r.cookies['world'] = 'hello'
    r.update_from_objects()
    assert f(r)
    assert not fn(r)

    # Nowhere in cookie
    r = Request('GET / HTTP/1.1\r\n')
    r.cookies['world'] = 'sucks'
    r.update_from_objects()
    assert not f(r)
    assert fn(r)
def test_gen_filter_by_all_response(http_request):
    """gen_filter_by_all should also match the needle anywhere in an
    attached response: status text, body, headers and set cookies."""
    f = context.gen_filter_by_all(context.cmp_contains, 'hello')
    fn = context.gen_filter_by_all(context.cmp_contains, 'hello', negate=True)

    # Nowhere
    r = Response('HTTP/1.1 200 OK\r\n')
    http_request.response = r
    assert not f(http_request)
    assert fn(http_request)

    # Response text
    r = Response('HTTP/1.1 200 hello\r\n')
    http_request.response = r
    assert f(http_request)
    assert not fn(http_request)

    # Data
    r = Response('HTTP/1.1 200 OK\r\n')
    http_request.response = r
    r.raw_data = 'hello'
    assert f(http_request)
    assert not fn(http_request)

    # Header key
    r = Response('HTTP/1.1 200 OK\r\n')
    http_request.response = r
    r.headers['hello'] = 'goodbye'
    assert f(http_request)
    assert not fn(http_request)

    # Header value
    r = Response('HTTP/1.1 200 OK\r\n')
    http_request.response = r
    r.headers['goodbye'] = 'hello'
    assert f(http_request)
    assert not fn(http_request)

    # Nowhere in headers
    r = Response('HTTP/1.1 200 OK\r\n')
    http_request.response = r
    r.headers['goodbye'] = 'for real'
    assert not f(http_request)
    assert fn(http_request)

    # Cookie key
    r = Response('HTTP/1.1 200 OK\r\n')
    http_request.response = r
    r.add_cookie(ResponseCookie('hello=goodbye'))
    r.update_from_objects()
    assert f(http_request)
    assert not fn(http_request)

    # Cookie value
    r = Response('HTTP/1.1 200 OK\r\n')
    http_request.response = r
    r.add_cookie(ResponseCookie('goodbye=hello'))
    r.update_from_objects()
    assert f(http_request)
    assert not fn(http_request)

    # Nowhere in cookie
    r = Response('HTTP/1.1 200 OK\r\n')
    http_request.response = r
    r.add_cookie(ResponseCookie('goodbye=for real'))
    r.update_from_objects()
    assert not f(http_request)
    assert fn(http_request)
def test_filter_by_host(http_request):
    """Host filter matches the Host header only, not arbitrary headers."""
    f = context.gen_filter_by_host(context.cmp_contains, 'sexy')
    fn = context.gen_filter_by_host(context.cmp_contains, 'sexy', negate=True)

    http_request.headers['Host'] = 'google.com'
    http_request.headers['MiscHeader'] = 'vim.sexy'
    assert not f(http_request)
    assert fn(http_request)

    http_request.headers['Host'] = 'vim.sexy'
    http_request.update_from_text()
    assert http_request.host == 'vim.sexy'
    assert f(http_request)
    assert not fn(http_request)
def test_filter_by_body():
    """Body filter matches request or response raw_data, not path/headers."""
    f = context.gen_filter_by_body(context.cmp_contains, 'sexy')
    fn = context.gen_filter_by_body(context.cmp_contains, 'sexy', negate=True)

    # Test request bodies
    r = Request()
    r.status_line = 'GET /sexy HTTP/1.1'
    r.headers['Header'] = 'sexy'
    r.raw_data = 'foo'
    assert not f(r)
    assert fn(r)

    r.raw_data = 'sexy'
    assert f(r)
    assert not fn(r)

    # Test response bodies
    r = Request()
    rsp = Response()
    rsp.status_line = 'HTTP/1.1 200 OK'
    rsp.headers['sexy'] = 'sexy'
    r.status_line = 'GET /sexy HTTP/1.1'
    r.headers['Header'] = 'sexy'
    r.response = rsp
    assert not f(r)
    assert fn(r)

    rsp.raw_data = 'sexy'
    assert f(r)
    assert not fn(r)
def test_filter_by_response_code(http_request):
    """Response-code filter compares against the attached response's status."""
    f = context.gen_filter_by_response_code(context.cmp_eq, 200)
    fn = context.gen_filter_by_response_code(context.cmp_eq, 200, negate=True)

    r = Response()
    http_request.response = r
    r.status_line = 'HTTP/1.1 404 Not Found'
    assert not f(http_request)
    assert fn(http_request)

    r.status_line = 'HTTP/1.1 200 OK'
    assert f(http_request)
    assert not fn(http_request)
def test_filter_by_raw_headers_request():
    """Raw-header filter matches against the request's full header text,
    including key/value separators and CRLF boundaries."""
    f1 = context.gen_filter_by_raw_headers(context.cmp_contains, 'Sexy:')
    fn1 = context.gen_filter_by_raw_headers(context.cmp_contains, 'Sexy:', negate=True)
    f2 = context.gen_filter_by_raw_headers(context.cmp_contains, 'sexy\r\nHeader')
    fn2 = context.gen_filter_by_raw_headers(context.cmp_contains, 'sexy\r\nHeader', negate=True)

    r = Request('GET / HTTP/1.1\r\n')
    rsp = Response('HTTP/1.1 200 OK\r\n')
    r.response = rsp
    r.headers['Header'] = 'Sexy'
    assert not f1(r)
    assert fn1(r)
    assert not f2(r)
    assert fn2(r)

    r = Request('GET / HTTP/1.1\r\n')
    rsp = Response('HTTP/1.1 200 OK\r\n')
    r.response = rsp
    r.headers['Sexy'] = 'sexy'
    assert f1(r)
    assert not fn1(r)
    assert not f2(r)
    assert fn2(r)

    r.headers['OtherHeader'] = 'sexy'
    r.headers['Header'] = 'foo'
    assert f1(r)
    assert not fn1(r)
    assert f2(r)
    assert not fn2(r)
def test_filter_by_raw_headers_response():
    """Raw-header filter also matches against the response's header text."""
    f1 = context.gen_filter_by_raw_headers(context.cmp_contains, 'Sexy:')
    fn1 = context.gen_filter_by_raw_headers(context.cmp_contains, 'Sexy:', negate=True)
    f2 = context.gen_filter_by_raw_headers(context.cmp_contains, 'sexy\r\nHeader')
    fn2 = context.gen_filter_by_raw_headers(context.cmp_contains, 'sexy\r\nHeader', negate=True)

    r = Request('GET / HTTP/1.1\r\n')
    rsp = Response('HTTP/1.1 200 OK\r\n')
    r.response = rsp
    rsp.headers['Header'] = 'Sexy'
    assert not f1(r)
    assert fn1(r)
    assert not f2(r)
    assert fn2(r)

    r = Request('GET / HTTP/1.1\r\n')
    rsp = Response('HTTP/1.1 200 OK\r\n')
    r.response = rsp
    rsp.headers['Sexy'] = 'sexy'
    assert f1(r)
    assert not fn1(r)
    assert not f2(r)
    assert fn2(r)

    rsp.headers['OtherHeader'] = 'sexy'
    rsp.headers['Header'] = 'foo'
    assert f1(r)
    assert not fn1(r)
    assert f2(r)
    assert not fn2(r)
def test_filter_by_path(http_request):
    """Path filter matches anywhere in the request path."""
    f = context.gen_filter_by_path(context.cmp_contains, 'porn') # find the fun websites
    fn = context.gen_filter_by_path(context.cmp_contains, 'porn', negate=True) # find the boring websites

    http_request.status_line = 'GET / HTTP/1.1'
    assert not f(http_request)
    assert fn(http_request)

    http_request.status_line = 'GET /path/to/great/porn HTTP/1.1'
    assert f(http_request)
    assert not fn(http_request)

    http_request.status_line = 'GET /path/to/porn/great HTTP/1.1'
    assert f(http_request)
    assert not fn(http_request)
def test_gen_filter_by_submitted_cookies():
    """Filter on cookies the client submits via the Cookie request header."""
    # f1: any submitted cookie whose name contains 'Session'
    f1 = context.gen_filter_by_submitted_cookies(context.cmp_contains, 'Session')
    # f2: cookie name contains 'Cookie' AND its value contains 'CookieVal'
    f2 = context.gen_filter_by_submitted_cookies(context.cmp_contains, 'Cookie',
                                                 context.cmp_contains, 'CookieVal')

    # No matching cookie name
    r = Request(('GET / HTTP/1.1\r\n'
                 'Cookie: foo=bar\r\n'
                 '\r\n'))
    assert not f1(r)
    assert not f2(r)

    # 'Session' cookie present
    r = Request(('GET / HTTP/1.1\r\n'
                 'Cookie: Session=bar\r\n'
                 '\r\n'))
    assert f1(r)
    assert not f2(r)

    # Name matches 'Cookie' but the value does not match 'CookieVal'
    r = Request(('GET / HTTP/1.1\r\n'
                 'Cookie: Session=bar; CookieThing=NoMatch\r\n'
                 '\r\n'))
    assert f1(r)
    assert not f2(r)

    # Both the name and the value match
    r = Request(('GET / HTTP/1.1\r\n'
                 'Cookie: Session=bar; CookieThing=CookieValue\r\n'
                 '\r\n'))
    assert f1(r)
    assert f2(r)
def test_gen_filter_by_set_cookies():
    """Filter on cookies the server sets via Set-Cookie response headers."""
    # f1: any set cookie whose name contains 'Session'
    f1 = context.gen_filter_by_set_cookies(context.cmp_contains, 'Session')
    # f2: cookie name contains 'Cookie' AND its value contains 'CookieVal'
    f2 = context.gen_filter_by_set_cookies(context.cmp_contains, 'Cookie',
                                           context.cmp_contains, 'CookieVal')

    # No matching cookie name
    r = Request('GET / HTTP/1.1\r\n\r\n')
    rsp = Response(('HTTP/1.1 200 OK\r\n'
                    'Set-Cookie: foo=bar\r\n'
                    '\r\n'))
    r.response = rsp
    assert not f1(r)
    assert not f2(r)

    # 'Session' cookie set
    r = Request('GET / HTTP/1.1\r\n\r\n')
    rsp = Response(('HTTP/1.1 200 OK\r\n'
                    'Set-Cookie: foo=bar\r\n'
                    'Set-Cookie: Session=Banana\r\n'
                    '\r\n'))
    r.response = rsp
    assert f1(r)
    assert not f2(r)

    # Name matches 'Cookie' but the value does not match 'CookieVal'
    r = Request('GET / HTTP/1.1\r\n\r\n')
    rsp = Response(('HTTP/1.1 200 OK\r\n'
                    'Set-Cookie: foo=bar\r\n'
                    'Set-Cookie: Session=Banana\r\n'
                    'Set-Cookie: CookieThing=NoMatch\r\n'
                    '\r\n'))
    r.response = rsp
    assert f1(r)
    assert not f2(r)

    # Both the name and the value match
    r = Request('GET / HTTP/1.1\r\n\r\n')
    rsp = Response(('HTTP/1.1 200 OK\r\n'
                    'Set-Cookie: foo=bar\r\n'
                    'Set-Cookie: Session=Banana\r\n'
                    'Set-Cookie: CookieThing=CookieValue\r\n'
                    '\r\n'))
    r.response = rsp
    assert f1(r)
    assert f2(r)
def test_filter_by_params_get():
    """Filter on URL (query string) parameters: key-only and key+value."""
    # f1: any parameter whose key contains 'Session'
    f1 = context.gen_filter_by_params(context.cmp_contains, 'Session')
    # f2: key contains 'Cookie' AND value contains 'CookieVal'
    f2 = context.gen_filter_by_params(context.cmp_contains, 'Cookie',
                                      context.cmp_contains, 'CookieVal')

    # No parameters at all
    r = Request('GET / HTTP/1.1\r\n\r\n')
    assert not f1(r)
    assert not f2(r)

    # 'Session' key present
    r = Request('GET /?Session=foo HTTP/1.1\r\n\r\n')
    assert f1(r)
    assert not f2(r)

    # Key matches 'Cookie' but the value does not
    r = Request('GET /?Session=foo&CookieThing=Fail HTTP/1.1\r\n\r\n')
    assert f1(r)
    assert not f2(r)

    # Key and value both match
    r = Request('GET /?Session=foo&CookieThing=CookieValue HTTP/1.1\r\n\r\n')
    assert f1(r)
    assert f2(r)
def test_filter_by_params_post():
    """Filter on urlencoded body parameters: key-only and key+value."""
    # f1: any parameter whose key contains 'Session'
    f1 = context.gen_filter_by_params(context.cmp_contains, 'Session')
    # f2: key contains 'Cookie' AND value contains 'CookieVal'
    f2 = context.gen_filter_by_params(context.cmp_contains, 'Cookie',
                                      context.cmp_contains, 'CookieVal')

    # Body params with no matching key
    r = Request(('GET / HTTP/1.1\r\n'
                 'Content-Type: application/x-www-form-urlencoded\r\n\r\n'))
    r.raw_data = 'foo=bar'
    assert not f1(r)
    assert not f2(r)

    # 'Session' key present in the body
    r = Request(('GET / HTTP/1.1\r\n'
                 'Content-Type: application/x-www-form-urlencoded\r\n\r\n'))
    r.raw_data = 'Session=bar'
    assert f1(r)
    assert not f2(r)

    # Key matches 'Cookie' but the value does not match 'CookieVal'
    r = Request(('GET / HTTP/1.1\r\n'
                 'Content-Type: application/x-www-form-urlencoded\r\n\r\n'))
    r.raw_data = 'Session=bar&Cookie=foo'
    assert f1(r)
    assert not f2(r)

    # Key and value both match
    r = Request(('GET / HTTP/1.1\r\n'
                 'Content-Type: application/x-www-form-urlencoded\r\n\r\n'))
    r.raw_data = 'Session=bar&CookieThing=CookieValue'
    assert f1(r)
    assert f2(r)

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,220 @@
import pytest
import mock
import twisted.internet
import twisted.test
from pappyproxy import http
from pappyproxy import mangle
from pappyproxy.proxy import ProxyClient, ProxyClientFactory, ProxyServer
from testutil import mock_deferred, func_deleted, func_ignored_deferred, func_ignored, no_tcp
from twisted.internet.protocol import ServerFactory
from twisted.test.iosim import FakeTransport
from twisted.internet import defer, reactor
####################
## Fixtures
@pytest.fixture
def unconnected_proxyserver(mocker):
    """A ProxyServer wired to a FakeTransport, before any CONNECT handshake."""
    # Don't perform real TLS or read real certs from disk
    mocker.patch("twisted.test.iosim.FakeTransport.startTLS")
    mocker.patch("pappyproxy.proxy.load_certs_from_dir", new=mock_generate_cert)
    factory = ServerFactory()
    factory.protocol = ProxyServer
    protocol = factory.buildProtocol(('127.0.0.1', 0))
    protocol.makeConnection(FakeTransport(protocol, True))
    return protocol
@pytest.fixture
def proxyserver(mocker):
    """A ProxyServer that has already completed a CONNECT handshake.

    The CONNECT response is drained from the fake transport so tests only
    see output produced after the handshake.
    """
    # Don't perform real TLS or read real certs from disk
    mocker.patch("twisted.test.iosim.FakeTransport.startTLS")
    mocker.patch("pappyproxy.proxy.load_certs_from_dir", new=mock_generate_cert)
    factory = ServerFactory()
    factory.protocol = ProxyServer
    protocol = factory.buildProtocol(('127.0.0.1', 0))
    protocol.makeConnection(FakeTransport(protocol, True))
    protocol.lineReceived('CONNECT https://www.AAAA.BBBB:443 HTTP/1.1')
    protocol.lineReceived('')
    protocol.transport.getOutBuffer()  # discard the '200 Connection established' reply
    return protocol
@pytest.fixture
def proxy_connection():
    """Factory fixture that builds a connected ProxyClient for a raw request.

    The returned callable takes the raw request string and yields
    (protocol, data the client sent, deferred firing with the completed
    request) once the client has written its request to the transport.
    """
    @defer.inlineCallbacks
    def gen_connection(send_data):
        factory = ProxyClientFactory(http.Request(send_data))
        protocol = factory.buildProtocol(None)
        tr = FakeTransport(protocol, True)
        protocol.makeConnection(tr)
        # Wait until the client has sent its (possibly mangled) request
        sent = yield protocol.data_defer
        defer.returnValue((protocol, sent, factory.data_defer))
    return gen_connection
## Autorun fixtures
# @pytest.fixture(autouse=True)
# def no_mangle(mocker):
# # Don't call anything in mangle.py
# mocker.patch("mangle.mangle_request", notouch_mangle_req)
# mocker.patch("mangle.mangle_response", notouch_mangle_rsp)
@pytest.fixture(autouse=True)
def ignore_save(mocker):
    # Keep tests from touching the database when requests are deep-saved
    mocker.patch("pappyproxy.http.Request.deep_save", func_ignored_deferred)
####################
## Mock functions
def mock_generate_cert(cert_dir):
    """Return a fixed (ca_cert, private_key) PEM pair for use in tests.

    Stands in for pappyproxy.proxy.load_certs_from_dir; ``cert_dir`` is
    accepted only to match that signature and is ignored.
    """
    private_key = ('-----BEGIN PRIVATE KEY-----\n'
                   'MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDAoClrYUEB7lM0\n'
                   'zQaKkXZVG2d1Bu9hV8urpx0gNXMbyZ2m3xb+sKZju/FHPuWenA4KaN5gRUT+oLfv\n'
                   'tnF6Ia0jpRNWnX0Fyn/irdg1BWGJn7k7mJ2D0NXZQczn2+xxY05599NfGWqNKCYy\n'
                   'jhSwPsUK+sGJqi7aSDdlS97ZTjrQVTTFsC0+kSu4lS5fsWXxqrKLa6Ao8W7abVRO\n'
                   'JHazh/cxM4UKpgWU+E6yD4o4ZgHY+SMTVUh/IOM8DuOVyLEWtx4oLNiLMlpWT3qy\n'
                   '4IMpOF6VuU6JF2HGV13SoJfhsLXsPRbLVTAnZvJcZwtgDm6NfKapU8W8olkDV1Bf\n'
                   'YQEMSNX7AgMBAAECggEBAII0wUrAdrzjaIMsg9tu8FofKBPHGFDok9f4Iov/FUwX\n'
                   'QOXnrxeCOTb5d+L89SH9ws/ui0LwD+8+nJcA8DvqP6r0jtnhov0jIMcNVDSi6oeo\n'
                   '3AEY7ICJzcQJ4oRn+K+8vPNdPhfuikPYe9l4iSuJgpAlaGWyD/GlFyz12DFz2/Wu\n'
                   'NIcqR1ucvezRHn3eGMtvDv2WGaN4ifUc30k8XgSUesmwSI6beb5+hxq7wXfsurnP\n'
                   'EUrPY9ts3lfiAgxzTKOuj1VR5hn7cJyLN8jF0mZs4D6eSSHorIddhmaNiCq5ZbMd\n'
                   'QdlDiPvnXHT41OoXOb7tDEt7SGoiRh2noCZ1aZiSziECgYEA+tuPPLYWU6JRB6EW\n'
                   'PhbcXQbh3vML7eT1q7DOz0jYCojgT2+k7EWSI8T830oQyjbpe3Z86XEgH7UBjUgq\n'
                   '27nJ4E6dQDYGbYCKEklOoCGLE7A60i1feIz8otOQRrbQ4jcpibEgscA6gzHmunYf\n'
                   'De5euUgYW+Rq2Vmr6/NzUaUgui8CgYEAxJMDwPOGgiLM1cczlaSIU9Obz+cVnwWn\n'
                   'nsdKYMto2V3yKLydDfjsgOgzxHOxxy+5L645TPxK6CkiISuhJ93kAFFtx+1sCBCT\n'
                   'tVzY5robVAekxA9tlPIxtsn3+/axx3n6HnV0oA/XtxkuOS5JImgEdXqFwJZkerGE\n'
                   'waftIU2FCfUCgYEArl8+ErJzlJEIiCgWIPSdGuD00pfZW/TCPCT7rKRy3+fDHBR7\n'
                   '7Gxzp/9+0utV/mnrJBH5w/8JmGCmgoF+oRtk01FyBzdGgolN8GYajD6kwPvH917o\n'
                   'tRAzcC9lY3IigoxbiEWid0wqoBVoz4XaEkH2gA44OG/vQcQOOEYSi9cfh6sCgYBg\n'
                   'KLaOXdJvuIxRCzgNvMW/k+VFh3pJJx//COg2f2qT4mQCT3nYiutOh8hDEoFluc+y\n'
                   'Jlz7bvNJrE14wnn8IYxWJ383bMoLC+jlsDyeaW3S5kZQbmehk/SDwTrg86W1udKD\n'
                   'sdtSLU3N0LCO4jh+bzm3Ki9hrXALoOkbPoU+ZEhvPQKBgQDf79XQ3RNxZSk+eFyq\n'
                   'qD8ytVqxEoD+smPDflXXseVH6o+pNWrF8+A0KqmO8c+8KVzWj/OfULO6UbKd3E+x\n'
                   '4JGkWu9yF1lEgtHgibF2ER8zCSIL4ikOEasPCkrKj5SrS4Q+j4u5ha76dIc2CVu1\n'
                   'hkX2PQ1xU4ocu06k373sf73A4Q==\n'
                   '-----END PRIVATE KEY-----')
    ca_key = ('-----BEGIN CERTIFICATE-----\n'
              'MIIDjzCCAncCFQCjC8r+I4xa7JoGUJYGOTcqDROA0DANBgkqhkiG9w0BAQsFADBg\n'
              'MQswCQYDVQQGEwJVUzERMA8GA1UECBMITWljaGlnYW4xEjAQBgNVBAcTCUFubiBB\n'
              'cmJvcjEUMBIGA1UEChMLUGFwcHkgUHJveHkxFDASBgNVBAMTC1BhcHB5IFByb3h5\n'
              'MB4XDTE1MTEyMDIxMTEzOVoXDTI1MTExNzIxMTEzOVowYDELMAkGA1UEBhMCVVMx\n'
              'ETAPBgNVBAgTCE1pY2hpZ2FuMRIwEAYDVQQHEwlBbm4gQXJib3IxFDASBgNVBAoT\n'
              'C1BhcHB5IFByb3h5MRQwEgYDVQQDEwtQYXBweSBQcm94eTCCASIwDQYJKoZIhvcN\n'
              'AQEBBQADggEPADCCAQoCggEBAMCgKWthQQHuUzTNBoqRdlUbZ3UG72FXy6unHSA1\n'
              'cxvJnabfFv6wpmO78Uc+5Z6cDgpo3mBFRP6gt++2cXohrSOlE1adfQXKf+Kt2DUF\n'
              'YYmfuTuYnYPQ1dlBzOfb7HFjTnn3018Zao0oJjKOFLA+xQr6wYmqLtpIN2VL3tlO\n'
              'OtBVNMWwLT6RK7iVLl+xZfGqsotroCjxbtptVE4kdrOH9zEzhQqmBZT4TrIPijhm\n'
              'Adj5IxNVSH8g4zwO45XIsRa3Higs2IsyWlZPerLggyk4XpW5TokXYcZXXdKgl+Gw\n'
              'tew9FstVMCdm8lxnC2AObo18pqlTxbyiWQNXUF9hAQxI1fsCAwEAAaNFMEMwEgYD\n'
              'VR0TAQH/BAgwBgEB/wIBADAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFNo5o+5e\n'
              'a0sNMlW/75VgGJCv2AcJMA0GCSqGSIb3DQEBCwUAA4IBAQBdJDhxbmoEe27bD8me\n'
              'YTcLGjs/StKkSil7rLbX+tBCwtkm5UEEejBuAhKk2FuAXW8yR1FqKJSZwVCAocBT\n'
              'Bo/+97Ee+h7ywrRFhATEr9D/TbbHKOjCjDzOMl9yLZa2DKErZjbI30ZD6NafWS/X\n'
              'hx5X1cGohHcVVzT4jIgUEU70vvYfNn8CTZm4oJ7qqRe/uQPUYy0rwvbd60oprtGg\n'
              'jNv1H5R4ODHUMBXAI9H7ft9cWrd0fBQjxhoj8pvgJXEZ52flXSqQc7qHLg1wO/zC\n'
              'RUgpTcNAb2qCssBKbj+c1vKEPRUJfw6UYb0s1462rQNc8BgZiKaNbwokFmkAnjUg\n'
              'AvnX\n'
              '-----END CERTIFICATE-----')
    return (ca_key, private_key)
def notouch_mangle_req(request, conn_id):
    """Request-mangler stub: fire a deferred with an untouched copy of *request*."""
    copied = http.Request(request.full_request)
    copied.port = request.port
    copied.is_ssl = request.is_ssl
    return mock_deferred(copied)
def notouch_mangle_rsp(response, conn_id):
    """Response-mangler stub: fire a deferred with a request wrapping an
    untouched copy of *response*."""
    wrapper = http.Request()
    wrapper.response = http.Response(response.full_response)
    return mock_deferred(wrapper)
def req_mangler_change(request, conn_id):
    """Request mangler that always substitutes a fixed 'GET /mangled' request."""
    return mock_deferred(http.Request('GET /mangled HTTP/1.1\r\n\r\n'))
def rsp_mangler_change(request, conn_id):
    """Response mangler that always substitutes a fixed '500 MANGLED' response."""
    wrapper = http.Request()
    wrapper.response = http.Response('HTTP/1.1 500 MANGLED\r\n\r\n')
    return mock_deferred(wrapper)
####################
## Unit test tests
def test_proxy_server_fixture(unconnected_proxyserver):
    """Sanity check: the fake transport buffers whatever the server writes."""
    unconnected_proxyserver.transport.write('hello')
    assert unconnected_proxyserver.transport.getOutBuffer() == 'hello'
@pytest.inlineCallbacks
def test_mock_deferreds():
    """mock_deferred should yield the canned value it was built with."""
    d = mock_deferred('Hello!')
    r = yield d
    assert r == 'Hello!'
def test_deleted():
    """The autouse no_tcp fixture must block real TCP/SSL connections."""
    with pytest.raises(NotImplementedError):
        reactor.connectTCP("www.google.com", "80", ServerFactory)
    with pytest.raises(NotImplementedError):
        reactor.connectSSL("www.google.com", "80", ServerFactory)
####################
## Proxy Server Tests
def test_proxy_server_connect(unconnected_proxyserver, mocker):
    """A CONNECT should be answered with 200 and mark the request as SSL."""
    mocker.patch("twisted.internet.reactor.connectSSL")
    unconnected_proxyserver.lineReceived('CONNECT https://www.dddddd.fff:433 HTTP/1.1')
    unconnected_proxyserver.lineReceived('')
    assert unconnected_proxyserver.transport.getOutBuffer() == 'HTTP/1.1 200 Connection established\r\n\r\n'
    assert unconnected_proxyserver._request_obj.is_ssl
def test_proxy_server_basic(proxyserver, mocker):
    """After a CONNECT, a request should open an SSL connection to that host."""
    mocker.patch("twisted.internet.reactor.connectSSL")
    mocker.patch('pappyproxy.proxy.ProxyServer.setRawMode')
    proxyserver.lineReceived('GET / HTTP/1.1')
    proxyserver.lineReceived('')
    assert proxyserver.setRawMode.called
    # connectSSL must target the host/port from the fixture's CONNECT line
    args, kwargs = twisted.internet.reactor.connectSSL.call_args
    assert args[0] == 'www.AAAA.BBBB'
    assert args[1] == 443
@pytest.inlineCallbacks
def test_proxy_client_basic(mocker, proxy_connection):
    """Round trip with no-op manglers: request and response pass unchanged."""
    mocker.patch('pappyproxy.mangle.mangle_request', new=notouch_mangle_req)
    mocker.patch('pappyproxy.mangle.mangle_response', new=notouch_mangle_rsp)
    # Make the connection
    (prot, sent, resp_deferred) = yield proxy_connection('GET / HTTP/1.1\r\n\r\n')
    assert sent == 'GET / HTTP/1.1\r\n\r\n'
    # Feed a canned response back through the client
    prot.lineReceived('HTTP/1.1 200 OK')
    prot.lineReceived('Content-Length: 0')
    prot.lineReceived('')
    ret_req = yield resp_deferred
    response = ret_req.response.full_response
    assert response == 'HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n'
@pytest.inlineCallbacks
def test_proxy_client_mangle_req(mocker, proxy_connection):
    """Request mangling: the client must send the mangled request."""
    mocker.patch('pappyproxy.mangle.mangle_request', new=req_mangler_change)
    mocker.patch('pappyproxy.mangle.mangle_response', new=notouch_mangle_rsp)
    # Make the connection
    (prot, sent, resp_deferred) = yield proxy_connection('GET / HTTP/1.1\r\n\r\n')
    assert sent == 'GET /mangled HTTP/1.1\r\n\r\n'
@pytest.inlineCallbacks
def test_proxy_client_mangle_rsp(mocker, proxy_connection):
    """Response mangling: the client must yield the mangled response.

    Renamed from ``test_proxy_client_basic``: the original name duplicated
    the earlier round-trip test in this module, so this later definition
    silently shadowed it and the basic test never ran under pytest.
    """
    mocker.patch('pappyproxy.mangle.mangle_request', new=notouch_mangle_req)
    mocker.patch('pappyproxy.mangle.mangle_response', new=rsp_mangler_change)
    # Make the connection
    (prot, sent, resp_deferred) = yield proxy_connection('GET / HTTP/1.1\r\n\r\n')
    # Feed a canned response; the mangler should replace it entirely
    prot.lineReceived('HTTP/1.1 200 OK')
    prot.lineReceived('Content-Length: 0')
    prot.lineReceived('')
    ret_req = yield resp_deferred
    response = ret_req.response.full_response
    assert response == 'HTTP/1.1 500 MANGLED\r\n\r\n'

View file

@ -0,0 +1,42 @@
import pytest
from twisted.internet import defer
class ClassDeleted():
    """Stand-in used to patch out classes that must never be used in tests."""
    pass
def func_deleted(*args, **kwargs):
    """Patched-in replacement that fails loudly if the target is ever called."""
    raise NotImplementedError
def func_ignored(*args, **kwargs):
    """No-op replacement: accepts any arguments and returns None."""
    return None
def func_ignored_deferred(*args, **kwargs):
    # No-op replacement for deferred-returning functions; fires with None
    return mock_deferred(None)
def mock_deferred(value):
    """Return an already-fired Deferred whose callback result is *value*.

    Used to mock out deferred-returning functions in tests.
    """
    def g(data):
        # Ignore the chained callback argument and substitute the canned value
        return value
    d = defer.Deferred()
    d.addCallback(g)
    d.callback(None)
    return d
@pytest.fixture(autouse=True)
def no_tcp(mocker):
    # Don't make tcp connections; any attempt raises NotImplementedError
    mocker.patch("twisted.internet.reactor.connectTCP", new=func_deleted)
    mocker.patch("twisted.internet.reactor.connectSSL", new=func_deleted)
@pytest.fixture
def ignore_tcp(mocker):
    # Don't make tcp connections; attempts silently do nothing (opt-in fixture)
    mocker.patch("twisted.internet.reactor.connectTCP", new=func_ignored)
    mocker.patch("twisted.internet.reactor.connectSSL", new=func_ignored)
@pytest.fixture(autouse=True)
def no_database(mocker):
    # Don't make database queries; instantiating the pool becomes meaningless
    mocker.patch("twisted.enterprise.adbapi.ConnectionPool",
                 new=ClassDeleted)

3
pappyproxy/util.py Normal file
View file

@ -0,0 +1,3 @@
class PappyException(Exception):
    """Base exception for errors raised by the pappy proxy."""
    pass

View file

View file

@ -0,0 +1,130 @@
import base64
import vim
import sys
import socket
import json
class CommError(Exception):
    """Raised when the proxy's comm port reports a failed action."""
    pass
def communicate(data):
    """Send *data* (a JSON-serializable dict) to the proxy's comm port and
    return the decoded JSON reply.

    The port is read from the vim script-local variable ``s:commport``.
    Raises CommError (after echoing the message in vim) if the proxy
    reports failure.
    """
    # Submits data to the comm port of the proxy
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect(('127.0.0.1', int(vim.eval('s:commport'))))
    datastr = json.dumps(data)

    # Send our data. BUG FIX: the loop previously compared total_sent
    # against len(data) -- the number of keys in the dict -- instead of
    # len(datastr), the number of bytes to send, which could truncate
    # larger messages if send() returned a short count.
    total_sent = 0
    while total_sent < len(datastr):
        sent = s.send(datastr[total_sent:])
        assert sent != 0
        total_sent += sent
    s.send('\n')

    # Read the newline-terminated JSON response one byte at a time
    retstr = ''
    c = ''
    while c != '\n':
        retstr = retstr + c
        c = s.recv(1)
        assert c != ''
    result = json.loads(retstr)
    if not result['success']:
        vim.command('echoerr %s' % result['message'])
        raise CommError(result['message'])
    return result
def read_line(conn):
    """Read single characters from *conn* until a newline; return the line
    without the trailing newline.

    NOTE(review): assumes *conn* exposes a file-like ``read(1)`` -- confirm
    callers pass a file object rather than a raw socket (which uses recv).
    """
    chars = []
    ch = conn.read(1)
    while ch != '\n':
        chars.append(ch)
        ch = conn.read(1)
    return ''.join(chars)
def run_command(command):
    """Dispatch a repeater command name to its handler; unknown names are
    silently ignored."""
    handler = {
        "setup": set_up_windows,
        "submit": submit_current_buffer,
    }.get(command)
    if handler is not None:
        handler()
def set_buffer_content(buf, text):
    """Replace the contents of vim buffer *buf* with *text*, one buffer line
    per newline-separated line."""
    buf[:] = None  # clear the buffer (vim buffer idiom)
    lines = text.split('\n')
    # split() always yields at least one element, so buf[0] is safe
    buf[0] = lines[0]
    for line in lines[1:]:
        buf.append(line)
def set_up_windows():
    """Create the repeater's request/response windows and load the request.

    Reads vim function args: a:2 = request id, a:3 = proxy comm port.
    Leaves the request in the left buffer (s:b1) and the response, if any,
    in the right buffer (s:b2); stores port/ssl in s:repport/s:repisssl.
    """
    reqid = vim.eval("a:2")
    comm_port = vim.eval("a:3")
    vim.command("let s:commport=%d"%int(comm_port))
    # Get the left buffer
    vim.command("new")
    vim.command("only")
    b2 = vim.current.buffer
    vim.command("let s:b2=bufnr('$')")
    # Vsplit new file
    vim.command("vnew")
    b1 = vim.current.buffer
    vim.command("let s:b1=bufnr('$')")
    # Get the request; bail out silently if the proxy reports an error
    comm_data = {"action": "get_request", "reqid": reqid}
    try:
        reqdata = communicate(comm_data)
    except CommError:
        return
    # Get the response for the same request id
    comm_data = {"action": "get_response", "reqid": reqid}
    try:
        rspdata = communicate(comm_data)
    except CommError:
        return
    # Set up the buffers (payloads come back base64-encoded over the wire)
    set_buffer_content(b1, base64.b64decode(reqdata['full_request']))
    if 'full_response' in rspdata:
        set_buffer_content(b2, base64.b64decode(rspdata['full_response']))
    # Save the port/ssl setting
    vim.command("let s:repport=%d" % int(reqdata['port']))
    if reqdata['is_ssl']:
        vim.command("let s:repisssl=1")
    else:
        vim.command("let s:repisssl=0")
def submit_current_buffer():
    """Submit the request in the current buffer through the proxy and show
    the response in the right-hand (s:b2) buffer."""
    curbuf = vim.current.buffer
    b2_id = vim.eval("s:b2")
    b2 = vim.buffers[int(b2_id)]
    # Rebuild the two-pane layout with the response buffer on the right.
    # NOTE(review): this resets s:b1 to bufnr('$') before creating any new
    # buffer -- presumably meant to track the request buffer; confirm it
    # matches what set_up_windows stores.
    vim.command("let s:b1=bufnr('$')")
    vim.command("only")
    vim.command("rightbelow vertical new")
    vim.command("b %s" % b2_id)
    vim.command("wincmd h")
    full_request = '\n'.join(curbuf)
    # Request payloads are base64-encoded over the comm port
    commdata = {'action': 'submit',
                'full_request': base64.b64encode(full_request),
                'port':int(vim.eval("s:repport"))}
    if vim.eval("s:repisssl") == '1':
        commdata["is_ssl"] = True
    else:
        commdata["is_ssl"] = False
    result = communicate(commdata)
    set_buffer_content(b2, base64.b64decode(result['response']['full_response']))
# (left, right) = set_up_windows()
# set_buffer_content(left, 'Hello\nWorld')
# set_buffer_content(right, 'Hello\nOther\nWorld')
#print "Arg is %s" % vim.eval("a:arg")

# Entry point: vim runs this script via pyfile with the command name in a:1
run_command(vim.eval("a:1"))

View file

@ -0,0 +1,17 @@
" Pappy repeater plugin: forwards repeater commands to the python script.
if !has('python')
    echo "Vim must support python in order to use the repeater"
    finish
endif

" Absolute path to the companion python implementation next to this file
let s:pyscript = resolve(expand('<sfile>:p:h') . '/repeater.py')

" Run the python script; it reads its arguments via a:1, a:2, ...
function! RepeaterAction(...)
    execute 'pyfile ' . s:pyscript
endfunc

command! -nargs=* RepeaterSetup call RepeaterAction('setup', <f-args>)
command! RepeaterSubmitBuffer call RepeaterAction('submit')

" Bind forward to <leader>f
nnoremap <leader>f :RepeaterSubmitBuffer<CR>