Version 0.2.7

Rob Glew 2016-02-18 15:29:43 -06:00
parent fda0166e72
commit f4274e1e82
27 changed files with 2136 additions and 493 deletions

View file

@ -7,3 +7,6 @@ test:
test-verbose:
py.test -v -rw --twisted --cov-config .coveragerc --cov-report term-missing --cov=. tests/
test-macros:
py.test -v -rw --twisted tests/test_macros.py

View file

@ -63,6 +63,8 @@ class Styles:
KV_KEY = Colors.GREEN
KV_VAL = Colors.ENDC
UNPRINTABLE_DATA = Colors.CYAN
def verb_color(verb):
if verb and verb == 'GET':

View file

@ -1,3 +1,4 @@
import sys
import base64
import json
@ -20,6 +21,7 @@ def set_comm_port(port):
comm_port = port
class CommServer(LineReceiver):
MAX_LENGTH=sys.maxint
def __init__(self):
self.delimiter = '\n'
@ -32,6 +34,7 @@ class CommServer(LineReceiver):
def lineReceived(self, line):
from .http import Request, Response
line = line.strip()
if line == '':
return
@ -98,7 +101,7 @@ class CommServer(LineReceiver):
@defer.inlineCallbacks
def action_submit_request(self, data):
message = base64.b64decode(data['full_message'])
req = yield Request.submit_new(data['host'], data['port'], data['is_ssl'], message)
req = yield Request.submit_new(data['host'].encode('utf-8'), data['port'], data['is_ssl'], message)
if 'tags' in data:
req.tags = set(data['tags'])
yield req.async_deep_save()

View file

@ -46,6 +46,20 @@ The configuration settings for the proxy.
:Default: ``[(8000, '127.0.0.1')]``
.. data:: SOCKS_PROXY
Details for a SOCKS proxy. It is a dict with the following key/values::
host: The SOCKS proxy host
port: The proxy port
username: Username (optional)
password: Password (optional)
If null, no proxy will be used.
:Default: ``null``
.. data:: PLUGIN_DIRS
List of directories that plugins are loaded from. Not modifiable.
@ -87,6 +101,7 @@ DEBUG_TO_FILE = False
DEBUG_VERBOSITY = 0
LISTENERS = [(8000, '127.0.0.1')]
SOCKS_PROXY = None
SSL_CA_FILE = 'certificate.crt'
SSL_PKEY_FILE = 'private.key'
@ -112,6 +127,7 @@ def load_settings(proj_config):
global DEBUG_TO_FILE
global DEBUG_VERBOSITY
global LISTENERS
global SOCKS_PROXY
global PAPPY_DIR
global DATA_DIR
global SSL_CA_FILE
@ -141,7 +157,30 @@ def load_settings(proj_config):
if "proxy_listeners" in proj_config:
LISTENERS = []
for l in proj_config["proxy_listeners"]:
LISTENERS.append((l['port'], l['interface']))
ll = {}
if 'forward_host_ssl' in l:
l['forward_host_ssl'] = l['forward_host_ssl'].encode('utf-8')
if 'forward_host' in l:
l['forward_host'] = l['forward_host'].encode('utf-8')
LISTENERS.append(l)
# SOCKS proxy settings
if "socks_proxy" in proj_config:
SOCKS_PROXY = None
if proj_config['socks_proxy'] is not None:
conf = proj_config['socks_proxy']
if 'host' in conf and 'port' in conf:
SOCKS_PROXY = {}
SOCKS_PROXY['host'] = conf['host'].encode('utf-8')
SOCKS_PROXY['port'] = conf['port']
if 'username' in conf:
if 'password' in conf:
SOCKS_PROXY['username'] = conf['username'].encode('utf-8')
SOCKS_PROXY['password'] = conf['password'].encode('utf-8')
else:
print 'SOCKS proxy has a username but no password. Ignoring creds.'
else:
print 'SOCKS proxy config is missing host/port.'
# History saving settings
if "history_size" in proj_config:

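A rough sketch (not part of the commit) of how the new settings parsed above might look when passed to load_settings as a parsed project config. The key names follow the parsing code; the hosts, ports and credentials are invented.

from pappyproxy import config

# Hypothetical project settings. Listener dicts may carry forward_host or
# forward_host_ssl; socks_proxy credentials are optional but must be given
# together or they are ignored.
proj_config = {
    "proxy_listeners": [
        {"port": 8000, "interface": "127.0.0.1"},
        {"port": 8001, "interface": "127.0.0.1", "forward_host_ssl": "example.com"},
    ],
    "socks_proxy": {
        "host": "127.0.0.1",
        "port": 9050,
        "username": "socksuser",
        "password": "sockspass",
    },
}
config.load_settings(proj_config)
# config.SOCKS_PROXY now holds the encoded host/port/credential dict and
# config.LISTENERS holds the listener dicts, including any forward hosts.
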
View file

@ -122,15 +122,19 @@ class Filter(object):
@staticmethod
@defer.inlineCallbacks
def from_filter_string(filter_string):
def from_filter_string(filter_string=None, parsed_args=None):
"""
from_filter_string(filter_string=None, parsed_args=None)
Create a filter from a filter string.
Create a filter from a filter string. If passed a list of arguments, they
will be used instead of parsing the string.
:rtype: Deferred that returns a :class:`pappyproxy.context.Filter`
"""
args = shlex.split(filter_string)
if parsed_args is not None:
args = parsed_args
else:
args = shlex.split(filter_string)
if len(args) == 0:
raise PappyException('Field is required')
field = args[0]
@ -145,12 +149,20 @@ class Filter(object):
new_filter = gen_filter_by_path(field_args)
elif field in ("body", "bd", "data", "dt"):
new_filter = gen_filter_by_body(field_args)
elif field in ("reqbody", "qbd", "reqdata", "qdt"):
new_filter = gen_filter_by_req_body(field_args)
elif field in ("rspbody", "sbd", "qspdata", "sdt"):
new_filter = gen_filter_by_rsp_body(field_args)
elif field in ("verb", "vb"):
new_filter = gen_filter_by_verb(field_args)
elif field in ("param", "pm"):
new_filter = gen_filter_by_params(field_args)
elif field in ("header", "hd"):
new_filter = gen_filter_by_headers(field_args)
elif field in ("reqheader", "qhd"):
new_filter = gen_filter_by_request_headers(field_args)
elif field in ("rspheader", "shd"):
new_filter = gen_filter_by_response_headers(field_args)
elif field in ("rawheaders", "rh"):
new_filter = gen_filter_by_raw_headers(field_args)
elif field in ("sentcookie", "sck"):
@ -169,6 +181,8 @@ class Filter(object):
new_filter = yield gen_filter_by_before(field_args)
elif field in ("after", "af"):
new_filter = yield gen_filter_by_after(field_args)
elif field in ("inv",):
new_filter = yield gen_filter_by_inverse(field_args)
else:
raise FilterParseError("%s is not a valid field" % field)
@ -181,33 +195,53 @@ class Filter(object):
defer.returnValue(new_filter)
def cmp_is(a, b):
if a is None or b is None:
return False
return str(a) == str(b)
def cmp_contains(a, b):
if a is None or b is None:
return False
return (b.lower() in a.lower())
def cmp_exists(a, b=None):
if a is None or b is None:
return False
return (a is not None and a != [])
def cmp_len_eq(a, b):
if a is None or b is None:
return False
return (len(a) == int(b))
def cmp_len_gt(a, b):
if a is None or b is None:
return False
return (len(a) > int(b))
def cmp_len_lt(a, b):
if a is None or b is None:
return False
return (len(a) < int(b))
def cmp_eq(a, b):
if a is None or b is None:
return False
return (int(a) == int(b))
def cmp_gt(a, b):
if a is None or b is None:
return False
return (int(a) > int(b))
def cmp_lt(a, b):
if a is None or b is None:
return False
return (int(a) < int(b))
def cmp_containsr(a, b):
if a is None or b is None:
return False
try:
if re.search(b, a):
return True
@ -328,38 +362,50 @@ def compval_from_args_repdict(args):
return retfunc
def gen_filter_by_all(args):
compval_from_args(args) # try and throw an error
compval = compval_from_args(args)
def f(req):
compval = compval_from_args(args)
if args[0][0] == 'n':
return compval(req.full_message) and (not req.response or compval(req.response.full_message))
return compval(req.full_message) and ((not req.response) or compval(req.response.full_message))
else:
return compval(req.full_message) or (req.response and compval(req.response.full_message))
return f
def gen_filter_by_host(args):
compval_from_args(args) # try and throw an error
compval = compval_from_args(args)
def f(req):
compval = compval_from_args(args)
return compval(req.host)
return f
def gen_filter_by_body(args):
compval_from_args(args) # try and throw an error
compval = compval_from_args(args)
def f(req):
compval = compval_from_args(args)
if args[0][0] == 'n':
return compval(req.body) and (not req.response or compval(req.response.body))
return compval(req.body) and ((not req.response) or compval(req.response.body))
else:
return compval(req.body) or (req.response and compval(req.response.body))
return f
def gen_filter_by_raw_headers(args):
compval_from_args(args) # try and throw an error
def gen_filter_by_req_body(args):
compval = compval_from_args(args)
def f(req):
return compval(req.body)
return f
def gen_filter_by_rsp_body(args):
compval = compval_from_args(args)
def f(req):
compval = compval_from_args(args)
if args[0][0] == 'n':
return compval(req.headers_section) and (not req.response or compval(req.response.headers_section))
return (not req.response) or compval(req.response.body)
else:
return req.response and compval(req.response.body)
return f
def gen_filter_by_raw_headers(args):
compval = compval_from_args(args)
def f(req):
if args[0][0] == 'n':
# compval already negates comparison
return compval(req.headers_section) and ((not req.response) or compval(req.response.headers_section))
else:
return compval(req.headers_section) or (req.response and compval(req.response.headers_section))
return f
@ -374,30 +420,26 @@ def gen_filter_by_response_code(args):
return f
def gen_filter_by_path(args):
compval_from_args(args)
compval = compval_from_args(args)
def f(req):
compval = compval_from_args(args)
return compval(req.path)
return f
def gen_filter_by_responsetime(args):
compval_from_args(args)
compval = compval_from_args(args)
def f(req):
compval = compval_from_args(args)
return compval(req.rsptime)
return f
def gen_filter_by_verb(args):
compval_from_args(args)
compval = compval_from_args(args)
def f(req):
compval = compval_from_args(args)
return compval(req.verb)
return f
def gen_filter_by_tag(args):
compval_from_args(args)
compval = compval_from_args(args)
def f(req):
compval = compval_from_args(args)
for tag in req.tags:
if compval(tag):
return True
@ -418,7 +460,7 @@ def gen_filter_by_saved(args):
def gen_filter_by_before(args):
if len(args) != 1:
raise PappyException('Invalid number of arguments')
r = yield http.Request.load_request(args[0])
r = yield Request.load_request(args[0])
def f(req):
if req.time_start is None:
return False
@ -431,7 +473,7 @@ def gen_filter_by_before(args):
def gen_filter_by_after(reqid, negate=False):
if len(args) != 1:
raise PappyException('Invalid number of arguments')
r = yield http.Request.load_request(args[0])
r = yield Request.load_request(args[0])
def f(req):
if req.time_start is None:
return False
@ -444,11 +486,26 @@ def gen_filter_by_headers(args):
comparer = compval_from_args_repdict(args)
def f(req):
if args[0][0] == 'n':
return comparer(req.headers) and (not req.response or comparer(req.response.headers))
return comparer(req.headers) and ((not req.response) or comparer(req.response.headers))
else:
return comparer(req.headers) or (req.response and comparer(req.response.headers))
return f
def gen_filter_by_request_headers(args):
comparer = compval_from_args_repdict(args)
def f(req):
return comparer(req.headers)
return f
def gen_filter_by_response_headers(args):
comparer = compval_from_args_repdict(args)
def f(req):
if args[0][0] == 'n':
return (not req.response) or comparer(req.response.headers)
else:
return req.response and comparer(req.response.headers)
return f
def gen_filter_by_submitted_cookies(args):
comparer = compval_from_args_repdict(args)
def f(req):
@ -484,6 +541,13 @@ def gen_filter_by_params(args):
return comparer(req.url_params) or comparer(req.post_params)
return f
@defer.inlineCallbacks
def gen_filter_by_inverse(args):
filt = yield Filter.from_filter_string(parsed_args=args)
def f(req):
return not filt(req)
defer.returnValue(f)
@defer.inlineCallbacks
def filter_reqs(reqids, filters):
to_delete = set()

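A short sketch (not from the commit) of how the reworked filter entry points fit together: from_filter_string still accepts a raw filter string, while gen_filter_by_inverse re-enters it with parsed_args, and in both cases the Deferred fires with a predicate that takes a request. The 'ct' comparer abbreviation is assumed from the rest of the filter language.

from twisted.internet import defer
from pappyproxy.context import Filter

@defer.inlineCallbacks
def example_filters():
    # parse a filter string the way the console does
    json_rsp = yield Filter.from_filter_string('rspheader ct application/json')
    # or pass pre-split arguments, as gen_filter_by_inverse does internally
    not_get = yield Filter.from_filter_string(parsed_args=['inv', 'verb', 'is', 'GET'])
    # both results are plain functions that take a Request and return a bool
    defer.returnValue((json_rsp, not_get))
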
View file

@ -4,5 +4,6 @@
"history_size": 1000,
"proxy_listeners": [
{"port": 8000, "interface": "127.0.0.1"}
]
],
"socks_proxy": null
}

View file

@ -541,14 +541,21 @@ class HTTPMessage(object):
# Initializes instance variables too
self.clear()
self.metadata_unique_keys = tuple()
if full_message is not None:
self._from_full_message(full_message, update_content_length)
def __eq__(self, other):
# TODO check meta
if self.full_message != other.full_message:
return False
if self.get_metadata() != other.get_metadata():
m1 = self.get_metadata()
m2 = other.get_metadata()
for k in self.metadata_unique_keys:
if k in m1:
del m1[k]
if k in m2:
del m2[k]
if m1 != m2:
return False
return True
@ -556,7 +563,7 @@ class HTTPMessage(object):
if not self.complete:
raise PappyException("Cannot copy incomplete http messages")
retmsg = self.__class__(self.full_message)
retmsg.set_metadata(self.get_metadata())
retmsg.set_metadata(self.get_metadata(include_unique=False))
return retmsg
def copy(self):
@ -840,8 +847,13 @@ class HTTPMessage(object):
"""
Called when the body of the message is complete
"""
self.body = _decode_encoded(self._data_obj.body,
self._encoding_type)
try:
self.body = _decode_encoded(self._data_obj.body,
self._encoding_type)
except IOError as e:
# Screw handling it gracefully, this is the server's fault.
print 'Error decoding request, storing raw data in body instead'
self.body = self._data_obj.body
def update_from_body(self):
"""
@ -982,6 +994,9 @@ class Request(HTTPMessage):
# instance vars
HTTPMessage.__init__(self, full_request, update_content_length)
# metadata that is unique to a specific Request instance
self.metadata_unique_keys = ('reqid',)
# After message init so that other instance vars are initialized
self._set_dict_callbacks()
@ -1267,7 +1282,7 @@ class Request(HTTPMessage):
###########
## Metadata
def get_metadata(self):
def get_metadata(self, include_unique=True):
data = {}
if self.port is not None:
data['port'] = self.port
@ -1277,6 +1292,10 @@ class Request(HTTPMessage):
if self.response:
data['response_id'] = self.response.rspid
data['tags'] = list(self.tags)
if not include_unique:
for k in self.metadata_unique_keys:
if k in data:
del data[k]
return data
def set_metadata(self, data):
@ -1344,7 +1363,7 @@ class Request(HTTPMessage):
# Updates metadata that's based off of data
HTTPMessage.update_from_body(self)
if 'content-type' in self.headers:
if self.headers['content-type'] == 'application/x-www-form-urlencoded':
if 'application/x-www-form-urlencoded' in self.headers['content-type']:
self.post_params = repeatable_parse_qs(self.body)
self._set_dict_callbacks()
@ -1501,10 +1520,7 @@ class Request(HTTPMessage):
else:
use_cache = Request.cache
if not self.reqid:
print 'adding'
use_cache.add(self)
else:
print 'else adding'
@defer.inlineCallbacks
def async_save(self, cust_dbpool=None, cust_cache=None):
@ -2035,17 +2051,21 @@ class Request(HTTPMessage):
:type full_request: string
:rtype: Twisted deferred that calls back with a Request
"""
from .proxy import ProxyClientFactory, get_next_connection_id, ClientTLSContext
from .proxy import ProxyClientFactory, get_next_connection_id, ClientTLSContext, get_endpoint
from .config import SOCKS_PROXY
new_req = Request(full_request)
new_req.is_ssl = is_ssl
new_req.port = port
factory = ProxyClientFactory(new_req, save_all=False)
new_req._host = host
factory = ProxyClientFactory(new_req, save_all=False, stream_response=False, return_transport=None)
factory.intercepting_macros = {}
factory.connection_id = get_next_connection_id()
if is_ssl:
reactor.connectSSL(host, port, factory, ClientTLSContext())
else:
reactor.connectTCP(host, port, factory)
yield factory.prepare_request()
endpoint = get_endpoint(host, port, is_ssl,
socks_config=SOCKS_PROXY)
yield endpoint.connect(factory)
new_req = yield factory.data_defer
defer.returnValue(new_req)
@ -2099,11 +2119,14 @@ class Response(HTTPMessage):
def __init__(self, full_response=None, update_content_length=True):
# Resets instance variables
self.clear()
# Called after instance vars since some callbacks depend on
# instance vars
HTTPMessage.__init__(self, full_response, update_content_length)
# metadata that is unique to a specific Response instance
self.metadata_unique_keys = ('rspid',)
# After message init so that other instance vars are initialized
self._set_dict_callbacks()
@ -2190,9 +2213,13 @@ class Response(HTTPMessage):
###########
## Metadata
def get_metadata(self):
def get_metadata(self, include_unique=True):
data = {}
data['rspid'] = self.rspid
if not include_unique:
for k in self.metadata_unique_keys:
if k in data:
del data[k]
return data
def set_metadata(self, data):

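A small sketch (not part of the commit) of the copy and equality behaviour that metadata_unique_keys is after, assuming reqid defaults to None on a freshly constructed Request:

from pappyproxy.http import Request

req = Request('GET / HTTP/1.1\r\n\r\n')
req.reqid = '12'
dup = req.copy()    # copy() now calls get_metadata(include_unique=False)
print dup == req    # True: __eq__ ignores keys listed in metadata_unique_keys
print dup.reqid     # None: the unique reqid stays with the original
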
View file

@ -73,10 +73,8 @@ class InterceptMacro(object):
self.intercept_requests = False
self.intercept_responses = False
self.do_req = False
self.do_rsp = False
self.do_async_req = False
self.do_async_rsp = False
self.async_req = False
self.async_rsp = False
def __repr__(self):
return "<InterceptingMacro (%s)>" % self.name
@ -301,3 +299,79 @@ def gen_imacro(short_name='', long_name=''):
template = env.get_template('intmacro.py.template')
return template.render(**subs)
@defer.inlineCallbacks
def mangle_request(request, intmacros):
"""
Mangle a request with a list of intercepting macros.
Returns a tuple that contains the resulting request (with its unmangled
value set if needed) and a bool that states whether the request was modified
Returns (None, True) if the request was dropped.
:rtype: (Request, Bool)
"""
# Mangle requests with list of intercepting macros
if not intmacros:
defer.returnValue((request, False))
cur_req = request.copy()
for k, macro in intmacros.iteritems():
if macro.intercept_requests:
if macro.async_req:
cur_req = yield macro.async_mangle_request(cur_req.copy())
else:
cur_req = macro.mangle_request(cur_req.copy())
if cur_req is None:
defer.returnValue((None, True))
mangled = False
if not cur_req == request or \
not cur_req.host == request.host or \
not cur_req.port == request.port:
# copy unique data to new request and clear it off old one
cur_req.unmangled = request
cur_req.unmangled.is_unmangled_version = True
if request.response:
cur_req.response = request.response
request.response = None
mangled = True
else:
# return the original request
cur_req = request
defer.returnValue((cur_req, mangled))
@defer.inlineCallbacks
def mangle_response(request, intmacros):
"""
Mangle a request's response with a list of intercepting macros.
Returns a bool stating whether the request's response was modified.
Unmangled values will be updated as needed.
:rtype: Bool
"""
if not intmacros:
defer.returnValue(False)
old_rsp = request.response
# We copy so that changes to request.response don't mangle the original response
request.response = request.response.copy()
for k, macro in intmacros.iteritems():
if macro.intercept_responses:
if macro.async_rsp:
request.response = yield macro.async_mangle_response(request)
else:
request.response = macro.mangle_response(request)
if request.response is None:
defer.returnValue(True)
mangled = False
if not old_rsp == request.response:
request.response.rspid = old_rsp.rspid
old_rsp.rspid = None
request.response.unmangled = old_rsp
request.response.unmangled.is_unmangled_version = True
mangled = True
else:
request.response = old_rsp
defer.returnValue(mangled)

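A minimal sketch (names invented, not from the commit) of a synchronous intercepting macro using the renamed async_req flag, and of the new mangle_request helper consuming an ordered dict of macros:

from collections import OrderedDict
from pappyproxy import macros
from pappyproxy.http import Request

class HeaderStampMacro(macros.InterceptMacro):
    def __init__(self):
        macros.InterceptMacro.__init__(self)
        self.name = 'headerstamp'
        self.intercept_requests = True   # only touch requests
        self.async_req = False           # mangle_request below is synchronous

    def mangle_request(self, request):
        request.headers['X-Stamp'] = 'pappy'
        return request

intmacros = OrderedDict()
intmacros['headerstamp'] = HeaderStampMacro()
# the returned Deferred fires with (mangled_request, True) since the copy differs
d = macros.mangle_request(Request('GET / HTTP/1.1\r\n\r\n'), intmacros)
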
View file

@ -26,7 +26,7 @@ from twisted.internet.protocol import ServerFactory
from twisted.internet.threads import deferToThread
crochet.no_setup()
server_factory = None
server_factories = []
main_context = context.Context()
all_contexts = [main_context]
plugin_loader = None
@ -69,7 +69,7 @@ def custom_int_handler(signum, frame):
@defer.inlineCallbacks
def main():
global server_factory
global server_factories
global plugin_loader
global cons
settings = parse_args()
@ -116,17 +116,24 @@ def main():
if config.DEBUG_DIR and os.path.exists(config.DEBUG_DIR):
shutil.rmtree(config.DEBUG_DIR)
print 'Removing old debugging output'
server_factory = proxy.ProxyServerFactory(save_all=True)
listen_strs = []
ports = []
for listener in config.LISTENERS:
server_factory = proxy.ProxyServerFactory(save_all=True)
try:
port = reactor.listenTCP(listener[0], server_factory, interface=listener[1])
listener_str = 'port %d' % listener[0]
if listener[1] not in ('127.0.0.1', 'localhost'):
listener_str += ' (bound to %s)' % listener[1]
if 'forward_host_ssl' in listener and listener['forward_host_ssl']:
server_factory.force_ssl = True
server_factory.forward_host = listener['forward_host_ssl']
elif 'forward_host' in listener and listener['forward_host']:
server_factory.force_ssl = False
server_factory.forward_host = listener['forward_host']
port = reactor.listenTCP(listener['port'], server_factory, interface=listener['interface'])
listener_str = 'port %d' % listener['port']
if listener['interface'] not in ('127.0.0.1', 'localhost'):
listener_str += ' (bound to %s)' % listener['interface']
listen_strs.append(listener_str)
ports.append(port)
server_factories.append(server_factory)
except CannotListenError as e:
print repr(e)
if listen_strs:

View file

@ -13,6 +13,7 @@ import stat
from .proxy import add_intercepting_macro as proxy_add_intercepting_macro
from .proxy import remove_intercepting_macro as proxy_remove_intercepting_macro
from .colors import Colors
from .util import PappyException
from twisted.internet import defer
@ -93,7 +94,8 @@ def add_intercepting_macro(name, macro):
only use this if you may need to modify messages before they are
passed along.
"""
proxy_add_intercepting_macro(name, macro, pappyproxy.pappy.server_factory.intercepting_macros)
for factory in pappyproxy.pappy.server_factories:
proxy_add_intercepting_macro(name, macro, factory.intercepting_macros)
def remove_intercepting_macro(name):
"""
@ -102,14 +104,18 @@ def remove_intercepting_macro(name):
:func:`pappyproxy.plugin.add_intercepting_macro` to identify which
macro you would like to stop.
"""
proxy_remove_intercepting_macro(name, pappyproxy.pappy.server_factory.intercepting_macros)
for factory in pappyproxy.pappy.server_factories:
proxy_remove_intercepting_macro(name, factory.intercepting_macros)
def active_intercepting_macros():
"""
Returns a list of the active intercepting macro objects. Modifying
this list will not affect which macros are active.
"""
return [v for k, v in pappyproxy.pappy.server_factory.intercepting_macros.iteritems() ]
ret = []
for factory in pappyproxy.pappy.server_factories:
ret += [v for k, v in factory.intercepting_macros.iteritems() ]
return ret
def in_memory_reqs():
"""
@ -158,3 +164,33 @@ def run_cmd(cmd):
existing APIs to do what you want before using this.
"""
pappyproxy.pappy.cons.onecmd(cmd)
def require_modules(*largs):
"""
A wrapper to make sure that plugin dependencies are installed. For example,
if a command requires the ``psutil`` and ``objgraph`` packages, you should
format your command like::
@require_modules('psutil', 'objgraph')
def my_command(line):
import objgraph
import psutil
# ... rest of command ...
If the command is run without all of the required modules available, it will
print an error instead of running.
"""
def wr(func):
def wr2(*args, **kwargs):
missing = []
for l in largs:
try:
imp.find_module(l)
except ImportError:
missing.append(l)
if missing:
print 'Command requires %s module(s)' % (', '.join([Colors.RED+m+Colors.ENDC for m in missing]))
else:
return func(*args, **kwargs)
return wr2
return wr

View file

@ -11,6 +11,7 @@ from pappyproxy.util import PappyException
from pappyproxy.requestcache import RequestCache
from pappyproxy.console import print_requests
from pappyproxy.pappy import heapstats, cons
from pappyproxy.plugin import require_modules
from twisted.internet import defer
def cache_info(line):
@ -23,19 +24,16 @@ def cache_info(line):
rs = sorted(rl, key=lambda r: Request.cache._last_used[r.reqid], reverse=True)
print_requests(rs)
@require_modules('psutil')
def memory_info(line):
try:
import psutil
except ImportError:
raise PappyException('This command requires the psutil package')
import psutil
proc = psutil.Process(os.getpid())
mem = proc.memory_info().rss
megabyte = (float(mem)/1024)/1024
print 'Memory usage: {0:.2f} Mb ({1} bytes)'.format(megabyte, mem)
@require_modules('guppy')
def heap_info(line):
if heapstats is None:
raise PappyException('Command requires the guppy library')
size = heapstats.heap().size
print 'Heap usage: {0:.2f} Mb'.format(size/(1024.0*1024.0))
print heapstats.heap()
@ -54,11 +52,9 @@ def limit_info(line):
print 'Soft limit is now:', soft
print 'Hard limit is now:', hard
@require_modules('objgraph')
def graph_randobj(line):
try:
import objgraph
except ImportError:
raise PappyException('This command requires the objgraph library')
import objgraph
args = shlex.split(line)
if len(args) > 1:
fname = args[1]

View file

@ -2,12 +2,13 @@ import HTMLParser
import StringIO
import base64
import clipboard
import datetime
import gzip
import shlex
import string
import urllib
from pappyproxy.util import PappyException, hexdump
from pappyproxy.util import PappyException, hexdump, printable_data
def print_maybe_bin(s):
binary = False
@ -231,6 +232,14 @@ def gzip_encode_raw(line):
to a file.
"""
print _code_helper(line, gzip_encode_helper, copy=False)
def unix_time_decode_helper(line):
unix_time = int(line.strip())
dtime = datetime.datetime.fromtimestamp(unix_time)
return dtime.strftime('%Y-%m-%d %H:%M:%S')
def unix_time_decode(line):
print _code_helper(line, unix_time_decode_helper)
def load_cmds(cmd):
cmd.set_cmds({
@ -254,6 +263,7 @@ def load_cmds(cmd):
'html_encode_raw': (html_encode_raw, None),
'gzip_decode_raw': (gzip_decode_raw, None),
'gzip_encode_raw': (gzip_encode_raw, None),
'unixtime_decode': (unix_time_decode, None),
})
cmd.add_aliases([
('base64_decode', 'b64d'),
@ -276,4 +286,5 @@ def load_cmds(cmd):
('html_encode_raw', 'htmler'),
('gzip_decode_raw', 'gzdr'),
('gzip_encode_raw', 'gzer'),
('unixtime_decode', 'uxtd'),
])

View file

@ -96,9 +96,13 @@ def run_int_macro(line):
if args[0] not in int_macro_dict:
raise PappyException('%s not a loaded intercepting macro' % line)
macro = int_macro_dict[args[0]]
macro.init(args[1:])
add_intercepting_macro(macro.name, macro)
print '"%s" started' % macro.name
try:
macro.init(args[1:])
add_intercepting_macro(macro.name, macro)
print '"%s" started' % macro.name
except Exception as e:
print 'Error initializing macro:'
raise e
def stop_int_macro(line):
"""

View file

@ -58,6 +58,7 @@ class MangleInterceptMacro(InterceptMacro):
defer.returnValue(None)
mangled_req = Request(text, update_content_length=True)
mangled_req._host = request.host
mangled_req.port = request.port
mangled_req.is_ssl = request.is_ssl
@ -126,7 +127,6 @@ def check_reqid(reqid):
def start_editor(reqid):
script_loc = os.path.join(config.PAPPY_DIR, "plugins", "vim_repeater", "repeater.vim")
#print "RepeaterSetup %d %d"%(reqid, comm_port)
subprocess.call(["vim", "-S", script_loc, "-c", "RepeaterSetup %s %d"%(reqid, comm.comm_port)])
####################

View file

@ -66,7 +66,10 @@ def clrmem(line):
"""
to_delete = list(pappyproxy.http.Request.cache.inmem_reqs)
for r in to_delete:
yield r.deep_delete()
try:
yield r.deep_delete()
except PappyException as e:
print str(e)
def gencerts(line):
"""

View file

@ -74,9 +74,10 @@ def print_request_extended(request):
print_pairs = []
print_pairs.append(('Made on', time_made_str))
print_pairs.append(('ID', request.reqid))
print_pairs.append(('Verb', verb))
print_pairs.append(('URL', request.url_color))
print_pairs.append(('Host', host))
print_pairs.append(('Path', path_formatter(request.full_path)))
print_pairs.append(('Verb', verb))
print_pairs.append(('Status Code', response_code))
print_pairs.append(('Request Length', reqlen))
print_pairs.append(('Response Length', rsplen))
@ -97,6 +98,14 @@ def print_tree(tree):
# Prints a tree. Takes in a sorted list of path tuples
_print_tree_helper(tree, 0, [])
def guess_pretty_print_fmt(msg):
if 'content-type' in msg.headers:
if 'json' in msg.headers['content-type']:
return 'json'
elif 'www-form' in msg.headers['content-type']:
return 'form'
return 'text'
def pretty_print_body(fmt, body):
try:
if fmt.lower() == 'json':
@ -111,6 +120,8 @@ def pretty_print_body(fmt, body):
s += Colors.ENDC
s += urllib.unquote(v)
print s
elif fmt.lower() == 'text':
print body
else:
raise PappyException('"%s" is not a valid format' % fmt)
except PappyException as e:
@ -165,8 +176,59 @@ def _print_tree_helper(tree, depth, print_bars):
curkey = '/'
print _get_tree_prefix(depth, print_bars, True) + curkey
_print_tree_helper(subtree, depth+1, print_bars + [False])
def print_params(req, params=None):
if not req.url_params.all_pairs() and not req.body:
print 'Request %s has no url or data parameters' % req.reqid
print ''
if req.url_params.all_pairs():
print Styles.TABLE_HEADER + "Url Params" + Colors.ENDC
for k, v in req.url_params.all_pairs():
if params is None or (params and k in params):
print Styles.KV_KEY+str(k)+': '+Styles.KV_VAL+str(v)
print ''
if req.body:
print Styles.TABLE_HEADER + "Body/POST Params" + Colors.ENDC
pretty_print_body(guess_pretty_print_fmt(req), req.body)
print ''
if req.cookies.all_pairs():
print Styles.TABLE_HEADER + "Cookies" + Colors.ENDC
for k, v in req.cookies.all_pairs():
if params is None or (params and k in params):
print Styles.KV_KEY+str(k)+': '+Styles.KV_VAL+str(v)
print ''
# multiform request when we support it
def add_param(found_params, kind, k, v, reqid):
if not k in found_params:
found_params[k] = {}
if kind in found_params[k]:
found_params[k][kind].append((reqid, v))
else:
found_params[k][kind] = [(reqid, v)]
def print_param_info(param_info):
for k, d in param_info.iteritems():
print Styles.TABLE_HEADER + k + Colors.ENDC
for param_type, valpairs in d.iteritems():
print param_type
value_ids = {}
for reqid, val in valpairs:
ids = value_ids.get(val, [])
ids.append(reqid)
value_ids[val] = ids
for val, ids in value_ids.iteritems():
if len(ids) <= 15:
idstr = ', '.join(ids)
else:
idstr = ', '.join(ids[:15]) + '...'
if val == '':
printstr = (Colors.RED + 'BLANK' + Colors.ENDC + 'x%d (%s)') % (len(ids), idstr)
else:
printstr = (Colors.GREEN + '%s' + Colors.ENDC + 'x%d (%s)') % (val, len(ids), idstr)
print printstr
print ''
####################
## Command functions
@ -359,6 +421,70 @@ def pretty_print_response(line):
else:
print 'No response associated with request %s' % req.reqid
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def print_params_cmd(line):
"""
View the parameters of a request
Usage: print_params <reqid(s)> [key(s)]
"""
args = shlex.split(line)
reqid = args[0]
if len(args) > 1:
keys = args[1:]
else:
keys = None
reqs = yield load_reqlist(reqid)
for req in reqs:
if len(reqs) > 1:
print 'Request %s:' % req.reqid
print_params(req, keys)
if len(reqs) > 1:
print '-'*30
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def get_param_info(line):
args = shlex.split(line)
if args and args[0] == 'ct':
contains = True
args = args[1:]
else:
contains = False
if args:
params = tuple(args)
else:
params = None
def check_key(k, params, contains):
if contains:
for p in params:
if p.lower() in k.lower():
return True
else:
if params is None or k in params:
return True
return False
found_params = {}
ids = yield main_context_ids()
for i in ids:
req = yield Request.load_request(i)
for k, v in req.url_params.all_pairs():
if check_key(k, params, contains):
add_param(found_params, 'Url Parameter', k, v, req.reqid)
for k, v in req.post_params.all_pairs():
if check_key(k, params, contains):
add_param(found_params, 'POST Parameter', k, v, req.reqid)
for k, v in req.cookies.all_pairs():
if check_key(k, params, contains):
add_param(found_params, 'Cookie', k, v, req.reqid)
print_param_info(found_params)
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def dump_response(line):
@ -387,6 +513,11 @@ def site_map(line):
Print the site map. Only includes requests in the current context.
Usage: site_map
"""
args = shlex.split(line)
if len(args) > 0 and args[0] == 'p':
paths = True
else:
paths = False
ids = yield main_context_ids()
paths_set = set()
for reqid in ids:
@ -394,7 +525,11 @@ def site_map(line):
if req.response and req.response.response_code != 404:
paths_set.add(req.path_tuple)
tree = sorted(list(paths_set))
print_tree(tree)
if paths:
for p in tree:
print ('/'.join(list(p)))
else:
print_tree(tree)
###############
@ -412,6 +547,8 @@ def load_cmds(cmd):
'view_full_response': (view_full_response, None),
'view_response_bytes': (view_response_bytes, None),
'pretty_print_response': (pretty_print_response, None),
'print_params': (print_params_cmd, None),
'param_info': (get_param_info, None),
'site_map': (site_map, None),
'dump_response': (dump_response, None),
})
@ -420,12 +557,16 @@ def load_cmds(cmd):
('view_request_info', 'viq'),
('view_request_headers', 'vhq'),
('view_full_request', 'vfq'),
('view_full_request', 'kjq'),
('view_request_bytes', 'vbq'),
('pretty_print_request', 'ppq'),
('view_response_headers', 'vhs'),
('view_full_response', 'vfs'),
('view_full_response', 'kjs'),
('view_response_bytes', 'vbs'),
('pretty_print_response', 'pps'),
('print_params', 'pprm'),
('param_info', 'pri'),
('site_map', 'sm'),
#('dump_response', 'dr'),
])

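Hypothetical console usage of the commands and aliases registered above (the request id and parameter names are made up, and the interactive prompt is assumed): print_params shows the URL, body and cookie parameters of a request, param_info ct searches the current context for parameter names containing a substring, and site_map p prints flat paths instead of the tree.

pappy> print_params 12 sessionid
pappy> param_info ct sess
pappy> site_map p
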
View file

@ -119,7 +119,7 @@ def submit_current_buffer():
full_request = '\n'.join(curbuf)
commdata = {'action': 'submit',
'full_message': base64.b64encode(full_request),
'tags': {'repeater'},
'tags': ['repeater'],
'port': int(vim.eval("s:repport")),
'host': vim.eval("s:rephost")}
if vim.eval("s:repisssl") == '1':

View file

@ -1,3 +1,4 @@
import collections
import copy
import datetime
import os
@ -8,6 +9,7 @@ from OpenSSL import crypto
from pappyproxy import config
from pappyproxy import context
from pappyproxy import http
from pappyproxy import macros
from pappyproxy.util import PappyException, printable_data
from twisted.internet import defer
from twisted.internet import reactor, ssl
@ -56,6 +58,34 @@ def log_request(request, id=None, symbol='*', verbosity_level=3):
r_split = request.split('\r\n')
for l in r_split:
log(l, id, symbol, verbosity_level)
def get_endpoint(target_host, target_port, target_ssl, socks_config=None):
# Imports go here to allow mocking for tests
from twisted.internet.endpoints import SSL4ClientEndpoint, TCP4ClientEndpoint
from txsocksx.client import SOCKS5ClientEndpoint
from txsocksx.tls import TLSWrapClientEndpoint
from twisted.internet.interfaces import IOpenSSLClientConnectionCreator
if socks_config is not None:
sock_host = socks_config['host']
sock_port = int(socks_config['port'])
methods = {'anonymous': ()}
if 'username' in socks_config and 'password' in socks_config:
methods['login'] = (socks_config['username'], socks_config['password'])
tcp_endpoint = TCP4ClientEndpoint(reactor, sock_host, sock_port)
socks_endpoint = SOCKS5ClientEndpoint(target_host, target_port, tcp_endpoint, methods=methods)
if target_ssl:
endpoint = TLSWrapClientEndpoint(ClientTLSContext(), socks_endpoint)
else:
endpoint = socks_endpoint
else:
if target_ssl:
endpoint = SSL4ClientEndpoint(reactor, target_host, target_port,
ClientTLSContext())
else:
endpoint = TCP4ClientEndpoint(reactor, target_host, target_port)
return endpoint
class ClientTLSContext(ssl.ClientContextFactory):
isClient = 1
@ -71,6 +101,7 @@ class ProxyClient(LineReceiver):
self._sent = False
self.request = request
self.data_defer = defer.Deferred()
self.completed = False
self._response_obj = http.Response()
@ -83,24 +114,12 @@ class ProxyClient(LineReceiver):
line = ''
self._response_obj.add_line(line)
self.log(line, symbol='r<', verbosity_level=3)
if self.factory.stream_response:
self.log('Returning line back through stream')
self.factory.return_transport.write(line+'\r\n')
else:
self.log('Not streaming, not returning')
self.log(self.factory.stream_response)
if self._response_obj.headers_complete:
if self._response_obj.complete:
self.handle_response_end()
return
self.log("Headers end, length given, waiting for data", verbosity_level=3)
self.setRawMode()
def rawDataReceived(self, *args, **kwargs):
data = args[0]
self.log('Returning data back through stream')
if self.factory.stream_response:
self.factory.return_transport.write(data)
if not self._response_obj.complete:
if data:
if config.DEBUG_TO_FILE or config.DEBUG_VERBOSITY > 0:
@ -110,71 +129,21 @@ class ProxyClient(LineReceiver):
self.log(l, symbol='<rd', verbosity_level=3)
self._response_obj.add_data(data)
def dataReceived(self, data):
if self.factory.stream_response:
self.factory.return_transport.write(data)
LineReceiver.dataReceived(self, data)
if not self.completed:
if self._response_obj.complete:
self.completed = True
self.handle_response_end()
def connectionMade(self):
self._connection_made()
@defer.inlineCallbacks
def _connection_made(self):
self.log('Connection established, sending request...', verbosity_level=3)
# Make sure to add errback
self.log("Connection made, sending request", verbosity_level=3)
lines = self.request.full_request.splitlines()
for l in lines:
self.log(l, symbol='>r', verbosity_level=3)
sendreq = self.request
if context.in_scope(sendreq):
to_mangle = copy.copy(self.factory.intercepting_macros).iteritems()
if self.factory.save_all:
# It isn't the actual time, but this should work in case
# we do an 'ls' before it gets a real time saved
self.request.time_start = datetime.datetime.utcnow()
if self.factory.stream_response and not to_mangle:
self.request.async_deep_save()
else:
yield self.request.async_deep_save()
## Run intercepting macros
# if we don't copy it, when we delete a macro from the console,
# we get a crash. We do a shallow copy to keep the macro
# instances the same.
for k, macro in to_mangle:
if macro.intercept_requests:
if macro.async_req:
sendreq = yield macro.async_mangle_request(sendreq)
else:
sendreq = macro.mangle_request(sendreq)
if sendreq is None:
self.log('Request dropped, losing connection')
self.transport.loseConnection()
self.request = None
self.data_defer.callback(None)
if self.factory.save_all:
yield sendreq.async_deep_save()
defer.returnValue(None)
if sendreq != self.request:
sendreq.unmangled = self.request
if self.factory.save_all:
sendreq.time_start = datetime.datetime.utcnow()
yield sendreq.async_deep_save()
else:
self.log("Request out of scope, passing along unmangled")
if not self._sent:
self.factory.start_time = datetime.datetime.utcnow()
self.transport.write(sendreq.full_request)
self.request = sendreq
self.request.submitted = True
self._sent = True
self.data_defer.callback(sendreq)
defer.returnValue(None)
def connectionLost(self, reason):
pass
self.transport.write(self.request.full_request)
def handle_response_end(self, *args, **kwargs):
self.log("Remote response finished, returning data to original stream")
@ -182,7 +151,13 @@ class ProxyClient(LineReceiver):
self.log('Response ended, losing connection')
self.transport.loseConnection()
assert self._response_obj.full_response
self.factory.return_request_pair(self.request)
self.data_defer.callback(self.request)
def clientConnectionFailed(self, connector, reason):
self.log("Connection with remote server failed: %s" % reason)
def clientConnectionLost(self, connector, reason):
self.log("Connection with remote server lost: %s" % reason)
class ProxyClientFactory(ClientFactory):
@ -202,9 +177,13 @@ class ProxyClientFactory(ClientFactory):
def log(self, message, symbol='*', verbosity_level=1):
log(message, id=self.connection_id, symbol=symbol, verbosity_level=verbosity_level)
def buildProtocol(self, addr):
def buildProtocol(self, addr, _do_callback=True):
# _do_callback is intended to help with testing and should not be modified
p = ProxyClient(self.request)
p.factory = self
self.log("Building protocol", verbosity_level=3)
if _do_callback:
p.data_defer.addCallback(self.return_request_pair)
return p
def clientConnectionFailed(self, connector, reason):
@ -213,8 +192,44 @@ class ProxyClientFactory(ClientFactory):
def clientConnectionLost(self, connector, reason):
self.log("Connection lost with remote server: %s" % reason.getErrorMessage())
@defer.inlineCallbacks
def prepare_request(self):
"""
Prepares request for submitting
Saves the associated request with a temporary start time, mangles it, then
saves the mangled version with an update start time.
"""
sendreq = self.request
if context.in_scope(sendreq):
mangle_macros = copy.copy(self.intercepting_macros)
self.request.time_start = datetime.datetime.utcnow()
if self.save_all:
if self.stream_response and not mangle_macros:
self.request.async_deep_save()
else:
yield self.request.async_deep_save()
(sendreq, mangled) = yield macros.mangle_request(sendreq, mangle_macros)
if sendreq and mangled and self.save_all:
self.start_time = datetime.datetime.utcnow()
sendreq.time_start = self.start_time
yield sendreq.async_deep_save()
else:
self.log("Request out of scope, passing along unmangled")
self.request = sendreq
defer.returnValue(self.request)
@defer.inlineCallbacks
def return_request_pair(self, request):
"""
If the request is in scope, it saves the completed request,
sets the start/end time, mangles the response, saves the
mangled version, then writes the response back through the
transport.
"""
self.end_time = datetime.datetime.utcnow()
if config.DEBUG_TO_FILE or config.DEBUG_VERBOSITY > 0:
log_request(printable_data(request.response.full_response), id=self.connection_id, symbol='<m', verbosity_level=3)
@ -222,39 +237,18 @@ class ProxyClientFactory(ClientFactory):
request.time_start = self.start_time
request.time_end = self.end_time
if context.in_scope(request):
to_mangle = copy.copy(self.intercepting_macros).iteritems()
mangle_macros = copy.copy(self.intercepting_macros)
if self.save_all:
if self.stream_response and not to_mangle:
if self.stream_response and not mangle_macros:
request.async_deep_save()
else:
yield request.async_deep_save()
# if we don't copy it, when we delete a macro from the console,
# we get a crash. We do a shallow copy to keep the macro
# instances the same.
old_rsp = request.response
for k, macro in to_mangle:
if macro.intercept_responses:
if macro.async_rsp:
mangled_rsp = yield macro.async_mangle_response(request)
else:
mangled_rsp = macro.mangle_response(request)
mangled = yield macros.mangle_response(request, mangle_macros)
if mangled_rsp is None:
request.response = None
self.data_defer.callback(request)
if self.save_all:
yield request.async_deep_save()
self.log("Response dropped, losing connection")
self.transport.loseConnection()
defer.returnValue(None)
request.response = mangled_rsp
if request.response != old_rsp:
request.response.unmangled = old_rsp
if self.save_all:
yield request.async_deep_save()
if mangled and self.save_all:
yield request.async_deep_save()
if request.response and (config.DEBUG_TO_FILE or config.DEBUG_VERBOSITY > 0):
log_request(printable_data(request.response.full_response),
@ -267,8 +261,10 @@ class ProxyClientFactory(ClientFactory):
class ProxyServerFactory(ServerFactory):
def __init__(self, save_all=False):
self.intercepting_macros = {}
self.intercepting_macros = collections.OrderedDict()
self.save_all = save_all
self.force_ssl = False
self.forward_host = None
def buildProtocol(self, addr):
prot = ProxyServer()
@ -288,101 +284,167 @@ class ProxyServer(LineReceiver):
self._connect_response = False
self._forward = True
self._connect_uri = None
self._connect_host = None
self._connect_ssl = None
self._connect_port = None
self._client_factory = None
def lineReceived(self, *args, **kwargs):
line = args[0]
self.log(line, symbol='>', verbosity_level=3)
self._request_obj.add_line(line)
if self._request_obj.verb.upper() == 'CONNECT':
self._connect_response = True
self._forward = False
self._connect_uri = self._request_obj.url
if self._request_obj.headers_complete:
self.setRawMode()
if self._request_obj.complete:
self.setLineMode()
try:
self.full_request_received()
except PappyException as e:
print str(e)
def rawDataReceived(self, *args, **kwargs):
data = args[0]
self._request_obj.add_data(data)
self.log(data, symbol='d>', verbosity_level=3)
def dataReceived(self, *args, **kwargs):
# receives the data then checks if the request is complete.
# if it is, it calls full_request_received
LineReceiver.dataReceived(self, *args, **kwargs)
if self._request_obj.complete:
try:
self.full_request_received()
except PappyException as e:
print str(e)
def full_request_received(self, *args, **kwargs):
def _start_tls(self, cert_host=None):
# Generate a cert for the hostname and start tls
if cert_host is None:
host = self._request_obj.host
else:
host = cert_host
if not host in cached_certs:
log("Generating cert for '%s'" % host,
verbosity_level=3)
(pkey, cert) = generate_cert(host,
config.CERT_DIR)
cached_certs[host] = (pkey, cert)
else:
log("Using cached cert for %s" % host, verbosity_level=3)
(pkey, cert) = cached_certs[host]
ctx = ServerTLSContext(
private_key=pkey,
certificate=cert,
)
self.transport.startTLS(ctx, self.factory)
def _connect_okay(self):
self.log('Responding to browser CONNECT request', verbosity_level=3)
okay_str = 'HTTP/1.1 200 Connection established\r\n\r\n'
self.transport.write(okay_str)
def full_request_received(self):
global cached_certs
self.log('End of request', verbosity_level=3)
if self._connect_response:
self.log('Responding to browser CONNECT request', verbosity_level=3)
okay_str = 'HTTP/1.1 200 Connection established\r\n\r\n'
self.transport.write(okay_str)
forward = True
if self._request_obj.verb.upper() == 'CONNECT':
self._connect_okay()
self._start_tls()
self._connect_uri = self._request_obj.url
self._connect_host = self._request_obj.host
self._connect_ssl = True # do we just assume connect means ssl?
self._connect_port = self._request_obj.port
self.log('uri=%s, ssl=%s, connect_port=%s' % (self._connect_uri, self._connect_ssl, self._connect_port), verbosity_level=3)
forward = False
# Generate a cert for the hostname
if not self._request_obj.host in cached_certs:
log("Generating cert for '%s'" % self._request_obj.host,
verbosity_level=3)
(pkey, cert) = generate_cert(self._request_obj.host,
config.CERT_DIR)
cached_certs[self._request_obj.host] = (pkey, cert)
else:
log("Using cached cert for %s" % self._request_obj.host, verbosity_level=3)
(pkey, cert) = cached_certs[self._request_obj.host]
ctx = ServerTLSContext(
private_key=pkey,
certificate=cert,
)
self.transport.startTLS(ctx, self.factory)
# if self._request_obj.host == 'pappy':
# self._create_pappy_response()
# forward = False
if self._forward:
self.log("Forwarding to %s on %d" % (self._request_obj.host, self._request_obj.port))
if not self.factory.intercepting_macros:
stream = True
else:
# We only want to call send_response_back if we're not streaming
stream = False
self.log('Creating client factory, stream=%s' % stream)
factory = ProxyClientFactory(self._request_obj,
save_all=self.factory.save_all,
stream_response=stream,
return_transport=self.transport)
factory.intercepting_macros = self.factory.intercepting_macros
factory.connection_id = self.connection_id
if not stream:
factory.data_defer.addCallback(self.send_response_back)
if self._request_obj.is_ssl:
self.log("Accessing over SSL...", verbosity_level=3)
reactor.connectSSL(self._request_obj.host, self._request_obj.port, factory, ClientTLSContext())
else:
self.log("Accessing over TCP...", verbosity_level=3)
reactor.connectTCP(self._request_obj.host, self._request_obj.port, factory)
# Reset per-request variables
# if _request_obj.host is a listener, forward = False
if forward:
self._generate_and_submit_client()
self._reset()
def _reset(self):
# Reset per-request variables and have the request default to using
# some parameters from the connect request
self.log("Resetting per-request data", verbosity_level=3)
self._connect_response = False
self._forward = True
self._request_obj = http.Request()
if self._connect_uri:
self._request_obj.url = self._connect_uri
if self._connect_host:
self._request_obj._host = self._connect_host
if self._connect_ssl:
self._request_obj.is_ssl = self._connect_ssl
if self._connect_port:
self._request_obj.port = self._connect_port
self.setLineMode()
def _generate_and_submit_client(self):
"""
Sets up self._client_factory with self._request_obj then calls back to
submit the request
"""
self.log("Forwarding to %s on %d" % (self._request_obj.host, self._request_obj.port))
if self.factory.intercepting_macros:
stream = False
else:
stream = True
self.log('Creating client factory, stream=%s' % stream)
self._client_factory = ProxyClientFactory(self._request_obj,
save_all=self.factory.save_all,
stream_response=stream,
return_transport=self.transport)
self._client_factory.intercepting_macros = self.factory.intercepting_macros
self._client_factory.connection_id = self.connection_id
if not stream:
self._client_factory.data_defer.addCallback(self.send_response_back)
d = self._client_factory.prepare_request()
d.addCallback(self._make_remote_connection)
return d
@defer.inlineCallbacks
def _make_remote_connection(self, req):
"""
Creates an endpoint to the target server using the given configuration
options then connects to the endpoint using self._client_factory
"""
self._request_obj = req
# If we have a socks proxy, wrap the endpoint in it
if context.in_scope(self._request_obj):
# Modify the request connection settings to match settings in the factory
if self.factory.force_ssl:
self._request_obj.is_ssl = True
if self.factory.forward_host:
self._request_obj.host = self.factory.forward_host
# Get connection from the request
endpoint = get_endpoint(self._request_obj.host,
self._request_obj.port,
self._request_obj.is_ssl,
socks_config=config.SOCKS_PROXY)
else:
endpoint = get_endpoint(self._request_obj.host,
self._request_obj.port,
self._request_obj.is_ssl)
# Connect via the endpoint
self.log("Accessing using endpoint")
yield endpoint.connect(self._client_factory)
self.log("Connected")
def send_response_back(self, response):
if response is not None:
self.transport.write(response.response.full_response)
self.log("Response sent back, losing connection")
self.transport.loseConnection()
def connectionMade(self):
if self.factory.force_ssl:
self._start_tls(self.factory.forward_host)
def connectionLost(self, reason):
self.log('Connection lost with browser: %s' % reason.getErrorMessage())
@ -425,7 +487,7 @@ def load_certs_from_dir(cert_dir):
with open(cert_dir+'/'+config.SSL_CA_FILE, 'rt') as f:
ca_raw = f.read()
except IOError:
raise PappyException("Could not load CA cert!")
raise PappyException("Could not load CA cert! Generate certs using the `gencerts` command then add the .crt file to your browser.")
try:
with open(cert_dir+'/'+config.SSL_PKEY_FILE, 'rt') as f:

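A brief sketch (not from the commit) of the new get_endpoint helper: socks_config mirrors the SOCKS_PROXY dict from config, and the returned endpoint is connected with a ProxyClientFactory, as submit_new and _make_remote_connection above do. The hosts and ports are made up.

from pappyproxy.proxy import get_endpoint

# without a SOCKS proxy: a plain TCP4/SSL4 client endpoint
plain = get_endpoint('example.com', 80, False)

# with a SOCKS proxy: wrapped in a SOCKS5 client endpoint, plus TLS
# wrapping when the target is SSL
socks = {'host': '127.0.0.1', 'port': 9050}
wrapped = get_endpoint('example.com', 443, True, socks_config=socks)
# wrapped.connect(proxy_client_factory) would then drive the request
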
View file

@ -186,8 +186,8 @@ class RequestCache(object):
break
@defer.inlineCallbacks
def load_by_tag(tag):
reqs = yield load_requests_by_tag(tag, cust_cache=self, cust_dbpool=self.dbpool)
def load_by_tag(self, tag):
reqs = yield pappyproxy.http.Request.load_requests_by_tag(tag, cust_cache=self, cust_dbpool=self.dbpool)
for req in reqs:
self.add(req)
defer.returnValue(reqs)

View file

@ -0,0 +1,65 @@
import pytest
import string
import mock
from collections import OrderedDict
from testutil import mock_deferred, func_deleted, TLSStringTransport, freeze, mock_int_macro, no_tcp
from pappyproxy.http import Request, Response
from pappyproxy import macros
class CloudToButtMacro(macros.InterceptMacro):
def __init__(self):
macros.InterceptMacro.__init__(self)
self.intercept_requests = True
self.intercept_responses = True
def mangle_request(self, request):
return Request(string.replace(request.full_message, 'cloud', 'butt'))
def mangle_response(self, response):
return Response(string.replace(response.full_message, 'cloud', 'butt'))
@pytest.fixture
def httprequest():
return Request(('POST /test-request HTTP/1.1\r\n'
'Content-Length: 4\r\n'
'\r\n'
'AAAA'))
@pytest.inlineCallbacks
def test_mangle_request_simple(httprequest):
orig_req = httprequest.copy() # in case it gets mangled
(new_req, mangled) = yield macros.mangle_request(orig_req, {})
assert new_req == orig_req
assert httprequest == orig_req
assert not mangled
@pytest.inlineCallbacks
def test_mangle_request_single(httprequest):
orig_req = httprequest.copy() # in case it gets mangled
macro = mock_int_macro(modified_req=('GET /modified HTTP/1.1\r\n\r\n'))
expected_req = Request('GET /modified HTTP/1.1\r\n\r\n')
(new_req, mangled) = yield macros.mangle_request(orig_req, {'testmacro': macro})
assert new_req == expected_req
assert httprequest == orig_req
assert httprequest.unmangled is None
assert new_req.unmangled == orig_req
assert mangled
@pytest.inlineCallbacks
def test_mangle_request_multiple(httprequest):
orig_req = httprequest.copy() # in case it gets mangled
macro = mock_int_macro(modified_req=('GET /cloud HTTP/1.1\r\n\r\n'))
macro2 = CloudToButtMacro()
intmacros = OrderedDict()
intmacros['testmacro'] = macro
intmacros['testmacro2'] = macro2
(new_req, mangled) = yield macros.mangle_request(orig_req, intmacros)
expected_req = Request('GET /butt HTTP/1.1\r\n\r\n')
assert new_req == expected_req
assert httprequest == orig_req
assert httprequest.unmangled is None
assert new_req.unmangled == orig_req
assert mangled

View file

@ -1,82 +1,56 @@
import os
import pytest
import mock
import twisted.internet
import twisted.test
import random
import datetime
import pappyproxy
from pappyproxy import http
from pappyproxy import macros
from pappyproxy import config
from pappyproxy.proxy import ProxyClient, ProxyClientFactory, ProxyServerFactory
from testutil import mock_deferred, func_deleted, func_ignored_deferred, func_ignored, no_tcp
from twisted.internet.protocol import ServerFactory
from twisted.test.iosim import FakeTransport
from twisted.internet import defer, reactor
from pappyproxy.proxy import ProxyClientFactory, ProxyServerFactory
from testutil import mock_deferred, func_deleted, TLSStringTransport, freeze, mock_int_macro, no_tcp
####################
## Fixtures
MANGLED_REQ = 'GET /mangled HTTP/1.1\r\n\r\n'
MANGLED_RSP = 'HTTP/1.1 500 MANGLED\r\nContent-Length: 0\r\n\r\n'
@pytest.fixture
def unconnected_proxyserver(mocker):
mocker.patch("twisted.test.iosim.FakeTransport.startTLS")
mocker.patch("pappyproxy.proxy.load_certs_from_dir", new=mock_generate_cert)
factory = ProxyServerFactory()
protocol = factory.buildProtocol(('127.0.0.1', 0))
protocol.makeConnection(FakeTransport(protocol, True))
return protocol
@pytest.fixture
def proxyserver(mocker):
mocker.patch("twisted.test.iosim.FakeTransport.startTLS")
mocker.patch("pappyproxy.proxy.load_certs_from_dir", new=mock_generate_cert)
factory = ProxyServerFactory()
protocol = factory.buildProtocol(('127.0.0.1', 0))
protocol.makeConnection(FakeTransport(protocol, True))
protocol.lineReceived('CONNECT https://www.AAAA.BBBB:443 HTTP/1.1')
protocol.lineReceived('')
protocol.transport.getOutBuffer()
return protocol
@pytest.fixture
def proxy_connection():
@defer.inlineCallbacks
def gen_connection(send_data, new_req=False, new_rsp=False,
drop_req=False, drop_rsp=False):
factory = ProxyClientFactory(http.Request(send_data))
macro = gen_mangle_macro(new_req, new_rsp, drop_req, drop_rsp)
factory.intercepting_macros['pappy_mangle'] = macro
protocol = factory.buildProtocol(None)
tr = FakeTransport(protocol, True)
protocol.makeConnection(tr)
sent = yield protocol.data_defer
print sent
defer.returnValue((protocol, sent, factory.data_defer))
return gen_connection
@pytest.fixture
def in_scope_true(mocker):
new_in_scope = mock.MagicMock()
new_in_scope.return_value = True
mocker.patch("pappyproxy.context.in_scope", new=new_in_scope)
return new_in_scope
@pytest.fixture
def in_scope_false(mocker):
new_in_scope = mock.MagicMock()
new_in_scope.return_value = False
mocker.patch("pappyproxy.context.in_scope", new=new_in_scope)
return new_in_scope
## Autorun fixtures
@pytest.fixture(autouse=True)
def ignore_save(mocker):
mocker.patch("pappyproxy.http.Request.async_deep_save", func_ignored_deferred)
def proxy_patches(mocker):
#mocker.patch("twisted.test.iosim.FakeTransport.startTLS")
mocker.patch("pappyproxy.proxy.load_certs_from_dir", new=mock_generate_cert)
@pytest.fixture
def server_factory():
return gen_server_factory()
def socks_config(mocker, config):
mocker.patch('pappyproxy.config.SOCKS_PROXY', new=config)
def gen_server_factory(int_macros={}):
factory = ProxyServerFactory()
factory.save_all = True
factory.intercepting_macros = int_macros
return factory
def gen_server_protocol(int_macros={}):
server_factory = gen_server_factory(int_macros=int_macros)
protocol = server_factory.buildProtocol(('127.0.0.1', 0))
tr = TLSStringTransport()
protocol.makeConnection(tr)
return protocol
def gen_client_protocol(req, stream_response=False):
return_transport = TLSStringTransport()
factory = ProxyClientFactory(req,
save_all=True,
stream_response=stream_response,
return_transport=return_transport)
protocol = factory.buildProtocol(('127.0.0.1', 0), _do_callback=False)
tr = TLSStringTransport()
protocol.makeConnection(tr)
return protocol
@pytest.fixture
def server_protocol():
return gen_server_protocol()
def mock_req_async_save(req):
req.reqid = str(random.randint(1,1000000))
return mock_deferred()
####################
## Mock functions
@ -134,151 +108,522 @@ def mock_generate_cert(cert_dir):
'-----END CERTIFICATE-----')
return (ca_key, private_key)
def gen_mangle_macro(modified_req=None, modified_rsp=None,
drop_req=False, drop_rsp=False):
macro = mock.MagicMock()
if modified_req or drop_req:
macro.async_req = True
macro.intercept_requests = True
if drop_req:
newreq = None
else:
newreq = http.Request(modified_req)
macro.async_mangle_request.return_value = mock_deferred(newreq)
else:
macro.intercept_requests = False
########
## Tests
if modified_rsp or drop_rsp:
macro.async_rsp = True
macro.intercept_responses = True
if drop_rsp:
newrsp = None
else:
newrsp = http.Response(modified_rsp)
macro.async_mangle_response.return_value = mock_deferred(newrsp)
else:
macro.intercept_responses = False
return macro
def notouch_mangle_req(request):
d = mock_deferred(request)
return d
def notouch_mangle_rsp(request):
d = mock_deferred(request.response)
return d
def req_mangler_change(request):
req = http.Request('GET /mangled HTTP/1.1\r\n\r\n')
d = mock_deferred(req)
return d
def rsp_mangler_change(request):
rsp = http.Response('HTTP/1.1 500 MANGLED\r\n\r\n')
d = mock_deferred(rsp)
return d
def req_mangler_drop(request):
return mock_deferred(None)
def rsp_mangler_drop(request):
return mock_deferred(None)
####################
## Unit test tests
def test_proxy_server_fixture(unconnected_proxyserver):
unconnected_proxyserver.transport.write('hello')
assert unconnected_proxyserver.transport.getOutBuffer() == 'hello'
@pytest.inlineCallbacks
def test_mock_deferreds():
d = mock_deferred('Hello!')
r = yield d
assert r == 'Hello!'
def test_deleted():
    with pytest.raises(NotImplementedError):
        reactor.connectTCP("www.google.com", "80", ServerFactory)
    with pytest.raises(NotImplementedError):
        reactor.connectSSL("www.google.com", "80", ServerFactory)

def test_no_tcp():
    from twisted.internet.endpoints import SSL4ClientEndpoint, TCP4ClientEndpoint
    from txsocksx.client import SOCKS5ClientEndpoint
    from txsocksx.tls import TLSWrapClientEndpoint
    with pytest.raises(NotImplementedError):
        SSL4ClientEndpoint('aasdfasdf.sdfwerqwer')
    with pytest.raises(NotImplementedError):
        TCP4ClientEndpoint('aasdfasdf.sdfwerqwer')
    with pytest.raises(NotImplementedError):
        SOCKS5ClientEndpoint('aasdfasdf.sdfwerqwer')
    with pytest.raises(NotImplementedError):
        TLSWrapClientEndpoint('asdf.2341')
################
### Proxy Server
def test_proxy_server_connect(mocker, server_protocol):
mstarttls = mocker.patch('pappyproxy.tests.testutil.TLSStringTransport.startTLS')
server_protocol.dataReceived('CONNECT https://www.AAAA.BBBB:443 HTTP/1.1\r\n\r\n')
assert server_protocol.transport.value() == 'HTTP/1.1 200 Connection established\r\n\r\n'
assert mstarttls.called
def test_proxy_server_forward_basic(mocker, server_protocol):
mforward = mocker.patch('pappyproxy.proxy.ProxyServer._generate_and_submit_client')
mreset = mocker.patch('pappyproxy.proxy.ProxyServer._reset')
req_contents = ('POST /fooo HTTP/1.1\r\n'
'Test-Header: foo\r\n'
'Content-Length: 4\r\n'
'\r\n'
'ABCD')
server_protocol.dataReceived(req_contents)
assert mforward.called
assert mreset.called
assert server_protocol._request_obj.full_message == req_contents
def test_proxy_server_connect_uri(mocker, server_protocol):
mforward = mocker.patch('pappyproxy.proxy.ProxyServer._generate_and_submit_client')
server_protocol.dataReceived('CONNECT https://www.AAAA.BBBB:443 HTTP/1.1\r\n\r\n')
server_protocol.dataReceived('GET /fooo HTTP/1.1\r\nTest-Header: foo\r\n\r\n')
assert server_protocol._connect_uri == 'https://www.AAAA.BBBB'
assert server_protocol._request_obj.url == 'https://www.AAAA.BBBB'
assert server_protocol._request_obj.port == 443
## ProxyServer._generate_and_submit_client
def test_proxy_server_create_client_factory(mocker, server_protocol):
mfactory = mock.MagicMock()
mfactory_class = mocker.patch('pappyproxy.proxy.ProxyClientFactory')
mfactory_class.return_value = mfactory
mocker.patch('pappyproxy.proxy.ProxyServer._make_remote_connection')
mfactory.prepare_request.return_value = mock_deferred(None)
full_req = ('POST /fooo HTTP/1.1\r\n'
'Test-Header: foo\r\n'
'Content-Length: 4\r\n'
'\r\n'
'ABCD')
server_protocol.connection_id = 100
server_protocol.dataReceived(full_req)
# Make sure we created a ClientFactory with the right arguments
f_args, f_kwargs = mfactory_class.call_args
assert len(f_args) == 1
# Make sure the request got to the client class
req = f_args[0]
assert req.full_message == full_req
# Make sure the correct settings got to the proxy
assert f_kwargs['stream_response'] == True
assert f_kwargs['save_all'] == True
# Make sure we initialized the client factory
assert mfactory.prepare_request.called
assert mfactory.connection_id == 100
assert server_protocol._make_remote_connection.called # should be immediately called because mock deferred
####################
## Proxy Server Tests
def test_proxy_server_no_streaming_with_int_macros(mocker):
    mfactory = mock.MagicMock()
    mfactory_class = mocker.patch('pappyproxy.proxy.ProxyClientFactory')
    mfactory_class.return_value = mfactory
    mocker.patch('pappyproxy.proxy.ProxyServer._make_remote_connection')
    mfactory.prepare_request.return_value = mock_deferred(None)
    full_req = ('POST /fooo HTTP/1.1\r\n'
                'Test-Header: foo\r\n'
                'Content-Length: 4\r\n'
                '\r\n'
                'ABCD')
    int_macros = [{'mockmacro': mock_int_macro(modified_req='GET / HTTP/1.1\r\n\r\n')}]
    server_protocol = gen_server_protocol(int_macros=int_macros)
    server_protocol.dataReceived(full_req)
    f_args, f_kwargs = mfactory_class.call_args
    assert f_kwargs['stream_response'] == False

def test_proxy_server_connect(unconnected_proxyserver, mocker, in_scope_true):
    mocker.patch("twisted.internet.reactor.connectSSL")
    unconnected_proxyserver.lineReceived('CONNECT https://www.dddddd.fff:433 HTTP/1.1')
    unconnected_proxyserver.lineReceived('')
    assert unconnected_proxyserver.transport.getOutBuffer() == 'HTTP/1.1 200 Connection established\r\n\r\n'
    assert unconnected_proxyserver._request_obj.is_ssl

def test_proxy_server_basic(proxyserver, mocker, in_scope_true):
    mocker.patch("twisted.internet.reactor.connectSSL")
    mocker.patch('pappyproxy.proxy.ProxyServer.setRawMode')
    proxyserver.lineReceived('GET / HTTP/1.1')
    proxyserver.lineReceived('')
    assert proxyserver.setRawMode.called
    args, kwargs = twisted.internet.reactor.connectSSL.call_args
    assert args[0] == 'www.AAAA.BBBB'
    assert args[1] == 443

## ProxyServer._make_remote_connection
@pytest.inlineCallbacks
def test_proxy_server_make_tcp_connection(mocker, server_protocol):
mtcpe_class = mocker.patch("twisted.internet.endpoints.TCP4ClientEndpoint")
mtcpe_class.return_value = mtcpe = mock.MagicMock()
mtcpe.connect.return_value = mock_deferred()
server_protocol._client_factory = mock.MagicMock() # We already tested that this gets set up correctly
req = http.Request("GET / HTTP/1.1\r\n\r\n")
req.host = 'Foo.Bar.Brazzers'
req.port = 80085
server_protocol._request_obj = req
yield server_protocol._make_remote_connection(req)
targs, tkwargs = mtcpe_class.call_args
assert targs[1] == 'Foo.Bar.Brazzers'
assert targs[2] == 80085
assert tkwargs == {}
mtcpe.connect.assert_called_once_with(server_protocol._client_factory)
@pytest.inlineCallbacks
def test_proxy_server_make_ssl_connection(mocker, server_protocol):
mssle_class = mocker.patch("twisted.internet.endpoints.SSL4ClientEndpoint")
mssle_class.return_value = mssle = mock.MagicMock()
mssle.connect.return_value = mock_deferred()
server_protocol._client_factory = mock.MagicMock() # We already tested that this gets set up correctly
req = http.Request("GET / HTTP/1.1\r\n\r\n", is_ssl=True)
req.host = 'Foo.Bar.Brazzers'
req.port = 80085
server_protocol._request_obj = req
yield server_protocol._make_remote_connection(req)
targs, tkwargs = mssle_class.call_args
assert targs[1] == 'Foo.Bar.Brazzers'
assert targs[2] == 80085
assert tkwargs == {}
mssle.connect.assert_called_once_with(server_protocol._client_factory)
@pytest.inlineCallbacks
def test_proxy_server_make_tcp_connection_socks(mocker):
    socks_config(mocker, {'host': '12345', 'port': 5555})
    tls_wrap_class = mocker.patch("txsocksx.tls.TLSWrapClientEndpoint")
    mtcpe_class = mocker.patch("twisted.internet.endpoints.TCP4ClientEndpoint")
    mtcpe_class.return_value = mtcpe = mock.MagicMock()
    socks_class = mocker.patch("txsocksx.client.SOCKS5ClientEndpoint")
    socks_class.return_value = sockse = mock.MagicMock()
    server_protocol = gen_server_protocol()
    server_protocol._client_factory = mock.MagicMock() # We already tested that this gets set up correctly
    req = http.Request("GET / HTTP/1.1\r\n\r\n")
    req.host = 'Foo.Bar.Brazzers'
    req.port = 80085
    server_protocol._request_obj = req
    yield server_protocol._make_remote_connection(req)
    sargs, skwargs = socks_class.call_args
    targs, tkwargs = mtcpe_class.call_args
    assert targs[1] == '12345'
    assert targs[2] == 5555
    assert sargs[0] == 'Foo.Bar.Brazzers'
    assert sargs[1] == 80085
    assert sargs[2] == mtcpe
    assert skwargs == {'methods': {'anonymous': ()}}
    assert not tls_wrap_class.called
    sockse.connect.assert_called_once_with(server_protocol._client_factory)

@pytest.inlineCallbacks
def test_proxy_client_nomangle(mocker, proxy_connection, in_scope_true):
    # Make the connection
    (prot, sent, retreq_deferred) = \
        yield proxy_connection('GET / HTTP/1.1\r\n\r\n', None, None)
    assert sent.full_request == 'GET / HTTP/1.1\r\n\r\n'
    prot.lineReceived('HTTP/1.1 200 OK')
    prot.lineReceived('Content-Length: 0')
    prot.lineReceived('')
    ret_req = yield retreq_deferred
    response = ret_req.response.full_response
    assert response == 'HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n'
@pytest.inlineCallbacks
def test_proxy_client_mangle_req(mocker, proxy_connection, in_scope_true):
# Make the connection
(prot, sent, retreq_deferred) = \
yield proxy_connection('GET / HTTP/1.1\r\n\r\n', MANGLED_REQ, None)
assert sent.full_request == 'GET /mangled HTTP/1.1\r\n\r\n'
@pytest.inlineCallbacks
def test_proxy_server_make_ssl_connection_socks(mocker):
socks_config(mocker, {'host': '12345', 'port': 5555})
tls_wrap_class = mocker.patch("txsocksx.tls.TLSWrapClientEndpoint")
tls_wrape = tls_wrap_class.return_value = mock.MagicMock()
mtcpe_class = mocker.patch("twisted.internet.endpoints.TCP4ClientEndpoint")
mtcpe_class.return_value = mtcpe = mock.MagicMock()
socks_class = mocker.patch("txsocksx.client.SOCKS5ClientEndpoint")
socks_class.return_value = sockse = mock.MagicMock()
server_protocol = gen_server_protocol()
server_protocol._client_factory = mock.MagicMock() # We already tested that this gets set up correctly
req = http.Request("GET / HTTP/1.1\r\n\r\n")
req.host = 'Foo.Bar.Brazzers'
req.port = 80085
req.is_ssl = True
server_protocol._request_obj = req
yield server_protocol._make_remote_connection(req)
sargs, skwargs = socks_class.call_args
targs, tkwargs = mtcpe_class.call_args
assert targs[1] == '12345'
assert targs[2] == 5555
assert sargs[0] == 'Foo.Bar.Brazzers'
assert sargs[1] == 80085
assert sargs[2] == mtcpe
assert skwargs == {'methods': {'anonymous': ()}}
assert not sockse.called
tls_wrape.connect.assert_called_once_with(server_protocol._client_factory)
@pytest.inlineCallbacks
def test_proxy_client_mangle_rsp(mocker, proxy_connection, in_scope_true):
# Make the connection
(prot, sent, retreq_deferred) = \
yield proxy_connection('GET / HTTP/1.1\r\n\r\n', None, MANGLED_RSP)
prot.lineReceived('HTTP/1.1 200 OK')
prot.lineReceived('Content-Length: 0')
prot.lineReceived('')
req = yield retreq_deferred
response = req.response.full_response
assert response == 'HTTP/1.1 500 MANGLED\r\nContent-Length: 0\r\n\r\n'
@pytest.inlineCallbacks
def test_proxy_server_make_ssl_connection_socks_username_only(mocker):
socks_config(mocker, {'host': '12345', 'port': 5555, 'username': 'foo'})
tls_wrap_class = mocker.patch("txsocksx.tls.TLSWrapClientEndpoint")
tls_wrape = tls_wrap_class.return_value = mock.MagicMock()
mtcpe_class = mocker.patch("twisted.internet.endpoints.TCP4ClientEndpoint")
mtcpe_class.return_value = mtcpe = mock.MagicMock()
socks_class = mocker.patch("txsocksx.client.SOCKS5ClientEndpoint")
socks_class.return_value = sockse = mock.MagicMock()
server_protocol = gen_server_protocol()
server_protocol._client_factory = mock.MagicMock() # We already tested that this gets set up correctly
req = http.Request("GET / HTTP/1.1\r\n\r\n")
req.host = 'Foo.Bar.Brazzers'
req.port = 80085
req.is_ssl = True
server_protocol._request_obj = req
yield server_protocol._make_remote_connection(req)
sargs, skwargs = socks_class.call_args
targs, tkwargs = mtcpe_class.call_args
assert targs[1] == '12345'
assert targs[2] == 5555
assert sargs[0] == 'Foo.Bar.Brazzers'
assert sargs[1] == 80085
assert sargs[2] == mtcpe
assert skwargs == {'methods': {'anonymous': ()}}
assert not sockse.called
tls_wrape.connect.assert_called_once_with(server_protocol._client_factory)
@pytest.inlineCallbacks
def test_proxy_drop_req(mocker, proxy_connection, in_scope_true):
(prot, sent, retreq_deferred) = \
yield proxy_connection('GET / HTTP/1.1\r\n\r\n', None, None, True, False)
assert sent is None
@pytest.inlineCallbacks
def test_proxy_server_make_ssl_connection_socks_username_password(mocker):
socks_config(mocker, {'host': '12345', 'port': 5555, 'username': 'foo', 'password': 'password'})
tls_wrap_class = mocker.patch("txsocksx.tls.TLSWrapClientEndpoint")
tls_wrape = tls_wrap_class.return_value = mock.MagicMock()
mtcpe_class = mocker.patch("twisted.internet.endpoints.TCP4ClientEndpoint")
mtcpe_class.return_value = mtcpe = mock.MagicMock()
socks_class = mocker.patch("txsocksx.client.SOCKS5ClientEndpoint")
socks_class.return_value = sockse = mock.MagicMock()
server_protocol = gen_server_protocol()
server_protocol._client_factory = mock.MagicMock() # We already tested that this gets set up correctly
req = http.Request("GET / HTTP/1.1\r\n\r\n")
req.host = 'Foo.Bar.Brazzers'
req.port = 80085
req.is_ssl = True
server_protocol._request_obj = req
yield server_protocol._make_remote_connection(req)
sargs, skwargs = socks_class.call_args
targs, tkwargs = mtcpe_class.call_args
assert targs[1] == '12345'
assert targs[2] == 5555
assert sargs[0] == 'Foo.Bar.Brazzers'
assert sargs[1] == 80085
assert sargs[2] == mtcpe
assert skwargs == {'methods': {'login': ('foo','password'), 'anonymous': ()}}
assert not sockse.called
tls_wrape.connect.assert_called_once_with(server_protocol._client_factory)
########################
### Proxy Client Factory
@pytest.inlineCallbacks
def test_proxy_drop_rsp(mocker, proxy_connection, in_scope_true):
(prot, sent, retreq_deferred) = \
yield proxy_connection('GET / HTTP/1.1\r\n\r\n', None, None, False, True)
prot.lineReceived('HTTP/1.1 200 OK')
prot.lineReceived('Content-Length: 0')
prot.lineReceived('')
retreq = yield retreq_deferred
assert retreq.response is None
@pytest.inlineCallbacks
def test_proxy_client_factory_prepare_reqs_simple(mocker, freeze):
import datetime
freeze.freeze(datetime.datetime(2015, 1, 1, 3, 30, 15, 50))
req = http.Request('GET / HTTP/1.1\r\n\r\n')
rsave = mocker.patch.object(pappyproxy.http.Request, 'async_deep_save', autospec=True, side_effect=mock_req_async_save)
rsave.return_value = mock_deferred()
mocker.patch('pappyproxy.context.in_scope').return_value = True
mocker.patch('pappyproxy.macros.mangle_request').return_value = mock_deferred((req, False))
cf = ProxyClientFactory(req,
save_all=False,
stream_response=False,
return_transport=None)
yield cf.prepare_request()
assert req.time_start == datetime.datetime(2015, 1, 1, 3, 30, 15, 50)
assert req.reqid is None
assert not rsave.called
assert len(rsave.mock_calls) == 0
@pytest.inlineCallbacks
def test_proxy_client_360_noscope(mocker, proxy_connection, in_scope_false):
# Make the connection
(prot, sent, retreq_deferred) = yield proxy_connection('GET / HTTP/1.1\r\n\r\n')
assert sent.full_request == 'GET / HTTP/1.1\r\n\r\n'
prot.lineReceived('HTTP/1.1 200 OK')
prot.lineReceived('Content-Length: 0')
prot.lineReceived('')
req = yield retreq_deferred
assert req.response.full_response == 'HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n'
@pytest.inlineCallbacks
def test_proxy_client_factory_prepare_reqs_360_noscope(mocker, freeze):
import datetime
freeze.freeze(datetime.datetime(2015, 1, 1, 3, 30, 15, 50))
req = http.Request('GET / HTTP/1.1\r\n\r\n')
rsave = mocker.patch('pappyproxy.http.Request.async_deep_save')
rsave.return_value = mock_deferred()
mocker.patch('pappyproxy.context.in_scope').return_value = False
mocker.patch('pappyproxy.macros.mangle_request', new=func_deleted)
cf = ProxyClientFactory(req,
save_all=True,
stream_response=False,
return_transport=None)
yield cf.prepare_request()
assert req.time_start == None
assert req.reqid is None
assert not rsave.called
assert len(rsave.mock_calls) == 0
@pytest.inlineCallbacks
def test_proxy_client_factory_prepare_reqs_save(mocker, freeze):
freeze.freeze(datetime.datetime(2015, 1, 1, 3, 30, 15, 50))
req = http.Request('GET / HTTP/1.1\r\n\r\n')
rsave = mocker.patch.object(pappyproxy.http.Request, 'async_deep_save', autospec=True, side_effect=mock_req_async_save)
mocker.patch('pappyproxy.context.in_scope').return_value = True
mocker.patch('pappyproxy.macros.mangle_request').return_value = mock_deferred((req, False))
cf = ProxyClientFactory(req,
save_all=True,
stream_response=False,
return_transport=None)
yield cf.prepare_request()
assert req.time_start == datetime.datetime(2015, 1, 1, 3, 30, 15, 50)
assert req.reqid is not None
assert rsave.called
assert len(rsave.mock_calls) == 1
@pytest.inlineCallbacks
def test_proxy_client_factory_prepare_reqs_360_noscope_save(mocker, freeze):
freeze.freeze(datetime.datetime(2015, 1, 1, 3, 30, 15, 50))
req = http.Request('GET / HTTP/1.1\r\n\r\n')
mangreq = http.Request('BOOO / HTTP/1.1\r\n\r\n')
rsave = mocker.patch.object(pappyproxy.http.Request, 'async_deep_save', autospec=True, side_effect=mock_req_async_save)
mocker.patch('pappyproxy.context.in_scope').return_value = False
mocker.patch('pappyproxy.macros.mangle_request', side_effect=func_deleted)
cf = ProxyClientFactory(req,
save_all=True,
stream_response=False,
return_transport=None)
yield cf.prepare_request()
assert req.time_start == None
assert req.reqid is None
assert not rsave.called
assert len(rsave.mock_calls) == 0
@pytest.inlineCallbacks
def test_proxy_client_factory_prepare_mangle_req(mocker, freeze):
freeze.freeze(datetime.datetime(2015, 1, 1, 3, 30, 15, 50))
req = http.Request('GET / HTTP/1.1\r\n\r\n')
mangreq = http.Request('BOOO / HTTP/1.1\r\n\r\n')
def inc_day_mangle(x, y):
freeze.delta(days=1)
return mock_deferred((mangreq, True))
rsave = mocker.patch.object(pappyproxy.http.Request, 'async_deep_save', autospec=True, side_effect=mock_req_async_save)
mocker.patch('pappyproxy.context.in_scope').return_value = True
mocker.patch('pappyproxy.macros.mangle_request', side_effect=inc_day_mangle)
cf = ProxyClientFactory(req,
save_all=True,
stream_response=False,
return_transport=None)
yield cf.prepare_request()
assert cf.request == mangreq
assert req.time_start == datetime.datetime(2015, 1, 1, 3, 30, 15, 50)
assert cf.request.time_start == datetime.datetime(2015, 1, 2, 3, 30, 15, 50)
assert cf.request.reqid is not None
assert len(rsave.mock_calls) == 2
@pytest.inlineCallbacks
def test_proxy_client_factory_prepare_mangle_req_drop(mocker, freeze):
freeze.freeze(datetime.datetime(2015, 1, 1, 3, 30, 15, 50))
def inc_day_mangle(x, y):
freeze.delta(days=1)
return mock_deferred((None, True))
req = http.Request('GET / HTTP/1.1\r\n\r\n')
rsave = mocker.patch.object(pappyproxy.http.Request, 'async_deep_save', autospec=True, side_effect=mock_req_async_save)
mocker.patch('pappyproxy.context.in_scope').return_value = True
mocker.patch('pappyproxy.macros.mangle_request', side_effect=inc_day_mangle)
cf = ProxyClientFactory(req,
save_all=True,
stream_response=False,
return_transport=None)
yield cf.prepare_request()
assert cf.request is None
assert req.time_start == datetime.datetime(2015, 1, 1, 3, 30, 15, 50)
assert len(rsave.mock_calls) == 1
### return_request_pair
# @pytest.inlineCallbacks
# def test_proxy_client_factory_prepare_mangle_rsp(mocker, freeze):
# freeze.freeze(datetime.datetime(2015, 1, 1, 3, 30, 15, 50))
# rsave = mocker.patch.object(pappyproxy.http.Request, 'async_deep_save', autospec=True, side_effect=mock_req_async_save)
# mocker.patch('pappyproxy.context.in_scope').return_value = True
# req = http.Request('GET / HTTP/1.1\r\n\r\n')
# req.reqid = 1
# rsp = http.Response('HTTP/1.1 200 OK\r\n\r\n')
# req.response = rsp
# mocker.patch('pappyproxy.macros.mangle_response').return_value = (req, False)
# cf = ProxyClientFactory(req,
# save_all=False,
# stream_response=False,
# return_transport=None)
# result = yield cf.return_request_pair(req)
# assert result == req
# assert req.time_start == datetime.datetime(2015, 1, 1, 3, 30, 15, 50)
# assert len(rsave.mock_calls) == 0
### ProxyClient tests
@pytest.inlineCallbacks
def test_proxy_client_simple(mocker):
rsave = mocker.patch.object(pappyproxy.http.Request, 'async_deep_save', autospec=True, side_effect=mock_req_async_save)
req = http.Request('GET / HTTP/1.1\r\n\r\n')
client = gen_client_protocol(req, stream_response=False)
assert client.transport.value() == 'GET / HTTP/1.1\r\n\r\n'
client.transport.clear()
rsp = 'HTTP/1.1 200 OKILE DOKELY\r\n\r\n'
client.dataReceived(rsp)
retpair = yield client.data_defer
assert retpair.response.full_message == rsp
@pytest.inlineCallbacks
def test_proxy_client_stream(mocker):
rsave = mocker.patch.object(pappyproxy.http.Request, 'async_deep_save', autospec=True, side_effect=mock_req_async_save)
req = http.Request('GET / HTTP/1.1\r\n\r\n')
client = gen_client_protocol(req, stream_response=True)
client.transport.clear()
client.dataReceived('HTTP/1.1 404 GET FUCKE')
assert client.factory.return_transport.value() == 'HTTP/1.1 404 GET FUCKE'
client.factory.return_transport.clear()
client.dataReceived('D ASSHOLE\r\nContent-Length: 4\r\n\r\nABCD')
assert client.factory.return_transport.value() == 'D ASSHOLE\r\nContent-Length: 4\r\n\r\nABCD'
retpair = yield client.data_defer
assert retpair.response.full_message == 'HTTP/1.1 404 GET FUCKED ASSHOLE\r\nContent-Length: 4\r\n\r\nABCD'
@pytest.inlineCallbacks
def test_proxy_client_nostream(mocker):
rsave = mocker.patch.object(pappyproxy.http.Request, 'async_deep_save', autospec=True, side_effect=mock_req_async_save)
req = http.Request('GET / HTTP/1.1\r\n\r\n')
client = gen_client_protocol(req, stream_response=False)
client.transport.clear()
client.dataReceived('HTTP/1.1 404 GET FUCKE')
assert client.factory.return_transport.value() == ''
client.factory.return_transport.clear()
client.dataReceived('D ASSHOLE\r\nContent-Length: 4\r\n\r\nABCD')
assert client.factory.return_transport.value() == ''
retpair = yield client.data_defer
assert retpair.response.full_message == 'HTTP/1.1 404 GET FUCKED ASSHOLE\r\nContent-Length: 4\r\n\r\nABCD'

View file

@ -3,12 +3,19 @@ import mock
import pytest
import StringIO
from twisted.internet import defer
from twisted.test.proto_helpers import StringTransport
from pappyproxy import http
next_mock_id = 0
class ClassDeleted():
pass
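# A StringTransport that accepts startTLS() as a no-op, so proxy code that
# upgrades an in-memory connection to TLS can run against it in tests.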
class TLSStringTransport(StringTransport):
def startTLS(self, context, factory):
pass
def func_deleted(*args, **kwargs):
raise NotImplementedError()
@ -18,7 +25,7 @@ def func_ignored(*args, **kwargs):
def func_ignored_deferred(*args, **kwargs):
return mock_deferred(None)
def mock_deferred(value):
def mock_deferred(value=None):
# Generates a function that can be used to make a deferred that can be used
# to mock out deferred-returning responses
def g(data):
@ -33,6 +40,10 @@ def no_tcp(mocker):
# Don't make tcp connections
mocker.patch("twisted.internet.reactor.connectTCP", new=func_deleted)
mocker.patch("twisted.internet.reactor.connectSSL", new=func_deleted)
mocker.patch("twisted.internet.endpoints.SSL4ClientEndpoint", new=func_deleted)
mocker.patch("twisted.internet.endpoints.TCP4ClientEndpoint", new=func_deleted)
mocker.patch("txsocksx.client.SOCKS5ClientEndpoint", new=func_deleted)
mocker.patch("txsocksx.tls.TLSWrapClientEndpoint", new=func_deleted)
@pytest.fixture
def ignore_tcp(mocker):
@ -73,3 +84,71 @@ def mock_deep_save(mocker, fake_saving):
def print_fuck(*args, **kwargs):
print 'fuck'
@pytest.fixture
def freeze(monkeypatch):
""" Now() manager patches datetime return a fixed, settable, value
(freezes time)
stolen from http://stackoverflow.com/a/28073449
"""
import datetime
original = datetime.datetime
class FreezeMeta(type):
def __instancecheck__(self, instance):
if type(instance) == original or type(instance) == Freeze:
return True
class Freeze(datetime.datetime):
__metaclass__ = FreezeMeta
@classmethod
def freeze(cls, val, utcval=None):
cls.utcfrozen = utcval
cls.frozen = val
@classmethod
def now(cls):
return cls.frozen
@classmethod
def utcnow(cls):
# added since requests use utcnow
return cls.utcfrozen or cls.frozen
@classmethod
def delta(cls, timedelta=None, **kwargs):
""" Moves time fwd/bwd by the delta"""
from datetime import timedelta as td
if not timedelta:
timedelta = td(**kwargs)
cls.frozen += timedelta
monkeypatch.setattr(datetime, 'datetime', Freeze)
Freeze.freeze(original.now())
return Freeze
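# A rough usage sketch for the freeze fixture (illustrative only; the test name
# is made up, but the pattern mirrors the ProxyClientFactory tests):
#
#   def test_frozen_time(freeze):
#       freeze.freeze(datetime.datetime(2015, 1, 1, 3, 30, 15, 50))
#       assert datetime.datetime.now() == datetime.datetime(2015, 1, 1, 3, 30, 15, 50)
#       freeze.delta(days=1)
#       assert datetime.datetime.now() == datetime.datetime(2015, 1, 2, 3, 30, 15, 50)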
def mock_int_macro(modified_req=None, modified_rsp=None,
drop_req=False, drop_rsp=False):
macro = mock.MagicMock()
if modified_req or drop_req:
macro.async_req = True
macro.intercept_requests = True
if drop_req:
newreq = None
else:
newreq = http.Request(modified_req)
macro.async_mangle_request.return_value = mock_deferred(newreq)
else:
macro.intercept_requests = False
if modified_rsp or drop_rsp:
macro.async_rsp = True
macro.intercept_responses = True
if drop_rsp:
newrsp = None
else:
newrsp = http.Response(modified_rsp)
macro.async_mangle_response.return_value = mock_deferred(newrsp)
else:
macro.intercept_responses = False
return macro
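# Illustrative wiring, mirroring test_proxy_server_no_streaming_with_int_macros
# in the proxy tests (the 'mockmacro' key is an arbitrary label):
#
#   int_macros = [{'mockmacro': mock_int_macro(modified_req='GET / HTTP/1.1\r\n\r\n')}]
#   server_protocol = gen_server_protocol(int_macros=int_macros)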

View file

@ -3,6 +3,8 @@ import string
import time
import datetime
from .colors import Colors, Styles
class PappyException(Exception):
"""
The exception class for Pappy. If a plugin command raises one of these, the
@ -19,10 +21,17 @@ def printable_data(data):
:rtype: String
"""
chars = []
colored = False
for c in data:
if c in string.printable:
if colored:
chars.append(Colors.ENDC)
colored = False
chars.append(c)
else:
if not colored:
chars.append(Styles.UNPRINTABLE_DATA)
colored = True
chars.append('.')
return ''.join(chars)
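# For example, with the default color scheme, printable bytes pass through and
# each run of unprintable bytes is collapsed into colored dots:
#
#   printable_data('abc\x00\x01def')
#   # -> 'abc' + Styles.UNPRINTABLE_DATA + '..' + Colors.ENDC + 'def'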
@ -43,6 +52,6 @@ def hexdump(src, length=16):
for c in xrange(0, len(src), length):
chars = src[c:c+length]
hex = ' '.join(["%02x" % ord(x) for x in chars])
printable = ''.join(["%s" % ((ord(x) <= 127 and FILTER[ord(x)]) or '.') for x in chars])
printable = ''.join(["%s" % ((ord(x) <= 127 and FILTER[ord(x)]) or Styles.UNPRINTABLE_DATA+'.'+Colors.ENDC) for x in chars])
lines.append("%04x %-*s %s\n" % (c, length*3, hex, printable))
return ''.join(lines)
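# For example (color codes around the dots omitted and column spacing
# approximate; FILTER is the usual printable-character table defined above):
#
#   print hexdump('ABCD\x00\x01')
#   # 0000 41 42 43 44 00 01                                ABCD..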