Version 0.2.0

This commit is contained in:
Rob Glew 2016-01-19 18:00:34 -06:00
parent 312b985229
commit 26376eaaec
43 changed files with 4699 additions and 2668 deletions

View file

@ -1,10 +1,10 @@
import base64
import json
import pappyproxy
from twisted.protocols.basic import LineReceiver
from twisted.internet import defer
from util import PappyException
from .http import Request, Response
"""
comm.py
@ -31,6 +31,8 @@ class CommServer(LineReceiver):
}
def lineReceived(self, line):
from .http import Request, Response
if line == '':
return
try:
@ -71,7 +73,7 @@ class CommServer(LineReceiver):
def action_get_request(self, data):
try:
reqid = data['reqid']
req = yield pappyproxy.http.Request.load_request(reqid)
req = yield Request.load_request(reqid)
except KeyError:
raise PappyException("Request with given ID does not exist")
@ -82,12 +84,12 @@ class CommServer(LineReceiver):
def action_get_response(self, data):
try:
reqid = data['reqid']
req = yield pappyproxy.http.Request.load_request(reqid)
req = yield Request.load_request(reqid)
except KeyError:
raise PappyException("Request with given ID does not exist, cannot fetch associated response.")
if req.response:
rsp = yield pappyproxy.http.Response.load_response(req.response.rspid)
rsp = yield Response.load_response(req.response.rspid)
dat = json.loads(rsp.to_json())
else:
dat = {}
@ -95,13 +97,8 @@ class CommServer(LineReceiver):
@defer.inlineCallbacks
def action_submit_request(self, data):
try:
req = pappyproxy.http.Request(base64.b64decode(data['full_request']))
req.port = data['port']
req.is_ssl = data['is_ssl']
except:
raise PappyException("Error parsing request")
yield req.async_submit()
message = base64.b64decode(data['full_message'])
req = yield Request.submit_new(data['host'], data['port'], data['is_ssl'], message)
yield req.async_deep_save()
retdata = {}
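
For reference, the comm server speaks a line-delimited JSON protocol: one JSON object per line in each direction. Pieced together from this handler and the vim repeater client later in this commit, a submit exchange looks roughly like this (host, port, and message values are illustrative):

{"action": "submit", "host": "example.com", "port": 443, "is_ssl": true, "full_message": "<base64-encoded raw request>"}
{"success": true, "response": {"full_message": "<base64-encoded raw response>"}}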

View file

@ -1,20 +1,89 @@
import imp
"""
The configuration settings for the proxy.
.. data:: CERT_DIR
The location of the CA certs that Pappy will use. This can be configured in the
``config.json`` file for a project.
:Default: ``{DATADIR}/certs``
.. data:: PAPPY_DIR
The directory where pappy's scripts are located. Don't write anything here;
use DATA_DIR instead.
:Default: Wherever the scripts are installed
.. data:: DATA_DIR
The data directory. This is where files that have to be read by Pappy every time
it's run are put. For example, plugins are stored in ``{DATADIR}/plugins`` and
certs are by default stored in ``{DATADIR}/certs``. This defaults to ``~/.pappy``
and isn't configurable right now.
:Default: ``~/.pappy``
.. data:: DATAFILE
The name of the file where Pappy stores the project's request history. This can be
configured in the ``config.json`` file for a project.
:Default: ``data.db``
.. data:: DEBUG_DIR
The directory to write debug output to. Don't point this outside the project folder,
since Pappy writes all of the request data to this directory. You probably won't need
to use this. Configured in the ``config.json`` file for the project.
:Default: None
.. data:: LISTENERS
The list of active listeners. It is a list of tuples of the format (port, interface).
Not modifiable after startup. Configured in the ``config.json`` file for the project.
:Default: ``[(8000, '127.0.0.1')]``
.. data:: PLUGIN_DIRS
List of directories that plugins are loaded from. Not modifiable.
:Default: ``['{DATA_DIR}/plugins', '{PAPPY_DIR}/plugins']``
.. data:: CONFIG_DICT
The dictionary read from config.json. When writing plugins, use this to load
configuration options for your plugin.
"""
import json
import os
import shutil
PAPPY_DIR = os.path.dirname(os.path.realpath(__file__))
DATA_DIR = os.path.join(os.path.expanduser('~'), '.pappy')
CERT_DIR = os.path.join(DATA_DIR, 'certs')
DATAFILE = 'data.db'
DEBUG_DIR = None
DEBUG_TO_FILE = False
DEBUG_VERBOSITY = 0
LISTENERS = [(8000, '127.0.0.1')]
SSL_CA_FILE = 'certificate.crt'
SSL_PKEY_FILE = 'private.key'
PLUGIN_DIRS = [os.path.join(DATA_DIR, 'plugins'), os.path.join(PAPPY_DIR, 'plugins')]
CONFIG_DICT = {}
def get_default_config():
default_config_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'default_user_config.json')
@ -61,6 +130,7 @@ def load_settings(proj_config):
def load_from_file(fname):
global CONFIG_DICT
# Make sure we have a config file
if not os.path.isfile(fname):
print "Copying default config to %s" % fname
@ -70,5 +140,5 @@ def load_from_file(fname):
# Load local project config
with open(fname, 'r') as f:
proj_config = json.load(f)
load_settings(proj_config)
CONFIG_DICT = json.load(f)
load_settings(CONFIG_DICT)
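
Since CONFIG_DICT now holds the parsed config.json, plugins can pull their own settings out of it. A minimal sketch, assuming a hypothetical "myplugin" section in the project's config.json:

from pappyproxy import config

def get_plugin_opts():
    # "myplugin" is a made-up key; a plugin picks its own section name
    opts = config.CONFIG_DICT.get('myplugin', {})
    return opts.get('timeout', 30), opts.get('verbose', False)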

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -1,5 +1,5 @@
import os
import itertools
from .config import PAPPY_DIR
def from_file(fname, intro=False):

View file

@ -3,11 +3,12 @@ import imp
import os
import random
import re
import stat
from pappyproxy import http
from pappyproxy import config
from twisted.internet import defer
from jinja2 import Environment, FileSystemLoader
from pappyproxy import config
from pappyproxy.util import PappyException
from twisted.internet import defer
class Macro(object):
"""
@ -39,6 +40,9 @@ class Macro(object):
if self.filename:
match = re.findall('.*macro_(.*).py$', self.filename)
self.file_name = match[0]
st = os.stat(self.filename)
if (st.st_mode & stat.S_IWOTH):
raise PappyException("Refusing to load world-writable macro: %s" % self.filename)
module_name = os.path.basename(os.path.splitext(self.filename)[0])
self.source = imp.load_source('%s'%module_name, self.filename)
if not hasattr(self.source, 'MACRO_NAME'):
@ -57,17 +61,49 @@ class Macro(object):
# Execute the macro
if self.source:
self.source.run_macro(args)
class InterceptMacro(object):
"""
A class representing a macro that modifies requests as they pass through the
proxy
"""
def __init__(self, filename=''):
def __init__(self):
self.name = ''
self.short_name = None
self.intercept_requests = True
self.intercept_responses = True
self.intercept_requests = False
self.intercept_responses = False
self.do_req = False
self.do_rsp = False
self.do_async_req = False
self.do_async_rsp = False
def __repr__(self):
return "<InterceptingMacro (%s)>" % self.name
def init(self, args):
pass
def mangle_request(self, request):
return request
def mangle_response(self, request):
return request.response
@defer.inlineCallbacks
def async_mangle_request(self, request):
defer.returnValue(request)
@defer.inlineCallbacks
def async_mangle_response(self, request):
defer.returnValue(request.response)
class FileInterceptMacro(InterceptMacro):
"""
An intercepting macro that loads a macro from a file.
"""
def __init__(self, filename=''):
InterceptMacro.__init__(self)
self.file_name = '' # name from the file
self.filename = filename or '' # filename we load from
self.source = None
@ -85,36 +121,6 @@ class InterceptMacro(object):
s += ' (%s)' % ('/'.join(names))
return "<InterceptingMacro %s>" % s
@property
def do_req(self):
if (self.source and hasattr(self.source, 'async_mangle_request') or \
self.source and hasattr(self.source, 'mangle_request')) and \
self.intercept_requests:
return True
return False
@property
def do_rsp(self):
if (self.source and hasattr(self.source, 'async_mangle_response') or \
self.source and hasattr(self.source, 'mangle_response')) and \
self.intercept_responses:
return True
return False
@property
def async_req(self):
if self.source and hasattr(self.source, 'async_mangle_request'):
return True
else:
return False
@property
def async_rsp(self):
if self.source and hasattr(self.source, 'async_mangle_response'):
return True
else:
return False
def load(self):
if self.filename:
match = re.findall('.*int_(.*).py$', self.filename)
@ -122,6 +128,9 @@ class InterceptMacro(object):
self.file_name = match[0]
else:
self.file_name = self.filename
st = os.stat(self.filename)
if (st.st_mode & stat.S_IWOTH):
raise PappyException("Refusing to load world-writable macro: %s" % self.filename)
module_name = os.path.basename(os.path.splitext(self.filename)[0])
self.source = imp.load_source('%s'%module_name, self.filename)
self.name = self.source.MACRO_NAME
@ -141,9 +150,28 @@ class InterceptMacro(object):
else:
self.source = None
def init(self, line):
# Update what we can do
if self.source and hasattr(self.source, 'mangle_request'):
self.intercept_requests = True
self.async_req = False
elif self.source and hasattr(self.source, 'async_mangle_request'):
self.intercept_requests = True
self.async_req = True
else:
self.intercept_requests = False
if self.source and hasattr(self.source, 'mangle_response'):
self.intercept_responses = True
self.async_rsp = False
elif self.source and hasattr(self.source, 'async_mangle_response'):
self.intercept_responses = True
self.async_rsp = True
else:
self.intercept_responses = False
def init(self, args):
if hasattr(self.source, 'init'):
self.source.init(line)
self.source.init(args)
def mangle_request(self, request):
if hasattr(self.source, 'mangle_request'):
@ -178,12 +206,18 @@ def load_macros(loc):
macro_files = glob.glob(loc + "/macro_*.py")
macro_objs = []
for f in macro_files:
macro_objs.append(Macro(f))
try:
macro_objs.append(Macro(f))
except PappyException as e:
print str(e)
int_macro_files = glob.glob(loc + "/int_*.py")
int_macro_objs = []
for f in int_macro_files:
int_macro_objs.append(InterceptMacro(f))
try:
int_macro_objs.append(FileInterceptMacro(f))
except PappyException as e:
print str(e)
return (macro_objs, int_macro_objs)
def req_obj_def(req):
@ -198,6 +232,8 @@ def req_obj_def(req):
else:
if req.port != 80:
params.append('port=%d'%req.port)
if 'host' in req.headers and req.host != req.headers['host']:
params.append("host='%s'" % req.host)
if params:
req_params = ', '+', '.join(params)
else:
@ -223,7 +259,6 @@ def macro_from_requests(reqs, short_name='', long_name=''):
subs['short_name'] = short_name
n = 0
req_lines = []
req_params = []
for req in reqs:
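
For reference, the smallest file FileInterceptMacro will load is an int_<name>.py that defines MACRO_NAME plus at least one mangle hook; the load() logic above flips intercept_requests/intercept_responses depending on which hooks it finds. A sketch (the header name is made up, and it assumes the Request headers mapping supports item assignment):

MACRO_NAME = 'Example Header Adder'
SHORT_NAME = 'hdr'  # optional

def mangle_request(request):
    # Called for each in-scope request. Return the (possibly modified)
    # request, or None to drop it.
    request.headers['X-Example'] = 'added-by-macro'
    return request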

View file

@ -1,31 +1,54 @@
#!/usr/bin/env python2
import argparse
import cmd2
import crochet
import datetime
import imp
import os
import schema.update
import shutil
import sys
import sqlite3
import tempfile
from pappyproxy import console
from pappyproxy import config
from pappyproxy import comm
from pappyproxy import http
from pappyproxy import context
from pappyproxy import proxy
from . import comm
from . import config
from . import context
from . import http
from . import plugin
from . import proxy
from .console import ProxyCmd
from twisted.enterprise import adbapi
from twisted.internet import reactor, defer
from twisted.internet.threads import deferToThread
from twisted.internet.protocol import ServerFactory
from twisted.internet.error import CannotListenError
from twisted.internet.protocol import ServerFactory
from twisted.internet.threads import deferToThread
crochet.no_setup()
server_factory = None
main_context = context.Context()
all_contexts = [main_context]
plugin_loader = None
cons = None
@defer.inlineCallbacks
def wait_for_saves(ignored):
reset = True
lastprint = 0
while reset:
reset = False
togo = 0
for c in all_contexts:
for r in c.all_reqs:
if r.reqid == '--':
reset = True
togo += 1
d = defer.Deferred()
d.callback(None)
yield d
if togo % 10 == 0 and lastprint != togo:
lastprint = togo
print '%d requests left to be saved (probably won\'t work)' % togo
def parse_args():
# parses sys.argv and returns a settings dictionary
@ -51,6 +74,9 @@ def delete_datafile():
@defer.inlineCallbacks
def main():
global server_factory
global plugin_loader
global cons
settings = parse_args()
load_start = datetime.datetime.now()
@ -77,7 +103,12 @@ def main():
check_same_thread=False,
cp_openfun=set_text_factory,
cp_max=1)
yield schema.update.update_schema(dbpool)
try:
yield schema.update.update_schema(dbpool, config.DATAFILE)
except Exception as e:
print 'Error updating schema: %s' % e
print 'Exiting...'
reactor.stop()
defer.returnValue(None)
http.init(dbpool)
yield context.init()
@ -85,17 +116,17 @@ def main():
if config.DEBUG_DIR and os.path.exists(config.DEBUG_DIR):
shutil.rmtree(config.DEBUG_DIR)
print 'Removing old debugging output'
serv_factory = proxy.ProxyServerFactory(save_all=True)
server_factory = proxy.ProxyServerFactory(save_all=True)
listen_strs = []
listening = False
ports = []
for listener in config.LISTENERS:
try:
reactor.listenTCP(listener[0], serv_factory, interface=listener[1])
listening = True
port = reactor.listenTCP(listener[0], server_factory, interface=listener[1])
listener_str = 'port %d' % listener[0]
if listener[1] not in ('127.0.0.1', 'localhost'):
listener_str += ' (bound to %s)' % listener[1]
listen_strs.append(listener_str)
ports.append(port)
except CannotListenError as e:
print repr(e)
if listen_strs:
@ -112,19 +143,31 @@ def main():
# Load the scope
yield context.load_scope(http.dbpool)
context.reset_to_scope()
context.reset_to_scope(main_context)
# Apologize for slow start times
load_end = datetime.datetime.now()
load_time = (load_end - load_start)
if load_time.total_seconds() > 20:
print 'Startup was slow (%s)! Sorry!' % load_time
print 'Database has {0} requests (~{1:.2f}ms per request)'.format(len(context.active_requests), ((load_time.total_seconds()/len(context.active_requests))*1000))
print 'Database has {0} requests (~{1:.2f}ms per request)'.format(len(main_context.active_requests), ((load_time.total_seconds()/len(main_context.active_requests))*1000))
sys.argv = [sys.argv[0]] # cmd2 tries to parse args
cons = console.ProxyCmd()
console.set_proxy_server_factory(serv_factory)
cons = ProxyCmd()
plugin_loader = plugin.PluginLoader(cons)
for d in config.PLUGIN_DIRS:
if not os.path.exists(d):
os.makedirs(d)
plugin_loader.load_directory(d)
@defer.inlineCallbacks
def close_listeners(ignored):
for port in ports:
yield port.stopListening()
d = deferToThread(cons.cmdloop)
d.addCallback(close_listeners)
d.addCallback(wait_for_saves)
d.addCallback(lambda ignored: reactor.stop())
if delete_data_on_quit:
d.addCallback(lambda ignored: delete_datafile())

pappyproxy/plugin.py (new file, 150 lines)
View file

@ -0,0 +1,150 @@
"""
This module contains all the api calls written for use in plugins. If you want
to do anything that isn't allowed through these function calls or through the
functions provided for macros, contact me and I'll see what I can do to add some
more functionality into the next version.
"""
import glob
import imp
import os
import pappyproxy
import stat
from .proxy import add_intercepting_macro as proxy_add_intercepting_macro
from .proxy import remove_intercepting_macro as proxy_remove_intercepting_macro
from .util import PappyException
class Plugin(object):
def __init__(self, cmd, fname=None):
self.cmd = cmd
self.filename = ''
self.source = None
self.module_name = ''
if fname:
self.filename = fname
self.load_file(fname)
def load_file(self, fname):
module_name = os.path.basename(os.path.splitext(fname)[0])
if os.path.basename(fname) == '__init__.py':
return
st = os.stat(fname)
if (st.st_mode & stat.S_IWOTH):
raise PappyException("Refusing to load world-writable plugin: %s" % fname)
self.source = imp.load_source('%s'%module_name, fname)
if hasattr(self.source, 'load_cmds'):
self.source.load_cmds(self.cmd)
else:
print ('WARNING: %s does not define load_cmds. It will not be '
'possible to interact with the plugin through the console.' % fname)
self.module_name = module_name
class PluginLoader(object):
def __init__(self, cmd):
self.cmd = cmd
self.loaded_plugins = []
self.plugins_by_name = {}
def load_plugin(self, fname):
p = Plugin(self.cmd, fname)
self.loaded_plugins.append(p)
self.plugins_by_name[p.module_name] = p
def load_directory(self, directory):
fnames = glob.glob(os.path.join(directory, '*.py'))
for fname in fnames:
try:
self.load_plugin(fname)
except PappyException as e:
print str(e)
##########################
## Plugin helper functions
def plugin_by_name(name):
"""
Returns an interface to access the methods of a plugin from its name.
For example, to call the ``foo`` function from the ``bar`` plugin
you would call ``plugin_by_name('bar').foo()``.
"""
import pappyproxy.pappy
if name in pappyproxy.pappy.plugin_loader.plugins_by_name:
return pappyproxy.pappy.plugin_loader.plugins_by_name[name].source
else:
raise PappyException('No plugin with name %s is loaded' % name)
def add_intercepting_macro(name, macro):
"""
Adds an intercepting macro to the proxy. You can either use a
:class:`pappyproxy.macros.FileInterceptMacro` to load an intercepting macro
from the disk, or you can create your own using an :class:`pappyproxy.macros.InterceptMacro`
for a base class. You must give a unique name that will be used in
:func:`pappyproxy.plugin.remove_intercepting_macro` to deactivate it. Remember
that activating an intercepting macro will disable request streaming and will
affect performance. So please try and only use this if you may need to modify
messages before they are passed along.
"""
proxy_add_intercepting_macro(name, macro, pappyproxy.pappy.server_factory.intercepting_macros)
def remove_intercepting_macro(name):
"""
Stops an active intercepting macro. You must pass in the name that you used
when calling :func:`pappyproxy.plugin.add_intercepting_macro` to identify
which macro you would like to stop.
"""
proxy_remove_intercepting_macro(name, pappyproxy.pappy.server_factory.intercepting_macros)
def active_intercepting_macros():
"""
Returns a list of the active intercepting macro objects. Modifying this list
will not affect which macros are active.
"""
return pappyproxy.pappy.server_factory.intercepting_macros[:]
def in_memory_reqs():
"""
Returns a list containing all of the requests that exist only in memory
(requests with an m## style id).
You can call either :func:`pappyproxy.http.Request.save` or
:func:`pappyproxy.http.Request.async_save` to save the request to the data file.
"""
return list(pappyproxy.context.Context.in_memory_requests)
def all_reqs():
"""
Returns a list containing all the requests in history (including requests
that only exist in memory). Modifying this list will not modify requests
included in the history. However, you can edit the requests
in this list then call either :func:`pappyproxy.http.Request.save` or
:func:`pappyproxy.http.Request.async_save` to modify the actual request.
"""
return list(pappyproxy.context.Context.all_reqs)
def main_context():
"""
Returns the context object representing the main context. Use this to interact
with the context. The returned object can be modified
at will. Avoid modifying any class values (i.e. all_reqs, in_memory_requests)
and use the class methods to add/remove requests. See the documentation on
:class:`pappyproxy.context.Context` for more information.
"""
return pappyproxy.pappy.main_context
def add_req(req):
"""
Adds a request to the history. Will not do anything to requests which are
already in history. If the request is not saved, it will be given an m## id.
"""
pappyproxy.pappy.main_context.add_request(req)
def run_cmd(cmd):
"""
Run a command as if you typed it into the console. Try and use existing APIs
to do what you want before using this.
"""
pappyproxy.pappy.cons.onecmd(cmd)
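
Tying the API together: a plugin is just a .py file in one of the plugin directories that defines load_cmds. A minimal sketch (all names are made up) that registers console commands to toggle a custom InterceptMacro; it sets async_req itself, mirroring how MangleInterceptMacro does elsewhere in this commit, since the proxy checks that attribute directly:

from pappyproxy.macros import InterceptMacro
from pappyproxy.plugin import add_intercepting_macro, remove_intercepting_macro

class HeaderSpy(InterceptMacro):
    def __init__(self):
        InterceptMacro.__init__(self)
        self.name = 'Header spy'
        self.intercept_requests = True
        self.async_req = False  # proxy reads this to pick the sync hook

    def mangle_request(self, request):
        # Log the request line, then pass the request along unmodified
        print '%s %s' % (request.verb, request.full_path)
        return request

def spy_on(line):
    add_intercepting_macro('header_spy', HeaderSpy())

def spy_off(line):
    remove_intercepting_macro('header_spy')

def load_cmds(cmd):
    cmd.set_cmds({
        'spy_on': (spy_on, None),
        'spy_off': (spy_off, None),
    })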

View file

View file

@ -0,0 +1,192 @@
import crochet
import pappyproxy
from pappyproxy.console import confirm
from pappyproxy.util import PappyException
from twisted.internet import defer
class BuiltinFilters(object):
_filters = {
'not_image': (
['path nctr "(\.png$|\.jpg$|\.gif$)"'],
'Filter out image requests',
),
'not_jscss': (
['path nctr "(\.js$|\.css$)"'],
'Filter out javascript and css files',
),
}
@staticmethod
@defer.inlineCallbacks
def get(name):
if name not in BuiltinFilters._filters:
raise PappyException('"%s" is not a built-in filter' % name)
filters = [pappyproxy.context.Filter(f) for f in BuiltinFilters._filters[name][0]]
for f in filters:
yield f.generate()
defer.returnValue(filters)
@staticmethod
def list():
return [k for k, v in BuiltinFilters._filters.iteritems()]
@staticmethod
def help(name):
if name not in BuiltinFilters._filters:
raise PappyException('"%s" is not a built-in filter' % name)
return BuiltinFilters._filters[name][1]
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def filtercmd(line):
"""
Apply a filter to the current context
Usage: filter <filter string>
See README.md for information on filter strings
"""
if not line:
raise PappyException("Filter string required")
filter_to_add = pappyproxy.context.Filter(line)
yield filter_to_add.generate()
pappyproxy.pappy.main_context.add_filter(filter_to_add)
def complete_builtin_filter(text, line, begidx, endidx):
all_names = BuiltinFilters.list()
if not text:
ret = all_names[:]
else:
ret = [n for n in all_names if n.startswith(text)]
return ret
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def builtin_filter(line):
if not line:
raise PappyException("Filter name required")
filters_to_add = yield BuiltinFilters.get(line)
for f in filters_to_add:
print f.filter_string
pappyproxy.pappy.main_context.add_filter(f)
defer.returnValue(None)
def filter_up(line):
"""
Remove the last applied filter
Usage: filter_up
"""
pappyproxy.pappy.main_context.filter_up()
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def filter_clear(line):
"""
Reset the context so that it contains no filters (ignores scope)
Usage: filter_clear
"""
pappyproxy.pappy.main_context.active_filters = []
yield pappyproxy.context.reload_from_storage()
def filter_list(line):
"""
Print the filters that make up the current context
Usage: filter_list
"""
for f in pappyproxy.pappy.main_context.active_filters:
print f.filter_string
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def scope_save(line):
"""
Set the scope to be the current context. Saved between launches
Usage: scope_save
"""
pappyproxy.context.save_scope(pappyproxy.pappy.main_context)
yield pappyproxy.context.store_scope(pappyproxy.http.dbpool)
def scope_reset(line):
"""
Set the context to be the scope (view in-scope items)
Usage: scope_reset
"""
pappyproxy.context.reset_to_scope(pappyproxy.pappy.main_context)
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def scope_delete(line):
"""
Delete the scope so that it contains all request/response pairs
Usage: scope_delete
"""
pappyproxy.context.set_scope([])
yield pappyproxy.context.store_scope(pappyproxy.http.dbpool)
def scope_list(line):
"""
Print the filters that make up the scope
Usage: scope_list
"""
pappyproxy.context.print_scope()
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def filter_prune(line):
"""
Delete all out of context requests from the data file.
CANNOT BE UNDONE!! Be careful!
Usage: filter_prune
"""
# Delete filtered items from datafile
print ''
print 'Currently active filters:'
for f in pappyproxy.pappy.main_context.active_filters:
print '> %s' % f.filter_string
# We copy so that we're not removing items from a set we're iterating over
reqs = list(pappyproxy.pappy.main_context.inactive_requests)
act_reqs = list(pappyproxy.pappy.main_context.active_requests)
message = 'This will delete %d/%d requests. You can NOT undo this!! Continue?' % (len(reqs), (len(reqs) + len(act_reqs)))
if not confirm(message, 'n'):
defer.returnValue(None)
for r in reqs:
yield r.deep_delete()
print 'Deleted %d requests' % len(reqs)
defer.returnValue(None)
###############
## Plugin hooks
def load_cmds(cmd):
cmd.set_cmds({
'filter_prune': (filter_prune, None),
'scope_list': (scope_list, None),
'scope_delete': (scope_delete, None),
'scope_reset': (scope_reset, None),
'scope_save': (scope_save, None),
'filter_list': (filter_list, None),
'filter_clear': (filter_clear, None),
'filter_up': (filter_up, None),
'builtin_filter': (builtin_filter, complete_builtin_filter),
'filter': (filtercmd, None),
})
cmd.add_aliases([
#('filter_prune', ''),
('scope_list', 'sls'),
#('scope_delete', ''),
('scope_reset', 'sr'),
#('scope_save', ''),
('filter_list', 'fls'),
('filter_clear', 'fc'),
('filter_up', 'fu'),
('builtin_filter', 'fbi'),
('filter', 'f'),
('filter', 'fl'),
])
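
With the aliases above, a typical session might look like this (prompt shown as pappy> for illustration; filter strings follow the same syntax as the built-ins above):

pappy> f path nctr "(\.png$|\.jpg$|\.gif$)"
pappy> fbi not_jscss
pappy> fls
pappy> fu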

View file

@ -0,0 +1,215 @@
import crochet
import pappyproxy
import shlex
from pappyproxy.plugin import active_intercepting_macros, add_intercepting_macro, remove_intercepting_macro
from pappyproxy.console import load_reqlist
from pappyproxy.macros import load_macros, macro_from_requests, gen_imacro
from pappyproxy.util import PappyException
from twisted.internet import defer
loaded_macros = []
loaded_int_macros = []
macro_dict = {}
int_macro_dict = {}
def load_macros_cmd(line):
"""
Load macros from a directory. By default loads macros in the current directory.
Usage: load_macros [dir]
"""
global macro_dict
global int_macro_dict
global loaded_macros
global loaded_int_macros
if line:
load_dir = line
else:
load_dir = '.'
(to_load, int_to_load) = load_macros(load_dir)
if not to_load and not int_to_load:
raise PappyException('No macros to load.')
macro_dict = {}
loaded_macros = []
int_macro_dict = {}
loaded_int_macros = []
for macro in to_load:
if macro.name in macro_dict:
print 'Name conflict in %s! "%s" already in use, not loading.' % (macro.filename, macro.name)
elif macro.short_name and macro.short_name in macro_dict:
print 'Name conflict in %s! "%s" already in use, not loading.' % (macro.filename, macro.short_name)
elif macro.file_name in macro_dict:
print 'Name conflict in %s! "%s" already in use, not loading.' % (macro.filename, macro.file_name)
else:
macro_dict[macro.name] = macro
macro_dict[macro.file_name] = macro
if macro.short_name:
macro_dict[macro.short_name] = macro
loaded_macros.append(macro)
print 'Loaded "%s"' % macro
for macro in int_to_load:
if macro.name in int_macro_dict:
print 'Name conflict in %s! "%s" already in use, not loading.' % (macro.filename, macro.name)
elif macro.short_name and macro.short_name in int_macro_dict:
print 'Name conflict in %s! "%s" already in use, not loading.' % (macro.filename, macro.short_name)
elif macro.file_name in int_macro_dict:
print 'Name conflict in %s! "%s" already in use, not loading.' % (macro.filename, macro.file_name)
else:
int_macro_dict[macro.name] = macro
int_macro_dict[macro.file_name] = macro
if macro.short_name:
int_macro_dict[macro.short_name] = macro
loaded_int_macros.append(macro)
print 'Loaded "%s"' % macro
def run_macro(line):
"""
Run a macro
Usage: run_macro <macro name or macro short name>
"""
global macro_dict
global loaded_macros
args = shlex.split(line)
if not args:
raise PappyException('You must give a macro to run. You can give its short name, or the name in the filename.')
mname = args[0]
if mname not in macro_dict:
raise PappyException('%s not a loaded macro' % mname)
macro = macro_dict[mname]
macro.execute(args[1:])
def run_int_macro(line):
"""
Activate an intercepting macro
Usage: run_int_macro <macro name or macro short name>
Macro can be stopped with stop_int_macro
"""
global int_macro_dict
global loaded_int_macros
args = shlex.split(line)
if len(args) == 0:
raise PappyException('You must give an intercepting macro to run. You can give its short name, or the name in the filename.')
if args[0] not in int_macro_dict:
raise PappyException('%s not a loaded intercepting macro' % line)
macro = int_macro_dict[args[0]]
macro.init(args[1:])
add_intercepting_macro(macro.name, macro)
print '"%s" started' % macro.name
def stop_int_macro(line):
"""
Stop a running intercepting macro
Usage: stop_int_macro <macro name or macro short name>
"""
global int_macro_dict
global loaded_int_macros
if not line:
raise PappyException('You must give an intercepting macro to run. You can give its short name, or the name in the filename.')
if line not in int_macro_dict:
raise PappyException('%s not a loaded intercepting macro' % line)
macro = int_macro_dict[line]
remove_intercepting_macro(macro.name)
print '"%s" stopped' % macro.name
def list_int_macros(line):
"""
List all active/inactive intercepting macros
"""
global int_macro_dict
global loaded_int_macros
running = []
not_running = []
for macro in loaded_int_macros:
if macro.name in active_intercepting_macros():
running.append(macro)
else:
not_running.append(macro)
if not running and not not_running:
print 'No loaded intercepting macros'
if running:
print 'Active intercepting macros:'
for m in running:
print ' %s' % m
if not_running:
print 'Inactive intercepting macros:'
for m in not_running:
print ' %s' % m
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def generate_macro(line):
"""
Generate a macro script with request objects
Usage: generate_macro <name> [reqs]
"""
if line == '':
raise PappyException('Macro name is required')
args = shlex.split(line)
name = args[0]
if len(args) > 1:
reqs = yield load_reqlist(args[1])
else:
reqs = []
script_str = macro_from_requests(reqs)
fname = 'macro_%s.py' % name
with open(fname, 'w') as f:
f.write(script_str)
print 'Wrote script to %s' % fname
def generate_int_macro(line):
"""
Generate an intercepting macro script
Usage: generate_int_macro <name>
"""
if line == '':
raise PappyException('Macro name is required')
args = shlex.split(line)
name = args[0]
script_str = gen_imacro()
fname = 'int_%s.py' % name
with open(fname, 'w') as f:
f.write(script_str)
print 'Wrote script to %s' % fname
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def rpy(line):
"""
Copy python object definitions of requests.
Usage: rpy <reqs>
"""
reqs = yield load_reqlist(line)
for req in reqs:
print pappyproxy.macros.req_obj_def(req)
###############
## Plugin hooks
def load_cmds(cmd):
cmd.set_cmds({
'rpy': (rpy, None),
'generate_int_macro': (generate_int_macro, None),
'generate_macro': (generate_macro, None),
'list_int_macros': (list_int_macros, None),
'stop_int_macro': (stop_int_macro, None),
'run_int_macro': (run_int_macro, None),
'run_macro': (run_macro, None),
'load_macros': (load_macros_cmd, None),
})
cmd.add_aliases([
#('rpy', ''),
('generate_int_macro', 'gima'),
('generate_macro', 'gma'),
('list_int_macros', 'lsim'),
('stop_int_macro', 'sim'),
('run_int_macro', 'rim'),
('run_macro', 'rma'),
('load_macros', 'lma'),
])
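
A plausible end-to-end workflow with these commands, assuming requests with ids 1 and 2 exist (prompt shown as pappy> for illustration):

pappy> gma example 1,2
Wrote script to macro_example.py
pappy> lma
pappy> rma example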

View file

@ -0,0 +1,243 @@
import crochet
import curses
import os
import pappyproxy
import shlex
import subprocess
import tempfile
from pappyproxy.util import PappyException
from pappyproxy.macros import InterceptMacro
from pappyproxy.http import Request, Response
from pappyproxy.plugin import add_intercepting_macro, remove_intercepting_macro
from pappyproxy import comm, config
from twisted.internet import defer
PLUGIN_ID="manglecmds"
edit_queue = []
class MangleInterceptMacro(InterceptMacro):
"""
A class representing a macro that modifies requests as they pass through the
proxy
"""
def __init__(self):
InterceptMacro.__init__(self)
self.name = 'Pappy Interceptor Macro'
self.intercept_requests = False
self.intercept_responses = False
self.async_req = True
self.async_rsp = True
def __repr__(self):
return "<MangleInterceptingMacro>" % self.name
@defer.inlineCallbacks
def async_mangle_request(self, request):
# This function gets called to mangle/edit requests passed through the proxy
retreq = request
# Write original request to the temp file
with tempfile.NamedTemporaryFile(delete=False) as tf:
tfName = tf.name
tf.write(request.full_request)
# Have the console edit the file
yield edit_file(tfName)
# Create new mangled request from edited file
with open(tfName, 'r') as f:
text = f.read()
os.remove(tfName)
# Check if dropped
if text == '':
pappyproxy.proxy.log('Request dropped!')
defer.returnValue(None)
mangled_req = Request(text, update_content_length=True)
mangled_req.port = request.port
mangled_req.is_ssl = request.is_ssl
# Check if it changed
if mangled_req.full_request != request.full_request:
retreq = mangled_req
defer.returnValue(retreq)
@defer.inlineCallbacks
def async_mangle_response(self, request):
# This function gets called to mangle/edit responses passed through the proxy
retrsp = request.response
# Write original response to the temp file
with tempfile.NamedTemporaryFile(delete=False) as tf:
tfName = tf.name
tf.write(request.response.full_response)
# Have the console edit the file
yield edit_file(tfName, front=True)
# Create new mangled response from edited file
with open(tfName, 'r') as f:
text = f.read()
os.remove(tfName)
# Check if dropped
if text == '':
pappyproxy.proxy.log('Response dropped!')
defer.returnValue(None)
mangled_rsp = Response(text, update_content_length=True)
if mangled_rsp.full_response != request.response.full_response:
mangled_rsp.unmangled = request.response
retrsp = mangled_rsp
defer.returnValue(retrsp)
###############
## Helper funcs
def edit_file(fname, front=False):
global edit_queue
# Adds the filename to the edit queue. Returns a deferred that is fired once
# the file is edited and the editor is closed
d = defer.Deferred()
if front:
edit_queue = [(fname, d)] + edit_queue
else:
edit_queue.append((fname, d))
return d
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def check_reqid(reqid):
# Used for the repeater command. Must not be async
try:
yield pappyproxy.http.Request.load_request(reqid)
except:
raise PappyException('"%s" is not a valid request id' % reqid)
defer.returnValue(None)
def start_editor(reqid):
script_loc = os.path.join(config.PAPPY_DIR, "plugins", "vim_repeater", "repeater.vim")
#print "RepeaterSetup %d %d"%(reqid, comm_port)
subprocess.call(["vim", "-S", script_loc, "-c", "RepeaterSetup %s %d"%(reqid, comm.comm_port)])
####################
## Command functions
def repeater(line):
"""
Open a request in the repeater
Usage: repeater <reqid>
"""
# This is not async on purpose. start_editor acts up if this is called
# with inline callbacks. As a result, check_reqid and get_unmangled
# cannot be async
args = shlex.split(line)
reqid = args[0]
check_reqid(reqid)
start_editor(reqid)
def intercept(line):
"""
Intercept requests and/or responses and edit them before passing them along
Usage: intercept <reqid>
"""
global edit_queue
args = shlex.split(line)
intercept_requests = False
intercept_responses = False
req_names = ('req', 'request', 'requests')
rsp_names = ('rsp', 'response', 'responses')
if any(a in req_names for a in args):
intercept_requests = True
if any(a in rsp_names for a in args):
intercept_responses = True
if intercept_requests and intercept_responses:
intercept_str = 'Requests and responses'
elif intercept_requests:
intercept_str = 'Requests'
elif intercept_responses:
intercept_str = 'Responses'
else:
intercept_str = 'NOTHING'
mangle_macro = MangleInterceptMacro()
mangle_macro.intercept_requests = intercept_requests
mangle_macro.intercept_responses = intercept_responses
add_intercepting_macro('pappy_intercept', mangle_macro)
## Interceptor loop
stdscr = curses.initscr()
curses.noecho()
curses.cbreak()
try:
editnext = False
stdscr.nodelay(True)
while True:
stdscr.addstr(0, 0, "Currently intercepting: %s" % intercept_str)
stdscr.clrtoeol()
stdscr.addstr(1, 0, "%d item(s) in queue." % len(edit_queue))
stdscr.clrtoeol()
if editnext:
stdscr.addstr(2, 0, "Waiting for next item... Press 'q' to quit or 'b' to quit waiting")
else:
stdscr.addstr(2, 0, "Press 'n' to edit the next item or 'q' to quit interceptor.")
stdscr.clrtoeol()
c = stdscr.getch()
if c == ord('q'):
break
elif c == ord('n'):
editnext = True
elif c == ord('b'):
editnext = False
if editnext and edit_queue:
editnext = False
(to_edit, deferred) = edit_queue.pop(0)
editor = 'vi'
if 'EDITOR' in os.environ:
editor = os.environ['EDITOR']
subprocess.call([editor, to_edit])
stdscr.clear()
deferred.callback(None)
finally:
curses.nocbreak()
stdscr.keypad(0)
curses.echo()
curses.endwin()
try:
remove_intercepting_macro('pappy_intercept')
except PappyException:
pass
# Send remaining requests along
while len(edit_queue) > 0:
(fname, deferred) = edit_queue.pop(0)
deferred.callback(None)
###############
## Plugin hooks
def load_cmds(cmd):
cmd.set_cmds({
'intercept': (intercept, None),
'repeater': (repeater, None),
})
cmd.add_aliases([
('intercept', 'ic'),
('repeater', 'rp'),
])
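
For illustration (request id 12 is made up): ic req rsp starts intercepting both requests and responses in the curses UI, where 'n' edits the next queued message and 'q' quits; rp 12 opens request 12 in the vim repeater.

pappy> ic req rsp
pappy> rp 12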

View file

@ -0,0 +1,85 @@
import crochet
import pappyproxy
import shlex
from pappyproxy.console import confirm, load_reqlist
from pappyproxy.util import PappyException
from twisted.internet import defer
def clrmem(line):
"""
Delete all in-memory only requests
Usage: clrmem
"""
to_delete = list(pappyproxy.context.Context.in_memory_requests)
for r in to_delete:
pappyproxy.context.Context.remove_request(r)
def gencerts(line):
"""
Generate CA cert and private CA file
Usage: gencerts [/path/to/put/certs/in]
"""
dest_dir = line or pappyproxy.config.CERT_DIR
message = "This will overwrite any existing certs in %s. Are you sure?" % dest_dir
if not confirm(message, 'n'):
return False
print "Generating certs to %s" % dest_dir
pappyproxy.proxy.generate_ca_certs(dest_dir)
def log(line):
"""
Display the log in real time. Honestly it probably doesn't work.
Usage: log [verbosity (default is 1)]
verbosity=1: Show connections as they're made/lost, some additional info
verbosity=3: Show full requests/responses as they are processed by the proxy
"""
try:
verbosity = int(line.strip())
except:
verbosity = 1
pappyproxy.config.DEBUG_VERBOSITY = verbosity
raw_input()
pappyproxy.config.DEBUG_VERBOSITY = 0
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def export(line):
"""
Write the full request/response of a request/response to a file.
Usage: export [req|rsp] <reqid(s)>
"""
args = shlex.split(line)
if len(args) < 2:
print 'Requires req/rsp and request id(s)'
defer.returnValue(None)
if args[0] not in ('req', 'rsp'):
raise PappyException('Request or response not specified')
reqs = yield load_reqlist(args[1])
for req in reqs:
try:
if args[0] == 'req':
fname = 'req_%s.txt'%req.reqid
with open(fname, 'w') as f:
f.write(req.full_request)
print 'Full request written to %s' % fname
elif args[0] == 'rsp':
fname = 'rsp_%s.txt'%req.reqid
with open(fname, 'w') as f:
f.write(req.full_response)
print 'Full response written to %s' % fname
except PappyException as e:
print 'Unable to export %s: %s' % (req.reqid, e)
def load_cmds(cmd):
cmd.set_cmds({
'clrmem': (clrmem, None),
'gencerts': (gencerts, None),
'export': (export, None),
'log': (log, None),
})
cmd.add_aliases([
#('rpy', ''),
])
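
For example, assuming a saved request with id 12 that has a response:

pappy> export req 12
Full request written to req_12.txt
pappy> export rsp 12
Full response written to rsp_12.txt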

View file

@ -0,0 +1,102 @@
import crochet
import pappyproxy
import shlex
from pappyproxy.plugin import main_context, add_req
from pappyproxy.console import load_reqlist
from pappyproxy.util import PappyException
from twisted.internet import defer
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def tag(line):
"""
Add a tag to requests.
Usage: tag <tag> [request ids]
You can tag as many requests as you want at the same time. If no
ids are given, the tag will be applied to all in-context requests.
"""
args = shlex.split(line)
if len(args) == 0:
raise PappyException('Tag name is required')
tag = args[0]
if len(args) > 1:
reqs = yield load_reqlist(args[1], False)
ids = [r.reqid for r in reqs]
print 'Tagging %s with %s' % (', '.join(ids), tag)
else:
print "Tagging all in-context requests with %s" % tag
reqs = main_context().active_requests
for req in reqs:
if tag not in req.tags:
req.tags.append(tag)
if req.saved:
yield req.async_save()
add_req(req)
else:
print 'Request %s already has tag %s' % (req.reqid, tag)
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def untag(line):
"""
Remove a tag from requests
Usage: untag <tag> <request ids>
You can provide as many request ids as you want and the tag will
be removed from all of them. If no ids are given, the tag will
be removed from all in-context requests.
"""
args = shlex.split(line)
if len(args) == 0:
raise PappyException("Tag and request ids are required")
tag = args[0]
ids = []
if len(args) > 1:
reqs = yield load_reqlist(args[1], False)
ids = [r.reqid for r in reqs]
else:
print "Untagging all in-context requests with tag %s" % tag
reqs = main_context().active_requests
for req in reqs:
if tag in req.tags:
req.tags.remove(tag)
if req.saved:
yield req.async_save()
if ids:
print 'Tag %s removed from %s' % (tag, ', '.join(ids))
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def clrtag(line):
"""
Clear all the tags from requests
Usage: clrtag <request ids>
"""
args = shlex.split(line)
if len(args) == 0:
raise PappyException('No request IDs given')
reqs = yield load_reqlist(args[0], False)
for req in reqs:
if req.tags:
req.tags = []
print 'Tags cleared from request %s' % (req.reqid)
if req.saved:
yield req.async_save()
###############
## Plugin hooks
def load_cmds(cmd):
cmd.set_cmds({
'clrtag': (clrtag, None),
'untag': (untag, None),
'tag': (tag, None),
})
cmd.add_aliases([
#('rpy', ''),
])
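
For illustration, assuming requests 12 and 13 exist:

pappy> tag admin-area 12,13
Tagging 12, 13 with admin-area
pappy> untag admin-area 12
Tag admin-area removed from 12
pappy> clrtag 13
Tags cleared from request 13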

pappyproxy/plugins/view.py (new file, 328 lines)
View file

@ -0,0 +1,328 @@
import crochet
import datetime
import pappyproxy
import shlex
from pappyproxy.console import load_reqlist, print_table, print_requests
from pappyproxy.util import PappyException
from pappyproxy.plugin import main_context
from pappyproxy.http import Request
from twisted.internet import defer
###################
## Helper functions
def view_full_message(request, headers_only=False):
if headers_only:
print request.headers_section_pretty
else:
print request.full_message_pretty
def print_request_extended(request):
# Prints extended info for the request
title = "Request Info (reqid=%s)" % request.reqid
print title
print '-'*len(title)
reqlen = len(request.body)
reqlen = '%d bytes' % reqlen
rsplen = 'No response'
mangle_str = 'Nothing mangled'
if request.unmangled:
mangle_str = 'Request'
if request.response:
response_code = str(request.response.response_code) + \
' ' + request.response.response_text
rsplen = len(request.response.body)
rsplen = '%d bytes' % rsplen
if request.response.unmangled:
if mangle_str == 'Nothing mangled':
mangle_str = 'Response'
else:
mangle_str += ' and Response'
else:
response_code = ''
time_str = '--'
if request.time_start and request.time_end:
time_delt = request.time_end - request.time_start
time_str = "%.2f sec" % time_delt.total_seconds()
if request.is_ssl:
is_ssl = 'YES'
else:
is_ssl = 'NO'
if request.time_start:
time_made_str = request.time_start.strftime('%a, %b %d, %Y, %I:%M:%S %p')
else:
time_made_str = '--'
print 'Made on %s' % time_made_str
print 'ID: %s' % request.reqid
print 'Verb: %s' % request.verb
print 'Host: %s' % request.host
print 'Path: %s' % request.full_path
print 'Status Code: %s' % response_code
print 'Request Length: %s' % reqlen
print 'Response Length: %s' % rsplen
if request.response and request.response.unmangled:
print 'Unmangled Response Length: %s bytes' % len(request.response.unmangled.full_response)
print 'Time: %s' % time_str
print 'Port: %s' % request.port
print 'SSL: %s' % is_ssl
print 'Mangled: %s' % mangle_str
print 'Tags: %s' % (', '.join(request.tags))
if request.plugin_data:
print 'Plugin Data: %s' % (request.plugin_data)
def get_site_map(reqs):
# Takes in a list of requests and returns a tree representing the site map
paths_set = set()
for req in reqs:
paths_set.add(req.path_tuple)
paths = sorted(list(paths_set))
return paths
def print_tree(tree):
# Prints a tree. Takes in a sorted list of path tuples
_print_tree_helper(tree, 0, [])
def _get_tree_prefix(depth, print_bars, last):
if depth == 0:
return u''
else:
ret = u''
pb = print_bars + [True]
for i in range(depth):
if pb[i]:
ret += u'\u2502 '
else:
ret += u' '
if last:
ret += u'\u2514\u2500\u2500 '
else:
ret += u'\u251c\u2500\u2500 '
return ret
def _print_tree_helper(tree, depth, print_bars):
# Takes in a tree and prints it at the given depth
if tree == [] or tree == [()]:
return
while tree[0] == ():
tree = tree[1:]
if tree == [] or tree == [()]:
return
if len(tree) == 1 and len(tree[0]) == 1:
print _get_tree_prefix(depth, print_bars + [False], True) + tree[0][0]
return
curkey = tree[0][0]
subtree = []
for row in tree:
if row[0] != curkey:
if curkey == '':
curkey = '/'
print _get_tree_prefix(depth, print_bars, False) + curkey
if depth == 0:
_print_tree_helper(subtree, depth+1, print_bars + [False])
else:
_print_tree_helper(subtree, depth+1, print_bars + [True])
curkey = row[0]
subtree = []
subtree.append(row[1:])
if curkey == '':
curkey = '/'
print _get_tree_prefix(depth, print_bars, True) + curkey
_print_tree_helper(subtree, depth+1, print_bars + [False])
####################
## Command functions
def list_reqs(line):
"""
List the most recent in-context requests. By default shows the most recent 25
Usage: list [a|num]
If `a` is given, all the in-context requests are shown. If a number is given,
that many requests will be shown.
"""
args = shlex.split(line)
if len(args) > 0:
if args[0][0].lower() == 'a':
print_count = -1
else:
try:
print_count = int(args[0])
except:
print "Please enter a valid argument for list"
return
else:
print_count = 25
def key_reqtime(req):
if req.time_start is None:
return -1
else:
return (req.time_start-datetime.datetime(1970,1,1)).total_seconds()
to_print = sorted(main_context().active_requests, key=key_reqtime, reverse=True)
if print_count > 0:
to_print = to_print[:print_count]
print_requests(to_print)
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def view_request_info(line):
"""
View information about request
Usage: view_request_info <reqid> [u]
If 'u' is given as an additional argument, the unmangled version
of the request will be displayed.
"""
args = shlex.split(line)
reqids = args[0]
reqs = yield load_reqlist(reqids)
for req in reqs:
print ''
print_request_extended(req)
print ''
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def view_request_headers(line):
"""
View the headers of the request
Usage: view_request_headers <reqid> [u]
If 'u' is given as an additional argument, the unmangled version
of the request will be displayed.
"""
args = shlex.split(line)
reqid = args[0]
reqs = yield load_reqlist(reqid)
for req in reqs:
if len(reqs) > 1:
print 'Request %s:' % req.reqid
print ''
view_full_message(req, True)
if len(reqs) > 1:
print '-'*30
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def view_full_request(line):
"""
View the full data of the request
Usage: view_full_request <reqid> [u]
If 'u' is given as an additional argument, the unmangled version
of the request will be displayed.
"""
args = shlex.split(line)
reqid = args[0]
reqs = yield load_reqlist(reqid)
for req in reqs:
if len(reqs) > 1:
print 'Request %s:' % req.reqid
print ''
view_full_message(req)
if len(reqs) > 1:
print '-'*30
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def view_response_headers(line):
"""
View the headers of the response
Usage: view_response_headers <reqid>
"""
reqs = yield load_reqlist(line)
for req in reqs:
if req.response:
if len(reqs) > 1:
print '-'*15 + (' %s ' % req.reqid) + '-'*15
view_full_message(req.response, True)
else:
print "Request %s does not have a response" % req.reqid
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def view_full_response(line):
"""
View the full data of the response associated with a request
Usage: view_full_response <reqid>
"""
reqs = yield load_reqlist(line)
for req in reqs:
if req.response:
if len(reqs) > 1:
print '-'*15 + (' %s ' % req.reqid) + '-'*15
view_full_message(req.response)
else:
print "Request %s does not have a response" % req.reqid
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def dump_response(line):
"""
Dump the data of the response to a file.
Usage: dump_response <id> <filename>
"""
# dump the data of a response
args = shlex.split(line)
reqid = args[0]
req = yield Request.load_request(reqid)
rsp = req.response
if len(args) >= 2:
fname = args[1]
else:
fname = req.path.split('/')[-1]
with open(fname, 'w') as f:
f.write(rsp.body)
print 'Response data written to %s' % fname
def site_map(line):
"""
Print the site map. Only includes requests in the current context.
Usage: site_map
"""
to_print = [r for r in main_context().active_requests if not r.response or r.response.response_code != 404]
tree = get_site_map(to_print)
print_tree(tree)
###############
## Plugin hooks
def load_cmds(cmd):
cmd.set_cmds({
'list': (list_reqs, None),
'view_request_info': (view_request_info, None),
'view_request_headers': (view_request_headers, None),
'view_full_request': (view_full_request, None),
'view_response_headers': (view_response_headers, None),
'view_full_response': (view_full_response, None),
'site_map': (site_map, None),
'dump_response': (dump_response, None),
})
cmd.add_aliases([
('list', 'ls'),
('view_request_info', 'viq'),
('view_request_headers', 'vhq'),
('view_full_request', 'vfq'),
('view_response_headers', 'vhs'),
('site_map', 'sm'),
('view_full_response', 'vfs'),
#('dump_response', 'dr'),
])
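
With the aliases above, a quick triage session might look like this (request id is made up):

pappy> ls 10
pappy> viq 12
pappy> vhs 12
pappy> vfs 12
pappy> sm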

View file

@ -0,0 +1,135 @@
import base64
import vim
import sys
import socket
import json
class CommError(Exception):
pass
def escape(s):
return s.replace("'", "''")
def communicate(data):
global PAPPY_PORT
# Submits data to the comm port of the proxy
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('127.0.0.1', int(vim.eval('s:commport'))))
datastr = json.dumps(data)
# Send our data
total_sent = 0
while total_sent < len(datastr):
sent = s.send(datastr[total_sent:])
assert sent != 0
total_sent += sent
s.send('\n')
# Get our response
retstr = ''
c = ''
while c != '\n':
retstr = retstr + c
c = s.recv(1)
assert c != ''
result = json.loads(retstr)
if not result['success']:
vim.command("echoerr '%s'" % escape(result['message']))
raise CommError(result['message'])
return result
def read_line(conn):
data = ''
c = ''
while c != '\n':
data = data + c
c = conn.read(1)
return data
def run_command(command):
funcs = {
"setup": set_up_windows,
"submit": submit_current_buffer,
}
if command in funcs:
funcs[command]()
def set_buffer_content(buf, text):
buf[:] = None
first = True
for l in text.split('\n'):
if first:
buf[0] = l
first = False
else:
buf.append(l)
def set_up_windows():
reqid = vim.eval("a:2")
comm_port = vim.eval("a:3")
vim.command("let s:commport=%d"%int(comm_port))
# Get the left buffer
vim.command("new")
vim.command("only")
b2 = vim.current.buffer
vim.command("let s:b2=bufnr('$')")
# Vsplit new file
vim.command("vnew")
b1 = vim.current.buffer
vim.command("let s:b1=bufnr('$')")
# Get the request
comm_data = {"action": "get_request", "reqid": reqid}
try:
reqdata = communicate(comm_data)
except CommError:
return
comm_data = {"action": "get_response", "reqid": reqid}
try:
rspdata = communicate(comm_data)
except CommError:
return
# Set up the buffers
set_buffer_content(b1, base64.b64decode(reqdata['full_message']))
if 'full_message' in rspdata:
set_buffer_content(b2, base64.b64decode(rspdata['full_message']))
# Save the port, ssl, host setting
vim.command("let s:repport=%d" % int(reqdata['port']))
vim.command("let s:rephost='%s'" % escape(reqdata['host']))
if reqdata['is_ssl']:
vim.command("let s:repisssl=1")
else:
vim.command("let s:repisssl=0")
def submit_current_buffer():
curbuf = vim.current.buffer
b2_id = int(vim.eval("s:b2"))
b2 = vim.buffers[b2_id]
vim.command("let s:b1=bufnr('$')")
vim.command("only")
vim.command("rightbelow vertical new")
vim.command("b %d" % b2_id)
vim.command("wincmd h")
full_request = '\n'.join(curbuf)
commdata = {'action': 'submit',
'full_message': base64.b64encode(full_request),
'port': int(vim.eval("s:repport")),
'host': vim.eval("s:rephost")}
if vim.eval("s:repisssl") == '1':
commdata["is_ssl"] = True
else:
commdata["is_ssl"] = False
result = communicate(commdata)
set_buffer_content(b2, base64.b64decode(result['response']['full_message']))
# (left, right) = set_up_windows()
# set_buffer_content(left, 'Hello\nWorld')
# set_buffer_content(right, 'Hello\nOther\nWorld')
#print "Arg is %s" % vim.eval("a:arg")
run_command(vim.eval("a:1"))

View file

@ -0,0 +1,17 @@
if !has('python')
echo "Vim must support python in order to use the repeater"
finish
endif
let s:pyscript = resolve(expand('<sfile>:p:h') . '/repeater.py')
function! RepeaterAction(...)
execute 'pyfile ' . s:pyscript
endfunc
command! -nargs=* RepeaterSetup call RepeaterAction('setup', <f-args>)
command! RepeaterSubmitBuffer call RepeaterAction('submit')
" Bind forward to <leader>f
nnoremap <leader>f :RepeaterSubmitBuffer<CR>

View file

@ -1,30 +1,18 @@
import copy
import datetime
import gzip
import os
import random
import re
import schema.update
import shutil
import string
import StringIO
import sys
import urlparse
import zlib
from OpenSSL import SSL
from OpenSSL import crypto
from pappyproxy import config
from pappyproxy import console
from pappyproxy import context
from pappyproxy import http
from pappyproxy import mangle
from pappyproxy.util import PappyException
from twisted.enterprise import adbapi
from pappyproxy.util import PappyException, printable_data
from twisted.internet import defer
from twisted.internet import reactor, ssl
from twisted.internet.protocol import ClientFactory, ServerFactory
from twisted.protocols.basic import LineReceiver
from twisted.internet import defer
from OpenSSL import crypto
next_connection_id = 1
@ -43,7 +31,7 @@ def add_intercepting_macro(key, macro, int_macro_dict):
def remove_intercepting_macro(key, int_macro_dict):
if not key in int_macro_dict:
raise PappyException('Macro with key %s not loaded' % key)
raise PappyException('Macro with key %s not currently running' % key)
del int_macro_dict[key]
def log(message, id=None, symbol='*', verbosity_level=1):
@ -94,6 +82,12 @@ class ProxyClient(LineReceiver):
line = ''
self._response_obj.add_line(line)
self.log(line, symbol='r<', verbosity_level=3)
if self.factory.stream_response:
self.log('Returning line back through stream')
self.factory.return_transport.write(line+'\r\n')
else:
self.log('Not streaming, not returning')
self.log(self.factory.stream_response)
if self._response_obj.headers_complete:
if self._response_obj.complete:
self.handle_response_end()
@ -103,9 +97,12 @@ class ProxyClient(LineReceiver):
def rawDataReceived(self, *args, **kwargs):
data = args[0]
self.log('Returning data back through stream')
if self.factory.stream_response:
self.factory.return_transport.write(data)
if not self._response_obj.complete:
if data:
s = console.printable_data(data)
s = printable_data(data)
dlines = s.split('\n')
for l in dlines:
self.log(l, symbol='<rd', verbosity_level=3)
@ -127,23 +124,29 @@ class ProxyClient(LineReceiver):
sendreq = self.request
if context.in_scope(sendreq):
to_mangle = copy.copy(self.factory.intercepting_macros).iteritems()
if self.factory.save_all:
yield sendreq.async_deep_save()
# It isn't the actual time, but this should work in case
# we do an 'ls' before it gets a real time saved
sendreq.time_start = datetime.datetime.now()
if self.factory.stream_response and not to_mangle:
self.request.async_deep_save()
else:
yield self.request.async_deep_save()
## Run intercepting macros
# if we don't copy it, when we delete a macro from the console,
# we get a crash. We do a shallow copy to keep the macro
# instances the same.
to_mangle = copy.copy(self.factory.intercepting_macros).iteritems()
for k, macro in to_mangle:
if macro.do_req:
if macro.intercept_requests:
if macro.async_req:
sendreq = yield macro.async_mangle_request(sendreq)
else:
sendreq = macro.mangle_request(sendreq)
if sendreq is None:
self.log('Request dropped, losing connection')
self.transport.loseConnection()
self.request = None
self.data_defer.callback(None)
@ -153,9 +156,8 @@ class ProxyClient(LineReceiver):
if sendreq != self.request:
sendreq.unmangled = self.request
if self.factory.save_all:
yield sendreq.async_deep_save()
if self.factory.save_all:
yield sendreq.async_deep_save()
else:
self.log("Request out of scope, passing along unmangled")
@ -174,6 +176,7 @@ class ProxyClient(LineReceiver):
def handle_response_end(self, *args, **kwargs):
self.log("Remote response finished, returning data to original stream")
self.request.response = self._response_obj
self.log('Response ended, losing connection')
self.transport.loseConnection()
assert self._response_obj.full_response
self.factory.return_request_pair(self.request)
@ -181,15 +184,17 @@ class ProxyClient(LineReceiver):
class ProxyClientFactory(ClientFactory):
def __init__(self, request, save_all=False):
def __init__(self, request, save_all=False, stream_response=False,
return_transport=None):
self.request = request
#self.proxy_server = None
self.intercepting_macros = {}
self.connection_id = -1
self.data_defer = defer.Deferred()
self.start_time = datetime.datetime.now()
self.end_time = None
self.save_all = save_all
self.stream_response = stream_response
self.return_transport = return_transport
self.intercepting_macros = {}
def log(self, message, symbol='*', verbosity_level=1):
log(message, id=self.connection_id, symbol=symbol, verbosity_level=verbosity_level)
@ -208,22 +213,24 @@ class ProxyClientFactory(ClientFactory):
@defer.inlineCallbacks
def return_request_pair(self, request):
self.end_time = datetime.datetime.now()
log_request(console.printable_data(request.response.full_response), id=self.connection_id, symbol='<m', verbosity_level=3)
log_request(printable_data(request.response.full_response), id=self.connection_id, symbol='<m', verbosity_level=3)
request.time_start = self.start_time
request.time_end = self.end_time
if context.in_scope(request):
to_mangle = copy.copy(self.intercepting_macros).iteritems()
if self.save_all:
yield request.async_deep_save()
if self.stream_response and not to_mangle:
request.async_deep_save()
else:
yield request.async_deep_save()
# if we don't copy it, when we delete a macro from the console,
# we get a crash. We do a shallow copy to keep the macro
# instances the same.
to_mangle = copy.copy(self.intercepting_macros).iteritems()
old_rsp = request.response
for k, macro in to_mangle:
if macro.do_rsp:
if macro.intercept_responses:
if macro.async_rsp:
mangled_rsp = yield macro.async_mangle_response(request)
else:
@ -234,6 +241,7 @@ class ProxyClientFactory(ClientFactory):
self.data_defer.callback(request)
if self.save_all:
yield request.async_deep_save()
self.log("Response dropped, losing connection")
self.transport.loseConnection()
defer.returnValue(None)
@ -241,15 +249,11 @@ class ProxyClientFactory(ClientFactory):
if request.response != old_rsp:
request.response.unmangled = old_rsp
if self.save_all:
yield request.async_deep_save()
# re-check after all the mangling
context.filter_recheck()
if self.save_all:
yield request.async_deep_save()
if request.response:
log_request(console.printable_data(request.response.full_response),
log_request(printable_data(request.response.full_response),
id=self.connection_id, symbol='<', verbosity_level=3)
else:
self.log("Response out of scope, passing along unmangled")
@ -340,11 +344,20 @@ class ProxyServer(LineReceiver):
if self._forward:
self.log("Forwarding to %s on %d" % (self._request_obj.host, self._request_obj.port))
if not self.factory.intercepting_macros:
stream = True
else:
# We only want to call send_response_back if we're not streaming
stream = False
self.log('Creating client factory, stream=%s' % stream)
factory = ProxyClientFactory(self._request_obj,
save_all=self.factory.save_all)
save_all=self.factory.save_all,
stream_response=stream,
return_transport=self.transport)
factory.intercepting_macros = self.factory.intercepting_macros
factory.connection_id = self.connection_id
factory.data_defer.addCallback(self.send_response_back)
if not stream:
factory.data_defer.addCallback(self.send_response_back)
if self._request_obj.is_ssl:
self.log("Accessing over SSL...", verbosity_level=3)
reactor.connectSSL(self._request_obj.host, self._request_obj.port, factory, ClientTLSContext())
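To make the two delivery paths explicit (a schematic summary, not actual control flow):
# stream == True:  ProxyClient writes bytes to return_transport as they
#                  arrive (see rawDataReceived above), and
#                  send_response_back is never registered.
# stream == False: the response is buffered; once complete, data_defer
#                  fires send_response_back with the finished request,
#                  which writes its response.full_response to the client.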
@ -364,6 +377,7 @@ class ProxyServer(LineReceiver):
def send_response_back(self, response):
if response is not None:
self.transport.write(response.response.full_response)
self.log("Response sent back, losing connection")
self.transport.loseConnection()
def connectionLost(self, reason):

View file

@ -19,9 +19,6 @@ update_queries = [
ALTER TABLE requests ADD COLUMN is_ssl INTEGER;
""",
"""
UPDATE schema_meta SET version=2;
""",
]
@defer.inlineCallbacks
@ -29,9 +26,30 @@ def update(dbpool):
for query in update_queries:
yield dbpool.runQuery(query)
# Load each request and save them again for any request that specified a port
# or protocol in the host header.
http.init(dbpool)
reqs = yield http.Request.load_from_filters([])
for req in reqs:
yield req.deep_save()
# Update metadata for each request
reqrows = yield dbpool.runQuery(
"""
SELECT id, full_request
FROM requests;
""",
)
# Create an object and get its port/is_ssl
for reqrow in reqrows:
reqid = reqrow[0]
fullreq = reqrow[1]
r = http.Request(fullreq)
port = r.port
is_ssl = r.is_ssl
yield dbpool.runQuery(
"""
UPDATE requests SET port=?,is_ssl=? WHERE id=?;
""",
(port, is_ssl, reqid)
)
yield dbpool.runQuery(
"""
UPDATE schema_meta SET version=2;
"""
)
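A hedged sketch of what this migration recovers for each stored row, grounded in the request-parsing behavior exercised by the tests later in this commit:
from pappyproxy import http

r = http.Request('GET / HTTP/1.1\r\nHost: www.test.com\r\n\r\n')
assert r.port == 80        # default port inferred for plain HTTP
assert r.is_ssl == False   # no SSL hint, so is_ssl defaults to False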

View file

@ -0,0 +1,50 @@
from pappyproxy import http
from twisted.internet import defer
"""
Schema v4
Description:
Adds additional metadata to the database for requests. Mainly, it stores the host
that a request was sent to so that Pappy doesn't have to guess it from the Host
header.
"""
update_queries = [
"""
ALTER TABLE requests ADD COLUMN host TEXT;
""",
]
@defer.inlineCallbacks
def update(dbpool):
for query in update_queries:
yield dbpool.runQuery(query)
# Update metadata for each request
reqrows = yield dbpool.runQuery(
"""
SELECT id, full_request
FROM requests;
""",
)
# Create an object that will parse the host from the request
for reqrow in reqrows:
reqid = reqrow[0]
fullreq = reqrow[1]
r = http.Request(fullreq)
host = r.host
if r.host:
yield dbpool.runQuery(
"""
UPDATE requests SET host=? WHERE id=?;
""",
(host, reqid)
)
yield dbpool.runQuery(
"""
UPDATE schema_meta SET version=4;
"""
)
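As above, a hedged sketch of the value this migration extracts for the new column:
from pappyproxy import http

r = http.Request('GET / HTTP/1.1\r\nHost: www.test.com\r\n\r\n')
assert r.host == 'www.test.com'   # written to the new host column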

View file

@ -0,0 +1,29 @@
from pappyproxy import http
from twisted.internet import defer
"""
Schema v5
Description:
Adds a column to the requests table that stores a dict which plugins can
use to attach metadata to requests.
"""
update_queries = [
"""
ALTER TABLE requests ADD COLUMN plugin_data TEXT;
""",
"""
UPDATE requests SET plugin_data="{}";
""",
"""
UPDATE schema_meta SET version=5;
"""
]
@defer.inlineCallbacks
def update(dbpool):
for query in update_queries:
yield dbpool.runQuery(query)
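Since the column is initialized to an empty JSON object, a plugin can round-trip a dict through it. A hedged sketch at the JSON level (variable names are illustrative; the real plugin API may wrap this differently):
import json

plugin_data = '{}'                        # contents of the new TEXT column
d = json.loads(plugin_data or '{}')
d['myplugin'] = {'notes': 'interesting'}  # plugins namespace their own keys
plugin_data = json.dumps(d)               # written back to the column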

View file

@ -1,6 +1,7 @@
import os
import glob
import imp
import random
from twisted.internet import reactor
from twisted.enterprise import adbapi
@ -29,9 +30,28 @@ def add_schema_files(schemas):
module_name = os.path.basename(os.path.splitext(mod)[0])
newmod = imp.load_source('%s'%module_name, mod)
schemas.append( (module_name, newmod) )
def copy_file(a, b):
a_bytes = a.read(1024)
while a_bytes:
b.write(a_bytes)
a_bytes = a.read(1024)
def create_backup(filename):
backup = filename + '.bak'
while os.path.isfile(backup):
backup = '%s.%d' % (backup, random.randint(0, 9999999999))
# Make sure backup file has secure permissions
with os.fdopen(os.open(backup, os.O_CREAT, 0o0600), 'r') as f:
pass
# Copy the datafile
with open(filename, 'r') as a:
with open(backup, 'w') as b:
copy_file(a, b)
return backup
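A usage sketch, assuming a project data file named data.db:
backup = create_backup('data.db')
# -> 'data.db.bak', or a randomized 'data.db.bak.N' name if a backup
#    already exists; the file is created with 0600 permissions before
#    the data is copied in.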
@defer.inlineCallbacks
def update_schema(dbpool):
def update_schema(dbpool, filename):
# Update the database schema to the latest version
schema_version = yield get_schema_version(dbpool)
if schema_version == 0:
@ -41,11 +61,32 @@ def update_schema(dbpool):
schemas = []
add_schema_files(schemas)
schemas = sorted(schemas, key=lambda tup: tup[0])
for i in range(schema_version, len(schemas)):
# schemas[0] is v1, schemas[1] is v2, etc
to_run = range(schema_version, len(schemas))
if len(to_run) > 0:
# Back up data file
if verbose_update:
print "Updating datafaile schema to version %d" % (i+1)
yield schemas[i][1].update(dbpool)
print 'Backing up data file'
backup = create_backup(filename)
if verbose_update:
print 'Backed up to %s' % backup
try:
for i in to_run:
# schemas[0] is v1, schemas[1] is v2, etc
if verbose_update:
print "Updating datafaile schema to version %d" % (i+1)
yield schemas[i][1].update(dbpool)
# Delete backup
os.remove(backup)
if verbose_update:
print 'Update successful! Deleted backup'
except Exception as e:
# restore the backup
print 'Update failed, restoring backup'
with open(filename, 'w') as a:
with open(backup, 'r') as b:
copy_file(b, a)
os.remove(backup)
raise e
@defer.inlineCallbacks
def main():

View file

@ -2,9 +2,15 @@ from pappyproxy.session import Session
MACRO_NAME = '{{macro_name}}'
SHORT_NAME = '{{short_name}}'
runargs = []
def init(args):
global runargs
runargs = args
def mangle_request(request):
global runargs
return request
def mangle_response(request):
global runargs
return request.response
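For illustration, a hedged example of the template filled in (the macro name and header are made up):
MACRO_NAME = 'Add Header'
SHORT_NAME = 'addhdr'

runargs = []

def init(args):
    global runargs
    runargs = args

def mangle_request(request):
    request.headers['X-Macro'] = 'hello'   # tweak the outgoing request
    return request

def mangle_response(request):
    return request.response                # pass the response through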

View file

@ -1,4 +1,4 @@
from pappyproxy.http import Request, get_request, post_request
from pappyproxy.http import Request, get_request, post_request, request_by_id
from pappyproxy.context import set_tag
from pappyproxy.iter import *

View file

@ -0,0 +1,211 @@
import pytest
import mock
import pappyproxy
from pappyproxy.mangle import async_mangle_request, async_mangle_response
from pappyproxy.http import Request, Response
from testutil import no_tcp, no_database, func_deleted, mock_deferred, mock_deep_save, fake_saving
def retf(r):
return False
@pytest.fixture
def ignore_edit(mocker):
new_edit = mock.MagicMock()
new_edit.return_value = mock_deferred(None)
new_plugin = mock.MagicMock()
new_plugin.return_value = new_edit
mocker.patch('pappyproxy.plugin.plugin_by_name', new=new_plugin)
@pytest.fixture
def ignore_delete(mocker):
new_os_remove = mock.MagicMock()
mocker.patch('os.remove', new=new_os_remove)
return new_os_remove
@pytest.fixture(autouse=True)
def no_logging(mocker):
mocker.patch('pappyproxy.proxy.log')
@pytest.fixture
def req():
r = Request()
r.start_line = 'GET / HTTP/1.1'
r.host = 'www.ffffff.eeeeee'
r.body = 'AAAA'
return r
@pytest.fixture
def req_w_rsp(req):
r = Response()
r.start_line = 'HTTP/1.1 200 OK'
r.headers['Test-Header'] = 'ABC123'
r.body = 'AAAA'
req.response = r
return req
@pytest.fixture
def mock_tempfile(mocker):
new_tfile_obj = mock.MagicMock()
tfile_instance = mock.MagicMock()
new_tfile_obj.return_value.__enter__.return_value = tfile_instance
tfile_instance.name = 'mockTemporaryFile'
mocker.patch('tempfile.NamedTemporaryFile', new=new_tfile_obj)
new_open = mock.MagicMock()
fake_file = mock.MagicMock(spec=file)
new_open.return_value.__enter__.return_value = fake_file
mocker.patch('__builtin__.open', new_open)
return (new_tfile_obj, tfile_instance, new_open, fake_file)
########################
## Test request mangling
@pytest.inlineCallbacks
def test_mangle_request_edit(req, mock_deep_save, mock_tempfile,
ignore_edit, ignore_delete):
tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
r = req
new_contents = ('GET / HTTP/1.1\r\n'
'Content-Length: 4\r\n\r\n'
'BBBB')
fake_file.read.return_value = new_contents
new_req = yield async_mangle_request(r)
assert not mock_deep_save.called
assert tfile_obj.called
assert tfile_instance.write.called
assert tfile_instance.write.call_args == ((r.full_request,),)
assert new_open.called
assert fake_file.read.called
assert new_req.full_request == new_contents
@pytest.inlineCallbacks
def test_mangle_request_edit_newlines(req, mock_deep_save, mock_tempfile,
ignore_edit, ignore_delete):
# Intercepting is off, request in scope
tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
r = req
new_contents = ('GET / HTTP/1.1\r\n'
'Test-Head: FOOBIE\n'
'Content-Length: 4\n\r\n'
'BBBB')
fake_file.read.return_value = new_contents
new_req = yield async_mangle_request(r)
assert new_req.full_request == ('GET / HTTP/1.1\r\n'
'Test-Head: FOOBIE\r\n'
'Content-Length: 4\r\n\r\n'
'BBBB')
assert new_req.headers['Test-Head'] == 'FOOBIE'
@pytest.inlineCallbacks
def test_mangle_request_drop(req, mock_deep_save, mock_tempfile,
ignore_edit, ignore_delete):
# Intercepting is off, request in scope
tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
r = req
new_contents = ''
fake_file.read.return_value = new_contents
new_req = yield async_mangle_request(r)
assert new_req is None
@pytest.inlineCallbacks
def test_mangle_request_edit_len(req, mock_deep_save, mock_tempfile,
ignore_edit, ignore_delete):
# Intercepting is off, request in scope
tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
r = req
new_contents = ('GET / HTTP/1.1\r\n'
'Test-Head: FOOBIE\n'
'Content-Length: 4\n\r\n'
'BBBBAAAA')
fake_file.read.return_value = new_contents
new_req = yield async_mangle_request(r)
assert new_req.full_request == ('GET / HTTP/1.1\r\n'
'Test-Head: FOOBIE\r\n'
'Content-Length: 8\r\n\r\n'
'BBBBAAAA')
#########################
## Test response mangling
@pytest.inlineCallbacks
def test_mangle_response_edit(req_w_rsp, mock_deep_save, mock_tempfile,
ignore_edit, ignore_delete):
# Intercepting is on, edit
tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
r = req_w_rsp
old_rsp = r.response.full_response
new_contents = ('HTTP/1.1 403 NOTOKIEDOKIE\r\n'
'Content-Length: 4\r\n'
'Other-Header: foobles\r\n\r\n'
'BBBB')
fake_file.read.return_value = new_contents
mangled_rsp = yield async_mangle_response(r)
assert not mock_deep_save.called
assert tfile_obj.called
assert tfile_instance.write.called
assert tfile_instance.write.call_args == ((old_rsp,),)
assert new_open.called
assert fake_file.read.called
assert mangled_rsp.full_response == new_contents
@pytest.inlineCallbacks
def test_mangle_response_newlines(req_w_rsp, mock_deep_save, mock_tempfile,
ignore_edit, ignore_delete):
# Intercepting is off, request in scope
tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
r = req_w_rsp
old_rsp = r.response.full_response
new_contents = ('HTTP/1.1 403 NOTOKIEDOKIE\n'
'Content-Length: 4\n'
'Other-Header: foobles\r\n\n'
'BBBB')
fake_file.read.return_value = new_contents
mangled_rsp = yield async_mangle_response(r)
assert mangled_rsp.full_response == ('HTTP/1.1 403 NOTOKIEDOKIE\r\n'
'Content-Length: 4\r\n'
'Other-Header: foobles\r\n\r\n'
'BBBB')
assert mangled_rsp.headers['Other-Header'] == 'foobles'
@pytest.inlineCallbacks
def test_mangle_response_drop(req_w_rsp, mock_deep_save, mock_tempfile,
ignore_edit, ignore_delete):
# Intercepting is off, request in scope
tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
r = req_w_rsp
old_rsp = r.response.full_response
new_contents = ''
fake_file.read.return_value = new_contents
mangled_rsp = yield async_mangle_response(r)
assert mangled_rsp is None
@pytest.inlineCallbacks
def test_mangle_response_new_len(req_w_rsp, mock_deep_save, mock_tempfile,
ignore_edit, ignore_delete):
# Intercepting is off, request in scope
tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
r = req_w_rsp
old_rsp = r.response.full_response
new_contents = ('HTTP/1.1 403 NOTOKIEDOKIE\n'
'Content-Length: 4\n'
'Other-Header: foobles\r\n\n'
'BBBBAAAA')
fake_file.read.return_value = new_contents
mangled_rsp = yield async_mangle_response(r)
assert mangled_rsp.full_response == ('HTTP/1.1 403 NOTOKIEDOKIE\r\n'
'Content-Length: 8\r\n'
'Other-Header: foobles\r\n\r\n'
'BBBBAAAA')

View file

@ -11,8 +11,8 @@ def test_filter_reqs():
pass
def test_gen_filter_by_all_request():
f = context.gen_filter_by_all(context.cmp_contains, 'hello')
fn = context.gen_filter_by_all(context.cmp_contains, 'hello', negate=True)
f = context.gen_filter_by_all(['ct', 'hello'])
fn = context.gen_filter_by_all(['nct', 'hello'])
# Nowhere
r = Request('GET / HTTP/1.1\r\n')
@ -31,7 +31,7 @@ def test_gen_filter_by_all_request():
# Data
r = Request('GET / HTTP/1.1\r\n')
r.raw_data = 'hello'
r.body = 'hello'
assert f(r)
assert not fn(r)
@ -73,8 +73,8 @@ def test_gen_filter_by_all_request():
def test_gen_filter_by_all_response(http_request):
f = context.gen_filter_by_all(context.cmp_contains, 'hello')
fn = context.gen_filter_by_all(context.cmp_contains, 'hello', negate=True)
f = context.gen_filter_by_all(['ct', 'hello'])
fn = context.gen_filter_by_all(['nct', 'hello'])
# Nowhere
r = Response('HTTP/1.1 200 OK\r\n')
@ -91,7 +91,7 @@ def test_gen_filter_by_all_response(http_request):
# Data
r = Response('HTTP/1.1 200 OK\r\n')
http_request.response = r
r.raw_data = 'hello'
r.body = 'hello'
assert f(http_request)
assert not fn(http_request)
@ -138,8 +138,8 @@ def test_gen_filter_by_all_response(http_request):
assert fn(http_request)
def test_filter_by_host(http_request):
f = context.gen_filter_by_host(context.cmp_contains, 'sexy')
fn = context.gen_filter_by_host(context.cmp_contains, 'sexy', negate=True)
f = context.gen_filter_by_host(['ct', 'sexy'])
fn = context.gen_filter_by_host(['nct', 'sexy'])
http_request.headers['Host'] = 'google.com'
http_request.headers['MiscHeader'] = 'vim.sexy'
@ -152,55 +152,55 @@ def test_filter_by_host(http_request):
assert not fn(http_request)
def test_filter_by_body():
f = context.gen_filter_by_body(context.cmp_contains, 'sexy')
fn = context.gen_filter_by_body(context.cmp_contains, 'sexy', negate=True)
f = context.gen_filter_by_body(['ct', 'sexy'])
fn = context.gen_filter_by_body(['nct', 'sexy'])
# Test request bodies
r = Request()
r.status_line = 'GET /sexy HTTP/1.1'
r.start_line = 'GET /sexy HTTP/1.1'
r.headers['Header'] = 'sexy'
r.raw_data = 'foo'
r.body = 'foo'
assert not f(r)
assert fn(r)
r.raw_data = 'sexy'
r.body = 'sexy'
assert f(r)
assert not fn(r)
# Test response bodies
r = Request()
rsp = Response()
rsp.status_line = 'HTTP/1.1 200 OK'
rsp.start_line = 'HTTP/1.1 200 OK'
rsp.headers['sexy'] = 'sexy'
r.status_line = 'GET /sexy HTTP/1.1'
r.start_line = 'GET /sexy HTTP/1.1'
r.headers['Header'] = 'sexy'
r.response = rsp
assert not f(r)
assert fn(r)
rsp.raw_data = 'sexy'
rsp.body = 'sexy'
assert f(r)
assert not fn(r)
def test_filter_by_response_code(http_request):
f = context.gen_filter_by_response_code(context.cmp_eq, 200)
fn = context.gen_filter_by_response_code(context.cmp_eq, 200, negate=True)
f = context.gen_filter_by_response_code(['eq', '200'])
fn = context.gen_filter_by_response_code(['neq', '200'])
r = Response()
http_request.response = r
r.status_line = 'HTTP/1.1 404 Not Found'
r.start_line = 'HTTP/1.1 404 Not Found'
assert not f(http_request)
assert fn(http_request)
r.status_line = 'HTTP/1.1 200 OK'
r.start_line = 'HTTP/1.1 200 OK'
assert f(http_request)
assert not fn(http_request)
def test_filter_by_raw_headers_request():
f1 = context.gen_filter_by_raw_headers(context.cmp_contains, 'Sexy:')
fn1 = context.gen_filter_by_raw_headers(context.cmp_contains, 'Sexy:', negate=True)
f2 = context.gen_filter_by_raw_headers(context.cmp_contains, 'sexy\r\nHeader')
fn2 = context.gen_filter_by_raw_headers(context.cmp_contains, 'sexy\r\nHeader', negate=True)
f1 = context.gen_filter_by_raw_headers(['ct', 'Sexy:'])
fn1 = context.gen_filter_by_raw_headers(['nct', 'Sexy:'])
f2 = context.gen_filter_by_raw_headers(['ct', 'sexy\r\nHeader'])
fn2 = context.gen_filter_by_raw_headers(['nct', 'sexy\r\nHeader'])
r = Request('GET / HTTP/1.1\r\n')
rsp = Response('HTTP/1.1 200 OK\r\n')
@ -228,10 +228,10 @@ def test_filter_by_raw_headers_request():
assert not fn2(r)
def test_filter_by_raw_headers_response():
f1 = context.gen_filter_by_raw_headers(context.cmp_contains, 'Sexy:')
fn1 = context.gen_filter_by_raw_headers(context.cmp_contains, 'Sexy:', negate=True)
f2 = context.gen_filter_by_raw_headers(context.cmp_contains, 'sexy\r\nHeader')
fn2 = context.gen_filter_by_raw_headers(context.cmp_contains, 'sexy\r\nHeader', negate=True)
f1 = context.gen_filter_by_raw_headers(['ct', 'Sexy:'])
fn1 = context.gen_filter_by_raw_headers(['nct', 'Sexy:'])
f2 = context.gen_filter_by_raw_headers(['ct', 'sexy\r\nHeader'])
fn2 = context.gen_filter_by_raw_headers(['nct', 'sexy\r\nHeader'])
r = Request('GET / HTTP/1.1\r\n')
rsp = Response('HTTP/1.1 200 OK\r\n')
@ -259,25 +259,24 @@ def test_filter_by_raw_headers_response():
assert not fn2(r)
def test_filter_by_path(http_request):
f = context.gen_filter_by_path(context.cmp_contains, 'porn') # find the fun websites
fn = context.gen_filter_by_path(context.cmp_contains, 'porn', negate=True) # find the boring websites
f = context.gen_filter_by_path(['ct', 'porn']) # find the fun websites
fn = context.gen_filter_by_path(['nct', 'porn']) # find the boring websites
http_request.status_line = 'GET / HTTP/1.1'
http_request.start_line = 'GET / HTTP/1.1'
assert not f(http_request)
assert fn(http_request)
http_request.status_line = 'GET /path/to/great/porn HTTP/1.1'
http_request.start_line = 'GET /path/to/great/porn HTTP/1.1'
assert f(http_request)
assert not fn(http_request)
http_request.status_line = 'GET /path/to/porn/great HTTP/1.1'
http_request.start_line = 'GET /path/to/porn/great HTTP/1.1'
assert f(http_request)
assert not fn(http_request)
def test_gen_filter_by_submitted_cookies():
f1 = context.gen_filter_by_submitted_cookies(context.cmp_contains, 'Session')
f2 = context.gen_filter_by_submitted_cookies(context.cmp_contains, 'Cookie',
context.cmp_contains, 'CookieVal')
f1 = context.gen_filter_by_submitted_cookies(['ct', 'Session'])
f2 = context.gen_filter_by_submitted_cookies(['ct', 'Cookie', 'nct', 'CookieVal'])
r = Request(('GET / HTTP/1.1\r\n'
'Cookie: foo=bar\r\n'
'\r\n'))
@ -294,18 +293,17 @@ def test_gen_filter_by_submitted_cookies():
'Cookie: Session=bar; CookieThing=NoMatch\r\n'
'\r\n'))
assert f1(r)
assert not f2(r)
assert f2(r)
r = Request(('GET / HTTP/1.1\r\n'
'Cookie: Session=bar; CookieThing=CookieValue\r\n'
'\r\n'))
assert f1(r)
assert f2(r)
assert not f2(r)
def test_gen_filter_by_set_cookies():
f1 = context.gen_filter_by_set_cookies(context.cmp_contains, 'Session')
f2 = context.gen_filter_by_set_cookies(context.cmp_contains, 'Cookie',
context.cmp_contains, 'CookieVal')
f1 = context.gen_filter_by_set_cookies(['ct', 'Session'])
f2 = context.gen_filter_by_set_cookies(['ct', 'Cookie', 'ct', 'CookieVal'])
r = Request('GET / HTTP/1.1\r\n\r\n')
rsp = Response(('HTTP/1.1 200 OK\r\n'
@ -345,9 +343,8 @@ def test_gen_filter_by_set_cookies():
assert f2(r)
def test_filter_by_params_get():
f1 = context.gen_filter_by_params(context.cmp_contains, 'Session')
f2 = context.gen_filter_by_params(context.cmp_contains, 'Cookie',
context.cmp_contains, 'CookieVal')
f1 = context.gen_filter_by_params(['ct', 'Session'])
f2 = context.gen_filter_by_params(['ct', 'Cookie', 'ct', 'CookieVal'])
r = Request('GET / HTTP/1.1\r\n\r\n')
assert not f1(r)
@ -366,30 +363,29 @@ def test_filter_by_params_get():
assert f2(r)
def test_filter_by_params_post():
f1 = context.gen_filter_by_params(context.cmp_contains, 'Session')
f2 = context.gen_filter_by_params(context.cmp_contains, 'Cookie',
context.cmp_contains, 'CookieVal')
f1 = context.gen_filter_by_params(['ct', 'Session'])
f2 = context.gen_filter_by_params(['ct', 'Cookie', 'ct', 'CookieVal'])
r = Request(('GET / HTTP/1.1\r\n'
'Content-Type: application/x-www-form-urlencoded\r\n\r\n'))
r.raw_data = 'foo=bar'
r.body = 'foo=bar'
assert not f1(r)
assert not f2(r)
r = Request(('GET / HTTP/1.1\r\n'
'Content-Type: application/x-www-form-urlencoded\r\n\r\n'))
r.raw_data = 'Session=bar'
r.body = 'Session=bar'
assert f1(r)
assert not f2(r)
r = Request(('GET / HTTP/1.1\r\n'
'Content-Type: application/x-www-form-urlencoded\r\n\r\n'))
r.raw_data = 'Session=bar&Cookie=foo'
r.body = 'Session=bar&Cookie=foo'
assert f1(r)
assert not f2(r)
r = Request(('GET / HTTP/1.1\r\n'
'Content-Type: application/x-www-form-urlencoded\r\n\r\n'))
r.raw_data = 'Session=bar&CookieThing=CookieValue'
r.body = 'Session=bar&CookieThing=CookieValue'
assert f1(r)
assert f2(r)
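These test updates track the new filter-generator API: instead of comparer functions such as context.cmp_contains, each generator now takes the parsed arguments of a filter string ('ct' for contains, 'nct' for its negation, 'eq'/'neq' for equality). A hedged usage sketch:
from pappyproxy import context
from pappyproxy.http import Request

f = context.gen_filter_by_body(['ct', 'token'])    # body contains 'token'
fn = context.gen_filter_by_body(['nct', 'token'])  # body does not

r = Request('GET / HTTP/1.1\r\n')
r.body = 'token=abc123'
assert f(r)
assert not fn(r)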

View file

@ -86,7 +86,7 @@ def test_chunked_simple():
full_data += '0\r\n\r\n'
c.add_data(full_data)
assert c.complete
assert c.raw_data == 'A'*5
assert c.body == 'A'*5
def test_chunked_hex():
# Test hex lengths
@ -97,7 +97,7 @@ def test_chunked_hex():
full_data += '0\r\n\r\n'
c.add_data(full_data)
assert c.complete
assert c.raw_data == 'A'*0xAF
assert c.body == 'A'*0xAF
c = http.ChunkedData()
full_data = 'AF\r\n'
@ -106,7 +106,7 @@ def test_chunked_hex():
full_data += '0\r\n\r\n'
c.add_data(full_data)
assert c.complete
assert c.raw_data == 'A'*0xAF
assert c.body == 'A'*0xAF
c = http.ChunkedData()
full_data = 'aF\r\n'
@ -115,7 +115,7 @@ def test_chunked_hex():
full_data += '0\r\n\r\n'
c.add_data(full_data)
assert c.complete
assert c.raw_data == 'A'*0xAF
assert c.body == 'A'*0xAF
def test_chunked_leading_zeros():
# Test leading zeros
@ -126,7 +126,7 @@ def test_chunked_leading_zeros():
full_data += '0\r\n\r\n'
c.add_data(full_data)
assert c.complete
assert c.raw_data == 'A'*0xAF
assert c.body == 'A'*0xAF
def test_chunked_one_char_add():
# Test adding one character at a time
@ -138,7 +138,7 @@ def test_chunked_one_char_add():
for ch in full_data:
c.add_data(ch)
assert c.complete
assert c.raw_data == 'A'*0xAF
assert c.body == 'A'*0xAF
def test_chunked_incomplete():
# Tests that complete isn't true until the data is received
@ -168,11 +168,11 @@ def test_length_data_simple():
assert not l.complete
l.add_data('A'*100)
assert l.complete
assert l.raw_data == 'A'*100
assert l.body == 'A'*100
l = http.LengthData(0)
assert l.complete
assert l.raw_data == ''
assert l.body == ''
# Test incomplete
l = http.LengthData(100)
@ -185,7 +185,7 @@ def test_length_one_character():
for i in range(100):
l.add_data('A')
assert l.complete
assert l.raw_data == 'A'*100
assert l.body == 'A'*100
# Test adding one character at a time (incomplete)
l = http.LengthData(100)
@ -198,7 +198,7 @@ def test_length_overflow():
l = http.LengthData(100)
l.add_data('A'*400)
assert l.complete
assert l.raw_data == 'A'*100
assert l.body == 'A'*100
# Test throwing an exception when adding data after complete
l = http.LengthData(100)
@ -369,7 +369,80 @@ def test_response_cookie_blank():
assert c.val == ''
assert c.path == '/'
assert c.secure
####################
## HTTPMessage tests
def test_message_simple():
raw = ('foobar\r\n'
'a: b\r\n'
'Content-Length: 100\r\n\r\n')
raw += 'A'*100
m = http.HTTPMessage(raw)
assert m.complete
assert m.malformed == False
assert m.start_line == 'foobar'
assert m.body == 'A'*100
assert m.headers.all_pairs() == [('a', 'b'), ('Content-Length', '100')]
assert m.headers['A'] == 'b'
assert m.headers_section == ('foobar\r\n'
'a: b\r\n'
'Content-Length: 100\r\n\r\n')
assert m.full_message == raw
def test_message_build():
raw = ('foobar\r\n'
'a: b\r\n'
'Content-Length: 100\r\n\r\n')
raw += 'A'*100
m = http.HTTPMessage()
m.add_line('foobar')
m.add_line('a: b')
m.add_line('Content-Length: 100')
m.add_line('')
assert not m.complete
m.add_data('A'*50)
assert not m.complete
m.add_data('A'*50)
assert m.complete
assert m.malformed == False
assert m.start_line == 'foobar'
assert m.body == 'A'*100
assert m.headers.all_pairs() == [('a', 'b'), ('Content-Length', '100')]
assert m.headers['A'] == 'b'
assert m.headers_section == ('foobar\r\n'
'a: b\r\n'
'Content-Length: 100\r\n\r\n')
assert m.full_message == raw
def test_message_build_chunked():
raw = ('foobar\r\n'
'a: b\r\n'
'Content-Length: 100\r\n\r\n')
raw += 'A'*100
m = http.HTTPMessage()
m.add_line('foobar')
m.add_line('a: b')
m.add_line('Transfer-Encoding: chunked')
m.add_line('')
assert not m.complete
m.add_data('%x\r\n' % 50)
m.add_data('A'*50)
m.add_data('\r\n')
m.add_data('%x\r\n' % 50)
m.add_data('A'*50)
m.add_data('\r\n')
m.add_data('0\r\n')
assert m.complete
assert m.malformed == False
assert m.start_line == 'foobar'
assert m.body == 'A'*100
assert m.headers.all_pairs() == [('a', 'b'), ('Content-Length', '100')]
assert m.headers['A'] == 'b'
assert m.headers_section == ('foobar\r\n'
'a: b\r\n'
'Content-Length: 100\r\n\r\n')
assert m.full_message == raw
####################
## Request tests
@ -398,7 +471,7 @@ def test_request_simple():
assert r.is_ssl == False
assert r.path == '/'
assert r.port == 80
assert r.status_line == 'GET / HTTP/1.1'
assert r.start_line == 'GET / HTTP/1.1'
assert r.verb == 'GET'
assert r.version == 'HTTP/1.1'
assert r.headers['Content-Length'] == '100'
@ -409,7 +482,7 @@ def test_request_simple():
assert r.headers['Host'] == 'www.test.com'
assert r.headers['Connection'] == 'Keep-Alive'
assert r.headers['Cache-Control'] == 'no-cache'
assert r.raw_data == 'A'*100
assert r.body == 'A'*100
test(rf)
test(rl)
test(ru)
@ -536,6 +609,7 @@ def test_request_parse_host():
rf, rl, ru, rj = req_by_lines_and_full(header_lines)
def test(r):
assert r.complete
assert r.port == 443
assert r.host == 'www.test.com'
assert r.is_ssl
test(rf)
@ -574,7 +648,7 @@ def test_repeated_request_headers():
def test_request_update_statusline():
r = http.Request()
r.status_line = 'GET / HTTP/1.1'
r.start_line = 'GET / HTTP/1.1'
assert r.verb == 'GET'
assert r.path == '/'
assert r.version == 'HTTP/1.1'
@ -584,7 +658,7 @@ def test_request_update_statusline():
def test_request_update_cookies():
r = http.Request()
r.status_line = 'GET / HTTP/1.1'
r.start_line = 'GET / HTTP/1.1'
# Check new cookies
r.cookies['foo'] = 'bar'
@ -607,7 +681,7 @@ def test_request_update_cookies():
def test_request_update_headers():
r = http.Request()
r.status_line = 'GET / HTTP/1.1'
r.start_line = 'GET / HTTP/1.1'
r.headers['Content-Length'] = '0'
r.headers['Test-Header'] = 'Test Value'
r.headers['Other-Header'] = 'Other Value'
@ -624,11 +698,11 @@ def test_request_update_headers():
def test_request_modified_headers():
r = http.Request()
r.status_line = 'GET / HTTP/1.1'
r.start_line = 'GET / HTTP/1.1'
r.headers['content-length'] = '100'
r.headers['cookie'] = 'abc=123'
r.cookies['abc'] = '456'
r.raw_data = 'AAAA'
r.body = 'AAAA'
assert r.full_request == ('GET / HTTP/1.1\r\n'
'content-length: 4\r\n'
'cookie: abc=456\r\n\r\n'
@ -638,33 +712,34 @@ def test_request_modified_headers():
def test_request_update_data():
r = http.Request()
r.status_line = 'GET / HTTP/1.1'
r.start_line = 'GET / HTTP/1.1'
r.headers['content-length'] = 500
r.raw_data = 'AAAA'
r.body = 'AAAA'
assert r.full_request == ('GET / HTTP/1.1\r\n'
'content-length: 4\r\n'
'\r\n'
'AAAA')
def test_request_to_json():
r = http.Request()
r.status_line = 'GET / HTTP/1.1'
r.start_line = 'GET / HTTP/1.1'
r.headers['content-length'] = 500
r.tags = ['foo', 'bar']
r.raw_data = 'AAAA'
r.body = 'AAAA'
r.reqid = '1'
rsp = http.Response()
rsp.status_line = 'HTTP/1.1 200 OK'
rsp.start_line = 'HTTP/1.1 200 OK'
rsp.rspid = '2'
r.response = rsp
expected_reqdata = {u'full_request': unicode(base64.b64encode(r.full_request)),
expected_reqdata = {u'full_message': unicode(base64.b64encode(r.full_request)),
u'response_id': str(rsp.rspid),
u'port': 80,
u'is_ssl': False,
u'tags': ['foo', 'bar'],
u'reqid': str(r.reqid),
u'host': '',
}
assert json.loads(r.to_json()) == expected_reqdata
@ -764,7 +839,7 @@ def test_request_copy():
def test_request_url_blankpath():
r = http.Request()
r.status_line = 'GET / HTTP/1.1'
r.start_line = 'GET / HTTP/1.1'
r.url = 'https://www.google.com'
r.headers['Host'] = r.host
r.url_params.from_dict({'foo': 'bar'})
@ -789,10 +864,10 @@ def test_response_simple():
rf, rl, ru, rj = rsp_by_lines_and_full(header_lines, data)
def test(r):
assert r.complete
assert r.raw_data == data
assert r.body == data
assert r.response_code == 200
assert r.response_text == 'OK'
assert r.status_line == 'HTTP/1.1 200 OK'
assert r.start_line == 'HTTP/1.1 200 OK'
assert r.version == 'HTTP/1.1'
assert r.headers['Date'] == 'Thu, 22 Oct 2015 00:37:17 GMT'
@ -826,7 +901,7 @@ def test_response_chunked():
rf, rl, ru, rj = rsp_by_lines_and_full(header_lines, data)
def test(r):
assert r.complete
assert r.raw_data == 'A'*0xAF + 'B'*0xBF
assert r.body == 'A'*0xAF + 'B'*0xBF
test(rf)
test(rl)
@ -851,7 +926,7 @@ def test_response_gzip():
rf, rl, ru, rj = rsp_by_lines_and_full(header_lines, data_comp)
def test(r):
assert r.complete
assert r.raw_data == data_decomp
assert r.body == data_decomp
test(rf)
test(rl)
@ -876,7 +951,7 @@ def test_response_deflate():
rf, rl, ru, rj = rsp_by_lines_and_full(header_lines, data_comp)
def test(r):
assert r.complete
assert r.raw_data == data_decomp
assert r.body == data_decomp
test(rf)
test(rl)
@ -907,7 +982,7 @@ def test_response_chunked_gzip():
rf, rl, ru, rj = rsp_by_lines_and_full(header_lines, data_chunked)
def test(r):
assert r.complete
assert r.raw_data == data_decomp
assert r.body == data_decomp
assert r.headers['Content-Length'] == str(len(data_decomp))
assert r.full_response == ('HTTP/1.1 200 OK\r\n'
'Date: Thu, 22 Oct 2015 00:37:17 GMT\r\n'
@ -924,7 +999,7 @@ def test_response_chunked_gzip():
def test_response_early_completion():
r = http.Response()
r.status_line = 'HTTP/1.1 200 OK'
r.start_line = 'HTTP/1.1 200 OK'
r.add_line('Content-Length: 0')
assert not r.complete
r.add_line('')
@ -992,7 +1067,7 @@ def test_repeated_response_headers():
def test_response_update_statusline():
r = http.Response()
r.status_line = 'HTTP/1.1 200 OK'
r.start_line = 'HTTP/1.1 200 OK'
assert r.version == 'HTTP/1.1'
assert r.response_code == 200
assert r.response_text == 'OK'
@ -1002,7 +1077,7 @@ def test_response_update_statusline():
def test_response_update_headers():
r = http.Response()
r.status_line = 'HTTP/1.1 200 OK'
r.start_line = 'HTTP/1.1 200 OK'
r.headers['Test-Header'] = 'Test Value'
r.headers['Other-Header'] = 'Other Value'
@ -1018,9 +1093,9 @@ def test_response_update_headers():
def test_response_update_modified_headers():
r = http.Response()
r.status_line = 'HTTP/1.1 200 OK'
r.start_line = 'HTTP/1.1 200 OK'
r.headers['content-length'] = '500'
r.raw_data = 'AAAA'
r.body = 'AAAA'
assert r.full_response == ('HTTP/1.1 200 OK\r\n'
'content-length: 4\r\n\r\n'
'AAAA')
@ -1028,7 +1103,7 @@ def test_response_update_modified_headers():
def test_response_update_cookies():
r = http.Response()
r.status_line = 'HTTP/1.1 200 OK'
r.start_line = 'HTTP/1.1 200 OK'
# Test by adding headers
r.headers['Set-Cookie'] = 'abc=123'
assert r.full_response == ('HTTP/1.1 200 OK\r\n'
@ -1042,7 +1117,7 @@ def test_response_update_cookies():
assert r.cookies['abc'].val == '456'
r = http.Response()
r.status_line = 'HTTP/1.1 200 OK'
r.start_line = 'HTTP/1.1 200 OK'
# Test by adding cookie objects
c = http.ResponseCookie('abc=123; secure')
r.cookies['abc'] = c
@ -1060,10 +1135,10 @@ def test_response_update_content_length():
def test_response_to_json():
rsp = http.Response()
rsp.status_line = 'HTTP/1.1 200 OK'
rsp.start_line = 'HTTP/1.1 200 OK'
rsp.rspid = 2
expected_reqdata = {'full_response': base64.b64encode(rsp.full_response),
expected_reqdata = {'full_message': base64.b64encode(rsp.full_response),
'rspid': rsp.rspid,
#'tag': r.tag,
}

View file

@ -6,7 +6,6 @@ import twisted.test
from pappyproxy import http
from pappyproxy import macros
from pappyproxy import mangle
from pappyproxy import config
from pappyproxy.proxy import ProxyClient, ProxyClientFactory, ProxyServerFactory
from testutil import mock_deferred, func_deleted, func_ignored_deferred, func_ignored, no_tcp
@ -18,7 +17,7 @@ from twisted.internet import defer, reactor
## Fixtures
MANGLED_REQ = 'GET /mangled HTTP/1.1\r\n\r\n'
MANGLED_RSP = 'HTTP/1.1 500 MANGLED\r\n\r\n'
MANGLED_RSP = 'HTTP/1.1 500 MANGLED\r\nContent-Length: 0\r\n\r\n'
@pytest.fixture
def unconnected_proxyserver(mocker):
@ -140,25 +139,25 @@ def gen_mangle_macro(modified_req=None, modified_rsp=None,
macro = mock.MagicMock()
if modified_req or drop_req:
macro.async_req = True
macro.do_req = True
macro.intercept_requests = True
if drop_req:
newreq = None
else:
newreq = http.Request(modified_req)
macro.async_mangle_request.return_value = mock_deferred(newreq)
else:
macro.do_req = False
macro.intercept_requests = False
if modified_rsp or drop_rsp:
macro.async_rsp = True
macro.do_rsp = True
macro.intercept_responses = True
if drop_rsp:
newrsp = None
else:
newrsp = http.Response(modified_rsp)
macro.async_mangle_response.return_value = mock_deferred(newrsp)
else:
macro.do_rsp = False
macro.intercept_responses = False
return macro
def notouch_mangle_req(request):
@ -255,7 +254,7 @@ def test_proxy_client_mangle_rsp(mocker, proxy_connection, in_scope_true):
prot.lineReceived('')
req = yield retreq_deferred
response = req.response.full_response
assert response == 'HTTP/1.1 500 MANGLED\r\n\r\n'
assert response == 'HTTP/1.1 500 MANGLED\r\nContent-Length: 0\r\n\r\n'
@pytest.inlineCallbacks
def test_proxy_drop_req(mocker, proxy_connection, in_scope_true):

View file

@ -5,13 +5,13 @@ from pappyproxy.http import Request, Response, ResponseCookie
@pytest.fixture
def req():
r = Request()
r.status_line = 'GET / HTTP/1.1'
r.start_line = 'GET / HTTP/1.1'
return r
@pytest.fixture
def rsp():
r = Response()
r.status_line = 'HTTP/1.1 200 OK'
r.start_line = 'HTTP/1.1 200 OK'
return r
def test_session_basic(req, rsp):
@ -96,7 +96,7 @@ def test_session_mixed(req, rsp):
assert 'auth' not in rsp.headers
r = Response()
r.status_line = 'HTTP/1.1 200 OK'
r.start_line = 'HTTP/1.1 200 OK'
r.set_cookie(ResponseCookie('state=bazzers'))
r.set_cookie(ResponseCookie('session=buzzers'))
s.get_rsp(r)

View file

@ -1,3 +1,24 @@
import string
class PappyException(Exception):
"""
The exception class for Pappy. If a plugin command raises one of these, the
message will be printed to the console instead of a traceback.
"""
pass
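A hedged sketch of how a plugin command might use it (the command and its check are made up):
def run_mycommand(line):
    if not line:
        raise PappyException('Usage: mycommand <reqid>')  # printed, no traceback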
def printable_data(data):
"""
Return ``data`` with any unprintable characters replaced by periods.
:param data: The data to make printable
:type data: String
:rtype: String
"""
chars = []
for c in data:
if c in string.printable:
chars += c
else:
chars += '.'
return ''.join(chars)
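For example, each of the three unprintable bytes becomes a period:
>>> printable_data('GET /\x00\x01\xff HTTP/1.1')
'GET /... HTTP/1.1'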