Version 0.2.6

Branch: master
Author: Rob Glew, 9 years ago
Parent: 10f30bfb47
Commit: aa81432556
Changed files (lines changed):

  MANIFEST.in (1)
  README.md (7)
  docs/source/conf.py (4)
  pappyproxy/.coveragerc (2)
  pappyproxy/comm.py (2)
  pappyproxy/console.py (3)
  pappyproxy/context.py (6)
  pappyproxy/http.py (126)
  pappyproxy/macros.py (4)
  pappyproxy/pappy.py (9)
  pappyproxy/plugins/manglecmds.py (2)
  pappyproxy/plugins/misc.py (63)
  pappyproxy/plugins/tagcmds.py (6)
  pappyproxy/plugins/view.py (29)
  pappyproxy/plugins/vim_repeater/repeater.py (2)
  pappyproxy/proxy.py (51)
  pappyproxy/templates/intmacro.py.template (17)
  pappyproxy/templates/macro.py.template (35)
  pappyproxy/tests/test_http.py (23)
  pappyproxy/util.py (4)
  setup.py (2)

@@ -4,4 +4,5 @@ recursive-include pappyproxy *.json
recursive-include pappyproxy *.py
recursive-include pappyproxy *.vim
recursive-include pappyproxy *.txt
recursive-include pappyproxy *.template
include docs/source/overview.rst

@@ -229,11 +229,13 @@ The following commands can be used to view requests and responses
| `vhs <id(s)>` | view_response_headers, vhs | [V]iew [H]eaders of a Re[S]ponse. Prints just the headers of a response associated with a request. |
| `vbs <id(s)>` | view_response_bytes, vbs | [V]iew [B]ytes of Re[S]ponse, prints the full response including headers and data without coloring or additional newlines. Use this if you want to write a response to a file. |
| `pps <id(s)> [format]` | pretty_print_response, pps | Pretty print a response. If a format is given, it will try to print the body of the response in that format. Otherwise it will guess the format based on the Content-Type header. |
| `watch` | watch | Print requests and responses in real time as they pass through the proxy. |

Available formats for `ppq` and `pps` commands:

| Format | Description |
|:-------|:------------|
| `form` | Print POST data submitted from a form (normal post data) |
| `json` | Print as JSON |

The table shown by `ls` will have the following columns:
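For a sense of what the `form` format produces: it prints one URL-decoded key/value pair per line (hypothetical POST data; keys are colored in the actual terminal output):

```
username: admin
password: hunter2
```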
@@ -952,6 +954,11 @@ Changelog
---------
The boring part of the readme

* 0.2.6
  * Fix pip being dumb
  * `watch` command to watch requests/responses in real time
  * Added `pp[qs] form <id>` to print POST data
  * Bugfixes
* 0.2.5
  * Requests sent with repeater now are given `repeater` tag
  * Add ppq and pps commands

@@ -59,9 +59,9 @@ author = u'Rob Glew'
# built documents.
#
# The short X.Y version.
version = u'0.2.5'
version = u'0.2.6'
# The full version, including alpha/beta/rc tags.
release = u'0.2.5'
release = u'0.2.6'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.

@@ -1,3 +1,3 @@
[run]
omit = tests/*, schema/*, console.py, vim_repeater/*
omit = tests/*, schema/*, plugins/*, templates/*

@@ -100,7 +100,7 @@ class CommServer(LineReceiver):
        message = base64.b64decode(data['full_message'])
        req = yield Request.submit_new(data['host'], data['port'], data['is_ssl'], message)
        if 'tags' in data:
            req.tags = data['tags']
            req.tags = set(data['tags'])
        yield req.async_deep_save()
        retdata = {}

@@ -262,7 +262,8 @@ class ProxyCmd(cmd2.Cmd):
    """
    def __init__(self, *args, **kwargs):
        self.prompt = 'pappy' + Colors.YELLOW + '> ' + Colors.ENDC
        # the \x01/\x02 are to make the prompt behave properly with the readline library
        self.prompt = 'pappy\x01' + Colors.YELLOW + '\x02> \x01' + Colors.ENDC + '\x02'
        self.debug = True
        self._cmds = {}
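The `\x01`/`\x02` bytes are readline's RL_PROMPT_START_IGNORE and RL_PROMPT_END_IGNORE markers: anything between them is excluded from the prompt-width calculation, so zero-width color escapes no longer break cursor positioning during history editing and line wrapping. A minimal standalone sketch of the pattern (the `readline_safe` helper is illustrative, not pappyproxy code):

```python
# Wrap zero-width terminal escapes in \x01/\x02 so readline ignores them
# when measuring the prompt; without the markers, long lines and history
# navigation misplace the cursor.
def readline_safe(escape_code):
    return '\x01' + escape_code + '\x02'

YELLOW = '\x1b[33m'
ENDC = '\x1b[0m'
prompt = 'pappy' + readline_safe(YELLOW) + '> ' + readline_safe(ENDC)
```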

@@ -446,7 +446,7 @@ def gen_filter_by_headers(args):
        if args[0][0] == 'n':
            return comparer(req.headers) and (not req.response or comparer(req.response.headers))
        else:
            return comparer(req.headers) and (req.response and comparer(req.response.headers))
            return comparer(req.headers) or (req.response and comparer(req.response.headers))
    return f

def gen_filter_by_submitted_cookies(args):
@@ -576,7 +576,7 @@ def clear_tag(tag):
    # Remove a tag from every request
    reqs = yield Request.cache.load_by_tag(tag)
    for req in reqs:
        req.tags.remove(tag)
        req.tags.discard(tag)
        if req.saved:
            yield req.async_save()
    reset_context_caches()
@@ -595,7 +595,7 @@ def async_set_tag(tag, reqs):
    """
    yield clear_tag(tag)
    for req in reqs:
        req.tags.append(tag)
        req.tags.add(tag)
        Request.cache.add(req)
    reset_context_caches()
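Tags move from lists to sets throughout this commit (here, in comm.py, tagcmds.py, and http.py). A quick standalone illustration of why the set API fits better:

```python
# Set semantics make tagging idempotent and untagging safe:
tags = set()
tags.add('repeater')
tags.add('repeater')     # still one tag; list.append would duplicate it
tags.discard('missing')  # no-op; list.remove would raise ValueError
assert tags == {'repeater'}
```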

@@ -14,6 +14,7 @@ import weakref
from .util import PappyException, printable_data
from .requestcache import RequestCache
from .colors import Colors, host_color, path_formatter
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_for_mimetype, HttpLexer
from twisted.internet import defer, reactor
@@ -747,8 +748,12 @@ class HTTPMessage(object):
                self.handle_start_line(line)
                self._first_line = False
            else:
                key, val = line.split(':', 1)
                val = val.strip()
                if ':' in line:
                    key, val = line.split(':', 1)
                    val = val.strip()
                else:
                    key = line
                    val = None
                if self.handle_header(key, val):
                    self.headers.append(key, val, do_callback=False)
@@ -784,6 +789,8 @@ class HTTPMessage(object):
        :param key: Header value
        :type line: string
        """
        if val is None:
            return True
        stripped = False
        if key.lower() == 'content-encoding':
            if val in ('gzip', 'x-gzip'):
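The parsing change above makes the message parser tolerate header lines with no colon: previously the two-value unpack of `line.split(':', 1)` raised a `ValueError` on such lines; now the raw line becomes the key with a `None` value, and `handle_header` returns early for it. The rule in isolation:

```python
# Standalone sketch of the new parse rule: a malformed header line without
# a colon becomes (line, None) instead of crashing the unpack of split(':', 1).
def parse_header_line(line):
    if ':' in line:
        key, val = line.split(':', 1)
        return key, val.strip()
    return line, None

assert parse_header_line('a: b') == ('a', 'b')
assert parse_header_line('Content-Encoding') == ('Content-Encoding', None)
```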
@@ -1104,29 +1111,39 @@ class Request(HTTPMessage):
    @raw_data.setter
    def raw_data(self, val):
        self.body = val

    @property
    def url(self):
        """
        The full url of the request including url params, protocol, etc.
        ie `https://www.google.com`, `http://foo.fakewebsite.com:1234/path?a=b`.
        When setting the URL, the port, is_ssl, path, url params, host, etc are all
        automatically updated.

        :getter: Returns the url of the request
        :setter: Sets the url of the request and updates metadata
        :type: string
        """
    def _url_helper(self, colored=False):
        retstr = ''
        if self.is_ssl:
            retstr = 'https://'
            retstr += 'https://'
        else:
            retstr = 'http://'
        retstr += self.host
            if colored:
                retstr += Colors.RED
                retstr += 'http'
                retstr += Colors.ENDC
                retstr += '://'
            else:
                retstr += 'http://'
        if colored:
            retstr += host_color(self.host)
            retstr += self.host
            retstr += Colors.ENDC
        else:
            retstr += self.host
        if not ((self.is_ssl and self.port == 443) or \
                (not self.is_ssl and self.port == 80)):
            retstr += ':%d' % self.port
            if colored:
                retstr += ':'
                retstr += Colors.MAGENTA
                retstr += str(self.port)
                retstr += Colors.ENDC
            else:
                retstr += ':%d' % self.port
        if self.path and self.path != '/':
            retstr += self.path
            if colored:
                retstr += path_formatter(self.path)
            else:
                retstr += self.path
        if self.url_params:
            retstr += '?'
            pairs = []
@@ -1136,6 +1153,30 @@ class Request(HTTPMessage):
        if self.fragment:
            retstr += '#%s' % self.fragment
        return retstr

    @property
    def url(self):
        """
        The full url of the request including url params, protocol, etc.
        ie `https://www.google.com`, `http://foo.fakewebsite.com:1234/path?a=b`.
        When setting the URL, the port, is_ssl, path, url params, host, etc are all
        automatically updated.

        :getter: Returns the url of the request
        :setter: Sets the url of the request and updates metadata
        :type: string
        """
        return self._url_helper(False)

    @property
    def url_color(self):
        """
        same as .url, except colored. Used for printing URLs to the terminal.

        :getter: Returns the url of the request
        :type: string
        """
        return self._url_helper(True)

    @url.setter
    def url(self, val):
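Both `url` and the new `url_color` now delegate to `_url_helper`, which differs only in whether color escapes are interleaved. One detail the helper preserves from the old implementation is the port rule, isolated below:

```python
# The port is printed only when it isn't the scheme default (443 for
# https, 80 for http), mirroring the condition in _url_helper.
def needs_port(is_ssl, port):
    return not ((is_ssl and port == 443) or (not is_ssl and port == 80))

assert needs_port(True, 443) is False
assert needs_port(False, 80) is False
assert needs_port(False, 8000) is True
```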
@@ -1235,7 +1276,7 @@ class Request(HTTPMessage):
        data['reqid'] = self.reqid
        if self.response:
            data['response_id'] = self.response.rspid
        data['tags'] = self.tags
        data['tags'] = list(self.tags)
        return data

    def set_metadata(self, data):
def set_metadata(self, data):
@@ -1248,14 +1289,14 @@ class Request(HTTPMessage):
        if 'port' in data:
            self.port = data['port']
        if 'tags' in data:
            self.tags = data['tags']
            self.tags = set(data['tags'])

    def reset_metadata(self):
        self.port = 80
        self.is_ssl = False
        self.reqid = None
        self._host = ''
        self.tags = []
        self.tags = set()

    def get_plugin_dict(self, name):
        """
@@ -1321,6 +1362,7 @@ class Request(HTTPMessage):
        pairs = []
        for k, v in self.post_params.all_pairs():
            pairs.append('%s=%s' % (k, v))
        self.headers['Content-Type'] = 'application/x-www-form-urlencoded'
        self.body = '&'.join(pairs)

    def update_from_headers(self):
@@ -1410,6 +1452,8 @@ class Request(HTTPMessage):
    def handle_header(self, key, val):
        # We may have duplicate headers
        if val is None:
            return True
        keep = HTTPMessage.handle_header(self, key, val)
        if not keep:
            return False
@@ -1451,6 +1495,17 @@ class Request(HTTPMessage):
    #######################
    ## Data store functions

    def save_in_mem(self, cust_cache=None):
        if cust_cache:
            use_cache = cust_cache
        else:
            use_cache = Request.cache
        if not self.reqid:
            print 'adding'
            use_cache.add(self)
        else:
            print 'else adding'

    @defer.inlineCallbacks
    def async_save(self, cust_dbpool=None, cust_cache=None):
        """
@@ -1785,9 +1840,9 @@ class Request(HTTPMessage):
            """,
            (req.reqid,)
        )
        req.tags = []
        req.tags = set()
        for row in rows:
            req.tags.append(row[0])
            req.tags.add(row[0])
        defer.returnValue(req)

    @staticmethod
@@ -2228,29 +2283,14 @@ class Response(HTTPMessage):
        self._end_after_headers = True

    def handle_header(self, key, val):
        if val is None:
            return True
        keep = HTTPMessage.handle_header(self, key, val)
        if not keep:
            return False

        stripped = False
        if key.lower() == 'content-encoding':
            if val in ('gzip', 'x-gzip'):
                self._encoding_type = ENCODE_GZIP
            elif val in ('deflate'):
                self._encoding_type = ENCODE_DEFLATE
            # We send our requests already decoded, so we don't want a header
            # saying it's encoded
            if self._encoding_type != ENCODE_NONE:
                stripped = True
        elif key.lower() == 'transfer-encoding' and val.lower() == 'chunked':
            self._data_obj = ChunkedData()
            self.complete = self._data_obj.complete
            stripped = True
        elif key.lower() == 'content-length':
            # We use our own content length
            self._data_obj = LengthData(int(val))
        elif key.lower() == 'set-cookie':
        if key.lower() == 'set-cookie':
            cookie = ResponseCookie(val)
            self.cookies.append(cookie.key, cookie, do_callback=False)

@@ -283,7 +283,7 @@ def macro_from_requests(reqs, short_name='', long_name=''):
    loader = FileSystemLoader(config.PAPPY_DIR+'/templates')
    env = Environment(loader=loader)
    template = env.get_template('macro.py')
    template = env.get_template('macro.py.template')
    return template.render(zip=zip, **subs)

def gen_imacro(short_name='', long_name=''):
@@ -298,6 +298,6 @@ def gen_imacro(short_name='', long_name=''):
    loader = FileSystemLoader(config.PAPPY_DIR+'/templates')
    env = Environment(loader=loader)
    template = env.get_template('intmacro.py')
    template = env.get_template('intmacro.py.template')
    return template.render(**subs)
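The templates now ship with a `.template` suffix (picked up by the new MANIFEST.in rule above), so both Jinja2 lookups change to match. A minimal standalone sketch of the renamed lookup, assuming a local `templates/` directory holding `intmacro.py.template` (pappyproxy itself uses `config.PAPPY_DIR+'/templates'`):

```python
# Minimal Jinja2 sketch of the renamed template lookup.
from jinja2 import Environment, FileSystemLoader

env = Environment(loader=FileSystemLoader('templates'))
template = env.get_template('intmacro.py.template')
print template.render(macro_name='demo_macro', short_name='dm')
```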

@@ -6,6 +6,7 @@ import datetime
import os
import schema.update
import shutil
import signal
import sys
import tempfile
@@ -16,6 +17,7 @@ from . import http
from . import plugin
from . import proxy
from . import requestcache
from . import util
from .console import ProxyCmd
from twisted.enterprise import adbapi
from twisted.internet import reactor, defer
@@ -61,6 +63,10 @@ def delete_datafile():
    print 'Deleting temporary datafile'
    os.remove(config.DATAFILE)

def custom_int_handler(signum, frame):
    # sorry
    print "Sorry, we can't kill things partway through otherwise the data file might be left in a corrupt state"

@defer.inlineCallbacks
def main():
    global server_factory
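`custom_int_handler` (together with the new `import signal`) exists to swallow Ctrl+C so an interrupt can't leave the data file half-written. The hunk only shows the handler itself; registering it would look roughly like this sketch (hedged, since the registration line isn't visible in this diff):

```python
# Hedged sketch: installing a SIGINT handler like custom_int_handler.
# Only the import and the handler body appear in the hunk above.
import signal

def custom_int_handler(signum, frame):
    print "Can't interrupt partway through, the data file could be corrupted"

signal.signal(signal.SIGINT, custom_int_handler)
```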
@@ -68,6 +74,9 @@ def main():
    global cons

    settings = parse_args()
    if not os.path.exists(config.DATA_DIR):
        os.makedirs(config.DATA_DIR)
    if settings['lite']:
        conf_settings = config.get_default_config()
        conf_settings['debug_dir'] = None

@@ -31,7 +31,7 @@ class MangleInterceptMacro(InterceptMacro):
        self.async_rsp = True

    def __repr__(self):
        return "<MangleInterceptingMacro>" % self.name
        return "<MangleInterceptingMacro>"

    @defer.inlineCallbacks
    def async_mangle_request(self, request):

@@ -2,13 +2,61 @@ import crochet
import pappyproxy
import shlex

from pappyproxy.colors import Colors, Styles, path_formatter, host_color, scode_color, verb_color
from pappyproxy.console import confirm, load_reqlist, Capturing
from pappyproxy.util import PappyException, remove_color
from pappyproxy.macros import InterceptMacro
from pappyproxy.requestcache import RequestCache
from pappyproxy.pappy import cons
from pappyproxy.plugin import add_intercepting_macro, remove_intercepting_macro
from twisted.internet import defer
from twisted.enterprise import adbapi

class PrintStreamInterceptMacro(InterceptMacro):
    """
    Intercepting macro that prints requests and responses as they go through
    the proxy
    """

    def __init__(self):
        InterceptMacro.__init__(self)
        self.name = 'Pappy Interceptor Macro'
        self.intercept_requests = False
        self.intercept_responses = False
        self.async_req = False
        self.async_rsp = False

    def __repr__(self):
        return "<PrintStreamInterceptingMacro>"

    @staticmethod
    def _print_request(req):
        s = verb_color(req.verb)+'> '+req.verb+' '+Colors.ENDC
        s += req.url_color
        s += ', len=' + str(len(req.body))
        print s

    @staticmethod
    def _print_response(req):
        response_code = str(req.response.response_code) + \
            ' ' + req.response.response_text
        s = scode_color(response_code)
        s += '< '
        s += response_code
        s += Colors.ENDC
        s += ' '
        s += req.url_color
        s += ', len=' + str(len(req.response.body))
        print s

    def mangle_request(self, request):
        PrintStreamInterceptMacro._print_request(request)
        return request

    def mangle_response(self, request):
        PrintStreamInterceptMacro._print_response(request)
        return request.response

@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def clrmem(line):
@@ -115,6 +163,20 @@ def merge_datafile(line):
    finally:
        other_dbpool.close()

def watch_proxy(line):
    print 'Watching proxy... press ENTER to exit'
    macro = PrintStreamInterceptMacro()
    macro.intercept_requests = True
    macro.intercept_responses = True
    try:
        add_intercepting_macro('pappy_watch_proxy', macro)
        raw_input()
    finally:
        try:
            remove_intercepting_macro('pappy_watch_proxy')
        except PappyException:
            pass

def run_without_color(line):
    with Capturing() as output:
        cons.onecmd(line.strip())
@@ -129,6 +191,7 @@ def load_cmds(cmd):
        'log': (log, None),
        'merge': (merge_datafile, None),
        'nocolor': (run_without_color, None),
        'watch': (watch_proxy, None),
    })
    cmd.add_aliases([
        #('rpy', ''),
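Given `_print_request`/`_print_response` above, a `watch` session prints one line per message as traffic passes through the proxy, roughly like this (hypothetical host and lengths; colors omitted):

```
pappy> watch
Watching proxy... press ENTER to exit
> GET http://example.com/, len=0
< 200 OK http://example.com/, len=5120
```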

@@ -32,7 +32,7 @@ def tag(line):
    for reqid in reqids:
        req = yield Request.load_request(reqid)
        if tag not in req.tags:
            req.tags.append(tag)
            req.tags.add(tag)
            if req.saved:
                yield req.async_save()
        else:
@@ -64,7 +64,7 @@ def untag(line):
    for reqid in reqids:
        req = yield Request.load_request(reqid)
        if tag in req.tags:
            req.tags.remove(tag)
            req.tags.discard(tag)
            if req.saved:
                yield req.async_save()
    if ids:
@@ -84,7 +84,7 @@ def clrtag(line):
    for req in reqs:
        if req.tags:
            req.tags = []
            req.tags = set()
            print 'Tags cleared from request %s' % (req.reqid)
            if req.saved:
                yield req.async_save()

@@ -5,10 +5,11 @@ import pappyproxy
import pygments
import pprint
import shlex
import urllib
from pappyproxy.console import load_reqlist, print_table, print_request_rows, get_req_data_row
from pappyproxy.util import PappyException, utc2local
from pappyproxy.http import Request
from pappyproxy.http import Request, repeatable_parse_qs
from twisted.internet import defer
from pappyproxy.plugin import main_context_ids
from pappyproxy.colors import Colors, Styles, verb_color, scode_color, path_formatter, host_color
@@ -97,15 +98,25 @@ def print_tree(tree):
    _print_tree_helper(tree, 0, [])

def pretty_print_body(fmt, body):
    if fmt.lower() == 'json':
        try:
    try:
        if fmt.lower() == 'json':
            d = json.loads(body.strip())
        except:
            raise PappyException('Body could not be parsed as JSON')
        s = json.dumps(d, indent=4, sort_keys=True)
        print pygments.highlight(s, JsonLexer(), TerminalFormatter())
    else:
        raise PappyException('%s is not a valid format' % fmt)
            s = json.dumps(d, indent=4, sort_keys=True)
            print pygments.highlight(s, JsonLexer(), TerminalFormatter())
        elif fmt.lower() == 'form':
            qs = repeatable_parse_qs(body)
            for k, v in qs.all_pairs():
                s = Colors.GREEN
                s += '%s: ' % urllib.unquote(k)
                s += Colors.ENDC
                s += urllib.unquote(v)
                print s
        else:
            raise PappyException('"%s" is not a valid format' % fmt)
    except PappyException as e:
        raise e
    except:
        raise PappyException('Body could not be parsed as "%s"' % fmt)

def _get_tree_prefix(depth, print_bars, last):
    if depth == 0:
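The new `form` branch URL-decodes each key/value pair before printing it. The same logic as a standalone Python 2 sketch, using a plain split instead of pappyproxy's `repeatable_parse_qs` (which preserves ordering and duplicate keys) and omitting the colors:

```python
# Standalone sketch of the 'form' pretty-printer.
import urllib

def print_form(body):
    for pair in body.split('&'):
        k, _, v = pair.partition('=')
        print '%s: %s' % (urllib.unquote(k), urllib.unquote(v))

print_form('user=admin&msg=hello%20world')  # user: admin / msg: hello world
```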

@@ -119,7 +119,7 @@ def submit_current_buffer():
    full_request = '\n'.join(curbuf)
    commdata = {'action': 'submit',
                'full_message': base64.b64encode(full_request),
                'tags': ['repeater'],
                'tags': {'repeater'},
                'port': int(vim.eval("s:repport")),
                'host': vim.eval("s:rephost")}
    if vim.eval("s:repisssl") == '1':

@@ -35,26 +35,27 @@ def remove_intercepting_macro(key, int_macro_dict):
    del int_macro_dict[key]

def log(message, id=None, symbol='*', verbosity_level=1):
    if config.DEBUG_TO_FILE and not os.path.exists(config.DEBUG_DIR):
        os.makedirs(config.DEBUG_DIR)
    if id:
        debug_str = '[%s](%d) %s' % (symbol, id, message)
        if config.DEBUG_TO_FILE:
            with open(config.DEBUG_DIR+'/connection_%d.log' % id, 'a') as f:
                f.write(debug_str+'\n')
    else:
        debug_str = '[%s] %s' % (symbol, message)
        if config.DEBUG_TO_FILE:
            with open(config.DEBUG_DIR+'/debug.log', 'a') as f:
                f.write(debug_str+'\n')
    if config.DEBUG_VERBOSITY >= verbosity_level:
        print debug_str
    if config.DEBUG_TO_FILE or config.DEBUG_VERBOSITY > 0:
        if config.DEBUG_TO_FILE and not os.path.exists(config.DEBUG_DIR):
            os.makedirs(config.DEBUG_DIR)
        if id:
            debug_str = '[%s](%d) %s' % (symbol, id, message)
            if config.DEBUG_TO_FILE:
                with open(config.DEBUG_DIR+'/connection_%d.log' % id, 'a') as f:
                    f.write(debug_str+'\n')
        else:
            debug_str = '[%s] %s' % (symbol, message)
            if config.DEBUG_TO_FILE:
                with open(config.DEBUG_DIR+'/debug.log', 'a') as f:
                    f.write(debug_str+'\n')
        if config.DEBUG_VERBOSITY >= verbosity_level:
            print debug_str

def log_request(request, id=None, symbol='*', verbosity_level=3):
    r_split = request.split('\r\n')
    for l in r_split:
        log(l, id, symbol, verbosity_level)
    if config.DEBUG_TO_FILE or config.DEBUG_VERBOSITY > 0:
        r_split = request.split('\r\n')
        for l in r_split:
            log(l, id, symbol, verbosity_level)

class ClientTLSContext(ssl.ClientContextFactory):
    isClient = 1
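The rewritten `log`/`log_request` wrap all of their work in a `config.DEBUG_TO_FILE or config.DEBUG_VERBOSITY > 0` guard, so in the common case (debugging off) the proxy no longer pays for formatting and line-splitting every message. The pattern in isolation, with stand-in flags rather than pappyproxy's config module:

```python
# Guard-before-format: skip the string work entirely when no log sink
# will consume it.
DEBUG_TO_FILE = False
DEBUG_VERBOSITY = 0

def log_request(request):
    if DEBUG_TO_FILE or DEBUG_VERBOSITY > 0:  # cheap check first
        for l in request.split('\r\n'):       # expensive work only if needed
            print l
```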
@@ -102,10 +103,11 @@ class ProxyClient(LineReceiver):
            self.factory.return_transport.write(data)
        if not self._response_obj.complete:
            if data:
                s = printable_data(data)
                dlines = s.split('\n')
                for l in dlines:
                    self.log(l, symbol='<rd', verbosity_level=3)
                if config.DEBUG_TO_FILE or config.DEBUG_VERBOSITY > 0:
                    s = printable_data(data)
                    dlines = s.split('\n')
                    for l in dlines:
                        self.log(l, symbol='<rd', verbosity_level=3)
            self._response_obj.add_data(data)

        if self._response_obj.complete:
@@ -214,7 +216,8 @@ class ProxyClientFactory(ClientFactory):
    @defer.inlineCallbacks
    def return_request_pair(self, request):
        self.end_time = datetime.datetime.utcnow()
        log_request(printable_data(request.response.full_response), id=self.connection_id, symbol='<m', verbosity_level=3)
        if config.DEBUG_TO_FILE or config.DEBUG_VERBOSITY > 0:
            log_request(printable_data(request.response.full_response), id=self.connection_id, symbol='<m', verbosity_level=3)

        request.time_start = self.start_time
        request.time_end = self.end_time
@@ -253,7 +256,7 @@ class ProxyClientFactory(ClientFactory):
            if self.save_all:
                yield request.async_deep_save()

            if request.response:
            if request.response and (config.DEBUG_TO_FILE or config.DEBUG_VERBOSITY > 0):
                log_request(printable_data(request.response.full_response),
                            id=self.connection_id, symbol='<', verbosity_level=3)
        else:

@@ -0,0 +1,17 @@
from pappyproxy.session import Session

MACRO_NAME = '{{macro_name}}'
SHORT_NAME = '{{short_name}}'

runargs = []

def init(args):
    global runargs
    runargs = args

def mangle_request(request):
    global runargs
    return request

def mangle_response(request):
    global runargs
    return request.response

@@ -0,0 +1,35 @@
from pappyproxy.http import Request, get_request, post_request, request_by_id
from pappyproxy.context import set_tag
from pappyproxy.iter import *

## Iterator cheat sheet:
# fuzz_path_trav() - Values for fuzzing path traversal
# fuzz_sqli() - Values for fuzzing SQLi
# fuzz_xss() - Values for fuzzing XSS
# common_passwords() - Common passwords
# common_usernames() - Common usernames
# fuzz_dirs() - Common web paths (ie /wp-admin)

MACRO_NAME = '{{macro_name}}'
SHORT_NAME = '{{short_name}}'

###########
## Requests
# It's suggested that you call .copy() on these and then edit attributes
# as needed to create modified requests
##
{% set count = 1 %}{% for params, lines in zip(req_params, req_lines) %}
req{{ count }} = Request(({% for line in lines %}
    '{{ line }}'{% endfor %}{% set count = count+1 %}
){{ params }})
{% endfor %}

def run_macro(args):
    # Example:
    # req = req1.copy() # Copy req1
    # req.submit() # Submit the request to get a response
    # print req.response.raw_headers # print the response headers
    # req.save() # save the request to the data file
    # or copy req1 into a loop and use string substitution to automate requests
    pass
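Rendered, the request section of this template expands to one `Request(...)` per request passed to `macro_from_requests`. A hand-written equivalent of a generated file, following the template's own example comments (hypothetical request data; the constructor params the template appends are omitted):

```python
# What a generated macro roughly looks like after rendering.
from pappyproxy.http import Request

req1 = Request((
    'GET / HTTP/1.1\r\n'
    'Host: example.com\r\n'
    '\r\n'
))

def run_macro(args):
    req = req1.copy()                # copy req1, then edit attributes as needed
    req.submit()                     # submit the request to get a response
    print req.response.raw_headers   # print the response headers
```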

@@ -443,6 +443,18 @@ def test_message_build_chunked():
           'a: b\r\n'
           'Content-Length: 100\r\n\r\n')
    assert m.full_message == raw

def test_message_badheader():
    raw = ('startline\r\n'
           'a: b\r\n'
           'Content-Encoding\r\n'
           'd: e\r\n'
           'f:g\r\n'
           '\r\n')
    m = http.HTTPMessage(raw)
    assert m.headers['a'] == 'b'
    assert m.headers['content-encoding'] is None
    assert m.headers['f'] == 'g'
####################
## Request tests
@@ -723,7 +735,7 @@ def test_request_to_json():
    r = http.Request()
    r.start_line = 'GET / HTTP/1.1'
    r.headers['content-length'] = 500
    r.tags = ['foo', 'bar']
    r.tags = {'foo', 'bar'}
    r.body = 'AAAA'
    r.reqid = '1'
@@ -863,6 +875,15 @@ def test_request_modify_header2():
                               'Connection: keep-alive\r\n'
                               '\r\n'
                               'a|b|c|d')

    r2.post_params['foo'] = 'barr'
    assert r2.full_message == ('POST /some/path HTTP/1.1\r\n'
                               'Host: test.host.thing\r\n'
                               'User-Agent: Moziller/6.9\r\n'
                               'Content-Length: 8\r\n'
                               'Connection: keep-alive\r\n'
                               'Content-Type: application/x-www-form-urlencoded\r\n'
                               '\r\n'
                               'foo=barr')
####################

@@ -21,9 +21,9 @@ def printable_data(data):
    chars = []
    for c in data:
        if c in string.printable:
            chars += c
            chars.append(c)
        else:
            chars += '.'
            chars.append('.')
    return ''.join(chars)

def remove_color(s):
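For single-character strings the two spellings are equivalent (`list += str` extends the list with each character of the string), so this is an idiom fix rather than a behavior change:

```python
# += on a list extends element-wise; for one-character strings that matches
# append, but append states the intent and avoids surprises with longer strings.
chars = []
chars += 'a'         # ['a']
chars.append('b')    # ['a', 'b']
chars += 'cd'        # ['a', 'b', 'c', 'd']  <- the surprise append avoids
assert chars == ['a', 'b', 'c', 'd']
```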

@@ -3,7 +3,7 @@
import pkgutil
from setuptools import setup, find_packages
VERSION = '0.2.5'
VERSION = '0.2.6'
setup(name='pappyproxy',
version=VERSION,
