Version 0.2.6

Rob Glew 2016-02-05 10:49:58 -06:00
parent 10f30bfb47
commit aa81432556
21 changed files with 304 additions and 96 deletions

View file

@@ -31,7 +31,7 @@ class MangleInterceptMacro(InterceptMacro):
         self.async_rsp = True
 
     def __repr__(self):
-        return "<MangleInterceptingMacro>" % self.name
+        return "<MangleInterceptingMacro>"
 
     @defer.inlineCallbacks
     def async_mangle_request(self, request):
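The removed __repr__ line was a latent crash, not just dead code: applying % to a string that contains no conversion specifier raises a TypeError as soon as the repr is taken. A quick illustration:

    # Reproduces the bug fixed above: '%' on a string with no
    # format specifier rejects its argument at call time.
    try:
        s = "<MangleInterceptingMacro>" % 'some name'
    except TypeError as e:
        print e  # not all arguments converted during string formatting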

View file

@@ -2,13 +2,61 @@ import crochet
 import pappyproxy
 import shlex
+from pappyproxy.colors import Colors, Styles, path_formatter, host_color, scode_color, verb_color
 from pappyproxy.console import confirm, load_reqlist, Capturing
 from pappyproxy.util import PappyException, remove_color
+from pappyproxy.macros import InterceptMacro
 from pappyproxy.requestcache import RequestCache
 from pappyproxy.pappy import cons
+from pappyproxy.plugin import add_intercepting_macro, remove_intercepting_macro
 from twisted.internet import defer
 from twisted.enterprise import adbapi
 
+class PrintStreamInterceptMacro(InterceptMacro):
+    """
+    Intercepting macro that prints requests and responses as they go through
+    the proxy
+    """
+
+    def __init__(self):
+        InterceptMacro.__init__(self)
+        self.name = 'Pappy Interceptor Macro'
+        self.intercept_requests = False
+        self.intercept_responses = False
+        self.async_req = False
+        self.async_rsp = False
+
+    def __repr__(self):
+        return "<PrintStreamInterceptingMacro>"
+
+    @staticmethod
+    def _print_request(req):
+        s = verb_color(req.verb)+'> '+req.verb+' '+Colors.ENDC
+        s += req.url_color
+        s += ', len=' + str(len(req.body))
+        print s
+
+    @staticmethod
+    def _print_response(req):
+        response_code = str(req.response.response_code) + \
+            ' ' + req.response.response_text
+        s = scode_color(response_code)
+        s += '< '
+        s += response_code
+        s += Colors.ENDC
+        s += ' '
+        s += req.url_color
+        s += ', len=' + str(len(req.response.body))
+        print s
+
+    def mangle_request(self, request):
+        PrintStreamInterceptMacro._print_request(request)
+        return request
+
+    def mangle_response(self, request):
+        PrintStreamInterceptMacro._print_response(request)
+        return request.response
 
 @crochet.wait_for(timeout=None)
 @defer.inlineCallbacks
 def clrmem(line):
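The new macro's hooks are synchronous pass-throughs: each mangle_* prints a one-line summary and returns the message unchanged, so traffic keeps flowing while the user watches. A minimal self-contained sketch of that contract, with hypothetical stand-ins rather than the real pappyproxy API:

    # FakeRequest and EchoMacro are illustrative stand-ins, not
    # pappyproxy classes; they show the print-and-pass-through shape.
    class FakeRequest(object):
        def __init__(self, verb, url, body):
            self.verb = verb
            self.url = url
            self.body = body

    class EchoMacro(object):
        intercept_requests = True
        intercept_responses = False

        def mangle_request(self, request):
            print '> %s %s, len=%d' % (request.verb, request.url, len(request.body))
            return request  # returned unchanged, so the proxy forwards it as-is

    EchoMacro().mangle_request(FakeRequest('GET', 'http://example.com/', ''))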
@@ -115,6 +163,20 @@ def merge_datafile(line):
     finally:
         other_dbpool.close()
 
+def watch_proxy(line):
+    print 'Watching proxy... press ENTER to exit'
+    macro = PrintStreamInterceptMacro()
+    macro.intercept_requests = True
+    macro.intercept_responses = True
+    try:
+        add_intercepting_macro('pappy_watch_proxy', macro)
+        raw_input()
+    finally:
+        try:
+            remove_intercepting_macro('pappy_watch_proxy')
+        except PappyException:
+            pass
+
 def run_without_color(line):
     with Capturing() as output:
         cons.onecmd(line.strip())
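watch_proxy installs the macro, blocks on raw_input(), and uninstalls it in a finally block, so the stream stops even if the wait is interrupted; the inner try/except keeps cleanup from masking the original error when registration never succeeded. The same install/finally-uninstall shape, sketched with a plain dict standing in for the plugin registry (names are hypothetical):

    # Hypothetical registry illustrating the pattern used by watch_proxy.
    registry = {}

    def watch():
        registry['watch'] = 'macro'            # install
        try:
            raw_input('press ENTER to exit ')  # block until the user is done
        finally:
            registry.pop('watch', None)        # always uninstall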
@@ -129,6 +191,7 @@ def load_cmds(cmd):
         'log': (log, None),
         'merge': (merge_datafile, None),
         'nocolor': (run_without_color, None),
+        'watch': (watch_proxy, None),
     })
     cmd.add_aliases([
         #('rpy', ''),

View file

@@ -32,7 +32,7 @@ def tag(line):
     for reqid in reqids:
         req = yield Request.load_request(reqid)
         if tag not in req.tags:
-            req.tags.append(tag)
+            req.tags.add(tag)
             if req.saved:
                 yield req.async_save()
         else:
@@ -64,7 +64,7 @@ def untag(line):
     for reqid in reqids:
         req = yield Request.load_request(reqid)
         if tag in req.tags:
-            req.tags.remove(tag)
+            req.tags.discard(tag)
             if req.saved:
                 yield req.async_save()
     if ids:
@@ -84,7 +84,7 @@ def clrtag(line):
     for req in reqs:
         if req.tags:
-            req.tags = []
+            req.tags = set()
             print 'Tags cleared from request %s' % (req.reqid)
             if req.saved:
                 yield req.async_save()
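All three commands now treat req.tags as a set rather than a list. Beyond deduplicating, the set operations are idempotent, which the guards above lean on; a quick comparison:

    # set.add/discard are safe to repeat; the old list calls were not.
    tags = set()
    tags.add('login')
    tags.add('login')        # still one entry; list.append would duplicate
    tags.discard('missing')  # no-op; list.remove raises ValueError
    print tags               # set(['login'])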

View file

@@ -5,10 +5,11 @@ import pappyproxy
 import pygments
 import pprint
 import shlex
+import urllib
 from pappyproxy.console import load_reqlist, print_table, print_request_rows, get_req_data_row
 from pappyproxy.util import PappyException, utc2local
-from pappyproxy.http import Request
+from pappyproxy.http import Request, repeatable_parse_qs
 from twisted.internet import defer
 from pappyproxy.plugin import main_context_ids
 from pappyproxy.colors import Colors, Styles, verb_color, scode_color, path_formatter, host_color
@@ -97,15 +98,25 @@ def print_tree(tree):
     _print_tree_helper(tree, 0, [])
 
 def pretty_print_body(fmt, body):
-    if fmt.lower() == 'json':
-        try:
+    try:
+        if fmt.lower() == 'json':
             d = json.loads(body.strip())
-        except:
-            raise PappyException('Body could not be parsed as JSON')
-        s = json.dumps(d, indent=4, sort_keys=True)
-        print pygments.highlight(s, JsonLexer(), TerminalFormatter())
-    else:
-        raise PappyException('%s is not a valid format' % fmt)
+            s = json.dumps(d, indent=4, sort_keys=True)
+            print pygments.highlight(s, JsonLexer(), TerminalFormatter())
+        elif fmt.lower() == 'form':
+            qs = repeatable_parse_qs(body)
+            for k, v in qs.all_pairs():
+                s = Colors.GREEN
+                s += '%s: ' % urllib.unquote(k)
+                s += Colors.ENDC
+                s += urllib.unquote(v)
+                print s
+        else:
+            raise PappyException('"%s" is not a valid format' % fmt)
+    except PappyException as e:
+        raise e
+    except:
+        raise PappyException('Body could not be parsed as "%s"' % fmt)
 
 def _get_tree_prefix(depth, print_bars, last):
     if depth == 0:
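The refactor hoists the try around the whole format dispatch, so any parse failure reports the requested format, and the new 'form' branch prints URL-decoded key/value pairs. A standalone sketch of that branch using only the stdlib, with urlparse.parse_qsl standing in for pappyproxy's repeatable_parse_qs (parse_qsl already URL-decodes, so no separate urllib.unquote step is needed here):

    import urlparse

    body = 'user=alice&color=blue%20green&color=red'
    for k, v in urlparse.parse_qsl(body, keep_blank_values=True):
        print '%s: %s' % (k, v)  # repeated keys survive: color appears twice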

View file

@@ -119,7 +119,7 @@ def submit_current_buffer():
     full_request = '\n'.join(curbuf)
     commdata = {'action': 'submit',
                 'full_message': base64.b64encode(full_request),
-                'tags': ['repeater'],
+                'tags': {'repeater'},
                 'port': int(vim.eval("s:repport")),
                 'host': vim.eval("s:rephost")}
     if vim.eval("s:repisssl") == '1':
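The vim repeater plugin follows the same tags-as-sets change: {'repeater'} is a set literal (Python 2.7+), equivalent to the older spelling set(['repeater']):

    print {'repeater'} == set(['repeater'])  # True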