Version 0.2.0
This commit is contained in:
parent
312b985229
commit
26376eaaec
43 changed files with 4699 additions and 2668 deletions
0
pappyproxy/plugins/__init__.py
Normal file
0
pappyproxy/plugins/__init__.py
Normal file
192
pappyproxy/plugins/filter.py
Normal file
192
pappyproxy/plugins/filter.py
Normal file
|
@ -0,0 +1,192 @@
|
|||
import crochet
|
||||
import pappyproxy
|
||||
|
||||
from pappyproxy.console import confirm
|
||||
from pappyproxy.util import PappyException
|
||||
from twisted.internet import defer
|
||||
|
||||
class BuiltinFilters(object):
    """Registry of predefined context filters.

    Maps a short name to a tuple of (list of filter strings,
    human-readable description).
    """
    _filters = {
        'not_image': (
            ['path nctr "(\.png$|\.jpg$|\.gif$)"'],
            'Filter out image requests',
        ),
        'not_jscss': (
            ['path nctr "(\.js$|\.css$)"'],
            'Filter out javascript and css files',
        ),
    }

    @staticmethod
    @defer.inlineCallbacks
    def get(name):
        """Return generated Filter objects for the named built-in.

        Raises PappyException if the name is unknown.
        Fixes from original: the duplicate membership check and the
        unreachable trailing raise are gone, and the error message
        matches the one used by help() (original said "bult in").
        """
        if name not in BuiltinFilters._filters:
            raise PappyException('"%s" is not a built-in filter' % name)
        filters = [pappyproxy.context.Filter(f) for f in BuiltinFilters._filters[name][0]]
        for f in filters:
            yield f.generate()
        defer.returnValue(filters)

    @staticmethod
    def list():
        """Return the names of all built-in filters."""
        return BuiltinFilters._filters.keys()

    @staticmethod
    def help(name):
        """Return the description string of the named built-in filter.

        Raises PappyException if the name is unknown.
        Fix: the original wrapped the prose description in a
        pappyproxy.context.Filter, which would try to parse the
        description as a filter string.
        """
        if name not in BuiltinFilters._filters:
            raise PappyException('"%s" is not a built-in filter' % name)
        return BuiltinFilters._filters[name][1]
|
||||
|
||||
|
||||
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def filtercmd(line):
    """
    Apply a filter to the current context
    Usage: filter <filter string>
    See README.md for information on filter strings
    """
    if not line:
        raise PappyException("Filter string required")

    # Parse the filter string, compile it, then attach it to the context.
    new_filter = pappyproxy.context.Filter(line)
    yield new_filter.generate()
    pappyproxy.pappy.main_context.add_filter(new_filter)
|
||||
|
||||
def complete_builtin_filter(text, line, begidx, endidx):
    # Tab-completion hook: offer every built-in filter name that matches
    # the typed prefix (all of them when nothing has been typed yet).
    names = BuiltinFilters.list()
    if text:
        return [name for name in names if name.startswith(text)]
    return list(names)
|
||||
|
||||
@crochet.wait_for(timeout=None)
|
||||
@defer.inlineCallbacks
|
||||
def builtin_filter(line):
|
||||
if not line:
|
||||
raise PappyException("Filter name required")
|
||||
|
||||
filters_to_add = yield BuiltinFilters.get(line)
|
||||
for f in filters_to_add:
|
||||
print f.filter_string
|
||||
pappyproxy.pappy.main_context.add_filter(f)
|
||||
defer.returnValue(None)
|
||||
|
||||
def filter_up(line):
    """
    Remove the last applied filter
    Usage: filter_up
    """
    # The context keeps the filter history, so just delegate.
    pappyproxy.pappy.main_context.filter_up()
|
||||
|
||||
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def filter_clear(line):
    """
    Reset the context so that it contains no filters (ignores scope)
    Usage: filter_clear
    """
    # Drop every active filter, then rebuild the context from storage.
    pappyproxy.pappy.main_context.active_filters = []
    yield pappyproxy.context.reload_from_storage()
|
||||
|
||||
def filter_list(line):
|
||||
"""
|
||||
Print the filters that make up the current context
|
||||
Usage: filter_list
|
||||
"""
|
||||
for f in pappyproxy.pappy.main_context.active_filters:
|
||||
print f.filter_string
|
||||
|
||||
|
||||
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def scope_save(line):
    """
    Set the scope to be the current context. Saved between launches
    Usage: scope_save
    """
    # Update the in-memory scope first, then persist it to the data file.
    pappyproxy.context.save_scope(pappyproxy.pappy.main_context)
    yield pappyproxy.context.store_scope(pappyproxy.http.dbpool)
|
||||
|
||||
def scope_reset(line):
    """
    Set the context to be the scope (view in-scope items)
    Usage: scope_reset
    """
    # Replace whatever filters are active with the saved scope filters.
    pappyproxy.context.reset_to_scope(pappyproxy.pappy.main_context)
|
||||
|
||||
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def scope_delete(line):
    """
    Delete the scope so that it contains all request/response pairs
    Usage: scope_delete
    """
    # An empty filter list means "everything is in scope"; persist it.
    pappyproxy.context.set_scope([])
    yield pappyproxy.context.store_scope(pappyproxy.http.dbpool)
|
||||
|
||||
def scope_list(line):
    """
    Print the filters that make up the scope
    Usage: scope_list
    """
    # Printing is handled entirely by the context module.
    pappyproxy.context.print_scope()
|
||||
|
||||
@crochet.wait_for(timeout=None)
|
||||
@defer.inlineCallbacks
|
||||
def filter_prune(line):
|
||||
"""
|
||||
Delete all out of context requests from the data file.
|
||||
CANNOT BE UNDONE!! Be careful!
|
||||
Usage: filter_prune
|
||||
"""
|
||||
# Delete filtered items from datafile
|
||||
print ''
|
||||
print 'Currently active filters:'
|
||||
for f in pappyproxy.pappy.main_context.active_filters:
|
||||
print '> %s' % f.filter_string
|
||||
|
||||
# We copy so that we're not removing items from a set we're iterating over
|
||||
reqs = list(pappyproxy.pappy.main_context.inactive_requests)
|
||||
act_reqs = list(pappyproxy.pappy.main_context.active_requests)
|
||||
message = 'This will delete %d/%d requests. You can NOT undo this!! Continue?' % (len(reqs), (len(reqs) + len(act_reqs)))
|
||||
if not confirm(message, 'n'):
|
||||
defer.returnValue(None)
|
||||
|
||||
for r in reqs:
|
||||
yield r.deep_delete()
|
||||
print 'Deleted %d requests' % len(reqs)
|
||||
defer.returnValue(None)
|
||||
|
||||
###############
|
||||
## Plugin hooks
|
||||
|
||||
def load_cmds(cmd):
    """Register the filter/scope commands and their aliases with the console."""
    cmd.set_cmds({
        'filter': (filtercmd, None),
        'builtin_filter': (builtin_filter, complete_builtin_filter),
        'filter_up': (filter_up, None),
        'filter_clear': (filter_clear, None),
        'filter_list': (filter_list, None),
        'scope_save': (scope_save, None),
        'scope_reset': (scope_reset, None),
        'scope_delete': (scope_delete, None),
        'scope_list': (scope_list, None),
        'filter_prune': (filter_prune, None),
    })
    # filter_prune, scope_delete and scope_save deliberately have no alias:
    # they are destructive or rarely used.
    cmd.add_aliases([
        ('scope_list', 'sls'),
        ('scope_reset', 'sr'),
        ('filter_list', 'fls'),
        ('filter_clear', 'fc'),
        ('filter_up', 'fu'),
        ('builtin_filter', 'fbi'),
        ('filter', 'f'),
        ('filter', 'fl'),
    ])
|
215
pappyproxy/plugins/macrocmds.py
Normal file
215
pappyproxy/plugins/macrocmds.py
Normal file
|
@ -0,0 +1,215 @@
|
|||
import crochet
|
||||
import pappyproxy
|
||||
import shlex
|
||||
|
||||
from pappyproxy.plugin import active_intercepting_macros, add_intercepting_macro, remove_intercepting_macro
|
||||
from pappyproxy.console import load_reqlist
|
||||
from pappyproxy.macros import load_macros, macro_from_requests, gen_imacro
|
||||
from pappyproxy.util import PappyException
|
||||
from twisted.internet import defer
|
||||
|
||||
loaded_macros = []
|
||||
loaded_int_macros = []
|
||||
macro_dict = {}
|
||||
int_macro_dict = {}
|
||||
|
||||
def load_macros_cmd(line):
|
||||
"""
|
||||
Load macros from a directory. By default loads macros in the current directory.
|
||||
Usage: load_macros [dir]
|
||||
"""
|
||||
global macro_dict
|
||||
global int_macro_dict
|
||||
global loaded_macros
|
||||
global loaded_int_macros
|
||||
|
||||
if line:
|
||||
load_dir = line
|
||||
else:
|
||||
load_dir = '.'
|
||||
(to_load, int_to_load) = load_macros(load_dir)
|
||||
if not to_load and not int_to_load:
|
||||
raise PappyException('No macros to load.')
|
||||
|
||||
macro_dict = {}
|
||||
loaded_macros = []
|
||||
int_macro_dict = {}
|
||||
loaded_int_macros = []
|
||||
|
||||
for macro in to_load:
|
||||
if macro.name in macro_dict:
|
||||
print 'Name conflict in %s! "%s" already in use, not loading.' % (macro.filename, macro.name)
|
||||
elif macro.short_name and macro.short_name in macro_dict:
|
||||
print 'Name conflict in %s! "%s" already in use, not loading.' % (macro.filename, macro.short_name)
|
||||
elif macro.file_name in macro_dict:
|
||||
print 'Name conflict in %s! "%s" already in use, not loading.' % (macro.filename, macro.file_name)
|
||||
else:
|
||||
macro_dict[macro.name] = macro
|
||||
macro_dict[macro.file_name] = macro
|
||||
if macro.short_name:
|
||||
macro_dict[macro.short_name] = macro
|
||||
loaded_macros.append(macro)
|
||||
print 'Loaded "%s"' % macro
|
||||
|
||||
for macro in int_to_load:
|
||||
if macro.name in int_macro_dict:
|
||||
print 'Name conflict in %s! "%s" already in use, not loading.' % (macro.filename, macro.name)
|
||||
elif macro.short_name and macro.short_name in int_macro_dict:
|
||||
print 'Name conflict in %s! "%s" already in use, not loading.' % (macro.filename, macro.short_name)
|
||||
elif macro.file_name in int_macro_dict:
|
||||
print 'Name conflict in %s! "%s" already in use, not loading.' % (macro.filename, macro.file_name)
|
||||
else:
|
||||
int_macro_dict[macro.name] = macro
|
||||
int_macro_dict[macro.file_name] = macro
|
||||
if macro.short_name:
|
||||
int_macro_dict[macro.short_name] = macro
|
||||
loaded_int_macros.append(macro)
|
||||
print 'Loaded "%s"' % macro
|
||||
|
||||
def run_macro(line):
    """
    Run a macro
    Usage: run_macro <macro name or macro short name>
    """
    global macro_dict
    global loaded_macros
    args = shlex.split(line)
    if not args:
        raise PappyException('You must give a macro to run. You can give its short name, or the name in the filename.')
    mname = args[0]
    if mname not in macro_dict:
        raise PappyException('%s not a loaded macro' % mname)
    # Everything after the macro name is forwarded as macro arguments.
    macro_dict[mname].execute(args[1:])
|
||||
|
||||
def run_int_macro(line):
|
||||
"""
|
||||
Activate an intercepting macro
|
||||
Usage: run_int_macro <macro name or macro short name>
|
||||
Macro can be stopped with stop_int_macro
|
||||
"""
|
||||
global int_macro_dict
|
||||
global loaded_int_macros
|
||||
args = shlex.split(line)
|
||||
if len(args) == 0:
|
||||
raise PappyException('You must give an intercepting macro to run. You can give its short name, or the name in the filename.')
|
||||
if args[0] not in int_macro_dict:
|
||||
raise PappyException('%s not a loaded intercepting macro' % line)
|
||||
macro = int_macro_dict[args[0]]
|
||||
macro.init(args[1:])
|
||||
add_intercepting_macro(macro.name, macro)
|
||||
print '"%s" started' % macro.name
|
||||
|
||||
def stop_int_macro(line):
|
||||
"""
|
||||
Stop a running intercepting macro
|
||||
Usage: stop_int_macro <macro name or macro short name>
|
||||
"""
|
||||
global int_macro_dict
|
||||
global loaded_int_macros
|
||||
if not line:
|
||||
raise PappyException('You must give an intercepting macro to run. You can give its short name, or the name in the filename.')
|
||||
if line not in int_macro_dict:
|
||||
raise PappyException('%s not a loaded intercepting macro' % line)
|
||||
macro = int_macro_dict[line]
|
||||
remove_intercepting_macro(macro.name)
|
||||
print '"%s" stopped' % macro.name
|
||||
|
||||
def list_int_macros(line):
|
||||
"""
|
||||
List all active/inactive intercepting macros
|
||||
"""
|
||||
global int_macro_dict
|
||||
global loaded_int_macros
|
||||
running = []
|
||||
not_running = []
|
||||
for macro in loaded_int_macros:
|
||||
if macro.name in active_intercepting_macros():
|
||||
running.append(macro)
|
||||
else:
|
||||
not_running.append(macro)
|
||||
|
||||
if not running and not not_running:
|
||||
print 'No loaded intercepting macros'
|
||||
|
||||
if running:
|
||||
print 'Active intercepting macros:'
|
||||
for m in running:
|
||||
print ' %s' % m
|
||||
|
||||
if not_running:
|
||||
print 'Inactive intercepting macros:'
|
||||
for m in not_running:
|
||||
print ' %s' % m
|
||||
|
||||
@crochet.wait_for(timeout=None)
|
||||
@defer.inlineCallbacks
|
||||
def generate_macro(line):
|
||||
"""
|
||||
Generate a macro script with request objects
|
||||
Usage: generate_macro <name> [reqs]
|
||||
"""
|
||||
if line == '':
|
||||
raise PappyException('Macro name is required')
|
||||
args = shlex.split(line)
|
||||
name = args[0]
|
||||
if len(args) > 1:
|
||||
reqs = yield load_reqlist(args[1])
|
||||
else:
|
||||
reqs = []
|
||||
script_str = macro_from_requests(reqs)
|
||||
fname = 'macro_%s.py' % name
|
||||
with open(fname, 'wc') as f:
|
||||
f.write(script_str)
|
||||
print 'Wrote script to %s' % fname
|
||||
|
||||
def generate_int_macro(line):
|
||||
"""
|
||||
Generate an intercepting macro script
|
||||
Usage: generate_int_macro <name>
|
||||
"""
|
||||
if line == '':
|
||||
raise PappyException('Macro name is required')
|
||||
args = shlex.split(line)
|
||||
name = args[0]
|
||||
script_str = gen_imacro()
|
||||
fname = 'int_%s.py' % name
|
||||
with open(fname, 'wc') as f:
|
||||
f.write(script_str)
|
||||
print 'Wrote script to %s' % fname
|
||||
|
||||
@crochet.wait_for(timeout=None)
|
||||
@defer.inlineCallbacks
|
||||
def rpy(line):
|
||||
"""
|
||||
Copy python object definitions of requests.
|
||||
Usage: rpy <reqs>
|
||||
"""
|
||||
reqs = yield load_reqlist(line)
|
||||
for req in reqs:
|
||||
print pappyproxy.macros.req_obj_def(req)
|
||||
|
||||
###############
|
||||
## Plugin hooks
|
||||
|
||||
def load_cmds(cmd):
    """Register the macro management commands and aliases with the console."""
    cmd.set_cmds({
        'load_macros': (load_macros_cmd, None),
        'run_macro': (run_macro, None),
        'run_int_macro': (run_int_macro, None),
        'stop_int_macro': (stop_int_macro, None),
        'list_int_macros': (list_int_macros, None),
        'generate_macro': (generate_macro, None),
        'generate_int_macro': (generate_int_macro, None),
        'rpy': (rpy, None),
    })
    # rpy intentionally has no alias.
    cmd.add_aliases([
        ('generate_int_macro', 'gima'),
        ('generate_macro', 'gma'),
        ('list_int_macros', 'lsim'),
        ('stop_int_macro', 'sim'),
        ('run_int_macro', 'rim'),
        ('run_macro', 'rma'),
        ('load_macros', 'lma'),
    ])
|
243
pappyproxy/plugins/manglecmds.py
Normal file
243
pappyproxy/plugins/manglecmds.py
Normal file
|
@ -0,0 +1,243 @@
|
|||
import crochet
|
||||
import curses
|
||||
import os
|
||||
import pappyproxy
|
||||
import shlex
|
||||
import subprocess
|
||||
import tempfile
|
||||
|
||||
from pappyproxy.util import PappyException
|
||||
from pappyproxy.macros import InterceptMacro
|
||||
from pappyproxy.http import Request, Response
|
||||
from pappyproxy.plugin import add_intercepting_macro, remove_intercepting_macro
|
||||
from pappyproxy import comm, config
|
||||
from twisted.internet import defer
|
||||
|
||||
PLUGIN_ID="manglecmds"
|
||||
|
||||
edit_queue = []
|
||||
|
||||
class MangleInterceptMacro(InterceptMacro):
    """
    A class representing a macro that modifies requests as they pass through the
    proxy
    """
    def __init__(self):
        InterceptMacro.__init__(self)
        self.name = 'Pappy Interceptor Macro'
        self.intercept_requests = False
        self.intercept_responses = False
        # Mangling happens in the async_mangle_* coroutines below.
        self.async_req = True
        self.async_rsp = True

    def __repr__(self):
        # Fix: the original applied '%' to a format string that had no
        # conversion specifier, raising TypeError whenever repr() ran.
        return "<MangleInterceptingMacro (%s)>" % self.name

    @defer.inlineCallbacks
    def async_mangle_request(self, request):
        """Let the user edit the request in an external editor.

        Returns the (possibly modified) request, or None if the user
        dropped it by saving an empty file.
        """
        retreq = request
        # Write original request to a temp file for the editor
        with tempfile.NamedTemporaryFile(delete=False) as tf:
            tfName = tf.name
            tf.write(request.full_request)

        # Have the console edit the file
        yield edit_file(tfName)

        # Create new mangled request from the edited file
        with open(tfName, 'r') as f:
            text = f.read()

        os.remove(tfName)

        # Empty file = the user wants to drop the request
        if text == '':
            pappyproxy.proxy.log('Request dropped!')
            defer.returnValue(None)

        mangled_req = Request(text, update_content_length=True)
        mangled_req.port = request.port
        mangled_req.is_ssl = request.is_ssl

        # Only swap in the mangled version if something actually changed
        if mangled_req.full_request != request.full_request:
            retreq = mangled_req

        defer.returnValue(retreq)

    @defer.inlineCallbacks
    def async_mangle_response(self, request):
        """Let the user edit the response in an external editor.

        Returns the (possibly modified) response, or None if dropped.
        """
        retrsp = request.response
        # Write original response to a temp file for the editor
        with tempfile.NamedTemporaryFile(delete=False) as tf:
            tfName = tf.name
            tf.write(request.response.full_response)

        # Have the console edit the file; front=True so responses jump
        # ahead of queued requests
        yield edit_file(tfName, front=True)

        # Create new mangled response from the edited file
        with open(tfName, 'r') as f:
            text = f.read()

        os.remove(tfName)

        # Empty file = the user wants to drop the response
        if text == '':
            pappyproxy.proxy.log('Response dropped!')
            defer.returnValue(None)

        mangled_rsp = Response(text, update_content_length=True)

        if mangled_rsp.full_response != request.response.full_response:
            mangled_rsp.unmangled = request.response
            retrsp = mangled_rsp

        defer.returnValue(retrsp)
|
||||
|
||||
|
||||
###############
|
||||
## Helper funcs
|
||||
|
||||
def edit_file(fname, front=False):
    # Queue fname for editing by the console loop in intercept(). The
    # returned deferred fires once the editor has been closed on the file.
    global edit_queue
    d = defer.Deferred()
    entry = (fname, d)
    if front:
        edit_queue.insert(0, entry)
    else:
        edit_queue.append(entry)
    return d
|
||||
|
||||
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def check_reqid(reqid):
    # Used for the repeater command. Must not be async.
    # Validates that reqid refers to a request that can actually be loaded.
    try:
        yield pappyproxy.http.Request.load_request(reqid)
    except Exception:
        # Narrowed from a bare `except:`, which would also swallow
        # KeyboardInterrupt/SystemExit.
        raise PappyException('"%s" is not a valid request id' % reqid)
    defer.returnValue(None)
|
||||
|
||||
def start_editor(reqid):
    # Launch vim with the repeater plugin script loaded and tell it which
    # request to fetch and which port the comm server is listening on.
    script_loc = os.path.join(config.PAPPY_DIR, "plugins", "vim_repeater", "repeater.vim")
    subprocess.call(["vim", "-S", script_loc, "-c", "RepeaterSetup %s %d"%(reqid, comm.comm_port)])
|
||||
|
||||
####################
|
||||
## Command functions
|
||||
|
||||
def repeater(line):
    """
    Open a request in the repeater
    Usage: repeater <reqid>
    """
    # This is not async on purpose. start_editor acts up if this is called
    # with inline callbacks. As a result, check_reqid and get_unmangled
    # cannot be async
    args = shlex.split(line)
    # Fix: the original indexed args[0] unconditionally, so an empty line
    # raised IndexError instead of a user-facing error.
    if not args:
        raise PappyException('Request id is required')
    reqid = args[0]

    check_reqid(reqid)
    start_editor(reqid)
|
||||
|
||||
def intercept(line):
    """
    Intercept requests and/or responses and edit them with before passing them along
    Usage: intercept <reqid>
    """
    # Runs a curses UI loop that drains edit_queue (filled by the
    # MangleInterceptMacro via edit_file) one item at a time, spawning
    # $EDITOR for each, until the user quits with 'q'.
    global edit_queue
    args = shlex.split(line)
    intercept_requests = False
    intercept_responses = False

    # Several spellings are accepted for what to intercept
    req_names = ('req', 'request', 'requests')
    rsp_names = ('rsp', 'response', 'responses')

    if any(a in req_names for a in args):
        intercept_requests = True
    if any(a in rsp_names for a in args):
        intercept_responses = True

    # Human-readable summary shown in the curses status line
    if intercept_requests and intercept_responses:
        intercept_str = 'Requests and responses'
    elif intercept_requests:
        intercept_str = 'Requests'
    elif intercept_responses:
        intercept_str = 'Responses'
    else:
        intercept_str = 'NOTHING'

    mangle_macro = MangleInterceptMacro()
    mangle_macro.intercept_requests = intercept_requests
    mangle_macro.intercept_responses = intercept_responses

    add_intercepting_macro('pappy_intercept', mangle_macro)

    ## Interceptor loop
    stdscr = curses.initscr()
    curses.noecho()
    curses.cbreak()

    try:
        editnext = False
        # Non-blocking getch so the queue is polled continuously.
        # NOTE(review): this makes the loop a busy-wait; presumably
        # acceptable for an interactive session — confirm.
        stdscr.nodelay(True)
        while True:
            stdscr.addstr(0, 0, "Currently intercepting: %s" % intercept_str)
            stdscr.clrtoeol()
            stdscr.addstr(1, 0, "%d item(s) in queue." % len(edit_queue))
            stdscr.clrtoeol()
            if editnext:
                stdscr.addstr(2, 0, "Waiting for next item... Press 'q' to quit or 'b' to quit waiting")
            else:
                stdscr.addstr(2, 0, "Press 'n' to edit the next item or 'q' to quit interceptor.")
            stdscr.clrtoeol()

            c = stdscr.getch()
            if c == ord('q'):
                break
            elif c == ord('n'):
                editnext = True
            elif c == ord('b'):
                editnext = False

            # When armed ('n') and something is queued, edit the oldest item
            if editnext and edit_queue:
                editnext = False
                (to_edit, deferred) = edit_queue.pop(0)
                editor = 'vi'
                if 'EDITOR' in os.environ:
                    editor = os.environ['EDITOR']
                subprocess.call([editor, to_edit])
                stdscr.clear()
                # Firing the deferred lets the proxy continue with the
                # edited file's contents
                deferred.callback(None)
    finally:
        # Always restore the terminal, even if the editor or curses throws
        curses.nocbreak()
        stdscr.keypad(0)
        curses.echo()
        curses.endwin()
        try:
            remove_intercepting_macro('pappy_intercept')
        except PappyException:
            pass
        # Send remaining requests along
        while len(edit_queue) > 0:
            (fname, deferred) = edit_queue.pop(0)
            deferred.callback(None)
|
||||
|
||||
###############
|
||||
## Plugin hooks
|
||||
|
||||
def load_cmds(cmd):
    """Register the interceptor and repeater commands with the console."""
    cmd.set_cmds({
        'repeater': (repeater, None),
        'intercept': (intercept, None),
    })
    cmd.add_aliases([
        ('repeater', 'rp'),
        ('intercept', 'ic'),
    ])
|
85
pappyproxy/plugins/misc.py
Normal file
85
pappyproxy/plugins/misc.py
Normal file
|
@ -0,0 +1,85 @@
|
|||
import crochet
|
||||
import pappyproxy
|
||||
import shlex
|
||||
|
||||
from pappyproxy.console import confirm, load_reqlist
|
||||
from pappyproxy.util import PappyException
|
||||
from twisted.internet import defer
|
||||
|
||||
def clrmem(line):
    """
    Delete all in-memory only requests
    Usage: clrmem
    """
    # Snapshot first: remove_request mutates in_memory_requests.
    for req in list(pappyproxy.context.Context.in_memory_requests):
        pappyproxy.context.Context.remove_request(req)
|
||||
|
||||
def gencerts(line):
|
||||
"""
|
||||
Generate CA cert and private CA file
|
||||
Usage: gencerts [/path/to/put/certs/in]
|
||||
"""
|
||||
dest_dir = line or pappyproxy.config.CERT_DIR
|
||||
message = "This will overwrite any existing certs in %s. Are you sure?" % dest_dir
|
||||
if not confirm(message, 'n'):
|
||||
return False
|
||||
print "Generating certs to %s" % dest_dir
|
||||
pappyproxy.proxy.generate_ca_certs(dest_dir)
|
||||
|
||||
def log(line):
    """
    Display the log in real time. Honestly it probably doesn't work.
    Usage: log [verbosity (default is 1)]
    verbosity=1: Show connections as they're made/lost, some additional info
    verbosity=3: Show full requests/responses as they are processed by the proxy
    """
    try:
        verbosity = int(line.strip())
    except (ValueError, AttributeError):
        # Narrowed from a bare except: missing or non-numeric argument
        # falls back to the default level.
        verbosity = 1
    pappyproxy.config.DEBUG_VERBOSITY = verbosity
    # Block until the user presses enter, then silence the log again.
    raw_input()
    pappyproxy.config.DEBUG_VERBOSITY = 0
|
||||
|
||||
@crochet.wait_for(timeout=None)
|
||||
@defer.inlineCallbacks
|
||||
def export(line):
|
||||
"""
|
||||
Write the full request/response of a request/response to a file.
|
||||
Usage: export [req|rsp] <reqid(s)>
|
||||
"""
|
||||
args = shlex.split(line)
|
||||
if len(args) < 2:
|
||||
print 'Requires req/rsp and and request id(s)'
|
||||
defer.returnValue(None)
|
||||
|
||||
if args[0] not in ('req', 'rsp'):
|
||||
raise PappyException('Request or response not specified')
|
||||
|
||||
reqs = yield load_reqlist(args[1])
|
||||
for req in reqs:
|
||||
try:
|
||||
if args[0] == 'req':
|
||||
fname = 'req_%s.txt'%req.reqid
|
||||
with open(fname, 'w') as f:
|
||||
f.write(req.full_request)
|
||||
print 'Full request written to %s' % fname
|
||||
elif args[0] == 'rsp':
|
||||
fname = 'rsp_%s.txt'%req.reqid
|
||||
with open(fname, 'w') as f:
|
||||
f.write(req.full_response)
|
||||
print 'Full response written to %s' % fname
|
||||
except PappyException as e:
|
||||
print 'Unable to export %s: %s' % (req.reqid, e)
|
||||
|
||||
def load_cmds(cmd):
    """Register the misc utility commands with the console."""
    cmd.set_cmds({
        'log': (log, None),
        'export': (export, None),
        'gencerts': (gencerts, None),
        'clrmem': (clrmem, None),
    })
    # No aliases for these commands.
    cmd.add_aliases([])
|
102
pappyproxy/plugins/tagcmds.py
Normal file
102
pappyproxy/plugins/tagcmds.py
Normal file
|
@ -0,0 +1,102 @@
|
|||
import crochet
|
||||
import pappyproxy
|
||||
import shlex
|
||||
|
||||
from pappyproxy.plugin import main_context
|
||||
from pappyproxy.console import load_reqlist
|
||||
from pappyproxy.util import PappyException
|
||||
from twisted.internet import defer
|
||||
|
||||
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def tag(line):
    """
    Add a tag to requests.
    Usage: tag <tag> [request ids]
    You can tag as many requests as you want at the same time. If no
    ids are given, the tag will be applied to all in-context requests.
    """
    args = shlex.split(line)
    if len(args) == 0:
        raise PappyException('Tag name is required')
    tag = args[0]

    if len(args) > 1:
        # Second arg is a request-id list; False presumably relaxes
        # loading requirements — TODO confirm against load_reqlist
        reqs = yield load_reqlist(args[1], False)
        ids = [r.reqid for r in reqs]
        print 'Tagging %s with %s' % (', '.join(ids), tag)
    else:
        print "Tagging all in-context requests with %s" % tag
        reqs = main_context().active_requests

    for req in reqs:
        if tag not in req.tags:
            req.tags.append(tag)
            # Persist the change only for requests already saved to disk
            if req.saved:
                yield req.async_save()
            # NOTE(review): add_req is not imported or defined in this
            # module — this call looks like it raises NameError when
            # reached. Confirm the intended helper (untag/clrtag do not
            # make an equivalent call).
            add_req(req)
        else:
            print 'Request %s already has tag %s' % (req.reqid, tag)
|
||||
|
||||
@crochet.wait_for(timeout=None)
|
||||
@defer.inlineCallbacks
|
||||
def untag(line):
|
||||
"""
|
||||
Remove a tag from requests
|
||||
Usage: untag <tag> <request ids>
|
||||
You can provide as many request ids as you want and the tag will
|
||||
be removed from all of them. If no ids are given, the tag will
|
||||
be removed from all in-context requests.
|
||||
"""
|
||||
args = shlex.split(line)
|
||||
if len(args) == 0:
|
||||
raise PappyException("Tag and request ids are required")
|
||||
tag = args[0]
|
||||
|
||||
ids = []
|
||||
if len(args) > 1:
|
||||
reqs = yield load_reqlist(args[1], False)
|
||||
ids = [r.reqid for r in reqs]
|
||||
else:
|
||||
print "Untagging all in-context requests with tag %s" % tag
|
||||
reqs = main_context().active_requests
|
||||
|
||||
for req in reqs:
|
||||
if tag in req.tags:
|
||||
req.tags.remove(tag)
|
||||
if req.saved:
|
||||
yield req.async_save()
|
||||
if ids:
|
||||
print 'Tag %s removed from %s' % (tag, ', '.join(ids))
|
||||
|
||||
@crochet.wait_for(timeout=None)
|
||||
@defer.inlineCallbacks
|
||||
def clrtag(line):
|
||||
"""
|
||||
Clear all the tags from requests
|
||||
Usage: clrtag <request ids>
|
||||
"""
|
||||
args = shlex.split(line)
|
||||
if len(args) == 0:
|
||||
raise PappyException('No request IDs given')
|
||||
reqs = yield load_reqlist(args[0], False)
|
||||
|
||||
for req in reqs:
|
||||
if req.tags:
|
||||
req.tags = []
|
||||
print 'Tags cleared from request %s' % (req.reqid)
|
||||
if req.saved:
|
||||
yield req.async_save()
|
||||
|
||||
###############
|
||||
## Plugin hooks
|
||||
|
||||
def load_cmds(cmd):
    """Register the tagging commands with the console."""
    cmd.set_cmds({
        'tag': (tag, None),
        'untag': (untag, None),
        'clrtag': (clrtag, None),
    })
    # No aliases for the tagging commands.
    cmd.add_aliases([])
|
328
pappyproxy/plugins/view.py
Normal file
328
pappyproxy/plugins/view.py
Normal file
|
@ -0,0 +1,328 @@
|
|||
import crochet
|
||||
import datetime
|
||||
import pappyproxy
|
||||
import shlex
|
||||
|
||||
from pappyproxy.console import load_reqlist, print_table, print_requests
|
||||
from pappyproxy.util import PappyException
|
||||
from pappyproxy.plugin import main_context
|
||||
from pappyproxy.http import Request
|
||||
from twisted.internet import defer
|
||||
|
||||
###################
|
||||
## Helper functions
|
||||
|
||||
def view_full_message(request, headers_only=False):
|
||||
if headers_only:
|
||||
print request.headers_section_pretty
|
||||
else:
|
||||
print request.full_message_pretty
|
||||
|
||||
def print_request_extended(request):
    # Prints extended info for the request: timing, sizes, status code,
    # mangle state, tags and any plugin data.
    title = "Request Info (reqid=%s)" % request.reqid
    print title
    print '-'*len(title)
    reqlen = len(request.body)
    reqlen = '%d bytes' % reqlen
    rsplen = 'No response'

    # Which half of the pair was mangled, if any
    mangle_str = 'Nothing mangled'
    if request.unmangled:
        mangle_str = 'Request'

    if request.response:
        response_code = str(request.response.response_code) + \
            ' ' + request.response.response_text
        rsplen = len(request.response.body)
        rsplen = '%d bytes' % rsplen

        if request.response.unmangled:
            if mangle_str == 'Nothing mangled':
                mangle_str = 'Response'
            else:
                mangle_str += ' and Response'
    else:
        response_code = ''

    # Round-trip time, only when both endpoints were recorded
    time_str = '--'
    if request.time_start and request.time_end:
        time_delt = request.time_end - request.time_start
        time_str = "%.2f sec" % time_delt.total_seconds()

    if request.is_ssl:
        is_ssl = 'YES'
    else:
        is_ssl = 'NO'

    if request.time_start:
        time_made_str = request.time_start.strftime('%a, %b %d, %Y, %I:%M:%S %p')
    else:
        time_made_str = '--'

    print 'Made on %s' % time_made_str
    print 'ID: %s' % request.reqid
    print 'Verb: %s' % request.verb
    print 'Host: %s' % request.host
    print 'Path: %s' % request.full_path
    print 'Status Code: %s' % response_code
    print 'Request Length: %s' % reqlen
    print 'Response Length: %s' % rsplen
    if request.response and request.response.unmangled:
        print 'Unmangled Response Length: %s bytes' % len(request.response.unmangled.full_response)
    print 'Time: %s' % time_str
    print 'Port: %s' % request.port
    print 'SSL: %s' % is_ssl
    print 'Mangled: %s' % mangle_str
    print 'Tags: %s' % (', '.join(request.tags))
    if request.plugin_data:
        print 'Plugin Data: %s' % (request.plugin_data)
|
||||
|
||||
def get_site_map(reqs):
    # Collapse the requests into a sorted, de-duplicated list of path
    # tuples; sorted order is what the tree printer expects.
    unique_paths = {req.path_tuple for req in reqs}
    return sorted(unique_paths)
|
||||
|
||||
def print_tree(tree):
    # Render a sorted list of path tuples as a unicode box-drawing tree.
    _print_tree_helper(tree, 0, [])
|
||||
|
||||
def _get_tree_prefix(depth, print_bars, last):
    # Build the leading string for one line of the rendered tree.
    #   depth: nesting level (0 = root, which gets no prefix at all)
    #   print_bars: per ancestor level, True if that ancestor still has
    #               siblings below it (so a vertical bar must be drawn)
    #   last: True if this entry is the final child of its parent
    # NOTE(review): the exact run of spaces inside the literals controls
    # column alignment — confirm widths against the rendered output.
    if depth == 0:
        return u''
    else:
        ret = u''
        pb = print_bars + [True]
        for i in range(depth):
            if pb[i]:
                ret += u'\u2502 '
            else:
                ret += u' '
        if last:
            ret += u'\u2514\u2500\u2500 '
        else:
            ret += u'\u251c\u2500\u2500 '
        return ret
|
||||
|
||||
def _print_tree_helper(tree, depth, print_bars):
    # Takes in a tree and prints it at the given depth. The "tree" is a
    # sorted list of path tuples; tuples sharing a first element are
    # grouped under that element and recursed into with one component
    # stripped. print_bars carries the ancestor-bar state for
    # _get_tree_prefix.
    if tree == [] or tree == [()]:
        return
    # Skip exhausted (empty-tuple) rows left over from stripping components
    while tree[0] == ():
        tree = tree[1:]
        if tree == [] or tree == [()]:
            return
    # Single remaining single-component row: print it as the last child
    if len(tree) == 1 and len(tree[0]) == 1:
        print _get_tree_prefix(depth, print_bars + [False], True) + tree[0][0]
        return

    curkey = tree[0][0]
    subtree = []
    for row in tree:
        if row[0] != curkey:
            # Finished collecting rows for curkey: print it and recurse
            # into its children. An empty component renders as '/'.
            if curkey == '':
                curkey = '/'
            print _get_tree_prefix(depth, print_bars, False) + curkey
            if depth == 0:
                _print_tree_helper(subtree, depth+1, print_bars + [False])
            else:
                _print_tree_helper(subtree, depth+1, print_bars + [True])
            curkey = row[0]
            subtree = []
        subtree.append(row[1:])
    # The final group at this level is rendered as the last child
    if curkey == '':
        curkey = '/'
    print _get_tree_prefix(depth, print_bars, True) + curkey
    _print_tree_helper(subtree, depth+1, print_bars + [False])
|
||||
|
||||
|
||||
####################
|
||||
## Command functions
|
||||
|
||||
def list_reqs(line):
    """
    List the most recent in-context requests. By default shows the most recent 25
    Usage: list [a|num]

    If `a` is given, all the in-context requests are shown. If a number is given,
    that many requests will be shown.
    """
    args = shlex.split(line)
    if len(args) > 0:
        if args[0][0].lower() == 'a':
            # Any argument starting with 'a' means "show everything"
            print_count = -1
        else:
            try:
                print_count = int(args[0])
            except ValueError:
                # Narrowed from a bare except: a malformed count is the only
                # error worth swallowing here (a bare except also caught
                # KeyboardInterrupt/SystemExit)
                print('Please enter a valid argument for list')
                return
    else:
        print_count = 25

    def key_reqtime(req):
        # Sort key: seconds since the epoch; requests without a start time
        # sort last (they get -1)
        if req.time_start is None:
            return -1
        else:
            return (req.time_start-datetime.datetime(1970,1,1)).total_seconds()

    # Newest requests first, trimmed to the requested count (-1 == all)
    to_print = sorted(main_context().active_requests, key=key_reqtime, reverse=True)
    if print_count > 0:
        to_print = to_print[:print_count]
    print_requests(to_print)
|
||||
|
||||
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def view_request_info(line):
    """
    View information about request
    Usage: view_request_info <reqid> [u]
    If 'u' is given as an additional argument, the unmangled version
    of the request will be displayed.
    """
    reqids = shlex.split(line)[0]
    reqs = yield load_reqlist(reqids)
    for req in reqs:
        print('')
        print_request_extended(req)
        print('')
|
||||
|
||||
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def view_request_headers(line):
    """
    View the headers of the request
    Usage: view_request_headers <reqid> [u]
    If 'u' is given as an additional argument, the unmangled version
    of the request will be displayed.
    """
    reqid = shlex.split(line)[0]
    reqs = yield load_reqlist(reqid)
    # When several requests were selected, label and separate each one
    multiple = len(reqs) > 1
    for req in reqs:
        if multiple:
            print('Request %s:' % req.reqid)
            print('')
        view_full_message(req, True)
        if multiple:
            print('-'*30)
|
||||
|
||||
|
||||
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def view_full_request(line):
    """
    View the full data of the request
    Usage: view_full_request <reqid> [u]
    If 'u' is given as an additional argument, the unmangled version
    of the request will be displayed.
    """
    reqid = shlex.split(line)[0]
    reqs = yield load_reqlist(reqid)
    # When several requests were selected, label and separate each one
    multiple = len(reqs) > 1
    for req in reqs:
        if multiple:
            print('Request %s:' % req.reqid)
            print('')
        view_full_message(req)
        if multiple:
            print('-'*30)
|
||||
|
||||
|
||||
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def view_response_headers(line):
    """
    View the headers of the response
    Usage: view_response_headers <reqid>
    """
    reqs = yield load_reqlist(line)
    multiple = len(reqs) > 1
    for req in reqs:
        if not req.response:
            print("Request %s does not have a response" % req.reqid)
            continue
        if multiple:
            # Separator line identifying which request this response is for
            print('-'*15 + (' %s ' % req.reqid) + '-'*15)
        view_full_message(req.response, True)
|
||||
|
||||
|
||||
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def view_full_response(line):
    """
    View the full data of the response associated with a request
    Usage: view_full_response <reqid>
    """
    reqs = yield load_reqlist(line)
    multiple = len(reqs) > 1
    for req in reqs:
        if not req.response:
            print("Request %s does not have a response" % req.reqid)
            continue
        if multiple:
            # Separator line identifying which request this response is for
            print('-'*15 + (' %s ' % req.reqid) + '-'*15)
        view_full_message(req.response)
|
||||
|
||||
|
||||
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def dump_response(line):
    """
    Dump the data of the response to a file.
    Usage: dump_response <id> <filename>

    If no filename is given, the last component of the request path is used.
    """
    args = shlex.split(line)
    reqid = args[0]
    req = yield Request.load_request(reqid)
    rsp = req.response
    # Bug fix: previously rsp.body raised AttributeError when the request
    # had no response; report it instead of crashing
    if not rsp:
        print('Request %s does not have a response' % req.reqid)
        return
    if len(args) >= 2:
        fname = args[1]
    else:
        fname = req.path.split('/')[-1]

    # Bug fix: write in binary mode — response bodies are raw bytes
    # (images, gzip, ...) and text mode could corrupt them
    with open(fname, 'wb') as f:
        f.write(rsp.body)
    print('Response data written to %s' % fname)
|
||||
|
||||
def site_map(line):
    """
    Print the site map. Only includes requests in the current context.
    Usage: site_map
    """
    # Requests that 404'd are excluded; requests without any response are kept
    reqs = []
    for r in main_context().active_requests:
        if r.response and r.response.response_code == 404:
            continue
        reqs.append(r)
    print_tree(get_site_map(reqs))
|
||||
|
||||
|
||||
###############
|
||||
## Plugin hooks
|
||||
|
||||
def load_cmds(cmd):
    """Plugin hook: register this module's console commands with *cmd*.

    Each set_cmds entry maps a command name to a (handler, autocomplete
    function) pair; None means no autocompletion.
    """
    cmd.set_cmds({
        'list': (list_reqs, None),
        'view_request_info': (view_request_info, None),
        'view_request_headers': (view_request_headers, None),
        'view_full_request': (view_full_request, None),
        'view_response_headers': (view_response_headers, None),
        'view_full_response': (view_full_response, None),
        'site_map': (site_map, None),
        'dump_response': (dump_response, None),
    })
    # Short aliases for the common commands
    cmd.add_aliases([
        ('list', 'ls'),
        ('view_request_info', 'viq'),
        ('view_request_headers', 'vhq'),
        ('view_full_request', 'vfq'),
        ('view_response_headers', 'vhs'),
        ('site_map', 'sm'),
        ('view_full_response', 'vfs'),
        #('dump_response', 'dr'),
    ])
|
0
pappyproxy/plugins/vim_repeater/__init__.py
Normal file
0
pappyproxy/plugins/vim_repeater/__init__.py
Normal file
135
pappyproxy/plugins/vim_repeater/repeater.py
Normal file
135
pappyproxy/plugins/vim_repeater/repeater.py
Normal file
|
@ -0,0 +1,135 @@
|
|||
import base64
|
||||
import vim
|
||||
import sys
|
||||
import socket
|
||||
import json
|
||||
|
||||
class CommError(Exception):
    """Raised when talking to the proxy's comm port fails, or when the
    proxy reports an unsuccessful result (see communicate())."""
    pass
|
||||
|
||||
def escape(s):
    """Escape single quotes for interpolation into a vim single-quoted string.

    In vim, '' inside a single-quoted string stands for a literal quote.
    """
    return "''".join(s.split("'"))
|
||||
|
||||
def communicate(data):
    """Send *data* (a JSON-serializable dict) to the proxy's comm port and
    return the decoded response dict.

    Echoes the error in vim and raises CommError when the proxy reports
    failure.
    """
    # Submits data to the comm port of the proxy (port taken from the
    # script-local s:commport variable set up by repeater.vim)
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.connect(('127.0.0.1', int(vim.eval('s:commport'))))
        datastr = json.dumps(data)

        # Send our data. Bug fix: the loop previously compared against
        # len(data) — the number of keys in the dict — instead of the length
        # of the encoded string, so long messages could be truncated.
        total_sent = 0
        while total_sent < len(datastr):
            sent = s.send(datastr[total_sent:])
            assert sent != 0
            total_sent += sent
        s.send('\n')

        # Get our response: a newline-terminated JSON document
        retstr = ''
        c = ''
        while c != '\n':
            retstr = retstr + c
            c = s.recv(1)
            assert c != ''
    finally:
        # Always release the socket, even when send/recv fails
        s.close()
    result = json.loads(retstr)
    if not result['success']:
        vim.command("echoerr '%s'" % escape(result['message']))
        raise CommError(result['message'])
    return result
|
||||
|
||||
def read_line(conn):
    """Read single characters from *conn* until a newline is seen; return
    everything before the newline (the newline itself is consumed)."""
    chars = []
    while True:
        c = conn.read(1)
        if c == '\n':
            break
        chars.append(c)
    return ''.join(chars)
|
||||
|
||||
def run_command(command):
    """Dispatch *command* (a subcommand name from repeater.vim) to its
    handler; unknown commands are silently ignored."""
    dispatch = {
        "setup": set_up_windows,
        "submit": submit_current_buffer,
    }
    handler = dispatch.get(command)
    if handler is not None:
        handler()
|
||||
|
||||
def set_buffer_content(buf, text):
    """Replace the contents of vim buffer *buf* with *text*, one buffer
    line per newline-separated segment."""
    # buf[:] = None empties a vim buffer down to a single blank line,
    # so the first segment overwrites line 0 and the rest are appended
    buf[:] = None
    lines = text.split('\n')
    buf[0] = lines[0]
    for extra in lines[1:]:
        buf.append(extra)
|
||||
|
||||
def set_up_windows():
    """Create the repeater layout (request buffer left, response buffer
    right), load the chosen request/response into them, and save the
    connection settings in script-local vim variables for later submits.

    Reads its arguments from the vim function's a:2 (request id) and
    a:3 (comm port) — see repeater.vim.
    """
    reqid = vim.eval("a:2")
    comm_port = vim.eval("a:3")
    vim.command("let s:commport=%d"%int(comm_port))
    # Get the left buffer
    vim.command("new")
    vim.command("only")
    b2 = vim.current.buffer
    vim.command("let s:b2=bufnr('$')")

    # Vsplit new file
    vim.command("vnew")
    b1 = vim.current.buffer
    vim.command("let s:b1=bufnr('$')")

    # Get the request (messages travel base64-encoded over the comm port)
    comm_data = {"action": "get_request", "reqid": reqid}
    try:
        reqdata = communicate(comm_data)
    except CommError:
        # communicate() already echoed the error in vim; just abort setup
        return

    comm_data = {"action": "get_response", "reqid": reqid}
    try:
        rspdata = communicate(comm_data)
    except CommError:
        return

    # Set up the buffers
    set_buffer_content(b1, base64.b64decode(reqdata['full_message']))
    # The request may not have a response yet; leave b2 empty in that case
    if 'full_message' in rspdata:
        set_buffer_content(b2, base64.b64decode(rspdata['full_message']))

    # Save the port, ssl, host setting
    vim.command("let s:repport=%d" % int(reqdata['port']))
    vim.command("let s:rephost='%s'" % escape(reqdata['host']))

    if reqdata['is_ssl']:
        vim.command("let s:repisssl=1")
    else:
        vim.command("let s:repisssl=0")
|
||||
|
||||
def submit_current_buffer():
    """Submit the request in the current (left) buffer through the proxy
    and load the returned response into the right-hand buffer."""
    curbuf = vim.current.buffer
    b2_id = int(vim.eval("s:b2"))
    b2 = vim.buffers[b2_id]
    # Rebuild the two-pane layout: request stays left, response goes right
    vim.command("let s:b1=bufnr('$')")
    vim.command("only")
    vim.command("rightbelow vertical new")
    vim.command("b %d" % b2_id)
    vim.command("wincmd h")

    # Reassemble the (possibly edited) request and submit it using the
    # connection settings saved by set_up_windows (s:repport/s:rephost/
    # s:repisssl); the message is base64-encoded for transport
    full_request = '\n'.join(curbuf)
    commdata = {'action': 'submit',
                'full_message': base64.b64encode(full_request),
                'port': int(vim.eval("s:repport")),
                'host': vim.eval("s:rephost")}
    # vim.eval returns strings, hence the comparison against '1'
    if vim.eval("s:repisssl") == '1':
        commdata["is_ssl"] = True
    else:
        commdata["is_ssl"] = False
    result = communicate(commdata)
    # Show the new response in the right-hand buffer
    set_buffer_content(b2, base64.b64decode(result['response']['full_message']))
|
||||
|
||||
# (left, right) = set_up_windows()
# set_buffer_content(left, 'Hello\nWorld')
# set_buffer_content(right, 'Hello\nOther\nWorld')
#print "Arg is %s" % vim.eval("a:arg")
# Module entry point: vim runs this file via `pyfile` (see repeater.vim),
# passing the subcommand name as the vim function argument a:1.
run_command(vim.eval("a:1"))
|
17
pappyproxy/plugins/vim_repeater/repeater.vim
Normal file
17
pappyproxy/plugins/vim_repeater/repeater.vim
Normal file
|
@ -0,0 +1,17 @@
|
|||
" Pappy repeater plugin glue: forwards commands to repeater.py, which
" reads the arguments (a:1, a:2, ...) via vim.eval().
if !has('python')
echo "Vim must support python in order to use the repeater"
finish
endif

" Absolute path to the python helper living next to this script
let s:pyscript = resolve(expand('<sfile>:p:h') . '/repeater.py')

" Every repeater command funnels through the python script
function! RepeaterAction(...)
execute 'pyfile ' . s:pyscript
endfunc

" :RepeaterSetup <reqid> <commport> builds the two-pane layout;
" :RepeaterSubmitBuffer submits the edited request
command! -nargs=* RepeaterSetup call RepeaterAction('setup', <f-args>)
command! RepeaterSubmitBuffer call RepeaterAction('submit')

" Bind forward to <leader>f
nnoremap <leader>f :RepeaterSubmitBuffer<CR>
|
||||
|
Loading…
Add table
Add a link
Reference in a new issue