Version 0.2.10
parent 9648bc44cc
commit d2f0e5c222

17 changed files with 625 additions and 1061 deletions
@@ -1,7 +1,7 @@
 import crochet
 import pappyproxy
 
-from pappyproxy.util import PappyException, confirm
+from pappyproxy.util import PappyException, confirm, autocomplete_startswith
 from pappyproxy.http import Request
 from twisted.internet import defer
 
@@ -40,6 +40,11 @@ class BuiltinFilters(object):
         return pappyproxy.context.Filter(BuiltinFilters._filters[name][1])
 
 
+def complete_filtercmd(text, line, begidx, endidx):
+    strs = [k for k, v in pappyproxy.context.Filter._filter_functions.iteritems()]
+    strs += [k for k, v in pappyproxy.context.Filter._async_filter_functions.iteritems()]
+    return autocomplete_startswith(text, strs)
+
 @crochet.wait_for(timeout=None)
 @defer.inlineCallbacks
 def filtercmd(line):
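Note: every completer added in this commit funnels its candidates through autocomplete_startswith, which is imported from pappyproxy.util but whose body is not part of this diff. The behavior the callers rely on is plain prefix filtering; a minimal sketch of that assumption (the real helper may differ):

    def autocomplete_startswith(text, options):
        # Keep only the candidates that begin with what has been typed so far;
        # the console's complete_* hooks hand this list back to readline.
        return [s for s in options if s.startswith(text)]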
@@ -179,7 +184,7 @@ def load_cmds(cmd):
         'filter_clear': (filter_clear, None),
         'filter_up': (filter_up, None),
         'builtin_filter': (builtin_filter, complete_builtin_filter),
-        'filter': (filtercmd, None),
+        'filter': (filtercmd, complete_filtercmd),
     })
     cmd.add_aliases([
         #('filter_prune', ''),
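Note: registering 'filter': (filtercmd, complete_filtercmd) pairs the command handler with a tab completer. The console class is not shown in this diff; the sketch below is an illustrative assumption of how a set_cmds-style registry could map onto the standard-library cmd hooks (ConsoleSketch and its internals are not pappyproxy code):

    import cmd

    class ConsoleSketch(cmd.Cmd):
        # Hypothetical console: each registered name becomes a do_<name> handler
        # and, when a completer is supplied, a complete_<name> hook with the
        # (text, line, begidx, endidx) signature used by the new functions.
        def set_cmds(self, cmds):
            for name, (handler, completer) in cmds.iteritems():
                setattr(self.__class__, 'do_' + name,
                        lambda self, line, h=handler: h(line))
                if completer is not None:
                    setattr(self.__class__, 'complete_' + name,
                            lambda self, text, line, begidx, endidx, c=completer:
                                c(text, line, begidx, endidx))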
@@ -4,7 +4,7 @@ import shlex
 
 from pappyproxy.plugin import active_intercepting_macros, add_intercepting_macro, remove_intercepting_macro
 from pappyproxy.macros import load_macros, macro_from_requests, gen_imacro
-from pappyproxy.util import PappyException, load_reqlist
+from pappyproxy.util import PappyException, load_reqlist, autocomplete_startswith
 from twisted.internet import defer
 
 loaded_macros = []
@@ -64,6 +64,11 @@ def load_macros_cmd(line):
             int_macro_dict[macro.short_name] = macro
         loaded_int_macros.append(macro)
         print 'Loaded "%s"' % macro
 
+def complete_run_macro(text, line, begidx, endidx):
+    global macro_dict
+    strs = [k for k,v in macro_dict.iteritems()]
+    return autocomplete_startswith(text, strs)
+
 def run_macro(line):
     """
@@ -81,6 +86,24 @@ def run_macro(line):
     macro = macro_dict[mname]
     macro.execute(args[1:])
 
+def complete_run_int_macro(text, line, begidx, endidx):
+    global int_macro_dict
+    global loaded_int_macros
+    running = []
+    not_running = []
+    for macro in loaded_int_macros:
+        if macro.name in [m.name for k, m in active_intercepting_macros().iteritems()]:
+            running.append(macro)
+        else:
+            not_running.append(macro)
+    strs = []
+    for m in not_running:
+        strs.append(macro.name)
+        strs.append(macro.file_name)
+        if macro.short_name:
+            strs.append(macro.short_name)
+    return autocomplete_startswith(text, strs)
+
 def run_int_macro(line):
     """
     Activate an intercepting macro
@@ -103,6 +126,24 @@ def run_int_macro(line):
         print 'Error initializing macro:'
         raise e
 
+def complete_stop_int_macro(text, line, begidx, endidx):
+    global int_macro_dict
+    global loaded_int_macros
+    running = []
+    not_running = []
+    for macro in loaded_int_macros:
+        if macro.name in [m.name for k, m in active_intercepting_macros().iteritems()]:
+            running.append(macro)
+        else:
+            not_running.append(macro)
+    strs = []
+    for m in running:
+        strs.append(macro.name)
+        strs.append(macro.file_name)
+        if macro.short_name:
+            strs.append(macro.short_name)
+    return autocomplete_startswith(text, strs)
+
 def stop_int_macro(line):
     """
     Stop a running intercepting macro
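Note: complete_run_int_macro and complete_stop_int_macro differ only in which bucket (not_running vs. running) they offer, and in both the final loop iterates over m while appending attributes of macro, the variable left over from the preceding loop, so the candidate list may not be the one intended. A consolidated sketch of the apparent intent (_int_macro_names is illustrative and not part of this commit):

    def _int_macro_names(want_running):
        # Gather completion strings for loaded intercepting macros, keeping
        # either the currently active ones or the inactive ones.
        active_names = [m.name for m in active_intercepting_macros().itervalues()]
        strs = []
        for m in loaded_int_macros:
            if (m.name in active_names) == want_running:
                strs.append(m.name)
                strs.append(m.file_name)
                if m.short_name:
                    strs.append(m.short_name)
        return strs

    def complete_run_int_macro(text, line, begidx, endidx):
        return autocomplete_startswith(text, _int_macro_names(False))

    def complete_stop_int_macro(text, line, begidx, endidx):
        return autocomplete_startswith(text, _int_macro_names(True))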
@@ -201,9 +242,9 @@ def load_cmds(cmd):
         'generate_int_macro': (generate_int_macro, None),
         'generate_macro': (generate_macro, None),
         'list_int_macros': (list_int_macros, None),
-        'stop_int_macro': (stop_int_macro, None),
-        'run_int_macro': (run_int_macro, None),
-        'run_macro': (run_macro, None),
+        'stop_int_macro': (stop_int_macro, complete_stop_int_macro),
+        'run_int_macro': (run_int_macro, complete_run_int_macro),
+        'run_macro': (run_macro, complete_run_macro),
         'load_macros': (load_macros_cmd, None),
     })
     cmd.add_aliases([
@@ -186,6 +186,10 @@ def run_without_color(line):
     with Capturing() as output:
         session.cons.onecmd(line.strip())
     print remove_color(output.val)
 
+def version(line):
+    import pappyproxy
+    print pappyproxy.__version__
+
 def load_cmds(cmd):
     cmd.set_cmds({
@@ -197,6 +201,7 @@ def load_cmds(cmd):
         'merge': (merge_datafile, None),
         'nocolor': (run_without_color, None),
         'watch': (watch_proxy, None),
+        'version': (version, None),
     })
     cmd.add_aliases([
         #('rpy', ''),
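Note: the new version command is a thin wrapper around the package metadata. A minimal standalone equivalent, assuming the package is importable:

    import pappyproxy

    # Prints the installed package version, e.g. "0.2.10" for this release.
    print pappyproxy.__version__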
@@ -2,7 +2,7 @@ import crochet
 import pappyproxy
 import shlex
 
-from pappyproxy.plugin import main_context_ids
+from pappyproxy.plugin import async_main_context_ids
 from pappyproxy.util import PappyException, load_reqlist
 from twisted.internet import defer
 from pappyproxy.http import Request
@@ -26,7 +26,7 @@ def tag(line):
         print 'Tagging %s with %s' % (', '.join(reqids), tag)
     else:
         print "Tagging all in-context requests with %s" % tag
-        reqids = yield main_context_ids()
+        reqids = yield async_main_context_ids()
 
     for reqid in reqids:
         req = yield Request.load_request(reqid)
@@ -58,7 +58,7 @@ def untag(line):
         print 'Removing tag %s from %s' % (tag, ', '.join(reqids))
     else:
         print "Removing tag %s from all in-context requests" % tag
-        reqids = yield main_context_ids()
+        reqids = yield async_main_context_ids()
 
     for reqid in reqids:
         req = yield Request.load_request(reqid)
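Note: tag and untag run inside @defer.inlineCallbacks generators, so the switch from main_context_ids to async_main_context_ids keeps the yield-a-Deferred pattern used throughout these plugins. A self-contained sketch of that calling pattern (the function name count_in_context_responses is illustrative):

    import crochet
    from twisted.internet import defer
    from pappyproxy.plugin import async_main_context_ids
    from pappyproxy.http import Request

    @crochet.wait_for(timeout=None)
    @defer.inlineCallbacks
    def count_in_context_responses():
        # Wait for the Deferred holding the ids of the in-context requests,
        # then load each request by id, as tag/untag do above.
        ids = yield async_main_context_ids()
        have_response = 0
        for reqid in ids:
            req = yield Request.load_request(reqid)
            if req.response:
                have_response += 1
        print '%d of %d in-context requests have a response' % (have_response, len(ids))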
@@ -10,7 +10,7 @@ import urllib
 from pappyproxy.util import PappyException, utc2local, load_reqlist, print_table, print_request_rows, get_req_data_row
 from pappyproxy.http import Request, repeatable_parse_qs
 from twisted.internet import defer
-from pappyproxy.plugin import main_context_ids
+from pappyproxy.plugin import async_main_context_ids
 from pappyproxy.colors import Colors, Styles, verb_color, scode_color, path_formatter, host_color
 from pygments.formatters import TerminalFormatter
 from pygments.lexers.data import JsonLexer
@@ -255,7 +255,7 @@ def list_reqs(line):
         print_count = 25
 
     rows = []
-    ids = yield main_context_ids(print_count)
+    ids = yield async_main_context_ids(print_count)
     for i in ids:
         req = yield Request.load_request(i)
         rows.append(get_req_data_row(req))
@@ -477,7 +477,7 @@ def get_param_info(line):
 
     found_params = {}
 
-    ids = yield main_context_ids()
+    ids = yield async_main_context_ids()
     for i in ids:
         req = yield Request.load_request(i)
         for k, v in req.url_params.all_pairs():
@@ -501,17 +501,20 @@ def dump_response(line):
     """
     # dump the data of a response
    args = shlex.split(line)
-    reqid = args[0]
-    req = yield Request.load_request(reqid)
-    rsp = req.response
-    if len(args) >= 2:
-        fname = args[1]
-    else:
-        fname = req.path.split('/')[-1]
+    reqs = yield load_reqlist(args[0])
+    for req in reqs:
+        if req.response:
+            rsp = req.response
+            if len(args) >= 2:
+                fname = args[1]
+            else:
+                fname = req.path.split('/')[-1]
 
-    with open(fname, 'w') as f:
-        f.write(rsp.body)
-    print 'Response data written to %s' % fname
+            with open(fname, 'w') as f:
+                f.write(rsp.body)
+            print 'Response data written to %s' % fname
+        else:
+            print 'Request %s does not have a response' % req.reqid
 
 @crochet.wait_for(timeout=None)
 @defer.inlineCallbacks
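Note: dump_response now goes through load_reqlist instead of a single Request.load_request call, so one invocation can write out several responses (a second argument still forces a single output filename). A standalone sketch of the new code path (the helper name dump_responses and its argument handling are illustrative, not part of the plugin):

    import crochet
    from twisted.internet import defer
    from pappyproxy.util import load_reqlist

    @crochet.wait_for(timeout=None)
    @defer.inlineCallbacks
    def dump_responses(reqid_spec, fname=None):
        # load_reqlist returns a Deferred that fires with the matched requests.
        reqs = yield load_reqlist(reqid_spec)
        for req in reqs:
            if not req.response:
                print 'Request %s does not have a response' % req.reqid
                continue
            out = fname or req.path.split('/')[-1]
            with open(out, 'w') as f:
                f.write(req.response.body)
            print 'Response data written to %s' % out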
@@ -525,7 +528,7 @@ def site_map(line):
         paths = True
     else:
         paths = False
-    ids = yield main_context_ids()
+    ids = yield async_main_context_ids()
     paths_set = set()
     for reqid in ids:
         req = yield Request.load_request(reqid)