Version 0.2.10

parent 9648bc44cc
commit d2f0e5c222

17 changed files with 625 additions and 1061 deletions
README.md (14 changes)

@@ -174,6 +174,8 @@ Configuration for each project is done in the `config.json` file. The file is a
 | `debug_dir` (optional) | Where connection debug info should be stored. If not present, debug info is not saved to a file. |
 | `cert_dir` | Where the CA cert and the private key for the CA cert are stored |
 | `proxy_listeners` | A list of dicts which describe which ports the proxy will listen on. Each item is a dict with "port" and "interface" values which determine which port and interface to listen on. For example, if port=8000 and the interface is 127.0.0.1, the proxy will only accept connections from localhost on port 8000. To accept connections from anywhere, set the interface to 0.0.0.0. |
+| `socks_proxy` | A dictionary with details on how to connect to an upstream SOCKS proxy to send all in-scope requests through. See the section on upstream SOCKS proxies for more information. |
+| `http_proxy` | A dictionary with details on how to connect to an upstream HTTP proxy to send all in-scope requests through. See the section on upstream HTTP proxies for more information. |

 The following tokens will also be replaced with values:
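For reference, here is a minimal sketch of a `config.json` built from the options described above, written out with Python for concreteness. The key names inside `http_proxy` other than `username` and `password` (which the proxy code checks for) are assumptions, as are all of the example values.

```python
import json

# A hypothetical project configuration; adjust paths and ports as needed.
config = {
    "cert_dir": "./certs",       # CA cert and its private key live here
    "debug_dir": "./debug",      # optional; omit to disable debug files
    "proxy_listeners": [
        {"port": 8000, "interface": "127.0.0.1"},
    ],
    # "host"/"port" key names are assumptions; "username"/"password" are the
    # keys the proxy looks for when authenticating to an upstream HTTP proxy.
    "http_proxy": {"host": "127.0.0.1", "port": 8080,
                   "username": "proxyuser", "password": "proxypass"},
}

with open("config.json", "w") as f:
    json.dump(config, f, indent=4)
```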
@@ -379,6 +381,11 @@ Some arguments can take multiple IDs for an argument. To pass multiple IDs to a
 * `viq 1,2,u3` View information about requests 1, 2, and the unmangled version of 3
 * `gma foo 4,5,6` Generate a macro with definitions for requests 4, 5, and 6
+
+In addition, you can pass in a wildcard to include all in-context requests.
+
+* `viq *` View information about all in-context requests
+* `dump_response *` Dump the responses of all in-context requests (will overwrite duplicates)

 Context
 -------
 The context is a set of filters that define which requests are considered "active". Only requests in the current context are displayed with `ls`. By default, the context includes every single request that passes through the proxy. You can limit down the current context by applying filters. Filters apply rules such as "the response code must equal 500" or "the host must contain google.com". Once you apply one or more filters, only requests/responses which pass every active filter will be a part of the current context.
@@ -1123,6 +1130,13 @@ Changelog
 ---------
 The boring part of the readme

+* 0.2.10
+    * Add wildcard support for commands that can take multiple request IDs
+    * Update dump_response to dump multiple responses at the same time
+    * More autocompleters (macro commands, filter fields)
+    * Add a non-async function to get in-context request IDs, so macros can scan over all in-context requests and do things with them
+    * Improve sessions so they can be used to maintain state with macros
+    * Bugfixes
 * 0.2.9
     * Fix bugs/clean up some code
 * 0.2.8
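As a concrete illustration of the changelog entry about scanning the context from macros, here is a minimal, hypothetical macro sketch; it only uses helpers and attributes that appear elsewhere in this commit.

```python
# A hypothetical macro: print a one-line summary of every in-context request.
# main_context_ids() is the new blocking helper, so this works outside async code.
from pappyproxy.plugin import main_context_ids
from pappyproxy.http import request_by_id

def run_macro(args):
    for reqid in main_context_ids():
        req = request_by_id(reqid)
        print '%s: %s' % (reqid, req.status_line)
```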
@@ -1 +1 @@
-__version__ = '0.2.9'
+__version__ = '0.2.10'
@@ -127,7 +127,7 @@ def repeatable_parse_qs(s):
 @crochet.wait_for(timeout=180.0)
 @defer.inlineCallbacks
 def request_by_id(reqid):
-    req = Request.load_request(str(reqid))
+    req = yield Request.load_request(str(reqid))
     defer.returnValue(req)

 ##########
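For context, `request_by_id` is the blocking wrapper intended for macros, while async code should yield the underlying deferred directly, as the hunk above now does. A small sketch under those assumptions:

```python
from pappyproxy.http import Request, request_by_id

# Blocking style, fine from a macro or other synchronous code:
req = request_by_id(5)          # hypothetical request id
print req.full_request

# Async style, inside @defer.inlineCallbacks code (e.g. a console command):
# req = yield Request.load_request('5')
```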
@@ -2133,7 +2133,9 @@ class Request(HTTPMessage):
     def submit_request(request,
                        save_request=False,
                        intercepting_macros={},
-                       stream_transport=None):
+                       stream_transport=None,
+                       _factory_string_transport=False,
+                       _conn_info=None):
         """
         submit_request(request, save_request=False, intercepting_macros={}, stream_transport=None)

@@ -2152,6 +2154,9 @@ class Request(HTTPMessage):
         from .proxy import ProxyClientFactory, get_next_connection_id, get_endpoint
         from .pappy import session
+        from .tests.testutil import TLSStringTransport

+        # _factory__string_transport, _conn_classes are only for unit tests. Do not use.

         factory = None
         if stream_transport is None:
@@ -2164,6 +2169,16 @@ class Request(HTTPMessage):
                 save_all=save_request,
                 stream_response=True,
                 return_transport=stream_transport)

+        # Set up stuff for unit test if needed
+        if _factory_string_transport:
+            factory._use_string_transport = True
+        if _conn_info is not None:
+            # Pass factory back to unit test
+            _conn_info['factory'] = factory
+            factory._conn_info = _conn_info
+
         # Set up factory settings
         factory.intercepting_macros = intercepting_macros
         factory.connection_id = get_next_connection_id()
         factory.connect()
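A hedged sketch of how this entry point might be used to resubmit a stored request, assuming `submit_request` can be called as shown in its docstring and returns a deferred that fires once the exchange completes (the underscore-prefixed arguments are test-only hooks and are not used here):

```python
from twisted.internet import defer
from pappyproxy.http import Request

@defer.inlineCallbacks
def resubmit(reqid):
    # Load a stored request, send it through the proxy machinery, and return
    # whatever response (if any) ends up attached to it.
    req = yield Request.load_request(str(reqid))
    yield Request.submit_request(req, save_request=True)
    defer.returnValue(req.response)
```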
@@ -10,7 +10,9 @@ import imp
 import os
 import pappyproxy
 import stat
+import crochet

+from twisted.internet import defer
 from .proxy import add_intercepting_macro as proxy_add_intercepting_macro
 from .proxy import remove_intercepting_macro as proxy_remove_intercepting_macro
 from .colors import Colors
@@ -146,7 +148,7 @@ def req_history(num=-1, ids=None, include_unmangled=False):
     """
     return pappyproxy.Request.cache.req_it(num=num, ids=ids, include_unmangled=include_unmangled)

-def main_context_ids(n=-1):
+def async_main_context_ids(n=-1):
     """
     Returns a deferred that resolves into a list of up to ``n`` of the
     most recent requests in the main context. You can then use
@@ -156,6 +158,17 @@ def main_context_ids(n=-1):
     """
     return pappyproxy.pappy.main_context.get_reqs(n)

+@crochet.wait_for(timeout=None)
+@defer.inlineCallbacks
+def main_context_ids(*args, **kwargs):
+    """
+    Same as :func:`pappyproxy.plugin.async_main_context_ids` but can be called
+    from macros and other non-async only functions. Cannot be called in async
+    functions.
+    """
+    ret = yield async_main_context_ids(*args, **kwargs)
+    defer.returnValue(ret)
+
 def run_cmd(cmd):
     """
     Run a command as if you typed it into the console. Try and use
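The split matters for callers: async code (console commands and plugin callbacks decorated with `@defer.inlineCallbacks`) should yield `async_main_context_ids`, while plain macro code calls the blocking `main_context_ids` wrapper. A sketch of the async side with a hypothetical command:

```python
import crochet
from twisted.internet import defer
from pappyproxy.plugin import async_main_context_ids

@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def count_context(line):
    # Yield the deferred; calling the blocking main_context_ids() from inside
    # reactor-driven code like this would hang, per the docstring above.
    ids = yield async_main_context_ids()
    print '%d requests in the current context' % len(ids)
```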
@@ -1,7 +1,7 @@
 import crochet
 import pappyproxy

-from pappyproxy.util import PappyException, confirm
+from pappyproxy.util import PappyException, confirm, autocomplete_startswith
 from pappyproxy.http import Request
 from twisted.internet import defer

@@ -40,6 +40,11 @@ class BuiltinFilters(object):
         return pappyproxy.context.Filter(BuiltinFilters._filters[name][1])

+
+def complete_filtercmd(text, line, begidx, endidx):
+    strs = [k for k, v in pappyproxy.context.Filter._filter_functions.iteritems()]
+    strs += [k for k, v in pappyproxy.context.Filter._async_filter_functions.iteritems()]
+    return autocomplete_startswith(text, strs)

 @crochet.wait_for(timeout=None)
 @defer.inlineCallbacks
 def filtercmd(line):
@@ -179,7 +184,7 @@ def load_cmds(cmd):
         'filter_clear': (filter_clear, None),
         'filter_up': (filter_up, None),
         'builtin_filter': (builtin_filter, complete_builtin_filter),
-        'filter': (filtercmd, None),
+        'filter': (filtercmd, complete_filtercmd),
     })
     cmd.add_aliases([
         #('filter_prune', ''),
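Putting the pieces together, a console plugin wires a completer to a command by passing a `(handler, completer)` tuple to `cmd.set_cmds`, exactly as `load_cmds` does above. A sketch with a made-up command:

```python
from pappyproxy.util import autocomplete_startswith

# Hypothetical command: greet one of a fixed set of names, with tab completion.
NAMES = ['alice', 'bob', 'carol']

def complete_greet(text, line, begidx, endidx):
    return autocomplete_startswith(text, NAMES)

def greet(line):
    print 'Hello, %s!' % (line or 'world')

def load_cmds(cmd):
    cmd.set_cmds({
        'greet': (greet, complete_greet),
    })
```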
@@ -4,7 +4,7 @@ import shlex

 from pappyproxy.plugin import active_intercepting_macros, add_intercepting_macro, remove_intercepting_macro
 from pappyproxy.macros import load_macros, macro_from_requests, gen_imacro
-from pappyproxy.util import PappyException, load_reqlist
+from pappyproxy.util import PappyException, load_reqlist, autocomplete_startswith
 from twisted.internet import defer

 loaded_macros = []
@@ -65,6 +65,11 @@ def load_macros_cmd(line):
         loaded_int_macros.append(macro)
         print 'Loaded "%s"' % macro

+def complete_run_macro(text, line, begidx, endidx):
+    global macro_dict
+    strs = [k for k,v in macro_dict.iteritems()]
+    return autocomplete_startswith(text, strs)
+
 def run_macro(line):
     """
     Run a macro
@@ -81,6 +86,24 @@ def run_macro(line):
     macro = macro_dict[mname]
     macro.execute(args[1:])

+def complete_run_int_macro(text, line, begidx, endidx):
+    global int_macro_dict
+    global loaded_int_macros
+    running = []
+    not_running = []
+    for macro in loaded_int_macros:
+        if macro.name in [m.name for k, m in active_intercepting_macros().iteritems()]:
+            running.append(macro)
+        else:
+            not_running.append(macro)
+    strs = []
+    for m in not_running:
+        strs.append(macro.name)
+        strs.append(macro.file_name)
+        if macro.short_name:
+            strs.append(macro.short_name)
+    return autocomplete_startswith(text, strs)
+
 def run_int_macro(line):
     """
     Activate an intercepting macro
@@ -103,6 +126,24 @@ def run_int_macro(line):
         print 'Error initializing macro:'
         raise e

+def complete_stop_int_macro(text, line, begidx, endidx):
+    global int_macro_dict
+    global loaded_int_macros
+    running = []
+    not_running = []
+    for macro in loaded_int_macros:
+        if macro.name in [m.name for k, m in active_intercepting_macros().iteritems()]:
+            running.append(macro)
+        else:
+            not_running.append(macro)
+    strs = []
+    for m in running:
+        strs.append(macro.name)
+        strs.append(macro.file_name)
+        if macro.short_name:
+            strs.append(macro.short_name)
+    return autocomplete_startswith(text, strs)
+
 def stop_int_macro(line):
     """
     Stop a running intercepting macro
@@ -201,9 +242,9 @@ def load_cmds(cmd):
         'generate_int_macro': (generate_int_macro, None),
         'generate_macro': (generate_macro, None),
         'list_int_macros': (list_int_macros, None),
-        'stop_int_macro': (stop_int_macro, None),
-        'run_int_macro': (run_int_macro, None),
-        'run_macro': (run_macro, None),
+        'stop_int_macro': (stop_int_macro, complete_stop_int_macro),
+        'run_int_macro': (run_int_macro, complete_run_int_macro),
+        'run_macro': (run_macro, complete_run_macro),
         'load_macros': (load_macros_cmd, None),
     })
     cmd.add_aliases([
@@ -187,6 +187,10 @@ def run_without_color(line):
         session.cons.onecmd(line.strip())
     print remove_color(output.val)

+def version(line):
+    import pappyproxy
+    print pappyproxy.__version__
+
 def load_cmds(cmd):
     cmd.set_cmds({
         'clrmem': (clrmem, None),
@@ -197,6 +201,7 @@ def load_cmds(cmd):
         'merge': (merge_datafile, None),
         'nocolor': (run_without_color, None),
         'watch': (watch_proxy, None),
+        'version': (version, None),
     })
     cmd.add_aliases([
         #('rpy', ''),
@@ -2,7 +2,7 @@ import crochet
 import pappyproxy
 import shlex

-from pappyproxy.plugin import main_context_ids
+from pappyproxy.plugin import async_main_context_ids
 from pappyproxy.util import PappyException, load_reqlist
 from twisted.internet import defer
 from pappyproxy.http import Request
@@ -26,7 +26,7 @@ def tag(line):
         print 'Tagging %s with %s' % (', '.join(reqids), tag)
     else:
         print "Tagging all in-context requests with %s" % tag
-        reqids = yield main_context_ids()
+        reqids = yield async_main_context_ids()

     for reqid in reqids:
         req = yield Request.load_request(reqid)
@@ -58,7 +58,7 @@ def untag(line):
         print 'Removing tag %s from %s' % (tag, ', '.join(reqids))
     else:
         print "Removing tag %s from all in-context requests" % tag
-        reqids = yield main_context_ids()
+        reqids = yield async_main_context_ids()

     for reqid in reqids:
         req = yield Request.load_request(reqid)
@@ -10,7 +10,7 @@ import urllib
 from pappyproxy.util import PappyException, utc2local, load_reqlist, print_table, print_request_rows, get_req_data_row
 from pappyproxy.http import Request, repeatable_parse_qs
 from twisted.internet import defer
-from pappyproxy.plugin import main_context_ids
+from pappyproxy.plugin import async_main_context_ids
 from pappyproxy.colors import Colors, Styles, verb_color, scode_color, path_formatter, host_color
 from pygments.formatters import TerminalFormatter
 from pygments.lexers.data import JsonLexer
@@ -255,7 +255,7 @@ def list_reqs(line):
         print_count = 25

     rows = []
-    ids = yield main_context_ids(print_count)
+    ids = yield async_main_context_ids(print_count)
     for i in ids:
         req = yield Request.load_request(i)
         rows.append(get_req_data_row(req))
@@ -477,7 +477,7 @@ def get_param_info(line):

     found_params = {}

-    ids = yield main_context_ids()
+    ids = yield async_main_context_ids()
     for i in ids:
         req = yield Request.load_request(i)
         for k, v in req.url_params.all_pairs():
@@ -501,8 +501,9 @@ def dump_response(line):
     """
     # dump the data of a response
     args = shlex.split(line)
-    reqid = args[0]
-    req = yield Request.load_request(reqid)
+    reqs = yield load_reqlist(args[0])
+    for req in reqs:
         if req.response:
             rsp = req.response
             if len(args) >= 2:
                 fname = args[1]
@@ -512,6 +513,8 @@ def dump_response(line):
                 with open(fname, 'w') as f:
                     f.write(rsp.body)
                 print 'Response data written to %s' % fname
+        else:
+            print 'Request %s does not have a response' % req.reqid

 @crochet.wait_for(timeout=None)
 @defer.inlineCallbacks
@@ -525,7 +528,7 @@ def site_map(line):
         paths = True
     else:
         paths = False
-    ids = yield main_context_ids()
+    ids = yield async_main_context_ids()
     paths_set = set()
     for reqid in ids:
         req = yield Request.load_request(reqid)
@@ -262,13 +262,17 @@ class ProxyClientFactory(ClientFactory):
         self.intercepting_macros = {}
         self.use_as_proxy = False
         self.sendback_function = None
+        self.dropped_request = False
+
+        # Only used for unit tests. Do not use.
+        self._use_string_transport = False
+        self._conn_info = None

     def log(self, message, symbol='*', verbosity_level=1):
         log(message, id=self.connection_id, symbol=symbol, verbosity_level=verbosity_level)

-    def buildProtocol(self, addr, _do_callback=True):
+    def buildProtocol(self, addr):
         from pappyproxy.pappy import session
-        # _do_callback is intended to help with testing and should not be modified
         if self.use_as_proxy and context.in_scope(self.request):
             p = UpstreamHTTPProxyClient(self.request)
             if 'username' in session.config.http_proxy and 'password' in session.config.http_proxy:
@@ -279,7 +283,6 @@ class ProxyClientFactory(ClientFactory):
             p = ProxyClient(self.request)
         p.factory = self
         self.log("Building protocol", verbosity_level=3)
-        if _do_callback:
         p.data_defer.addCallback(self.return_request_pair)
         return p

@@ -310,8 +313,15 @@ class ProxyClientFactory(ClientFactory):
         else:
             yield self.request.async_deep_save()

-        (sendreq, mangled) = yield macros.mangle_request(sendreq, mangle_macros)
-
+        (mangreq, mangled) = yield macros.mangle_request(sendreq, mangle_macros)
+        if mangreq is None:
+            self.log("Request dropped. Closing connections.")
+            self.request.tags.add('dropped')
+            self.request.response = None
+            self.dropped_request = True
+            defer.returnValue(None)
+        else:
+            sendreq = mangreq
         if sendreq and mangled and self.save_all:
             self.start_time = datetime.datetime.utcnow()
             sendreq.time_start = self.start_time
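From a user's perspective, the new drop handling means an intercepting macro can veto a request by returning None from its mangle hook; the proxy then tags the request as 'dropped' and never forwards it. A hypothetical macro sketch, assuming the standard intercepting-macro hook name:

```python
MACRO_NAME = 'Drop tracker requests'

def mangle_request(request):
    # Returning None drops the request: it is tagged 'dropped' and the client
    # gets a stub response instead of the real one.
    if 'tracker.example.faketld' in request.host:
        return None
    return request
```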
@@ -337,7 +347,7 @@ class ProxyClientFactory(ClientFactory):
         from pappyproxy.pappy import session

         self.end_time = datetime.datetime.utcnow()
-        if session.config.debug_to_file or session.config.debug_verbosity > 0:
+        if session.config.debug_to_file or session.config.debug_verbosity > 0 and request.response:
             log_request(printable_data(request.response.full_response), id=self.connection_id, symbol='<m', verbosity_level=3)

         request.time_start = self.start_time
@@ -351,6 +361,7 @@ class ProxyClientFactory(ClientFactory):
         else:
             yield request.async_deep_save()

+        if request.response:
             mangled = yield macros.mangle_response(request, mangle_macros)

         if mangled and self.save_all:
@@ -369,6 +380,10 @@ class ProxyClientFactory(ClientFactory):
         from pappyproxy.pappy import session

         yield self.prepare_request()
+        if self.dropped_request:
+            self.data_defer.callback(self.request)
+            defer.returnValue(None)
+
         if context.in_scope(self.request):
             # Get connection using config
             endpoint = get_endpoint(self.request.host,
@@ -380,8 +395,22 @@ class ProxyClientFactory(ClientFactory):
             # Just forward it normally
             endpoint = get_endpoint(self.request.host,
                                     self.request.port,
-                                    self.request.is_ssl)
+                                    self.request.is_ssl,
+                                    socks_config=None,
+                                    use_http_proxy=False)

+        if self._use_string_transport:
+            from pappyproxy.tests.testutil import TLSStringTransport
+            # "Connect" via string transport
+            protocol = self.buildProtocol(('127.0.0.1', 0))
+
+            # Pass the protocol back to the test
+            if self._conn_info:
+                self._conn_info['protocol'] = protocol
+
+            tr = TLSStringTransport()
+            protocol.makeConnection(tr)
+        else:
+            # Connect via the endpoint
+            self.log("Accessing using endpoint")
             yield endpoint.connect(self)
@@ -529,6 +558,14 @@ class ProxyServer(LineReceiver):
     def send_response_back(self, request):
         if request.response is not None:
             self.transport.write(request.response.full_response)
+        else:
+            droppedrsp = http.Response(('HTTP/1.1 200 OK\r\n'
+                                        'Connection: close\r\n'
+                                        'Cache-control: no-cache\r\n'
+                                        'Pragma: no-cache\r\n'
+                                        'Cache-control: no-store\r\n'
+                                        'X-Frame-Options: DENY\r\n\r\n'))
+            self.transport.write(droppedrsp.full_message)
         self.log("Response sent back, losing connection")
         self.transport.loseConnection()
@@ -1,9 +1,24 @@
 from .http import ResponseCookie

 class Session(object):
+    """
+    A class used to maintain a session over multiple requests. Can remember cookies
+    and apply a specific header to requests. It is also possible to give the session
+    a list of cookie names and it will only save those cookies.
+    """

     def __init__(self, cookie_names=None, header_names=None,
                  cookie_vals=None, header_vals=None):
+        """
+        Session(self, cookie_names=None, header_names=None, cookie_vals=None, header_vals=None)
+        Constructor
+
+        :param cookie_names: A whitelist for cookies that should be saved from :func:`~pappyproxy.session.Session.save_req` and :func:`~pappyproxy.session.Session.save_rsp` in the session. If no values are given, all cookies will be saved.
+        :param header_names: A whitelist for headers that should be saved from :func:`~pappyproxy.session.Session.save_req` in the session. If no values are given, no headers will be saved.
+        :param cookie_vals: A dictionary of cookies to populate the session with. The key should be the cookie name, and the value can be either a string or a :class:`~pappyproxy.http.ResponseCookie`. If a :class:`~pappyproxy.http.ResponseCookie` is given, its flags will be used in :func:`~pappyproxy.session.Session.apply_rsp`.
+        :param header_vals: A dictionary of header values to populate the session with. The key should be the header name and the value should be the header value as a string.
+        """

         self.cookies = cookie_names or []
         self.headers = header_names or []
         self.cookie_vals = cookie_vals or {}
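A short sketch of how a Session might be used from an intercepting macro to keep a cookie and a header consistent across requests. The cookie and header names are made up, and the hook names assume the standard intercepting-macro interface:

```python
from pappyproxy.session import Session

sess = Session(cookie_names=['session_id'],
               header_vals={'X-Api-Key': 'FAKE-KEY'})

def mangle_request(request):
    sess.apply_req(request)              # add remembered cookies/headers
    return request

def mangle_response(request):
    if request.response:
        sess.save_rsp(request.response)  # pick up a new session_id if one was set
    return request.response
```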
@@ -19,25 +34,61 @@ class Session(object):
             if k not in self.headers:
                 self.headers.append(k)

-    def apply_req(self, req):
-        for k, v in self.cookie_vals.iteritems():
+    def _cookie_obj(k, v):
+        """
+        Returns the value as a cookie object regardless of if the cookie is a string or a ResponseCookie.
+        """
         if isinstance(v, ResponseCookie):
-            req.cookies[v.key] = v.val
+            return v
         else:
-            req.cookies[k] = v
+            cookie_str = '%s=%s' % (k, v)
+            return ResponseCookie(cookie_str)
+
+    def _cookie_val(v):
+        """
+        Returns the value of the cookie regardless of if the value is a string or a ResponseCookie
+        """
+        if isinstance(v, ResponseCookie):
+            return v.val
+        else:
+            return v
+
+    def apply_req(self, req):
+        """
+        apply_req(request)
+
+        Apply saved headers and cookies to the request
+        """
+
+        for k, v in self.cookie_vals.iteritems():
+            req.cookies[k] = self._cookie_val(v)
         for k, v in self.header_vals.iteritems():
             req.headers[k] = v

     def apply_rsp(self, rsp):
+        """
+        apply_rsp(response)
+
+        Will add a Set-Cookie header for each saved cookie. Will not
+        apply any saved headers. If the cookie was added from a call to
+        :func:`~pappyproxy.session.Session.save_rsp`, the Set-Cookie flags
+        will be the same as the original response.
+        """
+
         for k, v in self.cookie_vals.iteritems():
-            if isinstance(v, ResponseCookie):
-                rsp.set_cookie(v)
-            else:
-                cookie_str = '%s=%s' % (k, v)
-                rsp.set_cookie(ResponseCookie(cookie_str))
+            val = self._cookie_obj(v)
+            rsp.set_cookie(val)
         # Don't apply headers to responses

-    def get_req(self, req, cookies=None, headers=None):
+    def save_req(self, req, cookies=None, headers=None):
+        """
+        save_req(req, cookies=None, headers=None)
+
+        Updates the state of the session from the given request.
+        Cookie and headers can be added to their whitelists by passing in a list
+        for either ``cookies`` or ``headers``.
+        """
+
         if cookies:
             for c in cookies:
                 if c not in self.cookies:
@@ -64,7 +115,14 @@ class Session(object):
             if header in self.headers:
                 self.header_vals[header] = req.headers[header]

-    def get_rsp(self, rsp, cookies=None):
+    def save_rsp(self, rsp, cookies=None):
+        """
+        save_rsp(rsp, cookies=None)
+
+        Update the state of the session from the response. Only cookies can be
+        updated from a response. Additional values can be added to the whitelist
+        by passing in a list of values for the ``cookies`` parameter.
+        """
         if cookies:
             for c in cookies:
                 if c not in self.cookies:
@@ -80,3 +138,38 @@ class Session(object):
         for k, v in rsp.cookies.all_pairs():
             if v.key in self.cookies:
                 self.cookie_vals[v.key] = v
+
+    def set_cookie(key, val):
+        """
+        set_cookie(key, val)
+
+        Set a cookie in the session. ``val`` can be either a string or a :class:`~pappyproxy.http.ResponseCookie`.
+        If a :class:`~pappyproxy.http.ResponseCookie` is used, make sure its ``key`` value is the same as
+        the key passed in to the function.
+        """
+        self.cookie_vals[key] = val
+
+    def get_cookie(key):
+        """
+        get_cookie(key)
+
+        Returns a string with the value of the cookie with the given key, even if the value is a :class:`~pappyproxy.http.ResponseCookie`.
+        If you want to get a :class:`~pappyproxy.http.ResponseCookie`, use :func:`~pappyproxy.session.Session.get_rsp_cookie`.
+        """
+        if not key in self.cookie_vals:
+            raise KeyError('Cookie is not stored in session.')
+        v = self.cookie_vals[key]
+        return self._cookie_val(v)
+
+    def get_rsp_cookie(key):
+        """
+        get_rsp_cookie(key)
+
+        Returns the :class:`~pappyproxy.http.ResponseCookie` associated with the key
+        regardless of if the value is stored as a string or a :class:`~pappyproxy.http.ResponseCookie`.
+        """
+        if not key in self.cookie_vals:
+            raise KeyError('Cookie is not stored in session.')
+        v = self.cookie_vals[key]
+        return self._cookie_obj(v)
@@ -1,4 +1,5 @@
 from pappyproxy.http import Request, get_request, post_request, request_by_id
+from pappyproxy.plugin import main_context_ids
 from pappyproxy.context import set_tag
 from pappyproxy.iter import *

@@ -12,7 +13,7 @@ from pappyproxy.iter import *

MACRO_NAME = '{{macro_name}}'
SHORT_NAME = '{{short_name}}'

{% if req_lines %}
###########
## Requests
# It's suggested that you call .copy() on these and then edit attributes

@@ -23,7 +24,7 @@ SHORT_NAME = '{{short_name}}'
req{{ count }} = Request(({% for line in lines %}
    '{{ line }}'{% endfor %}{% set count = count+1 %}
){{ params }})
{% endfor %}
{% endfor %}{% endif %}

def run_macro(args):
    # Example:
@@ -1,209 +0,0 @@ (entire file removed)
import pytest
import mock
import pappyproxy

from pappyproxy.mangle import async_mangle_request, async_mangle_response
from pappyproxy.http import Request, Response
from testutil import no_tcp, no_database, func_deleted, mock_deferred, mock_deep_save, fake_saving

def retf(r):
    return False

@pytest.fixture
def ignore_edit(mocker):
    new_edit = mock.MagicMock()
    new_edit.return_value = mock_deferred(None)
    mocker.patch('pappyproxy.console.edit_file', new=new_edit)

@pytest.fixture
def ignore_delete(mocker):
    new_os_remove = mock.MagicMock()
    mocker.patch('os.remove', new=new_os_remove)
    return new_os_remove

@pytest.fixture(autouse=True)
def no_logging(mocker):
    mocker.patch('pappyproxy.proxy.log')

@pytest.fixture
def req():
    r = Request()
    r.status_line = 'GET / HTTP/1.1'
    r.host = 'www.ffffff.eeeeee'
    r.raw_data = 'AAAA'
    return r

@pytest.fixture
def req_w_rsp(req):
    r = Response()
    r.status_line = 'HTTP/1.1 200 OK'
    r.headers['Test-Header'] = 'ABC123'
    r.raw_data = 'AAAA'
    req.response = r
    return req

@pytest.fixture
def mock_tempfile(mocker):
    new_tfile_obj = mock.MagicMock()
    tfile_instance = mock.MagicMock()
    new_tfile_obj.return_value.__enter__.return_value = tfile_instance

    tfile_instance.name = 'mockTemporaryFile'
    mocker.patch('tempfile.NamedTemporaryFile', new=new_tfile_obj)

    new_open = mock.MagicMock()
    fake_file = mock.MagicMock(spec=file)
    new_open.return_value.__enter__.return_value = fake_file
    mocker.patch('__builtin__.open', new_open)

    return (new_tfile_obj, tfile_instance, new_open, fake_file)


########################
## Test request mangling

@pytest.inlineCallbacks
def test_mangle_request_edit(req, mock_deep_save, mock_tempfile,
                             ignore_edit, ignore_delete):
    tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
    r = req
    new_contents = ('GET / HTTP/1.1\r\n'
                    'Content-Length: 4\r\n\r\n'
                    'BBBB')
    fake_file.read.return_value = new_contents
    new_req = yield async_mangle_request(r)
    assert not mock_deep_save.called
    assert tfile_obj.called
    assert tfile_instance.write.called
    assert tfile_instance.write.call_args == ((r.full_request,),)
    assert new_open.called
    assert fake_file.read.called

    assert new_req.full_request == new_contents

@pytest.inlineCallbacks
def test_mangle_request_edit_newlines(req, mock_deep_save, mock_tempfile,
                                      ignore_edit, ignore_delete):
    # Intercepting is off, request in scope
    tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
    r = req
    new_contents = ('GET / HTTP/1.1\r\n'
                    'Test-Head: FOOBIE\n'
                    'Content-Length: 4\n\r\n'
                    'BBBB')
    fake_file.read.return_value = new_contents
    new_req = yield async_mangle_request(r)

    assert new_req.full_request == ('GET / HTTP/1.1\r\n'
                                    'Test-Head: FOOBIE\r\n'
                                    'Content-Length: 4\r\n\r\n'
                                    'BBBB')
    assert new_req.headers['Test-Head'] == 'FOOBIE'

@pytest.inlineCallbacks
def test_mangle_request_drop(req, mock_deep_save, mock_tempfile,
                             ignore_edit, ignore_delete):
    # Intercepting is off, request in scope
    tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
    r = req
    new_contents = ''
    fake_file.read.return_value = new_contents
    new_req = yield async_mangle_request(r)

    assert new_req is None

@pytest.inlineCallbacks
def test_mangle_request_edit_len(req, mock_deep_save, mock_tempfile,
                                 ignore_edit, ignore_delete):
    # Intercepting is off, request in scope
    tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
    r = req
    new_contents = ('GET / HTTP/1.1\r\n'
                    'Test-Head: FOOBIE\n'
                    'Content-Length: 4\n\r\n'
                    'BBBBAAAA')
    fake_file.read.return_value = new_contents
    new_req = yield async_mangle_request(r)

    assert new_req.full_request == ('GET / HTTP/1.1\r\n'
                                    'Test-Head: FOOBIE\r\n'
                                    'Content-Length: 8\r\n\r\n'
                                    'BBBBAAAA')


#########################
## Test response mangling

@pytest.inlineCallbacks
def test_mangle_response_edit(req_w_rsp, mock_deep_save, mock_tempfile,
                              ignore_edit, ignore_delete):
    # Intercepting is on, edit
    tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
    r = req_w_rsp
    old_rsp = r.response.full_response
    new_contents = ('HTTP/1.1 403 NOTOKIEDOKIE\r\n'
                    'Content-Length: 4\r\n'
                    'Other-Header: foobles\r\n\r\n'
                    'BBBB')
    fake_file.read.return_value = new_contents
    mangled_rsp = yield async_mangle_response(r)
    assert not mock_deep_save.called
    assert tfile_obj.called
    assert tfile_instance.write.called
    assert tfile_instance.write.call_args == ((old_rsp,),)
    assert new_open.called
    assert fake_file.read.called

    assert mangled_rsp.full_response == new_contents

@pytest.inlineCallbacks
def test_mangle_response_newlines(req_w_rsp, mock_deep_save, mock_tempfile,
                                  ignore_edit, ignore_delete):
    # Intercepting is off, request in scope
    tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
    r = req_w_rsp
    old_rsp = r.response.full_response
    new_contents = ('HTTP/1.1 403 NOTOKIEDOKIE\n'
                    'Content-Length: 4\n'
                    'Other-Header: foobles\r\n\n'
                    'BBBB')
    fake_file.read.return_value = new_contents
    mangled_rsp = yield async_mangle_response(r)

    assert mangled_rsp.full_response == ('HTTP/1.1 403 NOTOKIEDOKIE\r\n'
                                         'Content-Length: 4\r\n'
                                         'Other-Header: foobles\r\n\r\n'
                                         'BBBB')
    assert mangled_rsp.headers['Other-Header'] == 'foobles'

@pytest.inlineCallbacks
def test_mangle_response_drop(req_w_rsp, mock_deep_save, mock_tempfile,
                              ignore_edit, ignore_delete):
    # Intercepting is off, request in scope
    tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
    r = req_w_rsp
    old_rsp = r.response.full_response
    new_contents = ''
    fake_file.read.return_value = new_contents
    mangled_rsp = yield async_mangle_response(r)

    assert mangled_rsp is None

@pytest.inlineCallbacks
def test_mangle_response_new_len(req_w_rsp, mock_deep_save, mock_tempfile,
                                 ignore_edit, ignore_delete):
    # Intercepting is off, request in scope
    tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
    r = req_w_rsp
    old_rsp = r.response.full_response
    new_contents = ('HTTP/1.1 403 NOTOKIEDOKIE\n'
                    'Content-Length: 4\n'
                    'Other-Header: foobles\r\n\n'
                    'BBBBAAAA')
    fake_file.read.return_value = new_contents
    mangled_rsp = yield async_mangle_response(r)

    assert mangled_rsp.full_response == ('HTTP/1.1 403 NOTOKIEDOKIE\r\n'
                                         'Content-Length: 8\r\n'
                                         'Other-Header: foobles\r\n\r\n'
                                         'BBBBAAAA')
(File diff suppressed because it is too large.)
@@ -12,9 +12,7 @@ class ClassDeleted():
     pass

 class TLSStringTransport(StringTransport):

-    def startTLS(self, context, factory):
-        pass
+    startTLS = mock.MagicMock()

 class PappySession(object):
@@ -119,12 +119,21 @@ def load_reqlist(line, allow_special=True, ids_only=False):
     :Returns: Twisted deferred
     """
     from .http import Request
+    from .plugin import async_main_context_ids
     # Parses a comma separated list of ids and returns a list of those requests
     # prints any errors
     if not line:
         raise PappyException('Request id(s) required')
-    ids = re.split(',\s*', line)
     reqs = []
+
+    if line == '*':
+        ids = yield async_main_context_ids()
+        for i in ids:
+            req = yield Request.load_request(i)
+            reqs.append(req)
+        defer.returnValue(reqs)
+
+    ids = re.split(',\s*', line)
     if not ids_only:
         for reqid in ids:
             try:
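With the wildcard branch above, any command built on `load_reqlist` accepts either an explicit list like `1,2,u3` or `*` for everything in the current context. A sketch of such a hypothetical command (`load_reqlist` returns a deferred, so it is yielded inside `@defer.inlineCallbacks`):

```python
import crochet
from twisted.internet import defer
from pappyproxy.util import load_reqlist

@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def print_hosts(line):
    reqs = yield load_reqlist(line)      # e.g. "1,2,u3" or "*"
    for req in reqs:
        print '%s: %s' % (req.reqid, req.host)
```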
@@ -336,3 +345,14 @@ def copy_to_clipboard(text):

 def clipboard_contents():
     return pyperclip.paste()
+
+def autocomplete_startswith(text, lst, allow_spaces=False):
+    ret = None
+    if not text:
+        ret = lst[:]
+    else:
+        ret = [n for n in lst if n.startswith(text)]
+    if not allow_spaces:
+        ret = [s for s in ret if ' ' not in s]
+    return ret
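`autocomplete_startswith` is a plain prefix filter, so it is easy to reuse in custom completers; for example:

```python
from pappyproxy.util import autocomplete_startswith

print autocomplete_startswith('fil', ['filter', 'filter_up', 'fu'])
# ['filter', 'filter_up']

print autocomplete_startswith('', ['run macro', 'rma'])
# ['rma']  -- entries containing spaces are dropped unless allow_spaces=True
```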
setup.py (1 change)

@@ -26,6 +26,7 @@ setup(name='pappyproxy',
          'cmd2>=0.6.8',
          'crochet>=1.4.0',
          'Jinja2>=2.8',
          'lxml>=3.6.0',
          'pygments>=2.0.2',
          'pyperclip>=1.5.26',
          'pytest-cov>=2.2.0',