Version 0.2.12

Rob Glew 2016-05-02 13:17:25 -04:00
parent 992edab315
commit f28ab4fe96
32 changed files with 2324 additions and 754 deletions


@@ -1 +1 @@
__version__ = '0.2.11'
__version__ = '0.2.12'


@@ -12,7 +12,7 @@ Handles creating a listening server bound to localhost that other processes can
use to interact with the proxy.
"""
debug = True
debug = False
class CommServer(LineReceiver):
MAX_LENGTH=sys.maxint


@@ -1108,6 +1108,15 @@ class Request(HTTPMessage):
if self.unmangled:
retreq.unmangled = self.unmangled.copy()
return retreq
def duplicate(self):
retreq = self.copy()
retreq.reqid = self.reqid
if self.unmangled:
retreq.unmangled = self.unmangled.duplicate()
if self.response:
retreq.response = self.response.duplicate()
return retreq
@property
def rsptime(self):
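The new duplicate() differs from copy() in that it preserves the request's database ID and recursively duplicates the attached unmangled request and response, giving an independent object graph for the same stored request. A minimal sketch of those semantics, where req stands in for any existing Request:

def check_duplicate_semantics(req):
    # duplicate() keeps the reqid, unlike copy()
    dup = req.duplicate()
    assert dup.reqid == req.reqid

    # the linked messages are duplicated too, so they are separate objects
    if req.unmangled:
        assert dup.unmangled is not req.unmangled
    if req.response:
        assert dup.response is not req.response

    # the duplicate can therefore be modified freely without touching the
    # instance that may still be sitting in Request.cache
    dup.response = None
    return dup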
@@ -1907,8 +1916,9 @@ class Request(HTTPMessage):
if use_cache:
use_cache.evict(self.reqid)
Request.cache.ordered_ids.remove(self.reqid)
Request.cache.all_ids.remove(self.reqid)
if self.reqid in Request.cache.ordered_ids:
Request.cache.ordered_ids.remove(self.reqid)
if self.reqid in Request.cache.req_times:
del Request.cache.req_times[self.reqid]
if self.reqid in Request.cache.inmem_reqs:
@@ -2143,16 +2153,17 @@ class Request(HTTPMessage):
if ret_unmangled:
if not r.unmangled:
raise PappyException("Request %s was not mangled"%r.reqid)
return r.unmangled
return r.unmangled.duplicate()
if rsp_unmangled:
if not r.response:
raise PappyException("Request %s does not have a response" % r.reqid)
if not r.response.unmangled:
raise PappyException("Response to request %s was not mangled" % r.reqid)
r.response = r.response.unmangled
return r
retreq = r.duplicate()
retreq.response = retreq.response.unmangled
return retreq
else:
return r
return r.duplicate()
# Get it through the cache
if use_cache and cache_to_use:
@@ -2234,9 +2245,13 @@ class Request(HTTPMessage):
# Set up factory settings
factory.intercepting_macros = intercepting_macros
factory.connection_id = get_next_connection_id()
factory.connect()
new_req = yield factory.data_defer
request.response = new_req.response
try:
yield factory.connect()
new_req = yield factory.data_defer
request.response = new_req.response
except Exception as e:
request.response = None
raise e
defer.returnValue(request)
@defer.inlineCallbacks
@@ -2271,9 +2286,14 @@ class Request(HTTPMessage):
Submits the request using its host, port, etc. and updates its response value
to the resulting response.
Cannot be called in async functions.
If an error is encountered while submitting the request, it is printed
to the console.
This is what you should use to submit your requests in macros.
"""
yield self.async_submit(mangle=mangle)
try:
yield self.async_submit(mangle=mangle)
except Exception as e:
print 'Submitting request to %s failed: %s' % (self.host, str(e))
class Response(HTTPMessage):
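Per the updated docstring above, submit() is the call to use from synchronous macro code; with this change a connection failure is printed to the console and the request's response is simply left as None instead of the exception escaping into the macro. A rough usage sketch in the codebase's Python 2 style; building the Request straight from a raw message string and the example host are assumptions made only for illustration:

from pappyproxy.http import Request

req = Request(('GET / HTTP/1.1\r\n'
               'Host: www.example.com\r\n\r\n'))
req.host = 'www.example.com'
req.port = 80

req.submit()  # blocks until the request completes or fails

if req.response is None:
    pass  # submission failed; the error was already printed to the console
else:
    print req.response.full_message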
@@ -2316,6 +2336,13 @@ class Response(HTTPMessage):
retrsp.unmangled = self.unmangled.copy()
return retrsp
def duplicate(self):
retrsp = self.copy()
retrsp.rspid = self.rspid
if self.unmangled:
retrsp.unmangled = self.unmangled.duplicate()
return retrsp
@property
def raw_headers(self):
"""


@@ -139,7 +139,7 @@ def scope_list(line):
"""
pappyproxy.context.print_scope()
@crochet.wait_for(timeout=None)
#@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def filter_prune(line):
"""
@@ -155,11 +155,11 @@ def filter_prune(line):
# We copy so that we're not removing items from a set we're iterating over
act_reqs = yield pappyproxy.pappy.main_context.get_reqs()
inact_reqs = Request.cache.all_ids.difference(set(act_reqs))
inact_reqs = inact_reqs.difference(set(Request.cache.unmangled_ids))
inact_reqs = set(Request.cache.req_ids()).difference(set(act_reqs))
message = 'This will delete %d/%d requests. You can NOT undo this!! Continue?' % (len(inact_reqs), (len(inact_reqs) + len(act_reqs)))
if not confirm(message, 'n'):
defer.returnValue(None)
print message
# if not confirm(message, 'n'):
# defer.returnValue(None)
for reqid in inact_reqs:
try:


@@ -3,6 +3,9 @@ if !has('python')
finish
endif
" Settings to make life easier
set hidden
let s:pyscript = resolve(expand('<sfile>:p:h') . '/repeater.py')
function! RepeaterAction(...)


@@ -14,6 +14,7 @@ from twisted.internet import defer
from twisted.internet import reactor, ssl
from twisted.internet.protocol import ClientFactory, ServerFactory
from twisted.protocols.basic import LineReceiver
from twisted.python.failure import Failure
next_connection_id = 1
@@ -86,23 +87,49 @@ def get_endpoint(target_host, target_port, target_ssl, socks_config=None, use_ht
tcp_endpoint = TCP4ClientEndpoint(reactor, sock_host, sock_port)
socks_endpoint = SOCKS5ClientEndpoint(target_host, target_port, tcp_endpoint, methods=methods)
if target_ssl:
endpoint = TLSWrapClientEndpoint(ClientTLSContext(), socks_endpoint)
endpoint = TLSWrapClientEndpoint(ssl.ClientContextFactory(), socks_endpoint)
else:
endpoint = socks_endpoint
else:
if target_ssl:
endpoint = SSL4ClientEndpoint(reactor, target_host, target_port,
ClientTLSContext())
ssl.ClientContextFactory())
else:
endpoint = TCP4ClientEndpoint(reactor, target_host, target_port)
return endpoint
def is_wildcardable_domain_name(domain):
"""
Guesses if this is a domain that can have a wildcard CN
"""
parts = domain.split('.')
if len(parts) <= 2:
# can't wildcard single names or root domains
return False
if len(parts) != 4:
return True
for part in parts:
try:
v = int(part)
if v < 0 or v > 255:
return True
except ValueError:
return True
return False
def get_wildcard_cn(domain):
"""
Returns a wildcard CN for the domain given
"""
top_parts = domain.split('.')[1:] # Wildcards the first subdomain
return '*.' + '.'.join(top_parts) # convert to *.example.com
def get_most_general_cn(domain):
if is_wildcardable_domain_name(domain):
return get_wildcard_cn(domain)
else:
return domain
class ClientTLSContext(ssl.ClientContextFactory):
isClient = 1
def getContext(self):
return SSL.Context(SSL.TLSv1_METHOD)
class ProxyClient(LineReceiver):
def __init__(self, request):
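The net effect of the three CN helpers added above is that every wildcardable host is collapsed onto one certificate per parent domain, while two-part names and IPv4 addresses keep an exact CN. A worked illustration of the behaviour that follows from that code (the hostnames are examples only):

assert get_most_general_cn('api.staging.example.com') == '*.staging.example.com'
assert get_most_general_cn('www.example.com') == '*.example.com'
assert get_most_general_cn('a.b.example.com') == '*.b.example.com'

# two-part names and bare IPv4 addresses are left untouched
assert get_most_general_cn('example.com') == 'example.com'
assert get_most_general_cn('10.0.0.1') == '10.0.0.1'

Because the certificate cache further down is keyed on this CN, api.example.com and www.example.com now share a single generated cert instead of triggering one generation each.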
@@ -113,6 +140,7 @@ class ProxyClient(LineReceiver):
self.data_defer = defer.Deferred()
self.completed = False
self.stream_response = True # used so child classes can temporarily turn off response streaming
self.data_received = False # we assume something's wrong until we get some data
self._response_obj = http.Response()
@@ -143,6 +171,7 @@ class ProxyClient(LineReceiver):
self._response_obj.add_data(data)
def dataReceived(self, data):
self.data_received = True
if self.factory.stream_response and self.stream_response:
self.factory.return_transport.write(data)
LineReceiver.dataReceived(self, data)
@@ -166,12 +195,37 @@ class ProxyClient(LineReceiver):
assert self._response_obj.full_response
self.data_defer.callback(self.request)
def respond_failure(self, message):
"""
Closes the connection to the remote server and returns an error message.
The request object will have a response of None.
"""
#self.transport.loseConnection()
self.data_defer.errback(PappyException(message))
def clientConnectionFailed(self, connector, reason):
self.log("Connection with remote server failed: %s" % reason)
def clientConnectionLost(self, connector, reason):
self.log("Connection with remote server lost: %s" % reason)
def _guess_failure_reason(self, failure):
message = failure.getErrorMessage()
try:
failure.raiseException()
except SSL.Error as e:
message = 'Error performing SSL handshake'
except Exception as e:
pass
return message
def connectionLost(self, reason):
self.log("Connection lost: %s" % reason)
if not self.data_received:
self.request.response = None
message = self._guess_failure_reason(reason)
self.respond_failure("Connection lost: %s" % message)
class UpstreamHTTPProxyClient(ProxyClient):
def __init__(self, request):
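_guess_failure_reason() re-raises the exception wrapped in the Failure via raiseException() so it can be classified by type (an SSL.Error becomes a friendlier handshake message), falling back to getErrorMessage() for anything unrecognised. A self-contained sketch of the same pattern, with a made-up exception type standing in for SSL.Error:

from twisted.python.failure import Failure

class HandshakeError(Exception):
    pass  # stand-in for the SSL.Error case handled above

def describe(failure):
    message = failure.getErrorMessage()
    try:
        failure.raiseException()   # re-raise the exception wrapped in the Failure
    except HandshakeError:
        message = 'Error performing SSL handshake'  # recognised: friendlier text
    except Exception:
        pass                       # unrecognised: keep the original message
    return message

print describe(Failure(HandshakeError('bad record mac')))  # friendly message
print describe(Failure(IOError('connection refused')))     # original message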
@@ -240,7 +294,7 @@ class UpstreamHTTPProxyClient(ProxyClient):
self._sent = False
self.log("Starting TLS", verbosity_level=3)
self.transport.startTLS(ClientTLSContext())
self.transport.startTLS(ssl.ClientContextFactory())
self.log("TLS started", verbosity_level=3)
lines = self.request.full_message.splitlines()
for l in lines:
@@ -284,6 +338,7 @@ class ProxyClientFactory(ClientFactory):
p.factory = self
self.log("Building protocol", verbosity_level=3)
p.data_defer.addCallback(self.return_request_pair)
p.data_defer.addErrback(self._data_defer_errback)
return p
def clientConnectionFailed(self, connector, reason):
@@ -416,6 +471,9 @@ class ProxyClientFactory(ClientFactory):
yield endpoint.connect(self)
self.log("Connected")
def _data_defer_errback(self, message):
self.data_defer.errback(message)
class ProxyServerFactory(ServerFactory):
def __init__(self, save_all=False):
@@ -479,15 +537,16 @@ class ProxyServer(LineReceiver):
host = self._request_obj.host
else:
host = cert_host
cn_host = get_most_general_cn(host)
if not host in cached_certs:
log("Generating cert for '%s'" % host,
log("Generating cert for '%s'" % cn_host,
verbosity_level=3)
(pkey, cert) = generate_cert(host,
(pkey, cert) = generate_cert(cn_host,
session.config.cert_dir)
cached_certs[host] = (pkey, cert)
cached_certs[cn_host] = (pkey, cert)
else:
log("Using cached cert for %s" % host, verbosity_level=3)
(pkey, cert) = cached_certs[host]
log("Using cached cert for %s" % cn_host, verbosity_level=3)
(pkey, cert) = cached_certs[cn_host]
ctx = ServerTLSContext(
private_key=pkey,
certificate=cert,
@@ -537,6 +596,7 @@ class ProxyServer(LineReceiver):
stream_transport=return_transport)
if return_transport is None:
d.addCallback(self.send_response_back)
d.addErrback(self.send_error_back)
self._reset()
def _reset(self):
@@ -568,6 +628,19 @@ class ProxyServer(LineReceiver):
self.transport.write(droppedrsp.full_message)
self.log("Response sent back, losing connection")
self.transport.loseConnection()
def send_error_back(self, failure):
errorrsp = http.Response(('HTTP/1.1 200 OK\r\n'
'Connection: close\r\n'
'Cache-control: no-cache\r\n'
'Pragma: no-cache\r\n'
'Cache-control: no-store\r\n'
'X-Frame-Options: DENY\r\n'
'Content-Length: %d\r\n\r\n'
'%s') % (len(str(failure.getErrorMessage())), str(failure.getErrorMessage())))
self.transport.write(errorrsp.full_message)
self.log("Error response sent back, losing connection")
self.transport.loseConnection()
def connectionMade(self):
if self.factory.force_ssl:
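Together with the addErrback() calls above, the error path now runs end to end: ProxyClient.respond_failure() fires data_defer's errback, ProxyClientFactory._data_defer_errback() forwards it, and the server-side deferred routes the Failure into send_error_back(), which writes the message back to the client as a small error page. A minimal illustration of that Deferred routing, with the handler body simplified to a print:

from twisted.internet import defer
from twisted.python.failure import Failure

def send_error_back(failure):
    # simplified stand-in for the real handler above
    print 'would send error page: %s' % failure.getErrorMessage()

d = defer.Deferred()
d.addCallback(lambda req: 'would send the normal response back')
d.addErrback(send_error_back)

# simulating respond_failure(): an errback skips the callback chain and
# lands in send_error_back instead
d.errback(Failure(Exception('Connection lost: Error performing SSL handshake')))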


@@ -185,6 +185,24 @@ class RequestCache(object):
if count >= num and num != -1:
break
def req_ids(self, num=-1, ids=None, include_unmangled=False):
"""
Returns a list of IDs
"""
retids = []
over = list(self.ordered_ids)
for reqid in over:
if ids is not None and reqid not in ids:
continue
if not include_unmangled and reqid in self.unmangled_ids:
continue
if reqid in self.all_ids:
retids.append(reqid)
if len(retids) >= num and num != -1:
break
return retids
@defer.inlineCallbacks
def load_by_tag(self, tag):
reqs = yield pappyproxy.http.Request.load_requests_by_tag(tag, cust_cache=self, cust_dbpool=self.dbpool)


@@ -110,3 +110,24 @@ def test_cache_inmem_evict():
assert reqs[1] in cache.inmem_reqs
assert reqs[2] in cache.inmem_reqs
assert reqs[3] in cache.inmem_reqs
def test_req_ids():
reqs = gen_reqs(5)
cache = RequestCache(3)
cache.add(reqs[0])
cache.add(reqs[1])
cache.add(reqs[2])
cache.add(reqs[3])
assert cache.req_ids() == ['4', '3', '2', '1']
def test_req_ids_unmangled():
reqs = gen_reqs(5)
cache = RequestCache(3)
reqs[0].unmangled = reqs[4]
cache.add(reqs[0])
cache.add(reqs[4])
cache.add(reqs[1])
cache.add(reqs[2])
cache.add(reqs[3])
assert cache.req_ids() == ['4', '3', '2', '1']
assert cache.req_ids(include_unmangled=True) == ['4', '3', '2', '5', '1']