Initial release
This commit is contained in:
parent
03fabf16e8
commit
f6ebcd271b
33 changed files with 5028 additions and 0 deletions
3
pappy-proxy/.coveragerc
Normal file
3
pappy-proxy/.coveragerc
Normal file
|
@ -0,0 +1,3 @@
|
|||
[run]
|
||||
omit = tests/*, schema/*
|
||||
|
9
pappy-proxy/Makefile
Normal file
9
pappy-proxy/Makefile
Normal file
|
@ -0,0 +1,9 @@
|
|||
|
||||
# Install third-party Python dependencies listed in requirements.txt.
install-third-party:
	pip install -r requirements.txt

# Run the test suite under py.test with twisted support and coverage
# (coverage config lives in .coveragerc).
test:
	py.test -rw --twisted --cov-config .coveragerc --cov=. tests/

# Same as `test`, but verbose and with a per-line term-missing coverage report.
test-verbose:
	py.test -v -rw --twisted --cov-config .coveragerc --cov-report term-missing --cov=. tests/
|
0
pappy-proxy/__init__.py
Normal file
0
pappy-proxy/__init__.py
Normal file
7
pappy-proxy/__main__.py
Normal file
7
pappy-proxy/__main__.py
Normal file
|
@ -0,0 +1,7 @@
|
|||
import pappy

from twisted.internet import reactor

if __name__ == '__main__':
    # Schedule pappy.main to run as soon as the reactor starts, then hand
    # control to the Twisted event loop (blocks until reactor.stop()).
    reactor.callWhenRunning(pappy.main)
    reactor.run()
|
22
pappy-proxy/certs/certificate.crt
Normal file
22
pappy-proxy/certs/certificate.crt
Normal file
|
@ -0,0 +1,22 @@
|
|||
-----BEGIN CERTIFICATE-----
|
||||
MIIDjzCCAncCFQDmrLdMg37vTWXeF9Zp0WjQmQWF1jANBgkqhkiG9w0BAQsFADBg
|
||||
MQswCQYDVQQGEwJVUzERMA8GA1UECAwITWljaGlnYW4xEjAQBgNVBAcMCUFubiBB
|
||||
cmJvcjEUMBIGA1UECgwLUGFwcHkgUHJveHkxFDASBgNVBAMMC1BhcHB5IFByb3h5
|
||||
MB4XDTE1MTAyNjE2MDYxMVoXDTI1MTAyMzE2MDYxMVowYDELMAkGA1UEBhMCVVMx
|
||||
ETAPBgNVBAgMCE1pY2hpZ2FuMRIwEAYDVQQHDAlBbm4gQXJib3IxFDASBgNVBAoM
|
||||
C1BhcHB5IFByb3h5MRQwEgYDVQQDDAtQYXBweSBQcm94eTCCASIwDQYJKoZIhvcN
|
||||
AQEBBQADggEPADCCAQoCggEBAPNQo64jLgvKVKNqqLi0cDBfWqp+ZhEDaGdm3Rjl
|
||||
AFerqmDHyAeCu1GENQAwcmmeXCwMYSbjcMHSrExR+rcQRxvJ8OOp2doP43+T9hd8
|
||||
rZt+PPOiBVG0cUrfdsVdbUyGjPmZFtWaiSVG2gUOdO2m7jK5WwIEcW5u6vEfmgco
|
||||
/JLvtdgGZGIlsZGeQGcJdeZ6LaPKLHxPAkgRQduQTpK5nKiFi0Aqj4AsqddcZ4fo
|
||||
X3zGsypkt0NVTn4nMZLR9Ml5mwzTltr9BBtSVqMIMwqVkKLkGFdaIFsY5dK3UYUV
|
||||
vqLGB6ubheULLjmkv9FJLmaHfnLb2jjA17K+y3QKosMVldcCAwEAAaNFMEMwEgYD
|
||||
VR0TAQH/BAgwBgEB/wIBADAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFNo5o+5e
|
||||
a0sNMlW/75VgGJCv2AcJMA0GCSqGSIb3DQEBCwUAA4IBAQBMbpA8XkEvtpErHsy/
|
||||
FCtzQGmn88idU43fFSi0bcsWWc1ekapd7iTramItvZ8OCZD3/oVE4VIwumuJuoVk
|
||||
OU/Tip0e+haPV5f1JImdsk2f20WJ0lJ5CyrrRcddqgVrcQbB8DwaJSJRXzrSD9Cp
|
||||
UDfJhIh2zxRolGql29X6QiFukV3CIHn2hF+QYlMrxkoI0e4r6sDtmN4/VccgADdH
|
||||
pQeVz4z/ZxKBIh7Xol8K6Qr+gXnlkbp3n5WXGHbv4YsK995z9yVZpuLPUHbpnSzr
|
||||
KVJ5I4joA22uc2tqeKvfp4QsE8fa/nVNRv/LZZeCdg0zrXXpE9RoxNirwEcQwAo1
|
||||
x25g
|
||||
-----END CERTIFICATE-----
|
28
pappy-proxy/certs/private.key
Normal file
28
pappy-proxy/certs/private.key
Normal file
|
@ -0,0 +1,28 @@
|
|||
-----BEGIN PRIVATE KEY-----
|
||||
MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDzUKOuIy4LylSj
|
||||
aqi4tHAwX1qqfmYRA2hnZt0Y5QBXq6pgx8gHgrtRhDUAMHJpnlwsDGEm43DB0qxM
|
||||
Ufq3EEcbyfDjqdnaD+N/k/YXfK2bfjzzogVRtHFK33bFXW1Mhoz5mRbVmoklRtoF
|
||||
DnTtpu4yuVsCBHFuburxH5oHKPyS77XYBmRiJbGRnkBnCXXmei2jyix8TwJIEUHb
|
||||
kE6SuZyohYtAKo+ALKnXXGeH6F98xrMqZLdDVU5+JzGS0fTJeZsM05ba/QQbUlaj
|
||||
CDMKlZCi5BhXWiBbGOXSt1GFFb6ixgerm4XlCy45pL/RSS5mh35y29o4wNeyvst0
|
||||
CqLDFZXXAgMBAAECggEBAJxlD+ClkjpX4lFsBGk86gPdtrxyJI74/snAD4up3q97
|
||||
kzdEEuno+Rhrf1nQyinjdWGGz4ecl+St0rv30cyLdPmCswjTK0mD/voJFByCsmCJ
|
||||
IwqC8SJUdqHmw0QXSmLu9XyWD1xbSZ4hTZAEe9op+1+1Tq8cRgDy4Kb+ZhYGHVsf
|
||||
4o1RFGBCtSGLFBC908xZnQlqzGHtCuiBecJiWqoFK+mm3TgEUp4VDPRSPsWDWYnJ
|
||||
KxciTSE9roBF7VAe5ocTRdn+tj9GVaNaBLqb1XhkU41wZxVMoid0OVgxkmyEdAyR
|
||||
lL1/zVyQDgJbke4t6dgu4NCAoPWXKZP1zxNa1Ied51kCgYEA+h2X7MO8rYyWHGT7
|
||||
EZoPpHSrR3F1MnsRgXnkVt5dSrwAQlLmQmmWnjVtEQM72Eox1Czdz+GjILpvfwNF
|
||||
fktzDa1GghO5TdDibcchG01qLeqEj0vgvtCP1YFLeCBZJv4yPxpaHWhyUOYPWoXq
|
||||
Mze7yYbkh2uYORPKgu+N4b4oH90CgYEA+QoWQ+44j2jld4DLvYpW/tf2kvKkmFl5
|
||||
43KSVXkDHSnEfO+RFpFQ8rCOKetlMbcuQMakTz++fh3smHWGZ/S1Hm1ZUIRQqCzq
|
||||
m1dTg8PX6pH9e7/0gebFqQWtGhWQdnSWmGZAEnAnmFq6DrDB0FHvfS+VePC1knEJ
|
||||
/Aw4l+YFy0MCgYA60YLM1ysj1Q/oFYdFmGldT2KIJpJdELwJKtUb6Kcf0B5vendT
|
||||
3ujgw8emXJBSSQB22SZAoNtv8ugNgoNxM+UWrk0KggDt39Wf41hRx17U9XW/DSUJ
|
||||
OprYptNMqK7OkLDYTiYrDEj15WRu8VcmPFEZD3PmtNLTeWgCart+/u0IsQKBgQCG
|
||||
xSirdl1xbmjPtQmM9zKBE0pC18CvGazWo4gBbU18GMBWhCbWOam+zEEC+np23xTO
|
||||
xTDiGjLyeSsyjldAJrNlVfPBmPk1KamEi0uMwQ01ye+NaqHdMo/BGmtE9GqLUCi3
|
||||
LI576+nhjyelD46zN8QM0RVor4rzRu0KU2rE+RwllQKBgQDZ1j5Uhblxn+WJ1/z3
|
||||
xZfP23VJLVCCvBIXaHENCl01/9hSBFqH0K+EUUfeJesWoh7KSdaiHXGRR1XdB1rs
|
||||
Bmzh4wPgIlcc8CPmJxZ09fM2ggHSZf1baV8lEf64/N3OnENDvUAepzwIe0IhKs1i
|
||||
pzpCgCGttWxEZJvcug4AOulfQA==
|
||||
-----END PRIVATE KEY-----
|
106
pappy-proxy/comm.py
Normal file
106
pappy-proxy/comm.py
Normal file
|
@ -0,0 +1,106 @@
|
|||
import base64
|
||||
import http
|
||||
import json
|
||||
|
||||
from twisted.protocols.basic import LineReceiver
|
||||
from twisted.internet import defer
|
||||
from util import PappyException
|
||||
|
||||
"""
|
||||
comm.py
|
||||
Handles creating a listening server bound to localhost that other processes can
|
||||
use to interact with the proxy.
|
||||
"""
|
||||
|
||||
comm_port = 0
|
||||
debug = True
|
||||
|
||||
def set_comm_port(port):
    # Record the comm server's listening port in the module-level
    # `comm_port` variable so other components can look it up.
    global comm_port
    comm_port = port
|
||||
|
||||
class CommServer(LineReceiver):
    """Line-based JSON command server.

    Each received line is a JSON object with an 'action' key. The action is
    dispatched to the matching handler; the handler's deferred result is
    merged into a {'success': True} envelope and written back as one JSON
    line. Failures are reported as {'success': False, 'message': ...}.
    """

    def __init__(self):
        self.delimiter = '\n'
        # Maps the 'action' value of an incoming command to its handler.
        self.action_handlers = {
            'ping': self.action_ping,
            'get_request': self.action_get_request,
            'get_response': self.action_get_response,
            'submit': self.action_submit_request,
        }

    def lineReceived(self, line):
        """Parse one JSON command line and dispatch it to its handler."""
        if line == '':
            return
        try:
            command_data = json.loads(line)
            command = command_data['action']
            valid = False
            if command in self.action_handlers:
                valid = True
                result = {'success': True}
                func_defer = self.action_handlers[command](command_data)
                func_defer.addCallback(self.action_result_handler, result)
                func_defer.addErrback(self.action_error_handler, result)
            if not valid:
                raise PappyException('%s is an invalid command' % command_data['action'])
        except PappyException as e:
            # Pappy-level failures are reported to the client as an error line.
            # NOTE(review): json.loads errors and a missing 'action' key are
            # not caught here and would propagate — confirm that is intended.
            return_data = {'success': False, 'message': str(e)}
            self.sendLine(json.dumps(return_data))

    def action_result_handler(self, data, result):
        # Merge the handler's payload into the {'success': True} envelope
        # and send it to the client.
        result.update(data)
        self.sendLine(json.dumps(result))

    def action_error_handler(self, error, result):
        # Errback for failed action handlers: report the failure and consume
        # the error so it does not propagate further.
        if debug:
            print error.getTraceback()
            return_data = {'success': False, 'message': 'Debug mode enabled, traceback on main terminal'}
        else:
            return_data = {'success': False, 'message': str(error.getErrorMessage())}
        # NOTE(review): result.update(result) is a no-op — this probably
        # intended to merge `result` into `return_data`; confirm.
        result.update(result)
        self.sendLine(json.dumps(return_data))
        error.trap(Exception)
        return True

    def action_ping(self, data):
        """Liveness check; immediately succeeds with {'ping': 'pong'}."""
        return defer.succeed({'ping': 'pong'})

    @defer.inlineCallbacks
    def action_get_request(self, data):
        """Load the stored request with id data['reqid'] and return its dict form."""
        try:
            reqid = int(data['reqid'])
        except KeyError:
            # Raised when 'reqid' is missing from the command data.
            raise PappyException("Request with given ID does not exist")

        req = yield http.Request.load_request(reqid)
        dat = json.loads(req.to_json())
        defer.returnValue(dat)

    @defer.inlineCallbacks
    def action_get_response(self, data):
        """Load the response associated with request data['reqid']."""
        try:
            reqid = int(data['reqid'])
        except KeyError:
            raise PappyException("Request with given ID does not exist, cannot fetch associated response.")

        req = yield http.Request.load_request(reqid)
        rsp = yield http.Response.load_response(req.response.rspid)
        dat = json.loads(rsp.to_json())
        defer.returnValue(dat)

    @defer.inlineCallbacks
    def action_submit_request(self, data):
        """Submit a base64-encoded raw request, persist it, and return the
        resulting request (and response, when one was received)."""
        try:
            req = http.Request(base64.b64decode(data['full_request']))
        except:
            raise PappyException("Error parsing request")
        req_sub = yield req.submit_self()
        yield req_sub.deep_save()

        retdata = {}
        retdata['request'] = json.loads(req_sub.to_json())
        if req_sub.response:
            retdata['response'] = json.loads(req_sub.response.to_json())
        defer.returnValue(retdata)
|
51
pappy-proxy/config.py
Normal file
51
pappy-proxy/config.py
Normal file
|
@ -0,0 +1,51 @@
|
|||
import imp
import json
import os
import shutil

# NOTE(review): `imp` is imported but never used in this module.

# Make sure we have a config file: if the working directory has none, seed it
# with the default config shipped next to this module.
if not os.path.isfile('./config.json'):
    print "Copying default config to directory"
    default_config_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                       'default_user_config.json')
    shutil.copyfile(default_config_file, './config.json')

# Load local project config
with open('./config.json', 'r') as f:
    proj_config = json.load(f)

# Substitution dictionary applied (via str.format) to path values read from
# the config file; {PAPPYDIR} expands to this module's directory.
subs = {}
subs['PAPPYDIR'] = os.path.dirname(os.path.realpath(__file__))

# Data file settings
if 'data_file' in proj_config:
    DATAFILE = proj_config["data_file"].format(**subs)
else:
    DATAFILE = 'data.db'

# Debug settings: when a debug_dir is configured, debug output goes to files
# in that directory.
if 'debug_dir' in proj_config:
    DEBUG_TO_FILE = True
    DEBUG_DIR = proj_config["debug_dir"].format(**subs)
else:
    DEBUG_DIR = None
    DEBUG_TO_FILE = False
# Runtime-adjustable verbosity (raised temporarily by the console's `log`
# command). NOTE(review): reconstructed as module-level — confirm it was not
# intended to sit inside the else-branch above.
DEBUG_VERBOSITY = 0

# Cert directory settings
if 'cert_dir' in proj_config:
    CERT_DIR = proj_config["cert_dir"].format(**subs)
else:
    CERT_DIR = './certs'
# File names (within CERT_DIR) of the CA private key and certificate.
SSL_PKEY_FILE = 'private.key'
SSL_CA_FILE = 'certificate.crt'

# Listener settings: list of (port, interface) pairs the proxy binds to.
if "proxy_listeners" in proj_config:
    LISTENERS = []
    for l in proj_config["proxy_listeners"]:
        LISTENERS.append((l['port'], l['interface']))
else:
    LISTENERS = [(8000, '127.0.0.1')]
|
||||
|
626
pappy-proxy/console.py
Normal file
626
pappy-proxy/console.py
Normal file
|
@ -0,0 +1,626 @@
|
|||
import cmd2
|
||||
import config
|
||||
import context
|
||||
import crochet
|
||||
import mangle
|
||||
import proxy
|
||||
import repeater
|
||||
import select
|
||||
import shlex
|
||||
import string
|
||||
import subprocess
|
||||
import sys
|
||||
import termios
|
||||
import time
|
||||
|
||||
import http
|
||||
from twisted.internet import defer, reactor
|
||||
from util import PappyException
|
||||
|
||||
"""
|
||||
console.py
|
||||
|
||||
Functions and classes involved with interacting with console input and output
|
||||
"""
|
||||
|
||||
# http://www.termsys.demon.co.uk/vtansi.htm#cursor
|
||||
SAVE_CURSOR = '\x1b[7'
|
||||
UNSAVE_CURSOR = '\x1b[8'
|
||||
LINE_UP = '\x1b[1A'
|
||||
LINE_ERASE = '\x1b[2K'
|
||||
PRINT_LINE = '\x1b[1i'
|
||||
|
||||
edit_queue = []
|
||||
|
||||
def print_pappy_errors(func):
    """Decorator for console command handlers: catch PappyException and print
    it instead of letting it abort the command loop. Any other exception
    still propagates.

    NOTE(review): does not use functools.wraps, so the wrapped function's
    name/docstring are not preserved.
    """
    def catch(*args, **kwargs):
        try:
            func(*args, **kwargs)
        except PappyException as e:
            print str(e)
    return catch
|
||||
|
||||
class ProxyCmd(cmd2.Cmd):
|
||||
|
||||
    def __init__(self, *args, **kwargs):
        # Messages queued by add_alert, flushed to the terminal after the
        # next command completes (see postcmd).
        self.alerts = []
        self.prompt = 'itsPappyTime> '
        self.debug = True
        cmd2.Cmd.__init__(self, *args, **kwargs)
|
||||
|
||||
def add_alert(self, alert):
|
||||
self.alerts.append(alert)
|
||||
|
||||
def postcmd(self, stop, line):
|
||||
for l in self.alerts:
|
||||
print '[!] ', l
|
||||
self.alerts = []
|
||||
return stop
|
||||
|
||||
def help_view_request_headers(self):
|
||||
print ("View the headers of the request\n"
|
||||
"Usage: view_request_headers <reqid> [u]"
|
||||
"If 'u' is given as an additional argument, the unmangled version "
|
||||
"of the request will be displayed.")
|
||||
|
||||
@print_pappy_errors
|
||||
@crochet.wait_for(timeout=5.0)
|
||||
@defer.inlineCallbacks
|
||||
def do_view_request_headers(self, line):
|
||||
args = shlex.split(line)
|
||||
try:
|
||||
reqid = int(args[0])
|
||||
showid = reqid
|
||||
except:
|
||||
raise PappyException("Enter a valid number for the request id")
|
||||
|
||||
req = yield http.Request.load_request(reqid)
|
||||
showreq = req
|
||||
|
||||
show_unmangled = False
|
||||
if len(args) > 1 and args[1][0].lower() == 'u':
|
||||
if not req.unmangled:
|
||||
raise PappyException("Request was not mangled")
|
||||
show_unmangled = True
|
||||
showreq = req.unmangled
|
||||
|
||||
print ''
|
||||
print_requests([showreq])
|
||||
if show_unmangled:
|
||||
print ''
|
||||
print 'UNMANGLED --------------------'
|
||||
print ''
|
||||
view_full_request(showreq, True)
|
||||
|
||||
def help_view_full_request(self):
|
||||
print ("View the full data of the request\n"
|
||||
"Usage: view_full_request <reqid> [u]\n"
|
||||
"If 'u' is given as an additional argument, the unmangled version "
|
||||
"of the request will be displayed.")
|
||||
|
||||
@print_pappy_errors
|
||||
@crochet.wait_for(timeout=5.0)
|
||||
@defer.inlineCallbacks
|
||||
def do_view_full_request(self, line):
|
||||
args = shlex.split(line)
|
||||
try:
|
||||
reqid = int(args[0])
|
||||
showid = reqid
|
||||
except:
|
||||
raise PappyException("Enter a valid number for the request id")
|
||||
|
||||
req = yield http.Request.load_request(reqid)
|
||||
showreq = req
|
||||
|
||||
show_unmangled = False
|
||||
if len(args) > 1 and args[1][0].lower() == 'u':
|
||||
if not req.unmangled:
|
||||
raise PappyException("Request was not mangled")
|
||||
show_unmangled = True
|
||||
showreq = req.unmangled
|
||||
|
||||
print ''
|
||||
print_requests([showreq])
|
||||
if show_unmangled:
|
||||
print ''
|
||||
print 'UNMANGLED --------------------'
|
||||
print ''
|
||||
view_full_request(showreq)
|
||||
|
||||
def help_view_response_headers(self):
|
||||
print ("View the headers of the response\n"
|
||||
"Usage: view_response_headers <reqid>")
|
||||
|
||||
@print_pappy_errors
|
||||
@crochet.wait_for(timeout=5.0)
|
||||
@defer.inlineCallbacks
|
||||
def do_view_response_headers(self, line):
|
||||
args = shlex.split(line)
|
||||
try:
|
||||
reqid = int(args[0])
|
||||
showid = reqid
|
||||
except:
|
||||
raise PappyException("Enter a valid number for the request id")
|
||||
|
||||
req = yield http.Request.load_request(reqid)
|
||||
showrsp = req.response
|
||||
|
||||
show_unmangled = False
|
||||
if len(args) > 1 and args[1][0].lower() == 'u':
|
||||
if not req.response.unmangled:
|
||||
raise PappyException("Response was not mangled")
|
||||
show_unmangled = True
|
||||
showrsp = req.response.unmangled
|
||||
|
||||
print ''
|
||||
print_requests([req])
|
||||
if show_unmangled:
|
||||
print ''
|
||||
print 'UNMANGLED --------------------'
|
||||
print ''
|
||||
view_full_response(showrsp, True)
|
||||
|
||||
def help_view_full_response(self):
|
||||
print ("View the full data of the response associated with a request\n"
|
||||
"Usage: view_full_response <reqid>")
|
||||
|
||||
@print_pappy_errors
|
||||
@crochet.wait_for(timeout=5.0)
|
||||
@defer.inlineCallbacks
|
||||
def do_view_full_response(self, line):
|
||||
args = shlex.split(line)
|
||||
try:
|
||||
reqid = int(args[0])
|
||||
showid = reqid
|
||||
except:
|
||||
raise PappyException("Enter a valid number for the request id")
|
||||
|
||||
req = yield http.Request.load_request(reqid)
|
||||
showrsp = req.response
|
||||
|
||||
show_unmangled = False
|
||||
if len(args) > 1 and args[1][0].lower() == 'u':
|
||||
if not req.response.unmangled:
|
||||
raise PappyException("Response was not mangled")
|
||||
show_unmangled = True
|
||||
showrsp = req.response.unmangled
|
||||
|
||||
print ''
|
||||
print_requests([req])
|
||||
if show_unmangled:
|
||||
print ''
|
||||
print 'UNMANGLED --------------------'
|
||||
print ''
|
||||
view_full_response(showrsp)
|
||||
|
||||
def help_list(self):
|
||||
print ("List request/response pairs in the current context\n"
|
||||
"Usage: list")
|
||||
|
||||
@print_pappy_errors
|
||||
def do_list(self, line):
|
||||
args = shlex.split(line)
|
||||
if len(args) > 0:
|
||||
if args[0][0].lower() == 'a':
|
||||
print_count = -1
|
||||
else:
|
||||
try:
|
||||
print_count = int(args[0])
|
||||
except:
|
||||
print "Please enter a valid argument for list"
|
||||
return
|
||||
else:
|
||||
print_count = 50
|
||||
|
||||
context.sort()
|
||||
if print_count > 0:
|
||||
to_print = context.active_requests[:]
|
||||
to_print = sorted(to_print, key=lambda x: x.reqid, reverse=True)
|
||||
to_print = to_print[:print_count]
|
||||
print_requests(to_print)
|
||||
else:
|
||||
print_requests(context.active_requests)
|
||||
|
||||
def help_filter(self):
|
||||
print ("Apply a filter to the current context\n"
|
||||
"Usage: filter <filter string>\n"
|
||||
"See README.md for information on filter strings")
|
||||
|
||||
@print_pappy_errors
|
||||
def do_filter(self, line):
|
||||
if not line:
|
||||
raise PappyException("Filter string required")
|
||||
|
||||
filter_to_add = context.Filter(line)
|
||||
context.add_filter(filter_to_add)
|
||||
|
||||
def help_filter_clear(self):
|
||||
print ("Reset the context so that it contains no filters (ignores scope)\n"
|
||||
"Usage: filter_clear")
|
||||
|
||||
@print_pappy_errors
|
||||
@crochet.wait_for(timeout=5.0)
|
||||
@defer.inlineCallbacks
|
||||
def do_filter_clear(self, line):
|
||||
context.active_filters = []
|
||||
yield context.reload_from_storage()
|
||||
|
||||
def help_filter_list(self):
|
||||
print ("Print the filters that make up the current context\n"
|
||||
"Usage: filter_list")
|
||||
|
||||
@print_pappy_errors
|
||||
def do_filter_list(self, line):
|
||||
for f in context.active_filters:
|
||||
print f.filter_string
|
||||
|
||||
|
||||
def help_scope_save(self):
|
||||
print ("Set the scope to be the current context. Saved between launches\n"
|
||||
"Usage: scope_save")
|
||||
|
||||
@print_pappy_errors
|
||||
@crochet.wait_for(timeout=5.0)
|
||||
@defer.inlineCallbacks
|
||||
def do_scope_save(self, line):
|
||||
context.save_scope()
|
||||
yield context.store_scope(http.dbpool)
|
||||
|
||||
def help_scope_reset(self):
|
||||
print ("Set the context to be the scope (view in-scope items)\n"
|
||||
"Usage: scope_reset")
|
||||
|
||||
@print_pappy_errors
|
||||
@crochet.wait_for(timeout=5.0)
|
||||
@defer.inlineCallbacks
|
||||
def do_scope_reset(self, line):
|
||||
yield context.reset_to_scope()
|
||||
|
||||
def help_scope_delete(self):
|
||||
print ("Delete the scope so that it contains all request/response pairs\n"
|
||||
"Usage: scope_delete")
|
||||
|
||||
@print_pappy_errors
|
||||
@crochet.wait_for(timeout=5.0)
|
||||
@defer.inlineCallbacks
|
||||
def do_scope_delete(self, line):
|
||||
context.set_scope([])
|
||||
yield context.store_scope(http.dbpool)
|
||||
|
||||
def help_scope_list(self):
|
||||
print ("Print the filters that make up the scope\n"
|
||||
"Usage: scope_list")
|
||||
|
||||
@print_pappy_errors
|
||||
def do_scope_list(self, line):
|
||||
context.print_scope()
|
||||
|
||||
def help_repeater(self):
|
||||
print ("Open a request in the repeater\n"
|
||||
"Usage: repeater <reqid>")
|
||||
|
||||
@print_pappy_errors
|
||||
def do_repeater(self, line):
|
||||
repeater.start_editor(int(line))
|
||||
|
||||
def help_submit(self):
|
||||
print "Submit a request again (NOT IMPLEMENTED)"
|
||||
|
||||
@print_pappy_errors
|
||||
@crochet.wait_for(timeout=5.0)
|
||||
@defer.inlineCallbacks
|
||||
def do_submit(self, line):
|
||||
pass
|
||||
# reqid = int(line)
|
||||
# req = yield http.Request.load_request(reqid)
|
||||
# rsp = yield req.submit()
|
||||
# print printable_data(rsp.full_response)
|
||||
|
||||
def help_intercept(self):
|
||||
print ("Intercept requests and/or responses and edit them with vim before passing them along\n"
|
||||
"Usage: intercept <reqid>")
|
||||
|
||||
    @print_pappy_errors
    def do_intercept(self, line):
        """Intercept requests and/or responses and edit them in vim before
        they are passed along. Args select what to intercept ('req'/'rsp'
        aliases); runs until the user presses a key.
        """
        global edit_queue
        args = shlex.split(line)
        intercept_requests = False
        intercept_responses = False

        # Accepted aliases for each direction.
        req_names = ('req', 'request', 'requests')
        rsp_names = ('rsp', 'response', 'responses')

        if any(a in req_names for a in args):
            intercept_requests = True
        if any(a in rsp_names for a in args):
            intercept_responses = True

        if intercept_requests:
            print "Intercepting reqeusts"
        if intercept_responses:
            print "Intercepting responses"

        mangle.set_intercept_requests(intercept_requests)
        mangle.set_intercept_responses(intercept_responses)
        # Poll loop: exit when the user types anything; otherwise pop queued
        # messages, open them in vim, and fire their deferred when the editor
        # closes.
        while 1:
            if select.select([sys.stdin,],[],[],0.0)[0]:
                break;
            else:
                if len(edit_queue) > 0:
                    (to_edit, deferred) = edit_queue.pop(0)
                    # Edit the file
                    subprocess.call(['vim', to_edit])
                    # Fire the callback
                    deferred.callback(None)
                time.sleep(0.2)

        # Send remaining requests along
        while len(edit_queue) > 0:
            (fname, deferred) = edit_queue.pop(0)
            deferred.callback(None)

        # Flush stdin so that anything we typed doesn't go into the prompt
        termios.tcflush(sys.stdin, termios.TCIOFLUSH)
        mangle.set_intercept_requests(False)
        mangle.set_intercept_responses(False)
|
||||
|
||||
def help_gencerts(self):
|
||||
print ("Generate CA cert and private CA file\n"
|
||||
"Usage: gencerts [/path/to/put/certs/in]")
|
||||
|
||||
@print_pappy_errors
|
||||
def do_gencerts(self, line):
|
||||
dest_dir = line or config.CERT_DIR
|
||||
print "This will overwrite any existing certs in %s. Are you sure?" % dest_dir
|
||||
print "(y/N)",
|
||||
answer = raw_input()
|
||||
if not answer or answer[0].lower() != 'y':
|
||||
return False
|
||||
print "Generating certs to %s" % dest_dir
|
||||
proxy.generate_ca_certs(dest_dir)
|
||||
|
||||
def help_log(self):
|
||||
print ("View the log\n"
|
||||
"Usage: log [verbosity (default is 1)]\n"
|
||||
"verbosity=1: Show connections as they're made/lost, some additional info\n"
|
||||
"verbosity=3: Show full requests/responses as they are processed by the proxy")
|
||||
|
||||
@print_pappy_errors
|
||||
def do_log(self, line):
|
||||
try:
|
||||
verbosity = int(line.strip())
|
||||
except:
|
||||
verbosity = 1
|
||||
config.DEBUG_VERBOSITY = verbosity
|
||||
raw_input()
|
||||
config.DEBUG_VERBOSITY = 0
|
||||
|
||||
@print_pappy_errors
|
||||
def do_testerror(self, line):
|
||||
raise PappyException("Test error")
|
||||
|
||||
@print_pappy_errors
|
||||
def do_EOF(self):
|
||||
print "EOF"
|
||||
return True
|
||||
|
||||
### ABBREVIATIONS
|
||||
def help_ls(self):
|
||||
self.help_list()
|
||||
|
||||
@print_pappy_errors
|
||||
def do_ls(self, line):
|
||||
self.onecmd('list %s' % line)
|
||||
|
||||
def help_sr(self):
|
||||
self.help_scope_reset()
|
||||
|
||||
@print_pappy_errors
|
||||
def do_sr(self, line):
|
||||
self.onecmd('scope_reset %s' % line)
|
||||
|
||||
def help_sls(self):
|
||||
self.help_scope_list()
|
||||
|
||||
@print_pappy_errors
|
||||
def do_sls(self, line):
|
||||
self.onecmd('scope_list %s' % line)
|
||||
|
||||
def help_vhq(self):
|
||||
self.help_view_request_headers()
|
||||
|
||||
@print_pappy_errors
|
||||
def do_vhq(self, line):
|
||||
self.onecmd('view_request_headers %s' % line)
|
||||
|
||||
def help_vfq(self):
|
||||
self.help_view_full_request()
|
||||
|
||||
@print_pappy_errors
|
||||
def do_vfq(self, line):
|
||||
self.onecmd('view_full_request %s' % line)
|
||||
|
||||
def help_vhs(self):
|
||||
self.help_view_response_headers()
|
||||
|
||||
@print_pappy_errors
|
||||
def do_vhs(self, line):
|
||||
self.onecmd('view_response_headers %s' % line)
|
||||
|
||||
def help_vfs(self):
|
||||
self.help_view_full_response()
|
||||
|
||||
@print_pappy_errors
|
||||
def do_vfs(self, line):
|
||||
self.onecmd('view_full_response %s' % line)
|
||||
|
||||
def help_fl(self):
|
||||
self.help_filter()
|
||||
|
||||
@print_pappy_errors
|
||||
def do_fl(self, line):
|
||||
self.onecmd('filter %s' % line)
|
||||
|
||||
def help_fls(self):
|
||||
self.help_filter_list()
|
||||
|
||||
@print_pappy_errors
|
||||
def do_fls(self, line):
|
||||
self.onecmd('filter_list %s' % line)
|
||||
|
||||
def help_fc(self):
|
||||
self.help_filter_clear()
|
||||
|
||||
@print_pappy_errors
|
||||
def do_fc(self, line):
|
||||
self.onecmd('filter_clear %s' % line)
|
||||
|
||||
def help_rp(self):
|
||||
self.help_repeater()
|
||||
|
||||
@print_pappy_errors
|
||||
def do_rp(self, line):
|
||||
self.onecmd('repeater %s' % line)
|
||||
|
||||
def help_ic(self):
|
||||
self.help_intercept()
|
||||
|
||||
@print_pappy_errors
|
||||
def do_ic(self, line):
|
||||
self.onecmd('intercept %s' % line)
|
||||
|
||||
|
||||
|
||||
def cmd_failure(cmd):
    # Generic failure callback: just announce that a command failed.
    # NOTE(review): the `cmd` argument is unused.
    print "FAILURE"
|
||||
|
||||
def edit_file(fname):
    """Queue *fname* for interactive editing.

    Appends (fname, deferred) to the global edit queue; the intercept loop
    opens the file in the editor and fires the deferred once the editor
    is closed.
    """
    global edit_queue
    pending = defer.Deferred()
    edit_queue.append((fname, pending))
    return pending
|
||||
|
||||
def print_table(coldata, rows):
    """Print `rows` as a space-padded text table on stdout.

    coldata: list of dicts describing the columns:
        'name'  -- (optional) heading for the column
        'width' -- (optional) maximum width before truncating; 0/absent
                   means unlimited
    rows: list of sequences, one entry per column.

    Cells longer than their column width are truncated and suffixed with
    '...'. A header row is printed only if at least one column has a name.
    """
    headers = [col.get('name', '') for col in coldata]
    # Prepend the header row only when at least one heading is non-empty.
    if any(h != '' for h in headers):
        rows = [headers] + rows

    # Each column's width is its widest cell, capped at the configured
    # maximum (when one is given and exceeded).
    widths = []
    for i, col in enumerate(coldata):
        if 'width' in col and col['width'] > 0:
            maxwidth = col['width']
        else:
            maxwidth = 0
        colwidth = max((len(str(row[i])) for row in rows), default=0) if rows else 0
        if maxwidth > 0 and colwidth > maxwidth:
            widths.append(maxwidth)
        else:
            widths.append(colwidth)

    # Print rows
    padding = 2
    for row in rows:
        for (cell, width) in zip(row, widths):
            printstr = str(cell)
            if len(printstr) > width:
                # Truncate to the column width, ending with '...'.
                # (The original repeated this idempotent truncation three
                # times inside a pointless index loop; once is enough.)
                printstr = printstr[:width][:-3] + '...'
            sys.stdout.write(printstr)
            sys.stdout.write(' ' * (width - len(printstr)))
            sys.stdout.write(' ' * padding)
        sys.stdout.write('\n')
    sys.stdout.flush()
|
||||
|
||||
|
||||
def printable_data(data):
    """Return `data` with every character not in string.printable replaced
    by '.', so raw request/response bytes can be dumped to a terminal
    safely. (Idiomatic join-over-generator instead of the original
    append-to-list loop; output is identical.)
    """
    return ''.join(c if c in string.printable else '.' for c in data)
|
||||
|
||||
|
||||
def view_full_request(request, headers_only=False):
    """Print the request (or only its headers when headers_only is True)
    with unprintable bytes replaced by dots."""
    if headers_only:
        print printable_data(request.raw_headers)
    else:
        print printable_data(request.full_request)
|
||||
|
||||
def view_full_response(response, headers_only=False):
    """Print the response (or only its headers when headers_only is True)
    with unprintable bytes replaced by dots."""
    if headers_only:
        print printable_data(response.raw_headers)
    else:
        print printable_data(response.full_response)
|
||||
|
||||
def print_requests(requests):
    """Print a summary table of the given requests.

    Columns: id, verb, host header, path (truncated at 40), response status,
    request/response body lengths, elapsed time, and a mangle indicator
    ('q' = request was mangled, 's' = response was mangled, 'q/s' = both).
    """
    # Print a table with info on all the requests in the list
    cols = [
        {'name':'ID'},
        {'name':'Method'},
        {'name': 'Host'},
        {'name':'Path', 'width':40},
        {'name':'S-Code'},
        {'name':'Req Len'},
        {'name':'Rsp Len'},
        {'name':'Time'},
        {'name':'Mngl'},
    ]
    rows = []
    for request in requests:
        rid = request.reqid
        method = request.verb
        host = request.headers['host']
        path = request.path
        reqlen = len(request.raw_data)
        rsplen = 'None'
        mangle_str = '--'

        # 'q' marks a mangled request (an unmangled original is stored).
        if request.unmangled:
            mangle_str = 'q'

        if request.response:
            response_code = str(request.response.response_code) + \
                ' ' + request.response.response_text
            rsplen = len(request.response.raw_data)
            # Append/set 's' when the response was mangled too.
            if request.response.unmangled:
                if mangle_str == '--':
                    mangle_str = 's'
                else:
                    mangle_str += '/s'
        else:
            response_code = ''

        # Elapsed seconds between request start and end, when both known.
        time_str = '--'
        if request.time_start and request.time_end:
            time_delt = request.time_end - request.time_start
            time_str = "%.2f" % time_delt.total_seconds()

        rows.append([rid, method, host, path, response_code,
                     reqlen, rsplen, time_str, mangle_str])
    print_table(cols, rows)
|
||||
|
490
pappy-proxy/context.py
Normal file
490
pappy-proxy/context.py
Normal file
|
@ -0,0 +1,490 @@
|
|||
from twisted.internet import defer
|
||||
from util import PappyException
|
||||
import http
|
||||
import shlex
|
||||
|
||||
|
||||
"""
|
||||
context.py
|
||||
|
||||
Functions and classes involved with managing the current context and filters
|
||||
"""
|
||||
|
||||
scope = []
|
||||
base_filters = []
|
||||
active_filters = []
|
||||
active_requests = []
|
||||
|
||||
class FilterParseError(PappyException):
|
||||
pass
|
||||
|
||||
class Filter(object):
    """A callable predicate compiled from a filter string.

    The filter string is parsed once at construction into a function of a
    request; calling the Filter instance applies that function.
    """

    def __init__(self, filter_string):
        self.filter_func = self.from_filter_string(filter_string)
        self.filter_string = filter_string

    def __call__(self, *args, **kwargs):
        # Delegate straight to the compiled predicate.
        return self.filter_func(*args, **kwargs)

    @staticmethod
    def from_filter_string(filter_string):
        """Compile a filter string into a predicate function.

        Format (shlex-tokenized):
            <field> <relation> <value> [<relation2> <value2>]
        A relation prefixed with 'n' negates the comparison. Raises
        FilterParseError for unknown fields or when no filter could be built.
        """
        args = shlex.split(filter_string)
        field = args[0]
        relation = args[1]
        new_filter = None

        # A leading 'n' on the relation (with more characters after it)
        # negates the comparison.
        negate = False
        if relation[0] == 'n' and len(relation) > 1:
            negate = True
            relation = relation[1:]

        # Raises exception if invalid
        comparer = get_relation(relation)

        if field in ("all",):
            new_filter = gen_filter_by_all(comparer, args[2], negate)
        elif field in ("host", "domain", "hs", "dm"):
            new_filter = gen_filter_by_host(comparer, args[2], negate)
        elif field in ("path", "pt"):
            new_filter = gen_filter_by_path(comparer, args[2], negate)
        elif field in ("body", "bd", "data", "dt"):
            new_filter = gen_filter_by_body(comparer, args[2], negate)
        elif field in ("verb", "vb"):
            new_filter = gen_filter_by_verb(comparer, args[2], negate)
        elif field in ("param", "pm"):
            # Key/value fields optionally take a second relation+value pair
            # to match against the value as well as the key.
            if len(args) > 4:
                comparer2 = get_relation(args[3])
                new_filter = gen_filter_by_params(comparer, args[2],
                                                  comparer2, args[4], negate)
            else:
                new_filter = gen_filter_by_params(comparer, args[2],
                                                  negate=negate)
        elif field in ("header", "hd"):
            if len(args) > 4:
                comparer2 = get_relation(args[3])
                new_filter = gen_filter_by_headers(comparer, args[2],
                                                   comparer2, args[4], negate)
            else:
                new_filter = gen_filter_by_headers(comparer, args[2],
                                                   negate=negate)
        elif field in ("rawheaders", "rh"):
            new_filter = gen_filter_by_raw_headers(comparer, args[2], negate)
        elif field in ("sentcookie", "sck"):
            if len(args) > 4:
                comparer2 = get_relation(args[3])
                new_filter = gen_filter_by_submitted_cookies(comparer, args[2],
                                                             comparer2, args[4], negate)
            else:
                new_filter = gen_filter_by_submitted_cookies(comparer, args[2],
                                                             negate=negate)
        elif field in ("setcookie", "stck"):
            if len(args) > 4:
                comparer2 = get_relation(args[3])
                new_filter = gen_filter_by_set_cookies(comparer, args[2],
                                                       comparer2, args[4], negate)
            else:
                new_filter = gen_filter_by_set_cookies(comparer, args[2],
                                                       negate=negate)
        elif field in ("statuscode", "sc", "responsecode"):
            new_filter = gen_filter_by_response_code(comparer, args[2], negate)
        elif field in ("responsetime", "rt"):
            # NOTE(review): not implemented — new_filter stays None, so this
            # falls through to the "Error creating filter" exception below.
            pass
        else:
            raise FilterParseError("%s is not a valid field" % field)

        if new_filter is not None:
            return new_filter
        else:
            raise FilterParseError("Error creating filter")
|
||||
|
||||
|
||||
def filter_reqs(requests, filters):
    """Return the subset of ``requests`` that passes every filter.

    Each filter is a predicate taking a request and returning truthy when the
    request should be kept.  The input list is not modified.

    The previous implementation rebuilt the list once per filter and used an
    O(n) list-membership test (``r not in to_delete``), giving
    O(len(filters) * len(requests)**2) behavior; a single pass per request is
    equivalent and linear in len(filters) * len(requests).
    """
    return [req for req in requests if all(filt(req) for filt in filters)]
|
||||
|
||||
def cmp_is(a, b):
    """Exact match after coercing both sides to strings."""
    return str(a) == str(b)

def cmp_contains(a, b):
    """Case-insensitive substring test: is b contained in a?"""
    return b.lower() in a.lower()

def cmp_exists(a, b=None):
    """True when the value is present (not None); b is ignored."""
    return a is not None

def cmp_len_eq(a, b):
    """len(a) equals b (b coerced to int)."""
    return len(a) == int(b)

def cmp_len_gt(a, b):
    """len(a) greater than b (b coerced to int)."""
    return len(a) > int(b)

def cmp_len_lt(a, b):
    """len(a) less than b (b coerced to int)."""
    return len(a) < int(b)

def cmp_eq(a, b):
    """Numeric equality after coercing both sides to int."""
    return int(a) == int(b)

def cmp_gt(a, b):
    """Numeric greater-than after coercing both sides to int."""
    return int(a) > int(b)

def cmp_lt(a, b):
    """Numeric less-than after coercing both sides to int."""
    return int(a) < int(b)
|
||||
|
||||
|
||||
def gen_filter_by_attr(comparer, val, attr, negate=False):
    """
    Filters by an attribute whose name is shared by the request and response
    objects
    """
    def f(req):
        # Both sides are always evaluated; a match on either counts.
        req_hit = comparer(getattr(req, attr), val)
        if req.response:
            rsp_hit = comparer(getattr(req.response, attr), val)
        else:
            rsp_hit = False
        hit = req_hit or rsp_hit
        return not hit if negate else hit
    return f
|
||||
|
||||
def gen_filter_by_all(comparer, val, negate=False):
    """Match against the full raw request text, or the full raw response
    text when a response is present."""
    def f(req):
        req_hit = comparer(req.full_request, val)
        rsp_hit = comparer(req.response.full_response, val) if req.response else False
        hit = req_hit or rsp_hit
        return not hit if negate else hit
    return f
|
||||
|
||||
def gen_filter_by_host(comparer, val, negate=False):
    """Build a predicate matching on the request's host."""
    def f(req):
        hit = comparer(req.host, val)
        return not hit if negate else hit
    return f
|
||||
|
||||
def gen_filter_by_body(comparer, val, negate=False):
    """Filter on the raw message body; 'raw_data' exists on both request and
    response objects, so either side can match."""
    return gen_filter_by_attr(comparer, val, 'raw_data', negate=negate)


def gen_filter_by_raw_headers(comparer, val, negate=False):
    """Filter on the raw (unparsed) header block of the request or response."""
    return gen_filter_by_attr(comparer, val, 'raw_headers', negate=negate)
|
||||
|
||||
def gen_filter_by_response_code(comparer, val, negate=False):
    """Build a predicate matching on the response status code.

    A request with no response never matches (before negation)."""
    def f(req):
        hit = comparer(req.response.response_code, val) if req.response else False
        return not hit if negate else hit
    return f
|
||||
|
||||
def gen_filter_by_path(comparer, val, negate=False):
    """Build a predicate matching on the request path."""
    def f(req):
        hit = comparer(req.path, val)
        return not hit if negate else hit
    return f
|
||||
|
||||
def gen_filter_by_responsetime(comparer, val, negate=False):
    """Build a predicate matching on the request's response time (rsptime)."""
    def f(req):
        hit = comparer(req.rsptime, val)
        return not hit if negate else hit
    return f
|
||||
|
||||
def gen_filter_by_verb(comparer, val, negate=False):
    """Build a predicate matching on the HTTP verb (GET, POST, ...)."""
    def f(req):
        hit = comparer(req.verb, val)
        return not hit if negate else hit
    return f
|
||||
|
||||
def check_repeatable_dict(d, comparer1, val1, comparer2=None, val2=None, negate=False):
    """Search a repeatable dict (iterated via d.all_pairs()) for a match.

    With only comparer1, a pair matches when either its key or its value
    matches val1.  With comparer2 given, the key must match (comparer1, val1)
    AND the value must match (comparer2, val2)."""
    found = False
    for key, value in d.all_pairs():
        if comparer2:
            k_hit = comparer1(key, val1)
            v_hit = comparer2(value, val2)
            if k_hit and v_hit:
                found = True
                break
        else:
            # Single comparer: check the key and the value independently.
            k_hit = comparer1(key, val1)
            v_hit = comparer1(value, val1)
            if k_hit or v_hit:
                found = True
                break
    return not found if negate else found
|
||||
|
||||
def gen_filter_by_repeatable_dict_attr(attr, keycomparer, keyval, valcomparer=None,
                                       valval=None, negate=False, check_req=True,
                                       check_rsp=True):
    """Build a predicate over a repeatable-dict attribute (e.g. 'headers',
    'cookies') shared by request and response objects.

    check_req / check_rsp control which side(s) are inspected; negation is
    applied once to the combined result."""
    def f(req):
        hit = False
        # Request side (attribute is always fetched, matching is optional).
        pairs = getattr(req, attr)
        if check_req and check_repeatable_dict(pairs, keycomparer, keyval, valcomparer, valval):
            hit = True
        # Response side, only when a response exists.
        if check_rsp and req.response:
            pairs = getattr(req.response, attr)
            if check_repeatable_dict(pairs, keycomparer, keyval, valcomparer, valval):
                hit = True
        return not hit if negate else hit
    return f
|
||||
|
||||
def gen_filter_by_headers(keycomparer, keyval, valcomparer=None, valval=None,
                          negate=False):
    """Filter on a header name (and optionally its value) in either the
    request or the response."""
    return gen_filter_by_repeatable_dict_attr('headers', keycomparer, keyval,
                                              valcomparer, valval, negate=negate)


def gen_filter_by_submitted_cookies(keycomparer, keyval, valcomparer=None,
                                    valval=None, negate=False):
    """Filter on cookies submitted by the client; the response side is
    deliberately not checked (check_rsp=False)."""
    return gen_filter_by_repeatable_dict_attr('cookies', keycomparer, keyval,
                                              valcomparer, valval, negate=negate,
                                              check_rsp=False)
|
||||
|
||||
def gen_filter_by_set_cookies(keycomparer, keyval, valcomparer=None,
                              valval=None, negate=False):
    """Build a predicate over cookies set by the response.

    Each pair from response.cookies.all_pairs() yields a cookie object with
    .key and .val.  NOTE: the negate argument is accepted but not applied
    inside the predicate (unchanged from the original behavior)."""
    def f(req):
        if not req.response:
            return False
        for _, cookie in req.response.cookies.all_pairs():
            if not keycomparer(cookie.key, keyval):
                continue
            if not valcomparer:
                return True
            if valcomparer(cookie.val, valval):
                return True
        return False
    return f
|
||||
|
||||
def gen_filter_by_get_params(keycomparer, keyval, valcomparer=None, valval=None,
                             negate=False):
    """Build a predicate over URL (GET) parameters.

    Without valcomparer, any parameter whose name matches counts; with it,
    the value must match too.  All pairs are scanned (no early exit)."""
    def f(req):
        hit = False
        for name, value in req.get_params.all_pairs():
            if not keycomparer(name, keyval):
                continue
            if not valcomparer:
                hit = True
            elif valcomparer(value, valval):
                hit = True
        return not hit if negate else hit
    return f
|
||||
|
||||
def gen_filter_by_post_params(keycomparer, keyval, valcomparer=None, valval=None,
                              negate=False):
    """Build a predicate over POST body parameters; same matching rules as
    gen_filter_by_get_params but reading req.post_params."""
    def f(req):
        hit = False
        for name, value in req.post_params.all_pairs():
            if not keycomparer(name, keyval):
                continue
            if not valcomparer:
                hit = True
            elif valcomparer(value, valval):
                hit = True
        return not hit if negate else hit
    return f
|
||||
|
||||
def gen_filter_by_params(keycomparer, keyval, valcomparer=None, valval=None,
                         negate=False):
    """Build a predicate matching either GET or POST parameters."""
    def f(req):
        # Negate is deliberately NOT forwarded to the sub-filters; it is
        # applied exactly once to the combined result to avoid double
        # negation.
        post_hit = gen_filter_by_post_params(keycomparer, keyval, valcomparer, valval)(req)
        get_hit = gen_filter_by_get_params(keycomparer, keyval, valcomparer, valval)(req)
        hit = post_hit or get_hit
        return not hit if negate else hit
    return f
|
||||
|
||||
def get_relation(s):
    """Map a relation token from a filter string to its comparer function.

    Raises FilterParseError for an unknown token.  'containsr'/'ctr' (regex
    containment) is recognized but not yet implemented and maps to None,
    matching the original behavior."""
    relations = {
        'is': cmp_is,
        'contains': cmp_contains, 'ct': cmp_contains,
        'containsr': None, 'ctr': None,  # TODO: regex containment
        'exists': cmp_exists, 'ex': cmp_exists,
        'Leq': cmp_len_eq, 'L=': cmp_len_eq,
        'Lgt': cmp_len_gt, 'L>': cmp_len_gt,
        'Llt': cmp_len_lt, 'L<': cmp_len_lt,
        'eq': cmp_eq, '=': cmp_eq,
        'gt': cmp_gt, '>': cmp_gt,
        'lt': cmp_lt, '<': cmp_lt,
    }
    if s in relations:
        return relations[s]
    raise FilterParseError("Invalid relation: %s" % s)
|
||||
|
||||
@defer.inlineCallbacks
def init():
    """Initialize the context module by loading requests from storage."""
    yield reload_from_storage()


@defer.inlineCallbacks
def reload_from_storage():
    """Replace the in-memory request list with every stored request that
    passes the currently active filters."""
    global active_requests
    active_requests = yield http.Request.load_from_filters(active_filters)
|
||||
|
||||
def add_filter(filt):
    """Activate a new filter and drop in-memory requests that no longer pass
    the full active filter set."""
    global active_requests
    global active_filters
    active_filters.append(filt)
    active_requests = filter_reqs(active_requests, active_filters)


def add_request(req):
    """Add a request to the in-memory set if it passes all active filters."""
    global active_requests
    if passes_filters(req, active_filters):
        active_requests.append(req)
|
||||
|
||||
def filter_recheck():
    """Re-run all active filters over the in-memory requests (e.g. after a
    request gains a response) and keep only those that still pass."""
    global active_requests
    global active_filters
    new_reqs = []
    for req in active_requests:
        if passes_filters(req, active_filters):
            new_reqs.append(req)
    active_requests = new_reqs
|
||||
|
||||
def passes_filters(request, filters):
    """True iff the request satisfies every filter predicate."""
    return all(filt(request) for filt in filters)
|
||||
|
||||
def sort(key=None):
    """Sort the in-memory request list, by the given key function or by
    request id when no key is supplied."""
    global active_requests
    sort_key = key if key else (lambda r: r.reqid)
    active_requests = sorted(active_requests, key=sort_key)
|
||||
|
||||
def in_scope(request):
    """True when the request passes every scope filter."""
    global scope
    return passes_filters(request, scope)


def set_scope(filters):
    """Replace the scope filter list wholesale."""
    global scope
    scope = filters


def save_scope():
    """Snapshot the currently active filters as the new scope (shallow copy,
    so later edits to active_filters don't mutate the scope)."""
    global active_filters
    global scope
    scope = active_filters[:]
|
||||
|
||||
@defer.inlineCallbacks
def reset_to_scope():
    """Discard ad-hoc filters: make the scope the active filter set (copied)
    and reload matching requests from storage."""
    global active_filters
    global scope
    active_filters = scope[:]
    yield reload_from_storage()
|
||||
|
||||
def print_scope():
    """Print the filter string of each scope filter, one per line."""
    global scope
    for f in scope:
        print f.filter_string
|
||||
|
||||
@defer.inlineCallbacks
def store_scope(dbpool):
    """Persist the current scope to the database, replacing any stored scope.

    Each filter's position is saved as filter_order so the ordering can be
    restored by load_scope."""
    # Clear out the previously saved scope
    yield dbpool.runQuery(
        """
        DELETE FROM scope
        """
    )

    # Write each filter, recording its position
    for index, filt in enumerate(scope):
        yield dbpool.runQuery(
            """
            INSERT INTO scope (filter_order, filter_string) VALUES (?, ?);
            """,
            (index, filt.filter_string)
        )
|
||||
|
||||
@defer.inlineCallbacks
def load_scope(dbpool):
    """Load the persisted scope from the database into the module-level
    ``scope`` list, ordered by the stored filter_order."""
    global scope
    rows = yield dbpool.runQuery(
        """
        SELECT filter_order, filter_string FROM scope;
        """,
    )
    ordered = sorted(rows, key=lambda row: int(row[0]))
    scope = [Filter(filter_string) for _, filter_string in ordered]
|
7
pappy-proxy/default_user_config.json
Normal file
7
pappy-proxy/default_user_config.json
Normal file
|
@ -0,0 +1,7 @@
|
|||
{
|
||||
"data_file": "./data.db",
|
||||
"cert_dir": "{PAPPYDIR}/certs",
|
||||
"proxy_listeners": [
|
||||
{"port": 8000, "interface": "127.0.0.1"}
|
||||
]
|
||||
}
|
1129
pappy-proxy/http.py
Normal file
1129
pappy-proxy/http.py
Normal file
File diff suppressed because it is too large
Load diff
104
pappy-proxy/mangle.py
Normal file
104
pappy-proxy/mangle.py
Normal file
|
@ -0,0 +1,104 @@
|
|||
import console
|
||||
import context
|
||||
import proxy
|
||||
import string
|
||||
import subprocess
|
||||
import tempfile
|
||||
import http
|
||||
|
||||
from twisted.internet import defer
|
||||
|
||||
# Requests currently in flight, keyed by proxy connection id.  Entries are
# added in mangle_request and removed in mangle_response / connection_lost.
active_requests = {}

# When True, in-scope requests/responses are opened in an editor (via the
# console) before being forwarded.
intercept_requests = False
intercept_responses = False
|
||||
|
||||
def set_intercept_requests(val):
    """Enable/disable interactive editing of in-scope requests."""
    global intercept_requests
    intercept_requests = val


def set_intercept_responses(val):
    """Enable/disable interactive editing of in-scope responses."""
    global intercept_responses
    intercept_responses = val
|
||||
|
||||
@defer.inlineCallbacks
def mangle_request(request, connection_id):
    """Optionally let the user edit an in-scope request before it is
    forwarded upstream.

    Returns (via the deferred) the request object to forward: the original,
    or a mangled copy with .unmangled pointing back at the original when the
    edit changed anything.  The request is registered in active_requests
    under connection_id so mangle_response can pair it later.
    """
    # This function gets called to mangle/edit requests passed through the proxy
    global intercept_requests

    # Work on a fresh copy parsed from the raw bytes.
    orig_req = http.Request(request.full_request)
    retreq = orig_req

    if context.in_scope(orig_req):
        if intercept_requests: # if we want to mangle...
            # Write original request to the temp file
            # NOTE(review): delete=False and the file is never removed
            # afterwards -- temp files accumulate; confirm intended.
            with tempfile.NamedTemporaryFile(delete=False) as tf:
                tfName = tf.name
                tf.write(orig_req.full_request)

            # Have the console edit the file
            yield console.edit_file(tfName)

            # Create new mangled request from edited file
            with open(tfName, 'r') as f:
                mangled_req = http.Request(f.read(), update_content_length=True)

            # Check if it changed
            if mangled_req.full_request != orig_req.full_request:
                # Set the object's metadata
                mangled_req.unmangled = orig_req
                retreq = mangled_req

        # Add our request to the context
        context.add_request(retreq)
    else:
        proxy.log('Out of scope! Request passed along unharmed', id=connection_id)

    active_requests[connection_id] = retreq
    retreq.submitted = True
    defer.returnValue(retreq)
|
||||
|
||||
@defer.inlineCallbacks
def mangle_response(response, connection_id):
    """Optionally let the user edit an in-scope response before it is
    returned to the browser.

    Looks up the request registered by mangle_request for this connection,
    attaches the (possibly mangled) response to it, removes the connection's
    entry from active_requests, and returns the request object.
    """
    # This function gets called to mangle/edit respones passed through the proxy
    global intercept_responses
    #response = string.replace(response, 'cloud', 'butt')
    #response = string.replace(response, 'Cloud', 'Butt')

    myreq = active_requests[connection_id]

    # Work on a fresh copy parsed from the raw bytes.
    orig_rsp = http.Response(response.full_response)
    retrsp = orig_rsp

    if context.in_scope(myreq):
        if intercept_responses: # If we want to mangle...
            # Write original request to the temp file
            # NOTE(review): temp file created with delete=False is never
            # removed -- confirm intended.
            with tempfile.NamedTemporaryFile(delete=False) as tf:
                tfName = tf.name
                tf.write(orig_rsp.full_response)

            # Have the console edit the file
            yield console.edit_file(tfName)

            # Create new mangled request from edited file
            with open(tfName, 'r') as f:
                mangled_rsp = http.Response(f.read(), update_content_length=True)

            # Keep a link to the unedited response when the edit changed it.
            if mangled_rsp.full_response != orig_rsp.full_response:
                mangled_rsp.unmangled = orig_rsp
                retrsp = mangled_rsp

        # Make sure the request (and its unmangled original) are persisted
        # before the response is attached.
        if not myreq.reqid:
            myreq.save()
            if myreq.unmangled:
                myreq.unmangled.save()
        myreq.response = retrsp
    else:
        proxy.log('Out of scope! Response passed along unharmed', id=connection_id)
    # The connection is finished with this request/response pair.
    # NOTE(review): connection_lost also deletes this key; if both run for
    # the same connection a KeyError results -- confirm lifecycle.
    del active_requests[connection_id]
    myreq.response = retrsp
    context.filter_recheck()
    defer.returnValue(myreq)
|
||||
|
||||
def connection_lost(connection_id):
    """Forget the in-flight request for a dropped proxy connection.

    NOTE(review): raises KeyError if mangle_response has already removed
    this connection's entry -- confirm callers guard against that."""
    del active_requests[connection_id]
|
76
pappy-proxy/pappy.py
Executable file
76
pappy-proxy/pappy.py
Executable file
|
@ -0,0 +1,76 @@
|
|||
#!/usr/bin/env python2
|
||||
|
||||
import cmd2
|
||||
import config
|
||||
import console
|
||||
import comm
|
||||
import context
|
||||
import crochet
|
||||
import http
|
||||
import imp
|
||||
import os
|
||||
import schema.update
|
||||
import proxy
|
||||
import shutil
|
||||
import sys
|
||||
import sqlite3
|
||||
from twisted.enterprise import adbapi
|
||||
from twisted.internet import reactor, defer
|
||||
from twisted.internet.threads import deferToThread
|
||||
from twisted.internet.protocol import ServerFactory
|
||||
|
||||
|
||||
# Tell crochet not to install/manage its own reactor; this program starts
# the Twisted reactor itself (see the __main__ block below).
crochet.no_setup()
|
||||
|
||||
def set_text_factory(conn):
    """Connection-open hook for adbapi: make sqlite3 return plain byte
    strings (str) instead of unicode objects for TEXT columns."""
    conn.text_factory = str
|
||||
|
||||
@defer.inlineCallbacks
def main():
    """Program entry point: set up the data store, start the proxy and comm
    listeners, launch the interactive console in a thread, and load the
    saved scope."""
    # If the data file doesn't exist, create it with restricted permissions
    # (0600) so other local users can't read captured traffic.
    if not os.path.isfile(config.DATAFILE):
        with os.fdopen(os.open(config.DATAFILE, os.O_CREAT, 0o0600), 'r') as f:
            pass

    # Set up data store.  cp_max=1 keeps a single sqlite connection.
    dbpool = adbapi.ConnectionPool("sqlite3", config.DATAFILE,
                                   check_same_thread=False,
                                   cp_openfun=set_text_factory,
                                   cp_max=1)
    yield schema.update.update_schema(dbpool)
    http.init(dbpool)
    yield context.init()

    # Run the proxy
    if config.DEBUG_DIR and os.path.exists(config.DEBUG_DIR):
        # NOTE(review): message is printed after the tree is removed.
        shutil.rmtree(config.DEBUG_DIR)
        print 'Removing old debugging output'
    factory = ServerFactory()
    factory.protocol = proxy.ProxyServer
    listen_strs = []
    for listener in config.LISTENERS:
        # listener is a (port, interface) pair.
        reactor.listenTCP(listener[0], factory, interface=listener[1])
        listener_str = 'port %d' % listener[0]
        if listener[1] not in ('127.0.0.1', 'localhost'):
            listener_str += ' (bound to %s)' % listener[1]
        listen_strs.append(listener_str)
    if listen_strs:
        print 'Proxy is listening on %s' % (', '.join(listen_strs))

    com_factory = ServerFactory()
    com_factory.protocol = comm.CommServer
    # Make the port different for every instance of pappy, then pass it to
    # anything we run. Otherwise we can only have it running once on a machine
    comm_port = reactor.listenTCP(0, com_factory, interface='127.0.0.1')
    comm.set_comm_port(comm_port.getHost().port)

    # Run the console in a worker thread; stop the reactor when it exits.
    d = deferToThread(console.ProxyCmd().cmdloop)
    d.addCallback(lambda ignored: reactor.stop())

    # Load the scope
    yield context.load_scope(http.dbpool)
    # NOTE(review): reset_to_scope returns a Deferred that is not yielded
    # here -- its completion is not awaited; confirm intended.
    context.reset_to_scope()
|
||||
|
||||
if __name__ == '__main__':
    # Schedule main() once the reactor is running; main handles all setup.
    reactor.callWhenRunning(main)
    reactor.run()
|
362
pappy-proxy/proxy.py
Normal file
362
pappy-proxy/proxy.py
Normal file
|
@ -0,0 +1,362 @@
|
|||
import config
|
||||
import console
|
||||
import datetime
|
||||
import gzip
|
||||
import mangle
|
||||
import http
|
||||
import os
|
||||
import random
|
||||
import re
|
||||
import schema.update
|
||||
import shutil
|
||||
import string
|
||||
import StringIO
|
||||
import sys
|
||||
import urlparse
|
||||
import zlib
|
||||
from OpenSSL import SSL
|
||||
from twisted.enterprise import adbapi
|
||||
from twisted.internet import reactor, ssl
|
||||
from twisted.internet.protocol import ClientFactory
|
||||
from twisted.protocols.basic import LineReceiver
|
||||
from twisted.internet import defer
|
||||
|
||||
from OpenSSL import crypto
|
||||
|
||||
# Monotonically increasing id assigned to each browser connection; see
# get_next_connection_id().
next_connection_id = 1

# Per-hostname (private key, certificate) pairs generated for TLS
# interception, cached so each host's cert is generated only once.
cached_certs = {}
|
||||
|
||||
def get_next_connection_id():
    """Return a unique id for a new proxy connection.

    No locking is used; presumably only called from the reactor thread --
    confirm before calling from elsewhere."""
    global next_connection_id
    ret_id = next_connection_id
    next_connection_id += 1
    return ret_id
|
||||
|
||||
def log(message, id=None, symbol='*', verbosity_level=1):
    """Write one debug line, optionally tagged with a connection id.

    When config.DEBUG_TO_FILE is set, lines go to a per-connection file
    (connection_<id>.log) or to debug.log when no id is given.  The line is
    echoed to stdout when config.DEBUG_VERBOSITY >= verbosity_level."""
    # Create the debug directory lazily on first use.
    if config.DEBUG_TO_FILE and not os.path.exists(config.DEBUG_DIR):
        os.makedirs(config.DEBUG_DIR)
    if id:
        debug_str = '[%s](%d) %s' % (symbol, id, message)
        if config.DEBUG_TO_FILE:
            with open(config.DEBUG_DIR+'/connection_%d.log' % id, 'a') as f:
                f.write(debug_str+'\n')
    else:
        debug_str = '[%s] %s' % (symbol, message)
        if config.DEBUG_TO_FILE:
            with open(config.DEBUG_DIR+'/debug.log', 'a') as f:
                f.write(debug_str+'\n')
    if config.DEBUG_VERBOSITY >= verbosity_level:
        print debug_str
|
||||
|
||||
def log_request(request, id=None, symbol='*', verbosity_level=3):
    """Log a raw HTTP message line-by-line via log()."""
    for line in request.split('\r\n'):
        log(line, id, symbol, verbosity_level)
|
||||
|
||||
class ClientTLSContext(ssl.ClientContextFactory):
    """TLS context for outgoing (proxy -> remote server) connections.

    NOTE(review): TLSv1 is hard-coded and no certificate verification is
    configured on the context -- typical for an intercepting proxy, but
    worth confirming."""
    isClient = 1
    def getContext(self):
        return SSL.Context(SSL.TLSv1_METHOD)
|
||||
|
||||
|
||||
class ProxyClient(LineReceiver):
    """Upstream half of the proxy: sends the (possibly mangled) request to
    the remote server and parses the response line-by-line, switching to raw
    mode for the body."""

    def __init__(self, request):
        self.factory = None            # set by ProxyClientFactory.buildProtocol
        self._response_sent = False
        self._sent = False             # guards against writing the request twice
        self.request = request

        # Incremental response parser; fed by lineReceived/rawDataReceived.
        self._response_obj = http.Response()

    def log(self, message, symbol='*', verbosity_level=1):
        # Tag every log line with this connection's id.
        log(message, id=self.factory.connection_id, symbol=symbol, verbosity_level=verbosity_level)

    def lineReceived(self, *args, **kwargs):
        # Feed status/header lines into the response parser.
        line = args[0]
        if line is None:
            line = ''
        self._response_obj.add_line(line)
        self.log(line, symbol='r<', verbosity_level=3)
        if self._response_obj.headers_complete:
            if self._response_obj.complete:
                # No body expected -- the response is already done.
                self.handle_response_end()
                return
            self.log("Headers end, length given, waiting for data", verbosity_level=3)
            # Body bytes come through rawDataReceived from here on.
            self.setRawMode()

    def rawDataReceived(self, *args, **kwargs):
        # Accumulate body bytes until the parser reports completion.
        data = args[0]
        if not self._response_obj.complete:
            if data:
                s = console.printable_data(data)
                dlines = s.split('\n')
                for l in dlines:
                    self.log(l, symbol='<rd', verbosity_level=3)
            self._response_obj.add_data(data)

        if self._response_obj.complete:
            self.handle_response_end()

    def connectionMade(self):
        # Kick off the deferred send; connectionMade itself can't yield.
        self._connection_made()

    @defer.inlineCallbacks
    def _connection_made(self):
        """Mangle (optionally), persist, and transmit the request."""
        self.log('Connection established, sending request...', verbosity_level=3)
        # Make sure to add errback
        lines = self.request.full_request.splitlines()
        for l in lines:
            self.log(l, symbol='>r', verbosity_level=3)
        mangled_request = yield mangle.mangle_request(self.request,
                                                      self.factory.connection_id)
        yield mangled_request.deep_save()
        if not self._sent:
            self.transport.write(mangled_request.full_request)
            self._sent = True

    def handle_response_end(self, *args, **kwargs):
        # Close the upstream connection and hand the parsed response back to
        # the factory, which forwards it to the browser side.
        self.log("Remote response finished, returning data to original stream")
        self.transport.loseConnection()
        assert self._response_obj.full_response
        self.factory.return_response(self._response_obj)
|
||||
|
||||
|
||||
class ProxyClientFactory(ClientFactory):
    """Builds a ProxyClient per upstream connection and delivers the finished
    (possibly mangled) request/response pair through ``data_defer``."""

    def __init__(self, request):
        self.request = request
        #self.proxy_server = None
        self.connection_id = -1              # assigned by ProxyServer before connecting
        self.data_defer = defer.Deferred()   # fires with the req/rsp pair
        self.start_time = datetime.datetime.now()
        self.end_time = None

    def log(self, message, symbol='*', verbosity_level=1):
        log(message, id=self.connection_id, symbol=symbol, verbosity_level=verbosity_level)

    def buildProtocol(self, addr):
        p = ProxyClient(self.request)
        p.factory = self
        return p

    def clientConnectionFailed(self, connector, reason):
        self.log("Connection failed with remote server: %s" % reason.getErrorMessage())

    def clientConnectionLost(self, connector, reason):
        self.log("Connection lost with remote server: %s" % reason.getErrorMessage())

    @defer.inlineCallbacks
    def return_response(self, response):
        """Called by ProxyClient once the upstream response is fully parsed:
        mangle (optionally), stamp timing, persist, then fire data_defer."""
        self.end_time = datetime.datetime.now()
        log_request(console.printable_data(response.full_response), id=self.connection_id, symbol='<m', verbosity_level=3)
        mangled_reqrsp_pair = yield mangle.mangle_response(response, self.connection_id)
        log_request(console.printable_data(mangled_reqrsp_pair.response.full_response),
                    id=self.connection_id, symbol='<', verbosity_level=3)
        # Record request round-trip timing on the pair before saving.
        mangled_reqrsp_pair.time_start = self.start_time
        mangled_reqrsp_pair.time_end = self.end_time
        yield mangled_reqrsp_pair.deep_save()
        self.data_defer.callback(mangled_reqrsp_pair)
|
||||
|
||||
|
||||
class ProxyServer(LineReceiver):
    """Browser-facing half of the proxy.

    Parses incoming requests line-by-line; on CONNECT it answers 200 and
    upgrades the browser connection to TLS with a per-host generated cert
    (MITM); complete requests are forwarded upstream via ProxyClientFactory.
    """

    def log(self, message, symbol='*', verbosity_level=1):
        log(message, id=self.connection_id, symbol=symbol, verbosity_level=verbosity_level)

    def __init__(self, *args, **kwargs):
        global next_connection_id
        self.connection_id = get_next_connection_id()

        # Incremental request parser; replaced after each completed request.
        self._request_obj = http.Request()
        self._connect_response = False   # answer a CONNECT before forwarding?
        self._forward = True             # forward this request upstream?
        self._port = None                # host/port remembered from CONNECT so
        self._host = None                # later tunneled requests inherit them

    def lineReceived(self, *args, **kwargs):
        line = args[0]
        self.log(line, symbol='>', verbosity_level=3)
        self._request_obj.add_line(line)

        if self._request_obj.verb.upper() == 'CONNECT':
            # CONNECT is answered locally, never forwarded.
            self._connect_response = True
            self._forward = False
            # For if we only get the port in the connect request
            if self._request_obj.port is not None:
                self._port = self._request_obj.port
            if self._request_obj.host is not None:
                self._host = self._request_obj.host

        if self._request_obj.headers_complete:
            self.setRawMode()

        if self._request_obj.complete:
            self.setLineMode()
            self.full_request_received()

    def rawDataReceived(self, *args, **kwargs):
        # Request body bytes.
        data = args[0]
        self._request_obj.add_data(data)
        self.log(data, symbol='d>', verbosity_level=3)

        if self._request_obj.complete:
            self.full_request_received()

    def full_request_received(self, *args, **kwargs):
        """Handle one fully parsed request, then reset per-request state."""
        global cached_certs

        self.log('End of request', verbosity_level=3)

        if self._connect_response:
            self.log('Responding to browser CONNECT request', verbosity_level=3)
            okay_str = 'HTTP/1.1 200 Connection established\r\n\r\n'
            self.transport.write(okay_str)

            # Generate a cert for the hostname
            if not self._request_obj.host in cached_certs:
                log("Generating cert for '%s'" % self._request_obj.host,
                    verbosity_level=3)
                (pkey, cert) = generate_cert(self._request_obj.host,
                                             config.CERT_DIR)
                cached_certs[self._request_obj.host] = (pkey, cert)
            else:
                log("Using cached cert for %s" % self._request_obj.host, verbosity_level=3)
                (pkey, cert) = cached_certs[self._request_obj.host]
            # Upgrade the browser connection to TLS using the forged cert.
            ctx = ServerTLSContext(
                private_key=pkey,
                certificate=cert,
            )
            self.transport.startTLS(ctx, self.factory)

        if self._forward:
            self.log("Forwarding to %s on %d" % (self._request_obj.host, self._request_obj.port))
            factory = ProxyClientFactory(self._request_obj)
            factory.proxy_server = self
            factory.connection_id = self.connection_id
            # When the upstream pair is done, write it back to the browser.
            factory.data_defer.addCallback(self.send_response_back)
            if self._request_obj.is_ssl:
                self.log("Accessing over SSL...", verbosity_level=3)
                reactor.connectSSL(self._request_obj.host, self._request_obj.port, factory, ClientTLSContext())
            else:
                self.log("Accessing over TCP...", verbosity_level=3)
                reactor.connectTCP(self._request_obj.host, self._request_obj.port, factory)

        # Reset per-request variables
        self.log("Resetting per-request data", verbosity_level=3)
        self._connect_response = False
        self._forward = True
        self._request_obj = http.Request()
        # Carry host/port from a prior CONNECT into the next request.
        if self._port is not None:
            self._request_obj.port = self._port
        if self._host is not None:
            self._request_obj.host = self._host
        self.setLineMode()

    def send_response_back(self, request):
        # Write the finished response to the browser and close.
        self.transport.write(request.response.full_response)
        self.transport.loseConnection()

    def connectionLost(self, reason):
        self.log('Connection lost with browser: %s' % reason.getErrorMessage())
|
||||
|
||||
|
||||
class ServerTLSContext(ssl.ContextFactory):
    """TLS context for the browser-facing side, built from a per-host forged
    certificate (see generate_cert).  The OpenSSL context is built once and
    reused."""
    def __init__(self, private_key, certificate):
        self.private_key = private_key
        self.certificate = certificate
        self.sslmethod = SSL.TLSv1_METHOD
        self.cacheContext()

    def cacheContext(self):
        # Build the OpenSSL context once; getContext() hands out the cached one.
        ctx = SSL.Context(self.sslmethod)
        ctx.use_certificate(self.certificate)
        ctx.use_privatekey(self.private_key)
        self._context = ctx

    def __getstate__(self):
        # The OpenSSL context can't be pickled; drop it and rebuild on load.
        d = self.__dict__.copy()
        del d['_context']
        return d

    def __setstate__(self, state):
        self.__dict__ = state
        self.cacheContext()

    def getContext(self):
        """Create an SSL context.
        """
        return self._context
|
||||
|
||||
|
||||
def generate_cert_serial():
    """Return a random 160-bit serial number for a generated certificate.

    Uses random.SystemRandom (backed by os.urandom) instead of the default
    Mersenne Twister so certificate serials are not predictable from prior
    outputs.  Same range and type as before: an int in [0, 2**160).
    """
    return random.SystemRandom().getrandbits(8*20)
|
||||
|
||||
|
||||
def generate_cert(hostname, cert_dir):
    """Generate a (private key, certificate) pair for ``hostname``, signed by
    the CA whose cert/key live in cert_dir (config.SSL_CA_FILE /
    config.SSL_PKEY_FILE)."""
    with open(cert_dir+'/'+config.SSL_CA_FILE, 'rt') as f:
        ca_raw = f.read()
    with open(cert_dir+'/'+config.SSL_PKEY_FILE, 'rt') as f:
        ca_key_raw = f.read()
    ca_cert = crypto.load_certificate(crypto.FILETYPE_PEM, ca_raw)
    ca_key = crypto.load_privatekey(crypto.FILETYPE_PEM, ca_key_raw)

    # Fresh 2048-bit RSA key for this host's leaf certificate.
    key = crypto.PKey()
    key.generate_key(crypto.TYPE_RSA, 2048)

    cert = crypto.X509()
    cert.get_subject().CN = hostname
    cert.set_serial_number(generate_cert_serial())
    cert.gmtime_adj_notBefore(0)
    cert.gmtime_adj_notAfter(10*365*24*60*60)  # valid for ~10 years
    cert.set_issuer(ca_cert.get_subject())
    cert.set_pubkey(key)
    cert.sign(ca_key, "sha256")

    return (key, cert)
|
||||
|
||||
|
||||
def generate_ca_certs(cert_dir):
    """Create the proxy's self-signed CA key and certificate in cert_dir.

    Both files are written with mode 0600.  Existing files are overwritten.
    """
    # Make directory if necessary
    if not os.path.exists(cert_dir):
        os.makedirs(cert_dir)

    # Private key
    print "Generating private key... ",
    key = crypto.PKey()
    key.generate_key(crypto.TYPE_RSA, 2048)
    with os.fdopen(os.open(cert_dir+'/'+config.SSL_PKEY_FILE, os.O_WRONLY | os.O_CREAT, 0o0600), 'w') as f:
        f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, key))
    print "Done!"

    # Hostname doesn't matter since it's a client cert
    print "Generating client cert... ",
    cert = crypto.X509()
    cert.get_subject().C = 'US' # Country name
    cert.get_subject().ST = 'Michigan' # State or province name
    cert.get_subject().L = 'Ann Arbor' # Locality name
    cert.get_subject().O = 'Pappy Proxy' # Organization name
    #cert.get_subject().OU = '' # Organizational unit name
    cert.get_subject().CN = 'Pappy Proxy' # Common name

    cert.set_serial_number(generate_cert_serial())
    cert.gmtime_adj_notBefore(0)
    cert.gmtime_adj_notAfter(10*365*24*60*60)  # valid for ~10 years
    # Self-signed: the issuer is the subject itself.
    cert.set_issuer(cert.get_subject())
    # Mark this certificate as a CA limited to signing leaf certs.
    cert.add_extensions([
        crypto.X509Extension("basicConstraints", True,
                             "CA:TRUE, pathlen:0"),
        crypto.X509Extension("keyUsage", True,
                             "keyCertSign, cRLSign"),
        crypto.X509Extension("subjectKeyIdentifier", False, "hash",
                             subject=cert),
    ])
    cert.set_pubkey(key)
    cert.sign(key, 'sha256')
    with os.fdopen(os.open(cert_dir+'/'+config.SSL_CA_FILE, os.O_WRONLY | os.O_CREAT, 0o0600), 'w') as f:
        f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
    print "Done!"
|
||||
|
8
pappy-proxy/repeater.py
Normal file
8
pappy-proxy/repeater.py
Normal file
|
@ -0,0 +1,8 @@
|
|||
import comm
|
||||
import subprocess
|
||||
import os
|
||||
|
||||
def start_editor(reqid):
    """Launch vim with the repeater plugin attached to request ``reqid``.

    Blocks until vim exits; comm.comm_port must already have been set so the
    plugin can talk back to the running proxy."""
    script_loc = os.path.join(os.path.dirname(__file__), "vim_repeater", "repeater.vim")
    #print "RepeaterSetup %d %d"%(reqid, comm_port)
    subprocess.call(["vim", "-S", script_loc, "-c", "RepeaterSetup %d %d"%(reqid, comm.comm_port)])
|
0
pappy-proxy/schema/__init__.py
Normal file
0
pappy-proxy/schema/__init__.py
Normal file
54
pappy-proxy/schema/schema_1.py
Normal file
54
pappy-proxy/schema/schema_1.py
Normal file
|
@ -0,0 +1,54 @@
|
|||
from twisted.internet import defer
|
||||
|
||||
"""
|
||||
Schema v1
|
||||
|
||||
Description:
|
||||
The initial schema for the first version of the proxy. It includes the creation
|
||||
of the schema_meta table and other data tables.
|
||||
"""
|
||||
|
||||
# Ordered DDL/DML statements creating the v1 schema; executed one at a time
# by update() below.
update_queries = [
    """
    CREATE TABLE responses (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        full_response BLOB NOT NULL,
        unmangled_id INTEGER REFERENCES responses(id)
    );
    """,

    """
    CREATE TABLE requests (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        full_request BLOB NOT NULL,
        tag TEXT,
        submitted INTEGER NOT NULL,
        response_id INTEGER REFERENCES responses(id),
        unmangled_id INTEGER REFERENCES requests(id),
        start_datetime TEXT,
        end_datetime TEXT
    );
    """,

    """
    CREATE TABLE schema_meta (
        version INTEGER NOT NULL
    );
    """,

    """
    CREATE TABLE scope (
        filter_order INTEGER NOT NULL,
        filter_string TEXT NOT NULL
    );
    """,

    """
    INSERT INTO schema_meta (version) VALUES (1);
    """,
]
|
||||
|
||||
@defer.inlineCallbacks
def update(dbpool):
    """Apply every v1 schema statement against the given adbapi pool."""
    # Statements must run in order: requests references responses, and the
    # final INSERT stamps the version last.
    for statement in update_queries:
        yield dbpool.runQuery(statement)
|
53
pappy-proxy/schema/update.py
Normal file
53
pappy-proxy/schema/update.py
Normal file
|
@ -0,0 +1,53 @@
|
|||
import os
|
||||
import glob
|
||||
import imp
|
||||
|
||||
from twisted.internet import reactor
|
||||
from twisted.enterprise import adbapi
|
||||
from twisted.internet import defer
|
||||
|
||||
@defer.inlineCallbacks
def get_schema_version(dbpool):
    """Return the datafile's current schema version via the deferred.

    Returns 0 when the schema_meta table does not exist yet (fresh datafile).
    """
    schema_exists = yield dbpool.runQuery("SELECT name FROM sqlite_master WHERE type='table' AND name='schema_meta';")
    if not schema_exists:
        # If we get an empty list, we have no schema
        defer.returnValue(0)
    else:
        schema_version_result = yield dbpool.runQuery("SELECT version FROM schema_meta;")

        # There should only be one row in the meta table
        assert(len(schema_version_result) == 1)

        # Return the retrieved version
        version = schema_version_result[0][0]
        defer.returnValue(version)
|
||||
|
||||
def add_schema_files(schemas):
    """Import every schema_*.py beside this file and append each one to
    *schemas* (mutated in place) as a (module_name, module) tuple."""
    here = os.path.dirname(os.path.abspath(__file__))
    for path in glob.glob(here + "/schema_*.py"):
        # e.g. ".../schema_1.py" -> "schema_1"
        name = os.path.basename(os.path.splitext(path)[0])
        module = imp.load_source(name, path)
        schemas.append((name, module))
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def update_schema(dbpool):
|
||||
# Update the database schema to the latest version
|
||||
schema_version = yield get_schema_version(dbpool)
|
||||
schemas = []
|
||||
add_schema_files(schemas)
|
||||
schemas = sorted(schemas, key=lambda tup: tup[0])
|
||||
for i in range(schema_version, len(schemas)):
|
||||
# schemas[0] is v1, schemas[1] is v2, etc
|
||||
print "Updating datafaile schema to version %d" % (i+1)
|
||||
yield schemas[i][1].update(dbpool)
|
||||
|
||||
@defer.inlineCallbacks
def main():
    """Standalone entry point: open data.db, migrate it, stop the reactor."""
    # check_same_thread=False: adbapi runs queries on a thread pool, not the
    # thread that opened the sqlite connection.
    dbpool = adbapi.ConnectionPool("sqlite3", "data.db", check_same_thread=False)
    yield update_schema(dbpool)
    reactor.stop()

if __name__ == '__main__':
    # Defer main() until the reactor is running, then block in the loop.
    reactor.callWhenRunning(main)
    reactor.run()
|
0
pappy-proxy/tests/__init__.py
Normal file
0
pappy-proxy/tests/__init__.py
Normal file
402
pappy-proxy/tests/test_context.py
Normal file
402
pappy-proxy/tests/test_context.py
Normal file
|
@ -0,0 +1,402 @@
|
|||
import pytest
|
||||
|
||||
import context
|
||||
from http import Request, Response, ResponseCookie
|
||||
|
||||
@pytest.fixture
def http_request():
    # Minimal GET request shared by several filter tests below.
    return Request('GET / HTTP/1.1\r\n')

def test_filter_reqs():
    # Placeholder: filtering whole request lists is not covered yet.
    pass
||||
|
||||
def test_gen_filter_by_all_request():
    # A filter built by gen_filter_by_all must match 'hello' appearing
    # anywhere in the request: verb, path, body, header keys/values and
    # cookie keys/values; the negated filter must be its exact complement.
    f = context.gen_filter_by_all(context.cmp_contains, 'hello')
    fn = context.gen_filter_by_all(context.cmp_contains, 'hello', negate=True)

    # Nowhere
    r = Request('GET / HTTP/1.1\r\n')
    assert not f(r)
    assert fn(r)

    # Verb
    r = Request('hello / HTTP/1.1\r\n')
    assert f(r)
    assert not fn(r)

    # Path
    r = Request('GET /hello HTTP/1.1\r\n')
    assert f(r)
    assert not fn(r)

    # Data
    r = Request('GET / HTTP/1.1\r\n')
    r.raw_data = 'hello'
    assert f(r)
    assert not fn(r)

    # Header key
    r = Request('GET / HTTP/1.1\r\n')
    r.headers['hello'] = 'goodbye'
    assert f(r)
    assert not fn(r)

    # Header value
    r = Request('GET / HTTP/1.1\r\n')
    r.headers['goodbye'] = 'hello'
    assert f(r)
    assert not fn(r)

    # Nowhere in headers
    r = Request('GET / HTTP/1.1\r\n')
    r.headers['goodbye'] = 'for real'
    assert not f(r)
    assert fn(r)

    # Cookie key
    r = Request('GET / HTTP/1.1\r\n')
    r.cookies['hello'] = 'world'
    r.update_from_objects()
    assert f(r)
    assert not fn(r)

    # Cookie value
    r = Request('GET / HTTP/1.1\r\n')
    r.cookies['world'] = 'hello'
    r.update_from_objects()
    assert f(r)
    assert not fn(r)

    # Nowhere in cookie
    r = Request('GET / HTTP/1.1\r\n')
    r.cookies['world'] = 'sucks'
    r.update_from_objects()
    assert not f(r)
    assert fn(r)
|
||||
def test_gen_filter_by_all_response(http_request):
    # Same as the request variant, but 'hello' is searched for in the
    # attached response: status text, body, headers and Set-Cookie data.
    f = context.gen_filter_by_all(context.cmp_contains, 'hello')
    fn = context.gen_filter_by_all(context.cmp_contains, 'hello', negate=True)

    # Nowhere
    r = Response('HTTP/1.1 200 OK\r\n')
    http_request.response = r
    assert not f(http_request)
    assert fn(http_request)

    # Response text
    r = Response('HTTP/1.1 200 hello\r\n')
    http_request.response = r
    assert f(http_request)
    assert not fn(http_request)

    # Data
    r = Response('HTTP/1.1 200 OK\r\n')
    http_request.response = r
    r.raw_data = 'hello'
    assert f(http_request)
    assert not fn(http_request)

    # Header key
    r = Response('HTTP/1.1 200 OK\r\n')
    http_request.response = r
    r.headers['hello'] = 'goodbye'
    assert f(http_request)
    assert not fn(http_request)

    # Header value
    r = Response('HTTP/1.1 200 OK\r\n')
    http_request.response = r
    r.headers['goodbye'] = 'hello'
    assert f(http_request)
    assert not fn(http_request)

    # Nowhere in headers
    r = Response('HTTP/1.1 200 OK\r\n')
    http_request.response = r
    r.headers['goodbye'] = 'for real'
    assert not f(http_request)
    assert fn(http_request)

    # Cookie key
    r = Response('HTTP/1.1 200 OK\r\n')
    http_request.response = r
    r.add_cookie(ResponseCookie('hello=goodbye'))
    r.update_from_objects()
    assert f(http_request)
    assert not fn(http_request)

    # Cookie value
    r = Response('HTTP/1.1 200 OK\r\n')
    http_request.response = r
    r.add_cookie(ResponseCookie('goodbye=hello'))
    r.update_from_objects()
    assert f(http_request)
    assert not fn(http_request)

    # Nowhere in cookie
    r = Response('HTTP/1.1 200 OK\r\n')
    http_request.response = r
    r.add_cookie(ResponseCookie('goodbye=for real'))
    r.update_from_objects()
    assert not f(http_request)
    assert fn(http_request)
|
||||
|
||||
def test_filter_by_host(http_request):
    # The host filter must look only at the parsed Host, not at arbitrary
    # header values that happen to contain the needle.
    f = context.gen_filter_by_host(context.cmp_contains, 'sexy')
    fn = context.gen_filter_by_host(context.cmp_contains, 'sexy', negate=True)

    http_request.headers['Host'] = 'google.com'
    http_request.headers['MiscHeader'] = 'vim.sexy'
    assert not f(http_request)
    assert fn(http_request)

    http_request.headers['Host'] = 'vim.sexy'
    http_request.update_from_text()
    assert http_request.host == 'vim.sexy'
    assert f(http_request)
    assert not fn(http_request)
|
||||
|
||||
def test_filter_by_body():
    # The body filter must match only the body, not the path or headers,
    # for both the request body and the attached response body.
    f = context.gen_filter_by_body(context.cmp_contains, 'sexy')
    fn = context.gen_filter_by_body(context.cmp_contains, 'sexy', negate=True)

    # Test request bodies
    r = Request()
    r.status_line = 'GET /sexy HTTP/1.1'
    r.headers['Header'] = 'sexy'
    r.raw_data = 'foo'
    assert not f(r)
    assert fn(r)

    r.raw_data = 'sexy'
    assert f(r)
    assert not fn(r)

    # Test response bodies
    r = Request()
    rsp = Response()
    rsp.status_line = 'HTTP/1.1 200 OK'
    rsp.headers['sexy'] = 'sexy'
    r.status_line = 'GET /sexy HTTP/1.1'
    r.headers['Header'] = 'sexy'
    r.response = rsp
    assert not f(r)
    assert fn(r)

    rsp.raw_data = 'sexy'
    assert f(r)
    assert not fn(r)
|
||||
|
||||
def test_filter_by_response_code(http_request):
    # Match on the numeric status code of the attached response.
    f = context.gen_filter_by_response_code(context.cmp_eq, 200)
    fn = context.gen_filter_by_response_code(context.cmp_eq, 200, negate=True)

    r = Response()
    http_request.response = r
    r.status_line = 'HTTP/1.1 404 Not Found'
    assert not f(http_request)
    assert fn(http_request)

    r.status_line = 'HTTP/1.1 200 OK'
    assert f(http_request)
    assert not fn(http_request)
|
||||
|
||||
def test_filter_by_raw_headers_request():
    # Raw-header filters match against the headers serialized as text, so
    # patterns can span the "Key: value" boundary and even line breaks.
    # This variant exercises request headers only.
    f1 = context.gen_filter_by_raw_headers(context.cmp_contains, 'Sexy:')
    fn1 = context.gen_filter_by_raw_headers(context.cmp_contains, 'Sexy:', negate=True)
    f2 = context.gen_filter_by_raw_headers(context.cmp_contains, 'sexy\r\nHeader')
    fn2 = context.gen_filter_by_raw_headers(context.cmp_contains, 'sexy\r\nHeader', negate=True)

    r = Request('GET / HTTP/1.1\r\n')
    rsp = Response('HTTP/1.1 200 OK\r\n')
    r.response = rsp
    r.headers['Header'] = 'Sexy'
    assert not f1(r)
    assert fn1(r)
    assert not f2(r)
    assert fn2(r)

    r = Request('GET / HTTP/1.1\r\n')
    rsp = Response('HTTP/1.1 200 OK\r\n')
    r.response = rsp
    r.headers['Sexy'] = 'sexy'
    assert f1(r)
    assert not fn1(r)
    assert not f2(r)
    assert fn2(r)

    r.headers['OtherHeader'] = 'sexy'
    r.headers['Header'] = 'foo'
    assert f1(r)
    assert not fn1(r)
    assert f2(r)
    assert not fn2(r)

def test_filter_by_raw_headers_response():
    # Same patterns as above, but the headers live on the response.
    f1 = context.gen_filter_by_raw_headers(context.cmp_contains, 'Sexy:')
    fn1 = context.gen_filter_by_raw_headers(context.cmp_contains, 'Sexy:', negate=True)
    f2 = context.gen_filter_by_raw_headers(context.cmp_contains, 'sexy\r\nHeader')
    fn2 = context.gen_filter_by_raw_headers(context.cmp_contains, 'sexy\r\nHeader', negate=True)

    r = Request('GET / HTTP/1.1\r\n')
    rsp = Response('HTTP/1.1 200 OK\r\n')
    r.response = rsp
    rsp.headers['Header'] = 'Sexy'
    assert not f1(r)
    assert fn1(r)
    assert not f2(r)
    assert fn2(r)

    r = Request('GET / HTTP/1.1\r\n')
    rsp = Response('HTTP/1.1 200 OK\r\n')
    r.response = rsp
    rsp.headers['Sexy'] = 'sexy'
    assert f1(r)
    assert not fn1(r)
    assert not f2(r)
    assert fn2(r)

    rsp.headers['OtherHeader'] = 'sexy'
    rsp.headers['Header'] = 'foo'
    assert f1(r)
    assert not fn1(r)
    assert f2(r)
    assert not fn2(r)
|
||||
|
||||
def test_filter_by_path(http_request):
    # Path filter matches the needle anywhere in the request path.
    f = context.gen_filter_by_path(context.cmp_contains, 'porn') # find the fun websites
    fn = context.gen_filter_by_path(context.cmp_contains, 'porn', negate=True) # find the boring websites

    http_request.status_line = 'GET / HTTP/1.1'
    assert not f(http_request)
    assert fn(http_request)

    http_request.status_line = 'GET /path/to/great/porn HTTP/1.1'
    assert f(http_request)
    assert not fn(http_request)

    http_request.status_line = 'GET /path/to/porn/great HTTP/1.1'
    assert f(http_request)
    assert not fn(http_request)
|
||||
|
||||
def test_gen_filter_by_submitted_cookies():
    # One-arg form matches on cookie key only; four-arg form requires the
    # key match AND a value match on the same cookie.
    f1 = context.gen_filter_by_submitted_cookies(context.cmp_contains, 'Session')
    f2 = context.gen_filter_by_submitted_cookies(context.cmp_contains, 'Cookie',
                                                 context.cmp_contains, 'CookieVal')
    r = Request(('GET / HTTP/1.1\r\n'
                 'Cookie: foo=bar\r\n'
                 '\r\n'))
    assert not f1(r)
    assert not f2(r)

    r = Request(('GET / HTTP/1.1\r\n'
                 'Cookie: Session=bar\r\n'
                 '\r\n'))
    assert f1(r)
    assert not f2(r)

    r = Request(('GET / HTTP/1.1\r\n'
                 'Cookie: Session=bar; CookieThing=NoMatch\r\n'
                 '\r\n'))
    assert f1(r)
    assert not f2(r)

    r = Request(('GET / HTTP/1.1\r\n'
                 'Cookie: Session=bar; CookieThing=CookieValue\r\n'
                 '\r\n'))
    assert f1(r)
    assert f2(r)

def test_gen_filter_by_set_cookies():
    # Same key / key+value semantics, applied to the response's
    # Set-Cookie headers.
    f1 = context.gen_filter_by_set_cookies(context.cmp_contains, 'Session')
    f2 = context.gen_filter_by_set_cookies(context.cmp_contains, 'Cookie',
                                           context.cmp_contains, 'CookieVal')

    r = Request('GET / HTTP/1.1\r\n\r\n')
    rsp = Response(('HTTP/1.1 200 OK\r\n'
                    'Set-Cookie: foo=bar\r\n'
                    '\r\n'))
    r.response = rsp
    assert not f1(r)
    assert not f2(r)

    r = Request('GET / HTTP/1.1\r\n\r\n')
    rsp = Response(('HTTP/1.1 200 OK\r\n'
                    'Set-Cookie: foo=bar\r\n'
                    'Set-Cookie: Session=Banana\r\n'
                    '\r\n'))
    r.response = rsp
    assert f1(r)
    assert not f2(r)

    r = Request('GET / HTTP/1.1\r\n\r\n')
    rsp = Response(('HTTP/1.1 200 OK\r\n'
                    'Set-Cookie: foo=bar\r\n'
                    'Set-Cookie: Session=Banana\r\n'
                    'Set-Cookie: CookieThing=NoMatch\r\n'
                    '\r\n'))
    r.response = rsp
    assert f1(r)
    assert not f2(r)

    r = Request('GET / HTTP/1.1\r\n\r\n')
    rsp = Response(('HTTP/1.1 200 OK\r\n'
                    'Set-Cookie: foo=bar\r\n'
                    'Set-Cookie: Session=Banana\r\n'
                    'Set-Cookie: CookieThing=CookieValue\r\n'
                    '\r\n'))
    r.response = rsp
    assert f1(r)
    assert f2(r)
|
||||
|
||||
def test_filter_by_params_get():
    # Parameter filters applied to URL query-string parameters.
    f1 = context.gen_filter_by_params(context.cmp_contains, 'Session')
    f2 = context.gen_filter_by_params(context.cmp_contains, 'Cookie',
                                      context.cmp_contains, 'CookieVal')

    r = Request('GET / HTTP/1.1\r\n\r\n')
    assert not f1(r)
    assert not f2(r)

    r = Request('GET /?Session=foo HTTP/1.1\r\n\r\n')
    assert f1(r)
    assert not f2(r)

    r = Request('GET /?Session=foo&CookieThing=Fail HTTP/1.1\r\n\r\n')
    assert f1(r)
    assert not f2(r)

    r = Request('GET /?Session=foo&CookieThing=CookieValue HTTP/1.1\r\n\r\n')
    assert f1(r)
    assert f2(r)

def test_filter_by_params_post():
    # The same filters must also see urlencoded POST body parameters.
    f1 = context.gen_filter_by_params(context.cmp_contains, 'Session')
    f2 = context.gen_filter_by_params(context.cmp_contains, 'Cookie',
                                      context.cmp_contains, 'CookieVal')

    r = Request(('GET / HTTP/1.1\r\n'
                 'Content-Type: application/x-www-form-urlencoded\r\n\r\n'))
    r.raw_data = 'foo=bar'
    assert not f1(r)
    assert not f2(r)

    r = Request(('GET / HTTP/1.1\r\n'
                 'Content-Type: application/x-www-form-urlencoded\r\n\r\n'))
    r.raw_data = 'Session=bar'
    assert f1(r)
    assert not f2(r)

    r = Request(('GET / HTTP/1.1\r\n'
                 'Content-Type: application/x-www-form-urlencoded\r\n\r\n'))
    r.raw_data = 'Session=bar&Cookie=foo'
    assert f1(r)
    assert not f2(r)

    r = Request(('GET / HTTP/1.1\r\n'
                 'Content-Type: application/x-www-form-urlencoded\r\n\r\n'))
    r.raw_data = 'Session=bar&CookieThing=CookieValue'
    assert f1(r)
    assert f2(r)
|
994
pappy-proxy/tests/test_http.py
Normal file
994
pappy-proxy/tests/test_http.py
Normal file
|
@ -0,0 +1,994 @@
|
|||
import base64
|
||||
import gzip
|
||||
import json
|
||||
import pytest
|
||||
import StringIO
|
||||
import zlib
|
||||
|
||||
from pappy import http
|
||||
|
||||
####################
|
||||
# Helper Functions
|
||||
|
||||
class TException(Exception):
    # Sentinel exception raised by test callbacks to prove they were called.
    pass
|
||||
|
||||
def by_lines_and_full_helper(Type, id_attr, load_func, header_lines, data=''):
    # Creates a request/response and returns versions created/recreated in
    # different ways. All of them should be equivalent.
    # Returned:
    #  (created with constructor,
    #   created with add_line and add_data
    #   after calling update() on it,
    #   created by serializing and unserializing to json)
    # NOTE(review): id_attr and load_func are accepted but never used in
    # this body — confirm whether they are vestigial.

    # Built incrementally, one header line (and then the body) at a time.
    t_lines = Type()
    for l in header_lines:
        t_lines.add_line(l)

    if data:
        t_lines.add_data(data)

    # Built in one shot from the full message text.
    t_fulls = '\r\n'.join(header_lines)+'\r\n'
    t_fulls += data
    t_full = Type(t_fulls)
    t_updated = Type(t_fulls)

    # Round-tripped through the JSON serialization.
    t_json = Type(t_fulls)
    t_json.from_json(t_json.to_json())

    return (t_full, t_lines, t_updated, t_json)
|
||||
|
||||
def req_by_lines_and_full(header_lines, data=''):
    # Generates r_full, r_lines using the given header lines and data
    # r_lines is created with add_line/add_data and r_full is created with
    # the constructor
    return by_lines_and_full_helper(http.Request, 'reqid',
                                    http.Request.load_request,
                                    header_lines, data)

def rsp_by_lines_and_full(header_lines, data=''):
    # Generates r_full, r_lines using the given header lines and data
    # r_lines is created with add_line/add_data and r_full is created with
    # the constructor
    return by_lines_and_full_helper(http.Response, 'rspid',
                                    http.Response.load_response,
                                    header_lines, data)
|
||||
|
||||
def gzip_string(string):
    """Return *string* compressed with gzip (full gzip container format)."""
    # GzipFile needs a file-like sink; collect the output in memory.
    buf = StringIO.StringIO()
    gz = gzip.GzipFile(fileobj=buf, mode="w")
    try:
        gz.write(string)
    finally:
        # Closing flushes the gzip trailer into the buffer.
        gz.close()
    return buf.getvalue()
|
||||
|
||||
def deflate_string(string):
    """Return *string* compressed with zlib/deflate.

    The original wrapped zlib's output in a StringIO only to immediately
    .read() it back — a pointless round-trip of an in-memory value. The
    compressed bytes are now returned directly; the result is identical.
    """
    return zlib.compress(string)
|
||||
|
||||
def check_response_cookies(exp_pairs, rsp):
    # Assert that the response's cookies, as (key, value) pairs in order,
    # equal exp_pairs. all_pairs() yields (key, ResponseCookie) tuples.
    pairs = rsp.cookies.all_pairs()
    pairs = [(c.key, c.val) for k, c in pairs]
    assert pairs == exp_pairs
|
||||
|
||||
|
||||
####################
|
||||
# Data storage
|
||||
|
||||
def test_chunked_simple():
    # Test a simple add_data
    c = http.ChunkedData()
    assert (not c.complete)

    # One 5-byte chunk followed by the zero-length terminator.
    full_data = '5\r\n'
    full_data += 'A'*5
    full_data += '\r\n'
    full_data += '0\r\n\r\n'
    c.add_data(full_data)
    assert c.complete
    assert c.raw_data == 'A'*5

def test_chunked_hex():
    # Test hex lengths: lowercase, uppercase, and mixed case must all parse.
    c = http.ChunkedData()
    full_data = 'af\r\n'
    full_data += 'A'*0xAF
    full_data += '\r\n'
    full_data += '0\r\n\r\n'
    c.add_data(full_data)
    assert c.complete
    assert c.raw_data == 'A'*0xAF

    c = http.ChunkedData()
    full_data = 'AF\r\n'
    full_data += 'A'*0xAF
    full_data += '\r\n'
    full_data += '0\r\n\r\n'
    c.add_data(full_data)
    assert c.complete
    assert c.raw_data == 'A'*0xAF

    c = http.ChunkedData()
    full_data = 'aF\r\n'
    full_data += 'A'*0xAF
    full_data += '\r\n'
    full_data += '0\r\n\r\n'
    c.add_data(full_data)
    assert c.complete
    assert c.raw_data == 'A'*0xAF
|
||||
|
||||
def test_chunked_leading_zeros():
    # Test leading zeros
    c = http.ChunkedData()
    full_data = '000000000000000aF\r\n'
    full_data += 'A'*0xAF
    full_data += '\r\n'
    full_data += '0\r\n\r\n'
    c.add_data(full_data)
    assert c.complete
    assert c.raw_data == 'A'*0xAF

def test_chunked_one_char_add():
    # Test adding one character at a time
    c = http.ChunkedData()
    full_data = 'af\r\n'
    full_data += 'A'*0xAF
    full_data += '\r\n'
    full_data += '0\r\n\r\n'
    for ch in full_data:
        c.add_data(ch)
    assert c.complete
    assert c.raw_data == 'A'*0xAF

def test_chunked_incomplete():
    # Tests that complete isn't true until the data is received
    full_data = 'af\r\n'
    full_data += 'A'*0xAF
    full_data += '\r\n'
    full_data += '0' # right now we're fine ending on 0 without \r\n
    # Every proper prefix of the stream must leave the parser incomplete.
    for i in range(len(full_data)-1):
        c = http.ChunkedData()
        c.add_data(full_data[:i])
        assert not c.complete

    # Test incomplete one character at a time
    full_data = 'af\r\n'
    full_data += 'A'*0xAF
    full_data += '\r\n'
    full_data += '0' # right now we're fine ending on 0 without \r\n
    for i in range(len(full_data)-1):
        c = http.ChunkedData()
        for ii in range(i):
            c.add_data(full_data[ii])
        assert not c.complete
|
||||
|
||||
def test_length_data_simple():
    # Basic test
    l = http.LengthData(100)
    assert not l.complete
    l.add_data('A'*100)
    assert l.complete
    assert l.raw_data == 'A'*100

    # Zero-length bodies are complete immediately.
    l = http.LengthData(0)
    assert l.complete
    assert l.raw_data == ''

    # Test incomplete
    l = http.LengthData(100)
    l.add_data('A'*99)
    assert not l.complete

def test_length_one_character():
    # Test adding one character at a time
    l = http.LengthData(100)
    for i in range(100):
        l.add_data('A')
    assert l.complete
    assert l.raw_data == 'A'*100

    # Test adding one character at a time (incomplete)
    l = http.LengthData(100)
    for i in range(99):
        l.add_data('A')
    assert not l.complete

def test_length_overflow():
    # Test only saving the given number of chars
    l = http.LengthData(100)
    l.add_data('A'*400)
    assert l.complete
    assert l.raw_data == 'A'*100

    # Test throwing an exception when adding data after complete
    l = http.LengthData(100)
    l.add_data('A'*100)
    with pytest.raises(http.DataAlreadyComplete):
        l.add_data('A')
|
||||
|
||||
def test_repeatable_dict_simple():
    # RepeatableDict keeps every (key, value) pair in insertion order;
    # d[key] returns the most recent value for that key.
    d = http.RepeatableDict()
    assert not 'foo' in d
    d['foo'] = 'bar'
    assert 'foo' in d
    d['baz'] = 'fuzz'
    d.append('foo', 'fizz')
    assert d['foo'] == 'fizz'
    assert d['baz'] == 'fuzz'
    assert d.all_vals('foo') == ['bar', 'fizz']
    assert d.all_pairs() == [('foo', 'bar'),
                             ('baz', 'fuzz'),
                             ('foo', 'fizz')]
    assert not 'fee' in d
    d.add_pairs([('fee', 'fi'),
                 ('foo', 'fo')])
    assert 'fee' in d
    assert d['fee'] == 'fi'
    assert d['baz'] == 'fuzz'
    assert d['foo'] == 'fo'
    assert d.all_vals('foo') == ['bar', 'fizz', 'fo']
    assert d.all_pairs() == [('foo', 'bar'),
                             ('baz', 'fuzz'),
                             ('foo', 'fizz'),
                             ('fee', 'fi'),
                             ('foo', 'fo')]

def test_repeatable_dict_constructor():
    d = http.RepeatableDict([('foo','bar'),('baz','fuzz')])
    assert 'foo' in d
    assert d['foo'] == 'bar'
    assert d['baz'] == 'fuzz'
    assert d.all_vals('foo') == ['bar']
    assert d.all_pairs() == [('foo', 'bar'),
                             ('baz', 'fuzz')]

def test_repeatable_dict_case_insensitive():
    # With case_insensitive=True, lookups ignore case but all_pairs()
    # preserves the keys exactly as they were inserted.
    def test(d):
        assert 'foo' in d
        assert 'fOo' in d
        assert d['foo'] == 'fuzz'
        assert d['Foo'] == 'fuzz'
        assert d['FoO'] == 'fuzz'

        assert d.all_vals('foo') == ['bar', 'fuzz']
        assert d.all_vals('Foo') == ['bar', 'fuzz']
        assert d.all_vals('FoO') == ['bar', 'fuzz']

        assert d.all_pairs() == [('foo', 'bar'),
                                 ('fOo', 'fuzz')]

    d = http.RepeatableDict([('foo','bar'),('fOo','fuzz')], case_insensitive=True)
    test(d)

    d = http.RepeatableDict(case_insensitive=True)
    d['foo'] = 'bar'
    d.append('fOo', 'fuzz')
    test(d)

    d = http.RepeatableDict(case_insensitive=True)
    d.add_pairs([('foo','bar'),('fOo','fuzz')])
    test(d)

def test_repeatable_dict_overwrite():
    # Plain assignment collapses all existing pairs for the key.
    d = http.RepeatableDict([('foo','bar'),('foo','fuzz'),('bar','baz')])
    d['foo'] = 'asdf'
    assert d.all_vals('foo') == ['asdf']

def test_repeatable_dict_deletion():
    # del removes every pair for the key (case-insensitively here).
    d = http.RepeatableDict([('foo','bar'),('fOo','fuzz'),('bar','baz')],
                            case_insensitive=True)
    assert 'foo' in d
    del d['foo']
    assert not 'foo' in d

    with pytest.raises(KeyError):
        x = d['foo']

    with pytest.raises(KeyError):
        x = d['fOo']

    assert d['bar'] == 'baz'
    assert d.all_vals('foo') == []

def test_repeatable_dict_callback():
    # Once a modify callback is set, every mutating operation must invoke it.
    def f():
        raise TException()

    r = http.RepeatableDict()
    r['a'] = 'b'
    r.add_pairs([('c', 'd')])
    r.update('a', 'c')

    r.set_modify_callback(f)
    with pytest.raises(TException):
        r['a'] = 'b'
    with pytest.raises(TException):
        r.add_pairs([('c', 'd')])
    with pytest.raises(TException):
        r.update('a', 'c')
|
||||
|
||||
|
||||
####################
|
||||
## Cookies
|
||||
|
||||
def test_response_cookie_simple():
    # A bare key=value cookie: all attributes default to unset/None.
    s = 'ck=1234;'
    c = http.ResponseCookie(s)
    assert c.key == 'ck'
    assert c.val == '1234'
    assert not c.secure
    assert not c.http_only
    assert c.domain is None
    assert c.expires is None
    assert c.max_age is None
    assert c.path is None

def test_response_cookie_params():
    # All standard attributes, space-separated after each ';'.
    s = 'ck=1234; Expires=Wed, 09 Jun 2021 10:18:14 GMT; secure; httponly; path=/; max-age=12; domain=.foo.bar'
    c = http.ResponseCookie(s)
    assert c.key == 'ck'
    assert c.val == '1234'
    assert c.domain == '.foo.bar'
    assert c.expires == 'Wed, 09 Jun 2021 10:18:14 GMT'
    assert c.http_only
    assert c.max_age == 12
    assert c.path == '/'
    assert c.secure

def test_response_cookie_parsing():
    # No spaces after ';', and a value containing '=' — only the first '='
    # separates key from value.
    s = 'ck=1234=567;Expires=Wed, 09 Jun 2021 10:18:14 GMT;secure;httponly;path=/;max-age=12;domain=.foo.bar'
    c = http.ResponseCookie(s)
    assert c.key == 'ck'
    assert c.val == '1234=567'
    assert c.domain == '.foo.bar'
    assert c.expires == 'Wed, 09 Jun 2021 10:18:14 GMT'
    assert c.http_only
    assert c.max_age == 12
    assert c.path == '/'
    assert c.secure

def test_response_cookie_generate():
    # Placeholder: serialization of ResponseCookie is not covered yet.
    pass
|
||||
|
||||
|
||||
####################
|
||||
## Request tests
|
||||
|
||||
def test_request_simple():
    # A full request parsed four ways (constructor, add_line/add_data,
    # update, JSON round-trip) must expose identical attributes.
    header_lines = [
        'GET / HTTP/1.1',
        'Content-Type: text/xml; charset="utf-8"',
        'Accept-Encoding: gzip,deflate',
        'User-Agent: TestAgent',
        'Host: www.test.com',
        'Content-Length: 100',
        'Connection: Keep-Alive',
        'Cache-Control: no-cache',
        '',
        ]
    headers = '\r\n'.join(header_lines)+'\r\n'
    data = 'A'*100
    rf, rl, ru, rj = req_by_lines_and_full(header_lines, data)
    def test(r):
        assert r.complete
        assert r.fragment == None
        assert r.full_request == headers+data
        assert r.header_len == len(headers)
        assert r.headers_complete
        assert r.host == 'www.test.com'
        assert r.is_ssl == False
        assert r.path == '/'
        assert r.port == 80
        assert r.status_line == 'GET / HTTP/1.1'
        assert r.verb == 'GET'
        assert r.version == 'HTTP/1.1'
        # Header lookups are case-insensitive.
        assert r.headers['Content-Length'] == '100'
        assert r.headers['CoNtent-lENGTH'] == '100'
        assert r.headers['Content-Type'] == 'text/xml; charset="utf-8"'
        assert r.headers['Accept-Encoding'] == 'gzip,deflate'
        assert r.headers['User-Agent'] == 'TestAgent'
        assert r.headers['Host'] == 'www.test.com'
        assert r.headers['Connection'] == 'Keep-Alive'
        assert r.headers['Cache-Control'] == 'no-cache'
        assert r.raw_data == 'A'*100
    test(rf)
    test(rl)
    test(ru)
    test(rj)
|
||||
|
||||
def test_request_urlparams():
    # Query-string parameters and the fragment must be parsed out of the URL.
    header_lines = [
        'GET /?p1=foo&p2=bar#frag HTTP/1.1',
        'Content-Length: 0',
        '',
        ]
    rf, rl, ru, rj = req_by_lines_and_full(header_lines)
    def test(r):
        assert r.complete
        assert r.fragment == 'frag'
        assert r.get_params['p1'] == 'foo'
        assert r.get_params['p2'] == 'bar'
        assert r.full_request == ('GET /?p1=foo&p2=bar#frag HTTP/1.1\r\n'
                                  'Content-Length: 0\r\n'
                                  '\r\n')
    test(rf)
    test(rl)
    test(ru)
    test(rj)

def test_request_questionmark_url():
    # Only the FIRST '?' starts the query string; later '?'s belong to
    # the first parameter's key.
    header_lines = [
        'GET /path/??/to/?p1=foo&p2=bar#frag HTTP/1.1',
        'Content-Length: 0',
        '',
        ]
    rf, rl, ru, rj = req_by_lines_and_full(header_lines)
    def test(r):
        assert r.complete
        assert r.fragment == 'frag'
        assert r.get_params['?/to/?p1'] == 'foo'
        assert r.get_params['p2'] == 'bar'
        assert r.full_request == ('GET /path/??/to/?p1=foo&p2=bar#frag HTTP/1.1\r\n'
                                  'Content-Length: 0\r\n'
                                  '\r\n')
    test(rf)
    test(rl)
    test(ru)
    test(rj)

def test_request_postparams():
    # urlencoded POST bodies populate post_params.
    header_lines = [
        'GET / HTTP/1.1',
        'Content-Length: 9',
        'Content-Type: application/x-www-form-urlencoded',
        '',
        ]
    data = 'a=b&c=dee'
    rf, rl, ru, rj = req_by_lines_and_full(header_lines, data)
    def test(r):
        assert r.complete
        assert r.post_params['a'] == 'b'
        assert r.post_params['c'] == 'dee'
    test(rf)
    test(rl)
    test(ru)
    test(rj)
|
||||
|
||||
def test_headers_end():
    # After the blank line the headers are complete, but the request is
    # not (Content-Length promises 100 more bytes).
    header_lines = [
        'GET / HTTP/1.1',
        'Content-Type: text/xml; charset="utf-8"',
        'Accept-Encoding: gzip,deflate',
        'User-Agent: TestAgent',
        'Host: www.test.com',
        'Content-Length: 100',
        'Connection: Keep-Alive',
        'Cache-Control: no-cache',
        '',
        ]
    r = http.Request()
    for l in header_lines:
        r.add_line(l)
    assert not r.complete
    assert r.headers_complete

def test_request_cookies():
    # Repeated cookie keys: item access returns the LAST value, while
    # all_vals preserves every value in order.
    header_lines = [
        'GET /?p1=foo&p2=bar#frag HTTP/1.1',
        'Content-Length: 0',
        'Cookie: abc=WRONG; def=456; ghi=789; abc=123',
        '',
        ]
    rf, rl, ru, rj = req_by_lines_and_full(header_lines)
    def test(r):
        assert r.complete
        assert r.cookies['abc'] == '123'
        assert r.cookies['def'] == '456'
        assert r.cookies['ghi'] == '789'
        assert r.cookies.all_vals('abc') == ['WRONG', '123']
    test(rf)
    test(rl)
    test(ru)
    test(rj)
|
||||
|
||||
def test_request_parse_host():
    # A Host header with an explicit :443 port implies SSL and strips the
    # port from the parsed host.
    header_lines = [
        'GET / HTTP/1.1',
        'Content-Length: 0',
        'Host: www.test.com:443',
        '',
        ]
    rf, rl, ru, rj = req_by_lines_and_full(header_lines)
    def test(r):
        assert r.complete
        assert r.host == 'www.test.com'
        assert r.is_ssl
    test(rf)
    test(rl)
    test(ru)
    test(rj)

def test_request_newline_delim():
    # Bare '\n' line endings are normalized to '\r\n' on output.
    r = http.Request(('GET / HTTP/1.1\n'
                      'Test-Header: foo\r\n'
                      'Other-header: bar\n\r\n'))
    assert r.full_request == ('GET / HTTP/1.1\r\n'
                              'Test-Header: foo\r\n'
                              'Other-header: bar\r\n\r\n')

def test_repeated_request_headers():
    # Item access on a repeated header returns the last occurrence.
    header_lines = [
        'GET /?p1=foo&p2=bar#frag HTTP/1.1',
        'Content-Length: 0',
        'Test-Header: WRONG',
        'Test-Header: RIGHTiguess',
        '',
        ]
    rf, rl, ru, rj = req_by_lines_and_full(header_lines)
    def test(r):
        assert r.complete
        assert r.headers['test-header'] == 'RIGHTiguess'
    test(rf)
    test(rl)
    test(ru)
    test(rj)
|
||||
|
||||
def test_request_update_statusline():
|
||||
r = http.Request()
|
||||
r.status_line = 'GET / HTTP/1.1'
|
||||
assert r.verb == 'GET'
|
||||
assert r.path == '/'
|
||||
assert r.version == 'HTTP/1.1'
|
||||
assert not r.complete
|
||||
|
||||
assert r.full_request == 'GET / HTTP/1.1\r\n\r\n'
|
||||
|
||||
def test_request_update_cookies():
    """Mutating r.cookies regenerates the Cookie header: new keys append,
    updates happen in place, and append() allows repeats."""
    r = http.Request()
    r.status_line = 'GET / HTTP/1.1'

    # Check new cookies
    r.cookies['foo'] = 'bar'
    r.cookies['baz'] = 'fuzz'
    assert r.full_request == ('GET / HTTP/1.1\r\n'
                              'Cookie: foo=bar; baz=fuzz\r\n'
                              '\r\n')

    # Check updated cookies (should be updated in place)
    r.cookies['foo'] = 'buzz'
    assert r.full_request == ('GET / HTTP/1.1\r\n'
                              'Cookie: foo=buzz; baz=fuzz\r\n'
                              '\r\n')

    # Check repeated cookies
    r.cookies.append('foo', 'bar')
    assert r.full_request == ('GET / HTTP/1.1\r\n'
                              'Cookie: foo=buzz; baz=fuzz; foo=bar\r\n'
                              '\r\n')
|
||||
|
||||
def test_request_update_headers():
    """Headers set via r.headers (including appended repeats) render in
    insertion order, and setting Host updates r.host."""
    r = http.Request()
    r.status_line = 'GET / HTTP/1.1'
    r.headers['Content-Length'] = '0'
    r.headers['Test-Header'] = 'Test Value'
    r.headers['Other-Header'] = 'Other Value'
    r.headers['Host'] = 'www.test.com'
    r.headers.append('Test-Header', 'Test Value2')
    assert r.full_request == ('GET / HTTP/1.1\r\n'
                              'Content-Length: 0\r\n'
                              'Test-Header: Test Value\r\n'
                              'Other-Header: Other Value\r\n'
                              'Host: www.test.com\r\n'
                              'Test-Header: Test Value2\r\n'
                              '\r\n')
    assert r.host == 'www.test.com'
|
||||
|
||||
def test_request_modified_headers():
    """Setting raw_data and cookies overrides manually-set content-length and
    cookie headers, preserving the original header casing."""
    r = http.Request()
    r.status_line = 'GET / HTTP/1.1'
    r.headers['content-length'] = '100'
    r.headers['cookie'] = 'abc=123'
    r.cookies['abc'] = '456'
    r.raw_data = 'AAAA'
    assert r.full_request == ('GET / HTTP/1.1\r\n'
                              'content-length: 4\r\n'
                              'cookie: abc=456\r\n\r\n'
                              'AAAA')
    assert r.headers['content-length'] == '4'
    assert r.headers['cookie'] == 'abc=456'
|
||||
|
||||
def test_request_update_data():
    """Assigning raw_data recomputes content-length (even when it was set to a
    non-string value beforehand)."""
    r = http.Request()
    r.status_line = 'GET / HTTP/1.1'
    r.headers['content-length'] = 500
    r.raw_data = 'AAAA'
    assert r.full_request == ('GET / HTTP/1.1\r\n'
                              'content-length: 4\r\n'
                              '\r\n'
                              'AAAA')
|
||||
def test_request_to_json():
    """to_json() serializes the base64'd full request plus reqid and the
    associated response's id."""
    r = http.Request()
    r.status_line = 'GET / HTTP/1.1'
    r.headers['content-length'] = 500
    r.raw_data = 'AAAA'
    r.reqid = 1

    rsp = http.Response()
    rsp.status_line = 'HTTP/1.1 200 OK'
    rsp.rspid = 2

    r.response = rsp

    expected_reqdata = {'full_request': base64.b64encode(r.full_request),
                        'response_id': rsp.rspid,
                        #'tag': r.tag,
                        'reqid': r.reqid,
                        }

    assert json.loads(r.to_json()) == expected_reqdata
|
||||
|
||||
def test_request_update_content_length():
    """update_content_length=True corrects a wrong Content-Length to match the
    actual body size."""
    r = http.Request(('GET / HTTP/1.1\r\n'
                      'Content-Length: 4\r\n\r\n'
                      'AAAAAAAAAA'), update_content_length=True)

    assert r.full_request == (('GET / HTTP/1.1\r\n'
                               'Content-Length: 10\r\n\r\n'
                               'AAAAAAAAAA'))
|
||||
|
||||
def test_request_blank_get_params():
    """GET-parameter parsing of odd query strings: a '?' inside the query, a
    valueless parameter, and normal key=value pairs."""
    r = http.Request()
    r.add_line('GET /this/??-asdf/ HTTP/1.1')
    assert r.full_request == ('GET /this/??-asdf/ HTTP/1.1\r\n\r\n')

    r = http.Request()
    r.add_line('GET /this/??-asdf/?a=b&c&d=ef HTTP/1.1')
    assert r.full_request == ('GET /this/??-asdf/?a=b&c&d=ef HTTP/1.1\r\n\r\n')
    assert r.get_params['?-asdf/?a'] == 'b'
    # PEP 8: compare to None with identity, not equality ('== None' was a
    # lint-level defect; behavior is unchanged here).
    assert r.get_params['c'] is None
    assert r.get_params['d'] == 'ef'
|
||||
|
||||
|
||||
####################
|
||||
## Response tests
|
||||
|
||||
def test_response_simple():
    """Basic response parsing: status line fields, header values (with
    case-insensitive lookup), header_len, and body extraction."""
    header_lines = [
        'HTTP/1.1 200 OK',
        'Date: Thu, 22 Oct 2015 00:37:17 GMT',
        'Cache-Control: private, max-age=0',
        'Content-Type: text/html; charset=UTF-8',
        'Server: gws',
        'Content-Length: 100',
        '',
    ]
    data = 'A'*100
    header_len = len('\r\n'.join(header_lines)+'\r\n')
    rf, rl, ru, rj = rsp_by_lines_and_full(header_lines, data)
    def test(r):
        assert r.complete
        assert r.header_len == header_len
        assert r.raw_data == data
        assert r.response_code == 200
        assert r.response_text == 'OK'
        assert r.status_line == 'HTTP/1.1 200 OK'
        assert r.version == 'HTTP/1.1'

        assert r.headers['Date'] == 'Thu, 22 Oct 2015 00:37:17 GMT'
        assert r.headers['Cache-Control'] == 'private, max-age=0'
        assert r.headers['Content-Type'] == 'text/html; charset=UTF-8'
        assert r.headers['Server'] == 'gws'
        assert r.headers['Content-Length'] == '100'
        assert r.headers['CoNTEnT-leNGTH'] == '100'

    test(rf)
    test(rl)
    test(ru)
    test(rj)
|
||||
|
||||
def test_response_chunked():
    """Chunked transfer encoding is decoded: chunks (hex-sized) are joined and
    the terminating 0-chunk ends the body."""
    header_lines = [
        'HTTP/1.1 200 OK',
        'Date: Thu, 22 Oct 2015 00:37:17 GMT',
        'Cache-Control: private, max-age=0',
        'Content-Type: text/html; charset=UTF-8',
        'Server: gws',
        'Transfer-Encoding: chunked',
        '',
    ]
    # Two chunks (0xAF 'A's then 0xBF 'B's) followed by the zero-length
    # terminator chunk.
    data = 'af\r\n'
    data += 'A'*0xAF + '\r\n'
    data += 'BF\r\n'
    data += 'B'*0xBF + '\r\n'
    data += '0\r\n\r\n'

    rf, rl, ru, rj = rsp_by_lines_and_full(header_lines, data)
    def test(r):
        assert r.complete
        assert r.raw_data == 'A'*0xAF + 'B'*0xBF

    test(rf)
    test(rl)
    test(ru)
    test(rj)
|
||||
|
||||
def test_response_gzip():
    """gzip Content-Encoding is transparently decompressed into raw_data."""
    data_decomp = 'Hello woru!'
    data_comp = gzip_string(data_decomp)

    header_lines = [
        'HTTP/1.1 200 OK',
        'Date: Thu, 22 Oct 2015 00:37:17 GMT',
        'Cache-Control: private, max-age=0',
        'Content-Type: text/html; charset=UTF-8',
        'Server: gws',
        'Content-Encoding: gzip',
        'Content-Length: %d' % len(data_comp),
        '',
    ]

    rf, rl, ru, rj = rsp_by_lines_and_full(header_lines, data_comp)
    def test(r):
        assert r.complete
        assert r.raw_data == data_decomp

    test(rf)
    test(rl)
    test(ru)
    test(rj)
|
||||
|
||||
def test_response_deflate():
    """deflate Content-Encoding is transparently decompressed into raw_data."""
    data_decomp = 'Hello woru!'
    data_comp = deflate_string(data_decomp)

    header_lines = [
        'HTTP/1.1 200 OK',
        'Date: Thu, 22 Oct 2015 00:37:17 GMT',
        'Cache-Control: private, max-age=0',
        'Content-Type: text/html; charset=UTF-8',
        'Server: gws',
        'Content-Encoding: deflate',
        'Content-Length: %d' % len(data_comp),
        '',
    ]

    rf, rl, ru, rj = rsp_by_lines_and_full(header_lines, data_comp)
    def test(r):
        assert r.complete
        assert r.raw_data == data_decomp

    test(rf)
    test(rl)
    test(ru)
    test(rj)
|
||||
|
||||
def test_response_chunked_gzip():
    """Chunked + gzip combined: the body is de-chunked then decompressed, and
    full_response is rewritten with a plain Content-Length (dropping the
    Transfer-Encoding and Content-Encoding headers)."""
    data_decomp = 'Hello world!'
    data_comp = gzip_string(data_decomp)
    assert len(data_comp) > 3
    # Split the compressed payload across two chunks to exercise reassembly.
    data_chunked = '3\r\n'
    data_chunked += data_comp[:3]
    data_chunked += '\r\n%x\r\n' % (len(data_comp[3:]))
    data_chunked += data_comp[3:]
    data_chunked += '\r\n0\r\n'

    header_lines = [
        'HTTP/1.1 200 OK',
        'Date: Thu, 22 Oct 2015 00:37:17 GMT',
        'Cache-Control: private, max-age=0',
        'Content-Type: text/html; charset=UTF-8',
        'Server: gws',
        'Content-Encoding: gzip',
        'Transfer-Encoding: chunked',
        '',
    ]

    rf, rl, ru, rj = rsp_by_lines_and_full(header_lines, data_chunked)
    def test(r):
        assert r.complete
        assert r.raw_data == data_decomp
        assert r.headers['Content-Length'] == str(len(data_decomp))
        assert r.full_response == ('HTTP/1.1 200 OK\r\n'
                                   'Date: Thu, 22 Oct 2015 00:37:17 GMT\r\n'
                                   'Cache-Control: private, max-age=0\r\n'
                                   'Content-Type: text/html; charset=UTF-8\r\n'
                                   'Server: gws\r\n'
                                   'Content-Length: %d\r\n\r\n'
                                   '%s') % (len(data_decomp), data_decomp)

    test(rf)
    test(rl)
    test(ru)
    test(rj)
|
||||
|
||||
def test_response_early_completion():
    """A Content-Length: 0 response becomes complete as soon as the blank
    header-terminator line arrives."""
    r = http.Response()
    r.status_line = 'HTTP/1.1 200 OK'
    r.add_line('Content-Length: 0')
    assert not r.complete
    r.add_line('')
    assert r.complete
|
||||
|
||||
def test_response_cookies():
    """Set-Cookie parsing: attribute flags (secure, httponly, path, max-age,
    domain, expires), a value containing '=', and multiple cookies."""
    header_lines = [
        'HTTP/1.1 200 OK',
        'Content-Length: 0',
        'Set-Cookie: ck=1234=567;Expires=Wed, 09 Jun 2021 10:18:14 GMT;secure;httponly;path=/;max-age=12;domain=.foo.bar',
        'Set-Cookie: abc=123',
        'Set-Cookie: def=456',
        '',
    ]

    rf, rl, ru, rj = rsp_by_lines_and_full(header_lines)
    def test(r):
        assert r.complete
        assert r.cookies['ck'].key == 'ck'
        assert r.cookies['ck'].val == '1234=567'
        assert r.cookies['ck'].domain == '.foo.bar'
        assert r.cookies['ck'].expires == 'Wed, 09 Jun 2021 10:18:14 GMT'
        assert r.cookies['ck'].http_only
        assert r.cookies['ck'].max_age == 12
        assert r.cookies['ck'].path == '/'
        assert r.cookies['ck'].secure

        assert r.cookies['abc'].val == '123'
        assert r.cookies['def'].val == '456'

    test(rf)
    test(rl)
    test(ru)
    test(rj)
|
||||
|
||||
def test_response_repeated_cookies():
    """Repeated Set-Cookie keys are kept as separate (key, value) pairs in
    header order."""
    r = http.Response(('HTTP/1.1 200 OK\r\n'
                       'Set-Cookie: foo=bar\r\n'
                       'Set-Cookie: baz=buzz\r\n'
                       'Set-Cookie: foo=buzz\r\n'
                       '\r\n'))
    expected_pairs = [('foo', 'bar'), ('baz', 'buzz'), ('foo', 'buzz')]
    check_response_cookies(expected_pairs, r)
|
||||
|
||||
def test_repeated_response_headers():
    # Repeated headers can be used for attacks, so ironically we have to handle
    # them well. We always use the last header as the correct one.
    """Repeated response headers resolve to the last value on lookup."""
    header_lines = [
        'HTTP/1.1 200 OK',
        'Content-Length: 0',
        'Test-Head: WRONG',
        'Test-Head: RIGHTish',
        '',
    ]

    rf, rl, ru, rj = rsp_by_lines_and_full(header_lines)
    def test(r):
        assert r.complete
        assert r.headers['test-head'] == 'RIGHTish'

    test(rf)
    test(rl)
    test(ru)
    test(rj)
|
||||
|
||||
def test_response_update_statusline():
    """Assigning status_line populates version/code/text and full_response,
    but does not mark the response complete."""
    r = http.Response()
    r.status_line = 'HTTP/1.1 200 OK'
    assert r.version == 'HTTP/1.1'
    assert r.response_code == 200
    assert r.response_text == 'OK'
    assert not r.complete

    assert r.full_response == 'HTTP/1.1 200 OK\r\n\r\n'
|
||||
|
||||
def test_response_update_headers():
    """Headers set/appended via r.headers render in insertion order in
    full_response."""
    r = http.Response()
    r.status_line = 'HTTP/1.1 200 OK'
    r.headers['Test-Header'] = 'Test Value'
    r.headers['Other-Header'] = 'Other Value'

    assert r.full_response == ('HTTP/1.1 200 OK\r\n'
                               'Test-Header: Test Value\r\n'
                               'Other-Header: Other Value\r\n\r\n')

    r.headers.append('Test-Header', 'Other Test Value')
    assert r.full_response == ('HTTP/1.1 200 OK\r\n'
                               'Test-Header: Test Value\r\n'
                               'Other-Header: Other Value\r\n'
                               'Test-Header: Other Test Value\r\n\r\n')
|
||||
|
||||
def test_response_update_modified_headers():
    """Assigning raw_data overrides a manually-set content-length, keeping the
    original header casing."""
    r = http.Response()
    r.status_line = 'HTTP/1.1 200 OK'
    r.headers['content-length'] = '500'
    r.raw_data = 'AAAA'
    assert r.full_response == ('HTTP/1.1 200 OK\r\n'
                               'content-length: 4\r\n\r\n'
                               'AAAA')
    assert r.headers['content-length'] == '4'
|
||||
|
||||
def test_response_update_cookies():
    """Set-Cookie headers and r.cookies stay in sync: header edits update the
    cookie jar (last repeat wins), and assigning ResponseCookie objects emits
    Set-Cookie headers with their attributes."""
    r = http.Response()
    r.status_line = 'HTTP/1.1 200 OK'
    # Test by adding headers
    r.headers['Set-Cookie'] = 'abc=123'
    assert r.full_response == ('HTTP/1.1 200 OK\r\n'
                               'Set-Cookie: abc=123\r\n\r\n')
    assert r.cookies['abc'].val == '123'
    r.headers.append('Set-Cookie', 'abc=456')
    assert r.full_response == ('HTTP/1.1 200 OK\r\n'
                               'Set-Cookie: abc=123\r\n'
                               'Set-Cookie: abc=456\r\n\r\n'
                               )
    assert r.cookies['abc'].val == '456'

    r = http.Response()
    r.status_line = 'HTTP/1.1 200 OK'
    # Test by adding cookie objects
    c = http.ResponseCookie('abc=123; secure')
    r.cookies['abc'] = c
    assert r.full_response == ('HTTP/1.1 200 OK\r\n'
                               'Set-Cookie: abc=123; secure\r\n\r\n')
|
||||
|
||||
def test_response_update_content_length():
    """update_content_length=True corrects a wrong Content-Length to match the
    actual body size."""
    r = http.Response(('HTTP/1.1 200 OK\r\n'
                       'Content-Length: 4\r\n\r\n'
                       'AAAAAAAAAA'), update_content_length=True)

    assert r.full_response == (('HTTP/1.1 200 OK\r\n'
                                'Content-Length: 10\r\n\r\n'
                                'AAAAAAAAAA'))
|
||||
|
||||
def test_response_to_json():
    """to_json() serializes the base64'd full response plus rspid."""
    rsp = http.Response()
    rsp.status_line = 'HTTP/1.1 200 OK'
    rsp.rspid = 2

    expected_reqdata = {'full_response': base64.b64encode(rsp.full_response),
                        'rspid': rsp.rspid,
                        #'tag': r.tag,
                        }

    assert json.loads(rsp.to_json()) == expected_reqdata
|
||||
|
||||
def test_response_update_from_objects_cookies():
    """Rebuilding the cookie jar (clear + add_pairs) rewrites every Set-Cookie
    header in the new order while leaving unrelated headers in place."""
    r = http.Response(('HTTP/1.1 200 OK\r\n'
                       'Set-Cookie: foo=bar\r\n'
                       'Set-Cookie: baz=buzz\r\n'
                       'Header: out of fucking nowhere\r\n'
                       'Set-Cookie: foo=buzz\r\n'
                       '\r\n'))
    expected_pairs = [('foo', 'bar'), ('baz', 'buzz'), ('foo', 'buzz')]
    check_response_cookies(expected_pairs, r)

    new_pairs = [('foo', http.ResponseCookie('foo=banana')),
                 ('baz', http.ResponseCookie('baz=buzz')),
                 ('scooby', http.ResponseCookie('scooby=doo')),
                 ('foo', http.ResponseCookie('foo=boo'))]
    r.cookies.clear()
    r.cookies.add_pairs(new_pairs)

    assert r.full_response == ('HTTP/1.1 200 OK\r\n'
                               'Header: out of fucking nowhere\r\n'
                               'Set-Cookie: foo=banana\r\n'
                               'Set-Cookie: baz=buzz\r\n'
                               'Set-Cookie: scooby=doo\r\n'
                               'Set-Cookie: foo=boo\r\n'
                               '\r\n')
    expected_pairs = [('foo', 'banana'), ('baz', 'buzz'), ('scooby', 'doo'), ('foo', 'boo')]
    check_response_cookies(expected_pairs, r)
|
||||
|
||||
def test_response_update_from_objects_cookies_replace():
    """Replacing a repeated cookie by key collapses its Set-Cookie headers to
    a single one in the first occurrence's position."""
    r = http.Response(('HTTP/1.1 200 OK\r\n'
                       'Set-Cookie: foo=bar\r\n'
                       'Set-Cookie: baz=buzz\r\n'
                       'Header: out of fucking nowhere\r\n'
                       'Set-Cookie: foo=buzz\r\n'
                       '\r\n'))
    expected_pairs = [('foo', 'bar'), ('baz', 'buzz'), ('foo', 'buzz')]
    check_response_cookies(expected_pairs, r)

    r.cookies['foo'] = http.ResponseCookie('foo=banana')

    assert r.full_response == ('HTTP/1.1 200 OK\r\n'
                               'Set-Cookie: foo=banana\r\n'
                               'Set-Cookie: baz=buzz\r\n'
                               'Header: out of fucking nowhere\r\n'
                               '\r\n')
|
36
pappy-proxy/tests/test_proxy.py
Normal file
36
pappy-proxy/tests/test_proxy.py
Normal file
|
@ -0,0 +1,36 @@
|
|||
import pytest
|
||||
|
||||
from proxy import ProxyClient, ProxyClientFactory, ProxyServer
|
||||
from testutil import mock_deferred
|
||||
from twisted.internet.protocol import ServerFactory
|
||||
from twisted.test import proto_helpers
|
||||
from twisted.internet import defer
|
||||
|
||||
####################
|
||||
## Fixtures
|
||||
|
||||
@pytest.fixture
def proxyserver():
    """Build a ProxyServer protocol connected to an in-memory StringTransport.

    Returns a (protocol, transport) tuple so tests can drive the proxy and
    inspect what it wrote without real sockets.
    """
    factory = ServerFactory()
    factory.protocol = ProxyServer
    protocol = factory.buildProtocol(('127.0.0.1', 0))
    transport = proto_helpers.StringTransport()
    protocol.makeConnection(transport)
    return (protocol, transport)
|
||||
|
||||
####################
|
||||
## Basic tests
|
||||
|
||||
def test_proxy_server_fixture(proxyserver):
    """Sanity-check the fixture: bytes written to the protocol's transport
    show up in the StringTransport buffer."""
    prot = proxyserver[0]
    tr = proxyserver[1]
    prot.transport.write('hello')
    print tr.value()
    assert tr.value() == 'hello'
|
||||
|
||||
@pytest.inlineCallbacks
def test_mock_deferreds(mock_deferred):
    """A deferred from the mock_deferred factory yields the value it was
    created with."""
    d = mock_deferred('Hello!')
    r = yield d
    assert r == 'Hello!'
|
||||
|
15
pappy-proxy/tests/testutil.py
Normal file
15
pappy-proxy/tests/testutil.py
Normal file
|
@ -0,0 +1,15 @@
|
|||
import pytest
|
||||
from twisted.internet import defer
|
||||
|
||||
@pytest.fixture
def mock_deferred():
    # Generates a function that can be used to make a deferred that can be used
    # to mock out deferred-returning responses
    """Fixture: factory producing already-fired deferreds resolving to a fixed
    value, for mocking deferred-returning APIs."""
    def f(value):
        # The callback ignores the fired data and substitutes *value*.
        def g(data):
            return value
        d = defer.Deferred()
        d.addCallback(g)
        d.callback(None)
        return d
    return f
|
3
pappy-proxy/util.py
Normal file
3
pappy-proxy/util.py
Normal file
|
@ -0,0 +1,3 @@
|
|||
|
||||
class PappyException(Exception):
    """Base exception type for errors raised by the Pappy proxy."""
    pass
|
1
pappy-proxy/vim_repeater/.#repeater.vim
Symbolic link
1
pappy-proxy/vim_repeater/.#repeater.vim
Symbolic link
|
@ -0,0 +1 @@
|
|||
glew@localhost.787:1446907770
|
0
pappy-proxy/vim_repeater/__init__.py
Normal file
0
pappy-proxy/vim_repeater/__init__.py
Normal file
116
pappy-proxy/vim_repeater/repeater.py
Normal file
116
pappy-proxy/vim_repeater/repeater.py
Normal file
|
@ -0,0 +1,116 @@
|
|||
import base64
|
||||
import vim
|
||||
import sys
|
||||
import socket
|
||||
import json
|
||||
|
||||
class CommError(Exception):
    """Raised when the proxy's comm port reports a failed request."""
    pass
|
||||
|
||||
def communicate(data):
    """Send *data* (a JSON-serializable dict) to the proxy's comm port and
    return the decoded JSON reply.

    Raises CommError (after echoing the message in vim) when the proxy
    reports failure. The connection is closed in all cases.
    """
    global PAPPY_PORT
    # Submits data to the comm port of the proxy
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.connect(('127.0.0.1', int(vim.eval('s:commport'))))
        datastr = json.dumps(data)

        # Send our data. BUGFIX: the loop previously compared against
        # len(data) -- the dict's key count -- instead of the serialized
        # length, which could truncate longer messages.
        total_sent = 0
        while total_sent < len(datastr):
            sent = s.send(datastr[total_sent:])
            assert sent != 0
            total_sent += sent
        s.send('\n')

        # Get our response: accumulate bytes up to the terminating newline
        retstr = ''
        c = ''
        while c != '\n':
            retstr = retstr + c
            c = s.recv(1)
            assert c != ''
        result = json.loads(retstr)
        if not result['success']:
            vim.command('echoerr %s' % result['message'])
            raise CommError(result['message'])
        return result
    finally:
        # Previously the socket was leaked on every call.
        s.close()
|
||||
|
||||
def read_line(conn):
    """Read single characters from *conn* until a newline is seen; return the
    accumulated line without the trailing newline."""
    pieces = []
    while True:
        ch = conn.read(1)
        if ch == '\n':
            break
        pieces.append(ch)
    return ''.join(pieces)
|
||||
|
||||
def run_command(command):
    """Dispatch a repeater subcommand name to its handler.

    Unknown command names are silently ignored.
    """
    handlers = {
        "setup": set_up_windows,
        "submit": submit_current_buffer,
    }
    handler = handlers.get(command)
    if handler is not None:
        handler()
|
||||
|
||||
def set_buffer_content(buf, text):
    """Replace the contents of vim buffer *buf* with *text*, one buffer line
    per '\\n'-separated segment."""
    buf[:] = None  # vim-buffer idiom: clears the buffer to a single empty line
    first = True
    for l in text.split('\n'):
        if first:
            # Overwrite the single empty line left by the clear
            buf[0] = l
            first = False
        else:
            buf.append(l)
|
||||
|
||||
def set_up_windows():
    """Create the repeater's side-by-side layout and load the request/response.

    Reads the request id (a:2) and comm port (a:3) from vim, stores the port
    in s:commport, fetches the request and its response from the proxy, and
    fills the left (request) and right (response) buffers. Returns early if
    either fetch fails.
    """
    reqid = vim.eval("a:2")
    comm_port = vim.eval("a:3")
    vim.command("let s:commport=%d"%int(comm_port))
    # Get the left buffer
    vim.command("new")
    vim.command("only")
    b2 = vim.current.buffer
    vim.command("let s:b2=bufnr('$')")

    # Vsplit new file
    vim.command("vnew")
    b1 = vim.current.buffer
    vim.command("let s:b1=bufnr('$')")

    # Get the request
    comm_data = {"action": "get_request", "reqid": reqid}
    try:
        reqdata = communicate(comm_data)
    except CommError:
        # communicate() already echoed the error in vim
        return

    comm_data = {"action": "get_response", "reqid": reqid}
    try:
        rspdata = communicate(comm_data)
    except CommError:
        return

    # Set up the buffers
    set_buffer_content(b1, base64.b64decode(reqdata['full_request']))
    set_buffer_content(b2, base64.b64decode(rspdata['full_response']))
|
||||
|
||||
def submit_current_buffer():
    """Submit the current buffer's text as a request through the proxy and
    display the returned response in the right-hand buffer."""
    curbuf = vim.current.buffer
    b2_id = vim.eval("s:b2")
    b2 = vim.buffers[int(b2_id)]
    # Rebuild the two-pane layout: request on the left, response on the right
    vim.command("let s:b1=bufnr('$')")
    vim.command("only")
    vim.command("rightbelow vertical new")
    vim.command("b %s" % b2_id)
    vim.command("wincmd h")

    full_request = '\n'.join(curbuf)
    commdata = {'action': 'submit',
                'full_request': base64.b64encode(full_request)}
    result = communicate(commdata)
    set_buffer_content(b2, base64.b64decode(result['response']['full_response']))
|
||||
|
||||
# (left, right) = set_up_windows()
# set_buffer_content(left, 'Hello\nWorld')
# set_buffer_content(right, 'Hello\nOther\nWorld')
#print "Arg is %s" % vim.eval("a:arg")
# Entry point: vim runs this file via :pyfile with the subcommand in a:1
# (see repeater.vim's RepeaterAction).
run_command(vim.eval("a:1"))
|
17
pappy-proxy/vim_repeater/repeater.vim
Normal file
17
pappy-proxy/vim_repeater/repeater.vim
Normal file
|
@ -0,0 +1,17 @@
|
|||
" Pappy repeater plugin: edit a request in vim and resubmit it through the
" proxy. Requires a python-enabled vim (the logic lives in repeater.py).
if !has('python')
    echo "Vim must support python in order to use the repeater"
    finish
endif

" Absolute path to the companion python script, resolved relative to this file
let s:pyscript = resolve(expand('<sfile>:p:h') . '/repeater.py')

" All subcommands funnel through the python script; it reads a:1 (and any
" further args) via vim.eval.
function! RepeaterAction(...)
    execute 'pyfile ' . s:pyscript
endfunc

command! -nargs=* RepeaterSetup call RepeaterAction('setup', <f-args>)
command! RepeaterSubmitBuffer call RepeaterAction('submit')

" Bind forward to <leader>f
nnoremap <leader>f :RepeaterSubmitBuffer<CR>
|
||||
|
Loading…
Add table
Add a link
Reference in a new issue