Merge branch 'crypt'

commit a6f64f8ebc

20 changed files with 699 additions and 1109 deletions

README.md (14 changes)
@@ -174,6 +174,8 @@ Configuration for each project is done in the `config.json` file. The file is a
 | `debug_dir` (optional) | Where connection debug info should be stored. If not present, debug info is not saved to a file. |
 | `cert_dir` | Where the CA cert and the private key for the CA cert are stored |
 | `proxy_listeners` | A list of dicts which describe which ports the proxy will listen on. Each item is a dict with "port" and "interface" values which determine which port and interface to listen on. For example, if port=8000 and the interface is 127.0.0.1, the proxy will only accept connections from localhost on port 8000. To accept connections from anywhere, set the interface to 0.0.0.0. |
+| `socks_proxy` | A dictionary with details on how to connect to an upstream SOCKS proxy to send all in-scope requests through. See the section on upstream SOCKS proxies for more information. |
+| `http_proxy` | A dictionary with details on how to connect to an upstream HTTP proxy to send all in-scope requests through. See the section on upstream HTTP proxies for more information. |
 
 The following tokens will also be replaced with values:
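For example, a minimal `config.json` using the keys above might look like the following (an illustrative sketch, not a shipped default; only keys documented in the table are shown):

{
    "cert_dir": "./certs",
    "proxy_listeners": [
        {"port": 8000, "interface": "127.0.0.1"}
    ],
    "socks_proxy": null,
    "http_proxy": null
}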
@@ -379,6 +381,11 @@ Some arguments can take multiple IDs for an argument. To pass multiple IDs to a
 * `viq 1,2,u3` View information about requests 1, 2, and the unmangled version of 3
 * `gma foo 4,5,6` Generate a macro with definitions for requests 4, 5, and 6
 
+In addition, you can pass in a wildcard to include all in-context requests.
+
+* `viq *` View information about all in-context requests
+* `dump_response *` Dump the responses of all in-context requests (will overwrite duplicate filenames)
+
 Context
 -------
 The context is a set of filters that define which requests are considered "active". Only requests in the current context are displayed with `ls`. By default, the context includes every single request that passes through the proxy. You can narrow the current context by applying filters, which apply rules such as "the response code must equal 500" or "the host must contain google.com". Once you apply one or more filters, only requests/responses which pass every active filter will be part of the current context.
@@ -1123,6 +1130,13 @@ Changelog
 ---------
 The boring part of the readme
 
+* 0.2.10
+    * Add wildcard support to commands that can take multiple request IDs
+    * Update dump_response to dump multiple responses at the same time
+    * More autocompleters (macro commands, fields for filters)
+    * Add a non-async function to get in-context request IDs, so macros can scan over everything in context and act on it
+    * Improve sessions so they can be used to maintain state with macros
+    * Bugfixes
 * 0.2.9
     * Fix bugs/clean up some code
 * 0.2.8
@@ -1 +1 @@
-__version__ = '0.2.9'
+__version__ = '0.2.10'
@@ -130,14 +130,11 @@ class PappyConfig(object):
 
     :Default: False
 
-    .. data: salt_file
+    .. data: salt_len
 
-    Clear-text file containing the salt generated for key derivation. A new salt
-    will be generated each time the project is encrypted. After successfully
-    decrypting the project file (``project.crypt``), the salt file (``project.salt``)
-    will be deleted.
+    Length of the nonce-salt value appended to the end of ``crypt_file``
 
-    :Default: ``project.salt``
+    :Default: 16
     """
 
     def __init__(self):

@@ -167,11 +164,11 @@ class PappyConfig(object):
         self.global_config_dict = {}
 
         self.archive = 'project.archive'
+        self.debug = False
         self.crypt_dir = 'crypt'
         self.crypt_file = 'project.crypt'
         self.crypt_session = False
-        self.crypt_success = False
-        self.salt_file = 'project.salt'
+        self.salt_len = 16
 
     def get_default_config(self):
         default_config_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),

@@ -185,8 +182,6 @@ class PappyConfig(object):
         pp = os.getcwd() + os.sep
         project_files = [pp+f for f in file_glob if os.path.isfile(pp+f)]
 
-        if self.salt_file in project_files:
-            project_files.remove(self.salt_file)
         if self.crypt_file in project_files:
             project_files.remove(self.crypt_file)
@@ -1,6 +1,7 @@
 #!/usr/bin/env python
 
 import crochet
+import getpass
 import glob
 import os
 import pappyproxy
@@ -31,30 +32,36 @@ class Crypto(object):
         """
 
         # Leave the crypto working directory
-        os.chdir('../')
+        if self.config.crypt_dir in os.getcwd():
+            os.chdir('../')
 
         self.compressor.compress_project()
 
+        # Get the password and salt, then derive the key
+        self.crypto_ramp_up()
+
         # Create project and crypto archive
         archive_file = open(self.archive, 'rb')
         archive_crypt = open(self.config.crypt_file, 'wb')
 
-        # Get the password and salt, then derive the key
-        self.crypto_ramp_up()
-
-        # Encrypt the archive read as a bytestring
-        fern = Fernet(self.key)
-        crypt_token = fern.encrypt(archive_file.read())
-        archive_crypt.write(crypt_token)
-
-        # Store the salt for the next decryption
-        self.create_salt_file()
+        try:
+            # Encrypt the archive read as a bytestring
+            fern = Fernet(self.key)
+            crypt_token = fern.encrypt(archive_file.read())
+            archive_crypt.write(crypt_token)
+        except InvalidToken as e:
+            raise PappyException("Error encrypting project: ", e)
+            return False
 
         archive_file.close()
         archive_crypt.close()
 
+        # Store the salt for the next decryption
+        self.create_salt_file()
+
         # Delete clear-text files
         self.delete_clear_files()
+        return True
 
     def decrypt_project(self):
         """
@@ -63,7 +70,11 @@ class Crypto(object):
 
         # Decrypt and decompress the project if crypt_file exists
         if os.path.isfile(self.config.crypt_file):
-            archive_crypt = open(self.config.crypt_file, 'rb').read()
+            cf = self.config.crypt_file
+            sl = self.config.salt_len
+            crl = os.path.getsize(cf) - sl
+
+            archive_crypt = open(cf, 'rb').read(crl)
             archive_file = open(self.config.archive, 'wb')
 
             retries = 3

@@ -73,18 +84,16 @@ class Crypto(object):
                     fern = Fernet(self.key)
                     archive = fern.decrypt(archive_crypt)
                     break
-                except InvalidToken:
-                    print "Invalid password"
+                except InvalidToken as e:
+                    print "Invalid decryption: ", e
                     retries -= 1
                     # Quit pappy if the user doesn't retry
                     # or if all retries are exhausted
                     if not self.confirm_password_retry() or retries <= 0:
-                        self.config.crypt_success = False
                         return False
                     else:
                         self.password = None
                         self.key = None
-                        self.salt = None
                         pass
 
             archive_file.write(archive)
@@ -92,11 +101,7 @@ class Crypto(object):
 
             self.compressor.decompress_project()
 
-            # Force generation of new salt and crypt archive
-            self.delete_crypt_files()
-
             os.chdir(self.config.crypt_dir)
-            self.config.crypt_success = True
             return True
         # If the project exited before encrypting the working directory,
         # change to the working directory to resume the session

@@ -116,7 +121,7 @@ class Crypto(object):
 
     def confirm_password_retry(self):
-        answer = raw_input("Re-enter your password? (y/n)").strip()
+        answer = raw_input("Re-enter your password? (y/n): ").strip()
         if answer[0] == "y" or answer[0] == "Y":
             return True
         else:
@@ -125,7 +130,8 @@ class Crypto(object):
     def crypto_ramp_up(self):
         if not self.password:
             self.get_password()
-        self.set_salt()
+        if not self.salt:
+            self.set_salt()
         self.derive_key()
 
     def get_password(self):
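The hunk above relies on `self.derive_key()`, which this commit does not show. A Fernet-compatible derivation from a password and salt generally looks like this sketch (the PBKDF2 parameters are assumptions, not taken from this code):

import base64
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

def derive_key(password, salt):
    # Fernet requires a 32-byte key, urlsafe-base64-encoded
    kdf = PBKDF2HMAC(algorithm=hashes.SHA256(),
                     length=32,
                     salt=salt,
                     iterations=100000,
                     backend=default_backend())
    return base64.urlsafe_b64encode(kdf.derive(password))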
@@ -135,27 +141,42 @@ class Crypto(object):
         """
         encoded_passwd = ""
         try:
-            passwd = raw_input("Enter a password: ").strip()
+            passwd = getpass.getpass("Enter a password: ").strip()
             self.password = passwd.encode("utf-8")
         except:
             raise PappyException("Invalid password, try again")
 
     def set_salt(self):
-        if os.path.isfile(self.config.salt_file):
+        if self.config.crypt_dir in os.getcwd():
+            os.chdir('../')
+            self.set_salt_from_file()
+            os.chdir(self.config.crypt_dir)
+        elif os.path.isfile(self.config.crypt_file):
             self.set_salt_from_file()
         else:
             self.salt = os.urandom(16)
 
     def set_salt_from_file(self):
         try:
-            salt_file = open(self.config.salt_file, 'rb')
-            self.salt = salt_file.readline().strip()
+            # Seek to `salt_len` bytes before the EOF,
+            # then read `salt_len` bytes to retrieve the salt.
+            # WARNING: `crypt_file` must be opened in `rb` mode
+            # or `salt_file.seek()` will result in undefined behavior.
+            salt_file = open(self.config.crypt_file, 'rb')
+            sl = self.config.salt_len
+            # Negate the salt length to seek to the
+            # correct position in the buffer
+            salt_file.seek(-sl, 2)
+            self.salt = salt_file.read(sl)
+            salt_file.close()
         except:
-            raise PappyException("Unable to read project.salt")
+            cf = self.config.crypt_file
+            raise PappyException("Unable to read %s" % cf)
 
     def create_salt_file(self):
-        salt_file = open(self.config.salt_file, 'wb')
+        salt_file = open(self.config.crypt_file, 'a')
         salt_file.write(self.salt)
         salt_file.close()
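Taken together, the new on-disk format is simply ciphertext followed by the salt: the last `salt_len` bytes of `project.crypt` hold the salt, so no separate salt file is needed. A standalone sketch of the scheme (illustrative, not Pappy's actual Crypto class):

import os

SALT_LEN = 16  # mirrors config.salt_len

def write_crypt_file(path, ciphertext, salt):
    # Ciphertext first, salt appended at the end
    with open(path, 'wb') as f:
        f.write(ciphertext)
    with open(path, 'ab') as f:
        f.write(salt)

def read_crypt_file(path):
    # Everything except the last SALT_LEN bytes is ciphertext
    crypt_len = os.path.getsize(path) - SALT_LEN
    with open(path, 'rb') as f:
        ciphertext = f.read(crypt_len)
        salt = f.read(SALT_LEN)
    return ciphertext, salt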
@@ -127,7 +127,7 @@ def repeatable_parse_qs(s):
 @crochet.wait_for(timeout=180.0)
 @defer.inlineCallbacks
 def request_by_id(reqid):
-    req = Request.load_request(str(reqid))
+    req = yield Request.load_request(str(reqid))
     defer.returnValue(req)
 
 ##########
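The added `yield` matters because `Request.load_request` returns a Deferred: inside `@defer.inlineCallbacks`, omitting it would hand back the Deferred object itself instead of the loaded request. A minimal illustration:

from twisted.internet import defer

@defer.inlineCallbacks
def example():
    wrong = defer.succeed(42)        # without yield: a Deferred object
    right = yield defer.succeed(42)  # with yield: the value 42
    defer.returnValue((wrong, right))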
@@ -2133,7 +2133,9 @@ class Request(HTTPMessage):
     def submit_request(request,
                        save_request=False,
                        intercepting_macros={},
-                       stream_transport=None):
+                       stream_transport=None,
+                       _factory_string_transport=False,
+                       _conn_info=None):
         """
         submit_request(request, save_request=False, intercepting_macros={}, stream_transport=None)

@@ -2152,6 +2154,9 @@ class Request(HTTPMessage):
 
     from .proxy import ProxyClientFactory, get_next_connection_id, get_endpoint
     from .pappy import session
+    from .tests.testutil import TLSStringTransport
+
+    # _factory_string_transport and _conn_info are only for unit tests. Do not use.
 
     factory = None
     if stream_transport is None:

@@ -2164,6 +2169,16 @@ class Request(HTTPMessage):
                                      save_all=save_request,
                                      stream_response=True,
                                      return_transport=stream_transport)
+
+        # Set up stuff for unit tests if needed
+        if _factory_string_transport:
+            factory._use_string_transport = True
+        if _conn_info is not None:
+            # Pass the factory back to the unit test
+            _conn_info['factory'] = factory
+            factory._conn_info = _conn_info
+
+    # Set up factory settings
     factory.intercepting_macros = intercepting_macros
     factory.connection_id = get_next_connection_id()
     factory.connect()
@@ -26,6 +26,7 @@ from . import context
 from . import crypto
 from . import http
 from .console import ProxyCmd
+from .util import PappyException
 from twisted.enterprise import adbapi
 from twisted.internet import reactor, defer
 from twisted.internet.error import CannotListenError

@@ -71,12 +72,13 @@ class PappySession(object):
         from . import proxy, plugin
 
         if self.config.crypt_session:
-            self.decrypt()
-
-            if self.config.crypt_success:
+            if self.decrypt():
                 self.config.load_from_file('./config.json')
                 self.config.global_load_from_file()
                 self.delete_data_on_quit = False
+            else:
+                self.complete_defer.callback(None)
+                return
 
         # If the data file doesn't exist, create it with restricted permissions
         if not os.path.isfile(self.config.datafile):

@@ -150,19 +152,19 @@ class PappySession(object):
         self.complete_defer = deferToThread(self.cons.cmdloop)
         self.complete_defer.addCallback(self.cleanup)
 
-    @defer.inlineCallbacks
     def encrypt(self):
-        yield self.crypto.encrypt_project()
+        if self.crypto.encrypt_project():
+            return True
+        else:
+            return False
 
-    @defer.inlineCallbacks
     def decrypt(self):
         # Attempt to decrypt the project archive
         if self.crypto.decrypt_project():
-            yield True
+            return True
         # Quit pappy on failure
         else:
-            reactor.stop()
-            defer.returnValue(None)
+            return False
 
     @defer.inlineCallbacks
     def cleanup(self, ignored=None):

@@ -182,6 +184,7 @@ def parse_args():
 
     parser = argparse.ArgumentParser(description='An intercepting proxy for testing web applications.')
     parser.add_argument('-l', '--lite', help='Run the proxy in "lite" mode', action='store_true')
+    parser.add_argument('-d', '--debug', help='Run the proxy in "debug" mode', action='store_true')
     try:
         hlpmsg = ''.join(['Start pappy in "crypto" mode,',
                           'must supply a name for the encrypted',

@@ -206,6 +209,10 @@ def parse_args():
     else:
         settings['crypt'] = None
 
+    if args.debug:
+        settings['debug'] = True
+    else:
+        settings['debug'] = False
     return settings
 
 def set_text_factory(conn):

@@ -252,6 +259,9 @@ def main():
     pappy_config.global_load_from_file()
     session.delete_data_on_quit = False
 
+    if settings['debug']:
+        pappy_config.debug = True
+
     yield session.start()
 
     session.complete_defer.addCallback(lambda ignored: reactor.stop())
@@ -10,7 +10,9 @@ import imp
 import os
 import pappyproxy
 import stat
+import crochet
 
+from twisted.internet import defer
 from .proxy import add_intercepting_macro as proxy_add_intercepting_macro
 from .proxy import remove_intercepting_macro as proxy_remove_intercepting_macro
 from .colors import Colors

@@ -146,7 +148,7 @@ def req_history(num=-1, ids=None, include_unmangled=False):
     """
     return pappyproxy.Request.cache.req_it(num=num, ids=ids, include_unmangled=include_unmangled)
 
-def main_context_ids(n=-1):
+def async_main_context_ids(n=-1):
     """
     Returns a deferred that resolves into a list of up to ``n`` of the
     most recent requests in the main context. You can then use

@@ -156,6 +158,17 @@ def async_main_context_ids(n=-1):
     """
     return pappyproxy.pappy.main_context.get_reqs(n)
 
+@crochet.wait_for(timeout=None)
+@defer.inlineCallbacks
+def main_context_ids(*args, **kwargs):
+    """
+    Same as :func:`pappyproxy.plugin.async_main_context_ids` but can be called
+    from macros and other non-async functions. Cannot be called in async
+    functions.
+    """
+    ret = yield async_main_context_ids(*args, **kwargs)
+    defer.returnValue(ret)
+
 def run_cmd(cmd):
     """
     Run a command as if you typed it into the console. Try and use
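This wrapper is the "non-async function to get in-context request IDs" from the changelog: `crochet.wait_for` blocks until the underlying deferred fires, so plain macros can use it. A sketch of a macro built on it (names from this commit; the loop body is illustrative):

from pappyproxy.plugin import main_context_ids
from pappyproxy.http import request_by_id

def run_macro(args):
    # Safe outside the reactor thread; blocks until each call resolves
    for reqid in main_context_ids():
        req = request_by_id(reqid)
        print req.host, req.path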
@@ -1,7 +1,7 @@
 import crochet
 import pappyproxy
 
-from pappyproxy.util import PappyException, confirm
+from pappyproxy.util import PappyException, confirm, autocomplete_startswith
 from pappyproxy.http import Request
 from twisted.internet import defer

@@ -40,6 +40,11 @@ class BuiltinFilters(object):
         return pappyproxy.context.Filter(BuiltinFilters._filters[name][1])
 
+
+def complete_filtercmd(text, line, begidx, endidx):
+    strs = [k for k, v in pappyproxy.context.Filter._filter_functions.iteritems()]
+    strs += [k for k, v in pappyproxy.context.Filter._async_filter_functions.iteritems()]
+    return autocomplete_startswith(text, strs)
 
 @crochet.wait_for(timeout=None)
 @defer.inlineCallbacks
 def filtercmd(line):

@@ -179,7 +184,7 @@ def load_cmds(cmd):
         'filter_clear': (filter_clear, None),
         'filter_up': (filter_up, None),
         'builtin_filter': (builtin_filter, complete_builtin_filter),
-        'filter': (filtercmd, None),
+        'filter': (filtercmd, complete_filtercmd),
     })
     cmd.add_aliases([
         #('filter_prune', ''),
@@ -4,7 +4,7 @@ import shlex
 
 from pappyproxy.plugin import active_intercepting_macros, add_intercepting_macro, remove_intercepting_macro
 from pappyproxy.macros import load_macros, macro_from_requests, gen_imacro
-from pappyproxy.util import PappyException, load_reqlist
+from pappyproxy.util import PappyException, load_reqlist, autocomplete_startswith
 from twisted.internet import defer
 
 loaded_macros = []

@@ -65,6 +65,11 @@ def load_macros_cmd(line):
             loaded_int_macros.append(macro)
             print 'Loaded "%s"' % macro
 
+def complete_run_macro(text, line, begidx, endidx):
+    global macro_dict
+    strs = [k for k, v in macro_dict.iteritems()]
+    return autocomplete_startswith(text, strs)
+
 def run_macro(line):
     """
     Run a macro

@@ -81,6 +86,24 @@ def run_macro(line):
     macro = macro_dict[mname]
     macro.execute(args[1:])
 
+def complete_run_int_macro(text, line, begidx, endidx):
+    global int_macro_dict
+    global loaded_int_macros
+    running = []
+    not_running = []
+    for macro in loaded_int_macros:
+        if macro.name in [m.name for k, m in active_intercepting_macros().iteritems()]:
+            running.append(macro)
+        else:
+            not_running.append(macro)
+    strs = []
+    for m in not_running:
+        strs.append(m.name)
+        strs.append(m.file_name)
+        if m.short_name:
+            strs.append(m.short_name)
+    return autocomplete_startswith(text, strs)
+
 def run_int_macro(line):
     """
     Activate an intercepting macro

@@ -103,6 +126,24 @@ def run_int_macro(line):
         print 'Error initializing macro:'
         raise e
 
+def complete_stop_int_macro(text, line, begidx, endidx):
+    global int_macro_dict
+    global loaded_int_macros
+    running = []
+    not_running = []
+    for macro in loaded_int_macros:
+        if macro.name in [m.name for k, m in active_intercepting_macros().iteritems()]:
+            running.append(macro)
+        else:
+            not_running.append(macro)
+    strs = []
+    for m in running:
+        strs.append(m.name)
+        strs.append(m.file_name)
+        if m.short_name:
+            strs.append(m.short_name)
+    return autocomplete_startswith(text, strs)
+
 def stop_int_macro(line):
     """
     Stop a running intercepting macro

@@ -201,9 +242,9 @@ def load_cmds(cmd):
         'generate_int_macro': (generate_int_macro, None),
         'generate_macro': (generate_macro, None),
         'list_int_macros': (list_int_macros, None),
-        'stop_int_macro': (stop_int_macro, None),
-        'run_int_macro': (run_int_macro, None),
-        'run_macro': (run_macro, None),
+        'stop_int_macro': (stop_int_macro, complete_stop_int_macro),
+        'run_int_macro': (run_int_macro, complete_run_int_macro),
+        'run_macro': (run_macro, complete_run_macro),
         'load_macros': (load_macros_cmd, None),
     })
     cmd.add_aliases([
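All of these completers share the cmd2-style signature: they receive the partial word plus the full line and offsets, and return a list of candidate strings. The general shape (an illustrative sketch):

def complete_example(text, line, begidx, endidx):
    candidates = ['run_macro', 'run_int_macro', 'load_macros']
    # autocomplete_startswith (added in util.py below) does this filtering
    return [c for c in candidates if c.startswith(text)]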
@@ -187,6 +187,10 @@ def run_without_color(line):
         session.cons.onecmd(line.strip())
     print remove_color(output.val)
 
+def version(line):
+    import pappyproxy
+    print pappyproxy.__version__
+
 def load_cmds(cmd):
     cmd.set_cmds({
         'clrmem': (clrmem, None),

@@ -197,6 +201,7 @@ def load_cmds(cmd):
         'merge': (merge_datafile, None),
         'nocolor': (run_without_color, None),
         'watch': (watch_proxy, None),
+        'version': (version, None),
     })
     cmd.add_aliases([
         #('rpy', ''),
@@ -2,7 +2,7 @@ import crochet
 import pappyproxy
 import shlex
 
-from pappyproxy.plugin import main_context_ids
+from pappyproxy.plugin import async_main_context_ids
 from pappyproxy.util import PappyException, load_reqlist
 from twisted.internet import defer
 from pappyproxy.http import Request

@@ -26,7 +26,7 @@ def tag(line):
         print 'Tagging %s with %s' % (', '.join(reqids), tag)
     else:
         print "Tagging all in-context requests with %s" % tag
-        reqids = yield main_context_ids()
+        reqids = yield async_main_context_ids()
 
     for reqid in reqids:
         req = yield Request.load_request(reqid)

@@ -58,7 +58,7 @@ def untag(line):
         print 'Removing tag %s from %s' % (tag, ', '.join(reqids))
     else:
         print "Removing tag %s from all in-context requests" % tag
-        reqids = yield main_context_ids()
+        reqids = yield async_main_context_ids()
 
     for reqid in reqids:
         req = yield Request.load_request(reqid)
@@ -10,7 +10,7 @@ import urllib
 from pappyproxy.util import PappyException, utc2local, load_reqlist, print_table, print_request_rows, get_req_data_row
 from pappyproxy.http import Request, repeatable_parse_qs
 from twisted.internet import defer
-from pappyproxy.plugin import main_context_ids
+from pappyproxy.plugin import async_main_context_ids
 from pappyproxy.colors import Colors, Styles, verb_color, scode_color, path_formatter, host_color
 from pygments.formatters import TerminalFormatter
 from pygments.lexers.data import JsonLexer

@@ -255,7 +255,7 @@ def list_reqs(line):
         print_count = 25
 
     rows = []
-    ids = yield main_context_ids(print_count)
+    ids = yield async_main_context_ids(print_count)
     for i in ids:
         req = yield Request.load_request(i)
         rows.append(get_req_data_row(req))

@@ -477,7 +477,7 @@ def get_param_info(line):
 
     found_params = {}
 
-    ids = yield main_context_ids()
+    ids = yield async_main_context_ids()
     for i in ids:
         req = yield Request.load_request(i)
         for k, v in req.url_params.all_pairs():

@@ -501,17 +501,20 @@ def dump_response(line):
     """
     # dump the data of a response
     args = shlex.split(line)
-    reqid = args[0]
-    req = yield Request.load_request(reqid)
-    rsp = req.response
-    if len(args) >= 2:
-        fname = args[1]
-    else:
-        fname = req.path.split('/')[-1]
-    with open(fname, 'w') as f:
-        f.write(rsp.body)
-    print 'Response data written to %s' % fname
+    reqs = yield load_reqlist(args[0])
+    for req in reqs:
+        if req.response:
+            rsp = req.response
+            if len(args) >= 2:
+                fname = args[1]
+            else:
+                fname = req.path.split('/')[-1]
+            with open(fname, 'w') as f:
+                f.write(rsp.body)
+            print 'Response data written to %s' % fname
+        else:
+            print 'Request %s does not have a response' % req.reqid
 
 @crochet.wait_for(timeout=None)
 @defer.inlineCallbacks

@@ -525,7 +528,7 @@ def site_map(line):
         paths = True
     else:
         paths = False
-    ids = yield main_context_ids()
+    ids = yield async_main_context_ids()
     paths_set = set()
     for reqid in ids:
         req = yield Request.load_request(reqid)
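With `load_reqlist` doing the parsing, `dump_response` now accepts the same id syntax as other commands, for example:

* `dump_response 12` Write request 12's response body to a file named after the last path segment
* `dump_response 1,2,3` Dump several responses in one command
* `dump_response *` Dump the responses of every in-context request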
@@ -262,13 +262,17 @@ class ProxyClientFactory(ClientFactory):
         self.intercepting_macros = {}
         self.use_as_proxy = False
         self.sendback_function = None
+        self.dropped_request = False
+
+        # Only used for unit tests. Do not use.
+        self._use_string_transport = False
+        self._conn_info = None
 
     def log(self, message, symbol='*', verbosity_level=1):
         log(message, id=self.connection_id, symbol=symbol, verbosity_level=verbosity_level)
 
-    def buildProtocol(self, addr, _do_callback=True):
+    def buildProtocol(self, addr):
         from pappyproxy.pappy import session
-        # _do_callback is intended to help with testing and should not be modified
         if self.use_as_proxy and context.in_scope(self.request):
             p = UpstreamHTTPProxyClient(self.request)
             if 'username' in session.config.http_proxy and 'password' in session.config.http_proxy:

@@ -279,8 +283,7 @@ class ProxyClientFactory(ClientFactory):
             p = ProxyClient(self.request)
         p.factory = self
         self.log("Building protocol", verbosity_level=3)
-        if _do_callback:
-            p.data_defer.addCallback(self.return_request_pair)
+        p.data_defer.addCallback(self.return_request_pair)
         return p
 
     def clientConnectionFailed(self, connector, reason):

@@ -310,17 +313,24 @@ class ProxyClientFactory(ClientFactory):
             else:
                 yield self.request.async_deep_save()
 
-            (sendreq, mangled) = yield macros.mangle_request(sendreq, mangle_macros)
-
-            if sendreq and mangled and self.save_all:
-                self.start_time = datetime.datetime.utcnow()
-                sendreq.time_start = self.start_time
-                yield sendreq.async_deep_save()
-
-            if session.config.http_proxy:
-                self.use_as_proxy = True
-            if (not self.stream_response) and self.sendback_function:
-                self.data_defer.addCallback(self.sendback_function)
+            (mangreq, mangled) = yield macros.mangle_request(sendreq, mangle_macros)
+            if mangreq is None:
+                self.log("Request dropped. Closing connections.")
+                self.request.tags.add('dropped')
+                self.request.response = None
+                self.dropped_request = True
+                defer.returnValue(None)
+            else:
+                sendreq = mangreq
+                if sendreq and mangled and self.save_all:
+                    self.start_time = datetime.datetime.utcnow()
+                    sendreq.time_start = self.start_time
+                    yield sendreq.async_deep_save()
+
+                if session.config.http_proxy:
+                    self.use_as_proxy = True
+                if (not self.stream_response) and self.sendback_function:
+                    self.data_defer.addCallback(self.sendback_function)
         else:
             self.log("Request out of scope, passing along unmangled")
             self.request = sendreq

@@ -337,7 +347,7 @@ class ProxyClientFactory(ClientFactory):
         from pappyproxy.pappy import session
 
         self.end_time = datetime.datetime.utcnow()
-        if session.config.debug_to_file or session.config.debug_verbosity > 0:
+        if (session.config.debug_to_file or session.config.debug_verbosity > 0) and request.response:
             log_request(printable_data(request.response.full_response), id=self.connection_id, symbol='<m', verbosity_level=3)
 
         request.time_start = self.start_time

@@ -351,14 +361,15 @@ class ProxyClientFactory(ClientFactory):
             else:
                 yield request.async_deep_save()
 
-            mangled = yield macros.mangle_response(request, mangle_macros)
+            if request.response:
+                mangled = yield macros.mangle_response(request, mangle_macros)
 
-            if mangled and self.save_all:
-                yield request.async_deep_save()
+                if mangled and self.save_all:
+                    yield request.async_deep_save()
 
             if request.response and (session.config.debug_to_file or session.config.debug_verbosity > 0):
                 log_request(printable_data(request.response.full_response),
                             id=self.connection_id, symbol='<', verbosity_level=3)
         else:
             self.log("Response out of scope, passing along unmangled")
         self.data_defer.callback(request)

@@ -369,6 +380,10 @@ class ProxyClientFactory(ClientFactory):
         from pappyproxy.pappy import session
 
         yield self.prepare_request()
+        if self.dropped_request:
+            self.data_defer.callback(self.request)
+            defer.returnValue(None)
+
         if context.in_scope(self.request):
             # Get connection using config
             endpoint = get_endpoint(self.request.host,

@@ -380,12 +395,26 @@ class ProxyClientFactory(ClientFactory):
             # Just forward it normally
             endpoint = get_endpoint(self.request.host,
                                     self.request.port,
-                                    self.request.is_ssl)
+                                    self.request.is_ssl,
+                                    socks_config=None,
+                                    use_http_proxy=False)
 
-        # Connect via the endpoint
-        self.log("Accessing using endpoint")
-        yield endpoint.connect(self)
-        self.log("Connected")
+        if self._use_string_transport:
+            from pappyproxy.tests.testutil import TLSStringTransport
+            # "Connect" via string transport
+            protocol = self.buildProtocol(('127.0.0.1', 0))
+
+            # Pass the protocol back to the test
+            if self._conn_info:
+                self._conn_info['protocol'] = protocol
+
+            tr = TLSStringTransport()
+            protocol.makeConnection(tr)
+        else:
+            # Connect via the endpoint
+            self.log("Accessing using endpoint")
+            yield endpoint.connect(self)
+            self.log("Connected")
 
 class ProxyServerFactory(ServerFactory):

@@ -529,6 +558,14 @@ class ProxyServer(LineReceiver):
     def send_response_back(self, request):
         if request.response is not None:
             self.transport.write(request.response.full_response)
+        else:
+            droppedrsp = http.Response(('HTTP/1.1 200 OK\r\n'
+                                        'Connection: close\r\n'
+                                        'Cache-control: no-cache\r\n'
+                                        'Pragma: no-cache\r\n'
+                                        'Cache-control: no-store\r\n'
+                                        'X-Frame-Options: DENY\r\n\r\n'))
+            self.transport.write(droppedrsp.full_message)
         self.log("Response sent back, losing connection")
         self.transport.loseConnection()
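The `dropped_request` plumbing above is what lets an intercepting macro drop traffic: when `mangle_request` returns None, the proxy tags the request as dropped and answers the client with the canned 200 response. A sketch of such a macro (function names follow the intercepting-macro convention; treat the details as illustrative):

MACRO_NAME = 'Drop tracker requests'

def mangle_request(request):
    # Returning None signals the proxy to drop the request
    if 'tracker' in request.host:
        return None
    return request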
@@ -1,9 +1,24 @@
 from .http import ResponseCookie
 
 class Session(object):
+    """
+    A class used to maintain a session over multiple requests. Can remember cookies
+    and apply a specific header to requests. It is also possible to give the session
+    a list of cookie names and it will only save those cookies.
+    """
 
     def __init__(self, cookie_names=None, header_names=None,
                  cookie_vals=None, header_vals=None):
+        """
+        Session(self, cookie_names=None, header_names=None, cookie_vals=None, header_vals=None)
+        Constructor
+
+        :param cookie_names: A whitelist for cookies that should be saved from :func:`~pappyproxy.session.Session.save_req` and :func:`~pappyproxy.session.Session.save_rsp` in the session. If no values are given, all cookies will be saved.
+        :param header_names: A whitelist for headers that should be saved from :func:`~pappyproxy.session.Session.save_req` in the session. If no values are given, no headers will be saved.
+        :param cookie_vals: A dictionary of cookies to populate the session with. The key should be the cookie name, and the value can be either a string or a :class:`~pappyproxy.http.ResponseCookie`. If a :class:`~pappyproxy.http.ResponseCookie` is given, its flags will be used in :func:`~pappyproxy.session.Session.apply_rsp`.
+        :param header_vals: A dictionary of header values to populate the session with. The key should be the header name and the value should be a string which should be the header value.
+        """
 
         self.cookies = cookie_names or []
         self.headers = header_names or []
         self.cookie_vals = cookie_vals or {}

@@ -19,25 +34,61 @@ class Session(object):
             if k not in self.headers:
                 self.headers.append(k)
 
+    def _cookie_obj(self, k, v):
+        """
+        Returns the value as a cookie object regardless of whether the cookie is a string or a ResponseCookie.
+        """
+        if isinstance(v, ResponseCookie):
+            return v
+        else:
+            cookie_str = '%s=%s' % (k, v)
+            return ResponseCookie(cookie_str)
+
+    def _cookie_val(self, v):
+        """
+        Returns the value of the cookie regardless of whether the value is a string or a ResponseCookie.
+        """
+        if isinstance(v, ResponseCookie):
+            return v.val
+        else:
+            return v
+
     def apply_req(self, req):
+        """
+        apply_req(request)
+
+        Apply saved headers and cookies to the request
+        """
+
         for k, v in self.cookie_vals.iteritems():
-            if isinstance(v, ResponseCookie):
-                req.cookies[v.key] = v.val
-            else:
-                req.cookies[k] = v
+            req.cookies[k] = self._cookie_val(v)
         for k, v in self.header_vals.iteritems():
             req.headers[k] = v
 
     def apply_rsp(self, rsp):
+        """
+        apply_rsp(response)
+
+        Will add a Set-Cookie header for each saved cookie. Will not
+        apply any saved headers. If the cookie was added from a call to
+        :func:`~pappyproxy.session.Session.save_rsp`, the Set-Cookie flags
+        will be the same as the original response.
+        """
+
         for k, v in self.cookie_vals.iteritems():
-            if isinstance(v, ResponseCookie):
-                rsp.set_cookie(v)
-            else:
-                cookie_str = '%s=%s' % (k, v)
-                rsp.set_cookie(ResponseCookie(cookie_str))
+            val = self._cookie_obj(k, v)
+            rsp.set_cookie(val)
         # Don't apply headers to responses
 
-    def get_req(self, req, cookies=None, headers=None):
+    def save_req(self, req, cookies=None, headers=None):
+        """
+        save_req(req, cookies=None, headers=None)
+
+        Updates the state of the session from the given request.
+        Cookies and headers can be added to their whitelists by passing in a list
+        for either ``cookies`` or ``headers``.
+        """
+
         if cookies:
             for c in cookies:
                 if c not in self.cookies:

@@ -64,7 +115,14 @@ class Session(object):
             if header in self.headers:
                 self.header_vals[header] = req.headers[header]
 
-    def get_rsp(self, rsp, cookies=None):
+    def save_rsp(self, rsp, cookies=None):
+        """
+        save_rsp(rsp, cookies=None)
+
+        Update the state of the session from the response. Only cookies can be
+        updated from a response. Additional values can be added to the whitelist
+        by passing in a list of values for the ``cookies`` parameter.
+        """
         if cookies:
             for c in cookies:
                 if c not in self.cookies:

@@ -80,3 +138,38 @@ class Session(object):
         for k, v in rsp.cookies.all_pairs():
             if v.key in self.cookies:
                 self.cookie_vals[v.key] = v
+
+    def set_cookie(self, key, val):
+        """
+        set_cookie(key, val)
+
+        Set a cookie in the session. ``val`` can be either a string or a :class:`~pappyproxy.http.ResponseCookie`.
+        If a :class:`~pappyproxy.http.ResponseCookie` is used, make sure its ``key`` value is the same as
+        the key passed in to the function.
+        """
+        self.cookie_vals[key] = val
+
+    def get_cookie(self, key):
+        """
+        get_cookie(key)
+
+        Returns a string with the value of the cookie with the given key, even if the value is a :class:`~pappyproxy.http.ResponseCookie`.
+        If you want to get a :class:`~pappyproxy.http.ResponseCookie`, use :func:`~pappyproxy.session.Session.get_rsp_cookie`.
+        """
+        if not key in self.cookie_vals:
+            raise KeyError('Cookie is not stored in session.')
+        v = self.cookie_vals[key]
+        return self._cookie_val(v)
+
+    def get_rsp_cookie(self, key):
+        """
+        get_rsp_cookie(key)
+
+        Returns the :class:`~pappyproxy.http.ResponseCookie` associated with the key
+        regardless of whether the value is stored as a string or a :class:`~pappyproxy.http.ResponseCookie`.
+        """
+        if not key in self.cookie_vals:
+            raise KeyError('Cookie is not stored in session.')
+        v = self.cookie_vals[key]
+        return self._cookie_obj(key, v)
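Together with the `get_req`/`get_rsp` to `save_req`/`save_rsp` rename, the class can now maintain state across macro runs, along the lines of this sketch (macro hook names assumed, not shown in this commit):

from pappyproxy.session import Session

sess = Session(cookie_names=['PHPSESSID'])

def mangle_request(request):
    # Replay the remembered session cookie on every request
    sess.apply_req(request)
    return request

def mangle_response(request):
    # Remember any Set-Cookie values from the response
    sess.save_rsp(request.response)
    return request.response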
@@ -1,4 +1,5 @@
 from pappyproxy.http import Request, get_request, post_request, request_by_id
+from pappyproxy.plugin import main_context_ids
 from pappyproxy.context import set_tag
 from pappyproxy.iter import *

@@ -12,7 +13,7 @@ from pappyproxy.iter import *
 
 MACRO_NAME = '{{macro_name}}'
 SHORT_NAME = '{{short_name}}'
+{% if req_lines %}
 ###########
 ## Requests
 # It's suggested that you call .copy() on these and then edit attributes

@@ -23,7 +24,7 @@ SHORT_NAME = '{{short_name}}'
 req{{ count }} = Request(({% for line in lines %}
 '{{ line }}'{% endfor %}{% set count = count+1 %}
 ){{ params }})
-{% endfor %}
+{% endfor %}{% endif %}
 
 def run_macro(args):
     # Example:
@@ -1,209 +0,0 @@
-import pytest
-import mock
-import pappyproxy
-
-from pappyproxy.mangle import async_mangle_request, async_mangle_response
-from pappyproxy.http import Request, Response
-from testutil import no_tcp, no_database, func_deleted, mock_deferred, mock_deep_save, fake_saving
-
-def retf(r):
-    return False
-
-@pytest.fixture
-def ignore_edit(mocker):
-    new_edit = mock.MagicMock()
-    new_edit.return_value = mock_deferred(None)
-    mocker.patch('pappyproxy.console.edit_file', new=new_edit)
-
-@pytest.fixture
-def ignore_delete(mocker):
-    new_os_remove = mock.MagicMock()
-    mocker.patch('os.remove', new=new_os_remove)
-    return new_os_remove
-
-@pytest.fixture(autouse=True)
-def no_logging(mocker):
-    mocker.patch('pappyproxy.proxy.log')
-
-@pytest.fixture
-def req():
-    r = Request()
-    r.status_line = 'GET / HTTP/1.1'
-    r.host = 'www.ffffff.eeeeee'
-    r.raw_data = 'AAAA'
-    return r
-
-@pytest.fixture
-def req_w_rsp(req):
-    r = Response()
-    r.status_line = 'HTTP/1.1 200 OK'
-    r.headers['Test-Header'] = 'ABC123'
-    r.raw_data = 'AAAA'
-    req.response = r
-    return req
-
-@pytest.fixture
-def mock_tempfile(mocker):
-    new_tfile_obj = mock.MagicMock()
-    tfile_instance = mock.MagicMock()
-    new_tfile_obj.return_value.__enter__.return_value = tfile_instance
-
-    tfile_instance.name = 'mockTemporaryFile'
-    mocker.patch('tempfile.NamedTemporaryFile', new=new_tfile_obj)
-
-    new_open = mock.MagicMock()
-    fake_file = mock.MagicMock(spec=file)
-    new_open.return_value.__enter__.return_value = fake_file
-    mocker.patch('__builtin__.open', new_open)
-
-    return (new_tfile_obj, tfile_instance, new_open, fake_file)
-
-
-########################
-## Test request mangling
-
-@pytest.inlineCallbacks
-def test_mangle_request_edit(req, mock_deep_save, mock_tempfile,
-                             ignore_edit, ignore_delete):
-    tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
-    r = req
-    new_contents = ('GET / HTTP/1.1\r\n'
-                    'Content-Length: 4\r\n\r\n'
-                    'BBBB')
-    fake_file.read.return_value = new_contents
-    new_req = yield async_mangle_request(r)
-    assert not mock_deep_save.called
-    assert tfile_obj.called
-    assert tfile_instance.write.called
-    assert tfile_instance.write.call_args == ((r.full_request,),)
-    assert new_open.called
-    assert fake_file.read.called
-
-    assert new_req.full_request == new_contents
-
-@pytest.inlineCallbacks
-def test_mangle_request_edit_newlines(req, mock_deep_save, mock_tempfile,
-                                      ignore_edit, ignore_delete):
-    # Intercepting is off, request in scope
-    tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
-    r = req
-    new_contents = ('GET / HTTP/1.1\r\n'
-                    'Test-Head: FOOBIE\n'
-                    'Content-Length: 4\n\r\n'
-                    'BBBB')
-    fake_file.read.return_value = new_contents
-    new_req = yield async_mangle_request(r)
-
-    assert new_req.full_request == ('GET / HTTP/1.1\r\n'
-                                    'Test-Head: FOOBIE\r\n'
-                                    'Content-Length: 4\r\n\r\n'
-                                    'BBBB')
-    assert new_req.headers['Test-Head'] == 'FOOBIE'
-
-@pytest.inlineCallbacks
-def test_mangle_request_drop(req, mock_deep_save, mock_tempfile,
-                             ignore_edit, ignore_delete):
-    # Intercepting is off, request in scope
-    tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
-    r = req
-    new_contents = ''
-    fake_file.read.return_value = new_contents
-    new_req = yield async_mangle_request(r)
-
-    assert new_req is None
-
-@pytest.inlineCallbacks
-def test_mangle_request_edit_len(req, mock_deep_save, mock_tempfile,
-                                 ignore_edit, ignore_delete):
-    # Intercepting is off, request in scope
-    tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
-    r = req
-    new_contents = ('GET / HTTP/1.1\r\n'
-                    'Test-Head: FOOBIE\n'
-                    'Content-Length: 4\n\r\n'
-                    'BBBBAAAA')
-    fake_file.read.return_value = new_contents
-    new_req = yield async_mangle_request(r)
-
-    assert new_req.full_request == ('GET / HTTP/1.1\r\n'
-                                    'Test-Head: FOOBIE\r\n'
-                                    'Content-Length: 8\r\n\r\n'
-                                    'BBBBAAAA')
-
-
-#########################
-## Test response mangling
-
-@pytest.inlineCallbacks
-def test_mangle_response_edit(req_w_rsp, mock_deep_save, mock_tempfile,
-                              ignore_edit, ignore_delete):
-    # Intercepting is on, edit
-    tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
-    r = req_w_rsp
-    old_rsp = r.response.full_response
-    new_contents = ('HTTP/1.1 403 NOTOKIEDOKIE\r\n'
-                    'Content-Length: 4\r\n'
-                    'Other-Header: foobles\r\n\r\n'
-                    'BBBB')
-    fake_file.read.return_value = new_contents
-    mangled_rsp = yield async_mangle_response(r)
-    assert not mock_deep_save.called
-    assert tfile_obj.called
-    assert tfile_instance.write.called
-    assert tfile_instance.write.call_args == ((old_rsp,),)
-    assert new_open.called
-    assert fake_file.read.called
-
-    assert mangled_rsp.full_response == new_contents
-
-@pytest.inlineCallbacks
-def test_mangle_response_newlines(req_w_rsp, mock_deep_save, mock_tempfile,
-                                  ignore_edit, ignore_delete):
-    # Intercepting is off, request in scope
-    tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
-    r = req_w_rsp
-    old_rsp = r.response.full_response
-    new_contents = ('HTTP/1.1 403 NOTOKIEDOKIE\n'
-                    'Content-Length: 4\n'
-                    'Other-Header: foobles\r\n\n'
-                    'BBBB')
-    fake_file.read.return_value = new_contents
-    mangled_rsp = yield async_mangle_response(r)
-
-    assert mangled_rsp.full_response == ('HTTP/1.1 403 NOTOKIEDOKIE\r\n'
-                                         'Content-Length: 4\r\n'
-                                         'Other-Header: foobles\r\n\r\n'
-                                         'BBBB')
-    assert mangled_rsp.headers['Other-Header'] == 'foobles'
-
-@pytest.inlineCallbacks
-def test_mangle_response_drop(req_w_rsp, mock_deep_save, mock_tempfile,
-                              ignore_edit, ignore_delete):
-    # Intercepting is off, request in scope
-    tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
-    r = req_w_rsp
-    old_rsp = r.response.full_response
-    new_contents = ''
-    fake_file.read.return_value = new_contents
-    mangled_rsp = yield async_mangle_response(r)
-
-    assert mangled_rsp is None
-
-@pytest.inlineCallbacks
-def test_mangle_response_new_len(req_w_rsp, mock_deep_save, mock_tempfile,
-                                 ignore_edit, ignore_delete):
-    # Intercepting is off, request in scope
-    tfile_obj, tfile_instance, new_open, fake_file = mock_tempfile
-    r = req_w_rsp
-    old_rsp = r.response.full_response
-    new_contents = ('HTTP/1.1 403 NOTOKIEDOKIE\n'
-                    'Content-Length: 4\n'
-                    'Other-Header: foobles\r\n\n'
-                    'BBBBAAAA')
-    fake_file.read.return_value = new_contents
-    mangled_rsp = yield async_mangle_response(r)
-
-    assert mangled_rsp.full_response == ('HTTP/1.1 403 NOTOKIEDOKIE\r\n'
-                                         'Content-Length: 8\r\n'
-                                         'Other-Header: foobles\r\n\r\n'
-                                         'BBBBAAAA')
File diff suppressed because it is too large
@@ -12,9 +12,7 @@ class ClassDeleted():
     pass
 
 class TLSStringTransport(StringTransport):
-    def startTLS(self, context, factory):
-        pass
+    startTLS = mock.MagicMock()
 
 class PappySession(object):
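Swapping the no-op method for a `mock.MagicMock` means tests can assert that TLS was actually started instead of silently ignoring it, e.g.:

tr = TLSStringTransport()
tr.startTLS(context=None, factory=None)
# The mock records the call; note it is a class attribute,
# so call state is shared across instances.
assert tr.startTLS.called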
@@ -119,12 +119,21 @@ def load_reqlist(line, allow_special=True, ids_only=False):
     :Returns: Twisted deferred
     """
     from .http import Request
+    from .plugin import async_main_context_ids
     # Parses a comma separated list of ids and returns a list of those requests
     # prints any errors
     if not line:
         raise PappyException('Request id(s) required')
-    ids = re.split(',\s*', line)
     reqs = []
+
+    if line == '*':
+        ids = yield async_main_context_ids()
+        for i in ids:
+            req = yield Request.load_request(i)
+            reqs.append(req)
+        defer.returnValue(reqs)
+
+    ids = re.split(',\s*', line)
     if not ids_only:
         for reqid in ids:
             try:

@@ -336,3 +345,14 @@ def copy_to_clipboard(text):
 
 def clipboard_contents():
     return pyperclip.paste()
+
+def autocomplete_startswith(text, lst, allow_spaces=False):
+    ret = None
+    if not text:
+        ret = lst[:]
+    else:
+        ret = [n for n in lst if n.startswith(text)]
+    if not allow_spaces:
+        ret = [s for s in ret if ' ' not in s]
+    return ret
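Quick illustration of the new helper's behavior (doctest-style):

>>> autocomplete_startswith('ru', ['run_macro', 'run_int_macro', 'rpy'])
['run_macro', 'run_int_macro']
>>> autocomplete_startswith('', ['ls', 'viq 1'])
['ls']

The empty-prefix call returns everything except entries containing spaces, since `allow_spaces` defaults to False.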
setup.py (1 change)

@@ -26,6 +26,7 @@ setup(name='pappyproxy',
           'cmd2>=0.6.8',
           'crochet>=1.4.0',
           'Jinja2>=2.8',
+          'lxml>=3.6.0',
           'pygments>=2.0.2',
           'pyperclip>=1.5.26',
           'pytest-cov>=2.2.0',