Bugfixes, etc. This is a super-alpha branch, so your patch notes are the diff.
parent d5dbf7b29f
commit 469cb9f52d

30 changed files with 1253 additions and 559 deletions

@@ -4,7 +4,8 @@ import json
default_config = """{
    "listeners": [
        {"iface": "127.0.0.1", "port": 8080}
    ]
    ],
    "proxy": {"use_proxy": false, "host": "", "port": 0, "is_socks": false}
}"""


@@ -12,6 +13,7 @@ class ProxyConfig:

    def __init__(self):
        self._listeners = [('127.0.0.1', '8080')]
        self._proxy = {'use_proxy': False, 'host': '', 'port': 0, 'is_socks': False}

    def load(self, fname):
        try:
@@ -39,6 +41,10 @@ class ProxyConfig:
                    iface = '127.0.0.1'

                self._listeners.append((iface, port))

        if 'proxy' in config_info:
            self._proxy = config_info['proxy']


    @property
    def listeners(self):
@@ -47,3 +53,67 @@ class ProxyConfig:
    @listeners.setter
    def listeners(self, val):
        self._listeners = val

    @property
    def proxy(self):
        # don't use this, use the getters to get the parsed values
        return self._proxy

    @proxy.setter
    def proxy(self, val):
        self._proxy = val

    @property
    def use_proxy(self):
        if self._proxy is None:
            return False
        if 'use_proxy' in self._proxy:
            if self._proxy['use_proxy']:
                return True
        return False

    @property
    def proxy_host(self):
        if self._proxy is None:
            return ''
        if 'host' in self._proxy:
            return self._proxy['host']
        return ''

    @property
    def proxy_port(self):
        if self._proxy is None:
            return ''
        if 'port' in self._proxy:
            return self._proxy['port']
        return ''

    @property
    def proxy_username(self):
        if self._proxy is None:
            return ''
        if 'username' in self._proxy:
            return self._proxy['username']
        return ''

    @property
    def proxy_password(self):
        if self._proxy is None:
            return ''
        if 'password' in self._proxy:
            return self._proxy['password']
        return ''

    @property
    def use_proxy_creds(self):
        return ('username' in self._proxy or 'password' in self._proxy)

    @property
    def is_socks_proxy(self):
        if self._proxy is None:
            return False
        if 'is_socks' in self._proxy:
            if self._proxy['is_socks']:
                return True
        return False

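A note on the new upstream-proxy settings above: the default config gains a "proxy" block, and ProxyConfig exposes parsed accessors for it. A minimal usage sketch, assuming ProxyConfig is imported from the patched config module (the host and port values are made-up examples):

# config.json enabling an upstream SOCKS proxy (example values only):
# {
#     "listeners": [{"iface": "127.0.0.1", "port": 8080}],
#     "proxy": {"use_proxy": true, "host": "127.0.0.1", "port": 9050, "is_socks": true}
# }
conf = ProxyConfig()          # assumed import from the project's config module
conf.load('config.json')
if conf.use_proxy:
    print(conf.proxy_host, conf.proxy_port, conf.is_socks_proxy)
    if conf.use_proxy_creds:
        print(conf.proxy_username, conf.proxy_password)
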
@@ -1,6 +1,6 @@
from itertools import groupby

from ..proxy import InvalidQuery
from ..proxy import InvalidQuery, time_to_nsecs
from ..colors import Colors, Styles

# class BuiltinFilters(object):
@@ -71,6 +71,11 @@ def filtercmd(client, args):
    """
    try:
        phrases = [list(group) for k, group in groupby(args, lambda x: x == "OR") if not k]
        for phrase in phrases:
            # we do before/after by id not by timestamp
            if phrase[0] in ('before', 'b4', 'after', 'af') and len(phrase) > 1:
                r = client.req_by_id(phrase[1], headers_only=True)
                phrase[1] = str(time_to_nsecs(r.time_start))
        client.context.apply_phrase(phrases)
    except InvalidQuery as e:
        print(e)

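For reference, the filter phrases here come from splitting the argument list on "OR"; the new loop then rewrites a before/after argument from a request id into that request's start time in nanoseconds. A small standalone sketch of the phrase splitting, with made-up example arguments:

from itertools import groupby

args = ['before', '12', 'OR', 'host', 'ct', 'example.com']
phrases = [list(group) for k, group in groupby(args, lambda x: x == 'OR') if not k]
print(phrases)  # [['before', '12'], ['host', 'ct', 'example.com']]
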
@@ -7,31 +7,32 @@ import string
import urllib

from ..util import hexdump, printable_data, copy_to_clipboard, clipboard_contents, encode_basic_auth, parse_basic_auth
from ..console import CommandError
from io import StringIO

def print_maybe_bin(s):
    binary = False
    for c in s:
        if str(c) not in string.printable:
        if chr(c) not in string.printable:
            binary = True
            break
    if binary:
        print(hexdump(s))
    else:
        print(s)
        print(s.decode())

def asciihex_encode_helper(s):
    return ''.join('{0:x}'.format(c) for c in s)
    return ''.join('{0:x}'.format(c) for c in s).encode()

def asciihex_decode_helper(s):
    ret = []
    try:
        for a, b in zip(s[0::2], s[1::2]):
            c = a+b
            c = chr(a)+chr(b)
            ret.append(chr(int(c, 16)))
        return ''.join(ret)
        return ''.join(ret).encode()
    except Exception as e:
        raise PappyException(e)
        raise CommandError(e)

def gzip_encode_helper(s):
    out = StringIO.StringIO()
@@ -54,13 +55,21 @@ def base64_decode_helper(s):
                return s_padded
            except:
                pass
        raise PappyException("Unable to base64 decode string")
        raise CommandError("Unable to base64 decode string")

def url_decode_helper(s):
    bs = s.decode()
    return urllib.parse.unquote(bs).encode()

def url_encode_helper(s):
    bs = s.decode()
    return urllib.parse.quote_plus(bs).encode()

def html_encode_helper(s):
    return ''.join(['&#x{0:x};'.format(c) for c in s])
    return ''.join(['&#x{0:x};'.format(c) for c in s]).encode()

def html_decode_helper(s):
    return html.unescape(s)
    return html.unescape(s.decode()).encode()

def _code_helper(args, func, copy=True):
    if len(args) == 0:
@@ -107,7 +116,7 @@ def url_decode(client, args):
    If no string is given, will decode the contents of the clipboard.
    Results are copied to the clipboard.
    """
    print_maybe_bin(_code_helper(args, urllib.unquote))
    print_maybe_bin(_code_helper(args, url_decode_helper))

def url_encode(client, args):
    """
@@ -115,7 +124,7 @@ def url_encode(client, args):
    If no string is given, will encode the contents of the clipboard.
    Results are copied to the clipboard.
    """
    print_maybe_bin(_code_helper(args, urllib.quote_plus))
    print_maybe_bin(_code_helper(args, url_encode_helper))

def asciihex_decode(client, args):
    """
@@ -187,7 +196,7 @@ def url_decode_raw(client, args):
    results will not be copied. It is suggested you redirect the output
    to a file.
    """
    print(_code_helper(args, urllib.unquote, copy=False))
    print(_code_helper(args, url_decode_helper, copy=False))

def url_encode_raw(client, args):
    """
@@ -195,7 +204,7 @@ def url_encode_raw(client, args):
    results will not be copied. It is suggested you redirect the output
    to a file.
    """
    print(_code_helper(args, urllib.quote_plus, copy=False))
    print(_code_helper(args, url_encode_helper, copy=False))

def asciihex_decode_raw(client, args):
    """
@@ -254,9 +263,8 @@ def unix_time_decode(client, args):
    print(_code_helper(args, unix_time_decode_helper))

def http_auth_encode(client, args):
    args = shlex.split(args[0])
    if len(args) != 2:
        raise PappyException('Usage: http_auth_encode <username> <password>')
        raise CommandError('Usage: http_auth_encode <username> <password>')
    username, password = args
    print(encode_basic_auth(username, password))

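The encode/decode helpers above now consistently take bytes and return bytes so print_maybe_bin can either hex-dump or decode the result. A standalone sketch of that convention, reusing the URL helpers from the hunk above (example inputs are made up):

import urllib.parse

def url_encode_helper(s):
    bs = s.decode()
    return urllib.parse.quote_plus(bs).encode()

def url_decode_helper(s):
    bs = s.decode()
    return urllib.parse.unquote(bs).encode()

print(url_encode_helper(b'a b&c'))      # b'a+b%26c'
print(url_decode_helper(b'a%20b%26c'))  # b'a b&c'
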
@@ -23,7 +23,7 @@ class WatchMacro(InterceptMacro):
        printstr = "< "
        printstr += verb_color(request.method) + request.method + Colors.ENDC + ' '
        printstr += url_formatter(request, colored=True)
        printstr += " -> "
        printstr += " \u2192 "
        response_code = str(response.status_code) + ' ' + response.reason
        response_code = scode_color(response_code) + response_code + Colors.ENDC
        printstr += response_code

@@ -1524,7 +1524,7 @@ def update_buffers(req):

    # Save the port, ssl, host setting
    vim.command("let s:dest_port=%d" % req.dest_port)
    vim.command("let s:dest_host='%s'" % req.dest_host)
    vim.command("let s:dest_host='%s'" % escape(req.dest_host))

    if req.use_tls:
        vim.command("let s:use_tls=1")
@@ -1544,6 +1544,8 @@ def set_up_windows():
    reqid = vim.eval("a:2")
    storage_id = vim.eval("a:3")
    msg_addr = vim.eval("a:4")

    vim.command("let s:storage_id=%d" % int(storage_id))

    # Get the left buffer
    vim.command("new")
@@ -1568,11 +1570,12 @@ def dest_loc():
    dest_host = vim.eval("s:dest_host")
    dest_port = int(vim.eval("s:dest_port"))
    tls_num = vim.eval("s:use_tls")
    storage_id = int(vim.eval("s:storage_id"))
    if tls_num == "1":
        use_tls = True
    else:
        use_tls = False
    return (dest_host, dest_port, use_tls)
    return (dest_host, dest_port, use_tls, storage_id)

def submit_current_buffer():
    curbuf = vim.current.buffer
@@ -1586,14 +1589,15 @@ def submit_current_buffer():
    full_request = '\n'.join(curbuf)

    req = parse_request(full_request)
    dest_host, dest_port, use_tls = dest_loc()
    dest_host, dest_port, use_tls, storage_id = dest_loc()
    req.dest_host = dest_host
    req.dest_port = dest_port
    req.use_tls = use_tls

    comm_type, comm_addr = get_conn_addr()
    with ProxyConnection(kind=comm_type, addr=comm_addr) as conn:
        new_req = conn.submit(req)
        new_req = conn.submit(req, storage=storage_id)
        conn.add_tag(new_req.db_id, "repeater", storage_id)
        update_buffers(new_req)

# (left, right) = set_up_windows()

@@ -481,17 +481,23 @@ def site_map(client, args):
        paths = True
    else:
        paths = False
    reqs = client.in_context_requests(headers_only=True)
    paths_set = set()
    for req in reqs:
        if req.response and req.response.status_code != 404:
            paths_set.add(path_tuple(req.url))
    tree = sorted(list(paths_set))
    if paths:
        for p in tree:
            print ('/'.join(list(p)))
    else:
        print_tree(tree)
    all_reqs = client.in_context_requests(headers_only=True)
    reqs_by_host = {}
    for req in all_reqs:
        reqs_by_host.setdefault(req.dest_host, []).append(req)
    for host, reqs in reqs_by_host.items():
        paths_set = set()
        for req in reqs:
            if req.response and req.response.status_code != 404:
                paths_set.add(path_tuple(req.url))
        tree = sorted(list(paths_set))
        print(host)
        if paths:
            for p in tree:
                print ('/'.join(list(p)))
        else:
            print_tree(tree)
        print("")

def dump_response(client, args):
    """
@@ -515,6 +521,78 @@ def dump_response(client, args):
    else:
        print('Request {} does not have a response'.format(req.reqid))

def get_surrounding_lines(s, n, lines):
    left = n
    right = n
    lines_left = 0
    lines_right = 0

    # move left until we find enough lines or hit the edge
    while left > 0 and lines_left < lines:
        if s[left] == '\n':
            lines_left += 1
        left -= 1

    # move right until we find enough lines or hit the edge
    while right < len(s) and lines_right < lines:
        if s[right] == '\n':
            lines_right += 1
        right += 1

    return s[left:right]

def print_search_header(reqid, locstr):
    printstr = Styles.TABLE_HEADER
    printstr += "Result(s) for request {} ({})".format(reqid, locstr)
    printstr += Colors.ENDC
    print(printstr)

def highlight_str(s, substr):
    highlighted = Colors.BGYELLOW + Colors.BLACK + Colors.BOLD + substr + Colors.ENDC
    return s.replace(substr, highlighted)

def search_message(mes, substr, lines, reqid, locstr):
    header_printed = False
    for m in re.finditer(substr, mes):
        if not header_printed:
            print_search_header(reqid, locstr)
            header_printed = True
        n = m.start()
        linestr = get_surrounding_lines(mes, n, lines)
        linelist = linestr.split('\n')
        linestr = '\n'.join(line[:500] for line in linelist)
        toprint = highlight_str(linestr, substr)
        print(toprint)
        print('-'*50)

def search(client, args):
    search_str = args[0]
    lines = 2
    if len(args) > 1:
        lines = int(args[1])
    for req in client.in_context_requests_iter():
        reqid = client.get_reqid(req)
        reqheader_printed = False
        try:
            mes = req.full_message().decode()
            search_message(mes, search_str, lines, reqid, "Request")
        except UnicodeDecodeError:
            pass
        if req.response:
            try:
                mes = req.response.full_message().decode()
                search_message(mes, search_str, lines, reqid, "Response")
            except UnicodeDecodeError:
                pass

        wsheader_printed = False
        for wsm in req.ws_messages:
            if not wsheader_printed:
                print_search_header(reqid, "Websocket Messages")
                wsheader_printed = True
            if search_str in wsm.message:
                print(highlight_str(wsm.message, search_str))


# @crochet.wait_for(timeout=None)
# @defer.inlineCallbacks
@@ -572,6 +650,7 @@ def load_cmds(cmd):
        'urls': (find_urls, None),
        'site_map': (site_map, None),
        'dump_response': (dump_response, None),
        'search': (search, None),
        # 'view_request_bytes': (view_request_bytes, None),
        # 'view_response_bytes': (view_response_bytes, None),
    })

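The search command registered above takes a search string (treated as a regular expression by re.finditer in search_message) and an optional number of context lines, and highlights each match. A standalone sketch of the highlighting step, using stand-in ANSI codes in place of the project's Colors constants:

# Stand-in ANSI escape codes; the real values come from the Colors class used above.
BGYELLOW, BLACK, BOLD, ENDC = '\x1b[43m', '\x1b[30m', '\x1b[1m', '\x1b[0m'

def highlight_str(s, substr):
    # wrap every literal occurrence of substr in a yellow-background highlight
    return s.replace(substr, BGYELLOW + BLACK + BOLD + substr + ENDC)

print(highlight_str('GET /index.html HTTP/1.1', 'index'))
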
@@ -85,17 +85,23 @@ class SockBuffer:

class Headers:
    def __init__(self, headers=None):
        if headers is None:
            self.headers = {}
        else:
            self.headers = headers
        self.headers = {}
        if headers is not None:
            if isinstance(headers, Headers):
                for _, pairs in headers.headers.items():
                    for k, v in pairs:
                        self.add(k, v)
            else:
                for k, vs in headers.items():
                    for v in vs:
                        self.add(k, v)

    def __contains__(self, hd):
        for k, _ in self.headers.items():
            if k.lower() == hd.lower():
                return True
        return False


    def add(self, k, v):
        try:
            l = self.headers[k.lower()]
@@ -265,11 +271,7 @@ class HTTPRequest:
        self.proto_major = proto_major
        self.proto_minor = proto_minor

        self.headers = Headers()
        if headers is not None:
            for k, vs in headers.items():
                for v in vs:
                    self.headers.add(k, v)
        self.headers = Headers(headers)

        self.headers_only = headers_only
        self._body = bytes()
@@ -280,8 +282,8 @@ class HTTPRequest:
        self.dest_host = dest_host
        self.dest_port = dest_port
        self.use_tls = use_tls
        self.time_start = time_start or datetime.datetime(1970, 1, 1)
        self.time_end = time_end or datetime.datetime(1970, 1, 1)
        self.time_start = time_start
        self.time_end = time_end

        self.response = None
        self.unmangled = None
@@ -412,7 +414,7 @@ class HTTPRequest:
            path=self.url.geturl(),
            proto_major=self.proto_major,
            proto_minor=self.proto_minor,
            headers=self.headers.headers,
            headers=self.headers,
            body=self.body,
            dest_host=self.dest_host,
            dest_port=self.dest_port,
@@ -928,6 +930,21 @@ class ProxyConnection:
        for ss in result["Storages"]:
            ret.append(SavedStorage(ss["Id"], ss["Description"]))
        return ret

    @messagingFunction
    def set_proxy(self, use_proxy=False, proxy_host="", proxy_port=0, use_creds=False,
            username="", password="", is_socks=False):
        cmd = {
            "Command": "SetProxy",
            "UseProxy": use_proxy,
            "ProxyHost": proxy_host,
            "ProxyPort": proxy_port,
            "ProxyIsSOCKS": is_socks,
            "UseCredentials": use_creds,
            "Username": username,
            "Password": password,
        }
        self.reqrsp_cmd(cmd)

    @messagingFunction
    def intercept(self, macro):
@@ -1086,6 +1103,7 @@ class ProxyClient:
            # "add_in_memory_storage",
            # "close_storage",
            # "set_proxy_storage",
            "set_proxy"
        }

    def __enter__(self):
@@ -1162,7 +1180,7 @@ class ProxyClient:
            stype, prefix = s.description.split("|")
            storage = ActiveStorage(stype, s.storage_id, prefix)
            self._add_storage(storage, prefix)


    def parse_reqid(self, reqid):
        if reqid[0].isalpha():
            prefix = reqid[0]
@@ -1172,6 +1190,10 @@ class ProxyClient:
            realid = reqid
        storage = self.storage_by_prefix[prefix]
        return storage, realid

    def get_reqid(self, req):
        storage = self.storage_by_id[req.storage_id]
        return storage.prefix + req.db_id

    def storage_iter(self):
        for _, s in self.storage_by_id.items():
@@ -1190,6 +1212,17 @@ class ProxyClient:
        if max_results > 0 and len(results) > max_results:
            ret = results[:max_results]
        return ret

    def in_context_requests_iter(self, headers_only=False, max_results=0):
        results = self.query_storage(self.context.query,
                                     headers_only=headers_only,
                                     max_results=max_results)
        ret = results
        if max_results > 0 and len(results) > max_results:
            ret = results[:max_results]
        for reqh in ret:
            req = self.req_by_id(reqh.db_id, storage_id=reqh.storage_id)
            yield req

    def prefixed_reqid(self, req):
        prefix = ""
@@ -1246,10 +1279,14 @@ class ProxyClient:
        results = [r for r in reversed(results)]
        return results

    def req_by_id(self, reqid, headers_only=False):
        storage, rid = self.parse_reqid(reqid)
        return self.msg_conn.req_by_id(rid, headers_only=headers_only,
                                       storage=storage.storage_id)
    def req_by_id(self, reqid, storage_id=None, headers_only=False):
        if storage_id is None:
            storage, db_id = self.parse_reqid(reqid)
            storage_id = storage.storage_id
        else:
            db_id = reqid
        return self.msg_conn.req_by_id(db_id, headers_only=headers_only,
                                       storage=storage_id)

    # for these and submit, might need storage stored on the request itself
    def add_tag(self, reqid, tag, storage=None):
@@ -1275,12 +1312,12 @@ class ProxyClient:


def decode_req(result, headers_only=False):
    if "StartTime" in result:
    if "StartTime" in result and result["StartTime"] > 0:
        time_start = time_from_nsecs(result["StartTime"])
    else:
        time_start = None

    if "EndTime" in result:
    if "EndTime" in result and result["EndTime"] > 0:
        time_end = time_from_nsecs(result["EndTime"])
    else:
        time_end = None

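For reference, the new set_proxy messaging function above just serializes its arguments into a SetProxy command for the proxy backend. The dictionary below mirrors the fields from that hunk; the host and port are made-up example values:

# Example SetProxy message, as built by ProxyConnection.set_proxy above:
cmd = {
    "Command": "SetProxy",
    "UseProxy": True,
    "ProxyHost": "127.0.0.1",   # example value
    "ProxyPort": 9050,          # example value
    "ProxyIsSOCKS": True,
    "UseCredentials": False,
    "Username": "",
    "Password": "",
}
# A ProxyConnection instance would send this with self.reqrsp_cmd(cmd).
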
@@ -114,6 +114,13 @@ def main():
                        client.add_listener(iface, port)
                    except MessageError as e:
                        print(str(e))

                # Set upstream proxy
                if config.use_proxy:
                    client.set_proxy(config.use_proxy,
                                     config.proxy_host,
                                     config.proxy_port,
                                     is_socks=config.is_socks_proxy)
            interface_loop(client)
        except MessageError as e:
            print(str(e))

@@ -2,6 +2,7 @@ import sys
import string
import time
import datetime
import base64
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_for_mimetype, HttpLexer
from pygments import highlight
@@ -275,8 +276,8 @@ def clipboard_contents():

def encode_basic_auth(username, password):
    decoded = '%s:%s' % (username, password)
    encoded = base64.b64encode(decoded)
    header = 'Basic %s' % encoded
    encoded = base64.b64encode(decoded.encode())
    header = 'Basic %s' % encoded.decode()
    return header

def parse_basic_auth(header):
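encode_basic_auth now does the base64 round trip on bytes explicitly for Python 3. A quick standalone check of the fixed version (the credentials are made up):

import base64

def encode_basic_auth(username, password):
    # join, encode to bytes, base64, then decode back to str for the header value
    decoded = '%s:%s' % (username, password)
    encoded = base64.b64encode(decoded.encode())
    return 'Basic %s' % encoded.decode()

print(encode_basic_auth('admin', 'hunter2'))  # Basic YWRtaW46aHVudGVyMg==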