repo stringlengths 7 90 | file_url stringlengths 81 315 | file_path stringlengths 4 228 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 14:38:15 2026-01-05 02:33:18 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/contrib/sslstrip.py | examples/contrib/sslstrip.py | """
This script implements an sslstrip-like attack based on mitmproxy.
https://moxie.org/software/sslstrip/
"""
import re
import urllib.parse
from mitmproxy import http
# set of SSL/TLS capable hosts
secure_hosts: set[str] = set()
def request(flow: http.HTTPFlow) -> None:
    """Rewrite outgoing requests: strip caching/upgrade headers and re-encrypt for known TLS hosts."""
    # Drop headers that would let the server answer with 304/cached content or
    # force an HTTPS upgrade on the client.
    for header in ("If-Modified-Since", "Cache-Control", "Upgrade-Insecure-Requests"):
        flow.request.headers.pop(header, None)

    # If this host previously redirected us to HTTPS, proxy the connection over TLS.
    if flow.request.pretty_host in secure_hosts:
        flow.request.scheme = "https"
        flow.request.port = 443

        # We need to update the request destination to whatever is specified in the host header:
        # Having no TLS Server Name Indication from the client and just an IP address as request.host
        # in transparent mode, TLS server name certificate validation would fail.
        flow.request.host = flow.request.pretty_host
def response(flow: http.HTTPFlow) -> None:
    """Rewrite incoming responses so the client keeps speaking plain HTTP."""
    assert flow.response
    # Remove headers that would pin the browser to HTTPS.
    for header in ("Strict-Transport-Security", "Public-Key-Pins"):
        flow.response.headers.pop(header, None)

    # Downgrade absolute https:// links embedded in the body.
    body = flow.response.content.replace(b"https://", b"http://")

    # Remove any CSP meta tag carrying the upgrade-insecure-requests directive.
    meta_pattern = rb'<meta.*http-equiv=["\']Content-Security-Policy[\'"].*upgrade-insecure-requests.*?>'
    flow.response.content = re.sub(meta_pattern, b"", body, flags=re.IGNORECASE)

    # Downgrade https redirects, remembering the target host as TLS-capable.
    location = flow.response.headers.get("Location", "")
    if location.startswith("https://"):
        hostname = urllib.parse.urlparse(location).hostname
        if hostname:
            secure_hosts.add(hostname)
        flow.response.headers["Location"] = location.replace("https://", "http://", 1)

    # Drop the upgrade-insecure-requests directive from the CSP header itself.
    csp = flow.response.headers.get("Content-Security-Policy", "")
    if re.search("upgrade-insecure-requests", csp, flags=re.IGNORECASE):
        flow.response.headers["Content-Security-Policy"] = re.sub(
            r"upgrade-insecure-requests[;\s]*", "", csp, flags=re.IGNORECASE
        )

    # Strip the 'secure' attribute from cookies so the browser resends them over HTTP.
    cookies = flow.response.headers.get_all("Set-Cookie")
    flow.response.headers.set_all(
        "Set-Cookie", [re.sub(r";\s*secure\s*", "", c) for c in cookies]
    )
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/contrib/upstream_pac.py | examples/contrib/upstream_pac.py | """
Mitmproxy add-on to support using a PAC file to determine the upstream proxy to use.
Supports adding an alternate proxy to use for when the PAC decides the connection should be DIRECT.
Adds two options to mitmproxy:
pac_url - an url that will return a pac file to use for evaluating which upstream proxy to use.
direct_upstream_proxy - an alternate proxy to be used if the PAC returns DIRECT.
Requires pypac to be installed and available on the python path.
This class is inspired by the user contributed add-on:
https://github.com/mitmproxy/mitmproxy/blob/main/examples/contrib/change_upstream_proxy.py
"""
import logging
import pypac
from mitmproxy import ctx
from mitmproxy import http
from mitmproxy.net import server_spec
class UpstreamPac:
    # PAC file parsed from ctx.options.pac_url; None when unset or loading failed.
    pac_file: pypac.parser.PACFile | None

    @staticmethod
    def load(loader) -> None:
        """Register the add-on's two options with mitmproxy."""
        loader.add_option(
            name="direct_upstream_proxy",
            typespec=str | None,
            default=None,
            help="Alternate upstream proxy to use when PAC resolution returns direct (http://localhost:8081)",
        )
        loader.add_option(
            name="pac_url",
            typespec=str | None,
            default=None,
            help="Proxy autoconfig url used to retrieve the PAC file",
        )

    @staticmethod
    def configure(updated) -> None:
        """(Re)load the PAC file whenever the pac_url option changes."""
        if "pac_url" in updated:
            if ctx.options.pac_url is None:
                UpstreamPac.pac_file = None
                logging.info("No pac file specified")
            else:
                # NOTE(review): the trailing space in "application/x-ns-proxy-autoconfig "
                # looks unintentional — confirm it matches the server's content type.
                UpstreamPac.pac_file = pypac.get_pac(
                    url=ctx.options.pac_url,
                    allowed_content_types=[
                        "application/x-ns-proxy-autoconfig ",
                        "application/x-javascript-config",
                        "text/html",
                        "text/plain",
                    ],
                )
                if UpstreamPac.pac_file is None:
                    logging.error(
                        "Failed to load pac file from: %s", ctx.options.pac_url
                    )

    @staticmethod
    def proxy_address(flow: http.HTTPFlow) -> tuple[str, tuple[str, int]] | None:
        """Evaluate the PAC for *flow*; return a parsed server spec, or None for a direct connection."""
        if UpstreamPac.pac_file:
            proxy = UpstreamPac.pac_file.find_proxy_for_url(
                flow.request.url, flow.request.host
            )
            if proxy == "DIRECT":
                # DIRECT can still be routed through an alternate proxy if configured.
                if ctx.options.direct_upstream_proxy is not None:
                    return server_spec.parse(ctx.options.direct_upstream_proxy, "http")
                else:
                    return None
            else:
                proxy_url = pypac.parser.proxy_url(proxy)
                return server_spec.parse(proxy_url, "http")
        return None

    @staticmethod
    def request(flow: http.HTTPFlow) -> None:
        """Route the request via the PAC-selected upstream proxy, if any."""
        address = UpstreamPac.proxy_address(flow)
        if address is not None:
            logging.info(
                "Using proxy %s://%s:%s for %s"
                % (address[0], address[1][0], address[1][1], flow.request.host)
            )
            flow.server_conn.via = address
addons = [UpstreamPac()]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/contrib/save_streamed_data.py | examples/contrib/save_streamed_data.py | """
Save streamed requests and responses
If the option 'save_streamed_data' is set to a format string then
streamed requests and responses are written to individual files with a name
derived from the string. Apart from python strftime() formating (using the
request start time) the following codes can also be used:
- %+T: The time stamp of the request with microseconds
- %+D: 'req' or 'rsp' indicating the direction of the data
- %+I: The client connection ID
- %+C: The client IP address
A good starting point for a template could be '~/streamed_files/%+D:%+T:%+I',
a more complex example is '~/streamed_files/%+C/%Y-%m-%d%/%+D:%+T:%+I'.
The client connection ID combined with the request time stamp should be unique
for associating a file with its corresponding flow in the stream saved with
'--save-stream-file'.
This addon is not compatible with addons that use the same mechanism to
capture streamed data, http-stream-modify.py for instance.
"""
import logging
import os
from datetime import datetime
from pathlib import Path
from typing import Optional
from mitmproxy import ctx
class StreamSaver:
    """Stream hook object: writes every chunk of a streamed message to a file."""

    # Prefix for all log messages emitted by this add-on.
    TAG = "save_streamed_data: "

    def __init__(self, flow, direction):
        # flow: the HTTPFlow whose streamed body is being captured.
        # direction: "req" or "rsp"; substituted for the %+D code in the path template.
        self.flow = flow
        self.direction = direction
        # Output file handle and resolved path; both created lazily on the first chunk.
        self.fh = None
        self.path = None

    def done(self):
        """Close the output file and drop the flow reference."""
        if self.fh:
            self.fh.close()
            self.fh = None
        # Make sure we have no circular references
        self.flow = None

    def __call__(self, data):
        """Write *data* to the output file and pass it through unchanged."""
        # End of stream?
        if len(data) == 0:
            self.done()
            return data
        # Just in case the option changes while a stream is in flight
        if not ctx.options.save_streamed_data:
            return data
        # This is a safeguard but should not be needed
        if not self.flow or not self.flow.request:
            return data
        if not self.fh:
            # Expand strftime() codes first (using the request start time), then the
            # add-on specific %+ codes documented in the module docstring.
            self.path = datetime.fromtimestamp(
                self.flow.request.timestamp_start
            ).strftime(ctx.options.save_streamed_data)
            self.path = self.path.replace("%+T", str(self.flow.request.timestamp_start))
            self.path = self.path.replace("%+I", str(self.flow.client_conn.id))
            self.path = self.path.replace("%+D", self.direction)
            self.path = self.path.replace("%+C", self.flow.client_conn.address[0])
            self.path = os.path.expanduser(self.path)
            parent = Path(self.path).parent
            try:
                if not parent.exists():
                    parent.mkdir(parents=True, exist_ok=True)
            except OSError:
                logging.error(f"{self.TAG}Failed to create directory: {parent}")
            try:
                # buffering=0: unbuffered so data hits disk even if the flow dies mid-stream.
                self.fh = open(self.path, "wb", buffering=0)
            except OSError:
                logging.error(f"{self.TAG}Failed to open for writing: {self.path}")
        if self.fh:
            try:
                self.fh.write(data)
            except OSError:
                logging.error(f"{self.TAG}Failed to write to: {self.path}")
        return data
def load(loader):
    """Register the 'save_streamed_data' option with mitmproxy."""
    help_text = (
        "Format string for saving streamed data to files. If set each streamed request or response is written "
        "to a file with a name derived from the string. In addition to formating supported by python "
        "strftime() (using the request start time) the code '%+T' is replaced with the time stamp of the request, "
        "'%+D' by 'req' or 'rsp' depending on the direction of the data, '%+C' by the client IP addresses and "
        "'%+I' by the client connection ID."
    )
    loader.add_option("save_streamed_data", Optional[str], None, help_text)
def requestheaders(flow):
    """Attach a StreamSaver to streamed request bodies when the option is set."""
    if not ctx.options.save_streamed_data:
        return
    if flow.request.stream:
        flow.request.stream = StreamSaver(flow, "req")
def responseheaders(flow):
    """Close any request-side saver, then attach one to a streamed response."""
    saver = flow.request.stream
    if isinstance(saver, StreamSaver):
        saver.done()
    if ctx.options.save_streamed_data and flow.response.stream:
        flow.response.stream = StreamSaver(flow, "rsp")
def response(flow):
    """Close the response-side saver once the message is complete."""
    saver = flow.response.stream
    if isinstance(saver, StreamSaver):
        saver.done()
def error(flow):
    """On flow error, close both savers so file handles are released."""
    for message in (flow.request, flow.response):
        if message and isinstance(message.stream, StreamSaver):
            message.stream.done()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/contrib/domain_fronting.py | examples/contrib/domain_fronting.py | import json
from dataclasses import dataclass
from mitmproxy import ctx
from mitmproxy.addonmanager import Loader
from mitmproxy.http import HTTPFlow
"""
This extension implements support for domain fronting.
Usage:
mitmproxy -s examples/contrib/domain_fronting.py --set domainfrontingfile=./domain_fronting.json
In the following basic example, www.example.com will be used for DNS requests and SNI values
but the secret.example.com value will be used for the HTTP host header:
{
"mappings": [
{
"patterns": ["secret.example.com"],
"server": "www.example.com"
}
]
}
The following example demonstrates the usage of a wildcard (at the beginning of the domain name only):
{
"mappings": [
{
"patterns": ["*.foo.example.com"],
"server": "www.example.com"
}
]
}
In the following example, we override the HTTP host header:
{
"mappings": [
{
"patterns": ["foo.example"],
"server": "www.example.com",
"host": "foo.proxy.example.com"
}
]
}
"""
@dataclass
class Mapping:
    # Replacement server used for DNS resolution / SNI (None keeps the original host).
    server: str | None
    # Override for the HTTP Host header (None keeps the original value).
    host: str | None
class HttpsDomainFronting:
    """Addon that rewrites the connection target and Host header for domain fronting."""

    # Configurations for star ("*.example.com") mappings, keyed by the super-domain:
    star_mappings: dict[str, Mapping]

    # Configurations for regular ("foo.example.com") mappings, keyed by the exact host:
    strict_mappings: dict[str, Mapping]

    def __init__(self) -> None:
        self.strict_mappings = {}
        self.star_mappings = {}

    def _resolve_addresses(self, host: str) -> Mapping | None:
        """Return the mapping for *host*: exact match first, then each wildcard super-domain."""
        mapping = self.strict_mappings.get(host)
        if mapping is not None:
            return mapping
        # For "a.b.c" try the star mappings for "b.c", then "c".
        index = 0
        while True:
            index = host.find(".", index)
            if index == -1:
                break
            super_domain = host[(index + 1) :]
            mapping = self.star_mappings.get(super_domain)
            if mapping is not None:
                return mapping
            index += 1
        return None

    def load(self, loader: Loader) -> None:
        loader.add_option(
            name="domainfrontingfile",
            typespec=str,
            default="./fronting.json",
            help="Domain fronting configuration file",
        )

    def _load_configuration_file(self, filename: str) -> None:
        """Parse the JSON config file and rebuild both lookup tables.

        The tables are swapped in only after a successful parse, so a broken
        config file leaves the previous mappings in effect.
        """
        # Fix: the original leaked the file handle (json.load(open(filename))).
        with open(filename) as f:
            config = json.load(f)
        strict_mappings: dict[str, Mapping] = {}
        star_mappings: dict[str, Mapping] = {}
        for mapping in config["mappings"]:
            item = Mapping(server=mapping.get("server"), host=mapping.get("host"))
            for pattern in mapping["patterns"]:
                if pattern.startswith("*."):
                    star_mappings[pattern[2:]] = item
                else:
                    strict_mappings[pattern] = item
        self.strict_mappings = strict_mappings
        self.star_mappings = star_mappings

    def configure(self, updated: set[str]) -> None:
        if "domainfrontingfile" in updated:
            domain_fronting_file = ctx.options.domainfrontingfile
            self._load_configuration_file(domain_fronting_file)

    def request(self, flow: HTTPFlow) -> None:
        if not flow.request.scheme == "https":
            return
        # We use the host header to dispatch the request:
        target = flow.request.host_header
        if target is None:
            return
        mapping = self._resolve_addresses(target)
        if mapping is not None:
            flow.request.host = mapping.server or target
            flow.request.headers["host"] = mapping.host or target
addons = [HttpsDomainFronting()]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/contrib/mitmproxywrapper.py | examples/contrib/mitmproxywrapper.py | #!/usr/bin/env python
#
# Helper tool to enable/disable OS X proxy and wrap mitmproxy
#
# Get usage information with:
#
# mitmproxywrapper.py -h
#
import argparse
import contextlib
import os
import re
import signal
import socketserver
import subprocess
import sys
class Wrapper:
    """Wraps mitmproxy/mitmweb, enabling the macOS system web proxy while it runs."""

    def __init__(self, port, use_mitmweb, extra_arguments=None):
        # port: local proxy port to point the system proxy at.
        # use_mitmweb: run mitmweb instead of mitmproxy.
        # extra_arguments: passed through unchanged to mitmproxy/mitmweb.
        self.port = port
        self.use_mitmweb = use_mitmweb
        self.extra_arguments = extra_arguments

    def run_networksetup_command(self, *arguments):
        """Run `sudo networksetup <arguments>` and return its stdout as text."""
        return subprocess.check_output(
            ["sudo", "networksetup"] + list(arguments)
        ).decode()

    def proxy_state_for_service(self, service):
        """Return the web proxy settings of *service* as a dict, e.g. {'Enabled': 'Yes', ...}."""
        state = self.run_networksetup_command("-getwebproxy", service).splitlines()
        return dict([re.findall(r"([^:]+): (.*)", line)[0] for line in state])

    def enable_proxy_for_service(self, service):
        """Point both HTTP and HTTPS proxies of *service* at 127.0.0.1:<port>."""
        print(f"Enabling proxy on {service}...")
        for subcommand in ["-setwebproxy", "-setsecurewebproxy"]:
            self.run_networksetup_command(
                subcommand, service, "127.0.0.1", str(self.port)
            )

    def disable_proxy_for_service(self, service):
        """Switch off both HTTP and HTTPS proxies of *service*."""
        print(f"Disabling proxy on {service}...")
        for subcommand in ["-setwebproxystate", "-setsecurewebproxystate"]:
            self.run_networksetup_command(subcommand, service, "Off")

    def interface_name_to_service_name_map(self):
        """Map BSD device names (e.g. en0) to network service names (e.g. Wi-Fi)."""
        order = self.run_networksetup_command("-listnetworkserviceorder")
        mapping = re.findall(
            r"\(\d+\)\s(.*)$\n\(.*Device: (.+)\)$", order, re.MULTILINE
        )
        return {b: a for (a, b) in mapping}

    def run_command_with_input(self, command, input):
        """Run *command*, feed *input* to its stdin, and return its stdout as text."""
        popen = subprocess.Popen(command, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
        (stdout, stderr) = popen.communicate(input.encode())
        return stdout.decode()

    def primary_interace_name(self):
        """Return the BSD name of the primary network interface (queried via scutil)."""
        scutil_script = "get State:/Network/Global/IPv4\nd.show\n"
        stdout = self.run_command_with_input("/usr/sbin/scutil", scutil_script)
        (interface,) = re.findall(r"PrimaryInterface\s*:\s*(.+)", stdout)
        return interface

    def primary_service_name(self):
        """Return the network service name of the primary interface."""
        return self.interface_name_to_service_name_map()[self.primary_interace_name()]

    def proxy_enabled_for_service(self, service):
        """True if the web proxy is currently enabled for *service*."""
        return self.proxy_state_for_service(service)["Enabled"] == "Yes"

    def toggle_proxy(self):
        """Flip all connected services to the opposite of the primary service's proxy state."""
        new_state = not self.proxy_enabled_for_service(self.primary_service_name())
        for service_name in self.connected_service_names():
            if self.proxy_enabled_for_service(service_name) and not new_state:
                self.disable_proxy_for_service(service_name)
            elif not self.proxy_enabled_for_service(service_name) and new_state:
                self.enable_proxy_for_service(service_name)

    def connected_service_names(self):
        """Return the names of all network services that currently have IPv4 state."""
        scutil_script = "list\n"
        stdout = self.run_command_with_input("/usr/sbin/scutil", scutil_script)
        service_ids = re.findall(r"State:/Network/Service/(.+)/IPv4", stdout)
        service_names = []
        for service_id in service_ids:
            scutil_script = f"show Setup:/Network/Service/{service_id}\n"
            stdout = self.run_command_with_input("/usr/sbin/scutil", scutil_script)
            (service_name,) = re.findall(r"UserDefinedName\s*:\s*(.+)", stdout)
            service_names.append(service_name)
        return service_names

    def wrap_mitmproxy(self):
        """Run mitmproxy/mitmweb with the system proxy enabled for the duration."""
        with self.wrap_proxy():
            cmd = ["mitmweb" if self.use_mitmweb else "mitmproxy", "-p", str(self.port)]
            if self.extra_arguments:
                cmd.extend(self.extra_arguments)
            subprocess.check_call(cmd)

    def wrap_honeyproxy(self):
        """Run honeyproxy with the system proxy enabled for the duration."""
        with self.wrap_proxy():
            popen = subprocess.Popen("honeyproxy.sh")
            try:
                popen.wait()
            except KeyboardInterrupt:
                popen.terminate()

    @contextlib.contextmanager
    def wrap_proxy(self):
        """Context manager: enable the proxy on all connected services, restore on exit."""
        connected_service_names = self.connected_service_names()
        for service_name in connected_service_names:
            if not self.proxy_enabled_for_service(service_name):
                self.enable_proxy_for_service(service_name)
        yield
        for service_name in connected_service_names:
            if self.proxy_enabled_for_service(service_name):
                self.disable_proxy_for_service(service_name)

    @classmethod
    def ensure_superuser(cls):
        """Re-exec the script under sudo if not already running as root."""
        if os.getuid() != 0:
            print("Relaunching with sudo...")
            os.execv("/usr/bin/sudo", ["/usr/bin/sudo"] + sys.argv)

    @classmethod
    def main(cls):
        """Parse command-line arguments and run the requested action."""
        parser = argparse.ArgumentParser(
            description="Helper tool for OS X proxy configuration and mitmproxy.",
            epilog="Any additional arguments will be passed on unchanged to mitmproxy/mitmweb.",
        )
        parser.add_argument(
            "-t",
            "--toggle",
            action="store_true",
            help="just toggle the proxy configuration",
        )
        # parser.add_argument('--honeyproxy', action='store_true', help='run honeyproxy instead of mitmproxy')
        parser.add_argument(
            "-p",
            "--port",
            type=int,
            help="override the default port of 8080",
            default=8080,
        )
        parser.add_argument(
            "-P",
            "--port-random",
            action="store_true",
            help="choose a random unused port",
        )
        parser.add_argument(
            "-w",
            "--web",
            action="store_true",
            help="web interface: run mitmweb instead of mitmproxy",
        )
        args, extra_arguments = parser.parse_known_args()
        port = args.port
        # Allocate a random unused port, and hope no other process steals it before mitmproxy/mitmweb uses it.
        # Passing the allocated socket to mitmproxy/mitmweb would be nicer of course.
        if args.port_random:
            with socketserver.TCPServer(("localhost", 0), None) as s:
                port = s.server_address[1]
                print(f"Using random port {port}...")
        wrapper = cls(port=port, use_mitmweb=args.web, extra_arguments=extra_arguments)

        # Restore the proxy settings when the user interrupts the wrapped process.
        def handler(signum, frame):
            print("Cleaning up proxy settings...")
            wrapper.toggle_proxy()

        signal.signal(signal.SIGINT, handler)
        if args.toggle:
            wrapper.toggle_proxy()
        # elif args.honeyproxy:
        #     wrapper.wrap_honeyproxy()
        else:
            wrapper.wrap_mitmproxy()
if __name__ == "__main__":
    # Re-exec under sudo when needed, then parse arguments and run.
    Wrapper.ensure_superuser()
    Wrapper.main()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/contrib/webscanner_helper/proxyauth_selenium.py | examples/contrib/webscanner_helper/proxyauth_selenium.py | import abc
import logging
import random
import string
import time
from typing import Any
from typing import cast
from selenium import webdriver
import mitmproxy.http
from mitmproxy import flowfilter
from mitmproxy import master
from mitmproxy.script import concurrent
logger = logging.getLogger(__name__)
cookie_key_name = {
"path": "Path",
"expires": "Expires",
"domain": "Domain",
"is_http_only": "HttpOnly",
"is_secure": "Secure",
}
def randomString(string_length=10):
    """Generate a random string of fixed length"""
    return "".join(
        random.choice(string.ascii_lowercase) for _ in range(string_length)
    )
class AuthorizationOracle(abc.ABC):
    """Abstract class for an authorization oracle which decides if a given request or response is authenticated."""

    @abc.abstractmethod
    def is_unauthorized_request(self, flow: mitmproxy.http.HTTPFlow) -> bool:
        """Return True if the flow's request lacks valid authentication."""
        pass

    @abc.abstractmethod
    def is_unauthorized_response(self, flow: mitmproxy.http.HTTPFlow) -> bool:
        """Return True if the flow's response shows the session is not authenticated."""
        pass
class SeleniumAddon:
    """This Addon can be used in combination with web application scanners in order to help them to authenticate
    against a web application.

    Since the authentication is highly dependent on the web application, this add-on includes the abstract method
    *login*. In order to use the add-on, a class for the web application inheriting from SeleniumAddon needs to be
    created. This class needs to include the concrete selenium actions necessary to authenticate against the web
    application. In addition, an authentication oracle which inherits from AuthorizationOracle should be created.
    """

    def __init__(self, fltr: str, domain: str, auth_oracle: AuthorizationOracle):
        # fltr: mitmproxy filter expression restricting which flows are handled.
        self.filter = flowfilter.parse(fltr)
        self.auth_oracle = auth_oracle
        # NOTE(review): `domain` is stored but never read in this class — presumably
        # used by subclasses' login() implementations; verify before removing.
        self.domain = domain
        self.browser = None
        # When True, the next matching response gets Set-Cookie headers injected.
        self.set_cookies = False
        options = webdriver.FirefoxOptions()
        options.headless = True
        profile = webdriver.FirefoxProfile()
        # network.proxy.type = 0: presumably so the selenium browser bypasses the
        # proxy under test — confirm against deployment setup.
        profile.set_preference("network.proxy.type", 0)
        self.browser = webdriver.Firefox(firefox_profile=profile, options=options)
        # Cookies captured by the most recent login(), as selenium-style cookie dicts.
        self.cookies: list[dict[str, str]] = []

    def _login(self, flow):
        # Delegate to the subclass, then carry the fresh cookies into the current flow.
        self.cookies = self.login(flow)
        self.browser.get("about:blank")
        self._set_request_cookies(flow)
        self.set_cookies = True

    def request(self, flow: mitmproxy.http.HTTPFlow):
        """Re-authenticate before forwarding a request the oracle deems unauthorized."""
        if flow.request.is_replay:
            logger.warning("Caught replayed request: " + str(flow))
        if (
            not self.filter or self.filter(flow)
        ) and self.auth_oracle.is_unauthorized_request(flow):
            logger.debug("unauthorized request detected, perform login")
            self._login(flow)

    # has to be concurrent because replay.client is blocking and replayed flows
    # will also call response
    @concurrent
    def response(self, flow: mitmproxy.http.HTTPFlow):
        """On an unauthorized response: log in, replay the flow, and substitute the replayed response."""
        if flow.response and (self.filter is None or self.filter(flow)):
            if self.auth_oracle.is_unauthorized_response(flow):
                self._login(flow)
                new_flow = flow.copy()
                if master and hasattr(master, "commands"):
                    # cast necessary for mypy
                    cast(Any, master).commands.call("replay.client", [new_flow])
                    # Poll (up to ~10s) for the replayed response to arrive.
                    count = 0
                    while new_flow.response is None and count < 10:
                        logger.error("waiting since " + str(count) + " ...")
                        count = count + 1
                        time.sleep(1)
                    if new_flow.response:
                        flow.response = new_flow.response
                else:
                    logger.warning(
                        "Could not call 'replay.client' command since master was not initialized yet."
                    )
            if self.set_cookies and flow.response:
                logger.debug("set set-cookie header for response")
                self._set_set_cookie_headers(flow)
                self.set_cookies = False

    def done(self):
        """Shut down the selenium browser."""
        self.browser.close()

    def _set_set_cookie_headers(self, flow: mitmproxy.http.HTTPFlow):
        # NOTE(review): the assignment inside the loop overwrites the header each
        # iteration, so only the last cookie survives — confirm whether
        # headers.add/set_all was intended for multiple cookies.
        if flow.response and self.cookies:
            for cookie in self.cookies:
                parts = [f"{cookie['name']}={cookie['value']}"]
                for k, v in cookie_key_name.items():
                    if k in cookie and isinstance(cookie[k], str):
                        parts.append(f"{v}={cookie[k]}")
                    elif k in cookie and isinstance(cookie[k], bool) and cookie[k]:
                        parts.append(cookie[k])
                encoded_c = "; ".join(parts)
                flow.response.headers["set-cookie"] = encoded_c

    def _set_request_cookies(self, flow: mitmproxy.http.HTTPFlow):
        # Serialize the captured cookies into a single Cookie request header.
        if self.cookies:
            cookies = "; ".join(
                map(lambda c: f"{c['name']}={c['value']}", self.cookies)
            )
            flow.request.headers["cookie"] = cookies

    @abc.abstractmethod
    def login(self, flow: mitmproxy.http.HTTPFlow) -> list[dict[str, str]]:
        """Perform the application-specific selenium login and return the session cookies."""
        pass
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/contrib/webscanner_helper/watchdog.py | examples/contrib/webscanner_helper/watchdog.py | import logging
import pathlib
import time
from datetime import datetime
import mitmproxy.connections
import mitmproxy.http
from mitmproxy.addons.export import curl_command
from mitmproxy.addons.export import raw
from mitmproxy.exceptions import HttpSyntaxException
logger = logging.getLogger(__name__)
class WatchdogAddon:
    """The Watchdog Add-on can be used in combination with web application scanners in order to check if the device
    under test responds correctly to the scanner's requests.

    If the Watchdog sees that the DUT is no longer responding correctly, a multiprocessing event is set.
    This information can be used to restart the device under test if necessary.
    """

    def __init__(self, event, outdir: pathlib.Path, timeout=None):
        """Initializes the Watchdog.

        Args:
            event: multiprocessing.Event that will be set if the watchdog is triggered.
            outdir: path to a directory in which the triggering requests will be saved (curl and raw).
            timeout: float that specifies the timeout for the server connection
        """
        self.error_event = event
        self.flow_dir = outdir
        if self.flow_dir.exists() and not self.flow_dir.is_dir():
            raise RuntimeError("Watchdog output path must be a directory.")
        elif not self.flow_dir.exists():
            self.flow_dir.mkdir(parents=True)
        # Time of the last trigger; used to suppress bursts within `timeout` seconds.
        self.last_trigger: None | float = None
        self.timeout: None | float = timeout

    def serverconnect(self, conn: mitmproxy.connections.ServerConnection):
        if self.timeout is not None:
            conn.settimeout(self.timeout)

    @classmethod
    def not_in_timeout(cls, last_triggered, timeout):
        """Checks if current error lies not in timeout after last trigger (potential reset of connection)."""
        return (
            last_triggered is None
            or timeout is None
            or (time.time() - last_triggered > timeout)
        )

    def error(self, flow):
        """Checks if the watchdog will be triggered.

        Only triggers watchdog for timeouts after last reset and if flow.error is set (shows that error is a server
        error). Ignores HttpSyntaxException Errors since this can be triggered on purpose by web application scanner.

        Args:
            flow: mitmproxy.http.flow
        """
        if (
            self.not_in_timeout(self.last_trigger, self.timeout)
            and flow.error is not None
            and not isinstance(flow.error, HttpSyntaxException)
        ):
            self.last_trigger = time.time()
            logger.error(f"Watchdog triggered! Cause: {flow}")
            self.error_event.set()

            # save the request which might have caused the problem
            if flow.request:
                # Fix: use a single timestamp for both dumps so the .curl and
                # .raw files of one trigger share the same base name (previously
                # utcnow() was evaluated twice, producing unpaired names).
                stamp = datetime.utcnow().isoformat()
                with (self.flow_dir / f"{stamp}.curl").open("w") as f:
                    f.write(curl_command(flow))
                with (self.flow_dir / f"{stamp}.raw").open("wb") as f:
                    f.write(raw(flow))
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/contrib/webscanner_helper/mapping.py | examples/contrib/webscanner_helper/mapping.py | import copy
import logging
from bs4 import BeautifulSoup
from examples.contrib.webscanner_helper.urldict import URLDict
from mitmproxy.http import HTTPFlow
NO_CONTENT = object()
class MappingAddonConfig:
    # Parser backend handed to BeautifulSoup for parsing and re-serializing HTML.
    HTML_PARSER = "html.parser"
class MappingAddon:
    """The mapping add-on can be used in combination with web application scanners to reduce their false positives.

    Many web application scanners produce false positives caused by dynamically changing content of web applications
    such as the current time or current measurements. When testing for injection vulnerabilities, web application
    scanners are tricked into thinking they changed the content with the injected payload. In reality, the content of
    the web application changed notwithstanding the scanner's input. When the mapping add-on is used to map the content
    to a fixed value, these false positives can be avoided.
    """

    OPT_MAPPING_FILE = "mapping_file"
    """File where urls and css selector to mapped content is stored.

    Elements will be replaced with the content given in this file. If the content is none it will be set to the first
    seen value.

    Example:

        {
            "http://10.10.10.10": {
                "body": "My Text"
            },
            "URL": {
                "css selector": "Replace with this"
            }
        }
    """

    OPT_MAP_PERSISTENT = "map_persistent"
    """Whether to store all new content in the configuration file."""

    def __init__(self, filename: str, persistent: bool = False) -> None:
        """Initializes the mapping add-on

        Args:
            filename: str that provides the name of the file in which the urls and css selectors to mapped content is
                stored.
            persistent: bool that indicates whether to store all new content in the configuration file.

        Example:
            The file in which the mapping config is given should be in the following format:

            {
                "http://10.10.10.10": {
                    "body": "My Text"
                },
                "<URL>": {
                    "<css selector>": "Replace with this"
                }
            }
        """
        self.filename = filename
        self.persistent = persistent
        self.logger = logging.getLogger(self.__class__.__name__)
        with open(filename) as f:
            self.mapping_templates = URLDict.load(f)

    def load(self, loader):
        """Register the add-on's options with mitmproxy."""
        loader.add_option(
            self.OPT_MAPPING_FILE,
            str,
            "",
            "File where replacement configuration is stored.",
        )
        loader.add_option(
            self.OPT_MAP_PERSISTENT,
            bool,
            False,
            "Whether to store all new content in the configuration file.",
        )

    def configure(self, updated):
        """Reload the mapping templates when either option changes.

        NOTE(review): this indexes `updated` like a dict (updated[...]), while
        mitmproxy's configure hook normally receives a *set* of changed option
        names (values would come from ctx.options) — verify against the
        mitmproxy version this targets.
        """
        if self.OPT_MAPPING_FILE in updated:
            self.filename = updated[self.OPT_MAPPING_FILE]
            with open(self.filename) as f:
                self.mapping_templates = URLDict.load(f)
        if self.OPT_MAP_PERSISTENT in updated:
            self.persistent = updated[self.OPT_MAP_PERSISTENT]

    def replace(
        self, soup: BeautifulSoup, css_sel: str, replace: BeautifulSoup
    ) -> None:
        """Replaces the content of soup that matches the css selector with the given replace content."""
        for content in soup.select(css_sel):
            self.logger.debug(f'replace "{content}" with "{replace}"')
            # copy() so each match gets its own node; re-inserting one node would move it.
            content.replace_with(copy.copy(replace))

    def apply_template(
        self, soup: BeautifulSoup, template: dict[str, BeautifulSoup]
    ) -> None:
        """Applies the given mapping template to the given soup."""
        for css_sel, replace in template.items():
            mapped = soup.select(css_sel)
            if not mapped:
                self.logger.warning(
                    f'Could not find "{css_sel}", can not freeze anything.'
                )
            else:
                self.replace(
                    soup,
                    css_sel,
                    BeautifulSoup(replace, features=MappingAddonConfig.HTML_PARSER),
                )

    def response(self, flow: HTTPFlow) -> None:
        """If a response is received, check if we should replace some content."""
        try:
            # URLDict lookup raises KeyError when no template matches this flow.
            templates = self.mapping_templates[flow]
            res = flow.response
            if res is not None:
                encoding = res.headers.get("content-encoding", "utf-8")
                content_type = res.headers.get("content-type", "text/html")
                if "text/html" in content_type and encoding == "utf-8":
                    content = BeautifulSoup(res.content, MappingAddonConfig.HTML_PARSER)
                    for template in templates:
                        self.apply_template(content, template)
                    res.content = content.encode(encoding)
                else:
                    self.logger.warning(
                        f"Unsupported content type '{content_type}' or content encoding '{encoding}'"
                    )
        except KeyError:
            pass

    def done(self) -> None:
        """Dumps all new content into the configuration file if self.persistent is set."""
        if self.persistent:
            # make sure that all items are strings and not soups.
            def value_dumper(value):
                store = {}
                if value is None:
                    return "None"
                try:
                    for css_sel, soup in value.items():
                        store[css_sel] = str(soup)
                except Exception:
                    raise RuntimeError(value)
                return store

            with open(self.filename, "w") as f:
                self.mapping_templates.dump(f, value_dumper)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/contrib/webscanner_helper/urlinjection.py | examples/contrib/webscanner_helper/urlinjection.py | import abc
import html
import json
import logging
from mitmproxy import flowfilter
from mitmproxy.http import HTTPFlow
logger = logging.getLogger(__name__)
class InjectionGenerator:
    """Abstract class for a generator of the injection content in order to inject the URL index."""

    # Encoding used for all generated payloads.
    ENCODING = "UTF8"

    @abc.abstractmethod
    def inject(self, index, flow: HTTPFlow):
        """Injects the given URL index into the given flow."""
class HTMLInjection(InjectionGenerator):
    """Injects the URL index either by creating a new HTML page or by appending it to an existing page."""

    def __init__(self, insert: bool = False):
        """Initializes the HTMLInjection.

        Args:
            insert: boolean to decide whether to insert the URL index to an existing page (True) or to create a new
                page containing the URL index.
        """
        self.insert = insert

    @classmethod
    def _form_html(cls, url):
        """Render a POST form element targeting *url*."""
        return f'<form action="{url}" method="POST"></form>'

    @classmethod
    def _link_html(cls, url):
        """Render an anchor element linking to *url*."""
        return f'<a href="{url}">link to {url}</a>'

    @classmethod
    def index_html(cls, index):
        """Render the whole URL index as a sequence of forms (POST) and links (GET)."""
        snippets = []
        for scheme_netloc, paths in index.items():
            for path, methods in paths.items():
                url = scheme_netloc + path
                if "POST" in methods:
                    snippets.append(cls._form_html(url))
                if "GET" in methods:
                    snippets.append(cls._link_html(url))
        return "</ br>".join(snippets)

    @classmethod
    def landing_page(cls, index):
        """Render a complete stand-alone HTML page containing the URL index."""
        body = cls.index_html(index)
        return f'<head><meta charset="UTF-8"></head><body>{body}</body>'

    def inject(self, index, flow: HTTPFlow):
        if flow.response is None:
            return
        if self.insert:
            # Append the index to the existing page, inside </body> when present.
            content = flow.response.content.decode(self.ENCODING, "backslashreplace")
            if "</body>" in content:
                content = content.replace(
                    "</body>", self.index_html(index) + "</body>"
                )
            else:
                content += self.index_html(index)
            flow.response.content = content.encode(self.ENCODING)
        elif flow.response.status_code != 404:
            # Never overwrite a real page when not inserting.
            logger.warning(
                f"URL '{flow.request.url}' didn't return 404 status, "
                f"index page would overwrite valid page."
            )
        else:
            flow.response.content = self.landing_page(index).encode(self.ENCODING)
class RobotsInjection(InjectionGenerator):
    """Injects the URL index by creating a new robots.txt including the URLs."""

    def __init__(self, directive="Allow"):
        # robots.txt directive to emit for each path ("Allow" or "Disallow").
        self.directive = directive

    @classmethod
    def robots_txt(cls, index, directive="Allow"):
        """Renders a robots.txt body listing every indexed path under one directive."""
        entries = [
            f"{directive}: {path}"
            for paths in index.values()
            for path in paths
        ]
        return "\n".join(["User-agent: *", *entries])

    def inject(self, index, flow: HTTPFlow):
        """Replaces a 404 response body with the generated robots.txt."""
        if flow.response is None:
            return
        if flow.response.status_code != 404:
            logger.warning(
                f"URL '{flow.request.url}' didn't return 404 status, "
                f"index page would overwrite valid page."
            )
        else:
            body = self.robots_txt(index, self.directive)
            flow.response.content = body.encode(self.ENCODING)
class SitemapInjection(InjectionGenerator):
    """Injects the URL index by creating a new sitemap including the URLs."""

    @classmethod
    def sitemap(cls, index):
        """Renders the URL index as an XML sitemap document."""
        header = '<?xml version="1.0" encoding="UTF-8"?><urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">'
        # One <url> entry per indexed path; URLs are escaped for XML safety.
        entries = [
            f"<url><loc>{html.escape(scheme_netloc + path)}</loc></url>"
            for scheme_netloc, paths in index.items()
            for path in paths
        ]
        return "\n".join([header, *entries, "</urlset>"])

    def inject(self, index, flow: HTTPFlow):
        """Replaces a 404 response body with the generated sitemap."""
        if flow.response is None:
            return
        if flow.response.status_code != 404:
            logger.warning(
                f"URL '{flow.request.url}' didn't return 404 status, "
                f"index page would overwrite valid page."
            )
        else:
            flow.response.content = self.sitemap(index).encode(self.ENCODING)
class UrlInjectionAddon:
    """The UrlInjection add-on can be used in combination with web application scanners to improve their crawling
    performance.

    The given URLs will be injected into the web application. With this, web application scanners can find pages to
    crawl much easier. Depending on the injection generator, the URLs will be injected at different places of the
    web application. It is possible to create a landing page which includes the URLs (HTMLInjection()), to inject the
    URLs into an existing page (HTMLInjection(insert=True)), to create a robots.txt containing the URLs
    (RobotsInjection()) or to create a sitemap.xml which includes the URLs (SitemapInjection()).

    It is necessary that the web application scanner can find the newly created page containing the URL index. For
    example, the newly created page can be set as starting point for the web application scanner.

    The URL index needed for the injection can be generated by the UrlIndex add-on.
    """

    def __init__(
        self, flt: str, url_index_file: str, injection_gen: InjectionGenerator
    ):
        """Initializes the UrlInjection add-on.

        Args:
            flt: mitmproxy filter deciding on which flows the URLs will be injected (str).
            url_index_file: Path to the file which includes the URL index in JSON format (e.g. generated by the
                UrlIndexAddon), given as str.
            injection_gen: InjectionGenerator that should be used to inject the URLs into the web application.
        """
        # Unique add-on name so several differently-configured instances can coexist.
        self.name = f"{self.__class__.__name__}-{injection_gen.__class__.__name__}-{self.__hash__()}"
        self.flt = flowfilter.parse(flt)
        self.injection_gen = injection_gen
        # The URL index is loaded once at construction time.
        with open(url_index_file) as f:
            self.url_store = json.load(f)

    def response(self, flow: HTTPFlow):
        """Checks if the response matches the filter and as such should be injected.

        Injects the URL index if appropriate.
        """
        if flow.response is not None:
            if self.flt is not None and self.flt(flow):
                self.injection_gen.inject(self.url_store, flow)
                # Force a successful HTML response so scanners parse the injected content.
                flow.response.status_code = 200
                flow.response.headers["content-type"] = "text/html"
                logger.debug(
                    f"Set status code to 200 and set content to logged "
                    f"urls. Method: {self.injection_gen}"
                )
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/contrib/webscanner_helper/test_urldict.py | examples/contrib/webscanner_helper/test_urldict.py | from examples.contrib.webscanner_helper.urldict import URLDict
from mitmproxy.test import tflow
from mitmproxy.test import tutils
# Test fixture data: a URL filter key mapped to replacement content.
url = "http://10.10.10.10"
new_content_body = "New Body"
new_content_title = "New Title"
content = f'{{"body": "{new_content_body}", "title": "{new_content_title}"}}'
# "i~nvalid" is not a parseable mitmproxy filter expression, so loading it must fail.
url_error = "i~nvalid"
input_file_content = f'{{"{url}": {content}}}'
input_file_content_error = f'{{"{url_error}": {content}}}'
class TestUrlDict:
    """Tests for URLDict: serialization round trips and filter-based lookups."""

    def test_urldict_empty(self):
        # A fresh URLDict serializes to an empty JSON object.
        urldict = URLDict()
        dump = urldict.dumps()
        assert dump == "{}"

    def test_urldict_loads(self):
        # Round trip: loads() followed by dumps() preserves the JSON text.
        urldict = URLDict.loads(input_file_content)
        dump = urldict.dumps()
        assert dump == input_file_content

    def test_urldict_set_error(self, tmpdir):
        # Keys must be valid mitmproxy filter expressions; "i~nvalid" is not.
        tmpfile = tmpdir.join("tmpfile")
        with open(tmpfile, "w") as tfile:
            tfile.write(input_file_content_error)
        with open(tmpfile) as tfile:
            try:
                URLDict.load(tfile)
            except ValueError:
                assert True
            else:
                assert False

    def test_urldict_get(self, tmpdir):
        # Lookup with a matching flow returns the stored values via both
        # __getitem__ and get(); a non-flow key raises KeyError.
        tmpfile = tmpdir.join("tmpfile")
        with open(tmpfile, "w") as tfile:
            tfile.write(input_file_content)
        with open(tmpfile) as tfile:
            urldict = URLDict.load(tfile)
        f = tflow.tflow(resp=tutils.tresp())
        f.request.url = url
        selection = urldict[f]
        assert "body" in selection[0]
        assert new_content_body in selection[0]["body"]
        assert "title" in selection[0]
        assert new_content_title in selection[0]["title"]
        selection_get = urldict.get(f)
        assert "body" in selection_get[0]
        assert new_content_body in selection_get[0]["body"]
        assert "title" in selection_get[0]
        assert new_content_title in selection_get[0]["title"]
        try:
            urldict["body"]
        except KeyError:
            assert True
        else:
            assert False
        # get() falls back to the provided default for non-matching keys.
        assert urldict.get("body", default="default") == "default"

    def test_urldict_dumps(self, tmpdir):
        # load() from a file followed by dumps() reproduces the original text.
        tmpfile = tmpdir.join("tmpfile")
        with open(tmpfile, "w") as tfile:
            tfile.write(input_file_content)
        with open(tmpfile) as tfile:
            urldict = URLDict.load(tfile)
        dump = urldict.dumps()
        assert dump == input_file_content

    def test_urldict_dump(self, tmpdir):
        # dump() writes the same JSON text to a file object.
        tmpfile = tmpdir.join("tmpfile")
        outfile = tmpdir.join("outfile")
        with open(tmpfile, "w") as tfile:
            tfile.write(input_file_content)
        with open(tmpfile) as tfile:
            urldict = URLDict.load(tfile)
        with open(outfile, "w") as ofile:
            urldict.dump(ofile)
        with open(outfile) as ofile:
            output = ofile.read()
        assert output == input_file_content
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/contrib/webscanner_helper/test_mapping.py | examples/contrib/webscanner_helper/test_mapping.py | from collections.abc import Callable
from typing import TextIO
from unittest import mock
from unittest.mock import MagicMock
from examples.contrib.webscanner_helper.mapping import MappingAddon
from examples.contrib.webscanner_helper.mapping import MappingAddonConfig
from mitmproxy.test import tflow
from mitmproxy.test import tutils
class TestConfig:
    """Sanity check for the MappingAddon configuration constants."""

    def test_config(self):
        # The addon must use Python's builtin HTML parser for BeautifulSoup.
        assert MappingAddonConfig.HTML_PARSER == "html.parser"
# Test fixture data: a filter URL mapped to a replacement for the <body> element.
url = "http://10.10.10.10"
new_content = "My Text"
mapping_content = f'{{"{url}": {{"body": "{new_content}"}}}}'
class TestMappingAddon:
    """Tests for the MappingAddon: loading, configuration, response rewriting
    and persistence."""

    def test_init(self, tmpdir):
        tmpfile = tmpdir.join("tmpfile")
        with open(tmpfile, "w") as tfile:
            tfile.write(mapping_content)
        mapping = MappingAddon(tmpfile)
        assert "My Text" in str(mapping.mapping_templates._dump())

    def test_load(self, tmpdir):
        # load() must register both addon options with the option loader.
        tmpfile = tmpdir.join("tmpfile")
        with open(tmpfile, "w") as tfile:
            tfile.write(mapping_content)
        mapping = MappingAddon(tmpfile)
        loader = MagicMock()
        mapping.load(loader)
        assert "mapping_file" in str(loader.add_option.call_args_list)
        assert "map_persistent" in str(loader.add_option.call_args_list)

    def test_configure(self, tmpdir):
        # configure() re-reads the mapping file and updates filename/persistence.
        tmpfile = tmpdir.join("tmpfile")
        with open(tmpfile, "w") as tfile:
            tfile.write(mapping_content)
        mapping = MappingAddon(tmpfile)
        new_filename = "My new filename"
        updated = {
            str(mapping.OPT_MAPPING_FILE): new_filename,
            str(mapping.OPT_MAP_PERSISTENT): True,
        }
        open_mock = mock.mock_open(read_data="{}")
        with mock.patch("builtins.open", open_mock):
            mapping.configure(updated)
        assert new_filename in str(open_mock.mock_calls)
        assert mapping.filename == new_filename
        assert mapping.persistent

    def test_response_filtered(self, tmpdir):
        # A response whose URL does not match any filter is left untouched.
        tmpfile = tmpdir.join("tmpfile")
        with open(tmpfile, "w") as tfile:
            tfile.write(mapping_content)
        mapping = MappingAddon(tmpfile)
        f = tflow.tflow(resp=tutils.tresp())
        test_content = b"Test"
        f.response.content = test_content
        mapping.response(f)
        assert f.response.content == test_content

    def test_response(self, tmpdir):
        # A matching response gets its <body> element replaced.
        tmpfile = tmpdir.join("tmpfile")
        with open(tmpfile, "w") as tfile:
            tfile.write(mapping_content)
        mapping = MappingAddon(tmpfile)
        f = tflow.tflow(resp=tutils.tresp())
        test_content = b"<body> Test </body>"
        f.response.content = test_content
        f.request.url = url
        mapping.response(f)
        assert f.response.content.decode("utf-8") == new_content

    def test_response_content_type(self, tmpdir):
        # Unsupported content types are not rewritten.
        tmpfile = tmpdir.join("tmpfile")
        with open(tmpfile, "w") as tfile:
            tfile.write(mapping_content)
        mapping = MappingAddon(tmpfile)
        f = tflow.tflow(resp=tutils.tresp())
        test_content = b"<body> Test </body>"
        f.response.content = test_content
        f.request.url = url
        f.response.headers.add("content-type", "content-type")
        mapping.response(f)
        assert f.response.content == test_content

    def test_response_not_existing(self, tmpdir):
        # A page without the mapped element stays unchanged.
        tmpfile = tmpdir.join("tmpfile")
        with open(tmpfile, "w") as tfile:
            tfile.write(mapping_content)
        mapping = MappingAddon(tmpfile)
        f = tflow.tflow(resp=tutils.tresp())
        test_content = b"<title> Test </title>"
        f.response.content = test_content
        f.request.url = url
        mapping.response(f)
        assert f.response.content == test_content

    def test_persistance_false(self, tmpdir):
        # Without persistence, done() must not touch the mapping file.
        tmpfile = tmpdir.join("tmpfile")
        with open(tmpfile, "w") as tfile:
            tfile.write(mapping_content)
        mapping = MappingAddon(tmpfile)
        open_mock = mock.mock_open(read_data="{}")
        with mock.patch("builtins.open", open_mock):
            mapping.done()
        assert len(open_mock.mock_calls) == 0

    def test_persistance_true(self, tmpdir):
        # With persistence, done() writes the mappings back to the file.
        tmpfile = tmpdir.join("tmpfile")
        with open(tmpfile, "w") as tfile:
            tfile.write(mapping_content)
        mapping = MappingAddon(tmpfile, persistent=True)
        open_mock = mock.mock_open(read_data="{}")
        with mock.patch("builtins.open", open_mock):
            mapping.done()
        with open(tmpfile) as tfile:
            results = tfile.read()
        assert len(open_mock.mock_calls) != 0
        assert results == mapping_content

    def test_persistance_true_add_content(self, tmpdir):
        # New content seen during response() survives the persistence round trip.
        tmpfile = tmpdir.join("tmpfile")
        with open(tmpfile, "w") as tfile:
            tfile.write(mapping_content)
        mapping = MappingAddon(tmpfile, persistent=True)
        f = tflow.tflow(resp=tutils.tresp())
        test_content = b"<title> Test </title>"
        f.response.content = test_content
        f.request.url = url
        mapping.response(f)
        mapping.done()
        with open(tmpfile) as tfile:
            results = tfile.read()
        assert mapping_content in results

    def mock_dump(self, f: TextIO, value_dumper: Callable):
        # Replacement for URLDict.dump used to exercise value_dumper directly.
        assert value_dumper(None) == "None"
        try:
            value_dumper("Test")
        except RuntimeError:
            assert True
        else:
            assert False

    # Fixed: parameter was misspelled "selfself", and the patch target pointed
    # at the stale "examples.complex" package instead of "examples.contrib"
    # (the import path actually used by this test module), which would raise
    # ModuleNotFoundError when patching.
    def test_dump(self, tmpdir):
        tmpfile = tmpdir.join("tmpfile")
        with open(tmpfile, "w") as tfile:
            tfile.write("{}")
        mapping = MappingAddon(tmpfile, persistent=True)
        with mock.patch(
            "examples.contrib.webscanner_helper.urldict.URLDict.dump",
            self.mock_dump,
        ):
            mapping.done()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/contrib/webscanner_helper/urlindex.py | examples/contrib/webscanner_helper/urlindex.py | import abc
import datetime
import json
import logging
from pathlib import Path
from mitmproxy import flowfilter
from mitmproxy.http import HTTPFlow
logger = logging.getLogger(__name__)
class UrlIndexWriter(abc.ABC):
    """Abstract add-on to write seen URLs.

    For example, these URLs can be injected in a web application to improve the crawling of web application scanners.
    The injection can be done using the URLInjection add-on.
    """

    def __init__(self, filename: Path):
        """Initializes the UrlIndexWriter.

        Args:
            filename: Path to file to which the URL index will be written.
        """
        self.filepath = filename

    @abc.abstractmethod
    def load(self):
        """Load existing URL index."""

    @abc.abstractmethod
    def add_url(self, flow: HTTPFlow):
        """Add new URL to URL index."""

    @abc.abstractmethod
    def save(self):
        """Persist the URL index to self.filepath."""
        pass
class SetEncoder(json.JSONEncoder):
    """JSON encoder that serializes sets as (unordered) lists."""

    def default(self, obj):
        if not isinstance(obj, set):
            # Defer to the base class, which raises TypeError for unknown types.
            return super().default(obj)
        return list(obj)
class JSONUrlIndexWriter(UrlIndexWriter):
    """Writes seen URLs as JSON."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Mapping: "scheme://host:port" -> path -> method -> set of status codes.
        self.host_urls = {}

    def load(self):
        """Loads an existing URL index, converting status-code lists back to sets.

        Fixed: the previous implementation reassigned the whole path entry once
        per method inside the inner loop, silently dropping every method except
        the last one when a path had been seen with multiple HTTP methods.
        """
        if self.filepath.exists():
            with self.filepath.open("r") as f:
                self.host_urls = json.load(f)
            for host, paths in self.host_urls.items():
                for path, methods in paths.items():
                    # JSON has no set type; codes were serialized as lists.
                    paths[path] = {
                        method: set(codes) for method, codes in methods.items()
                    }

    def add_url(self, flow: HTTPFlow):
        """Records scheme/host/port, path, method and status code of the flow."""
        req = flow.request
        res = flow.response
        if req is not None and res is not None:
            urls = self.host_urls.setdefault(
                f"{req.scheme}://{req.host}:{req.port}", dict()
            )
            methods = urls.setdefault(req.path, {})
            codes = methods.setdefault(req.method, set())
            codes.add(res.status_code)

    def save(self):
        """Serializes the URL index as JSON (sets encoded as lists via SetEncoder)."""
        with self.filepath.open("w") as f:
            json.dump(self.host_urls, f, cls=SetEncoder)
class TextUrlIndexWriter(UrlIndexWriter):
    """Writes seen URLs as text."""

    def load(self):
        """Nothing to load; the text format is append-only."""
        pass

    def add_url(self, flow: HTTPFlow):
        """Appends one log line with timestamp, status, method and URL."""
        req = flow.request
        res = flow.response
        if req is None or res is None:
            return
        timestamp = datetime.datetime.utcnow().isoformat()
        line = (
            f"{timestamp} STATUS: {res.status_code} METHOD: "
            f"{req.method} URL:{req.url}\n"
        )
        with self.filepath.open("a+") as f:
            f.write(line)

    def save(self):
        """Nothing to flush; every URL is written immediately on add_url()."""
        pass
# Registry of supported index formats; keys are the accepted (lower-cased)
# values for UrlIndexAddon's index_format argument.
WRITER: dict[str, type[UrlIndexWriter]] = {
    "json": JSONUrlIndexWriter,
    "text": TextUrlIndexWriter,
}
def filter_404(flow) -> bool:
    """Default index filter: keep every flow whose response is not a 404."""
    status = flow.response.status_code
    return status != 404
class UrlIndexAddon:
    """Add-on to write seen URLs, either as JSON or as text.

    For example, these URLs can be injected in a web application to improve the crawling of web application scanners.
    The injection can be done using the URLInjection add-on.
    """

    index_filter: str | flowfilter.TFilter | None
    writer: UrlIndexWriter

    OPT_FILEPATH = "URLINDEX_FILEPATH"
    OPT_APPEND = "URLINDEX_APPEND"
    OPT_INDEX_FILTER = "URLINDEX_FILTER"

    def __init__(
        self,
        file_path: str | Path,
        append: bool = True,
        index_filter: str | flowfilter.TFilter = filter_404,
        index_format: str = "json",
    ):
        """Initializes the urlindex add-on.

        Args:
            file_path: Path to file to which the URL index will be written. Can either be given as str or Path.
            append: Bool to decide whether to append new URLs to the given file (as opposed to overwrite the contents
                of the file)
            index_filter: A mitmproxy filter with which the seen URLs will be filtered before being written. Can
                either be given as str or as flowfilter.TFilter
            index_format: The format of the URL index, can either be "json" or "text".

        Raises:
            ValueError: if the filter expression cannot be parsed or the format is unknown.
        """
        if isinstance(index_filter, str):
            self.index_filter = flowfilter.parse(index_filter)
            if self.index_filter is None:
                raise ValueError("Invalid filter expression.")
        else:
            self.index_filter = index_filter
        file_path = Path(file_path)
        try:
            self.writer = WRITER[index_format.lower()](file_path)
        except KeyError:
            raise ValueError(f"Format '{index_format}' is not supported.")
        # In overwrite mode the stale index file is removed before load().
        if not append and file_path.exists():
            file_path.unlink()
        self.writer.load()

    def response(self, flow: HTTPFlow):
        """Checks if the response should be included in the URL index based on the index_filter and adds it to the
        URL index if appropriate.

        Raises:
            ValueError: if index_filter was reset to a string or None after construction.
        """
        if isinstance(self.index_filter, str) or self.index_filter is None:
            raise ValueError("Invalid filter expression.")
        else:
            if self.index_filter(flow):
                self.writer.add_url(flow)

    def done(self):
        """Writes the URL index."""
        self.writer.save()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/contrib/webscanner_helper/test_urlindex.py | examples/contrib/webscanner_helper/test_urlindex.py | import json
from json import JSONDecodeError
from pathlib import Path
from unittest import mock
from unittest.mock import patch
from examples.contrib.webscanner_helper.urlindex import filter_404
from examples.contrib.webscanner_helper.urlindex import JSONUrlIndexWriter
from examples.contrib.webscanner_helper.urlindex import SetEncoder
from examples.contrib.webscanner_helper.urlindex import TextUrlIndexWriter
from examples.contrib.webscanner_helper.urlindex import UrlIndexAddon
from examples.contrib.webscanner_helper.urlindex import UrlIndexWriter
from examples.contrib.webscanner_helper.urlindex import WRITER
from mitmproxy.test import tflow
from mitmproxy.test import tutils
class TestBaseClass:
    """Smoke test for the abstract UrlIndexWriter base class."""

    # Clearing __abstractmethods__ makes the abstract class instantiable so the
    # (empty) abstract method bodies can be executed for coverage.
    @patch.multiple(UrlIndexWriter, __abstractmethods__=set())
    def test_base_class(self, tmpdir):
        tmpfile = tmpdir.join("tmpfile")
        index_writer = UrlIndexWriter(tmpfile)
        index_writer.load()
        index_writer.add_url(tflow.tflow())
        index_writer.save()
class TestSetEncoder:
    """Tests for SetEncoder: sets become lists, everything else raises."""

    def test_set_encoder_set(self):
        test_set = {"foo", "bar", "42"}
        result = SetEncoder.default(SetEncoder(), test_set)
        # Sets are unordered, so only membership can be asserted.
        assert isinstance(result, list)
        assert "foo" in result
        assert "bar" in result
        assert "42" in result

    def test_set_encoder_str(self):
        # Non-set objects fall through to the base encoder, which raises TypeError.
        test_str = "test"
        try:
            SetEncoder.default(SetEncoder(), test_str)
        except TypeError:
            assert True
        else:
            assert False
class TestJSONUrlIndexWriter:
    """Tests for JSONUrlIndexWriter: load, add and save round trips."""

    def test_load(self, tmpdir):
        # Loading converts JSON status-code lists back into sets.
        tmpfile = tmpdir.join("tmpfile")
        with open(tmpfile, "w") as tfile:
            tfile.write(
                '{"http://example.com:80": {"/": {"GET": [301]}}, "http://www.example.com:80": {"/": {"GET": [302]}}}'
            )
        writer = JSONUrlIndexWriter(filename=tmpfile)
        writer.load()
        assert "http://example.com:80" in writer.host_urls
        assert "/" in writer.host_urls["http://example.com:80"]
        assert "GET" in writer.host_urls["http://example.com:80"]["/"]
        assert 301 in writer.host_urls["http://example.com:80"]["/"]["GET"]

    def test_load_empty(self, tmpdir):
        # An empty JSON object yields an empty index.
        tmpfile = tmpdir.join("tmpfile")
        with open(tmpfile, "w") as tfile:
            tfile.write("{}")
        writer = JSONUrlIndexWriter(filename=tmpfile)
        writer.load()
        assert len(writer.host_urls) == 0

    def test_load_nonexisting(self, tmpdir):
        # A missing file is tolerated and yields an empty index.
        tmpfile = tmpdir.join("tmpfile")
        writer = JSONUrlIndexWriter(filename=tmpfile)
        writer.load()
        assert len(writer.host_urls) == 0

    def test_add(self, tmpdir):
        # add_url() records host, path, method and status of a flow.
        tmpfile = tmpdir.join("tmpfile")
        writer = JSONUrlIndexWriter(filename=tmpfile)
        f = tflow.tflow(resp=tutils.tresp())
        url = f"{f.request.scheme}://{f.request.host}:{f.request.port}"
        writer.add_url(f)
        assert url in writer.host_urls
        assert f.request.path in writer.host_urls[url]

    def test_save(self, tmpdir):
        # save() produces valid JSON containing the recorded host.
        tmpfile = tmpdir.join("tmpfile")
        writer = JSONUrlIndexWriter(filename=tmpfile)
        f = tflow.tflow(resp=tutils.tresp())
        url = f"{f.request.scheme}://{f.request.host}:{f.request.port}"
        writer.add_url(f)
        writer.save()
        with open(tmpfile) as results:
            try:
                content = json.load(results)
            except JSONDecodeError:
                assert False
        assert url in content
class TestTestUrlIndexWriter:
    """Tests for TextUrlIndexWriter; load() and save() are no-ops for text."""

    def test_load(self, tmpdir):
        # load() is a no-op even when the file has content.
        tmpfile = tmpdir.join("tmpfile")
        with open(tmpfile, "w") as tfile:
            tfile.write(
                "2020-04-22T05:41:08.679231 STATUS: 200 METHOD: GET URL:http://example.com"
            )
        writer = TextUrlIndexWriter(filename=tmpfile)
        writer.load()
        assert True

    def test_load_empty(self, tmpdir):
        tmpfile = tmpdir.join("tmpfile")
        with open(tmpfile, "w") as tfile:
            tfile.write("{}")
        writer = TextUrlIndexWriter(filename=tmpfile)
        writer.load()
        assert True

    def test_load_nonexisting(self, tmpdir):
        # A missing file must not break load().
        tmpfile = tmpdir.join("tmpfile")
        writer = TextUrlIndexWriter(filename=tmpfile)
        writer.load()
        assert True

    def test_add(self, tmpdir):
        # add_url() immediately appends a log line containing URL, method and status.
        tmpfile = tmpdir.join("tmpfile")
        writer = TextUrlIndexWriter(filename=tmpfile)
        f = tflow.tflow(resp=tutils.tresp())
        url = f"{f.request.scheme}://{f.request.host}:{f.request.port}"
        method = f.request.method
        code = f.response.status_code
        writer.add_url(f)
        with open(tmpfile) as results:
            content = results.read()
        assert url in content
        assert method in content
        assert str(code) in content

    def test_save(self, tmpdir):
        # save() is a no-op; previously written lines remain intact.
        tmpfile = tmpdir.join("tmpfile")
        writer = TextUrlIndexWriter(filename=tmpfile)
        f = tflow.tflow(resp=tutils.tresp())
        url = f"{f.request.scheme}://{f.request.host}:{f.request.port}"
        method = f.request.method
        code = f.response.status_code
        writer.add_url(f)
        writer.save()
        with open(tmpfile) as results:
            content = results.read()
        assert url in content
        assert method in content
        assert str(code) in content
class TestWriter:
    """Sanity check for the WRITER format registry."""

    def test_writer_dict(self):
        assert "json" in WRITER
        assert isinstance(WRITER["json"], JSONUrlIndexWriter.__class__)
        assert "text" in WRITER
        assert isinstance(WRITER["text"], TextUrlIndexWriter.__class__)
class TestFilter:
    """Tests for filter_404: keeps non-404 flows, drops 404s."""

    def test_filer_true(self):
        f = tflow.tflow(resp=tutils.tresp())
        assert filter_404(f)

    def test_filter_false(self):
        f = tflow.tflow(resp=tutils.tresp())
        f.response.status_code = 404
        assert not filter_404(f)
class TestUrlIndexAddon:
    """Tests for UrlIndexAddon construction, response handling and teardown.

    Fixed: the mock.patch targets pointed at the stale "examples.complex"
    package; this module imports from "examples.contrib", so patching the old
    path would raise ModuleNotFoundError instead of patching the real class.
    """

    def test_init(self, tmpdir):
        tmpfile = tmpdir.join("tmpfile")
        UrlIndexAddon(tmpfile)

    def test_init_format(self, tmpdir):
        # Unknown index formats are rejected with ValueError.
        tmpfile = tmpdir.join("tmpfile")
        try:
            UrlIndexAddon(tmpfile, index_format="test")
        except ValueError:
            assert True
        else:
            assert False

    def test_init_filter(self, tmpdir):
        # Unparseable filter expressions are rejected with ValueError.
        tmpfile = tmpdir.join("tmpfile")
        try:
            UrlIndexAddon(tmpfile, index_filter="i~nvalid")
        except ValueError:
            assert True
        else:
            assert False

    def test_init_append(self, tmpdir):
        # With append=False a pre-existing index file is deleted on init.
        tmpfile = tmpdir.join("tmpfile")
        with open(tmpfile, "w") as tfile:
            tfile.write("")
        url_index = UrlIndexAddon(tmpfile, append=False)
        f = tflow.tflow(resp=tutils.tresp())
        with mock.patch(
            "examples.contrib.webscanner_helper.urlindex.JSONUrlIndexWriter.add_url"
        ):
            url_index.response(f)
        assert not Path(tmpfile).exists()

    def test_response(self, tmpdir):
        # A flow passing the filter is handed to the writer.
        tmpfile = tmpdir.join("tmpfile")
        url_index = UrlIndexAddon(tmpfile)
        f = tflow.tflow(resp=tutils.tresp())
        with mock.patch(
            "examples.contrib.webscanner_helper.urlindex.JSONUrlIndexWriter.add_url"
        ) as mock_add_url:
            url_index.response(f)
        mock_add_url.assert_called()

    def test_response_None(self, tmpdir):
        # A None index_filter makes response() raise ValueError.
        tmpfile = tmpdir.join("tmpfile")
        url_index = UrlIndexAddon(tmpfile)
        url_index.index_filter = None
        f = tflow.tflow(resp=tutils.tresp())
        try:
            url_index.response(f)
        except ValueError:
            assert True
        else:
            assert False

    def test_done(self, tmpdir):
        # done() delegates to the writer's save().
        tmpfile = tmpdir.join("tmpfile")
        url_index = UrlIndexAddon(tmpfile)
        with mock.patch(
            "examples.contrib.webscanner_helper.urlindex.JSONUrlIndexWriter.save"
        ) as mock_save:
            url_index.done()
        mock_save.assert_called()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/contrib/webscanner_helper/test_proxyauth_selenium.py | examples/contrib/webscanner_helper/test_proxyauth_selenium.py | from unittest import mock
from unittest.mock import MagicMock
import pytest
from examples.contrib.webscanner_helper.proxyauth_selenium import AuthorizationOracle
from examples.contrib.webscanner_helper.proxyauth_selenium import logger
from examples.contrib.webscanner_helper.proxyauth_selenium import randomString
from examples.contrib.webscanner_helper.proxyauth_selenium import SeleniumAddon
from mitmproxy.http import HTTPFlow
from mitmproxy.test import tflow
from mitmproxy.test import tutils
class TestRandomString:
    """Tests for randomString: default and explicit lengths."""

    def test_random_string(self):
        # Default length is 10 characters.
        res = randomString()
        assert isinstance(res, str)
        assert len(res) == 10
        # An explicit length is honored.
        res_5 = randomString(5)
        assert isinstance(res_5, str)
        assert len(res_5) == 5
class AuthenticationOracleTest(AuthorizationOracle):
    """Oracle stub that classifies every request and response as unauthorized,
    forcing the SeleniumAddon down its (re-)login path in the tests."""

    def is_unauthorized_request(self, flow: HTTPFlow) -> bool:
        return True

    def is_unauthorized_response(self, flow: HTTPFlow) -> bool:
        return True


# Shared oracle instance used when constructing the SeleniumAddon fixture.
oracle = AuthenticationOracleTest()
@pytest.fixture(scope="module", autouse=True)
def selenium_addon(request):
    """Module-scoped SeleniumAddon with a mocked browser.

    The real browser is replaced by a MagicMock so no WebDriver is launched;
    the finalizer still closes it for symmetry with production teardown.
    """
    addon = SeleniumAddon(
        fltr=r"~u http://example\.com/login\.php",
        domain=r"~d http://example\.com",
        auth_oracle=oracle,
    )
    browser = MagicMock()
    addon.browser = browser
    yield addon

    def fin():
        addon.browser.close()

    request.addfinalizer(fin)
class TestSeleniumAddon:
    """Tests for SeleniumAddon request/response handling.

    Fixed: the mock.patch targets pointed at the stale "examples.complex"
    package; this module imports from "examples.contrib", so patching the old
    path would raise ModuleNotFoundError instead of patching SeleniumAddon.login.
    """

    def test_request_replay(self, selenium_addon):
        # Replayed requests are skipped with a warning.
        f = tflow.tflow(resp=tutils.tresp())
        f.request.is_replay = True
        with mock.patch.object(logger, "warning") as mock_warning:
            selenium_addon.request(f)
        mock_warning.assert_called()

    def test_request(self, selenium_addon):
        # A matching login URL triggers cookie handling.
        f = tflow.tflow(resp=tutils.tresp())
        f.request.url = "http://example.com/login.php"
        selenium_addon.set_cookies = False
        assert not selenium_addon.set_cookies
        with mock.patch.object(logger, "debug") as mock_debug:
            selenium_addon.request(f)
            mock_debug.assert_called()
        assert selenium_addon.set_cookies

    def test_request_filtered(self, selenium_addon):
        # A non-matching URL leaves the addon state untouched.
        f = tflow.tflow(resp=tutils.tresp())
        selenium_addon.set_cookies = False
        assert not selenium_addon.set_cookies
        selenium_addon.request(f)
        assert not selenium_addon.set_cookies

    def test_request_cookies(self, selenium_addon):
        # Cookies returned by login() are applied to the request.
        f = tflow.tflow(resp=tutils.tresp())
        f.request.url = "http://example.com/login.php"
        selenium_addon.set_cookies = False
        assert not selenium_addon.set_cookies
        with mock.patch.object(logger, "debug") as mock_debug:
            with mock.patch(
                "examples.contrib.webscanner_helper.proxyauth_selenium.SeleniumAddon.login",
                return_value=[{"name": "cookie", "value": "test"}],
            ) as mock_login:
                selenium_addon.request(f)
                mock_debug.assert_called()
                assert selenium_addon.set_cookies
                mock_login.assert_called()

    def test_request_filter_None(self, selenium_addon):
        # Without a filter every request is handled.
        f = tflow.tflow(resp=tutils.tresp())
        fltr = selenium_addon.filter
        selenium_addon.filter = None
        assert not selenium_addon.filter
        selenium_addon.set_cookies = False
        assert not selenium_addon.set_cookies
        with mock.patch.object(logger, "debug") as mock_debug:
            selenium_addon.request(f)
            mock_debug.assert_called()
        selenium_addon.filter = fltr
        assert selenium_addon.set_cookies

    def test_response(self, selenium_addon):
        # An unauthorized response (per the oracle stub) triggers a re-login.
        f = tflow.tflow(resp=tutils.tresp())
        f.request.url = "http://example.com/login.php"
        selenium_addon.set_cookies = False
        with mock.patch(
            "examples.contrib.webscanner_helper.proxyauth_selenium.SeleniumAddon.login",
            return_value=[],
        ) as mock_login:
            selenium_addon.response(f)
            mock_login.assert_called()

    def test_response_cookies(self, selenium_addon):
        # Cookies returned by login() are applied to the response flow.
        f = tflow.tflow(resp=tutils.tresp())
        f.request.url = "http://example.com/login.php"
        selenium_addon.set_cookies = False
        with mock.patch(
            "examples.contrib.webscanner_helper.proxyauth_selenium.SeleniumAddon.login",
            return_value=[{"name": "cookie", "value": "test"}],
        ) as mock_login:
            selenium_addon.response(f)
            mock_login.assert_called()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/contrib/webscanner_helper/urldict.py | examples/contrib/webscanner_helper/urldict.py | import itertools
import json
from collections.abc import Callable
from collections.abc import Generator
from collections.abc import MutableMapping
from typing import Any
from typing import cast
from typing import TextIO
from mitmproxy import flowfilter
from mitmproxy.http import HTTPFlow
def f_id(x):
    """Identity function; the default value loader/dumper for URLDict."""
    return x
class URLDict(MutableMapping):
    """Data structure to store information using mitmproxy filters as keys.

    Lookups are done with a flow: every stored value whose filter matches the
    flow is returned.
    """

    def __init__(self):
        self.store: dict[flowfilter.TFilter, Any] = {}

    def __getitem__(self, key, *, count=0):
        """Returns the list of values whose filters match *key* (a flow).

        Args:
            key: the flow to match against the stored filters.
            count: if non-zero, return at most *count* matches.

        Raises:
            KeyError: if no stored filter matches.
        """
        if count:
            # Fixed: the islice iterator was returned as-is before; since any
            # iterator object is truthy, an empty result silently bypassed the
            # KeyError below. Materialize it so emptiness is detectable.
            ret = list(itertools.islice(self.get_generator(key), 0, count))
        else:
            ret = list(self.get_generator(key))
        if ret:
            return ret
        else:
            raise KeyError

    def __setitem__(self, key: str, value):
        """Stores *value* under the parsed filter expression *key*.

        Raises:
            ValueError: if *key* is not a valid mitmproxy filter expression.
        """
        fltr = flowfilter.parse(key)
        if fltr:
            self.store.__setitem__(fltr, value)
        else:
            raise ValueError("Not a valid filter")

    def __delitem__(self, key):
        self.store.__delitem__(key)

    def __iter__(self):
        return self.store.__iter__()

    def __len__(self):
        return self.store.__len__()

    def get_generator(self, flow: HTTPFlow) -> Generator[Any, None, None]:
        """Yields every stored value whose filter matches *flow*."""
        for fltr, value in self.store.items():
            if flowfilter.match(fltr, flow):
                yield value

    def get(self, flow: HTTPFlow, default=None, *, count=0) -> list[Any]:
        """Like __getitem__, but returns *default* instead of raising KeyError."""
        try:
            return self.__getitem__(flow, count=count)
        except KeyError:
            return default

    @classmethod
    def _load(cls, json_obj, value_loader: Callable = f_id):
        # Builds a URLDict from a parsed JSON object; value_loader converts
        # each raw value into its in-memory representation.
        url_dict = cls()
        for fltr, value in json_obj.items():
            url_dict[fltr] = value_loader(value)
        return url_dict

    @classmethod
    def load(cls, f: TextIO, value_loader: Callable = f_id):
        """Loads a URLDict from an open JSON file."""
        json_obj = json.load(f)
        return cls._load(json_obj, value_loader)

    @classmethod
    def loads(cls, json_str: str, value_loader: Callable = f_id):
        """Loads a URLDict from a JSON string."""
        json_obj = json.loads(json_str)
        return cls._load(json_obj, value_loader)

    def _dump(self, value_dumper: Callable = f_id) -> dict:
        # Serializes filters back to their pattern strings where available.
        dumped: dict[flowfilter.TFilter | str, Any] = {}
        for fltr, value in self.store.items():
            if hasattr(fltr, "pattern"):
                # cast necessary for mypy
                dumped[cast(Any, fltr).pattern] = value_dumper(value)
            else:
                dumped[str(fltr)] = value_dumper(value)
        return dumped

    def dump(self, f: TextIO, value_dumper: Callable = f_id):
        """Writes the URLDict as JSON to an open file."""
        json.dump(self._dump(value_dumper), f)

    def dumps(self, value_dumper: Callable = f_id):
        """Returns the URLDict serialized as a JSON string."""
        return json.dumps(self._dump(value_dumper))
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/contrib/webscanner_helper/__init__.py | examples/contrib/webscanner_helper/__init__.py | python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false | |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/contrib/webscanner_helper/test_urlinjection.py | examples/contrib/webscanner_helper/test_urlinjection.py | import json
from unittest import mock
from examples.contrib.webscanner_helper.urlinjection import HTMLInjection
from examples.contrib.webscanner_helper.urlinjection import InjectionGenerator
from examples.contrib.webscanner_helper.urlinjection import logger
from examples.contrib.webscanner_helper.urlinjection import RobotsInjection
from examples.contrib.webscanner_helper.urlinjection import SitemapInjection
from examples.contrib.webscanner_helper.urlinjection import UrlInjectionAddon
from mitmproxy import flowfilter
from mitmproxy.test import tflow
from mitmproxy.test import tutils
# Sample URL index as produced by the UrlIndex add-on:
# "scheme://host:port" -> path -> method -> list of observed status codes.
index = json.loads(
    '{"http://example.com:80": {"/": {"GET": [301]}}, "http://www.example.com:80": {"/test": {"POST": [302]}}}'
)
class TestInjectionGenerator:
    """Smoke test: the abstract inject() body is executable and does nothing."""

    def test_inject(self):
        f = tflow.tflow(resp=tutils.tresp())
        injection_generator = InjectionGenerator()
        injection_generator.inject(index=index, flow=f)
        assert True
class TestHTMLInjection:
    """Tests for HTMLInjection in insert and landing-page modes."""

    def test_inject_not404(self):
        # Without insert mode, a non-404 response is only warned about.
        html_injection = HTMLInjection()
        f = tflow.tflow(resp=tutils.tresp())
        with mock.patch.object(logger, "warning") as mock_warning:
            html_injection.inject(index, f)
        assert mock_warning.called

    def test_inject_insert(self):
        # Insert mode appends the index even without a </body> tag.
        html_injection = HTMLInjection(insert=True)
        f = tflow.tflow(resp=tutils.tresp())
        assert "example.com" not in str(f.response.content)
        html_injection.inject(index, f)
        assert "example.com" in str(f.response.content)

    def test_inject_insert_body(self):
        # Insert mode splices the index before an existing </body> tag.
        html_injection = HTMLInjection(insert=True)
        f = tflow.tflow(resp=tutils.tresp())
        f.response.text = "<body></body>"
        assert "example.com" not in str(f.response.content)
        html_injection.inject(index, f)
        assert "example.com" in str(f.response.content)

    def test_inject_404(self):
        # A 404 response is replaced by the landing page.
        html_injection = HTMLInjection()
        f = tflow.tflow(resp=tutils.tresp())
        f.response.status_code = 404
        assert "example.com" not in str(f.response.content)
        html_injection.inject(index, f)
        assert "example.com" in str(f.response.content)
class TestRobotsInjection:
def test_inject_not404(self):
robots_injection = RobotsInjection()
f = tflow.tflow(resp=tutils.tresp())
with mock.patch.object(logger, "warning") as mock_warning:
robots_injection.inject(index, f)
assert mock_warning.called
def test_inject_404(self):
robots_injection = RobotsInjection()
f = tflow.tflow(resp=tutils.tresp())
f.response.status_code = 404
assert "Allow: /test" not in str(f.response.content)
robots_injection.inject(index, f)
assert "Allow: /test" in str(f.response.content)
class TestSitemapInjection:
def test_inject_not404(self):
sitemap_injection = SitemapInjection()
f = tflow.tflow(resp=tutils.tresp())
with mock.patch.object(logger, "warning") as mock_warning:
sitemap_injection.inject(index, f)
assert mock_warning.called
def test_inject_404(self):
sitemap_injection = SitemapInjection()
f = tflow.tflow(resp=tutils.tresp())
f.response.status_code = 404
assert "<url><loc>http://example.com:80/</loc></url>" not in str(
f.response.content
)
sitemap_injection.inject(index, f)
assert "<url><loc>http://example.com:80/</loc></url>" in str(f.response.content)
class TestUrlInjectionAddon:
def test_init(self, tmpdir):
tmpfile = tmpdir.join("tmpfile")
with open(tmpfile, "w") as tfile:
json.dump(index, tfile)
flt = f"~u .*/site.html$"
url_injection = UrlInjectionAddon(
f"~u .*/site.html$", tmpfile, HTMLInjection(insert=True)
)
assert "http://example.com:80" in url_injection.url_store
fltr = flowfilter.parse(flt)
f = tflow.tflow(resp=tutils.tresp())
f.request.url = "http://example.com/site.html"
assert fltr(f)
assert "http://example.com:80" not in str(f.response.content)
url_injection.response(f)
assert "http://example.com:80" in str(f.response.content)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/contrib/webscanner_helper/test_watchdog.py | examples/contrib/webscanner_helper/test_watchdog.py | import multiprocessing
import time
from pathlib import Path
from unittest import mock
from examples.contrib.webscanner_helper.watchdog import logger
from examples.contrib.webscanner_helper.watchdog import WatchdogAddon
from mitmproxy.connections import ServerConnection
from mitmproxy.exceptions import HttpSyntaxException
from mitmproxy.test import tflow
from mitmproxy.test import tutils
class TestWatchdog:
def test_init_file(self, tmpdir):
tmpfile = tmpdir.join("tmpfile")
with open(tmpfile, "w") as tfile:
tfile.write("")
event = multiprocessing.Event()
try:
WatchdogAddon(event, Path(tmpfile))
except RuntimeError:
assert True
else:
assert False
def test_init_dir(self, tmpdir):
event = multiprocessing.Event()
mydir = tmpdir.join("mydir")
assert not Path(mydir).exists()
WatchdogAddon(event, Path(mydir))
assert Path(mydir).exists()
def test_serverconnect(self, tmpdir):
event = multiprocessing.Event()
w = WatchdogAddon(event, Path(tmpdir), timeout=10)
with mock.patch(
"mitmproxy.connections.ServerConnection.settimeout"
) as mock_set_timeout:
w.serverconnect(ServerConnection("127.0.0.1"))
mock_set_timeout.assert_called()
def test_serverconnect_None(self, tmpdir):
event = multiprocessing.Event()
w = WatchdogAddon(event, Path(tmpdir))
with mock.patch(
"mitmproxy.connections.ServerConnection.settimeout"
) as mock_set_timeout:
w.serverconnect(ServerConnection("127.0.0.1"))
assert not mock_set_timeout.called
def test_trigger(self, tmpdir):
event = multiprocessing.Event()
w = WatchdogAddon(event, Path(tmpdir))
f = tflow.tflow(resp=tutils.tresp())
f.error = "Test Error"
with mock.patch.object(logger, "error") as mock_error:
open_mock = mock.mock_open()
with mock.patch("pathlib.Path.open", open_mock, create=True):
w.error(f)
mock_error.assert_called()
open_mock.assert_called()
def test_trigger_http_synatx(self, tmpdir):
event = multiprocessing.Event()
w = WatchdogAddon(event, Path(tmpdir))
f = tflow.tflow(resp=tutils.tresp())
f.error = HttpSyntaxException()
assert isinstance(f.error, HttpSyntaxException)
with mock.patch.object(logger, "error") as mock_error:
open_mock = mock.mock_open()
with mock.patch("pathlib.Path.open", open_mock, create=True):
w.error(f)
assert not mock_error.called
assert not open_mock.called
def test_timeout(self, tmpdir):
event = multiprocessing.Event()
w = WatchdogAddon(event, Path(tmpdir))
assert w.not_in_timeout(None, None)
assert w.not_in_timeout(time.time, None)
with mock.patch("time.time", return_value=5):
assert not w.not_in_timeout(3, 20)
assert w.not_in_timeout(3, 1)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/http-stream-simple.py | examples/addons/http-stream-simple.py | """
Select which responses should be streamed.
Enable response streaming for all HTTP flows.
This is equivalent to passing `--set stream_large_bodies=1` to mitmproxy.
"""
def responseheaders(flow):
"""
Enables streaming for all responses.
This is equivalent to passing `--set stream_large_bodies=1` to mitmproxy.
"""
flow.response.stream = True
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/anatomy2.py | examples/addons/anatomy2.py | """An addon using the abbreviated scripting syntax."""
def request(flow):
flow.request.headers["myheader"] = "value"
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/http-trailers.py | examples/addons/http-trailers.py | """
This script simply prints all received HTTP Trailers.
HTTP requests and responses can contain trailing headers which are sent after
the body is fully transmitted. Such trailers need to be announced in the initial
headers by name, so the receiving endpoint can wait and read them after the
body.
"""
from mitmproxy import http
from mitmproxy.http import Headers
def request(flow: http.HTTPFlow):
if flow.request.trailers:
print("HTTP Trailers detected! Request contains:", flow.request.trailers)
if flow.request.path == "/inject_trailers":
if flow.request.is_http10:
# HTTP/1.0 doesn't support trailers
return
elif flow.request.is_http11:
if not flow.request.content:
# Avoid sending a body on GET requests or a 0 byte chunked body with trailers.
# Otherwise some servers return 400 Bad Request.
return
# HTTP 1.1 requires transfer-encoding: chunked to send trailers
flow.request.headers["transfer-encoding"] = "chunked"
# HTTP 2+ supports trailers on all requests/responses
flow.request.headers["trailer"] = "x-my-injected-trailer-header"
flow.request.trailers = Headers([(b"x-my-injected-trailer-header", b"foobar")])
print("Injected a new request trailer...", flow.request.headers["trailer"])
def response(flow: http.HTTPFlow):
assert flow.response
if flow.response.trailers:
print("HTTP Trailers detected! Response contains:", flow.response.trailers)
if flow.request.path == "/inject_trailers":
if flow.request.is_http10:
return
elif flow.request.is_http11:
if not flow.response.content:
return
flow.response.headers["transfer-encoding"] = "chunked"
flow.response.headers["trailer"] = "x-my-injected-trailer-header"
flow.response.trailers = Headers([(b"x-my-injected-trailer-header", b"foobar")])
print("Injected a new response trailer...", flow.response.headers["trailer"])
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/http-modify-form.py | examples/addons/http-modify-form.py | """Modify an HTTP form submission."""
from mitmproxy import http
def request(flow: http.HTTPFlow) -> None:
if flow.request.urlencoded_form:
# If there's already a form, one can just add items to the dict:
flow.request.urlencoded_form["mitmproxy"] = "rocks"
else:
# One can also just pass new form data.
# This sets the proper content type and overrides the body.
flow.request.urlencoded_form = [("foo", "bar")] # type: ignore[assignment]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/io-read-saved-flows.py | examples/addons/io-read-saved-flows.py | #!/usr/bin/env python
"""
Read a mitmproxy dump file.
"""
import pprint
import sys
from mitmproxy import http
from mitmproxy import io
from mitmproxy.exceptions import FlowReadException
with open(sys.argv[1], "rb") as logfile:
freader = io.FlowReader(logfile)
pp = pprint.PrettyPrinter(indent=4)
try:
for f in freader.stream():
print(f)
if isinstance(f, http.HTTPFlow):
print(f.request.host)
pp.pprint(f.get_state())
print("")
except FlowReadException as e:
print(f"Flow file corrupted: {e}")
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/dns-simple.py | examples/addons/dns-simple.py | """
Spoof DNS responses.
In this example, we fiddle with IPv6 (AAAA) records:
- For example.com, `::1` is returned.
(domain is hosted on localhost)
- For example.org, an NXDOMAIN error is returned.
(domain does not exist)
- For all other domains, return a non-error response without any records.
(domain exists, but has no IPv6 configured)
"""
import ipaddress
import logging
from mitmproxy import dns
def dns_request(flow: dns.DNSFlow) -> None:
q = flow.request.question
if q and q.type == dns.types.AAAA:
logging.info(f"Spoofing IPv6 records for {q.name}...")
if q.name == "example.com":
flow.response = flow.request.succeed(
[
dns.ResourceRecord(
name="example.com",
type=dns.types.AAAA,
class_=dns.classes.IN,
ttl=dns.ResourceRecord.DEFAULT_TTL,
data=ipaddress.ip_address("::1").packed,
)
]
)
elif q.name == "example.org":
flow.response = flow.request.fail(dns.response_codes.NXDOMAIN)
else:
flow.response = flow.request.succeed([])
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/log-events.py | examples/addons/log-events.py | """Post messages to mitmproxy's event log."""
import logging
from mitmproxy.addonmanager import Loader
from mitmproxy.log import ALERT
logger = logging.getLogger(__name__)
def load(loader: Loader):
logger.info("This is some informative text.")
logger.warning("This is a warning.")
logger.error("This is an error.")
logger.log(
ALERT,
"This is an alert. It has the same urgency as info, but will also pop up in the status bar.",
)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/filter-flows.py | examples/addons/filter-flows.py | """
Use mitmproxy's filter pattern in scripts.
"""
from __future__ import annotations
import logging
from mitmproxy import flowfilter
from mitmproxy import http
from mitmproxy.addonmanager import Loader
class Filter:
filter: flowfilter.TFilter
def configure(self, updated):
if "flowfilter" in updated:
self.filter = flowfilter.parse(".")
def load(self, loader: Loader):
loader.add_option("flowfilter", str, "", "Check that flow matches filter.")
def response(self, flow: http.HTTPFlow) -> None:
if flowfilter.match(self.filter, flow):
logging.info("Flow matches filter:")
logging.info(flow)
addons = [Filter()]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/io-write-flow-file.py | examples/addons/io-write-flow-file.py | """
Generate a mitmproxy dump file.
This script demonstrates how to generate a mitmproxy dump file,
as it would also be generated by passing `-w` to mitmproxy.
In contrast to `-w`, this gives you full control over which
flows should be saved and also allows you to rotate files or log
to multiple files in parallel.
"""
import os
import random
from typing import BinaryIO
from mitmproxy import http
from mitmproxy import io
class Writer:
def __init__(self) -> None:
# We are using an environment variable to keep the example as simple as possible,
# consider implementing this as a mitmproxy option instead.
filename = os.getenv("MITMPROXY_OUTFILE", "out.mitm")
self.f: BinaryIO = open(filename, "wb")
self.w = io.FlowWriter(self.f)
def response(self, flow: http.HTTPFlow) -> None:
if random.choice([True, False]):
self.w.add(flow)
def done(self):
self.f.close()
addons = [Writer()]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/options-simple.py | examples/addons/options-simple.py | """
Add a new mitmproxy option.
Usage:
mitmproxy -s options-simple.py --set addheader=true
"""
from mitmproxy import ctx
class AddHeader:
def __init__(self):
self.num = 0
def load(self, loader):
loader.add_option(
name="addheader",
typespec=bool,
default=False,
help="Add a count header to responses",
)
def response(self, flow):
if ctx.options.addheader:
self.num = self.num + 1
flow.response.headers["count"] = str(self.num)
addons = [AddHeader()]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/duplicate-modify-replay.py | examples/addons/duplicate-modify-replay.py | """Take incoming HTTP requests and replay them with modified parameters."""
from mitmproxy import ctx
def request(flow):
# Avoid an infinite loop by not replaying already replayed requests
if flow.is_replay == "request":
return
flow = flow.copy()
# Only interactive tools have a view. If we have one, add a duplicate entry
# for our flow.
if "view" in ctx.master.addons:
ctx.master.commands.call("view.flows.duplicate", [flow])
flow.request.path = "/changed"
ctx.master.commands.call("replay.client", [flow])
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/contentview-interactive.py | examples/addons/contentview-interactive.py | from mitmproxy import contentviews
class InteractiveSwapCase(contentviews.InteractiveContentview):
def prettify(
self,
data: bytes,
metadata: contentviews.Metadata,
) -> str:
return data.swapcase().decode()
def reencode(
self,
prettified: str,
metadata: contentviews.Metadata,
) -> bytes:
return prettified.encode().swapcase()
contentviews.add(InteractiveSwapCase)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/commands-simple.py | examples/addons/commands-simple.py | """Add a custom command to mitmproxy's command prompt."""
import logging
from mitmproxy import command
class MyAddon:
def __init__(self):
self.num = 0
@command.command("myaddon.inc")
def inc(self) -> None:
self.num += 1
logging.info(f"num = {self.num}")
addons = [MyAddon()]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/shutdown.py | examples/addons/shutdown.py | """
A simple way of shutting down the mitmproxy instance to stop everything.
Usage:
mitmproxy -s shutdown.py
and then send a HTTP request to trigger the shutdown:
curl --proxy localhost:8080 http://example.com/path
"""
import logging
from mitmproxy import ctx
from mitmproxy import http
def request(flow: http.HTTPFlow) -> None:
# a random condition to make this example a bit more interactive
if flow.request.pretty_url == "http://example.com/path":
logging.info("Shutting down everything...")
ctx.master.shutdown()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/websocket-simple.py | examples/addons/websocket-simple.py | """Process individual messages from a WebSocket connection."""
import logging
import re
from mitmproxy import http
def websocket_message(flow: http.HTTPFlow):
assert flow.websocket is not None # make type checker happy
# get the latest message
message = flow.websocket.messages[-1]
# was the message sent from the client or server?
if message.from_client:
logging.info(f"Client sent a message: {message.content!r}")
else:
logging.info(f"Server sent a message: {message.content!r}")
# manipulate the message content
message.content = re.sub(rb"^Hello", b"HAPPY", message.content)
if b"FOOBAR" in message.content:
# kill the message and not send it to the other endpoint
message.drop()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/http-reply-from-proxy.py | examples/addons/http-reply-from-proxy.py | """Send a reply from the proxy without sending the request to the remote server."""
from mitmproxy import http
def request(flow: http.HTTPFlow) -> None:
if flow.request.pretty_url == "http://example.com/path":
flow.response = http.Response.make(
200, # (optional) status code
b"Hello World", # (optional) content
{"Content-Type": "text/html"}, # (optional) headers
)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/internet-in-mirror.py | examples/addons/internet-in-mirror.py | """
Mirror all web pages.
Useful if you are living down under.
"""
from mitmproxy import http
def response(flow: http.HTTPFlow) -> None:
if flow.response and flow.response.content:
flow.response.content = flow.response.content.replace(
b"</head>", b"<style>body {transform: scaleX(-1);}</style></head>"
)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/contentview.py | examples/addons/contentview.py | from mitmproxy import contentviews
class SwapCase(contentviews.Contentview):
def prettify(self, data: bytes, metadata: contentviews.Metadata) -> str:
return data.swapcase().decode()
def render_priority(self, data: bytes, metadata: contentviews.Metadata) -> float:
if metadata.content_type and metadata.content_type.startswith("text/example"):
return 2 # return a value > 1 to make sure the custom view is automatically selected
else:
return 0
contentviews.add(SwapCase)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/nonblocking.py | examples/addons/nonblocking.py | """
Make events hooks non-blocking using async or @concurrent.
"""
import asyncio
import logging
import time
from mitmproxy.script import concurrent
# Toggle between asyncio and thread-based alternatives.
if True:
# Hooks can be async, which allows the hook to call async functions and perform async I/O
# without blocking other requests. This is generally preferred for new addons.
async def request(flow):
logging.info(f"handle request: {flow.request.host}{flow.request.path}")
await asyncio.sleep(5)
logging.info(f"start request: {flow.request.host}{flow.request.path}")
else:
# Another option is to use @concurrent, which launches the hook in its own thread.
# Please note that this generally opens the door to race conditions and decreases performance if not required.
@concurrent # Remove this to make it synchronous and see what happens
def request(flow):
logging.info(f"handle request: {flow.request.host}{flow.request.path}")
time.sleep(5)
logging.info(f"start request: {flow.request.host}{flow.request.path}")
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/websocket-inject-message.py | examples/addons/websocket-inject-message.py | """
Inject a WebSocket message into a running connection.
This example shows how to inject a WebSocket message into a running connection.
"""
import asyncio
from mitmproxy import ctx
from mitmproxy import http
# Simple example: Inject a message as a response to an event
def websocket_message(flow: http.HTTPFlow):
assert flow.websocket is not None # make type checker happy
last_message = flow.websocket.messages[-1]
if last_message.is_text and "secret" in last_message.text:
last_message.drop()
ctx.master.commands.call(
"inject.websocket", flow, last_message.from_client, b"ssssssh"
)
# Complex example: Schedule a periodic timer
async def inject_async(flow: http.HTTPFlow):
msg = "hello from mitmproxy! "
assert flow.websocket is not None # make type checker happy
while flow.websocket.timestamp_end is None:
ctx.master.commands.call("inject.websocket", flow, True, msg.encode())
await asyncio.sleep(1)
msg = msg[1:] + msg[:1]
tasks = set()
def websocket_start(flow: http.HTTPFlow):
# we need to hold a reference to the task, otherwise it will be garbage collected.
t = asyncio.create_task(inject_async(flow))
tasks.add(t)
t.add_done_callback(tasks.remove)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/http-add-header.py | examples/addons/http-add-header.py | """Add an HTTP header to each response."""
class AddHeader:
def __init__(self):
self.num = 0
def response(self, flow):
self.num = self.num + 1
flow.response.headers["count"] = str(self.num)
addons = [AddHeader()]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/tcp-simple.py | examples/addons/tcp-simple.py | """
Process individual messages from a TCP connection.
This script replaces full occurrences of "foo" with "bar" and prints various details for each message.
Please note that TCP is stream-based and *not* message-based. mitmproxy splits stream contents into "messages"
as they are received by socket.recv(). This is pretty arbitrary and should not be relied on.
However, it is sometimes good enough as a quick hack.
Example Invocation:
mitmdump --tcp-hosts ".*" -s examples/tcp-simple.py
"""
import logging
from mitmproxy import tcp
from mitmproxy.utils import strutils
def tcp_message(flow: tcp.TCPFlow):
message = flow.messages[-1]
message.content = message.content.replace(b"foo", b"bar")
logging.info(
f"tcp_message[from_client={message.from_client}), content={strutils.bytes_to_escaped_str(message.content)}]"
)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/commands-flows.py | examples/addons/commands-flows.py | """Handle flows as command arguments."""
import logging
from collections.abc import Sequence
from mitmproxy import command
from mitmproxy import flow
from mitmproxy import http
from mitmproxy.log import ALERT
class MyAddon:
@command.command("myaddon.addheader")
def addheader(self, flows: Sequence[flow.Flow]) -> None:
for f in flows:
if isinstance(f, http.HTTPFlow):
f.request.headers["myheader"] = "value"
logging.log(ALERT, "done")
addons = [MyAddon()]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/wsgi-flask-app.py | examples/addons/wsgi-flask-app.py | """
Host a WSGI app in mitmproxy.
This example shows how to graft a WSGI app onto mitmproxy. In this
instance, we're using the Flask framework (http://flask.pocoo.org/) to expose
a single simplest-possible page.
"""
from flask import Flask
from mitmproxy.addons import asgiapp
app = Flask("proxapp")
@app.route("/")
def hello_world() -> str:
return "Hello World!"
addons = [
# Host app at the magic domain "example.com" on port 80. Requests to this
# domain and port combination will now be routed to the WSGI app instance.
asgiapp.WSGIApp(app, "example.com", 80),
# TLS works too, but the magic domain needs to be resolvable from the mitmproxy machine due to mitmproxy's design.
# mitmproxy will connect to said domain and use its certificate but won't send any data.
# By using `--set upstream_cert=false` and `--set connection_strategy_lazy` the local certificate is used instead.
# asgiapp.WSGIApp(app, "example.com", 443),
]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/http-redirect-requests.py | examples/addons/http-redirect-requests.py | """Redirect HTTP requests to another server."""
from mitmproxy import http
def request(flow: http.HTTPFlow) -> None:
# pretty_host takes the "Host" header of the request into account,
# which is useful in transparent mode where we usually only have the IP
# otherwise.
if flow.request.pretty_host == "example.org":
flow.request.host = "mitmproxy.org"
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/http-stream-modify.py | examples/addons/http-stream-modify.py | """
Modify a streamed response.
Generally speaking, we recommend *not* to stream messages you need to modify.
Modifying streamed responses is tricky and brittle:
- If the transfer encoding isn't chunked, you cannot simply change the content length.
- If you want to replace all occurrences of "foobar", make sure to catch the cases
where one chunk ends with [...]foo" and the next starts with "bar[...].
"""
from collections.abc import Iterable
def modify(data: bytes) -> bytes | Iterable[bytes]:
"""
This function will be called for each chunk of request/response body data that arrives at the proxy,
and once at the end of the message with an empty bytes argument (b"").
It may either return bytes or an iterable of bytes (which would result in multiple HTTP/2 data frames).
"""
return data.replace(b"foo", b"bar")
def responseheaders(flow):
flow.response.stream = modify
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/http-modify-query-string.py | examples/addons/http-modify-query-string.py | """Modify HTTP query parameters."""
from mitmproxy import http
def request(flow: http.HTTPFlow) -> None:
flow.request.query["mitmproxy"] = "rocks"
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/options-configure.py | examples/addons/options-configure.py | """React to configuration changes."""
from typing import Optional
from mitmproxy import ctx
from mitmproxy import exceptions
class AddHeader:
def load(self, loader):
loader.add_option(
name="addheader",
typespec=Optional[int],
default=None,
help="Add a header to responses",
)
def configure(self, updates):
if "addheader" in updates:
if ctx.options.addheader is not None and ctx.options.addheader > 100:
raise exceptions.OptionsError("addheader must be <= 100")
def response(self, flow):
if ctx.options.addheader is not None:
flow.response.headers["addheader"] = str(ctx.options.addheader)
addons = [AddHeader()]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/anatomy.py | examples/addons/anatomy.py | """
Basic skeleton of a mitmproxy addon.
Run as follows: mitmproxy -s anatomy.py
"""
import logging
class Counter:
def __init__(self):
self.num = 0
def request(self, flow):
self.num = self.num + 1
logging.info("We've seen %d flows" % self.num)
addons = [Counter()]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/examples/addons/commands-paths.py | examples/addons/commands-paths.py | """Handle file paths as command arguments."""
import logging
from collections.abc import Sequence
from mitmproxy import command
from mitmproxy import flow
from mitmproxy import http
from mitmproxy import types
from mitmproxy.log import ALERT
class MyAddon:
@command.command("myaddon.histogram")
def histogram(
self,
flows: Sequence[flow.Flow],
path: types.Path,
) -> None:
totals: dict[str, int] = {}
for f in flows:
if isinstance(f, http.HTTPFlow):
totals[f.request.host] = totals.setdefault(f.request.host, 0) + 1
with open(path, "w+") as fp:
for cnt, dom in sorted((v, k) for (k, v) in totals.items()):
fp.write(f"{cnt}: {dom}\n")
logging.log(ALERT, "done")
addons = [MyAddon()]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/web/gen/state_js.py | web/gen/state_js.py | #!/usr/bin/env python3
import asyncio
import json
import textwrap
from pathlib import Path
from unittest.mock import Mock
from mitmproxy import options
from mitmproxy.proxy.mode_servers import ServerInstance
from mitmproxy.tools.web import app
from mitmproxy.tools.web import master
here = Path(__file__).parent.absolute()
filename = here / "../src/js/__tests__/ducks/_tbackendstate.ts"
async def make() -> str:
o = options.Options()
m = master.WebMaster(o)
si1 = ServerInstance.make("regular", m.proxyserver)
sock1 = Mock()
sock1.getsockname.return_value = ("127.0.0.1", 8080)
sock2 = Mock()
sock2.getsockname.return_value = ("::1", 8080)
server = Mock()
server.sockets = [sock1, sock2]
si1._servers = [server]
si2 = ServerInstance.make("reverse:example.com", m.proxyserver)
si2.last_exception = RuntimeError("I failed somehow.")
si3 = ServerInstance.make("socks5", m.proxyserver)
si4 = ServerInstance.make("tun", m.proxyserver)
si4._server = Mock()
si4._server.tun_name = lambda: "tun0"
m.proxyserver.servers._instances.update(
{
si1.mode: si1,
si2.mode: si2,
si3.mode: si3,
si4.mode: si4,
}
)
data = app.State.get_json(m)
await m.done()
data.update(available=True)
data["contentViews"] = ["auto", "Raw"]
data["version"] = "1.2.3"
data["platform"] = "darwin"
data["localModeUnavailable"] = None
# language=TypeScript
content = (
"/** Auto-generated by web/gen/state_js.py */\n"
"import type {BackendStateExtra} from '../../ducks/backendState';\n"
"export function TBackendState(): Required<BackendStateExtra> {\n"
" return %s\n"
"}\n"
% textwrap.indent(json.dumps(data, indent=4, sort_keys=True), " ").lstrip()
)
return content
if __name__ == "__main__":
filename.write_bytes(asyncio.run(make()).encode())
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/web/gen/options_js.py | web/gen/options_js.py | #!/usr/bin/env python3
import asyncio
import io
import json
from collections.abc import Sequence
from contextlib import redirect_stdout
from pathlib import Path
from mitmproxy import options
from mitmproxy import optmanager
from mitmproxy.tools.web import master
here = Path(__file__).parent.absolute()
filename = here / "../src/js/ducks/_options_gen.ts"
def _ts_type(t):
if t is bool:
return "boolean"
if t is str:
return "string"
if t is int:
return "number"
if t == Sequence[str]:
return "string[]"
if t == str | None:
return "string | undefined"
if t == int | None:
return "number | undefined"
raise RuntimeError(t)
async def make() -> str:
o = options.Options()
m = master.WebMaster(o)
opt: optmanager._Option
with redirect_stdout(io.StringIO()) as s:
print("/** Auto-generated by web/gen/options_js.py */")
print("export interface OptionsState {")
for _, opt in sorted(m.options.items()):
print(f" {opt.name}: {_ts_type(opt.typespec)};")
print("}")
print("")
print("export type Option = keyof OptionsState;")
print("")
print("export const defaultState: OptionsState = {")
for _, opt in sorted(m.options.items()):
print(
f" {opt.name}: {json.dumps(opt.default)},".replace(
": null", ": undefined"
)
)
print("};")
await m.done()
return s.getvalue()
if __name__ == "__main__":
filename.write_bytes(asyncio.run(make()).encode())
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/web/gen/tflow_js.py | web/gen/tflow_js.py | #!/usr/bin/env python3
import asyncio
import json
import textwrap
from pathlib import Path
from mitmproxy import certs
from mitmproxy.http import Headers
from mitmproxy.test import tflow
from mitmproxy.tools.web import app
here = Path(__file__).parent.absolute()
filename = here / "../src/js/__tests__/ducks/_tflow.ts"
async def make() -> str:
tf_http = tflow.tflow(resp=True, err=True, ws=True)
tf_http.id = "d91165be-ca1f-4612-88a9-c0f8696f3e29"
tf_http.client_conn.id = "4a18d1a0-50a1-48dd-9aa6-d45d74282939"
tf_http.server_conn.id = "f087e7b2-6d0a-41a8-a8f0-e1a4761395f8"
tf_http.server_conn.certificate_list = [
certs.Cert.from_pem(
(
here / "../../test/mitmproxy/net/data/verificationcerts/self-signed.pem"
).read_bytes()
)
]
tf_http.request.trailers = Headers(trailer="qvalue")
tf_http.response.trailers = Headers(trailer="qvalue")
tf_http.comment = "I'm a comment!"
tf_tcp = tflow.ttcpflow(err=True)
tf_tcp.id = "2ea7012b-21b5-4f8f-98cd-d49819954001"
tf_tcp.client_conn.id = "8be32b99-a0b3-446e-93bc-b29982fe1322"
tf_tcp.server_conn.id = "e33bb2cd-c07e-4214-9a8e-3a8f85f25200"
tf_udp = tflow.tudpflow(err=True)
tf_udp.id = "f9f7b2b9-7727-4477-822d-d3526e5b8951"
tf_udp.client_conn.id = "0a8833da-88e4-429d-ac54-61cda8a7f91c"
tf_udp.server_conn.id = "c49f9c2b-a729-4b16-9212-d181717e294b"
tf_dns = tflow.tdnsflow(resp=True, err=True)
tf_dns.id = "5434da94-1017-42fa-872d-a189508d48e4"
tf_dns.client_conn.id = "0b4cc0a3-6acb-4880-81c0-1644084126fc"
tf_dns.server_conn.id = "db5294af-c008-4098-a320-a94f901eaf2f"
# language=TypeScript
content = (
"/** Auto-generated by web/gen/tflow_js.py */\n"
"import type {HTTPFlow, TCPFlow, UDPFlow, DNSFlow} from '../../flow';\n"
"export function THTTPFlow(): Required<HTTPFlow> {\n"
" return %s\n"
"}\n"
"export function TTCPFlow(): Required<TCPFlow> {\n"
" return %s\n"
"}\n"
"export function TUDPFlow(): Required<UDPFlow> {\n"
" return %s\n"
"}\n"
"export function TDNSFlow(): Required<DNSFlow> {\n"
" return %s\n"
"}\n"
% (
textwrap.indent(
json.dumps(app.flow_to_json(tf_http), indent=4, sort_keys=True), " "
),
textwrap.indent(
json.dumps(app.flow_to_json(tf_tcp), indent=4, sort_keys=True), " "
),
textwrap.indent(
json.dumps(app.flow_to_json(tf_udp), indent=4, sort_keys=True), " "
),
textwrap.indent(
json.dumps(app.flow_to_json(tf_dns), indent=4, sort_keys=True), " "
),
)
)
content = content.replace(": null", ": undefined")
return content
if __name__ == "__main__":
filename.write_bytes(asyncio.run(make()).encode())
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/web/gen/web_columns.py | web/gen/web_columns.py | #!/usr/bin/env python3
import asyncio
import json
import re
from pathlib import Path
here = Path(__file__).parent.absolute()
input_filename = here / "../src/js/components/FlowTable/FlowColumns.tsx"
filename = here / "../../mitmproxy/tools/web/web_columns.py"
def extract_columns() -> list:
# Read the Typescript file content
input_file_content = input_filename.read_text()
pattern = r"//\s*parsed by web/gen/web_columns\s*\n([\s\w,]+)"
match = re.search(pattern, input_file_content, re.MULTILINE)
columns_str = match.group(1)
columns = [col.strip() for col in columns_str.split(",") if col.strip()]
return columns
async def make() -> str:
available_web_columns = extract_columns()
# language=Python
content = (
"# Auto-generated by web/gen/web_columns.py\n"
f"AVAILABLE_WEB_COLUMNS = {json.dumps(available_web_columns, indent=4)}"
).replace("\n]", ",\n]\n")
return content
if __name__ == "__main__":
filename.write_bytes(asyncio.run(make()).encode())
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/web/gen/backend_consts.py | web/gen/backend_consts.py | #!/usr/bin/env python3
import asyncio
import typing
from pathlib import Path
from mitmproxy.contentviews import SyntaxHighlight
from mitmproxy.proxy.mode_specs import ReverseMode
here = Path(__file__).parent.absolute()
filename = here / "../src/js/backends/consts.ts"
async def make() -> str:
protocols = typing.get_args(typing.get_type_hints(ReverseMode)["scheme"])
protocol_enum = ",\n ".join(
f'{protocol.upper()} = "{protocol.lower()}"' for protocol in protocols
)
langs = typing.get_args(SyntaxHighlight.__value__)
syntax_highlight_enum = ",\n ".join(
f'{lang.upper()} = "{lang.lower()}"' for lang in langs
)
# language=TypeScript
content = (
"/** Auto-generated by web/gen/backend_consts.py */\n"
"export enum ReverseProxyProtocols {\n"
f" {protocol_enum},\n"
"}\n"
"\n"
"export enum SyntaxHighlight {\n"
f" {syntax_highlight_enum},\n"
"}\n"
)
return content
if __name__ == "__main__":
filename.write_bytes(asyncio.run(make()).encode())
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tls.py | mitmproxy/tls.py | import io
from dataclasses import dataclass
from kaitaistruct import KaitaiStream
from OpenSSL import SSL
from mitmproxy import connection
from mitmproxy.contrib.kaitaistruct import dtls_client_hello
from mitmproxy.contrib.kaitaistruct import tls_client_hello
from mitmproxy.net import check
from mitmproxy.proxy import context
class ClientHello:
"""
A TLS ClientHello is the first message sent by the client when initiating TLS.
"""
_raw_bytes: bytes
def __init__(self, raw_client_hello: bytes, dtls: bool = False):
"""Create a TLS ClientHello object from raw bytes."""
self._raw_bytes = raw_client_hello
if dtls:
self._client_hello = dtls_client_hello.DtlsClientHello(
KaitaiStream(io.BytesIO(raw_client_hello))
)
else:
self._client_hello = tls_client_hello.TlsClientHello(
KaitaiStream(io.BytesIO(raw_client_hello))
)
def raw_bytes(self, wrap_in_record: bool = True) -> bytes:
"""
The raw ClientHello bytes as seen on the wire.
If `wrap_in_record` is True, the ClientHello will be wrapped in a synthetic TLS record
(`0x160303 + len(chm) + 0x01 + len(ch)`), which is the format expected by some tools.
The synthetic record assumes TLS version (`0x0303`), which may be different from what has been sent over the
wire. JA3 hashes are unaffected by this as they only use the TLS version from the ClientHello data structure.
A future implementation may return not just the exact ClientHello, but also the exact record(s) as seen on the
wire.
"""
if isinstance(self._client_hello, dtls_client_hello.DtlsClientHello):
raise NotImplementedError
if wrap_in_record:
return (
# record layer
b"\x16\x03\x03"
+ (len(self._raw_bytes) + 4).to_bytes(2, byteorder="big")
+
# handshake header
b"\x01"
+ len(self._raw_bytes).to_bytes(3, byteorder="big")
+
# ClientHello as defined in https://datatracker.ietf.org/doc/html/rfc8446#section-4.1.2.
self._raw_bytes
)
else:
return self._raw_bytes
@property
def cipher_suites(self) -> list[int]:
"""The cipher suites offered by the client (as raw ints)."""
return self._client_hello.cipher_suites.cipher_suites
@property
def sni(self) -> str | None:
"""
The [Server Name Indication](https://en.wikipedia.org/wiki/Server_Name_Indication),
which indicates which hostname the client wants to connect to.
"""
if ext := getattr(self._client_hello, "extensions", None):
for extension in ext.extensions:
is_valid_sni_extension = (
extension.type == 0x00
and len(extension.body.server_names) == 1
and extension.body.server_names[0].name_type == 0
and check.is_valid_host(extension.body.server_names[0].host_name)
)
if is_valid_sni_extension:
return extension.body.server_names[0].host_name.decode("ascii")
return None
@property
def alpn_protocols(self) -> list[bytes]:
"""
The application layer protocols offered by the client as part of the
[ALPN](https://en.wikipedia.org/wiki/Application-Layer_Protocol_Negotiation) TLS extension.
"""
if ext := getattr(self._client_hello, "extensions", None):
for extension in ext.extensions:
if extension.type == 0x10:
return list(x.name for x in extension.body.alpn_protocols)
return []
@property
def extensions(self) -> list[tuple[int, bytes]]:
"""The raw list of extensions in the form of `(extension_type, raw_bytes)` tuples."""
ret = []
if ext := getattr(self._client_hello, "extensions", None):
for extension in ext.extensions:
body = getattr(extension, "_raw_body", extension.body)
ret.append((extension.type, body))
return ret
def __repr__(self):
return f"ClientHello(sni: {self.sni}, alpn_protocols: {self.alpn_protocols})"
@dataclass
class ClientHelloData:
"""
Event data for `tls_clienthello` event hooks.
"""
context: context.Context
"""The context object for this connection."""
client_hello: ClientHello
"""The entire parsed TLS ClientHello."""
ignore_connection: bool = False
"""
If set to `True`, do not intercept this connection and forward encrypted contents unmodified.
"""
establish_server_tls_first: bool = False
"""
If set to `True`, pause this handshake and establish TLS with an upstream server first.
This makes it possible to process the server certificate when generating an interception certificate.
"""
@dataclass
class TlsData:
"""
Event data for `tls_start_client`, `tls_start_server`, and `tls_handshake` event hooks.
"""
conn: connection.Connection
"""The affected connection."""
context: context.Context
"""The context object for this connection."""
ssl_conn: SSL.Connection | None = None
"""
The associated pyOpenSSL `SSL.Connection` object.
This will be set by an addon in the `tls_start_*` event hooks.
"""
is_dtls: bool = False
"""
If set to `True`, indicates that it is a DTLS event.
"""
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/master.py | mitmproxy/master.py | import asyncio
import logging
from . import ctx as mitmproxy_ctx
from .addons import termlog
from .proxy.mode_specs import ReverseMode
from .utils import asyncio_utils
from mitmproxy import addonmanager
from mitmproxy import command
from mitmproxy import eventsequence
from mitmproxy import hooks
from mitmproxy import http
from mitmproxy import log
from mitmproxy import options
logger = logging.getLogger(__name__)
class Master:
"""
The master handles mitmproxy's main event loop.
"""
event_loop: asyncio.AbstractEventLoop
_termlog_addon: termlog.TermLog | None = None
def __init__(
self,
opts: options.Options | None,
event_loop: asyncio.AbstractEventLoop | None = None,
with_termlog: bool = False,
):
self.options: options.Options = opts or options.Options()
self.commands = command.CommandManager(self)
self.addons = addonmanager.AddonManager(self)
if with_termlog:
self._termlog_addon = termlog.TermLog()
self.addons.add(self._termlog_addon)
self.log = log.Log(self) # deprecated, do not use.
self._legacy_log_events = log.LegacyLogEvents(self)
self._legacy_log_events.install()
# We expect an active event loop here already because some addons
# may want to spawn tasks during the initial configuration phase,
# which happens before run().
self.event_loop = event_loop or asyncio.get_running_loop()
self.should_exit = asyncio.Event()
mitmproxy_ctx.master = self
mitmproxy_ctx.log = self.log # deprecated, do not use.
mitmproxy_ctx.options = self.options
async def run(self) -> None:
with (
asyncio_utils.install_exception_handler(self._asyncio_exception_handler),
asyncio_utils.set_eager_task_factory(),
):
self.should_exit.clear()
# Can we exit before even bringing up servers?
if ec := self.addons.get("errorcheck"):
await ec.shutdown_if_errored()
if ps := self.addons.get("proxyserver"):
# This may block for some proxy modes, so we also monitor should_exit.
await asyncio.wait(
[
asyncio_utils.create_task(
ps.setup_servers(), name="setup_servers", keep_ref=False
),
asyncio_utils.create_task(
self.should_exit.wait(), name="should_exit", keep_ref=False
),
],
return_when=asyncio.FIRST_COMPLETED,
)
if self.should_exit.is_set():
return
# Did bringing up servers fail?
if ec := self.addons.get("errorcheck"):
await ec.shutdown_if_errored()
try:
await self.running()
# Any errors in the final part of startup?
if ec := self.addons.get("errorcheck"):
await ec.shutdown_if_errored()
ec.finish()
await self.should_exit.wait()
finally:
# if running() was called, we also always want to call done().
# .wait might be cancelled (e.g. by sys.exit), so this needs to be in a finally block.
await self.done()
def shutdown(self):
"""
Shut down the proxy. This method is thread-safe.
"""
# We may add an exception argument here.
self.event_loop.call_soon_threadsafe(self.should_exit.set)
async def running(self) -> None:
await self.addons.trigger_event(hooks.RunningHook())
async def done(self) -> None:
await self.addons.trigger_event(hooks.DoneHook())
self._legacy_log_events.uninstall()
if self._termlog_addon is not None:
self._termlog_addon.uninstall()
def _asyncio_exception_handler(self, loop, context) -> None:
try:
exc: Exception = context["exception"]
except KeyError:
logger.error(f"Unhandled asyncio error: {context}")
else:
if isinstance(exc, OSError) and exc.errno == 10038:
return # suppress https://bugs.python.org/issue43253
logger.error(
"Unhandled error in task.",
exc_info=(type(exc), exc, exc.__traceback__),
)
async def load_flow(self, f):
"""
Loads a flow
"""
if (
isinstance(f, http.HTTPFlow)
and len(self.options.mode) == 1
and self.options.mode[0].startswith("reverse:")
):
# When we load flows in reverse proxy mode, we adjust the target host to
# the reverse proxy destination for all flows we load. This makes it very
# easy to replay saved flows against a different host.
# We may change this in the future so that clientplayback always replays to the first mode.
mode = ReverseMode.parse(self.options.mode[0])
assert isinstance(mode, ReverseMode)
f.request.host, f.request.port, *_ = mode.address
f.request.scheme = mode.scheme
for e in eventsequence.iterate(f):
await self.addons.handle_lifecycle(e)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/http.py | mitmproxy/http.py | import binascii
import json
import os
import time
import urllib.parse
import warnings
from collections.abc import Callable
from collections.abc import Iterable
from collections.abc import Iterator
from collections.abc import Mapping
from collections.abc import Sequence
from dataclasses import dataclass
from dataclasses import fields
from email.utils import formatdate
from email.utils import mktime_tz
from email.utils import parsedate_tz
from typing import Any
from typing import cast
from mitmproxy import flow
from mitmproxy.coretypes import multidict
from mitmproxy.coretypes import serializable
from mitmproxy.net import encoding
from mitmproxy.net.http import cookies
from mitmproxy.net.http import multipart
from mitmproxy.net.http import status_codes
from mitmproxy.net.http import url
from mitmproxy.net.http.headers import assemble_content_type
from mitmproxy.net.http.headers import infer_content_encoding
from mitmproxy.net.http.headers import parse_content_type
from mitmproxy.utils import human
from mitmproxy.utils import strutils
from mitmproxy.utils import typecheck
from mitmproxy.utils.strutils import always_bytes
from mitmproxy.utils.strutils import always_str
from mitmproxy.websocket import WebSocketData
# While headers _should_ be ASCII, it's not uncommon for certain headers to be utf-8 encoded.
def _native(x: bytes) -> str:
return x.decode("utf-8", "surrogateescape")
def _always_bytes(x: str | bytes) -> bytes:
return strutils.always_bytes(x, "utf-8", "surrogateescape")
# This cannot be easily typed with mypy yet, so we just specify MultiDict without concrete types.
class Headers(multidict.MultiDict): # type: ignore
"""
Header class which allows both convenient access to individual headers as well as
direct access to the underlying raw data. Provides a full dictionary interface.
Create headers with keyword arguments:
>>> h = Headers(host="example.com", content_type="application/xml")
Headers mostly behave like a normal dict:
>>> h["Host"]
"example.com"
Headers are case insensitive:
>>> h["host"]
"example.com"
Headers can also be created from a list of raw (header_name, header_value) byte tuples:
>>> h = Headers([
(b"Host",b"example.com"),
(b"Accept",b"text/html"),
(b"accept",b"application/xml")
])
Multiple headers are folded into a single header as per RFC 7230:
>>> h["Accept"]
"text/html, application/xml"
Setting a header removes all existing headers with the same name:
>>> h["Accept"] = "application/text"
>>> h["Accept"]
"application/text"
`bytes(h)` returns an HTTP/1 header block:
>>> print(bytes(h))
Host: example.com
Accept: application/text
For full control, the raw header fields can be accessed:
>>> h.fields
Caveats:
- For use with the "Set-Cookie" and "Cookie" headers, either use `Response.cookies` or see `Headers.get_all`.
"""
def __init__(self, fields: Iterable[tuple[bytes, bytes]] = (), **headers):
"""
*Args:*
- *fields:* (optional) list of ``(name, value)`` header byte tuples,
e.g. ``[(b"Host", b"example.com")]``. All names and values must be bytes.
- *\\*\\*headers:* Additional headers to set. Will overwrite existing values from `fields`.
For convenience, underscores in header names will be transformed to dashes -
this behaviour does not extend to other methods.
If ``**headers`` contains multiple keys that have equal ``.lower()`` representations,
the behavior is undefined.
"""
super().__init__(fields)
for key, value in self.fields:
if not isinstance(key, bytes) or not isinstance(value, bytes):
raise TypeError("Header fields must be bytes.")
# content_type -> content-type
self.update(
{
_always_bytes(name).replace(b"_", b"-"): _always_bytes(value)
for name, value in headers.items()
}
)
fields: tuple[tuple[bytes, bytes], ...]
@staticmethod
def _reduce_values(values) -> str:
# Headers can be folded
return ", ".join(values)
@staticmethod
def _kconv(key) -> str:
# Headers are case-insensitive
return key.lower()
def __bytes__(self) -> bytes:
if self.fields:
return b"\r\n".join(b": ".join(field) for field in self.fields) + b"\r\n"
else:
return b""
def __delitem__(self, key: str | bytes) -> None:
key = _always_bytes(key)
super().__delitem__(key)
def __iter__(self) -> Iterator[str]:
for x in super().__iter__():
yield _native(x)
def get_all(self, name: str | bytes) -> list[str]:
"""
Like `Headers.get`, but does not fold multiple headers into a single one.
This is useful for Set-Cookie and Cookie headers, which do not support folding.
*See also:*
- <https://tools.ietf.org/html/rfc7230#section-3.2.2>
- <https://datatracker.ietf.org/doc/html/rfc6265#section-5.4>
- <https://datatracker.ietf.org/doc/html/rfc7540#section-8.1.2.5>
"""
name = _always_bytes(name)
return [_native(x) for x in super().get_all(name)]
def set_all(self, name: str | bytes, values: Iterable[str | bytes]):
"""
Explicitly set multiple headers for the given key.
See `Headers.get_all`.
"""
name = _always_bytes(name)
values = [_always_bytes(x) for x in values]
return super().set_all(name, values)
def insert(self, index: int, key: str | bytes, value: str | bytes):
key = _always_bytes(key)
value = _always_bytes(value)
super().insert(index, key, value)
def items(self, multi=False):
if multi:
return ((_native(k), _native(v)) for k, v in self.fields)
else:
return super().items()
@dataclass
class MessageData(serializable.Serializable):
http_version: bytes
headers: Headers
content: bytes | None
trailers: Headers | None
timestamp_start: float
timestamp_end: float | None
# noinspection PyUnreachableCode
if __debug__:
def __post_init__(self):
for field in fields(self):
val = getattr(self, field.name)
typecheck.check_option_type(field.name, val, field.type)
def set_state(self, state):
for k, v in state.items():
if k in ("headers", "trailers") and v is not None:
v = Headers.from_state(v)
setattr(self, k, v)
def get_state(self):
state = vars(self).copy()
state["headers"] = state["headers"].get_state()
if state["trailers"] is not None:
state["trailers"] = state["trailers"].get_state()
return state
@classmethod
def from_state(cls, state):
state["headers"] = Headers.from_state(state["headers"])
if state["trailers"] is not None:
state["trailers"] = Headers.from_state(state["trailers"])
return cls(**state)
@dataclass
class RequestData(MessageData):
host: str
port: int
method: bytes
scheme: bytes
authority: bytes
path: bytes
@dataclass
class ResponseData(MessageData):
status_code: int
reason: bytes
class Message(serializable.Serializable):
"""Base class for `Request` and `Response`."""
@classmethod
def from_state(cls, state):
return cls(**state)
def get_state(self):
return self.data.get_state()
def set_state(self, state):
self.data.set_state(state)
data: MessageData
stream: Callable[[bytes], Iterable[bytes] | bytes] | bool = False
"""
This attribute controls if the message body should be streamed.
If `False`, mitmproxy will buffer the entire body before forwarding it to the destination.
This makes it possible to perform string replacements on the entire body.
If `True`, the message body will not be buffered on the proxy
but immediately forwarded instead.
Alternatively, a transformation function can be specified, which will be called for each chunk of data.
Please note that packet boundaries generally should not be relied upon.
This attribute must be set in the `requestheaders` or `responseheaders` hook.
Setting it in `request` or `response` is already too late, mitmproxy has buffered the message body already.
"""
@property
def http_version(self) -> str:
"""
HTTP version string, for example `HTTP/1.1`.
"""
return self.data.http_version.decode("utf-8", "surrogateescape")
@http_version.setter
def http_version(self, http_version: str | bytes) -> None:
self.data.http_version = strutils.always_bytes(
http_version, "utf-8", "surrogateescape"
)
@property
def is_http10(self) -> bool:
return self.data.http_version == b"HTTP/1.0"
@property
def is_http11(self) -> bool:
return self.data.http_version == b"HTTP/1.1"
@property
def is_http2(self) -> bool:
return self.data.http_version == b"HTTP/2.0"
@property
def is_http3(self) -> bool:
return self.data.http_version == b"HTTP/3"
@property
def headers(self) -> Headers:
"""
The HTTP headers.
"""
return self.data.headers
@headers.setter
def headers(self, h: Headers) -> None:
self.data.headers = h
@property
def trailers(self) -> Headers | None:
"""
The [HTTP trailers](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Trailer).
"""
return self.data.trailers
@trailers.setter
def trailers(self, h: Headers | None) -> None:
self.data.trailers = h
@property
def raw_content(self) -> bytes | None:
"""
The raw (potentially compressed) HTTP message body.
In contrast to `Message.content` and `Message.text`, accessing this property never raises.
`raw_content` may be `None` if the content is missing, for example due to body streaming
(see `Message.stream`). In contrast, `b""` signals a present but empty message body.
*See also:* `Message.content`, `Message.text`
"""
return self.data.content
@raw_content.setter
def raw_content(self, content: bytes | None) -> None:
self.data.content = content
@property
def content(self) -> bytes | None:
"""
The uncompressed HTTP message body as bytes.
Accessing this attribute may raise a `ValueError` when the HTTP content-encoding is invalid.
*See also:* `Message.raw_content`, `Message.text`
"""
return self.get_content()
@content.setter
def content(self, value: bytes | None) -> None:
self.set_content(value)
@property
def text(self) -> str | None:
"""
The uncompressed and decoded HTTP message body as text.
Accessing this attribute may raise a `ValueError` when either content-encoding or charset is invalid.
*See also:* `Message.raw_content`, `Message.content`
"""
return self.get_text()
@text.setter
def text(self, value: str | None) -> None:
self.set_text(value)
def set_content(self, value: bytes | None) -> None:
if value is None:
self.raw_content = None
return
if not isinstance(value, bytes):
raise TypeError(
f"Message content must be bytes, not {type(value).__name__}. "
"Please use .text if you want to assign a str."
)
ce = self.headers.get("content-encoding")
try:
self.raw_content = encoding.encode(value, ce or "identity")
except ValueError:
# So we have an invalid content-encoding?
# Let's remove it!
del self.headers["content-encoding"]
self.raw_content = value
if "transfer-encoding" in self.headers:
# https://httpwg.org/specs/rfc7230.html#header.content-length
# don't set content-length if a transfer-encoding is provided
pass
else:
self.headers["content-length"] = str(len(self.raw_content))
def get_content(self, strict: bool = True) -> bytes | None:
"""
Similar to `Message.content`, but does not raise if `strict` is `False`.
Instead, the compressed message body is returned as-is.
"""
if self.raw_content is None:
return None
ce = self.headers.get("content-encoding")
if ce:
try:
content = encoding.decode(self.raw_content, ce)
# A client may illegally specify a byte -> str encoding here (e.g. utf8)
if isinstance(content, str):
raise ValueError(f"Invalid Content-Encoding: {ce}")
return content
except ValueError:
if strict:
raise
return self.raw_content
else:
return self.raw_content
def set_text(self, text: str | None) -> None:
if text is None:
self.content = None
return
enc = infer_content_encoding(self.headers.get("content-type", ""))
try:
self.content = cast(bytes, encoding.encode(text, enc))
except ValueError:
# Fall back to UTF-8 and update the content-type header.
ct = parse_content_type(self.headers.get("content-type", "")) or (
"text",
"plain",
{},
)
ct[2]["charset"] = "utf-8"
self.headers["content-type"] = assemble_content_type(*ct)
enc = "utf8"
self.content = text.encode(enc, "surrogateescape")
def get_text(self, strict: bool = True) -> str | None:
"""
Similar to `Message.text`, but does not raise if `strict` is `False`.
Instead, the message body is returned as surrogate-escaped UTF-8.
"""
content = self.get_content(strict)
if content is None:
return None
enc = infer_content_encoding(self.headers.get("content-type", ""), content)
try:
return cast(str, encoding.decode(content, enc))
except ValueError:
if strict:
raise
return content.decode("utf8", "surrogateescape")
@property
def timestamp_start(self) -> float:
"""
*Timestamp:* Headers received.
"""
return self.data.timestamp_start
@timestamp_start.setter
def timestamp_start(self, timestamp_start: float) -> None:
self.data.timestamp_start = timestamp_start
@property
def timestamp_end(self) -> float | None:
"""
*Timestamp:* Last byte received.
"""
return self.data.timestamp_end
@timestamp_end.setter
def timestamp_end(self, timestamp_end: float | None):
self.data.timestamp_end = timestamp_end
def decode(self, strict: bool = True) -> None:
"""
Decodes body based on the current Content-Encoding header, then
removes the header.
If the message body is missing or empty, no action is taken.
*Raises:*
- `ValueError`, when the content-encoding is invalid and strict is True.
"""
if not self.raw_content:
# The body is missing (for example, because of body streaming or because it's a response
# to a HEAD request), so we can't correctly update content-length.
return
decoded = self.get_content(strict)
self.headers.pop("content-encoding", None)
self.content = decoded
def encode(self, encoding: str) -> None:
"""
Encodes body with the given encoding, where e is "gzip", "deflate", "identity", "br", or "zstd".
Any existing content-encodings are overwritten, the content is not decoded beforehand.
*Raises:*
- `ValueError`, when the specified content-encoding is invalid.
"""
self.headers["content-encoding"] = encoding
self.content = self.raw_content
if "content-encoding" not in self.headers:
raise ValueError(f"Invalid content encoding {encoding!r}")
def json(self, **kwargs: Any) -> Any:
"""
Returns the JSON encoded content of the response, if any.
`**kwargs` are optional arguments that will be
passed to `json.loads()`.
Will raise if the content can not be decoded and then parsed as JSON.
*Raises:*
- `json.decoder.JSONDecodeError` if content is not valid JSON.
- `TypeError` if the content is not available, for example because the response
has been streamed.
"""
content = self.get_content(strict=False)
if content is None:
raise TypeError("Message content is not available.")
else:
return json.loads(content, **kwargs)
class Request(Message):
"""
An HTTP request.
"""
data: RequestData
def __init__(
self,
host: str,
port: int,
method: bytes,
scheme: bytes,
authority: bytes,
path: bytes,
http_version: bytes,
headers: Headers | tuple[tuple[bytes, bytes], ...],
content: bytes | None,
trailers: Headers | tuple[tuple[bytes, bytes], ...] | None,
timestamp_start: float,
timestamp_end: float | None,
):
# auto-convert invalid types to retain compatibility with older code.
if isinstance(host, bytes):
host = host.decode("idna", "strict")
if isinstance(method, str):
method = method.encode("ascii", "strict")
if isinstance(scheme, str):
scheme = scheme.encode("ascii", "strict")
if isinstance(authority, str):
authority = authority.encode("ascii", "strict")
if isinstance(path, str):
path = path.encode("ascii", "strict")
if isinstance(http_version, str):
http_version = http_version.encode("ascii", "strict")
if isinstance(content, str):
raise ValueError(f"Content must be bytes, not {type(content).__name__}")
if not isinstance(headers, Headers):
headers = Headers(headers)
if trailers is not None and not isinstance(trailers, Headers):
trailers = Headers(trailers)
self.data = RequestData(
host=host,
port=port,
method=method,
scheme=scheme,
authority=authority,
path=path,
http_version=http_version,
headers=headers,
content=content,
trailers=trailers,
timestamp_start=timestamp_start,
timestamp_end=timestamp_end,
)
def __repr__(self) -> str:
    # Compact summary: method plus "host:port/path" when the target is known.
    target = f"{self.host}:{self.port}" if self.host and self.port else ""
    return f"Request({self.method} {target}{self.path or ''})"
@classmethod
def make(
cls,
method: str,
url: str,
content: bytes | str = "",
headers: (
Headers | dict[str | bytes, str | bytes] | Iterable[tuple[bytes, bytes]]
) = (),
) -> "Request":
"""
Simplified API for creating request objects.
"""
# Headers can be list or dict, we differentiate here.
if isinstance(headers, Headers):
pass
elif isinstance(headers, dict):
headers = Headers(
(
always_bytes(k, "utf-8", "surrogateescape"),
always_bytes(v, "utf-8", "surrogateescape"),
)
for k, v in headers.items()
)
elif isinstance(headers, Iterable):
headers = Headers(headers) # type: ignore
else:
raise TypeError(
"Expected headers to be an iterable or dict, but is {}.".format(
type(headers).__name__
)
)
req = cls(
"",
0,
method.encode("utf-8", "surrogateescape"),
b"",
b"",
b"",
b"HTTP/1.1",
headers,
b"",
None,
time.time(),
time.time(),
)
req.url = url
# Assign this manually to update the content-length header.
if isinstance(content, bytes):
req.content = content
elif isinstance(content, str):
req.text = content
else:
raise TypeError(
f"Expected content to be str or bytes, but is {type(content).__name__}."
)
return req
@property
def first_line_format(self) -> str:
    """
    *Read-only:* request-target form as defined in
    [RFC 7230 Section 5.3](https://tools.ietf.org/html/rfc7230#section-5.3).
    Both origin-form and asterisk-form are reported as "relative".
    """
    if self.method == "CONNECT":
        return "authority"
    return "absolute" if self.authority else "relative"
@property
def method(self) -> str:
"""
HTTP request method, e.g. "GET".
"""
return self.data.method.decode("utf-8", "surrogateescape").upper()
@method.setter
def method(self, val: str | bytes) -> None:
self.data.method = always_bytes(val, "utf-8", "surrogateescape")
@property
def scheme(self) -> str:
"""
HTTP request scheme, which should be "http" or "https".
"""
return self.data.scheme.decode("utf-8", "surrogateescape")
@scheme.setter
def scheme(self, val: str | bytes) -> None:
self.data.scheme = always_bytes(val, "utf-8", "surrogateescape")
@property
def authority(self) -> str:
"""
HTTP request authority.
For HTTP/1, this is the authority portion of the request target
(in either absolute-form or authority-form).
For origin-form and asterisk-form requests, this property is set to an empty string.
For HTTP/2, this is the :authority pseudo header.
*See also:* `Request.host`, `Request.host_header`, `Request.pretty_host`
"""
try:
return self.data.authority.decode("idna")
except UnicodeError:
return self.data.authority.decode("utf8", "surrogateescape")
@authority.setter
def authority(self, val: str | bytes) -> None:
if isinstance(val, str):
try:
val = val.encode("idna", "strict")
except UnicodeError:
val = val.encode("utf8", "surrogateescape") # type: ignore
self.data.authority = val
@property
def host(self) -> str:
"""
Target server for this request. This may be parsed from the raw request
(e.g. from a ``GET http://example.com/ HTTP/1.1`` request line)
or inferred from the proxy mode (e.g. an IP in transparent mode).
Setting the host attribute also updates the host header and authority information, if present.
*See also:* `Request.authority`, `Request.host_header`, `Request.pretty_host`
"""
return self.data.host
@host.setter
def host(self, val: str | bytes) -> None:
self.data.host = always_str(val, "idna", "strict")
self._update_host_and_authority()
@property
def host_header(self) -> str | None:
    """
    The request's host/authority header.
    This property maps to either ``request.headers["Host"]`` or
    ``request.authority``, depending on whether it's HTTP/1.x or HTTP/2.0.
    *See also:* `Request.authority`,`Request.host`, `Request.pretty_host`
    """
    if self.is_http2 or self.is_http3:
        # h2/h3 carry the host in the :authority pseudo-header; fall back to
        # a plain Host header when :authority is absent.
        return self.authority or self.data.headers.get("Host", None)
    else:
        return self.data.headers.get("Host", None)

@host_header.setter
def host_header(self, val: None | str | bytes) -> None:
    if val is None:
        # Remove host information entirely.
        if self.is_http2 or self.is_http3:
            self.data.authority = b""
        self.headers.pop("Host", None)
    else:
        if self.is_http2 or self.is_http3:
            self.authority = val  # type: ignore
        if not (self.is_http2 or self.is_http3) or "Host" in self.headers:
            # For h2, we only overwrite, but not create, as :authority is the h2 host header.
            self.headers["Host"] = val
@property
def port(self) -> int:
"""
Target port.
"""
return self.data.port
@port.setter
def port(self, port: int) -> None:
if not isinstance(port, int):
raise ValueError(f"Port must be an integer, not {port!r}.")
self.data.port = port
self._update_host_and_authority()
def _update_host_and_authority(self) -> None:
val = url.hostport(self.scheme, self.host, self.port)
# Update host header
if "Host" in self.data.headers:
self.data.headers["Host"] = val
# Update authority
if self.data.authority:
self.authority = val
@property
def path(self) -> str:
"""
HTTP request path, e.g. "/index.html" or "/index.html?a=b".
Usually starts with a slash, except for OPTIONS requests, which may just be "*".
This attribute includes both path and query parts of the target URI
(see Sections 3.3 and 3.4 of [RFC3986](https://datatracker.ietf.org/doc/html/rfc3986)).
"""
return self.data.path.decode("utf-8", "surrogateescape")
@path.setter
def path(self, val: str | bytes) -> None:
self.data.path = always_bytes(val, "utf-8", "surrogateescape")
@property
def url(self) -> str:
"""
The full URL string, constructed from `Request.scheme`, `Request.host`, `Request.port` and `Request.path`.
Settings this property updates these attributes as well.
"""
if self.first_line_format == "authority":
return f"{self.host}:{self.port}"
path = self.path if self.path != "*" else ""
return url.unparse(self.scheme, self.host, self.port, path)
@url.setter
def url(self, val: str | bytes) -> None:
val = always_str(val, "utf-8", "surrogateescape")
self.scheme, self.host, self.port, self.path = url.parse(val) # type: ignore
@property
def pretty_host(self) -> str:
"""
*Read-only:* Like `Request.host`, but using `Request.host_header` header as an additional (preferred) data source.
This is useful in transparent mode where `Request.host` is only an IP address.
*Warning:* When working in adversarial environments, this may not reflect the actual destination
as the Host header could be spoofed.
"""
authority = self.host_header
if authority:
return url.parse_authority(authority, check=False)[0]
else:
return self.host
@property
def pretty_url(self) -> str:
    """
    *Read-only:* Like `Request.url`, but using `Request.pretty_host` instead of `Request.host`.
    """
    if self.first_line_format == "authority":
        # CONNECT requests have no path; the target is the authority itself.
        return self.authority
    host_header = self.host_header
    if not host_header:
        return self.url
    pretty_host, pretty_port = url.parse_authority(host_header, check=False)
    # NOTE(review): when the scheme has no default port this falls back to
    # 443 even for non-https schemes — confirm this is intentional.
    pretty_port = pretty_port or url.default_port(self.scheme) or 443
    path = self.path if self.path != "*" else ""
    return url.unparse(self.scheme, pretty_host, pretty_port, path)
def _get_query(self):
query = urllib.parse.urlparse(self.url).query
return tuple(url.decode(query))
def _set_query(self, query_data):
query = url.encode(query_data)
_, _, path, params, _, fragment = urllib.parse.urlparse(self.url)
self.path = urllib.parse.urlunparse(["", "", path, params, query, fragment])
@property
def query(self) -> multidict.MultiDictView[str, str]:
"""
The request query as a mutable mapping view on the request's path.
For the most part, this behaves like a dictionary.
Modifications to the MultiDictView update `Request.path`, and vice versa.
"""
return multidict.MultiDictView(self._get_query, self._set_query)
@query.setter
def query(self, value):
self._set_query(value)
def _get_cookies(self):
h = self.headers.get_all("Cookie")
return tuple(cookies.parse_cookie_headers(h))
def _set_cookies(self, value):
self.headers["cookie"] = cookies.format_cookie_header(value)
@property
def cookies(self) -> multidict.MultiDictView[str, str]:
"""
The request cookies.
For the most part, this behaves like a dictionary.
Modifications to the MultiDictView update `Request.headers`, and vice versa.
"""
return multidict.MultiDictView(self._get_cookies, self._set_cookies)
@cookies.setter
def cookies(self, value):
self._set_cookies(value)
@property
def path_components(self) -> tuple[str, ...]:
"""
The URL's path components as a tuple of strings.
Components are unquoted.
"""
path = urllib.parse.urlparse(self.url).path
# This needs to be a tuple so that it's immutable.
# Otherwise, this would fail silently:
# request.path_components.append("foo")
return tuple(url.unquote(i) for i in path.split("/") if i)
@path_components.setter
def path_components(self, components: Iterable[str]):
components = map(lambda x: url.quote(x, safe=""), components)
path = "/" + "/".join(components)
_, _, _, params, query, fragment = urllib.parse.urlparse(self.url)
self.path = urllib.parse.urlunparse(["", "", path, params, query, fragment])
def anticache(self) -> None:
    """
    Strip conditional-request headers (If-Modified-Since, If-None-Match)
    so the server cannot answer with a cached/304 response.
    """
    for header in ("if-modified-since", "if-none-match"):
        self.headers.pop(header, None)
def anticomp(self) -> None:
"""
Modify the Accept-Encoding header to only accept uncompressed responses.
"""
self.headers["accept-encoding"] = "identity"
def constrain_encoding(self) -> None:
    """
    Limits the permissible Accept-Encoding values, based on what we can decode appropriately.
    """
    accept_encoding = self.headers.get("accept-encoding")
    if accept_encoding:
        # Iterate a fixed tuple (not a set literal) so the rewritten header
        # has a deterministic order across runs; set iteration order varies
        # with string hash randomization, which made the emitted header
        # value nondeterministic. Order also expresses a stable preference.
        self.headers["accept-encoding"] = ", ".join(
            e
            for e in ("gzip", "identity", "deflate", "br", "zstd")
            if e in accept_encoding
        )
def _get_urlencoded_form(self):
is_valid_content_type = (
"application/x-www-form-urlencoded"
in self.headers.get("content-type", "").lower()
)
if is_valid_content_type:
return tuple(url.decode(self.get_text(strict=False)))
return ()
def _set_urlencoded_form(self, form_data: Sequence[tuple[str, str]]) -> None:
"""
Sets the body to the URL-encoded form data, and adds the appropriate content-type header.
This will overwrite the existing content if there is one.
"""
self.headers["content-type"] = "application/x-www-form-urlencoded"
self.content = url.encode(form_data, self.get_text(strict=False)).encode()
@property
def urlencoded_form(self) -> multidict.MultiDictView[str, str]:
"""
The URL-encoded form data.
If the content-type indicates non-form data or the form could not be parsed, this is set to
an empty `MultiDictView`.
Modifications to the MultiDictView update `Request.content`, and vice versa.
"""
return multidict.MultiDictView(
self._get_urlencoded_form, self._set_urlencoded_form
)
@urlencoded_form.setter
def urlencoded_form(self, value):
self._set_urlencoded_form(value)
def _get_multipart_form(self) -> list[tuple[bytes, bytes]]:
is_valid_content_type = (
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | true |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/command_lexer.py | mitmproxy/command_lexer.py | import re
import pyparsing
# TODO: There is a lot of work to be done here.
# The current implementation is written in a way that _any_ input is valid,
# which does not make sense once things get more complex.
PartialQuotedString = pyparsing.Regex(
re.compile(
r"""
"[^"]*(?:"|$) # double-quoted string that ends with double quote or EOF
|
'[^']*(?:'|$) # single-quoted string that ends with double quote or EOF
""",
re.VERBOSE,
)
)
expr = pyparsing.ZeroOrMore(
PartialQuotedString
| pyparsing.Word(" \r\n\t")
| pyparsing.CharsNotIn("""'" \r\n\t""")
).leaveWhitespace()
def quote(val: str) -> str:
    """
    Quote *val* for the command lexer when it needs quoting.

    Values without quote characters or whitespace pass through unchanged.
    Otherwise the value is wrapped in whichever quote character it does not
    contain; if it contains both, embedded double quotes are replaced with a
    literal ``\\x22`` escape and the result is double-quoted.
    """
    needs_quoting = not val or any(ch in val for ch in "'\" \r\n\t")
    if not needs_quoting:
        return val
    if '"' not in val:
        return '"' + val + '"'
    if "'" not in val:
        return "'" + val + "'"
    return '"' + val.replace('"', r"\x22") + '"'
def unquote(x: str) -> str:
    """Strip one matching pair of surrounding quotes, if present."""
    has_matching_quotes = len(x) > 1 and x[0] == x[-1] and x[0] in "'\""
    return x[1:-1] if has_matching_quotes else x
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/addonmanager.py | mitmproxy/addonmanager.py | import contextlib
import inspect
import logging
import pprint
import sys
import traceback
import types
from collections.abc import Callable
from collections.abc import Sequence
from dataclasses import dataclass
from typing import Any
from mitmproxy import exceptions
from mitmproxy import flow
from mitmproxy import hooks
logger = logging.getLogger(__name__)
def _get_name(itm):
return getattr(itm, "name", itm.__class__.__name__.lower())
def cut_traceback(tb, func_name):
    """
    Cut off a traceback at the function with the given name;
    the func_name frame itself is excluded.

    Args:
        tb: traceback object, as returned by sys.exc_info()[2]
        func_name: function name to cut at

    Returns:
        The reduced traceback, or the original traceback when func_name
        does not occur in it.
    """
    original = tb
    for frame in traceback.extract_tb(tb):
        tb = tb.tb_next
        if frame.name == func_name:
            break
    # If func_name never appeared, tb ran off the end (None) — return the
    # untouched traceback instead.
    return tb if tb else original
@contextlib.contextmanager
def safecall():
    # Runs the wrapped addon code and logs any unexpected exception, with
    # the traceback trimmed so it points at the addon's own frames.
    try:
        yield
    except (exceptions.AddonHalt, exceptions.OptionsError):
        # Control-flow exceptions: propagate to the caller unchanged.
        raise
    except Exception:
        etype, value, tb = sys.exc_info()
        # Drop the invoke_addon* dispatch frames from the reported traceback.
        tb = cut_traceback(tb, "invoke_addon_sync")
        tb = cut_traceback(tb, "invoke_addon")
        assert etype
        assert value
        logger.error(
            f"Addon error: {value}",
            exc_info=(etype, value, tb),
        )
class Loader:
    """
    A loader object is passed to the load() event when addons start up.
    """

    def __init__(self, master):
        # The running master; gives access to options and commands.
        self.master = master

    def add_option(
        self,
        name: str,
        typespec: type,
        default: Any,
        help: str,
        choices: Sequence[str] | None = None,
    ) -> None:
        """
        Add an option to mitmproxy.
        Help should be a single paragraph with no linebreaks - it will be
        reflowed by tools. Information on the data type should be omitted -
        it will be generated and added by tools as needed.
        """
        # Guard against the common mistake of passing a bare string as choices.
        assert not isinstance(choices, str)
        if name in self.master.options:
            existing = self.master.options._options[name]
            # Re-adding an identical option is a silent no-op; only warn when
            # the definition actually differs.
            same_signature = (
                existing.name == name
                and existing.typespec == typespec
                and existing.default == default
                and existing.help == help
                and existing.choices == choices
            )
            if same_signature:
                return
            else:
                logger.warning("Over-riding existing option %s" % name)
        self.master.options.add_option(name, typespec, default, help, choices)

    def add_command(self, path: str, func: Callable) -> None:
        """Add a command to mitmproxy.
        Unless you are generating commands programatically,
        this API should be avoided. Decorate your function with `@mitmproxy.command.command` instead.
        """
        self.master.commands.add(path, func)
def traverse(chain):
    """
    Depth-first generator over an addon chain: yields each addon, then
    recurses into its ``addons`` attribute when present.
    """
    for addon in chain:
        yield addon
        if hasattr(addon, "addons"):
            yield from traverse(addon.addons)
@dataclass
class LoadHook(hooks.Hook):
    """
    Called when an addon is first loaded. This event receives a Loader
    object, which contains methods for adding options and commands. This
    method is where the addon configures itself.
    """

    # The Loader instance handed to the addon's load() handler.
    loader: Loader
class AddonManager:
    """
    Manages the chain of loaded addons: registration, lookup by name, and
    dispatch of hook events (both sync and async) down the chain.
    """

    def __init__(self, master):
        # name -> addon instance, for every (sub-)addon ever registered.
        self.lookup = {}
        # Top-level addons, in dispatch order.
        self.chain = []
        self.master = master
        master.options.changed.connect(self._configure_all)

    def _configure_all(self, updated):
        # Relay option changes to every addon as a configure hook.
        self.trigger(hooks.ConfigureHook(updated))

    def clear(self):
        """
        Remove all addons.
        """
        for a in self.chain:
            self.invoke_addon_sync(a, hooks.DoneHook())
        self.lookup = {}
        self.chain = []

    def get(self, name):
        """
        Retrieve an addon by name. Addon names are equal to the .name
        attribute on the instance, or the lower case class name if that
        does not exist.
        """
        return self.lookup.get(name, None)

    def register(self, addon):
        """
        Register an addon, call its load event, and then register all its
        sub-addons. This should be used by addons that dynamically manage
        addons.
        If the calling addon is already running, it should follow with
        running and configure events. Must be called within a current
        context.
        """
        # Removed/deprecated hook names mapped to migration messages.
        # (Plain strings — the originals carried pointless f-prefixes with
        # no placeholders, flagged by ruff F541; bytes are unchanged.)
        api_changes = {
            # mitmproxy 6 -> mitmproxy 7
            "clientconnect": "The clientconnect event has been removed, use client_connected instead",
            "clientdisconnect": "The clientdisconnect event has been removed, use client_disconnected instead",
            "serverconnect": "The serverconnect event has been removed, use server_connect and server_connected instead",
            "serverdisconnect": "The serverdisconnect event has been removed, use server_disconnected instead",
            # mitmproxy 8 -> mitmproxy 9
            "add_log": "The add_log event has been deprecated, use Python's builtin logging module instead",
        }
        # Validate the whole subtree before mutating any state.
        for a in traverse([addon]):
            for old, msg in api_changes.items():
                if hasattr(a, old):
                    logger.warning(
                        f"{msg}. For more details, see https://docs.mitmproxy.org/dev/addons-api-changelog/."
                    )
            name = _get_name(a)
            if name in self.lookup:
                raise exceptions.AddonManagerError(
                    "An addon called '%s' already exists." % name
                )
        loader = Loader(self.master)
        self.invoke_addon_sync(addon, LoadHook(loader))
        for a in traverse([addon]):
            name = _get_name(a)
            self.lookup[name] = a
        for a in traverse([addon]):
            self.master.commands.collect_commands(a)
        self.master.options.process_deferred()
        return addon

    def add(self, *addons):
        """
        Add addons to the end of the chain, and run their load event.
        If any addon has sub-addons, they are registered.
        """
        for i in addons:
            self.chain.append(self.register(i))

    def remove(self, addon):
        """
        Remove an addon and all its sub-addons.
        If the addon is not in the chain - that is, if it's managed by a
        parent addon - it's the parent's responsibility to remove it from
        its own addons attribute.
        """
        for a in traverse([addon]):
            n = _get_name(a)
            if n not in self.lookup:
                raise exceptions.AddonManagerError("No such addon: %s" % n)
            self.chain = [i for i in self.chain if i is not a]
            del self.lookup[_get_name(a)]
        self.invoke_addon_sync(addon, hooks.DoneHook())

    def __len__(self):
        return len(self.chain)

    def __str__(self):
        return pprint.pformat([str(i) for i in self.chain])

    def __contains__(self, item):
        name = _get_name(item)
        return name in self.lookup

    async def handle_lifecycle(self, event: hooks.Hook):
        """
        Handle a lifecycle event.
        """
        message = event.args()[0]
        await self.trigger_event(event)
        if isinstance(message, flow.Flow):
            # Tell addons the flow may have been modified by the hook above.
            await self.trigger_event(hooks.UpdateHook([message]))

    def _iter_hooks(self, addon, event: hooks.Hook):
        """
        Enumerate all hook callables belonging to the given addon
        """
        assert isinstance(event, hooks.Hook)
        for a in traverse([addon]):
            func = getattr(a, event.name, None)
            if func:
                if callable(func):
                    yield a, func
                elif isinstance(func, types.ModuleType):
                    # we gracefully exclude module imports with the same name as hooks.
                    # For example, a user may have "from mitmproxy import log" in an addon,
                    # which has the same name as the "log" hook. In this particular case,
                    # we end up in an error loop because we "log" this error.
                    pass
                else:
                    raise exceptions.AddonManagerError(
                        f"Addon handler {event.name} ({a}) not callable"
                    )

    async def invoke_addon(self, addon, event: hooks.Hook):
        """
        Asynchronously invoke an event on an addon and all its children.
        """
        for addon, func in self._iter_hooks(addon, event):
            res = func(*event.args())
            # Support both async and sync hook functions
            if res is not None and inspect.isawaitable(res):
                await res

    def invoke_addon_sync(self, addon, event: hooks.Hook):
        """
        Invoke an event on an addon and all its children.
        """
        for addon, func in self._iter_hooks(addon, event):
            if inspect.iscoroutinefunction(func):
                raise exceptions.AddonManagerError(
                    f"Async handler {event.name} ({addon}) cannot be called from sync context"
                )
            func(*event.args())

    async def trigger_event(self, event: hooks.Hook):
        """
        Asynchronously trigger an event across all addons.
        """
        for i in self.chain:
            try:
                with safecall():
                    await self.invoke_addon(i, event)
            except exceptions.AddonHalt:
                return

    def trigger(self, event: hooks.Hook):
        """
        Trigger an event across all addons.
        This API is discouraged and may be deprecated in the future.
        Use `trigger_event()` instead, which provides the same functionality but supports async hooks.
        """
        for i in self.chain:
            try:
                with safecall():
                    self.invoke_addon_sync(i, event)
            except exceptions.AddonHalt:
                return
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/websocket.py | mitmproxy/websocket.py | """
Mitmproxy used to have its own WebSocketFlow type until mitmproxy 6, but now WebSocket connections now are represented
as HTTP flows as well. They can be distinguished from regular HTTP requests by having the
`mitmproxy.http.HTTPFlow.websocket` attribute set.
This module only defines the classes for individual `WebSocketMessage`s and the `WebSocketData` container.
"""
import time
import warnings
from dataclasses import dataclass
from dataclasses import field
from wsproto.frame_protocol import Opcode
from mitmproxy.coretypes import serializable
WebSocketMessageState = tuple[int, bool, bytes, float, bool, bool]
class WebSocketMessage(serializable.Serializable):
    """
    A single WebSocket message sent from one peer to the other.
    Fragmented WebSocket messages are reassembled by mitmproxy and then
    represented as a single instance of this class.
    The [WebSocket RFC](https://tools.ietf.org/html/rfc6455) specifies both
    text and binary messages. To avoid a whole class of nasty type confusion bugs,
    mitmproxy stores all message contents as `bytes`. If you need a `str`, you can access the `text` property
    on text messages:
    >>> if message.is_text:
    >>>     text = message.text
    """

    from_client: bool
    """True if this messages was sent by the client."""

    type: Opcode
    """
    The message type, as per RFC 6455's [opcode](https://tools.ietf.org/html/rfc6455#section-5.2).
    Mitmproxy currently only exposes messages assembled from `TEXT` and `BINARY` frames.
    """

    content: bytes
    """A byte-string representing the content of this message."""

    timestamp: float
    """Timestamp of when this message was received or created."""

    dropped: bool
    """True if the message has not been forwarded by mitmproxy, False otherwise."""

    injected: bool
    """True if the message was injected and did not originate from a client/server, False otherwise"""

    def __init__(
        self,
        type: int | Opcode,
        from_client: bool,
        content: bytes,
        timestamp: float | None = None,
        dropped: bool = False,
        injected: bool = False,
    ) -> None:
        self.from_client = from_client
        # Normalize raw ints (e.g. from deserialized state) to the enum.
        self.type = Opcode(type)
        self.content = content
        # Default to "now" when no timestamp is supplied.
        self.timestamp: float = timestamp or time.time()
        self.dropped = dropped
        self.injected = injected

    @classmethod
    def from_state(cls, state: WebSocketMessageState):
        # Inverse of get_state(): the tuple fields match __init__'s order.
        return cls(*state)

    def get_state(self) -> WebSocketMessageState:
        return (
            int(self.type),
            self.from_client,
            self.content,
            self.timestamp,
            self.dropped,
            self.injected,
        )

    def set_state(self, state: WebSocketMessageState) -> None:
        (
            typ,
            self.from_client,
            self.content,
            self.timestamp,
            self.dropped,
            self.injected,
        ) = state
        # The opcode round-trips as a plain int; re-wrap it in the enum.
        self.type = Opcode(typ)

    def _format_ws_message(self) -> bytes:
        # Direction-prefixed dump of the raw content.
        if self.from_client:
            return b"[OUTGOING] " + self.content
        else:
            return b"[INCOMING] " + self.content

    def __repr__(self):
        if self.type == Opcode.TEXT:
            return repr(self.content.decode(errors="replace"))
        else:
            return repr(self.content)

    @property
    def is_text(self) -> bool:
        """
        `True` if this message is assembled from WebSocket `TEXT` frames,
        `False` if it is assembled from `BINARY` frames.
        """
        return self.type == Opcode.TEXT

    def drop(self):
        """Drop this message, i.e. don't forward it to the other peer."""
        self.dropped = True

    def kill(self):  # pragma: no cover
        """A deprecated alias for `.drop()`."""
        warnings.warn(
            "WebSocketMessage.kill() is deprecated, use .drop() instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        self.drop()

    @property
    def text(self) -> str:
        """
        The message content as text.
        This attribute is only available if `WebSocketMessage.is_text` is `True`.
        *See also:* `WebSocketMessage.content`
        """
        if self.type != Opcode.TEXT:
            raise AttributeError(
                f"{self.type.name.title()} WebSocket frames do not have a 'text' attribute."
            )
        return self.content.decode()

    @text.setter
    def text(self, value: str) -> None:
        if self.type != Opcode.TEXT:
            raise AttributeError(
                f"{self.type.name.title()} WebSocket frames do not have a 'text' attribute."
            )
        self.content = value.encode()
@dataclass
class WebSocketData(serializable.SerializableDataclass):
    """
    A data container for everything related to a single WebSocket connection.
    This is typically accessed as `mitmproxy.http.HTTPFlow.websocket`.
    """

    messages: list[WebSocketMessage] = field(default_factory=list)
    """All `WebSocketMessage`s transferred over this connection."""

    closed_by_client: bool | None = None
    """
    `True` if the client closed the connection,
    `False` if the server closed the connection,
    `None` if the connection is active.
    """

    close_code: int | None = None
    """[Close Code](https://tools.ietf.org/html/rfc6455#section-7.1.5)"""

    close_reason: str | None = None
    """[Close Reason](https://tools.ietf.org/html/rfc6455#section-7.1.6)"""

    timestamp_end: float | None = None
    """*Timestamp:* WebSocket connection closed."""

    def __repr__(self):
        return f"<WebSocketData ({len(self.messages)} messages)>"

    def _get_formatted_messages(self) -> bytes:
        # Newline-joined dump of every message, with direction prefixes.
        return b"\n".join(m._format_ws_message() for m in self.messages)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/exceptions.py | mitmproxy/exceptions.py | """
Edit 2020-12 @mhils:
The advice below hasn't paid off in any form. We now just use builtin exceptions and specialize where necessary.
---
We try to be very hygienic regarding the exceptions we throw:
- Every exception that might be externally visible to users shall be a subclass
of MitmproxyException.p
- Every exception in the base net module shall be a subclass
of NetlibException, and will not be propagated directly to users.
See also: http://lucumr.pocoo.org/2014/10/16/on-error-handling/
"""
class MitmproxyException(Exception):
    """
    Base class for all exceptions thrown by mitmproxy.
    """

    def __init__(self, message=None):
        super().__init__(message)


class FlowReadException(MitmproxyException):
    # Presumably raised when a flow dump cannot be read — confirm at call sites.
    pass


class ControlException(MitmproxyException):
    pass


class CommandError(Exception):
    # NOTE(review): subclasses Exception directly, unlike its siblings —
    # confirm this is intentional.
    pass


class OptionsError(MitmproxyException):
    pass


class AddonManagerError(MitmproxyException):
    pass


class AddonHalt(MitmproxyException):
    """
    Raised by addons to signal that no further handlers should handle this event.
    """
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tcp.py | mitmproxy/tcp.py | import time
from mitmproxy import connection
from mitmproxy import flow
from mitmproxy.coretypes import serializable
class TCPMessage(serializable.Serializable):
    """
    An individual TCP "message".
    Note that TCP is *stream-based* and not *message-based*.
    For practical purposes the stream is chunked into messages here,
    but you should not rely on message boundaries.
    """

    def __init__(self, from_client, content, timestamp=None):
        # True if the chunk was sent by the client, False for the server.
        self.from_client = from_client
        # Raw payload of this chunk.
        self.content = content
        # Defaults to the current time when not supplied.
        self.timestamp = timestamp or time.time()

    @classmethod
    def from_state(cls, state):
        # Inverse of get_state(): rebuild a message from its state tuple.
        return cls(*state)

    def get_state(self):
        # Serialized as a (from_client, content, timestamp) tuple.
        return self.from_client, self.content, self.timestamp

    def set_state(self, state):
        self.from_client, self.content, self.timestamp = state

    def __repr__(self):
        # "->" marks client-to-server traffic, "<-" the reverse.
        return "{direction} {content}".format(
            direction="->" if self.from_client else "<-", content=repr(self.content)
        )
class TCPFlow(flow.Flow):
    """
    A TCPFlow is a simplified representation of a TCP session.
    """

    messages: list[TCPMessage]
    """
    The messages transmitted over this connection.
    The latest message can be accessed as `flow.messages[-1]` in event hooks.
    """

    def __init__(
        self,
        client_conn: connection.Client,
        server_conn: connection.Server,
        live: bool = False,
    ):
        super().__init__(client_conn, server_conn, live)
        self.messages = []

    def get_state(self) -> serializable.State:
        # Extend the base flow state with this flow's serialized messages.
        return {
            **super().get_state(),
            "messages": [m.get_state() for m in self.messages],
        }

    def set_state(self, state: serializable.State) -> None:
        # Consume "messages" first so the base class only sees its own keys.
        self.messages = [TCPMessage.from_state(m) for m in state.pop("messages")]
        super().set_state(state)

    def __repr__(self):
        return f"<TCPFlow ({len(self.messages)} messages)>"
__all__ = [
"TCPFlow",
"TCPMessage",
]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/version.py | mitmproxy/version.py | import os
import subprocess
import sys
VERSION = "13.0.0.dev"
MITMPROXY = "mitmproxy " + VERSION
# Serialization format version. This is displayed nowhere, it just needs to be incremented by one
# for each change in the file format.
FLOW_FORMAT_VERSION = 21
def get_dev_version() -> str:
    """
    Return a detailed version string, sourced either from VERSION or obtained dynamically using git.
    """
    mitmproxy_version = VERSION
    # Repository root: one level above this file's directory.
    here = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
    try:  # pragma: no cover
        # Check that we're in the mitmproxy repository: https://github.com/mitmproxy/mitmproxy/issues/3987
        # cb0e3287090786fad566feb67ac07b8ef361b2c3 is the first mitmproxy commit.
        subprocess.run(
            ["git", "cat-file", "-e", "cb0e3287090786fad566feb67ac07b8ef361b2c3"],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
            cwd=here,
            check=True,
        )
        git_describe = subprocess.check_output(
            ["git", "describe", "--tags", "--long"],
            stderr=subprocess.STDOUT,
            cwd=here,
        )
        # "<tag>-<commits-since-tag>-g<sha>"; NOTE: last_tag is unused.
        last_tag, tag_dist_str, commit = git_describe.decode().strip().rsplit("-", 2)
        commit = commit.lstrip("g")[:7]
        tag_dist = int(tag_dist_str)
    except Exception:
        # Not a git checkout, or git unavailable: fall back to plain VERSION.
        pass
    else:
        # Add commit info for non-tagged releases
        if tag_dist > 0:
            mitmproxy_version += f" (+{tag_dist}, commit {commit})"
    # PyInstaller build indicator, if using precompiled binary
    if getattr(sys, "frozen", False):
        mitmproxy_version += " binary"
    return mitmproxy_version
if __name__ == "__main__": # pragma: no cover
print(VERSION)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/command.py | mitmproxy/command.py | """
This module manages and invokes typed commands.
"""
import functools
import inspect
import logging
import sys
import textwrap
import types
from collections.abc import Callable
from collections.abc import Iterable
from collections.abc import Sequence
from typing import Any
from typing import NamedTuple
import pyparsing
import mitmproxy.types
from mitmproxy import command_lexer
from mitmproxy import exceptions
from mitmproxy.command_lexer import unquote
def verify_arg_signature(f: Callable, args: Iterable[Any], kwargs: dict) -> None:
sig = inspect.signature(f, eval_str=True)
try:
sig.bind(*args, **kwargs)
except TypeError as v:
raise exceptions.CommandError("command argument mismatch: %s" % v.args[0])
def typename(t: type) -> str:
    """
    Translates a type to an explanatory string.
    """
    # inspect._empty marks a missing annotation.
    if t == inspect._empty:  # type: ignore
        raise exceptions.CommandError("missing type annotation")
    command_type = mitmproxy.types.CommandTypes.get(t, None)
    if not command_type:
        raise exceptions.CommandError(
            "unsupported type: %s" % getattr(t, "__name__", t)
        )
    return command_type.display
def _empty_as_none(x: Any) -> Any:
if x == inspect.Signature.empty:
return None
return x
class CommandParameter(NamedTuple):
    """A single parameter of a command, as shown in help and used during parsing."""

    name: str
    type: type
    kind: inspect._ParameterKind = inspect.Parameter.POSITIONAL_OR_KEYWORD

    def __str__(self):
        # Variadic parameters are rendered with a star prefix, like in Python.
        if self.kind is not inspect.Parameter.VAR_POSITIONAL:
            return self.name
        return f"*{self.name}"
class Command:
    """A single registered command: a callable plus its validated, typed signature."""

    name: str
    manager: "CommandManager"
    signature: inspect.Signature
    help: str | None

    def __init__(self, manager: "CommandManager", name: str, func: Callable) -> None:
        self.name = name
        self.manager = manager
        self.func = func
        # eval_str resolves string (PEP 563) annotations into actual type objects.
        self.signature = inspect.signature(self.func, eval_str=True)

        if func.__doc__:
            txt = func.__doc__.strip()
            self.help = "\n".join(textwrap.wrap(txt))
        else:
            self.help = None

        # This fails with a CommandError if types are invalid
        for name, parameter in self.signature.parameters.items():
            t = parameter.annotation
            if not mitmproxy.types.CommandTypes.get(parameter.annotation, None):
                raise exceptions.CommandError(
                    f"Argument {name} has an unknown type {t} in {func}."
                )
        if self.return_type and not mitmproxy.types.CommandTypes.get(
            self.return_type, None
        ):
            raise exceptions.CommandError(
                f"Return type has an unknown type ({self.return_type}) in {func}."
            )

    @property
    def return_type(self) -> type | None:
        # None when the wrapped function has no return annotation.
        return _empty_as_none(self.signature.return_annotation)

    @property
    def parameters(self) -> list[CommandParameter]:
        """Returns a list of CommandParameters."""
        ret = []
        for name, param in self.signature.parameters.items():
            ret.append(CommandParameter(name, param.annotation, param.kind))
        return ret

    def signature_help(self) -> str:
        """One-line usage string, e.g. "view.flows.resolve specs -> [flow]"."""
        params = " ".join(str(param) for param in self.parameters)
        if self.return_type:
            ret = f" -> {typename(self.return_type)}"
        else:
            ret = ""
        return f"{self.name} {params}{ret}"

    def prepare_args(self, args: Sequence[str]) -> inspect.BoundArguments:
        """Bind string arguments to the signature and convert each to its annotated type."""
        try:
            bound_arguments = self.signature.bind(*args)
        except TypeError:
            expected = f"Expected: {self.signature.parameters}"
            received = f"Received: {args}"
            raise exceptions.CommandError(
                f"Command argument mismatch: \n  {expected}\n  {received}"
            )

        for name, value in bound_arguments.arguments.items():
            param = self.signature.parameters[name]
            convert_to = param.annotation
            # *args-style parameters arrive as a tuple; convert each element.
            if param.kind == param.VAR_POSITIONAL:
                bound_arguments.arguments[name] = tuple(
                    parsearg(self.manager, x, convert_to) for x in value
                )
            else:
                bound_arguments.arguments[name] = parsearg(
                    self.manager, value, convert_to
                )

        bound_arguments.apply_defaults()

        return bound_arguments

    def call(self, args: Sequence[str]) -> Any:
        """
        Call the command with a list of arguments. At this point, all
        arguments are strings.
        """
        bound_args = self.prepare_args(args)
        ret = self.func(*bound_args.args, **bound_args.kwargs)
        if ret is None and self.return_type is None:
            return
        # Validate that the command returned what its annotation promised.
        typ = mitmproxy.types.CommandTypes.get(self.return_type)
        assert typ
        if not typ.is_valid(self.manager, typ, ret):
            raise exceptions.CommandError(
                f"{self.name} returned unexpected data - expected {typ.display}"
            )
        return ret
class ParseResult(NamedTuple):
    """One token of a (possibly partial) command string, with its inferred type and validity."""

    value: str
    type: type
    valid: bool
class CommandManager:
    """Registry of all commands; parses, validates and executes command strings."""

    commands: dict[str, Command]

    def __init__(self, master):
        self.master = master
        self.commands = {}

    def collect_commands(self, addon):
        """Register every attribute of *addon* that was marked with the @command decorator."""
        for i in dir(addon):
            if not i.startswith("__"):
                o = getattr(addon, i)
                try:
                    # hasattr is not enough, see https://github.com/mitmproxy/mitmproxy/issues/3794
                    is_command = isinstance(getattr(o, "command_name", None), str)
                except Exception:
                    pass  # getattr may raise if o implements __getattr__.
                else:
                    if is_command:
                        try:
                            self.add(o.command_name, o)
                        except exceptions.CommandError as e:
                            logging.warning(
                                f"Could not load command {o.command_name}: {e}"
                            )

    def add(self, path: str, func: Callable):
        # May raise CommandError if func's annotations are invalid.
        self.commands[path] = Command(self, path, func)

    # NOTE(review): lru_cache on an instance method keys on `self` and keeps the
    # manager alive for the cache's lifetime (ruff B019). Tolerable here since
    # managers are long-lived, but be aware before copying this pattern.
    @functools.lru_cache(maxsize=128)
    def parse_partial(
        self, cmdstr: str
    ) -> tuple[Sequence[ParseResult], Sequence[CommandParameter]]:
        """
        Parse a possibly partial command. Return a sequence of ParseResults and a sequence of remainder type help items.
        """
        parts: pyparsing.ParseResults = command_lexer.expr.parseString(
            cmdstr, parseAll=True
        )

        parsed: list[ParseResult] = []
        next_params: list[CommandParameter] = [
            CommandParameter("", mitmproxy.types.Cmd),
            CommandParameter("", mitmproxy.types.CmdArgs),
        ]
        expected: CommandParameter | None = None
        for part in parts:
            if part.isspace():
                parsed.append(
                    ParseResult(
                        value=part,
                        type=mitmproxy.types.Space,
                        valid=True,
                    )
                )
                continue

            # A *args parameter consumes all remaining tokens.
            if expected and expected.kind is inspect.Parameter.VAR_POSITIONAL:
                assert not next_params
            elif next_params:
                expected = next_params.pop(0)
            else:
                expected = CommandParameter("", mitmproxy.types.Unknown)

            arg_is_known_command = (
                expected.type == mitmproxy.types.Cmd and part in self.commands
            )
            arg_is_unknown_command = (
                expected.type == mitmproxy.types.Cmd and part not in self.commands
            )
            command_args_following = (
                next_params and next_params[0].type == mitmproxy.types.CmdArgs
            )
            # Replace the generic CmdArgs placeholder with the command's real parameters.
            if arg_is_known_command and command_args_following:
                next_params = self.commands[part].parameters + next_params[1:]
            if arg_is_unknown_command and command_args_following:
                next_params.pop(0)

            to = mitmproxy.types.CommandTypes.get(expected.type, None)
            valid = False
            if to:
                try:
                    to.parse(self, expected.type, part)
                except ValueError:
                    valid = False
                else:
                    valid = True

            parsed.append(
                ParseResult(
                    value=part,
                    type=expected.type,
                    valid=valid,
                )
            )

        return parsed, next_params

    def call(self, command_name: str, *args: Any) -> Any:
        """
        Call a command with native arguments. May raise CommandError.
        """
        if command_name not in self.commands:
            raise exceptions.CommandError("Unknown command: %s" % command_name)
        return self.commands[command_name].func(*args)

    def call_strings(self, command_name: str, args: Sequence[str]) -> Any:
        """
        Call a command using a list of string arguments. May raise CommandError.
        """
        if command_name not in self.commands:
            raise exceptions.CommandError("Unknown command: %s" % command_name)
        return self.commands[command_name].call(args)

    def execute(self, cmdstr: str) -> Any:
        """
        Execute a command string. May raise CommandError.
        """
        parts, _ = self.parse_partial(cmdstr)
        if not parts:
            raise exceptions.CommandError(f"Invalid command: {cmdstr!r}")
        command_name, *args = (
            unquote(part.value) for part in parts if part.type != mitmproxy.types.Space
        )
        return self.call_strings(command_name, args)

    def dump(self, out=sys.stdout) -> None:
        """Print all registered commands with their help text, sorted by signature."""
        cmds = list(self.commands.values())
        cmds.sort(key=lambda x: x.signature_help())
        for c in cmds:
            for hl in (c.help or "").splitlines():
                print("# " + hl, file=out)
            print(c.signature_help(), file=out)
            print(file=out)
def parsearg(manager: CommandManager, spec: str, argtype: type) -> Any:
    """
    Convert a string to a argument to the appropriate type.
    """
    handler = mitmproxy.types.CommandTypes.get(argtype, None)
    if not handler:
        raise exceptions.CommandError(f"Unsupported argument type: {argtype}")
    try:
        return handler.parse(manager, argtype, spec)
    except ValueError as e:
        # Surface conversion failures as command errors, keeping the cause chained.
        raise exceptions.CommandError(str(e)) from e
def command(name: str | None = None):
def decorator(function):
@functools.wraps(function)
def wrapper(*args, **kwargs):
verify_arg_signature(function, args, kwargs)
return function(*args, **kwargs)
wrapper.__dict__["command_name"] = name or function.__name__.replace("_", ".")
return wrapper
return decorator
def argument(name, type):
    """
    Set the type of a command argument at runtime. This is useful for more
    specific types such as mitmproxy.types.Choice, which we cannot annotate
    directly as mypy does not like that.
    """

    def decorator(function: types.FunctionType) -> types.FunctionType:
        # The annotation must already exist; we only override its type.
        assert name in function.__annotations__
        function.__annotations__[name] = type
        return function

    return decorator
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/dns.py | mitmproxy/dns.py | from __future__ import annotations
import base64
import itertools
import random
import struct
import time
from collections.abc import Iterable
from dataclasses import dataclass
from ipaddress import IPv4Address
from ipaddress import IPv6Address
from typing import Any
from typing import cast
from typing import ClassVar
from typing import Self
from mitmproxy import flow
from mitmproxy.coretypes import serializable
from mitmproxy.net.dns import classes
from mitmproxy.net.dns import domain_names
from mitmproxy.net.dns import https_records
from mitmproxy.net.dns import op_codes
from mitmproxy.net.dns import response_codes
from mitmproxy.net.dns import types
from mitmproxy.net.dns.https_records import HTTPSRecord
from mitmproxy.net.dns.https_records import HTTPSRecordJSON
from mitmproxy.net.dns.https_records import SVCParamKeys
# DNS parameters taken from https://www.iana.org/assignments/dns-parameters/dns-parameters.xml
@dataclass
class Question(serializable.SerializableDataclass):
    """A DNS question: the name/type/class triple being queried."""

    # Wire-format header: 16-bit type, 16-bit class (the name precedes it).
    HEADER: ClassVar[struct.Struct] = struct.Struct("!HH")

    name: str
    type: int
    class_: int

    def __str__(self) -> str:
        return self.name

    def to_json(self) -> dict:
        """
        Converts the question into json for mitmweb.
        Sync with web/src/flow.ts.
        """
        return {
            "name": self.name,
            "type": types.to_str(self.type),
            "class": classes.to_str(self.class_),
        }

    @classmethod
    def from_json(cls, data: dict[str, str]) -> Self:
        """Inverse of to_json: map human-readable type/class strings back to ints."""
        return cls(
            name=data["name"],
            type=types.from_str(data["type"]),
            class_=classes.from_str(data["class"]),
        )
@dataclass
class ResourceRecord(serializable.SerializableDataclass):
    """A single DNS resource record, with typed accessors for common record data."""

    DEFAULT_TTL: ClassVar[int] = 60
    # Wire-format header: 16-bit type, 16-bit class, 32-bit TTL, 16-bit data length.
    HEADER: ClassVar[struct.Struct] = struct.Struct("!HHIH")

    name: str
    type: int
    class_: int
    ttl: int
    data: bytes

    def __str__(self) -> str:
        return str(self._data_json())

    @property
    def text(self) -> str:
        """`data` decoded as UTF-8 (TXT records)."""
        return self.data.decode("utf-8")

    @text.setter
    def text(self, value: str) -> None:
        self.data = value.encode("utf-8")

    @property
    def ipv4_address(self) -> IPv4Address:
        """`data` interpreted as an IPv4 address (A records)."""
        return IPv4Address(self.data)

    @ipv4_address.setter
    def ipv4_address(self, ip: IPv4Address) -> None:
        self.data = ip.packed

    @property
    def ipv6_address(self) -> IPv6Address:
        """`data` interpreted as an IPv6 address (AAAA records)."""
        return IPv6Address(self.data)

    @ipv6_address.setter
    def ipv6_address(self, ip: IPv6Address) -> None:
        self.data = ip.packed

    @property
    def domain_name(self) -> str:
        """`data` interpreted as a DNS name (NS/CNAME/PTR records)."""
        return domain_names.unpack(self.data)

    @domain_name.setter
    def domain_name(self, name: str) -> None:
        self.data = domain_names.pack(name)

    @property
    def https_alpn(self) -> tuple[bytes, ...] | None:
        """The ALPN protocol list of an HTTPS record, or None if absent."""
        record = https_records.unpack(self.data)
        alpn_bytes = record.params.get(SVCParamKeys.ALPN.value, None)
        if alpn_bytes is not None:
            # Wire format: repeated length-prefixed tokens.
            i = 0
            ret = []
            while i < len(alpn_bytes):
                token_len = alpn_bytes[i]
                ret.append(alpn_bytes[i + 1 : i + 1 + token_len])
                i += token_len + 1
            return tuple(ret)
        else:
            return None

    @https_alpn.setter
    def https_alpn(self, alpn: Iterable[bytes] | None) -> None:
        record = https_records.unpack(self.data)
        if alpn is None:
            record.params.pop(SVCParamKeys.ALPN.value, None)
        else:
            # Re-encode as length-prefixed tokens.
            alpn_bytes = b"".join(bytes([len(a)]) + a for a in alpn)
            record.params[SVCParamKeys.ALPN.value] = alpn_bytes
        self.data = https_records.pack(record)

    @property
    def https_ech(self) -> str | None:
        """The base64-encoded ECH config of an HTTPS record, or None if absent."""
        record = https_records.unpack(self.data)
        ech_bytes = record.params.get(SVCParamKeys.ECH.value, None)
        if ech_bytes is not None:
            return base64.b64encode(ech_bytes).decode("utf-8")
        else:
            return None

    @https_ech.setter
    def https_ech(self, ech: str | None) -> None:
        record = https_records.unpack(self.data)
        if ech is None:
            record.params.pop(SVCParamKeys.ECH.value, None)
        else:
            ech_bytes = base64.b64decode(ech.encode("utf-8"))
            record.params[SVCParamKeys.ECH.value] = ech_bytes
        self.data = https_records.pack(record)

    def _data_json(self) -> str | HTTPSRecordJSON:
        # Best-effort human-readable rendering; falls back to hex on malformed data.
        try:
            match self.type:
                case types.A:
                    return str(self.ipv4_address)
                case types.AAAA:
                    return str(self.ipv6_address)
                case types.NS | types.CNAME | types.PTR:
                    return self.domain_name
                case types.TXT:
                    return self.text
                case types.HTTPS:
                    return https_records.unpack(self.data).to_json()
                case _:
                    return f"0x{self.data.hex()}"
        except Exception:
            return f"0x{self.data.hex()} (invalid {types.to_str(self.type)} data)"

    def to_json(self) -> dict[str, str | int | HTTPSRecordJSON]:
        """
        Converts the resource record into json for mitmweb.
        Sync with web/src/flow.ts.
        """
        return {
            "name": self.name,
            "type": types.to_str(self.type),
            "class": classes.to_str(self.class_),
            "ttl": self.ttl,
            "data": self._data_json(),
        }

    @classmethod
    def from_json(cls, data: dict[str, Any]) -> Self:
        """Inverse of to_json; falls back to parsing the hex representation."""
        inst = cls(
            name=data["name"],
            type=types.from_str(data["type"]),
            class_=classes.from_str(data["class"]),
            ttl=data["ttl"],
            data=b"",
        )
        d: str = data["data"]
        try:
            match inst.type:
                case types.A:
                    inst.ipv4_address = IPv4Address(d)
                case types.AAAA:
                    inst.ipv6_address = IPv6Address(d)
                case types.NS | types.CNAME | types.PTR:
                    inst.domain_name = d
                case types.TXT:
                    inst.text = d
                case types.HTTPS:
                    record = HTTPSRecord.from_json(cast(HTTPSRecordJSON, d))
                    inst.data = https_records.pack(record)
                case _:
                    raise ValueError
        except Exception:
            # Hex fallback, stripping the "0x" prefix and any "(invalid ...)" suffix.
            inst.data = bytes.fromhex(d.removeprefix("0x").partition(" (")[0])
        return inst

    @classmethod
    def A(cls, name: str, ip: IPv4Address, *, ttl: int = DEFAULT_TTL) -> ResourceRecord:
        """Create an IPv4 resource record."""
        return cls(name, types.A, classes.IN, ttl, ip.packed)

    @classmethod
    def AAAA(
        cls, name: str, ip: IPv6Address, *, ttl: int = DEFAULT_TTL
    ) -> ResourceRecord:
        """Create an IPv6 resource record."""
        return cls(name, types.AAAA, classes.IN, ttl, ip.packed)

    @classmethod
    def CNAME(
        cls, alias: str, canonical: str, *, ttl: int = DEFAULT_TTL
    ) -> ResourceRecord:
        """Create a canonical internet name resource record."""
        return cls(alias, types.CNAME, classes.IN, ttl, domain_names.pack(canonical))

    @classmethod
    def PTR(cls, inaddr: str, ptr: str, *, ttl: int = DEFAULT_TTL) -> ResourceRecord:
        """Create a pointer (reverse lookup) resource record."""
        return cls(inaddr, types.PTR, classes.IN, ttl, domain_names.pack(ptr))

    @classmethod
    def TXT(cls, name: str, text: str, *, ttl: int = DEFAULT_TTL) -> ResourceRecord:
        """Create a textual resource record."""
        return cls(name, types.TXT, classes.IN, ttl, text.encode("utf-8"))

    @classmethod
    def HTTPS(
        cls, name: str, record: HTTPSRecord, ttl: int = DEFAULT_TTL
    ) -> ResourceRecord:
        """Create a HTTPS resource record"""
        return cls(name, types.HTTPS, classes.IN, ttl, https_records.pack(record))
# comments are taken from rfc1035
@dataclass
class DNSMessage(serializable.SerializableDataclass):
    """A full DNS message: header flags plus question/answer/authority/additional sections."""

    # Wire-format header: id, flags, and the four section counts (all 16-bit).
    HEADER: ClassVar[struct.Struct] = struct.Struct("!HHHHHH")

    id: int
    """An identifier assigned by the program that generates any kind of query."""
    query: bool
    """A field that specifies whether this message is a query."""
    op_code: int
    """
    A field that specifies kind of query in this message.
    This value is set by the originator of a request and copied into the response.
    """
    authoritative_answer: bool
    """
    This field is valid in responses, and specifies that the responding name server
    is an authority for the domain name in question section.
    """
    truncation: bool
    """Specifies that this message was truncated due to length greater than that permitted on the transmission channel."""
    recursion_desired: bool
    """
    This field may be set in a query and is copied into the response.
    If set, it directs the name server to pursue the query recursively.
    """
    recursion_available: bool
    """This field is set or cleared in a response, and denotes whether recursive query support is available in the name server."""
    reserved: int
    """Reserved for future use. Must be zero in all queries and responses."""
    response_code: int
    """This field is set as part of responses."""
    questions: list[Question]
    """
    The question section is used to carry the "question" in most queries, i.e.
    the parameters that define what is being asked.
    """
    answers: list[ResourceRecord]
    """First resource record section."""
    authorities: list[ResourceRecord]
    """Second resource record section."""
    additionals: list[ResourceRecord]
    """Third resource record section."""
    timestamp: float | None = None
    """The time at which the message was sent or received."""

    def __str__(self) -> str:
        # One line per entry, across all sections.
        return "\r\n".join(
            map(
                str,
                itertools.chain(
                    self.questions, self.answers, self.authorities, self.additionals
                ),
            )
        )

    @property
    def content(self) -> bytes:
        """The message in network wire format."""
        return self.packed

    @property
    def question(self) -> Question | None:
        """DNS practically only supports a single question at the
        same time, so this is a shorthand for this."""
        if len(self.questions) == 1:
            return self.questions[0]
        return None

    @property
    def size(self) -> int:
        """Returns the cumulative data size of all resource record sections."""
        return sum(
            len(x.data)
            for x in itertools.chain.from_iterable(
                [self.answers, self.authorities, self.additionals]
            )
        )

    def fail(self, response_code: int) -> DNSMessage:
        """Build an error response to this query carrying *response_code*."""
        if response_code == response_codes.NOERROR:
            raise ValueError("response_code must be an error code.")
        return DNSMessage(
            timestamp=time.time(),
            id=self.id,
            query=False,
            op_code=self.op_code,
            authoritative_answer=False,
            truncation=False,
            recursion_desired=self.recursion_desired,
            recursion_available=False,
            reserved=0,
            response_code=response_code,
            questions=self.questions,
            answers=[],
            authorities=[],
            additionals=[],
        )

    def succeed(self, answers: list[ResourceRecord]) -> DNSMessage:
        """Build a NOERROR response to this query carrying the given answers."""
        return DNSMessage(
            timestamp=time.time(),
            id=self.id,
            query=False,
            op_code=self.op_code,
            authoritative_answer=False,
            truncation=False,
            recursion_desired=self.recursion_desired,
            recursion_available=True,
            reserved=0,
            response_code=response_codes.NOERROR,
            questions=self.questions,
            answers=answers,
            authorities=[],
            additionals=[],
        )

    @classmethod
    def unpack(cls, buffer: bytes, timestamp: float | None = None) -> DNSMessage:
        """Converts the entire given buffer into a DNS message."""
        length, msg = cls.unpack_from(buffer, 0, timestamp)
        if length != len(buffer):
            raise struct.error(f"unpack requires a buffer of {length} bytes")
        return msg

    @classmethod
    def unpack_from(
        cls, buffer: bytes | bytearray, offset: int, timestamp: float | None = None
    ) -> tuple[int, DNSMessage]:
        """Converts the buffer from a given offset into a DNS message and also returns its length."""
        (
            id,
            flags,
            len_questions,
            len_answers,
            len_authorities,
            len_additionals,
        ) = DNSMessage.HEADER.unpack_from(buffer, offset)
        # Decompose the 16-bit flags field (see RFC 1035 section 4.1.1).
        msg = DNSMessage(
            timestamp=timestamp,
            id=id,
            query=(flags & (1 << 15)) == 0,
            op_code=(flags >> 11) & 0b1111,
            authoritative_answer=(flags & (1 << 10)) != 0,
            truncation=(flags & (1 << 9)) != 0,
            recursion_desired=(flags & (1 << 8)) != 0,
            recursion_available=(flags & (1 << 7)) != 0,
            reserved=(flags >> 4) & 0b111,
            response_code=flags & 0b1111,
            questions=[],
            answers=[],
            authorities=[],
            additionals=[],
        )
        offset += DNSMessage.HEADER.size
        # Shared cache for DNS name compression pointers within this message.
        cached_names = domain_names.cache()

        def unpack_domain_name() -> str:
            # Read a (possibly compressed) name at `offset` and advance past it.
            nonlocal buffer, offset, cached_names
            name, length = domain_names.unpack_from_with_compression(
                buffer, offset, cached_names
            )
            offset += length
            return name

        for i in range(0, len_questions):
            try:
                name = unpack_domain_name()
                type, class_ = Question.HEADER.unpack_from(buffer, offset)
                offset += Question.HEADER.size
                msg.questions.append(Question(name=name, type=type, class_=class_))
            except struct.error as e:
                raise struct.error(f"question #{i}: {e}")

        def unpack_rrs(
            section: list[ResourceRecord], section_name: str, count: int
        ) -> None:
            # Parse `count` resource records into `section`.
            nonlocal buffer, offset
            for i in range(0, count):
                try:
                    name = unpack_domain_name()
                    type, class_, ttl, len_data = ResourceRecord.HEADER.unpack_from(
                        buffer, offset
                    )
                    offset += ResourceRecord.HEADER.size
                    end_data = offset + len_data
                    if len(buffer) < end_data:
                        raise struct.error(
                            f"unpack requires a data buffer of {len_data} bytes"
                        )
                    data = buffer[offset:end_data]
                    # Record data may itself contain compressed names; expand them.
                    if domain_names.record_data_can_have_compression(type):
                        data = domain_names.decompress_from_record_data(
                            buffer, offset, end_data, cached_names
                        )
                    section.append(ResourceRecord(name, type, class_, ttl, data))
                    offset += len_data
                except struct.error as e:
                    raise struct.error(f"{section_name} #{i}: {e}")

        unpack_rrs(msg.answers, "answer", len_answers)
        unpack_rrs(msg.authorities, "authority", len_authorities)
        unpack_rrs(msg.additionals, "additional", len_additionals)
        return (offset, msg)

    @property
    def packed(self) -> bytes:
        """Converts the message into network bytes."""
        if self.id < 0 or self.id > 65535:
            raise ValueError(f"DNS message's id {self.id} is out of bounds.")
        # Recompose the 16-bit flags field (inverse of unpack_from).
        flags = 0
        if not self.query:
            flags |= 1 << 15
        if self.op_code < 0 or self.op_code > 0b1111:
            raise ValueError(f"DNS message's op_code {self.op_code} is out of bounds.")
        flags |= self.op_code << 11
        if self.authoritative_answer:
            flags |= 1 << 10
        if self.truncation:
            flags |= 1 << 9
        if self.recursion_desired:
            flags |= 1 << 8
        if self.recursion_available:
            flags |= 1 << 7
        if self.reserved < 0 or self.reserved > 0b111:
            raise ValueError(
                f"DNS message's reserved value of {self.reserved} is out of bounds."
            )
        flags |= self.reserved << 4
        if self.response_code < 0 or self.response_code > 0b1111:
            raise ValueError(
                f"DNS message's response_code {self.response_code} is out of bounds."
            )
        flags |= self.response_code
        data = bytearray()
        data.extend(
            DNSMessage.HEADER.pack(
                self.id,
                flags,
                len(self.questions),
                len(self.answers),
                len(self.authorities),
                len(self.additionals),
            )
        )
        # TODO implement compression
        for question in self.questions:
            data.extend(domain_names.pack(question.name))
            data.extend(Question.HEADER.pack(question.type, question.class_))
        for rr in (*self.answers, *self.authorities, *self.additionals):
            data.extend(domain_names.pack(rr.name))
            data.extend(
                ResourceRecord.HEADER.pack(rr.type, rr.class_, rr.ttl, len(rr.data))
            )
            data.extend(rr.data)
        return bytes(data)

    def to_json(self) -> dict:
        """
        Converts the message into json for mitmweb.
        Sync with web/src/flow.ts.
        """
        ret = {
            "id": self.id,
            "query": self.query,
            "op_code": op_codes.to_str(self.op_code),
            "authoritative_answer": self.authoritative_answer,
            "truncation": self.truncation,
            "recursion_desired": self.recursion_desired,
            "recursion_available": self.recursion_available,
            "response_code": response_codes.to_str(self.response_code),
            "status_code": response_codes.http_equiv_status_code(self.response_code),
            "questions": [question.to_json() for question in self.questions],
            "answers": [rr.to_json() for rr in self.answers],
            "authorities": [rr.to_json() for rr in self.authorities],
            "additionals": [rr.to_json() for rr in self.additionals],
            "size": self.size,
        }
        if self.timestamp:
            ret["timestamp"] = self.timestamp
        return ret

    @classmethod
    def from_json(cls, data: Any) -> DNSMessage:
        """Reconstruct a DNS message from JSON."""
        inst = cls(
            id=data["id"],
            query=data["query"],
            op_code=op_codes.from_str(data["op_code"]),
            authoritative_answer=data["authoritative_answer"],
            truncation=data["truncation"],
            recursion_desired=data["recursion_desired"],
            recursion_available=data["recursion_available"],
            reserved=0,
            response_code=response_codes.from_str(data["response_code"]),
            questions=[Question.from_json(x) for x in data["questions"]],
            answers=[ResourceRecord.from_json(x) for x in data["answers"]],
            authorities=[ResourceRecord.from_json(x) for x in data["authorities"]],
            additionals=[ResourceRecord.from_json(x) for x in data["additionals"]],
        )
        if ts := data.get("timestamp"):
            inst.timestamp = ts
        return inst

    def copy(self) -> DNSMessage:
        # we keep the copy semantics but change the ID generation
        state = self.get_state()
        state["id"] = random.randint(0, 65535)
        return DNSMessage.from_state(state)
class DNSFlow(flow.Flow):
    """A DNSFlow is a collection of DNS messages representing a single DNS query."""

    request: DNSMessage
    """The DNS request."""
    response: DNSMessage | None = None
    """The DNS response."""

    def get_state(self) -> serializable.State:
        # Serialize both messages alongside the base flow state.
        return {
            **super().get_state(),
            "request": self.request.get_state(),
            "response": self.response.get_state() if self.response else None,
        }

    def set_state(self, state: serializable.State) -> None:
        # Consume our keys first, then hand the remainder to the base class.
        self.request = DNSMessage.from_state(state.pop("request"))
        self.response = (
            DNSMessage.from_state(r) if (r := state.pop("response")) else None
        )
        super().set_state(state)

    def __repr__(self) -> str:
        return f"<DNSFlow\r\n  request={self.request!r}\r\n  response={self.response!r}\r\n>"
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/log.py | mitmproxy/log.py | from __future__ import annotations
import logging
import os
import typing
import warnings
from dataclasses import dataclass
from mitmproxy import hooks
from mitmproxy.contrib import click as miniclick
from mitmproxy.utils import human
if typing.TYPE_CHECKING:
from mitmproxy import master
ALERT = logging.INFO + 1
"""
The ALERT logging level has the same urgency as info, but
signals to interactive tools that the user's attention should be
drawn to the output even if they're not currently looking at the
event log.
"""
logging.addLevelName(ALERT, "ALERT")

# Level names accepted by mitmproxy options, from most to least severe.
LogLevels = [
    "error",
    "warn",
    "info",
    "alert",
    "debug",
]

# Terminal colors used by MitmFormatter for the given stdlib levels.
LOG_COLORS = {logging.ERROR: "red", logging.WARNING: "yellow", ALERT: "magenta"}
class MitmFormatter(logging.Formatter):
    """Formatter producing "[time][client] message" lines, optionally colorized."""

    def __init__(self, colorize: bool):
        super().__init__()
        self.colorize = colorize
        time = "[%s]"
        client = "[%s]"
        if colorize:
            time = miniclick.style(time, fg="cyan", dim=True)
            client = miniclick.style(client, fg="yellow", dim=True)
        # Pre-rendered %-templates: one for records with a client address, one without.
        self.with_client = f"{time}{client} %s"
        self.without_client = f"{time} %s"

    default_time_format = "%H:%M:%S"
    default_msec_format = "%s.%03d"

    def format(self, record: logging.LogRecord) -> str:
        time = self.formatTime(record)
        message = record.getMessage()
        if record.exc_info:
            message = f"{message}\n{self.formatException(record.exc_info)}"
        if self.colorize:
            message = miniclick.style(
                message,
                fg=LOG_COLORS.get(record.levelno),
                # dim=(record.levelno <= logging.DEBUG)
            )
        # A "client" attribute (a peername) may be attached via logging's `extra`.
        if client := getattr(record, "client", None):
            client = human.format_address(client)
            return self.with_client % (time, client, message)
        else:
            return self.without_client % (time, message)
class MitmLogHandler(logging.Handler):
    """Log handler that mutes itself when a different pytest test is running."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Remember the test we were created in (None outside of pytest).
        self._initiated_in_test = os.environ.get("PYTEST_CURRENT_TEST")

    def filter(self, record: logging.LogRecord) -> bool:
        # We can't remove stale handlers here because that would modify .handlers during iteration!
        return bool(
            super().filter(record)
            and (
                not self._initiated_in_test
                or self._initiated_in_test == os.environ.get("PYTEST_CURRENT_TEST")
            )
        )

    def install(self) -> None:
        """Attach to the root logger, evicting stale handlers from previous tests."""
        if self._initiated_in_test:
            for h in list(logging.getLogger().handlers):
                if (
                    isinstance(h, MitmLogHandler)
                    and h._initiated_in_test != self._initiated_in_test
                ):
                    h.uninstall()
        logging.getLogger().addHandler(self)

    def uninstall(self) -> None:
        """Detach from the root logger."""
        logging.getLogger().removeHandler(self)
# everything below is deprecated!
class LogEntry:
    """A single entry in the deprecated (pre-stdlib-logging) event log."""

    def __init__(self, msg, level):
        # it's important that we serialize to string here already so that we don't pick up changes
        # happening after this log statement.
        self.msg = str(msg)
        self.level = level

    def __eq__(self, other):
        if isinstance(other, LogEntry):
            return self.__dict__ == other.__dict__
        # Return NotImplemented (rather than False) for foreign types so that
        # Python can fall back to the reflected comparison.
        return NotImplemented

    def __repr__(self):
        return f"LogEntry({self.msg}, {self.level})"
class Log:
    """
    The central logger, exposed to scripts as mitmproxy.ctx.log.

    Deprecated: Please use the standard Python logging module instead.
    """

    def __init__(self, master):
        self.master = master

    def debug(self, txt):
        """
        Log with level debug.
        """
        warnings.warn(
            "mitmproxy's ctx.log.debug() is deprecated. Please use the standard Python logging module instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        logging.getLogger().debug(txt)

    def info(self, txt):
        """
        Log with level info.
        """
        warnings.warn(
            "mitmproxy's ctx.log.info() is deprecated. Please use the standard Python logging module instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        logging.getLogger().info(txt)

    def alert(self, txt):
        """
        Log with level alert. Alerts have the same urgency as info, but
        signals to interactive tools that the user's attention should be
        drawn to the output even if they're not currently looking at the
        event log.
        """
        warnings.warn(
            "mitmproxy's ctx.log.alert() is deprecated. Please use the standard Python logging module instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        logging.getLogger().log(ALERT, txt)

    def warn(self, txt):
        """
        Log with level warn.
        """
        warnings.warn(
            "mitmproxy's ctx.log.warn() is deprecated. Please use the standard Python logging module instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        logging.getLogger().warning(txt)

    def error(self, txt):
        """
        Log with level error.
        """
        warnings.warn(
            "mitmproxy's ctx.log.error() is deprecated. Please use the standard Python logging module instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        logging.getLogger().error(txt)

    def __call__(self, text, level="info"):
        warnings.warn(
            "mitmproxy's ctx.log() is deprecated. Please use the standard Python logging module instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        # Resolve the legacy level name (e.g. "info") to the stdlib numeric level.
        logging.getLogger().log(level=logging.getLevelName(level.upper()), msg=text)
# Maps stdlib logging levels to the legacy LogEntry level names.
LOGGING_LEVELS_TO_LOGENTRY = {
    logging.ERROR: "error",
    logging.WARNING: "warn",
    logging.INFO: "info",
    ALERT: "alert",
    logging.DEBUG: "debug",
}
class LegacyLogEvents(MitmLogHandler):
    """Emit deprecated `add_log` events from stdlib logging."""

    def __init__(
        self,
        master: master.Master,
    ):
        super().__init__()
        self.master = master
        self.formatter = MitmFormatter(colorize=False)

    def emit(self, record: logging.LogRecord) -> None:
        entry = LogEntry(
            msg=self.format(record),
            level=LOGGING_LEVELS_TO_LOGENTRY.get(record.levelno, "error"),
        )
        # Logging may happen from any thread; hop onto the event loop to trigger the hook.
        self.master.event_loop.call_soon_threadsafe(
            self.master.addons.trigger,
            AddLogHook(entry),
        )
@dataclass
class AddLogHook(hooks.Hook):
    """
    **Deprecated:** Starting with mitmproxy 9, users should use the standard Python logging module instead, for example
    by calling `logging.getLogger().addHandler()`.

    Called whenever a new log entry is created through the mitmproxy
    context. Be careful not to log from this event, which will cause an
    infinite loop!
    """

    # The legacy log entry being broadcast to addons.
    entry: LogEntry
def log_tier(level):
    """
    Comparison method for "old" LogEntry log tiers.
    Ideally you should use the standard Python logging module instead.
    """
    tiers = {"error": 0, "warn": 1, "info": 2, "alert": 2, "debug": 3}
    return tiers.get(level)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/flowfilter.py | mitmproxy/flowfilter.py | """
The following operators are understood:
~q Request
~s Response
Headers:
Patterns are matched against "name: value" strings. Field names are
all-lowercase.
~a Asset content-type in response. Asset content types are:
text/javascript
application/x-javascript
application/javascript
text/css
image/*
font/*
application/font-*
~h rex Header line in either request or response
~hq rex Header in request
~hs rex Header in response
~b rex Expression in the body of either request or response
~bq rex Expression in the body of request
~bs rex Expression in the body of response
~t rex Shortcut for content-type header.
~d rex Request domain
~m rex Method
~u rex URL
~c CODE Response code.
rex Equivalent to ~u rex
"""
import functools
import os
import re
import sys
from collections.abc import Sequence
from typing import Any
from typing import ClassVar
from typing import Protocol
import pyparsing as pp
from mitmproxy import dns
from mitmproxy import flow
from mitmproxy import http
from mitmproxy import tcp
from mitmproxy import udp
# Filters are case-insensitive by default; setting the environment variable
# MITMPROXY_CASE_SENSITIVE_FILTERS=1 turns the extra flag off.
maybe_ignore_case = (
    re.IGNORECASE if os.environ.get("MITMPROXY_CASE_SENSITIVE_FILTERS") != "1" else 0
)
def only(*types):
    """
    Decorator factory: the wrapped filter method runs only for flows of the
    given types; any other flow is rejected with False.
    """

    def decorator(fn):
        @functools.wraps(fn)
        def typed_filter(self, flow):
            if not isinstance(flow, types):
                return False
            return fn(self, flow)

        return typed_filter

    return decorator
class _Token:
def dump(self, indent=0, fp=sys.stdout):
print(
"{spacing}{name}{expr}".format(
spacing="\t" * indent,
name=self.__class__.__name__,
expr=getattr(self, "expr", ""),
),
file=fp,
)
class _Action(_Token):
    """Base class for concrete (leaf) filter expressions."""

    # Short filter code, e.g. "q" for "~q".
    code: ClassVar[str]
    # One-line description shown in the built-in help.
    help: ClassVar[str]

    @classmethod
    def make(klass, s, loc, toks):
        # pyparsing parse action: toks[0] is the "~code" literal itself,
        # the remaining tokens are the expression's arguments.
        return klass(*toks[1:])
class FErr(_Action):
    code = "e"
    help = "Match error"

    def __call__(self, f):
        # A flow matches when its .error attribute is set.
        return bool(f.error)
class FMarked(_Action):
    code = "marked"
    help = "Match marked flows"

    def __call__(self, f):
        # f.marked is the marker string; the empty string means unmarked.
        return bool(f.marked)
class FHTTP(_Action):
    code = "http"
    help = "Match HTTP flows"

    # The @only decorator performs the actual type check.
    @only(http.HTTPFlow)
    def __call__(self, f):
        return True
class FWebSocket(_Action):
    code = "websocket"
    help = "Match WebSocket flows"

    # WebSocket connections are HTTP flows with an established websocket.
    @only(http.HTTPFlow)
    def __call__(self, f: http.HTTPFlow):
        return f.websocket is not None
class FTCP(_Action):
    code = "tcp"
    help = "Match TCP flows"

    # The @only decorator performs the actual type check.
    @only(tcp.TCPFlow)
    def __call__(self, f):
        return True
class FUDP(_Action):
    code = "udp"
    help = "Match UDP flows"

    # The @only decorator performs the actual type check.
    @only(udp.UDPFlow)
    def __call__(self, f):
        return True
class FDNS(_Action):
    code = "dns"
    help = "Match DNS flows"

    # The @only decorator performs the actual type check.
    @only(dns.DNSFlow)
    def __call__(self, f):
        return True
class FReq(_Action):
    code = "q"
    help = "Match request with no response"

    @only(http.HTTPFlow, dns.DNSFlow)
    def __call__(self, f):
        # Explicitly return a bool: the original fell off the end and
        # returned None for flows that do have a response.
        return not f.response
class FResp(_Action):
    code = "s"
    help = "Match response"

    @only(http.HTTPFlow, dns.DNSFlow)
    def __call__(self, f):
        # True once a response object exists on the flow.
        return bool(f.response)
class FAll(_Action):
    code = "all"
    help = "Match all flows"

    # Matches unconditionally, for every flow type.
    def __call__(self, f: flow.Flow):
        return True
class _Rex(_Action):
    """Base class for filter expressions that carry a regex argument."""

    flags = 0
    # Subclasses that match text fields (URL, domain, addresses, ...) set
    # this to False and compile a str pattern instead of bytes.
    is_binary = True

    def __init__(self, expr):
        self.expr = expr
        pattern = expr.encode() if self.is_binary else expr
        try:
            self.re = re.compile(pattern, self.flags | maybe_ignore_case)
        except Exception:
            raise ValueError("Cannot compile expression.")
def _check_content_type(rex, message):
return any(
name.lower() == b"content-type" and rex.search(value)
for name, value in message.headers.fields
)
class FAsset(_Action):
    code = "a"
    help = "Match asset in response: CSS, JavaScript, images, fonts."

    # Content-Type patterns that count as "assets".
    ASSET_TYPES = [
        re.compile(x)
        for x in [
            b"text/javascript",
            b"application/x-javascript",
            b"application/javascript",
            b"text/css",
            b"image/.*",
            b"font/.*",
            b"application/font.*",
        ]
    ]

    @only(http.HTTPFlow)
    def __call__(self, f):
        # Only flows that already have a response can be assets.
        if not f.response:
            return False
        return any(
            _check_content_type(rex, f.response) for rex in self.ASSET_TYPES
        )
class FContentType(_Rex):
    code = "t"
    help = "Content-type header"

    @only(http.HTTPFlow)
    def __call__(self, f):
        # Match the Content-Type of either the request or the response.
        if _check_content_type(self.re, f.request):
            return True
        return bool(f.response and _check_content_type(self.re, f.response))
class FContentTypeRequest(_Rex):
    code = "tq"
    help = "Request Content-Type header"

    @only(http.HTTPFlow)
    def __call__(self, f):
        # Delegates to the shared header check; returns a bool.
        return _check_content_type(self.re, f.request)
class FContentTypeResponse(_Rex):
    code = "ts"
    help = "Response Content-Type header"

    @only(http.HTTPFlow)
    def __call__(self, f):
        # No response yet -> cannot match.
        if not f.response:
            return False
        return _check_content_type(self.re, f.response)
class FHead(_Rex):
    code = "h"
    help = "Header"
    flags = re.MULTILINE

    @only(http.HTTPFlow)
    def __call__(self, f):
        # Search the serialized header blocks of both directions.
        for message in (f.request, f.response):
            if message and self.re.search(bytes(message.headers)):
                return True
        return False
class FHeadRequest(_Rex):
    code = "hq"
    help = "Request header"
    flags = re.MULTILINE

    @only(http.HTTPFlow)
    def __call__(self, f):
        # bool() so the filter returns False (not None) on a miss, and does
        # not leak the re.Match object on a hit.
        return bool(f.request and self.re.search(bytes(f.request.headers)))
class FHeadResponse(_Rex):
    code = "hs"
    help = "Response header"
    flags = re.MULTILINE

    @only(http.HTTPFlow)
    def __call__(self, f):
        # bool() so the filter returns False (not None) on a miss, and does
        # not leak the re.Match object on a hit.
        return bool(f.response and self.re.search(bytes(f.response.headers)))
class FBod(_Rex):
    code = "b"
    help = "Body"
    flags = re.DOTALL

    @only(http.HTTPFlow, tcp.TCPFlow, udp.UDPFlow, dns.DNSFlow)
    def __call__(self, f):
        """Match the regex against any payload of the flow, in either direction."""
        if isinstance(f, http.HTTPFlow):
            # strict=False: fall back to the raw body if decoding fails.
            if (
                f.request
                and (content := f.request.get_content(strict=False)) is not None
            ):
                if self.re.search(content):
                    return True
            if (
                f.response
                and (content := f.response.get_content(strict=False)) is not None
            ):
                if self.re.search(content):
                    return True
            if f.websocket:
                for wmsg in f.websocket.messages:
                    if wmsg.content is not None and self.re.search(wmsg.content):
                        return True
        elif isinstance(f, (tcp.TCPFlow, udp.UDPFlow)):
            for msg in f.messages:
                if msg.content is not None and self.re.search(msg.content):
                    return True
        elif isinstance(f, dns.DNSFlow):
            # DNS messages are matched against their text representation.
            if f.request and self.re.search(str(f.request).encode()):
                return True
            if f.response and self.re.search(str(f.response).encode()):
                return True
        return False
class FBodRequest(_Rex):
    code = "bq"
    help = "Request body"
    flags = re.DOTALL

    @only(http.HTTPFlow, tcp.TCPFlow, udp.UDPFlow, dns.DNSFlow)
    def __call__(self, f):
        """Match the regex against client-sent payloads only."""
        if isinstance(f, http.HTTPFlow):
            # strict=False: fall back to the raw body if decoding fails.
            if (
                f.request
                and (content := f.request.get_content(strict=False)) is not None
            ):
                if self.re.search(content):
                    return True
            if f.websocket:
                for wmsg in f.websocket.messages:
                    # Guard against None content (consistent with FBod), which
                    # would otherwise raise TypeError in re.search().
                    if (
                        wmsg.from_client
                        and wmsg.content is not None
                        and self.re.search(wmsg.content)
                    ):
                        return True
        elif isinstance(f, (tcp.TCPFlow, udp.UDPFlow)):
            for msg in f.messages:
                if (
                    msg.from_client
                    and msg.content is not None
                    and self.re.search(msg.content)
                ):
                    return True
        elif isinstance(f, dns.DNSFlow):
            if f.request and self.re.search(str(f.request).encode()):
                return True
        # Explicit bool on the no-match path (previously returned None).
        return False
class FBodResponse(_Rex):
    code = "bs"
    help = "Response body"
    flags = re.DOTALL

    @only(http.HTTPFlow, tcp.TCPFlow, udp.UDPFlow, dns.DNSFlow)
    def __call__(self, f):
        """Match the regex against server-sent payloads only."""
        if isinstance(f, http.HTTPFlow):
            # strict=False: fall back to the raw body if decoding fails.
            if (
                f.response
                and (content := f.response.get_content(strict=False)) is not None
            ):
                if self.re.search(content):
                    return True
            if f.websocket:
                for wmsg in f.websocket.messages:
                    # Guard against None content (consistent with FBod), which
                    # would otherwise raise TypeError in re.search().
                    if (
                        not wmsg.from_client
                        and wmsg.content is not None
                        and self.re.search(wmsg.content)
                    ):
                        return True
        elif isinstance(f, (tcp.TCPFlow, udp.UDPFlow)):
            for msg in f.messages:
                if (
                    not msg.from_client
                    and msg.content is not None
                    and self.re.search(msg.content)
                ):
                    return True
        elif isinstance(f, dns.DNSFlow):
            if f.response and self.re.search(str(f.response).encode()):
                return True
        # Explicit bool on the no-match path (previously returned None).
        return False
class FMethod(_Rex):
    code = "m"
    help = "Method"

    @only(http.HTTPFlow)
    def __call__(self, f):
        # Matched against the raw request method bytes (e.g. b"GET").
        return bool(self.re.search(f.request.data.method))
class FDomain(_Rex):
    code = "d"
    help = "Domain"
    is_binary = False

    @only(http.HTTPFlow)
    def __call__(self, f):
        # Try both the raw target host and the Host-header-aware pretty host.
        return bool(
            self.re.search(f.request.host) or self.re.search(f.request.pretty_host)
        )
class FUrl(_Rex):
    """Match the (pretty) URL of a request, or the first DNS question name."""

    code = "u"
    help = "URL"
    is_binary = False

    # FUrl is special, because it can be "naked".
    @classmethod
    def make(klass, s, loc, toks):
        # With an explicit "~u" literal, strip it; a naked regex arrives as
        # a single token.
        if len(toks) > 1:
            toks = toks[1:]
        return klass(*toks)

    @only(http.HTTPFlow, dns.DNSFlow)
    def __call__(self, f):
        if not f or not f.request:
            return False
        if isinstance(f, http.HTTPFlow):
            return self.re.search(f.request.pretty_url)
        elif isinstance(f, dns.DNSFlow):
            return f.request.questions and self.re.search(f.request.questions[0].name)
class FSrc(_Rex):
    code = "src"
    help = "Match source address"
    is_binary = False

    def __call__(self, f):
        # Matched against "ip:port" of the client connection.
        if not f.client_conn or not f.client_conn.peername:
            return False
        r = f"{f.client_conn.peername[0]}:{f.client_conn.peername[1]}"
        return self.re.search(r)
class FDst(_Rex):
    code = "dst"
    help = "Match destination address"
    is_binary = False

    def __call__(self, f):
        # Matched against "host:port" of the server connection.
        if not f.server_conn or not f.server_conn.address:
            return False
        r = f"{f.server_conn.address[0]}:{f.server_conn.address[1]}"
        return self.re.search(r)
class FReplay(_Action):
    code = "replay"
    help = "Match replayed flows"

    def __call__(self, f):
        # is_replay is "request", "response", or None (not replayed).
        return f.is_replay is not None
class FReplayClient(_Action):
    code = "replayq"
    help = "Match replayed client request"

    def __call__(self, f):
        # "request" marks a client-to-server replay.
        return f.is_replay == "request"
class FReplayServer(_Action):
    code = "replays"
    help = "Match replayed server response"

    def __call__(self, f):
        # "response" marks a server-replayed response.
        return f.is_replay == "response"
class FMeta(_Rex):
    code = "meta"
    help = "Flow metadata"
    flags = re.MULTILINE
    is_binary = False

    def __call__(self, f):
        # Metadata is rendered as one "key: value" line per entry, so the
        # regex can anchor per line thanks to re.MULTILINE.
        m = "\n".join([f"{key}: {value}" for key, value in f.metadata.items()])
        return self.re.search(m)
class FMarker(_Rex):
    code = "marker"
    help = "Match marked flows with specified marker"
    is_binary = False

    def __call__(self, f):
        # f.marked is the marker string ("" when unmarked, which never matches
        # a non-empty pattern).
        return self.re.search(f.marked)
class FComment(_Rex):
    code = "comment"
    help = "Flow comment"
    flags = re.MULTILINE
    is_binary = False

    def __call__(self, f):
        # Matched against the user-supplied flow comment string.
        return self.re.search(f.comment)
class _Int(_Action):
    """Base class for filter expressions that carry an integer argument."""

    def __init__(self, num):
        self.num = int(num)
class FCode(_Int):
    code = "c"
    help = "HTTP response code"

    @only(http.HTTPFlow)
    def __call__(self, f):
        # Explicit bool: previously fell through and returned None when the
        # status code did not match (or no response existed).
        return bool(f.response and f.response.status_code == self.num)
class FAnd(_Token):
    """Conjunction node: matches iff every child filter matches."""

    def __init__(self, lst):
        self.lst = lst

    def dump(self, indent=0, fp=sys.stdout):
        super().dump(indent, fp)
        for child in self.lst:
            child.dump(indent + 1, fp)

    def __call__(self, f):
        for child in self.lst:
            if not child(f):
                return False
        return True
class FOr(_Token):
    """Disjunction node: matches iff at least one child filter matches."""

    def __init__(self, lst):
        self.lst = lst

    def dump(self, indent=0, fp=sys.stdout):
        super().dump(indent, fp)
        for child in self.lst:
            child.dump(indent + 1, fp)

    def __call__(self, f):
        for child in self.lst:
            if child(f):
                return True
        return False
class FNot(_Token):
    """Negation node: inverts the wrapped filter."""

    def __init__(self, itm):
        # pyparsing hands us a one-element group; unwrap it.
        self.itm = itm[0]

    def dump(self, indent=0, fp=sys.stdout):
        super().dump(indent, fp)
        self.itm.dump(indent + 1, fp)

    def __call__(self, f):
        return not self.itm(f)
# Filter expressions that take no argument.
filter_unary: Sequence[type[_Action]] = [
    FAsset,
    FErr,
    FHTTP,
    FMarked,
    FReplay,
    FReplayClient,
    FReplayServer,
    FReq,
    FResp,
    FTCP,
    FUDP,
    FDNS,
    FWebSocket,
    FAll,
]
# Filter expressions that take a regex argument.
filter_rex: Sequence[type[_Rex]] = [
    FBod,
    FBodRequest,
    FBodResponse,
    FContentType,
    FContentTypeRequest,
    FContentTypeResponse,
    FDomain,
    FDst,
    FHead,
    FHeadRequest,
    FHeadResponse,
    FMethod,
    FSrc,
    FUrl,
    FMeta,
    FMarker,
    FComment,
]
# Filter expressions that take an integer argument.
filter_int = [FCode]
def _make():
    """Construct the pyparsing grammar for the filter language."""
    # Order is important - multi-char expressions need to come before narrow
    # ones.
    parts = []
    for cls in filter_unary:
        f = pp.Literal(f"~{cls.code}") + pp.WordEnd()
        f.setParseAction(cls.make)
        parts.append(f)

    # This is a bit of a hack to simulate Word(pyparsing_unicode.printables),
    # which has a horrible performance with len(pyparsing.pyparsing_unicode.printables) == 1114060
    unicode_words = pp.CharsNotIn("()~'\"" + pp.ParserElement.DEFAULT_WHITE_CHARS)
    unicode_words.skipWhitespace = True
    # A regex argument is either a bare word or a quoted string.
    regex = (
        unicode_words
        | pp.QuotedString('"', escChar="\\")
        | pp.QuotedString("'", escChar="\\")
    )
    for cls in filter_rex:
        f = pp.Literal(f"~{cls.code}") + pp.WordEnd() + regex.copy()
        f.setParseAction(cls.make)
        parts.append(f)

    for cls in filter_int:
        f = pp.Literal(f"~{cls.code}") + pp.WordEnd() + pp.Word(pp.nums)
        f.setParseAction(cls.make)
        parts.append(f)

    # A naked rex is a URL rex:
    f = regex.copy()
    f.setParseAction(FUrl.make)
    parts.append(f)

    atom = pp.MatchFirst(parts)
    expr = pp.OneOrMore(
        pp.infixNotation(
            atom,
            [
                (pp.Literal("!").suppress(), 1, pp.opAssoc.RIGHT, lambda x: FNot(*x)),
                (pp.Literal("&").suppress(), 2, pp.opAssoc.LEFT, lambda x: FAnd(*x)),
                (pp.Literal("|").suppress(), 2, pp.opAssoc.LEFT, lambda x: FOr(*x)),
            ],
        )
    )
    # Adjacent expressions with no explicit operator are AND-ed together.
    return expr.setParseAction(lambda x: FAnd(x) if len(x) != 1 else x)


# The compiled grammar, built once at import time.
bnf = _make()
class TFilter(Protocol):
    """Structural type of a compiled filter: a callable with the source pattern attached."""

    # The filter expression this callable was compiled from (set by parse()).
    pattern: str

    # TODO: This should be `-> bool`, but some filters aren't behaving correctly (requiring `bool()` by the caller).
    #  Correct this when we properly type filters.
    def __call__(self, f: flow.Flow) -> Any: ...  # pragma: no cover
def parse(s: str) -> TFilter:
    """
    Parse a filter expression and return the compiled filter function.

    If the filter syntax is invalid, `ValueError` is raised.
    """
    if not s:
        raise ValueError("Empty filter expression")
    try:
        flt = bnf.parseString(s, parseAll=True)[0]
        # Remember the source text so the filter can be re-serialized later.
        flt.pattern = s
        return flt
    except (pp.ParseException, ValueError) as e:
        raise ValueError(f"Invalid filter expression: {s!r}") from e
def match(flt: str | TFilter, flow: flow.Flow) -> bool:
    """
    Matches a flow against a compiled filter expression.
    Returns True if matched, False if not.

    If flt is a string, it will be compiled as a filter expression.
    If the expression is invalid, ValueError is raised.
    """
    if isinstance(flt, str):
        flt = parse(flt)
    if flt:
        # Individual filters may return None or re.Match objects; coerce to
        # bool so this function honours its documented True/False contract.
        return bool(flt(flow))
    return True
match_all: TFilter = parse("~all")
"""A filter function that matches all flows"""
# Human-readable (expression, description) pairs for the built-in filter help.
# NOTE: `help` intentionally mirrors the module API name, shadowing the builtin.
help = []
for a in filter_unary:
    help.append((f"~{a.code}", a.help))
for b in filter_rex:
    help.append((f"~{b.code} regex", b.help))
for c in filter_int:
    help.append((f"~{c.code} int", c.help))
help.sort()
help.extend(
    [
        ("!", "unary not"),
        ("&", "and"),
        ("|", "or"),
        ("(...)", "grouping"),
    ]
)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/flow.py | mitmproxy/flow.py | from __future__ import annotations
import asyncio
import copy
import time
import uuid
from dataclasses import dataclass
from dataclasses import field
from typing import Any
from typing import ClassVar
from mitmproxy import connection
from mitmproxy import exceptions
from mitmproxy import version
from mitmproxy.coretypes import serializable
@dataclass
class Error(serializable.SerializableDataclass):
    """
    An Error.

    This is distinct from an protocol error response (say, a HTTP code 500),
    which is represented by a normal `mitmproxy.http.Response` object. This class is
    responsible for indicating errors that fall outside of normal protocol
    communications, like interrupted connections, timeouts, or protocol errors.
    """

    msg: str
    """Message describing the error."""

    timestamp: float = field(default_factory=time.time)
    """Unix timestamp of when this error happened."""

    # Sentinel message set by Flow.kill(); Flow.killable checks for it.
    KILLED_MESSAGE: ClassVar[str] = "Connection killed."

    def __str__(self):
        return self.msg

    def __repr__(self):
        return self.msg
class Flow(serializable.Serializable):
    """
    Base class for network flows. A flow is a collection of objects,
    for example HTTP request/response pairs or a list of TCP messages.

    See also:
     - mitmproxy.http.HTTPFlow
     - mitmproxy.tcp.TCPFlow
     - mitmproxy.udp.UDPFlow
    """

    client_conn: connection.Client
    """The client that connected to mitmproxy."""

    server_conn: connection.Server
    """
    The server mitmproxy connected to.

    Some flows may never cause mitmproxy to initiate a server connection,
    for example because their response is replayed by mitmproxy itself.
    To simplify implementation, those flows will still have a `server_conn` attribute
    with a `timestamp_start` set to `None`.
    """

    error: Error | None = None
    """A connection or protocol error affecting this flow."""

    intercepted: bool
    """
    If `True`, the flow is currently paused by mitmproxy.
    We're waiting for a user action to forward the flow to its destination.
    """

    marked: str = ""
    """
    If this attribute is a non-empty string the flow has been marked by the user.

    A string value will be used as the marker annotation. May either be a single character or a Unicode emoji name.

    For example `:grapes:` becomes `🍇` in views that support emoji rendering.
    Consult the [Github API Emoji List](https://api.github.com/emojis) for a list of emoji that may be used.
    Not all emoji, especially [emoji modifiers](https://en.wikipedia.org/wiki/Miscellaneous_Symbols_and_Pictographs#Emoji_modifiers)
    will render consistently.

    The default marker for the view will be used if the Unicode emoji name can not be interpreted.
    """

    is_replay: str | None
    """
    This attribute indicates if this flow has been replayed in either direction.

    - a value of `request` indicates that the request has been artifically replayed by mitmproxy to the server.
    - a value of `response` indicates that the response to the client's request has been set by server replay.
    """

    live: bool
    """
    If `True`, the flow belongs to a currently active connection.
    If `False`, the flow may have been already completed or loaded from disk.
    """

    timestamp_created: float
    """
    The Unix timestamp of when this flow was created.

    In contrast to `timestamp_start`, this value will not change when a flow is replayed.
    """

    def __init__(
        self,
        client_conn: connection.Client,
        server_conn: connection.Server,
        live: bool = False,
    ) -> None:
        self.id = str(uuid.uuid4())
        self.client_conn = client_conn
        self.server_conn = server_conn
        self.live = live
        self.timestamp_created = time.time()

        self.intercepted: bool = False
        # Created lazily in wait_for_resume(); set by resume() to wake waiters.
        self._resume_event: asyncio.Event | None = None
        # Snapshot of get_state() taken by backup(), restored by revert().
        self._backup: Flow | None = None
        self.marked: str = ""
        self.is_replay: str | None = None
        self.metadata: dict[str, Any] = dict()
        self.comment: str = ""

    # Registry of concrete flow classes keyed by their `type` string; used by
    # from_state() to revive serialized flows.
    __types: dict[str, type[Flow]] = {}

    type: ClassVar[
        str
    ]  # automatically derived from the class name in __init_subclass__
    """The flow type, for example `http`, `tcp`, or `dns`."""

    def __init_subclass__(cls, **kwargs):
        # "HTTPFlow" -> "http", "TCPFlow" -> "tcp", ...
        cls.type = cls.__name__.removesuffix("Flow").lower()
        Flow.__types[cls.type] = cls

    def get_state(self) -> serializable.State:
        """Serialize this flow into a plain-data state dict."""
        state = {
            "version": version.FLOW_FORMAT_VERSION,
            "type": self.type,
            "id": self.id,
            "error": self.error.get_state() if self.error else None,
            "client_conn": self.client_conn.get_state(),
            "server_conn": self.server_conn.get_state(),
            "intercepted": self.intercepted,
            "is_replay": self.is_replay,
            "marked": self.marked,
            "metadata": copy.deepcopy(self.metadata),
            "comment": self.comment,
            "timestamp_created": self.timestamp_created,
        }
        # Only include the backup if it differs from the current state;
        # an identical backup carries no information.
        state["backup"] = copy.deepcopy(self._backup) if self._backup != state else None
        return state

    def set_state(self, state: serializable.State) -> None:
        """Restore this flow from a state dict (the inverse of get_state())."""
        assert state.pop("version") == version.FLOW_FORMAT_VERSION
        assert state.pop("type") == self.type
        self.id = state.pop("id")
        if state["error"]:
            # Reuse an existing Error object where possible.
            if self.error:
                self.error.set_state(state.pop("error"))
            else:
                self.error = Error.from_state(state.pop("error"))
        else:
            self.error = state.pop("error")
        self.client_conn.set_state(state.pop("client_conn"))
        self.server_conn.set_state(state.pop("server_conn"))
        self.intercepted = state.pop("intercepted")
        self.is_replay = state.pop("is_replay")
        self.marked = state.pop("marked")
        self.metadata = state.pop("metadata")
        self.comment = state.pop("comment")
        self.timestamp_created = state.pop("timestamp_created")
        self._backup = state.pop("backup", None)
        # Every key must have been consumed; anything left over indicates a
        # format mismatch.
        assert state == {}

    @classmethod
    def from_state(cls, state: serializable.State) -> Flow:
        """Create a flow of the correct concrete type from a state dict."""
        try:
            flow_cls = Flow.__types[state["type"]]
        except KeyError:
            raise ValueError(f"Unknown flow type: {state['type']}")
        # Placeholder connections; set_state() overwrites their contents.
        client = connection.Client(peername=("", 0), sockname=("", 0))
        server = connection.Server(address=None)
        f = flow_cls(client, server)
        f.set_state(state)
        return f

    def copy(self):
        """Make a copy of this flow."""
        f = super().copy()
        # A copy is detached from any live connection.
        f.live = False
        return f

    def modified(self):
        """
        `True` if this file has been modified by a user, `False` otherwise.
        """
        if self._backup:
            return self._backup != self.get_state()
        else:
            return False

    def backup(self, force=False):
        """
        Save a backup of this flow, which can be restored by calling `Flow.revert()`.
        """
        # NOTE(review): `force` is currently ignored — an existing backup is
        # never overwritten. Confirm whether any caller relies on force=True.
        if not self._backup:
            self._backup = self.get_state()

    def revert(self):
        """
        Revert to the last backed up state.
        """
        if self._backup:
            self.set_state(self._backup)
            self._backup = None

    @property
    def killable(self):
        """*Read-only:* `True` if this flow can be killed, `False` otherwise."""
        # A flow that was already killed (error == KILLED_MESSAGE) cannot be
        # killed again.
        return self.live and not (self.error and self.error.msg == Error.KILLED_MESSAGE)

    def kill(self):
        """
        Kill this flow. The current request/response will not be forwarded to its destination.
        """
        if not self.killable:
            raise exceptions.ControlException("Flow is not killable.")
        # TODO: The way we currently signal killing is not ideal. One major problem is that we cannot kill
        #  flows in transit (https://github.com/mitmproxy/mitmproxy/issues/4711), even though they are advertised
        #  as killable. An alternative approach would be to introduce a `KillInjected` event similar to
        #  `MessageInjected`, which should fix this issue.
        self.error = Error(Error.KILLED_MESSAGE)
        self.intercepted = False
        self.live = False

    def intercept(self):
        """
        Intercept this Flow. Processing will stop until resume is
        called.
        """
        if self.intercepted:
            return
        self.intercepted = True
        # Clear any previous resume signal so waiters block again.
        if self._resume_event is not None:
            self._resume_event.clear()

    async def wait_for_resume(self):
        """
        Wait until this Flow is resumed.
        """
        if not self.intercepted:
            return
        # Lazily create the event on first wait.
        if self._resume_event is None:
            self._resume_event = asyncio.Event()
        await self._resume_event.wait()

    def resume(self):
        """
        Continue with the flow – called after an intercept().
        """
        if not self.intercepted:
            return
        self.intercepted = False
        if self._resume_event is not None:
            self._resume_event.set()

    @property
    def timestamp_start(self) -> float:
        """
        *Read-only:* Start time of the flow.
        Depending on the flow type, this property is an alias for
        `mitmproxy.connection.Client.timestamp_start` or `mitmproxy.http.Request.timestamp_start`.
        """
        return self.client_conn.timestamp_start
__all__ = [
"Flow",
"Error",
]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/udp.py | mitmproxy/udp.py | import time
from mitmproxy import connection
from mitmproxy import flow
from mitmproxy.coretypes import serializable
class UDPMessage(serializable.Serializable):
    """
    An individual UDP datagram.
    """

    def __init__(self, from_client, content, timestamp=None):
        # True if the datagram was sent by the client, False for the server.
        self.from_client = from_client
        # Raw datagram payload.
        self.content = content
        # NOTE: a falsy timestamp (e.g. 0) is replaced with the current time.
        self.timestamp = timestamp or time.time()

    @classmethod
    def from_state(cls, state):
        return cls(*state)

    def get_state(self):
        # Keep the tuple order in sync with __init__'s parameters.
        return self.from_client, self.content, self.timestamp

    def set_state(self, state):
        self.from_client, self.content, self.timestamp = state

    def __repr__(self):
        return "{direction} {content}".format(
            direction="->" if self.from_client else "<-", content=repr(self.content)
        )
class UDPFlow(flow.Flow):
    """
    A UDPFlow is a representation of a UDP session.
    """

    messages: list[UDPMessage]
    """
    The messages transmitted over this connection.

    The latest message can be accessed as `flow.messages[-1]` in event hooks.
    """

    def __init__(
        self,
        client_conn: connection.Client,
        server_conn: connection.Server,
        live: bool = False,
    ):
        super().__init__(client_conn, server_conn, live)
        self.messages = []

    def get_state(self) -> serializable.State:
        # Extend the base flow state with the serialized message list.
        return {
            **super().get_state(),
            "messages": [m.get_state() for m in self.messages],
        }

    def set_state(self, state: serializable.State) -> None:
        # Pop "messages" first: the base class asserts all keys are consumed.
        self.messages = [UDPMessage.from_state(m) for m in state.pop("messages")]
        super().set_state(state)

    def __repr__(self):
        return f"<UDPFlow ({len(self.messages)} messages)>"
__all__ = [
"UDPFlow",
"UDPMessage",
]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/eventsequence.py | mitmproxy/eventsequence.py | from collections.abc import Callable
from collections.abc import Iterator
from typing import Any
from mitmproxy import dns
from mitmproxy import flow
from mitmproxy import hooks
from mitmproxy import http
from mitmproxy import tcp
from mitmproxy import udp
from mitmproxy.proxy import layers
TEventGenerator = Iterator[hooks.Hook]
def _iterate_http(f: http.HTTPFlow) -> TEventGenerator:
    """Yield the hook sequence for a recorded HTTP (and WebSocket) flow."""
    if f.request:
        yield layers.http.HttpRequestHeadersHook(f)
        yield layers.http.HttpRequestHook(f)
    if f.response:
        yield layers.http.HttpResponseHeadersHook(f)
        yield layers.http.HttpResponseHook(f)
    if f.websocket:
        # Detach the recorded messages and re-append them one at a time, so
        # each message hook observes the flow as it looked at that point.
        message_queue = f.websocket.messages
        f.websocket.messages = []
        yield layers.websocket.WebsocketStartHook(f)
        for m in message_queue:
            f.websocket.messages.append(m)
            yield layers.websocket.WebsocketMessageHook(f)
        yield layers.websocket.WebsocketEndHook(f)
    elif f.error:
        yield layers.http.HttpErrorHook(f)
def _iterate_tcp(f: tcp.TCPFlow) -> TEventGenerator:
    """Yield the hook sequence for a recorded TCP flow."""
    # Drain the recorded messages and re-append them one at a time, so each
    # message hook observes the flow growing exactly as it did live.
    messages = f.messages
    f.messages = []
    yield layers.tcp.TcpStartHook(f)
    while messages:
        f.messages.append(messages.pop(0))
        yield layers.tcp.TcpMessageHook(f)
    if f.error:
        yield layers.tcp.TcpErrorHook(f)
    else:
        yield layers.tcp.TcpEndHook(f)
def _iterate_udp(f: udp.UDPFlow) -> TEventGenerator:
    """Yield the hook sequence for a recorded UDP flow."""
    # Drain the recorded messages and re-append them one at a time, so each
    # message hook observes the flow growing exactly as it did live.
    messages = f.messages
    f.messages = []
    yield layers.udp.UdpStartHook(f)
    while messages:
        f.messages.append(messages.pop(0))
        yield layers.udp.UdpMessageHook(f)
    if f.error:
        yield layers.udp.UdpErrorHook(f)
    else:
        yield layers.udp.UdpEndHook(f)
def _iterate_dns(f: dns.DNSFlow) -> TEventGenerator:
    """Yield the hook sequence for a recorded DNS flow."""
    if f.request:
        yield layers.dns.DnsRequestHook(f)
    if f.response:
        yield layers.dns.DnsResponseHook(f)
    if f.error:
        yield layers.dns.DnsErrorHook(f)
# Dispatch table mapping concrete flow types to their hook generators.
_iterate_map: dict[type[flow.Flow], Callable[[Any], TEventGenerator]] = {
    http.HTTPFlow: _iterate_http,
    tcp.TCPFlow: _iterate_tcp,
    udp.UDPFlow: _iterate_udp,
    dns.DNSFlow: _iterate_dns,
}
def iterate(f: flow.Flow) -> TEventGenerator:
    """Yield the sequence of hooks that would fire for *f* if it happened live.

    Raises:
        TypeError: if the flow type has no registered generator.
    """
    try:
        event_generator = _iterate_map[type(f)]
    except KeyError as err:
        raise TypeError(f"Unknown flow type: {f.__class__.__name__}") from err
    yield from event_generator(f)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/connection.py | mitmproxy/connection.py | import dataclasses
import time
import uuid
import warnings
from abc import ABCMeta
from collections.abc import Sequence
from dataclasses import dataclass
from dataclasses import field
from enum import Flag
from typing import Literal
from mitmproxy import certs
from mitmproxy.coretypes import serializable
from mitmproxy.net import server_spec
from mitmproxy.proxy import mode_specs
from mitmproxy.utils import human
class ConnectionState(Flag):
    """The current state of the underlying socket."""

    CLOSED = 0
    CAN_READ = 1
    CAN_WRITE = 2
    # A fully open socket is both readable and writable; half-closed
    # connections carry exactly one of the two bits.
    OPEN = CAN_READ | CAN_WRITE
# The transport-layer protocols mitmproxy proxies.
TransportProtocol = Literal["tcp", "udp"]

# https://docs.openssl.org/master/man3/SSL_get_version/#return-values
TlsVersion = Literal[
    "SSLv3",
    "TLSv1",
    "TLSv1.1",
    "TLSv1.2",
    "TLSv1.3",
    "DTLSv0.9",
    "DTLSv1",
    "DTLSv1.2",
    "QUICv1",
]


# practically speaking we may have IPv6 addresses with flowinfo and scope_id,
# but type checking isn't good enough to properly handle tuple unions.
# this version at least provides useful type checking messages.
Address = tuple[str, int]
@dataclass(kw_only=True)
class Connection(serializable.SerializableDataclass, metaclass=ABCMeta):
    """
    Base class for client and server connections.

    The connection object only exposes metadata about the connection, but not the underlying socket object.
    This is intentional, all I/O should be handled by `mitmproxy.proxy.server` exclusively.
    """

    peername: Address | None
    """The remote's `(ip, port)` tuple for this connection."""
    sockname: Address | None
    """Our local `(ip, port)` tuple for this connection."""

    state: ConnectionState = field(
        default=ConnectionState.CLOSED, metadata={"serialize": False}
    )
    """The current connection state."""

    # all connections have a unique id. While
    # f.client_conn == f2.client_conn already holds true for live flows (where we have object identity),
    # we also want these semantics for recorded flows.
    id: str = field(default_factory=lambda: str(uuid.uuid4()))
    """A unique UUID to identify the connection."""

    transport_protocol: TransportProtocol = field(default="tcp")
    """The connection protocol in use."""

    error: str | None = None
    """
    A string describing a general error with connections to this address.

    The purpose of this property is to signal that new connections to the particular endpoint should not be attempted,
    for example because it uses an untrusted TLS certificate. Regular (unexpected) disconnects do not set the error
    property. This property is only reused per client connection.
    """

    tls: bool = False
    """
    `True` if TLS should be established, `False` otherwise.
    Note that this property only describes if a connection should eventually be protected using TLS.
    To check if TLS has already been established, use `Connection.tls_established`.
    """
    certificate_list: Sequence[certs.Cert] = ()
    """
    The TLS certificate list as sent by the peer.
    The first certificate is the end-entity certificate.

    > [RFC 8446] Prior to TLS 1.3, "certificate_list" ordering required each
    > certificate to certify the one immediately preceding it; however,
    > some implementations allowed some flexibility. Servers sometimes
    > send both a current and deprecated intermediate for transitional
    > purposes, and others are simply configured incorrectly, but these
    > cases can nonetheless be validated properly. For maximum
    > compatibility, all implementations SHOULD be prepared to handle
    > potentially extraneous certificates and arbitrary orderings from any
    > TLS version, with the exception of the end-entity certificate which
    > MUST be first.
    """
    alpn: bytes | None = None
    """The application-layer protocol as negotiated using
    [ALPN](https://en.wikipedia.org/wiki/Application-Layer_Protocol_Negotiation)."""
    alpn_offers: Sequence[bytes] = ()
    """The ALPN offers as sent in the ClientHello."""
    # we may want to add SSL_CIPHER_description here, but that's currently not exposed by cryptography
    cipher: str | None = None
    """The active cipher name as returned by OpenSSL's `SSL_CIPHER_get_name`."""
    cipher_list: Sequence[str] = ()
    """Ciphers accepted by the proxy server on this connection."""
    tls_version: TlsVersion | None = None
    """The active TLS version."""
    sni: str | None = None
    """
    The [Server Name Indication (SNI)](https://en.wikipedia.org/wiki/Server_Name_Indication) sent in the ClientHello.
    """

    # *Timestamp:* Connection start; subclasses document the exact semantics.
    timestamp_start: float | None = None
    timestamp_end: float | None = None
    """*Timestamp:* Connection has been closed."""
    timestamp_tls_setup: float | None = None
    """*Timestamp:* TLS handshake has been completed successfully."""

    @property
    def connected(self) -> bool:
        """*Read-only:* `True` if Connection.state is ConnectionState.OPEN, `False` otherwise."""
        return self.state is ConnectionState.OPEN

    @property
    def tls_established(self) -> bool:
        """*Read-only:* `True` if TLS has been established, `False` otherwise."""
        return self.timestamp_tls_setup is not None

    def __eq__(self, other):
        # Connections compare (and hash) by their unique id, not by endpoint.
        if isinstance(other, Connection):
            return self.id == other.id
        return False

    def __hash__(self):
        return hash(self.id)

    def __repr__(self):
        # Show only fields that differ from their defaults; compact noisy ones.
        attrs = {
            # ensure these come first.
            "id": None,
            "address": None,
        }
        for f in dataclasses.fields(self):
            val = getattr(self, f.name)
            if val != f.default:
                if f.name == "cipher_list":
                    val = f"<{len(val)} ciphers>"
                elif f.name == "id":
                    val = f"…{val[-6:]}"
                attrs[f.name] = val
        return f"{type(self).__name__}({attrs!r})"

    @property
    def alpn_proto_negotiated(self) -> bytes | None:  # pragma: no cover
        """*Deprecated:* An outdated alias for Connection.alpn."""
        warnings.warn(
            "Connection.alpn_proto_negotiated is deprecated, use Connection.alpn instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        return self.alpn
@dataclass(eq=False, repr=False, kw_only=True)
class Client(Connection):  # type: ignore[override]
    """A connection between a client and mitmproxy."""

    peername: Address
    """The client's address."""
    sockname: Address
    """The local address we received this connection on."""

    mitmcert: certs.Cert | None = None
    """
    The certificate used by mitmproxy to establish TLS with the client.
    """

    proxy_mode: mode_specs.ProxyMode = field(
        default=mode_specs.ProxyMode.parse("regular")
    )
    """The proxy server type this client has been connecting to."""

    timestamp_start: float = field(default_factory=time.time)
    """*Timestamp:* TCP SYN received"""

    def __str__(self):
        # Summarize TLS/ALPN state for logging.
        if self.alpn:
            tls_state = f", alpn={self.alpn.decode(errors='replace')}"
        elif self.tls_established:
            tls_state = ", tls"
        else:
            tls_state = ""
        state = self.state.name
        assert state
        return f"Client({human.format_address(self.peername)}, state={state.lower()}{tls_state})"

    @property
    def address(self):  # pragma: no cover
        """*Deprecated:* An outdated alias for Client.peername."""
        warnings.warn(
            "Client.address is deprecated, use Client.peername instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        return self.peername

    @address.setter
    def address(self, x):  # pragma: no cover
        warnings.warn(
            "Client.address is deprecated, use Client.peername instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        self.peername = x

    @property
    def cipher_name(self) -> str | None:  # pragma: no cover
        """*Deprecated:* An outdated alias for Connection.cipher."""
        warnings.warn(
            "Client.cipher_name is deprecated, use Client.cipher instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        return self.cipher

    @property
    def clientcert(self) -> certs.Cert | None:  # pragma: no cover
        """*Deprecated:* An outdated alias for Connection.certificate_list[0]."""
        warnings.warn(
            "Client.clientcert is deprecated, use Client.certificate_list instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        if self.certificate_list:
            return self.certificate_list[0]
        else:
            return None

    @clientcert.setter
    def clientcert(self, val):  # pragma: no cover
        warnings.warn(
            "Client.clientcert is deprecated, use Client.certificate_list instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        if val:
            self.certificate_list = [val]
        else:
            self.certificate_list = []
@dataclass(eq=False, repr=False, kw_only=True)
class Server(Connection):
    """A connection between mitmproxy and an upstream server."""

    address: Address | None  # type: ignore
    """
    The server's `(host, port)` address tuple.

    The host can either be a domain or a plain IP address.
    Which of those two will be present depends on the proxy mode and the client.
    For explicit proxies, this value will reflect what the client instructs mitmproxy to connect to.
    For example, if the client starts off a connection with `CONNECT example.com HTTP/1.1`, it will be `example.com`.
    For transparent proxies such as WireGuard mode, this value will be an IP address.
    """
    peername: Address | None = None
    """
    The server's resolved `(ip, port)` tuple. Will be set during connection establishment.
    May be `None` in upstream proxy mode when the address is resolved by the upstream proxy only.
    """
    # Local socket address; None until the connection has been established.
    sockname: Address | None = None
    timestamp_start: float | None = None
    """
    *Timestamp:* Connection establishment started.

    For IP addresses, this corresponds to sending a TCP SYN; for domains, this corresponds to starting a DNS lookup.
    """
    timestamp_tcp_setup: float | None = None
    """*Timestamp:* TCP ACK received."""
    via: server_spec.ServerSpec | None = None
    """An optional proxy server specification via which the connection should be established."""

    def __str__(self):
        """Concise human-readable summary used in logs."""
        if self.alpn:
            tls_state = f", alpn={self.alpn.decode(errors='replace')}"
        elif self.tls_established:
            tls_state = ", tls"
        else:
            tls_state = ""
        if self.sockname:
            local_port = f", src_port={self.sockname[1]}"
        else:
            local_port = ""
        state = self.state.name
        assert state
        return f"Server({human.format_address(self.address)}, state={state.lower()}{tls_state}{local_port})"

    def __setattr__(self, name, value):
        # Guard: the connection target ("address"/"via") must not change while
        # the underlying socket is open.
        if name in ("address", "via"):
            connection_open = (
                self.__dict__.get("state", ConnectionState.CLOSED)
                is ConnectionState.OPEN
            )
            # assigning the current value is okay, that may be an artifact of calling .set_state().
            attr_changed = self.__dict__.get(name) != value
            if connection_open and attr_changed:
                raise RuntimeError(f"Cannot change server.{name} on open connection.")
        return super().__setattr__(name, value)

    @property
    def ip_address(self) -> Address | None:  # pragma: no cover
        """*Deprecated:* An outdated alias for `Server.peername`."""
        warnings.warn(
            "Server.ip_address is deprecated, use Server.peername instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        return self.peername

    @property
    def cert(self) -> certs.Cert | None:  # pragma: no cover
        """*Deprecated:* An outdated alias for `Connection.certificate_list[0]`."""
        warnings.warn(
            "Server.cert is deprecated, use Server.certificate_list instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        # The leaf certificate is first in the chain, if one was received.
        if self.certificate_list:
            return self.certificate_list[0]
        else:
            return None

    @cert.setter
    def cert(self, val):  # pragma: no cover
        # Deprecated setter counterpart; wraps the value into a one-element chain.
        warnings.warn(
            "Server.cert is deprecated, use Server.certificate_list instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        if val:
            self.certificate_list = [val]
        else:
            self.certificate_list = []
__all__ = ["Connection", "Client", "Server", "ConnectionState"]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/__init__.py | mitmproxy/__init__.py | python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false | |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/types.py | mitmproxy/types.py | import codecs
import glob
import os
import re
from collections.abc import Sequence
from typing import Any
from typing import TYPE_CHECKING
from typing import Union
from mitmproxy import exceptions
from mitmproxy import flow
from mitmproxy.utils import emoji
from mitmproxy.utils import strutils
if TYPE_CHECKING: # pragma: no cover
from mitmproxy.command import CommandManager
class Path(str):
    # Marker type: a filesystem path argument (enables path completion).
    pass


class Cmd(str):
    # Marker type: the name of a registered command.
    pass


class CmdArgs(str):
    # Marker type: the remaining arguments passed to a command.
    pass


class Unknown(str):
    # Marker type: a value whose type could not be determined.
    pass


class Space(str):
    # Marker type: whitespace within a command invocation.
    pass


class CutSpec(Sequence[str]):
    # Marker type: a comma-separated cut specification (see _CutSpecType).
    pass


class Data(Sequence[Sequence[Union[str, bytes]]]):
    # Marker type: tabular data; cannot be passed as a command argument.
    pass


class Marker(str):
    # Marker type: a flow marker (emoji name or "true"/"false").
    pass
class Choice:
    """Annotation for arguments whose valid values are produced at runtime
    by executing another command (`options_command`)."""

    def __init__(self, options_command):
        self.options_command = options_command

    def __instancecheck__(self, instance):  # pragma: no cover
        # return false here so that arguments are piped through parsearg,
        # which does extended validation.
        return False
class _BaseType:
    """Base class for command argument types: each subclass knows how to
    complete, parse and validate values of one Python type."""

    typ: type = object  # the Python type this handler manages
    display: str = ""  # human-readable name shown in help/completion

    def completion(self, manager: "CommandManager", t: Any, s: str) -> Sequence[str]:
        """
        Returns a list of completion strings for a given prefix. The strings
        returned don't necessarily need to be suffixes of the prefix, since
        completers will do prefix filtering themselves.
        """
        raise NotImplementedError

    def parse(self, manager: "CommandManager", typ: Any, s: str) -> Any:
        """
        Parse a string, given the specific type instance (to allow rich type annotations like Choice) and a string.

        Raises ValueError if the value is invalid.
        """
        raise NotImplementedError

    def is_valid(self, manager: "CommandManager", typ: Any, val: Any) -> bool:
        """
        Check if data is valid for this type.
        """
        raise NotImplementedError
class _BoolType(_BaseType):
    typ = bool
    display = "bool"

    # The only accepted spellings, mapped to their values.
    _literals = {"false": False, "true": True}

    def completion(self, manager: "CommandManager", t: type, s: str) -> Sequence[str]:
        """Offer both boolean literals regardless of the typed prefix."""
        return list(self._literals)

    def parse(self, manager: "CommandManager", t: type, s: str) -> bool:
        """Parse exactly "true" or "false"; any other spelling is an error."""
        try:
            return self._literals[s]
        except KeyError:
            raise ValueError("Booleans are 'true' or 'false', got %s" % s) from None

    def is_valid(self, manager: "CommandManager", typ: Any, val: Any) -> bool:
        # Membership (not isinstance) so 1/0 compare equal to True/False,
        # matching the original semantics.
        return val in (True, False)
class _StrType(_BaseType):
    typ = str
    display = "str"

    # Matches the escape sequences Python itself accepts in string literals:
    # https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals
    escape_sequences = re.compile(
        r"""
        \\ (
            [\\'"abfnrtv]  # Standard C escape sequence
            | [0-7]{1,3}  # Character with octal value
            | x..  # Character with hex value
            | N{[^}]+}  # Character name in the Unicode database
            | u....  # Character with 16-bit hex value
            | U........  # Character with 32-bit hex value
        )
        """,
        re.VERBOSE,
    )

    @staticmethod
    def _expand_escape(match: re.Match) -> str:
        # Let Python's own codec interpret the matched escape sequence.
        return codecs.decode(match.group(0), "unicode-escape")  # type: ignore

    def completion(self, manager: "CommandManager", t: type, s: str) -> Sequence[str]:
        """Free-form strings offer no completions."""
        return []

    def parse(self, manager: "CommandManager", t: type, s: str) -> str:
        """Replace every recognized escape sequence with its character."""
        return self.escape_sequences.sub(self._expand_escape, s)

    def is_valid(self, manager: "CommandManager", typ: Any, val: Any) -> bool:
        return isinstance(val, str)
class _BytesType(_BaseType):
    typ = bytes
    display = "bytes"

    def completion(self, manager: "CommandManager", t: type, s: str) -> Sequence[str]:
        # Arbitrary byte strings have no sensible completions.
        return []

    def parse(self, manager: "CommandManager", t: type, s: str) -> bytes:
        # Escape handling (e.g. "\\x00") is delegated to mitmproxy's strutils.
        return strutils.escaped_str_to_bytes(s)

    def is_valid(self, manager: "CommandManager", typ: Any, val: Any) -> bool:
        return isinstance(val, bytes)
class _UnknownType(_BaseType):
    # Fallback handler for values whose type could not be determined:
    # parse is a pass-through and no concrete value ever validates.
    typ = Unknown
    display = "unknown"

    def completion(self, manager: "CommandManager", t: type, s: str) -> Sequence[str]:
        return []

    def parse(self, manager: "CommandManager", t: type, s: str) -> str:
        return s

    def is_valid(self, manager: "CommandManager", typ: Any, val: Any) -> bool:
        return False
class _IntType(_BaseType):
    typ = int
    display = "int"

    def completion(self, manager: "CommandManager", t: type, s: str) -> Sequence[str]:
        return []

    def parse(self, manager: "CommandManager", t: type, s: str) -> int:
        # int() raises ValueError for non-numeric input, which matches the
        # parse() contract of _BaseType.
        return int(s)

    def is_valid(self, manager: "CommandManager", typ: Any, val: Any) -> bool:
        # Note: bool is an int subclass, so True/False also validate here.
        return isinstance(val, int)
class _PathType(_BaseType):
    typ = Path
    display = "path"

    def completion(
        self, manager: "CommandManager", t: type, start: str
    ) -> Sequence[str]:
        """Complete filesystem paths for the prefix `start`.

        Directories get a trailing slash appended; if nothing matches, the
        original prefix is returned unchanged.
        """
        if not start:
            start = "./"
        path = os.path.expanduser(start)
        ret = []
        if os.path.isdir(path):
            # The prefix names an existing directory: offer its contents.
            files = glob.glob(os.path.join(path, "*"))
            prefix = start
        else:
            # Otherwise treat the prefix as a partial file name.
            files = glob.glob(path + "*")
            prefix = os.path.dirname(start)
        prefix = prefix or "./"
        for f in files:
            display = os.path.join(prefix, os.path.normpath(os.path.basename(f)))
            if os.path.isdir(f):
                display += "/"
            ret.append(display)
        if not ret:
            ret = [start]
        ret.sort()
        return ret

    def parse(self, manager: "CommandManager", t: type, s: str) -> str:
        # Expand "~" so commands receive a usable path.
        return os.path.expanduser(s)

    def is_valid(self, manager: "CommandManager", typ: Any, val: Any) -> bool:
        return isinstance(val, str)
class _CmdType(_BaseType):
    typ = Cmd
    display = "cmd"

    def completion(self, manager: "CommandManager", t: type, s: str) -> Sequence[str]:
        # Every registered command name is a candidate.
        return list(manager.commands.keys())

    def parse(self, manager: "CommandManager", t: type, s: str) -> str:
        if s not in manager.commands:
            raise ValueError("Unknown command: %s" % s)
        return s

    def is_valid(self, manager: "CommandManager", typ: Any, val: Any) -> bool:
        return val in manager.commands
class _ArgType(_BaseType):
    # Opaque command argument: passed through unchanged, no completion.
    typ = CmdArgs
    display = "arg"

    def completion(self, manager: "CommandManager", t: type, s: str) -> Sequence[str]:
        return []

    def parse(self, manager: "CommandManager", t: type, s: str) -> str:
        return s

    def is_valid(self, manager: "CommandManager", typ: Any, val: Any) -> bool:
        return isinstance(val, str)
class _StrSeqType(_BaseType):
    typ = Sequence[str]
    display = "str[]"

    def completion(self, manager: "CommandManager", t: type, s: str) -> Sequence[str]:
        """Comma-separated string lists offer no completions."""
        return []

    def parse(self, manager: "CommandManager", t: type, s: str) -> Sequence[str]:
        """Split on commas, trimming surrounding whitespace from each item."""
        return [item.strip() for item in s.split(",")]

    def is_valid(self, manager: "CommandManager", typ: Any, val: Any) -> bool:
        """A valid value is a non-string iterable containing only strings."""
        # A bare str/bytes is iterable but is not a valid string sequence here.
        if isinstance(val, (str, bytes)):
            return False
        try:
            return all(isinstance(item, str) for item in val)
        except TypeError:
            # Not iterable at all.
            return False
class _CutSpecType(_BaseType):
    typ = CutSpec
    display = "cut[]"

    # Flow attributes a cut component may start with; the "header[" forms are
    # prefixes for e.g. "request.header[content-type]".
    valid_prefixes = [
        "request.method",
        "request.scheme",
        "request.host",
        "request.http_version",
        "request.port",
        "request.path",
        "request.url",
        "request.text",
        "request.content",
        "request.raw_content",
        "request.timestamp_start",
        "request.timestamp_end",
        "request.header[",
        "response.status_code",
        "response.reason",
        "response.text",
        "response.content",
        "response.timestamp_start",
        "response.timestamp_end",
        "response.raw_content",
        "response.header[",
        "client_conn.peername.port",
        "client_conn.peername.host",
        "client_conn.tls_version",
        "client_conn.sni",
        "client_conn.tls_established",
        "server_conn.address.port",
        "server_conn.address.host",
        "server_conn.ip_address.host",
        "server_conn.tls_version",
        "server_conn.sni",
        "server_conn.tls_established",
    ]

    def completion(self, manager: "CommandManager", t: type, s: str) -> Sequence[str]:
        """Complete only the last comma-separated component against all prefixes."""
        spec = s.split(",")
        opts = []
        for pref in self.valid_prefixes:
            spec[-1] = pref
            opts.append(",".join(spec))
        return opts

    def parse(self, manager: "CommandManager", t: type, s: str) -> CutSpec:
        # A cut spec is simply its comma-separated parts; no validation here.
        parts: Any = s.split(",")
        return parts

    def is_valid(self, manager: "CommandManager", typ: Any, val: Any) -> bool:
        """Each comma-separated part must start with a known prefix."""
        if not isinstance(val, str):
            return False
        parts = [x.strip() for x in val.split(",")]
        for p in parts:
            for pref in self.valid_prefixes:
                if p.startswith(pref):
                    break
            else:
                # No prefix matched this part.
                return False
        return True
class _BaseFlowType(_BaseType):
    # "@" view markers understood by the view.flows.resolve command.
    viewmarkers = [
        "@all",
        "@focus",
        "@shown",
        "@hidden",
        "@marked",
        "@unmarked",
    ]
    # View markers plus filter-expression operators a flow spec may start with.
    valid_prefixes = viewmarkers + [
        "~q",
        "~s",
        "~a",
        "~hq",
        "~hs",
        "~b",
        "~bq",
        "~bs",
        "~t",
        "~d",
        "~m",
        "~u",
        "~c",
    ]

    def completion(self, manager: "CommandManager", t: type, s: str) -> Sequence[str]:
        # Offer all known prefixes; the completer filters by the typed text.
        return self.valid_prefixes
class _FlowType(_BaseFlowType):
    typ = flow.Flow
    display = "flow"

    def parse(self, manager: "CommandManager", t: type, s: str) -> flow.Flow:
        """Resolve `s` via the view and require exactly one matching flow."""
        try:
            flows = manager.call_strings("view.flows.resolve", [s])
        except exceptions.CommandError as e:
            # Re-raise as ValueError to satisfy the _BaseType.parse contract.
            raise ValueError(str(e)) from e
        if len(flows) != 1:
            raise ValueError(
                "Command requires one flow, specification matched %s." % len(flows)
            )
        return flows[0]

    def is_valid(self, manager: "CommandManager", typ: Any, val: Any) -> bool:
        return isinstance(val, flow.Flow)
class _FlowsType(_BaseFlowType):
    typ = Sequence[flow.Flow]
    display = "flow[]"

    def parse(self, manager: "CommandManager", t: type, s: str) -> Sequence[flow.Flow]:
        """Resolve `s` via the view; any number of matches is acceptable."""
        try:
            return manager.call_strings("view.flows.resolve", [s])
        except exceptions.CommandError as e:
            # Re-raise as ValueError to satisfy the _BaseType.parse contract.
            raise ValueError(str(e)) from e

    def is_valid(self, manager: "CommandManager", typ: Any, val: Any) -> bool:
        try:
            for v in val:
                if not isinstance(v, flow.Flow):
                    return False
        except TypeError:
            # Not iterable at all.
            return False
        return True
class _DataType(_BaseType):
    typ = Data
    display = "data[][]"

    def completion(
        self, manager: "CommandManager", t: type, s: str
    ) -> Sequence[str]:  # pragma: no cover
        raise ValueError("data cannot be passed as argument")

    def parse(
        self, manager: "CommandManager", t: type, s: str
    ) -> Any:  # pragma: no cover
        raise ValueError("data cannot be passed as argument")

    def is_valid(self, manager: "CommandManager", typ: Any, val: Any) -> bool:
        """True if `val` is an iterable of rows whose cells are all str/bytes."""
        # FIXME: validate that all rows have equal length, and all columns have equal types
        try:
            return all(
                isinstance(cell, (str, bytes)) for row in val for cell in row
            )
        except TypeError:
            # `val` (or one of its rows) is not iterable.
            return False
class _ChoiceType(_BaseType):
    typ = Choice
    display = "choice"

    def completion(self, manager: "CommandManager", t: Choice, s: str) -> Sequence[str]:
        # The valid options are produced at runtime by t.options_command.
        return manager.execute(t.options_command)

    def parse(self, manager: "CommandManager", t: Choice, s: str) -> str:
        opts = manager.execute(t.options_command)
        if s not in opts:
            raise ValueError("Invalid choice.")
        return s

    def is_valid(self, manager: "CommandManager", typ: Any, val: Any) -> bool:
        try:
            opts = manager.execute(typ.options_command)
        except exceptions.CommandError:
            # The options command itself failed; nothing can validate.
            return False
        return val in opts
# Every accepted marker spelling: the boolean literals toggle the default
# marker, everything else must be a known emoji name.
ALL_MARKERS = ["true", "false"] + list(emoji.emoji)


class _MarkerType(_BaseType):
    typ = Marker
    display = "marker"

    def completion(self, manager: "CommandManager", t: Choice, s: str) -> Sequence[str]:
        return ALL_MARKERS

    def parse(self, manager: "CommandManager", t: Choice, s: str) -> str:
        # "true" selects the default marker, "false" clears it; any other
        # accepted value is an emoji name used verbatim.
        if s not in ALL_MARKERS:
            raise ValueError("Invalid choice.")
        if s == "true":
            return ":default:"
        elif s == "false":
            return ""
        return s

    def is_valid(self, manager: "CommandManager", typ: Any, val: str) -> bool:
        return val in ALL_MARKERS
class TypeManager:
    """Registry mapping Python types to their `_BaseType` handler instances."""

    def __init__(self, *types):
        # Instantiate each handler once, keyed by the type it manages.
        self.typemap = {t.typ: t() for t in types}

    def get(self, t: type | None, default=None) -> _BaseType | None:
        """Look up the handler for `t`.

        Rich annotation instances (e.g. `Choice(...)`) are keyed by their
        class; plain types are looked up directly.
        """
        handler = self.typemap.get(type(t))
        if handler is not None:
            return handler
        return self.typemap.get(t, default)
# The default registry used to look up handlers for command argument types.
CommandTypes = TypeManager(
    _ArgType,
    _BoolType,
    _ChoiceType,
    _CmdType,
    _CutSpecType,
    _DataType,
    _FlowType,
    _FlowsType,
    _IntType,
    _MarkerType,
    _PathType,
    _StrType,
    _StrSeqType,
    _BytesType,
)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/certs.py | mitmproxy/certs.py | import contextlib
import datetime
import ipaddress
import logging
import os
import sys
import warnings
from collections.abc import Iterable
from dataclasses import dataclass
from pathlib import Path
from typing import cast
from typing import NewType
from typing import Optional
from typing import Union
import OpenSSL
from cryptography import x509
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import dsa
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.asymmetric.types import CertificatePublicKeyTypes
from cryptography.hazmat.primitives.serialization import pkcs12
from cryptography.x509 import ExtendedKeyUsageOID
from cryptography.x509 import NameOID
from mitmproxy.coretypes import serializable
if sys.version_info < (3, 13): # pragma: no cover
from typing_extensions import deprecated
else:
from warnings import deprecated
logger = logging.getLogger(__name__)

# Default expiry must not be too long: https://github.com/mitmproxy/mitmproxy/issues/815
CA_EXPIRY = datetime.timedelta(days=10 * 365)
CERT_EXPIRY = datetime.timedelta(days=365)  # leaf certificates
CRL_EXPIRY = datetime.timedelta(days=7)  # certificate revocation lists
# Generated with "openssl dhparam". It's too slow to generate this on startup.
DEFAULT_DHPARAM = b"""
-----BEGIN DH PARAMETERS-----
MIICCAKCAgEAyT6LzpwVFS3gryIo29J5icvgxCnCebcdSe/NHMkD8dKJf8suFCg3
O2+dguLakSVif/t6dhImxInJk230HmfC8q93hdcg/j8rLGJYDKu3ik6H//BAHKIv
j5O9yjU3rXCfmVJQic2Nne39sg3CreAepEts2TvYHhVv3TEAzEqCtOuTjgDv0ntJ
Gwpj+BJBRQGG9NvprX1YGJ7WOFBP/hWU7d6tgvE6Xa7T/u9QIKpYHMIkcN/l3ZFB
chZEqVlyrcngtSXCROTPcDOQ6Q8QzhaBJS+Z6rcsd7X+haiQqvoFcmaJ08Ks6LQC
ZIL2EtYJw8V8z7C0igVEBIADZBI6OTbuuhDwRw//zU1uq52Oc48CIZlGxTYG/Evq
o9EWAXUYVzWkDSTeBH1r4z/qLPE2cnhtMxbFxuvK53jGB0emy2y1Ei6IhKshJ5qX
IB/aE7SSHyQ3MDHHkCmQJCsOd4Mo26YX61NZ+n501XjqpCBQ2+DfZCBh8Va2wDyv
A2Ryg9SUz8j0AXViRNMJgJrr446yro/FuJZwnQcO3WQnXeqSBnURqKjmqkeFP+d8
6mk2tqJaY507lRNqtGlLnj7f5RNoBFJDCLBNurVgfvq9TCVWKDIFD4vZRjCrnl6I
rD693XKIHUCWOjMh1if6omGXKHH40QuME2gNa50+YPn1iYDl88uDbbMCAQI=
-----END DH PARAMETERS-----
"""
class Cert(serializable.Serializable):
    """Representation of a (TLS) certificate.

    Thin wrapper around `cryptography.x509.Certificate` that adds mitmproxy
    serialization support and convenience accessors.
    """

    _cert: x509.Certificate

    def __init__(self, cert: x509.Certificate):
        assert isinstance(cert, x509.Certificate)
        self._cert = cert

    def __eq__(self, other):
        # Equality is by SHA-256 fingerprint.
        # NOTE(review): assumes `other` is Cert-like; comparing against a
        # foreign object raises AttributeError rather than returning
        # NotImplemented — confirm callers only compare Cert to Cert.
        return self.fingerprint() == other.fingerprint()

    def __repr__(self):
        altnames = [str(x.value) for x in self.altnames]
        return f"<Cert(cn={self.cn!r}, altnames={altnames!r})>"

    def __hash__(self):
        return self._cert.__hash__()

    @classmethod
    def from_state(cls, state):
        # serializable.Serializable protocol: the state is the PEM bytes.
        return cls.from_pem(state)

    def get_state(self):
        return self.to_pem()

    def set_state(self, state):
        self._cert = x509.load_pem_x509_certificate(state)

    @classmethod
    def from_pem(cls, data: bytes) -> "Cert":
        """Parse a PEM-encoded certificate."""
        cert = x509.load_pem_x509_certificate(data)  # type: ignore
        return cls(cert)

    def to_pem(self) -> bytes:
        """Serialize the certificate as PEM bytes."""
        return self._cert.public_bytes(serialization.Encoding.PEM)

    @classmethod
    def from_pyopenssl(self, x509: OpenSSL.crypto.X509) -> "Cert":
        """Convert a pyOpenSSL X509 object into a Cert."""
        return Cert(x509.to_cryptography())

    @deprecated("Use `to_cryptography` instead.")
    def to_pyopenssl(self) -> OpenSSL.crypto.X509:  # pragma: no cover
        return OpenSSL.crypto.X509.from_cryptography(self._cert)

    def to_cryptography(self) -> x509.Certificate:
        return self._cert

    def public_key(self) -> CertificatePublicKeyTypes:
        return self._cert.public_key()

    def fingerprint(self) -> bytes:
        """SHA-256 fingerprint; also the identity used by __eq__."""
        return self._cert.fingerprint(hashes.SHA256())

    @property
    def issuer(self) -> list[tuple[str, str]]:
        return _name_to_keyval(self._cert.issuer)

    @property
    def notbefore(self) -> datetime.datetime:
        """Start of validity as a timezone-aware UTC datetime."""
        try:
            # type definitions haven't caught up with new API yet.
            return self._cert.not_valid_before_utc  # type: ignore
        except AttributeError:  # pragma: no cover
            # cryptography < 42.0 returns naive datetimes; mark them as UTC.
            # datetime.timezone.utc (not datetime.UTC) so this also works on
            # Python < 3.11, where the UTC alias does not exist.
            return self._cert.not_valid_before.replace(tzinfo=datetime.timezone.utc)

    @property
    def notafter(self) -> datetime.datetime:
        """End of validity as a timezone-aware UTC datetime."""
        try:
            return self._cert.not_valid_after_utc  # type: ignore
        except AttributeError:  # pragma: no cover
            return self._cert.not_valid_after.replace(tzinfo=datetime.timezone.utc)

    def has_expired(self) -> bool:
        """Return True if the certificate's notAfter date is in the past."""
        if sys.version_info < (3, 11):  # pragma: no cover
            # Bugfix: datetime.UTC only exists on Python 3.11+; older
            # interpreters must use the datetime.timezone.utc alias, otherwise
            # this compatibility branch raises AttributeError.
            return datetime.datetime.now(datetime.timezone.utc) > self.notafter
        return datetime.datetime.now(datetime.UTC) > self.notafter

    @property
    def subject(self) -> list[tuple[str, str]]:
        return _name_to_keyval(self._cert.subject)

    @property
    def serial(self) -> int:
        return self._cert.serial_number

    @property
    def is_ca(self) -> bool:
        """True if the BasicConstraints extension marks this cert as a CA."""
        constraints: x509.BasicConstraints
        try:
            constraints = self._cert.extensions.get_extension_for_class(
                x509.BasicConstraints
            ).value
            return constraints.ca
        except x509.ExtensionNotFound:
            return False

    @property
    def keyinfo(self) -> tuple[str, int]:
        """(algorithm name, key size in bits) of the certificate's public key."""
        public_key = self._cert.public_key()
        if isinstance(public_key, rsa.RSAPublicKey):
            return "RSA", public_key.key_size
        if isinstance(public_key, dsa.DSAPublicKey):
            return "DSA", public_key.key_size
        if isinstance(public_key, ec.EllipticCurvePublicKey):
            return f"EC ({public_key.curve.name})", public_key.key_size
        # Fallback for key types without a dedicated branch; -1 when the key
        # type exposes no key_size attribute.
        return (
            public_key.__class__.__name__.replace("PublicKey", "").replace("_", ""),
            getattr(public_key, "key_size", -1),
        )  # pragma: no cover

    @property
    def cn(self) -> str | None:
        """The subject's first Common Name attribute, if any."""
        attrs = self._cert.subject.get_attributes_for_oid(x509.NameOID.COMMON_NAME)
        if attrs:
            return cast(str, attrs[0].value)
        return None

    @property
    def organization(self) -> str | None:
        """The subject's first Organization attribute, if any."""
        attrs = self._cert.subject.get_attributes_for_oid(
            x509.NameOID.ORGANIZATION_NAME
        )
        if attrs:
            return cast(str, attrs[0].value)
        return None

    @property
    def altnames(self) -> x509.GeneralNames:
        """
        Get all SubjectAlternativeName DNS altnames.
        """
        try:
            sans = self._cert.extensions.get_extension_for_class(
                x509.SubjectAlternativeName
            ).value
        except x509.ExtensionNotFound:
            return x509.GeneralNames([])
        else:
            return x509.GeneralNames(sans)

    @property
    def crl_distribution_points(self) -> list[str]:
        """URIs of all CRL distribution points listed in the certificate."""
        try:
            ext = self._cert.extensions.get_extension_for_class(
                x509.CRLDistributionPoints
            ).value
        except x509.ExtensionNotFound:
            return []
        else:
            return [
                dist_point.full_name[0].value
                for dist_point in ext
                if dist_point.full_name
                and isinstance(dist_point.full_name[0], x509.UniformResourceIdentifier)
            ]
def _name_to_keyval(name: x509.Name) -> list[tuple[str, str]]:
    """Flatten an x509.Name into (attribute, value) string pairs.

    The attribute key is taken from the RFC 4514 rendering, e.g. "CN" or "O".
    """
    return [
        (attr.rfc4514_string().partition("=")[0], cast(str, attr.value))
        for attr in name
    ]
def create_ca(
    organization: str,
    cn: str,
    key_size: int,
) -> tuple[rsa.RSAPrivateKeyWithSerialization, x509.Certificate]:
    """Create a self-signed CA certificate and its freshly generated RSA key.

    Validity starts two days in the past (tolerating client clock skew) and
    lasts CA_EXPIRY; key usage is restricted to certificate/CRL signing.
    """
    now = datetime.datetime.now()
    private_key = rsa.generate_private_key(
        public_exponent=65537,
        key_size=key_size,
    )  # type: ignore
    name = x509.Name(
        [
            x509.NameAttribute(NameOID.COMMON_NAME, cn),
            x509.NameAttribute(NameOID.ORGANIZATION_NAME, organization),
        ]
    )
    builder = x509.CertificateBuilder()
    builder = builder.serial_number(x509.random_serial_number())
    builder = builder.subject_name(name)
    builder = builder.not_valid_before(now - datetime.timedelta(days=2))
    builder = builder.not_valid_after(now + CA_EXPIRY)
    # Self-signed: issuer == subject, signed below with the CA's own key.
    builder = builder.issuer_name(name)
    builder = builder.public_key(private_key.public_key())
    builder = builder.add_extension(
        x509.BasicConstraints(ca=True, path_length=None), critical=True
    )
    builder = builder.add_extension(
        x509.ExtendedKeyUsage([ExtendedKeyUsageOID.SERVER_AUTH]), critical=False
    )
    builder = builder.add_extension(
        x509.KeyUsage(
            digital_signature=False,
            content_commitment=False,
            key_encipherment=False,
            data_encipherment=False,
            key_agreement=False,
            key_cert_sign=True,
            crl_sign=True,
            encipher_only=False,
            decipher_only=False,
        ),
        critical=True,
    )
    builder = builder.add_extension(
        x509.SubjectKeyIdentifier.from_public_key(private_key.public_key()),
        critical=False,
    )
    cert = builder.sign(private_key=private_key, algorithm=hashes.SHA256())  # type: ignore
    return private_key, cert
def _fix_legacy_sans(sans: Iterable[x509.GeneralName] | list[str]) -> x509.GeneralNames:
    """
    SANs used to be a list of strings in mitmproxy 10.1 and below, but now they're a list of GeneralNames.
    This function converts the old format to the new one.
    """
    if isinstance(sans, x509.GeneralNames):
        return sans
    elif (
        isinstance(sans, list) and len(sans) > 0 and isinstance(sans[0], str)
    ):  # pragma: no cover
        warnings.warn(
            "Passing SANs as a list of strings is deprecated.",
            DeprecationWarning,
            stacklevel=2,
        )
        ss: list[x509.GeneralName] = []
        for x in cast(list[str], sans):
            try:
                ip = ipaddress.ip_address(x)
            except ValueError:
                # Not an IP address: treat as a DNS name, IDNA-encoding any
                # non-ASCII labels first.
                x = x.encode("idna").decode()
                ss.append(x509.DNSName(x))
            else:
                ss.append(x509.IPAddress(ip))
        return x509.GeneralNames(ss)
    else:
        # Already GeneralName objects (or an empty list): just wrap.
        return x509.GeneralNames(cast(Iterable[x509.GeneralName], sans))
def dummy_cert(
    privkey: rsa.RSAPrivateKey,
    cacert: x509.Certificate,
    commonname: str | None,
    sans: Iterable[x509.GeneralName],
    organization: str | None = None,
    crl_url: str | None = None,
) -> Cert:
    """
    Generates a dummy certificate.

    privkey: CA private key
    cacert: CA certificate
    commonname: Common name for the generated certificate.
    sans: A list of Subject Alternate Names.
    organization: Organization name for the generated certificate.
    crl_url: URL of CRL distribution point

    Returns cert if operation succeeded, None if not.
    """
    builder = x509.CertificateBuilder()
    builder = builder.issuer_name(cacert.subject)
    builder = builder.add_extension(
        x509.ExtendedKeyUsage([ExtendedKeyUsageOID.SERVER_AUTH]), critical=False
    )
    builder = builder.public_key(cacert.public_key())
    now = datetime.datetime.now()
    # Backdate notBefore to tolerate client clock skew.
    builder = builder.not_valid_before(now - datetime.timedelta(days=2))
    builder = builder.not_valid_after(now + CERT_EXPIRY)
    subject = []
    # Common names longer than 63 characters are not representable; omit them.
    is_valid_commonname = commonname is not None and len(commonname) < 64
    if is_valid_commonname:
        assert commonname is not None  # for type narrowing only
        subject.append(x509.NameAttribute(NameOID.COMMON_NAME, commonname))
    if organization is not None:
        assert organization is not None  # NOTE(review): redundant with the `if` above
        subject.append(x509.NameAttribute(NameOID.ORGANIZATION_NAME, organization))
    builder = builder.subject_name(x509.Name(subject))
    builder = builder.serial_number(x509.random_serial_number())
    # RFC 5280 §4.2.1.6: subjectAltName is critical if subject is empty.
    builder = builder.add_extension(
        x509.SubjectAlternativeName(_fix_legacy_sans(sans)),
        critical=not is_valid_commonname,
    )
    # https://datatracker.ietf.org/doc/html/rfc5280#section-4.2.1.1
    builder = builder.add_extension(
        x509.AuthorityKeyIdentifier.from_issuer_public_key(cacert.public_key()),  # type: ignore
        critical=False,
    )
    # If CA and leaf cert have the same Subject Key Identifier, SChannel breaks in funny ways,
    # see https://github.com/mitmproxy/mitmproxy/issues/6494.
    # https://datatracker.ietf.org/doc/html/rfc5280#section-4.2.1.2 states
    # that SKI is optional for the leaf cert, so we skip that.
    if crl_url:
        builder = builder.add_extension(
            x509.CRLDistributionPoints(
                [
                    x509.DistributionPoint(
                        [x509.UniformResourceIdentifier(crl_url)],
                        relative_name=None,
                        crl_issuer=None,
                        reasons=None,
                    )
                ]
            ),
            critical=False,
        )
    cert = builder.sign(private_key=privkey, algorithm=hashes.SHA256())  # type: ignore
    return Cert(cert)
def dummy_crl(
    privkey: rsa.RSAPrivateKey,
    cacert: x509.Certificate,
) -> bytes:
    """
    Generates an empty CRL signed with the CA key

    privkey: CA private key
    cacert: CA certificate

    Returns a CRL DER encoded
    """
    builder = x509.CertificateRevocationListBuilder()
    builder = builder.issuer_name(cacert.issuer)
    now = datetime.datetime.now()
    # Backdate lastUpdate to tolerate client clock skew.
    builder = builder.last_update(now - datetime.timedelta(days=2))
    builder = builder.next_update(now + CRL_EXPIRY)
    builder = builder.add_extension(x509.CRLNumber(1000), False)  # meaningless number
    crl = builder.sign(private_key=privkey, algorithm=hashes.SHA256())
    return crl.public_bytes(serialization.Encoding.DER)
@dataclass(frozen=True)
class CertStoreEntry:
    # One generated or user-supplied leaf certificate plus its key material.
    cert: Cert
    privatekey: rsa.RSAPrivateKey
    chain_file: Path | None  # file the chain was loaded from, if any
    chain_certs: list[Cert]
TCustomCertId = str  # manually provided certs (e.g. mitmproxy's --certs)
TGeneratedCertId = tuple[Optional[str], x509.GeneralNames]  # (common_name, sans)
TCertId = Union[TCustomCertId, TGeneratedCertId]

# PEM-derived Diffie-Hellman parameters, opaque bytes handed to pyOpenSSL.
DHParams = NewType("DHParams", bytes)
class CertStore:
"""
Implements an in-memory certificate store.
"""
STORE_CAP = 100
default_privatekey: rsa.RSAPrivateKey
default_ca: Cert
default_chain_file: Path | None
default_chain_certs: list[Cert]
dhparams: DHParams
certs: dict[TCertId, CertStoreEntry]
expire_queue: list[CertStoreEntry]
def __init__(
    self,
    default_privatekey: rsa.RSAPrivateKey,
    default_ca: Cert,
    default_chain_file: Path | None,
    default_crl: bytes,
    dhparams: DHParams,
):
    """Set up the store around a default CA and its key material."""
    self.default_privatekey = default_privatekey
    self.default_ca = default_ca
    self.default_chain_file = default_chain_file
    self.default_crl = default_crl
    # When a chain file is given, serve its full chain; otherwise just the CA.
    self.default_chain_certs = (
        [
            Cert(c)
            for c in x509.load_pem_x509_certificates(
                self.default_chain_file.read_bytes()
            )
        ]
        if self.default_chain_file
        else [default_ca]
    )
    self.dhparams = dhparams
    self.certs = {}
    self.expire_queue = []
def expire(self, entry: CertStoreEntry) -> None:
    # FIFO eviction: once more than STORE_CAP entries are queued, drop the
    # oldest entry and remove every name still mapping to it.
    self.expire_queue.append(entry)
    if len(self.expire_queue) > self.STORE_CAP:
        d = self.expire_queue.pop(0)
        self.certs = {k: v for k, v in self.certs.items() if v != d}
@staticmethod
def load_dhparam(path: Path) -> DHParams:
    """Load DH parameters from `path`, writing built-in defaults first if
    the file does not exist yet."""
    # mitmproxy<=0.10 doesn't generate a dhparam file.
    # Create it now if necessary.
    if not path.exists():
        path.write_bytes(DEFAULT_DHPARAM)
    # we could use cryptography for this, but it's unclear how to convert cryptography's object to pyOpenSSL's
    # expected format.
    bio = OpenSSL.SSL._lib.BIO_new_file(  # type: ignore
        str(path).encode(sys.getfilesystemencoding()), b"r"
    )
    if bio != OpenSSL.SSL._ffi.NULL:  # type: ignore
        # ffi.gc() arranges for the native BIO/DH handles to be freed when
        # the Python wrappers are garbage collected.
        bio = OpenSSL.SSL._ffi.gc(bio, OpenSSL.SSL._lib.BIO_free)  # type: ignore
        dh = OpenSSL.SSL._lib.PEM_read_bio_DHparams(  # type: ignore
            bio,
            OpenSSL.SSL._ffi.NULL,  # type: ignore
            OpenSSL.SSL._ffi.NULL,  # type: ignore
            OpenSSL.SSL._ffi.NULL,  # type: ignore
        )
        dh = OpenSSL.SSL._ffi.gc(dh, OpenSSL.SSL._lib.DH_free)  # type: ignore
        return dh
    raise RuntimeError("Error loading DH Params.")  # pragma: no cover
@classmethod
def from_store(
    cls,
    path: Path | str,
    basename: str,
    key_size: int,
    passphrase: bytes | None = None,
) -> "CertStore":
    """Open an on-disk CA store at `path`, creating it on first use."""
    path = Path(path)
    ca_file = path / f"{basename}-ca.pem"
    dhparam_file = path / f"{basename}-dhparam.pem"
    # First run: generate the CA key pair and supporting files.
    if not ca_file.exists():
        cls.create_store(path, basename, key_size)
    return cls.from_files(ca_file, dhparam_file, passphrase)
@classmethod
def from_files(
    cls, ca_file: Path, dhparam_file: Path, passphrase: bytes | None = None
) -> "CertStore":
    """Build a CertStore from an existing CA PEM file and dhparam file."""
    raw = ca_file.read_bytes()
    key = load_pem_private_key(raw, passphrase)
    dh = cls.load_dhparam(dhparam_file)
    certs = x509.load_pem_x509_certificates(raw)
    ca = Cert(certs[0])
    # Sign a fresh (empty) CRL for clients that check revocation.
    crl = dummy_crl(key, ca._cert)
    if len(certs) > 1:
        # The file contains a full chain; serve it as-is.
        chain_file: Path | None = ca_file
    else:
        chain_file = None
    return cls(key, ca, chain_file, crl, dh)
@staticmethod
@contextlib.contextmanager
def umask_secret():
    """
    Context to temporarily set umask to its original value bitwise-or'ed with 0o77.
    Useful when writing private keys to disk so that only the owner
    will be able to read them.
    """
    # os.umask(0) is the only way to *read* the current umask; restore below.
    original_umask = os.umask(0)
    os.umask(original_umask | 0o77)
    try:
        yield
    finally:
        # Always restore the process-wide umask, even on error.
        os.umask(original_umask)
@staticmethod
def create_store(
    path: Path, basename: str, key_size: int, organization=None, cn=None
) -> None:
    """Generate a new CA and write it to `path` in several formats
    (PEM with and without key, PKCS12, .cer for Android)."""
    path.mkdir(parents=True, exist_ok=True)
    organization = organization or basename
    cn = cn or basename
    key: rsa.RSAPrivateKeyWithSerialization
    ca: x509.Certificate
    key, ca = create_ca(organization=organization, cn=cn, key_size=key_size)
    # Dump the CA plus private key.
    with CertStore.umask_secret():
        # PEM format
        (path / f"{basename}-ca.pem").write_bytes(
            key.private_bytes(
                encoding=serialization.Encoding.PEM,
                format=serialization.PrivateFormat.TraditionalOpenSSL,
                encryption_algorithm=serialization.NoEncryption(),
            )
            + ca.public_bytes(serialization.Encoding.PEM)
        )
        # PKCS12 format for Windows devices
        (path / f"{basename}-ca.p12").write_bytes(
            pkcs12.serialize_key_and_certificates(  # type: ignore
                name=basename.encode(),
                key=key,
                cert=ca,
                cas=None,
                encryption_algorithm=serialization.NoEncryption(),
            )
        )
    # Dump the certificate in PEM format
    pem_cert = ca.public_bytes(serialization.Encoding.PEM)
    (path / f"{basename}-ca-cert.pem").write_bytes(pem_cert)
    # Create a .cer file with the same contents for Android
    (path / f"{basename}-ca-cert.cer").write_bytes(pem_cert)
    # Dump the certificate in PKCS12 format for Windows devices
    (path / f"{basename}-ca-cert.p12").write_bytes(
        pkcs12.serialize_key_and_certificates(
            name=basename.encode(),
            key=None,  # type: ignore
            cert=ca,
            cas=None,
            encryption_algorithm=serialization.NoEncryption(),
        )
    )
    (path / f"{basename}-dhparam.pem").write_bytes(DEFAULT_DHPARAM)
    def add_cert_file(
        self, spec: str, path: Path, passphrase: bytes | None = None
    ) -> None:
        """
        Load a user-supplied certificate (plus, if present, its private key
        and chain) from a PEM file and register it under *spec*.

        Raises ValueError if no usable private key is found or if the
        key and certificate do not match.
        """
        raw = path.read_bytes()
        cert = Cert.from_pem(raw)
        try:
            private_key = load_pem_private_key(raw, password=passphrase)
        except ValueError as e:
            # No key in the PEM: fall back to mitmproxy's default key, which
            # only works if the certificate was generated for that key.
            private_key = self.default_privatekey
            if cert.public_key() != private_key.public_key():
                raise ValueError(
                    f'Unable to find private key in "{path.absolute()}": {e}'
                ) from e
        else:
            # Key loaded from the file: it must match the certificate.
            if cert.public_key() != private_key.public_key():
                raise ValueError(
                    f'Private and public keys in "{path.absolute()}" do not match:\n'
                    f"{cert.public_key()=}\n"
                    f"{private_key.public_key()=}"
                )
        try:
            chain = [Cert(x) for x in x509.load_pem_x509_certificates(raw)]
        except ValueError as e:
            # Chain parsing is best-effort: fall back to just the leaf cert.
            logger.warning(f"Failed to read certificate chain: {e}")
            chain = [cert]
        if cert.is_ca:
            logger.warning(
                f'"{path.absolute()}" is a certificate authority and not a leaf certificate. '
                f"This indicates a misconfiguration, see https://docs.mitmproxy.org/stable/concepts-certificates/."
            )
        self.add_cert(CertStoreEntry(cert, private_key, path, chain), spec)
def add_cert(self, entry: CertStoreEntry, *names: str) -> None:
"""
Adds a cert to the certstore. We register the CN in the cert plus
any SANs, and also the list of names provided as an argument.
"""
if entry.cert.cn:
self.certs[entry.cert.cn] = entry
for i in entry.cert.altnames:
self.certs[str(i.value)] = entry
for i in names:
self.certs[i] = entry
@staticmethod
def asterisk_forms(dn: str | x509.GeneralName) -> list[str]:
"""
Return all asterisk forms for a domain. For example, for www.example.com this will return
[b"www.example.com", b"*.example.com", b"*.com"]. The single wildcard "*" is omitted.
"""
if isinstance(dn, str):
parts = dn.split(".")
ret = [dn]
for i in range(1, len(parts)):
ret.append("*." + ".".join(parts[i:]))
return ret
elif isinstance(dn, x509.DNSName):
return CertStore.asterisk_forms(dn.value)
else:
return [str(dn.value)]
    def get_cert(
        self,
        commonname: str | None,
        sans: Iterable[x509.GeneralName],
        organization: str | None = None,
        crl_url: str | None = None,
    ) -> CertStoreEntry:
        """
        Return a certificate entry for the given names, generating and
        caching a dummy leaf certificate on first use.

        commonname: Common name for the generated certificate. Must be a
        valid, plain-ASCII, IDNA-encoded domain name.
        sans: A list of Subject Alternate Names.
        organization: Organization name for the generated certificate.
        crl_url: URL of CRL distribution point
        """
        sans = _fix_legacy_sans(sans)
        # Candidate cache keys, most specific first: exact + wildcard forms
        # of the CN and every SAN, the catch-all "*", and finally the
        # (commonname, sans) tuple under which generated certs are cached.
        potential_keys: list[TCertId] = []
        if commonname:
            potential_keys.extend(self.asterisk_forms(commonname))
        for s in sans:
            potential_keys.extend(self.asterisk_forms(s))
        potential_keys.append("*")
        potential_keys.append((commonname, sans))
        name = next(filter(lambda key: key in self.certs, potential_keys), None)
        if name:
            entry = self.certs[name]
        else:
            # Cache miss: mint a dummy certificate signed by our CA.
            entry = CertStoreEntry(
                cert=dummy_cert(
                    self.default_privatekey,
                    self.default_ca._cert,
                    commonname,
                    sans,
                    organization,
                    crl_url,
                ),
                privatekey=self.default_privatekey,
                chain_file=self.default_chain_file,
                chain_certs=self.default_chain_certs,
            )
            self.certs[(commonname, sans)] = entry
            self.expire(entry)
        return entry
def load_pem_private_key(data: bytes, password: bytes | None) -> rsa.RSAPrivateKey:
    """
    Like cryptography's load_pem_private_key, but silently retries without a
    password if the private key turns out to be unencrypted.
    """
    try:
        return serialization.load_pem_private_key(data, password)  # type: ignore
    except TypeError:
        # cryptography raises TypeError when a password is supplied for an
        # unencrypted key; drop the password and try once more.
        if password is None:
            raise
        return load_pem_private_key(data, None)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/ctx.py | mitmproxy/ctx.py | from __future__ import annotations
import typing
if typing.TYPE_CHECKING:
import mitmproxy.log
import mitmproxy.master
import mitmproxy.options
# Module-level handles exposed to addons. Only annotated here (no values are
# assigned in this module); presumably populated at runtime by the active
# mitmproxy master — TODO confirm against mitmproxy.master. The
# TYPE_CHECKING-only imports above keep this module cheap to import.
master: mitmproxy.master.Master
options: mitmproxy.options.Options
log: mitmproxy.log.Log
"""Deprecated: Use Python's builtin `logging` module instead."""
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/hooks.py | mitmproxy/hooks.py | import re
import warnings
from collections.abc import Sequence
from dataclasses import dataclass
from dataclasses import fields
from dataclasses import is_dataclass
from typing import Any
from typing import ClassVar
from typing import TYPE_CHECKING
import mitmproxy.flow
if TYPE_CHECKING:
import mitmproxy.addonmanager
import mitmproxy.log
class Hook:
    """
    Base class for event hooks. Subclasses must be dataclasses; they are
    automatically registered in `all_hooks` under a snake_case event name
    derived from the class name (e.g. HttpRequestHook -> "http_request").
    """
    # Event name for the subclass; set automatically in __init_subclass__
    # unless the subclass declares its own.
    name: ClassVar[str]
    def args(self) -> list[Any]:
        """Return all dataclass field values, in declaration order."""
        args = []
        for field in fields(self):  # type: ignore[arg-type]
            args.append(getattr(self, field.name))
        return args
    def __new__(cls, *args, **kwargs):
        # Only concrete dataclass subclasses may be instantiated.
        if cls is Hook:
            raise TypeError("Hook may not be instantiated directly.")
        if not is_dataclass(cls):
            raise TypeError("Subclass is not a dataclass.")
        return super().__new__(cls)
    def __init_subclass__(cls, **kwargs):
        # initialize .name attribute. HttpRequestHook -> http_request
        if cls.__dict__.get("name", None) is None:
            name = cls.__name__.replace("Hook", "")
            cls.name = re.sub("(?!^)([A-Z]+)", r"_\1", name).lower()
        if cls.name in all_hooks:
            other = all_hooks[cls.name]
            warnings.warn(
                f"Two conflicting event classes for {cls.name}: {cls} and {other}",
                RuntimeWarning,
            )
        if cls.name == "":
            return  # don't register Hook class.
        all_hooks[cls.name] = cls
        # define a custom hash and __eq__ function so that events are hashable and not comparable.
        cls.__hash__ = object.__hash__  # type: ignore
        cls.__eq__ = object.__eq__  # type: ignore
all_hooks: dict[str, type[Hook]] = {}
@dataclass
class ConfigureHook(Hook):
    """
    Called when configuration changes. The updated argument is a
    set-like object containing the keys of all changed options. This
    event is called during startup with all options in the updated set.
    """
    # Names of the options that changed.
    updated: set[str]
@dataclass
class DoneHook(Hook):
    """
    Called when the addon shuts down, either by being removed from
    the mitmproxy instance, or when mitmproxy itself shuts down. On
    shutdown, this event is called after the event loop is
    terminated, guaranteeing that it will be the final event an addon
    sees. Note that log handlers are shut down at this point, so
    calls to log functions will produce no output.
    """
@dataclass
class RunningHook(Hook):
    """
    Called when the proxy is completely up and running. At this point,
    you can expect all addons to be loaded and all options to be set.
    """
@dataclass
class UpdateHook(Hook):
    """
    Update is called when one or more flow objects have been modified,
    usually from a different addon.
    """
    # The flows that were modified.
    flows: Sequence[mitmproxy.flow.Flow]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/optmanager.py | mitmproxy/optmanager.py | from __future__ import annotations
import contextlib
import copy
import pprint
import textwrap
import weakref
from collections.abc import Callable
from collections.abc import Iterable
from collections.abc import Sequence
from dataclasses import dataclass
from pathlib import Path
from typing import Any
from typing import Optional
from typing import TextIO
import ruamel.yaml
from mitmproxy import exceptions
from mitmproxy.utils import signals
from mitmproxy.utils import typecheck
"""
The base implementation for Options.
"""
unset = object()
class _Option:
__slots__ = ("name", "typespec", "value", "_default", "choices", "help")
def __init__(
self,
name: str,
typespec: type | object, # object for Optional[x], which is not a type.
default: Any,
help: str,
choices: Sequence[str] | None,
) -> None:
typecheck.check_option_type(name, default, typespec)
self.name = name
self.typespec = typespec
self._default = default
self.value = unset
self.help = textwrap.dedent(help).strip().replace("\n", " ")
self.choices = choices
def __repr__(self):
return f"{self.current()} [{self.typespec}]"
@property
def default(self):
return copy.deepcopy(self._default)
def current(self) -> Any:
if self.value is unset:
v = self.default
else:
v = self.value
return copy.deepcopy(v)
def set(self, value: Any) -> None:
typecheck.check_option_type(self.name, value, self.typespec)
self.value = value
def reset(self) -> None:
self.value = unset
def has_changed(self) -> bool:
return self.current() != self.default
def __eq__(self, other) -> bool:
for i in self.__slots__:
if getattr(self, i) != getattr(other, i):
return False
return True
def __deepcopy__(self, _):
o = _Option(self.name, self.typespec, self.default, self.help, self.choices)
if self.has_changed():
o.value = self.current()
return o
@dataclass
class _UnconvertedStrings:
    # Raw string values stashed by `OptManager.set(..., defer=True)` that
    # could not be converted yet because the option was unknown at the time.
    val: list[str]
def _sig_changed_spec(updated: set[str]) -> None:  # pragma: no cover
    ...  # expected function signature for OptManager.changed receivers.
def _sig_errored_spec(exc: Exception) -> None:  # pragma: no cover
    ...  # expected function signature for OptManager.errored receivers.
class OptManager:
    """
    OptManager is the base class from which Options objects are derived.
    .changed is a Signal that triggers whenever options are
    updated. If any handler in the chain raises an exceptions.OptionsError
    exception, all changes are rolled back, the exception is suppressed,
    and the .errored signal is notified.
    Optmanager always returns a deep copy of options to ensure that
    mutation doesn't change the option state inadvertently.
    """
    def __init__(self) -> None:
        # Values passed in before their option existed; see set()/update_defer().
        self.deferred: dict[str, Any] = {}
        self.changed = signals.SyncSignal(_sig_changed_spec)
        self.changed.connect(self._notify_subscribers)
        self.errored = signals.SyncSignal(_sig_errored_spec)
        # (weak callback ref, set of watched option names) pairs.
        self._subscriptions: list[tuple[weakref.ref[Callable], set[str]]] = []
        # Options must be the last attribute here - after that, we raise an
        # error for attribute assignment to unknown options.
        self._options: dict[str, Any] = {}
    def add_option(
        self,
        name: str,
        typespec: type | object,
        default: Any,
        help: str,
        choices: Sequence[str] | None = None,
    ) -> None:
        """Register a new option and notify .changed subscribers."""
        self._options[name] = _Option(name, typespec, default, help, choices)
        self.changed.send(updated={name})
    @contextlib.contextmanager
    def rollback(self, updated, reraise=False):
        """
        Context manager that restores the previous option state if an
        OptionsError is raised inside the block (e.g. by a .changed handler).
        """
        old = copy.deepcopy(self._options)
        try:
            yield
        except exceptions.OptionsError as e:
            # Notify error handlers
            self.errored.send(exc=e)
            # Rollback
            self.__dict__["_options"] = old
            self.changed.send(updated=updated)
            if reraise:
                raise e
    def subscribe(self, func, opts):
        """
        Subscribe a callable to the .changed signal, but only for a
        specified list of options. The callable should accept arguments
        (options, updated), and may raise an OptionsError.
        The event will automatically be unsubscribed if the callable goes out of scope.
        """
        for i in opts:
            if i not in self._options:
                raise exceptions.OptionsError("No such option: %s" % i)
        self._subscriptions.append((signals.make_weak_ref(func), set(opts)))
    def _notify_subscribers(self, updated) -> None:
        # Invoke subscribers whose watched options intersect `updated`;
        # garbage-collect entries whose weak callback reference has died.
        cleanup = False
        for ref, opts in self._subscriptions:
            callback = ref()
            if callback is not None:
                if opts & updated:
                    callback(self, updated)
            else:
                cleanup = True
        if cleanup:
            self.__dict__["_subscriptions"] = [
                (ref, opts) for (ref, opts) in self._subscriptions if ref() is not None
            ]
    def __eq__(self, other):
        if isinstance(other, OptManager):
            return self._options == other._options
        return False
    def __deepcopy__(self, memodict=None):
        o = OptManager()
        o.__dict__["_options"] = copy.deepcopy(self._options, memodict)
        return o
    __copy__ = __deepcopy__
    def __getattr__(self, attr):
        if attr in self._options:
            return self._options[attr].current()
        else:
            raise AttributeError("No such option: %s" % attr)
    def __setattr__(self, attr, value):
        # This is slightly tricky. We allow attributes to be set on the instance
        # until we have an _options attribute. After that, assignment is sent to
        # the update function, and will raise an error for unknown options.
        opts = self.__dict__.get("_options")
        if not opts:
            super().__setattr__(attr, value)
        else:
            self.update(**{attr: value})
    def keys(self):
        """Return the set of all known option names."""
        return set(self._options.keys())
    def items(self):
        return self._options.items()
    def __contains__(self, k):
        return k in self._options
    def reset(self):
        """
        Restore defaults for all options.
        """
        for o in self._options.values():
            o.reset()
        self.changed.send(updated=set(self._options.keys()))
    def update_known(self, **kwargs):
        """
        Update and set all known options from kwargs. Returns a dictionary
        of unknown options.
        """
        known, unknown = {}, {}
        for k, v in kwargs.items():
            if k in self._options:
                known[k] = v
            else:
                unknown[k] = v
        updated = set(known.keys())
        if updated:
            # All-or-nothing: a failing .changed handler rolls everything back.
            with self.rollback(updated, reraise=True):
                for k, v in known.items():
                    self._options[k].set(v)
                self.changed.send(updated=updated)
        return unknown
    def update_defer(self, **kwargs):
        """Update known options; stash unknown ones for process_deferred()."""
        unknown = self.update_known(**kwargs)
        self.deferred.update(unknown)
    def update(self, **kwargs):
        """Update options, raising KeyError for any unknown option names."""
        u = self.update_known(**kwargs)
        if u:
            raise KeyError("Unknown options: %s" % ", ".join(u.keys()))
    def setter(self, attr):
        """
        Generate a setter for a given attribute. This returns a callable
        taking a single argument.
        """
        if attr not in self._options:
            raise KeyError("No such option: %s" % attr)
        def setter(x):
            setattr(self, attr, x)
        return setter
    def toggler(self, attr):
        """
        Generate a toggler for a boolean attribute. This returns a callable
        that takes no arguments.
        """
        if attr not in self._options:
            raise KeyError("No such option: %s" % attr)
        o = self._options[attr]
        if o.typespec is not bool:
            raise ValueError("Toggler can only be used with boolean options")
        def toggle():
            setattr(self, attr, not getattr(self, attr))
        return toggle
    def default(self, option: str) -> Any:
        """Return the default value of the named option."""
        return self._options[option].default
    def has_changed(self, option):
        """
        Has the option changed from the default?
        """
        return self._options[option].has_changed()
    def merge(self, opts):
        """
        Merge a dict of options into this object. Options that have None
        value are ignored. Lists and tuples are appended to the current
        option value.
        """
        toset = {}
        for k, v in opts.items():
            if v is not None:
                if isinstance(v, (list, tuple)):
                    toset[k] = getattr(self, k) + v
                else:
                    toset[k] = v
        self.update(**toset)
    def __repr__(self):
        options = pprint.pformat(self._options, indent=4).strip(" {}")
        if "\n" in options:
            options = "\n " + options + "\n"
        return "{mod}.{cls}({{{options}}})".format(
            mod=type(self).__module__, cls=type(self).__name__, options=options
        )
    def set(self, *specs: str, defer: bool = False) -> None:
        """
        Takes a list of set specification in standard form (option=value).
        Options that are known are updated immediately. If defer is true,
        options that are not known are deferred, and will be set once they
        are added.
        May raise an `OptionsError` if a value is malformed or an option is unknown and defer is False.
        """
        # First, group specs by option name.
        unprocessed: dict[str, list[str]] = {}
        for spec in specs:
            if "=" in spec:
                name, value = spec.split("=", maxsplit=1)
                unprocessed.setdefault(name, []).append(value)
            else:
                unprocessed.setdefault(spec, [])
        # Second, convert values to the correct type.
        processed: dict[str, Any] = {}
        for name in list(unprocessed.keys()):
            if name in self._options:
                processed[name] = self._parse_setval(
                    self._options[name], unprocessed.pop(name)
                )
        # Third, stash away unrecognized options or complain about them.
        if defer:
            self.deferred.update(
                {k: _UnconvertedStrings(v) for k, v in unprocessed.items()}
            )
        elif unprocessed:
            raise exceptions.OptionsError(
                f"Unknown option(s): {', '.join(unprocessed)}"
            )
        # Finally, apply updated options.
        self.update(**processed)
    def process_deferred(self) -> None:
        """
        Processes options that were deferred in previous calls to set, and
        have since been added.
        """
        update: dict[str, Any] = {}
        for optname, value in self.deferred.items():
            if optname in self._options:
                if isinstance(value, _UnconvertedStrings):
                    value = self._parse_setval(self._options[optname], value.val)
                update[optname] = value
        self.update(**update)
        for k in update.keys():
            del self.deferred[k]
    def _parse_setval(self, o: _Option, values: list[str]) -> Any:
        """
        Convert a string to a value appropriate for the option type.
        """
        if o.typespec == Sequence[str]:
            return values
        if len(values) > 1:
            raise exceptions.OptionsError(
                f"Received multiple values for {o.name}: {values}"
            )
        optstr: str | None
        if values:
            optstr = values[0]
        else:
            optstr = None
        if o.typespec in (str, Optional[str]):
            if o.typespec is str and optstr is None:
                raise exceptions.OptionsError(f"Option is required: {o.name}")
            return optstr
        elif o.typespec in (int, Optional[int]):
            if optstr:
                try:
                    return int(optstr)
                except ValueError:
                    raise exceptions.OptionsError(
                        f"Failed to parse option {o.name}: not an integer: {optstr}"
                    )
            elif o.typespec is int:
                raise exceptions.OptionsError(f"Option is required: {o.name}")
            else:
                return None
        elif o.typespec is bool:
            if optstr == "toggle":
                return not o.current()
            if not optstr or optstr == "true":
                return True
            elif optstr == "false":
                return False
            else:
                raise exceptions.OptionsError(
                    f'Failed to parse option {o.name}: boolean must be "true", "false", or have the value omitted (a synonym for "true").'
                )
        raise NotImplementedError(
            f"Failed to parse option {o.name}: unsupported option type: {o.typespec}"
        )
    def make_parser(self, parser, optname, metavar=None, short=None):
        """
        Auto-Create a command-line parser entry for a named option. If the
        option does not exist, it is ignored.
        """
        if optname not in self._options:
            return
        o = self._options[optname]
        def mkf(x, s):
            # Build argparse flag names: --long-form plus optional -short.
            x = x.replace("_", "-")
            f = ["--%s" % x]
            if s:
                f.append("-" + s)
            return f
        flags = mkf(optname, short)
        if o.typespec is bool:
            g = parser.add_mutually_exclusive_group(required=False)
            onf = mkf(optname, None)
            offf = mkf("no-" + optname, None)
            # The short option for a bool goes to whatever is NOT the default
            if short:
                if o.default:
                    offf = mkf("no-" + optname, short)
                else:
                    onf = mkf(optname, short)
            g.add_argument(
                *offf,
                action="store_false",
                dest=optname,
            )
            g.add_argument(*onf, action="store_true", dest=optname, help=o.help)
            parser.set_defaults(**{optname: None})
        elif o.typespec in (int, Optional[int]):
            parser.add_argument(
                *flags,
                action="store",
                type=int,
                dest=optname,
                help=o.help,
                metavar=metavar,
            )
        elif o.typespec in (str, Optional[str]):
            parser.add_argument(
                *flags,
                action="store",
                type=str,
                dest=optname,
                help=o.help,
                metavar=metavar,
                choices=o.choices,
            )
        elif o.typespec == Sequence[str]:
            parser.add_argument(
                *flags,
                action="append",
                type=str,
                dest=optname,
                help=o.help + " May be passed multiple times.",
                metavar=metavar,
                choices=o.choices,
            )
        else:
            # Fix: the message is a ValueError, not a logging call — format it
            # instead of passing (format, arg) as two exception arguments.
            raise ValueError("Unsupported option type: %s" % o.typespec)
def dump_defaults(opts, out: TextIO):
    """
    Dump an annotated YAML document of all options with their default
    values to *out*, each option preceded by its wrapped help text.
    """
    # Sort data
    s = ruamel.yaml.comments.CommentedMap()
    for k in sorted(opts.keys()):
        o = opts._options[k]
        s[k] = o.default
        txt = o.help.strip()
        if o.choices:
            txt += " Valid values are %s." % ", ".join(repr(c) for c in o.choices)
        else:
            t = typecheck.typespec_to_str(o.typespec)
            txt += " Type %s." % t
        txt = "\n".join(textwrap.wrap(txt))
        # Attach the help text as a comment block above the key.
        s.yaml_set_comment_before_after_key(k, before="\n" + txt)
    return ruamel.yaml.YAML().dump(s, out)
def dump_dicts(opts, keys: Iterable[str] | None = None) -> dict:
    """
    Dump the (optionally filtered) options into a dict of metadata dicts.

    Return: A dict like:
    { "anticache": { type: "bool", default: false, value: true, help: "help text"} }
    """
    if keys is None:
        keys = opts.keys()
    result: dict = {}
    for name in sorted(keys):
        o = opts._options[name]
        result[name] = {
            "type": typecheck.typespec_to_str(o.typespec),
            "default": o.default,
            "value": o.current(),
            "help": o.help,
            "choices": o.choices,
        }
    return result
def parse(text):
    """
    Parse a YAML options document into a dict.

    Returns {} for empty or all-None input; raises OptionsError for invalid
    YAML or for documents that are not mappings.
    """
    if not text:
        return {}
    try:
        data = ruamel.yaml.YAML(typ="safe", pure=True).load(text)
    except ruamel.yaml.error.YAMLError as v:
        if hasattr(v, "problem_mark"):
            snip = v.problem_mark.get_snippet()
            raise exceptions.OptionsError(
                "Config error at line %s:\n%s\n%s"
                % (v.problem_mark.line + 1, snip, getattr(v, "problem", ""))
            )
        else:
            raise exceptions.OptionsError("Could not parse options.")
    if isinstance(data, str):
        raise exceptions.OptionsError("Config error - no keys found.")
    if data is None:
        return {}
    return data
def load(opts: OptManager, text: str, cwd: Path | str | None = None) -> None:
    """
    Load configuration from text, over-writing options already set on
    *opts*. May raise OptionsError if the config file is invalid.

    When *cwd* is given, relative script paths are rebased onto it so they
    resolve relative to the config file rather than the process cwd.
    """
    data = parse(text)
    scripts = data.get("scripts")
    if cwd is not None and scripts is not None:
        base = Path(cwd)
        data["scripts"] = [
            str(relative_path(Path(s), relative_to=base)) for s in scripts
        ]
    opts.update_defer(**data)
def load_paths(opts: OptManager, *paths: Path | str) -> None:
    """
    Load config files in order; later paths take precedence over earlier
    ones. Paths that don't exist are ignored; read or parse errors raise an
    OptionsError.
    """
    for raw in paths:
        candidate = Path(raw).expanduser()
        if not (candidate.exists() and candidate.is_file()):
            continue
        with candidate.open(encoding="utf8") as f:
            try:
                text = f.read()
            except UnicodeDecodeError as e:
                raise exceptions.OptionsError(f"Error reading {candidate}: {e}")
            try:
                load(opts, text, cwd=candidate.absolute().parent)
            except exceptions.OptionsError as e:
                raise exceptions.OptionsError(f"Error reading {candidate}: {e}")
def serialize(
    opts: OptManager, file: TextIO, text: str, defaults: bool = False
) -> None:
    """
    Performs a round-trip serialization: *text* is treated as a previous
    serialization and modified in-place, then dumped to *file*.

    - If "defaults" is False, only options with non-default values are
      serialized; default values already present in *text* are preserved.
    - Unknown options in *text* are removed.
    - Raises OptionsError if *text* is invalid.
    """
    data = parse(text)
    for name in opts.keys():
        if defaults or opts.has_changed(name):
            data[name] = getattr(opts, name)
    # Prune options that are no longer known.
    for stale in [k for k in data.keys() if k not in opts._options]:
        del data[stale]
    ruamel.yaml.YAML().dump(data, file)
def save(opts: OptManager, path: Path | str, defaults: bool = False) -> None:
    """
    Serialize *opts* to *path*. If the destination file exists, it is
    modified in-place; raises OptionsError if its existing contents are
    corrupt or not valid UTF-8.
    """
    target = Path(path).expanduser()
    if target.exists() and target.is_file():
        with target.open(encoding="utf8") as f:
            try:
                existing = f.read()
            except UnicodeDecodeError as e:
                raise exceptions.OptionsError(f"Error trying to modify {target}: {e}")
    else:
        existing = ""
    with target.open("w", encoding="utf8") as f:
        serialize(opts, f, existing, defaults)
def relative_path(script_path: Path | str, *, relative_to: Path | str) -> Path:
    """
    Resolve a path found in a config file relative to that config file's
    directory (*relative_to*) instead of the process's working directory.
    """
    p = Path(script_path)
    # Edge case when $HOME is not an absolute path: anchor the expanded form
    # explicitly before joining so it cannot be mis-resolved.
    expanded = p.expanduser()
    if expanded != p and not p.is_absolute():
        p = expanded.absolute()
    return (relative_to / p.expanduser()).absolute()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/options.py | mitmproxy/options.py | from collections.abc import Sequence
from typing import Optional
from mitmproxy import optmanager
# Default location of mitmproxy's configuration directory.
CONF_DIR = "~/.mitmproxy"
# Default basename for files inside the configuration directory.
CONF_BASENAME = "mitmproxy"
# Default cutoff for the flow content view (see `content_view_lines_cutoff`).
CONTENT_VIEW_LINES_CUTOFF = 512
# Default TLS key size (presumably bits; see the `key_size` option below).
KEY_SIZE = 2048
class Options(optmanager.OptManager):
    """
    The core set of mitmproxy options. Each option is registered with its
    type, default value and help text; additional options may be added by
    tools and addons on top of these.
    """
    def __init__(self, **kwargs) -> None:
        super().__init__()
        self.add_option(
            "server", bool, True, "Start a proxy server. Enabled by default."
        )
        self.add_option(
            "showhost",
            bool,
            False,
            """Use the Host header to construct URLs for display.
            This option is disabled by default because malicious apps may send misleading host headers to evade
            your analysis. If this is not a concern, enable this options for better flow display.""",
        )
        self.add_option(
            "show_ignored_hosts",
            bool,
            False,
            """
            Record ignored flows in the UI even if we do not perform TLS interception.
            This option will keep ignored flows' contents in memory, which can greatly increase memory usage.
            A future release will fix this issue, record ignored flows by default, and remove this option.
            """,
        )
        # Proxy options
        self.add_option(
            "add_upstream_certs_to_client_chain",
            bool,
            False,
            """
            Add all certificates of the upstream server to the certificate chain
            that will be served to the proxy client, as extras.
            """,
        )
        self.add_option(
            "confdir",
            str,
            CONF_DIR,
            "Location of the default mitmproxy configuration files.",
        )
        self.add_option(
            "certs",
            Sequence[str],
            [],
            """
            SSL certificates of the form "[domain=]path". The domain may include
            a wildcard, and is equal to "*" if not specified. The file at path
            is a certificate in PEM format. If a private key is included in the
            PEM, it is used, else the default key in the conf dir is used. The
            PEM file should contain the full certificate chain, with the leaf
            certificate as the first entry.
            """,
        )
        self.add_option(
            "cert_passphrase",
            Optional[str],
            None,
            """
            Passphrase for decrypting the private key provided in the --cert option.
            Note that passing cert_passphrase on the command line makes your passphrase visible in your system's
            process list. Specify it in config.yaml to avoid this.
            """,
        )
        self.add_option(
            "client_certs", Optional[str], None, "Client certificate file or directory."
        )
        self.add_option(
            "ignore_hosts",
            Sequence[str],
            [],
            """
            Ignore host and forward all traffic without processing it. In
            transparent mode, it is recommended to use an IP address (range),
            not the hostname. In regular mode, only SSL traffic is ignored and
            the hostname should be used. The supplied value is interpreted as a
            regular expression and matched on the ip or the hostname.
            """,
        )
        self.add_option("allow_hosts", Sequence[str], [], "Opposite of --ignore-hosts.")
        self.add_option(
            "listen_host",
            str,
            "",
            "Address to bind proxy server(s) to (may be overridden for individual modes, see `mode`).",
        )
        self.add_option(
            "listen_port",
            Optional[int],
            None,
            "Port to bind proxy server(s) to (may be overridden for individual modes, see `mode`). "
            "By default, the port is mode-specific. The default regular HTTP proxy spawns on port 8080.",
        )
        self.add_option(
            "mode",
            Sequence[str],
            ["regular"],
            """
            The proxy server type(s) to spawn. Can be passed multiple times.
            Mitmproxy supports "regular" (HTTP), "local", "transparent", "socks5", "reverse:SPEC",
            "upstream:SPEC", and "wireguard[:PATH]" proxy servers. For reverse and upstream proxy modes, SPEC
            is host specification in the form of "http[s]://host[:port]". For WireGuard mode, PATH may point to
            a file containing key material. If no such file exists, it will be created on startup.
            You may append `@listen_port` or `@listen_host:listen_port` to override `listen_host` or `listen_port` for
            a specific proxy mode. Features such as client playback will use the first mode to determine
            which upstream server to use.
            """,
        )
        self.add_option(
            "upstream_cert",
            bool,
            True,
            "Connect to upstream server to look up certificate details.",
        )
        self.add_option(
            "http2",
            bool,
            True,
            "Enable/disable HTTP/2 support. HTTP/2 support is enabled by default.",
        )
        self.add_option(
            "http2_ping_keepalive",
            int,
            58,
            """
            Send a PING frame if an HTTP/2 connection is idle for more than
            the specified number of seconds to prevent the remote site from closing it.
            Set to 0 to disable this feature.
            """,
        )
        self.add_option(
            "http3",
            bool,
            True,
            "Enable/disable support for QUIC and HTTP/3. Enabled by default.",
        )
        self.add_option(
            "http_connect_send_host_header",
            bool,
            True,
            "Include host header with CONNECT requests. Enabled by default.",
        )
        self.add_option(
            "websocket",
            bool,
            True,
            "Enable/disable WebSocket support. "
            "WebSocket support is enabled by default.",
        )
        self.add_option(
            "rawtcp",
            bool,
            True,
            "Enable/disable raw TCP connections. "
            "TCP connections are enabled by default. ",
        )
        self.add_option(
            "ssl_insecure",
            bool,
            False,
            """Do not verify upstream server SSL/TLS certificates.
            If this option is enabled, certificate validation is skipped and mitmproxy itself will be vulnerable to
            TLS interception.""",
        )
        self.add_option(
            "ssl_verify_upstream_trusted_confdir",
            Optional[str],
            None,
            """
            Path to a directory of trusted CA certificates for upstream server
            verification prepared using the c_rehash tool.
            """,
        )
        self.add_option(
            "ssl_verify_upstream_trusted_ca",
            Optional[str],
            None,
            "Path to a PEM formatted trusted CA certificate.",
        )
        self.add_option(
            "tcp_hosts",
            Sequence[str],
            [],
            """
            Generic TCP SSL proxy mode for all hosts that match the pattern.
            Similar to --ignore-hosts, but SSL connections are intercepted.
            The communication contents are printed to the log in verbose mode.
            """,
        )
        self.add_option(
            "udp_hosts",
            Sequence[str],
            [],
            """
            Generic UDP SSL proxy mode for all hosts that match the pattern.
            Similar to --ignore-hosts, but SSL connections are intercepted.
            The communication contents are printed to the log in verbose mode.
            """,
        )
        self.add_option(
            "content_view_lines_cutoff",
            int,
            CONTENT_VIEW_LINES_CUTOFF,
            """
            Flow content view lines limit. Limit is enabled by default to
            speedup flows browsing.
            """,
        )
        self.add_option(
            "key_size",
            int,
            KEY_SIZE,
            """
            TLS key size for certificates and CA.
            """,
        )
        self.add_option(
            "protobuf_definitions",
            Optional[str],
            None,
            "Path to a .proto file that's used to resolve Protobuf field names when pretty-printing.",
        )
        self.add_option(
            "tcp_timeout",
            int,
            600,
            """
            Timeout in seconds for inactive TCP connections. Connections will be closed after this period of inactivity.
            """,
        )
        # Apply any caller-supplied overrides; unknown names raise KeyError.
        self.update(**kwargs)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/dump.py | mitmproxy/tools/dump.py | from mitmproxy import addons
from mitmproxy import master
from mitmproxy import options
from mitmproxy.addons import dumper
from mitmproxy.addons import errorcheck
from mitmproxy.addons import keepserving
from mitmproxy.addons import readfile
class DumpMaster(master.Master):
    """
    A headless master wired up with the default addon set, plus optional
    terminal logging and flow dumping (used for mitmdump-style operation).
    """
    def __init__(
        self,
        options: options.Options,
        loop=None,
        with_termlog=True,
        with_dumper=True,
    ) -> None:
        super().__init__(options, event_loop=loop, with_termlog=with_termlog)
        self.addons.add(*addons.default_addons())
        if with_dumper:
            # Dumper prints flow details; optional so callers can run silently.
            self.addons.add(dumper.Dumper())
        # NOTE(review): addons are registered in a fixed order here —
        # presumably hook dispatch order depends on it, so keep it stable.
        self.addons.add(
            keepserving.KeepServing(),
            readfile.ReadFileStdin(),
            errorcheck.ErrorCheck(),
        )
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/main.py | mitmproxy/tools/main.py | from __future__ import annotations
import argparse
import asyncio
import logging
import os
import signal
import sys
from collections.abc import Callable
from collections.abc import Sequence
from typing import Any
from typing import TypeVar
from mitmproxy import exceptions
from mitmproxy import master
from mitmproxy import options
from mitmproxy import optmanager
from mitmproxy.tools import cmdline
from mitmproxy.utils import arg_check
from mitmproxy.utils import debug
def process_options(parser, opts, args):
    """
    Transfer parsed command-line arguments onto the options object.

    Handles --version (prints system info and exits), adjusts log verbosity
    for quiet/verbose/introspection modes, then copies every non-None parsed
    value whose name matches a known option into *opts*.
    """
    if args.version:
        print(debug.dump_system_info())
        sys.exit(0)
    if args.quiet or args.options or args.commands:
        # also reduce log verbosity if --options or --commands is passed,
        # we don't want log messages from regular startup then.
        args.termlog_verbosity = "error"
        args.flow_detail = 0
    if args.verbose:
        args.termlog_verbosity = "debug"
        args.flow_detail = 2
    known = {
        name: value
        for name, value in vars(args).items()
        if name in opts and value is not None
    }
    opts.update(**known)
T = TypeVar("T", bound=master.Master)


def run(
    master_cls: type[T],
    make_parser: Callable[[options.Options], argparse.ArgumentParser],
    arguments: Sequence[str],
    extra: Callable[[Any], dict] | None = None,
) -> T:  # pragma: no cover
    """
    Start a mitmproxy tool: parse arguments, build the master and run it
    until shutdown.

    extra: Extra argument processing callable which returns a dict of
    options.
    """

    async def main() -> T:
        # Root logger at DEBUG; noisy third-party loggers are raised individually.
        logging.getLogger().setLevel(logging.DEBUG)
        logging.getLogger("tornado").setLevel(logging.WARNING)
        logging.getLogger("asyncio").setLevel(logging.WARNING)
        logging.getLogger("hpack").setLevel(logging.WARNING)
        logging.getLogger("urwid").setLevel(logging.INFO)
        logging.getLogger("quic").setLevel(
            logging.WARNING
        )  # aioquic uses a different prefix...
        debug.register_info_dumpers()

        opts = options.Options()
        master = master_cls(opts)

        parser = make_parser(opts)

        # To make migration from 2.x to 3.0 bearable.
        if "-R" in sys.argv and sys.argv[sys.argv.index("-R") + 1].startswith("http"):
            print(
                "To use mitmproxy in reverse mode please use --mode reverse:SPEC instead"
            )

        try:
            args = parser.parse_args(arguments)
        except SystemExit:
            # run additional argument checks before exiting
            # (see mitmproxy.utils.arg_check)
            arg_check.check()
            sys.exit(1)

        try:
            # Defer unknown options: addons loaded later may still define them.
            opts.set(*args.setoptions, defer=True)
            optmanager.load_paths(
                opts,
                os.path.join(opts.confdir, "config.yaml"),
                os.path.join(opts.confdir, "config.yml"),
            )
            process_options(parser, opts, args)

            if args.options:
                optmanager.dump_defaults(opts, sys.stdout)
                sys.exit(0)
            if args.commands:
                master.commands.dump()
                sys.exit(0)
            if extra:
                if args.filter_args:
                    logging.info(
                        f'Only processing flows that match "{" & ".join(args.filter_args)}"'
                    )
                opts.update(**extra(args))
        except exceptions.OptionsError as e:
            print(f"{sys.argv[0]}: {e}", file=sys.stderr)
            sys.exit(1)

        loop = asyncio.get_running_loop()

        def _sigint(*_):
            # ConsoleMaster defines prompt_for_exit (confirmation dialog);
            # other masters fall back to an immediate shutdown.
            loop.call_soon_threadsafe(
                getattr(master, "prompt_for_exit", master.shutdown)
            )

        def _sigterm(*_):
            loop.call_soon_threadsafe(master.shutdown)

        try:
            # Prefer loop.add_signal_handler where it is available
            # https://github.com/mitmproxy/mitmproxy/issues/7128
            loop.add_signal_handler(signal.SIGINT, _sigint)
            loop.add_signal_handler(signal.SIGTERM, _sigterm)
        except NotImplementedError:
            # Fall back to `signal.signal` for platforms where that is not available (Windows' Proactorloop)
            signal.signal(signal.SIGINT, _sigint)
            signal.signal(signal.SIGTERM, _sigterm)

        # to fix the issue mentioned https://github.com/mitmproxy/mitmproxy/issues/6744
        # by setting SIGPIPE to SIG_IGN, the process will not terminate and continue to run
        if hasattr(signal, "SIGPIPE"):
            signal.signal(signal.SIGPIPE, signal.SIG_IGN)

        await master.run()
        return master

    return asyncio.run(main())
def mitmproxy(args=None) -> int | None:  # pragma: no cover
    """Entry point for the interactive console UI."""
    # Imported lazily so the console stack is only loaded for this tool.
    from mitmproxy.tools import console

    master_cls = console.master.ConsoleMaster
    run(master_cls, cmdline.mitmproxy, args)
    return None
def mitmdump(args=None) -> int | None:  # pragma: no cover
    """Entry point for the headless mitmdump tool."""
    from mitmproxy.tools import dump

    def extra(args):
        # A trailing filter expression applies to saving, reading and
        # dumping flows alike.
        if args.filter_args:
            flt = " ".join(args.filter_args)
            return {
                "save_stream_filter": flt,
                "readfile_filter": flt,
                "dumper_filter": flt,
            }
        return {}

    run(dump.DumpMaster, cmdline.mitmdump, args, extra)
    return None
def mitmweb(args=None) -> int | None:  # pragma: no cover
    """Entry point for the browser-based mitmweb tool."""
    from mitmproxy.tools import web

    master_cls = web.master.WebMaster
    run(master_cls, cmdline.mitmweb, args)
    return None
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/__init__.py | mitmproxy/tools/__init__.py | python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false | |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/cmdline.py | mitmproxy/tools/cmdline.py | import argparse
def common_options(parser, opts):
    """Register CLI arguments shared by mitmproxy, mitmdump and mitmweb.

    Plain argparse flags are added directly; everything registered through
    opts.make_parser is backed by an entry in the option store, which keeps
    CLI flags and options in sync. Registration order determines --help
    output order.
    """
    parser.add_argument(
        "--version",
        action="store_true",
        help="show version number and exit",
        dest="version",
    )
    parser.add_argument(
        "--options",
        action="store_true",
        help="Show all options and their default values",
    )
    parser.add_argument(
        "--commands",
        action="store_true",
        help="Show all commands and their signatures",
    )
    parser.add_argument(
        "--set",
        type=str,
        dest="setoptions",
        default=[],
        action="append",
        metavar="option[=value]",
        help="""
        Set an option. When the value is omitted, booleans are set to true,
        strings and integers are set to None (if permitted), and sequences
        are emptied. Boolean values can be true, false or toggle.
        Sequences are set using multiple invocations to set for
        the same option.
        """,
    )
    parser.add_argument(
        "-q", "--quiet", action="store_true", dest="quiet", help="Quiet."
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="store_const",
        dest="verbose",
        const="debug",
        help="Increase log verbosity.",
    )

    # Basic options
    opts.make_parser(parser, "mode", short="m")
    opts.make_parser(parser, "anticache")
    opts.make_parser(parser, "showhost")
    opts.make_parser(parser, "show_ignored_hosts")
    opts.make_parser(parser, "rfile", metavar="PATH", short="r")
    opts.make_parser(parser, "scripts", metavar="SCRIPT", short="s")
    opts.make_parser(parser, "stickycookie", metavar="FILTER")
    opts.make_parser(parser, "stickyauth", metavar="FILTER")
    opts.make_parser(parser, "save_stream_file", metavar="PATH", short="w")
    opts.make_parser(parser, "anticomp")

    # Proxy options
    group = parser.add_argument_group("Proxy Options")
    opts.make_parser(group, "listen_host", metavar="HOST")
    opts.make_parser(group, "listen_port", metavar="PORT", short="p")
    opts.make_parser(group, "server", short="n")
    opts.make_parser(group, "ignore_hosts", metavar="HOST")
    opts.make_parser(group, "allow_hosts", metavar="HOST")
    opts.make_parser(group, "tcp_hosts", metavar="HOST")
    opts.make_parser(group, "upstream_auth", metavar="USER:PASS")
    opts.make_parser(group, "proxyauth", metavar="SPEC")
    opts.make_parser(group, "store_streamed_bodies")
    opts.make_parser(group, "rawtcp")
    opts.make_parser(group, "http2")

    # Proxy SSL options
    group = parser.add_argument_group("SSL")
    opts.make_parser(group, "certs", metavar="SPEC")
    opts.make_parser(group, "cert_passphrase", metavar="PASS")
    opts.make_parser(group, "ssl_insecure", short="k")

    # Client replay
    group = parser.add_argument_group("Client Replay")
    opts.make_parser(group, "client_replay", metavar="PATH", short="C")

    # Server replay
    group = parser.add_argument_group("Server Replay")
    opts.make_parser(group, "server_replay", metavar="PATH", short="S")
    opts.make_parser(group, "server_replay_kill_extra")
    opts.make_parser(group, "server_replay_extra")
    opts.make_parser(group, "server_replay_reuse")
    opts.make_parser(group, "server_replay_refresh")

    # Map Remote
    group = parser.add_argument_group("Map Remote")
    opts.make_parser(group, "map_remote", metavar="PATTERN", short="M")

    # Map Local
    group = parser.add_argument_group("Map Local")
    opts.make_parser(group, "map_local", metavar="PATTERN")

    # Modify Body
    group = parser.add_argument_group("Modify Body")
    opts.make_parser(group, "modify_body", metavar="PATTERN", short="B")

    # Modify headers
    group = parser.add_argument_group("Modify Headers")
    opts.make_parser(group, "modify_headers", metavar="PATTERN", short="H")
def mitmproxy(opts):
    """Build the argument parser for the mitmproxy console tool."""
    parser = argparse.ArgumentParser(usage="%(prog)s [options]")
    common_options(parser, opts)

    for opt_name in ("console_layout", "console_layout_headers"):
        opts.make_parser(parser, opt_name)

    group = parser.add_argument_group(
        "Filters", "See help in mitmproxy for filter expression syntax."
    )
    for opt_name in ("intercept", "view_filter"):
        opts.make_parser(group, opt_name, metavar="FILTER")
    return parser
def mitmdump(opts):
    """Build the argument parser for the mitmdump tool."""
    parser = argparse.ArgumentParser(usage="%(prog)s [options] [filter]")
    common_options(parser, opts)
    opts.make_parser(parser, "flow_detail", metavar="LEVEL")

    # Any trailing positional arguments are collected verbatim and later
    # joined into a single filter expression.
    filter_help = """
        Filter expression, equivalent to setting both the view_filter
        and save_stream_filter options.
        """
    parser.add_argument("filter_args", nargs="...", help=filter_help)
    return parser
def mitmweb(opts):
    """Build the argument parser for the mitmweb tool."""
    parser = argparse.ArgumentParser(usage="%(prog)s [options]")

    # Web-UI specific options are registered before the shared ones.
    group = parser.add_argument_group("Mitmweb")
    opts.make_parser(group, "web_open_browser")
    for opt_name, mv in (("web_port", "PORT"), ("web_host", "HOST")):
        opts.make_parser(group, opt_name, metavar=mv)

    common_options(parser, opts)

    group = parser.add_argument_group(
        "Filters", "See help in mitmproxy for filter expression syntax."
    )
    opts.make_parser(group, "intercept", metavar="FILTER")
    return parser
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/console/eventlog.py | mitmproxy/tools/console/eventlog.py | import collections
import urwid
from mitmproxy import log
from mitmproxy.tools.console import layoutwidget
class LogBufferWalker(urwid.SimpleListWalker):
    # Dedicated subclass for the event-log buffer; currently identical in
    # behavior to urwid.SimpleListWalker (exists as a named extension point).
    pass
class EventLog(urwid.ListBox, layoutwidget.LayoutWidget):
    """Scrollable console widget displaying mitmproxy's event log."""

    keyctx = "eventlog"
    title = "Events"

    def __init__(self, master):
        self.master = master
        # Bounded buffer: oldest widgets are dropped once the event store's
        # size limit is reached.
        self.walker = LogBufferWalker(collections.deque(maxlen=self.master.events.size))
        master.events.sig_add.connect(self.add_event)
        master.events.sig_refresh.connect(self.refresh_events)
        self.master.options.subscribe(
            self.refresh_events, ["console_eventlog_verbosity"]
        )
        self.refresh_events()  # initial fill from already-buffered events
        super().__init__(self.walker)

    def load(self, loader):
        # Addon-style hook: registers the option read in add_event below.
        loader.add_option(
            "console_focus_follow", bool, False, "Focus follows new flows."
        )

    def set_focus(self, index):
        # Silently ignore out-of-range focus requests (e.g. empty log).
        if 0 <= index < len(self.walker):
            super().set_focus(index)

    def keypress(self, size, key):
        # m_end / m_start are synthetic navigation keys (jump to last/first).
        if key == "m_end":
            self.set_focus(len(self.walker) - 1)
        elif key == "m_start":
            self.set_focus(0)
        return super().keypress(size, key)

    def add_event(self, entry: log.LogEntry):
        """Append a log entry if it passes the configured verbosity filter."""
        if log.log_tier(self.master.options.console_eventlog_verbosity) < log.log_tier(
            entry.level
        ):
            return
        txt = f"{entry.level}: {entry.msg}"
        # Severe levels get the matching display attribute for colouring.
        if entry.level in ("error", "warn", "alert"):
            e = urwid.Text((entry.level, txt))
        else:
            e = urwid.Text(txt)
        self.walker.append(e)
        if self.master.options.console_focus_follow:
            self.walker.set_focus(len(self.walker) - 1)

    def refresh_events(self, *_) -> None:
        """Rebuild the widget list from the event store (e.g. after a verbosity change)."""
        self.walker.clear()
        for event in self.master.events.data:
            self.add_event(event)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/console/palettes.py | mitmproxy/tools/console/palettes.py | # Low-color themes should ONLY use the standard foreground and background
# colours listed here:
#
# http://urwid.org/manual/displayattributes.html
#
from __future__ import annotations
from collections.abc import Mapping
from collections.abc import Sequence
class Palette:
    """Base class for console colour themes.

    Subclasses provide `low` (mandatory): a mapping from display-attribute
    name to a (foreground, background) pair using standard 16-colour urwid
    names, covering every name in `_fields`. `high` optionally overrides
    entries with high-colour (256-colour) values.
    """

    _fields = [
        "background",
        "title",
        # Status bar & heading
        "heading",
        "heading_key",
        "heading_inactive",
        # Help
        "key",
        "head",
        "text",
        # Options
        "option_selected",
        "option_active",
        "option_active_selected",
        "option_selected_key",
        # List and Connections
        "method_get",
        "method_post",
        "method_delete",
        "method_other",
        "method_head",
        "method_put",
        "method_http2_push",
        "scheme_http",
        "scheme_https",
        "scheme_ws",
        "scheme_wss",
        "scheme_tcp",
        "scheme_udp",
        "scheme_dns",
        "scheme_quic",
        "scheme_other",
        "url_punctuation",
        "url_domain",
        "url_filename",
        "url_extension",
        "url_query_key",
        "url_query_value",
        "content_none",
        "content_text",
        "content_script",
        "content_media",
        "content_data",
        "content_raw",
        "content_other",
        "focus",
        "code_200",
        "code_300",
        "code_400",
        "code_500",
        "code_other",
        "error",
        "warn",
        "alert",
        "header",
        "highlight",
        "intercept",
        "replay",
        "mark",
        # Contentview Syntax Highlighting
        "name",
        "string",
        "number",
        "boolean",
        "comment",
        # NOTE(review): "error" appears a second time here (also listed above)
        # — harmless duplicate, the same entry is just emitted twice.
        "error",
        # TCP flow details
        "from_client",
        "to_client",
        # Grid Editor
        "focusfield",
        "focusfield_error",
        "field_error",
        "editfield",
        # Commander
        "commander_command",
        "commander_invalid",
        "commander_hint",
    ]
    _fields.extend(["gradient_%02d" % i for i in range(100)])

    # high is optional; low must be provided by every subclass.
    high: Mapping[str, Sequence[str]] | None = None
    low: Mapping[str, Sequence[str]]

    def palette(self, transparent: bool):
        """Build the urwid palette list, merging low- and high-colour entries.

        If *transparent* is true, the "background" entry stays "default" so
        the terminal's own background shows through.
        """
        lst: list[Sequence[str | None]] = []
        highback, lowback = None, None
        if not transparent:
            if self.high and self.high.get("background"):
                highback = self.high["background"][1]
            lowback = self.low["background"][1]

        for i in self._fields:
            if transparent and i == "background":
                lst.append(["background", "default", "default"])
            else:
                v: list[str | None] = [i]

                low = list(self.low[i])
                # Entries with a "default" background inherit the theme background.
                if lowback and low[1] == "default":
                    low[1] = lowback
                v.extend(low)

                if self.high and i in self.high:
                    # None fills the monochrome slot of urwid's extended
                    # (name, fg, bg, mono, fg_high, bg_high) palette entry.
                    v.append(None)
                    high: list[str | None] = list(self.high[i])
                    if highback and high[1] == "default":
                        high[1] = highback
                    v.extend(high)
                elif highback and self.low[i][1] == "default":
                    # No explicit high-colour entry: keep the low foreground but
                    # apply the high-colour background.
                    high = [None, low[0], highback]
                    v.extend(high)
                lst.append(tuple(v))
        return lst
def gen_gradient(palette, cols):
    """Populate *palette* with 100 "gradient_NN" entries that step through
    the colour names in *cols* in equal-sized bands."""
    n = len(cols)
    for step in range(100):
        key = "gradient_%02d" % step
        palette[key] = (cols[step * n // 100], "default")
def gen_rgb_gradient(palette, cols):
    """Populate *palette* with 100 "gradient_NN" foreground colours linearly
    interpolated (per RGB channel, 0-15) between the stops in *cols*."""
    segments = len(cols) - 1
    for i in range(100):
        pos = i / 100
        seg = int(pos * segments)
        lo, hi = cols[seg], cols[seg + 1]
        frac = pos * segments % 1
        channels = tuple(
            round(lo[c] + (hi[c] - lo[c]) * frac) for c in range(3)
        )
        palette["gradient_%02d" % i] = ("#%x%x%x" % channels, "default")
class LowDark(Palette):
    """
    Low-color dark background
    """

    # Pure colour data: (foreground, background) per display attribute,
    # using only the standard 16-colour urwid names.
    low = dict(
        background=("white", "black"),
        title=("white,bold", "default"),
        # Status bar & heading
        heading=("white", "dark blue"),
        heading_key=("light cyan", "dark blue"),
        heading_inactive=("dark gray", "light gray"),
        # Help
        key=("light cyan", "default"),
        head=("white,bold", "default"),
        text=("light gray", "default"),
        # Options
        option_selected=("black", "light gray"),
        option_selected_key=("light cyan", "light gray"),
        option_active=("light red", "default"),
        option_active_selected=("light red", "light gray"),
        # List and Connections
        method_get=("light green", "default"),
        method_post=("brown", "default"),
        method_delete=("light red", "default"),
        method_head=("dark cyan", "default"),
        method_put=("dark red", "default"),
        method_other=("dark magenta", "default"),
        method_http2_push=("dark gray", "default"),
        scheme_http=("dark cyan", "default"),
        scheme_https=("dark green", "default"),
        scheme_ws=("brown", "default"),
        scheme_wss=("dark magenta", "default"),
        scheme_tcp=("dark magenta", "default"),
        scheme_udp=("dark magenta", "default"),
        scheme_dns=("dark blue", "default"),
        scheme_quic=("brown", "default"),
        scheme_other=("dark magenta", "default"),
        url_punctuation=("light gray", "default"),
        url_domain=("white", "default"),
        url_filename=("dark cyan", "default"),
        url_extension=("light gray", "default"),
        url_query_key=("white", "default"),
        url_query_value=("light gray", "default"),
        content_none=("dark gray", "default"),
        content_text=("light gray", "default"),
        content_script=("dark green", "default"),
        content_media=("light blue", "default"),
        content_data=("brown", "default"),
        content_raw=("dark red", "default"),
        content_other=("dark magenta", "default"),
        focus=("yellow", "default"),
        code_200=("dark green", "default"),
        code_300=("light blue", "default"),
        code_400=("light red", "default"),
        code_500=("light red", "default"),
        code_other=("dark red", "default"),
        alert=("light magenta", "default"),
        warn=("brown", "default"),
        error=("light red", "default"),
        header=("dark cyan", "default"),
        highlight=("white,bold", "default"),
        intercept=("brown", "default"),
        replay=("light green", "default"),
        mark=("light red", "default"),
        # Contentview Syntax Highlighting
        name=("dark green", "default"),
        string=("dark blue", "default"),
        number=("light magenta", "default"),
        boolean=("dark magenta", "default"),
        comment=("dark gray", "default"),
        # TCP flow details
        from_client=("light blue", "default"),
        to_client=("light red", "default"),
        # Grid Editor
        focusfield=("black", "light gray"),
        focusfield_error=("dark red", "light gray"),
        field_error=("dark red", "default"),
        editfield=("white", "default"),
        commander_command=("white,bold", "default"),
        commander_invalid=("light red", "default"),
        commander_hint=("dark gray", "default"),
    )
    # Fill in the 100 gradient_NN entries required by Palette._fields.
    gen_gradient(
        low,
        ["light red", "yellow", "light green", "dark green", "dark cyan", "dark blue"],
    )
class Dark(LowDark):
    """High-colour (256-colour) overrides on top of the low-colour dark theme."""

    high = dict(
        heading_inactive=("g58", "g11"),
        intercept=("#f60", "default"),
        option_selected=("g85", "g45"),
        option_selected_key=("light cyan", "g50"),
        option_active_selected=("light red", "g50"),
    )
class LowLight(Palette):
    """
    Low-color light background
    """

    # Pure colour data: (foreground, background) per display attribute.
    low = dict(
        background=("black", "white"),
        title=("dark magenta", "default"),
        # Status bar & heading
        heading=("white", "black"),
        heading_key=("dark blue", "black"),
        heading_inactive=("black", "light gray"),
        # Help
        key=("dark blue", "default"),
        head=("black", "default"),
        text=("dark gray", "default"),
        # Options
        option_selected=("black", "light gray"),
        option_selected_key=("dark blue", "light gray"),
        option_active=("light red", "default"),
        option_active_selected=("light red", "light gray"),
        # List and Connections
        method_get=("dark green", "default"),
        method_post=("brown", "default"),
        method_head=("dark cyan", "default"),
        method_put=("light red", "default"),
        method_delete=("dark red", "default"),
        method_other=("light magenta", "default"),
        method_http2_push=("light gray", "default"),
        scheme_http=("dark cyan", "default"),
        scheme_https=("light green", "default"),
        scheme_ws=("brown", "default"),
        scheme_wss=("light magenta", "default"),
        scheme_tcp=("light magenta", "default"),
        scheme_udp=("light magenta", "default"),
        scheme_dns=("light blue", "default"),
        scheme_quic=("brown", "default"),
        scheme_other=("light magenta", "default"),
        url_punctuation=("dark gray", "default"),
        url_domain=("dark gray", "default"),
        url_filename=("black", "default"),
        url_extension=("dark gray", "default"),
        url_query_key=("light blue", "default"),
        url_query_value=("dark blue", "default"),
        content_none=("black", "default"),
        content_text=("dark gray", "default"),
        content_script=("light green", "default"),
        content_media=("light blue", "default"),
        content_data=("brown", "default"),
        content_raw=("light red", "default"),
        content_other=("light magenta", "default"),
        focus=("black", "default"),
        code_200=("dark green", "default"),
        code_300=("light blue", "default"),
        code_400=("dark red", "default"),
        code_500=("dark red", "default"),
        code_other=("light red", "default"),
        error=("light red", "default"),
        warn=("brown", "default"),
        alert=("light magenta", "default"),
        header=("dark blue", "default"),
        highlight=("black,bold", "default"),
        intercept=("brown", "default"),
        replay=("dark green", "default"),
        mark=("dark red", "default"),
        # Contentview Syntax Highlighting
        name=("dark green", "default"),
        string=("dark blue", "default"),
        number=("light magenta", "default"),
        boolean=("dark magenta", "default"),
        comment=("dark gray", "default"),
        # TCP flow details
        from_client=("dark blue", "default"),
        to_client=("dark red", "default"),
        # Grid Editor
        focusfield=("black", "light gray"),
        focusfield_error=("dark red", "light gray"),
        field_error=("dark red", "black"),
        editfield=("black", "default"),
        commander_command=("dark magenta", "default"),
        commander_invalid=("light red", "default"),
        commander_hint=("light gray", "default"),
    )
    # Fill in the 100 gradient_NN entries required by Palette._fields.
    gen_gradient(
        low,
        ["light red", "yellow", "light green", "dark green", "dark cyan", "dark blue"],
    )
class Light(LowLight):
    """High-colour (256-colour) overrides on top of the low-colour light theme."""

    high = dict(
        background=("black", "g100"),
        heading=("g99", "#08f"),
        heading_key=("#0ff,bold", "#08f"),
        heading_inactive=("g35", "g85"),
        replay=("#0a0,bold", "default"),
        option_selected=("black", "g85"),
        option_selected_key=("dark blue", "g85"),
        option_active_selected=("light red", "g85"),
    )
# Solarized palette in Urwid-style terminal high-colour offsets
# See: http://ethanschoonover.com/solarized
# The "hNNN" strings are urwid 256-colour codes approximating Solarized's
# canonical hex values.
sol_base03 = "h234"
sol_base02 = "h235"
sol_base01 = "h240"
sol_base00 = "h241"
sol_base0 = "h244"
sol_base1 = "h245"
sol_base2 = "h254"
sol_base3 = "h230"
sol_yellow = "h136"
sol_orange = "h166"
sol_red = "h160"
sol_magenta = "h125"
sol_violet = "h61"
sol_blue = "h33"
sol_cyan = "h37"
sol_green = "h64"
class SolarizedLight(LowLight):
    """Solarized light theme: high-colour overrides on the light base palette."""

    high = dict(
        background=(sol_base00, sol_base3),
        title=(sol_cyan, "default"),
        text=(sol_base00, "default"),
        # Status bar & heading
        heading=(sol_base2, sol_base02),
        heading_key=(sol_blue, sol_base03),
        heading_inactive=(sol_base03, sol_base1),
        # Help
        key=(
            sol_blue,
            "default",
        ),
        head=(sol_base00, "default"),
        # Options
        option_selected=(sol_base03, sol_base2),
        option_selected_key=(sol_blue, sol_base2),
        option_active=(sol_orange, "default"),
        option_active_selected=(sol_orange, sol_base2),
        # List and Connections
        method_get=(sol_green, "default"),
        method_post=(sol_orange, "default"),
        method_head=(sol_cyan, "default"),
        method_put=(sol_red, "default"),
        method_delete=(sol_red, "default"),
        method_other=(sol_magenta, "default"),
        method_http2_push=("light gray", "default"),
        scheme_http=(sol_cyan, "default"),
        scheme_https=("light green", "default"),
        scheme_ws=(sol_orange, "default"),
        scheme_wss=("light magenta", "default"),
        scheme_tcp=("light magenta", "default"),
        scheme_udp=("light magenta", "default"),
        scheme_dns=("light blue", "default"),
        scheme_quic=(sol_orange, "default"),
        scheme_other=("light magenta", "default"),
        url_punctuation=("dark gray", "default"),
        url_domain=("dark gray", "default"),
        url_filename=("black", "default"),
        url_extension=("dark gray", "default"),
        url_query_key=(sol_blue, "default"),
        url_query_value=("dark blue", "default"),
        focus=(sol_base01, "default"),
        code_200=(sol_green, "default"),
        code_300=(sol_blue, "default"),
        code_400=(
            sol_orange,
            "default",
        ),
        code_500=(sol_red, "default"),
        code_other=(sol_magenta, "default"),
        error=(sol_red, "default"),
        warn=(sol_orange, "default"),
        alert=(sol_magenta, "default"),
        header=(sol_blue, "default"),
        highlight=(sol_base01, "default"),
        intercept=(
            sol_red,
            "default",
        ),
        replay=(
            sol_green,
            "default",
        ),
        mark=(sol_base01, "default"),
        # Contentview Syntax Highlighting
        name=(sol_green, "default"),
        string=(sol_cyan, "default"),
        number=(sol_blue, "default"),
        boolean=(sol_magenta, "default"),
        comment=(sol_base1, "default"),
        # TCP flow details
        from_client=(sol_blue, "default"),
        to_client=(sol_red, "default"),
        # Grid Editor
        focusfield=(sol_base00, sol_base2),
        focusfield_error=(sol_red, sol_base2),
        field_error=(sol_red, "default"),
        editfield=(sol_base01, "default"),
        commander_command=(sol_cyan, "default"),
        commander_invalid=(sol_orange, "default"),
        commander_hint=(sol_base1, "default"),
    )
class SolarizedDark(LowDark):
    """Solarized dark theme: high-colour overrides on the dark base palette."""

    high = dict(
        background=(sol_base2, sol_base03),
        title=(sol_blue, "default"),
        text=(sol_base1, "default"),
        # Status bar & heading
        heading=(sol_base2, sol_base01),
        heading_key=(sol_blue + ",bold", sol_base01),
        heading_inactive=(sol_base1, sol_base02),
        # Help
        key=(
            sol_blue,
            "default",
        ),
        head=(sol_base2, "default"),
        # Options
        option_selected=(sol_base03, sol_base00),
        option_selected_key=(sol_blue, sol_base00),
        option_active=(sol_orange, "default"),
        option_active_selected=(sol_orange, sol_base00),
        # List and Connections
        focus=(sol_base1, "default"),
        method_get=(sol_green, "default"),
        method_post=(sol_orange, "default"),
        method_delete=(sol_red, "default"),
        method_head=(sol_cyan, "default"),
        method_put=(sol_red, "default"),
        method_other=(sol_magenta, "default"),
        method_http2_push=(sol_base01, "default"),
        url_punctuation=("h242", "default"),
        url_domain=("h252", "default"),
        url_filename=("h132", "default"),
        url_extension=("h96", "default"),
        url_query_key=("h37", "default"),
        url_query_value=("h30", "default"),
        content_none=(sol_base01, "default"),
        content_text=(sol_base1, "default"),
        content_media=(sol_blue, "default"),
        code_200=(sol_green, "default"),
        code_300=(sol_blue, "default"),
        code_400=(
            sol_orange,
            "default",
        ),
        code_500=(sol_red, "default"),
        code_other=(sol_magenta, "default"),
        error=(sol_red, "default"),
        warn=(sol_orange, "default"),
        alert=(sol_magenta, "default"),
        header=(sol_blue, "default"),
        highlight=(sol_base01, "default"),
        intercept=(
            sol_red,
            "default",
        ),
        replay=(
            sol_green,
            "default",
        ),
        mark=(sol_base01, "default"),
        # Contentview Syntax Highlighting
        name=(sol_green, "default"),
        string=(sol_cyan, "default"),
        number=(sol_blue, "default"),
        boolean=(sol_magenta, "default"),
        comment=(sol_base00, "default"),
        # TCP flow details
        from_client=(sol_blue, "default"),
        to_client=(sol_red, "default"),
        # Grid Editor
        focusfield=(sol_base0, sol_base02),
        focusfield_error=(sol_red, sol_base02),
        field_error=(sol_red, "default"),
        editfield=(sol_base1, "default"),
        commander_command=(sol_blue, "default"),
        commander_invalid=(sol_orange, "default"),
        commander_hint=(sol_base00, "default"),
    )
    # RGB-interpolated gradient entries (overrides the low-colour gradient).
    gen_rgb_gradient(
        high, [(15, 0, 0), (15, 15, 0), (0, 15, 0), (0, 15, 15), (0, 0, 15)]
    )
# Palette chosen when the user has not configured one.
DEFAULT = "dark"

# Registry of selectable themes, keyed by the value of the console_palette
# option (looked up in ConsoleMaster.set_palette).
palettes = {
    "lowlight": LowLight(),
    "lowdark": LowDark(),
    "light": Light(),
    "dark": Dark(),
    "solarized_light": SolarizedLight(),
    "solarized_dark": SolarizedDark(),
}
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/console/master.py | mitmproxy/tools/console/master.py | import asyncio
import contextlib
import mimetypes
import os.path
import shlex
import shutil
import stat
import subprocess
import sys
import tempfile
import threading
from typing import TypeVar
import urwid
from tornado.platform.asyncio import AddThreadSelectorEventLoop
from mitmproxy import addons
from mitmproxy import log
from mitmproxy import master
from mitmproxy import options
from mitmproxy.addons import errorcheck
from mitmproxy.addons import eventstore
from mitmproxy.addons import intercept
from mitmproxy.addons import readfile
from mitmproxy.addons import view
from mitmproxy.tools.console import consoleaddons
from mitmproxy.tools.console import defaultkeys
from mitmproxy.tools.console import keymap
from mitmproxy.tools.console import palettes
from mitmproxy.tools.console import signals
from mitmproxy.tools.console import window
from mitmproxy.utils import strutils
T = TypeVar("T", str, bytes)
class ConsoleMaster(master.Master):
    """Master driving the interactive urwid console UI (the `mitmproxy` tool)."""

    def __init__(self, opts: options.Options) -> None:
        super().__init__(opts)

        self.view: view.View = view.View()
        self.events = eventstore.EventStore()
        self.events.sig_add.connect(self.sig_add_log)

        self.stream_path = None
        self.keymap = keymap.Keymap(self)
        defaultkeys.map(self.keymap)
        self.options.errored.connect(self.options_error)

        # Standard addons first, then console-specific ones.
        self.addons.add(*addons.default_addons())
        self.addons.add(
            intercept.Intercept(),
            self.view,
            self.events,
            readfile.ReadFile(),
            consoleaddons.ConsoleAddon(self),
            keymap.KeymapConfig(self),
            errorcheck.ErrorCheck(repeat_errors_on_stderr=True),
        )

        self.window: window.Window | None = None

    def __setattr__(self, name, value):
        super().__setattr__(name, value)
        # Any attribute change may affect UI state; notify subscribed widgets.
        signals.update_settings.send()

    def options_error(self, exc) -> None:
        # Surface option errors briefly in the status bar.
        signals.status_message.send(message=str(exc), expire=1)

    def prompt_for_user_choice(self, prompt, callback) -> None:
        """Show a one-key yes/no prompt in the status bar."""
        signals.status_prompt_onekey.send(
            prompt=prompt,
            keys=[
                ("yes", "y"),
                ("no", "n"),
            ],
            callback=callback,
        )

    def prompt_for_exit(self) -> None:
        """Ask for confirmation before quitting (used as the SIGINT handler)."""
        self.prompt_for_user_choice("Quit", self.quit)

    def sig_add_log(self, entry: log.LogEntry):
        """Mirror sufficiently severe log entries into the status bar."""
        if log.log_tier(self.options.console_eventlog_verbosity) < log.log_tier(
            entry.level
        ):
            return
        if entry.level in ("error", "warn", "alert"):
            signals.status_message.send(
                message=(
                    entry.level,
                    f"{entry.level.title()}: {entry.msg.lstrip()}",
                ),
                expire=5,
            )

    def sig_call_in(self, seconds, callback):
        # Schedule *callback* on the urwid mainloop after *seconds*.
        def cb(*_):
            return callback()

        self.loop.set_alarm_in(seconds, cb)

    @contextlib.contextmanager
    def uistopped(self):
        """Suspend the urwid mainloop while the body runs (e.g. external editor)."""
        self.loop.stop()
        try:
            yield
        finally:
            self.loop.start()
            self.loop.screen_size = None  # force urwid to re-probe the terminal size
            self.loop.draw_screen()

    def get_editor(self) -> str:
        """Pick a text editor: $MITMPROXY_EDITOR, $EDITOR, then common fallbacks."""
        # based upon https://github.com/pallets/click/blob/main/src/click/_termui_impl.py
        if m := os.environ.get("MITMPROXY_EDITOR"):
            return m
        if m := os.environ.get("EDITOR"):
            return m
        for editor in "sensible-editor", "nano", "vim":
            if shutil.which(editor):
                return editor
        if os.name == "nt":
            return "notepad"
        else:
            return "vi"

    def get_hex_editor(self) -> str:
        """Pick a hex editor from PATH; fall back to the plain text editor."""
        editors = ["ghex", "hexedit", "hxd", "hexer", "hexcurse"]
        for editor in editors:
            if shutil.which(editor):
                return editor
        return self.get_editor()

    def spawn_editor(self, data: T) -> T:
        """Edit *data* (str or bytes) in an external editor and return the result.

        Mostly-binary bytes are handed to a hex editor instead of a text editor.
        """
        text = isinstance(data, str)
        fd, name = tempfile.mkstemp("", "mitmproxy", text=text)
        with_hexeditor = isinstance(data, bytes) and strutils.is_mostly_bin(data)
        with open(fd, "w" if text else "wb") as f:
            f.write(data)
        if with_hexeditor:
            c = self.get_hex_editor()
        else:
            c = self.get_editor()
        cmd = shlex.split(c)
        cmd.append(name)
        with self.uistopped():
            try:
                subprocess.call(cmd)
            except Exception:
                signals.status_message.send(message="Can't start editor: %s" % c)
            else:
                with open(name, "r" if text else "rb") as f:
                    data = f.read()
        os.unlink(name)
        return data

    def spawn_external_viewer(self, data, contenttype):
        """Show *data* in an external viewer/pager chosen via environment variables."""
        if contenttype:
            contenttype = contenttype.split(";")[0]
            ext = mimetypes.guess_extension(contenttype) or ""
        else:
            ext = ""
        fd, name = tempfile.mkstemp(ext, "mproxy")
        os.write(fd, data)
        os.close(fd)

        # read-only to remind the user that this is a view function
        os.chmod(name, stat.S_IREAD)

        # hm which one should get priority?
        c = (
            os.environ.get("MITMPROXY_EDITOR")
            or os.environ.get("PAGER")
            or os.environ.get("EDITOR")
        )
        if not c:
            c = "less"
        cmd = shlex.split(c)
        cmd.append(name)
        with self.uistopped():
            try:
                subprocess.call(cmd, shell=False)
            except Exception:
                signals.status_message.send(
                    message="Can't start external viewer: %s" % " ".join(c)
                )
        # add a small delay before deletion so that the file is not removed before being loaded by the viewer
        t = threading.Timer(1.0, os.unlink, args=[name])
        t.start()

    def set_palette(self, *_) -> None:
        """(Re-)apply the colour palette selected by the console_palette option."""
        self.ui.register_palette(
            palettes.palettes[self.options.console_palette].palette(
                self.options.console_palette_transparent
            )
        )
        self.ui.clear()

    def inject_key(self, key):
        # Feed a synthetic key event into the urwid mainloop.
        self.loop.process_input([key])

    async def running(self) -> None:
        """Validate the terminal, set up urwid screen + mainloop, then serve."""
        if not sys.stdout.isatty():
            print(
                "Error: mitmproxy's console interface requires a tty. "
                "Please run mitmproxy in an interactive shell environment.",
                file=sys.stderr,
            )
            sys.exit(1)

        detected_encoding = urwid.detected_encoding.lower()
        if os.name != "nt" and detected_encoding and "utf" not in detected_encoding:
            print(
                f"mitmproxy expects a UTF-8 console environment, not {urwid.detected_encoding!r}. "
                f"Set your LANG environment variable to something like en_US.UTF-8.",
                file=sys.stderr,
            )
            # Experimental (04/2022): We just don't exit here and see if/how that affects users.
            # sys.exit(1)
        urwid.set_encoding("utf8")

        signals.call_in.connect(self.sig_call_in)
        self.ui = window.Screen()
        self.ui.set_terminal_properties(256)
        self.set_palette(None)
        self.options.subscribe(
            self.set_palette, ["console_palette", "console_palette_transparent"]
        )

        loop = asyncio.get_running_loop()
        if isinstance(loop, getattr(asyncio, "ProactorEventLoop", tuple())):
            # fix for https://bugs.python.org/issue37373
            loop = AddThreadSelectorEventLoop(loop)  # type: ignore
        self.loop = urwid.MainLoop(
            urwid.SolidFill("x"),
            event_loop=urwid.AsyncioEventLoop(loop=loop),
            screen=self.ui,
            handle_mouse=self.options.console_mouse,
        )
        self.window = window.Window(self)
        self.loop.widget = self.window
        self.window.refresh()

        self.loop.start()

        await super().running()

    async def done(self):
        self.loop.stop()
        await super().done()

    def overlay(self, widget, **kwargs):
        """Display *widget* as an overlay on top of the current window."""
        assert self.window
        self.window.set_overlay(widget, **kwargs)

    def switch_view(self, name):
        assert self.window
        self.window.push(name)

    def quit(self, a):
        # Callback for the quit prompt; any answer except "n" shuts down.
        if a != "n":
            self.shutdown()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/console/commandexecutor.py | mitmproxy/tools/console/commandexecutor.py | import logging
from collections.abc import Sequence
from mitmproxy import exceptions
from mitmproxy import flow
from mitmproxy.tools.console import overlay
from mitmproxy.tools.console import signals
class CommandExecutor:
    """Executes a console command string and routes its result to the UI."""

    def __init__(self, master):
        self.master = master

    def __call__(self, cmd: str) -> None:
        """Run *cmd*: show a status message for flow results, a data viewer
        overlay for anything else. Command errors are logged, not raised."""
        if not cmd.strip():
            return
        try:
            ret = self.master.commands.execute(cmd)
        except exceptions.CommandError as e:
            logging.error(str(e))
            return
        if ret is None:
            return
        # Bug fix: the previous code compared `type(ret)` against the
        # parameterized generic `Sequence[flow.Flow]` (always False at
        # runtime) and used `type(ret) is flow.Flow` (misses Flow subclasses
        # such as HTTPFlow), so every result opened the data viewer.
        if (
            isinstance(ret, Sequence)
            and not isinstance(ret, (str, bytes))
            and all(isinstance(x, flow.Flow) for x in ret)
        ):
            signals.status_message.send(
                message="Command returned %s flows" % len(ret)
            )
        elif isinstance(ret, flow.Flow):
            signals.status_message.send(message="Command returned 1 flow")
        else:
            self.master.overlay(
                overlay.DataViewerOverlay(
                    self.master,
                    ret,
                ),
                valign="top",
            )
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/console/common.py | mitmproxy/tools/console/common.py | import enum
import math
import platform
from collections.abc import Iterable
from functools import lru_cache
import urwid.util
from publicsuffix2 import get_sld
from publicsuffix2 import get_tld
from mitmproxy import dns
from mitmproxy import flow
from mitmproxy.dns import DNSFlow
from mitmproxy.http import HTTPFlow
from mitmproxy.tcp import TCPFlow
from mitmproxy.udp import UDPFlow
from mitmproxy.utils import emoji
from mitmproxy.utils import human
# Detect Windows Subsystem for Linux and Windows.
# NOTE(review): WSL kernel strings historically contain "Microsoft"; native
# Windows platform strings contain "Windows" — confirm this still holds for
# newer WSL2 kernels.
IS_WINDOWS_OR_WSL = (
    "Microsoft" in platform.platform() or "Windows" in platform.platform()
)
def is_keypress(k) -> bool:
    """
    Is this input event a keypress?

    urwid delivers keypresses as plain strings and other input events
    (e.g. mouse events) as tuples, so a string identifies a keypress.
    Previously this returned True or (implicitly) None; it now returns a
    proper bool, which is truthiness-compatible for all callers.
    """
    return isinstance(k, str)
def highlight_key(text, key, textattr="text", keyattr="key"):
    """Split *text* around the first occurrence of *key* and return urwid
    markup tuples with the key rendered using *keyattr*."""
    parts = text.split(key, 1)
    markup = []
    if parts[0]:
        markup.append((textattr, parts[0]))
    markup.append((keyattr, key))
    if parts[1]:
        markup.append((textattr, parts[1]))
    return markup
KEY_MAX = 30  # Maximum rendered width of the key column in format_keyvals().
def format_keyvals(
    entries: Iterable[tuple[str, None | str | urwid.Widget]],
    key_format: str = "key",
    value_format: str = "text",
    indent: int = 0,
) -> list[urwid.Columns]:
    """
    Format a list of (key, value) tuples.

    Args:
        entries: The list to format. keys must be strings, values can also be None or urwid widgets.
            The latter makes it possible to use the result of format_keyvals() as a value.
        key_format: The display attribute for the key.
        value_format: The display attribute for the value.
        indent: Additional indent to apply.
    """
    key_width = min(
        max((len(k) for k, v in entries if k is not None), default=0),
        KEY_MAX,
    )
    if indent > 2:
        indent -= 2  # dividechars=2 below already contributes two spaces
    rows = []
    for key, value in entries:
        if value is None:
            value = urwid.Text("")
        elif not isinstance(value, urwid.Widget):
            value = urwid.Text([(value_format, value)])
        rows.append(
            urwid.Columns(
                [
                    ("fixed", indent, urwid.Text("")),
                    ("fixed", key_width, urwid.Text([(key_format, key)])),
                    value,
                ],
                dividechars=2,
            )
        )
    return rows
def fcol(s: str, attr: str) -> tuple[str, int, urwid.Text]:
    """Wrap *s* as a fixed-width urwid column sized to its own length."""
    text = str(s)
    return "fixed", len(text), urwid.Text([(attr, text)])
# Use Unicode glyphs when urwid detected a terminal encoding (presumably one
# that can render them); otherwise fall back to plain ASCII markers.
if urwid.util.detected_encoding:
    SYMBOL_REPLAY = "\u21ba"  # ↺
    SYMBOL_RETURN = "\u2190"  # ←
    SYMBOL_MARK = "\u25cf"  # ●
    SYMBOL_UP = "\u21e7"  # ⇧
    SYMBOL_DOWN = "\u21e9"  # ⇩
    SYMBOL_ELLIPSIS = "\u2026"  # …
    SYMBOL_FROM_CLIENT = "\u21d2"  # ⇒
    SYMBOL_TO_CLIENT = "\u21d0"  # ⇐
else:
    SYMBOL_REPLAY = "[r]"
    SYMBOL_RETURN = "<-"
    SYMBOL_MARK = "#"
    SYMBOL_UP = "^"
    SYMBOL_DOWN = " "
    SYMBOL_ELLIPSIS = "~"
    SYMBOL_FROM_CLIENT = "->"
    SYMBOL_TO_CLIENT = "<-"
# Display attribute (palette entry) per URL scheme / transport protocol.
SCHEME_STYLES = {
    "http": "scheme_http",
    "https": "scheme_https",
    "ws": "scheme_ws",
    "wss": "scheme_wss",
    "tcp": "scheme_tcp",
    "udp": "scheme_udp",
    "dns": "scheme_dns",
    "quic": "scheme_quic",
}
# Display attribute per HTTP request method; anything else uses "method_other".
HTTP_REQUEST_METHOD_STYLES = {
    "GET": "method_get",
    "POST": "method_post",
    "DELETE": "method_delete",
    "HEAD": "method_head",
    "PUT": "method_put",
}
# Display attribute per HTTP status class, keyed by status_code // 100.
HTTP_RESPONSE_CODE_STYLE = {
    2: "code_200",
    3: "code_300",
    4: "code_400",
    5: "code_500",
}
class RenderMode(enum.Enum):
    """Rendering context a flow row is produced for."""

    TABLE = 1
    """The flow list in table format, i.e. one row per flow."""
    LIST = 2
    """The flow list in list format, i.e. potentially multiple rows per flow."""
    DETAILVIEW = 3
    """The top lines in the detail view."""
def fixlen(s: str, maxlen: int) -> str:
    """Left-justify *s* to exactly *maxlen* characters, truncating the end
    with an ellipsis when it is too long."""
    if len(s) > maxlen:
        return s[: maxlen - len(SYMBOL_ELLIPSIS)] + SYMBOL_ELLIPSIS
    return s.ljust(maxlen)
def fixlen_r(s: str, maxlen: int) -> str:
    """Right-justify *s* to exactly *maxlen* characters, truncating from the
    left with an ellipsis when it is too long."""
    if len(s) > maxlen:
        return SYMBOL_ELLIPSIS + s[len(s) - maxlen + len(SYMBOL_ELLIPSIS) :]
    return s.rjust(maxlen)
def render_marker(marker: str) -> str:
    """Map a marker name to a single display glyph."""
    glyph = emoji.emoji.get(marker, SYMBOL_MARK)
    # The marker can only be one glyph. Some emoji that use zero-width joiners
    # (ZWJ) would otherwise render as multiple glyphs, so fall back to the
    # first one. https://emojipedia.org/emoji-zwj-sequence/
    return glyph[0]
class TruncatedText(urwid.Widget):
    # A one-line widget rendering attributed text, truncated with an ellipsis
    # when it does not fit. With align="right", the *start* of the text is
    # truncated instead of the end (implemented by reversing, truncating, and
    # reversing back).
    def __init__(self, text, attr, align="left"):
        # attr is a run-length attribute list; run lengths are in *bytes*.
        self.text = text
        self.attr = attr
        self.align = align
        super().__init__()

    def pack(self, size, focus=False):
        return (len(self.text), 1)

    def rows(self, size, focus=False):
        # Always exactly one screen row.
        return 1

    def render(self, size, focus=False):
        text = self.text
        attr = self.attr
        if self.align == "right":
            text = text[::-1]
            attr = attr[::-1]
        text_len = urwid.calc_width(text, 0, len(text))
        if size is not None and len(size) > 0:
            width = size[0]
        else:
            width = text_len
        if width >= text_len:
            # Everything fits: pad with spaces up to the full width.
            remaining = width - text_len
            if remaining > 0:
                c_text = text + " " * remaining
                c_attr = attr + [("text", remaining)]
            else:
                c_text = text
                c_attr = attr
        else:
            # Too long: keep what fits in width-1 columns and append an
            # ellipsis. Attribute run lengths are byte-based, hence encode().
            trim = urwid.util.calc_trim_text(text, 0, width - 1, 0, width - 1)
            visible_text = text[0 : trim[1]]
            if trim[3] == 1:
                # NOTE(review): presumably a double-cell character was split at
                # the trim point; pad to keep the column count — confirm
                # against urwid.util.calc_trim_text semantics.
                visible_text += " "
            c_text = visible_text + SYMBOL_ELLIPSIS
            c_attr = urwid.util.rle_subseg(attr, 0, len(visible_text.encode())) + [
                ("focus", len(SYMBOL_ELLIPSIS.encode()))
            ]
        if self.align == "right":
            c_text = c_text[::-1]
            c_attr = c_attr[::-1]
        return urwid.TextCanvas([c_text.encode()], [c_attr], maxcol=width)
def truncated_plain(text, attr, align="left"):
    """Shorthand: a TruncatedText with one run-length attribute spanning the
    whole (byte-encoded) text."""
    rle = [(attr, len(text.encode()))]
    return TruncatedText(text, rle, align)
# Work around https://github.com/urwid/urwid/pull/330
def rle_append_beginning_modify(rle, a_r):
    """
    Append (a, r) (unpacked from *a_r*) to the BEGINNING of rle,
    merging with the first run when it has the same attribute.

    MODIFIES rle parameter contents. Returns None.
    """
    attr, run = a_r
    if not rle:
        rle[:] = [(attr, run)]
        return
    first_attr, first_run = rle[0]
    if attr == first_attr:
        # Same attribute: extend the existing first run in place.
        rle[0] = (attr, first_run + run)
    else:
        rle.insert(0, (attr, run))
def colorize_host(host: str):
    """Return urwid run-length attributes for *host*, styling the public
    suffix and the preceding label differently from the rest."""
    if not host:
        return []
    # NOTE(review): get_tld/get_sld may return None for hosts without a known
    # public suffix (e.g. bare IPs) — len() would then raise; confirm callers
    # never hit that.
    tld = get_tld(host)
    sld = get_sld(host)
    attr: list = []
    tld_size = len(tld)
    sld_size = len(sld) - tld_size
    # Walk the host right-to-left: first the TLD characters, then the "."
    # separating it from the registrable label, then that label's characters.
    for letter in reversed(range(len(host))):
        character = host[letter]
        if tld_size > 0:
            style = "url_domain"
            tld_size -= 1
        elif tld_size == 0:
            # The separator dot; tld_size drops to -1 so this runs only once.
            style = "text"
            tld_size -= 1
        elif sld_size > 0:
            sld_size -= 1
            style = "url_extension"
        else:
            style = "text"
        # Run lengths are byte-based, so multi-byte characters count per byte.
        rle_append_beginning_modify(attr, (style, len(character.encode())))
    return attr
def colorize_req(s: str):
    """Return urwid run-length attributes for a request path plus query
    string, styling punctuation, filename, extension, and query keys/values."""
    path = s.split("?", 2)[0]
    i_query = len(path)  # index of "?" (== len(s) when there is no query)
    i_last_slash = path.rfind("/")
    i_ext = path[i_last_slash + 1 :].rfind(".")
    # NOTE(review): the dot's absolute index would be i_last_slash + 1 + i_ext;
    # this looks off by one, but the "." itself is handled by the punctuation
    # branch below, so the rendered output appears unaffected — verify.
    i_ext = i_last_slash + i_ext if i_ext >= 0 else len(s)
    in_val = False  # inside a query *value* (after "=", before the next "&")
    attr: list = []
    for i in range(len(s)):
        c = s[i]
        if (
            (i < i_query and c == "/")
            or (i < i_query and i > i_last_slash and c == ".")
            or (i == i_query)
        ):
            a = "url_punctuation"
        elif i > i_query:
            # Query string: alternate key/value styling on "=" and "&".
            if in_val:
                if c == "&":
                    in_val = False
                    a = "url_punctuation"
                else:
                    a = "url_query_value"
            else:
                if c == "=":
                    in_val = True
                    a = "url_punctuation"
                else:
                    a = "url_query_key"
        elif i > i_ext:
            a = "url_extension"
        elif i > i_last_slash:
            a = "url_filename"
        else:
            a = "text"
        # Run lengths are byte-based.
        urwid.util.rle_append_modify(attr, (a, len(c.encode())))
    return attr
def colorize_url(url):
    """Return urwid run-length attributes for a full URL, or a single "error"
    run when it does not look like scheme://host/path."""
    parts = url.split("/", 3)
    if len(parts) < 4 or len(parts[1]) > 0 or parts[0][-1:] != ":":
        return [("error", len(url))]  # bad URL
    # Fixed: strip the trailing ":" before the style lookup. parts[0] is e.g.
    # "https:", while SCHEME_STYLES is keyed by bare scheme names ("https"),
    # so the previous lookup always fell back to "scheme_other".
    scheme_style = SCHEME_STYLES.get(parts[0][:-1], "scheme_other")
    return (
        [
            (scheme_style, len(parts[0]) - 1),
            ("url_punctuation", 3),  # ://
        ]
        + colorize_host(parts[2])
        + colorize_req("/" + parts[3])
    )
def format_http_content_type(content_type: str) -> tuple[str, str]:
    """Strip parameters from a Content-Type header value and choose a display
    style for the bare media type."""
    media_type = content_type.split(";")[0]
    if media_type.endswith("/javascript"):
        style = "content_script"
    elif media_type.startswith("text/"):
        style = "content_text"
    elif (
        media_type.startswith(("image/", "video/", "font/"))
        or "/x-font-" in media_type
    ):
        style = "content_media"
    elif media_type.endswith(("/json", "/xml")):
        style = "content_data"
    elif media_type.startswith("application/"):
        style = "content_raw"
    else:
        style = "content_other"
    return media_type, style
def format_duration(duration: float) -> tuple[str, str]:
    """Return a human-readable duration and a gradient style bucket
    (logarithmic in milliseconds)."""
    bucket = int(99 - 100 * min(math.log2(max(1.0, 1000 * duration)) / 12, 0.99))
    return human.pretty_duration(duration), "gradient_%02d" % bucket
def format_size(num_bytes: int) -> tuple[str, str]:
    """Return a human-readable size and a gradient style bucket
    (logarithmic in bytes)."""
    bucket = int(99 - 100 * min(math.log2(1 + num_bytes) / 20, 0.99))
    return human.pretty_size(num_bytes), "gradient_%02d" % bucket
def format_left_indicators(*, focused: bool, intercepted: bool, timestamp: float):
    """Fixed 10-character column: focus marker plus the time-of-day tail of
    the formatted *timestamp* (styled "intercept" when intercepted)."""
    markup: list[str | tuple[str, str]] = []
    markup.append(("focus", ">>") if focused else "  ")
    # Keep only the trailing HH:MM:SS portion of the formatted timestamp.
    clock = human.format_timestamp(timestamp)[-8:]
    markup.append(("intercept", clock) if intercepted else ("text", clock))
    return "fixed", 10, urwid.Text(markup)
def format_right_indicators(
    *,
    replay: bool,
    marked: str,
):
    """Fixed 3-character column with the replay symbol and the marker glyph."""
    indicators: list[str | tuple[str, str]] = []
    if replay:
        indicators.append(("replay", SYMBOL_REPLAY))
    else:
        indicators.append(" ")
    # `marked` is a (possibly empty) marker name; plain truthiness suffices —
    # the previous explicit bool() call was redundant.
    if marked:
        indicators.append(("mark", render_marker(marked)))
    else:
        indicators.append(" ")
    return "fixed", 3, urwid.Text(indicators)
@lru_cache(maxsize=800)
def format_http_flow_list(
    *,
    render_mode: RenderMode,
    focused: bool,
    marked: str,
    is_replay: bool,
    request_method: str,
    request_scheme: str,
    request_host: str,
    request_path: str,
    request_url: str,
    request_http_version: str,
    request_timestamp: float,
    request_is_push_promise: bool,
    intercepted: bool,
    response_code: int | None,
    response_reason: str | None,
    response_content_length: int | None,
    response_content_type: str | None,
    duration: float | None,
    error_message: str | None,
) -> urwid.Widget:
    """Render one HTTP flow as a two-line widget (request line plus response
    line) for list mode and the detail view. Flow attributes are passed
    individually so results can be lru_cached by value."""
    req = []
    # First column: full timestamp in the detail view, focus marker otherwise.
    if render_mode is RenderMode.DETAILVIEW:
        req.append(fcol(human.format_timestamp(request_timestamp), "highlight"))
    else:
        if focused:
            req.append(fcol(">>", "focus"))
        else:
            req.append(fcol("  ", "focus"))
    method_style = HTTP_REQUEST_METHOD_STYLES.get(request_method, "method_other")
    req.append(fcol(request_method, method_style))
    if request_is_push_promise:
        req.append(fcol("PUSH_PROMISE", "method_http2_push"))
    # Width of everything before the URL; used to indent the response line.
    preamble_len = sum(x[1] for x in req) + len(req) - 1
    # Only append the HTTP version when it is not plain HTTP/1.x.
    if request_http_version not in ("HTTP/1.0", "HTTP/1.1"):
        request_url += " " + request_http_version
    if intercepted and not response_code:
        url_style = "intercept"
    elif response_code or error_message:
        url_style = "text"
    else:
        url_style = "title"
    if render_mode is RenderMode.DETAILVIEW:
        req.append(urwid.Text([(url_style, request_url)]))
    else:
        req.append(truncated_plain(request_url, url_style))
    req.append(format_right_indicators(replay=is_replay, marked=marked))
    # Second line, indented to align under the URL.
    resp = [("fixed", preamble_len, urwid.Text(""))]
    if response_code:
        if intercepted:
            style = "intercept"
        else:
            style = ""
        status_style = style or HTTP_RESPONSE_CODE_STYLE.get(
            response_code // 100, "code_other"
        )
        resp.append(fcol(SYMBOL_RETURN, status_style))
        resp.append(fcol(str(response_code), status_style))
        if response_reason and render_mode is RenderMode.DETAILVIEW:
            resp.append(fcol(response_reason, status_style))
        if response_content_type:
            ct, ct_style = format_http_content_type(response_content_type)
            resp.append(fcol(ct, style or ct_style))
        if response_content_length:
            size, size_style = format_size(response_content_length)
        elif response_content_length == 0:
            size = "[no content]"
            size_style = "text"
        else:
            # None: raw content unavailable (see format_flow), e.g. streamed.
            size = "[content missing]"
            size_style = "text"
        resp.append(fcol(size, style or size_style))
        if duration:
            dur, dur_style = format_duration(duration)
            resp.append(fcol(dur, style or dur_style))
    elif error_message:
        resp.append(fcol(SYMBOL_RETURN, "error"))
        resp.append(urwid.Text([("error", error_message)]))
    return urwid.Pile(
        [urwid.Columns(req, dividechars=1), urwid.Columns(resp, dividechars=1)]
    )
@lru_cache(maxsize=800)
def format_http_flow_table(
    *,
    render_mode: RenderMode,
    focused: bool,
    marked: str,
    is_replay: str | None,
    request_method: str,
    request_scheme: str,
    request_host: str,
    request_path: str,
    request_url: str,
    request_http_version: str,
    request_timestamp: float,
    request_is_push_promise: bool,
    intercepted: bool,
    response_code: int | None,
    response_reason: str | None,
    response_content_length: int | None,
    response_content_type: str | None,
    duration: float | None,
    error_message: str | None,
) -> urwid.Widget:
    """Render one HTTP flow as a single table row. Flow attributes are passed
    individually (not the flow itself) so rows can be lru_cached by value."""
    items = [
        format_left_indicators(
            focused=focused, intercepted=intercepted, timestamp=request_timestamp
        )
    ]
    # Requests still awaiting an intercepted response are styled "intercept".
    if intercepted and not response_code:
        request_style = "intercept"
    else:
        request_style = ""
    scheme_style = request_style or SCHEME_STYLES.get(request_scheme, "scheme_other")
    items.append(fcol(fixlen(request_scheme.upper(), 5), scheme_style))
    if request_is_push_promise:
        method_style = "method_http2_push"
    else:
        method_style = request_style or HTTP_REQUEST_METHOD_STYLES.get(
            request_method, "method_other"
        )
    items.append(fcol(fixlen(request_method, 4), method_style))
    # Host is right-aligned (truncated from the left), path left-aligned.
    items.append(
        (
            "weight",
            0.25,
            TruncatedText(request_host, colorize_host(request_host), "right"),
        )
    )
    items.append(
        ("weight", 1.0, TruncatedText(request_path, colorize_req(request_path), "left"))
    )
    if intercepted and response_code:
        response_style = "intercept"
    else:
        response_style = ""
    if response_code:
        status = str(response_code)
        status_style = response_style or HTTP_RESPONSE_CODE_STYLE.get(
            response_code // 100, "code_other"
        )
        if response_content_length and response_content_type:
            content, content_style = format_http_content_type(response_content_type)
            content_style = response_style or content_style
        elif response_content_length:
            # Body present but no Content-Type header to display.
            content = ""
            content_style = "content_none"
        elif response_content_length == 0:
            content = "[no content]"
            content_style = "content_none"
        else:
            # None: raw content unavailable (see format_flow), e.g. streamed.
            content = "[content missing]"
            content_style = "content_none"
    elif error_message:
        status = "err"
        status_style = "error"
        content = error_message
        content_style = "error"
    else:
        # No response yet and no error: leave the response columns blank.
        status = ""
        status_style = "text"
        content = ""
        content_style = ""
    items.append(fcol(fixlen(status, 3), status_style))
    items.append(("weight", 0.15, truncated_plain(content, content_style, "right")))
    if response_content_length:
        size, size_style = format_size(response_content_length)
        items.append(fcol(fixlen_r(size, 5), response_style or size_style))
    else:
        items.append(("fixed", 5, urwid.Text("")))
    if duration:
        duration_pretty, duration_style = format_duration(duration)
        items.append(
            fcol(fixlen_r(duration_pretty, 5), response_style or duration_style)
        )
    else:
        items.append(("fixed", 5, urwid.Text("")))
    items.append(
        format_right_indicators(
            replay=bool(is_replay),
            marked=marked,
        )
    )
    return urwid.Columns(items, dividechars=1, min_width=15)
@lru_cache(maxsize=800)
def format_message_flow(
    *,
    render_mode: RenderMode,
    focused: bool,
    timestamp_start: float,
    marked: str,
    protocol: str,
    client_address,
    server_address,
    total_size: int,
    duration: float | None,
    error_message: str | None,
):
    """Render a raw message flow (tcp/udp/quic) as a single row showing the
    client<->server address pair, total payload size and duration."""
    conn = f"{human.format_address(client_address)} <-> {human.format_address(server_address)}"
    items = []
    if render_mode in (RenderMode.TABLE, RenderMode.DETAILVIEW):
        items.append(
            format_left_indicators(
                focused=focused, intercepted=False, timestamp=timestamp_start
            )
        )
    else:
        if focused:
            items.append(fcol(">>", "focus"))
        else:
            items.append(fcol("  ", "focus"))
    # `protocol` doubles as the SCHEME_STYLES key ("tcp", "udp" or "quic").
    if render_mode is RenderMode.TABLE:
        items.append(fcol(fixlen(protocol.upper(), 5), SCHEME_STYLES[protocol]))
    else:
        items.append(fcol(protocol.upper(), SCHEME_STYLES[protocol]))
    items.append(("weight", 1.0, truncated_plain(conn, "text", "left")))
    if error_message:
        items.append(("weight", 1.0, truncated_plain(error_message, "error", "left")))
    if total_size:
        size, size_style = format_size(total_size)
        items.append(fcol(fixlen_r(size, 5), size_style))
    else:
        items.append(("fixed", 5, urwid.Text("")))
    if duration:
        duration_pretty, duration_style = format_duration(duration)
        items.append(fcol(fixlen_r(duration_pretty, 5), duration_style))
    else:
        items.append(("fixed", 5, urwid.Text("")))
    items.append(format_right_indicators(replay=False, marked=marked))
    return urwid.Pile([urwid.Columns(items, dividechars=1, min_width=15)])
@lru_cache(maxsize=800)
def format_dns_flow(
    *,
    render_mode: RenderMode,
    focused: bool,
    intercepted: bool,
    marked: str,
    is_replay: str | None,
    op_code: str,
    request_timestamp: float,
    domain: str,
    type: str,
    response_code: str | None,
    response_code_http_equiv: int,
    answer: str | None,
    error_message: str,
    duration: float | None,
):
    """Render a DNS flow as a single row: op code, queried domain and record
    type, answer (or pending/error state), response code and duration."""
    items = []
    if render_mode in (RenderMode.TABLE, RenderMode.DETAILVIEW):
        items.append(
            format_left_indicators(
                focused=focused, intercepted=intercepted, timestamp=request_timestamp
            )
        )
    else:
        items.append(fcol(">>" if focused else "  ", "focus"))
    scheme_style = "intercepted" if intercepted else SCHEME_STYLES["dns"]
    t = f"DNS {op_code}"
    if render_mode is RenderMode.TABLE:
        t = fixlen(t, 10)
    items.append(fcol(t, scheme_style))
    items.append(("weight", 0.5, TruncatedText(domain, colorize_host(domain), "right")))
    items.append(fcol("(" + fixlen(type, 5)[: len(type)] + ") =", "text"))
    # "..." while the response is pending, "?" for an empty answer section.
    items.append(
        (
            "weight",
            1,
            (
                truncated_plain(
                    "..." if answer is None else "?" if not answer else answer, "text"
                )
                if error_message is None
                else truncated_plain(error_message, "error")
            ),
        )
    )
    # Reuse the HTTP status-code color classes via the HTTP-equivalent code.
    status_style = (
        "intercepted"
        if intercepted
        else HTTP_RESPONSE_CODE_STYLE.get(response_code_http_equiv // 100, "code_other")
    )
    items.append(
        fcol(fixlen("" if response_code is None else response_code, 9), status_style)
    )
    if duration:
        duration_pretty, duration_style = format_duration(duration)
        items.append(fcol(fixlen_r(duration_pretty, 5), duration_style))
    else:
        items.append(("fixed", 5, urwid.Text("")))
    items.append(
        format_right_indicators(
            replay=bool(is_replay),
            marked=marked,
        )
    )
    return urwid.Pile([urwid.Columns(items, dividechars=1, min_width=15)])
def format_flow(
    f: flow.Flow,
    *,
    render_mode: RenderMode,
    hostheader: bool = False,  # pass options directly if we need more stuff from them
    focused: bool = True,
) -> urwid.Widget:
    """
    This function calls the proper renderer depending on the flow type.

    We also want to cache the renderer output, so we extract all attributes
    relevant for display and call the renderer with only those. This assures
    that rows are updated if the flow is changed.
    """
    duration: float | None
    error_message: str | None
    if f.error:
        error_message = f.error.msg
    else:
        error_message = None
    if isinstance(f, (TCPFlow, UDPFlow)):
        # Raw message flows: total size is the sum of all message payloads.
        total_size = 0
        for message in f.messages:
            total_size += len(message.content)
        if f.messages:
            # Duration from connection start to the last message seen.
            duration = f.messages[-1].timestamp - f.client_conn.timestamp_start
        else:
            duration = None
        if f.client_conn.tls_version == "QUICv1":
            protocol = "quic"
        else:
            protocol = f.type
        return format_message_flow(
            render_mode=render_mode,
            focused=focused,
            timestamp_start=f.client_conn.timestamp_start,
            marked=f.marked,
            protocol=protocol,
            client_address=f.client_conn.peername,
            server_address=f.server_conn.address,
            total_size=total_size,
            duration=duration,
            error_message=error_message,
        )
    elif isinstance(f, DNSFlow):
        if f.request.timestamp and f.response and f.response.timestamp:
            duration = f.response.timestamp - f.request.timestamp
        else:
            duration = None
        if f.response:
            response_code_str: str | None = dns.response_codes.to_str(
                f.response.response_code
            )
            response_code_http_equiv = dns.response_codes.http_equiv_status_code(
                f.response.response_code
            )
            answer = ", ".join(str(x) for x in f.response.answers)
        else:
            response_code_str = None
            response_code_http_equiv = 0
            answer = None
        return format_dns_flow(
            render_mode=render_mode,
            focused=focused,
            intercepted=f.intercepted,
            marked=f.marked,
            is_replay=f.is_replay,
            op_code=dns.op_codes.to_str(f.request.op_code),
            request_timestamp=f.request.timestamp,
            domain=f.request.questions[0].name if f.request.questions else "",
            type=dns.types.to_str(f.request.questions[0].type)
            if f.request.questions
            else "",
            response_code=response_code_str,
            response_code_http_equiv=response_code_http_equiv,
            answer=answer,
            error_message=error_message,
            duration=duration,
        )
    elif isinstance(f, HTTPFlow):
        intercepted = f.intercepted
        response_content_length: int | None
        if f.response:
            if f.response.raw_content is not None:
                response_content_length = len(f.response.raw_content)
            else:
                # None makes the renderers show "[content missing]".
                response_content_length = None
            response_code: int | None = f.response.status_code
            response_reason: str | None = f.response.reason
            response_content_type = f.response.headers.get("content-type")
            if f.response.timestamp_end:
                # Clamp to zero to guard against negative values.
                duration = max(
                    [f.response.timestamp_end - f.request.timestamp_start, 0]
                )
            else:
                duration = None
        else:
            response_content_length = None
            response_code = None
            response_reason = None
            response_content_type = None
            duration = None
        scheme = f.request.scheme
        if f.websocket is not None:
            # Display WebSocket flows with ws:// / wss:// schemes instead.
            if scheme == "https":
                scheme = "wss"
            elif scheme == "http":
                scheme = "ws"
        if render_mode in (RenderMode.LIST, RenderMode.DETAILVIEW):
            render_func = format_http_flow_list
        else:
            render_func = format_http_flow_table
        return render_func(
            render_mode=render_mode,
            focused=focused,
            marked=f.marked,
            is_replay=f.is_replay,
            request_method=f.request.method,
            request_scheme=scheme,
            request_host=f.request.pretty_host if hostheader else f.request.host,
            request_path=f.request.path,
            request_url=f.request.pretty_url if hostheader else f.request.url,
            request_http_version=f.request.http_version,
            request_timestamp=f.request.timestamp_start,
            request_is_push_promise="h2-pushed-stream" in f.metadata,
            intercepted=intercepted,
            response_code=response_code,
            response_reason=response_reason,
            response_content_length=response_content_length,
            response_content_type=response_content_type,
            duration=duration,
            error_message=error_message,
        )
    else:
        raise NotImplementedError()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/console/searchable.py | mitmproxy/tools/console/searchable.py | import urwid
from mitmproxy.tools.console import signals
class Highlight(urwid.AttrMap):
    """Renders a Text widget with the "focusfield" attribute while keeping
    a reference to the original widget for later restoration."""

    def __init__(self, t):
        super().__init__(urwid.Text(t.text), "focusfield")
        self.backup = t
class Searchable(urwid.ListBox):
    """A ListBox with '/'-triggered text search and n/N next/prev navigation."""

    def __init__(self, contents):
        self.walker = urwid.SimpleFocusListWalker(contents)
        urwid.ListBox.__init__(self, self.walker)
        self.search_offset = 0
        self.current_highlight = None  # index of the currently highlighted row
        self.search_term = None
        self.last_search = None  # remembered so n/N can repeat a past search

    def keypress(self, size, key: str):
        # Handled keys return None (consumed); others fall through to urwid.
        if key == "/":
            signals.status_prompt.send(
                prompt="Search for", text="", callback=self.set_search
            )
        elif key == "n":
            self.find_next(False)
        elif key == "N":
            self.find_next(True)
        elif key == "m_start":
            self.set_focus(0)
            self.walker._modified()
        elif key == "m_end":
            self.set_focus(len(self.walker) - 1)
            self.walker._modified()
        else:
            return super().keypress(size, key)

    def set_search(self, text):
        """Prompt callback: store the term and jump to the first match."""
        self.last_search = text
        self.search_term = text or None
        self.find_next(False)

    def set_highlight(self, offset):
        """Highlight the row at *offset*, restoring any previous highlight;
        None clears the highlight entirely."""
        if self.current_highlight is not None:
            old = self.body[self.current_highlight]
            self.body[self.current_highlight] = old.backup
        if offset is None:
            self.current_highlight = None
        else:
            self.body[offset] = Highlight(self.body[offset])
            self.current_highlight = offset

    def get_text(self, w):
        """Return the searchable text of a row, or None for non-text rows."""
        if isinstance(w, urwid.Text):
            return w.text
        elif isinstance(w, Highlight):
            return w.backup.text
        else:
            return None

    def find_next(self, backwards: bool):
        """Move focus/highlight to the next (or previous) matching row,
        wrapping around the whole list."""
        if not self.search_term:
            if self.last_search:
                self.search_term = self.last_search
            else:
                self.set_highlight(None)
                return
        # Start search at focus + 1
        if backwards:
            rng = range(len(self.body) - 1, -1, -1)
        else:
            rng = range(1, len(self.body) + 1)
        for i in rng:
            off = (self.focus_position + i) % len(self.body)
            w = self.body[off]
            txt = self.get_text(w)
            if txt and self.search_term in txt:
                self.set_highlight(off)
                self.set_focus(off, coming_from="above")
                self.body._modified()
                return
        else:
            # No row matched anywhere in the list.
            self.set_highlight(None)
            signals.status_message.send(message="Search not found.", expire=1)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/console/signals.py | mitmproxy/tools/console/signals.py | from __future__ import annotations
from collections.abc import Callable
from typing import Union
from mitmproxy.utils import signals
StatusMessage = Union[tuple[str, str], str]
# The throwaway stub functions below exist only to give each SyncSignal a
# typed signature; they are never called directly.
# Show a status message in the action bar.
# Instead of using this signal directly, consider emitting a log event.
def _status_message(message: StatusMessage, expire: int = 5) -> None: ...
status_message = signals.SyncSignal(_status_message)
# Prompt for input.
def _status_prompt(
    prompt: str, text: str | None, callback: Callable[[str], None]
) -> None: ...
status_prompt = signals.SyncSignal(_status_prompt)
# Prompt for a single keystroke out of a list of (key, description) choices.
def _status_prompt_onekey(
    prompt: str, keys: list[tuple[str, str]], callback: Callable[[str], None]
) -> None: ...
status_prompt_onekey = signals.SyncSignal(_status_prompt_onekey)
# Prompt for a command, optionally pre-filled with *partial* text.
def _status_prompt_command(partial: str = "", cursor: int | None = None) -> None: ...
status_prompt_command = signals.SyncSignal(_status_prompt_command)
# Call a callback in N seconds.
def _call_in(seconds: float, callback: Callable[[], None]) -> None: ...
call_in = signals.SyncSignal(_call_in)
# Focus the body, footer or header of the main window.
focus = signals.SyncSignal(lambda section: None)
# Fired when settings change.
update_settings = signals.SyncSignal(lambda: None)
# Fired when a flow changes.
flow_change = signals.SyncSignal(lambda flow: None)
# Pop and push view state onto a stack.
pop_view_state = signals.SyncSignal(lambda: None)
# Fired when the window state changes.
window_refresh = signals.SyncSignal(lambda: None)
# Fired when the key bindings change.
keybindings_change = signals.SyncSignal(lambda: None)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/console/keybindings.py | mitmproxy/tools/console/keybindings.py | import textwrap
import urwid
from mitmproxy.tools.console import layoutwidget
from mitmproxy.tools.console import signals
from mitmproxy.utils import signals as utils_signals
HELP_HEIGHT = 5  # Rows reserved for the help pane at the bottom of the view.
class KeyItem(urwid.WidgetWrap):
    """One row in the key-binding list: key, contexts and bound command."""

    def __init__(self, walker, binding, focused):
        self.walker = walker
        self.binding = binding
        self.focused = focused
        super().__init__(self.get_widget())

    def get_widget(self):
        command_text = textwrap.dedent(self.binding.command).strip()
        columns = [
            (4, urwid.Text([("focus", ">> " if self.focused else "   ")])),
            (10, urwid.Text([("title", self.binding.key)])),
            (12, urwid.Text([("highlight", "\n".join(self.binding.contexts))])),
            urwid.Text([("text", command_text)]),
        ]
        return urwid.Columns(columns)

    def get_edit_text(self):
        return self._w[1].get_edit_text()

    def selectable(self):
        return True

    def keypress(self, size, key):
        # Let the containing list handle all keys.
        return key
class KeyListWalker(urwid.ListWalker):
    """urwid ListWalker over all key bindings.

    Reloads the keymap whenever the bindings change and publishes the focused
    binding's help text through *keybinding_focus_change*.
    """

    def __init__(self, master, keybinding_focus_change):
        self.keybinding_focus_change = keybinding_focus_change
        self.master = master
        self.index = 0
        # Fixed: this was `self.focusobj = None` — a misspelled attribute that
        # nothing ever read; set_focus() populates `focus_obj` below.
        self.focus_obj = None
        self.bindings = list(master.keymap.list("all"))
        self.set_focus(0)
        signals.keybindings_change.connect(self.sig_modified)

    def sig_modified(self):
        # Bindings changed: reload and clamp the focus into the new range.
        self.bindings = list(self.master.keymap.list("all"))
        self.set_focus(min(self.index, len(self.bindings) - 1))
        self._modified()

    def get_edit_text(self):
        return self.focus_obj.get_edit_text()

    def _get(self, pos):
        binding = self.bindings[pos]
        return KeyItem(self, binding, pos == self.index)

    def get_focus(self):
        return self.focus_obj, self.index

    def set_focus(self, index):
        binding = self.bindings[index]
        self.index = index
        self.focus_obj = self._get(self.index)
        # Publish the focused binding's help text for the help pane.
        self.keybinding_focus_change.send(binding.help or "")
        self._modified()

    def get_next(self, pos):
        if pos >= len(self.bindings) - 1:
            return None, None
        pos = pos + 1
        return self._get(pos), pos

    def get_prev(self, pos):
        pos = pos - 1
        if pos < 0:
            return None, None
        return self._get(pos), pos

    def positions(self, reverse=False):
        if reverse:
            return reversed(range(len(self.bindings)))
        else:
            return range(len(self.bindings))
class KeyList(urwid.ListBox):
    """Scrollable list of all key bindings."""

    def __init__(self, master, keybinding_focus_change):
        self.master = master
        self.walker = KeyListWalker(master, keybinding_focus_change)
        super().__init__(self.walker)

    def keypress(self, size, key):
        if key == "m_start":
            self.set_focus(0)
            self.walker._modified()
        elif key == "m_end":
            self.set_focus(len(self.walker.bindings) - 1)
            self.walker._modified()
        elif key == "m_select":
            foc, idx = self.get_focus()
            # Act here
        return super().keypress(size, key)
class KeyHelp(urwid.Frame):
    """Bottom pane showing help text for the focused key binding."""

    def __init__(self, master, keybinding_focus_change):
        self.master = master
        super().__init__(self.widget(""))
        self.set_active(False)
        keybinding_focus_change.connect(self.sig_mod)

    def set_active(self, val):
        style = "heading" if val else "heading_inactive"
        self.header = urwid.AttrMap(urwid.Text("Key Binding Help"), style)

    def widget(self, txt):
        # Wrap the help text to the current terminal width.
        cols, _ = self.master.ui.get_cols_rows()
        return urwid.ListBox([urwid.Text(line) for line in textwrap.wrap(txt, cols)])

    def sig_mod(self, txt):
        self.body = self.widget(txt)
class KeyBindings(urwid.Pile, layoutwidget.LayoutWidget):
    """Layout widget combining the key-binding list with a help pane."""

    title = "Key Bindings"
    keyctx = "keybindings"
    focus_position: int

    def __init__(self, master):
        # Local signal wiring the list's focus changes to the help pane.
        keybinding_focus_change = utils_signals.SyncSignal(lambda text: None)
        oh = KeyHelp(master, keybinding_focus_change)
        super().__init__(
            [
                KeyList(master, keybinding_focus_change),
                (HELP_HEIGHT, oh),
            ]
        )
        self.master = master

    def get_focused_binding(self):
        # Only meaningful while the list pane (position 0) has focus.
        if self.focus_position != 0:
            return None
        f = self.contents[0][0]
        return f.walker.get_focus()[0].binding

    def keypress(self, size, key):
        if key == "m_next":
            # Cycle focus between the list and the help pane.
            self.focus_position = (self.focus_position + 1) % len(self.widget_list)
            self.contents[1][0].set_active(self.focus_position == 1)
            key = None
        # This is essentially a copypasta from urwid.Pile's keypress handler.
        # So much for "closed for modification, but open for extension".
        item_rows = None
        if len(size) == 2:
            item_rows = self.get_item_rows(size, focus=True)
        tsize = self.get_item_size(size, self.focus_position, True, item_rows)
        return self.focus.keypress(tsize, key)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/console/commands.py | mitmproxy/tools/console/commands.py | import textwrap
import urwid
from mitmproxy import command
from mitmproxy.tools.console import layoutwidget
from mitmproxy.tools.console import signals
from mitmproxy.utils import signals as utils_signals
HELP_HEIGHT = 5  # Rows reserved for the help pane at the bottom of the view.
# Fired with the focused command's help text whenever the list focus moves.
command_focus_change = utils_signals.SyncSignal(lambda text: None)
class CommandItem(urwid.WidgetWrap):
    """One row in the command reference: name, parameters and return type."""

    def __init__(self, walker, cmd: command.Command, focused: bool):
        self.walker = walker
        self.cmd = cmd
        self.focused = focused
        super().__init__(self.get_widget())

    def get_widget(self):
        markup = [
            ("focus", ">> " if self.focused else "   "),
            ("title", self.cmd.name),
        ]
        if self.cmd.parameters:
            params = " ".join(str(param) for param in self.cmd.parameters)
            markup.append(("text", " "))
            markup.append(("text", params))
        if self.cmd.return_type:
            markup.append(("title", " -> "))
            markup.append(("text", command.typename(self.cmd.return_type)))
        return urwid.AttrMap(urwid.Padding(urwid.Text(markup)), "text")

    def get_edit_text(self):
        return self._w[1].get_edit_text()

    def selectable(self):
        return True

    def keypress(self, size, key):
        # Let the containing list handle all keys.
        return key
class CommandListWalker(urwid.ListWalker):
    """urwid ListWalker over all registered commands, sorted by signature."""

    def __init__(self, master):
        self.master = master
        self.index = 0
        self.refresh()

    def refresh(self):
        # Re-read the command registry; called whenever the view is shown.
        self.cmds = list(self.master.commands.commands.values())
        self.cmds.sort(key=lambda x: x.signature_help())
        self.set_focus(self.index)

    def get_edit_text(self):
        return self.focus_obj.get_edit_text()

    def _get(self, pos):
        cmd = self.cmds[pos]
        return CommandItem(self, cmd, pos == self.index)

    def get_focus(self):
        return self.focus_obj, self.index

    def set_focus(self, index: int) -> None:
        cmd = self.cmds[index]
        self.index = index
        self.focus_obj = self._get(self.index)
        # Publish the focused command's help text for the help pane.
        command_focus_change.send(cmd.help or "")

    def get_next(self, pos):
        if pos >= len(self.cmds) - 1:
            return None, None
        pos = pos + 1
        return self._get(pos), pos

    def get_prev(self, pos):
        pos = pos - 1
        if pos < 0:
            return None, None
        return self._get(pos), pos
class CommandsList(urwid.ListBox):
    """Scrollable list of all registered commands."""

    def __init__(self, master):
        self.master = master
        self.walker = CommandListWalker(master)
        super().__init__(self.walker)

    def keypress(self, size: int, key: str):
        if key == "m_start":
            self.set_focus(0)
            self.walker._modified()
        elif key == "m_end":
            self.set_focus(len(self.walker.cmds) - 1)
            self.walker._modified()
        elif key == "m_select":
            focused_item, _ = self.get_focus()
            # Pre-fill the command prompt with the selected command's name.
            signals.status_prompt_command.send(partial=focused_item.cmd.name + " ")
        return super().keypress(size, key)
class CommandHelp(urwid.Frame):
    """Help pane showing the help text of the currently focused command."""

    def __init__(self, master):
        self.master = master
        super().__init__(self.widget(""))
        self.set_active(False)
        # Re-render whenever the focused command changes.
        command_focus_change.connect(self.sig_mod)

    def set_active(self, val):
        """Restyle the pane header to reflect whether this pane has focus."""
        h = urwid.Text("Command Help")
        style = "heading" if val else "heading_inactive"
        self.header = urwid.AttrMap(h, style)

    def widget(self, txt):
        # Wrap the help text to the current terminal width.
        cols, _ = self.master.ui.get_cols_rows()
        return urwid.ListBox([urwid.Text(i) for i in textwrap.wrap(txt, cols)])

    def sig_mod(self, txt):
        self.body = self.widget(txt)
class Commands(urwid.Pile, layoutwidget.LayoutWidget):
    """Top-level command reference view: the command list stacked over a help pane."""

    title = "Command Reference"
    keyctx = "commands"
    focus_position: int

    def __init__(self, master):
        oh = CommandHelp(master)
        super().__init__(
            [
                CommandsList(master),
                (HELP_HEIGHT, oh),
            ]
        )
        self.master = master

    def layout_pushed(self, prev):
        # Refresh the command list every time this window is (re-)shown.
        self.widget_list[0].walker.refresh()

    def keypress(self, size, key):
        if key == "m_next":
            # Cycle focus between the list (index 0) and the help pane (index 1).
            self.focus_position = (self.focus_position + 1) % len(self.widget_list)
            self.widget_list[1].set_active(self.focus_position == 1)
            key = None
        # This is essentially a copypasta from urwid.Pile's keypress handler.
        # So much for "closed for modification, but open for extension".
        item_rows = None
        if len(size) == 2:
            item_rows = self.get_item_rows(size, focus=True)
        i = self.widget_list.index(self.focus_item)
        tsize = self.get_item_size(size, i, True, item_rows)
        return self.focus_item.keypress(tsize, key)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/console/layoutwidget.py | mitmproxy/tools/console/layoutwidget.py | from typing import ClassVar
class LayoutWidget:
    """
    All top-level layout widgets and all widgets that may be set in an
    overlay must comply with this API.
    """

    # Title is only required for windows, not overlay components
    title = ""
    # Key-binding context name used to scope keymap bindings to this view.
    keyctx: ClassVar[str] = ""

    def key_responder(self):
        """
        Returns the object responding to key input. Usually self, but may be
        a wrapped object.
        """
        return self

    def focus_changed(self):
        """
        The view focus has changed. Layout objects should implement the API
        rather than directly subscribing to events.
        """

    def view_changed(self):
        """
        The view list has changed.
        """

    def layout_popping(self):
        """
        We are just about to pop a window off the stack, or exit an overlay.
        """

    def layout_pushed(self, prev):
        """
        We have just pushed a window onto the stack.
        """
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/console/keymap.py | mitmproxy/tools/console/keymap.py | import logging
import os
from collections import defaultdict
from collections.abc import Sequence
from functools import cache
import ruamel.yaml.error
import mitmproxy.types
from mitmproxy import command
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy.tools.console import commandexecutor
from mitmproxy.tools.console import signals
class KeyBindingError(Exception):
    """Raised when a key-binding configuration is invalid or cannot be parsed."""

    pass
# The set of valid key-binding contexts (views a binding can be scoped to).
Contexts = {
    "chooser",
    "commands",
    "commonkey",
    "dataviewer",
    "eventlog",
    "flowlist",
    "flowview",
    "global",
    "grideditor",
    "help",
    "keybindings",
    "options",
}

# Abstract navigation keys; the "m_*" entries are synthetic keys produced by
# the console's own navigation commands rather than raw terminal input.
navkeys = [
    "m_start",
    "m_end",
    "m_next",
    "m_select",
    "up",
    "down",
    "page_up",
    "page_down",
    "left",
    "right",
]
class Binding:
    """A single key binding: a key, the command it runs, and its contexts."""

    def __init__(self, key, command, contexts, help):
        self.key = key
        self.command = command
        # Contexts are kept sorted so sort keys and comparisons are stable.
        self.contexts = sorted(contexts)
        self.help = help

    def keyspec(self):
        """
        Translate the key spec from a convenient user specification to one
        Urwid understands.
        """
        return self.key.replace("space", " ")

    def key_short(self) -> str:
        """Compact display form of the key, substituting symbols for key names."""
        short = self.key
        for name, symbol in (("enter", "⏎"), ("right", "→"), ("space", "␣")):
            short = short.replace(name, symbol)
        return short

    def sortkey(self):
        """Sort key: the raw key followed by the comma-joined context list."""
        return self.key + ",".join(self.contexts)
class Keymap:
    """Maps key presses to commands, scoped by binding context."""

    def __init__(self, master):
        self.executor = commandexecutor.CommandExecutor(master)
        # context name -> {keyspec -> Binding}
        self.keys: dict[str, dict[str, Binding]] = defaultdict(dict)
        # Flat list of all Binding objects currently installed.
        self.bindings = []

    def _check_contexts(self, contexts):
        # Validate that contexts is non-empty and every entry is a known context.
        if not contexts:
            raise ValueError("Must specify at least one context.")
        for c in contexts:
            if c not in Contexts:
                raise ValueError("Unsupported context: %s" % c)

    def _on_change(self) -> None:
        # Notify the UI and drop memoized help lookups after any mutation.
        signals.keybindings_change.send()
        self.binding_for_help.cache_clear()

    def add(self, key: str, command: str, contexts: Sequence[str], help="") -> None:
        """
        Add a key to the key map.
        """
        self._check_contexts(contexts)
        for b in self.bindings:
            if b.key == key and b.command.strip() == command.strip():
                # Same key+command already bound: merge contexts instead of duplicating.
                b.contexts = sorted(list(set(b.contexts + contexts)))
                if help:
                    b.help = help
                self.bind(b)
                break
        else:
            # New binding: displace any existing binding for this key in these contexts.
            self.remove(key, contexts)
            b = Binding(key=key, command=command, contexts=contexts, help=help)
            self.bindings.append(b)
            self.bind(b)
        self._on_change()

    def remove(self, key: str, contexts: Sequence[str]) -> None:
        """
        Remove a key from the key map.
        """
        self._check_contexts(contexts)
        for c in contexts:
            b = self.get(c, key)
            if b:
                # unbind() drops the binding entirely; re-add it if it remains
                # active in contexts we were not asked to remove.
                self.unbind(b)
                b.contexts = [x for x in b.contexts if x != c]
                if b.contexts:
                    self.bindings.append(b)
                    self.bind(b)
        self._on_change()

    def bind(self, binding: Binding) -> None:
        # Register the binding in the per-context lookup tables.
        for c in binding.contexts:
            self.keys[c][binding.keyspec()] = binding

    def unbind(self, binding: Binding) -> None:
        """
        Unbind also removes the binding from the list.
        """
        for c in binding.contexts:
            del self.keys[c][binding.keyspec()]
        self.bindings = [b for b in self.bindings if b != binding]
        self._on_change()

    def get(self, context: str, key: str) -> Binding | None:
        # Look up the binding for key in context, or None if unbound.
        if context in self.keys:
            return self.keys[context].get(key, None)
        return None

    @cache
    def binding_for_help(self, help: str) -> Binding | None:
        # NOTE(review): @cache on an instance method keys on self and keeps this
        # Keymap alive for the cache's lifetime (ruff B019). Presumably acceptable
        # because a single Keymap lives for the whole app, and _on_change() clears
        # the cache — confirm before reusing this pattern elsewhere.
        for b in self.bindings:
            if b.help == help:
                return b
        return None

    def list(self, context: str) -> Sequence[Binding]:
        """Return bindings for *context* ("all" matches every context):
        single-key bindings first, each group sorted by sortkey()."""
        b = [x for x in self.bindings if context in x.contexts or context == "all"]
        single = [x for x in b if len(x.key.split()) == 1]
        multi = [x for x in b if len(x.key.split()) != 1]
        single.sort(key=lambda x: x.sortkey())
        multi.sort(key=lambda x: x.sortkey())
        return single + multi

    def handle(self, context: str, key: str) -> str | None:
        """
        Returns the key if it has not been handled, or None.
        """
        # Context-specific bindings take precedence over global ones.
        b = self.get(context, key) or self.get("global", key)
        if b:
            self.executor(b.command)
            return None
        return key

    def handle_only(self, context: str, key: str) -> str | None:
        """
        Like handle, but ignores global bindings. Returns the key if it has
        not been handled, or None.
        """
        b = self.get(context, key)
        if b:
            self.executor(b.command)
            return None
        return key
# Per-attribute validators for entries in the keys.yaml binding config.
keyAttrs = {
    "key": lambda x: isinstance(x, str),
    "cmd": lambda x: isinstance(x, str),
    # Bug fix: this previously evaluated to the raw list of per-element
    # isinstance() results, which is truthy for any non-empty list — so
    # non-string context entries passed validation. Require all elements to be
    # strings. (An empty list now passes type validation here; it is still
    # rejected later by Keymap._check_contexts.)
    "ctx": lambda x: isinstance(x, list) and all(isinstance(v, str) for v in x),
    "help": lambda x: isinstance(x, str),
}
# Attributes that every binding entry must define.
requiredKeyAttrs = {"key", "cmd"}
class KeymapConfig:
    """Addon that loads user-defined key bindings from a YAML config file."""

    defaultFile = "keys.yaml"

    def __init__(self, master):
        self.master = master

    @command.command("console.keymap.load")
    def keymap_load_path(self, path: mitmproxy.types.Path) -> None:
        """Load key bindings from an explicit file path (console command)."""
        try:
            self.load_path(self.master.keymap, path)  # type: ignore
        except (OSError, KeyBindingError) as e:
            raise exceptions.CommandError("Could not load key bindings - %s" % e) from e

    def running(self):
        # On startup, load the user's keys.yaml from the config dir if present.
        p = os.path.join(os.path.expanduser(ctx.options.confdir), self.defaultFile)
        if os.path.exists(p):
            try:
                self.load_path(self.master.keymap, p)
            except KeyBindingError as e:
                logging.error(e)

    def load_path(self, km, p):
        """Parse the file at *p* and apply each binding to the keymap *km*.

        Raises:
            KeyBindingError: on encoding, parse, or validation failures.
        """
        if os.path.exists(p) and os.path.isfile(p):
            with open(p, encoding="utf8") as f:
                try:
                    txt = f.read()
                except UnicodeDecodeError as e:
                    raise KeyBindingError(f"Encoding error - expected UTF8: {p}: {e}")
            try:
                vals = self.parse(txt)
            except KeyBindingError as e:
                raise KeyBindingError(f"Error reading {p}: {e}") from e
            for v in vals:
                user_ctxs = v.get("ctx", ["global"])
                try:
                    km._check_contexts(user_ctxs)
                    # Remove first so the user's binding replaces any default.
                    km.remove(v["key"], user_ctxs)
                    km.add(
                        key=v["key"],
                        command=v["cmd"],
                        contexts=user_ctxs,
                        help=v.get("help", None),
                    )
                except ValueError as e:
                    raise KeyBindingError(f"Error reading {p}: {e}") from e

    def parse(self, text):
        """Parse YAML key-binding text into a list of binding dicts.

        Raises:
            KeyBindingError: on YAML syntax errors, a non-list document,
                unknown or missing attributes, or values of the wrong type.
        """
        try:
            data = ruamel.yaml.YAML(typ="safe", pure=True).load(text)
        except ruamel.yaml.error.MarkedYAMLError as v:
            if hasattr(v, "problem_mark"):
                snip = v.problem_mark.get_snippet()
                raise KeyBindingError(
                    "Key binding config error at line %s:\n%s\n%s"
                    % (v.problem_mark.line + 1, snip, v.problem)
                )
            else:
                raise KeyBindingError("Could not parse key bindings.")
        if not data:
            return []
        if not isinstance(data, list):
            raise KeyBindingError("Invalid keybinding config - expected a list of keys")
        for k in data:
            unknown = k.keys() - keyAttrs.keys()
            if unknown:
                raise KeyBindingError("Unknown key attributes: %s" % unknown)
            missing = requiredKeyAttrs - k.keys()
            if missing:
                # Bug fix: this previously interpolated `unknown` (necessarily
                # empty here) instead of the actually missing attribute names.
                raise KeyBindingError("Missing required key attributes: %s" % missing)
            for attr in k.keys():
                if not keyAttrs[attr](k[attr]):
                    raise KeyBindingError("Invalid type for %s" % attr)
        return data
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/console/__init__.py | mitmproxy/tools/console/__init__.py | from mitmproxy.tools.console import master
# Public API of the console tool package: only the master module is exported.
__all__ = ["master"]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/console/defaultkeys.py | mitmproxy/tools/console/defaultkeys.py | from mitmproxy.tools.console.keymap import Keymap
def map(km: Keymap) -> None:
    """Install mitmproxy console's default key bindings into *km*.

    Bindings are grouped below by the view context(s) they apply to. Order
    matters: a later km.add for the same key/context replaces the earlier one.
    (This function intentionally shadows the builtin ``map``; it is the
    module's public entry point.)
    """
    # --- Command prompt and top-level views ---
    km.add(":", "console.command ", ["commonkey", "global"], "Command prompt")
    km.add(
        ";",
        "console.command flow.comment @focus ''",
        ["flowlist", "flowview"],
        "Add comment to flow",
    )
    km.add("?", "console.view.help", ["global"], "View help")
    km.add("B", "browser.start", ["global"], "Start an attached browser")
    km.add("C", "console.view.commands", ["global"], "View commands")
    km.add("K", "console.view.keybindings", ["global"], "View key bindings")
    km.add("O", "console.view.options", ["commonkey", "global"], "View options")
    km.add("E", "console.view.eventlog", ["commonkey", "global"], "View event log")
    km.add("Q", "console.exit", ["global"], "Exit immediately")
    km.add("q", "console.view.pop", ["commonkey", "global"], "Exit the current view")
    km.add("esc", "console.view.pop", ["commonkey", "global"], "Exit the current view")
    km.add("-", "console.layout.cycle", ["global"], "Cycle to next layout")
    km.add("ctrl right", "console.panes.next", ["global"], "Focus next layout pane")
    km.add("ctrl left", "console.panes.prev", ["global"], "Focus previous layout pane")
    km.add("shift tab", "console.panes.next", ["global"], "Focus next layout pane")
    km.add("P", "console.view.flow @focus", ["global"], "View flow details")
    km.add("?", "console.view.pop", ["help"], "Exit help")
    # --- Navigation ---
    km.add("g", "console.nav.start", ["global"], "Go to start")
    km.add("G", "console.nav.end", ["global"], "Go to end")
    km.add("k", "console.nav.up", ["global"], "Up")
    km.add("j", "console.nav.down", ["global"], "Down")
    km.add("l", "console.nav.right", ["global"], "Right")
    km.add("h", "console.nav.left", ["global"], "Left")
    km.add("tab", "console.nav.next", ["commonkey", "global"], "Next")
    km.add("enter", "console.nav.select", ["commonkey", "global"], "Select")
    km.add("space", "console.nav.pagedown", ["global"], "Page down")
    km.add("ctrl f", "console.nav.pagedown", ["global"], "Page down")
    km.add("ctrl b", "console.nav.pageup", ["global"], "Page up")
    # --- Interception and streaming ---
    km.add(
        "I",
        "set intercept_active toggle",
        ["global"],
        "Toggle whether the filtering via the intercept option is enabled",
    )
    km.add("i", "console.command.set intercept", ["global"], "Set intercept")
    km.add("W", "console.command.set save_stream_file", ["global"], "Stream to file")
    # --- Flow list and flow view ---
    km.add(
        "A",
        "flow.resume @all",
        ["flowlist", "flowview"],
        "Resume all intercepted flows",
    )
    km.add(
        "a",
        "flow.resume @focus",
        ["flowlist", "flowview"],
        "Resume this intercepted flow",
    )
    km.add(
        "b",
        "console.command cut.save @focus response.content ",
        ["flowlist", "flowview"],
        "Save response body to file",
    )
    km.add(
        "d",
        "view.flows.remove @focus",
        ["flowlist", "flowview"],
        "Delete flow from view",
    )
    km.add(
        "D", "view.flows.duplicate @focus", ["flowlist", "flowview"], "Duplicate flow"
    )
    km.add(
        "x",
        """
        console.choose.cmd "Export as..." export.formats
        console.command export.file {choice} @focus
        """,
        ["flowlist", "flowview"],
        "Export this flow to file",
    )
    # --- Flow list only ---
    km.add("f", "console.command.set view_filter", ["flowlist"], "Set view filter")
    km.add(
        "F",
        "set console_focus_follow toggle",
        ["flowlist", "flowview"],
        "Set focus follow",
    )
    km.add(
        "ctrl l",
        "console.command cut.clip ",
        ["flowlist", "flowview"],
        "Send cuts to clipboard",
    )
    km.add(
        "L", "console.command view.flows.load ", ["flowlist"], "Load flows from file"
    )
    km.add("m", "flow.mark.toggle @focus", ["flowlist"], "Toggle mark on this flow")
    km.add(
        "M",
        "view.properties.marked.toggle",
        ["flowlist"],
        "Toggle viewing marked flows",
    )
    km.add(
        "n",
        "console.command view.flows.create get https://example.com/",
        ["flowlist"],
        "Create a new flow",
    )
    km.add(
        "o",
        """
        console.choose.cmd "Order flows by..." view.order.options
        set view_order {choice}
        """,
        ["flowlist"],
        "Set flow list order",
    )
    km.add("r", "replay.client @focus", ["flowlist", "flowview"], "Replay this flow")
    km.add("S", "console.command replay.server ", ["flowlist"], "Start server replay")
    km.add(
        "v", "set view_order_reversed toggle", ["flowlist"], "Reverse flow list order"
    )
    km.add("U", "flow.mark @all false", ["flowlist"], "Un-set all marks")
    km.add(
        "w",
        "console.command save.file @shown ",
        ["flowlist"],
        "Save listed flows to file",
    )
    km.add(
        "V",
        "flow.revert @focus",
        ["flowlist", "flowview"],
        "Revert changes to this flow",
    )
    km.add("X", "flow.kill @focus", ["flowlist"], "Kill this flow")
    km.add(
        "z",
        'console.command.confirm "Delete all flows" view.flows.remove @all',
        ["flowlist"],
        "Clear flow list",
    )
    km.add(
        "Z",
        'console.command.confirm "Purge all hidden flows" view.flows.remove @hidden',
        ["flowlist"],
        "Purge all flows not showing",
    )
    km.add(
        "|",
        "console.command script.run @focus ",
        ["flowlist", "flowview"],
        "Run a script on this flow",
    )
    km.add(
        "e",
        """
        console.choose.cmd "Edit..." console.edit.focus.options
        console.edit.focus {choice}
        """,
        ["flowlist", "flowview"],
        "Edit a flow component",
    )
    # --- Flow view only ---
    km.add(
        "f",
        "view.settings.setval.toggle @focus fullcontents",
        ["flowview"],
        "Toggle viewing full contents on this flow",
    )
    km.add("w", "console.command save.file @focus ", ["flowview"], "Save flow to file")
    km.add("space", "view.focus.next", ["flowview"], "Go to next flow")
    km.add(
        "v",
        """
        console.choose "View..." request,response
        console.bodyview @focus {choice}
        """,
        ["flowview"],
        "View flow body in an external viewer",
    )
    km.add("p", "view.focus.prev", ["flowview"], "Go to previous flow")
    km.add(
        "m",
        """
        console.choose.cmd "Set contentview..." console.flowview.mode.options
        console.flowview.mode.set {choice}
        """,
        ["flowview"],
        "Set flow view mode",
    )
    km.add(
        "z",
        """
        console.choose "Encode/decode..." request,response
        flow.encode.toggle @focus {choice}
        """,
        ["flowview"],
        "Encode/decode flow body",
    )
    # --- Options view ---
    km.add("L", "console.command options.load ", ["options"], "Load from file")
    km.add("S", "console.command options.save ", ["options"], "Save to file")
    km.add("D", "options.reset", ["options"], "Reset all options")
    km.add("d", "console.options.reset.focus", ["options"], "Reset this option")
    # --- Grid editor ---
    km.add("a", "console.grideditor.add", ["grideditor"], "Add a row after cursor")
    km.add(
        "A", "console.grideditor.insert", ["grideditor"], "Insert a row before cursor"
    )
    km.add("d", "console.grideditor.delete", ["grideditor"], "Delete this row")
    km.add(
        "r",
        "console.command console.grideditor.load",
        ["grideditor"],
        "Read unescaped data into the current cell from file",
    )
    km.add(
        "R",
        "console.command console.grideditor.load_escaped",
        ["grideditor"],
        "Load a Python-style escaped string into the current cell from file",
    )
    km.add("e", "console.grideditor.editor", ["grideditor"], "Edit in external editor")
    km.add(
        "w",
        "console.command console.grideditor.save ",
        ["grideditor"],
        "Save data to file as CSV",
    )
    # --- Event log ---
    km.add(
        "z",
        'console.command.confirm "Clear event log" eventstore.clear',
        ["eventlog"],
        "Clear",
    )
    # --- Key bindings view ---
    km.add(
        "a",
        """
        console.choose.cmd "Context" console.key.contexts
        console.command console.key.bind {choice}
        """,
        ["keybindings"],
        "Add a key binding",
    )
    km.add(
        "d",
        "console.key.unbind.focus",
        ["keybindings"],
        "Unbind the currently focused key binding",
    )
    km.add(
        "x",
        "console.key.execute.focus",
        ["keybindings"],
        "Execute the currently focused key binding",
    )
    km.add(
        "enter",
        "console.key.edit.focus",
        ["keybindings"],
        "Edit the currently focused key binding",
    )
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/console/flowview.py | mitmproxy/tools/console/flowview.py | import sys
from functools import lru_cache
import urwid
import mitmproxy.flow
import mitmproxy.tools.console.master
import mitmproxy_rs.syntax_highlight
from mitmproxy import contentviews
from mitmproxy import ctx
from mitmproxy import dns
from mitmproxy import http
from mitmproxy import tcp
from mitmproxy import udp
from mitmproxy.dns import DNSMessage
from mitmproxy.tools.console import common
from mitmproxy.tools.console import flowdetailview
from mitmproxy.tools.console import layoutwidget
from mitmproxy.tools.console import searchable
from mitmproxy.tools.console import tabs
from mitmproxy.utils import strutils
class SearchError(Exception):
    """Raised on search failures in the flow view.

    NOTE(review): not raised anywhere in this module — presumably used by
    callers or kept for backwards compatibility; confirm before removing.
    """

    pass
class FlowViewHeader(urwid.WidgetWrap):
    """Header line above the flow detail view, summarizing the focused flow."""

    def __init__(
        self,
        master: "mitmproxy.tools.console.master.ConsoleMaster",
    ) -> None:
        self.master = master
        self.focus_changed()

    def focus_changed(self):
        """Re-render the header for the newly focused flow (empty if none)."""
        # NOTE(review): cols is computed but unused here.
        cols, _ = self.master.ui.get_cols_rows()
        if self.master.view.focus.flow:
            self._w = common.format_flow(
                self.master.view.focus.flow,
                render_mode=common.RenderMode.DETAILVIEW,
                hostheader=self.master.options.showhost,
            )
        else:
            self._w = urwid.Pile([])
class FlowDetails(tabs.Tabs):
    """Tabbed detail view for the focused flow (HTTP, TCP, UDP or DNS)."""

    def __init__(self, master):
        self.master = master
        super().__init__([])
        self.show()
        self.last_displayed_body = None
        # Cached Searchable widget for WebSocket messages so streaming updates
        # can replace its contents in place (see view_websocket_messages).
        self.last_displayed_websocket_messages = None
        contentviews.registry.on_change.connect(self.contentview_changed)

    @property
    def view(self):
        return self.master.view

    @property
    def flow(self) -> mitmproxy.flow.Flow:
        return self.master.view.focus.flow

    def contentview_changed(self, view):
        # this is called when a contentview addon is live-reloaded.
        # we clear our cache and then rerender
        self._get_content_view.cache_clear()
        if self.master.window.current_window("flowview"):
            self.show()

    def focus_changed(self):
        """Rebuild the tab set to match the type of the newly focused flow."""
        f = self.flow
        if f:
            if isinstance(f, http.HTTPFlow):
                if f.websocket:
                    self.tabs = [
                        (self.tab_http_request, self.view_request),
                        (self.tab_http_response, self.view_response),
                        (self.tab_websocket_messages, self.view_websocket_messages),
                        (self.tab_details, self.view_details),
                    ]
                else:
                    self.tabs = [
                        (self.tab_http_request, self.view_request),
                        (self.tab_http_response, self.view_response),
                        (self.tab_details, self.view_details),
                    ]
            elif isinstance(f, tcp.TCPFlow):
                self.tabs = [
                    (self.tab_tcp_stream, self.view_message_stream),
                    (self.tab_details, self.view_details),
                ]
            elif isinstance(f, udp.UDPFlow):
                self.tabs = [
                    (self.tab_udp_stream, self.view_message_stream),
                    (self.tab_details, self.view_details),
                ]
            elif isinstance(f, dns.DNSFlow):
                self.tabs = [
                    (self.tab_dns_request, self.view_dns_request),
                    (self.tab_dns_response, self.view_dns_response),
                    (self.tab_details, self.view_details),
                ]
            self.show()
        else:
            # Get the top window from the focus stack (the currently active view).
            # If it's NOT the "flowlist", it's safe to pop back to the previous view.
            if self.master.window.focus_stack().stack[-1] != "flowlist":
                self.master.window.pop()
            # If it is the "flowlist", we’re already at the main view with no flows to show.
            # Popping now would close the last window and prompt app exit, so we remain on the empty flow list screen instead.

    def tab_http_request(self):
        """Tab title for the HTTP request, marking interception."""
        flow = self.flow
        assert isinstance(flow, http.HTTPFlow)
        if self.flow.intercepted and not flow.response:
            return "Request intercepted"
        else:
            return "Request"

    def tab_http_response(self):
        """Tab title for the HTTP response, marking interception."""
        flow = self.flow
        assert isinstance(flow, http.HTTPFlow)
        # there is no good way to detect what part of the flow is intercepted,
        # so we apply some heuristics to see if it's the HTTP response.
        websocket_started = flow.websocket and len(flow.websocket.messages) != 0
        response_is_intercepted = (
            self.flow.intercepted and flow.response and not websocket_started
        )
        if response_is_intercepted:
            return "Response intercepted"
        else:
            return "Response"

    def tab_dns_request(self) -> str:
        flow = self.flow
        assert isinstance(flow, dns.DNSFlow)
        if self.flow.intercepted and not flow.response:
            return "Request intercepted"
        else:
            return "Request"

    def tab_dns_response(self) -> str:
        flow = self.flow
        assert isinstance(flow, dns.DNSFlow)
        if self.flow.intercepted and flow.response:
            return "Response intercepted"
        else:
            return "Response"

    def tab_tcp_stream(self):
        return "TCP Stream"

    def tab_udp_stream(self):
        return "UDP Stream"

    def tab_websocket_messages(self):
        flow = self.flow
        assert isinstance(flow, http.HTTPFlow)
        assert flow.websocket
        if self.flow.intercepted and len(flow.websocket.messages) != 0:
            return "WebSocket Messages intercepted"
        else:
            return "WebSocket Messages"

    def tab_details(self):
        return "Detail"

    def view_request(self):
        flow = self.flow
        assert isinstance(flow, http.HTTPFlow)
        return self.conn_text(flow.request)

    def view_response(self):
        flow = self.flow
        assert isinstance(flow, http.HTTPFlow)
        return self.conn_text(flow.response)

    def view_dns_request(self):
        flow = self.flow
        assert isinstance(flow, dns.DNSFlow)
        return self.dns_message_text("request", flow.request)

    def view_dns_response(self):
        flow = self.flow
        assert isinstance(flow, dns.DNSFlow)
        return self.dns_message_text("response", flow.response)

    def _contentview_status_bar(self, description: str, viewmode: str):
        """One-line status bar: contentview description left, mode hint right."""
        cols = [
            urwid.Text(
                [
                    ("heading", description),
                ]
            ),
            urwid.Text(
                [
                    " ",
                    ("heading", "["),
                    ("heading_key", "m"),
                    ("heading", (":%s]" % viewmode)),
                ],
                align="right",
            ),
        ]
        contentview_status_bar = urwid.AttrMap(urwid.Columns(cols), "heading")
        return contentview_status_bar

    # Direction markers prepended to each message line.
    FROM_CLIENT_MARKER = ("from_client", f"{common.SYMBOL_FROM_CLIENT} ")
    TO_CLIENT_MARKER = ("to_client", f"{common.SYMBOL_TO_CLIENT} ")

    def view_websocket_messages(self):
        """Render all WebSocket messages, reusing the previous widget if any."""
        flow = self.flow
        assert isinstance(flow, http.HTTPFlow)
        assert flow.websocket is not None
        if not flow.websocket.messages:
            return searchable.Searchable([urwid.Text(("highlight", "No messages."))])
        viewmode = self.master.commands.call("console.flowview.mode")
        widget_lines = []
        for m in flow.websocket.messages:
            pretty = contentviews.prettify_message(m, flow, viewmode)
            chunks = mitmproxy_rs.syntax_highlight.highlight(
                pretty.text,
                language=pretty.syntax_highlight,
            )
            if m.from_client:
                marker = self.FROM_CLIENT_MARKER
            else:
                marker = self.TO_CLIENT_MARKER
            widget_lines.append(urwid.Text([marker, *chunks]))
        if flow.websocket.closed_by_client is not None:
            widget_lines.append(
                urwid.Text(
                    [
                        (
                            self.FROM_CLIENT_MARKER
                            if flow.websocket.closed_by_client
                            else self.TO_CLIENT_MARKER
                        ),
                        (
                            # 1000/1001/1005 are normal closes; style others as errors.
                            "alert"
                            if flow.websocket.close_code in (1000, 1001, 1005)
                            else "error",
                            f"Connection closed: {flow.websocket.close_code} {flow.websocket.close_reason}",
                        ),
                    ]
                )
            )
        if flow.intercepted:
            # Highlight the most recent (held) message.
            markup = widget_lines[-1].get_text()[0]
            widget_lines[-1].set_text(("intercept", markup))
        widget_lines.insert(
            0, self._contentview_status_bar(viewmode.capitalize(), viewmode)
        )
        if (last_view := self.last_displayed_websocket_messages) is not None:
            # Replace contents in place so scroll position survives updates.
            last_view.walker[:] = widget_lines
            view = last_view
        else:
            view = searchable.Searchable(widget_lines)
            self.last_displayed_websocket_messages = view
        return view

    def view_message_stream(self) -> urwid.Widget:
        """Render the message stream of a TCP or UDP flow."""
        flow = self.flow
        assert isinstance(flow, (tcp.TCPFlow, udp.UDPFlow))
        if not flow.messages:
            return searchable.Searchable([urwid.Text(("highlight", "No messages."))])
        viewmode = self.master.commands.call("console.flowview.mode")
        widget_lines = []
        for m in flow.messages:
            if m.from_client:
                marker = self.FROM_CLIENT_MARKER
            else:
                marker = self.TO_CLIENT_MARKER
            pretty = contentviews.prettify_message(m, flow, viewmode)
            chunks = mitmproxy_rs.syntax_highlight.highlight(
                pretty.text,
                language=pretty.syntax_highlight,
            )
            widget_lines.append(urwid.Text([marker, *chunks]))
        if flow.intercepted:
            # Highlight the most recent (held) message.
            markup = widget_lines[-1].get_text()[0]
            widget_lines[-1].set_text(("intercept", markup))
        widget_lines.insert(
            0, self._contentview_status_bar(viewmode.capitalize(), viewmode)
        )
        return searchable.Searchable(widget_lines)

    def view_details(self):
        return flowdetailview.flowdetails(self.view, self.flow)

    def content_view(
        self, viewmode: str, message: http.Message
    ) -> tuple[str, list[urwid.Text]]:
        """Return (description, rendered lines) for an HTTP message body."""
        if message.raw_content is None:
            return "", [urwid.Text([("error", "[content missing]")])]
        if message.raw_content == b"":
            if isinstance(message, http.Request):
                query = getattr(message, "query", "")
                if not query:
                    # No body and no query params
                    return "", [urwid.Text("No request content")]
                # else: there are query params -> fall through to render them
            else:
                return "", [urwid.Text("No content")]
        full = self.master.commands.execute(
            "view.settings.getval @focus fullcontents false"
        )
        if full == "true":
            limit = sys.maxsize
        else:
            limit = ctx.options.content_view_lines_cutoff
        flow_modify_cache_invalidation = hash(
            (
                message.raw_content,
                message.headers.fields,
                getattr(message, "path", None),
            )
        )
        # we need to pass the message off-band because it's not hashable
        self._get_content_view_message = message
        return self._get_content_view(viewmode, limit, flow_modify_cache_invalidation)

    @lru_cache(maxsize=200)
    def _get_content_view(
        self, viewmode: str, max_lines: int, _
    ) -> tuple[str, list[urwid.Text]]:
        # The third argument is only a cache key (content/headers/path hash);
        # the actual message arrives via self._get_content_view_message above.
        message: http.Message = self._get_content_view_message
        self._get_content_view_message = None  # type: ignore[assignment]
        pretty = contentviews.prettify_message(message, self.flow, viewmode)
        cut_off = strutils.cut_after_n_lines(pretty.text, max_lines)
        chunks = mitmproxy_rs.syntax_highlight.highlight(
            cut_off,
            language=pretty.syntax_highlight,
        )
        text_objects = [urwid.Text(chunks)]
        if len(cut_off) < len(pretty.text):
            # Output was truncated; tell the user how to load everything.
            text_objects.append(
                urwid.Text(
                    [
                        (
                            "highlight",
                            "Stopped displaying data after %d lines. Press "
                            % max_lines,
                        ),
                        ("key", "f"),
                        ("highlight", " to load all data."),
                    ]
                )
            )
        return f"{pretty.view_name} {pretty.description}", text_objects

    def conn_text(self, conn):
        """Render headers plus body for an HTTP request/response, or a hint if absent."""
        if conn:
            hdrs = []
            for k, v in conn.headers.fields:
                # This will always force an ascii representation of headers. For example, if the server sends a
                #
                #     X-Authors: Made with ❤ in Hamburg
                #
                # header, mitmproxy will display the following:
                #
                #     X-Authors: Made with \xe2\x9d\xa4 in Hamburg.
                #
                # The alternative would be to just use the header's UTF-8 representation and maybe
                # do `str.replace("\t", "\\t")` to exempt tabs from urwid's special characters escaping [1].
                # That would in some terminals allow rendering UTF-8 characters, but the mapping
                # wouldn't be bijective, i.e. a user couldn't distinguish "\\t" and "\t".
                # Also, from a security perspective, a mitmproxy user couldn't be fooled by homoglyphs.
                #
                # 1) https://github.com/mitmproxy/mitmproxy/issues/1833
                #    https://github.com/urwid/urwid/blob/6608ee2c9932d264abd1171468d833b7a4082e13/urwid/display_common.py#L35-L36,
                k = strutils.bytes_to_escaped_str(k) + ":"
                v = strutils.bytes_to_escaped_str(v)
                hdrs.append((k, v))
            txt = common.format_keyvals(hdrs, key_format="header")
            viewmode = self.master.commands.call("console.flowview.mode")
            msg, body = self.content_view(viewmode, conn)
            txt.append(self._contentview_status_bar(msg, viewmode))
            txt.extend(body)
        else:
            txt = [
                urwid.Text(""),
                urwid.Text(
                    [
                        ("highlight", "No response. Press "),
                        ("key", "e"),
                        ("highlight", " and edit any aspect to add one."),
                    ]
                ),
            ]
        return searchable.Searchable(txt)

    def dns_message_text(
        self, type: str, message: DNSMessage | None
    ) -> searchable.Searchable:
        """
        Alternative:
            if not message:
                return searchable.Searchable([urwid.Text(("highlight", f"No {typ}."))])
            viewmode = self.master.commands.call("console.flowview.mode")
            pretty = contentviews.prettify_message(message, flow, viewmode)
            chunks = mitmproxy_rs.syntax_highlight.highlight(
                pretty.text,
                language=pretty.syntax_highlight,
            )
            widget_lines = [
                self._contentview_status_bar(viewmode.capitalize(), viewmode),
                urwid.Text(chunks)
            ]
            return searchable.Searchable(widget_lines)
        """
        # Keep in sync with web/src/js/components/FlowView/DnsMessages.tsx
        if message:

            def rr_text(rr: dns.ResourceRecord):
                return urwid.Text(
                    f"  {rr.name} {dns.types.to_str(rr.type)} {dns.classes.to_str(rr.class_)} {rr.ttl} {rr}"
                )

            txt = []
            txt.append(
                urwid.Text(
                    "{recursive}Question".format(
                        recursive="Recursive " if message.recursion_desired else "",
                    )
                )
            )
            txt.extend(
                urwid.Text(
                    f"  {q.name} {dns.types.to_str(q.type)} {dns.classes.to_str(q.class_)}"
                )
                for q in message.questions
            )
            txt.append(urwid.Text(""))
            txt.append(
                urwid.Text(
                    "{authoritative}{recursive}Answer".format(
                        authoritative="Authoritative "
                        if message.authoritative_answer
                        else "",
                        recursive="Recursive " if message.recursion_available else "",
                    )
                )
            )
            txt.extend(map(rr_text, message.answers))
            txt.append(urwid.Text(""))
            txt.append(urwid.Text("Authority"))
            txt.extend(map(rr_text, message.authorities))
            txt.append(urwid.Text(""))
            txt.append(urwid.Text("Addition"))
            txt.extend(map(rr_text, message.additionals))
            return searchable.Searchable(txt)
        else:
            return searchable.Searchable([urwid.Text(("highlight", f"No {type}."))])
class FlowView(urwid.Frame, layoutwidget.LayoutWidget):
    """Top-level flow detail window: a FlowDetails body under a FlowViewHeader."""

    keyctx = "flowview"
    title = "Flow Details"

    def __init__(self, master):
        super().__init__(
            FlowDetails(master),
            header=FlowViewHeader(master),
        )
        self.master = master

    def focus_changed(self, *args, **kwargs):
        # Forward focus changes to both body and header so both re-render.
        self.body.focus_changed()
        self.header.focus_changed()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/console/quickhelp.py | mitmproxy/tools/console/quickhelp.py | """
This module is responsible for drawing the quick key help at the bottom of mitmproxy.
"""
from dataclasses import dataclass
from typing import Union
import urwid
from mitmproxy import flow
from mitmproxy.http import HTTPFlow
from mitmproxy.tools.console.eventlog import EventLog
from mitmproxy.tools.console.flowlist import FlowListBox
from mitmproxy.tools.console.flowview import FlowView
from mitmproxy.tools.console.grideditor.base import FocusEditor
from mitmproxy.tools.console.help import HelpView
from mitmproxy.tools.console.keybindings import KeyBindings
from mitmproxy.tools.console.keymap import Keymap
from mitmproxy.tools.console.options import Options
@dataclass
class BasicKeyHelp:
    """Quick help for urwid-builtin keybindings (i.e. those keys that do not appear in the keymap)"""

    # Short key representation shown in the help bar (e.g. "⏎").
    key: str
# Short help-bar label -> full keymap help text, or a BasicKeyHelp for builtin keys.
HelpItems = dict[str, Union[str, BasicKeyHelp]]
"""
A mapping from the short text that should be displayed in the help bar to the full help text provided for the key
binding. The order of the items in the dictionary determines the order in which they are displayed in the help bar.
Some help items explain builtin urwid functionality, so there is no key binding for them. In this case, the value
is a BasicKeyHelp object.
"""
@dataclass
class QuickHelp:
    """Contents of the two-row quick help bar shown at the bottom of the screen."""

    top_label: str
    top_items: HelpItems
    bottom_label: str
    bottom_items: HelpItems

    def make_rows(self, keymap: Keymap) -> tuple[urwid.Columns, urwid.Columns]:
        """Render both help rows as urwid Columns, resolving keys via *keymap*."""
        top = _make_row(self.top_label, self.top_items, keymap)
        bottom = _make_row(self.bottom_label, self.bottom_items, keymap)
        return top, bottom
def make(
    widget: type[urwid.Widget],
    focused_flow: flow.Flow | None,
    is_root_widget: bool,
) -> QuickHelp:
    """Assemble the quick help for the currently focused widget.

    Args:
        widget: The widget type that currently has focus.
        focused_flow: The flow under the cursor, if any.
        is_root_widget: True if the widget is the root of the window stack,
            which turns "Back" into "Quit" in the bottom row.

    Returns:
        A QuickHelp whose labels are padded to a common width so rows align.
    """
    top_label = ""
    top_items: HelpItems = {}
    if widget in (FlowListBox, FlowView):
        top_label = "Flow:"
        if focused_flow:
            if widget == FlowListBox:
                top_items["Select"] = "Select"
            else:
                top_items["Edit"] = "Edit a flow component"
            top_items |= {
                "Duplicate": "Duplicate flow",
                "Replay": "Replay this flow",
                "Export": "Export this flow to file",
                "Delete": "Delete flow from view",
            }
            if widget == FlowListBox:
                if focused_flow.marked:
                    top_items["Unmark"] = "Toggle mark on this flow"
                else:
                    top_items["Mark"] = "Toggle mark on this flow"
                top_items["Edit"] = "Edit a flow component"
            if focused_flow.intercepted:
                top_items["Resume"] = "Resume this intercepted flow"
            if focused_flow.modified():
                top_items["Restore"] = "Revert changes to this flow"
            if isinstance(focused_flow, HTTPFlow) and focused_flow.response:
                top_items["Save body"] = "Save response body to file"
            if widget == FlowView:
                top_items |= {
                    "Next flow": "Go to next flow",
                    "Prev flow": "Go to previous flow",
                }
        else:
            top_items |= {
                "Load flows": "Load flows from file",
                "Create new": "Create a new flow",
            }
    elif widget == KeyBindings:
        top_label = "Keybindings:"
        top_items |= {
            "Add": "Add a key binding",
            "Edit": "Edit the currently focused key binding",
            "Delete": "Unbind the currently focused key binding",
            "Execute": "Execute the currently focused key binding",
        }
    elif widget == Options:
        top_label = "Options:"
        top_items |= {
            "Edit": BasicKeyHelp("⏎"),
            "Reset": "Reset this option",
            "Reset all": "Reset all options",
            "Load file": "Load from file",
            "Save file": "Save to file",
        }
    elif widget == HelpView:
        top_label = "Help:"
        top_items |= {
            "Scroll down": BasicKeyHelp("↓"),
            "Scroll up": BasicKeyHelp("↑"),
            "Exit help": "Exit help",
            "Next tab": BasicKeyHelp("tab"),
        }
    elif widget == EventLog:
        top_label = "Events:"
        top_items |= {
            "Scroll down": BasicKeyHelp("↓"),
            "Scroll up": BasicKeyHelp("↑"),
            "Clear": "Clear",
        }
    elif issubclass(widget, FocusEditor):
        # (was a pointless f-string with no placeholders; ruff F541)
        top_label = "Edit:"
        top_items |= {
            "Start edit": BasicKeyHelp("⏎"),
            "Stop edit": BasicKeyHelp("esc"),
            "Add row": "Add a row after cursor",
            "Delete row": "Delete this row",
        }
    # Unknown widgets get an empty top row; the bottom row below is generic.

    bottom_label = "Proxy:"
    bottom_items: HelpItems = {
        "Help": "View help",
    }
    if is_root_widget:
        bottom_items["Quit"] = "Exit the current view"
    else:
        bottom_items["Back"] = "Exit the current view"
    bottom_items |= {
        "Events": "View event log",
        "Options": "View options",
        "Intercept": "Set intercept",
        "Filter": "Set view filter",
    }
    if focused_flow:
        bottom_items |= {
            "Save flows": "Save listed flows to file",
            "Clear list": "Clear flow list",
        }
    bottom_items |= {
        "Layout": "Cycle to next layout",
        "Switch": "Focus next layout pane",
        "Follow new": "Set focus follow",
    }

    # Pad both labels to a common width so the two rows line up.
    label_len = max(len(top_label), len(bottom_label), 8) + 1
    top_label = top_label.ljust(label_len)
    bottom_label = bottom_label.ljust(label_len)
    return QuickHelp(top_label, top_items, bottom_label, bottom_items)
def _make_row(label: str, items: HelpItems, keymap: Keymap) -> urwid.Columns:
    """Build one help row: a label column followed by fixed-width key/caption cells.

    Items whose help text has no binding in *keymap* are silently skipped.
    """
    cols = [(len(label), urwid.Text(label))]
    for short, long in items.items():
        if isinstance(long, BasicKeyHelp):
            # Builtin urwid key: the caption is supplied directly.
            key_short = long.key
        else:
            binding = keymap.binding_for_help(long)
            if binding is None:
                continue
            key_short = binding.key_short()
        cell = urwid.Text(
            [("heading_inactive", key_short), " ", short],
            wrap="clip",
        )
        cols.append((14, cell))
    return urwid.Columns(cols)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/console/statusbar.py | mitmproxy/tools/console/statusbar.py | from __future__ import annotations
from collections.abc import Callable
from functools import lru_cache
import urwid
import mitmproxy.tools.console.master
from mitmproxy.tools.console import commandexecutor
from mitmproxy.tools.console import common
from mitmproxy.tools.console import flowlist
from mitmproxy.tools.console import quickhelp
from mitmproxy.tools.console import signals
from mitmproxy.tools.console.commander import commander
from mitmproxy.utils import human
@lru_cache
def shorten_message(
msg: tuple[str, str] | str, max_width: int
) -> list[tuple[str, str]]:
"""
Shorten message so that it fits into a single line in the statusbar.
"""
if isinstance(msg, tuple):
disp_attr, msg_text = msg
elif isinstance(msg, str):
msg_text = msg
disp_attr = ""
else:
raise AssertionError(f"Unexpected message type: {type(msg)}")
msg_end = "\u2026" # unicode ellipsis for the end of shortened message
prompt = "(more in eventlog)"
msg_lines = msg_text.split("\n")
first_line = msg_lines[0]
if len(msg_lines) > 1:
# First line of messages with a few lines must end with prompt.
line_length = len(first_line) + len(prompt)
else:
line_length = len(first_line)
if line_length > max_width:
shortening_index = max(0, max_width - len(prompt) - len(msg_end))
first_line = first_line[:shortening_index] + msg_end
else:
if len(msg_lines) == 1:
prompt = ""
return [(disp_attr, first_line), ("warn", prompt)]
class ActionBar(urwid.WidgetWrap):
    """The two bottom rows of the console UI.

    Normally shows the quick key help; the rows are temporarily taken over by
    status messages, free-text prompts, one-key prompts, or the command editor.
    """

    def __init__(self, master: mitmproxy.tools.console.master.ConsoleMaster) -> None:
        self.master = master
        self.top = urwid.WidgetWrap(urwid.Text(""))
        self.bottom = urwid.WidgetWrap(urwid.Text(""))
        super().__init__(urwid.Pile([self.top, self.bottom]))
        self.show_quickhelp()
        signals.status_message.connect(self.sig_message)
        signals.status_prompt.connect(self.sig_prompt)
        signals.status_prompt_onekey.connect(self.sig_prompt_onekey)
        signals.status_prompt_command.connect(self.sig_prompt_command)
        signals.window_refresh.connect(self.sig_update)
        master.view.focus.sig_change.connect(self.sig_update)
        master.view.sig_view_update.connect(self.sig_update)
        # Non-None while a prompt is active; called with the entered text.
        self.prompting: Callable[[str], None] | None = None
        # For one-key prompts: the set of keys that complete the prompt.
        self.onekey: set[str] | None = None

    def sig_update(self, flow=None) -> None:
        # Redraw the quick help on generic refreshes (flow is None) or when
        # the update concerns the focused flow -- but never while a prompt is
        # active, since that would overwrite the prompt widgets.
        # Bug fix: the previous condition read
        #   `not self.prompting and flow is None or flow == ...`
        # which, due to `and` binding tighter than `or`, bypassed the
        # `not self.prompting` guard whenever the focused flow updated.
        if not self.prompting and (
            flow is None or flow == self.master.view.focus.flow
        ):
            self.show_quickhelp()

    def sig_message(
        self, message: tuple[str, str] | str, expire: int | None = 1
    ) -> None:
        # Display a transient status message; ignored while prompting so the
        # prompt is not overwritten.
        if self.prompting:
            return
        cols, _ = self.master.ui.get_cols_rows()
        w = urwid.Text(shorten_message(message, cols))
        self.top._w = w
        self.bottom._w = urwid.Text("")
        if expire:

            def cb():
                # Only restore the quick help if this message is still showing.
                if w == self.top._w:
                    self.show_quickhelp()

            signals.call_in.send(seconds=expire, callback=cb)

    def sig_prompt(
        self, prompt: str, text: str | None, callback: Callable[[str], None]
    ) -> None:
        # Free-text prompt; `callback` receives the entered string.
        signals.focus.send(section="footer")
        self.top._w = urwid.Edit(f"{prompt.strip()}: ", text or "")
        self.bottom._w = urwid.Text("")
        self.prompting = callback

    def sig_prompt_command(self, partial: str = "", cursor: int | None = None) -> None:
        # Open the command editor (the ":" prompt), optionally pre-filled.
        signals.focus.send(section="footer")
        self.top._w = commander.CommandEdit(
            self.master,
            partial,
        )
        if cursor is not None:
            self.top._w.cbuf.cursor = cursor
        self.bottom._w = urwid.Text("")
        self.prompting = self.execute_command

    def execute_command(self, txt: str) -> None:
        # Record non-empty commands in history, then run them.
        if txt.strip():
            self.master.commands.call("commands.history.add", txt)
        execute = commandexecutor.CommandExecutor(self.master)
        execute(txt)

    def sig_prompt_onekey(
        self, prompt: str, keys: list[tuple[str, str]], callback: Callable[[str], None]
    ) -> None:
        """
        Keys are a set of (word, key) tuples. The appropriate key in the
        word is highlighted.
        """
        signals.focus.send(section="footer")
        parts = [prompt, " ("]
        mkup = []
        for i, e in enumerate(keys):
            mkup.extend(common.highlight_key(e[0], e[1]))
            if i < len(keys) - 1:
                mkup.append(",")
        parts.extend(mkup)
        parts.append(")? ")
        self.onekey = {i[1] for i in keys}
        self.top._w = urwid.Edit(parts, "")
        self.bottom._w = urwid.Text("")
        self.prompting = callback

    def selectable(self) -> bool:
        return True

    def keypress(self, size, k):
        # While prompting, keys drive the prompt; otherwise they fall through.
        if self.prompting:
            if k == "esc":
                self.prompt_done()
            elif self.onekey:
                if k == "enter":
                    self.prompt_done()
                elif k in self.onekey:
                    self.prompt_execute(k)
            elif k == "enter":
                text = self.top._w.get_edit_text()
                self.prompt_execute(text)
            else:
                if common.is_keypress(k):
                    self.top._w.keypress(size, k)
                else:
                    return k

    def show_quickhelp(self) -> None:
        # Render quick help for the currently focused widget and flow.
        if w := self.master.window:
            s = w.focus_stack()
            focused_widget = type(s.top_widget())
            is_top_widget = len(s.stack) == 1
        else:  # on startup
            focused_widget = flowlist.FlowListBox
            is_top_widget = True
        focused_flow = self.master.view.focus.flow
        qh = quickhelp.make(focused_widget, focused_flow, is_top_widget)
        self.top._w, self.bottom._w = qh.make_rows(self.master.keymap)

    def prompt_done(self) -> None:
        # Leave prompt mode and return focus to the main body.
        self.prompting = None
        self.onekey = None
        self.show_quickhelp()
        signals.focus.send(section="body")

    def prompt_execute(self, txt) -> None:
        callback = self.prompting
        assert callback is not None
        self.prompt_done()
        # A truthy return value from the callback is shown as a status message.
        msg = callback(txt)
        if msg:
            signals.status_message.send(message=msg, expire=1)
class StatusBar(urwid.WidgetWrap):
    """Top status line (proxy state summary) stacked above the ActionBar."""

    REFRESHTIME = 0.5  # Timed refresh time in seconds
    keyctx = ""

    def __init__(self, master: mitmproxy.tools.console.master.ConsoleMaster) -> None:
        self.master = master
        # ib: the information bar (summary line); ab: the interactive action bar.
        self.ib = urwid.WidgetWrap(urwid.Text(""))
        self.ab = ActionBar(self.master)
        super().__init__(urwid.Pile([self.ib, self.ab]))
        signals.flow_change.connect(self.sig_update)
        signals.update_settings.connect(self.sig_update)
        master.options.changed.connect(self.sig_update)
        master.view.focus.sig_change.connect(self.sig_update)
        master.view.sig_view_add.connect(self.sig_update)
        self.refresh()

    def refresh(self) -> None:
        # Periodic redraw so time-dependent state stays current; reschedules itself.
        self.redraw()
        signals.call_in.send(seconds=self.REFRESHTIME, callback=self.refresh)

    def sig_update(self, *args, **kwargs) -> None:
        self.redraw()

    def keypress(self, *args, **kwargs):
        # All key handling is delegated to the action bar (prompts etc.).
        return self.ab.keypress(*args, **kwargs)

    def get_status(self) -> list[tuple[str, str] | str]:
        """Build the markup fragments summarizing active options and addons.

        Each "[...]" fragment highlights a mnemonic key with the "heading_key"
        display attribute.
        """
        r: list[tuple[str, str] | str] = []
        sreplay = self.master.commands.call("replay.server.count")
        creplay = self.master.commands.call("replay.client.count")
        if len(self.master.options.modify_headers):
            r.append("[")
            r.append(("heading_key", "H"))
            r.append("eaders]")
        if len(self.master.options.modify_body):
            r.append("[%d body modifications]" % len(self.master.options.modify_body))
        if creplay:
            r.append("[")
            r.append(("heading_key", "cplayback"))
            r.append(":%s]" % creplay)
        if sreplay:
            r.append("[")
            r.append(("heading_key", "splayback"))
            r.append(":%s]" % sreplay)
        if self.master.options.ignore_hosts:
            r.append("[")
            r.append(("heading_key", "I"))
            r.append("gnore:%d]" % len(self.master.options.ignore_hosts))
        elif self.master.options.allow_hosts:
            r.append("[")
            r.append(("heading_key", "A"))
            r.append("llow:%d]" % len(self.master.options.allow_hosts))
        if self.master.options.tcp_hosts:
            r.append("[")
            r.append(("heading_key", "T"))
            r.append("CP:%d]" % len(self.master.options.tcp_hosts))
        if self.master.options.intercept:
            r.append("[")
            # "X" marks an intercept expression that is currently disabled.
            if not self.master.options.intercept_active:
                r.append("X")
            r.append(("heading_key", "i"))
            r.append(":%s]" % self.master.options.intercept)
        if self.master.options.view_filter:
            r.append("[")
            r.append(("heading_key", "f"))
            r.append(":%s]" % self.master.options.view_filter)
        if self.master.options.stickycookie:
            r.append("[")
            r.append(("heading_key", "t"))
            r.append(":%s]" % self.master.options.stickycookie)
        if self.master.options.stickyauth:
            r.append("[")
            r.append(("heading_key", "u"))
            r.append(":%s]" % self.master.options.stickyauth)
        if self.master.options.console_default_contentview != "auto":
            r.append(
                "[contentview:%s]" % (self.master.options.console_default_contentview)
            )
        if self.master.options.has_changed("view_order"):
            r.append("[")
            r.append(("heading_key", "o"))
            r.append(":%s]" % self.master.options.view_order)
        # Boolean/misc options are collapsed into one colon-joined fragment.
        opts = []
        if self.master.options.anticache:
            opts.append("anticache")
        if self.master.options.anticomp:
            opts.append("anticomp")
        if self.master.options.showhost:
            opts.append("showhost")
        if not self.master.options.server_replay_refresh:
            opts.append("norefresh")
        if not self.master.options.upstream_cert:
            opts.append("no-upstream-cert")
        if self.master.options.console_focus_follow:
            opts.append("following")
        if self.master.options.stream_large_bodies:
            opts.append(self.master.options.stream_large_bodies)
        if opts:
            r.append("[%s]" % (":".join(opts)))
        if self.master.options.mode != ["regular"]:
            if len(self.master.options.mode) == 1:
                r.append(f"[{self.master.options.mode[0]}]")
            else:
                r.append(f"[modes:{len(self.master.options.mode)}]")
        if self.master.options.scripts:
            r.append("[scripts:%s]" % len(self.master.options.scripts))
        if self.master.options.save_stream_file:
            r.append("[W:%s]" % self.master.options.save_stream_file)
        return r

    def redraw(self) -> None:
        """Re-render the information bar: position, status flags, listen addresses."""
        fc = self.master.commands.execute("view.properties.length")
        if self.master.view.focus.index is None:
            offset = 0
        else:
            # 1-based position of the focused flow for display.
            offset = self.master.view.focus.index + 1
        if self.master.options.view_order_reversed:
            arrow = common.SYMBOL_UP
        else:
            arrow = common.SYMBOL_DOWN
        marked = ""
        if self.master.commands.execute("view.properties.marked"):
            marked = "M"
        t: list[tuple[str, str] | str] = [
            ("heading", f"{arrow} {marked} [{offset}/{fc}]".ljust(11)),
        ]
        # Deduplicate listen addresses while preserving order.
        listen_addrs: list[str] = list(
            dict.fromkeys(
                human.format_address(a)
                for a in self.master.addons.get("proxyserver").listen_addrs()
            )
        )
        if listen_addrs:
            boundaddr = f"[{', '.join(listen_addrs)}]"
        else:
            boundaddr = ""
        t.extend(self.get_status())
        status = urwid.AttrMap(
            urwid.Columns(
                [
                    urwid.Text(t),
                    urwid.Text(boundaddr, align="right"),
                ]
            ),
            "heading",
        )
        self.ib._w = status

    def selectable(self) -> bool:
        return True
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/console/flowlist.py | mitmproxy/tools/console/flowlist.py | from functools import lru_cache
import urwid
import mitmproxy.tools.console.master
from mitmproxy.tools.console import common
from mitmproxy.tools.console import layoutwidget
class FlowItem(urwid.WidgetWrap):
    """A single row in the flow list, rendered for the current layout."""

    def __init__(self, master, flow):
        self.master, self.flow = master, flow
        urwid.WidgetWrap.__init__(self, self.get_text())

    def get_text(self):
        """Format this flow for display, honoring the flowlist layout option."""
        width, _ = self.master.ui.get_cols_rows()
        layout = self.master.options.console_flowlist_layout
        # Narrow terminals fall back to the single-column list layout.
        use_list = layout == "list" or (layout == "default" and width < 100)
        mode = common.RenderMode.LIST if use_list else common.RenderMode.TABLE
        return common.format_flow(
            self.flow,
            render_mode=mode,
            focused=self.flow is self.master.view.focus.flow,
            hostheader=self.master.options.showhost,
        )

    def selectable(self):
        return True

    def mouse_event(self, size, event, button, col, row, focus):
        # Left click opens the flow detail view.
        if event == "mouse press" and button == 1:
            self.master.commands.execute("console.view.flow @focus")
            return True

    def keypress(self, size, key):
        return key
class FlowListWalker(urwid.ListWalker):
    """urwid ListWalker backed directly by the master's flow view."""

    master: "mitmproxy.tools.console.master.ConsoleMaster"

    def __init__(self, master):
        self.master = master

    def positions(self, reverse=False):
        # The stub implementation of positions can go once this issue is resolved:
        # https://github.com/urwid/urwid/issues/294
        ret = range(self.master.view.get_length())
        if reverse:
            return reversed(ret)
        return ret

    def view_changed(self):
        # Invalidate both urwid's cached rows and our own item cache.
        self._modified()
        self._get.cache_clear()

    def get_focus(self):
        if not self.master.view.focus.flow:
            return None, 0
        f = FlowItem(self.master, self.master.view.focus.flow)
        return f, self.master.view.focus.index

    def set_focus(self, index):
        # Only move focus when the index is within the current view bounds.
        if self.master.commands.execute("view.properties.inbounds %d" % index):
            self.master.view.focus.index = index

    # NOTE(review): lru_cache on a method keeps `self` alive via the cache
    # (ruff B019); presumably acceptable here since there is one walker per
    # master and the cache is explicitly cleared in view_changed() -- confirm.
    @lru_cache(maxsize=None)
    def _get(self, pos: int) -> tuple[FlowItem | None, int | None]:
        # Cache constructed FlowItems per position until the view changes.
        if not self.master.view.inbounds(pos):
            return None, None
        return FlowItem(self.master, self.master.view[pos]), pos

    def get_next(self, pos):
        return self._get(pos + 1)

    def get_prev(self, pos):
        return self._get(pos - 1)
class FlowListBox(urwid.ListBox, layoutwidget.LayoutWidget):
    """The scrolling list of flows shown in the main window."""

    title = "Flows"
    keyctx = "flowlist"

    def __init__(self, master: "mitmproxy.tools.console.master.ConsoleMaster") -> None:
        self.master: "mitmproxy.tools.console.master.ConsoleMaster" = master
        super().__init__(FlowListWalker(master))
        # Re-render when the user switches between table/list layouts.
        self.master.options.subscribe(
            self.set_flowlist_layout, ["console_flowlist_layout"]
        )

    def keypress(self, size, key):
        # Translate mapped meta-keys into view commands; all keys still fall
        # through to the underlying ListBox for scrolling behavior.
        if key == "m_start":
            self.master.commands.execute("view.focus.go 0")
        elif key == "m_end":
            self.master.commands.execute("view.focus.go -1")
        elif key == "m_select":
            self.master.commands.execute("console.view.flow @focus")
        return urwid.ListBox.keypress(self, size, key)

    def view_changed(self):
        self.body.view_changed()

    def set_flowlist_layout(self, *_) -> None:
        # Clearing the screen forces a full redraw with the new layout.
        self.master.ui.clear()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/console/tabs.py | mitmproxy/tools/console/tabs.py | import urwid
class Tab(urwid.WidgetWrap):
    """A single clickable tab header."""

    def __init__(self, offset, content, attr, onclick):
        """
        onclick is called on click with the tab offset as argument
        """
        text = urwid.Text(content, align="center")
        padded = urwid.Padding(text, align="center", width=("relative", 100))
        urwid.WidgetWrap.__init__(self, urwid.AttrMap(padded, attr))
        self.offset = offset
        self.onclick = onclick

    def mouse_event(self, size, event, button, col, row, focus):
        # Left click activates this tab.
        if event == "mouse press" and button == 1:
            self.onclick(self.offset)
            return True
class Tabs(urwid.WidgetWrap):
    """A tabbed container: a row of Tab headers above the active tab's body.

    `tabs` is a list of (title_fn, body_fn) pairs; both are called on each
    redraw so titles and bodies stay current.
    """

    def __init__(self, tabs, tab_offset=0):
        super().__init__(urwid.Pile([]))
        self.tab_offset = tab_offset
        self.tabs = tabs
        self.show()

    def change_tab(self, offset):
        self.tab_offset = offset
        self.show()

    def keypress(self, size, key):
        n = len(self.tabs)
        # "m_next" and "right" both advance; "left" goes back (wrapping).
        if key in ("m_next", "right"):
            self.change_tab((self.tab_offset + 1) % n)
        elif key == "left":
            self.change_tab((self.tab_offset - 1) % n)
        return self._w.keypress(size, key)

    def show(self):
        """Rebuild the header row and body for the currently active tab."""
        if not self.tabs:
            return
        active = self.tab_offset % len(self.tabs)
        headers = []
        for i, (title_fn, _body_fn) in enumerate(self.tabs):
            style = "heading" if i == active else "heading_inactive"
            headers.append(Tab(i, title_fn(), style, self.change_tab))
        header_row = urwid.Columns(headers, dividechars=1)
        self._w = urwid.Frame(body=self.tabs[active][1](), header=header_row)
        self._w.focus_position = "body"
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/tools/console/flowdetailview.py | mitmproxy/tools/console/flowdetailview.py | import urwid
import mitmproxy.flow
from mitmproxy import http
from mitmproxy.tools.console import common
from mitmproxy.tools.console import searchable
from mitmproxy.utils import human
from mitmproxy.utils import strutils
def maybe_timestamp(base, attr):
    """Format ``getattr(base, attr)`` as a millisecond timestamp string.

    Returns an ndash placeholder when the object is missing or the value is
    falsy (e.g. the timestamp was never set).
    """
    value = getattr(base, attr) if base is not None else None
    if value:
        return human.format_timestamp_with_milli(value)
    # in mitmdump we serialize before a connection is closed.
    # loading those flows at a later point shouldn't display "active".
    # We also use a ndash (and not a regular dash) so that it is sorted
    # after other timestamps. We may need to revisit that in the future if it turns out
    # to render ugly in consoles.
    return "–"
def flowdetails(state, flow: mitmproxy.flow.Flow):
    """Build the searchable "Details" tab content for a flow.

    Sections (each emitted only when the underlying data is present):
    comment, metadata, server connection + certificate, client connection,
    and a timing table sorted chronologically.
    """
    text = []

    sc = flow.server_conn
    cc = flow.client_conn
    req: http.Request | None
    resp: http.Response | None
    if isinstance(flow, http.HTTPFlow):
        req = flow.request
        resp = flow.response
    else:
        # Non-HTTP flows (tcp/udp/dns) have no request/response pair.
        req = None
        resp = None
    metadata = flow.metadata
    comment = flow.comment

    if comment:
        text.append(urwid.Text([("head", "Comment: "), ("text", comment)]))

    if metadata is not None and len(metadata) > 0:
        parts = [(str(k), repr(v)) for k, v in metadata.items()]
        text.append(urwid.Text([("head", "Metadata:")]))
        text.extend(common.format_keyvals(parts, indent=4))

    if sc is not None and sc.peername:
        text.append(urwid.Text([("head", "Server Connection:")]))
        parts = [
            ("Address", human.format_address(sc.address)),
        ]
        if sc.peername:
            parts.append(("Resolved Address", human.format_address(sc.peername)))
        if resp:
            parts.append(("HTTP Version", resp.http_version))
        if sc.alpn:
            parts.append(("ALPN", strutils.bytes_to_escaped_str(sc.alpn)))
        text.extend(common.format_keyvals(parts, indent=4))

        # Show the leaf certificate of the server's chain, if any.
        if sc.certificate_list:
            c = sc.certificate_list[0]
            text.append(urwid.Text([("head", "Server Certificate:")]))
            parts = [
                ("Type", "%s, %s bits" % c.keyinfo),
                ("SHA256 digest", c.fingerprint().hex(" ")),
                ("Valid from", str(c.notbefore)),
                ("Valid to", str(c.notafter)),
                ("Serial", str(c.serial)),
                (
                    "Subject",
                    urwid.Pile(
                        common.format_keyvals(c.subject, key_format="highlight")
                    ),
                ),
                (
                    "Issuer",
                    urwid.Pile(common.format_keyvals(c.issuer, key_format="highlight")),
                ),
            ]
            if c.altnames:
                parts.append(("Alt names", ", ".join(str(x.value) for x in c.altnames)))
            text.extend(common.format_keyvals(parts, indent=4))

    if cc is not None:
        text.append(urwid.Text([("head", "Client Connection:")]))
        parts = [
            ("Address", human.format_address(cc.peername)),
        ]
        if req:
            parts.append(("HTTP Version", req.http_version))
        if cc.tls_version:
            parts.append(("TLS Version", cc.tls_version))
        if cc.sni:
            parts.append(("Server Name Indication", cc.sni))
        if cc.cipher:
            parts.append(("Cipher Name", cc.cipher))
        if cc.alpn:
            parts.append(("ALPN", strutils.bytes_to_escaped_str(cc.alpn)))
        text.extend(common.format_keyvals(parts, indent=4))

    # Timing table: collect (label, formatted-timestamp) pairs.
    parts = []

    if cc is not None and cc.timestamp_start:
        parts.append(
            ("Client conn. established", maybe_timestamp(cc, "timestamp_start"))
        )
        if cc.tls_established:
            parts.append(
                (
                    "Client conn. TLS handshake",
                    maybe_timestamp(cc, "timestamp_tls_setup"),
                )
            )
        parts.append(("Client conn. closed", maybe_timestamp(cc, "timestamp_end")))

    if sc is not None and sc.timestamp_start:
        parts.append(("Server conn. initiated", maybe_timestamp(sc, "timestamp_start")))
        parts.append(
            ("Server conn. TCP handshake", maybe_timestamp(sc, "timestamp_tcp_setup"))
        )
        if sc.tls_established:
            parts.append(
                (
                    "Server conn. TLS handshake",
                    maybe_timestamp(sc, "timestamp_tls_setup"),
                )
            )
        parts.append(("Server conn. closed", maybe_timestamp(sc, "timestamp_end")))

    if req is not None and req.timestamp_start:
        parts.append(("First request byte", maybe_timestamp(req, "timestamp_start")))
        parts.append(("Request complete", maybe_timestamp(req, "timestamp_end")))

    if resp is not None and resp.timestamp_start:
        parts.append(("First response byte", maybe_timestamp(resp, "timestamp_start")))
        parts.append(("Response complete", maybe_timestamp(resp, "timestamp_end")))

    if parts:
        # sort operations by timestamp
        # (string sort; the ndash placeholder sorts after digit timestamps)
        parts = sorted(parts, key=lambda p: p[1])
        text.append(urwid.Text([("head", "Timing:")]))
        text.extend(common.format_keyvals(parts, indent=4))

    return searchable.Searchable(text)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.