repo stringlengths 7 90 | file_url stringlengths 81 315 | file_path stringlengths 4 228 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 14:38:15 2026-01-05 02:33:18 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/addons/maplocal.py | mitmproxy/addons/maplocal.py | import logging
import mimetypes
import re
import urllib.parse
from collections.abc import Sequence
from pathlib import Path
from typing import NamedTuple
from werkzeug.security import safe_join
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import flowfilter
from mitmproxy import http
from mitmproxy import version
from mitmproxy.utils.spec import parse_spec
class MapLocalSpec(NamedTuple):
    """One parsed `map_local` option: flow filter + URL regex + local path."""

    # Flow filter that must match for this spec to apply.
    matches: flowfilter.TFilter
    # Regular expression matched against the request URL.
    regex: str
    # Existing local file or directory that content is served from.
    local_path: Path
def parse_map_local_spec(option: str) -> MapLocalSpec:
    """Parse a `map_local` option of the form "[/flow-filter]/url-regex/path".

    Raises:
        ValueError: if the regex does not compile or the path does not exist.
    """
    # Renamed from `filter`, which shadowed the builtin of the same name.
    flt, regex, replacement = parse_spec(option)

    try:
        re.compile(regex)
    except re.error as e:
        # Chain the original error so the regex failure stays debuggable.
        raise ValueError(f"Invalid regular expression {regex!r} ({e})") from e

    try:
        # strict=True ensures the mapped path actually exists at parse time.
        path = Path(replacement).expanduser().resolve(strict=True)
    except FileNotFoundError as e:
        raise ValueError(f"Invalid file path: {replacement} ({e})") from e

    return MapLocalSpec(flt, regex, path)
def _safe_path_join(root: Path, untrusted: str) -> Path:
    """Join *root* with an untrusted path string.

    Thin wrapper around werkzeug's ``safe_join`` that raises ``ValueError``
    (instead of returning ``None``) when the untrusted path is malformed or
    would escape *root*.
    """
    candidate = safe_join(root.as_posix(), *Path(untrusted).parts)
    if candidate is not None:
        return Path(candidate)
    raise ValueError("Untrusted paths.")
def file_candidates(url: str, spec: MapLocalSpec) -> list[Path]:
    """
    Get all potential file candidates given a URL and a mapping spec ordered by preference.
    This function already assumes that the spec regex matches the URL.
    """
    m = re.search(spec.regex, url)
    assert m
    if m.groups():
        # A capture group in the regex explicitly selects the path suffix.
        suffix = m.group(1)
    else:
        # No capture group: use everything after the regex match.
        suffix = re.split(spec.regex, url, maxsplit=1)[1]
        suffix = suffix.split("?")[0]  # remove query string
    suffix = suffix.strip("/")
    if suffix:
        decoded_suffix = urllib.parse.unquote(suffix)
        suffix_candidates = [decoded_suffix, f"{decoded_suffix}/index.html"]

        # Also try a sanitized variant with unusual characters replaced by
        # underscores, in case files were saved to disk in escaped form.
        escaped_suffix = re.sub(r"[^0-9a-zA-Z\-_.=(),/]", "_", decoded_suffix)
        if decoded_suffix != escaped_suffix:
            suffix_candidates.extend([escaped_suffix, f"{escaped_suffix}/index.html"])
        try:
            return [_safe_path_join(spec.local_path, x) for x in suffix_candidates]
        except ValueError:
            # A candidate tried to escape the mapped root: treat as no match.
            return []
    else:
        return [spec.local_path / "index.html"]
class MapLocal:
    """Addon: serve local files in place of remote resources (`map_local` option)."""

    def __init__(self) -> None:
        # Parsed `map_local` specs, in option order.
        self.replacements: list[MapLocalSpec] = []

    def load(self, loader):
        loader.add_option(
            "map_local",
            Sequence[str],
            [],
            """
            Map remote resources to a local file using a pattern of the form
            "[/flow-filter]/url-regex/file-or-directory-path", where the
            separator can be any character.
            """,
        )

    def configure(self, updated):
        if "map_local" in updated:
            self.replacements = []
            for option in ctx.options.map_local:
                try:
                    spec = parse_map_local_spec(option)
                except ValueError as e:
                    raise exceptions.OptionsError(
                        f"Cannot parse map_local option {option}: {e}"
                    ) from e
                self.replacements.append(spec)

    def request(self, flow: http.HTTPFlow) -> None:
        # Skip flows that already have a response/error or are not live.
        if flow.response or flow.error or not flow.live:
            return

        url = flow.request.pretty_url

        # Collected across all matching specs, for the 404 log message below.
        all_candidates = []
        for spec in self.replacements:
            if spec.matches(flow) and re.search(spec.regex, url):
                if spec.local_path.is_file():
                    # Spec maps directly to a single file: serve it as-is.
                    candidates = [spec.local_path]
                else:
                    # Spec maps to a directory: derive candidates from the URL.
                    candidates = file_candidates(url, spec)
                all_candidates.extend(candidates)

                local_file = None
                for candidate in candidates:
                    if candidate.is_file():
                        local_file = candidate
                        break

                if local_file:
                    headers = {"Server": version.MITMPROXY}
                    mimetype = mimetypes.guess_type(str(local_file))[0]
                    if mimetype:
                        headers["Content-Type"] = mimetype

                    try:
                        contents = local_file.read_bytes()
                    except OSError as e:
                        # Unreadable file: try the next matching spec instead.
                        logging.warning(f"Could not read file: {e}")
                        continue

                    flow.response = http.Response.make(200, contents, headers)
                    # only set flow.response once, for the first matching rule
                    return

        if all_candidates:
            flow.response = http.Response.make(404)
            logging.info(
                f"None of the local file candidates exist: {', '.join(str(x) for x in all_candidates)}"
            )
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/addons/savehar.py | mitmproxy/addons/savehar.py | """Write flow objects to a HAR file"""
import base64
import json
import logging
import zlib
from collections.abc import Sequence
from datetime import datetime
from datetime import timezone
from typing import Any
from mitmproxy import command
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import flow
from mitmproxy import flowfilter
from mitmproxy import http
from mitmproxy import types
from mitmproxy import version
from mitmproxy.addonmanager import Loader
from mitmproxy.connection import Server
from mitmproxy.coretypes.multidict import _MultiDict
from mitmproxy.log import ALERT
from mitmproxy.utils import human
from mitmproxy.utils import strutils
logger = logging.getLogger(__name__)
class SaveHar:
    """Save flows to a HAR (HTTP Archive) file, on demand or on exit (`hardump`)."""

    def __init__(self) -> None:
        # Flows collected for dumping when the `hardump` option is set.
        self.flows: list[flow.Flow] = []
        # Optional filter parsed from `save_stream_filter`.
        self.filt: flowfilter.TFilter | None = None

    @command.command("save.har")
    def export_har(self, flows: Sequence[flow.Flow], path: types.Path) -> None:
        """Export flows to an HAR (HTTP Archive) file."""

        har = json.dumps(self.make_har(flows), indent=4).encode()

        if path.endswith(".zhar"):
            # .zhar is the zlib-compressed HAR variant.
            har = zlib.compress(har, 9)

        with open(path, "wb") as f:
            f.write(har)

        logging.log(ALERT, f"HAR file saved ({human.pretty_size(len(har))} bytes).")

    def make_har(self, flows: Sequence[flow.Flow]) -> dict:
        """Build a HAR 1.2 document (as a dict) from the given flows."""
        entries = []
        skipped = 0
        # A list of server seen till now is maintained so we can avoid
        # using 'connect' time for entries that use an existing connection.
        servers_seen: set[Server] = set()

        for f in flows:
            if isinstance(f, http.HTTPFlow):
                entries.append(self.flow_entry(f, servers_seen))
            else:
                # Only HTTP flows can be represented in HAR.
                skipped += 1

        if skipped > 0:
            logger.info(f"Skipped {skipped} flows that weren't HTTP flows.")

        return {
            "log": {
                "version": "1.2",
                "creator": {
                    "name": "mitmproxy",
                    "version": version.VERSION,
                    "comment": "",
                },
                "pages": [],
                "entries": entries,
            }
        }

    def load(self, loader: Loader):
        loader.add_option(
            "hardump",
            str,
            "",
            """
            Save a HAR file with all flows on exit.
            You may select particular flows by setting save_stream_filter.
            For mitmdump, enabling this option will mean that flows are kept in memory.
            """,
        )

    def configure(self, updated):
        if "save_stream_filter" in updated:
            if ctx.options.save_stream_filter:
                try:
                    self.filt = flowfilter.parse(ctx.options.save_stream_filter)
                except ValueError as e:
                    raise exceptions.OptionsError(str(e)) from e
            else:
                self.filt = None
        if "hardump" in updated:
            if not ctx.options.hardump:
                # Option cleared: drop any flows collected so far.
                self.flows = []

    def response(self, flow: http.HTTPFlow) -> None:
        # websocket flows will receive a websocket_end,
        # we don't want to persist them here already
        if flow.websocket is None:
            self._save_flow(flow)

    def error(self, flow: http.HTTPFlow) -> None:
        self.response(flow)

    def websocket_end(self, flow: http.HTTPFlow) -> None:
        self._save_flow(flow)

    def _save_flow(self, flow: http.HTTPFlow) -> None:
        # Collect the flow only when hardump is active and the filter matches.
        if ctx.options.hardump:
            flow_matches = self.filt is None or self.filt(flow)
            if flow_matches:
                self.flows.append(flow)

    def done(self):
        if ctx.options.hardump:
            if ctx.options.hardump == "-":
                # "-" means write the HAR to stdout instead of a file.
                har = self.make_har(self.flows)
                print(json.dumps(har, indent=4))
            else:
                self.export_har(self.flows, ctx.options.hardump)

    def flow_entry(self, flow: http.HTTPFlow, servers_seen: set[Server]) -> dict:
        """Creates HAR entry from flow"""

        if flow.server_conn in servers_seen:
            # Reused connection: HAR convention is -1 for timings that don't apply.
            connect_time = -1.0
            ssl_time = -1.0
        elif flow.server_conn.timestamp_tcp_setup:
            assert flow.server_conn.timestamp_start
            connect_time = 1000 * (
                flow.server_conn.timestamp_tcp_setup - flow.server_conn.timestamp_start
            )

            if flow.server_conn.timestamp_tls_setup:
                ssl_time = 1000 * (
                    flow.server_conn.timestamp_tls_setup
                    - flow.server_conn.timestamp_tcp_setup
                )
            else:
                ssl_time = -1.0
            servers_seen.add(flow.server_conn)
        else:
            connect_time = -1.0
            ssl_time = -1.0

        if flow.request.timestamp_end:
            send = 1000 * (flow.request.timestamp_end - flow.request.timestamp_start)
        else:
            send = 0

        if flow.response and flow.request.timestamp_end:
            wait = 1000 * (flow.response.timestamp_start - flow.request.timestamp_end)
        else:
            wait = 0

        if flow.response and flow.response.timestamp_end:
            receive = 1000 * (
                flow.response.timestamp_end - flow.response.timestamp_start
            )
        else:
            receive = 0

        # All timings are milliseconds; -1 means "not applicable" per HAR spec.
        timings: dict[str, float | None] = {
            "connect": connect_time,
            "ssl": ssl_time,
            "send": send,
            "receive": receive,
            "wait": wait,
        }

        if flow.response:
            try:
                content = flow.response.content
            except ValueError:
                # Content-Encoding could not be decoded; fall back to raw body.
                content = flow.response.raw_content
            response_body_size = (
                len(flow.response.raw_content) if flow.response.raw_content else 0
            )
            response_body_decoded_size = len(content) if content else 0
            response_body_compression = response_body_decoded_size - response_body_size
            response = {
                "status": flow.response.status_code,
                "statusText": flow.response.reason,
                "httpVersion": flow.response.http_version,
                "cookies": self.format_response_cookies(flow.response),
                "headers": self.format_multidict(flow.response.headers),
                "content": {
                    "size": response_body_size,
                    "compression": response_body_compression,
                    "mimeType": flow.response.headers.get("Content-Type", ""),
                },
                "redirectURL": flow.response.headers.get("Location", ""),
                "headersSize": len(str(flow.response.headers)),
                "bodySize": response_body_size,
            }
            if content and strutils.is_mostly_bin(content):
                # Binary bodies are base64-encoded as allowed by the HAR spec.
                response["content"]["text"] = base64.b64encode(content).decode()
                response["content"]["encoding"] = "base64"
            else:
                text_content = flow.response.get_text(strict=False)
                if text_content is None:
                    response["content"]["text"] = ""
                else:
                    response["content"]["text"] = text_content
        else:
            # No response (e.g. errored/killed flow): emit an empty response stub.
            response = {
                "status": 0,
                "statusText": "",
                "httpVersion": "",
                "headers": [],
                "cookies": [],
                "content": {},
                "redirectURL": "",
                "headersSize": -1,
                "bodySize": -1,
                "_transferSize": 0,
                "_error": None,
            }

        if flow.error:
            response["_error"] = flow.error.msg

        if flow.request.method == "CONNECT":
            url = f"https://{flow.request.pretty_url}/"
        else:
            url = flow.request.pretty_url

        entry: dict[str, Any] = {
            "startedDateTime": datetime.fromtimestamp(
                flow.request.timestamp_start, timezone.utc
            ).isoformat(),
            "time": sum(v for v in timings.values() if v is not None and v >= 0),
            "request": {
                "method": flow.request.method,
                "url": url,
                "httpVersion": flow.request.http_version,
                "cookies": self.format_multidict(flow.request.cookies),
                "headers": self.format_multidict(flow.request.headers),
                "queryString": self.format_multidict(flow.request.query),
                "headersSize": len(str(flow.request.headers)),
                "bodySize": len(flow.request.raw_content)
                if flow.request.raw_content
                else 0,
            },
            "response": response,
            "cache": {},
            "timings": timings,
        }

        if flow.request.method in ["POST", "PUT", "PATCH"]:
            params = self.format_multidict(flow.request.urlencoded_form)
            entry["request"]["postData"] = {
                "mimeType": flow.request.headers.get("Content-Type", ""),
                "text": flow.request.get_text(strict=False),
                "params": params,
            }

        if flow.server_conn.peername:
            entry["serverIPAddress"] = str(flow.server_conn.peername[0])

        websocket_messages = []
        if flow.websocket:
            for message in flow.websocket.messages:
                if message.is_text:
                    data = message.text
                else:
                    data = base64.b64encode(message.content).decode()
                websocket_message = {
                    "type": "send" if message.from_client else "receive",
                    "time": message.timestamp,
                    "opcode": message.type.value,
                    "data": data,
                }
                websocket_messages.append(websocket_message)

            # Non-standard extension fields used by Chrome DevTools.
            entry["_resourceType"] = "websocket"
            entry["_webSocketMessages"] = websocket_messages
        return entry

    def format_response_cookies(self, response: http.Response) -> list[dict]:
        """Formats the response's cookie header to list of cookies"""
        cookie_list = response.cookies.items(multi=True)
        rv = []
        for name, (value, attrs) in cookie_list:
            cookie = {
                "name": name,
                "value": value,
                "path": attrs.get("path", "/"),
                "domain": attrs.get("domain", ""),
                "httpOnly": "httpOnly" in attrs,
                "secure": "secure" in attrs,
            }
            # TODO: handle expires attribute here.
            # This is not quite trivial because we need to parse random date formats.
            # For now, we just ignore the attribute.
            if "sameSite" in attrs:
                cookie["sameSite"] = attrs["sameSite"]
            rv.append(cookie)
        return rv

    def format_multidict(self, obj: _MultiDict[str, str]) -> list[dict]:
        # HAR represents multi-valued maps as a list of name/value objects.
        return [{"name": k, "value": v} for k, v in obj.items(multi=True)]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/addons/__init__.py | mitmproxy/addons/__init__.py | from mitmproxy.addons import anticache
from mitmproxy.addons import anticomp
from mitmproxy.addons import block
from mitmproxy.addons import blocklist
from mitmproxy.addons import browser
from mitmproxy.addons import clientplayback
from mitmproxy.addons import command_history
from mitmproxy.addons import comment
from mitmproxy.addons import core
from mitmproxy.addons import cut
from mitmproxy.addons import disable_h2c
from mitmproxy.addons import dns_resolver
from mitmproxy.addons import export
from mitmproxy.addons import maplocal
from mitmproxy.addons import mapremote
from mitmproxy.addons import modifybody
from mitmproxy.addons import modifyheaders
from mitmproxy.addons import next_layer
from mitmproxy.addons import onboarding
from mitmproxy.addons import proxyauth
from mitmproxy.addons import proxyserver
from mitmproxy.addons import save
from mitmproxy.addons import savehar
from mitmproxy.addons import script
from mitmproxy.addons import serverplayback
from mitmproxy.addons import stickyauth
from mitmproxy.addons import stickycookie
from mitmproxy.addons import strip_dns_https_records
from mitmproxy.addons import tlsconfig
from mitmproxy.addons import update_alt_svc
from mitmproxy.addons import upstream_auth
def default_addons():
    """Return fresh instances of all addons enabled by default, in load order."""
    return [
        core.Core(),
        browser.Browser(),
        block.Block(),
        strip_dns_https_records.StripDnsHttpsRecords(),
        blocklist.BlockList(),
        anticache.AntiCache(),
        anticomp.AntiComp(),
        clientplayback.ClientPlayback(),
        command_history.CommandHistory(),
        comment.Comment(),
        cut.Cut(),
        disable_h2c.DisableH2C(),
        export.Export(),
        onboarding.Onboarding(),
        proxyauth.ProxyAuth(),
        proxyserver.Proxyserver(),
        script.ScriptLoader(),
        dns_resolver.DnsResolver(),
        next_layer.NextLayer(),
        serverplayback.ServerPlayback(),
        mapremote.MapRemote(),
        maplocal.MapLocal(),
        modifybody.ModifyBody(),
        modifyheaders.ModifyHeaders(),
        stickyauth.StickyAuth(),
        stickycookie.StickyCookie(),
        save.Save(),
        savehar.SaveHar(),
        tlsconfig.TlsConfig(),
        upstream_auth.UpstreamAuth(),
        update_alt_svc.UpdateAltSvc(),
    ]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/addons/update_alt_svc.py | mitmproxy/addons/update_alt_svc.py | import re
from mitmproxy import ctx
from mitmproxy.http import HTTPFlow
from mitmproxy.proxy import mode_specs
ALT_SVC = "alt-svc"
HOST_PATTERN = r"([a-zA-Z0-9.-]*:\d{1,5})"
# Compile once at import time instead of recompiling on every response.
_HOST_RE = re.compile(HOST_PATTERN)


def update_alt_svc_header(header: str, port: int) -> str:
    """Rewrite every authority in an Alt-Svc header value to point at *port*.

    The host part is dropped on purpose: an empty authority in Alt-Svc means
    "same host", which keeps clients connecting through mitmproxy.
    """
    return _HOST_RE.sub(f":{port}", header)
class UpdateAltSvc:
    """Addon: rewrite Alt-Svc response headers in reverse proxy mode."""

    def load(self, loader):
        loader.add_option(
            "keep_alt_svc_header",
            bool,
            False,
            "Reverse Proxy: Keep Alt-Svc headers as-is, even if they do not point to mitmproxy. Enabling this option may cause clients to bypass the proxy.",
        )

    def responseheaders(self, flow: HTTPFlow):
        assert flow.response
        if (
            not ctx.options.keep_alt_svc_header
            and isinstance(flow.client_conn.proxy_mode, mode_specs.ReverseMode)
            and ALT_SVC in flow.response.headers
        ):
            # Point advertised alternative services at the port we listen on,
            # so clients don't bypass mitmproxy.
            _, listen_port, *_ = flow.client_conn.sockname
            headers = flow.response.headers
            headers[ALT_SVC] = update_alt_svc_header(headers[ALT_SVC], listen_port)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/addons/strip_dns_https_records.py | mitmproxy/addons/strip_dns_https_records.py | from mitmproxy import ctx
from mitmproxy import dns
from mitmproxy.net.dns import types
class StripDnsHttpsRecords:
    """Addon: sanitize DNS HTTPS records so mitmproxy can intercept connections."""

    def load(self, loader):
        loader.add_option(
            "strip_ech",
            bool,
            True,
            "Strip Encrypted ClientHello (ECH) data from DNS HTTPS records so that mitmproxy can generate matching certificates.",
        )

    def dns_response(self, flow: dns.DNSFlow):
        assert flow.response
        if ctx.options.strip_ech:
            for answer in flow.response.answers:
                if answer.type == types.HTTPS:
                    # Drop ECH config so the client sends a readable SNI.
                    answer.https_ech = None

        if not ctx.options.http3:
            # HTTP/3 disabled: remove h3 ALPN advertisements so clients stay
            # on TCP, which mitmproxy can intercept.
            for answer in flow.response.answers:
                if (
                    answer.type == types.HTTPS
                    and answer.https_alpn is not None
                    and any(
                        # HTTP/3 or any of the spec drafts (h3-...)?
                        a == b"h3" or a.startswith(b"h3-")
                        for a in answer.https_alpn
                    )
                ):
                    alpns = tuple(
                        a
                        for a in answer.https_alpn
                        if a != b"h3" and not a.startswith(b"h3-")
                    )
                    answer.https_alpn = alpns or None
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/addons/proxyserver.py | mitmproxy/addons/proxyserver.py | """
This addon is responsible for starting/stopping the proxy server sockets/instances specified by the mode option.
"""
from __future__ import annotations
import asyncio
import collections
import ipaddress
import logging
from collections.abc import Iterable
from collections.abc import Iterator
from contextlib import contextmanager
from typing import Optional
from wsproto.frame_protocol import Opcode
from mitmproxy import command
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import http
from mitmproxy import platform
from mitmproxy import tcp
from mitmproxy import udp
from mitmproxy import websocket
from mitmproxy.connection import Address
from mitmproxy.flow import Flow
from mitmproxy.proxy import events
from mitmproxy.proxy import mode_specs
from mitmproxy.proxy import server_hooks
from mitmproxy.proxy.layers.tcp import TcpMessageInjected
from mitmproxy.proxy.layers.udp import UdpMessageInjected
from mitmproxy.proxy.layers.websocket import WebSocketMessageInjected
from mitmproxy.proxy.mode_servers import ProxyConnectionHandler
from mitmproxy.proxy.mode_servers import ServerInstance
from mitmproxy.proxy.mode_servers import ServerManager
from mitmproxy.utils import asyncio_utils
from mitmproxy.utils import human
from mitmproxy.utils import signals
logger = logging.getLogger(__name__)
class Servers:
    """Tracks the running proxy server instances, one per proxy mode spec."""

    def __init__(self, manager: ServerManager):
        # Fired whenever the set of instances changes (before and after start).
        self.changed = signals.AsyncSignal(lambda: None)
        self._instances: dict[mode_specs.ProxyMode, ServerInstance] = dict()
        # Serializes concurrent update() calls.
        self._lock = asyncio.Lock()
        self._manager = manager

    @property
    def is_updating(self) -> bool:
        # True while an update() call holds the lock.
        return self._lock.locked()

    async def update(self, modes: Iterable[mode_specs.ProxyMode]) -> bool:
        """Start/stop instances so that exactly *modes* are running.

        Returns True if all starts and stops succeeded.
        """
        all_ok = True
        async with self._lock:
            new_instances: dict[mode_specs.ProxyMode, ServerInstance] = {}
            start_tasks = []
            if ctx.options.server:
                # Create missing modes and keep existing ones.
                for spec in modes:
                    if spec in self._instances:
                        instance = self._instances[spec]
                    else:
                        instance = ServerInstance.make(spec, self._manager)
                        start_tasks.append(instance.start())
                    new_instances[spec] = instance

            # Shutdown modes that have been removed from the list.
            stop_tasks = [
                s.stop()
                for spec, s in self._instances.items()
                if spec not in new_instances
            ]

            if not start_tasks and not stop_tasks:
                return (
                    True  # nothing to do, so we don't need to trigger `self.changed`.
                )

            self._instances = new_instances

            # Notify listeners about the new not-yet-started servers.
            await self.changed.send()

            # We first need to free ports before starting new servers.
            for ret in await asyncio.gather(*stop_tasks, return_exceptions=True):
                if ret:
                    all_ok = False
                    logger.error(str(ret))
            for ret in await asyncio.gather(*start_tasks, return_exceptions=True):
                if ret:
                    all_ok = False
                    logger.error(str(ret))

            await self.changed.send()
        return all_ok

    def __len__(self) -> int:
        return len(self._instances)

    def __iter__(self) -> Iterator[ServerInstance]:
        return iter(self._instances.values())

    def __getitem__(self, mode: str | mode_specs.ProxyMode) -> ServerInstance:
        if isinstance(mode, str):
            mode = mode_specs.ProxyMode.parse(mode)
        return self._instances[mode]
class Proxyserver(ServerManager):
    """
    This addon runs the actual proxy server.
    """

    connections: dict[tuple | str, ProxyConnectionHandler]
    servers: Servers
    is_running: bool
    _connect_addr: Address | None = None

    def __init__(self):
        self.connections = {}
        self.servers = Servers(self)
        self.is_running = False

    def __repr__(self):
        return f"Proxyserver({len(self.connections)} active conns)"

    @command.command("proxyserver.active_connections")
    def active_connections(self) -> int:
        """Number of active client connections."""
        return len(self.connections)

    @contextmanager
    def register_connection(
        self, connection_id: tuple | str, handler: ProxyConnectionHandler
    ):
        # Track the handler for the connection's lifetime so that
        # inject_event() can route injected messages to it.
        self.connections[connection_id] = handler
        try:
            yield
        finally:
            del self.connections[connection_id]

    def load(self, loader):
        loader.add_option(
            "store_streamed_bodies",
            bool,
            False,
            "Store HTTP request and response bodies when streamed (see `stream_large_bodies`). "
            "This increases memory consumption, but makes it possible to inspect streamed bodies.",
        )
        loader.add_option(
            "connection_strategy",
            str,
            "eager",
            "Determine when server connections should be established. When set to lazy, mitmproxy "
            "tries to defer establishing an upstream connection as long as possible. This makes it possible to "
            "use server replay while being offline. When set to eager, mitmproxy can detect protocols with "
            "server-side greetings, as well as accurately mirror TLS ALPN negotiation.",
            choices=("eager", "lazy"),
        )
        loader.add_option(
            "stream_large_bodies",
            Optional[str],
            None,
            """
            Stream data to the client if request or response body exceeds the given
            threshold. If streamed, the body will not be stored in any way,
            and such responses cannot be modified. Understands k/m/g
            suffixes, i.e. 3m for 3 megabytes. To store streamed bodies, see `store_streamed_bodies`.
            """,
        )
        loader.add_option(
            "body_size_limit",
            Optional[str],
            None,
            """
            Byte size limit of HTTP request and response bodies. Understands
            k/m/g suffixes, i.e. 3m for 3 megabytes.
            """,
        )
        loader.add_option(
            "keep_host_header",
            bool,
            False,
            """
            Reverse Proxy: Keep the original host header instead of rewriting it
            to the reverse proxy target.
            """,
        )
        loader.add_option(
            "proxy_debug",
            bool,
            False,
            "Enable debug logs in the proxy core.",
        )
        loader.add_option(
            "normalize_outbound_headers",
            bool,
            True,
            """
            Normalize outgoing HTTP/2 header names, but emit a warning when doing so.
            HTTP/2 does not allow uppercase header names. This option makes sure that HTTP/2 headers set
            in custom scripts are lowercased before they are sent.
            """,
        )
        loader.add_option(
            "validate_inbound_headers",
            bool,
            True,
            """
            Make sure that incoming HTTP requests are not malformed.
            Disabling this option makes mitmproxy vulnerable to HTTP smuggling attacks.
            """,
        )
        loader.add_option(
            "connect_addr",
            Optional[str],
            None,
            """Set the local IP address that mitmproxy should use when connecting to upstream servers.""",
        )

    def running(self):
        self.is_running = True

    def configure(self, updated) -> None:
        if "stream_large_bodies" in updated:
            try:
                human.parse_size(ctx.options.stream_large_bodies)
            except ValueError:
                raise exceptions.OptionsError(
                    f"Invalid stream_large_bodies specification: "
                    f"{ctx.options.stream_large_bodies}"
                )
        if "body_size_limit" in updated:
            try:
                human.parse_size(ctx.options.body_size_limit)
            except ValueError:
                raise exceptions.OptionsError(
                    f"Invalid body_size_limit specification: "
                    f"{ctx.options.body_size_limit}"
                )
        if "connect_addr" in updated:
            try:
                if ctx.options.connect_addr:
                    # Port 0 lets the OS pick an ephemeral source port.
                    self._connect_addr = (
                        str(ipaddress.ip_address(ctx.options.connect_addr)),
                        0,
                    )
                else:
                    self._connect_addr = None
            except ValueError:
                raise exceptions.OptionsError(
                    f"Invalid value for connect_addr: {ctx.options.connect_addr!r}. Specify a valid IP address."
                )
        if "mode" in updated or "server" in updated:
            # Make sure that all modes are syntactically valid...
            modes: list[mode_specs.ProxyMode] = []
            for mode in ctx.options.mode:
                try:
                    modes.append(mode_specs.ProxyMode.parse(mode))
                except ValueError as e:
                    raise exceptions.OptionsError(
                        f"Invalid proxy mode specification: {mode} ({e})"
                    )

            # ...and don't listen on the same address.
            listen_addrs = []
            for m in modes:
                if m.transport_protocol == "both":
                    protocols = ["tcp", "udp"]
                else:
                    protocols = [m.transport_protocol]
                host = m.listen_host(ctx.options.listen_host)
                port = m.listen_port(ctx.options.listen_port)
                if port is None:
                    continue
                for proto in protocols:
                    listen_addrs.append((host, port, proto))

            if len(set(listen_addrs)) != len(listen_addrs):
                (host, port, _) = collections.Counter(listen_addrs).most_common(1)[0][0]
                dup_addr = human.format_address((host or "0.0.0.0", port))
                raise exceptions.OptionsError(
                    f"Cannot spawn multiple servers on the same address: {dup_addr}"
                )

            if ctx.options.mode and not ctx.master.addons.get("nextlayer"):
                logger.warning("Warning: Running proxyserver without nextlayer addon!")
            if any(isinstance(m, mode_specs.TransparentMode) for m in modes):
                if platform.original_addr:
                    platform.init_transparent_mode()
                else:
                    raise exceptions.OptionsError(
                        "Transparent mode not supported on this platform."
                    )

            if self.is_running:
                asyncio_utils.create_task(
                    self.servers.update(modes),
                    name="update servers",
                    keep_ref=True,
                )

    async def setup_servers(self) -> bool:
        """Setup proxy servers. This may take an indefinite amount of time to complete (e.g. on permission prompts)."""
        return await self.servers.update(
            [mode_specs.ProxyMode.parse(m) for m in ctx.options.mode]
        )

    def listen_addrs(self) -> list[Address]:
        return [addr for server in self.servers for addr in server.listen_addrs]

    def inject_event(self, event: events.MessageInjected):
        """Dispatch an injected message to the live connection of its flow.

        Raises:
            ValueError: if the flow's connection is not currently live.
        """
        connection_id: str | tuple
        if event.flow.client_conn.transport_protocol != "udp":
            connection_id = event.flow.client_conn.id
        else:  # pragma: no cover
            # temporary workaround: for UDP we don't have persistent client IDs yet.
            connection_id = (
                event.flow.client_conn.peername,
                event.flow.client_conn.sockname,
            )
        if connection_id not in self.connections:
            raise ValueError("Flow is not from a live connection.")
        asyncio_utils.create_task(
            self.connections[connection_id].server_event(event),
            # fix: plain string instead of an f-string with no placeholders.
            name="inject_event",
            keep_ref=True,
            client=event.flow.client_conn.peername,
        )

    @command.command("inject.websocket")
    def inject_websocket(
        self, flow: Flow, to_client: bool, message: bytes, is_text: bool = True
    ):
        if not isinstance(flow, http.HTTPFlow) or not flow.websocket:
            logger.warning("Cannot inject WebSocket messages into non-WebSocket flows.")
            # fix: bail out instead of injecting into an incompatible flow.
            return
        msg = websocket.WebSocketMessage(
            Opcode.TEXT if is_text else Opcode.BINARY, not to_client, message
        )
        event = WebSocketMessageInjected(flow, msg)
        try:
            self.inject_event(event)
        except ValueError as e:
            logger.warning(str(e))

    @command.command("inject.tcp")
    def inject_tcp(self, flow: Flow, to_client: bool, message: bytes):
        if not isinstance(flow, tcp.TCPFlow):
            logger.warning("Cannot inject TCP messages into non-TCP flows.")
            # fix: bail out instead of injecting into an incompatible flow.
            return
        event = TcpMessageInjected(flow, tcp.TCPMessage(not to_client, message))
        try:
            self.inject_event(event)
        except ValueError as e:
            logger.warning(str(e))

    @command.command("inject.udp")
    def inject_udp(self, flow: Flow, to_client: bool, message: bytes):
        if not isinstance(flow, udp.UDPFlow):
            logger.warning("Cannot inject UDP messages into non-UDP flows.")
            # fix: bail out instead of injecting into an incompatible flow.
            return
        event = UdpMessageInjected(flow, udp.UDPMessage(not to_client, message))
        try:
            self.inject_event(event)
        except ValueError as e:
            logger.warning(str(e))

    def server_connect(self, data: server_hooks.ServerConnectionHookData):
        if data.server.sockname is None:
            data.server.sockname = self._connect_addr

        # Prevent mitmproxy from recursively connecting to itself.
        assert data.server.address
        connect_host, connect_port, *_ = data.server.address

        for server in self.servers:
            for listen_host, listen_port, *_ in server.listen_addrs:
                self_connect = (
                    connect_port == listen_port
                    and connect_host in ("localhost", "127.0.0.1", "::1", listen_host)
                    and server.mode.transport_protocol == data.server.transport_protocol
                )
                if self_connect:
                    data.server.error = (
                        "Request destination unknown. "
                        "Unable to figure out where this request should be forwarded to."
                    )
                    return
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/addons/readfile.py | mitmproxy/addons/readfile.py | import asyncio
import logging
import os.path
import sys
from typing import BinaryIO
from typing import Optional
from mitmproxy import command
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import flowfilter
from mitmproxy import io
from mitmproxy.utils import asyncio_utils
logger = logging.getLogger(__name__)
class ReadFile:
    """
    An addon that handles reading from file on startup.
    """

    def __init__(self):
        # Optional flow filter parsed from `readfile_filter`.
        self.filter: flowfilter.TFilter | None = None
        self._read_task: asyncio.Task | None = None

    def load(self, loader):
        loader.add_option("rfile", Optional[str], None, "Read flows from file.")
        loader.add_option(
            "readfile_filter", Optional[str], None, "Read only matching flows."
        )

    def configure(self, updated):
        if "readfile_filter" in updated:
            if ctx.options.readfile_filter:
                try:
                    self.filter = flowfilter.parse(ctx.options.readfile_filter)
                except ValueError as e:
                    raise exceptions.OptionsError(str(e)) from e
            else:
                self.filter = None

    async def load_flows(self, fo: BinaryIO) -> int:
        """Read flows from an open file object; returns the number loaded.

        Raises:
            exceptions.FlowReadException: if the flow file is corrupted.
        """
        cnt = 0
        freader = io.FlowReader(fo)
        try:
            for flow in freader.stream():
                if self.filter and not self.filter(flow):
                    continue
                await ctx.master.load_flow(flow)
                cnt += 1
        except (OSError, exceptions.FlowReadException) as e:
            # fix: use the module logger consistently instead of the root logger.
            if cnt:
                logger.warning("Flow file corrupted - loaded %i flows.", cnt)
            else:
                logger.error("Flow file corrupted.")
            raise exceptions.FlowReadException(str(e)) from e
        else:
            return cnt

    async def load_flows_from_path(self, path: str) -> int:
        """Open *path* (with ~ expansion) and load flows from it."""
        path = os.path.expanduser(path)
        try:
            with open(path, "rb") as f:
                return await self.load_flows(f)
        except OSError as e:
            logger.error(f"Cannot load flows: {e}")
            raise exceptions.FlowReadException(str(e)) from e

    async def doread(self, rfile: str) -> None:
        # Top-level entry point scheduled by running(); reports rather than
        # propagates read errors.
        try:
            await self.load_flows_from_path(rfile)
        except exceptions.FlowReadException as e:
            logger.exception(f"Failed to read {ctx.options.rfile}: {e}")

    def running(self):
        if ctx.options.rfile:
            self._read_task = asyncio_utils.create_task(
                self.doread(ctx.options.rfile),
                name="readfile",
                keep_ref=False,
            )

    @command.command("readfile.reading")
    def reading(self) -> bool:
        """Whether the startup flow read is still in progress."""
        return bool(self._read_task and not self._read_task.done())
class ReadFileStdin(ReadFile):
    """Support the special case of "-" for reading from stdin"""

    async def load_flows_from_path(self, path: str) -> int:
        if path == "-":  # pragma: no cover
            # Hard to test: this coroutine runs on the event loop, where a
            # sys.stdin mock has no effect.
            return await self.load_flows(sys.stdin.buffer)
        return await super().load_flows_from_path(path)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/addons/comment.py | mitmproxy/addons/comment.py | from collections.abc import Sequence
from mitmproxy import command
from mitmproxy import ctx
from mitmproxy import flow
from mitmproxy.hooks import UpdateHook
class Comment:
    """Addon providing the `flow.comment` command."""

    @command.command("flow.comment")
    def comment(self, flow: Sequence[flow.Flow], comment: str) -> None:
        "Add a comment to a flow"
        for f in flow:
            f.comment = comment
        # Notify the UI that all passed flows changed.
        ctx.master.addons.trigger(UpdateHook(list(flow)))
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/addons/stickyauth.py | mitmproxy/addons/stickyauth.py | from typing import Optional
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import flowfilter
class StickyAuth:
    """Remember Authorization headers per host and replay them on matching requests."""

    def __init__(self):
        # Compiled sticky-auth filter, or None when the feature is off.
        self.flt = None
        # host -> last-seen Authorization header value.
        self.hosts = {}

    def load(self, loader):
        loader.add_option(
            "stickyauth",
            Optional[str],
            None,
            "Set sticky auth filter. Matched against requests.",
        )

    def configure(self, updated):
        """(Re)compile the filter whenever the stickyauth option changes."""
        if "stickyauth" in updated:
            if ctx.options.stickyauth:
                try:
                    self.flt = flowfilter.parse(ctx.options.stickyauth)
                except ValueError as e:
                    raise exceptions.OptionsError(str(e)) from e
            else:
                self.flt = None

    def request(self, flow):
        """Record credentials from outgoing requests; inject them on filter matches."""
        if self.flt:
            host = flow.request.host
            if "authorization" in flow.request.headers:
                # Request already carries credentials: remember them for this host.
                self.hosts[host] = flow.request.headers["authorization"]
            elif flowfilter.match(self.flt, flow):
                # No credentials, but the flow matches: replay what we saw earlier.
                if host in self.hosts:
                    flow.request.headers["authorization"] = self.hosts[host]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/addons/proxyauth.py | mitmproxy/addons/proxyauth.py | from __future__ import annotations
import binascii
import pathlib
import weakref
from abc import ABC
from abc import abstractmethod
from collections.abc import MutableMapping
from typing import Optional
import ldap3
from mitmproxy import connection
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import http
from mitmproxy.net.http import status_codes
from mitmproxy.proxy import mode_specs
from mitmproxy.proxy.layers import modes
from mitmproxy.utils import htpasswd
REALM = "mitmproxy"
class ProxyAuth:
    """Require clients to authenticate (HTTP basic or SOCKS5) before proxying."""

    # Active credential validator, or None when authentication is disabled.
    validator: Validator | None = None

    def __init__(self) -> None:
        # Weak keys: entries disappear automatically when a client connection
        # is garbage-collected.
        self.authenticated: MutableMapping[connection.Client, tuple[str, str]] = (
            weakref.WeakKeyDictionary()
        )
        """Contains all connections that are permanently authenticated after an HTTP CONNECT"""

    def load(self, loader):
        loader.add_option(
            "proxyauth",
            Optional[str],
            None,
            """
            Require proxy authentication. Format:
            "username:pass",
            "any" to accept any user/pass combination,
            "@path" to use an Apache htpasswd file,
            or "ldap[s]:url_server_ldap[:port]:dn_auth:password:dn_subtree[?search_filter_key=...]" for LDAP authentication.
            """,
        )

    def configure(self, updated):
        """Pick the validator implementation matching the proxyauth option syntax."""
        if "proxyauth" in updated:
            auth = ctx.options.proxyauth
            if auth:
                if auth == "any":
                    self.validator = AcceptAll()
                elif auth.startswith("@"):
                    self.validator = Htpasswd(auth)
                elif ctx.options.proxyauth.startswith("ldap"):
                    self.validator = Ldap(auth)
                elif ":" in ctx.options.proxyauth:
                    self.validator = SingleUser(auth)
                else:
                    raise exceptions.OptionsError("Invalid proxyauth specification.")
            else:
                self.validator = None

    def socks5_auth(self, data: modes.Socks5AuthData) -> None:
        """Validate SOCKS5 credentials and remember the authenticated connection."""
        if self.validator and self.validator(data.username, data.password):
            data.valid = True
            self.authenticated[data.client_conn] = data.username, data.password

    def http_connect(self, f: http.HTTPFlow) -> None:
        if self.validator and self.authenticate_http(f):
            # Make a note that all further requests over this connection are ok.
            self.authenticated[f.client_conn] = f.metadata["proxyauth"]

    def requestheaders(self, f: http.HTTPFlow) -> None:
        if self.validator:
            # Is this connection authenticated by a previous HTTP CONNECT?
            if f.client_conn in self.authenticated:
                f.metadata["proxyauth"] = self.authenticated[f.client_conn]
            elif f.is_replay:
                # Replayed flows were already authenticated when first seen.
                pass
            else:
                self.authenticate_http(f)

    def authenticate_http(self, f: http.HTTPFlow) -> bool:
        """
        Authenticate an HTTP request, returns if authentication was successful.

        If valid credentials are found, the matching authentication header is removed.
        If no or invalid credentials are found, flow.response is set to an error page.
        """
        assert self.validator
        username = None
        password = None
        is_valid = False

        is_proxy = is_http_proxy(f)
        auth_header = http_auth_header(is_proxy)
        try:
            auth_value = f.request.headers.get(auth_header, "")
            scheme, username, password = parse_http_basic_auth(auth_value)
            is_valid = self.validator(username, password)
        except Exception:
            # Malformed/missing header or validator failure: treat as unauthenticated.
            pass

        if is_valid:
            f.metadata["proxyauth"] = (username, password)
            # Strip the credentials so they are not forwarded upstream.
            del f.request.headers[auth_header]
            return True
        else:
            f.response = make_auth_required_response(is_proxy)
            return False
def make_auth_required_response(is_proxy: bool) -> http.Response:
    """Build a 407 (proxy) or 401 (server) response carrying a basic-auth challenge."""
    if is_proxy:
        status_code = status_codes.PROXY_AUTH_REQUIRED
        challenge = "Proxy-Authenticate"
    else:
        status_code = status_codes.UNAUTHORIZED
        challenge = "WWW-Authenticate"
    headers = {challenge: f'Basic realm="{REALM}"'}
    reason = http.status_codes.RESPONSES[status_code]
    body = (
        f"<html>"
        f"<head><title>{status_code} {reason}</title></head>"
        f"<body><h1>{status_code} {reason}</h1></body>"
        f"</html>"
    )
    return http.Response.make(status_code, body, headers)
def http_auth_header(is_proxy: bool) -> str:
    """Name of the request header that carries credentials for this mode."""
    return "Proxy-Authorization" if is_proxy else "Authorization"
def is_http_proxy(f: http.HTTPFlow) -> bool:
    """
    Returns:
        - True, if authentication is done as if mitmproxy is a proxy
        - False, if authentication is done as if mitmproxy is an HTTP server
    """
    # Regular and upstream modes act as an explicit proxy; every other mode
    # (reverse, transparent, ...) looks like a plain HTTP server to clients.
    return isinstance(
        f.client_conn.proxy_mode, (mode_specs.RegularMode, mode_specs.UpstreamMode)
    )
def mkauth(username: str, password: str, scheme: str = "basic") -> str:
    """
    Craft a basic auth string.

    Note: b2a_base64 appends a trailing newline, which is preserved here.
    """
    credentials = f"{username}:{password}".encode("utf8")
    encoded = binascii.b2a_base64(credentials).decode("ascii")
    return f"{scheme} {encoded}"
def parse_http_basic_auth(s: str) -> tuple[str, str, str]:
    """
    Parse a basic auth header value ("basic <base64(user:password)>").

    Returns:
        A (scheme, username, password) tuple.

    Raises:
        ValueError: if the input is invalid.
    """
    scheme, authinfo = s.split()
    if scheme.lower() != "basic":
        raise ValueError("Unknown scheme")
    try:
        # RFC 7617: the user-id must not contain a colon, but the password may,
        # so split only on the first colon.
        user, password = (
            binascii.a2b_base64(authinfo.encode())
            .decode("utf8", "replace")
            .split(":", 1)
        )
    except binascii.Error as e:
        raise ValueError(str(e))
    return scheme, user, password
class Validator(ABC):
    """Base class for all username/password validators."""

    @abstractmethod
    def __call__(self, username: str, password: str) -> bool:
        """Return True if the credential pair is valid."""
        raise NotImplementedError
class AcceptAll(Validator):
    """Validator that accepts every username/password combination."""

    def __call__(self, username: str, password: str) -> bool:
        # Used for the "any" proxyauth option value.
        return True
class SingleUser(Validator):
    """Validator for a single "username:password" pair given on the command line."""

    def __init__(self, proxyauth: str):
        try:
            self.username, self.password = proxyauth.split(":")
        except ValueError:
            # More or fewer than exactly one colon in the spec.
            raise exceptions.OptionsError("Invalid single-user auth specification.")

    def __call__(self, username: str, password: str) -> bool:
        return self.username == username and self.password == password
class Htpasswd(Validator):
    """Validator backed by an Apache htpasswd file ("@path" proxyauth syntax)."""

    def __init__(self, proxyauth: str):
        # Strip the leading "@" marker from the option value.
        path = pathlib.Path(proxyauth[1:]).expanduser()
        try:
            self.htpasswd = htpasswd.HtpasswdFile.from_file(path)
        except (ValueError, OSError) as e:
            raise exceptions.OptionsError(
                f"Could not open htpasswd file: {path}"
            ) from e

    def __call__(self, username: str, password: str) -> bool:
        return self.htpasswd.check_password(username, password)
class Ldap(Validator):
    """
    Validator that binds against an LDAP directory.

    Spec format: "ldap[s]:host[:port]:bind_dn:bind_password:search_subtree[?search_filter_key=...]"
    """

    conn: ldap3.Connection
    server: ldap3.Server
    dn_subtree: str
    filter_key: str

    def __init__(self, proxyauth: str):
        (
            use_ssl,
            url,
            port,
            ldap_user,
            ldap_pass,
            self.dn_subtree,
            self.filter_key,
        ) = self.parse_spec(proxyauth)
        server = ldap3.Server(url, port=port, use_ssl=use_ssl)
        # Service bind used for user lookups; user binds happen per __call__.
        conn = ldap3.Connection(server, ldap_user, ldap_pass, auto_bind=True)
        self.conn = conn
        self.server = server

    @staticmethod
    def parse_spec(spec: str) -> tuple[bool, str, int | None, str, str, str, str]:
        """Split a proxyauth LDAP spec into its components; raise OptionsError if malformed."""
        try:
            # More than 4 colons means an explicit port is included.
            if spec.count(":") > 4:
                (
                    security,
                    url,
                    port_str,
                    ldap_user,
                    ldap_pass,
                    dn_subtree,
                ) = spec.split(":")
                port = int(port_str)
            else:
                security, url, ldap_user, ldap_pass, dn_subtree = spec.split(":")
                port = None

            # Optional "?search_filter_key=<attr>" suffix on the subtree.
            if "?" in dn_subtree:
                dn_subtree, search_str = dn_subtree.split("?")
                key, value = search_str.split("=")
                if key == "search_filter_key":
                    search_filter_key = value
                else:
                    raise ValueError
            else:
                search_filter_key = "cn"

            if security == "ldaps":
                use_ssl = True
            elif security == "ldap":
                use_ssl = False
            else:
                raise ValueError

            return (
                use_ssl,
                url,
                port,
                ldap_user,
                ldap_pass,
                dn_subtree,
                search_filter_key,
            )
        except ValueError:
            raise exceptions.OptionsError(f"Invalid LDAP specification: {spec}")

    def __call__(self, username: str, password: str) -> bool:
        """Look up the user's DN, then attempt to bind with the given password."""
        if not username or not password:
            return False
        self.conn.search(self.dn_subtree, f"({self.filter_key}={username})")
        if self.conn.response:
            # A successful bind with the user's own credentials proves validity.
            c = ldap3.Connection(
                self.server, self.conn.response[0]["dn"], password, auto_bind=True
            )
            if c:
                return True
        return False
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/addons/keepserving.py | mitmproxy/addons/keepserving.py | from __future__ import annotations
import asyncio
from mitmproxy import ctx
from mitmproxy.utils import asyncio_utils
class KeepServing:
    """Shut mitmproxy down once replay/file-read work is finished, unless keepserving is set."""

    def load(self, loader):
        loader.add_option(
            "keepserving",
            bool,
            False,
            """
            Continue serving after client playback, server playback or file
            read. This option is ignored by interactive tools, which always keep
            serving.
            """,
        )

    def keepgoing(self) -> bool:
        """Return True while any replay/read activity or open connection remains."""
        # Checking for proxyserver.active_connections is important for server replay,
        # the addon may report that replay is finished but not the entire response has been sent yet.
        # (https://github.com/mitmproxy/mitmproxy/issues/7569)
        checks = [
            "readfile.reading",
            "replay.client.count",
            "replay.server.count",
            "proxyserver.active_connections",
        ]
        return any([ctx.master.commands.call(c) for c in checks])

    def shutdown(self):  # pragma: no cover
        ctx.master.shutdown()

    async def watch(self):
        """Poll until all work is done, then trigger shutdown."""
        while True:
            await asyncio.sleep(0.1)
            if not self.keepgoing():
                self.shutdown()

    def running(self):
        # Only watch when one of the "one-shot" options is in use and the
        # user did not explicitly ask to keep serving.
        opts = [
            ctx.options.client_replay,
            ctx.options.server_replay,
            ctx.options.rfile,
        ]
        if any(opts) and not ctx.options.keepserving:
            asyncio_utils.create_task(
                self.watch(),
                name="keepserving",
                keep_ref=True,
            )
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/addons/upstream_auth.py | mitmproxy/addons/upstream_auth.py | import base64
import re
from typing import Optional
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import http
from mitmproxy.proxy import mode_specs
from mitmproxy.utils import strutils
def parse_upstream_auth(auth: str) -> bytes:
    """
    Render a "username:password" option value as an HTTP basic auth header value.

    Raises OptionsError if the spec has no colon preceded by at least one character.
    """
    if re.search(r".+:", auth) is None:
        raise exceptions.OptionsError("Invalid upstream auth specification: %s" % auth)
    return b"Basic " + base64.b64encode(strutils.always_bytes(auth))
class UpstreamAuth:
    """
    This addon handles authentication to systems upstream from us for the
    upstream proxy and reverse proxy mode. There are 3 cases:

    - Upstream proxy CONNECT requests should have authentication added, and
      subsequent already connected requests should not.
    - Upstream proxy regular requests
    - Reverse proxy regular requests (CONNECT is invalid in this mode)
    """

    # Pre-rendered "Basic <base64>" header value, or None when disabled.
    auth: bytes | None = None

    def load(self, loader):
        loader.add_option(
            "upstream_auth",
            Optional[str],
            None,
            """
            Add HTTP Basic authentication to upstream proxy and reverse proxy
            requests. Format: username:password.
            """,
        )

    def configure(self, updated):
        if "upstream_auth" in updated:
            if ctx.options.upstream_auth is None:
                self.auth = None
            else:
                self.auth = parse_upstream_auth(ctx.options.upstream_auth)

    def http_connect_upstream(self, f: http.HTTPFlow):
        """Attach credentials to CONNECT requests sent to the upstream proxy."""
        if self.auth:
            f.request.headers["Proxy-Authorization"] = self.auth

    def requestheaders(self, f: http.HTTPFlow):
        """Attach credentials to regular requests, depending on proxy mode."""
        if self.auth:
            if (
                isinstance(f.client_conn.proxy_mode, mode_specs.UpstreamMode)
                and f.request.scheme == "http"
            ):
                # Plain-HTTP requests go to the proxy itself, so use the proxy header.
                f.request.headers["Proxy-Authorization"] = self.auth
            elif isinstance(f.client_conn.proxy_mode, mode_specs.ReverseMode):
                f.request.headers["Authorization"] = self.auth
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/addons/intercept.py | mitmproxy/addons/intercept.py | from typing import Optional
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import flow
from mitmproxy import flowfilter
class Intercept:
    """Pause flows that match the intercept filter so the user can inspect/edit them."""

    # Compiled intercept filter, or None when no filter is set.
    filt: flowfilter.TFilter | None = None

    def load(self, loader):
        loader.add_option("intercept_active", bool, False, "Intercept toggle")
        loader.add_option(
            "intercept", Optional[str], None, "Intercept filter expression."
        )

    def configure(self, updated):
        """(Re)compile the filter and toggle interception when the option changes."""
        if "intercept" in updated:
            if ctx.options.intercept:
                try:
                    self.filt = flowfilter.parse(ctx.options.intercept)
                except ValueError as e:
                    raise exceptions.OptionsError(str(e)) from e
                ctx.options.intercept_active = True
            else:
                self.filt = None
                ctx.options.intercept_active = False

    def should_intercept(self, f: flow.Flow) -> bool:
        """True when interception is on, the flow matches, and it is not a replay."""
        return bool(
            ctx.options.intercept_active
            and self.filt
            and self.filt(f)
            and not f.is_replay
        )

    def process_flow(self, f: flow.Flow) -> None:
        if self.should_intercept(f):
            f.intercept()

    # Handlers: every flow-producing event funnels into process_flow.

    def request(self, f):
        self.process_flow(f)

    def response(self, f):
        self.process_flow(f)

    def tcp_message(self, f):
        self.process_flow(f)

    def udp_message(self, f):
        self.process_flow(f)

    def dns_request(self, f):
        self.process_flow(f)

    def dns_response(self, f):
        self.process_flow(f)

    def websocket_message(self, f):
        self.process_flow(f)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/addons/save.py | mitmproxy/addons/save.py | import logging
import os.path
import sys
from collections.abc import Sequence
from datetime import datetime
from functools import lru_cache
from pathlib import Path
from typing import Literal
from typing import Optional
import mitmproxy.types
from mitmproxy import command
from mitmproxy import ctx
from mitmproxy import dns
from mitmproxy import exceptions
from mitmproxy import flow
from mitmproxy import flowfilter
from mitmproxy import http
from mitmproxy import io
from mitmproxy import tcp
from mitmproxy import udp
from mitmproxy.log import ALERT
@lru_cache
def _path(path: str) -> str:
"""Extract the path from a path spec (which may have an extra "+" at the front)"""
if path.startswith("+"):
path = path[1:]
return os.path.expanduser(path)
@lru_cache
def _mode(path: str) -> Literal["ab", "wb"]:
"""Extract the writing mode (overwrite or append) from a path spec"""
if path.startswith("+"):
return "ab"
else:
return "wb"
class Save:
    """Stream flows to a (possibly strftime-templated, rotating) dump file."""

    def __init__(self) -> None:
        # Active writer, or None when streaming is off.
        self.stream: io.FilteredFlowWriter | None = None
        # Optional filter restricting which flows are written.
        self.filt: flowfilter.TFilter | None = None
        # Flows that have started but not yet finished; flushed on shutdown.
        self.active_flows: set[flow.Flow] = set()
        # The strftime-expanded path currently being written to.
        self.current_path: str | None = None

    def load(self, loader):
        loader.add_option(
            "save_stream_file",
            Optional[str],
            None,
            """
            Stream flows to file as they arrive. Prefix path with + to append.
            The full path can use python strftime() formating, missing
            directories are created as needed. A new file is opened every time
            the formatted string changes.
            """,
        )
        loader.add_option(
            "save_stream_filter",
            Optional[str],
            None,
            "Filter which flows are written to file.",
        )

    def configure(self, updated):
        """React to option changes: recompile the filter and (re)open the stream."""
        if "save_stream_filter" in updated:
            if ctx.options.save_stream_filter:
                try:
                    self.filt = flowfilter.parse(ctx.options.save_stream_filter)
                except ValueError as e:
                    raise exceptions.OptionsError(str(e)) from e
            else:
                self.filt = None
        if "save_stream_file" in updated or "save_stream_filter" in updated:
            if ctx.options.save_stream_file:
                try:
                    self.maybe_rotate_to_new_file()
                except OSError as e:
                    raise exceptions.OptionsError(str(e)) from e
                assert self.stream
                self.stream.flt = self.filt
            else:
                # Streaming turned off: flush pending flows and close the file.
                self.done()

    def maybe_rotate_to_new_file(self) -> None:
        """Open a new output file if the strftime-expanded path has changed."""
        path = datetime.today().strftime(_path(ctx.options.save_stream_file))
        if self.current_path == path:
            return

        if self.stream:
            self.stream.fo.close()
            self.stream = None

        new_log_file = Path(path)
        new_log_file.parent.mkdir(parents=True, exist_ok=True)
        f = new_log_file.open(_mode(ctx.options.save_stream_file))
        self.stream = io.FilteredFlowWriter(f, self.filt)
        self.current_path = path

    def save_flow(self, flow: flow.Flow) -> None:
        """
        Write the flow to the stream, but first check if we need to rotate to a new file.
        """
        if not self.stream:
            return
        try:
            self.maybe_rotate_to_new_file()
            self.stream.add(flow)
        except OSError as e:
            # If we somehow fail to write flows to a logfile, we really want to crash visibly
            # instead of letting traffic through unrecorded.
            # No normal logging here, that would not be triggered anymore.
            sys.stderr.write(f"Error while writing to {self.current_path}: {e}")
            sys.exit(1)
        else:
            self.active_flows.discard(flow)

    def done(self) -> None:
        """Flush all unfinished flows and close the stream."""
        if self.stream:
            for f in self.active_flows:
                self.stream.add(f)
            self.active_flows.clear()
            self.current_path = None
            self.stream.fo.close()
            self.stream = None

    @command.command("save.file")
    def save(self, flows: Sequence[flow.Flow], path: mitmproxy.types.Path) -> None:
        """
        Save flows to a file. If the path starts with a +, flows are
        appended to the file, otherwise it is over-written.
        """
        try:
            with open(_path(path), _mode(path)) as f:
                stream = io.FlowWriter(f)
                for i in flows:
                    stream.add(i)
        except OSError as e:
            raise exceptions.CommandError(e) from e
        if path.endswith(".har") or path.endswith(".zhar"):  # pragma: no cover
            logging.log(
                ALERT,
                f"Saved as mitmproxy dump file. To save HAR files, use the `save.har` command.",
            )
        else:
            logging.log(ALERT, f"Saved {len(flows)} flows.")

    # Event hooks: track flow starts, persist flows when they complete.

    def tcp_start(self, flow: tcp.TCPFlow):
        if self.stream:
            self.active_flows.add(flow)

    def tcp_end(self, flow: tcp.TCPFlow):
        self.save_flow(flow)

    def tcp_error(self, flow: tcp.TCPFlow):
        self.tcp_end(flow)

    def udp_start(self, flow: udp.UDPFlow):
        if self.stream:
            self.active_flows.add(flow)

    def udp_end(self, flow: udp.UDPFlow):
        self.save_flow(flow)

    def udp_error(self, flow: udp.UDPFlow):
        self.udp_end(flow)

    def websocket_end(self, flow: http.HTTPFlow):
        self.save_flow(flow)

    def request(self, flow: http.HTTPFlow):
        if self.stream:
            self.active_flows.add(flow)

    def response(self, flow: http.HTTPFlow):
        # websocket flows will receive a websocket_end,
        # we don't want to persist them here already
        if flow.websocket is None:
            self.save_flow(flow)

    def error(self, flow: http.HTTPFlow):
        self.response(flow)

    def dns_request(self, flow: dns.DNSFlow):
        if self.stream:
            self.active_flows.add(flow)

    def dns_response(self, flow: dns.DNSFlow):
        self.save_flow(flow)

    def dns_error(self, flow: dns.DNSFlow):
        self.save_flow(flow)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/addons/asgiapp.py | mitmproxy/addons/asgiapp.py | import asyncio
import logging
import urllib.parse
import asgiref.compatibility
import asgiref.wsgi
from mitmproxy import ctx
from mitmproxy import http
logger = logging.getLogger(__name__)
class ASGIApp:
    """
    An addon that hosts an ASGI/WSGI HTTP app within mitmproxy, at a specified hostname and port.

    Some important caveats:
        - This implementation will block and wait until the entire HTTP response is completed before sending out data.
        - It currently only implements the HTTP protocol (Lifespan and WebSocket are unimplemented).
    """

    def __init__(self, asgi_app, host: str, port: int | None):
        # Normalize ASGI 2/3 style callables to a single-callable interface.
        asgi_app = asgiref.compatibility.guarantee_single_callable(asgi_app)
        self.asgi_app, self.host, self.port = asgi_app, host, port

    @property
    def name(self) -> str:
        return f"asgiapp:{self.host}:{self.port}"

    def should_serve(self, flow: http.HTTPFlow) -> bool:
        """True for live, unanswered requests addressed to our host/port."""
        return bool(
            flow.request.pretty_host == self.host
            and (self.port is None or flow.request.port == self.port)
            and flow.live
            and not flow.error
            and not flow.response
        )

    async def request(self, flow: http.HTTPFlow) -> None:
        if self.should_serve(flow):
            await serve(self.asgi_app, flow)
class WSGIApp(ASGIApp):
    """ASGIApp variant that wraps a WSGI app via asgiref's WSGI-to-ASGI adapter."""

    def __init__(self, wsgi_app, host: str, port: int | None):
        asgi_app = asgiref.wsgi.WsgiToAsgi(wsgi_app)
        super().__init__(asgi_app, host, port)
# Map mitmproxy's HTTP version strings to the ASGI "http_version" values.
HTTP_VERSION_MAP = {
    "HTTP/1.0": "1.0",
    "HTTP/1.1": "1.1",
    "HTTP/2.0": "2",
}
def make_scope(flow: http.HTTPFlow) -> dict:
    """Build an ASGI HTTP connection scope dict from an HTTP flow."""
    # %3F is a quoted question mark
    quoted_path = urllib.parse.quote_from_bytes(flow.request.data.path).split(
        "%3F", maxsplit=1
    )

    # (Unicode string) – HTTP request target excluding any query string, with percent-encoded
    # sequences and UTF-8 byte sequences decoded into characters.
    path = quoted_path[0]

    # (byte string) – URL portion after the ?, percent-encoded.
    query_string: bytes
    if len(quoted_path) > 1:
        query_string = urllib.parse.unquote(quoted_path[1]).encode()
    else:
        query_string = b""

    return {
        "type": "http",
        "asgi": {
            "version": "3.0",
            "spec_version": "2.1",
        },
        "http_version": HTTP_VERSION_MAP.get(flow.request.http_version, "1.1"),
        "method": flow.request.method,
        "scheme": flow.request.scheme.upper(),
        "path": path,
        "raw_path": flow.request.path,
        "query_string": query_string,
        "headers": [
            (name.lower(), value) for (name, value) in flow.request.headers.fields
        ],
        "client": flow.client_conn.peername,
        "extensions": {
            # Allow hosted apps to reach back into mitmproxy.
            "mitmproxy.master": ctx.master,
        },
    }
async def serve(app, flow: http.HTTPFlow):
    """
    Serves app on flow.

    Runs the ASGI application to completion, collecting its response events
    into flow.response. On any app error, a 500 response is set instead.
    """
    scope = make_scope(flow)
    done = asyncio.Event()
    received_body = False
    sent_response = False

    async def receive():
        # First call delivers the whole request body in one event; any further
        # call waits for completion and then reports a disconnect.
        nonlocal received_body
        if not received_body:
            received_body = True
            return {
                "type": "http.request",
                "body": flow.request.raw_content,
            }
        else:  # pragma: no cover
            # We really don't expect this to be called a second time, but what to do?
            # We just wait until the request is done before we continue here with sending a disconnect.
            await done.wait()
            return {"type": "http.disconnect"}

    async def send(event):
        if event["type"] == "http.response.start":
            flow.response = http.Response.make(
                event["status"], b"", event.get("headers", [])
            )
            flow.response.decode()
        elif event["type"] == "http.response.body":
            assert flow.response
            flow.response.content += event.get("body", b"")
            if not event.get("more_body", False):
                nonlocal sent_response
                sent_response = True
        else:
            raise AssertionError(f"Unexpected event: {event['type']}")

    try:
        await app(scope, receive, send)
        if not sent_response:
            raise RuntimeError(f"no response sent.")
    except Exception as e:
        logger.exception(f"Error in asgi app: {e}")
        flow.response = http.Response.make(500, b"ASGI Error.")
    finally:
        # Unblock any receive() call still waiting for a disconnect.
        done.set()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/addons/eventstore.py | mitmproxy/addons/eventstore.py | import asyncio
import collections
import logging
from collections.abc import Callable
from mitmproxy import command
from mitmproxy import log
from mitmproxy.log import LogEntry
from mitmproxy.utils import signals
class EventStore:
    """Bounded in-memory store of log entries, with add/refresh signals for UIs."""

    def __init__(self, size: int = 10000) -> None:
        # Oldest entries are dropped automatically once maxlen is reached.
        self.data: collections.deque[LogEntry] = collections.deque(maxlen=size)
        self.sig_add = signals.SyncSignal(lambda entry: None)
        self.sig_refresh = signals.SyncSignal(lambda: None)
        self.logger = CallbackLogger(self._add_log)
        self.logger.install()

    def done(self):
        self.logger.uninstall()

    def _add_log(self, entry: LogEntry) -> None:
        self.data.append(entry)
        self.sig_add.send(entry)

    @property
    def size(self) -> int | None:
        """Maximum number of stored entries (the deque's maxlen)."""
        return self.data.maxlen

    @command.command("eventstore.clear")
    def clear(self) -> None:
        """
        Clear the event log.
        """
        self.data.clear()
        self.sig_refresh.send()
class CallbackLogger(log.MitmLogHandler):
    """Log handler that forwards formatted records to a callback on the event loop."""

    def __init__(
        self,
        callback: Callable[[LogEntry], None],
    ):
        super().__init__()
        self.callback = callback
        # Captured at construction time so emit() can be called from any thread.
        self.event_loop = asyncio.get_running_loop()
        self.formatter = log.MitmFormatter(colorize=False)

    def emit(self, record: logging.LogRecord) -> None:
        entry = LogEntry(
            msg=self.format(record),
            level=log.LOGGING_LEVELS_TO_LOGENTRY.get(record.levelno, "error"),
        )
        # Hand off to the event loop thread; emit() may run on worker threads.
        self.event_loop.call_soon_threadsafe(self.callback, entry)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/addons/script.py | mitmproxy/addons/script.py | import asyncio
import importlib.machinery
import importlib.util
import logging
import os
import sys
import types
from collections.abc import Sequence
import mitmproxy.types as mtypes
from mitmproxy import addonmanager
from mitmproxy import command
from mitmproxy import ctx
from mitmproxy import eventsequence
from mitmproxy import exceptions
from mitmproxy import flow
from mitmproxy import hooks
from mitmproxy.utils import asyncio_utils
logger = logging.getLogger(__name__)
def load_script(path: str) -> types.ModuleType | None:
fullname = "__mitmproxy_script__.{}".format(
os.path.splitext(os.path.basename(path))[0]
)
# the fullname is not unique among scripts, so if there already is an existing script with said
# fullname, remove it.
sys.modules.pop(fullname, None)
oldpath = sys.path
sys.path.insert(0, os.path.dirname(path))
try:
loader = importlib.machinery.SourceFileLoader(fullname, path)
spec = importlib.util.spec_from_loader(fullname, loader=loader)
assert spec
m = importlib.util.module_from_spec(spec)
loader.exec_module(m)
if not getattr(m, "name", None):
m.name = path # type: ignore
return m
except ImportError as e:
if getattr(sys, "frozen", False):
e.msg += (
f".\n"
f"Note that mitmproxy's binaries include their own Python environment. "
f"If your addon requires the installation of additional dependencies, "
f"please install mitmproxy from PyPI "
f"(https://docs.mitmproxy.org/stable/overview-installation/#installation-from-the-python-package-index-pypi)."
)
script_error_handler(path, e)
return None
except Exception as e:
script_error_handler(path, e)
return None
finally:
sys.path[:] = oldpath
def script_error_handler(path: str, exc: Exception) -> None:
    """
    Log errors during script loading.
    """
    # Trim framework frames so the traceback points at the user's script.
    tback = exc.__traceback__
    tback = addonmanager.cut_traceback(
        tback, "invoke_addon_sync"
    )  # we're calling configure() on load
    tback = addonmanager.cut_traceback(
        tback, "_call_with_frames_removed"
    )  # module execution from importlib
    logger.error(f"error in script {path}", exc_info=(type(exc), exc, tback))
# Seconds between checks of a watched script file's modification time.
ReloadInterval = 1
class Script:
    """
    An addon that manages a single script.
    """

    def __init__(self, path: str, reload: bool) -> None:
        self.name = "scriptmanager:" + path
        self.path = path
        # Strip stray quotes/whitespace that shells sometimes leave around paths.
        self.fullpath = os.path.expanduser(path.strip("'\" "))
        # The loaded script module, or None until (re)loaded.
        self.ns: types.ModuleType | None = None
        self.is_running = False

        if not os.path.isfile(self.fullpath):
            raise exceptions.OptionsError(f"No such script: {self.fullpath}")

        self.reloadtask = None
        if reload:
            # The watcher performs the initial load as well (mtime > 0.0).
            self.reloadtask = asyncio_utils.create_task(
                self.watcher(),
                name=f"script watcher for {path}",
                keep_ref=False,
            )
        else:
            self.loadscript()

    def running(self):
        self.is_running = True

    def done(self):
        if self.reloadtask:
            self.reloadtask.cancel()

    @property
    def addons(self):
        return [self.ns] if self.ns else []

    def loadscript(self):
        """(Re)load the script module and register it with the addon manager."""
        logger.info("Loading script %s" % self.path)
        if self.ns:
            ctx.master.addons.remove(self.ns)
        self.ns = None
        with addonmanager.safecall():
            ns = load_script(self.fullpath)
            ctx.master.addons.register(ns)
            self.ns = ns
        if self.ns:
            try:
                ctx.master.addons.invoke_addon_sync(
                    self.ns, hooks.ConfigureHook(ctx.options.keys())
                )
            except Exception as e:
                script_error_handler(self.fullpath, e)
            if self.is_running:
                # We're already running, so we call that on the addon now.
                ctx.master.addons.invoke_addon_sync(self.ns, hooks.RunningHook())

    async def watcher(self):
        """Poll the script's mtime, reloading on change and unloading on deletion."""
        # Script loading is terminally confused at the moment.
        # This here is a stopgap workaround to defer loading.
        await asyncio.sleep(0)
        last_mtime = 0.0
        while True:
            try:
                mtime = os.stat(self.fullpath).st_mtime
            except FileNotFoundError:
                # Script was deleted: drop it from the scripts option, which
                # triggers our removal via ScriptLoader.configure.
                logger.info("Removing script %s" % self.path)
                scripts = list(ctx.options.scripts)
                scripts.remove(self.path)
                ctx.options.update(scripts=scripts)
                return
            if mtime > last_mtime:
                self.loadscript()
                last_mtime = mtime
            await asyncio.sleep(ReloadInterval)
class ScriptLoader:
    """
    An addon that manages loading scripts from options.
    """

    def __init__(self):
        self.is_running = False
        # One Script instance per entry in the "scripts" option, in order.
        self.addons = []

    def load(self, loader):
        loader.add_option("scripts", Sequence[str], [], "Execute a script.")

    def running(self):
        self.is_running = True

    @command.command("script.run")
    def script_run(self, flows: Sequence[flow.Flow], path: mtypes.Path) -> None:
        """
        Run a script on the specified flows. The script is configured with
        the current options and all lifecycle events for each flow are
        simulated. Note that the load event is not invoked.
        """
        if not os.path.isfile(path):
            logger.error("No such script: %s" % path)
            return
        mod = load_script(path)
        if mod:
            with addonmanager.safecall():
                ctx.master.addons.invoke_addon_sync(
                    mod,
                    hooks.ConfigureHook(ctx.options.keys()),
                )
                ctx.master.addons.invoke_addon_sync(mod, hooks.RunningHook())
                for f in flows:
                    for evt in eventsequence.iterate(f):
                        ctx.master.addons.invoke_addon_sync(mod, evt)

    def configure(self, updated):
        """Sync the set of Script addons with the "scripts" option."""
        if "scripts" in updated:
            for s in ctx.options.scripts:
                if ctx.options.scripts.count(s) > 1:
                    raise exceptions.OptionsError("Duplicate script")

            # Unload scripts that were removed from the option.
            for a in self.addons[:]:
                if a.path not in ctx.options.scripts:
                    logger.info("Un-loading script: %s" % a.path)
                    ctx.master.addons.remove(a)
                    self.addons.remove(a)

            # The machinations below are to ensure that:
            #   - Scripts remain in the same order
            #   - Scripts are not initialized un-necessarily. If only a
            #   script's order in the script list has changed, it is just
            #   moved.

            current = {}
            for a in self.addons:
                current[a.path] = a

            ordered = []
            newscripts = []
            for s in ctx.options.scripts:
                if s in current:
                    ordered.append(current[s])
                else:
                    sc = Script(s, True)
                    ordered.append(sc)
                    newscripts.append(sc)

            self.addons = ordered

            for s in newscripts:
                ctx.master.addons.register(s)
                if self.is_running:
                    # If we're already running, we configure and tell the addon
                    # we're up and running.
                    ctx.master.addons.invoke_addon_sync(s, hooks.RunningHook())
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/addons/onboardingapp/__init__.py | mitmproxy/addons/onboardingapp/__init__.py | import os
from flask import Flask
from flask import render_template
from mitmproxy.options import CONF_BASENAME
from mitmproxy.options import CONF_DIR
from mitmproxy.utils.magisk import write_magisk_module
app = Flask(__name__)
# will be overridden in the addon, setting this here so that the Flask app can be run standalone.
app.config["CONFDIR"] = CONF_DIR
@app.route("/")
def index():
    """Serve the onboarding landing page."""
    return render_template("index.html")


@app.route("/cert/pem")
def pem():
    """Serve the CA certificate in PEM format."""
    return read_cert("pem", "application/x-x509-ca-cert")


@app.route("/cert/p12")
def p12():
    """Serve the CA certificate in PKCS#12 format."""
    return read_cert("p12", "application/x-pkcs12")


@app.route("/cert/cer")
def cer():
    """Serve the CA certificate in CER format."""
    return read_cert("cer", "application/x-x509-ca-cert")
@app.route("/cert/magisk")
def magisk():
    """Serve the CA cert packaged as a Magisk module zip, generating it on first request."""
    filename = CONF_BASENAME + f"-magisk-module.zip"
    p = os.path.join(app.config["CONFDIR"], filename)
    p = os.path.expanduser(p)

    if not os.path.exists(p):
        write_magisk_module(p)

    with open(p, "rb") as f:
        cert = f.read()

    return cert, {
        "Content-Type": "application/zip",
        # {filename=!s} renders as 'filename=<value>' (self-documenting f-string).
        "Content-Disposition": f"attachment; {filename=!s}",
    }
def read_cert(ext, content_type):
    """Read the CA cert file with the given extension and return (body, headers)."""
    filename = CONF_BASENAME + f"-ca-cert.{ext}"
    p = os.path.join(app.config["CONFDIR"], filename)
    p = os.path.expanduser(p)
    with open(p, "rb") as f:
        cert = f.read()
    return cert, {
        "Content-Type": content_type,
        # {filename=!s} renders as 'filename=<value>' (self-documenting f-string).
        "Content-Disposition": f"attachment; {filename=!s}",
    }
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/test/tflow.py | mitmproxy/test/tflow.py | import time
import uuid
from wsproto.frame_protocol import Opcode
from mitmproxy import connection
from mitmproxy import dns
from mitmproxy import flow
from mitmproxy import http
from mitmproxy import tcp
from mitmproxy import udp
from mitmproxy import websocket
from mitmproxy.connection import ConnectionState
from mitmproxy.proxy.mode_specs import ProxyMode
from mitmproxy.test.tutils import tdnsreq
from mitmproxy.test.tutils import tdnsresp
from mitmproxy.test.tutils import treq
from mitmproxy.test.tutils import tresp
def ttcpflow(
client_conn=True, server_conn=True, messages=True, err=None
) -> tcp.TCPFlow:
if client_conn is True:
client_conn = tclient_conn()
if server_conn is True:
server_conn = tserver_conn()
if messages is True:
messages = [
tcp.TCPMessage(True, b"hello", 946681204.2),
tcp.TCPMessage(False, b"it's me", 946681204.5),
]
if err is True:
err = terr()
f = tcp.TCPFlow(client_conn, server_conn)
f.timestamp_created = client_conn.timestamp_start
f.messages = messages
f.error = err
f.live = True
return f
def tudpflow(
client_conn=True, server_conn=True, messages=True, err=None
) -> udp.UDPFlow:
if client_conn is True:
client_conn = tclient_conn()
if server_conn is True:
server_conn = tserver_conn()
if messages is True:
messages = [
udp.UDPMessage(True, b"hello", 946681204.2),
udp.UDPMessage(False, b"it's me", 946681204.5),
]
if err is True:
err = terr()
f = udp.UDPFlow(client_conn, server_conn)
f.timestamp_created = client_conn.timestamp_start
f.messages = messages
f.error = err
f.live = True
return f
def twebsocketflow(
messages=True, err=None, close_code=None, close_reason=""
) -> http.HTTPFlow:
flow = http.HTTPFlow(tclient_conn(), tserver_conn())
flow.request = http.Request(
"example.com",
80,
b"GET",
b"http",
b"example.com",
b"/ws",
b"HTTP/1.1",
headers=http.Headers(
connection="upgrade",
upgrade="websocket",
sec_websocket_version="13",
sec_websocket_key="1234",
),
content=b"",
trailers=None,
timestamp_start=946681200,
timestamp_end=946681201,
)
flow.response = http.Response(
b"HTTP/1.1",
101,
reason=b"Switching Protocols",
headers=http.Headers(
connection="upgrade",
upgrade="websocket",
sec_websocket_accept=b"",
),
content=b"",
trailers=None,
timestamp_start=946681202,
timestamp_end=946681203,
)
flow.websocket = twebsocket()
flow.websocket.close_reason = close_reason
if close_code is not None:
flow.websocket.close_code = close_code
else:
if err is True:
# ABNORMAL_CLOSURE
flow.websocket.close_code = 1006
else:
# NORMAL_CLOSURE
flow.websocket.close_code = 1000
flow.live = True
return flow
def tdnsflow(
*,
client_conn: connection.Client | None = None,
server_conn: connection.Server | None = None,
req: dns.DNSMessage | None = None,
resp: bool | dns.DNSMessage = False,
err: bool | flow.Error = False,
live: bool = True,
) -> dns.DNSFlow:
"""Create a DNS flow for testing."""
if client_conn is None:
client_conn = tclient_conn()
client_conn.proxy_mode = ProxyMode.parse("dns")
client_conn.transport_protocol = "udp"
if server_conn is None:
server_conn = tserver_conn()
server_conn.transport_protocol = "udp"
if req is None:
req = tdnsreq()
if resp is True:
resp = tdnsresp()
if err is True:
err = terr()
assert resp is False or isinstance(resp, dns.DNSMessage)
assert err is False or isinstance(err, flow.Error)
f = dns.DNSFlow(client_conn, server_conn)
f.timestamp_created = req.timestamp or time.time()
f.request = req
f.response = resp or None
f.error = err or None
f.live = live
return f
def tflow(
*,
client_conn: connection.Client | None = None,
server_conn: connection.Server | None = None,
req: http.Request | None = None,
resp: bool | http.Response = False,
err: bool | flow.Error = False,
ws: bool | websocket.WebSocketData = False,
live: bool = True,
) -> http.HTTPFlow:
"""Create a flow for testing."""
if client_conn is None:
client_conn = tclient_conn()
if server_conn is None:
server_conn = tserver_conn()
if req is None:
req = treq()
if resp is True:
resp = tresp()
if err is True:
err = terr()
if ws is True:
ws = twebsocket()
assert resp is False or isinstance(resp, http.Response)
assert err is False or isinstance(err, flow.Error)
assert ws is False or isinstance(ws, websocket.WebSocketData)
f = http.HTTPFlow(client_conn, server_conn)
f.timestamp_created = req.timestamp_start
f.request = req
f.response = resp or None
f.error = err or None
f.websocket = ws or None
f.live = live
return f
class DummyFlow(flow.Flow):
"""A flow that is neither HTTP nor TCP."""
def tdummyflow(client_conn=True, server_conn=True, err=None) -> DummyFlow:
if client_conn is True:
client_conn = tclient_conn()
if server_conn is True:
server_conn = tserver_conn()
if err is True:
err = terr()
f = DummyFlow(client_conn, server_conn)
f.error = err
f.live = True
return f
def tclient_conn() -> connection.Client:
c = connection.Client(
id=str(uuid.uuid4()),
peername=("127.0.0.1", 22),
sockname=("", 0),
mitmcert=None,
timestamp_start=946681200,
timestamp_tls_setup=946681201,
timestamp_end=946681206,
sni="address",
cipher="cipher",
alpn=b"http/1.1",
tls_version="TLSv1.2",
state=ConnectionState.OPEN,
error=None,
tls=False,
certificate_list=[],
alpn_offers=[],
cipher_list=[],
proxy_mode=ProxyMode.parse("regular"),
)
return c
def tserver_conn() -> connection.Server:
c = connection.Server(
id=str(uuid.uuid4()),
address=("address", 22),
peername=("192.168.0.1", 22),
sockname=("address", 22),
timestamp_start=946681202,
timestamp_tcp_setup=946681203,
timestamp_tls_setup=946681204,
timestamp_end=946681205,
sni="address",
alpn=None,
tls_version="TLSv1.2",
via=None,
state=ConnectionState.CLOSED,
error=None,
tls=False,
certificate_list=[],
alpn_offers=[],
cipher=None,
cipher_list=[],
)
return c
def terr(content: str = "error") -> flow.Error:
err = flow.Error(content, 946681207)
return err
def twebsocket(messages: bool = True) -> websocket.WebSocketData:
ws = websocket.WebSocketData()
if messages:
ws.messages = [
websocket.WebSocketMessage(Opcode.BINARY, True, b"hello binary", 946681203),
websocket.WebSocketMessage(Opcode.TEXT, True, b"hello text", 946681204),
websocket.WebSocketMessage(Opcode.TEXT, False, b"it's me", 946681205),
]
ws.close_reason = "Close Reason"
ws.close_code = 1000
ws.closed_by_client = False
ws.timestamp_end = 946681205
return ws
def tflows() -> list[flow.Flow]:
return [
tflow(resp=True),
tflow(err=True),
tflow(ws=True),
ttcpflow(),
ttcpflow(err=True),
tudpflow(),
tudpflow(err=True),
tdnsflow(resp=True),
tdnsflow(req=tdnsreq(questions=[])),
tdnsflow(err=True),
]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/test/taddons.py | mitmproxy/test/taddons.py | import asyncio
import mitmproxy.master
import mitmproxy.options
from mitmproxy import command
from mitmproxy import eventsequence
from mitmproxy import hooks
from mitmproxy.addons import core
from mitmproxy.addons import script
class context:
"""
A context for testing addons, which sets up the mitmproxy.ctx module so
handlers can run as they would within mitmproxy. The context also
provides a number of helper methods for common testing scenarios.
"""
def __init__(self, *addons, options=None, loadcore=True):
self.owns_loop = False
try:
loop = asyncio.get_running_loop()
except RuntimeError:
self.owns_loop = True
loop = asyncio.new_event_loop()
options = options or mitmproxy.options.Options()
self.master = mitmproxy.master.Master(options, event_loop=loop)
self.options = self.master.options
if loadcore:
self.master.addons.add(core.Core())
for a in addons:
self.master.addons.add(a)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
if self.owns_loop and not self.master.event_loop.is_closed():
# Close the loop if we created it
self.master.event_loop.close()
return False
async def cycle(self, addon, f):
"""
Cycles the flow through the events for the flow. Stops if the flow
is intercepted.
"""
for evt in eventsequence.iterate(f):
await self.master.addons.invoke_addon(addon, evt)
if f.intercepted:
return
def configure(self, addon, **kwargs):
"""
A helper for testing configure methods. Modifies the registered
Options object with the given keyword arguments, then calls the
configure method on the addon with the updated value.
"""
if addon not in self.master.addons:
self.master.addons.register(addon)
with self.options.rollback(kwargs.keys(), reraise=True):
if kwargs:
self.options.update(**kwargs)
else:
self.master.addons.invoke_addon_sync(addon, hooks.ConfigureHook(set()))
def script(self, path):
"""
Loads a script from path, and returns the enclosed addon.
"""
sc = script.Script(path, False)
return sc.addons[0] if sc.addons else None
def command(self, func, *args):
"""
Invoke a command function with a list of string arguments within a command context, mimicking the actual command environment.
"""
cmd = command.Command(self.master.commands, "test.command", func)
return cmd.call(args)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/test/tutils.py | mitmproxy/test/tutils.py | from mitmproxy import dns
from mitmproxy import http
def tdnsreq(**kwargs) -> dns.DNSMessage:
default = dict(
timestamp=946681200,
id=42,
query=True,
op_code=dns.op_codes.QUERY,
authoritative_answer=False,
truncation=False,
recursion_desired=True,
recursion_available=False,
reserved=0,
response_code=dns.response_codes.NOERROR,
questions=[dns.Question("dns.google", dns.types.A, dns.classes.IN)],
answers=[],
authorities=[],
additionals=[],
)
default.update(kwargs)
return dns.DNSMessage(**default) # type: ignore
def tdnsresp(**kwargs) -> dns.DNSMessage:
default = dict(
timestamp=946681201,
id=42,
query=False,
op_code=dns.op_codes.QUERY,
authoritative_answer=False,
truncation=False,
recursion_desired=True,
recursion_available=True,
reserved=0,
response_code=dns.response_codes.NOERROR,
questions=[dns.Question("dns.google", dns.types.A, dns.classes.IN)],
answers=[
dns.ResourceRecord(
"dns.google", dns.types.A, dns.classes.IN, 32, b"\x08\x08\x08\x08"
),
dns.ResourceRecord(
"dns.google", dns.types.A, dns.classes.IN, 32, b"\x08\x08\x04\x04"
),
],
authorities=[],
additionals=[],
)
default.update(kwargs)
return dns.DNSMessage(**default) # type: ignore
def treq(**kwargs) -> http.Request:
"""
Returns:
mitmproxy.net.http.Request
"""
default = dict(
host="address",
port=22,
method=b"GET",
scheme=b"http",
authority=b"",
path=b"/path",
http_version=b"HTTP/1.1",
headers=http.Headers(((b"header", b"qvalue"), (b"content-length", b"7"))),
content=b"content",
trailers=None,
timestamp_start=946681200,
timestamp_end=946681201,
)
default.update(kwargs)
return http.Request(**default) # type: ignore
def tresp(**kwargs) -> http.Response:
"""
Returns:
mitmproxy.net.http.Response
"""
default = dict(
http_version=b"HTTP/1.1",
status_code=200,
reason=b"OK",
headers=http.Headers(
((b"header-response", b"svalue"), (b"content-length", b"7"))
),
content=b"message",
trailers=None,
timestamp_start=946681202,
timestamp_end=946681203,
)
default.update(kwargs)
return http.Response(**default) # type: ignore
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/tls.py | mitmproxy/net/tls.py | import os
import threading
import typing
from collections.abc import Callable
from collections.abc import Iterable
from enum import Enum
from functools import cache
from functools import lru_cache
from pathlib import Path
from typing import Any
from typing import BinaryIO
import certifi
import OpenSSL
from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurve
from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurveOID
from cryptography.hazmat.primitives.asymmetric.ec import get_curve_for_oid
from cryptography.x509 import ObjectIdentifier
from OpenSSL import SSL
from mitmproxy import certs
# Remove once pyOpenSSL 23.3.0 is released and bump version in pyproject.toml.
try: # pragma: no cover
from OpenSSL.SSL import OP_LEGACY_SERVER_CONNECT # type: ignore
except ImportError:
OP_LEGACY_SERVER_CONNECT = 0x4
# redeclared here for strict type checking
class Method(Enum):
TLS_SERVER_METHOD = SSL.TLS_SERVER_METHOD
TLS_CLIENT_METHOD = SSL.TLS_CLIENT_METHOD
# Type-pyopenssl does not know about these DTLS constants.
DTLS_SERVER_METHOD = SSL.DTLS_SERVER_METHOD # type: ignore
DTLS_CLIENT_METHOD = SSL.DTLS_CLIENT_METHOD # type: ignore
try:
SSL._lib.TLS_server_method # type: ignore
except AttributeError as e: # pragma: no cover
raise RuntimeError(
"Your installation of the cryptography Python package is outdated."
) from e
class Version(Enum):
UNBOUNDED = 0
SSL3 = SSL.SSL3_VERSION
TLS1 = SSL.TLS1_VERSION
TLS1_1 = SSL.TLS1_1_VERSION
TLS1_2 = SSL.TLS1_2_VERSION
TLS1_3 = SSL.TLS1_3_VERSION
INSECURE_TLS_MIN_VERSIONS: tuple[Version, ...] = (
Version.UNBOUNDED,
Version.SSL3,
Version.TLS1,
Version.TLS1_1,
)
class Verify(Enum):
VERIFY_NONE = SSL.VERIFY_NONE
VERIFY_PEER = SSL.VERIFY_PEER
DEFAULT_MIN_VERSION = Version.TLS1_2
DEFAULT_MAX_VERSION = Version.UNBOUNDED
DEFAULT_OPTIONS = SSL.OP_CIPHER_SERVER_PREFERENCE | SSL.OP_NO_COMPRESSION
@cache
def is_supported_version(version: Version):
client_ctx = SSL.Context(SSL.TLS_CLIENT_METHOD)
# Without SECLEVEL, recent OpenSSL versions forbid old TLS versions.
# https://github.com/pyca/cryptography/issues/9523
client_ctx.set_cipher_list(b"@SECLEVEL=0:ALL")
client_ctx.set_min_proto_version(version.value)
client_ctx.set_max_proto_version(version.value)
client_conn = SSL.Connection(client_ctx)
client_conn.set_connect_state()
try:
client_conn.recv(4096)
except SSL.WantReadError:
return True
except SSL.Error:
return False
EC_CURVES: dict[str, EllipticCurve] = {}
for oid in EllipticCurveOID.__dict__.values():
if isinstance(oid, ObjectIdentifier):
curve = get_curve_for_oid(oid)()
EC_CURVES[curve.name] = curve
@typing.overload
def get_curve(name: str) -> EllipticCurve: ...
@typing.overload
def get_curve(name: None) -> None: ...
def get_curve(name: str | None) -> EllipticCurve | None:
if name is None:
return None
return EC_CURVES[name]
class MasterSecretLogger:
def __init__(self, filename: Path):
self.filename = filename.expanduser()
self.f: BinaryIO | None = None
self.lock = threading.Lock()
# required for functools.wraps, which pyOpenSSL uses.
__name__ = "MasterSecretLogger"
def __call__(self, connection: SSL.Connection, keymaterial: bytes) -> None:
with self.lock:
if self.f is None:
self.filename.parent.mkdir(parents=True, exist_ok=True)
self.f = self.filename.open("ab")
self.f.write(b"\n")
self.f.write(keymaterial + b"\n")
self.f.flush()
def close(self):
with self.lock:
if self.f is not None:
self.f.close()
def make_master_secret_logger(filename: str | None) -> MasterSecretLogger | None:
if filename:
return MasterSecretLogger(Path(filename))
return None
log_master_secret = make_master_secret_logger(
os.getenv("MITMPROXY_SSLKEYLOGFILE") or os.getenv("SSLKEYLOGFILE")
)
def _create_ssl_context(
*,
method: Method,
min_version: Version,
max_version: Version,
cipher_list: Iterable[str] | None,
ecdh_curve: EllipticCurve | None,
) -> SSL.Context:
context = SSL.Context(method.value)
ok = SSL._lib.SSL_CTX_set_min_proto_version(context._context, min_version.value) # type: ignore
ok += SSL._lib.SSL_CTX_set_max_proto_version(context._context, max_version.value) # type: ignore
if ok != 2:
raise RuntimeError(
f"Error setting TLS versions ({min_version=}, {max_version=}). "
"The version you specified may be unavailable in your libssl."
)
# Options
context.set_options(DEFAULT_OPTIONS)
# ECDHE for Key exchange
if ecdh_curve is not None:
try:
context.set_tmp_ecdh(ecdh_curve)
except ValueError as e:
raise RuntimeError(f"Elliptic curve specification error: {e}") from e
# Cipher List
if cipher_list is not None:
try:
context.set_cipher_list(b":".join(x.encode() for x in cipher_list))
except SSL.Error as e:
raise RuntimeError(f"SSL cipher specification error: {e}") from e
# SSLKEYLOGFILE
if log_master_secret:
context.set_keylog_callback(log_master_secret)
return context
@lru_cache(256)
def create_proxy_server_context(
*,
method: Method,
min_version: Version,
max_version: Version,
cipher_list: tuple[str, ...] | None,
ecdh_curve: EllipticCurve | None,
verify: Verify,
ca_path: str | None,
ca_pemfile: str | None,
client_cert: str | None,
legacy_server_connect: bool,
) -> SSL.Context:
context: SSL.Context = _create_ssl_context(
method=method,
min_version=min_version,
max_version=max_version,
cipher_list=cipher_list,
ecdh_curve=ecdh_curve,
)
context.set_verify(verify.value, None)
if ca_path is None and ca_pemfile is None:
ca_pemfile = certifi.where()
try:
context.load_verify_locations(ca_pemfile, ca_path)
except SSL.Error as e:
raise RuntimeError(
f"Cannot load trusted certificates ({ca_pemfile=}, {ca_path=})."
) from e
# Client Certs
if client_cert:
try:
context.use_privatekey_file(client_cert)
context.use_certificate_chain_file(client_cert)
except SSL.Error as e:
raise RuntimeError(f"Cannot load TLS client certificate: {e}") from e
# https://github.com/mitmproxy/mitmproxy/discussions/7550
SSL._lib.SSL_CTX_set_post_handshake_auth(context._context, 1) # type: ignore
if legacy_server_connect:
context.set_options(OP_LEGACY_SERVER_CONNECT)
return context
@lru_cache(256)
def create_client_proxy_context(
*,
method: Method,
min_version: Version,
max_version: Version,
cipher_list: tuple[str, ...] | None,
ecdh_curve: EllipticCurve | None,
chain_file: Path | None,
alpn_select_callback: Callable[[SSL.Connection, list[bytes]], Any] | None,
request_client_cert: bool,
extra_chain_certs: tuple[certs.Cert, ...],
dhparams: certs.DHParams,
) -> SSL.Context:
context: SSL.Context = _create_ssl_context(
method=method,
min_version=min_version,
max_version=max_version,
cipher_list=cipher_list,
ecdh_curve=ecdh_curve,
)
if chain_file is not None:
try:
context.load_verify_locations(str(chain_file), None)
except SSL.Error as e:
raise RuntimeError(f"Cannot load certificate chain ({chain_file}).") from e
if alpn_select_callback is not None:
assert callable(alpn_select_callback)
context.set_alpn_select_callback(alpn_select_callback)
if request_client_cert:
# The request_client_cert argument requires some explanation. We're
# supposed to be able to do this with no negative effects - if the
# client has no cert to present, we're notified and proceed as usual.
# Unfortunately, Android seems to have a bug (tested on 4.2.2) - when
# an Android client is asked to present a certificate it does not
# have, it hangs up, which is frankly bogus. Some time down the track
# we may be able to make the proper behaviour the default again, but
# until then we're conservative.
context.set_verify(Verify.VERIFY_PEER.value, accept_all)
else:
context.set_verify(Verify.VERIFY_NONE.value, None)
for i in extra_chain_certs:
context.add_extra_chain_cert(i.to_cryptography())
if dhparams:
res = SSL._lib.SSL_CTX_set_tmp_dh(context._context, dhparams) # type: ignore
SSL._openssl_assert(res == 1) # type: ignore
return context
def accept_all(
conn_: SSL.Connection,
x509: OpenSSL.crypto.X509,
errno: int,
err_depth: int,
is_cert_verified: int,
) -> bool:
# Return true to prevent cert verification error
return True
def starts_like_tls_record(d: bytes) -> bool:
"""
Returns:
True, if the passed bytes could be the start of a TLS record
False, otherwise.
"""
# TLS ClientHello magic, works for SSLv3, TLSv1.0, TLSv1.1, TLSv1.2, and TLSv1.3
# http://www.moserware.com/2009/06/first-few-milliseconds-of-https.html#client-hello
# https://tls13.ulfheim.net/
# We assume that a client sending less than 3 bytes initially is not a TLS client.
return len(d) > 2 and d[0] == 0x16 and d[1] == 0x03 and 0x00 <= d[2] <= 0x03
def starts_like_dtls_record(d: bytes) -> bool:
"""
Returns:
True, if the passed bytes could be the start of a DTLS record
False, otherwise.
"""
# TLS ClientHello magic, works for DTLS 1.1, DTLS 1.2, and DTLS 1.3.
# https://www.rfc-editor.org/rfc/rfc4347#section-4.1
# https://www.rfc-editor.org/rfc/rfc6347#section-4.1
# https://www.rfc-editor.org/rfc/rfc9147#section-4-6.2
# We assume that a client sending less than 3 bytes initially is not a DTLS client.
return len(d) > 2 and d[0] == 0x16 and d[1] == 0xFE and 0xFD <= d[2] <= 0xFE
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/check.py | mitmproxy/net/check.py | import ipaddress
import re
from typing import AnyStr
# Allow underscore in host name
# Note: This could be a DNS label, a hostname, a FQDN, or an IP
_label_valid = re.compile(rb"[A-Z\d\-_]{1,63}$", re.IGNORECASE)
def is_valid_host(host: AnyStr) -> bool:
"""
Checks if the passed bytes are a valid DNS hostname or an IPv4/IPv6 address.
"""
if isinstance(host, str):
try:
host_bytes = host.encode("idna")
except UnicodeError:
return False
else:
host_bytes = host
try:
host_bytes.decode("idna")
except ValueError:
return False
# RFC1035: 255 bytes or less.
if len(host_bytes) > 255:
return False
if host_bytes and host_bytes.endswith(b"."):
host_bytes = host_bytes[:-1]
# DNS hostname
if all(_label_valid.match(x) for x in host_bytes.split(b".")):
return True
# IPv4/IPv6 address
try:
ipaddress.ip_address(host_bytes.decode("idna"))
return True
except ValueError:
return False
def is_valid_port(port: int) -> bool:
return 0 <= port <= 65535
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/free_port.py | mitmproxy/net/free_port.py | import socket
def get_free_port() -> int:
"""
Get a port that's free for both TCP and UDP.
This method never raises. If no free port can be found, 0 is returned.
"""
for _ in range(10):
tcp = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
udp = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
tcp.bind(("", 0))
port: int = tcp.getsockname()[1]
udp.bind(("", port))
udp.close()
return port
except OSError:
pass
finally:
tcp.close()
return 0
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/server_spec.py | mitmproxy/net/server_spec.py | """
Server specs are used to describe an upstream proxy or server.
"""
import re
from functools import cache
from typing import Literal
from mitmproxy.net import check
ServerSpec = tuple[
Literal["http", "https", "http3", "tls", "dtls", "tcp", "udp", "dns", "quic"],
tuple[str, int],
]
server_spec_re = re.compile(
r"""
^
(?:(?P<scheme>\w+)://)? # scheme is optional
(?P<host>[^:/]+|\[.+\]) # hostname can be DNS name, IPv4, or IPv6 address.
(?::(?P<port>\d+))? # port is optional
/? # we allow a trailing backslash, but no path
$
""",
re.VERBOSE,
)
@cache
def parse(server_spec: str, default_scheme: str) -> ServerSpec:
"""
Parses a server mode specification, e.g.:
- http://example.com/
- example.org
- example.com:443
*Raises:*
- ValueError, if the server specification is invalid.
"""
m = server_spec_re.match(server_spec)
if not m:
raise ValueError(f"Invalid server specification: {server_spec}")
if m.group("scheme"):
scheme = m.group("scheme")
else:
scheme = default_scheme
if scheme not in (
"http",
"https",
"http3",
"tls",
"dtls",
"tcp",
"udp",
"dns",
"quic",
):
raise ValueError(f"Invalid server scheme: {scheme}")
host = m.group("host")
# IPv6 brackets
if host.startswith("[") and host.endswith("]"):
host = host[1:-1]
if not check.is_valid_host(host):
raise ValueError(f"Invalid hostname: {host}")
if m.group("port"):
port = int(m.group("port"))
else:
try:
port = {
"http": 80,
"https": 443,
"quic": 443,
"http3": 443,
"dns": 53,
}[scheme]
except KeyError:
raise ValueError(f"Port specification missing.")
if not check.is_valid_port(port):
raise ValueError(f"Invalid port: {port}")
return scheme, (host, port) # type: ignore
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/encoding.py | mitmproxy/net/encoding.py | """
Utility functions for decoding response bodies.
"""
import codecs
import collections
import gzip
import zlib
from io import BytesIO
from typing import overload
import brotli
import zstandard as zstd
# We have a shared single-element cache for encoding and decoding.
# This is quite useful in practice, e.g.
# flow.request.content = flow.request.content.replace(b"foo", b"bar")
# does not require an .encode() call if content does not contain b"foo"
CachedDecode = collections.namedtuple("CachedDecode", "encoded encoding errors decoded")
_cache = CachedDecode(None, None, None, None)
@overload
def decode(encoded: None, encoding: str, errors: str = "strict") -> None: ...
@overload
def decode(encoded: str, encoding: str, errors: str = "strict") -> str: ...
@overload
def decode(encoded: bytes, encoding: str, errors: str = "strict") -> str | bytes: ...
def decode(
encoded: None | str | bytes, encoding: str, errors: str = "strict"
) -> None | str | bytes:
"""
Decode the given input object
Returns:
The decoded value
Raises:
ValueError, if decoding fails.
"""
if encoded is None:
return None
encoding = encoding.lower()
global _cache
cached = (
isinstance(encoded, bytes)
and _cache.encoded == encoded
and _cache.encoding == encoding
and _cache.errors == errors
)
if cached:
return _cache.decoded
try:
try:
decoded = custom_decode[encoding](encoded)
except KeyError:
decoded = codecs.decode(encoded, encoding, errors) # type: ignore
if encoding in ("gzip", "deflate", "deflateraw", "br", "zstd"):
_cache = CachedDecode(encoded, encoding, errors, decoded)
return decoded
except TypeError:
raise
except Exception as e:
raise ValueError(
"{} when decoding {} with {}: {}".format(
type(e).__name__,
repr(encoded)[:10],
repr(encoding),
repr(e),
)
)
@overload
def encode(decoded: None, encoding: str, errors: str = "strict") -> None: ...
@overload
def encode(decoded: str, encoding: str, errors: str = "strict") -> str | bytes: ...
@overload
def encode(decoded: bytes, encoding: str, errors: str = "strict") -> bytes: ...
def encode(
decoded: None | str | bytes, encoding, errors="strict"
) -> None | str | bytes:
"""
Encode the given input object
Returns:
The encoded value
Raises:
ValueError, if encoding fails.
"""
if decoded is None:
return None
encoding = encoding.lower()
global _cache
cached = (
isinstance(decoded, bytes)
and _cache.decoded == decoded
and _cache.encoding == encoding
and _cache.errors == errors
)
if cached:
return _cache.encoded
try:
try:
encoded = custom_encode[encoding](decoded)
except KeyError:
encoded = codecs.encode(decoded, encoding, errors) # type: ignore
if encoding in ("gzip", "deflate", "deflateraw", "br", "zstd"):
_cache = CachedDecode(encoded, encoding, errors, decoded)
return encoded
except TypeError:
raise
except Exception as e:
raise ValueError(
"{} when encoding {} with {}: {}".format(
type(e).__name__,
repr(decoded)[:10],
repr(encoding),
repr(e),
)
)
def identity(content):
"""
Returns content unchanged. Identity is the default value of
Accept-Encoding headers.
"""
return content
def decode_gzip(content: bytes) -> bytes:
"""Decode gzip or zlib-compressed data using zlib's auto-detection."""
if not content:
return b""
try:
# Using wbits=47 (32 + 15) tells zlib to automatically detect both gzip and zlib headers.
# This simplifies decoding and avoids the need for a separate gzip.GzipFile fallback.
# Reference: https://docs.python.org/3/library/zlib.html#zlib.decompress
decompressor = zlib.decompressobj(47)
return decompressor.decompress(content) + decompressor.flush()
except zlib.error as e:
raise ValueError(f"Decompression failed: {e}")
def encode_gzip(content: bytes) -> bytes:
s = BytesIO()
# set mtime to 0 so that gzip encoding is deterministic.
with gzip.GzipFile(fileobj=s, mode="wb", mtime=0) as f:
f.write(content)
return s.getvalue()
def decode_brotli(content: bytes) -> bytes:
if not content:
return b""
return brotli.decompress(content)
def encode_brotli(content: bytes) -> bytes:
return brotli.compress(content)
def decode_zstd(content: bytes) -> bytes:
if not content:
return b""
zstd_ctx = zstd.ZstdDecompressor()
return zstd_ctx.stream_reader(BytesIO(content), read_across_frames=True).read()
def encode_zstd(content: bytes) -> bytes:
zstd_ctx = zstd.ZstdCompressor()
return zstd_ctx.compress(content)
def decode_deflate(content: bytes) -> bytes:
"""
Returns decompressed data for DEFLATE. Some servers may respond with
compressed data without a zlib header or checksum. An undocumented
feature of zlib permits the lenient decompression of data missing both
values.
http://bugs.python.org/issue5784
"""
if not content:
return b""
try:
return zlib.decompress(content)
except zlib.error:
return zlib.decompress(content, -15)
def encode_deflate(content: bytes) -> bytes:
"""
Returns compressed content, always including zlib header and checksum.
"""
return zlib.compress(content)
custom_decode = {
"none": identity,
"identity": identity,
"gzip": decode_gzip,
"deflate": decode_deflate,
"deflateraw": decode_deflate,
"br": decode_brotli,
"zstd": decode_zstd,
}
custom_encode = {
"none": identity,
"identity": identity,
"gzip": encode_gzip,
"deflate": encode_deflate,
"deflateraw": encode_deflate,
"br": encode_brotli,
"zstd": encode_zstd,
}
__all__ = ["encode", "decode"]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/__init__.py | mitmproxy/net/__init__.py | python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false | |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/local_ip.py | mitmproxy/net/local_ip.py | from __future__ import annotations
import socket
def get_local_ip(reachable: str = "8.8.8.8") -> str | None:
"""
Get the default local outgoing IPv4 address without sending any packets.
This will fail if the target address is known to be unreachable.
We use Google DNS's IPv4 address as the default.
"""
# https://stackoverflow.com/questions/166506/finding-local-ip-addresses-using-pythons-stdlib
s = None
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect((reachable, 80))
return s.getsockname()[0] # pragma: no cover
except OSError:
return None # pragma: no cover
finally:
if s is not None:
s.close()
def get_local_ip6(reachable: str = "2001:4860:4860::8888") -> str | None:
"""
Get the default local outgoing IPv6 address without sending any packets.
This will fail if the target address is known to be unreachable.
We use Google DNS's IPv6 address as the default.
"""
s = None
try:
s = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
s.connect((reachable, 80))
return s.getsockname()[0] # pragma: no cover
except OSError: # pragma: no cover
return None
finally:
if s is not None:
s.close()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/dns/classes.py | mitmproxy/net/dns/classes.py | IN = 1
CH = 3
HS = 4
NONE = 254
ANY = 255
_STRINGS = {IN: "IN", CH: "CH", HS: "HS", NONE: "NONE", ANY: "ANY"}
_INTS = {v: k for k, v in _STRINGS.items()}
def to_str(class_: int) -> str:
return _STRINGS.get(class_, f"CLASS({class_})")
def from_str(class_: str) -> int:
try:
return _INTS[class_]
except KeyError:
return int(class_.removeprefix("CLASS(").removesuffix(")"))
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/dns/domain_names.py | mitmproxy/net/dns/domain_names.py | import struct
from typing import Optional
from . import types
_LABEL_SIZE = struct.Struct("!B")
_POINTER_OFFSET = struct.Struct("!H")
_POINTER_INDICATOR = 0b11000000
Cache = dict[int, Optional[tuple[str, int]]]
def cache() -> Cache:
return dict()
def _unpack_label_into(labels: list[str], buffer: bytes, offset: int) -> int:
(size,) = _LABEL_SIZE.unpack_from(buffer, offset)
if size >= 64:
raise struct.error(f"unpack encountered a label of length {size}")
elif size == 0:
return _LABEL_SIZE.size
else:
offset += _LABEL_SIZE.size
end_label = offset + size
if len(buffer) < end_label:
raise struct.error(f"unpack requires a label buffer of {size} bytes")
try:
labels.append(buffer[offset:end_label].decode("idna"))
except UnicodeDecodeError:
raise struct.error(
f"unpack encountered an illegal characters at offset {offset}"
)
return _LABEL_SIZE.size + size
def unpack_from_with_compression(
    buffer: bytes, offset: int, cache: Cache
) -> tuple[str, int]:
    """Unpack a domain name that may end in a compression pointer.

    Returns the decoded name and the number of bytes consumed at *offset*
    (a trailing pointer counts as 2 bytes, regardless of the length of the
    name it points at). Results are memoized per offset in *cache*.

    Raises:
        struct.error: on malformed data or a pointer loop.
    """
    if offset in cache:
        result = cache[offset]
        if result is None:
            # We re-entered an offset that is still being unpacked below.
            raise struct.error(f"unpack encountered domain name loop")
    else:
        cache[offset] = None  # this will indicate that the offset is being unpacked
        start_offset = offset
        labels = []
        while True:
            (size,) = _LABEL_SIZE.unpack_from(buffer, offset)
            if size & _POINTER_INDICATOR == _POINTER_INDICATOR:
                # Two leading 1-bits mark a 14-bit pointer into the message;
                # mask them off to recover the absolute target offset.
                (pointer,) = _POINTER_OFFSET.unpack_from(buffer, offset)
                offset += _POINTER_OFFSET.size
                label, _ = unpack_from_with_compression(
                    buffer, pointer & ~(_POINTER_INDICATOR << 8), cache
                )
                labels.append(label)
                break
            else:
                offset += _unpack_label_into(labels, buffer, offset)
                if size == 0:
                    break
        result = ".".join(labels), (offset - start_offset)
        cache[start_offset] = result
    return result
def unpack_from(buffer: bytes, offset: int) -> tuple[str, int]:
    """Converts RDATA into a domain name without pointer compression, starting
    at the given offset. Returns the name and the offset of the first byte
    past it (for offset 0 this equals the name's encoded size)."""
    labels: list[str] = []
    while True:
        (size,) = _LABEL_SIZE.unpack_from(buffer, offset)
        if size & _POINTER_INDICATOR == _POINTER_INDICATOR:
            raise struct.error(
                f"unpack encountered a pointer which is not supported in RDATA"
            )
        else:
            offset += _unpack_label_into(labels, buffer, offset)
            if size == 0:
                break
    return ".".join(labels), offset
def unpack(buffer: bytes) -> str:
    """Converts RDATA into a domain name without pointer compression,
    requiring the whole buffer to be consumed."""
    name, end = unpack_from(buffer, 0)
    if end != len(buffer):
        raise struct.error(f"unpack requires a buffer of {end} bytes")
    return name
def pack(name: str) -> bytes:
    """Converts a domain name into RDATA without pointer compression.

    Raises:
        ValueError: if the name contains empty labels (e.g. "a..b"), or a
            label whose IDNA encoding exceeds 63 bytes.
    """
    buffer = bytearray()
    if len(name) > 0:
        for part in name.split("."):
            # Check for empty labels *before* IDNA-encoding: "".encode("idna")
            # raises its own UnicodeError, which previously shadowed this
            # intended ValueError (UnicodeError is a ValueError subclass, so
            # callers catching ValueError are unaffected).
            if not part:
                raise ValueError(f"domain name '{name}' contains empty labels")
            label = part.encode("idna")
            size = len(label)
            if size >= 64:  # pragma: no cover
                # encoding with 'idna' will already have raised an exception earlier
                raise ValueError(
                    f"encoded label '{part}' of domain name '{name}' is too long ({size} bytes)"
                )
            buffer.append(size)  # one-byte length prefix (always < 64)
            buffer.extend(label)
    buffer.append(0)  # the zero-length root label terminates the name
    return bytes(buffer)
def record_data_can_have_compression(record_type: int) -> bool:
    """Whether RDATA of this record type may contain compressed domain names."""
    return record_type in (
        types.CNAME,
        types.HINFO,
        types.MB,
        types.MD,
        types.MF,
        types.MG,
        types.MINFO,
        types.MR,
        types.MX,
        types.NS,
        types.PTR,
        types.SOA,
        types.TXT,
        types.RP,
        types.AFSDB,
        types.RT,
        types.SIG,
        types.PX,
        types.NXT,
        types.NAPTR,
        types.SRV,
    )
def decompress_from_record_data(
    buffer: bytes, offset: int, end_data: int, cached_names: Cache
) -> bytes:
    """Return buffer[offset:end_data] with DNS compression pointers replaced
    by the uncompressed names they point at.

    *buffer* is the whole message (pointers are absolute offsets into it);
    *cached_names* memoizes decoded names across calls.
    """
    # we decompress compression pointers in RDATA by iterating through each byte and checking
    # if it has a leading 0b11, if so we try to decompress it and update it in the data variable.
    data = bytearray(buffer[offset:end_data])
    # data_offset scans the *original* compressed bytes; decompress_size tracks
    # how many extra bytes the replacements have inserted into `data` so far.
    data_offset = 0
    decompress_size = 0
    while data_offset < end_data - offset:
        if buffer[offset + data_offset] & _POINTER_INDICATOR == _POINTER_INDICATOR:
            try:
                (
                    rr_name,
                    rr_name_len,
                ) = unpack_from_with_compression(
                    buffer, offset + data_offset, cached_names
                )
                data[
                    data_offset + decompress_size : data_offset
                    + decompress_size
                    + rr_name_len
                ] = pack(rr_name)
                # NOTE(review): this accounting assumes
                # len(pack(rr_name)) == len(rr_name) + rr_name_len, which holds
                # for plain ASCII names behind a bare 2-byte pointer; labels
                # whose IDNA encoding differs in length from the str (or an
                # empty root name) would skew the offsets — confirm.
                decompress_size += len(rr_name)
                data_offset += rr_name_len
                continue
            except struct.error:
                # the byte isn't actually a domain name compression pointer but some other data type
                pass
        data_offset += 1
    return bytes(data)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/dns/https_records.py | mitmproxy/net/dns/https_records.py | import enum
import struct
from dataclasses import dataclass
from typing import Self
from ...utils import strutils
from . import domain_names
"""
HTTPS records are formatted as follows (as per RFC9460):
- a 2-octet field for SvcPriority as an integer in network byte order.
- the uncompressed, fully qualified TargetName, represented as a sequence of length-prefixed labels per Section 3.1 of [RFC1035].
- the SvcParams, consuming the remainder of the record (so smaller than 65535 octets and constrained by the RDATA and DNS message sizes).
When the list of SvcParams is non-empty, it contains a series of SvcParamKey=SvcParamValue pairs, represented as:
- a 2-octet field containing the SvcParamKey as an integer in network byte order. (See Section 14.3.2 for the defined values.)
- a 2-octet field containing the length of the SvcParamValue as an integer between 0 and 65535 in network byte order.
- an octet string of this length whose contents are the SvcParamValue in a format determined by the SvcParamKey.
https://datatracker.ietf.org/doc/rfc9460/
https://datatracker.ietf.org/doc/rfc1035/
"""
class SVCParamKeys(enum.Enum):
    """Known SvcParamKey code points (see RFC 9460 Section 14.3.2, cited above)."""

    MANDATORY = 0
    ALPN = 1
    NO_DEFAULT_ALPN = 2
    PORT = 3
    IPV4HINT = 4
    ECH = 5
    IPV6HINT = 6
type HTTPSRecordJSON = dict[str | int, str | int]
@dataclass
class HTTPSRecord:
    """Parsed RDATA of an HTTPS resource record: SvcPriority, TargetName, and
    the SvcParamKey -> SvcParamValue mapping (values kept as opaque bytes)."""

    priority: int
    target_name: str
    params: dict[int, bytes]

    def __repr__(self):
        return str(self.to_json())

    def to_json(self) -> HTTPSRecordJSON:
        """Render the record as a JSON-compatible dict: known param keys become
        their lower-cased mnemonic, values become escaped strings."""
        ret: HTTPSRecordJSON = {
            "target_name": self.target_name,
            "priority": self.priority,
        }
        typ: str | int
        for typ, val in self.params.items():
            try:
                typ = SVCParamKeys(typ).name.lower()
            except ValueError:
                # unknown param key: keep the numeric code as the dict key
                pass
            ret[typ] = strutils.bytes_to_escaped_str(val)
        return ret

    @classmethod
    def from_json(cls, data: HTTPSRecordJSON) -> Self:
        """Inverse of `to_json`. The input dict is left unmodified."""
        # Pop from a shallow copy so the caller's dict is not mutated
        # (previously the keys were popped from `data` itself).
        remaining = dict(data)
        target_name = remaining.pop("target_name")
        assert isinstance(target_name, str)
        priority = remaining.pop("priority")
        assert isinstance(priority, int)
        params: dict[int, bytes] = {}
        for k, v in remaining.items():
            if isinstance(k, str):
                k = SVCParamKeys[k.upper()].value
            assert isinstance(v, str)
            params[k] = strutils.escaped_str_to_bytes(v)
        return cls(target_name=target_name, priority=priority, params=params)
def _unpack_params(data: bytes, offset: int) -> dict[int, bytes]:
"""Unpacks the service parameters from the given offset."""
params = {}
while offset < len(data):
param_type = struct.unpack("!H", data[offset : offset + 2])[0]
offset += 2
param_length = struct.unpack("!H", data[offset : offset + 2])[0]
offset += 2
if offset + param_length > len(data):
raise struct.error(
"unpack requires a buffer of %i bytes" % (offset + param_length)
)
param_value = data[offset : offset + param_length]
offset += param_length
params[param_type] = param_value
return params
def unpack(data: bytes) -> HTTPSRecord:
    """
    Unpacks HTTPS RDATA from byte data.

    Raises:
        struct.error if the record is malformed.
    """
    offset = 0

    # SvcPriority is a 2-octet *unsigned* integer (RFC 9460): use "!H";
    # the previous "!h" decoded priorities >= 32768 as negative numbers.
    priority = struct.unpack("!H", data[offset : offset + 2])[0]
    offset += 2

    # TargetName (variable length)
    target_name, offset = domain_names.unpack_from(data, offset)

    # Service Parameters (remaining bytes)
    params = _unpack_params(data, offset)
    return HTTPSRecord(priority=priority, target_name=target_name, params=params)
def _pack_params(params: dict[int, bytes]) -> bytes:
"""Converts the service parameters into the raw byte format"""
buffer = bytearray()
for k, v in params.items():
buffer.extend(struct.pack("!H", k))
buffer.extend(struct.pack("!H", len(v)))
buffer.extend(v)
return bytes(buffer)
def pack(record: HTTPSRecord) -> bytes:
    """Packs the HTTPS record into its bytes form."""
    buffer = bytearray()
    # SvcPriority is a 2-octet *unsigned* integer (RFC 9460): use "!H";
    # the previous "!h" raised struct.error for valid priorities >= 32768.
    buffer.extend(struct.pack("!H", record.priority))
    buffer.extend(domain_names.pack(record.target_name))
    buffer.extend(_pack_params(record.params))
    return bytes(buffer)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/dns/__init__.py | mitmproxy/net/dns/__init__.py | python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false | |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/dns/types.py | mitmproxy/net/dns/types.py | A = 1
# Numeric DNS resource record TYPE codes (A = 1 is defined on the preceding line).
NS = 2
MD = 3
MF = 4
CNAME = 5
SOA = 6
MB = 7
MG = 8
MR = 9
NULL = 10
WKS = 11
PTR = 12
HINFO = 13
MINFO = 14
MX = 15
TXT = 16
RP = 17
AFSDB = 18
X25 = 19
ISDN = 20
RT = 21
NSAP = 22
NSAP_PTR = 23
SIG = 24
KEY = 25
PX = 26
GPOS = 27
AAAA = 28
LOC = 29
NXT = 30
EID = 31
NIMLOC = 32
SRV = 33
ATMA = 34
NAPTR = 35
KX = 36
CERT = 37
A6 = 38
DNAME = 39
SINK = 40
OPT = 41
APL = 42
DS = 43
SSHFP = 44
IPSECKEY = 45
RRSIG = 46
NSEC = 47
DNSKEY = 48
DHCID = 49
NSEC3 = 50
NSEC3PARAM = 51
TLSA = 52
SMIMEA = 53
HIP = 55
NINFO = 56
RKEY = 57
TALINK = 58
CDS = 59
CDNSKEY = 60
OPENPGPKEY = 61
CSYNC = 62
ZONEMD = 63
SVCB = 64
HTTPS = 65
SPF = 99
UINFO = 100
UID = 101
GID = 102
UNSPEC = 103
NID = 104
L32 = 105
L64 = 106
LP = 107
EUI48 = 108
EUI64 = 109
TKEY = 249
TSIG = 250
IXFR = 251
AXFR = 252
MAILB = 253
MAILA = 254
ANY = 255
URI = 256
CAA = 257
AVC = 258
DOA = 259
AMTRELAY = 260
TA = 32768
DLV = 32769

# code -> mnemonic; the inverse mapping _INTS below is used for parsing.
_STRINGS = {
    A: "A",
    NS: "NS",
    MD: "MD",
    MF: "MF",
    CNAME: "CNAME",
    SOA: "SOA",
    MB: "MB",
    MG: "MG",
    MR: "MR",
    NULL: "NULL",
    WKS: "WKS",
    PTR: "PTR",
    HINFO: "HINFO",
    MINFO: "MINFO",
    MX: "MX",
    TXT: "TXT",
    RP: "RP",
    AFSDB: "AFSDB",
    X25: "X25",
    ISDN: "ISDN",
    RT: "RT",
    NSAP: "NSAP",
    NSAP_PTR: "NSAP_PTR",
    SIG: "SIG",
    KEY: "KEY",
    PX: "PX",
    GPOS: "GPOS",
    AAAA: "AAAA",
    LOC: "LOC",
    NXT: "NXT",
    EID: "EID",
    NIMLOC: "NIMLOC",
    SRV: "SRV",
    ATMA: "ATMA",
    NAPTR: "NAPTR",
    KX: "KX",
    CERT: "CERT",
    A6: "A6",
    DNAME: "DNAME",
    SINK: "SINK",
    OPT: "OPT",
    APL: "APL",
    DS: "DS",
    SSHFP: "SSHFP",
    IPSECKEY: "IPSECKEY",
    RRSIG: "RRSIG",
    NSEC: "NSEC",
    DNSKEY: "DNSKEY",
    DHCID: "DHCID",
    NSEC3: "NSEC3",
    NSEC3PARAM: "NSEC3PARAM",
    TLSA: "TLSA",
    SMIMEA: "SMIMEA",
    HIP: "HIP",
    NINFO: "NINFO",
    RKEY: "RKEY",
    TALINK: "TALINK",
    CDS: "CDS",
    CDNSKEY: "CDNSKEY",
    OPENPGPKEY: "OPENPGPKEY",
    CSYNC: "CSYNC",
    ZONEMD: "ZONEMD",
    SVCB: "SVCB",
    HTTPS: "HTTPS",
    SPF: "SPF",
    UINFO: "UINFO",
    UID: "UID",
    GID: "GID",
    UNSPEC: "UNSPEC",
    NID: "NID",
    L32: "L32",
    L64: "L64",
    LP: "LP",
    EUI48: "EUI48",
    EUI64: "EUI64",
    TKEY: "TKEY",
    TSIG: "TSIG",
    IXFR: "IXFR",
    AXFR: "AXFR",
    MAILB: "MAILB",
    MAILA: "MAILA",
    ANY: "ANY",
    URI: "URI",
    CAA: "CAA",
    AVC: "AVC",
    DOA: "DOA",
    AMTRELAY: "AMTRELAY",
    TA: "TA",
    DLV: "DLV",
}
_INTS = {v: k for k, v in _STRINGS.items()}
def to_str(type_: int) -> str:
    """Mnemonic for a DNS record TYPE code; unknown codes render as ``TYPE(n)``."""
    try:
        return _STRINGS[type_]
    except KeyError:
        return f"TYPE({type_})"


def from_str(type_: str) -> int:
    """Inverse of :func:`to_str`: accepts a mnemonic or a ``TYPE(n)`` string."""
    if type_ in _INTS:
        return _INTS[type_]
    return int(type_.removeprefix("TYPE(").removesuffix(")"))
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/dns/response_codes.py | mitmproxy/net/dns/response_codes.py | NOERROR = 0
# DNS response codes, RCODEs (NOERROR = 0 is defined on the preceding line).
FORMERR = 1
SERVFAIL = 2
NXDOMAIN = 3
NOTIMP = 4
REFUSED = 5
YXDOMAIN = 6
YXRRSET = 7
NXRRSET = 8
NOTAUTH = 9
NOTZONE = 10
DSOTYPENI = 11

# RCODE -> closest-equivalent HTTP status code (used by http_equiv_status_code).
_CODES = {
    NOERROR: 200,
    FORMERR: 400,
    SERVFAIL: 500,
    NXDOMAIN: 404,
    NOTIMP: 501,
    REFUSED: 403,
    YXDOMAIN: 409,
    YXRRSET: 409,
    NXRRSET: 410,
    NOTAUTH: 401,
    NOTZONE: 404,
    DSOTYPENI: 501,
}

# code -> mnemonic, plus the inverse mapping used for parsing.
_STRINGS = {
    NOERROR: "NOERROR",
    FORMERR: "FORMERR",
    SERVFAIL: "SERVFAIL",
    NXDOMAIN: "NXDOMAIN",
    NOTIMP: "NOTIMP",
    REFUSED: "REFUSED",
    YXDOMAIN: "YXDOMAIN",
    YXRRSET: "YXRRSET",
    NXRRSET: "NXRRSET",
    NOTAUTH: "NOTAUTH",
    NOTZONE: "NOTZONE",
    DSOTYPENI: "DSOTYPENI",
}
_INTS = {v: k for k, v in _STRINGS.items()}
def http_equiv_status_code(response_code: int) -> int:
    """Map a DNS RCODE to its roughly equivalent HTTP status code (500 if unknown)."""
    return _CODES[response_code] if response_code in _CODES else 500


def to_str(response_code: int) -> str:
    """Mnemonic for an RCODE; unknown codes render as ``RCODE(n)``."""
    try:
        return _STRINGS[response_code]
    except KeyError:
        return f"RCODE({response_code})"


def from_str(response_code: str) -> int:
    """Inverse of :func:`to_str`: accepts a mnemonic or a ``RCODE(n)`` string."""
    if response_code in _INTS:
        return _INTS[response_code]
    return int(response_code.removeprefix("RCODE(").removesuffix(")"))
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/dns/op_codes.py | mitmproxy/net/dns/op_codes.py | QUERY = 0
# DNS message OPCODE values (QUERY = 0 is defined on the preceding line).
IQUERY = 1
STATUS = 2
NOTIFY = 4
UPDATE = 5
DSO = 6

# code -> mnemonic, plus the inverse mapping used for parsing.
_STRINGS = {
    QUERY: "QUERY",
    IQUERY: "IQUERY",
    STATUS: "STATUS",
    NOTIFY: "NOTIFY",
    UPDATE: "UPDATE",
    DSO: "DSO",
}
_INTS = {v: k for k, v in _STRINGS.items()}
def to_str(op_code: int) -> str:
    """Mnemonic for a DNS OPCODE; unknown codes render as ``OPCODE(n)``."""
    return _STRINGS[op_code] if op_code in _STRINGS else f"OPCODE({op_code})"


def from_str(op_code: str) -> int:
    """Inverse of :func:`to_str`: accepts a mnemonic or an ``OPCODE(n)`` string."""
    if op_code in _INTS:
        return _INTS[op_code]
    return int(op_code.removeprefix("OPCODE(").removesuffix(")"))
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/http/url.py | mitmproxy/net/http/url.py | from __future__ import annotations
import re
import urllib.parse
from collections.abc import Sequence
from typing import AnyStr
from typing import overload
from mitmproxy.net import check
from mitmproxy.net.check import is_valid_host
from mitmproxy.net.check import is_valid_port
from mitmproxy.utils.strutils import always_str
# This regex extracts & splits the host header into host and port.
# Handles the edge case of IPv6 addresses containing colons.
# https://bugzilla.mozilla.org/show_bug.cgi?id=45891
_authority_re = re.compile(r"^(?P<host>[^:]+|\[.+\])(?::(?P<port>\d+))?$")
def parse(url: str | bytes) -> tuple[bytes, bytes, int, bytes]:
    """
    URL-parsing function that checks that
        - port is an integer 0-65535
        - host is a valid IDNA-encoded hostname with no null-bytes
        - path is valid ASCII

    Args:
        A URL (as bytes or as unicode)

    Returns:
        A (scheme, host, port, path) tuple

    Raises:
        ValueError, if the URL is not properly formatted.
    """
    # FIXME: We shouldn't rely on urllib here.

    # Size of Ascii character after encoding is 1 byte which is same as its size
    # But non-Ascii character's size after encoding will be more than its size
    def ascii_check(x):
        # True iff x is pure ASCII (encodes to exactly one byte per character).
        if len(x) == len(str(x).encode()):
            return True
        return False

    if isinstance(url, bytes):
        url = url.decode()
    if not ascii_check(url):
        # Percent-encode the path component so the ascii encode below succeeds.
        url = urllib.parse.urlsplit(url)  # type: ignore
        url = list(url)  # type: ignore
        url[3] = urllib.parse.quote(url[3])  # type: ignore
        url = urllib.parse.urlunsplit(url)  # type: ignore

    parsed: urllib.parse.ParseResult = urllib.parse.urlparse(url)
    if not parsed.hostname:
        raise ValueError("No hostname given")
    else:
        host = parsed.hostname.encode("idna")
        parsed_b: urllib.parse.ParseResultBytes = parsed.encode("ascii")  # type: ignore

    port = parsed_b.port
    if not port:
        # Default ports; any scheme other than https falls back to 80.
        port = 443 if parsed_b.scheme == b"https" else 80

    # Re-assemble everything after the authority (path, params, query, fragment).
    full_path: bytes = urllib.parse.urlunparse(
        (b"", b"", parsed_b.path, parsed_b.params, parsed_b.query, parsed_b.fragment)  # type: ignore
    )
    if not full_path.startswith(b"/"):
        full_path = b"/" + full_path  # type: ignore

    if not check.is_valid_host(host):
        raise ValueError("Invalid Host")

    return parsed_b.scheme, host, port, full_path
@overload
def unparse(scheme: str, host: str, port: int, path) -> str: ...


@overload
def unparse(scheme: bytes, host: bytes, port: int, path) -> bytes: ...


def unparse(scheme, host, port, path):
    """
    Returns a URL string, constructed from the specified components.
    """
    netloc = hostport(scheme, host, port)
    if isinstance(scheme, str):
        return f"{scheme}://{netloc}{path}"
    return b"%s://%s%s" % (scheme, netloc, path)
def encode(s: Sequence[tuple[str, str]], similar_to: str | None = None) -> str:
"""
Takes a list of (key, value) tuples and returns a urlencoded string.
If similar_to is passed, the output is formatted similar to the provided urlencoded string.
"""
remove_trailing_equal = False
if similar_to:
remove_trailing_equal = any("=" not in param for param in similar_to.split("&"))
encoded = urllib.parse.urlencode(s, False, errors="surrogateescape")
if encoded and remove_trailing_equal:
encoded = encoded.replace("=&", "&")
if encoded[-1] == "=":
encoded = encoded[:-1]
return encoded
def decode(s):
    """
    Takes a urlencoded string and returns a list of surrogate-escaped (key, value) tuples.
    """
    pairs = urllib.parse.parse_qsl(s, errors="surrogateescape", keep_blank_values=True)
    return pairs
def quote(b: str, safe: str = "/") -> str:
    """
    Percent-encode *b*.

    Returns:
        An ascii-encodable str.
    """
    quoted = urllib.parse.quote(b, errors="surrogateescape", safe=safe)
    return quoted


def unquote(s: str) -> str:
    """
    Undo percent-encoding.

    Args:
        s: A surrogate-escaped str

    Returns:
        A surrogate-escaped str
    """
    unquoted = urllib.parse.unquote(s, errors="surrogateescape")
    return unquoted
def hostport(scheme: AnyStr, host: AnyStr, port: int) -> AnyStr:
"""
Returns the host component, with a port specification if needed.
"""
if default_port(scheme) == port:
return host
else:
if isinstance(host, bytes):
return b"%s:%d" % (host, port)
else:
return "%s:%d" % (host, port)
def default_port(scheme: AnyStr) -> int | None:
return {
"http": 80,
b"http": 80,
"https": 443,
b"https": 443,
}.get(scheme, None)
def parse_authority(authority: AnyStr, check: bool) -> tuple[str, int | None]:
    """Extract the host and port from host header/authority information

    Args:
        authority: raw ``host[:port]`` value as str or bytes
        check: if True, malformed input raises; if False, the input is
            returned best-effort as ``(authority-as-str, None)``

    Raises:
        ValueError, if check is True and the authority information is malformed.
    """
    try:
        if isinstance(authority, bytes):
            m = _authority_re.match(authority.decode("utf-8"))
            if not m:
                raise ValueError
            # bytes input: treat the hostname as IDNA-encoded ("xn--...").
            host = m["host"].encode("utf-8").decode("idna")
        else:
            m = _authority_re.match(authority)
            if not m:
                raise ValueError
            host = m.group("host")

        # Strip the brackets of an IPv6 literal such as "[::1]".
        if host.startswith("[") and host.endswith("]"):
            host = host[1:-1]
        if not is_valid_host(host):
            raise ValueError

        if m.group("port"):
            port = int(m.group("port"))
            if not is_valid_port(port):
                raise ValueError
            return host, port
        else:
            return host, None

    except ValueError:
        if check:
            raise
        else:
            # Best-effort fallback: return the authority verbatim, no port.
            return always_str(authority, "utf-8", "surrogateescape"), None
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/http/cookies.py | mitmproxy/net/http/cookies.py | import email.utils
import re
import time
from collections.abc import Iterable
from mitmproxy.coretypes import multidict
"""
A flexible module for cookie parsing and manipulation.
This module differs from usual standards-compliant cookie modules in a number
of ways. We try to be as permissive as possible, and to retain even mal-formed
information. Duplicate cookies are preserved in parsing, and can be set in
formatting. We do attempt to escape and quote values where needed, but will not
reject data that violate the specs.
Parsing accepts the formats in RFC6265 and partially RFC2109 and RFC2965. We
also parse the comma-separated variant of Set-Cookie that allows multiple
cookies to be set in a single header. Serialization follows RFC6265.
http://tools.ietf.org/html/rfc6265
http://tools.ietf.org/html/rfc2109
http://tools.ietf.org/html/rfc2965
"""
# Set-Cookie attribute names recognized when grouping a flat pair list into
# individual cookies (see group_cookies below).
_cookie_params = {
    "expires",
    "path",
    "comment",
    "max-age",
    "secure",
    "httponly",
    "version",
}

# Characters that must be backslash-escaped inside a quoted cookie value.
ESCAPE = re.compile(r"([\"\\])")


class CookieAttrs(multidict.MultiDict):
    """Multidict of cookie attributes with case-insensitive keys."""

    @staticmethod
    def _kconv(key):
        # Lower-case keys so lookups are case-insensitive.
        return key.lower()

    @staticmethod
    def _reduce_values(values):
        # See the StickyCookieTest for a weird cookie that only makes sense
        # if we take the last part.
        return values[-1]


# One parsed Set-Cookie cookie: (name, value-or-None, attributes).
TSetCookie = tuple[str, str | None, CookieAttrs]
# A flat list of (key, value-or-None) pairs.
TPairs = list[tuple[str, str | None]]
def _read_until(s, start, term):
    """
    Read until one of the characters in term is reached.

    Returns (substring, offset of the terminator). If no terminator occurs,
    the whole remainder is returned with offset len(s); note the special case
    start == len(s) returns offset start + 1, i.e. one *past* the end
    (callers uniformly advance past the terminator with ``off += 1``).
    """
    if start == len(s):
        return "", start + 1
    for i in range(start, len(s)):
        if s[i] in term:
            return s[start:i], i
    return s[start : i + 1], i + 1
def _read_quoted_string(s, start):
"""
start: offset to the first quote of the string to be read
A sort of loose super-set of the various quoted string specifications.
RFC6265 disallows backslashes or double quotes within quoted strings.
Prior RFCs use backslashes to escape. This leaves us free to apply
backslash escaping by default and be compatible with everything.
"""
escaping = False
ret = []
# Skip the first quote
i = start # initialize in case the loop doesn't run.
for i in range(start + 1, len(s)):
if escaping:
ret.append(s[i])
escaping = False
elif s[i] == '"':
break
elif s[i] == "\\":
escaping = True
else:
ret.append(s[i])
return "".join(ret), i + 1
def _read_key(s, start, delims=";="):
    """
    Read a key - the LHS of a token/value pair in a cookie.
    """
    return _read_until(s, start, delims)


def _read_value(s, start, delims):
    """
    Reads a value - the RHS of a token/value pair in a cookie.
    Quoted values are unescaped; plain values run until a delimiter.
    """
    if start >= len(s):
        return "", start
    if s[start] == '"':
        return _read_quoted_string(s, start)
    return _read_until(s, start, delims)
def _read_cookie_pairs(s, off=0):
    """
    Read pairs of lhs=rhs values from Cookie headers.

    off: start offset

    Returns (pairs, end offset); each pair is a two-element ``[lhs, rhs]``
    list, with rhs == "" when no "=" was present.
    """
    pairs = []

    while True:
        lhs, off = _read_key(s, off)
        lhs = lhs.lstrip()

        rhs = ""
        if off < len(s) and s[off] == "=":
            rhs, off = _read_value(s, off + 1, ";")
        if rhs or lhs:
            pairs.append([lhs, rhs])

        # step over the ";" separator (or past the end of the string)
        off += 1

        if not off < len(s):
            break

    return pairs, off
def _read_set_cookie_pairs(s: str, off=0) -> tuple[list[TPairs], int]:
    """
    Read pairs of lhs=rhs values from Set-Cookie headers while handling multiple cookies.

    off: start offset

    Returns (cookies, end offset), where each element of *cookies* is the
    pair list of one cookie; a pair's value is None when no "=" was present.
    """
    cookies: list[TPairs] = []
    pairs: TPairs = []

    while True:
        lhs, off = _read_key(s, off, ";=,")
        lhs = lhs.lstrip()

        rhs = ""
        if off < len(s) and s[off] == "=":
            rhs, off = _read_value(s, off + 1, ";,")

            # Special handling of attributes
            if lhs.lower() == "expires":
                # 'expires' values can contain commas in them so they need to
                # be handled separately.
                # We actually bank on the fact that the expires value WILL
                # contain a comma. Things will fail, if they don't.
                # '3' is just a heuristic we use to determine whether we've
                # only read a part of the expires value and we should read more.
                if len(rhs) <= 3:
                    trail, off = _read_value(s, off + 1, ";,")
                    rhs = rhs + "," + trail

            # as long as there's a "=", we consider it a pair
            pairs.append((lhs, rhs))

        elif lhs:
            pairs.append((lhs, None))

        # comma marks the beginning of a new cookie
        if off < len(s) and s[off] == ",":
            cookies.append(pairs)
            pairs = []

        off += 1

        if not off < len(s):
            break

    # flush the trailing cookie; an empty input still yields one empty cookie
    if pairs or not cookies:
        cookies.append(pairs)

    return cookies, off
def _has_special(s: str) -> bool:
for i in s:
if i in '",;\\':
return True
o = ord(i)
if o < 0x21 or o > 0x7E:
return True
return False
def _format_pairs(pairs, specials=(), sep="; "):
"""
specials: A lower-cased list of keys that will not be quoted.
"""
vals = []
for k, v in pairs:
if v is None:
val = k
elif k.lower() not in specials and _has_special(v):
v = ESCAPE.sub(r"\\\1", v)
v = '"%s"' % v
val = f"{k}={v}"
else:
val = f"{k}={v}"
vals.append(val)
return sep.join(vals)
def _format_set_cookie_pairs(lst):
return _format_pairs(lst, specials=("expires", "path"))
def parse_cookie_header(line):
    """
    Parse a Cookie header value.
    Returns a list of (lhs, rhs) tuples.
    """
    pairs, _ = _read_cookie_pairs(line)
    return pairs


def parse_cookie_headers(cookie_headers):
    """Parse a sequence of Cookie header values into one flat pair list."""
    return [
        pair for header in cookie_headers for pair in parse_cookie_header(header)
    ]


def format_cookie_header(lst):
    """
    Formats a Cookie header value.
    """
    return _format_pairs(lst)
def parse_set_cookie_header(line: str) -> list[TSetCookie]:
    """
    Parse a Set-Cookie header value

    Returns:
        A list of (name, value, attrs) tuples, where attrs is a
        CookieAttrs dict of attributes. No attempt is made to parse attribute
        values - they are treated purely as strings.
    """
    grouped, _ = _read_set_cookie_pairs(line)
    return [
        (pairs[0][0], pairs[0][1], CookieAttrs(pairs[1:]))
        for pairs in grouped
        if pairs
    ]
def parse_set_cookie_headers(headers: Iterable[str]) -> list[TSetCookie]:
    """Parse multiple Set-Cookie header values into one flat cookie list."""
    result: list[TSetCookie] = []
    for header in headers:
        result += parse_set_cookie_header(header)
    return result
def format_set_cookie_header(set_cookies: list[TSetCookie]) -> str:
    """
    Formats a Set-Cookie header value.
    """
    parts = []
    for name, value, attrs in set_cookies:
        # CookieAttrs exposes .fields; plain pair lists are used as-is.
        attr_pairs = attrs.fields if hasattr(attrs, "fields") else attrs
        parts.append(_format_set_cookie_pairs([(name, value), *attr_pairs]))
    return ", ".join(parts)
def refresh_set_cookie_header(c: str, delta: int) -> str:
    """
    Shift the ``expires`` attribute of every cookie in a Set-Cookie header
    by *delta* seconds and re-serialize the header.

    Args:
        c: A Set-Cookie string
        delta: Time delta in seconds

    Returns:
        A refreshed Set-Cookie string

    Raises:
        ValueError, if the cookie is invalid.
    """
    cookies = parse_set_cookie_header(c)
    for cookie in cookies:
        name, value, attrs = cookie
        if not name or not value:
            raise ValueError("Invalid Cookie")

        if "expires" in attrs:
            e = email.utils.parsedate_tz(attrs["expires"])
            if e:
                # Shift the parsed expiry and write it back as a GMT date.
                f = email.utils.mktime_tz(e) + delta
                attrs.set_all("expires", [email.utils.formatdate(f, usegmt=True)])
            else:
                # This can happen when the expires tag is invalid.
                # reddit.com sends an expires tag like this: "Thu, 31 Dec
                # 2037 23:59:59 GMT", which is valid RFC 1123, but not
                # strictly correct according to the cookie spec. Browsers
                # appear to parse this tolerantly - maybe we should too.
                # For now, we just ignore this.
                del attrs["expires"]
    return format_set_cookie_header(cookies)
def get_expiration_ts(cookie_attrs):
    """
    Determines the time when the cookie will be expired.

    Considering both 'expires' and 'max-age' parameters.

    Returns: timestamp of when the cookie will expire.
             None, if no expiration time is set.
    """
    if "expires" in cookie_attrs:
        e = email.utils.parsedate_tz(cookie_attrs["expires"])
        if e:
            return email.utils.mktime_tz(e)

    elif "max-age" in cookie_attrs:
        try:
            # Read the same (lower-case) key we just tested for. The previous
            # "Max-Age" spelling only worked through CookieAttrs' case-folding
            # lookup and raised KeyError for plain mappings.
            max_age = int(cookie_attrs["max-age"])
        except ValueError:
            # non-numeric max-age: treat as "no expiration information"
            pass
        else:
            now_ts = time.time()
            return now_ts + max_age

    return None
def is_expired(cookie_attrs):
    """
    Determines whether a cookie has expired.

    Returns: boolean
    """
    exp_ts = get_expiration_ts(cookie_attrs)
    # Cookies without expiration information never count as expired.
    return exp_ts is not None and exp_ts <= time.time()
def group_cookies(pairs):
    """
    Converts a list of pairs to a (name, value, attrs) for each cookie.
    """
    if not pairs:
        return []

    grouped = []
    # The first pair always opens a new cookie.
    name, value = pairs[0]
    attrs = []
    for key, val in pairs[1:]:
        if key.lower() in _cookie_params:
            attrs.append((key, val))
        else:
            # A non-attribute key starts the next cookie.
            grouped.append((name, value, CookieAttrs(attrs)))
            name, value, attrs = key, val, []
    grouped.append((name, value, CookieAttrs(attrs)))
    return grouped
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/http/validate.py | mitmproxy/net/http/validate.py | import logging
import re
import typing
from mitmproxy.http import Message
from mitmproxy.http import Request
from mitmproxy.http import Response
logger = logging.getLogger(__name__)
# https://datatracker.ietf.org/doc/html/rfc7230#section-3.2: Header fields are tokens.
# "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
_valid_header_name = re.compile(rb"^[!#$%&'*+\-.^_`|~0-9a-zA-Z]+$")
_valid_content_length = re.compile(rb"^(?:0|[1-9][0-9]*)$")
_valid_content_length_str = re.compile(r"^(?:0|[1-9][0-9]*)$")
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.1:
# > A sender MUST NOT apply the chunked transfer coding more than once to a message body (i.e., chunking an already
# > chunked message is not allowed). If any transfer coding other than chunked is applied to a request's content, the
# > sender MUST apply chunked as the final transfer coding to ensure that the message is properly framed. If any
# > transfer coding other than chunked is applied to a response's content, the sender MUST either apply chunked as the
# > final transfer coding or terminate the message by closing the connection.
#
# The RFC technically still allows for fun encodings, we are a bit stricter and only accept a known subset by default.
TransferEncoding = typing.Literal[
"chunked",
"compress,chunked",
"deflate,chunked",
"gzip,chunked",
"compress",
"deflate",
"gzip",
"identity",
]
_HTTP_1_1_TRANSFER_ENCODINGS = frozenset(typing.get_args(TransferEncoding))
def parse_content_length(value: str | bytes) -> int:
    """Parse a content-length header value, or raise a ValueError if it is invalid."""
    pattern = (
        _valid_content_length_str if isinstance(value, str) else _valid_content_length
    )
    if not pattern.match(value):
        raise ValueError(f"invalid content-length header: {value!r}")
    return int(value)
def parse_transfer_encoding(value: str | bytes) -> TransferEncoding:
    """Parse a transfer-encoding header value, or raise a ValueError if it is invalid or unknown."""
    # guard against .lower() transforming non-ascii to ascii
    if not value.isascii():
        raise ValueError(f"invalid transfer-encoding header: {value!r}")
    te = value if isinstance(value, str) else value.decode()
    # normalize case and collapse whitespace around commas before lookup
    te = re.sub(r"[\t ]*,[\t ]*", ",", te.lower())
    if te not in _HTTP_1_1_TRANSFER_ENCODINGS:
        raise ValueError(f"unknown transfer-encoding header: {value!r}")
    return typing.cast(TransferEncoding, te)
def validate_headers(message: Message) -> None:
    """
    Validate HTTP message headers to avoid request smuggling attacks.
    Raises a ValueError if they are malformed.
    """
    # Collect every transfer-encoding and content-length value so that
    # conflicting or duplicated framing information can be detected below.
    te = []
    cl = []
    for name, value in message.headers.fields:
        if not _valid_header_name.match(name):
            raise ValueError(f"invalid header name: {name!r}")
        match name.lower():
            case b"transfer-encoding":
                te.append(value)
            case b"content-length":
                cl.append(value)
    if te and cl:
        # > A server MAY reject a request that contains both Content-Length and Transfer-Encoding or process such a
        # > request in accordance with the Transfer-Encoding alone.
        # > A sender MUST NOT send a Content-Length header field in any message that contains a Transfer-Encoding header
        # > field.
        raise ValueError(
            "message with both transfer-encoding and content-length headers"
        )
    elif te:
        if len(te) > 1:
            raise ValueError(f"multiple transfer-encoding headers: {te!r}")
        # > Transfer-Encoding was added in HTTP/1.1. It is generally assumed that implementations advertising only
        # > HTTP/1.0 support will not understand how to process transfer-encoded content, and that an HTTP/1.0 message
        # > received with a Transfer-Encoding is likely to have been forwarded without proper handling of the chunked
        # > transfer coding in transit.
        #
        # > A client MUST NOT send a request containing Transfer-Encoding unless it knows the server will handle
        # > HTTP/1.1 requests (or later minor revisions); such knowledge might be in the form of specific user
        # > configuration or by remembering the version of a prior received response. A server MUST NOT send a response
        # > containing Transfer-Encoding unless the corresponding request indicates HTTP/1.1 (or later minor revisions).
        if not message.is_http11:
            raise ValueError(
                f"unexpected HTTP transfer-encoding {te[0]!r} for {message.http_version}"
            )
        # > A server MUST NOT send a Transfer-Encoding header field in any response with a status code of 1xx
        # > (Informational) or 204 (No Content).
        if isinstance(message, Response) and (
            100 <= message.status_code <= 199 or message.status_code == 204
        ):
            raise ValueError(
                f"unexpected HTTP transfer-encoding {te[0]!r} for response with status code {message.status_code}"
            )
        # > If a Transfer-Encoding header field is present in a request and the chunked transfer coding is not the final
        # > encoding, the message body length cannot be determined reliably; the server MUST respond with the 400 (Bad
        # > Request) status code and then close the connection.
        te_parsed = parse_transfer_encoding(te[0])
        match te_parsed:
            case "chunked" | "compress,chunked" | "deflate,chunked" | "gzip,chunked":
                pass
            case "compress" | "deflate" | "gzip" | "identity":
                if isinstance(message, Request):
                    raise ValueError(
                        f"unexpected HTTP transfer-encoding {te_parsed!r} for request"
                    )
            case other:  # pragma: no cover
                typing.assert_never(other)
    elif cl:
        # > If a message is received without Transfer-Encoding and with an invalid Content-Length header field, then the
        # > message framing is invalid and the recipient MUST treat it as an unrecoverable error, unless the field value
        # > can be successfully parsed as a comma-separated list (Section 5.6.1 of [HTTP]), all values in the list are
        # > valid, and all values in the list are the same (in which case, the message is processed with that single
        # > value used as the Content-Length field value).
        # We are stricter here and reject comma-separated lists.
        if len(cl) > 1:
            raise ValueError(f"multiple content-length headers: {cl!r}")
        # Called only for its ValueError on malformed values; the int result is unused here.
        parse_content_length(cl[0])
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/http/user_agents.py | mitmproxy/net/http/user_agents.py | """
A small collection of useful user-agent header strings. These should be
kept reasonably current to reflect common usage.
"""
# pylint: disable=line-too-long
# A collection of (name, shortcut, string) tuples.
# `name` is the human-readable identifier, `shortcut` the single-character key
# used by get_by_shortcut, and `string` the literal User-Agent header value.
UASTRINGS = [
    (
        "android",
        "a",
        "Mozilla/5.0 (Linux; U; Android 4.1.1; en-gb; Nexus 7 Build/JRO03D) AFL/01.04.02",
    ),
    (
        "blackberry",
        "l",
        "Mozilla/5.0 (BlackBerry; U; BlackBerry 9900; en) AppleWebKit/534.11+ (KHTML, like Gecko) Version/7.1.0.346 Mobile Safari/534.11+",
    ),
    (
        "bingbot",
        "b",
        "Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)",
    ),
    (
        "chrome",
        "c",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1",
    ),
    (
        "firefox",
        "f",
        "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:14.0) Gecko/20120405 Firefox/14.0a1",
    ),
    ("googlebot", "g", "Googlebot/2.1 (+http://www.googlebot.com/bot.html)"),
    # NOTE(review): "WIndows NT 9.0" casing/version below is the literal upstream string — do not "fix".
    ("ie9", "i", "Mozilla/5.0 (Windows; U; MSIE 9.0; WIndows NT 9.0; en-US)"),
    (
        "ipad",
        "p",
        "Mozilla/5.0 (iPad; CPU OS 5_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9B176 Safari/7534.48.3",
    ),
    (
        "iphone",
        "h",
        "Mozilla/5.0 (iPhone; CPU iPhone OS 4_2_1 like Mac OS X) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8C148a Safari/6533.18.5",  # noqa
    ),
    (
        "safari",
        "s",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/534.55.3 (KHTML, like Gecko) Version/5.1.3 Safari/534.53.10",
    ),
]
def get_by_shortcut(s):
    """
    Retrieve a user agent entry by shortcut.
    Returns the (name, shortcut, string) tuple, or None if no entry matches.
    """
    return next((entry for entry in UASTRINGS if entry[1] == s), None)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/http/__init__.py | mitmproxy/net/http/__init__.py | python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false | |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/http/headers.py | mitmproxy/net/http/headers.py | import collections
import re
def parse_content_type(c: str) -> tuple[str, str, dict[str, str]] | None:
"""
A simple parser for content-type values. Returns a (type, subtype,
parameters) tuple, where type and subtype are strings, and parameters
is a dict. If the string could not be parsed, return None.
E.g. the following string:
text/html; charset=UTF-8
Returns:
("text", "html", {"charset": "UTF-8"})
"""
parts = c.split(";", 1)
ts = parts[0].split("/", 1)
if len(ts) != 2:
return None
d = collections.OrderedDict()
if len(parts) == 2:
for i in parts[1].split(";"):
clause = i.split("=", 1)
if len(clause) == 2:
d[clause[0].strip()] = clause[1].strip()
return ts[0].lower(), ts[1].lower(), d
def assemble_content_type(type, subtype, parameters):
    """Inverse of parse_content_type: build a content-type header value."""
    result = f"{type}/{subtype}"
    if parameters:
        result += "; " + "; ".join(f"{k}={v}" for k, v in parameters.items())
    return result
def infer_content_encoding(content_type: str, content: bytes = b"") -> str:
    """
    Infer the encoding of content from the content-type header.
    Returns a codec name suitable for bytes.decode(); falls back to latin-1
    when nothing more specific can be determined.
    """
    enc = None
    # BOM has the highest priority
    if content.startswith(b"\x00\x00\xfe\xff"):
        enc = "utf-32be"
    elif content.startswith(b"\xff\xfe\x00\x00"):
        enc = "utf-32le"
    elif content.startswith(b"\xfe\xff"):
        enc = "utf-16be"
    elif content.startswith(b"\xff\xfe"):
        enc = "utf-16le"
    elif content.startswith(b"\xef\xbb\xbf"):
        # 'utf-8-sig' will strip the BOM on decode
        enc = "utf-8-sig"
    elif parsed_content_type := parse_content_type(content_type):
        # Use the charset from the header if possible
        enc = parsed_content_type[2].get("charset")
    # Otherwise, infer the encoding
    if not enc and "json" in content_type:
        enc = "utf8"
    if not enc and "html" in content_type:
        # Look for an in-document <meta charset=...> declaration.
        meta_charset = re.search(
            rb"""<meta[^>]+charset=['"]?([^'">]+)""", content, re.IGNORECASE
        )
        if meta_charset:
            enc = meta_charset.group(1).decode("ascii", "ignore")
        else:
            # Fallback to utf8 for html
            # Ref: https://html.spec.whatwg.org/multipage/parsing.html#determining-the-character-encoding
            # > 9. [snip] the comprehensive UTF-8 encoding is suggested.
            enc = "utf8"
    if not enc and "xml" in content_type:
        # Look for the encoding attribute of the XML declaration.
        if xml_encoding := re.search(
            rb"""<\?xml[^\?>]+encoding=['"]([^'"\?>]+)""", content, re.IGNORECASE
        ):
            enc = xml_encoding.group(1).decode("ascii", "ignore")
        else:
            # Fallback to utf8 for xml
            # Ref: https://datatracker.ietf.org/doc/html/rfc7303#section-8.5
            # > the XML processor [snip] to determine an encoding of UTF-8.
            enc = "utf8"
    if not enc and ("javascript" in content_type or "ecmascript" in content_type):
        # Fallback to utf8 for javascript
        # Ref: https://datatracker.ietf.org/doc/html/rfc9239#section-4.2
        # > 3. Else, the character encoding scheme is assumed to be UTF-8
        enc = "utf8"
    if not enc and "text/css" in content_type:
        # @charset rule must be the very first thing.
        css_charset = re.match(rb"""@charset "([^"]+)";""", content, re.IGNORECASE)
        if css_charset:
            enc = css_charset.group(1).decode("ascii", "ignore")
        else:
            # Fallback to utf8 for css
            # Ref: https://drafts.csswg.org/css-syntax/#determine-the-fallback-encoding
            # > 4. Otherwise, return utf-8
            enc = "utf8"
    # Fallback to latin-1
    if not enc:
        enc = "latin-1"
    # Use GB 18030 as the superset of GB2312 and GBK to fix common encoding problems on Chinese websites.
    if enc.lower() in ("gb2312", "gbk"):
        enc = "gb18030"
    return enc
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/http/multipart.py | mitmproxy/net/http/multipart.py | from __future__ import annotations
import mimetypes
import re
import warnings
from urllib.parse import quote
from mitmproxy.net.http import headers
def encode_multipart(content_type: str, parts: list[tuple[bytes, bytes]]) -> bytes:
    """
    Encode (key, value) byte pairs as a multipart/form-data body.

    The boundary is taken from the `boundary` parameter of *content_type*.
    Returns b"" if the content type is missing, unparsable, or the boundary
    is not ASCII.

    Raises:
        ValueError: if a part's value contains the boundary delimiter line.
    """
    if content_type:
        ct = headers.parse_content_type(content_type)
        if ct is not None:
            try:
                raw_boundary = ct[2]["boundary"].encode("ascii")
                boundary = quote(raw_boundary)
            except (KeyError, UnicodeError):
                return b""
            hdrs = []
            for key, value in parts:
                # Guess a per-part content type from the key; key is bytes, so
                # str(key) yields "b'...'" — presumably good enough for the
                # extension-based guess. TODO confirm against callers.
                file_type = (
                    mimetypes.guess_type(str(key))[0] or "text/plain; charset=utf-8"
                )
                if key:
                    hdrs.append(b"--%b" % boundary.encode("utf-8"))
                    disposition = b'form-data; name="%b"' % key
                    hdrs.append(b"Content-Disposition: %b" % disposition)
                    hdrs.append(b"Content-Type: %b" % file_type.encode("utf-8"))
                    hdrs.append(b"")
                    hdrs.append(value)
                    hdrs.append(b"")
                if value is not None:
                    # If boundary is found in value then raise ValueError
                    if re.search(
                        rb"^--%b$" % re.escape(boundary.encode("utf-8")), value
                    ):
                        # Bug fix: the message was previously a bytes literal,
                        # which renders as b'...' in the exception text.
                        raise ValueError("boundary found in encoded string")
            hdrs.append(b"--%b--\r\n" % boundary.encode("utf-8"))
            temp = b"\r\n".join(hdrs)
            return temp
    return b""
def decode_multipart(
    content_type: str | None, content: bytes
) -> list[tuple[bytes, bytes]]:
    """
    Takes a multipart boundary encoded string and returns list of (key, value) tuples.
    Returns [] if the content type is missing, unparsable, or has no ASCII boundary.
    """
    if content_type:
        ct = headers.parse_content_type(content_type)
        if not ct:
            return []
        try:
            boundary = ct[2]["boundary"].encode("ascii")
        except (KeyError, UnicodeError):
            return []
        # Extracts the part name from its Content-Disposition header line.
        rx = re.compile(rb'\bname="([^"]+)"')
        r = []
        if content is not None:
            for i in content.split(b"--" + boundary):
                parts = i.splitlines()
                # Skip the preamble/epilogue fragments; "--" marks the final delimiter.
                if len(parts) > 1 and parts[0][0:2] != b"--":
                    match = rx.search(parts[1])
                    if match:
                        key = match.group(1)
                        # The value is everything after the first blank line of the part.
                        value = b"".join(parts[3 + parts[2:].index(b"") :])
                        r.append((key, value))
        return r
    return []
def encode(ct, parts):  # pragma: no cover
    """Deprecated alias for `encode_multipart` (deprecated 2023-02)."""
    message = "multipart.encode is deprecated, use multipart.encode_multipart instead."
    warnings.warn(message, DeprecationWarning, stacklevel=2)
    return encode_multipart(ct, parts)
def decode(ct, content):  # pragma: no cover
    """Deprecated alias for `decode_multipart` (deprecated 2023-02)."""
    warnings.warn(
        "multipart.decode is deprecated, use multipart.decode_multipart instead.",
        DeprecationWarning,
        stacklevel=2,
    )
    # Bug fix: this shim previously delegated to encode_multipart, so the
    # deprecated decode() returned *encoded* output instead of decoding.
    return decode_multipart(ct, content)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/http/status_codes.py | mitmproxy/net/http/status_codes.py | # Covered status codes:
# - official HTTP status codes: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status
# - custom codes:
# - 444 No Response
# - 499 Client Closed Request
# Numeric HTTP status code constants, grouped by class (1xx-5xx).
CONTINUE = 100
SWITCHING = 101
PROCESSING = 102
EARLY_HINTS = 103
OK = 200
CREATED = 201
ACCEPTED = 202
NON_AUTHORITATIVE_INFORMATION = 203
NO_CONTENT = 204
RESET_CONTENT = 205
PARTIAL_CONTENT = 206
MULTI_STATUS = 207
ALREADY_REPORTED = 208
IM_USED = 226
MULTIPLE_CHOICE = 300
MOVED_PERMANENTLY = 301
FOUND = 302
SEE_OTHER = 303
NOT_MODIFIED = 304
USE_PROXY = 305
TEMPORARY_REDIRECT = 307
PERMANENT_REDIRECT = 308
BAD_REQUEST = 400
UNAUTHORIZED = 401
PAYMENT_REQUIRED = 402
FORBIDDEN = 403
NOT_FOUND = 404
NOT_ALLOWED = 405
NOT_ACCEPTABLE = 406
PROXY_AUTH_REQUIRED = 407
REQUEST_TIMEOUT = 408
CONFLICT = 409
GONE = 410
LENGTH_REQUIRED = 411
PRECONDITION_FAILED = 412
PAYLOAD_TOO_LARGE = 413
REQUEST_URI_TOO_LONG = 414
UNSUPPORTED_MEDIA_TYPE = 415
REQUESTED_RANGE_NOT_SATISFIABLE = 416
EXPECTATION_FAILED = 417
IM_A_TEAPOT = 418
MISDIRECTED_REQUEST = 421
UNPROCESSABLE_CONTENT = 422
LOCKED = 423
FAILED_DEPENDENCY = 424
TOO_EARLY = 425
UPGRADE_REQUIRED = 426
PRECONDITION_REQUIRED = 428
TOO_MANY_REQUESTS = 429
REQUEST_HEADER_FIELDS_TOO_LARGE = 431
UNAVAILABLE_FOR_LEGAL_REASONS = 451
# Non-standard codes (nginx convention).
NO_RESPONSE = 444
CLIENT_CLOSED_REQUEST = 499
INTERNAL_SERVER_ERROR = 500
NOT_IMPLEMENTED = 501
BAD_GATEWAY = 502
SERVICE_UNAVAILABLE = 503
GATEWAY_TIMEOUT = 504
HTTP_VERSION_NOT_SUPPORTED = 505
VARIANT_ALSO_NEGOTIATES = 506
INSUFFICIENT_STORAGE_SPACE = 507
LOOP_DETECTED = 508
NOT_EXTENDED = 510
NETWORK_AUTHENTICATION_REQUIRED = 511
# Maps each status code to its canonical reason phrase.
RESPONSES = {
    # 100
    CONTINUE: "Continue",
    SWITCHING: "Switching Protocols",
    PROCESSING: "Processing",
    EARLY_HINTS: "Early Hints",
    # 200
    OK: "OK",
    CREATED: "Created",
    ACCEPTED: "Accepted",
    NON_AUTHORITATIVE_INFORMATION: "Non-Authoritative Information",
    NO_CONTENT: "No Content",
    RESET_CONTENT: "Reset Content",
    PARTIAL_CONTENT: "Partial Content",
    MULTI_STATUS: "Multi-Status",
    ALREADY_REPORTED: "Already Reported",
    IM_USED: "IM Used",
    # 300
    MULTIPLE_CHOICE: "Multiple Choices",
    MOVED_PERMANENTLY: "Moved Permanently",
    FOUND: "Found",
    SEE_OTHER: "See Other",
    NOT_MODIFIED: "Not Modified",
    USE_PROXY: "Use Proxy",
    # 306 not defined??
    TEMPORARY_REDIRECT: "Temporary Redirect",
    PERMANENT_REDIRECT: "Permanent Redirect",
    # 400
    BAD_REQUEST: "Bad Request",
    UNAUTHORIZED: "Unauthorized",
    PAYMENT_REQUIRED: "Payment Required",
    FORBIDDEN: "Forbidden",
    NOT_FOUND: "Not Found",
    NOT_ALLOWED: "Method Not Allowed",
    NOT_ACCEPTABLE: "Not Acceptable",
    PROXY_AUTH_REQUIRED: "Proxy Authentication Required",
    REQUEST_TIMEOUT: "Request Time-out",
    CONFLICT: "Conflict",
    GONE: "Gone",
    LENGTH_REQUIRED: "Length Required",
    PRECONDITION_FAILED: "Precondition Failed",
    PAYLOAD_TOO_LARGE: "Payload Too Large",
    REQUEST_URI_TOO_LONG: "Request-URI Too Long",
    UNSUPPORTED_MEDIA_TYPE: "Unsupported Media Type",
    REQUESTED_RANGE_NOT_SATISFIABLE: "Requested Range not satisfiable",
    EXPECTATION_FAILED: "Expectation Failed",
    IM_A_TEAPOT: "I'm a teapot",
    MISDIRECTED_REQUEST: "Misdirected Request",
    UNPROCESSABLE_CONTENT: "Unprocessable Content",
    LOCKED: "Locked",
    FAILED_DEPENDENCY: "Failed Dependency",
    TOO_EARLY: "Too Early",
    UPGRADE_REQUIRED: "Upgrade Required",
    PRECONDITION_REQUIRED: "Precondition Required",
    TOO_MANY_REQUESTS: "Too Many Requests",
    REQUEST_HEADER_FIELDS_TOO_LARGE: "Request Header Fields Too Large",
    UNAVAILABLE_FOR_LEGAL_REASONS: "Unavailable For Legal Reasons",
    NO_RESPONSE: "No Response",
    CLIENT_CLOSED_REQUEST: "Client Closed Request",
    # 500
    INTERNAL_SERVER_ERROR: "Internal Server Error",
    NOT_IMPLEMENTED: "Not Implemented",
    BAD_GATEWAY: "Bad Gateway",
    SERVICE_UNAVAILABLE: "Service Unavailable",
    GATEWAY_TIMEOUT: "Gateway Time-out",
    HTTP_VERSION_NOT_SUPPORTED: "HTTP Version not supported",
    VARIANT_ALSO_NEGOTIATES: "Variant Also Negotiates",
    INSUFFICIENT_STORAGE_SPACE: "Insufficient Storage Space",
    LOOP_DETECTED: "Loop Detected",
    NOT_EXTENDED: "Not Extended",
    NETWORK_AUTHENTICATION_REQUIRED: "Network Authentication Required",
}
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
def assemble_request(request):
    """Serialize a request (head + body) to its raw HTTP/1 wire format."""
    if request.data.content is None:
        raise ValueError("Cannot assemble flow with missing content")
    body = b"".join(
        assemble_body(
            request.data.headers, [request.data.content], request.data.trailers
        )
    )
    return assemble_request_head(request) + body
def assemble_request_head(request):
    """Serialize the request line and headers (no body) to raw bytes."""
    return b"%s\r\n%s\r\n" % (
        _assemble_request_line(request.data),
        _assemble_request_headers(request.data),
    )
def assemble_response(response):
    """Serialize a response (head + body) to its raw HTTP/1 wire format."""
    if response.data.content is None:
        raise ValueError("Cannot assemble flow with missing content")
    body = b"".join(
        assemble_body(
            response.data.headers, [response.data.content], response.data.trailers
        )
    )
    return assemble_response_head(response) + body
def assemble_response_head(response):
    """Serialize the status line and headers (no body) to raw bytes."""
    return b"%s\r\n%s\r\n" % (
        _assemble_response_line(response.data),
        _assemble_response_headers(response.data),
    )
def assemble_body(headers, body_chunks, trailers):
    """
    Yield the on-wire representation of an HTTP/1 message body.

    With a chunked transfer-encoding, each non-empty chunk is framed as
    `<hex length>\\r\\n<chunk>\\r\\n` and the stream is terminated by a zero
    chunk (optionally carrying trailers). Otherwise chunks pass through
    unchanged, and trailers are rejected because they require chunking.
    """
    chunked = "chunked" in headers.get("transfer-encoding", "").lower()
    if chunked:
        for chunk in body_chunks:
            if chunk:
                yield b"%x\r\n%s\r\n" % (len(chunk), chunk)
        if trailers:
            yield b"0\r\n%s\r\n" % trailers
        else:
            yield b"0\r\n\r\n"
    else:
        if trailers:
            raise ValueError(
                "Sending HTTP/1.1 trailer headers requires transfer-encoding: chunked"
            )
        yield from body_chunks
def _assemble_request_line(request_data):
"""
Args:
request_data (mitmproxy.net.http.request.RequestData)
"""
if request_data.method.upper() == b"CONNECT":
return b"%s %s %s" % (
request_data.method,
request_data.authority,
request_data.http_version,
)
elif request_data.authority:
return b"%s %s://%s%s %s" % (
request_data.method,
request_data.scheme,
request_data.authority,
request_data.path,
request_data.http_version,
)
else:
return b"%s %s %s" % (
request_data.method,
request_data.path,
request_data.http_version,
)
def _assemble_request_headers(request_data):
"""
Args:
request_data (mitmproxy.net.http.request.RequestData)
"""
return bytes(request_data.headers)
def _assemble_response_line(response_data):
return b"%s %d %s" % (
response_data.http_version,
response_data.status_code,
response_data.reason,
)
def _assemble_response_headers(response):
return bytes(response.headers)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/http/http1/__init__.py | mitmproxy/net/http/http1/__init__.py | from .assemble import assemble_body
from .assemble import assemble_request
from .assemble import assemble_request_head
from .assemble import assemble_response
from .assemble import assemble_response_head
from .read import connection_close
from .read import expected_http_body_size
from .read import read_request_head
from .read import read_response_head
# Explicit public API of the HTTP/1 read/assemble helpers.
__all__ = [
    "read_request_head",
    "read_response_head",
    "connection_close",
    "expected_http_body_size",
    "assemble_request",
    "assemble_request_head",
    "assemble_response",
    "assemble_response_head",
    "assemble_body",
]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/net/http/http1/read.py | mitmproxy/net/http/http1/read.py | import re
import time
import typing
from collections.abc import Iterable
from mitmproxy.http import Headers
from mitmproxy.http import Request
from mitmproxy.http import Response
from mitmproxy.net.http import url
from mitmproxy.net.http import validate
def get_header_tokens(headers, key):
    """
    Retrieve all tokens for a header key. A number of different headers
    follow a pattern where each header line can contain comma-separated
    tokens, and headers can be set multiple times.
    """
    if key not in headers:
        return []
    return [token.strip() for token in headers[key].split(",")]
def connection_close(http_version, headers):
    """
    Checks the message to see if the client connection should be closed
    according to RFC 2616 Section 8.1.
    If we don't have a Connection header, HTTP 1.1 connections are assumed
    to be persistent.
    """
    if "connection" in headers:
        tokens = get_header_tokens(headers, "connection")
        if "close" in tokens:
            return True
        if "keep-alive" in tokens:
            return False
    # Without an explicit Connection header, only HTTP/1.1 and HTTP/2.0
    # default to persistent connections.
    persistent_versions = ("HTTP/1.1", b"HTTP/1.1", "HTTP/2.0", b"HTTP/2.0")
    return http_version not in persistent_versions
def expected_http_body_size(
    request: Request, response: Response | None = None
) -> int | None:
    """
    Returns:
        The expected body length:
        - a positive integer, if the size is known in advance
        - None, if the size in unknown in advance (chunked encoding)
        - -1, if all data should be read until end of stream.
    Raises:
        ValueError, if the content-length or transfer-encoding header is invalid
    """
    # Determine response size according to http://tools.ietf.org/html/rfc7230#section-3.3, which is inlined below.
    if not response:
        headers = request.headers
    else:
        headers = response.headers
        # 1. Any response to a HEAD request and any response with a 1xx
        # (Informational), 204 (No Content), or 304 (Not Modified) status
        # code is always terminated by the first empty line after the
        # header fields, regardless of the header fields present in the
        # message, and thus cannot contain a message body.
        if request.method.upper() == "HEAD":
            return 0
        if 100 <= response.status_code <= 199:
            return 0
        if response.status_code in (204, 304):
            return 0
        # 2. Any 2xx (Successful) response to a CONNECT request implies that
        # the connection will become a tunnel immediately after the empty
        # line that concludes the header fields. A client MUST ignore any
        # Content-Length or Transfer-Encoding header fields received in
        # such a message.
        if 200 <= response.status_code <= 299 and request.method.upper() == "CONNECT":
            return 0
    # 3. If a Transfer-Encoding header field is present and the chunked
    # transfer coding (Section 4.1) is the final encoding, the message
    # body length is determined by reading and decoding the chunked
    # data until the transfer coding indicates the data is complete.
    #
    # If a Transfer-Encoding header field is present in a response and
    # the chunked transfer coding is not the final encoding, the
    # message body length is determined by reading the connection until
    # it is closed by the server. If a Transfer-Encoding header field
    # is present in a request and the chunked transfer coding is not
    # the final encoding, the message body length cannot be determined
    # reliably; the server MUST respond with the 400 (Bad Request)
    # status code and then close the connection.
    #
    # If a message is received with both a Transfer-Encoding and a
    # Content-Length header field, the Transfer-Encoding overrides the
    # Content-Length. Such a message might indicate an attempt to
    # perform request smuggling (Section 9.5) or response splitting
    # (Section 9.4) and ought to be handled as an error. A sender MUST
    # remove the received Content-Length field prior to forwarding such
    # a message downstream.
    #
    if te_str := headers.get("transfer-encoding"):
        te = validate.parse_transfer_encoding(te_str)
        match te:
            case "chunked" | "compress,chunked" | "deflate,chunked" | "gzip,chunked":
                return None
            case "compress" | "deflate" | "gzip" | "identity":
                if response:
                    return -1
                # These values are valid for responses only (not requests), which is ensured in
                # mitmproxy.net.http.validate. If users have explicitly disabled header validation,
                # we strive for maximum compatibility with weird clients.
                if te == "identity" or "content-length" in headers:
                    pass  # Content-Length or 0
                else:
                    return (
                        -1
                    )  # compress/deflate/gzip with no content-length -> read until eof
            case other:  # pragma: no cover
                typing.assert_never(other)
    # 4. If a message is received without Transfer-Encoding and with
    # either multiple Content-Length header fields having differing
    # field-values or a single Content-Length header field having an
    # invalid value, then the message framing is invalid and the
    # recipient MUST treat it as an unrecoverable error. If this is a
    # request message, the server MUST respond with a 400 (Bad Request)
    # status code and then close the connection. If this is a response
    # message received by a proxy, the proxy MUST close the connection
    # to the server, discard the received response, and send a 502 (Bad
    # Gateway) response to the client. If this is a response message
    # received by a user agent, the user agent MUST close the
    # connection to the server and discard the received response.
    #
    # 5. If a valid Content-Length header field is present without
    # Transfer-Encoding, its decimal value defines the expected message
    # body length in octets. If the sender closes the connection or
    # the recipient times out before the indicated number of octets are
    # received, the recipient MUST consider the message to be
    # incomplete and close the connection.
    if cl := headers.get("content-length"):
        return validate.parse_content_length(cl)
    # 6. If this is a request message and none of the above are true, then
    # the message body length is zero (no message body is present).
    if not response:
        return 0
    # 7. Otherwise, this is a response message without a declared message
    # body length, so the message body length is determined by the
    # number of octets received prior to the server closing the
    # connection.
    return -1
def raise_if_http_version_unknown(http_version: bytes) -> None:
    """Raise ValueError unless the version looks like b"HTTP/x.y"."""
    m = re.match(rb"^HTTP/\d\.\d$", http_version)
    if m is None:
        raise ValueError(f"Unknown HTTP version: {http_version!r}")
def _read_request_line(
    line: bytes,
) -> tuple[str, int, bytes, bytes, bytes, bytes, bytes]:
    """
    Parse an HTTP/1 request line into
    (host, port, method, scheme, authority, path, http_version).
    Raises ValueError if the line is malformed.
    """
    try:
        method, target, http_version = line.split()
        port: int | None
        if target == b"*" or target.startswith(b"/"):
            # origin-form (or asterisk-form): no scheme/authority information.
            scheme, authority, path = b"", b"", target
            host, port = "", 0
        elif method == b"CONNECT":
            # authority-form: CONNECT requires an explicit port.
            scheme, authority, path = b"", target, b""
            host, port = url.parse_authority(authority, check=True)
            if not port:
                raise ValueError
        else:
            # absolute-form: scheme://authority/path
            scheme, rest = target.split(b"://", maxsplit=1)
            authority, _, path_ = rest.partition(b"/")
            path = b"/" + path_
            host, port = url.parse_authority(authority, check=True)
            port = port or url.default_port(scheme)
            if not port:
                raise ValueError
        # TODO: we can probably get rid of this check?
        url.parse(target)
        raise_if_http_version_unknown(http_version)
    except ValueError as e:
        raise ValueError(f"Bad HTTP request line: {line!r}") from e
    return host, port, method, scheme, authority, path, http_version
def _read_response_line(line: bytes) -> tuple[bytes, int, bytes]:
    """Split an HTTP/1 status line into (http_version, status_code, reason)."""
    try:
        fields = line.split(None, 2)
        if len(fields) == 2:
            # Tolerate a missing reason phrase.
            fields.append(b"")
        http_version, raw_status, reason = fields
        status_code = int(raw_status)
        raise_if_http_version_unknown(http_version)
    except ValueError as e:
        raise ValueError(f"Bad HTTP response line: {line!r}") from e
    return http_version, status_code, reason
def _read_headers(lines: Iterable[bytes]) -> Headers:
    """
    Read a set of headers.
    Stop once a blank line is reached.
    Returns:
        A headers object
    Raises:
        ValueError, if the input is malformed.
    """
    ret: list[tuple[bytes, bytes]] = []
    for line in lines:
        if line[0] in b" \t":
            # Line folding (obs-fold): leading whitespace continues the
            # previous header's value; invalid as the very first line.
            if not ret:
                raise ValueError("Invalid headers")
            # continued header
            ret[-1] = (ret[-1][0], ret[-1][1] + b"\r\n " + line.strip())
        else:
            try:
                name, value = line.split(b":", 1)
                value = value.strip()
                if not name:
                    raise ValueError()
                ret.append((name, value))
            except ValueError:
                raise ValueError(f"Invalid header line: {line!r}")
    return Headers(ret)
def read_request_head(lines: list[bytes]) -> Request:
    """
    Parse an HTTP request head (request line + headers) from an iterable of lines
    Args:
        lines: The input lines
    Returns:
        The HTTP request object (without body)
    Raises:
        ValueError: The input is malformed.
    """
    # The first line is the request line; everything after it is headers.
    host, port, method, scheme, authority, path, http_version = _read_request_line(
        lines[0]
    )
    headers = _read_headers(lines[1:])
    return Request(
        host=host,
        port=port,
        method=method,
        scheme=scheme,
        authority=authority,
        path=path,
        http_version=http_version,
        headers=headers,
        # Body and trailers are read separately by the caller.
        content=None,
        trailers=None,
        timestamp_start=time.time(),
        timestamp_end=None,
    )
def read_response_head(lines: list[bytes]) -> Response:
    """
    Parse an HTTP response head (response line + headers) from an iterable of lines
    Args:
        lines: The input lines
    Returns:
        The HTTP response object (without body)
    Raises:
        ValueError: The input is malformed.
    """
    # The first line is the status line; everything after it is headers.
    http_version, status_code, reason = _read_response_line(lines[0])
    headers = _read_headers(lines[1:])
    return Response(
        http_version=http_version,
        status_code=status_code,
        reason=reason,
        headers=headers,
        # Body and trailers are read separately by the caller.
        content=None,
        trailers=None,
        timestamp_start=time.time(),
        timestamp_end=None,
    )
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
class BiDi:
    """
    A wee utility class for keeping bi-directional mappings, like field
    constants in protocols. Names are attributes on the object, dict-like
    access maps values to names:

        CONST = BiDi(a=1, b=2)
        assert CONST.a == 1
        assert CONST.get_name(1) == "a"

    Raises:
        ValueError: if two names map to the same value.
    """

    def __init__(self, **kwargs):
        self.names = kwargs
        self.values = {}
        for k, v in kwargs.items():
            self.values[v] = k
        # A collision in `values` means two names shared one value.
        if len(self.names) != len(self.values):
            raise ValueError("Duplicate values not allowed.")

    def __getattr__(self, k):
        if k in self.names:
            return self.names[k]
        # Bug fix: the message was previously passed as two positional
        # arguments ("No such attribute: %s", k) and never got formatted.
        raise AttributeError(f"No such attribute: {k}")

    def get_name(self, n, default=None):
        """Reverse lookup: return the name bound to value `n`, or `default`."""
        return self.values.get(n, default)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/coretypes/serializable.py | mitmproxy/coretypes/serializable.py | import abc
import collections.abc
import dataclasses
import enum
import typing
import uuid
from functools import cache
from typing import TypeVar
try:
    from types import NoneType
    from types import UnionType
except ImportError:  # pragma: no cover
    # Python < 3.10 compatibility: provide stand-ins with the same names so
    # the isinstance/origin checks below keep working.
    class UnionType:  # type: ignore
        pass
    NoneType = type(None)  # type: ignore
# Type variable bound to Serializable, so from_state/copy return the subclass type.
T = TypeVar("T", bound="Serializable")
# Serialized state is an arbitrary (JSON-ish) structure.
State = typing.Any
class Serializable(metaclass=abc.ABCMeta):
    """
    Abstract Base Class that defines an API to save an object's state and restore it later on.
    """
    @classmethod
    @abc.abstractmethod
    def from_state(cls: type[T], state) -> T:
        """
        Create a new object from the given state.
        Consumes the passed state.
        """
        raise NotImplementedError()
    @abc.abstractmethod
    def get_state(self) -> State:
        """
        Retrieve object state.
        """
        raise NotImplementedError()
    @abc.abstractmethod
    def set_state(self, state):
        """
        Set object state to the given state. Consumes the passed state.
        May raise a `dataclasses.FrozenInstanceError` if the object is immutable.
        """
        raise NotImplementedError()
    def copy(self: T) -> T:
        """Return a copy of this object, with a fresh "id" (if it has one)."""
        state = self.get_state()
        # Give the copy a new identity so it is not confused with the original.
        if isinstance(state, dict) and "id" in state:
            state["id"] = str(uuid.uuid4())
        return self.from_state(state)
U = TypeVar("U", bound="SerializableDataclass")
class SerializableDataclass(Serializable):
    """
    Serializable mixin for dataclasses: derives get_state/from_state/set_state
    from the dataclass fields (fields with metadata serialize=False are skipped).
    """
    @classmethod
    @cache
    # NOTE(review): @cache on a classmethod keys on `cls` and keeps each class
    # alive for the process lifetime — fine for classes, unlike instances.
    def __fields(cls) -> tuple[dataclasses.Field, ...]:
        # with from __future__ import annotations, `field.type` is a string,
        # see https://github.com/python/cpython/issues/83623.
        hints = typing.get_type_hints(cls)
        fields = []
        # noinspection PyDataclass
        for field in dataclasses.fields(cls):  # type: ignore[arg-type]
            if field.metadata.get("serialize", True) is False:
                continue
            if isinstance(field.type, str):
                field.type = hints[field.name]
            fields.append(field)
        return tuple(fields)
    def get_state(self) -> State:
        """Serialize all serializable fields into a plain dict."""
        state: dict[str, State] = {}
        for field in self.__fields():
            val = getattr(self, field.name)
            state[field.name] = _to_state(val, field.type, field.name)
        return state
    @classmethod
    def from_state(cls: type[U], state) -> U:
        """Construct an instance from `state`; consumes the passed dict."""
        # state = state.copy()
        for field in cls.__fields():
            state[field.name] = _to_val(state[field.name], field.type, field.name)
        try:
            return cls(**state)  # type: ignore
        except TypeError as e:
            raise ValueError(f"Invalid state for {cls}: {e} ({state=})") from e
    def set_state(self, state: State) -> None:
        """Update this instance in place from `state`; consumes the dict."""
        for field in self.__fields():
            current = getattr(self, field.name)
            f_state = state.pop(field.name)
            # Prefer recursing into nested Serializable values so identity is kept.
            if isinstance(current, Serializable) and f_state is not None:
                try:
                    current.set_state(f_state)
                    continue
                except dataclasses.FrozenInstanceError:
                    pass
            val: typing.Any = _to_val(f_state, field.type, field.name)
            try:
                setattr(self, field.name, val)
            except dataclasses.FrozenInstanceError:
                state[field.name] = f_state  # restore state dict.
                raise
        if state:
            raise ValueError(
                f"Unexpected fields in {type(self).__name__}.set_state: {state}"
            )
def _process(
attr_val: typing.Any, attr_type: typing.Any, attr_name: str, make: bool
) -> typing.Any:
origin = typing.get_origin(attr_type)
if origin is typing.Literal:
if attr_val not in typing.get_args(attr_type):
raise ValueError(
f"Invalid value for {attr_name}: {attr_val!r} does not match any literal value."
)
return attr_val
if origin in (UnionType, typing.Union):
attr_type, nt = typing.get_args(attr_type)
assert nt is NoneType, (
f"{attr_name}: only `x | None` union types are supported`"
)
if attr_val is None:
return None # type: ignore
else:
return _process(attr_val, attr_type, attr_name, make)
else:
if attr_val is None:
raise ValueError(f"Attribute {attr_name} must not be None.")
if make and hasattr(attr_type, "from_state"):
return attr_type.from_state(attr_val) # type: ignore
elif not make and hasattr(attr_type, "get_state"):
return attr_val.get_state()
if origin in (list, collections.abc.Sequence):
(T,) = typing.get_args(attr_type)
return [_process(x, T, attr_name, make) for x in attr_val] # type: ignore
elif origin is tuple:
# We don't have a good way to represent tuple[str,int] | tuple[str,int,int,int], so we do a dirty hack here.
if attr_name in ("peername", "sockname"):
return tuple(
_process(x, T, attr_name, make)
for x, T in zip(attr_val, [str, int, int, int])
) # type: ignore
Ts = typing.get_args(attr_type)
if len(Ts) != len(attr_val):
raise ValueError(
f"Invalid data for {attr_name}. Expected {Ts}, got {attr_val}."
)
return tuple(_process(x, T, attr_name, make) for T, x in zip(Ts, attr_val)) # type: ignore
elif origin is dict:
k_cls, v_cls = typing.get_args(attr_type)
return {
_process(k, k_cls, attr_name, make): _process(v, v_cls, attr_name, make)
for k, v in attr_val.items()
} # type: ignore
elif attr_type in (int, float):
if not isinstance(attr_val, (int, float)):
raise ValueError(
f"Invalid value for {attr_name}. Expected {attr_type}, got {attr_val} ({type(attr_val)})."
)
return attr_type(attr_val) # type: ignore
elif attr_type in (str, bytes, bool):
if not isinstance(attr_val, attr_type):
raise ValueError(
f"Invalid value for {attr_name}. Expected {attr_type}, got {attr_val} ({type(attr_val)})."
)
return attr_type(attr_val) # type: ignore
elif isinstance(attr_type, type) and issubclass(attr_type, enum.Enum):
if make:
return attr_type(attr_val) # type: ignore
else:
return attr_val.value
else:
raise TypeError(f"Unexpected type for {attr_name}: {attr_type!r}")
def _to_val(state: typing.Any, attr_type: typing.Any, attr_name: str) -> typing.Any:
"""Create an object based on the state given in val."""
return _process(state, attr_type, attr_name, True)
def _to_state(value: typing.Any, attr_type: typing.Any, attr_name: str) -> typing.Any:
"""Get the state of the object given as val."""
return _process(value, attr_type, attr_name, False)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/coretypes/__init__.py | mitmproxy/coretypes/__init__.py | python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false | |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/coretypes/multidict.py | mitmproxy/coretypes/multidict.py | from abc import ABCMeta
from abc import abstractmethod
from collections.abc import Iterator
from collections.abc import MutableMapping
from collections.abc import Sequence
from typing import TypeVar
from mitmproxy.coretypes import serializable
KT = TypeVar("KT")
VT = TypeVar("VT")
class _MultiDict(MutableMapping[KT, VT], metaclass=ABCMeta):
"""
A MultiDict is a dictionary-like data structure that supports multiple values per key.
"""
fields: tuple[tuple[KT, VT], ...]
"""The underlying raw datastructure."""
def __repr__(self):
fields = (repr(field) for field in self.fields)
return "{cls}[{fields}]".format(
cls=type(self).__name__, fields=", ".join(fields)
)
@staticmethod
@abstractmethod
def _reduce_values(values: Sequence[VT]) -> VT:
"""
If a user accesses multidict["foo"], this method
reduces all values for "foo" to a single value that is returned.
For example, HTTP headers are folded, whereas we will just take
the first cookie we found with that name.
"""
@staticmethod
@abstractmethod
def _kconv(key: KT) -> KT:
"""
This method converts a key to its canonical representation.
For example, HTTP headers are case-insensitive, so this method returns key.lower().
"""
def __getitem__(self, key: KT) -> VT:
values = self.get_all(key)
if not values:
raise KeyError(key)
return self._reduce_values(values)
def __setitem__(self, key: KT, value: VT) -> None:
self.set_all(key, [value])
def __delitem__(self, key: KT) -> None:
if key not in self:
raise KeyError(key)
key = self._kconv(key)
self.fields = tuple(
field for field in self.fields if key != self._kconv(field[0])
)
def __iter__(self) -> Iterator[KT]:
seen = set()
for key, _ in self.fields:
key_kconv = self._kconv(key)
if key_kconv not in seen:
seen.add(key_kconv)
yield key
def __len__(self) -> int:
return len({self._kconv(key) for key, _ in self.fields})
def __eq__(self, other) -> bool:
if isinstance(other, MultiDict):
return self.fields == other.fields
return False
def get_all(self, key: KT) -> list[VT]:
"""
Return the list of all values for a given key.
If that key is not in the MultiDict, the return value will be an empty list.
"""
key = self._kconv(key)
return [value for k, value in self.fields if self._kconv(k) == key]
def set_all(self, key: KT, values: list[VT]) -> None:
"""
Remove the old values for a key and add new ones.
"""
key_kconv = self._kconv(key)
new_fields: list[tuple[KT, VT]] = []
for field in self.fields:
if self._kconv(field[0]) == key_kconv:
if values:
new_fields.append((field[0], values.pop(0)))
else:
new_fields.append(field)
while values:
new_fields.append((key, values.pop(0)))
self.fields = tuple(new_fields)
def add(self, key: KT, value: VT) -> None:
"""
Add an additional value for the given key at the bottom.
"""
self.insert(len(self.fields), key, value)
def insert(self, index: int, key: KT, value: VT) -> None:
"""
Insert an additional value for the given key at the specified position.
"""
item = (key, value)
self.fields = self.fields[:index] + (item,) + self.fields[index:]
def keys(self, multi: bool = False):
"""
Get all keys.
If `multi` is True, one key per value will be returned.
If `multi` is False, duplicate keys will only be returned once.
"""
return (k for k, _ in self.items(multi))
def values(self, multi: bool = False):
"""
Get all values.
If `multi` is True, all values will be returned.
If `multi` is False, only the first value per key will be returned.
"""
return (v for _, v in self.items(multi))
def items(self, multi: bool = False):
"""
Get all (key, value) tuples.
If `multi` is True, all `(key, value)` pairs will be returned.
If False, only one tuple per key is returned.
"""
if multi:
return self.fields
else:
return super().items()
class MultiDict(_MultiDict[KT, VT], serializable.Serializable):
"""A concrete MultiDict, storing its own data."""
def __init__(self, fields=()):
super().__init__()
self.fields = tuple(tuple(i) for i in fields) # type: ignore
@staticmethod
def _reduce_values(values):
return values[0]
@staticmethod
def _kconv(key):
return key
def get_state(self):
return self.fields
def set_state(self, state):
self.fields = tuple(tuple(x) for x in state) # type: ignore
@classmethod
def from_state(cls, state):
return cls(state)
class MultiDictView(_MultiDict[KT, VT]):
"""
The MultiDictView provides the MultiDict interface over calculated data.
The view itself contains no state - data is retrieved from the parent on
request, and stored back to the parent on change.
"""
def __init__(self, getter, setter):
self._getter = getter
self._setter = setter
super().__init__()
@staticmethod
def _kconv(key):
# All request-attributes are case-sensitive.
return key
@staticmethod
def _reduce_values(values):
# We just return the first element if
# multiple elements exist with the same key.
return values[0]
@property # type: ignore
def fields(self):
return self._getter()
@fields.setter
def fields(self, value):
self._setter(value)
def copy(self) -> "MultiDict[KT,VT]":
return MultiDict(self.fields)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/utils/asyncio_utils.py | mitmproxy/utils/asyncio_utils.py | import asyncio
import os
import sys
import time
from collections.abc import Coroutine
from collections.abc import Iterator
from contextlib import contextmanager
from mitmproxy.utils import human
_KEEP_ALIVE = set()
def create_task(
coro: Coroutine,
*,
name: str,
keep_ref: bool,
client: tuple | None = None,
) -> asyncio.Task:
"""
Wrapper around `asyncio.create_task`.
- Use `keep_ref` to keep an internal reference.
This ensures that the task is not garbage collected mid-execution if no other reference is kept.
- Use `client` to pass the client address as additional debug info on the task.
"""
t = asyncio.create_task(coro) # noqa: TID251
set_task_debug_info(t, name=name, client=client)
if keep_ref and not t.done():
# The event loop only keeps weak references to tasks.
# A task that isn’t referenced elsewhere may get garbage collected at any time, even before it’s done.
_KEEP_ALIVE.add(t)
t.add_done_callback(_KEEP_ALIVE.discard)
return t
def set_task_debug_info(
task: asyncio.Task,
*,
name: str,
client: tuple | None = None,
) -> None:
"""Set debug info for an externally-spawned task."""
task.created = time.time() # type: ignore
if __debug__ is True and (test := os.environ.get("PYTEST_CURRENT_TEST", None)):
name = f"{name} [created in {test}]"
task.set_name(name)
if client:
task.client = client # type: ignore
def set_current_task_debug_info(
*,
name: str,
client: tuple | None = None,
) -> None:
"""Set debug info for the current task."""
task = asyncio.current_task()
assert task
set_task_debug_info(task, name=name, client=client)
def task_repr(task: asyncio.Task) -> str:
"""Get a task representation with debug info."""
name = task.get_name()
a: float = getattr(task, "created", 0)
if a:
age = f" (age: {time.time() - a:.0f}s)"
else:
age = ""
client = getattr(task, "client", "")
if client:
client = f"{human.format_address(client)}: "
return f"{client}{name}{age}"
@contextmanager
def install_exception_handler(handler) -> Iterator[None]:
loop = asyncio.get_running_loop()
existing = loop.get_exception_handler()
loop.set_exception_handler(handler)
try:
yield
finally:
loop.set_exception_handler(existing)
@contextmanager
def set_eager_task_factory() -> Iterator[None]:
loop = asyncio.get_running_loop()
if sys.version_info < (3, 12): # pragma: no cover
yield
else:
existing = loop.get_task_factory()
loop.set_task_factory(asyncio.eager_task_factory) # type: ignore
try:
yield
finally:
loop.set_task_factory(existing)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/utils/strutils.py | mitmproxy/utils/strutils.py | import codecs
import io
import re
from collections.abc import Iterable
from typing import overload
# https://mypy.readthedocs.io/en/stable/more_types.html#function-overloading
@overload
def always_bytes(str_or_bytes: None, *encode_args) -> None: ...
@overload
def always_bytes(str_or_bytes: str | bytes, *encode_args) -> bytes: ...
def always_bytes(str_or_bytes: None | str | bytes, *encode_args) -> None | bytes:
if str_or_bytes is None or isinstance(str_or_bytes, bytes):
return str_or_bytes
elif isinstance(str_or_bytes, str):
return str_or_bytes.encode(*encode_args)
else:
raise TypeError(
f"Expected str or bytes, but got {type(str_or_bytes).__name__}."
)
@overload
def always_str(str_or_bytes: None, *encode_args) -> None: ...
@overload
def always_str(str_or_bytes: str | bytes, *encode_args) -> str: ...
def always_str(str_or_bytes: None | str | bytes, *decode_args) -> None | str:
"""
Returns,
str_or_bytes unmodified, if
"""
if str_or_bytes is None or isinstance(str_or_bytes, str):
return str_or_bytes
elif isinstance(str_or_bytes, bytes):
return str_or_bytes.decode(*decode_args)
else:
raise TypeError(
f"Expected str or bytes, but got {type(str_or_bytes).__name__}."
)
# Translate control characters to "safe" characters. This implementation
# initially replaced them with the matching control pictures
# (http://unicode.org/charts/PDF/U2400.pdf), but that turned out to render badly
# with monospace fonts. We are back to "." therefore.
_control_char_trans = {
x: ord(".")
for x in range(32) # x + 0x2400 for unicode control group pictures
}
_control_char_trans[127] = ord(".") # 0x2421
_control_char_trans_newline = _control_char_trans.copy()
for x in ("\r", "\n", "\t"):
del _control_char_trans_newline[ord(x)]
_control_char_trans = str.maketrans(_control_char_trans)
_control_char_trans_newline = str.maketrans(_control_char_trans_newline)
def escape_control_characters(text: str, keep_spacing=True) -> str:
"""
Replace all unicode C1 control characters from the given text with a single "."
Args:
keep_spacing: If True, tabs and newlines will not be replaced.
"""
if not isinstance(text, str):
raise ValueError(f"text type must be unicode but is {type(text).__name__}")
trans = _control_char_trans_newline if keep_spacing else _control_char_trans
return text.translate(trans)
def bytes_to_escaped_str(
data: bytes, keep_spacing: bool = False, escape_single_quotes: bool = False
) -> str:
"""
Take bytes and return a safe string that can be displayed to the user.
Single quotes are always escaped, double quotes are never escaped:
"'" + bytes_to_escaped_str(...) + "'"
gives a valid Python string.
Args:
keep_spacing: If True, tabs and newlines will not be escaped.
"""
if not isinstance(data, bytes):
raise ValueError(f"data must be bytes, but is {data.__class__.__name__}")
# We always insert a double-quote here so that we get a single-quoted string back
# https://stackoverflow.com/questions/29019340/why-does-python-use-different-quotes-for-representing-strings-depending-on-their
ret = repr(b'"' + data).lstrip("b")[2:-1]
if not escape_single_quotes:
ret = re.sub(r"(?<!\\)(\\\\)*\\'", lambda m: (m.group(1) or "") + "'", ret)
if keep_spacing:
ret = re.sub(
r"(?<!\\)(\\\\)*\\([nrt])",
lambda m: (m.group(1) or "") + dict(n="\n", r="\r", t="\t")[m.group(2)],
ret,
)
return ret
def escaped_str_to_bytes(data: str) -> bytes:
"""
Take an escaped string and return the unescaped bytes equivalent.
Raises:
ValueError, if the escape sequence is invalid.
"""
if not isinstance(data, str):
raise ValueError(f"data must be str, but is {data.__class__.__name__}")
# This one is difficult - we use an undocumented Python API here
# as per http://stackoverflow.com/a/23151714/934719
return codecs.escape_decode(data)[0] # type: ignore
def is_mostly_bin(s: bytes) -> bool:
if not s:
return False
# Cut off at ~100 chars, but do it smartly so that if the input is UTF-8, we don't
# chop a multibyte code point in half.
if len(s) > 100:
for cut in range(100, 104):
is_continuation_byte = (s[cut] >> 6) == 0b10
if not is_continuation_byte:
# A new character starts here, so we cut off just before that.
s = s[:cut]
break
else:
s = s[:100]
low_bytes = sum(i < 9 or 13 < i < 32 for i in s)
high_bytes = sum(i > 126 for i in s)
ascii_bytes = len(s) - low_bytes - high_bytes
# Heuristic 1: If it's mostly printable ASCII, it's not bin.
if ascii_bytes / len(s) > 0.7:
return False
# Heuristic 2: If it's UTF-8 without too many ASCII control chars, it's not bin.
# Note that b"\x00\x00\x00" would be valid UTF-8, so we don't want to accept _any_
# UTF-8 with higher code points.
if (ascii_bytes + high_bytes) / len(s) > 0.95:
try:
s.decode()
return False
except ValueError:
pass
return True
def is_xml(s: bytes) -> bool:
for char in s:
if char in (9, 10, 32): # is space?
continue
return char == 60 # is a "<"?
return False
def clean_hanging_newline(t):
"""
Many editors will silently add a newline to the final line of a
document (I'm looking at you, Vim). This function fixes this common
problem at the risk of removing a hanging newline in the rare cases
where the user actually intends it.
"""
if t and t[-1] == "\n":
return t[:-1]
return t
def hexdump(s):
"""
Returns:
A generator of (offset, hex, str) tuples
"""
for i in range(0, len(s), 16):
offset = f"{i:0=10x}"
part = s[i : i + 16]
x = " ".join(f"{i:0=2x}" for i in part)
x = x.ljust(47) # 16*2 + 15
part_repr = always_str(
escape_control_characters(
part.decode("ascii", "replace").replace("\ufffd", "."), False
)
)
yield (offset, x, part_repr)
def _move_to_private_code_plane(matchobj):
return chr(ord(matchobj.group(0)) + 0xE000)
def _restore_from_private_code_plane(matchobj):
return chr(ord(matchobj.group(0)) - 0xE000)
NO_ESCAPE = r"(?<!\\)(?:\\\\)*"
MULTILINE_CONTENT = r"[\s\S]*?"
SINGLELINE_CONTENT = r".*?"
MULTILINE_CONTENT_LINE_CONTINUATION = r"(?:.|(?<=\\)\n)*?"
def split_special_areas(
data: str,
area_delimiter: Iterable[str],
):
"""
Split a string of code into a [code, special area, code, special area, ..., code] list.
For example,
>>> split_special_areas(
>>> "test /* don't modify me */ foo",
>>> [r"/\\*[\\s\\S]*?\\*/"]) # (regex matching comments)
["test ", "/* don't modify me */", " foo"]
"".join(split_special_areas(x, ...)) == x always holds true.
"""
return re.split("({})".format("|".join(area_delimiter)), data, flags=re.MULTILINE)
def escape_special_areas(
data: str,
area_delimiter: Iterable[str],
control_characters,
):
"""
Escape all control characters present in special areas with UTF8 symbols
in the private use plane (U+E000 t+ ord(char)).
This is useful so that one can then use regex replacements on the resulting string without
interfering with special areas.
control_characters must be 0 < ord(x) < 256.
Example:
>>> print(x)
if (true) { console.log('{}'); }
>>> x = escape_special_areas(x, "{", ["'" + SINGLELINE_CONTENT + "'"])
>>> print(x)
if (true) { console.log('�}'); }
>>> x = re.sub(r"\\s*{\\s*", " {\n ", x)
>>> x = unescape_special_areas(x)
>>> print(x)
if (true) {
console.log('{}'); }
"""
buf = io.StringIO()
parts = split_special_areas(data, area_delimiter)
rex = re.compile(rf"[{control_characters}]")
for i, x in enumerate(parts):
if i % 2:
x = rex.sub(_move_to_private_code_plane, x)
buf.write(x)
return buf.getvalue()
def unescape_special_areas(data: str):
"""
Invert escape_special_areas.
x == unescape_special_areas(escape_special_areas(x)) always holds true.
"""
return re.sub(r"[\ue000-\ue0ff]", _restore_from_private_code_plane, data)
def cut_after_n_lines(content: str, n: int) -> str:
assert n > 0
pos = content.find("\n")
while pos >= 0 and n > 1:
pos = content.find("\n", pos + 1)
n -= 1
if pos >= 0:
content = content[: pos + 1]
return content
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/utils/debug.py | mitmproxy/utils/debug.py | import asyncio
import gc
import linecache
import os
import platform
import signal
import sys
import threading
import traceback
from collections import Counter
from contextlib import redirect_stdout
from OpenSSL import SSL
from mitmproxy import version
from mitmproxy.utils import asyncio_utils
def dump_system_info():
mitmproxy_version = version.get_dev_version()
openssl_version: str | bytes = SSL.SSLeay_version(SSL.SSLEAY_VERSION)
if isinstance(openssl_version, bytes):
openssl_version = openssl_version.decode()
data = [
f"Mitmproxy: {mitmproxy_version}",
f"Python: {platform.python_version()}",
f"OpenSSL: {openssl_version}",
f"Platform: {platform.platform()}",
]
return "\n".join(data)
def dump_info(signal=None, frame=None, file=sys.stdout): # pragma: no cover
with redirect_stdout(file):
print("****************************************************")
print("Summary")
print("=======")
try:
import psutil
except ModuleNotFoundError:
print("(psutil not installed, skipping some debug info)")
else:
p = psutil.Process()
print("num threads: ", p.num_threads())
if hasattr(p, "num_fds"):
print("num fds: ", p.num_fds())
print("memory: ", p.memory_info())
print()
print("Files")
print("=====")
for i in p.open_files():
print(i)
print()
print("Connections")
print("===========")
for i in p.connections():
print(i)
print()
print("Threads")
print("=======")
bthreads = []
for i in threading.enumerate():
if hasattr(i, "_threadinfo"):
bthreads.append(i)
else:
print(i.name)
bthreads.sort(key=lambda x: getattr(x, "_thread_started", 0))
for i in bthreads:
print(i._threadinfo())
print()
print("Memory")
print("======")
gc.collect()
objs = Counter(str(type(i)) for i in gc.get_objects())
for cls, count in objs.most_common(20):
print(f"{count} {cls}")
print()
print("Memory (mitmproxy only)")
print("=======================")
mitm_objs = Counter({k: v for k, v in objs.items() if "mitmproxy" in k})
for cls, count in mitm_objs.most_common(20):
print(f"{count} {cls}")
try:
asyncio.get_running_loop()
except RuntimeError:
pass
else:
print()
print("Tasks")
print("=======")
for task in asyncio.all_tasks():
f = task.get_stack(limit=1)[0]
line = linecache.getline(
f.f_code.co_filename, f.f_lineno, f.f_globals
).strip()
line = f"{line} # at {os.path.basename(f.f_code.co_filename)}:{f.f_lineno}"
print(f"{asyncio_utils.task_repr(task)}\n {line}")
print("****************************************************")
if os.getenv("MITMPROXY_DEBUG_EXIT"): # pragma: no cover
sys.exit(1)
def dump_stacks(signal=None, frame=None, file=sys.stdout):
id2name = {th.ident: th.name for th in threading.enumerate()}
code = []
for threadId, stack in sys._current_frames().items():
code.append("\n# Thread: %s(%d)" % (id2name.get(threadId, ""), threadId))
for filename, lineno, name, line in traceback.extract_stack(stack):
code.append('File: "%s", line %d, in %s' % (filename, lineno, name))
if line:
code.append(" %s" % (line.strip()))
print("\n".join(code), file=file)
if os.getenv("MITMPROXY_DEBUG_EXIT"): # pragma: no cover
sys.exit(1)
def register_info_dumpers():
if os.name != "nt": # pragma: windows no cover
signal.signal(signal.SIGUSR1, dump_info) # type: ignore
signal.signal(signal.SIGUSR2, dump_stacks) # type: ignore
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/utils/spec.py | mitmproxy/utils/spec.py | from mitmproxy import flowfilter
def parse_spec(option: str) -> tuple[flowfilter.TFilter, str, str]:
"""
Parse strings in the following format:
[/flow-filter]/subject/replacement
"""
sep, rem = option[0], option[1:]
parts = rem.split(sep, 2)
if len(parts) == 2:
subject, replacement = parts
return flowfilter.match_all, subject, replacement
elif len(parts) == 3:
patt, subject, replacement = parts
flow_filter = flowfilter.parse(patt)
return flow_filter, subject, replacement
else:
raise ValueError("Invalid number of parameters (2 or 3 are expected)")
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/utils/arg_check.py | mitmproxy/utils/arg_check.py | import re
import sys
DEPRECATED = """
--confdir
-Z
--body-size-limit
--stream
--palette
--palette-transparent
--follow
--order
--no-mouse
--reverse
--http2-priority
--no-http2-priority
--no-websocket
--websocket
--upstream-bind-address
--ciphers-client
--ciphers-server
--client-certs
--no-upstream-cert
--add-upstream-certs-to-client-chain
--upstream-trusted-confdir
--upstream-trusted-ca
--ssl-version-client
--ssl-version-server
--no-onboarding
--onboarding-host
--onboarding-port
--server-replay-use-header
--no-pop
--replay-ignore-content
--replay-ignore-payload-param
--replay-ignore-param
--replay-ignore-host
--replace-from-file
"""
REPLACED = """
-t
-u
--wfile
-a
--afile
-z
-b
--bind-address
--port
-I
--ignore
--tcp
--cert
--insecure
-c
--replace
--replacements
-i
-f
--filter
--socks
--server-replay-nopop
"""
REPLACEMENTS = {
"--stream": "stream_large_bodies",
"--palette": "console_palette",
"--palette-transparent": "console_palette_transparent:",
"--follow": "console_focus_follow",
"--order": "view_order",
"--no-mouse": "console_mouse",
"--reverse": "view_order_reversed",
"--no-websocket": "websocket",
"--no-upstream-cert": "upstream_cert",
"--upstream-trusted-confdir": "ssl_verify_upstream_trusted_confdir",
"--upstream-trusted-ca": "ssl_verify_upstream_trusted_ca",
"--no-onboarding": "onboarding",
"--no-pop": "server_replay_reuse",
"--replay-ignore-content": "server_replay_ignore_content",
"--replay-ignore-payload-param": "server_replay_ignore_payload_params",
"--replay-ignore-param": "server_replay_ignore_params",
"--replay-ignore-host": "server_replay_ignore_host",
"--replace-from-file": "replacements (use @ to specify path)",
"-t": "--stickycookie",
"-u": "--stickyauth",
"--wfile": "--save-stream-file",
"-a": "-w Prefix path with + to append.",
"--afile": "-w Prefix path with + to append.",
"-z": "--anticomp",
"-b": "--listen-host",
"--bind-address": "--listen-host",
"--port": "--listen-port",
"-I": "--ignore-hosts",
"--ignore": "--ignore-hosts",
"--tcp": "--tcp-hosts",
"--cert": "--certs",
"--insecure": "--ssl-insecure",
"-c": "-C",
"--replace": ["--modify-body", "--modify-headers"],
"--replacements": ["--modify-body", "--modify-headers"],
"-i": "--intercept",
"-f": "--view-filter",
"--filter": "--view-filter",
"--socks": "--mode socks5",
"--server-replay-nopop": "--server-replay-reuse",
}
def check():
args = sys.argv[1:]
print()
if "-U" in args:
print("-U is deprecated, please use --mode upstream:SPEC instead")
if "-T" in args:
print("-T is deprecated, please use --mode transparent instead")
for option in ("-e", "--eventlog", "--norefresh"):
if option in args:
print(f"{option} has been removed.")
for option in ("--nonanonymous", "--singleuser", "--htpasswd"):
if option in args:
print(
"{} is deprecated.\n"
"Please use `--proxyauth SPEC` instead.\n"
'SPEC Format: "username:pass", "any" to accept any user/pass combination,\n'
'"@path" to use an Apache htpasswd file, or\n'
'"ldap[s]:url_server_ldap[:port]:dn_auth:password:dn_subtree[?search_filter_key=...]" '
"for LDAP authentication.".format(option)
)
for option in REPLACED.splitlines():
if option in args:
r = REPLACEMENTS.get(option)
if isinstance(r, list):
new_options = r
else:
new_options = [r]
print(
"{} is deprecated.\nPlease use `{}` instead.".format(
option, "` or `".join(new_options)
)
)
for option in DEPRECATED.splitlines():
if option in args:
print(
"{} is deprecated.\n"
"Please use `--set {}=value` instead.\n"
"To show all options and their default values use --options".format(
option,
REPLACEMENTS.get(option, None)
or option.lstrip("-").replace("-", "_"),
)
)
# Check for underscores in the options. Options always follow '--'.
for argument in args:
underscoreParam = re.search(r"[-]{2}((.*?_)(.*?(\s|$)))+", argument)
if underscoreParam is not None:
print(
"{} uses underscores, please use hyphens {}".format(
argument, argument.replace("_", "-")
)
)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/utils/signals.py | mitmproxy/utils/signals.py | """
This module provides signals, which are a simple dispatching system that allows any number of interested parties
to subscribe to events ("signals").
This is similar to the Blinker library (https://pypi.org/project/blinker/), with the following changes:
- provides only a small subset of Blinker's functionality
- supports type hints
- supports async receivers.
"""
from __future__ import annotations
import asyncio
import inspect
import weakref
from collections.abc import Awaitable
from collections.abc import Callable
from typing import Any
from typing import cast
from typing import Generic
from typing import ParamSpec
from typing import TypeVar
P = ParamSpec("P")
R = TypeVar("R")
def make_weak_ref(obj: Any) -> weakref.ReferenceType:
"""
Like weakref.ref(), but using weakref.WeakMethod for bound methods.
"""
if hasattr(obj, "__self__"):
return cast(weakref.ref, weakref.WeakMethod(obj))
else:
return weakref.ref(obj)
# We're running into https://github.com/python/mypy/issues/6073 here,
# which is why the base class is a mixin and not a generic superclass.
class _SignalMixin:
def __init__(self) -> None:
self.receivers: list[weakref.ref[Callable]] = []
def connect(self, receiver: Callable) -> None:
"""
Register a signal receiver.
The signal will only hold a weak reference to the receiver function.
"""
receiver = make_weak_ref(receiver)
self.receivers.append(receiver)
def disconnect(self, receiver: Callable) -> None:
self.receivers = [r for r in self.receivers if r() != receiver]
def notify(self, *args, **kwargs):
cleanup = False
for ref in self.receivers:
r = ref()
if r is not None:
yield r(*args, **kwargs)
else:
cleanup = True
if cleanup:
self.receivers = [r for r in self.receivers if r() is not None]
class _SyncSignal(Generic[P], _SignalMixin):
def connect(self, receiver: Callable[P, None]) -> None:
assert not inspect.iscoroutinefunction(receiver)
super().connect(receiver)
def disconnect(self, receiver: Callable[P, None]) -> None:
super().disconnect(receiver)
def send(self, *args: P.args, **kwargs: P.kwargs) -> None:
for ret in super().notify(*args, **kwargs):
assert ret is None or not inspect.isawaitable(ret)
class _AsyncSignal(Generic[P], _SignalMixin):
def connect(self, receiver: Callable[P, Awaitable[None] | None]) -> None:
super().connect(receiver)
def disconnect(self, receiver: Callable[P, Awaitable[None] | None]) -> None:
super().disconnect(receiver)
async def send(self, *args: P.args, **kwargs: P.kwargs) -> None:
await asyncio.gather(
*[
aws
for aws in super().notify(*args, **kwargs)
if aws is not None and inspect.isawaitable(aws)
]
)
# noinspection PyPep8Naming
def SyncSignal(receiver_spec: Callable[P, None]) -> _SyncSignal[P]:
"""
Create a synchronous signal with the given function signature for receivers.
Example:
s = SyncSignal(lambda event: None) # all receivers must accept a single "event" argument.
def receiver(event):
print(event)
s.connect(receiver)
s.send("foo") # prints foo
s.send(event="bar") # prints bar
def receiver2():
...
s.connect(receiver2) # mypy complains about receiver2 not having the right signature
s2 = SyncSignal(lambda: None) # this signal has no arguments
s2.send()
"""
return cast(_SyncSignal[P], _SyncSignal())
# noinspection PyPep8Naming
def AsyncSignal(receiver_spec: Callable[P, Awaitable[None] | None]) -> _AsyncSignal[P]:
    """
    Create a signal that supports both regular and async receivers.

    Example:

        s = AsyncSignal(lambda event: None)

        async def receiver(event):
            print(event)

        s.connect(receiver)
        await s.send("foo")  # prints foo
    """
    # receiver_spec exists purely so mypy can infer P; it is never called.
    signal = _AsyncSignal()
    return cast(_AsyncSignal[P], signal)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/utils/human.py | mitmproxy/utils/human.py | import datetime
import functools
import ipaddress
import time
# Multipliers for size suffixes: powers of 1024 keyed by suffix letter.
SIZE_UNITS = {unit: 1024**exponent for exponent, unit in enumerate("bkmgt")}
def pretty_size(size: int) -> str:
    """Convert a number of bytes into a human-readable string.

    len(return value) <= 5 always holds true.
    """
    value: float = size  # type cast for mypy
    if value < 1024:
        return f"{value}b"
    for unit in "kmgt":
        value /= 1024
        if value < 99.95:
            # Keep one decimal while it still fits in five characters.
            return f"{value:.1f}{unit}"
        if value < 1024 or unit == "t":
            # No decimals; "t" is the largest unit, so never overflow past it.
            return f"{value:.0f}{unit}"
    raise AssertionError
@functools.lru_cache
def parse_size(s: str | None) -> int | None:
"""
Parse a size with an optional k/m/... suffix.
Invalid values raise a ValueError. For added convenience, passing `None` returns `None`.
"""
if s is None:
return None
try:
return int(s)
except ValueError:
pass
for i in SIZE_UNITS.keys():
if s.endswith(i):
try:
return int(s[:-1]) * SIZE_UNITS[i]
except ValueError:
break
raise ValueError("Invalid size specification.")
def pretty_duration(secs: float | None) -> str:
formatters = [
(100, "{:.0f}s"),
(10, "{:2.1f}s"),
(1, "{:1.2f}s"),
]
if secs is None:
return ""
for limit, formatter in formatters:
if secs >= limit:
return formatter.format(secs)
# less than 1 sec
return f"{secs * 1000:.0f}ms"
def format_timestamp(s):
    """Format a Unix timestamp as local time "YYYY-MM-DD HH:MM:SS"."""
    # Round-trip through localtime/mktime to drop sub-second precision.
    local = time.localtime(s)
    stamp = datetime.datetime.fromtimestamp(time.mktime(local))
    return stamp.strftime("%Y-%m-%d %H:%M:%S")
def format_timestamp_with_milli(s):
    """Format a Unix timestamp as local time "YYYY-MM-DD HH:MM:SS.mmm"."""
    stamp = datetime.datetime.fromtimestamp(s)
    # %f yields microseconds; keep only the first three digits (milliseconds).
    return stamp.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]
@functools.lru_cache
def format_address(address: tuple | None) -> str:
"""
This function accepts IPv4/IPv6 tuples and
returns the formatted address string with port number
"""
if address is None:
return "<no address>"
try:
host = ipaddress.ip_address(address[0])
if host.is_unspecified:
return f"*:{address[1]}"
if isinstance(host, ipaddress.IPv4Address):
return f"{host}:{address[1]}"
# If IPv6 is mapped to IPv4
elif host.ipv4_mapped:
return f"{host.ipv4_mapped}:{address[1]}"
return f"[{host}]:{address[1]}"
except ValueError:
return f"{address[0]}:{address[1]}"
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/utils/magisk.py | mitmproxy/utils/magisk.py | import hashlib
import os
from zipfile import ZipFile
from cryptography import x509
from cryptography.hazmat.primitives import serialization
from mitmproxy import certs
from mitmproxy import ctx
from mitmproxy.options import CONF_BASENAME
# The following 3 variables are for including in the magisk module as text file
# module.prop: metadata Magisk displays for the installed module.
MODULE_PROP_TEXT = """id=mitmproxycert
name=MITMProxy cert
version=v1
versionCode=1
author=mitmproxy
description=Adds the mitmproxy certificate to the system store
template=3"""
# config.sh: module-template configuration (mount flags, install banner,
# file permissions) consumed by the Magisk installer scripts.
CONFIG_SH_TEXT = """
MODID=mitmproxycert
AUTOMOUNT=true
PROPFILE=false
POSTFSDATA=false
LATESTARTSERVICE=false
print_modname() {
ui_print "*******************************"
ui_print " MITMProxy cert installer "
ui_print "*******************************"
}
REPLACE="
"
set_permissions() {
set_perm_recursive $MODPATH 0 0 0755 0644
}
"""
# update-binary: shell script Magisk runs when the module zip is flashed;
# refuses to install on Magisk versions older than v20.4.
UPDATE_BINARY_TEXT = """
#!/sbin/sh
#################
# Initialization
#################
umask 022
# echo before loading util_functions
ui_print() { echo "$1"; }
require_new_magisk() {
ui_print "*******************************"
ui_print " Please install Magisk v20.4+! "
ui_print "*******************************"
exit 1
}
OUTFD=$2
ZIPFILE=$3
mount /data 2>/dev/null
[ -f /data/adb/magisk/util_functions.sh ] || require_new_magisk
. /data/adb/magisk/util_functions.sh
[ $MAGISK_VER_CODE -lt 20400 ] && require_new_magisk
install_module
exit 0
"""
def get_ca_from_files() -> x509.Certificate:
    """Load mitmproxy's default CA certificate from the configured certstore."""
    # Borrowed from tlsconfig
    options = ctx.options
    if options.cert_passphrase:
        passphrase = options.cert_passphrase.encode("utf8")
    else:
        passphrase = None
    store = certs.CertStore.from_store(
        path=os.path.expanduser(options.confdir),
        basename=CONF_BASENAME,
        key_size=options.key_size,
        passphrase=passphrase,
    )
    return store.default_ca._cert
def subject_hash_old(ca: x509.Certificate) -> str:
    """Replicate openssl's -subject_hash_old, used for Android certificate names."""
    digest = hashlib.md5(ca.subject.public_bytes()).digest()
    # OpenSSL interprets the first four MD5 bytes as a little-endian integer.
    truncated = int.from_bytes(digest[:4], "little")
    return hex(truncated)[2:]
def write_magisk_module(path: str):
    """Write a Magisk-loadable module zip containing the mitmproxy CA cert.

    Args:
        path: Destination file path for the zip archive.
    """
    ca = get_ca_from_files()
    # Android system cert stores hold DER-encoded certificates.
    der_cert = ca.public_bytes(serialization.Encoding.DER)
    entries = [
        # Main cert file: the name is always the old subject hash plus ".0".
        (f"system/etc/security/cacerts/{subject_hash_old(ca)}.0", der_cert),
        ("module.prop", MODULE_PROP_TEXT),
        ("config.sh", CONFIG_SH_TEXT),
        ("META-INF/com/google/android/updater-script", "#MAGISK"),
        ("META-INF/com/google/android/update-binary", UPDATE_BINARY_TEXT),
        ("common/file_contexts_image", "/magisk(/.*)? u:object_r:system_file:s0"),
        ("common/post-fs-data.sh", "MODDIR=${0%/*}"),
        ("common/service.sh", "MODDIR=${0%/*}"),
        ("common/system.prop", ""),
    ]
    with ZipFile(path, "w") as module_zip:
        for name, payload in entries:
            module_zip.writestr(name, payload)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/utils/sliding_window.py | mitmproxy/utils/sliding_window.py | import itertools
from collections.abc import Iterable
from collections.abc import Iterator
from typing import TypeVar
T = TypeVar("T")
def window(
iterator: Iterable[T], behind: int = 0, ahead: int = 0
) -> Iterator[tuple[T | None, ...]]:
"""
Sliding window for an iterator.
Example:
>>> for prev, i, nxt in window(range(10), 1, 1):
>>> print(prev, i, nxt)
None 0 1
0 1 2
1 2 3
2 3 None
"""
# TODO: move into utils
iters: list[Iterator[T | None]] = list(itertools.tee(iterator, behind + 1 + ahead))
for i in range(behind):
iters[i] = itertools.chain((behind - i) * [None], iters[i])
for i in range(ahead):
iters[-1 - i] = itertools.islice(
itertools.chain(iters[-1 - i], (ahead - i) * [None]), (ahead - i), None
)
return zip(*iters)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/utils/typecheck.py | mitmproxy/utils/typecheck.py | import typing
from collections import abc
# types.UnionType backs "X | Y" annotations (PEP 604) and only exists on
# Python 3.10+; on older interpreters we substitute a unique sentinel so
# identity checks against it are simply never true.
try:
    from types import UnionType
except ImportError:  # pragma: no cover
    UnionType = object()  # type: ignore
# Alias for "any option type specification"; effectively typing.Any.
Type = typing.Union[
    typing.Any  # anything more elaborate really fails with mypy at the moment.
]
def check_option_type(name: str, value: typing.Any, typeinfo: Type) -> None:
"""
Check if the provided value is an instance of typeinfo and raises a
TypeError otherwise. This function supports only those types required for
options.
"""
e = TypeError(f"Expected {typeinfo} for {name}, but got {type(value)}.")
origin = typing.get_origin(typeinfo)
if origin is typing.Union or origin is UnionType:
for T in typing.get_args(typeinfo):
try:
check_option_type(name, value, T)
except TypeError:
pass
else:
return
raise e
elif origin is tuple:
types = typing.get_args(typeinfo)
if not isinstance(value, (tuple, list)):
raise e
if len(types) != len(value):
raise e
for i, (x, T) in enumerate(zip(value, types)):
check_option_type(f"{name}[{i}]", x, T)
return
elif origin is abc.Sequence:
T = typing.get_args(typeinfo)[0]
if not isinstance(value, (tuple, list)):
raise e
for v in value:
check_option_type(name, v, T)
elif origin is typing.IO or typeinfo in (typing.TextIO, typing.BinaryIO):
if hasattr(value, "read"):
return
else:
raise e
elif typeinfo is typing.Any:
return
elif not isinstance(value, typeinfo):
if typeinfo is float and isinstance(value, int):
return
raise e
def typespec_to_str(typespec: typing.Any) -> str:
    """Return a human-readable name for an option type specification.

    Raises NotImplementedError for any type not used by mitmproxy options.
    """
    if typespec in (str, int, float, bool):
        return typespec.__name__
    if typespec == typing.Optional[str]:
        return "optional str"
    if typespec in (typing.Sequence[str], abc.Sequence[str]):
        return "sequence of str"
    if typespec == typing.Optional[int]:
        return "optional int"
    raise NotImplementedError
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/utils/__init__.py | mitmproxy/utils/__init__.py | python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false | |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/utils/vt_codes.py | mitmproxy/utils/vt_codes.py | """
This module provides a method to detect if a given file object supports virtual terminal escape codes.
"""
import os
import sys
from typing import IO
if os.name == "nt":
    from ctypes import byref  # type: ignore
    from ctypes import windll  # type: ignore
    from ctypes.wintypes import BOOL
    from ctypes.wintypes import DWORD
    from ctypes.wintypes import HANDLE
    from ctypes.wintypes import LPDWORD

    # Console mode flag that turns on ANSI/VT escape sequence processing.
    ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004
    # Well-known standard handle identifiers for GetStdHandle.
    STD_OUTPUT_HANDLE = -11
    STD_ERROR_HANDLE = -12

    # https://docs.microsoft.com/de-de/windows/console/getstdhandle
    GetStdHandle = windll.kernel32.GetStdHandle
    GetStdHandle.argtypes = [DWORD]
    GetStdHandle.restype = HANDLE

    # https://docs.microsoft.com/de-de/windows/console/getconsolemode
    GetConsoleMode = windll.kernel32.GetConsoleMode
    GetConsoleMode.argtypes = [HANDLE, LPDWORD]
    GetConsoleMode.restype = BOOL

    # https://docs.microsoft.com/de-de/windows/console/setconsolemode
    SetConsoleMode = windll.kernel32.SetConsoleMode
    SetConsoleMode.argtypes = [HANDLE, DWORD]
    SetConsoleMode.restype = BOOL

    def ensure_supported(f: IO[str]) -> bool:
        # Returns truthy if f is an interactive stdout/stderr console for
        # which VT escape processing is (now) enabled. Side effect: enables
        # ENABLE_VIRTUAL_TERMINAL_PROCESSING on that console.
        if not f.isatty():
            return False
        # Only the process's own stdout/stderr map to standard handles.
        if f == sys.stdout:
            h = STD_OUTPUT_HANDLE
        elif f == sys.stderr:
            h = STD_ERROR_HANDLE
        else:
            return False
        handle = GetStdHandle(h)
        console_mode = DWORD()
        ok = GetConsoleMode(handle, byref(console_mode))
        if not ok:
            return False
        # Preserve the existing mode bits and add VT processing.
        ok = SetConsoleMode(
            handle, console_mode.value | ENABLE_VIRTUAL_TERMINAL_PROCESSING
        )
        return ok
else:
    # On POSIX platforms a tty is assumed to understand VT escape codes.
    def ensure_supported(f: IO[str]) -> bool:
        return f.isatty()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/utils/emoji.py | mitmproxy/utils/emoji.py | #!/usr/bin/env python3
"""
All of the emoji and characters that can be used as flow markers.
"""
# auto-generated. run this file to refresh.
emoji = {
":+1:": "👍",
":-1:": "👎",
":100:": "💯",
":1234:": "🔢",
":1st_place_medal:": "🥇",
":2nd_place_medal:": "🥈",
":3rd_place_medal:": "🥉",
":8ball:": "🎱",
":a:": "🅰",
":ab:": "🆎",
":abacus:": "🧮",
":abc:": "🔤",
":abcd:": "🔡",
":accept:": "🉑",
":adhesive_bandage:": "🩹",
":adult:": "🧑",
":aerial_tramway:": "🚡",
":afghanistan:": "🇦🇫",
":airplane:": "✈",
":aland_islands:": "🇦🇽",
":alarm_clock:": "⏰",
":albania:": "🇦🇱",
":alembic:": "⚗",
":algeria:": "🇩🇿",
":alien:": "👽",
":ambulance:": "🚑",
":american_samoa:": "🇦🇸",
":amphora:": "🏺",
":anchor:": "⚓",
":andorra:": "🇦🇩",
":angel:": "👼",
":anger:": "💢",
":angola:": "🇦🇴",
":angry:": "😠",
":anguilla:": "🇦🇮",
":anguished:": "😧",
":ant:": "🐜",
":antarctica:": "🇦🇶",
":antigua_barbuda:": "🇦🇬",
":apple:": "🍎",
":aquarius:": "♒",
":argentina:": "🇦🇷",
":aries:": "♈",
":armenia:": "🇦🇲",
":arrow_backward:": "◀",
":arrow_double_down:": "⏬",
":arrow_double_up:": "⏫",
":arrow_down:": "⬇",
":arrow_down_small:": "🔽",
":arrow_forward:": "▶",
":arrow_heading_down:": "⤵",
":arrow_heading_up:": "⤴",
":arrow_left:": "⬅",
":arrow_lower_left:": "↙",
":arrow_lower_right:": "↘",
":arrow_right:": "➡",
":arrow_right_hook:": "↪",
":arrow_up:": "⬆",
":arrow_up_down:": "↕",
":arrow_up_small:": "🔼",
":arrow_upper_left:": "↖",
":arrow_upper_right:": "↗",
":arrows_clockwise:": "🔃",
":arrows_counterclockwise:": "🔄",
":art:": "🎨",
":articulated_lorry:": "🚛",
":artificial_satellite:": "🛰",
":artist:": "🧑🎨",
":aruba:": "🇦🇼",
":ascension_island:": "🇦🇨",
":asterisk:": "*⃣",
":astonished:": "😲",
":astronaut:": "🧑🚀",
":athletic_shoe:": "👟",
":atm:": "🏧",
":atom_symbol:": "⚛",
":australia:": "🇦🇺",
":austria:": "🇦🇹",
":auto_rickshaw:": "🛺",
":avocado:": "🥑",
":axe:": "🪓",
":azerbaijan:": "🇦🇿",
":b:": "🅱",
":baby:": "👶",
":baby_bottle:": "🍼",
":baby_chick:": "🐤",
":baby_symbol:": "🚼",
":back:": "🔙",
":bacon:": "🥓",
":badger:": "🦡",
":badminton:": "🏸",
":bagel:": "🥯",
":baggage_claim:": "🛄",
":baguette_bread:": "🥖",
":bahamas:": "🇧🇸",
":bahrain:": "🇧🇭",
":balance_scale:": "⚖",
":bald_man:": "👨🦲",
":bald_woman:": "👩🦲",
":ballet_shoes:": "🩰",
":balloon:": "🎈",
":ballot_box:": "🗳",
":ballot_box_with_check:": "☑",
":bamboo:": "🎍",
":banana:": "🍌",
":bangbang:": "‼",
":bangladesh:": "🇧🇩",
":banjo:": "🪕",
":bank:": "🏦",
":bar_chart:": "📊",
":barbados:": "🇧🇧",
":barber:": "💈",
":baseball:": "⚾",
":basket:": "🧺",
":basketball:": "🏀",
":basketball_man:": "⛹♂",
":basketball_woman:": "⛹♀",
":bat:": "🦇",
":bath:": "🛀",
":bathtub:": "🛁",
":battery:": "🔋",
":beach_umbrella:": "🏖",
":bear:": "🐻",
":bearded_person:": "🧔",
":bed:": "🛏",
":bee:": "🐝",
":beer:": "🍺",
":beers:": "🍻",
":beetle:": "🐞",
":beginner:": "🔰",
":belarus:": "🇧🇾",
":belgium:": "🇧🇪",
":belize:": "🇧🇿",
":bell:": "🔔",
":bellhop_bell:": "🛎",
":benin:": "🇧🇯",
":bento:": "🍱",
":bermuda:": "🇧🇲",
":beverage_box:": "🧃",
":bhutan:": "🇧🇹",
":bicyclist:": "🚴",
":bike:": "🚲",
":biking_man:": "🚴♂",
":biking_woman:": "🚴♀",
":bikini:": "👙",
":billed_cap:": "🧢",
":biohazard:": "☣",
":bird:": "🐦",
":birthday:": "🎂",
":black_circle:": "⚫",
":black_flag:": "🏴",
":black_heart:": "🖤",
":black_joker:": "🃏",
":black_large_square:": "⬛",
":black_medium_small_square:": "◾",
":black_medium_square:": "◼",
":black_nib:": "✒",
":black_small_square:": "▪",
":black_square_button:": "🔲",
":blond_haired_man:": "👱♂",
":blond_haired_person:": "👱",
":blond_haired_woman:": "👱♀",
":blonde_woman:": "👱♀",
":blossom:": "🌼",
":blowfish:": "🐡",
":blue_book:": "📘",
":blue_car:": "🚙",
":blue_heart:": "💙",
":blue_square:": "🟦",
":blush:": "😊",
":boar:": "🐗",
":boat:": "⛵",
":bolivia:": "🇧🇴",
":bomb:": "💣",
":bone:": "🦴",
":book:": "📖",
":bookmark:": "🔖",
":bookmark_tabs:": "📑",
":books:": "📚",
":boom:": "💥",
":boot:": "👢",
":bosnia_herzegovina:": "🇧🇦",
":botswana:": "🇧🇼",
":bouncing_ball_man:": "⛹♂",
":bouncing_ball_person:": "⛹",
":bouncing_ball_woman:": "⛹♀",
":bouquet:": "💐",
":bouvet_island:": "🇧🇻",
":bow:": "🙇",
":bow_and_arrow:": "🏹",
":bowing_man:": "🙇♂",
":bowing_woman:": "🙇♀",
":bowl_with_spoon:": "🥣",
":bowling:": "🎳",
":boxing_glove:": "🥊",
":boy:": "👦",
":brain:": "🧠",
":brazil:": "🇧🇷",
":bread:": "🍞",
":breast_feeding:": "🤱",
":bricks:": "🧱",
":bride_with_veil:": "👰",
":bridge_at_night:": "🌉",
":briefcase:": "💼",
":british_indian_ocean_territory:": "🇮🇴",
":british_virgin_islands:": "🇻🇬",
":broccoli:": "🥦",
":broken_heart:": "💔",
":broom:": "🧹",
":brown_circle:": "🟤",
":brown_heart:": "🤎",
":brown_square:": "🟫",
":brunei:": "🇧🇳",
":bug:": "🐛",
":building_construction:": "🏗",
":bulb:": "💡",
":bulgaria:": "🇧🇬",
":bullettrain_front:": "🚅",
":bullettrain_side:": "🚄",
":burkina_faso:": "🇧🇫",
":burrito:": "🌯",
":burundi:": "🇧🇮",
":bus:": "🚌",
":business_suit_levitating:": "🕴",
":busstop:": "🚏",
":bust_in_silhouette:": "👤",
":busts_in_silhouette:": "👥",
":butter:": "🧈",
":butterfly:": "🦋",
":cactus:": "🌵",
":cake:": "🍰",
":calendar:": "📆",
":call_me_hand:": "🤙",
":calling:": "📲",
":cambodia:": "🇰🇭",
":camel:": "🐫",
":camera:": "📷",
":camera_flash:": "📸",
":cameroon:": "🇨🇲",
":camping:": "🏕",
":canada:": "🇨🇦",
":canary_islands:": "🇮🇨",
":cancer:": "♋",
":candle:": "🕯",
":candy:": "🍬",
":canned_food:": "🥫",
":canoe:": "🛶",
":cape_verde:": "🇨🇻",
":capital_abcd:": "🔠",
":capricorn:": "♑",
":car:": "🚗",
":card_file_box:": "🗃",
":card_index:": "📇",
":card_index_dividers:": "🗂",
":caribbean_netherlands:": "🇧🇶",
":carousel_horse:": "🎠",
":carrot:": "🥕",
":cartwheeling:": "🤸",
":cat:": "🐱",
":cat2:": "🐈",
":cayman_islands:": "🇰🇾",
":cd:": "💿",
":central_african_republic:": "🇨🇫",
":ceuta_melilla:": "🇪🇦",
":chad:": "🇹🇩",
":chains:": "⛓",
":chair:": "🪑",
":champagne:": "🍾",
":chart:": "💹",
":chart_with_downwards_trend:": "📉",
":chart_with_upwards_trend:": "📈",
":checkered_flag:": "🏁",
":cheese:": "🧀",
":cherries:": "🍒",
":cherry_blossom:": "🌸",
":chess_pawn:": "♟",
":chestnut:": "🌰",
":chicken:": "🐔",
":child:": "🧒",
":children_crossing:": "🚸",
":chile:": "🇨🇱",
":chipmunk:": "🐿",
":chocolate_bar:": "🍫",
":chopsticks:": "🥢",
":christmas_island:": "🇨🇽",
":christmas_tree:": "🎄",
":church:": "⛪",
":cinema:": "🎦",
":circus_tent:": "🎪",
":city_sunrise:": "🌇",
":city_sunset:": "🌆",
":cityscape:": "🏙",
":cl:": "🆑",
":clamp:": "🗜",
":clap:": "👏",
":clapper:": "🎬",
":classical_building:": "🏛",
":climbing:": "🧗",
":climbing_man:": "🧗♂",
":climbing_woman:": "🧗♀",
":clinking_glasses:": "🥂",
":clipboard:": "📋",
":clipperton_island:": "🇨🇵",
":clock1:": "🕐",
":clock10:": "🕙",
":clock1030:": "🕥",
":clock11:": "🕚",
":clock1130:": "🕦",
":clock12:": "🕛",
":clock1230:": "🕧",
":clock130:": "🕜",
":clock2:": "🕑",
":clock230:": "🕝",
":clock3:": "🕒",
":clock330:": "🕞",
":clock4:": "🕓",
":clock430:": "🕟",
":clock5:": "🕔",
":clock530:": "🕠",
":clock6:": "🕕",
":clock630:": "🕡",
":clock7:": "🕖",
":clock730:": "🕢",
":clock8:": "🕗",
":clock830:": "🕣",
":clock9:": "🕘",
":clock930:": "🕤",
":closed_book:": "📕",
":closed_lock_with_key:": "🔐",
":closed_umbrella:": "🌂",
":cloud:": "☁",
":cloud_with_lightning:": "🌩",
":cloud_with_lightning_and_rain:": "⛈",
":cloud_with_rain:": "🌧",
":cloud_with_snow:": "🌨",
":clown_face:": "🤡",
":clubs:": "♣",
":cn:": "🇨🇳",
":coat:": "🧥",
":cocktail:": "🍸",
":coconut:": "🥥",
":cocos_islands:": "🇨🇨",
":coffee:": "☕",
":coffin:": "⚰",
":cold_face:": "🥶",
":cold_sweat:": "😰",
":collision:": "💥",
":colombia:": "🇨🇴",
":comet:": "☄",
":comoros:": "🇰🇲",
":compass:": "🧭",
":computer:": "💻",
":computer_mouse:": "🖱",
":confetti_ball:": "🎊",
":confounded:": "😖",
":confused:": "😕",
":congo_brazzaville:": "🇨🇬",
":congo_kinshasa:": "🇨🇩",
":congratulations:": "㊗",
":construction:": "🚧",
":construction_worker:": "👷",
":construction_worker_man:": "👷♂",
":construction_worker_woman:": "👷♀",
":control_knobs:": "🎛",
":convenience_store:": "🏪",
":cook:": "🧑🍳",
":cook_islands:": "🇨🇰",
":cookie:": "🍪",
":cool:": "🆒",
":cop:": "👮",
":copyright:": "©",
":corn:": "🌽",
":costa_rica:": "🇨🇷",
":cote_divoire:": "🇨🇮",
":couch_and_lamp:": "🛋",
":couple:": "👫",
":couple_with_heart:": "💑",
":couple_with_heart_man_man:": "👨❤👨",
":couple_with_heart_woman_man:": "👩❤👨",
":couple_with_heart_woman_woman:": "👩❤👩",
":couplekiss:": "💏",
":couplekiss_man_man:": "👨❤💋👨",
":couplekiss_man_woman:": "👩❤💋👨",
":couplekiss_woman_woman:": "👩❤💋👩",
":cow:": "🐮",
":cow2:": "🐄",
":cowboy_hat_face:": "🤠",
":crab:": "🦀",
":crayon:": "🖍",
":credit_card:": "💳",
":crescent_moon:": "🌙",
":cricket:": "🦗",
":cricket_game:": "🏏",
":croatia:": "🇭🇷",
":crocodile:": "🐊",
":croissant:": "🥐",
":crossed_fingers:": "🤞",
":crossed_flags:": "🎌",
":crossed_swords:": "⚔",
":crown:": "👑",
":cry:": "😢",
":crying_cat_face:": "😿",
":crystal_ball:": "🔮",
":cuba:": "🇨🇺",
":cucumber:": "🥒",
":cup_with_straw:": "🥤",
":cupcake:": "🧁",
":cupid:": "💘",
":curacao:": "🇨🇼",
":curling_stone:": "🥌",
":curly_haired_man:": "👨🦱",
":curly_haired_woman:": "👩🦱",
":curly_loop:": "➰",
":currency_exchange:": "💱",
":curry:": "🍛",
":cursing_face:": "🤬",
":custard:": "🍮",
":customs:": "🛃",
":cut_of_meat:": "🥩",
":cyclone:": "🌀",
":cyprus:": "🇨🇾",
":czech_republic:": "🇨🇿",
":dagger:": "🗡",
":dancer:": "💃",
":dancers:": "👯",
":dancing_men:": "👯♂",
":dancing_women:": "👯♀",
":dango:": "🍡",
":dark_sunglasses:": "🕶",
":dart:": "🎯",
":dash:": "💨",
":date:": "📅",
":de:": "🇩🇪",
":deaf_man:": "🧏♂",
":deaf_person:": "🧏",
":deaf_woman:": "🧏♀",
":deciduous_tree:": "🌳",
":deer:": "🦌",
":denmark:": "🇩🇰",
":department_store:": "🏬",
":derelict_house:": "🏚",
":desert:": "🏜",
":desert_island:": "🏝",
":desktop_computer:": "🖥",
":detective:": "🕵",
":diamond_shape_with_a_dot_inside:": "💠",
":diamonds:": "♦",
":diego_garcia:": "🇩🇬",
":disappointed:": "😞",
":disappointed_relieved:": "😥",
":diving_mask:": "🤿",
":diya_lamp:": "🪔",
":dizzy:": "💫",
":dizzy_face:": "😵",
":djibouti:": "🇩🇯",
":dna:": "🧬",
":do_not_litter:": "🚯",
":dog:": "🐶",
":dog2:": "🐕",
":dollar:": "💵",
":dolls:": "🎎",
":dolphin:": "🐬",
":dominica:": "🇩🇲",
":dominican_republic:": "🇩🇴",
":door:": "🚪",
":doughnut:": "🍩",
":dove:": "🕊",
":dragon:": "🐉",
":dragon_face:": "🐲",
":dress:": "👗",
":dromedary_camel:": "🐪",
":drooling_face:": "🤤",
":drop_of_blood:": "🩸",
":droplet:": "💧",
":drum:": "🥁",
":duck:": "🦆",
":dumpling:": "🥟",
":dvd:": "📀",
":e-mail:": "📧",
":eagle:": "🦅",
":ear:": "👂",
":ear_of_rice:": "🌾",
":ear_with_hearing_aid:": "🦻",
":earth_africa:": "🌍",
":earth_americas:": "🌎",
":earth_asia:": "🌏",
":ecuador:": "🇪🇨",
":egg:": "🥚",
":eggplant:": "🍆",
":egypt:": "🇪🇬",
":eight:": "8⃣",
":eight_pointed_black_star:": "✴",
":eight_spoked_asterisk:": "✳",
":eject_button:": "⏏",
":el_salvador:": "🇸🇻",
":electric_plug:": "🔌",
":elephant:": "🐘",
":elf:": "🧝",
":elf_man:": "🧝♂",
":elf_woman:": "🧝♀",
":email:": "✉",
":end:": "🔚",
":england:": "🏴",
":envelope:": "✉",
":envelope_with_arrow:": "📩",
":equatorial_guinea:": "🇬🇶",
":eritrea:": "🇪🇷",
":es:": "🇪🇸",
":estonia:": "🇪🇪",
":ethiopia:": "🇪🇹",
":eu:": "🇪🇺",
":euro:": "💶",
":european_castle:": "🏰",
":european_post_office:": "🏤",
":european_union:": "🇪🇺",
":evergreen_tree:": "🌲",
":exclamation:": "❗",
":exploding_head:": "🤯",
":expressionless:": "😑",
":eye:": "👁",
":eye_speech_bubble:": "👁🗨",
":eyeglasses:": "👓",
":eyes:": "👀",
":face_with_head_bandage:": "🤕",
":face_with_thermometer:": "🤒",
":facepalm:": "🤦",
":facepunch:": "👊",
":factory:": "🏭",
":factory_worker:": "🧑🏭",
":fairy:": "🧚",
":fairy_man:": "🧚♂",
":fairy_woman:": "🧚♀",
":falafel:": "🧆",
":falkland_islands:": "🇫🇰",
":fallen_leaf:": "🍂",
":family:": "👪",
":family_man_boy:": "👨👦",
":family_man_boy_boy:": "👨👦👦",
":family_man_girl:": "👨👧",
":family_man_girl_boy:": "👨👧👦",
":family_man_girl_girl:": "👨👧👧",
":family_man_man_boy:": "👨👨👦",
":family_man_man_boy_boy:": "👨👨👦👦",
":family_man_man_girl:": "👨👨👧",
":family_man_man_girl_boy:": "👨👨👧👦",
":family_man_man_girl_girl:": "👨👨👧👧",
":family_man_woman_boy:": "👨👩👦",
":family_man_woman_boy_boy:": "👨👩👦👦",
":family_man_woman_girl:": "👨👩👧",
":family_man_woman_girl_boy:": "👨👩👧👦",
":family_man_woman_girl_girl:": "👨👩👧👧",
":family_woman_boy:": "👩👦",
":family_woman_boy_boy:": "👩👦👦",
":family_woman_girl:": "👩👧",
":family_woman_girl_boy:": "👩👧👦",
":family_woman_girl_girl:": "👩👧👧",
":family_woman_woman_boy:": "👩👩👦",
":family_woman_woman_boy_boy:": "👩👩👦👦",
":family_woman_woman_girl:": "👩👩👧",
":family_woman_woman_girl_boy:": "👩👩👧👦",
":family_woman_woman_girl_girl:": "👩👩👧👧",
":farmer:": "🧑🌾",
":faroe_islands:": "🇫🇴",
":fast_forward:": "⏩",
":fax:": "📠",
":fearful:": "😨",
":feet:": "🐾",
":female_detective:": "🕵♀",
":female_sign:": "♀",
":ferris_wheel:": "🎡",
":ferry:": "⛴",
":field_hockey:": "🏑",
":fiji:": "🇫🇯",
":file_cabinet:": "🗄",
":file_folder:": "📁",
":film_projector:": "📽",
":film_strip:": "🎞",
":finland:": "🇫🇮",
":fire:": "🔥",
":fire_engine:": "🚒",
":fire_extinguisher:": "🧯",
":firecracker:": "🧨",
":firefighter:": "🧑🚒",
":fireworks:": "🎆",
":first_quarter_moon:": "🌓",
":first_quarter_moon_with_face:": "🌛",
":fish:": "🐟",
":fish_cake:": "🍥",
":fishing_pole_and_fish:": "🎣",
":fist:": "✊",
":fist_left:": "🤛",
":fist_oncoming:": "👊",
":fist_raised:": "✊",
":fist_right:": "🤜",
":five:": "5⃣",
":flags:": "🎏",
":flamingo:": "🦩",
":flashlight:": "🔦",
":flat_shoe:": "🥿",
":fleur_de_lis:": "⚜",
":flight_arrival:": "🛬",
":flight_departure:": "🛫",
":flipper:": "🐬",
":floppy_disk:": "💾",
":flower_playing_cards:": "🎴",
":flushed:": "😳",
":flying_disc:": "🥏",
":flying_saucer:": "🛸",
":fog:": "🌫",
":foggy:": "🌁",
":foot:": "🦶",
":football:": "🏈",
":footprints:": "👣",
":fork_and_knife:": "🍴",
":fortune_cookie:": "🥠",
":fountain:": "⛲",
":fountain_pen:": "🖋",
":four:": "4⃣",
":four_leaf_clover:": "🍀",
":fox_face:": "🦊",
":fr:": "🇫🇷",
":framed_picture:": "🖼",
":free:": "🆓",
":french_guiana:": "🇬🇫",
":french_polynesia:": "🇵🇫",
":french_southern_territories:": "🇹🇫",
":fried_egg:": "🍳",
":fried_shrimp:": "🍤",
":fries:": "🍟",
":frog:": "🐸",
":frowning:": "😦",
":frowning_face:": "☹",
":frowning_man:": "🙍♂",
":frowning_person:": "🙍",
":frowning_woman:": "🙍♀",
":fu:": "🖕",
":fuelpump:": "⛽",
":full_moon:": "🌕",
":full_moon_with_face:": "🌝",
":funeral_urn:": "⚱",
":gabon:": "🇬🇦",
":gambia:": "🇬🇲",
":game_die:": "🎲",
":garlic:": "🧄",
":gb:": "🇬🇧",
":gear:": "⚙",
":gem:": "💎",
":gemini:": "♊",
":genie:": "🧞",
":genie_man:": "🧞♂",
":genie_woman:": "🧞♀",
":georgia:": "🇬🇪",
":ghana:": "🇬🇭",
":ghost:": "👻",
":gibraltar:": "🇬🇮",
":gift:": "🎁",
":gift_heart:": "💝",
":giraffe:": "🦒",
":girl:": "👧",
":globe_with_meridians:": "🌐",
":gloves:": "🧤",
":goal_net:": "🥅",
":goat:": "🐐",
":goggles:": "🥽",
":golf:": "⛳",
":golfing:": "🏌",
":golfing_man:": "🏌♂",
":golfing_woman:": "🏌♀",
":gorilla:": "🦍",
":grapes:": "🍇",
":greece:": "🇬🇷",
":green_apple:": "🍏",
":green_book:": "📗",
":green_circle:": "🟢",
":green_heart:": "💚",
":green_salad:": "🥗",
":green_square:": "🟩",
":greenland:": "🇬🇱",
":grenada:": "🇬🇩",
":grey_exclamation:": "❕",
":grey_question:": "❔",
":grimacing:": "😬",
":grin:": "😁",
":grinning:": "😀",
":guadeloupe:": "🇬🇵",
":guam:": "🇬🇺",
":guard:": "💂",
":guardsman:": "💂♂",
":guardswoman:": "💂♀",
":guatemala:": "🇬🇹",
":guernsey:": "🇬🇬",
":guide_dog:": "🦮",
":guinea:": "🇬🇳",
":guinea_bissau:": "🇬🇼",
":guitar:": "🎸",
":gun:": "🔫",
":guyana:": "🇬🇾",
":haircut:": "💇",
":haircut_man:": "💇♂",
":haircut_woman:": "💇♀",
":haiti:": "🇭🇹",
":hamburger:": "🍔",
":hammer:": "🔨",
":hammer_and_pick:": "⚒",
":hammer_and_wrench:": "🛠",
":hamster:": "🐹",
":hand:": "✋",
":hand_over_mouth:": "🤭",
":handbag:": "👜",
":handball_person:": "🤾",
":handshake:": "🤝",
":hankey:": "💩",
":hash:": "#⃣",
":hatched_chick:": "🐥",
":hatching_chick:": "🐣",
":headphones:": "🎧",
":health_worker:": "🧑⚕",
":hear_no_evil:": "🙉",
":heard_mcdonald_islands:": "🇭🇲",
":heart:": "❤",
":heart_decoration:": "💟",
":heart_eyes:": "😍",
":heart_eyes_cat:": "😻",
":heartbeat:": "💓",
":heartpulse:": "💗",
":hearts:": "♥",
":heavy_check_mark:": "✔",
":heavy_division_sign:": "➗",
":heavy_dollar_sign:": "💲",
":heavy_exclamation_mark:": "❗",
":heavy_heart_exclamation:": "❣",
":heavy_minus_sign:": "➖",
":heavy_multiplication_x:": "✖",
":heavy_plus_sign:": "➕",
":hedgehog:": "🦔",
":helicopter:": "🚁",
":herb:": "🌿",
":hibiscus:": "🌺",
":high_brightness:": "🔆",
":high_heel:": "👠",
":hiking_boot:": "🥾",
":hindu_temple:": "🛕",
":hippopotamus:": "🦛",
":hocho:": "🔪",
":hole:": "🕳",
":honduras:": "🇭🇳",
":honey_pot:": "🍯",
":honeybee:": "🐝",
":hong_kong:": "🇭🇰",
":horse:": "🐴",
":horse_racing:": "🏇",
":hospital:": "🏥",
":hot_face:": "🥵",
":hot_pepper:": "🌶",
":hotdog:": "🌭",
":hotel:": "🏨",
":hotsprings:": "♨",
":hourglass:": "⌛",
":hourglass_flowing_sand:": "⏳",
":house:": "🏠",
":house_with_garden:": "🏡",
":houses:": "🏘",
":hugs:": "🤗",
":hungary:": "🇭🇺",
":hushed:": "😯",
":ice_cream:": "🍨",
":ice_cube:": "🧊",
":ice_hockey:": "🏒",
":ice_skate:": "⛸",
":icecream:": "🍦",
":iceland:": "🇮🇸",
":id:": "🆔",
":ideograph_advantage:": "🉐",
":imp:": "👿",
":inbox_tray:": "📥",
":incoming_envelope:": "📨",
":india:": "🇮🇳",
":indonesia:": "🇮🇩",
":infinity:": "♾",
":information_desk_person:": "💁",
":information_source:": "ℹ",
":innocent:": "😇",
":interrobang:": "⁉",
":iphone:": "📱",
":iran:": "🇮🇷",
":iraq:": "🇮🇶",
":ireland:": "🇮🇪",
":isle_of_man:": "🇮🇲",
":israel:": "🇮🇱",
":it:": "🇮🇹",
":izakaya_lantern:": "🏮",
":jack_o_lantern:": "🎃",
":jamaica:": "🇯🇲",
":japan:": "🗾",
":japanese_castle:": "🏯",
":japanese_goblin:": "👺",
":japanese_ogre:": "👹",
":jeans:": "👖",
":jersey:": "🇯🇪",
":jigsaw:": "🧩",
":jordan:": "🇯🇴",
":joy:": "😂",
":joy_cat:": "😹",
":joystick:": "🕹",
":jp:": "🇯🇵",
":judge:": "🧑⚖",
":juggling_person:": "🤹",
":kaaba:": "🕋",
":kangaroo:": "🦘",
":kazakhstan:": "🇰🇿",
":kenya:": "🇰🇪",
":key:": "🔑",
":keyboard:": "⌨",
":keycap_ten:": "🔟",
":kick_scooter:": "🛴",
":kimono:": "👘",
":kiribati:": "🇰🇮",
":kiss:": "💋",
":kissing:": "😗",
":kissing_cat:": "😽",
":kissing_closed_eyes:": "😚",
":kissing_heart:": "😘",
":kissing_smiling_eyes:": "😙",
":kite:": "🪁",
":kiwi_fruit:": "🥝",
":kneeling_man:": "🧎♂",
":kneeling_person:": "🧎",
":kneeling_woman:": "🧎♀",
":knife:": "🔪",
":koala:": "🐨",
":koko:": "🈁",
":kosovo:": "🇽🇰",
":kr:": "🇰🇷",
":kuwait:": "🇰🇼",
":kyrgyzstan:": "🇰🇬",
":lab_coat:": "🥼",
":label:": "🏷",
":lacrosse:": "🥍",
":lantern:": "🏮",
":laos:": "🇱🇦",
":large_blue_circle:": "🔵",
":large_blue_diamond:": "🔷",
":large_orange_diamond:": "🔶",
":last_quarter_moon:": "🌗",
":last_quarter_moon_with_face:": "🌜",
":latin_cross:": "✝",
":latvia:": "🇱🇻",
":laughing:": "😆",
":leafy_green:": "🥬",
":leaves:": "🍃",
":lebanon:": "🇱🇧",
":ledger:": "📒",
":left_luggage:": "🛅",
":left_right_arrow:": "↔",
":left_speech_bubble:": "🗨",
":leftwards_arrow_with_hook:": "↩",
":leg:": "🦵",
":lemon:": "🍋",
":leo:": "♌",
":leopard:": "🐆",
":lesotho:": "🇱🇸",
":level_slider:": "🎚",
":liberia:": "🇱🇷",
":libra:": "♎",
":libya:": "🇱🇾",
":liechtenstein:": "🇱🇮",
":light_rail:": "🚈",
":link:": "🔗",
":lion:": "🦁",
":lips:": "👄",
":lipstick:": "💄",
":lithuania:": "🇱🇹",
":lizard:": "🦎",
":llama:": "🦙",
":lobster:": "🦞",
":lock:": "🔒",
":lock_with_ink_pen:": "🔏",
":lollipop:": "🍭",
":loop:": "➿",
":lotion_bottle:": "🧴",
":lotus_position:": "🧘",
":lotus_position_man:": "🧘♂",
":lotus_position_woman:": "🧘♀",
":loud_sound:": "🔊",
":loudspeaker:": "📢",
":love_hotel:": "🏩",
":love_letter:": "💌",
":love_you_gesture:": "🤟",
":low_brightness:": "🔅",
":luggage:": "🧳",
":luxembourg:": "🇱🇺",
":lying_face:": "🤥",
":m:": "Ⓜ",
":macau:": "🇲🇴",
":macedonia:": "🇲🇰",
":madagascar:": "🇲🇬",
":mag:": "🔍",
":mag_right:": "🔎",
":mage:": "🧙",
":mage_man:": "🧙♂",
":mage_woman:": "🧙♀",
":magnet:": "🧲",
":mahjong:": "🀄",
":mailbox:": "📫",
":mailbox_closed:": "📪",
":mailbox_with_mail:": "📬",
":mailbox_with_no_mail:": "📭",
":malawi:": "🇲🇼",
":malaysia:": "🇲🇾",
":maldives:": "🇲🇻",
":male_detective:": "🕵♂",
":male_sign:": "♂",
":mali:": "🇲🇱",
":malta:": "🇲🇹",
":man:": "👨",
":man_artist:": "👨🎨",
":man_astronaut:": "👨🚀",
":man_cartwheeling:": "🤸♂",
":man_cook:": "👨🍳",
":man_dancing:": "🕺",
":man_facepalming:": "🤦♂",
":man_factory_worker:": "👨🏭",
":man_farmer:": "👨🌾",
":man_firefighter:": "👨🚒",
":man_health_worker:": "👨⚕",
":man_in_manual_wheelchair:": "👨🦽",
":man_in_motorized_wheelchair:": "👨🦼",
":man_in_tuxedo:": "🤵",
":man_judge:": "👨⚖",
":man_juggling:": "🤹♂",
":man_mechanic:": "👨🔧",
":man_office_worker:": "👨💼",
":man_pilot:": "👨✈",
":man_playing_handball:": "🤾♂",
":man_playing_water_polo:": "🤽♂",
":man_scientist:": "👨🔬",
":man_shrugging:": "🤷♂",
":man_singer:": "👨🎤",
":man_student:": "👨🎓",
":man_teacher:": "👨🏫",
":man_technologist:": "👨💻",
":man_with_gua_pi_mao:": "👲",
":man_with_probing_cane:": "👨🦯",
":man_with_turban:": "👳♂",
":mandarin:": "🍊",
":mango:": "🥭",
":mans_shoe:": "👞",
":mantelpiece_clock:": "🕰",
":manual_wheelchair:": "🦽",
":maple_leaf:": "🍁",
":marshall_islands:": "🇲🇭",
":martial_arts_uniform:": "🥋",
":martinique:": "🇲🇶",
":mask:": "😷",
":massage:": "💆",
":massage_man:": "💆♂",
":massage_woman:": "💆♀",
":mate:": "🧉",
":mauritania:": "🇲🇷",
":mauritius:": "🇲🇺",
":mayotte:": "🇾🇹",
":meat_on_bone:": "🍖",
":mechanic:": "🧑🔧",
":mechanical_arm:": "🦾",
":mechanical_leg:": "🦿",
":medal_military:": "🎖",
":medal_sports:": "🏅",
":medical_symbol:": "⚕",
":mega:": "📣",
":melon:": "🍈",
":memo:": "📝",
":men_wrestling:": "🤼♂",
":menorah:": "🕎",
":mens:": "🚹",
":mermaid:": "🧜♀",
":merman:": "🧜♂",
":merperson:": "🧜",
":metal:": "🤘",
":metro:": "🚇",
":mexico:": "🇲🇽",
":microbe:": "🦠",
":micronesia:": "🇫🇲",
":microphone:": "🎤",
":microscope:": "🔬",
":middle_finger:": "🖕",
":milk_glass:": "🥛",
":milky_way:": "🌌",
":minibus:": "🚐",
":minidisc:": "💽",
":mobile_phone_off:": "📴",
":moldova:": "🇲🇩",
":monaco:": "🇲🇨",
":money_mouth_face:": "🤑",
":money_with_wings:": "💸",
":moneybag:": "💰",
":mongolia:": "🇲🇳",
":monkey:": "🐒",
":monkey_face:": "🐵",
":monocle_face:": "🧐",
":monorail:": "🚝",
":montenegro:": "🇲🇪",
":montserrat:": "🇲🇸",
":moon:": "🌔",
":moon_cake:": "🥮",
":morocco:": "🇲🇦",
":mortar_board:": "🎓",
":mosque:": "🕌",
":mosquito:": "🦟",
":motor_boat:": "🛥",
":motor_scooter:": "🛵",
":motorcycle:": "🏍",
":motorized_wheelchair:": "🦼",
":motorway:": "🛣",
":mount_fuji:": "🗻",
":mountain:": "⛰",
":mountain_bicyclist:": "🚵",
":mountain_biking_man:": "🚵♂",
":mountain_biking_woman:": "🚵♀",
":mountain_cableway:": "🚠",
":mountain_railway:": "🚞",
":mountain_snow:": "🏔",
":mouse:": "🐭",
":mouse2:": "🐁",
":movie_camera:": "🎥",
":moyai:": "🗿",
":mozambique:": "🇲🇿",
":mrs_claus:": "🤶",
":muscle:": "💪",
":mushroom:": "🍄",
":musical_keyboard:": "🎹",
":musical_note:": "🎵",
":musical_score:": "🎼",
":mute:": "🔇",
":myanmar:": "🇲🇲",
":nail_care:": "💅",
":name_badge:": "📛",
":namibia:": "🇳🇦",
":national_park:": "🏞",
":nauru:": "🇳🇷",
":nauseated_face:": "🤢",
":nazar_amulet:": "🧿",
":necktie:": "👔",
":negative_squared_cross_mark:": "❎",
":nepal:": "🇳🇵",
":nerd_face:": "🤓",
":netherlands:": "🇳🇱",
":neutral_face:": "😐",
":new:": "🆕",
":new_caledonia:": "🇳🇨",
":new_moon:": "🌑",
":new_moon_with_face:": "🌚",
":new_zealand:": "🇳🇿",
":newspaper:": "📰",
":newspaper_roll:": "🗞",
":next_track_button:": "⏭",
":ng:": "🆖",
":ng_man:": "🙅♂",
":ng_woman:": "🙅♀",
":nicaragua:": "🇳🇮",
":niger:": "🇳🇪",
":nigeria:": "🇳🇬",
":night_with_stars:": "🌃",
":nine:": "9⃣",
":niue:": "🇳🇺",
":no_bell:": "🔕",
":no_bicycles:": "🚳",
":no_entry:": "⛔",
":no_entry_sign:": "🚫",
":no_good:": "🙅",
":no_good_man:": "🙅♂",
":no_good_woman:": "🙅♀",
":no_mobile_phones:": "📵",
":no_mouth:": "😶",
":no_pedestrians:": "🚷",
":no_smoking:": "🚭",
":non-potable_water:": "🚱",
":norfolk_island:": "🇳🇫",
":north_korea:": "🇰🇵",
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | true |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/utils/htpasswd.py | mitmproxy/utils/htpasswd.py | """
A standalone, minimal htpasswd parser.
This implementation currently supports bcrypt and SHA1 passwords. SHA1 is insecure.
"""
from __future__ import annotations
import base64
import hashlib
from pathlib import Path
import bcrypt
class HtpasswdFile:
def __init__(self, content: str):
"""
Create a HtpasswdFile from a string.
"""
self.users: dict[str, str] = {}
for line in content.splitlines():
line = line.strip()
if not line or line.startswith("#"):
continue
if ":" not in line:
raise ValueError(f"Malformed htpasswd line: {line!r}")
user, pwhash = line.split(":", 1)
if not user:
raise ValueError(f"Malformed htpasswd line: {line!r}")
is_sha = pwhash.startswith("{SHA}")
is_bcrypt = pwhash.startswith(("$2y$", "$2b$", "$2a$"))
if not is_sha and not is_bcrypt:
raise ValueError(f"Unsupported htpasswd format for user {user!r}")
self.users[user] = pwhash
@classmethod
def from_file(cls, path: Path) -> HtpasswdFile:
"""
Initializes and loads an htpasswd file.
Args:
path: The path to the htpasswd file.
Raises:
OSError: If the file cannot be read.
ValueError: If the file is malformed.
"""
try:
content = path.read_text("utf-8")
except FileNotFoundError:
raise OSError(f"Htpasswd file not found: {path}") from None
return cls(content)
def check_password(self, username: str, password: str) -> bool:
"""
Checks if a username and password combination is valid.
Args:
username: The username to check.
password: The password to check.
Returns:
True if the password is valid, False otherwise.
"""
pwhash = self.users.get(username)
if pwhash is None:
return False
pwhash = pwhash.split(":", 1)[0]
if pwhash.startswith("{SHA}"):
# Apache's {SHA} is base64-encoded SHA-1.
# https://httpd.apache.org/docs/2.4/misc/password_encryptions.html
digest = hashlib.sha1(password.encode("utf-8")).digest()
expected = base64.b64encode(digest).decode("ascii")
return pwhash[5:] == expected
else: # pwhash.startswith(("$2y$", "$2b$", "$2a$")):
return bcrypt.checkpw(password.encode("utf-8"), pwhash.encode("utf-8"))
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/utils/bits.py | mitmproxy/utils/bits.py | def setbit(byte, offset, value):
"""
Set a bit in a byte to 1 if value is truthy, 0 if not.
"""
if value:
return byte | (1 << offset)
else:
return byte & ~(1 << offset)
def getbit(byte, offset):
mask = 1 << offset
return bool(byte & mask)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/utils/data.py | mitmproxy/utils/data.py | import importlib
import inspect
import os.path
class Data:
def __init__(self, name):
self.name = name
m = importlib.import_module(name)
f = inspect.getsourcefile(m)
assert f is not None
dirname = os.path.dirname(f)
self.dirname = os.path.abspath(dirname)
def push(self, subpath):
"""
Change the data object to a path relative to the module.
"""
dirname = os.path.normpath(os.path.join(self.dirname, subpath))
ret = Data(self.name)
ret.dirname = dirname
return ret
def path(self, path):
"""
Returns a path to the package data housed at 'path' under this
module.Path can be a path to a file, or to a directory.
This function will raise ValueError if the path does not exist.
"""
fullpath = os.path.normpath(os.path.join(self.dirname, path))
if not os.path.exists(fullpath):
raise ValueError("dataPath: %s does not exist." % fullpath)
return fullpath
pkg_data = Data(__name__).push("..")
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/utils/pyinstaller/hook-mitmproxy.addons.onboardingapp.py | mitmproxy/utils/pyinstaller/hook-mitmproxy.addons.onboardingapp.py | from PyInstaller.utils.hooks import collect_data_files
datas = collect_data_files("mitmproxy.addons.onboardingapp")
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/utils/pyinstaller/hook-mitmproxy.py | mitmproxy/utils/pyinstaller/hook-mitmproxy.py | hiddenimports = ["mitmproxy.script"]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/utils/pyinstaller/hook-mitmproxy.tools.web.py | mitmproxy/utils/pyinstaller/hook-mitmproxy.tools.web.py | from PyInstaller.utils.hooks import collect_data_files
datas = collect_data_files("mitmproxy.tools.web")
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/utils/pyinstaller/__init__.py | mitmproxy/utils/pyinstaller/__init__.py | from pathlib import Path
here = Path(__file__).parent.absolute()
def hook_dirs() -> list[str]:
return [str(here)]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/_view_multipart.py | mitmproxy/contentviews/_view_multipart.py | from ._utils import byte_pairs_to_str_pairs
from ._utils import merge_repeated_keys
from ._utils import yaml_dumps
from mitmproxy.contentviews._api import Contentview
from mitmproxy.contentviews._api import Metadata
from mitmproxy.net.http.multipart import decode_multipart
class MultipartContentview(Contentview):
name = "Multipart Form"
syntax_highlight = "yaml"
def prettify(
self,
data: bytes,
metadata: Metadata,
) -> str:
if not metadata.http_message:
raise ValueError("Not an HTTP message")
content_type = metadata.http_message.headers["content-type"]
items = decode_multipart(content_type, data)
return yaml_dumps(merge_repeated_keys(byte_pairs_to_str_pairs(items)))
def render_priority(
self,
data: bytes,
metadata: Metadata,
) -> float:
return float(bool(data) and metadata.content_type == "multipart/form-data")
multipart = MultipartContentview()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/_view_graphql.py | mitmproxy/contentviews/_view_graphql.py | import json
from typing import Any
from mitmproxy.contentviews._api import Contentview
from mitmproxy.contentviews._api import Metadata
def format_graphql(data):
query = data["query"]
header_data = data.copy()
header_data["query"] = "..."
return """{header}
---
{query}
""".format(header=json.dumps(header_data, indent=2), query=query)
def format_query_list(data: list[Any]):
num_queries = len(data) - 1
result = ""
for i, op in enumerate(data):
result += f"--- {i}/{num_queries}\n"
result += format_graphql(op)
return result
def is_graphql_query(data):
return isinstance(data, dict) and "query" in data and "\n" in data["query"]
def is_graphql_batch_query(data):
return (
isinstance(data, list)
and len(data) > 0
and isinstance(data[0], dict)
and "query" in data[0]
)
class GraphQLContentview(Contentview):
def prettify(
self,
data: bytes,
metadata: Metadata,
) -> str:
gql = json.loads(data)
if is_graphql_query(gql):
return format_graphql(gql)
elif is_graphql_batch_query(gql):
return format_query_list(gql)
else:
raise ValueError("Not a GraphQL message.")
def render_priority(
self,
data: bytes,
metadata: Metadata,
) -> float:
if metadata.content_type != "application/json" or not data:
return 0
try:
data = json.loads(data)
if is_graphql_query(data) or is_graphql_batch_query(data):
return 2
except ValueError:
pass
return 0
graphql = GraphQLContentview()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/_view_urlencoded.py | mitmproxy/contentviews/_view_urlencoded.py | import urllib
import urllib.parse
from ._utils import byte_pairs_to_str_pairs
from ._utils import merge_repeated_keys
from ._utils import yaml_dumps
from mitmproxy.contentviews._api import Contentview
from mitmproxy.contentviews._api import Metadata
class URLEncodedContentview(Contentview):
name = "URL-encoded"
syntax_highlight = "yaml"
def prettify(
self,
data: bytes,
metadata: Metadata,
) -> str:
items = urllib.parse.parse_qsl(data, keep_blank_values=True)
return yaml_dumps(merge_repeated_keys(byte_pairs_to_str_pairs(items)))
def render_priority(
self,
data: bytes,
metadata: Metadata,
) -> float:
return float(
bool(data) and metadata.content_type == "application/x-www-form-urlencoded"
)
urlencoded = URLEncodedContentview()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/_view_mqtt.py | mitmproxy/contentviews/_view_mqtt.py | import struct
from mitmproxy.contentviews._api import Contentview
from mitmproxy.contentviews._api import Metadata
from mitmproxy.utils import strutils
# from https://github.com/nikitastupin/mitmproxy-mqtt-script
class MQTTControlPacket:
# Packet types
(
CONNECT,
CONNACK,
PUBLISH,
PUBACK,
PUBREC,
PUBREL,
PUBCOMP,
SUBSCRIBE,
SUBACK,
UNSUBSCRIBE,
UNSUBACK,
PINGREQ,
PINGRESP,
DISCONNECT,
) = range(1, 15)
# http://docs.oasis-open.org/mqtt/mqtt/v3.1.1/os/mqtt-v3.1.1-os.html#_Table_2.1_-
Names = [
"reserved",
"CONNECT",
"CONNACK",
"PUBLISH",
"PUBACK",
"PUBREC",
"PUBREL",
"PUBCOMP",
"SUBSCRIBE",
"SUBACK",
"UNSUBSCRIBE",
"UNSUBACK",
"PINGREQ",
"PINGRESP",
"DISCONNECT",
"reserved",
]
PACKETS_WITH_IDENTIFIER = [
PUBACK,
PUBREC,
PUBREL,
PUBCOMP,
SUBSCRIBE,
SUBACK,
UNSUBSCRIBE,
UNSUBACK,
]
def __init__(self, packet):
self._packet = packet
# Fixed header
# http://docs.oasis-open.org/mqtt/mqtt/v3.1.1/os/mqtt-v3.1.1-os.html#_Toc398718020
self.packet_type = self._parse_packet_type()
self.packet_type_human = self.Names[self.packet_type]
self.dup, self.qos, self.retain = self._parse_flags()
self.remaining_length = self._parse_remaining_length()
# Variable header & Payload
# http://docs.oasis-open.org/mqtt/mqtt/v3.1.1/os/mqtt-v3.1.1-os.html#_Toc398718024
# http://docs.oasis-open.org/mqtt/mqtt/v3.1.1/os/mqtt-v3.1.1-os.html#_Toc398718026
if self.packet_type == self.CONNECT:
self._parse_connect_variable_headers()
self._parse_connect_payload()
elif self.packet_type == self.PUBLISH:
self._parse_publish_variable_headers()
self._parse_publish_payload()
elif self.packet_type == self.SUBSCRIBE:
self._parse_subscribe_variable_headers()
self._parse_subscribe_payload()
elif self.packet_type == self.SUBACK:
pass
elif self.packet_type == self.UNSUBSCRIBE:
pass
else:
self.payload = None
def pprint(self):
s = f"[{self.Names[self.packet_type]}]"
if self.packet_type == self.CONNECT:
assert self.payload
s += f"""
Client Id: {self.payload["ClientId"]}
Will Topic: {self.payload.get("WillTopic")}
Will Message: {strutils.bytes_to_escaped_str(self.payload.get("WillMessage", b"None"))}
User Name: {self.payload.get("UserName")}
Password: {strutils.bytes_to_escaped_str(self.payload.get("Password", b"None"))}
"""
elif self.packet_type == self.SUBSCRIBE:
s += " sent topic filters: "
s += ", ".join([f"'{tf}'" for tf in self.topic_filters])
elif self.packet_type == self.PUBLISH:
assert self.payload
topic_name = strutils.bytes_to_escaped_str(self.topic_name)
payload = strutils.bytes_to_escaped_str(self.payload)
s += f" '{payload}' to topic '{topic_name}'"
elif self.packet_type in [self.PINGREQ, self.PINGRESP]:
pass
else:
s = f"Packet type {self.Names[self.packet_type]} is not supported yet!"
return s
def _parse_length_prefixed_bytes(self, offset):
field_length_bytes = self._packet[offset : offset + 2]
field_length = struct.unpack("!H", field_length_bytes)[0]
field_content_bytes = self._packet[offset + 2 : offset + 2 + field_length]
return field_length + 2, field_content_bytes
def _parse_publish_variable_headers(self):
offset = len(self._packet) - self.remaining_length
field_length, field_content_bytes = self._parse_length_prefixed_bytes(offset)
self.topic_name = field_content_bytes
if self.qos in [0x01, 0x02]:
offset += field_length
self.packet_identifier = self._packet[offset : offset + 2]
def _parse_publish_payload(self):
fixed_header_length = len(self._packet) - self.remaining_length
variable_header_length = 2 + len(self.topic_name)
if self.qos in [0x01, 0x02]:
variable_header_length += 2
offset = fixed_header_length + variable_header_length
self.payload = self._packet[offset:]
def _parse_subscribe_variable_headers(self):
self._parse_packet_identifier()
def _parse_subscribe_payload(self):
offset = len(self._packet) - self.remaining_length + 2
self.topic_filters = {}
while len(self._packet) - offset > 0:
field_length, topic_filter_bytes = self._parse_length_prefixed_bytes(offset)
offset += field_length
qos = self._packet[offset : offset + 1]
offset += 1
topic_filter = topic_filter_bytes.decode("utf-8")
self.topic_filters[topic_filter] = {"qos": qos}
# http://docs.oasis-open.org/mqtt/mqtt/v3.1.1/os/mqtt-v3.1.1-os.html#_Toc398718030
def _parse_connect_variable_headers(self):
offset = len(self._packet) - self.remaining_length
self.variable_headers = {}
self.connect_flags = {}
self.variable_headers["ProtocolName"] = self._packet[offset : offset + 6]
self.variable_headers["ProtocolLevel"] = self._packet[offset + 6 : offset + 7]
self.variable_headers["ConnectFlags"] = self._packet[offset + 7 : offset + 8]
self.variable_headers["KeepAlive"] = self._packet[offset + 8 : offset + 10]
# http://docs.oasis-open.org/mqtt/mqtt/v3.1.1/os/mqtt-v3.1.1-os.html#_Toc385349229
self.connect_flags["CleanSession"] = bool(
self.variable_headers["ConnectFlags"][0] & 0x02
)
self.connect_flags["Will"] = bool(
self.variable_headers["ConnectFlags"][0] & 0x04
)
self.will_qos = (self.variable_headers["ConnectFlags"][0] >> 3) & 0x03
self.connect_flags["WillRetain"] = bool(
self.variable_headers["ConnectFlags"][0] & 0x20
)
self.connect_flags["Password"] = bool(
self.variable_headers["ConnectFlags"][0] & 0x40
)
self.connect_flags["UserName"] = bool(
self.variable_headers["ConnectFlags"][0] & 0x80
)
# http://docs.oasis-open.org/mqtt/mqtt/v3.1.1/os/mqtt-v3.1.1-os.html#_Toc398718031
def _parse_connect_payload(self):
fields = []
offset = len(self._packet) - self.remaining_length + 10
while len(self._packet) - offset > 0:
field_length, field_content = self._parse_length_prefixed_bytes(offset)
fields.append(field_content)
offset += field_length
self.payload = {}
for f in fields:
# http://docs.oasis-open.org/mqtt/mqtt/v3.1.1/os/mqtt-v3.1.1-os.html#_Toc385349242
if "ClientId" not in self.payload:
self.payload["ClientId"] = f.decode("utf-8")
# http://docs.oasis-open.org/mqtt/mqtt/v3.1.1/os/mqtt-v3.1.1-os.html#_Toc385349243
elif self.connect_flags["Will"] and "WillTopic" not in self.payload:
self.payload["WillTopic"] = f.decode("utf-8")
elif self.connect_flags["Will"] and "WillMessage" not in self.payload:
self.payload["WillMessage"] = f
elif (
self.connect_flags["UserName"] and "UserName" not in self.payload
): # pragma: no cover
self.payload["UserName"] = f.decode("utf-8")
elif (
self.connect_flags["Password"] and "Password" not in self.payload
): # pragma: no cover
self.payload["Password"] = f
else:
raise AssertionError(f"Unknown field in CONNECT payload: {f}")
def _parse_packet_type(self):
return self._packet[0] >> 4
# http://docs.oasis-open.org/mqtt/mqtt/v3.1.1/os/mqtt-v3.1.1-os.html#_Toc398718022
def _parse_flags(self):
dup = None
qos = None
retain = None
if self.packet_type == self.PUBLISH:
dup = (self._packet[0] >> 3) & 0x01
qos = (self._packet[0] >> 1) & 0x03
retain = self._packet[0] & 0x01
return dup, qos, retain
# http://docs.oasis-open.org/mqtt/mqtt/v3.1.1/os/mqtt-v3.1.1-os.html#_Table_2.4_Size
def _parse_remaining_length(self):
multiplier = 1
value = 0
i = 1
while True:
encodedByte = self._packet[i]
value += (encodedByte & 127) * multiplier
multiplier *= 128
if multiplier > 128 * 128 * 128:
raise ValueError("Malformed Remaining Length")
if encodedByte & 128 == 0:
break
i += 1
return value
# http://docs.oasis-open.org/mqtt/mqtt/v3.1.1/os/mqtt-v3.1.1-os.html#_Table_2.5_-
def _parse_packet_identifier(self):
offset = len(self._packet) - self.remaining_length
self.packet_identifier = self._packet[offset : offset + 2]
class MQTTContentview(Contentview):
def prettify(
self,
data: bytes,
metadata: Metadata,
) -> str:
mqtt_packet = MQTTControlPacket(data)
return mqtt_packet.pprint()
mqtt = MQTTContentview()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/_view_json.py | mitmproxy/contentviews/_view_json.py | import json
from mitmproxy.contentviews._api import Contentview
from mitmproxy.contentviews._api import Metadata
class JSONContentview(Contentview):
syntax_highlight = "yaml"
def prettify(self, data: bytes, metadata: Metadata) -> str:
data = json.loads(data)
return json.dumps(data, indent=4, ensure_ascii=False)
def render_priority(self, data: bytes, metadata: Metadata) -> float:
if not data:
return 0
if metadata.content_type in (
"application/json",
"application/json-rpc",
):
return 1
if (
metadata.content_type
and metadata.content_type.startswith("application/")
and metadata.content_type.endswith("json")
):
return 1
return 0
json_view = JSONContentview()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/_view_dns.py | mitmproxy/contentviews/_view_dns.py | from mitmproxy.contentviews._api import InteractiveContentview
from mitmproxy.contentviews._api import Metadata
from mitmproxy.contentviews._utils import yaml_dumps
from mitmproxy.contentviews._utils import yaml_loads
from mitmproxy.dns import DNSMessage as DNSMessage
from mitmproxy.proxy.layers.dns import pack_message
def _is_dns_tcp(metadata: Metadata) -> bool:
return bool(metadata.tcp_message or metadata.http_message)
class DNSContentview(InteractiveContentview):
syntax_highlight = "yaml"
def prettify(
self,
data: bytes,
metadata: Metadata,
) -> str:
if _is_dns_tcp(metadata):
data = data[2:] # hack: cut off length label and hope for the best
message = DNSMessage.unpack(data).to_json()
del message["status_code"]
message.pop("timestamp", None)
return yaml_dumps(message)
def reencode(
self,
prettified: str,
metadata: Metadata,
) -> bytes:
data = yaml_loads(prettified)
message = DNSMessage.from_json(data)
return pack_message(message, "tcp" if _is_dns_tcp(metadata) else "udp")
def render_priority(
self,
data: bytes,
metadata: Metadata,
) -> float:
return float(
metadata.content_type == "application/dns-message"
or bool(
metadata.flow
and metadata.flow.server_conn
and metadata.flow.server_conn.address
and metadata.flow.server_conn.address[1] in (53, 5353)
)
)
dns = DNSContentview()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/_view_wbxml.py | mitmproxy/contentviews/_view_wbxml.py | from mitmproxy.contentviews._api import Contentview
from mitmproxy.contentviews._api import Metadata
from mitmproxy.contrib.wbxml import ASCommandResponse
class WBXMLContentview(Contentview):
__content_types = ("application/vnd.wap.wbxml", "application/vnd.ms-sync.wbxml")
syntax_highlight = "xml"
def prettify(
self,
data: bytes,
metadata: Metadata,
) -> str:
return ASCommandResponse.ASCommandResponse(data).xmlString
def render_priority(
self,
data: bytes,
metadata: Metadata,
) -> float:
return float(bool(data) and metadata.content_type in self.__content_types)
wbxml = WBXMLContentview()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/_api.py | mitmproxy/contentviews/_api.py | from __future__ import annotations
import logging
import typing
from abc import abstractmethod
from dataclasses import dataclass
from pathlib import Path
from typing import Literal
from mitmproxy import http
from mitmproxy import tcp
from mitmproxy import udp
from mitmproxy.dns import DNSMessage
from mitmproxy.flow import Flow
from mitmproxy.websocket import WebSocketMessage
logger = logging.getLogger(__name__)
type SyntaxHighlight = Literal["css", "javascript", "xml", "yaml", "none", "error"]
@typing.runtime_checkable
class Contentview(typing.Protocol):
"""
Base class for all contentviews.
"""
@property
def name(self) -> str:
"""
The name of this contentview, e.g. "XML/HTML".
Inferred from the class name by default.
"""
return type(self).__name__.removesuffix("Contentview")
@property
def syntax_highlight(self) -> SyntaxHighlight:
"""Optional syntax highlighting that should be applied to the prettified output."""
return "none"
@abstractmethod
def prettify(
self,
data: bytes,
metadata: Metadata,
) -> str:
"""
Transform raw data into human-readable output.
May raise an exception (e.g. `ValueError`) if data cannot be prettified.
"""
def render_priority(
self,
data: bytes,
metadata: Metadata,
) -> float:
"""
Return the priority of this view for rendering `data`.
If no particular view is chosen by the user, the view with the highest priority is selected.
If this view does not support the given data, return a float < 0.
"""
return 0
def __lt__(self, other):
return self.name.__lt__(other.name)
@typing.runtime_checkable
class InteractiveContentview(Contentview, typing.Protocol):
"""A contentview that prettifies raw data and allows for interactive editing."""
@abstractmethod
def reencode(
self,
prettified: str,
metadata: Metadata,
) -> bytes:
"""
Reencode the given (modified) `prettified` output into the original data format.
May raise an exception (e.g. `ValueError`) if reencoding failed.
"""
@dataclass
class Metadata:
"""
Metadata about the data that is being prettified.
Do not rely on any given attribute to be present.
"""
flow: Flow | None = None
"""The flow that the data belongs to, if any."""
content_type: str | None = None
"""The HTTP content type of the data, if any."""
http_message: http.Message | None = None
"""The HTTP message that the data belongs to, if any."""
tcp_message: tcp.TCPMessage | None = None
"""The TCP message that the data belongs to, if any."""
udp_message: udp.UDPMessage | None = None
"""The UDP message that the data belongs to, if any."""
websocket_message: WebSocketMessage | None = None
"""The websocket message that the data belongs to, if any."""
dns_message: DNSMessage | None = None
"""The DNS message that the data belongs to, if any."""
protobuf_definitions: Path | None = None
"""Path to a .proto file that's used to resolve Protobuf field names."""
original_data: bytes | None = None
"""When reencoding: The original data that was prettified."""
Metadata.__init__.__doc__ = "@private"
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/_compat.py | mitmproxy/contentviews/_compat.py | from __future__ import annotations
import sys
import typing
from typing import Iterator
from mitmproxy import contentviews
from mitmproxy.contentviews import SyntaxHighlight
from mitmproxy.contentviews._api import Contentview
from mitmproxy.contentviews._api import Metadata
from mitmproxy.utils.strutils import always_str
if sys.version_info < (3, 13): # pragma: no cover
from typing_extensions import deprecated
else:
from warnings import deprecated
if typing.TYPE_CHECKING:
from mitmproxy.contentviews.base import TViewLine
from mitmproxy.contentviews.base import View
class LegacyContentview(Contentview):
@property
def name(self) -> str:
return self.contentview.name
@property
def syntax_highlight(self) -> SyntaxHighlight:
return getattr(self.contentview, "syntax_highlight", "none")
def render_priority(
self,
data: bytes,
metadata: Metadata,
) -> float:
return (
self.contentview.render_priority(
data=data,
content_type=metadata.content_type,
flow=metadata.flow,
http_message=metadata.http_message,
)
or 0.0
)
def prettify(self, data: bytes, metadata: Metadata) -> str:
lines: Iterator[TViewLine]
desc_, lines = self.contentview(
data,
content_type=metadata.content_type,
flow=metadata.flow,
http_message=metadata.http_message,
)
return "\n".join(
"".join(always_str(text, "utf8", "backslashescape") for tag, text in line)
for line in lines
)
def __init__(self, contentview: View):
self.contentview = contentview
@deprecated("Use `mitmproxy.contentviews.registry` instead.")
def get(name: str) -> Contentview | None:
try:
return contentviews.registry[name.lower()]
except KeyError:
return None
@deprecated("Use `mitmproxy.contentviews.Contentview` instead.")
def remove(view: View):
pass
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/_view_xml_html.py | mitmproxy/contentviews/_view_xml_html.py | import io
import re
import textwrap
from collections.abc import Iterable
from mitmproxy.contentviews._api import Contentview
from mitmproxy.contentviews._api import Metadata
from mitmproxy.utils import sliding_window
from mitmproxy.utils import strutils
"""
A custom XML/HTML prettifier. Compared to other prettifiers, its main features are:
- Implemented in pure Python.
- Modifies whitespace only.
- Works with any input.
- Lazy evaluation.
The implementation is split into two main parts: tokenization and formatting of tokens.
"""
# http://www.xml.com/pub/a/2001/07/25/namingparts.html - this is close enough for what we do.
REGEX_TAG = re.compile(r"[a-zA-Z0-9._:\-]+(?!=)")
# https://www.w3.org/TR/html5/syntax.html#void-elements
HTML_VOID_ELEMENTS = {
"area",
"base",
"br",
"col",
"embed",
"hr",
"img",
"input",
"keygen",
"link",
"meta",
"param",
"source",
"track",
"wbr",
}
NO_INDENT_TAGS = {"xml", "doctype", "html"}
INDENT = 2
class Token:
def __init__(self, data):
self.data = data
def __repr__(self):
return f"{type(self).__name__}({self.data})"
class Text(Token):
@property
def text(self):
return self.data.strip()
class Tag(Token):
@property
def tag(self):
t = REGEX_TAG.search(self.data)
if t is not None:
return t.group(0).lower()
return "<empty>"
@property
def is_comment(self) -> bool:
return self.data.startswith("<!--")
@property
def is_cdata(self) -> bool:
return self.data.startswith("<![CDATA[")
@property
def is_closing(self):
return self.data.startswith("</")
@property
def is_self_closing(self):
return (
self.is_comment
or self.is_cdata
or self.data.endswith("/>")
or self.tag in HTML_VOID_ELEMENTS
)
@property
def is_opening(self):
return not self.is_closing and not self.is_self_closing
@property
def done(self):
if self.is_comment:
return self.data.endswith("-->")
elif self.is_cdata:
return self.data.endswith("]]>")
else:
# This fails for attributes that contain an unescaped ">"
return self.data.endswith(">")
def tokenize(data: str) -> Iterable[Token]:
token: Token = Text("")
i = 0
def readuntil(char, start, include=1):
nonlocal i
end = data.find(char, start)
if end == -1:
end = len(data)
ret = data[i : end + include]
i = end + include
return ret
while i < len(data):
if isinstance(token, Text):
token.data = readuntil("<", i, 0)
if token.text:
yield token
token = Tag("")
elif isinstance(token, Tag):
token.data += readuntil(">", i, 1)
if token.done:
yield token
token = Text("")
if token.data.strip():
yield token
def indent_text(data: str, prefix: str) -> str:
# Add spacing to first line so that we dedent in cases like this:
# <li>This is
# example text
# over multiple lines
# </li>
dedented = textwrap.dedent(" " * 32 + data).strip()
return textwrap.indent(dedented, prefix[:32])
def is_inline_text(a: Token | None, b: Token | None, c: Token | None) -> bool:
if isinstance(a, Tag) and isinstance(b, Text) and isinstance(c, Tag):
if a.is_opening and "\n" not in b.data and c.is_closing and a.tag == c.tag:
return True
return False
def is_inline(
prev2: Token | None,
prev1: Token | None,
t: Token | None,
next1: Token | None,
next2: Token | None,
) -> bool:
if isinstance(t, Text):
return is_inline_text(prev1, t, next1)
elif isinstance(t, Tag):
if is_inline_text(prev2, prev1, t) or is_inline_text(t, next1, next2):
return True
if (
isinstance(next1, Tag)
and t.is_opening
and next1.is_closing
and t.tag == next1.tag
):
return True # <div></div> (start tag)
if (
isinstance(prev1, Tag)
and prev1.is_opening
and t.is_closing
and prev1.tag == t.tag
):
return True # <div></div> (end tag)
return False
class ElementStack:
"""
Keep track of how deeply nested our document is.
"""
def __init__(self):
self.open_tags = []
self.indent = ""
def push_tag(self, tag: str):
if len(self.open_tags) > 16:
return
self.open_tags.append(tag)
if tag not in NO_INDENT_TAGS:
self.indent += " " * INDENT
def pop_tag(self, tag: str):
if tag in self.open_tags:
remove_indent = 0
while True:
t = self.open_tags.pop()
if t not in NO_INDENT_TAGS:
remove_indent += INDENT
if t == tag:
break
self.indent = self.indent[:-remove_indent]
else:
pass # this closing tag has no start tag. let's keep indentation as-is.
def format_xml(tokens: Iterable[Token]) -> str:
out = io.StringIO()
context = ElementStack()
for prev2, prev1, token, next1, next2 in sliding_window.window(tokens, 2, 2):
if isinstance(token, Tag):
if token.is_opening:
out.write(indent_text(token.data, context.indent))
if not is_inline(prev2, prev1, token, next1, next2):
out.write("\n")
context.push_tag(token.tag)
elif token.is_closing:
context.pop_tag(token.tag)
if is_inline(prev2, prev1, token, next1, next2):
out.write(token.data)
else:
out.write(indent_text(token.data, context.indent))
out.write("\n")
else: # self-closing
out.write(indent_text(token.data, context.indent))
out.write("\n")
elif isinstance(token, Text):
if is_inline(prev2, prev1, token, next1, next2):
out.write(token.text)
else:
out.write(indent_text(token.data, context.indent))
out.write("\n")
else: # pragma: no cover
raise RuntimeError()
return out.getvalue()
class XmlHtmlContentview(Contentview):
__content_types = ("text/xml", "text/html")
name = "XML/HTML"
syntax_highlight = "xml"
def prettify(
self,
data: bytes,
metadata: Metadata,
) -> str:
if metadata.http_message:
data_str = metadata.http_message.get_text(strict=False) or ""
else:
data_str = data.decode("utf8", "backslashreplace")
tokens = tokenize(data_str)
return format_xml(tokens)
def render_priority(
self,
data: bytes,
metadata: Metadata,
) -> float:
if not data:
return 0
if metadata.content_type in self.__content_types:
return 1
elif strutils.is_xml(data):
return 0.4
return 0
xml_html = XmlHtmlContentview()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/_view_http3.py | mitmproxy/contentviews/_view_http3.py | from collections import defaultdict
from dataclasses import dataclass
from dataclasses import field
import pylsqpack
from aioquic.buffer import Buffer
from aioquic.buffer import BufferReadError
from aioquic.h3.connection import parse_settings
from aioquic.h3.connection import Setting
from ..proxy.layers.http import is_h3_alpn
from mitmproxy import tcp
from mitmproxy.contentviews._api import Contentview
from mitmproxy.contentviews._api import Metadata
from mitmproxy_rs.contentviews import hex_dump
@dataclass(frozen=True)
class Frame:
    """Representation of an HTTP/3 frame."""

    # Frame type: 0x0 = DATA, 0x1 = HEADERS, 0x4 = SETTINGS; others shown numerically.
    type: int
    # Raw frame payload (type/length varints already stripped).
    data: bytes

    def pretty(self) -> str:
        """Return a human-readable rendering of this frame.

        HEADERS frames are QPACK-decoded and SETTINGS frames parsed into
        key/value pairs; everything else (including decode errors) falls back
        to a hex dump of the payload.
        """
        frame_name = f"0x{self.type:x} Frame"
        if self.type == 0:
            frame_name = "DATA Frame"
        elif self.type == 1:
            try:
                # Fresh decoder per frame; dynamic-table state across frames is
                # not tracked here — assumes static-table-only encoding. TODO confirm.
                hdrs = pylsqpack.Decoder(4096, 16).feed_header(0, self.data)[1]
                return f"HEADERS Frame\n" + "\n".join(
                    f"{k.decode(errors='backslashreplace')}: {v.decode(errors='backslashreplace')}"
                    for k, v in hdrs
                )
            except Exception as e:
                frame_name = f"HEADERS Frame (error: {e})"
        elif self.type == 4:
            settings = []
            try:
                s = parse_settings(self.data)
            except Exception as e:
                frame_name = f"SETTINGS Frame (error: {e})"
            else:
                for k, v in s.items():
                    try:
                        key = Setting(k).name
                    except ValueError:
                        # Unknown setting identifiers are shown as hex.
                        key = f"0x{k:x}"
                    settings.append(f"{key}: 0x{v:x}")
                return "SETTINGS Frame\n" + "\n".join(settings)
        # Fallback and error paths: frame name plus payload hex dump.
        return f"{frame_name}\n" + hex_dump.prettify(self.data, Metadata())
@dataclass(frozen=True)
class StreamType:
    """Representation of an HTTP/3 unidirectional stream type."""

    type: int

    # Well-known unidirectional stream types; unknown types render as hex.
    _KNOWN = {
        0x00: "Control Stream",
        0x01: "Push Stream",
        0x02: "QPACK Encoder Stream",
        0x03: "QPACK Decoder Stream",
    }

    def pretty(self) -> str:
        """Return the display name for this stream type."""
        return self._KNOWN.get(self.type, f"0x{self.type:x} Stream")
@dataclass
class ConnectionState:
    """Per-flow incremental parsing state, kept across prettify() calls."""

    # Number of flow messages already parsed into `frames`.
    message_count: int = 0
    # Parsed frames (plus the initial StreamType for unidirectional streams),
    # keyed by the index of the message they were completed in.
    frames: dict[int, list[Frame | StreamType]] = field(default_factory=dict)
    # Unconsumed bytes awaiting a complete frame, one buffer per direction.
    client_buf: bytearray = field(default_factory=bytearray)
    server_buf: bytearray = field(default_factory=bytearray)
class Http3Contentview(Contentview):
    """Renders raw QUIC stream data as HTTP/3 frames.

    Parsing is stateful: frame bytes may be split across messages, so each
    flow keeps buffered leftovers and already-parsed frames in a
    `ConnectionState`.
    """

    def __init__(self) -> None:
        # NOTE(review): entries are never evicted, so this grows with the
        # number of observed flows — presumably acceptable for interactive use.
        self.connections: defaultdict[tcp.TCPFlow, ConnectionState] = defaultdict(
            ConnectionState
        )

    @property
    def name(self) -> str:
        return "HTTP/3 Frames"

    def prettify(self, data: bytes, metadata: Metadata) -> str:
        """Return the pretty-printed frames completed in `metadata.tcp_message`."""
        flow = metadata.flow
        tcp_message = metadata.tcp_message
        assert isinstance(flow, tcp.TCPFlow)
        assert tcp_message
        state = self.connections[flow]
        # Incrementally parse messages we have not seen yet.
        for message in flow.messages[state.message_count :]:
            if message.from_client:
                buf = state.client_buf
            else:
                buf = state.server_buf
            buf += message.content
            if state.message_count == 0 and flow.metadata["quic_is_unidirectional"]:
                # Unidirectional streams start with a stream-type varint
                # (a varint is at most 8 bytes long).
                h3_buf = Buffer(data=bytes(buf[:8]))
                stream_type = h3_buf.pull_uint_var()
                consumed = h3_buf.tell()
                del buf[:consumed]
                state.frames[0] = [StreamType(stream_type)]
            while True:
                # Frame header: two varints (type, length) — at most 16 bytes.
                h3_buf = Buffer(data=bytes(buf[:16]))
                try:
                    frame_type = h3_buf.pull_uint_var()
                    frame_size = h3_buf.pull_uint_var()
                except BufferReadError:
                    # Header incomplete; wait for more data.
                    break
                consumed = h3_buf.tell()
                if len(buf) < consumed + frame_size:
                    # Payload incomplete; wait for more data.
                    break
                frame_data = bytes(buf[consumed : consumed + frame_size])
                frame = Frame(frame_type, frame_data)
                state.frames.setdefault(state.message_count, []).append(frame)
                del buf[: consumed + frame_size]
            state.message_count += 1
        frames = state.frames.get(flow.messages.index(tcp_message), [])
        if not frames:
            return ""
        else:
            return "\n\n".join(frame.pretty() for frame in frames)

    def render_priority(
        self,
        data: bytes,
        metadata: Metadata,
    ) -> float:
        flow = metadata.flow
        # Weight 2 so this view outranks generic views whenever the client
        # negotiated an HTTP/3 ALPN on a TCP-style flow.
        return (
            2
            * float(bool(flow and is_h3_alpn(flow.client_conn.alpn)))
            * float(isinstance(flow, tcp.TCPFlow))
        )


http3 = Http3Contentview()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/_view_query.py | mitmproxy/contentviews/_view_query.py | from .. import http
from ._utils import merge_repeated_keys
from ._utils import yaml_dumps
from mitmproxy.contentviews._api import Contentview
from mitmproxy.contentviews._api import Metadata
class QueryContentview(Contentview):
    """Renders an HTTP request's query string as YAML key/value pairs."""

    syntax_highlight = "yaml"

    def prettify(self, data: bytes, metadata: Metadata) -> str:
        message = metadata.http_message
        if not isinstance(message, http.Request):
            raise ValueError("Not an HTTP request.")
        pairs = message.query.items(multi=True)
        return yaml_dumps(merge_repeated_keys(pairs))

    def render_priority(self, data: bytes, metadata: Metadata) -> float:
        # Only attractive for bodyless requests that carry a query string.
        has_query = bool(getattr(metadata.http_message, "query", False))
        return 0.3 * float(not data and has_query)


query = QueryContentview()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/__init__.py | mitmproxy/contentviews/__init__.py | """
mitmproxy includes a set of content views which can be used to
format/decode/highlight/reencode data. While they are mostly used for HTTP message
bodies, they may be used in other contexts, e.g. to decode WebSocket messages.
See "Custom Contentviews" in the mitmproxy documentation for examples.
"""
import logging
import sys
import traceback
import warnings
from dataclasses import dataclass
from ..addonmanager import cut_traceback
from ._api import Contentview
from ._api import InteractiveContentview
from ._api import Metadata
from ._api import SyntaxHighlight
from ._compat import get # noqa: F401
from ._compat import LegacyContentview
from ._compat import remove # noqa: F401
from ._registry import ContentviewRegistry
from ._utils import ContentviewMessage
from ._utils import get_data
from ._utils import make_metadata
from ._view_css import css
from ._view_dns import dns
from ._view_graphql import graphql
from ._view_http3 import http3
from ._view_image import image
from ._view_javascript import javascript
from ._view_json import json_view
from ._view_mqtt import mqtt
from ._view_multipart import multipart
from ._view_query import query
from ._view_raw import raw
from ._view_socketio import socket_io
from ._view_urlencoded import urlencoded
from ._view_wbxml import wbxml
from ._view_xml_html import xml_html
from .base import View
import mitmproxy_rs.contentviews
from mitmproxy import flow
from mitmproxy.utils import strutils
logger = logging.getLogger(__name__)
@dataclass
class ContentviewResult:
    """The outcome of pretty-printing a message body."""

    # Pretty-printed body text (control characters escaped by the caller).
    text: str
    # Highlighting hint for the UI, e.g. "yaml" or "error".
    syntax_highlight: SyntaxHighlight
    # Name of the contentview that produced `text`, or None if content was missing.
    view_name: str | None
    # Extra info for the user, e.g. "[decoded gzip]" or "[cannot decode]".
    description: str
# Process-wide default registry; populated with the built-in views below.
registry = ContentviewRegistry()


def prettify_message(
    message: ContentviewMessage,
    flow: flow.Flow,
    view_name: str = "auto",
    registry: ContentviewRegistry = registry,
) -> ContentviewResult:
    """Pretty-print a message body using `view_name` (or the best-matching view).

    Parse failures do not propagate: with "auto" we silently fall back to the
    raw view, while an explicitly chosen view renders a (shortened) traceback
    as the result text instead.
    """
    data, enc = get_data(message)
    if data is None:
        return ContentviewResult(
            text="Content is missing.",
            syntax_highlight="error",
            description="",
            view_name=None,
        )

    # Determine the correct view
    metadata = make_metadata(message, flow)
    view = registry.get_view(data, metadata, view_name)

    # Finally, we can pretty-print!
    try:
        ret = ContentviewResult(
            text=view.prettify(data, metadata),
            syntax_highlight=view.syntax_highlight,
            view_name=view.name,
            description=enc,
        )
    except Exception as e:
        logger.debug(f"Contentview {view.name!r} failed: {e}", exc_info=True)
        if view_name == "auto":
            # If the contentview was chosen as the best matching one, fall back to raw.
            ret = ContentviewResult(
                text=raw.prettify(data, metadata),
                syntax_highlight=raw.syntax_highlight,
                view_name=raw.name,
                description=f"{enc}[failed to parse as {view.name}]",
            )
        else:
            # Cut the exception traceback for display.
            exc, value, tb = sys.exc_info()
            tb_cut = cut_traceback(tb, "prettify_message")
            if (
                tb_cut == tb
            ):  # If there are no extra frames, just skip displaying the traceback.
                tb_cut = None
            # If the contentview has been set explicitly, we display a hard error.
            err = "".join(traceback.format_exception(exc, value=value, tb=tb_cut))
            ret = ContentviewResult(
                text=f"Couldn't parse as {view.name}:\n{err}",
                syntax_highlight="error",
                view_name=view.name,
                description=enc,
            )
    # Control characters could garble the UI; escape them unconditionally.
    ret.text = strutils.escape_control_characters(ret.text)
    return ret
def reencode_message(
    prettified: str,
    message: ContentviewMessage,
    flow: flow.Flow,
    view_name: str,
) -> bytes:
    """Convert an edited pretty-printed body back into raw message bytes.

    Raises ValueError if the named contentview does not support re-encoding.
    """
    metadata = make_metadata(message, flow)
    view = registry[view_name.lower()]
    if isinstance(view, InteractiveContentview):
        return view.reencode(prettified, metadata)
    raise ValueError(f"Contentview {view.name} is not interactive.")
# Built-in contentviews shipped with mitmproxy.
_views: list[Contentview] = [
    css,
    dns,
    graphql,
    http3,
    image,
    javascript,
    json_view,
    mqtt,
    multipart,
    query,
    raw,
    socket_io,
    urlencoded,
    wbxml,
    xml_html,
]
for view in _views:
    registry.register(view)
# Also register the Rust-implemented contentviews exported by mitmproxy_rs.
for name in mitmproxy_rs.contentviews.__all__:
    if name.startswith("_"):
        continue
    cv = getattr(mitmproxy_rs.contentviews, name)
    # Only register ready-made instances; skip classes and other exports.
    if isinstance(cv, Contentview) and not isinstance(cv, type):
        registry.register(cv)
def add(contentview: Contentview | type[Contentview]) -> None:
    """
    Register a contentview for use in mitmproxy.

    You may pass a `Contentview` instance or the class itself.
    When passing the class, its constructor will be invoked with no arguments.
    """
    if isinstance(contentview, View):
        # Legacy API: wrap old-style views so they satisfy the new interface.
        warnings.warn(
            f"`mitmproxy.contentviews.View` is deprecated since mitmproxy 12, "
            f"migrate {contentview.__class__.__name__} to `mitmproxy.contentviews.Contentview` instead.",
            stacklevel=2,
        )
        contentview = LegacyContentview(contentview)
    registry.register(contentview)
# hack: docstring where pdoc finds it.
SyntaxHighlight = SyntaxHighlight
"""
Syntax highlighting formats currently supported by mitmproxy.
Note that YAML is a superset of JSON; so if you'd like to highlight JSON, pick the YAML highlighter.
*If you have a concrete use case for additional formats, please open an issue.*
"""
__all__ = [
# Public Contentview API
"Contentview",
"InteractiveContentview",
"SyntaxHighlight",
"add",
"Metadata",
]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/_view_raw.py | mitmproxy/contentviews/_view_raw.py | from ._api import Contentview
from ._api import Metadata
class RawContentview(Contentview):
    """Fallback view: show the body as UTF-8 text, escaping undecodable bytes."""

    def prettify(self, data: bytes, metadata: Metadata) -> str:
        return data.decode("utf-8", "backslashreplace")

    def render_priority(self, data: bytes, metadata: Metadata) -> float:
        # Low constant priority: always applicable, rarely the best match.
        return 0.1


raw = RawContentview()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/base.py | mitmproxy/contentviews/base.py | # Default view cutoff *in lines*
import sys
from abc import ABC
from abc import abstractmethod
from collections.abc import Iterable
from collections.abc import Iterator
from collections.abc import Mapping
from typing import ClassVar
from typing import Union
from mitmproxy import flow
from mitmproxy import http
if sys.version_info < (3, 13): # pragma: no cover
from typing_extensions import deprecated
else:
from warnings import deprecated
# Maximum width to which keys are padded in key/value rendering.
KEY_MAX = 30

TTextType = Union[str, bytes]  # FIXME: This should be either bytes or str ultimately.
# One rendered line: a list of (style, text) tuples.
TViewLine = list[tuple[str, TTextType]]
# (description, generator of lines) as returned by legacy views.
TViewResult = tuple[str, Iterator[TViewLine]]
@deprecated("Use `mitmproxy.contentviews.Contentview` instead.")
class View(ABC):
    """
    Deprecated, do not use.
    """

    name: ClassVar[str]

    @abstractmethod
    def __call__(
        self,
        data: bytes,
        *,
        content_type: str | None = None,
        flow: flow.Flow | None = None,
        http_message: http.Message | None = None,
        **unknown_metadata,
    ) -> TViewResult:
        """
        Transform raw data into human-readable output.

        Returns a (description, content generator) tuple.
        The content generator yields lists of (style, text) tuples, where each list represents
        a single line. ``text`` is an unfiltered string which may need to be escaped,
        depending on the used output. For example, it may contain terminal control sequences
        or unfiltered HTML.

        Except for `data`, implementations must not rely on any given argument to be present.
        To ensure compatibility with future mitmproxy versions, unknown keyword arguments should be ignored.

        The content generator must not yield tuples of tuples, because urwid cannot process that.
        You have to yield a *list* of tuples per line.
        """
        raise NotImplementedError()  # pragma: no cover

    def render_priority(
        self,
        data: bytes,
        *,
        content_type: str | None = None,
        flow: flow.Flow | None = None,
        http_message: http.Message | None = None,
        **unknown_metadata,
    ) -> float:
        """
        Return the priority of this view for rendering `data`.
        If no particular view is chosen by the user, the view with the highest priority is selected.

        Except for `data`, implementations must not rely on any given argument to be present.
        To ensure compatibility with future mitmproxy versions, unknown keyword arguments should be ignored.
        """
        return 0

    def __lt__(self, other):
        # Views order alphabetically by name (stable, case-sensitive).
        assert isinstance(other, View)
        return self.name.__lt__(other.name)
@deprecated("Use `mitmproxy.contentviews.Contentview` instead.")
def format_pairs(items: Iterable[tuple[TTextType, TTextType]]) -> Iterator[TViewLine]:
    """
    Helper function that accepts a list of (k,v) pairs into a list of
    [
        ("key", key )
        ("value", value)
    ]
    where key is padded to a uniform width
    """
    # Materialize first: `items` may be a one-shot iterator, and we traverse
    # it twice (once to measure the keys, once to emit the lines). Previously
    # a generator argument produced no output at all on the second pass.
    items = list(items)
    max_key_len = max((len(k) for k, _ in items), default=0)
    # Cap the padding so one very long key does not indent everything.
    max_key_len = min(max_key_len, KEY_MAX)
    for key, value in items:
        if isinstance(key, bytes):
            key += b":"
        else:
            key += ":"
        key = key.ljust(max_key_len + 2)
        yield [("header", key), ("text", value)]
@deprecated("Use `mitmproxy.contentviews.Contentview` instead.")
def format_dict(d: Mapping[TTextType, TTextType]) -> Iterator[TViewLine]:
    """
    Render a mapping as aligned ("key", "value") view lines.

    Equivalent to ``format_pairs(d.items())``, with keys padded to a
    uniform width.
    """
    yield from format_pairs(d.items())
@deprecated("Use `mitmproxy.contentviews.Contentview` instead.")
def format_text(text: TTextType) -> Iterator[TViewLine]:
    """
    Render text (str or bytes) as one plain view line per input line.
    """
    yield from ([("text", line)] for line in text.splitlines())
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/_view_javascript.py | mitmproxy/contentviews/_view_javascript.py | import io
import re
from mitmproxy.contentviews._api import Contentview
from mitmproxy.contentviews._api import Metadata
from mitmproxy.utils import strutils
# Characters around which the layout regexes below operate.
DELIMITERS = "{};\n"
# Regions whose contents must not be touched by the re-indenting regexes:
# regex literals, ' / " / ` string literals, block and line comments,
# and for(...) headers (whose semicolons are not statement separators).
SPECIAL_AREAS = (
    r"(?<=[^\w\s)])\s*/(?:[^\n/]|(?<!\\)(?:\\\\)*\\/)+?/(?=[gimsuy]{0,6}\s*(?:[;,).\n]|$))",
    r"'" + strutils.MULTILINE_CONTENT_LINE_CONTINUATION + strutils.NO_ESCAPE + "'",
    r'"' + strutils.MULTILINE_CONTENT_LINE_CONTINUATION + strutils.NO_ESCAPE + '"',
    r"`" + strutils.MULTILINE_CONTENT + strutils.NO_ESCAPE + "`",
    r"/\*" + strutils.MULTILINE_CONTENT + r"\*/",
    r"//" + strutils.SINGLELINE_CONTENT + "$",
    r"for\(" + strutils.SINGLELINE_CONTENT + r"\)",
)


def beautify(data):
    """Re-indent JavaScript source, modifying whitespace only.

    Special areas (strings, comments, regex literals) are masked first so
    the layout regexes cannot alter their contents, then unmasked at the end.
    """
    data = strutils.escape_special_areas(data, SPECIAL_AREAS, DELIMITERS)

    # Normalize newlines around braces and semicolons.
    data = re.sub(r"\s*{\s*(?!};)", " {\n", data)
    data = re.sub(r"\s*;\s*", ";\n", data)
    data = re.sub(r"(?<!{)\s*}(;)?\s*", r"\n}\1\n", data)

    # Re-indent line by line, two spaces per brace depth.
    # NOTE(review): a line such as "} else {" takes the startswith("}") branch
    # and does not increase the indent afterwards — confirm this is intended.
    beautified = io.StringIO()
    indent_level = 0
    for line in data.splitlines(True):
        if line.endswith("{\n"):
            beautified.write(" " * 2 * indent_level + line)
            indent_level += 1
        elif line.startswith("}"):
            indent_level -= 1
            beautified.write(" " * 2 * indent_level + line)
        else:
            beautified.write(" " * 2 * indent_level + line)
    data = strutils.unescape_special_areas(beautified.getvalue())
    return data
class JavaScriptContentview(Contentview):
    """Pretty-prints JavaScript bodies using the whitespace-only beautifier above."""

    syntax_highlight = "javascript"
    __content_types = (
        "application/x-javascript",
        "application/javascript",
        "text/javascript",
    )

    def prettify(self, data: bytes, metadata: Metadata) -> str:
        return beautify(data.decode("utf-8", "replace"))

    def render_priority(self, data: bytes, metadata: Metadata) -> float:
        # Only claim non-empty bodies with a declared JavaScript content type.
        return float(bool(data) and metadata.content_type in self.__content_types)


javascript = JavaScriptContentview()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/_utils.py | mitmproxy/contentviews/_utils.py | import io
import typing
from collections.abc import Iterable
from pathlib import Path
from typing import Any
from ruamel.yaml import YAML
from .. import ctx
from .. import http
from ..dns import DNSMessage
from ..flow import Flow
from ..tcp import TCPMessage
from ..udp import UDPMessage
from ..utils import strutils
from ..websocket import WebSocketMessage
from ._api import Metadata
type ContentviewMessage = (
http.Message | TCPMessage | UDPMessage | WebSocketMessage | DNSMessage
)
def make_metadata(
    message: ContentviewMessage,
    flow: Flow,
) -> Metadata:
    """Build the contentview Metadata for `message`, filling the per-protocol slot."""
    metadata = Metadata(
        flow=flow,
        protobuf_definitions=Path(ctx.options.protobuf_definitions).expanduser()
        if ctx.options.protobuf_definitions
        else None,
    )
    match message:
        case http.Message():
            metadata.http_message = message
            # Normalize the content type to "type/subtype" (parameters stripped).
            if ctype := message.headers.get("content-type"):
                if ct := http.parse_content_type(ctype):
                    metadata.content_type = f"{ct[0]}/{ct[1]}"
        case TCPMessage():
            metadata.tcp_message = message
        case UDPMessage():
            metadata.udp_message = message
        case WebSocketMessage():
            metadata.websocket_message = message
        case DNSMessage():
            metadata.dns_message = message
        case other:  # pragma: no cover
            typing.assert_never(other)
    return metadata
def get_data(
    message: ContentviewMessage,
) -> tuple[bytes | None, str]:
    """Return ``(content, description)`` for a message body.

    If content-encoding decoding fails, falls back to the raw HTTP body with a
    "[cannot decode]" marker; otherwise the description notes which encoding
    was removed (or is empty).
    """
    content: bytes | None
    try:
        content = message.content
    except ValueError:
        # Decoding failed — only http.Message raises here; use the raw body.
        assert isinstance(message, http.Message)
        return message.raw_content, "[cannot decode]"
    was_decoded = isinstance(message, http.Message) and content != message.raw_content
    if was_decoded:
        enc = "[decoded {}]".format(message.headers.get("content-encoding"))
    else:
        enc = ""
    return content, enc
def yaml_dumps(d: Any) -> str:
    """Round-trip-dump `d` as YAML; falsy input yields an empty string."""
    if not d:
        return ""
    buf = io.StringIO()
    YAML(typ="rt", pure=True).dump(d, buf)
    return buf.getvalue()
def yaml_loads(yaml: str) -> Any:
    """Parse a YAML document with the safe (pure-Python) loader."""
    return YAML(typ="safe", pure=True).load(yaml)
def merge_repeated_keys(items: Iterable[tuple[str, str]]) -> dict[str, str | list[str]]:
"""
Helper function that takes a list of pairs and merges repeated keys.
"""
ret: dict[str, str | list[str]] = {}
for key, value in items:
if existing := ret.get(key):
if isinstance(existing, list):
existing.append(value)
else:
ret[key] = [existing, value]
else:
ret[key] = value
return ret
def byte_pairs_to_str_pairs(
    items: Iterable[tuple[bytes, bytes]],
) -> Iterable[tuple[str, str]]:
    """Lazily escape each (key, value) byte pair into printable strings."""
    return (
        (strutils.bytes_to_escaped_str(k), strutils.bytes_to_escaped_str(v))
        for k, v in items
    )
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/_view_socketio.py | mitmproxy/contentviews/_view_socketio.py | from abc import abstractmethod
from enum import Enum
from mitmproxy.contentviews._api import Contentview
from mitmproxy.contentviews._api import Metadata
from mitmproxy.http import HTTPFlow
from mitmproxy.utils import strutils
class PacketType(Enum):
    """Common base for the Engine.IO / Socket.IO packet-type enums."""

    @property
    @abstractmethod
    def visible(self) -> bool:
        """Whether packets of this type should be shown in the contentview."""
        raise RuntimeError  # pragma: no cover

    def __str__(self):
        # e.g. "EngineIO.MESSAGE" / "SocketIO.EVENT"
        return f"{type(self).__name__}.{self.name}"
class EngineIO(PacketType):
    # https://github.com/socketio/engine.io-protocol?tab=readme-ov-file#protocol
    # Values are the ASCII codes of the leading packet-type digit ("0".."6").
    OPEN = ord("0")
    CLOSE = ord("1")
    PING = ord("2")
    PONG = ord("3")
    MESSAGE = ord("4")
    UPGRADE = ord("5")
    NOOP = ord("6")

    @property
    def visible(self):
        # Ping/pong heartbeat packets are noise; hide them from the view.
        return self not in (
            self.PING,
            self.PONG,
        )
class SocketIO(PacketType):
    # https://github.com/socketio/socket.io-protocol?tab=readme-ov-file#exchange-protocol
    # Values are the ASCII codes of the second packet-type digit ("0".."6").
    CONNECT = ord("0")
    DISCONNECT = ord("1")
    EVENT = ord("2")
    ACK = ord("3")
    CONNECT_ERROR = ord("4")
    BINARY_EVENT = ord("5")
    BINARY_ACK = ord("6")

    @property
    def visible(self):
        # Acknowledgement packets are hidden from the view.
        return self not in (
            self.ACK,
            self.BINARY_ACK,
        )
def parse_packet(data: bytes) -> tuple[PacketType, bytes]:
    """Split a raw payload into its packet type and remaining body.

    Raises IndexError/ValueError for malformed packets (empty input or
    unknown type bytes).
    """
    engineio_type, rest = EngineIO(data[0]), data[1:]
    if engineio_type is not EngineIO.MESSAGE:
        return engineio_type, rest
    # MESSAGE packets carry a nested Socket.IO packet: one more type byte.
    return SocketIO(rest[0]), rest[1:]
class SocketIOContentview(Contentview):
    """Decodes Socket.IO websocket messages into readable packet descriptions."""

    name = "Socket.IO"

    def prettify(self, data: bytes, metadata: Metadata) -> str:
        packet_type, payload = parse_packet(data)
        if packet_type.visible:
            return f"{packet_type} {strutils.bytes_to_escaped_str(payload)}"
        return ""

    def render_priority(self, data: bytes, metadata: Metadata) -> float:
        is_socketio_ws = (
            isinstance(metadata.flow, HTTPFlow)
            and metadata.flow.websocket is not None
            and "/socket.io/?" in metadata.flow.request.path
        )
        return float(bool(data) and is_socketio_ws)


socket_io = SocketIOContentview()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/_registry.py | mitmproxy/contentviews/_registry.py | from __future__ import annotations
import logging
import typing
from collections.abc import Mapping
from ..utils import signals
from ._api import Contentview
from ._api import Metadata
logger = logging.getLogger(__name__)
# No-op default receiver; defines the callback signature for `on_change`.
def _on_change(view: Contentview) -> None: ...
class ContentviewRegistry(Mapping[str, Contentview]):
    """Case-insensitive name -> Contentview mapping with change notification."""

    def __init__(self):
        self._by_name: dict[str, Contentview] = {}
        # Fired with the new instance whenever a view is (re-)registered.
        self.on_change = signals.SyncSignal(_on_change)

    def register(self, instance: Contentview | type[Contentview]) -> None:
        """Register a contentview instance (or class, which is instantiated here)."""
        if isinstance(instance, type):
            instance = instance()
        name = instance.name.lower()
        if name in self._by_name:
            logger.info(f"Replacing existing {name} contentview.")
        self._by_name[name] = instance
        self.on_change.send(instance)

    def available_views(self) -> list[str]:
        """All selectable view names; "auto" (= best match) always comes first."""
        return ["auto", *sorted(self._by_name.keys())]

    def get_view(
        self, data: bytes, metadata: Metadata, view_name: str = "auto"
    ) -> Contentview:
        """
        Get the best contentview for the given data and metadata.

        If `view_name` is "auto" or the provided view not found,
        the best matching contentview based on `render_priority` will be returned.
        """
        if view_name != "auto":
            try:
                return self[view_name.lower()]
            except KeyError:
                logger.warning(
                    f"Unknown contentview {view_name!r}, selecting best match instead."
                )

        max_prio: tuple[float, Contentview] | None = None
        for name, view in self._by_name.items():
            try:
                priority = view.render_priority(data, metadata)
                assert isinstance(priority, (int, float)), (
                    f"render_priority for {view.name} did not return a number."
                )
            except Exception:
                # A broken view must not take down rendering; log and skip it.
                logger.exception(f"Error in {view.name}.render_priority")
            else:
                if max_prio is None or max_prio[0] < priority:
                    max_prio = (priority, view)
        assert max_prio, "At least one view needs to have a working `render_priority`."
        return max_prio[1]

    def __iter__(self) -> typing.Iterator[str]:
        return iter(self._by_name)

    def __getitem__(self, item: str) -> Contentview:
        # Lookup is case-insensitive; names are stored lowercased.
        return self._by_name[item.lower()]

    def __len__(self):
        return len(self._by_name)
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/_view_css.py | mitmproxy/contentviews/_view_css.py | import re
import time
from mitmproxy.contentviews._api import Contentview
from mitmproxy.contentviews._api import Metadata
from mitmproxy.utils import strutils
"""
A custom CSS prettifier. Compared to other prettifiers, its main features are:
- Implemented in pure Python.
- Modifies whitespace only.
- Works with any input.
- Considerably faster than e.g. cssutils.
"""
# Regions that must not be re-formatted: string literals and comments.
CSS_SPECIAL_AREAS = (
    "'" + strutils.SINGLELINE_CONTENT + strutils.NO_ESCAPE + "'",
    '"' + strutils.SINGLELINE_CONTENT + strutils.NO_ESCAPE + '"',
    r"/\*" + strutils.MULTILINE_CONTENT + r"\*/",
    "//" + strutils.SINGLELINE_CONTENT + "$",
)
# Characters given special layout treatment below.
CSS_SPECIAL_CHARS = "{};:"


def beautify(data: str, indent: str = " "):
    """Beautify a string containing CSS code"""
    # Mask strings/comments so the regexes cannot alter their contents.
    data = strutils.escape_special_areas(
        data.strip(),
        CSS_SPECIAL_AREAS,
        CSS_SPECIAL_CHARS,
    )

    # Add newlines
    data = re.sub(r"\s*;\s*", ";\n", data)
    data = re.sub(r"\s*{\s*", " {\n", data)
    data = re.sub(r"\s*}\s*", "\n}\n\n", data)

    # Fix incorrect ":" placement
    data = re.sub(r"\s*:\s*(?=[^{]+})", ": ", data)
    # Fix no space after ","
    data = re.sub(r"\s*,\s*", ", ", data)

    # indent: strip existing leading whitespace, then indent declaration lines
    # (lines inside a rule body, i.e. followed by a "}" before any "{").
    data = re.sub("\n[ \t]+", "\n", data)
    data = re.sub("\n(?![}\n])(?=[^{]*})", "\n" + indent, data)

    data = strutils.unescape_special_areas(data)
    return data.rstrip("\n") + "\n"
class ViewCSS(Contentview):
    """Contentview wrapper around the pure-Python CSS beautifier above."""

    syntax_highlight = "css"

    def prettify(self, data: bytes, metadata: Metadata) -> str:
        return beautify(data.decode("utf8", "surrogateescape"))

    def render_priority(self, data: bytes, metadata: Metadata) -> float:
        # Only claim non-empty bodies explicitly declared as CSS.
        return float(bool(data) and metadata.content_type == "text/css")


css = ViewCSS()
if __name__ == "__main__":  # pragma: no cover
    # Ad-hoc benchmark: beautify mitmweb's vendored CSS and report the timing.
    with open("../tools/web/static/vendor.css") as f:
        data = f.read()

    t = time.time()
    x = beautify(data)
    print(f"Beautifying vendor.css took {time.time() - t:.2}s")
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/_view_image/view.py | mitmproxy/contentviews/_view_image/view.py | from .._utils import merge_repeated_keys
from .._utils import yaml_dumps
from . import image_parser
from mitmproxy.contentviews._api import Contentview
from mitmproxy.contentviews._api import Metadata
from mitmproxy.contrib import imghdr
def test_ico(h, f):
if h.startswith(b"\x00\x00\x01\x00"):
return "ico"
return None
# Extend the vendored imghdr module with ICO detection.
imghdr.tests.append(test_ico)
class ImageContentview(Contentview):
    """Shows image metadata (format, size, text chunks) instead of raw pixels."""

    syntax_highlight = "yaml"

    # Dispatch table from imghdr result to the matching metadata parser.
    _PARSERS = {
        "png": image_parser.parse_png,
        "gif": image_parser.parse_gif,
        "jpeg": image_parser.parse_jpeg,
        "ico": image_parser.parse_ico,
    }

    def prettify(self, data: bytes, metadata: Metadata) -> str:
        image_type = imghdr.what("", h=data)
        parser = self._PARSERS.get(image_type)
        image_metadata = parser(data) if parser else []
        view_name = f"{image_type.upper()} Image" if image_type else "Unknown Image"
        return f"# {view_name}\n" + yaml_dumps(merge_repeated_keys(image_metadata))

    def render_priority(self, data: bytes, metadata: Metadata) -> float:
        # Any image/* content type except XML-based ones (e.g. image/svg+xml).
        ct = metadata.content_type
        return float(bool(ct and ct.startswith("image/") and not ct.endswith("+xml")))


image = ImageContentview()
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/_view_image/__init__.py | mitmproxy/contentviews/_view_image/__init__.py | from .view import image
__all__ = ["image"]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/contentviews/_view_image/image_parser.py | mitmproxy/contentviews/_view_image/image_parser.py | import io
from kaitaistruct import KaitaiStream
from mitmproxy.contrib.kaitaistruct import gif
from mitmproxy.contrib.kaitaistruct import ico
from mitmproxy.contrib.kaitaistruct import jpeg
from mitmproxy.contrib.kaitaistruct import png
type ImageMetadata = list[tuple[str, str]]
def parse_png(data: bytes) -> ImageMetadata:
    """Extract display metadata (size, gamma, aspect, text chunks) from a PNG."""
    img = png.Png(KaitaiStream(io.BytesIO(data)))
    parts = [
        ("Format", "Portable network graphics"),
        ("Size", f"{img.ihdr.width} x {img.ihdr.height} px"),
    ]
    for chunk in img.chunks:
        if chunk.type == "gAMA":
            # gAMA stores gamma * 100000 as an integer.
            parts.append(("gamma", str(chunk.body.gamma_int / 100000)))
        elif chunk.type == "pHYs":
            aspectx = chunk.body.pixels_per_unit_x
            aspecty = chunk.body.pixels_per_unit_y
            parts.append(("aspect", f"{aspectx} x {aspecty}"))
        elif chunk.type == "tEXt":
            parts.append((chunk.body.keyword, chunk.body.text))
        elif chunk.type == "iTXt":
            parts.append((chunk.body.keyword, chunk.body.text))
        elif chunk.type == "zTXt":
            # zTXt: compressed text chunk; datastream decoded as latin-1.
            parts.append(
                (chunk.body.keyword, chunk.body.text_datastream.decode("iso8859-1"))
            )
    return parts
def parse_gif(data: bytes) -> ImageMetadata:
    """Extract basic metadata and comment blocks from a GIF image."""
    img = gif.Gif(KaitaiStream(io.BytesIO(data)))
    screen = img.logical_screen_descriptor
    parts = [
        ("Format", "Compuserve GIF"),
        ("Version", f"GIF{img.hdr.version}"),
        ("Size", f"{screen.screen_width} x {screen.screen_height} px"),
        ("background", str(screen.bg_color_index)),
    ]
    # Comment extension blocks may each contain multiple entries.
    comment_blocks = (
        block
        for block in img.blocks
        if block.block_type.name == "extension"
        and block.body.label._name_ == "comment"
    )
    for block in comment_blocks:
        for entry in block.body.body.entries:
            if entry.bytes != b"":
                parts.append(("comment", str(entry.bytes)))
    return parts
def parse_jpeg(data: bytes) -> ImageMetadata:
    """Extract display metadata (size, JFIF info, comments, EXIF) from a JPEG."""
    img = jpeg.Jpeg(KaitaiStream(io.BytesIO(data)))
    parts = [("Format", "JPEG (ISO 10918)")]
    for segment in img.segments:
        if segment.marker._name_ == "sof0":
            # Start-of-frame segment carries the pixel dimensions.
            parts.append(
                ("Size", f"{segment.data.image_width} x {segment.data.image_height} px")
            )
        if segment.marker._name_ == "app0":
            # APP0 = JFIF header (version, density).
            parts.append(
                (
                    "jfif_version",
                    f"({segment.data.version_major}, {segment.data.version_minor})",
                )
            )
            parts.append(
                (
                    "jfif_density",
                    f"({segment.data.density_x}, {segment.data.density_y})",
                )
            )
            parts.append(("jfif_unit", str(segment.data.density_units._value_)))
        if segment.marker._name_ == "com":
            parts.append(("comment", segment.data.decode("utf8", "backslashreplace")))
        if segment.marker._name_ == "app1":
            # APP1: walk the first EXIF IFD's string fields, if present.
            if hasattr(segment.data, "body"):
                for field in segment.data.body.data.body.ifd0.fields:
                    if field.data is not None:
                        parts.append(
                            (field.tag._name_, field.data.decode("UTF-8").strip("\x00"))
                        )
    return parts
def parse_ico(data: bytes) -> ImageMetadata:
    """Extract per-image metadata from an ICO container."""
    img = ico.Ico(KaitaiStream(io.BytesIO(data)))
    parts = [
        ("Format", "ICO"),
        ("Number of images", str(img.num_images)),
    ]
    for i, image in enumerate(img.images):
        parts.append(
            (
                f"Image {i + 1}",
                # ICO stores 256 as 0 in the width/height bytes; the "{: >18}"
                # paddings align continuation lines in the rendered output.
                "Size: {} x {}\n{: >18}Bits per pixel: {}\n{: >18}PNG: {}".format(
                    256 if not image.width else image.width,
                    256 if not image.height else image.height,
                    "",
                    image.bpp,
                    "",
                    image.is_png,
                ),
            )
        )
    return parts
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/script/concurrent.py | mitmproxy/script/concurrent.py | """
This module provides a @concurrent decorator primitive to
offload computations from mitmproxy's main master thread.
"""
import asyncio
import inspect
from mitmproxy import hooks
def concurrent(fn):
    """Decorator: run the hook `fn` on a thread-pool executor so it does not
    block mitmproxy's main event loop.

    Works for both sync and async hook functions; async hooks get a private
    event loop in the worker thread. Not supported for the "load"/"configure"
    hooks.
    """
    # Local import keeps the module's dependency surface unchanged.
    from functools import wraps

    if fn.__name__ not in set(hooks.all_hooks.keys()) - {"load", "configure"}:
        raise NotImplementedError(
            "Concurrent decorator not supported for '%s' method." % fn.__name__
        )

    # wraps() preserves the hook's name/docstring/signature so that addon
    # introspection still sees the original function's metadata.
    @wraps(fn)
    async def _concurrent(*args):
        def run():
            if inspect.iscoroutinefunction(fn):
                # We are on a worker thread without a running loop, so run the
                # async hook to completion in a private event loop.
                loop = asyncio.new_event_loop()
                try:
                    loop.run_until_complete(fn(*args))
                finally:
                    loop.close()
            else:
                fn(*args)

        await asyncio.get_running_loop().run_in_executor(None, run)

    return _concurrent
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/script/__init__.py | mitmproxy/script/__init__.py | from .concurrent import concurrent
__all__ = [
"concurrent",
]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/io/compat.py | mitmproxy/io/compat.py | """
This module handles the import of mitmproxy flows generated by old versions.
The flow file version is decoupled from the mitmproxy release cycle (since
v3.0.0dev) and versioning. Every change or migration gets a new flow file
version number, this prevents issues with developer builds and snapshots.
"""
import copy
import uuid
from typing import Any
from mitmproxy import version
from mitmproxy.utils import strutils
def convert_011_012(data):
    """Bump flow format 0.11 -> 0.12 (no structural changes)."""
    data[b"version"] = (0, 12)
    return data


def convert_012_013(data):
    """Bump flow format 0.12 -> 0.13 (no structural changes)."""
    data[b"version"] = (0, 13)
    return data


def convert_013_014(data):
    """Migrate 0.13 -> 0.14: rename request/response fields, stringify HTTP versions."""
    req, resp = data[b"request"], data[b"response"]
    req[b"first_line_format"] = req.pop(b"form_in")
    for msg in (req, resp):
        version_tuple = msg.pop(b"httpversion")
        msg[b"http_version"] = (
            b"HTTP/" + ".".join(str(part) for part in version_tuple).encode()
        )
    resp[b"status_code"] = resp.pop(b"code")
    resp[b"body"] = resp.pop(b"content")
    data[b"server_conn"].pop(b"state")
    data[b"server_conn"][b"via"] = None
    data[b"version"] = (0, 14)
    return data


def convert_014_015(data):
    """Bump flow format 0.14 -> 0.15 (no structural changes)."""
    data[b"version"] = (0, 15)
    return data


def convert_015_016(data):
    """Migrate 0.15 -> 0.16: rename body/msg fields, drop form_out."""
    for part in (b"request", b"response"):
        if b"body" in data[part]:
            data[part][b"content"] = data[part].pop(b"body")
    if b"msg" in data[b"response"]:
        data[b"response"][b"reason"] = data[b"response"].pop(b"msg")
    data[b"request"].pop(b"form_out", None)
    data[b"version"] = (0, 16)
    return data


def convert_016_017(data):
    """Migrate 0.16 -> 0.17: add an (empty) peer_address field."""
    data[b"server_conn"][b"peer_address"] = None
    data[b"version"] = (0, 17)
    return data
def convert_017_018(data):
    """Migrate 0.17 -> 0.18: rename peer_address, add marked flag."""
    # convert_unicode must run for every dual py2/py3 release and the first py3-only one.
    data = convert_unicode(data)
    data["server_conn"]["ip_address"] = data["server_conn"].pop("peer_address", None)
    data["marked"] = False
    data["version"] = (0, 18)
    return data


def convert_018_019(data):
    """Migrate 0.18 -> 0.19: drop sticky fields, add TLS/ALPN placeholders."""
    # convert_unicode must run for every dual py2/py3 release and the first py3-only one.
    data = convert_unicode(data)
    for obsolete in ("stickyauth", "stickycookie"):
        data["request"].pop(obsolete, None)
    for field in ("sni", "alpn_proto_negotiated", "cipher_name", "tls_version"):
        data["client_conn"][field] = None
    data["server_conn"]["alpn_proto_negotiated"] = None
    via = data["server_conn"]["via"]
    if via:
        via["alpn_proto_negotiated"] = None
    data["mode"] = "regular"
    data["metadata"] = {}
    data["version"] = (0, 19)
    return data


def convert_019_100(data):
    """Bump flow format 0.19 -> 1.0.0 (no structural changes)."""
    # convert_unicode must run for every dual py2/py3 release and the first py3-only one.
    data = convert_unicode(data)
    data["version"] = (1, 0, 0)
    return data
def convert_100_200(data):
    """Migrate 1.0 -> 2.0: flatten nested {"address": ...} wrappers."""
    data["version"] = (2, 0, 0)
    client, server = data["client_conn"], data["server_conn"]
    client["address"] = client["address"]["address"]
    server["address"] = server["address"]["address"]
    server["source_address"] = server["source_address"]["address"]
    if server["ip_address"]:
        server["ip_address"] = server["ip_address"]["address"]
    via = server["via"]
    if via:
        via["address"] = via["address"]["address"]
        via["source_address"] = via["source_address"]["address"]
        if via["ip_address"]:
            via["ip_address"] = via["ip_address"]["address"]
    return data


def convert_200_300(data):
    """Migrate 2.0 -> 3.0: add mitmcert / tls_version placeholders."""
    data["version"] = (3, 0, 0)
    data["client_conn"]["mitmcert"] = None
    data["server_conn"]["tls_version"] = None
    via = data["server_conn"]["via"]
    if via:
        via["tls_version"] = None
    return data


def convert_300_4(data):
    """Empty migration that switches to the integer versioning scheme."""
    data["version"] = 4
    return data
# Maps (timestamp_start, *address) -> generated connection id, so that flows
# sharing the same connection receive the same id during the 4 -> 5 migration.
client_connections: dict[tuple[str, ...], str] = {}
server_connections: dict[tuple[str, ...], str] = {}
def convert_4_5(data):
    """Migrate 4 -> 5: assign stable ids to client/server connections.

    Connections are keyed on (timestamp_start, *address) so that all flows
    sharing one connection receive the same generated uuid.
    """
    data["version"] = 5

    def connection_id(cache, conn, addr_field):
        key = (conn["timestamp_start"], *conn[addr_field])
        return cache.setdefault(key, str(uuid.uuid4()))

    data["client_conn"]["id"] = connection_id(
        client_connections, data["client_conn"], "address"
    )
    data["server_conn"]["id"] = connection_id(
        server_connections, data["server_conn"], "source_address"
    )
    via = data["server_conn"]["via"]
    if via:
        via["id"] = connection_id(server_connections, via, "source_address")
    return data
def convert_5_6(data):
    """Migrate 5 -> 6: rename ssl_* connection fields to tls_*."""
    data["version"] = 6
    renames = [
        ("ssl_established", "tls_established"),
        ("timestamp_ssl_setup", "timestamp_tls_setup"),
    ]
    conns = [data["client_conn"], data["server_conn"]]
    if data["server_conn"]["via"]:
        conns.append(data["server_conn"]["via"])
    for conn in conns:
        for old, new in renames:
            conn[new] = conn.pop(old)
    return data


def convert_6_7(data):
    """Migrate 6 -> 7: add an (empty) tls_extensions field."""
    data["version"] = 7
    data["client_conn"]["tls_extensions"] = None
    return data


def convert_7_8(data):
    """Migrate 7 -> 8: add (empty) trailer fields to request and response."""
    data["version"] = 8
    for part in ("request", "response"):
        if data.get(part) is not None:
            data[part]["trailers"] = None
    return data
def convert_8_9(data):
    """Migrate 8 -> 9: drop first_line_format, add authority, fold is_replay."""
    data["version"] = 9
    request_replayed = False
    if "request" in data:
        req = data["request"]
        req.pop("first_line_format")
        req["authority"] = b""
        request_replayed = req.pop("is_replay", False)
    response_replayed = False
    if data.get("response") is not None:
        response_replayed = data["response"].pop("is_replay", False)
    if request_replayed:  # pragma: no cover
        data["is_replay"] = "request"
    elif response_replayed:  # pragma: no cover
        data["is_replay"] = "response"
    else:
        data["is_replay"] = None
    return data
def convert_9_10(data):
    """Migrate 9 -> 10: introduce connection state/error and list-valued fields."""
    data["version"] = 10

    def upgrade_common(conn):
        conn["state"] = 0
        conn["error"] = None
        conn["tls"] = conn["tls_established"]
        alpn = conn["alpn_proto_negotiated"]
        cipher = conn["cipher_name"]
        conn["alpn_offers"] = [alpn] if alpn else None
        conn["cipher_list"] = [cipher] if cipher else None

    def upgrade_client(conn):
        conn["sockname"] = ("", 0)
        cert = conn.pop("clientcert", None)
        conn["certificate_list"] = [cert] if cert else []
        upgrade_common(conn)

    def upgrade_server(conn):
        cert = conn.pop("cert", None)
        conn["certificate_list"] = [cert] if cert else []
        # cipher_name is nulled *before* the common pass, so servers always
        # end up with cipher_list = None (matches the original ordering).
        conn["cipher_name"] = None
        conn["via2"] = None
        upgrade_common(conn)

    upgrade_client(data["client_conn"])
    upgrade_server(data["server_conn"])
    if data["server_conn"]["via"]:
        upgrade_server(data["server_conn"]["via"])
    return data
def convert_10_11(data):
    """Migrate 10 -> 11: normalize sni to str, rename alpn, default lists."""
    data["version"] = 11

    def upgrade(conn):
        conn["sni"] = strutils.always_str(conn["sni"], "ascii", "backslashreplace")
        conn["alpn"] = conn.pop("alpn_proto_negotiated")
        conn["alpn_offers"] = conn["alpn_offers"] or []
        conn["cipher_list"] = conn["cipher_list"] or []

    upgrade(data["client_conn"])
    upgrade(data["server_conn"])
    via = data["server_conn"]["via"]
    if via:
        upgrade(via)
    return data
_websocket_handshakes = {}
def convert_11_12(data):
    """Migrate 11 -> 12: fold separate WebSocket flows into their HTTP handshake flow.

    Before format 12, the HTTP handshake and the WebSocket traffic were stored
    as two distinct flows; format 12 nests the WebSocket data inside the HTTP
    flow. Handshake flows are buffered in _websocket_handshakes until the
    matching WebSocket flow is read.
    """
    data["version"] = 12
    if "websocket" in data["metadata"]:
        # HTTP handshake flow: stash a copy so the WebSocket flow can claim it.
        _websocket_handshakes[data["id"]] = copy.deepcopy(data)
    if "websocket_handshake" in data["metadata"]:
        # WebSocket flow: merge it into its buffered handshake flow.
        ws_flow = data
        try:
            data = _websocket_handshakes.pop(data["metadata"]["websocket_handshake"])
        except KeyError:
            # The handshake flow is missing, which should never really happen. We make up a dummy.
            data = {
                "client_conn": data["client_conn"],
                "error": data["error"],
                "id": data["id"],
                "intercepted": data["intercepted"],
                "is_replay": data["is_replay"],
                "marked": data["marked"],
                "metadata": {},
                "mode": "transparent",
                "request": {
                    "authority": b"",
                    "content": None,
                    "headers": [],
                    "host": b"unknown",
                    "http_version": b"HTTP/1.1",
                    "method": b"GET",
                    "path": b"/",
                    "port": 80,
                    "scheme": b"http",
                    "timestamp_end": 0,
                    "timestamp_start": 0,
                    "trailers": None,
                },
                "response": None,
                "server_conn": data["server_conn"],
                "type": "http",
                "version": 12,
            }
        data["metadata"]["duplicated"] = (
            "This WebSocket flow has been migrated from an old file format version "
            "and may appear duplicated."
        )
        data["websocket"] = {
            "messages": ws_flow["messages"],
            "closed_by_client": ws_flow["close_sender"] == "client",
            "close_code": ws_flow["close_code"],
            "close_reason": ws_flow["close_reason"],
            "timestamp_end": data.get("server_conn", {}).get("timestamp_end", None),
        }
    else:
        data["websocket"] = None
    return data
def convert_12_13(data):
    """Migrate 12 -> 13: marked becomes a marker string instead of a bool."""
    data["version"] = 13
    data["marked"] = ":default:" if data["marked"] else ""
    return data


def convert_13_14(data):
    """Migrate 13 -> 14: add comment field, repair missing response timestamps."""
    data["version"] = 14
    data["comment"] = ""
    # bugfix for https://github.com/mitmproxy/mitmproxy/issues/4576
    response = data.get("response", None)
    if response and response["timestamp_start"] is None:
        request_end = data["request"]["timestamp_end"]
        response["timestamp_start"] = request_end
        response["timestamp_end"] = request_end + 1
    return data


def convert_14_15(data):
    """Migrate 14 -> 15: append an "injected" flag to each websocket message."""
    data["version"] = 15
    websocket = data.get("websocket", None)
    if websocket:
        websocket["messages"] = [message + [False] for message in websocket["messages"]]
    return data


def convert_15_16(data):
    """Migrate 15 -> 16: derive timestamp_created from request or client conn."""
    data["version"] = 16
    source = data.get("request", data["client_conn"])
    data["timestamp_created"] = source["timestamp_start"]
    return data
def convert_16_17(data):
    """Migrate 16 -> 17: drop the obsolete top-level mode field."""
    data.pop("mode", None)
    data["version"] = 17
    return data


def convert_17_18(data):
    """Migrate 17 -> 18: record the proxy mode on the client connection."""
    data["client_conn"]["proxy_mode"] = "regular"
    data["version"] = 18
    return data
def convert_18_19(data):
    """Migrate 18 -> 19: rename connection address fields to peername/sockname."""
    data["version"] = 19
    data["client_conn"]["peername"] = data["client_conn"].pop("address", None)
    if data["client_conn"].get("timestamp_start") is None:
        data["client_conn"]["timestamp_start"] = 0.0
    data["client_conn"].pop("tls_extensions")
    data["server_conn"]["peername"] = data["server_conn"].pop("ip_address", None)
    data["server_conn"]["sockname"] = data["server_conn"].pop("source_address", None)
    data["server_conn"]["via"] = data["server_conn"].pop("via2", None)
    for conn in ["client_conn", "server_conn"]:
        data[conn].pop("tls_established")
        data[conn]["cipher"] = data[conn].pop("cipher_name", None)
        data[conn].setdefault("transport_protocol", "tcp")
        # Older files may store the host part of an address as bytes; decode it.
        for name in ["peername", "sockname", "address"]:
            if data[conn].get(name) and isinstance(data[conn][name][0], bytes):
                data[conn][name][0] = data[conn][name][0].decode(
                    errors="backslashreplace"
                )
    # A literal True sni placeholder is resolved to the server address host.
    if data["server_conn"]["sni"] is True:
        data["server_conn"]["sni"] = data["server_conn"]["address"][0]
    return data
def convert_19_20(data):
    """Migrate 19 -> 20: drop persisted connection state."""
    data["version"] = 20
    for conn in ("client_conn", "server_conn"):
        data[conn].pop("state", None)
    return data


def convert_20_21(data):
    """Migrate 20 -> 21: rename the TLS version "QUIC" to "QUICv1"."""
    data["version"] = 21
    for conn in ("client_conn", "server_conn"):
        if data[conn]["tls_version"] == "QUIC":
            data[conn]["tls_version"] = "QUICv1"
    return data
def _convert_dict_keys(o: Any) -> Any:
    """Recursively convert all dict keys from bytes to str."""
    if not isinstance(o, dict):
        return o
    return {strutils.always_str(key): _convert_dict_keys(value) for key, value in o.items()}


def _convert_dict_vals(o: dict, values_to_convert: dict) -> dict:
    """Convert selected (possibly nested) dict values from bytes to str.

    values_to_convert mirrors the structure of o: a value of True marks a
    leaf to be converted, a nested dict recurses one level deeper.
    """
    for key, spec in values_to_convert.items():
        if not o or key not in o:
            continue  # pragma: no cover
        if spec is True:
            o[key] = strutils.always_str(o[key])
        else:
            _convert_dict_vals(o[key], spec)
    return o


def convert_unicode(data: dict) -> dict:
    """
    This method converts between Python 3 and Python 2 dumpfiles.
    """
    return _convert_dict_vals(
        _convert_dict_keys(data),
        {
            "type": True,
            "id": True,
            "request": {"first_line_format": True},
            "error": {"msg": True},
        },
    )
# Maps a flow format version to the function that migrates it one step forward.
# Tuple keys are the legacy mitmproxy (major, minor) versions; int keys are the
# decoupled flow format versions introduced with version 4.
converters = {
    (0, 11): convert_011_012,
    (0, 12): convert_012_013,
    (0, 13): convert_013_014,
    (0, 14): convert_014_015,
    (0, 15): convert_015_016,
    (0, 16): convert_016_017,
    (0, 17): convert_017_018,
    (0, 18): convert_018_019,
    (0, 19): convert_019_100,
    (1, 0): convert_100_200,
    (2, 0): convert_200_300,
    (3, 0): convert_300_4,
    4: convert_4_5,
    5: convert_5_6,
    6: convert_6_7,
    7: convert_7_8,
    8: convert_8_9,
    9: convert_9_10,
    10: convert_10_11,
    11: convert_11_12,
    12: convert_12_13,
    13: convert_13_14,
    14: convert_14_15,
    15: convert_15_16,
    16: convert_16_17,
    17: convert_17_18,
    18: convert_18_19,
    19: convert_19_20,
    20: convert_20_21,
}
def migrate_flow(flow_data: dict[bytes | str, Any]) -> dict[bytes | str, Any]:
    """Upgrade serialized flow data step-by-step to the current flow format.

    Applies the matching converter repeatedly until the data reaches
    version.FLOW_FORMAT_VERSION. Raises ValueError for versions that cannot
    be migrated (e.g. files written by a newer mitmproxy).
    """
    while True:
        # Old files store the version under a bytes key, newer ones under str.
        flow_version = flow_data.get(b"version", flow_data.get("version"))
        # Historically, we used the mitmproxy minor version tuple as the flow format version.
        if not isinstance(flow_version, int):
            flow_version = tuple(flow_version)[:2]  # type: ignore
        if flow_version == version.FLOW_FORMAT_VERSION:
            break
        elif flow_version in converters:
            flow_data = converters[flow_version](flow_data)
        else:
            # An int version above ours means the file came from a newer mitmproxy.
            should_upgrade = (
                isinstance(flow_version, int)
                and flow_version > version.FLOW_FORMAT_VERSION
            )
            raise ValueError(
                "{} cannot read files with flow format version {}{}.".format(
                    version.MITMPROXY,
                    flow_version,
                    ", please update mitmproxy" if should_upgrade else "",
                )
            )
    return flow_data
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/io/tnetstring.py | mitmproxy/io/tnetstring.py | """
tnetstring: data serialization using typed netstrings
======================================================
This is a custom Python 3 implementation of tnetstrings.
Compared to other implementations, the main difference
is that this implementation supports a custom unicode datatype.
An ordinary tnetstring is a blob of data prefixed with its length and postfixed
with its type. Here are some examples:
>>> tnetstring.dumps("hello world")
11:hello world,
>>> tnetstring.dumps(12345)
5:12345#
>>> tnetstring.dumps([12345, True, 0])
19:5:12345#4:true!1:0#]
This module gives you the following functions:
:dump: dump an object as a tnetstring to a file
:dumps: dump an object as a tnetstring to a string
:load: load a tnetstring-encoded object from a file
:loads: load a tnetstring-encoded object from a string
Note that since parsing a tnetstring requires reading all the data into memory
at once, there's no efficiency gain from using the file-based versions of these
functions. They're only here so you can use load() to read precisely one
item from a file or socket without consuming any extra data.
The tnetstrings specification explicitly states that strings are binary blobs
and forbids the use of unicode at the protocol level.
**This implementation decodes dictionary keys as surrogate-escaped ASCII**,
all other strings are returned as plain bytes.
:Copyright: (c) 2012-2013 by Ryan Kelly <ryan@rfk.id.au>.
:Copyright: (c) 2014 by Carlo Pires <carlopires@gmail.com>.
:Copyright: (c) 2016 by Maximilian Hils <tnetstring3@maximilianhils.com>.
:License: MIT
"""
import collections
from typing import BinaryIO
from typing import Union
TSerializable = Union[None, str, bool, int, float, bytes, list, tuple, dict]
def dumps(value: TSerializable) -> bytes:
    """
    Serialize a python object into its tnetstring representation.
    """
    # Fragments are accumulated in reverse order in a deque and joined once;
    # this is measurably faster than building intermediate strings.
    fragments: collections.deque = collections.deque()
    _rdumpq(fragments, 0, value)
    return b"".join(fragments)


def dump(value: TSerializable, file_handle: BinaryIO) -> None:
    """
    Serialize a python object as a tnetstring and write it to the given file.
    """
    file_handle.write(dumps(value))
def _rdumpq(q: collections.deque, size: int, value: TSerializable) -> int:
    """
    Dump value as a tnetstring, to a deque instance, last chunks first.

    This function generates the tnetstring representation of the given value,
    pushing chunks of the output onto the given deque instance. It pushes
    the last chunk first, then recursively generates more chunks.

    When passed in the current size of the string in the queue, it will return
    the new size of the string in the queue.

    Operating last-chunk-first makes it easy to calculate the size written
    for recursive structures without having to build their representation as
    a string. This is measurably faster than generating the intermediate
    strings, especially on deeply nested structures.

    Type tags emitted: ~ null, ! bool, # int, ^ float, , bytes,
    ; str (custom unicode extension), ] list/tuple, } dict.
    """
    write = q.appendleft
    if value is None:
        write(b"0:~")
        return size + 3
    # Booleans are matched by identity *before* the int branch (True == 1).
    elif value is True:
        write(b"4:true!")
        return size + 7
    elif value is False:
        write(b"5:false!")
        return size + 8
    elif isinstance(value, int):
        data = str(value).encode()
        ldata = len(data)
        span = str(ldata).encode()
        write(b"%s:%s#" % (span, data))
        return size + 2 + len(span) + ldata
    elif isinstance(value, float):
        # Use repr() for float rather than str().
        # It round-trips more accurately.
        # Probably unnecessary in later python versions that
        # use David Gay's ftoa routines.
        data = repr(value).encode()
        ldata = len(data)
        span = str(ldata).encode()
        write(b"%s:%s^" % (span, data))
        return size + 2 + len(span) + ldata
    elif isinstance(value, bytes):
        # Chunks are pushed right-to-left: tag, payload, colon, length.
        data = value
        ldata = len(data)
        span = str(ldata).encode()
        write(b",")
        write(data)
        write(b":")
        write(span)
        return size + 2 + len(span) + ldata
    elif isinstance(value, str):
        data = value.encode("utf8")
        ldata = len(data)
        span = str(ldata).encode()
        write(b";")
        write(data)
        write(b":")
        write(span)
        return size + 2 + len(span) + ldata
    elif isinstance(value, (list, tuple)):
        write(b"]")
        init_size = size = size + 1
        # Items are dumped in reverse so they appear in order in the output.
        for item in reversed(value):
            size = _rdumpq(q, size, item)
        span = str(size - init_size).encode()
        write(b":")
        write(span)
        return size + 1 + len(span)
    elif isinstance(value, dict):
        write(b"}")
        init_size = size = size + 1
        # Each pair is dumped value-first so the key precedes it in the output.
        for k, v in value.items():
            size = _rdumpq(q, size, v)
            size = _rdumpq(q, size, k)
        span = str(size - init_size).encode()
        write(b":")
        write(span)
        return size + 1 + len(span)
    else:
        raise ValueError(f"unserializable object: {value} ({type(value)})")
def loads(string: bytes) -> TSerializable:
    """
    Parse a tnetstring into a python object.
    """
    return pop(memoryview(string))[0]


def load(file_handle: BinaryIO) -> TSerializable:
    """load(file) -> object

    Read a single tnetstring from a file and parse it into a python object.
    The file must support the read() method, and this function promises not
    to read more data than necessary.
    """
    # Read the length prefix one byte at a time.
    # Note that the netstring spec explicitly forbids padding zeros.
    char = file_handle.read(1)
    if char == b"":  # we want to detect this special case.
        raise ValueError("not a tnetstring: empty file")
    prefix = b""
    while char.isdigit():
        prefix += char
        if len(prefix) > 12:
            raise ValueError("not a tnetstring: absurdly large length prefix")
        char = file_handle.read(1)
    if char != b":":
        raise ValueError("not a tnetstring: missing or invalid length prefix")
    payload = memoryview(file_handle.read(int(prefix)))
    type_tag = file_handle.read(1)[0]
    return parse(type_tag, payload)
def parse(data_type: int, data: memoryview) -> TSerializable:
    """Parse a tnetstring payload according to its one-byte type tag."""
    tag = bytes((data_type,))
    if tag == b",":
        # Plain byte string: the payload is returned as-is.
        return data.tobytes()
    if tag == b";":
        # Custom extension: unicode string, always utf8-encoded.
        return str(data, "utf8")
    if tag == b"#":
        try:
            return int(data)
        except ValueError:
            raise ValueError(f"not a tnetstring: invalid integer literal: {data!r}")
    if tag == b"^":
        try:
            return float(data)
        except ValueError:
            raise ValueError(f"not a tnetstring: invalid float literal: {data!r}")
    if tag == b"!":
        if data == b"true":
            return True
        if data == b"false":
            return False
        raise ValueError(f"not a tnetstring: invalid boolean literal: {data!r}")
    if tag == b"~":
        if data:
            raise ValueError(f"not a tnetstring: invalid null literal: {data!r}")
        return None
    if tag == b"]":
        items = []
        remaining = data
        while remaining:
            element, remaining = pop(remaining)
            items.append(element)  # type: ignore
        return items
    if tag == b"}":
        mapping = {}
        remaining = data
        while remaining:
            key, remaining = pop(remaining)
            value, remaining = pop(remaining)
            mapping[key] = value  # type: ignore
        return mapping
    raise ValueError(f"unknown type tag: {data_type}")
def split(data: memoryview, sep: bytes) -> tuple[int, memoryview]:
    """Split b"<len><sep><rest>" into (int(len), rest) without copying."""
    sep_byte = ord(sep)
    pos = 0
    try:
        while data[pos] != sep_byte:
            pos += 1
        # data[:pos] holds the ASCII length digits; skip the separator itself.
        return int(data[:pos]), data[pos + 1 :]
    except (IndexError, ValueError):
        raise ValueError(
            f"not a tnetstring: missing or invalid length prefix: {data.tobytes()!r}"
        )
def pop(data: memoryview) -> tuple[TSerializable, memoryview]:
    """
    Parse one tnetstring off the front of *data*.

    Returns a tuple of the parsed object and a view over any unparsed
    trailing bytes.
    """
    # Split off the length prefix, then carve payload / type tag / remainder.
    length, rest = split(data, b":")
    try:
        payload = rest[:length]
        type_tag = rest[length]
        remainder = rest[length + 1 :]
    except IndexError:
        # This fires if len(data) < dlen, meaning we don't need
        # to further validate that data is the right length.
        raise ValueError(f"not a tnetstring: invalid length prefix: {length}")
    return parse(type_tag, payload), remainder
__all__ = ["dump", "dumps", "load", "loads", "pop"]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/io/__init__.py | mitmproxy/io/__init__.py | from .io import FilteredFlowWriter
from .io import FlowReader
from .io import FlowWriter
from .io import read_flows_from_paths
__all__ = ["FlowWriter", "FlowReader", "FilteredFlowWriter", "read_flows_from_paths"]
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/io/har.py | mitmproxy/io/har.py | """Reads HAR files into flow objects"""
import base64
import logging
import time
from datetime import datetime
from mitmproxy import connection
from mitmproxy import exceptions
from mitmproxy import http
from mitmproxy.net.http.headers import infer_content_encoding
logger = logging.getLogger(__name__)
def fix_headers(
    request_headers: list[dict[str, str]] | list[tuple[str, str]],
) -> http.Headers:
    """Converts provided headers into (b"header-name", b"header-value") tuples"""
    converted: list[tuple[bytes, bytes]] = []
    for entry in request_headers:
        if isinstance(entry, dict):
            # {"name": ..., "value": ...} notation: Brave, Chrome, Edge, Firefox,
            # Charles, Fiddler, Insomnia, Safari.
            name = entry["name"]
            value = entry["value"]
        else:
            # [name, value] notation: Slack.
            try:
                name = entry[0]
                value = entry[1]
            except IndexError as e:
                raise exceptions.OptionsError(str(e)) from e
        converted.append((name.encode(), value.encode()))
    return http.Headers(converted)
def request_to_flow(request_json: dict) -> http.HTTPFlow:
    """
    Create an HTTPFlow object from a single entry of a HAR file.

    Timestamps are derived from startedDateTime and the entry's total time
    (milliseconds); the client connection gets dummy 127.0.0.1 addresses since
    HAR files don't record them.
    """
    timestamp_start = datetime.fromisoformat(
        request_json["startedDateTime"].replace("Z", "+00:00")
    ).timestamp()
    timestamp_end = timestamp_start + request_json["time"] / 1000.0
    request_method = request_json["request"]["method"]
    request_url = request_json["request"]["url"]
    server_address = request_json.get("serverIPAddress", None)
    request_headers = fix_headers(request_json["request"]["headers"])
    http_version_req = request_json["request"]["httpVersion"]
    http_version_resp = request_json["response"]["httpVersion"]
    request_content = ""
    # The server port is inferred from the URL scheme.
    if request_url.startswith("http://"):
        port = 80
    else:
        port = 443
    client_conn = connection.Client(
        peername=("127.0.0.1", 0),
        sockname=("127.0.0.1", 0),
        # TODO Get time info from HAR File
        timestamp_start=time.time(),
    )
    if server_address:
        server_conn = connection.Server(address=(server_address, port))
    else:
        server_conn = connection.Server(address=None)
    new_flow = http.HTTPFlow(client_conn, server_conn)
    if "postData" in request_json["request"]:
        request_content = request_json["request"]["postData"]["text"]
    new_flow.request = http.Request.make(
        request_method, request_url, request_content, request_headers
    )
    response_code = request_json["response"]["status"]
    # In Firefox HAR files images don't include response bodies
    response_content = request_json["response"]["content"].get("text", "")
    content_encoding = request_json["response"]["content"].get("encoding", None)
    response_headers = fix_headers(request_json["response"]["headers"])
    if content_encoding == "base64":
        response_content = base64.b64decode(response_content)
    elif isinstance(response_content, str):
        # Convert text to bytes, as in `Response.set_text`
        try:
            response_content = http.encoding.encode(
                response_content,
                (
                    content_encoding
                    or infer_content_encoding(response_headers.get("content-type", ""))
                ),
            )
        except ValueError:
            # Fallback to UTF-8
            response_content = response_content.encode(
                "utf-8", errors="surrogateescape"
            )
    # Then encode the content, as in `Response.set_content`
    response_content = http.encoding.encode(
        response_content, response_headers.get("content-encoding") or "identity"
    )
    new_flow.response = http.Response(
        b"HTTP/1.1",
        response_code,
        http.status_codes.RESPONSES.get(response_code, "").encode(),
        response_headers,
        response_content,
        None,
        timestamp_start,
        timestamp_end,
    )
    # Update timestamps
    new_flow.request.timestamp_start = timestamp_start
    new_flow.request.timestamp_end = timestamp_end
    new_flow.client_conn.timestamp_start = timestamp_start
    new_flow.client_conn.timestamp_end = timestamp_end
    # Update HTTP version: HAR writers spell HTTP/2 either "http/2.0" or "HTTP/2";
    # anything unrecognized falls back to HTTP/1.1.
    match http_version_req:
        case "http/2.0":
            new_flow.request.http_version = "HTTP/2"
        case "HTTP/2":
            new_flow.request.http_version = "HTTP/2"
        case "HTTP/3":
            new_flow.request.http_version = "HTTP/3"
        case _:
            new_flow.request.http_version = "HTTP/1.1"
    match http_version_resp:
        case "http/2.0":
            new_flow.response.http_version = "HTTP/2"
        case "HTTP/2":
            new_flow.response.http_version = "HTTP/2"
        case "HTTP/3":
            new_flow.response.http_version = "HTTP/3"
        case _:
            new_flow.response.http_version = "HTTP/1.1"
    # Remove compression because that may generate different sizes between versions
    new_flow.request.decode()
    new_flow.response.decode()
    return new_flow
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
mitmproxy/mitmproxy | https://github.com/mitmproxy/mitmproxy/blob/e6aa924bb411a9687b91920b8d094af37bc02b90/mitmproxy/io/io.py | mitmproxy/io/io.py | import json
import os
from collections.abc import Iterable
from io import BufferedReader
from typing import Any
from typing import BinaryIO
from typing import cast
from typing import Union
from mitmproxy import exceptions
from mitmproxy import flow
from mitmproxy import flowfilter
from mitmproxy.io import compat
from mitmproxy.io import tnetstring
from mitmproxy.io.har import request_to_flow
class FlowWriter:
    """Serializes flows to a binary file object as tnetstrings."""

    def __init__(self, fo):
        self.fo = fo

    def add(self, f: flow.Flow) -> None:
        """Append a single flow's serialized state to the output."""
        state = f.get_state()
        tnetstring.dump(state, self.fo)
class FlowReader:
    """Reads flows from a binary file object, auto-detecting HAR vs. mitmproxy dump format."""

    fo: BinaryIO

    def __init__(self, fo: BinaryIO):
        self.fo = fo

    def peek(self, n: int) -> bytes:
        """Return up to n bytes from the stream without consuming them."""
        try:
            return cast(BufferedReader, self.fo).peek(n)
        except AttributeError:
            # https://github.com/python/cpython/issues/90533: io.BytesIO does not have peek()
            pos = self.fo.tell()
            ret = self.fo.read(n)
            self.fo.seek(pos)
            return ret

    def stream(self) -> Iterable[flow.Flow]:
        """
        Yields Flow objects from the dump.

        An opening "{" (optionally preceded by a UTF-8 BOM) marks a HAR file;
        anything else is treated as a tnetstring-based mitmproxy dump.
        Raises FlowReadException on malformed input.
        """
        if self.peek(4).startswith(
            b"\xef\xbb\xbf{"
        ):  # skip BOM, usually added by Fiddler
            self.fo.read(3)
        if self.peek(1).startswith(b"{"):
            try:
                har_file = json.loads(self.fo.read().decode("utf-8"))
                for request_json in har_file["log"]["entries"]:
                    yield request_to_flow(request_json)
            except Exception:
                raise exceptions.FlowReadException(
                    "Unable to read HAR file. Please provide a valid HAR file"
                )
        else:
            try:
                while True:
                    # FIXME: This cast hides a lack of dynamic type checking
                    loaded = cast(
                        dict[Union[bytes, str], Any],
                        tnetstring.load(self.fo),
                    )
                    try:
                        if not isinstance(loaded, dict):
                            raise ValueError(f"Invalid flow: {loaded=}")
                        yield flow.Flow.from_state(compat.migrate_flow(loaded))
                    except ValueError as e:
                        raise exceptions.FlowReadException(e) from e
            except (ValueError, TypeError, IndexError) as e:
                if str(e) == "not a tnetstring: empty file":
                    return  # Error is due to EOF
                raise exceptions.FlowReadException("Invalid data format.") from e
class FilteredFlowWriter:
    """Like FlowWriter, but only persists flows matching an optional filter."""

    def __init__(self, fo: BinaryIO, flt: flowfilter.TFilter | None):
        self.fo = fo
        self.flt = flt

    def add(self, f: flow.Flow) -> None:
        """Write *f* if it matches the filter (or if no filter is set)."""
        if self.flt and not flowfilter.match(self.flt, f):
            return
        tnetstring.dump(f.get_state(), self.fo)
        self.fo.flush()
def read_flows_from_paths(paths) -> list[flow.Flow]:
    """
    Read all flows from the given file paths and return them as one list.

    Streaming would be preferable from a performance perspective, but any
    error in any file should surface immediately, so everything is
    materialized up front.

    Raises:
        FlowReadException, if any error occurs.
    """
    collected: list[flow.Flow] = []
    try:
        for raw_path in paths:
            expanded = os.path.expanduser(raw_path)
            with open(expanded, "rb") as f:
                collected.extend(FlowReader(f).stream())
    except OSError as e:
        raise exceptions.FlowReadException(e.strerror)
    return collected
| python | MIT | e6aa924bb411a9687b91920b8d094af37bc02b90 | 2026-01-04T14:40:00.086164Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.