import streamlit as st
import asyncio
import json
from playwright.async_api import async_playwright
# --- Page chrome & persistent state ------------------------------------------
st.set_page_config(page_title="Network Inspector", layout="wide")
st.title("🔍 Network Request Inspector")  # NOTE(review): original emoji was mojibake ("๐") — confirm glyph choice
st.caption("Paste a Goal.com match URL to intercept all API calls")

# Keep captured request logs across Streamlit reruns (each widget interaction
# re-executes this script from the top).
if "logs" not in st.session_state:
    st.session_state.logs = []

url = st.text_input("Match URL", placeholder="https://www.goal.com/en-ng/match/...")
async def _intercept(target_url):
    """Load *target_url* in headless Chromium and capture all XHR/fetch traffic.

    Returns a list of dicts with keys ``method``, ``url``, ``type`` and,
    when a matching response was observed, ``status`` and a 500-character
    body ``preview``.
    """
    requests_log = []
    async with async_playwright() as p:
        browser = await p.chromium.launch(headless=True)
        # Mobile user agent — the site may serve a lighter page variant.
        context = await browser.new_context(
            user_agent="Mozilla/5.0 (iPhone; CPU iPhone OS 16_0 like Mac OS X) AppleWebKit/605.1.15"
        )
        page = await context.new_page()

        async def handle_request(request):
            # Record only API-style traffic (XHR / fetch), not page assets.
            if request.resource_type in ("xhr", "fetch"):
                requests_log.append({
                    "method": request.method,
                    "url": request.url,
                    "type": request.resource_type,
                })

        async def handle_response(response):
            if response.request.resource_type not in ("xhr", "fetch"):
                return
            # Attach status/preview to the first logged request with the same
            # URL that has no response yet (the "status" guard keeps duplicate
            # URLs from all landing on one entry).
            for entry in requests_log:
                if entry["url"] == response.url and "status" not in entry:
                    entry["status"] = response.status
                    try:
                        body = await response.text()
                        entry["preview"] = body[:500]
                    except Exception:
                        # Body can be unavailable (redirects, binary, context
                        # already closing) — keep the entry, note the failure.
                        entry["preview"] = "(could not read body)"
                    break

        page.on("request", handle_request)
        page.on("response", handle_response)
        await page.goto(target_url, wait_until="domcontentloaded", timeout=90000)
        # Give late XHRs fired after DOMContentLoaded time to complete.
        await page.wait_for_timeout(8000)
        await browser.close()
    return requests_log


if st.button("Inspect") and url:
    with st.spinner("Loading page and intercepting requests..."):
        try:
            st.session_state.logs = asyncio.run(_intercept(url))
        except Exception as exc:
            # e.g. Playwright TimeoutError or an unreachable URL — show a
            # friendly message instead of a raw traceback, and keep any
            # previously captured logs intact.
            st.error(f"Failed to inspect page: {exc}")
# --- Results view -------------------------------------------------------------
if st.session_state.logs:
    logs = st.session_state.logs
    st.success(f"Captured {len(logs)} XHR/fetch requests")

    filter_text = st.text_input("Filter URLs (e.g. commentary, api, sportfeeds)", "")
    # Case-insensitive substring match on the request URL; empty filter shows all.
    filtered = (
        [r for r in logs if filter_text.lower() in r["url"].lower()]
        if filter_text
        else logs
    )
    st.write(f"Showing {len(filtered)} requests")

    for r in filtered:
        status = r.get("status", "?")  # "?" when no response was captured
        # Any 2xx is a success — the original showed 201/204 etc. as red.
        # isinstance guard keeps the "?" sentinel from breaking the comparison.
        is_ok = isinstance(status, int) and 200 <= status < 300
        color = "🟢" if is_ok else "🔴"  # NOTE(review): original glyphs were mojibake ("๐ข"/"๐ด")
        with st.expander(f"{color} [{r['method']}] {r['url'][:120]} (HTTP {status})"):
            st.code(r["url"], language="text")
            if "preview" in r:
                st.subheader("Response preview")
                try:
                    # Pretty-print when the (possibly truncated) preview is
                    # valid JSON; otherwise fall back to raw text.
                    st.json(json.loads(r["preview"]))
                except Exception:
                    st.text(r["preview"])