| from fastapi import APIRouter | |
| import httpx | |
| from bs4 import BeautifulSoup | |
# Shared APIRouter for this module. NOTE(review): no @router.get/... decorator
# is visible in this view, so exploitdb_feed is never registered as a route
# here — confirm registration happens elsewhere or a decorator is missing.
router = APIRouter()
async def exploitdb_feed():
    """Scrape the first 10 entries of the Exploit-DB Google Hacking Database.

    Returns:
        dict: ``{"entries": [{"dork": str, "description": str}, ...]}`` with
        at most 10 entries; the list is empty when no table rows with at
        least 3 cells are found.

    Raises:
        httpx.HTTPStatusError: if the GHDB page responds with a 4xx/5xx.
        httpx.TimeoutException: if the request exceeds the client timeout.
    """
    # NOTE(review): despite the function name, this URL is the Google Hacking
    # Database (GHDB) listing, not an exploit feed — confirm intent.
    url = "https://www.exploit-db.com/google-hacking-database"
    # Bounded timeout so a hung upstream cannot stall this coroutine forever
    # (original had no timeout), and fail fast on HTTP error responses
    # instead of silently scraping an error page.
    async with httpx.AsyncClient(timeout=10.0) as client:
        resp = await client.get(url)
        resp.raise_for_status()
    soup = BeautifulSoup(resp.text, "html.parser")
    data = []
    for row in soup.select("table tbody tr")[:10]:
        cols = row.find_all("td")
        # assumes cols[1] holds the dork string and cols[2] its description —
        # TODO confirm against the current GHDB table markup; shorter rows
        # (e.g. header/spacer rows) are skipped.
        if len(cols) >= 3:
            data.append({
                "dork": cols[1].text.strip(),
                "description": cols[2].text.strip(),
            })
    return {"entries": data}