# HyperMega-C2-API / supervisor.py
# (Uploaded via huggingface_hub; revision 1f9f7aa)
import requests
import logging
import os
import json
import asyncio
logger = logging.getLogger("spider_supervisor")
logging.basicConfig(level=logging.INFO)
# The URL of the detached Spider deployment on Hugging Face
SPIDER_API_URL = os.getenv("SPIDER_API_URL", "https://wrzzzrzr-hypermega-c2-spider.hf.space")
class SpiderSupervisor:
    """Delegates crawl targets to a remote Spider Space instead of spawning
    local worker processes."""

    def __init__(self):
        # Kept for interface compatibility; deployment is remote, so no
        # local worker processes are tracked here.
        self.workers = {}

    def deploy_spiders(self, targets):
        """Record *targets* in config/sources.json and forward them all to the
        external Spider Space.

        Args:
            targets: iterable of target URL strings.
        """
        sources_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), '..', 'config', 'sources.json')
        )
        try:
            with open(sources_path, 'r') as f:
                existing = json.load(f)
        except Exception:
            # Missing or unparseable sources file — start from an empty map.
            existing = []

        # Set membership instead of a list scan; also track additions so a URL
        # repeated within one `targets` call is not appended twice (the original
        # computed the known-URL list once and could create duplicate entries).
        existing_urls = {s.get("url") for s in existing}
        for t in targets:
            if t not in existing_urls:
                existing.append({
                    "name": "HF_Dynamic_Target",
                    "url": t,
                    # Bare domain derived from the URL (strip scheme and path).
                    "domain": t.split("//")[-1].split("/")[0],
                })
                existing_urls.add(t)

        # Every requested target is forwarded, known or not — this matches the
        # original behavior, where both branches appended to new_targets.
        new_targets = list(targets)

        # Persist BEFORE scheduling the forward: in the original, create_task
        # could raise (no running loop) and skip the write entirely.
        try:
            with open(sources_path, 'w') as f:
                json.dump(existing, f, indent=4)
        except Exception as e:
            logger.error(f"[Supervisor] Could not write to sources.json: {e}")

        # Forward the targets to the Spider Space asynchronously.
        # asyncio.create_task requires a running event loop; when called from
        # plain sync code, log the problem instead of raising RuntimeError.
        if new_targets:
            try:
                asyncio.get_running_loop()
            except RuntimeError:
                logger.error(
                    "[Supervisor] No running event loop; cannot forward targets asynchronously."
                )
            else:
                asyncio.create_task(self._forward_to_spider(new_targets))

    async def _forward_to_spider(self, targets):
        """POST *targets* to the Spider Space's /crawl endpoint (best effort:
        all failures are logged, never raised)."""
        try:
            # Imported lazily so the module still loads when httpx is absent
            # until a deployment is actually attempted.
            import httpx
            async with httpx.AsyncClient() as client:
                url = f"{SPIDER_API_URL.rstrip('/')}/crawl"
                logger.info(f"[Supervisor] Deploying {len(targets)} targets to Spider Space: {url}")
                r = await client.post(url, json={"urls": targets}, timeout=10.0)
                r.raise_for_status()
                logger.info(f"[Supervisor] Spider acknowledged targets: {r.json()}")
        except Exception as e:
            logger.error(f"[Supervisor] Failed to deploy targets to Spider Space: {e}")

    def active_count(self):
        """Return the number of active crawl swarms.

        The remote Spider Space counts as a single swarm; polling it for a
        real status is a possible future improvement.
        """
        return 1
# Module-level singleton shared by importers of this module.
supervisor = SpiderSupervisor()