# openreview-enriched / start_droplets.py
# Uploaded by jonah410 via huggingface_hub (revision 024dbe2)
#!/usr/bin/env python3
import os
import time
import logging
from concurrent.futures import ThreadPoolExecutor
import digitalocean
import paramiko
from scp import SCPClient
# ── CONFIG ────────────────────────────────────────────────────────────────
API_TOKEN = ""  # DigitalOcean API token — must be filled in before running; never commit a real token
PRIVATE_KEY_PATH = os.path.expanduser("~/.ssh/id_ed25519")  # local private key used for root SSH into droplets
IMAGE = "ubuntu-22-04-x64"  # DO image slug for newly created droplets
SIZE = "c-2" # CPU-optimized
REGION = "nyc3"  # DO datacenter region slug
DROPLET_COUNT = 10  # number of parallel workers; worker i processes shard i
SHARD_PREFIX = "shard_"  # NOTE(review): unused in this file — presumably the remote script's input prefix
ENR_PREFIX = "enriched_"  # output filename prefix: enriched_<i>.jsonl per worker
REMOTE_SCRIPT = "enrich_openreview.py"  # worker script uploaded to and run on each droplet
# ── LOGGING ───────────────────────────────────────────────────────────────
logging.basicConfig(
    level=logging.INFO,
    format="[%(asctime)s] %(message)s",
    datefmt="%H:%M:%S",
)
logger = logging.getLogger()  # root logger, shared by every function below
def setup_and_run(index, ssh_key_fp):
    """Provision (or reuse) droplet worker-<index>, upload the enrichment
    script, launch it in the background, and poll for its output file.

    Args:
        index: integer worker/shard id; passed through to the remote script.
        ssh_key_fp: fingerprint of the DigitalOcean SSH key to install on
            newly created droplets.

    Returns:
        (droplet, ip): the digitalocean.Droplet object and its public IPv4.

    Raises:
        RuntimeError: if no public IP can be determined within the deadline,
            or SSH never becomes reachable.
    """
    # 1) Look up (or create) worker-{index}
    mgr = digitalocean.Manager(token=API_TOKEN)
    all_droplets = {d.name: d for d in mgr.get_all_droplets()}
    name = f"worker-{index}"
    if name not in all_droplets:
        logger.info(f"[{index}] creating droplet {name}…")
        droplet = digitalocean.Droplet(
            token=API_TOKEN,
            name=name,
            region=REGION,
            image=IMAGE,
            size_slug=SIZE,
            ssh_keys=[ssh_key_fp],
        )
        droplet.create()
        # Block until the create action finishes so networks can populate.
        for action in droplet.get_actions():
            action.wait()
    else:
        droplet = all_droplets[name]

    # 2) Wait until the droplet has a public IPv4 (up to 2 minutes).
    deadline = time.time() + 120
    ip = None
    while time.time() < deadline:
        droplet.load()
        v4nets = droplet.networks.get("v4", []) or []
        publics = [n["ip_address"] for n in v4nets if n.get("type") == "public"]
        if publics:
            ip = publics[0]
            break
        time.sleep(5)
    if not ip:
        # Fallback: some python-digitalocean versions expose a flat attribute.
        ip = getattr(droplet, "ip_address", None)
    if not ip:
        raise RuntimeError(f"[{index}] Could not determine public IP for droplet {name}")
    logger.info(f"[{index}] → {name} @ {ip}")

    # 3+4) SSH in, retrying while sshd comes up (replaces the previous blind
    # fixed sleep, which could be too short on a slow boot).
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    last_err = None
    for _ in range(12):  # ~2 minutes total
        try:
            ssh.connect(ip, username="root", key_filename=PRIVATE_KEY_PATH, timeout=10)
            break
        except Exception as exc:  # paramiko raises several unrelated types here
            last_err = exc
            time.sleep(10)
    else:
        raise RuntimeError(f"[{index}] SSH never became reachable at {ip}: {last_err}")
    logger.info(f"[{index}] SSH connected")

    try:
        # 5) Upload only the crawler script (no shard files to push if starting fresh)
        with SCPClient(ssh.get_transport()) as scp:
            logger.info(f"[{index}] uploading {REMOTE_SCRIPT}")
            scp.put(REMOTE_SCRIPT, REMOTE_SCRIPT)

        # 6) Install dependencies and run under nohup from /root/
        # (so enrich_openreview.py can find itself in /root/ and create /root/log.txt)
        cmd = (
            "apt update && apt install -y python3-pip && "
            "pip3 install requests openreview-py PyPDF2 huggingface-hub && "
            f"nohup python3 enrich_openreview.py {index} > log.txt 2>&1 &"
        )
        logger.info(f"[{index}] starting enrichment")
        _stdin, stdout, stderr = ssh.exec_command(cmd)
        # These reads block until apt/pip finish (nohup backgrounds the script).
        logger.debug(f"[{index}] remote stdout: {stdout.read().decode().strip()}")
        logger.debug(f"[{index}] remote stderr: {stderr.read().decode().strip()}")

        # 7) Poll for the enriched output file (20 polls x 30 s = 10 minutes).
        for attempt in range(1, 21):
            _, stdout2, _ = ssh.exec_command(
                f"test -f /root/{ENR_PREFIX}{index}.jsonl && echo YES || echo NO"
            )
            status = stdout2.read().decode().strip()
            logger.info(f"[{index}] poll {attempt}/20 → enriched present? {status}")
            if status == "YES":
                logger.info(f"[{index}] → found {ENR_PREFIX}{index}.jsonl")
                break
            time.sleep(30)
        else:
            # Previously this fell through silently; make the timeout visible.
            logger.warning(f"[{index}] enriched file never appeared after 20 polls")
    finally:
        ssh.close()  # was leaked on any exception before
    return droplet, ip
def retrieve_results(ip, index):
    """Download /root/enriched_<index>.jsonl from the droplet at *ip* into
    the current working directory.

    Args:
        ip: public IPv4 address of the worker droplet.
        index: worker/shard id used to build the remote and local filenames.

    Raises:
        Whatever paramiko/scp raise on connection or transfer failure.
    """
    logger.info(f"[{index}] retrieving {ENR_PREFIX}{index}.jsonl from {ip}")
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(ip, username="root", key_filename=PRIVATE_KEY_PATH)
    try:
        with SCPClient(ssh.get_transport()) as scp:
            scp.get(f"/root/{ENR_PREFIX}{index}.jsonl", f"{ENR_PREFIX}{index}.jsonl")
    finally:
        # Previously a failed scp.get leaked the SSH connection.
        ssh.close()
    logger.info(f"[{index}] downloaded {ENR_PREFIX}{index}.jsonl")
def merge_outputs(num_parts, output="enriched_full.jsonl"):
    """Concatenate enriched_<i>.jsonl for i in 0..num_parts-1 into *output*.

    Parts that do not exist locally are logged and skipped, so a partial
    retrieval still yields a merged file.
    """
    logger.info("Merging all shards…")
    with open(output, "w") as merged:
        for shard_idx in range(num_parts):
            shard_path = f"{ENR_PREFIX}{shard_idx}.jsonl"
            if not os.path.exists(shard_path):
                logger.warning(f" → skipping {shard_path} (not found)")
                continue
            logger.info(f" → appending {shard_path}")
            with open(shard_path) as shard_file:
                merged.writelines(shard_file)
    logger.info(f"✓ merged into {output}")
if __name__ == "__main__":
    # Look up the fingerprint of the DigitalOcean SSH key named "spiral".
    mgr = digitalocean.Manager(token=API_TOKEN)
    # Previously a missing key raised a bare StopIteration; fail with a message.
    key = next((k for k in mgr.get_all_sshkeys() if k.name == "spiral"), None)
    if key is None:
        raise SystemExit('No DigitalOcean SSH key named "spiral" found')
    ssh_fp = key.fingerprint

    # Launch + poll all workers in parallel; f.result() re-raises any
    # provisioning failure here.
    with ThreadPoolExecutor(max_workers=DROPLET_COUNT) as ex:
        futures = [ex.submit(setup_and_run, i, ssh_fp) for i in range(DROPLET_COUNT)]
        deps = [f.result() for f in futures]

    # Retrieve each shard. Collect and await the futures so download errors
    # are logged (previously the futures were never awaited, so failures
    # vanished silently); keep best-effort semantics — the merge below
    # already skips missing shards.
    with ThreadPoolExecutor(max_workers=DROPLET_COUNT) as ex:
        fetches = [ex.submit(retrieve_results, ip, idx) for idx, (_, ip) in enumerate(deps)]
        for idx, fut in enumerate(fetches):
            try:
                fut.result()
            except Exception:
                logger.exception(f"[{idx}] retrieval failed; shard will be skipped in merge")

    # finally, merge all enriched_<i>.jsonl into enriched_full.jsonl
    merge_outputs(DROPLET_COUNT)
    logger.info("✓ All done!")