# NOTE(review): the three lines below are non-Python residue from the file
# hosting UI (uploader name, commit message, commit hash); commented out so
# the module parses.
# ADAPT-Chase's picture
# Add files using upload-large-folder tool
# 93be2a2 verified
from __future__ import annotations
import asyncio
import aiohttp
import json
from pathlib import Path
from typing import Any, Dict, List
from .registry import ToolRegistry
async def _fetch_one(session: aiohttp.ClientSession, url: str, dest_dir: Path) -> Dict[str, Any]:
    """Download a single URL into *dest_dir* and return a result record.

    On success the record carries the HTTP status, destination path and byte
    count; on any failure it carries the error message instead. Never raises,
    so one bad URL cannot abort a bulk download.
    """
    try:
        timeout = aiohttp.ClientTimeout(total=120)
        async with session.get(url, timeout=timeout) as response:
            payload = await response.read()
            # Fall back to a generic name for URLs that end with "/".
            filename = url.rsplit("/", 1)[-1] or "download.bin"
            target = dest_dir / filename
            target.write_bytes(payload)
            return {
                "url": url,
                "status": response.status,
                "path": str(target),
                "bytes": len(payload),
            }
    except Exception as exc:
        return {"url": url, "error": str(exc)}
def t_fetch_bulk(args: Dict[str, Any]) -> str:
    """Tool handler: download many URLs concurrently into one directory.

    Expected keys in *args*:
        urls: list of URLs to fetch (required).
        out_dir: destination directory; created if missing.

    Returns a JSON string: {"results": [...]} with one record per URL
    (see _fetch_one), or {"error": "urls required"} when no URLs were given.
    """
    urls: List[str] = args.get("urls") or []
    out_dir = Path(str(args.get("out_dir", "/data/adaptai/projects/elizabeth/data/downloads")))
    out_dir.mkdir(parents=True, exist_ok=True)
    if not urls:
        return json.dumps({"error": "urls required"})

    async def run() -> List[Dict[str, Any]]:
        # One shared session for all downloads; per-URL failures are
        # captured inside _fetch_one, so gather never raises for them.
        async with aiohttp.ClientSession() as session:
            return await asyncio.gather(*(_fetch_one(session, u, out_dir) for u in urls))

    # asyncio.run creates and tears down a fresh event loop. The previous
    # get_event_loop().run_until_complete() is deprecated since Python 3.10
    # and fails when called off the main thread with no loop set.
    results = asyncio.run(run())
    return json.dumps({"results": results})
def t_jsonl_merge(args: Dict[str, Any]) -> str:
    """Tool handler: concatenate JSONL files, dropping blank lines.

    Expected keys in *args*:
        inputs: list of source JSONL paths (required).
        output: destination path.

    Returns a JSON string with the output path and the total number of
    non-blank lines written. Missing input files raise (caller's problem).
    """
    inputs: List[str] = args.get("inputs") or []
    output = Path(str(args.get("output", "/data/adaptai/projects/elizabeth/data/merged.jsonl")))
    if not inputs:
        return json.dumps({"error": "inputs required"})
    count = 0
    with output.open("w", encoding="utf-8") as out:
        for p in inputs:
            # Stream line-by-line instead of read_text().splitlines() so
            # arbitrarily large inputs do not have to fit in memory.
            with Path(p).open("r", encoding="utf-8") as src:
                for raw in src:
                    line = raw.strip()
                    if not line:
                        continue
                    out.write(line + "\n")
                    count += 1
    return json.dumps({"output": str(output), "lines": count})
def t_jsonl_dedup(args: Dict[str, Any]) -> str:
    """Tool handler: drop JSONL records whose *key* value was seen before.

    Expected keys in *args*:
        path: input JSONL file (required).
        key: field to dedup on (default "text"). Records missing the key or
             with a falsy value are dropped, as are unparseable lines.
        output: destination path (default "<path>.dedup.jsonl").

    Returns a JSON string with the output path, records kept, and the number
    of distinct key values seen.
    """
    path = Path(str(args.get("path")))
    key = args.get("key", "text")
    out = Path(str(args.get("output", str(path) + ".dedup.jsonl")))
    if not path.exists():
        return json.dumps({"error": f"missing {path}"})
    seen = set()
    kept = 0
    with out.open("w", encoding="utf-8") as w:
        # Stream the input; read_text() would load the whole file at once.
        with path.open("r", encoding="utf-8") as src:
            for line in src:
                try:
                    obj = json.loads(line)
                except Exception:
                    continue  # best-effort: skip malformed lines
                val = obj.get(key)
                if not val:
                    continue
                # Track the value itself, not hash(val): hash collisions would
                # silently discard distinct records. Unhashable values (lists,
                # dicts) are normalized to canonical JSON text first.
                try:
                    marker = val
                    hash(marker)
                except TypeError:
                    marker = json.dumps(val, sort_keys=True)
                if marker in seen:
                    continue
                seen.add(marker)
                w.write(json.dumps(obj) + "\n")
                kept += 1
    return json.dumps({"output": str(out), "kept": kept, "unique_keys": len(seen)})
def register_tools(reg: ToolRegistry) -> None:
    """Register the bulk-download and JSONL utility tools on *reg*."""
    string_array = {"type": "array", "items": {"type": "string"}}
    specs = (
        (
            "fetch_bulk",
            "Download many URLs concurrently to a directory.",
            {"type": "object", "properties": {"urls": string_array, "out_dir": {"type": "string"}}, "required": ["urls"]},
            t_fetch_bulk,
        ),
        (
            "jsonl_merge",
            "Merge multiple JSONL files.",
            {"type": "object", "properties": {"inputs": string_array, "output": {"type": "string"}}, "required": ["inputs"]},
            t_jsonl_merge,
        ),
        (
            "jsonl_dedup",
            "Deduplicate a JSONL by a key (default 'text').",
            {"type": "object", "properties": {"path": {"type": "string"}, "key": {"type": "string"}, "output": {"type": "string"}}, "required": ["path"]},
            t_jsonl_dedup,
        ),
    )
    for name, description, parameters, handler in specs:
        reg.register(name=name, description=description, parameters=parameters, handler=handler)