| """ |
| pull_structures.py |
| ---------------------------- |
Pulls all glycan structure files from the GlycoShape API and writes them
to local Parquet shards (suitable for later upload to HuggingFace as a dataset).
| |
| Requirements: |
| pip install requests huggingface_hub pyarrow tqdm |
| |
Usage:
    python pull_structures.py --out-dir ../data/structure_parquets/ [--limit N] [--retry-failed]
| """ |
|
|
| import argparse, io, json, os, time, zipfile, requests |
| from pathlib import Path |
| import pyarrow as pa |
| import pyarrow.parquet as pq |
| from tqdm import tqdm |
| from huggingface_hub import HfApi |
|
|
| |
BASE_URL = "https://glycoshape.org/api"  # root of the GlycoShape REST API
BATCH_SIZE = 50   # shard flush factor: a shard is written every BATCH_SIZE * 10 records
SLEEP_SEC = 0.3   # pause between per-glycan downloads, to stay polite to the server
TIMEOUT = 30      # seconds, request timeout for the /available listing call
|
|
|
|
| |
def fetch_available_ids() -> list[str]:
    """Query the GlycoShape ``/available`` endpoint and return the GlyTouCan ID list.

    Raises requests.HTTPError on a non-2xx response.
    """
    print("Fetching GlyTouCan ID list from /api/available …")
    response = requests.get(f"{BASE_URL}/available", timeout=TIMEOUT)
    response.raise_for_status()
    id_list = response.json()
    print(f" → {len(id_list)} glycans found")
    return id_list
|
|
|
|
| |
def download_structures(glycan_id: str) -> list[dict]:
    """
    Download the structure ZIP for one glycan and return one record per
    structure file found inside it.

    Each record is a dict:
        glytoucan_id : str – GlyTouCan accession (e.g. "G00028MO")
        filename     : str – original filename inside the ZIP
        file_type    : str – extension: "pdb", "mol2", "cif" or "xyz"
        cluster      : str – cluster label parsed from the filename ("" if none)
        pdb_content  : str – full text content of the file

    Returns an empty list on any network/HTTP error or if the payload is not
    a valid ZIP, so the caller records the ID as failed and can retry later.
    """
    url = f"{BASE_URL}/download/{glycan_id}"
    try:
        r = requests.get(url, timeout=60)
        r.raise_for_status()
    except requests.HTTPError as e:
        print(f" HTTP error for {glycan_id}: {e}")
        return []
    except requests.RequestException as e:
        print(f" Network error for {glycan_id}: {e}")
        return []

    try:
        return _records_from_zip(glycan_id, r.content)
    except zipfile.BadZipFile:
        # Return nothing (rather than a partial batch) so the whole glycan is
        # marked failed and retried via --retry-failed.
        print(f" Bad ZIP for {glycan_id}")
        return []


def _records_from_zip(glycan_id: str, payload: bytes) -> list[dict]:
    """Parse a downloaded ZIP *payload* into structure-file records.

    Keeps only pdb/mol2/cif/xyz members; raises zipfile.BadZipFile if the
    payload (or a member) is not readable as a ZIP archive.
    """
    records: list[dict] = []
    with zipfile.ZipFile(io.BytesIO(payload)) as zf:
        for name in zf.namelist():
            ext = Path(name).suffix.lstrip(".").lower()
            if ext not in {"pdb", "mol2", "cif", "xyz"}:
                continue  # also skips directory entries, which have no suffix

            raw = zf.read(name)
            try:
                content = raw.decode("utf-8")
            except UnicodeDecodeError:
                # latin-1 never fails to decode, so this is a safe fallback
                # for files carrying legacy 8-bit characters.
                content = raw.decode("latin-1")

            records.append({
                "glytoucan_id": glycan_id,
                "filename": name,
                "file_type": ext,
                "cluster": _cluster_label(Path(name).stem),
                "pdb_content": content,
            })
    return records


def _cluster_label(stem: str) -> str:
    """Return the first all-digit token of *stem* (split on "_"/"-"), else ""."""
    # NOTE(review): a token like "cluster0" is NOT all-digit and yields "" —
    # confirm against real GlycoShape filenames if cluster labels matter.
    for part in stem.replace("-", "_").split("_"):
        if part.isdigit():
            return part
    return ""
|
|
|
|
| |
def write_parquet(records: list[dict], out_dir: Path, shard_idx: int) -> Path:
    """Serialize *records* into a zstd-compressed Parquet shard and return its path."""
    # Explicit schema: large_string for structure text, which can exceed the
    # 2 GB per-column limit of a plain string array across a big shard.
    columns = [
        ("glytoucan_id", pa.string()),
        ("filename", pa.string()),
        ("file_type", pa.string()),
        ("cluster", pa.string()),
        ("pdb_content", pa.large_string()),
    ]
    schema = pa.schema([pa.field(name, dtype) for name, dtype in columns])
    out_path = out_dir / f"shard-{shard_idx:05d}.parquet"
    pq.write_table(pa.Table.from_pylist(records, schema=schema), out_path, compression="zstd")
    print(f" → wrote {out_path.name} ({len(records)} structure files)")
    return out_path
|
|
| |
def main():
    """CLI entry point: download every glycan's structures and shard them to Parquet."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--out-dir", default="../data/structure_parquets/", help="Output dir for parquet shards (default: ../data/structure_parquets/)")
    parser.add_argument("--limit", type=int, default=None, help="Only process first N glycans (for testing)")
    parser.add_argument("--retry-failed", action="store_true", help="Retry only the IDs listed in failed_ids.json")
    args = parser.parse_args()

    out_dir = Path(args.out_dir)
    out_dir.mkdir(parents=True, exist_ok=True)

    # Choose the work list: either the previously-failed IDs on disk, or the
    # full listing from the API (optionally truncated for quick test runs).
    if args.retry_failed:
        fail_path = out_dir / "failed_ids.json"
        if not fail_path.exists():
            print(f"No failed_ids.json found in {out_dir}")
            return
        all_ids = json.loads(fail_path.read_text())
        print(f"Retrying {len(all_ids)} previously failed glycans …")
    else:
        all_ids = fetch_available_ids()
        if args.limit:
            all_ids = all_ids[: args.limit]
            print(f" (limited to first {args.limit} for testing)")

    # Resume shard numbering after any shards already present on disk.
    shard_idx: int = len(list(out_dir.glob("shard-*.parquet")))
    failed: list[str] = []
    batch: list[dict] = []

    for glycan_id in tqdm(all_ids, desc="Downloading"):
        records = download_structures(glycan_id)
        # NOTE(review): an ID with zero matching files inside its ZIP is also
        # counted as "failed" here — download_structures returns [] for both
        # that case and real errors; confirm this conflation is intended.
        if not records:
            failed.append(glycan_id)
        else:
            batch.extend(records)

        # Flush a shard once enough structure files have accumulated.
        if len(batch) >= BATCH_SIZE * 10:
            write_parquet(batch, out_dir, shard_idx)
            shard_idx += 1
            batch = []

        time.sleep(SLEEP_SEC)  # rate-limit requests to the API

    # Flush whatever is left over as a final (possibly short) shard.
    if batch:
        write_parquet(batch, out_dir, shard_idx)

    if failed:
        fail_path = out_dir / "failed_ids.json"
        fail_path.write_text(json.dumps(failed, indent=2))
        print(f"\n⚠ {len(failed)} glycans failed — saved to {fail_path}")
    elif args.retry_failed:
        # Every previously failed ID succeeded this time: clear the marker file.
        (out_dir / "failed_ids.json").unlink(missing_ok=True)
        print("\n✓ All previously failed glycans downloaded successfully")
|
|
|
|
# Script entry point: run the full download-and-shard pipeline.
if __name__ == "__main__":
    main()