paulgessinger committed on
Commit
746ff15
·
1 Parent(s): b34e94b

add a script to move data files

Browse files
Files changed (1) hide show
  1. scripts/manage_dataset_files.py +616 -0
scripts/manage_dataset_files.py ADDED
@@ -0,0 +1,616 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+ # /// script
3
+ # requires-python = ">=3.9"
4
+ # dependencies = [
5
+ # "aiohttp",
6
+ # "typer>=0.9",
7
+ # "rich",
8
+ # "PyYAML",
9
+ # ]
10
+ # ///
11
+ """Download ColliderML dataset assets and manage README URL updates."""
12
+
13
+ from __future__ import annotations
14
+
15
+ import asyncio
16
+ import contextlib
17
+ from dataclasses import dataclass
18
+ from pathlib import Path
19
+ from typing import Annotated, Any, List, Sequence
20
+ from urllib.parse import urlparse
21
+
22
+ import aiohttp
23
+ import typer
24
+ import yaml
25
+ from rich.console import Console
26
+ from rich.progress import (
27
+ BarColumn,
28
+ DownloadColumn,
29
+ Progress,
30
+ TextColumn,
31
+ TimeElapsedColumn,
32
+ TransferSpeedColumn,
33
+ )
34
+ from rich.table import Table
35
+
36
+
37
+ console = Console()
38
+ app = typer.Typer(help="Manage dataset file URLs declared in README front matter.")
39
+
40
+
41
@dataclass
class DataFileEntry:
    """A single URL declared under a config's ``data_files`` in the README.

    Attributes:
        config_name: Name of the config that declared this URL.
        path: Sequence of keys/indices leading from ``data_files`` to the URL.
        url: The remote URL string itself.
    """

    config_name: str
    path: Sequence[str]
    url: str

    def parsed(self) -> tuple[str, str]:
        """Return ``(netloc, path)`` of the URL, path without a leading slash."""
        pieces = urlparse(self.url)
        return pieces.netloc, pieces.path.lstrip("/")

    def relative_path(self) -> Path:
        """Return a local relative path mirroring the remote layout: netloc/path."""
        host, remainder = self.parsed()
        return Path(host) / Path(remainder)
54
+
55
+
56
@dataclass
class DownloadResult:
    """Outcome of downloading one :class:`DataFileEntry`."""

    entry: DataFileEntry
    path: Path  # local destination (the final file, not the ".part" temp file)
    success: bool
    skipped: bool  # True when an existing complete local file was reused
    error: Exception | None = None  # populated only when success is False
    order: int = 0  # original position in the entry list, used to restore order
64
+
65
+
66
@dataclass
class VerifyResult:
    """Outcome of a reachability check for one :class:`DataFileEntry`."""

    entry: DataFileEntry
    ok: bool
    status: int | None  # HTTP status code, or None when the request itself failed
    detail: str = ""  # human-readable failure detail (empty on success)
    order: int = 0  # original position in the entry list, used to restore order
73
+
74
+
75
def read_front_matter(readme_path: Path) -> tuple[str, str]:
    """Split a README into its YAML front matter and markdown body.

    Args:
        readme_path: File expected to start with a ``---`` delimited block.

    Returns:
        ``(front_matter, body)`` — neither includes the ``---`` delimiters.

    Raises:
        ValueError: If the file does not start with front matter or the
            front matter is never closed.
    """
    content = readme_path.read_text(encoding="utf-8")
    if not content.startswith("---\n"):
        raise ValueError("README.md does not start with YAML front matter.")
    head, closed, tail = content[4:].partition("\n---\n")
    if not closed:
        raise ValueError("README.md front matter is not terminated by '---'.")
    return head, tail
84
+
85
+
86
def load_data_file_entries(front_matter_text: str) -> List[DataFileEntry]:
    """Collect every URL string found under ``configs[*].data_files``.

    Recursively walks each config's (possibly nested) ``data_files``
    structure and records one :class:`DataFileEntry` per string leaf,
    remembering the chain of keys/list indices leading to it.

    Args:
        front_matter_text: Raw YAML front matter text from the README.

    Returns:
        Entries in document order; empty when the YAML has no configs
        or parses to something other than a mapping.
    """
    data = yaml.safe_load(front_matter_text)
    configs = data.get("configs", []) if isinstance(data, dict) else []
    entries: List[DataFileEntry] = []

    # config_name is passed explicitly rather than captured as a free
    # variable assigned later in the loop below — the original relied on a
    # late-binding closure over a name that did not exist at definition time.
    def _walk(value: Any, path: list[str], config_name: str) -> None:
        if value is None:
            return
        if isinstance(value, str):
            # String leaves are the URLs we are after.
            entries.append(
                DataFileEntry(config_name=config_name, path=tuple(path), url=value)
            )
        elif isinstance(value, list):
            for idx, item in enumerate(value):
                _walk(item, [*path, str(idx)], config_name)
        elif isinstance(value, dict):
            for key, item in value.items():
                _walk(item, [*path, str(key)], config_name)

    for config in configs:
        if not isinstance(config, dict):
            continue
        name = str(config.get("config_name", "<unnamed>"))
        _walk(config.get("data_files"), ["data_files"], name)

    return entries
112
+
113
+
114
def replace_once(text: str, old: str, new: str) -> str:
    """Replace the first occurrence of *old* in *text* with *new*.

    Raises:
        ValueError: If *old* does not occur in *text*.
    """
    if old not in text:
        raise ValueError(f"Value '{old}' was not found in front matter.")
    # Only the first occurrence is substituted so duplicate URLs in the
    # front matter are rewritten one at a time.
    return text.replace(old, new, 1)
119
+
120
+
121
def render_rewrite_template(
    template: str, result: DownloadResult, output_dir: Path
) -> str:
    """Expand *template* placeholders using a completed download.

    Supported placeholders: ``{config}``, ``{filename}``, ``{stem}``,
    ``{netloc}``, ``{remote_path}``, ``{relative_path}``, ``{local_path}``,
    ``{output_dir}``.

    Raises:
        typer.BadParameter: When the template references unknown keys.
    """
    entry = result.entry
    host, remote = entry.parsed()
    remote_name = Path(remote)
    values = {
        "config": entry.config_name,
        "filename": remote_name.name,
        "stem": remote_name.stem,
        "netloc": host,
        "remote_path": remote,
        "relative_path": result.path.relative_to(output_dir).as_posix(),
        "local_path": result.path.as_posix(),
        "output_dir": output_dir.as_posix(),
    }
    try:
        return template.format(**values)
    except KeyError as exc:
        missing = ", ".join(sorted(set(exc.args)))
        raise typer.BadParameter(f"Template is missing keys: {missing}") from exc
142
+
143
+
144
def resolve_destination(entry: DataFileEntry, output_dir: Path) -> Path:
    """Return the absolute local target: ``<output_dir>/<config>/<filename>``."""
    remote = entry.parsed()[1]
    # Fall back to the raw remote path when it has no final component.
    name = Path(remote).name or remote
    return (output_dir / entry.config_name / name).resolve()
149
+
150
+
151
async def download_one(
    entry: DataFileEntry,
    output_dir: Path,
    session: aiohttp.ClientSession,
    semaphore: asyncio.Semaphore,
    skip_existing: bool,
    progress: Progress,
    order: int,
) -> DownloadResult:
    """Download a single entry to its destination under *output_dir*.

    Streams the response body to a ``.part`` temp file and renames it into
    place on success, so a crashed download never leaves a truncated file
    at the final path. Never raises: failures are reported through the
    returned DownloadResult's ``success``/``error`` fields.
    """
    _, remote_path = entry.parsed()
    filename = Path(remote_path).name or remote_path
    # Truncate long filenames so the progress-bar description stays compact.
    terse_name = (filename[:32] + "…") if len(filename) > 33 else filename
    description = f"{entry.config_name}: {terse_name}"
    async with semaphore:
        task_id: int | None = None
        destination = resolve_destination(entry, output_dir)
        tmp_path = destination.parent / f"{destination.name}.part"
        try:
            destination.parent.mkdir(parents=True, exist_ok=True)
            task_id = progress.add_task(description, total=0, start=False)
            progress.start_task(task_id)
            async with session.get(entry.url) as response:
                response.raise_for_status()
                total_bytes = response.content_length or 0
                # The skip check runs after the GET so the server-reported
                # Content-Length can be compared against the existing file's
                # size; the response body is never read when skipping.
                if (
                    skip_existing
                    and destination.exists()
                    and total_bytes
                    and destination.stat().st_size == total_bytes
                ):
                    if task_id is not None:
                        progress.remove_task(task_id)
                        # Cleared so the finally block does not remove it twice.
                        task_id = None
                    return DownloadResult(
                        entry=entry,
                        path=destination,
                        success=True,
                        skipped=True,
                        order=order,
                    )
                if total_bytes:
                    progress.update(task_id, total=total_bytes)
                with tmp_path.open("wb") as handle:
                    # 1 << 17 = 128 KiB chunks.
                    async for chunk in response.content.iter_chunked(1 << 17):
                        handle.write(chunk)
                        progress.update(task_id, advance=len(chunk))
            # Atomic-rename the completed temp file into its final place.
            tmp_path.rename(destination)
            return DownloadResult(
                entry=entry,
                path=destination,
                success=True,
                skipped=False,
                order=order,
            )
        except Exception as exc:  # noqa: BLE001
            # Best-effort cleanup of the partial file; the error itself is
            # carried back to the caller in the result object.
            with contextlib.suppress(FileNotFoundError):
                tmp_path.unlink()
            return DownloadResult(
                entry=entry,
                path=destination,
                success=False,
                skipped=False,
                error=exc,
                order=order,
            )
        finally:
            if task_id is not None:
                progress.remove_task(task_id)
219
+
220
+
221
async def perform_downloads(
    entries: Sequence[DataFileEntry],
    output_dir: Path,
    max_concurrency: int,
    timeout: float,
    skip_existing: bool,
) -> List[DownloadResult]:
    """Download all *entries* concurrently, returning results in input order.

    Concurrency is bounded by a semaphore of size *max_concurrency*; each
    transfer gets its own row in a shared Rich progress display.
    """
    if not entries:
        return []

    gate = asyncio.Semaphore(max_concurrency)
    client_timeout = aiohttp.ClientTimeout(total=timeout)
    bar = Progress(
        TextColumn("{task.description}"),
        BarColumn(bar_width=None),
        DownloadColumn(),
        TransferSpeedColumn(),
        TimeElapsedColumn(),
        console=console,
    )

    collected: List[DownloadResult] = []
    async with aiohttp.ClientSession(timeout=client_timeout) as session:
        with bar:
            pending = [
                asyncio.create_task(
                    download_one(
                        entry=entry,
                        output_dir=output_dir,
                        session=session,
                        semaphore=gate,
                        skip_existing=skip_existing,
                        progress=bar,
                        order=position,
                    )
                )
                for position, entry in enumerate(entries)
            ]
            # Consume completions as they arrive; order is restored below.
            for finished in asyncio.as_completed(pending):
                collected.append(await finished)

    return sorted(collected, key=lambda item: item.order)
266
+
267
+
268
async def verify_one(
    entry: DataFileEntry,
    session: aiohttp.ClientSession,
    semaphore: asyncio.Semaphore,
    order: int,
) -> VerifyResult:
    """Check that *entry.url* is reachable.

    Tries a HEAD request first and falls back to GET when HEAD fails with
    an exception or a 405/501 (method not supported) status.
    """
    async with semaphore:
        failure_detail = ""
        for method in ("HEAD", "GET"):
            try:
                async with session.request(
                    method, entry.url, allow_redirects=True
                ) as response:
                    code = response.status
            except Exception as exc:  # noqa: BLE001
                # Network-level failure: remember it and try the next method
                # (or fall through to the final failure result after GET).
                failure_detail = str(exc)
                continue
            if code < 400:
                return VerifyResult(entry=entry, ok=True, status=code, order=order)
            if method == "HEAD" and code in {405, 501}:
                failure_detail = f"{method} returned {code}; retrying with GET."
                continue
            return VerifyResult(
                entry=entry,
                ok=False,
                status=code,
                detail=f"{method} -> {code}",
                order=order,
            )
        return VerifyResult(
            entry=entry,
            ok=False,
            status=None,
            detail=failure_detail or "Unknown error",
            order=order,
        )
305
+
306
+
307
async def perform_verification(
    entries: Sequence[DataFileEntry],
    max_concurrency: int,
    timeout: float,
) -> List[VerifyResult]:
    """Verify every entry URL concurrently, returning results in input order."""
    if not entries:
        return []

    gate = asyncio.Semaphore(max_concurrency)
    client_timeout = aiohttp.ClientTimeout(total=timeout)
    bar = Progress(
        TextColumn("{task.description}"),
        BarColumn(),
        TextColumn("{task.completed}/{task.total}"),
        TimeElapsedColumn(),
        console=console,
    )

    outcomes: List[VerifyResult] = []
    async with aiohttp.ClientSession(timeout=client_timeout) as session:
        with bar:
            overall = bar.add_task("Verifying dataset URLs", total=len(entries))
            pending = [
                asyncio.create_task(verify_one(entry, session, gate, order=position))
                for position, entry in enumerate(entries)
            ]
            # One tick on the shared bar per completed check.
            for finished in asyncio.as_completed(pending):
                outcomes.append(await finished)
                bar.advance(overall)

    return sorted(outcomes, key=lambda item: item.order)
340
+
341
+
342
def rewrite_readme(
    readme_path: Path,
    front_matter_text: str,
    body_text: str,
    results: Sequence[DownloadResult],
    template: str,
    output_dir: Path,
) -> None:
    """Substitute each downloaded URL in the front matter and save the README.

    Every result's original URL is replaced (first occurrence only) by the
    rendered template; a summary table of old/new URLs is printed afterwards.
    """
    front = front_matter_text
    rows: list[tuple[DownloadResult, str]] = []
    for item in results:
        rendered = render_rewrite_template(template, item, output_dir=output_dir)
        front = replace_once(front, item.entry.url, rendered)
        rows.append((item, rendered))

    readme_path.write_text(f"---\n{front}\n---\n{body_text}", encoding="utf-8")

    summary = Table(title="README URL updates")
    summary.add_column("Config")
    summary.add_column("Old URL", overflow="fold")
    summary.add_column("New URL", overflow="fold")
    for item, rendered in rows:
        summary.add_row(item.entry.config_name, item.entry.url, rendered)
    console.print(summary)
366
+
367
+
368
def ensure_all_success(results: Sequence[DownloadResult]) -> bool:
    """Return True when every download in *results* succeeded (True if empty)."""
    for result in results:
        if not result.success:
            return False
    return True
370
+
371
+
372
@app.command()
def download(
    output_dir: Annotated[
        Path,
        typer.Option(
            "--output-dir",
            "-o",
            help="Directory where files will be stored.",
            resolve_path=True,
        ),
    ],
    readme_path: Annotated[
        Path,
        typer.Option(
            "--readme-path",
            "-r",
            help="Path to the README file with YAML front matter.",
            exists=True,
            resolve_path=True,
            dir_okay=False,
        ),
    ] = Path("README.md"),
    max_concurrency: Annotated[
        int,
        typer.Option(
            "--max-concurrency",
            "-c",
            min=1,
            show_default=True,
            help="Maximum number of concurrent downloads.",
        ),
    ] = 4,
    timeout: Annotated[
        float,
        typer.Option(
            "--timeout",
            min=1.0,
            show_default=True,
            help="Request timeout in seconds.",
        ),
    ] = 600.0,
    rewrite_template: Annotated[
        str | None,
        typer.Option(
            "--rewrite-template",
            help=(
                "Optional template for rewriting URLs in README. "
                # Fixed: the placeholder list previously showed a garbled
                # "(unknown)" entry where {filename} belongs; {filename} is
                # supported by render_rewrite_template's context.
                "Placeholders: {relative_path}, {remote_path}, {filename}, {stem}, {config}, {netloc}, {local_path}, {output_dir}."
            ),
        ),
    ] = None,
    dry_run: Annotated[
        bool,
        typer.Option(
            "--dry-run/--no-dry-run",
            show_default=True,
            help="Preview downloads and README rewrites without performing any changes.",
        ),
    ] = False,
    skip_existing: Annotated[
        bool,
        typer.Option(
            "--skip-existing/--no-skip-existing",
            show_default=True,
            help="Skip downloading files that already exist locally.",
        ),
    ] = True,
) -> None:
    """Download every data-file URL declared in the README front matter.

    Reads ``configs[*].data_files`` from the YAML front matter, downloads
    each URL into ``<output-dir>/<config>/<filename>``, and optionally
    rewrites the README URLs via --rewrite-template (only when all
    downloads succeed). Exits with code 1 when any download fails.
    """
    front_matter_text, body_text = read_front_matter(readme_path)
    entries = load_data_file_entries(front_matter_text)
    if not entries:
        console.print(
            "[yellow]No data_files entries found in README front matter.[/yellow]"
        )
        raise typer.Exit(code=0)

    console.print(
        f"[cyan]Found {len(entries)} data file URLs across {readme_path}.[/cyan]"
    )
    output_dir = output_dir.resolve()

    if dry_run:
        # Render the full plan — destinations plus (optionally) rewritten
        # URLs — without touching the filesystem or the network.
        preview = Table(title="Download plan (dry-run)")
        preview.add_column("Config")
        preview.add_column("Local file", overflow="fold")
        preview.add_column("Source URL", overflow="fold")
        preview.add_column(
            "Rewritten URL" if rewrite_template else "Rewritten URL (n/a)",
            overflow="fold",
        )
        for order, entry in enumerate(entries):
            destination = resolve_destination(entry, output_dir)
            try:
                relative = destination.relative_to(output_dir)
            except ValueError:
                relative = Path(destination)
            new_url = "-"
            if rewrite_template:
                # A synthetic successful result lets the same template
                # renderer be reused for the preview.
                fake_result = DownloadResult(
                    entry=entry,
                    path=destination,
                    success=True,
                    skipped=False,
                    order=order,
                )
                new_url = render_rewrite_template(
                    rewrite_template, fake_result, output_dir
                )
            preview.add_row(
                entry.config_name,
                relative.as_posix(),
                entry.url,
                new_url,
            )
        console.print(preview)
        console.print(
            "[yellow]Dry run: no files downloaded and README left unchanged.[/yellow]"
        )
        return

    output_dir.mkdir(parents=True, exist_ok=True)

    results = asyncio.run(
        perform_downloads(
            entries=entries,
            output_dir=output_dir,
            max_concurrency=max_concurrency,
            timeout=timeout,
            skip_existing=skip_existing,
        )
    )

    successes = sum(1 for item in results if item.success)
    failures = len(results) - successes
    skipped = sum(1 for item in results if item.skipped)
    console.print(
        f"[green]{successes}[/green] succeeded, [red]{failures}[/red] failed, [yellow]{skipped}[/yellow] skipped."
    )

    if failures:
        for item in results:
            if not item.success:
                console.print(f"[red]Error:[/red] {item.entry.url} -> {item.error}")

    if rewrite_template:
        # Only rewrite when every download succeeded, so the README never
        # points at files that are missing locally.
        if not ensure_all_success(results):
            console.print(
                "[red]README rewrite skipped because some downloads failed.[/red]"
            )
            raise typer.Exit(code=1)
        rewrite_readme(
            readme_path,
            front_matter_text,
            body_text,
            results,
            rewrite_template,
            output_dir=output_dir,
        )
        console.print("[green]README.md updated with new URLs.[/green]")

    if failures:
        raise typer.Exit(code=1)
534
+
535
+
536
@app.command()
def verify(
    readme_path: Annotated[
        Path,
        typer.Option(
            "--readme-path",
            "-r",
            help="Path to the README file with YAML front matter.",
            exists=True,
            resolve_path=True,
            dir_okay=False,
        ),
    ] = Path("README.md"),
    max_concurrency: Annotated[
        int,
        typer.Option(
            "--max-concurrency",
            "-c",
            min=1,
            show_default=True,
            help="Maximum concurrent verification requests.",
        ),
    ] = 8,
    timeout: Annotated[
        float,
        typer.Option(
            "--timeout",
            min=1.0,
            show_default=True,
            help="Request timeout in seconds.",
        ),
    ] = 60.0,
) -> None:
    """Check that every data-file URL in the README front matter is reachable.

    Prints a per-URL result table; exits with code 1 when any URL fails
    verification and 0 otherwise (including when no entries exist).
    """
    front_matter_text, _ = read_front_matter(readme_path)
    entries = load_data_file_entries(front_matter_text)
    if not entries:
        console.print(
            "[yellow]No data_files entries found in README front matter.[/yellow]"
        )
        raise typer.Exit(code=0)

    console.print(f"[cyan]Verifying {len(entries)} URLs from {readme_path}.[/cyan]")
    results = asyncio.run(
        perform_verification(
            entries=entries,
            max_concurrency=max_concurrency,
            timeout=timeout,
        )
    )

    table = Table(title="URL verification results")
    table.add_column("Config")
    table.add_column("Status")
    table.add_column("Detail", overflow="fold")
    failed = 0
    # Failures sort first so they are easiest to spot; ties keep input order.
    for result in sorted(results, key=lambda r: (not r.ok, r.order)):
        status_text = str(result.status) if result.status is not None else "-"
        if result.ok:
            table.add_row(
                result.entry.config_name,
                f"[green]{status_text}[/green]",
                result.entry.url,
            )
        else:
            failed += 1
            detail = result.detail or result.entry.url
            table.add_row(
                result.entry.config_name,
                f"[red]{status_text}[/red]",
                f"{result.entry.url}\n{detail}",
            )

    console.print(table)
    if failed:
        console.print(f"[red]{failed} URLs failed verification.[/red]")
        raise typer.Exit(code=1)
    console.print("[green]All URLs verified successfully.[/green]")
613
+
614
+
615
+ if __name__ == "__main__":
616
+ app()