Add CellxGene Census lung Zarr dataset
Browse files
- README.md +4 -4
- build_lung_zarr.py +28 -0
- dataset_summary.json +2 -2
README.md
CHANGED
|
@@ -197,13 +197,13 @@ sc.tl.leiden(adata, resolution=0.5)
|
|
| 197 |
|
| 198 |
Measured locally while building this dataset:
|
| 199 |
|
| 200 |
-
- open Zarr group: **0.
|
| 201 |
-
- read one `X[chunk]` (1000×1000): **0.
|
| 202 |
|
| 203 |
```json
|
| 204 |
{
|
| 205 |
-
"open_seconds": 0.
|
| 206 |
-
"read_chunk_seconds": 0.
|
| 207 |
"chunk_shape": [
|
| 208 |
1000,
|
| 209 |
1000
|
|
|
|
| 197 |
|
| 198 |
Measured locally while building this dataset:
|
| 199 |
|
| 200 |
+
- open Zarr group: **0.0012 s**
|
| 201 |
+
- read one `X[chunk]` (1000×1000): **0.0061 s**
|
| 202 |
|
| 203 |
```json
|
| 204 |
{
|
| 205 |
+
"open_seconds": 0.0011517200618982315,
|
| 206 |
+
"read_chunk_seconds": 0.006099492311477661,
|
| 207 |
"chunk_shape": [
|
| 208 |
1000,
|
| 209 |
1000
|
build_lung_zarr.py
CHANGED
|
@@ -38,6 +38,31 @@ def _utc_now_iso() -> str:
|
|
| 38 |
return _dt.datetime.now(tz=_dt.timezone.utc).isoformat()
|
| 39 |
|
| 40 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 41 |
def _chunk_1d(n: int, target: int = 100_000) -> int:
|
| 42 |
return int(min(max(1, target), max(1, n)))
|
| 43 |
|
|
@@ -339,6 +364,7 @@ def main() -> None:
|
|
| 339 |
ap.add_argument("--seed", type=int, default=0, help="Random seed for subsampling.")
|
| 340 |
ap.add_argument("--n-hvg", type=int, default=0, help="Select this many HVGs (0 = keep all genes).")
|
| 341 |
ap.add_argument("--only-readme", action="store_true", help="Only (re)generate README+summary from an existing zarr store.")
|
|
|
|
| 342 |
ap.add_argument("--upload", action="store_true", help="Upload folder to HuggingFace Hub (requires HF_TOKEN env var).")
|
| 343 |
args = ap.parse_args()
|
| 344 |
|
|
@@ -423,6 +449,7 @@ def main() -> None:
|
|
| 423 |
raise SystemExit("HF_TOKEN env var is required for --upload.")
|
| 424 |
from huggingface_hub import create_repo, upload_folder
|
| 425 |
|
|
|
|
| 426 |
print(f"[upload] ensuring dataset repo exists: {args.repo_id}", flush=True)
|
| 427 |
create_repo(repo_id=args.repo_id, repo_type="dataset", exist_ok=True, token=token)
|
| 428 |
print(f"[upload] uploading folder: {out_dir}", flush=True)
|
|
@@ -587,6 +614,7 @@ def main() -> None:
|
|
| 587 |
|
| 588 |
from huggingface_hub import create_repo, upload_folder
|
| 589 |
|
|
|
|
| 590 |
print(f"[upload] ensuring dataset repo exists: {args.repo_id}", flush=True)
|
| 591 |
create_repo(repo_id=args.repo_id, repo_type="dataset", exist_ok=True, token=token)
|
| 592 |
print(f"[upload] uploading folder: {out_dir}", flush=True)
|
|
|
|
| 38 |
return _dt.datetime.now(tz=_dt.timezone.utc).isoformat()
|
| 39 |
|
| 40 |
|
| 41 |
+
def _configure_hf_http_timeout(timeout_seconds: float) -> None:
|
| 42 |
+
"""
|
| 43 |
+
HuggingFace Hub default HTTP read timeout is small (~10s) and can cause
|
| 44 |
+
`httpx.ReadTimeout` during large/many-file commits. Increase it by setting
|
| 45 |
+
a custom shared httpx client factory.
|
| 46 |
+
"""
|
| 47 |
+
try:
|
| 48 |
+
import httpx
|
| 49 |
+
from huggingface_hub.utils import set_client_factory
|
| 50 |
+
from huggingface_hub.utils._http import hf_request_event_hook
|
| 51 |
+
|
| 52 |
+
def _factory() -> httpx.Client:
|
| 53 |
+
timeout = httpx.Timeout(timeout_seconds, write=timeout_seconds)
|
| 54 |
+
return httpx.Client(
|
| 55 |
+
event_hooks={"request": [hf_request_event_hook]},
|
| 56 |
+
follow_redirects=True,
|
| 57 |
+
timeout=timeout,
|
| 58 |
+
)
|
| 59 |
+
|
| 60 |
+
set_client_factory(_factory)
|
| 61 |
+
except Exception:
|
| 62 |
+
# Best-effort: if this fails for any reason, fall back to defaults.
|
| 63 |
+
return
|
| 64 |
+
|
| 65 |
+
|
| 66 |
def _chunk_1d(n: int, target: int = 100_000) -> int:
|
| 67 |
return int(min(max(1, target), max(1, n)))
|
| 68 |
|
|
|
|
| 364 |
ap.add_argument("--seed", type=int, default=0, help="Random seed for subsampling.")
|
| 365 |
ap.add_argument("--n-hvg", type=int, default=0, help="Select this many HVGs (0 = keep all genes).")
|
| 366 |
ap.add_argument("--only-readme", action="store_true", help="Only (re)generate README+summary from an existing zarr store.")
|
| 367 |
+
ap.add_argument("--hf-timeout", type=float, default=600.0, help="HF Hub HTTP read/write timeout in seconds (upload only).")
|
| 368 |
ap.add_argument("--upload", action="store_true", help="Upload folder to HuggingFace Hub (requires HF_TOKEN env var).")
|
| 369 |
args = ap.parse_args()
|
| 370 |
|
|
|
|
| 449 |
raise SystemExit("HF_TOKEN env var is required for --upload.")
|
| 450 |
from huggingface_hub import create_repo, upload_folder
|
| 451 |
|
| 452 |
+
_configure_hf_http_timeout(args.hf_timeout)
|
| 453 |
print(f"[upload] ensuring dataset repo exists: {args.repo_id}", flush=True)
|
| 454 |
create_repo(repo_id=args.repo_id, repo_type="dataset", exist_ok=True, token=token)
|
| 455 |
print(f"[upload] uploading folder: {out_dir}", flush=True)
|
|
|
|
| 614 |
|
| 615 |
from huggingface_hub import create_repo, upload_folder
|
| 616 |
|
| 617 |
+
_configure_hf_http_timeout(args.hf_timeout)
|
| 618 |
print(f"[upload] ensuring dataset repo exists: {args.repo_id}", flush=True)
|
| 619 |
create_repo(repo_id=args.repo_id, repo_type="dataset", exist_ok=True, token=token)
|
| 620 |
print(f"[upload] uploading folder: {out_dir}", flush=True)
|
dataset_summary.json
CHANGED
|
@@ -590,8 +590,8 @@
|
|
| 590 |
"ulcerative colitis"
|
| 591 |
],
|
| 592 |
"benchmark": {
|
| 593 |
-
"open_seconds": 0.
|
| 594 |
-
"read_chunk_seconds": 0.
|
| 595 |
"chunk_shape": [
|
| 596 |
1000,
|
| 597 |
1000
|
|
|
|
| 590 |
"ulcerative colitis"
|
| 591 |
],
|
| 592 |
"benchmark": {
|
| 593 |
+
"open_seconds": 0.0011517200618982315,
|
| 594 |
+
"read_chunk_seconds": 0.006099492311477661,
|
| 595 |
"chunk_shape": [
|
| 596 |
1000,
|
| 597 |
1000
|