davanstrien HF Staff commited on
Commit
c17d549
·
verified ·
1 Parent(s): 5097f77

Upload tile_iiif.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. tile_iiif.py +401 -0
tile_iiif.py ADDED
@@ -0,0 +1,401 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # /// script
2
+ # requires-python = ">=3.10"
3
+ # dependencies = [
4
+ # "pyvips[binary]",
5
+ # "huggingface-hub",
6
+ # ]
7
+ # ///
8
+ """Generate IIIF Level 0 static tiles from images in a HF Bucket.
9
+
10
+ Downloads source images from a bucket, generates IIIF Image API 3.0 tiles
11
+ using libvips, creates a IIIF Presentation v3 manifest, and syncs everything
12
+ to an output bucket for static serving via HF CDN.
13
+
14
+ Usage:
15
+ uv run tile_iiif.py --source-bucket org/source --output-bucket org/tiles
16
+
17
+ HF Jobs:
18
+ hf jobs uv run tile_iiif.py --source-bucket org/source --output-bucket org/tiles
19
+ """
20
+
21
+ import argparse
22
+ import json
23
+ import shutil
24
+ import tempfile
25
+ import time
26
+ from concurrent.futures import ThreadPoolExecutor, as_completed
27
+ from pathlib import Path
28
+
29
+ import pyvips
30
+ from huggingface_hub import HfApi, sync_bucket
31
+
32
+ IMAGE_EXTENSIONS = {".jpg", ".jpeg", ".tif", ".tiff", ".png", ".webp"}
33
+
34
+
35
def generate_tiles(image_path: Path, output_dir: Path, tile_size: int) -> dict:
    """Produce IIIF Image API 3.0 Level 0 tiles for one image.

    Runs libvips ``dzsave`` with the ``iiif3`` layout, which emits the full
    tile pyramid plus an ``info.json``, then writes a ``full/max/0/default.jpg``
    rendition by hand (libvips omits it) so manifest image bodies resolve.

    Returns a dict with the image's stem (``name``), ``width``, and ``height``.
    """
    img = pyvips.Image.new_from_file(str(image_path))
    stem = image_path.stem

    # Full tile tree + info.json via the iiif3 layout.
    img.dzsave(
        str(output_dir / stem),
        layout="iiif3",
        tile_size=tile_size,
        suffix=".jpg[Q=85]",
        overlap=0,
    )

    # Hand-write the full-size rendition that libvips skips.
    full_max_dir = output_dir / stem / "full" / "max" / "0"
    full_max_dir.mkdir(parents=True, exist_ok=True)
    img.jpegsave(str(full_max_dir / "default.jpg"), Q=85)

    return {"name": stem, "width": img.width, "height": img.height}
59
+
60
+
61
def patch_info_json(tile_dir: Path, image_name: str, base_url: str):
    """Point the ``id`` field of an image's info.json at the bucket base URL."""
    path = tile_dir / image_name / "info.json"
    data = json.loads(path.read_text())
    data["id"] = f"{base_url}/{image_name}"
    path.write_text(json.dumps(data, indent=2))
67
+
68
+
69
def generate_manifest(
    images: list[dict], base_url: str, collection_name: str
) -> dict:
    """Assemble a minimal IIIF Presentation v3 manifest for the tiled images.

    Each image dict (``name``/``width``/``height``) becomes one Canvas with a
    single painting Annotation whose body carries a level-0 ImageService3.
    """

    def _canvas(img: dict) -> dict:
        # One Canvas per image; ids are derived from the image name.
        canvas_id = f"{base_url}/{img['name']}/canvas"
        service_id = f"{base_url}/{img['name']}"
        body = {
            "id": f"{service_id}/full/max/0/default.jpg",
            "type": "Image",
            "format": "image/jpeg",
            "width": img["width"],
            "height": img["height"],
            "service": [
                {
                    "id": service_id,
                    "type": "ImageService3",
                    "profile": "level0",
                }
            ],
        }
        annotation = {
            "id": f"{canvas_id}/page/annotation",
            "type": "Annotation",
            "motivation": "painting",
            "body": body,
            "target": canvas_id,
        }
        return {
            "id": canvas_id,
            "type": "Canvas",
            "width": img["width"],
            "height": img["height"],
            "label": {"en": [img["name"]]},
            "items": [
                {
                    "id": f"{canvas_id}/page",
                    "type": "AnnotationPage",
                    "items": [annotation],
                }
            ],
        }

    return {
        "@context": "http://iiif.io/api/presentation/3/context.json",
        "id": f"{base_url}/manifest.json",
        "type": "Manifest",
        "label": {"en": [collection_name]},
        "items": [_canvas(img) for img in images],
    }
126
+
127
+
128
def collect_tile_files(tile_dir: Path, image_name: str) -> list[tuple[str, str]]:
    """Gather ``(local_path, remote_path)`` pairs for every file under an
    image's tile directory; remote paths are relative to *tile_dir*."""
    image_root = tile_dir / image_name
    return [
        (str(entry), str(entry.relative_to(tile_dir)))
        for entry in image_root.rglob("*")
        if entry.is_file()
    ]
137
+
138
+
139
def main():
    """CLI entry point: parse arguments, resolve the IIIF base URL, dispatch.

    Bucket-to-bucket runs (no local source dir) use streaming mode, which
    processes one image at a time to keep local disk usage low; everything
    else goes through batch mode.
    """
    parser = argparse.ArgumentParser(
        description="Generate IIIF tiles from images in a HF Bucket"
    )
    parser.add_argument(
        "--source-bucket",
        default=None,
        help="Source bucket with images (e.g., org/iiif-source)",
    )
    parser.add_argument(
        "--output-bucket",
        default=None,
        help="Output bucket for tiles (e.g., org/iiif-tiles)",
    )
    parser.add_argument(
        "--tile-size", type=int, default=512, help="Tile size in pixels (default: 512)"
    )
    parser.add_argument(
        "--base-url",
        default=None,
        help="Base URL for IIIF ids (default: https://huggingface.co/buckets/{output-bucket}/resolve)",
    )
    parser.add_argument(
        "--collection-name",
        default="IIIF Collection",
        help="Name for the IIIF manifest",
    )
    parser.add_argument(
        "--source-dir",
        default=None,
        help="Use a local directory as source instead of a bucket",
    )
    parser.add_argument(
        "--output-dir",
        default=None,
        help="Write tiles to a local directory instead of a bucket",
    )
    parser.add_argument(
        "--workers",
        type=int,
        default=3,
        help="Concurrent workers for bucket-to-bucket mode (default: 3)",
    )
    args = parser.parse_args()

    if not (args.source_dir or args.source_bucket):
        parser.error("Either --source-bucket or --source-dir is required")

    # Resolve the public base URL used for every IIIF id in tiles + manifest.
    if args.base_url:
        base_url = args.base_url
    elif args.output_bucket:
        base_url = f"https://huggingface.co/buckets/{args.output_bucket}/resolve"
    else:
        base_url = "http://localhost:8000"  # for local testing

    api = HfApi()
    if args.source_bucket and args.output_bucket and not args.source_dir:
        # Streaming mode: one image at a time to minimize local storage.
        _run_streaming(api, args, base_url)
    else:
        # Batch mode: local source and/or local output.
        _run_batch(api, args, base_url)
203
+
204
+
205
def _process_one_image(
    api: HfApi,
    source_bucket: str,
    output_bucket: str,
    bucket_file,
    work_dir: Path,
    tile_size: int,
    base_url: str,
    index: int,
    total: int,
) -> dict:
    """Download, tile, upload, and clean up a single image.

    Runs in a worker thread in streaming mode. Each call works inside its
    own ``worker_{index}`` subdirectory of *work_dir* (``index`` is unique
    per image, so concurrent workers never collide). The scratch directory
    is now removed in a ``finally`` block so a failure during download,
    tiling, or upload no longer leaks the downloaded image and generated
    tiles on disk while the remaining workers keep running.

    Returns the image metadata dict produced by ``generate_tiles``
    (``name``/``width``/``height``).
    """
    img_name = Path(bucket_file.path).name
    # Each worker gets its own subdirectory to avoid conflicts
    worker_dir = work_dir / f"worker_{index}"
    worker_dir.mkdir(parents=True, exist_ok=True)

    try:
        # Download the source image from the bucket.
        t1 = time.time()
        local_img = worker_dir / img_name
        api.download_bucket_files(
            source_bucket,
            files=[(bucket_file, str(local_img))],
        )
        t_dl = time.time() - t1

        # Tile it and point the info.json ids at the public base URL.
        t1 = time.time()
        tile_dir = worker_dir / "tiles"
        tile_dir.mkdir(exist_ok=True)
        meta = generate_tiles(local_img, tile_dir, tile_size)
        patch_info_json(tile_dir, meta["name"], base_url)
        # Drop libvips' sidecar metadata files before upload.
        for xml in tile_dir.rglob("vips-properties.xml"):
            xml.unlink()
        t_tile = time.time() - t1

        # Upload every generated tile file to the output bucket.
        t1 = time.time()
        tile_pairs = collect_tile_files(tile_dir, meta["name"])
        api.batch_bucket_files(output_bucket, add=tile_pairs)
        t_ul = time.time() - t1

        print(
            f" [{index + 1}/{total}] {img_name}: "
            f"download {t_dl:.1f}s, tile {t_tile:.1f}s, "
            f"upload {len(tile_pairs)} files {t_ul:.1f}s"
        )
        return meta
    finally:
        # Always reclaim the scratch space — success or failure — so long
        # runs don't accumulate leftover downloads/tiles.
        shutil.rmtree(worker_dir, ignore_errors=True)
257
+
258
+
259
def _run_streaming(api: HfApi, args, base_url: str):
    """Stream images through download → tile → upload with worker threads."""
    start = time.time()
    n_workers = args.workers

    # Enumerate image files in the source bucket.
    print(f"Listing images in bucket: {args.source_bucket}")
    entries = [
        entry
        for entry in api.list_bucket_tree(args.source_bucket)
        if hasattr(entry, "path")
        and Path(entry.path).suffix.lower() in IMAGE_EXTENSIONS
    ]

    if not entries:
        print("No images found in source bucket")
        return

    entries.sort(key=lambda entry: entry.path)
    count = len(entries)
    print(f"Found {count} images, processing with {n_workers} worker(s)")

    with tempfile.TemporaryDirectory() as tmp:
        scratch = Path(tmp)

        if n_workers == 1:
            # Sequential path — keeps the progress log in order.
            image_metadata = [
                _process_one_image(
                    api, args.source_bucket, args.output_bucket,
                    entry, scratch, args.tile_size, base_url, i, count,
                )
                for i, entry in enumerate(entries)
            ]
        else:
            # Fan out over a thread pool; each result lands at its own index
            # so the manifest keeps the sorted image order.
            image_metadata = [None] * count
            with ThreadPoolExecutor(max_workers=n_workers) as pool:
                future_to_index = {
                    pool.submit(
                        _process_one_image,
                        api, args.source_bucket, args.output_bucket,
                        entry, scratch, args.tile_size, base_url, i, count,
                    ): i
                    for i, entry in enumerate(entries)
                }
                for future in as_completed(future_to_index):
                    image_metadata[future_to_index[future]] = future.result()

    # Build the manifest and push it straight to the output bucket.
    manifest = generate_manifest(image_metadata, base_url, args.collection_name)
    payload = json.dumps(manifest, indent=2)
    api.batch_bucket_files(
        args.output_bucket,
        add=[(payload.encode(), "manifest.json")],
    )

    elapsed = time.time() - start
    print(f"\nDone in {elapsed:.1f}s! View your manifest at:")
    print(f" {base_url}/manifest.json")
    print("\nOpen in a IIIF viewer:")
    manifest_url = f"{base_url}/manifest.json"
    print(f" https://projectmirador.org/embed/?iiif-content={manifest_url}")
323
+
324
+
325
def _run_batch(api: HfApi, args, base_url: str):
    """Batch mode: gather every image, tile them all, then sync the output.

    Used whenever a local directory is the source and/or the destination.
    """
    start = time.time()

    with tempfile.TemporaryDirectory() as tmp:
        tmp_path = Path(tmp)
        source_dir = tmp_path / "source"
        tile_dir = Path(args.output_dir) if args.output_dir else tmp_path / "tiles"
        source_dir.mkdir(exist_ok=True)
        tile_dir.mkdir(parents=True, exist_ok=True)

        # 1. Locate source images: local directory, or sync down a bucket.
        if args.source_dir:
            source_dir = Path(args.source_dir)
            print(f"Using local source: {source_dir}")
        else:
            print(f"Syncing from bucket: {args.source_bucket}")
            sync_bucket(
                f"hf://buckets/{args.source_bucket}",
                str(source_dir),
            )

        # 2. Pick out image files by extension.
        images = [
            entry
            for entry in source_dir.iterdir()
            if entry.is_file() and entry.suffix.lower() in IMAGE_EXTENSIONS
        ]

        if not images:
            print(f"No images found in {source_dir}")
            return

        print(f"Found {len(images)} images")

        # 3. Tile each image and point its info.json ids at base_url.
        image_metadata = []
        for img_path in sorted(images):
            print(f" Tiling: {img_path.name}")
            meta = generate_tiles(img_path, tile_dir, args.tile_size)
            patch_info_json(tile_dir, meta["name"], base_url)
            image_metadata.append(meta)

        # 4. Strip libvips' sidecar metadata files.
        for sidecar in tile_dir.rglob("vips-properties.xml"):
            sidecar.unlink()

        # 5. Write the IIIF manifest alongside the tiles.
        manifest = generate_manifest(image_metadata, base_url, args.collection_name)
        manifest_path = tile_dir / "manifest.json"
        manifest_path.write_text(json.dumps(manifest, indent=2))
        print(f"Manifest written: {manifest_path}")

        # 6. Publish to the output bucket, or report where the tiles landed.
        if args.output_bucket:
            print(f"Syncing tiles to bucket: {args.output_bucket}")
            sync_bucket(
                str(tile_dir),
                f"hf://buckets/{args.output_bucket}",
            )
            print("\nDone! View your manifest at:")
            print(f" {base_url}/manifest.json")
            print("\nOpen in a IIIF viewer:")
            manifest_url = f"{base_url}/manifest.json"
            print(f" https://projectmirador.org/embed/?iiif-content={manifest_url}")
        elif args.output_dir:
            print(f"\nTiles written to: {tile_dir}")
            print(f"To test locally: cd {tile_dir} && python -m http.server")
            print("Then open: http://localhost:8000/manifest.json")
        else:
            print("\nNo --output-bucket or --output-dir specified, tiles in temp dir (will be deleted)")

    elapsed = time.time() - start
    print(f"\nTotal time: {elapsed:.1f}s")
398
+
399
+
400
# Script entry point — run the CLI only when executed directly.
if __name__ == "__main__":
    main()