File size: 1,314 Bytes
23680f2
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
#!/usr/bin/env python
"""Demo: CLIP (Euclidean) + HyCoCLIP (Poincaré) on CIFAR-100."""

import hyperview as hv
import hyperview.embeddings.providers.hycoclip  # noqa: F401
from hyperview.embeddings.providers import ModelSpec

# --- Demo configuration -------------------------------------------------
# Name under which HyperView stores this (non-persisted) dataset session.
DATASET_NAME = "cifar100_clip_hycoclip"
# Hugging Face Hub dataset id and split to pull samples from.
HF_DATASET = "uoft-cs/cifar100"
HF_SPLIT = "test"
# Column names in the HF dataset — presumably the image column and the
# fine-grained (100-class) label column; verify against the dataset card.
HF_IMAGE_KEY = "img"
HF_LABEL_KEY = "fine_label"
# Cap on samples loaded, to keep the demo fast.
NUM_SAMPLES = 200
# Euclidean baseline encoder (standard CLIP) vs. hyperbolic HyCoCLIP model.
CLIP_MODEL_ID = "openai/clip-vit-base-patch32"
HYCOCLIP_MODEL_ID = "hycoclip_vit_s"


def main() -> None:
    """Run the demo end to end.

    Loads a small CIFAR-100 subset from the Hugging Face Hub, computes two
    embedding spaces for it — CLIP (visualized with Euclidean geometry) and
    HyCoCLIP (visualized on the Poincaré ball) — and launches the HyperView
    UI in the browser.
    """
    print("Loading CIFAR-100 from Hugging Face...")
    ds = hv.Dataset(DATASET_NAME, persist=False)
    ds.add_from_huggingface(
        HF_DATASET,
        split=HF_SPLIT,
        image_key=HF_IMAGE_KEY,
        label_key=HF_LABEL_KEY,
        max_samples=NUM_SAMPLES,
    )
    print(f"Loaded {len(ds)} samples")

    # Euclidean baseline: CLIP embeddings in a flat visualization.
    euclidean_key = ds.compute_embeddings(CLIP_MODEL_ID)
    ds.compute_visualization(space_key=euclidean_key, geometry="euclidean")

    # Hyperbolic counterpart: HyCoCLIP embeddings on the Poincaré disk.
    # The hycoclip provider is resolved via the side-effect import above.
    spec = ModelSpec(provider="hycoclip", model_id=HYCOCLIP_MODEL_ID)
    poincare_key = ds.compute_embeddings(spec)
    ds.compute_visualization(space_key=poincare_key, geometry="poincare")

    # NOTE(review): the URL below is informational — hv.launch is not passed
    # a port here, so confirm 6262 is actually its default.
    print("Launching at http://127.0.0.1:6262")
    hv.launch(ds, open_browser=True)


# Standard entry-point guard: run the demo only when executed as a script.
if __name__ == "__main__":
    main()